I'm trying to apply a texture to each face of my box. Here's the code:
int boxWidth = 40;
Point3D location = new Point3D(0, 0, 0);
var meshBuilder = new MeshBuilder();
meshBuilder.AddBox(new Point3D(location.X, location.Y, location.Z), boxWidth, boxWidth, boxWidth);
PointCollection pntCol = new PointCollection();
// The same four texture coordinates are added for each of the six faces.
for (int face = 0; face < 6; face++)
{
    pntCol.Add(new Point(boxWidth, boxWidth));
    pntCol.Add(new Point(0, boxWidth));
    pntCol.Add(new Point(0, 0));
    pntCol.Add(new Point(boxWidth, 0));
}
meshBuilder.TextureCoordinates = pntCol;
ImageBrush brush = new ImageBrush();
brush.ImageSource = new BitmapImage(new Uri(@"mv-pallet-cubi.jpg", UriKind.Relative));
brush.TileMode = TileMode.Tile;
brush.ViewportUnits = BrushMappingMode.RelativeToBoundingBox;
brush.ViewboxUnits = BrushMappingMode.Absolute;
brush.Stretch = Stretch.None;
brush.AlignmentX = AlignmentX.Left;
brush.AlignmentY = AlignmentY.Top;
brush.Viewbox = new Rect(0, 0, brush.ImageSource.Width, brush.ImageSource.Height);
brush.Viewport = new Rect(0, 0, 1, 1);
DiffuseMaterial mat = new DiffuseMaterial(brush);
GeometryModel3D gModel3D = new GeometryModel3D { Geometry = meshBuilder.ToMesh(), Material = mat };
ModelVisual3D mvt = new ModelVisual3D();
mvt.Content = gModel3D;
viewport.Children.Add(mvt);
The width and height of my box are 40 and the texture is 90x90. Basically the effect I get is this:
Texture 90x90, Box 40x40
If I set the dimensions of my box to 90 (the same as the texture), the effect is what I want.
Texture 90x90, Box 90x90
Basically I want to stretch the whole texture over a box of any dimension. Any help?
Your texture coordinates need to be set to the 0 to 1 range instead of 0 to the box width.
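For example, the coordinate loop above could use normalized coordinates instead (a sketch of the suggested fix; everything else in the question's code stays as it is):
// Normalized (0..1) texture coordinates for each of the six faces,
// so the whole image is stretched across a face regardless of the box size.
for (int face = 0; face < 6; face++)
{
    pntCol.Add(new Point(1, 1));
    pntCol.Add(new Point(0, 1));
    pntCol.Add(new Point(0, 0));
    pntCol.Add(new Point(1, 0));
}
meshBuilder.TextureCoordinates = pntCol;
Depending on the brush settings above (Stretch.None with an absolute Viewbox), the ImageBrush may also need adjusting, but the coordinates are the part that has to be in the 0 to 1 range.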
I need to be able to draw horizontal lines against a background color, say red. I have the lines being drawn, but I don't know how to set the background color. Right now, the background is white.
GeometryGroup stdGeometryGroup = new GeometryGroup();
DrawingBrush db = new DrawingBrush();
GeometryDrawing stdDrawing = new GeometryDrawing(null, pen, stdGeometryGroup);
if (stdDrawing != null)
{
db.Drawing = stdDrawing;
db.ViewboxUnits = BrushMappingMode.Absolute;
db.ViewportUnits = BrushMappingMode.Absolute;
db.Viewbox = new Rect(0, 0, 30, 30);
db.Viewport = new Rect(0, 0, 4, 4);
db.TileMode = TileMode.Tile;
db.Stretch = Stretch.UniformToFill;
db.Transform = new RotateTransform(45, 0.5, 0.5);
stdGeometryGroup.Children.Add(new LineGeometry(new Point(0, 15), new Point(30, 15)));
menuItem.Background = db;
}
In this case, menuItem is a Telerik RadMenuItem. The Pen is black.
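One way this could be approached (a sketch only, not verified against the Telerik control): put the line into a DrawingGroup together with a red-filled rectangle that covers the Viewbox, and use that group as the brush's Drawing.
// Sketch: draw a red rectangle behind the line inside a DrawingGroup,
// reusing the pen, menuItem and 30x30 Viewbox from the question.
GeometryGroup lineGroup = new GeometryGroup();
lineGroup.Children.Add(new LineGeometry(new Point(0, 15), new Point(30, 15)));
DrawingGroup group = new DrawingGroup();
group.Children.Add(new GeometryDrawing(Brushes.Red, null, new RectangleGeometry(new Rect(0, 0, 30, 30))));
group.Children.Add(new GeometryDrawing(null, pen, lineGroup));
DrawingBrush db = new DrawingBrush(group);
db.ViewboxUnits = BrushMappingMode.Absolute;
db.ViewportUnits = BrushMappingMode.Absolute;
db.Viewbox = new Rect(0, 0, 30, 30);
db.Viewport = new Rect(0, 0, 4, 4);
db.TileMode = TileMode.Tile;
db.Stretch = Stretch.UniformToFill;
db.Transform = new RotateTransform(45, 0.5, 0.5);
menuItem.Background = db;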
Hi, I tried this:
public static BitmapSource RotateImage(Image b, float angle)
{
BitmapSource rotita = (BitmapSource)b.Source;
DrawingVisual drawingVisual = new DrawingVisual();
using (DrawingContext drawingContext = drawingVisual.RenderOpen())
{
var transform = new RotateTransform(angle);
drawingContext.PushTransform(transform);
drawingContext.DrawImage(rotita, new Rect(0,0, rotita.PixelWidth, rotita.PixelHeight));
drawingContext.Pop();
}
RenderTargetBitmap bmp = new RenderTargetBitmap(rotita.PixelWidth, rotita.PixelHeight, 96, 96, PixelFormats.Pbgra32);
bmp.Render(drawingVisual);
rotita = bmp;
return rotita;
}
But this does not work correctly. I have this image at 0 degrees, and after rotating it by 30 degrees I get this image.
What can I do to make the picture complete after rotation?
DrawingVisual drawingVisual = new DrawingVisual();
using (DrawingContext drawingContext = drawingVisual.RenderOpen())
{
drawingContext.DrawImage(back, new Rect(0, 0, imageWidth, imageHeight));
drawingContext.DrawImage(element, new Rect(x,y, elementWidth, elementHeight));
}
RenderTargetBitmap bmp = new RenderTargetBitmap(imageWidth, imageHeight, 96, 96, PixelFormats.Pbgra32);
bmp.Render(drawingVisual);
image.Source = bmp;
Here, element is the rotated image.
The following method creates a composed bitmap from two others, where the second one is rotated around their common center point.
The two crucial parts of this method are the calculation of the transformed bounds of the rotated bitmap, and the alignment of the two bitmaps at their common center point.
private BitmapSource ComposeImage(
BitmapSource image1, BitmapSource image2, double rotationAngle)
{
var rotation = new RotateTransform(rotationAngle);
var size1 = new Size(image1.PixelWidth, image1.PixelHeight);
var size2 = new Size(image2.PixelWidth, image2.PixelHeight);
var center1 = new Vector(size1.Width / 2, size1.Height / 2);
var center2 = new Vector(size2.Width / 2, size2.Height / 2);
var rotatedSize = rotation.TransformBounds(new Rect(size2)).Size;
var totalSize = new Size(
Math.Max(size1.Width, rotatedSize.Width),
Math.Max(size1.Height, rotatedSize.Height));
var center = new Point(totalSize.Width / 2, totalSize.Height / 2);
rotation.CenterX = center.X;
rotation.CenterY = center.Y;
var dv = new DrawingVisual();
using (var dc = dv.RenderOpen())
{
dc.DrawImage(image1, new Rect(center - center1, size1));
dc.PushTransform(rotation);
dc.DrawImage(image2, new Rect(center - center2, size2));
}
var rtb = new RenderTargetBitmap(
(int)totalSize.Width, (int)totalSize.Height, 96, 96, PixelFormats.Default);
rtb.Render(dv);
return rtb;
}
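A minimal usage sketch (the file names are hypothetical; image is the Image control from the question's snippet):
// Sketch: compose a background bitmap with an overlay rotated by 30 degrees.
BitmapSource background = new BitmapImage(new Uri("background.png", UriKind.Relative));
BitmapSource overlay = new BitmapImage(new Uri("overlay.png", UriKind.Relative));
image.Source = ComposeImage(background, overlay, 30);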
I am rendering a rectangle over a TextBox with the drawing context, using the following code.
drawingContext.DrawRoundedRectangle(
new SolidColorBrush(Color.FromRgb(255, 246, 178)), null,
new Rect(new Point(rect.TopRight.X + 20, rect.TopRight.Y),
new Size(130, rect.Height)),
3,
3);
I want to render a shadow on this rectangle that I draw programmatically in WPF. How can I do it?
Add an effect to the Visual. Try something like this:
public class MyControl: Control
{
private Rect rect = new Rect(100, 100, 200, 200);
protected override void OnRender(DrawingContext drawingContext)
{
var r = new Rect(new Point(rect.TopRight.X + 20, rect.TopRight.Y),
new Size(130, rect.Height));
var brush = new SolidColorBrush(Color.FromRgb(255, 246, 178));
DropShadowEffect effect = new DropShadowEffect { Color = Colors.Gainsboro, Direction = 30 };
this.Effect = effect;
drawingContext.DrawRoundedRectangle(brush, null, r, 3, 3);
base.OnRender(drawingContext);
}
}
This gives me:
EDIT
If you do not have a UI element to attach the Effect to, then you need to draw the shadow yourself.
Just add another rectangle under the main one, with a gradient brush that becomes transparent.
protected override void OnRender(DrawingContext drawingContext)
{
var r = new Rect(new Point(rect.TopRight.X + 20, rect.TopRight.Y),
new Size(130, rect.Height));
var r2 = new Rect(new Point(rect.TopRight.X + 25, rect.TopRight.Y+5),
new Size(130, rect.Height));
var brush = new SolidColorBrush(Color.FromRgb(255, 246, 178));
var gradientBrush = new LinearGradientBrush(Colors.Black, Colors.Gray, 30);
drawingContext.DrawRoundedRectangle(gradientBrush, null, r2, 3, 3);
drawingContext.DrawRoundedRectangle(brush, null, r, 3, 3);
base.OnRender(drawingContext);
}
This will give you something like this:
I want to create a snapshot of the Canvas area in my application. I'm using a VisualBrush to get the snapshot and saving it with a PngBitmapEncoder, but the resulting PNG is just an empty black image. I'm not sure whether the issue is with the BitmapSource that is created or with the encoder. Here is the code I'm using:
public void ConvertToBitmapSource(UIElement element)
{
var target = new RenderTargetBitmap((int)(element.RenderSize.Width), (int)(element.RenderSize.Height), 96, 96, PixelFormats.Pbgra32);
var brush = new VisualBrush(element);
var visual = new DrawingVisual();
var drawingContext = visual.RenderOpen();
drawingContext.DrawRectangle(brush, null, new Rect(new Point(0, 0),
new Point(element.RenderSize.Width, element.RenderSize.Height)));
drawingContext.Close();
target.Render(visual);
PngBitmapEncoder encoder = new PngBitmapEncoder();
BitmapFrame outputFrame = BitmapFrame.Create(target);
encoder.Frames.Add(outputFrame);
using (FileStream file = File.OpenWrite("TestImage.png"))
{
encoder.Save(file);
}
}
Not sure why exactly your code isn't working. This works:
public void WriteToPng(UIElement element, string filename)
{
var rect = new Rect(element.RenderSize);
var visual = new DrawingVisual();
using (var dc = visual.RenderOpen())
{
dc.DrawRectangle(new VisualBrush(element), null, rect);
}
var bitmap = new RenderTargetBitmap(
(int)rect.Width, (int)rect.Height, 96, 96, PixelFormats.Default);
bitmap.Render(visual);
var encoder = new PngBitmapEncoder();
encoder.Frames.Add(BitmapFrame.Create(bitmap));
using (var file = File.OpenWrite(filename))
{
encoder.Save(file);
}
}
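A minimal usage sketch (myCanvas is a placeholder for whatever element you want to capture):
// Sketch: capture the element and write it to disk.
WriteToPng(myCanvas, "Snapshot.png");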
Thank you both for the question and the answer.
For the benefit of others looking for the same answer: I found that Clemens' approach leaves a black band in the image, with the image shifted either down or right, as if the element were not rendered at the correct position in the bitmap.
So I had to use the VisualBrush as Amar suggested.
Here is the code that worked for me:
RenderTargetBitmap RenderVisual(UIElement elt)
{
RenderTargetBitmap rtb = new RenderTargetBitmap((int)elt.RenderSize.Width,
(int)elt.RenderSize.Height, 96, 96, PixelFormats.Default);
VisualBrush sourceBrush = new VisualBrush(elt);
DrawingVisual drawingVisual = new DrawingVisual();
using (DrawingContext drawingContext = drawingVisual.RenderOpen())
{
drawingContext.DrawRectangle(sourceBrush, null, new Rect(new Point(0, 0),
new Point(elt.RenderSize.Width, elt.RenderSize.Height)));
}
rtb.Render(drawingVisual);
return rtb;
}
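To save the result, the same encoder pattern as above can be reused (a sketch; canvasArea is a placeholder element name):
// Sketch: render the visual and save it as a PNG.
RenderTargetBitmap rtb = RenderVisual(canvasArea);
PngBitmapEncoder encoder = new PngBitmapEncoder();
encoder.Frames.Add(BitmapFrame.Create(rtb));
using (FileStream file = File.OpenWrite("Snapshot.png"))
{
    encoder.Save(file);
}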
I'm trying to add a Viewport2DVisual3D to a Viewport3D in code, but the visual isn't showing up. Any help understanding why not would be appreciated. The following is the code for the main window.
Is it sufficient to just add the Viewport2DVisual3D to the children of the Viewport3D in order for it to be rendered?
public partial class Window1 : System.Windows.Window
{
public Window1()
{
InitializeComponent();
this.Loaded += new RoutedEventHandler(temp);
}
public void temp(object sender, RoutedEventArgs e)
{
Viewport2DVisual3D test = new Viewport2DVisual3D();
MeshGeometry3D testGeometry = new MeshGeometry3D();
Vector3D CameraLookDirection = Main_Target_CameraOR20.LookDirection;
// Calculate the Positions based on the Camera
Point3DCollection myPoint3DCollection = new Point3DCollection();
myPoint3DCollection.Add(new Point3D(-1, 1, 0));
myPoint3DCollection.Add(new Point3D(-1, -1, 0));
myPoint3DCollection.Add(new Point3D(1, -1, 0));
myPoint3DCollection.Add(new Point3D(1, 1, 0));
testGeometry.Positions = myPoint3DCollection;
PointCollection myPointCollection = new PointCollection();
myPointCollection.Add(new Point(0, 0));
myPointCollection.Add(new Point(0, 1));
myPointCollection.Add(new Point(1, 1));
myPointCollection.Add(new Point(1, 0));
testGeometry.TextureCoordinates = myPointCollection;
Int32Collection triangleIndicesCollection = new Int32Collection();
triangleIndicesCollection.Add(0);
triangleIndicesCollection.Add(1);
triangleIndicesCollection.Add(2);
triangleIndicesCollection.Add(2);
triangleIndicesCollection.Add(3);
triangleIndicesCollection.Add(0);
testGeometry.TriangleIndices = triangleIndicesCollection;
DiffuseMaterial myDiffuseMaterial = new DiffuseMaterial(Brushes.White);
Viewport2DVisual3D.SetIsVisualHostMaterial(myDiffuseMaterial, true);
Transform3DGroup myTransform3DGroup = new Transform3DGroup();
ScaleTransform3D myScaleTransform3D = new ScaleTransform3D();
myScaleTransform3D.ScaleX = 2;
myScaleTransform3D.ScaleY = 2;
myScaleTransform3D.ScaleZ = 2;
TranslateTransform3D myTranslateTransform3D = new TranslateTransform3D();
myTranslateTransform3D.OffsetX = -27;
myTranslateTransform3D.OffsetY = 13;
myTranslateTransform3D.OffsetZ = 6;
RotateTransform3D rotateTransform = new RotateTransform3D()
{
Rotation = new AxisAngleRotation3D
{
Angle = -50,
Axis = new Vector3D(0, 1, 0)
}
};
myTransform3DGroup.Children.Add(myTranslateTransform3D);
myTransform3DGroup.Children.Add(myScaleTransform3D);
myTransform3DGroup.Children.Add(rotateTransform);
test.Transform = myTransform3DGroup;
Button myButton = new Button();
myButton.Content = "Test Button";
test.Material = myDiffuseMaterial;
test.Geometry = testGeometry;
test.Visual = myButton;
ZAM3DViewport3D.Children.Add(test);
}
}
It turns out that the problem was the Offset value: the translation had moved the visual out of the camera's view. So, it is sufficient to add the child to the Viewport3D to have it render. Cheers
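For illustration only, the kind of change this implies (the values below are hypothetical; the point is that the translation must keep the quad inside the camera's view):
// Sketch with hypothetical offsets: keep the quad near the origin so it
// stays inside the camera's view frustum.
myTranslateTransform3D.OffsetX = 0;
myTranslateTransform3D.OffsetY = 0;
myTranslateTransform3D.OffsetZ = 0;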