Save OpenGL drawn item as an image

I have downloaded the GLPaint sample code from Apple's developer website to draw pictures on a canvas using OpenGL.

I have made many changes to the GLPaint application to meet my requirements. Now I would like to save the drawn item to the photo library as an image.

I know the method for saving an image to the photo library, so I tried to create the corresponding image file after drawing a picture. What is a good way to do this? Any help on this is highly appreciated.
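For reference, the save step the question alludes to is presumably UIImageWriteToSavedPhotosAlbum. A minimal sketch, assuming the drawing is already available as a UIImage (the method names below are hypothetical, added only for illustration):

// Minimal sketch: write an existing UIImage to the photo library and observe the result.
- (void)saveImageToPhotoLibrary:(UIImage *)drawnImage {
    UIImageWriteToSavedPhotosAlbum(drawnImage, self,
        @selector(image:didFinishSavingWithError:contextInfo:), NULL);
}

// Completion callback with the documented signature.
- (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error != nil) {
        NSLog(@"Saving to the photo library failed: %@", error);
    }
}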

The code details are described below.

PaintingView.h

EAGLContext *context;

// OpenGL names for the renderbuffer and framebuffers used to render to this view
GLuint viewRenderbuffer, viewFramebuffer;

// OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist)
GLuint depthRenderbuffer;

GLuint brushTexture;
CGPoint location;
CGPoint previousLocation;

PaintingView.m

// Handles the start of a touch
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGRect bounds = [self bounds];
    UITouch *touch = [[event touchesForView:self] anyObject];
    firstTouch = YES;
    // Convert touch point from UIView referential to OpenGL one (upside-down flip)
    location = [touch locationInView:self];
    location.y = bounds.size.height - location.y;
}

// Handles the continuation of a touch.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGRect bounds = [self bounds];
    UITouch *touch = [[event touchesForView:self] anyObject];

    // Convert touch point from UIView referential to OpenGL one (upside-down flip)
    if (firstTouch) {
        firstTouch = NO;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
    } else {
        location = [touch locationInView:self];
        location.y = bounds.size.height - location.y;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
    }

    // Render the stroke
    [self renderLineFromPoint:previousLocation toPoint:location];
}

// Handles the end of a touch event when the touch is a tap.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGRect bounds = [self bounds];
    UITouch *touch = [[event touchesForView:self] anyObject];
    if (firstTouch) {
        firstTouch = NO;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
        [self renderLineFromPoint:previousLocation toPoint:location];
    }
}

// Draws a line onscreen based on where the user touches
- (void)renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
    static GLfloat *vertexBuffer = NULL;
    static NSUInteger vertexMax = 64;
    NSUInteger vertexCount = 0, count, i;

    [EAGLContext setCurrentContext:context];
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);

    // Convert locations from points to pixels
    CGFloat scale = self.contentScaleFactor;
    start.x *= scale;
    start.y *= scale;
    end.x *= scale;
    end.y *= scale;

    // Allocate vertex array buffer
    if (vertexBuffer == NULL)
        vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));

    // Add points to the buffer so there are drawing points every X pixels
    count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
    for (i = 0; i < count; ++i) {
        if (vertexCount == vertexMax) {
            vertexMax = 2 * vertexMax;
            vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
        }
        vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
        vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
        vertexCount += 1;
    }

    // Render the vertex array
    glVertexPointer(2, GL_FLOAT, 0, vertexBuffer);
    glDrawArrays(GL_POINTS, 0, vertexCount);

    // Display the buffer
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}

// Erases the screen
- (void)erase
{
    [EAGLContext setCurrentContext:context];

    // Clear the buffer
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glClear(GL_COLOR_BUFFER_BIT);

    // Display the buffer
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}

// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder *)coder
{
    CGImageRef brushImage;
    CGContextRef brushContext;
    GLubyte *brushData;
    size_t width, height;

    if ((self = [super initWithCoder:coder])) {
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;

        eaglLayer.opaque = YES;
        // In this application, we want to retain the EAGLDrawable contents after a call to presentRenderbuffer.
        eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,
                                        kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
                                        nil];

        context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];

        if (!context || ![EAGLContext setCurrentContext:context]) {
            [self release];
            return nil;
        }

        // Create a texture from an image
        // First create a UIImage object from the data in an image file, and then extract the Core Graphics image
        brushImage = [UIImage imageNamed:@"Particle.png"].CGImage;

        // Get the width and height of the image
        width = CGImageGetWidth(brushImage);
        height = CGImageGetHeight(brushImage);

        // Texture dimensions must be a power of 2. If you write an application that allows users to supply an image,
        // you'll want to add code that checks the dimensions and takes appropriate action if they are not a power of 2.

        // Make sure the image exists
        if (brushImage) {
            // Allocate memory needed for the bitmap context
            brushData = (GLubyte *)calloc(width * height * 4, sizeof(GLubyte));
            // Use the bitmap creation function provided by the Core Graphics framework.
            brushContext = CGBitmapContextCreate(brushData, width, height, 8, width * 4, CGImageGetColorSpace(brushImage), kCGImageAlphaPremultipliedLast);
            // After you create the context, you can draw the image to the context.
            CGContextDrawImage(brushContext, CGRectMake(0.0, 0.0, (CGFloat)width, (CGFloat)height), brushImage);
            // You don't need the context at this point, so you need to release it to avoid memory leaks.
            CGContextRelease(brushContext);
            // Use OpenGL ES to generate a name for the texture.
            glGenTextures(1, &brushTexture);
            // Bind the texture name.
            glBindTexture(GL_TEXTURE_2D, brushTexture);
            // Set the texture parameters to use a minifying filter and a linear filter (weighted average)
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            // Specify a 2D texture image, providing a pointer to the image data in memory
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, brushData);
            // Release the image data; it's no longer needed
            free(brushData);
        }

        // Set the view's scale factor
        self.contentScaleFactor = 1.0;

        // Set up OpenGL states
        glMatrixMode(GL_PROJECTION);
        CGRect frame = self.bounds;
        CGFloat scale = self.contentScaleFactor;
        // Set up the viewport in pixels
        glOrthof(0, frame.size.width * scale, 0, frame.size.height * scale, -1, 1);
        glViewport(0, 0, frame.size.width * scale, frame.size.height * scale);
        glMatrixMode(GL_MODELVIEW);

        glDisable(GL_DITHER);
        glEnable(GL_TEXTURE_2D);
        glEnableClientState(GL_VERTEX_ARRAY);
        glEnable(GL_BLEND);
        // Set a blending function appropriate for premultiplied alpha pixel data
        glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
        glEnable(GL_POINT_SPRITE_OES);
        glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
        glPointSize(width / kBrushScale);

        // Make sure to start with a cleared buffer
        needsErase = YES;
    }

    return self;
}

AppDelegate.h


PaintingWindow *window;     // a class inherited from UIWindow
PaintingView *drawingView;

@property (nonatomic, retain) IBOutlet PaintingWindow *window;
@property (nonatomic, retain) IBOutlet PaintingView *drawingView;

@synthesize window;
@synthesize drawingView;

AppDelegate.m

- (void)applicationDidFinishLaunching:(UIApplication *)application
{
    CGRect rect = [[UIScreen mainScreen] applicationFrame];
    CGFloat components[3];

    // Create a segmented control so that the user can choose the brush color.
    UISegmentedControl *segmentedControl = [[UISegmentedControl alloc] initWithItems:
                                            [NSArray arrayWithObjects:
                                             [UIImage imageNamed:@"Red.png"],
                                             [UIImage imageNamed:@"Yellow.png"],
                                             [UIImage imageNamed:@"Green.png"],
                                             [UIImage imageNamed:@"Blue.png"],
                                             [UIImage imageNamed:@"Purple.png"],
                                             nil]];

    // Compute a rectangle that is positioned correctly for the segmented control you'll use as a brush color palette
    //CGRect frame = CGRectMake(rect.origin.x + kLeftMargin, rect.size.height - kPaletteHeight - kTopMargin, rect.size.width - (kLeftMargin + kRightMargin), kPaletteHeight);
    CGRect frame = CGRectMake(50, 22, (rect.size.width - (kLeftMargin + kRightMargin)) - 20, kPaletteHeight);
    segmentedControl.frame = frame;
    // When the user chooses a color, the method changeBrushColor: is called.
    [segmentedControl addTarget:self action:@selector(changeBrushColor:) forControlEvents:UIControlEventValueChanged];
    segmentedControl.segmentedControlStyle = UISegmentedControlStyleBar;
    // Make sure the color of the control complements the black background
    segmentedControl.tintColor = [UIColor darkGrayColor];
    // Set the third color (index values start at 0)
    segmentedControl.selectedSegmentIndex = 2;

    // Add the control to the window
    [window addSubview:segmentedControl];
    // Now that the control is added, you can release it
    [segmentedControl release];

    [self addBackgroundSegmentControll];

    // Define a starting color
    HSL2RGB((CGFloat)2.0 / (CGFloat)kPaletteSize, kSaturation, kLuminosity, &components[0], &components[1], &components[2]);
    // Defer to the OpenGL view to set the brush color
    [drawingView setBrushColorWithRed:components[0] green:components[1] blue:components[2]];

    // Look in the Info.plist file and you'll see the status bar is hidden
    // Set the style to black so it matches the background of the application
    [application setStatusBarStyle:UIStatusBarStyleBlackTranslucent animated:NO];
    // Now show the status bar, but animate to the style.
    [application setStatusBarHidden:NO withAnimation:YES];

    // Load the sounds
    NSBundle *mainBundle = [NSBundle mainBundle];
    erasingSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Erase" ofType:@"caf"]];
    selectSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Select" ofType:@"caf"]];

    [window setFrame:CGRectMake(0, 0, 768, 1024)];
    drawingView.frame = CGRectMake(0, 0, 768, 1024);

    // Erase the view when receiving a notification named "shake" from the NSNotificationCenter object
    // The "shake" notification is posted by the PaintingWindow object when the user shakes the device
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(eraseView) name:@"shake" object:nil];
}
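If you also want a UI control that triggers the save, one entirely hypothetical option (not part of the sample) is to add a button in applicationDidFinishLaunching: that calls a capture method on the painting view, such as the -captureToPhotoAlbum method shown in the accepted answer below:

// Hypothetical save button, added alongside the palette control; assumes
// PaintingView implements -captureToPhotoAlbum (see the accepted answer below).
UIButton *saveButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
saveButton.frame = CGRectMake(10, 22, 60, kPaletteHeight);
[saveButton setTitle:@"Save" forState:UIControlStateNormal];
[saveButton addTarget:drawingView
               action:@selector(captureToPhotoAlbum)
     forControlEvents:UIControlEventTouchUpInside];
[window addSubview:saveButton];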

Accepted answer

An improved version of Ramshad's answer:

This version has no memory leaks and works on newer iOS versions and with different view sizes and displays (Retina and non-Retina).

CGFloat scale = [[UIScreen mainScreen] scale]; // use nativeScale on iOS 8.0+
CGSize imageSize = CGSizeMake((scale * view.frame.size.width), (scale * view.frame.size.height));

NSUInteger length = imageSize.width * imageSize.height * 4;
GLubyte *buffer = (GLubyte *)malloc(length * sizeof(GLubyte));
if (buffer == NULL)
    return nil;

glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer, length, NULL);

int bitsPerComponent = 8;
int bitsPerPixel = 32;
int bytesPerRow = 4 * imageSize.width;
CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;

CGImageRef imageRef = CGImageCreate(imageSize.width, imageSize.height, bitsPerComponent, bitsPerPixel, bytesPerRow,
                                    colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);

UIGraphicsBeginImageContext(imageSize);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0.0, 0.0, imageSize.width, imageSize.height), imageRef);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();

CGImageRelease(imageRef);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider);
free(buffer);

return image;
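Note that glReadPixels reads from whatever framebuffer is currently bound, so this snippet assumes the painting view's context and framebuffer are current when it runs (view here refers to the painting view itself). A hedged sketch of that assumed preamble, using the PaintingView ivars shown earlier; because -initWithCoder: requests kEAGLDrawablePropertyRetainedBacking, the drawn pixels are still available after calling presentRenderbuffer:

// Assumed preamble, run inside PaintingView.m before the glReadPixels call above.
[EAGLContext setCurrentContext:context];
glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);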

Please refer to the link below to save an OpenGL-drawn item as an image in the photo library.

Save an OpenGL drawn item as an image

Code details:

Call [self captureToPhotoAlbum]; after adding the code below.

- (void)captureToPhotoAlbum {
    UIImage *image = [self glToUIImage];
    UIImageWriteToSavedPhotosAlbum(image, self, nil, nil);
}

- (UIImage *)glToUIImage {
    NSInteger myDataLength = 320 * 480 * 4;

    // allocate array and read pixels into it.
    GLubyte *buffer = (GLubyte *)malloc(myDataLength);
    glReadPixels(0, 0, 320, 480, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

    // gl renders "upside down", so swap top to bottom into a new array.
    // there's gotta be a better way, but this works.
    GLubyte *buffer2 = (GLubyte *)malloc(myDataLength);
    for (int y = 0; y < 480; y++) {
        for (int x = 0; x < 320 * 4; x++) {
            buffer2[(479 - y) * 320 * 4 + x] = buffer[y * 4 * 320 + x];
        }
    }

    // make data provider with data.
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, NULL);

    // prep the ingredients
    int bitsPerComponent = 8;
    int bitsPerPixel = 32;
    int bytesPerRow = 4 * 320;
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault;
    CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;

    // make the cgimage
    CGImageRef imageRef = CGImageCreate(320, 480, bitsPerComponent, bitsPerPixel, bytesPerRow,
                                        colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);

    // then make the uiimage from that
    UIImage *myImage = [UIImage imageWithCGImage:imageRef];
    return myImage;
}

For iPad, or to fix the scaling issue, change every width of 320 to 640 and every height of 480 to 960; adjust the width and height values to match your own scaling. Also manage the memory (free the buffers).
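Regarding the final note about managing memory: as written, -glToUIImage leaks both malloc'd buffers. One hedged way to address this (a sketch of changes to the code above, not part of the original answer) is to free the unflipped buffer as soon as the flip is done and hand ownership of the flipped buffer to the data provider through a release callback:

// Release callback so the data provider frees the pixel buffer once the image no longer needs it.
static void releasePixelData(void *info, const void *data, size_t size) {
    free((void *)data);
}

// ... inside -glToUIImage, after the flip loop:
free(buffer); // the unflipped copy is no longer needed
CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, releasePixelData);

// ... after creating the UIImage:
CGImageRelease(imageRef);
CGColorSpaceRelease(colorSpaceRef);
CGDataProviderRelease(provider); // buffer2 is freed via releasePixelData when the image is released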

Thanks.
