Mi viene in mente questa possibile soluzione. Devi modificare un po' la libreria isgl3d.
I passi sono:
1.
Creare delegato per Isgl3dGLContext1:
In Isgl3dGLContext1.h
@protocol ScreenShooterDelegate;
#import <OpenGLES/ES1/gl.h>
#import <OpenGLES/ES1/glext.h>
#import "Isgl3dGLContext.h"

/// OpenGL ES 1.x context that can notify a delegate right before the
/// renderbuffer is presented, so the delegate can capture a screenshot
/// while the colour buffer still holds the finished frame.
@interface Isgl3dGLContext1 : Isgl3dGLContext {
// Screenshot delegate. NOTE(review): was __unsafe_unretained with an
// `assign` property — an object-typed delegate should be zeroing-weak so
// it nils out instead of dangling when the delegate is deallocated
// (requires iOS 5+; keep __unsafe_unretained only if iOS 4 must be supported).
NSObject<ScreenShooterDelegate>* __weak delegate;
GLuint _colorRenderBuffer;
@private
EAGLContext * _context;
// The OpenGL names for the framebuffer and renderbuffer used to render to this view
GLuint _defaultFrameBuffer;
GLuint _depthAndStencilRenderBuffer;
GLuint _depthRenderBuffer;
GLuint _stencilRenderBuffer;
// OpenGL MSAA buffers
GLuint _msaaFrameBuffer;
GLuint _msaaColorRenderBuffer;
GLuint _msaaDepthAndStencilRenderBuffer;
GLuint _msaaDepthRenderBuffer;
GLuint _msaaStencilRenderBuffer;
}
- (id) initWithLayer:(CAEAGLLayer *) layer;
/// Delegate notified from finalizeRender when a screenshot was requested.
@property (nonatomic, weak) NSObject<ScreenShooterDelegate>* delegate;
/// One-shot flag: set to YES to request a screenshot on the next render pass.
@property (nonatomic, assign) BOOL takePicture;
/// Exposes the colour renderbuffer name so a caller can bind it before glReadPixels.
@property (nonatomic, assign) GLuint colorRenderBuffer;
@end

@protocol ScreenShooterDelegate
@optional
/// Called just before presentRenderbuffer, while the colour buffer still
/// contains the finished frame.
- (void)takePicture;
@end
2.
Aggiungere questo codice in Isgl3dGLContext1.m:
// NOTE(review): `@synthesize delegate;` is required here. The header declares
// an explicit ivar named `delegate` and the finalizeRender code reads that
// ivar directly; without this line the compiler auto-synthesizes a separate
// `_delegate` ivar for the property, so a delegate assigned through
// `_glContext.delegate = self;` would never be seen (and the callback never fires).
@synthesize delegate;
@synthesize takePicture;
@synthesize colorRenderBuffer = _colorRenderBuffer;
Prima della riga [_context presentRenderbuffer: GL_RENDERBUFFER_OES]; nel metodo - (void) finalizeRender, aggiungere:
// One-shot screenshot hook: when the flag is armed, clear it immediately
// (so only one frame is captured) and let the delegate read the framebuffer
// before the renderbuffer is presented.
if(takePicture){
takePicture=NO;
// -takePicture is @optional in ScreenShooterDelegate, so probe first.
if([delegate respondsToSelector:@selector(takePicture)]){
[delegate takePicture];
}
}
3. Inserire questo codice nella classe in cui si desidera fare lo screenshot:
In Class.h add <ScreenShooterDelegate>
Nel metodo Class.m
// Disable MSAA before requesting the shot so glReadPixels sees the resolved
// colour buffer (re-enabled at the end of -takePicture).
[Isgl3dDirector sharedInstance].antiAliasingEnabled = NO;
Photos3DAppDelegate *appDelegate = (Photos3DAppDelegate *)[[UIApplication sharedApplication] delegate];
[appDelegate.inOutSceneView showSphere];
// Reach the GL context through the director's EAGL view.
Isgl3dEAGLView* eaglview=(Isgl3dEAGLView*)[[Isgl3dDirector sharedInstance] openGLView];
Isgl3dGLContext1 * _glContext=(Isgl3dGLContext1*)[eaglview glContext];
// Register as the screenshot delegate and arm the one-shot capture flag;
// the context calls back -takePicture during the next finalizeRender.
_glContext.delegate=self;
_glContext.takePicture=YES;
Nel metodo - (void) takePicture {} inserisci il codice di Apple e, alla fine del metodo, aggiungi [Isgl3dDirector sharedInstance].antiAliasingEnabled = YES; (nel caso in cui lo si utilizzi).
// Adapted from Apple Technical Q&A QA1704 ("OpenGL ES View Snapshot"):
//https://developer.apple.com/library/ios/#qa/qa1704/_index.html
// Delegate callback invoked by Isgl3dGLContext1 just before the renderbuffer
// is presented: reads the currently bound colour buffer into a UIImage,
// saves it to the photo album, and restores the anti-aliasing setting.
-(void)takePicture{
NSLog(@"Creating Foto");
GLint backingWidth, backingHeight;
Isgl3dEAGLView* eaglview=(Isgl3dEAGLView*)[[Isgl3dDirector sharedInstance] openGLView];
//Isgl3dGLContext1 * _glContext=(Isgl3dGLContext1*)[eaglview glContext];
//glBindRenderbufferOES(GL_RENDERBUFFER_OES, _glContext.colorRenderBuffer);
// Query the size of the renderbuffer currently bound by the render pass
// (this runs inside finalizeRender, before presentation).
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0, width = backingWidth, height = backingHeight;
// 4 bytes per pixel: RGBA, 8 bits per channel.
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
// Read pixel data from the framebuffer
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
// Create a CGImage with the pixel data
// If your OpenGL ES content is opaque, use kCGImageAlphaNoneSkipLast to ignore the alpha channel
// otherwise, use kCGImageAlphaPremultipliedLast
// NOTE: no release callback is passed, so `data` stays owned by this method
// and is freed explicitly below, per QA1704.
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
// OpenGL ES measures data in PIXELS
// Create a graphics context with the target size measured in POINTS
NSInteger widthInPoints, heightInPoints;
// Weak-link check: the symbol is NULL on OS versions that lack the API.
if (NULL != UIGraphicsBeginImageContextWithOptions) {
// On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
// Set the scale parameter to your OpenGL ES view's contentScaleFactor
// so that you get a high-resolution snapshot when its value is greater than 1.0
CGFloat scale = eaglview.contentScaleFactor;
widthInPoints = width/scale;
heightInPoints = height/scale;
UIGraphicsBeginImageContextWithOptions(CGSizeMake(widthInPoints, heightInPoints), NO, scale);
}
else {
// On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
widthInPoints = width;
heightInPoints = height;
UIGraphicsBeginImageContext(CGSizeMake(widthInPoints, heightInPoints));
}
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
// UIKit coordinate system is upside down to GL/Quartz coordinate system
// Flip the CGImage by rendering it to the flipped bitmap context
// The size of the destination area is measured in POINTS
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, widthInPoints, heightInPoints), iref);
// Retrieve the UIImage from the current context
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Clean up
// Freeing here matches QA1704: the image has been drawn and the provider
// was created without a release callback.
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
// Fire-and-forget save; pass a completion selector here if save errors matter.
UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil);
// Restore the MSAA setting the caller disabled before requesting the shot.
[Isgl3dDirector sharedInstance].antiAliasingEnabled = YES;
}
Nota: per me ha funzionato solo commentando glBindRenderbufferOES(GL_RENDERBUFFER_OES, _colorRenderbuffer);. Nel tuo caso puoi seguire questi passaggi con Isgl3dGLContext2 invece di Isgl3dGLContext1.
Sembra utile, ma da dove vengono visualizzati i valori di rosso, verde e blu? –
Questo è solo un enum per nominare gli offset (0, 1, 2, 3), ha dimenticato di aggiungere questo qui. Aggiornato lo snippet. –