Record the drawing as an m4v video file - OpenGL

Posted 2020-05-21 07:39

Question:

I have downloaded the GLPaint sample code from the Apple developer website to draw pictures on a canvas using OpenGL.

I have modified the GLPaint application to meet my requirements.

Now I need to record the drawing as an m4v video file that also includes the spoken instructions I give while drawing the image.

For example:

  • After I click the Draw and Record button, I start drawing a circle and say, "This is a circle".

  • After I click the Done button, the result should be a video file that includes the drawing actions along with the voice saying "This is a circle".

I have researched this topic but haven't found a good way to implement this functionality.

Code:

PaintingView.h

EAGLContext *context;

// OpenGL names for the renderbuffer and framebuffers used to render to this view
GLuint viewRenderbuffer, viewFramebuffer;

// OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist)
GLuint depthRenderbuffer;

GLuint    brushTexture;
CGPoint    location;
CGPoint    previousLocation;

PaintingView.m

// Handles the start of a touch
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGRect                bounds = [self bounds];
    UITouch*    touch = [[event touchesForView:self] anyObject];
    firstTouch = YES;
    // Convert touch point from UIView referential to OpenGL one (upside-down flip)
    location = [touch locationInView:self];
    location.y = bounds.size.height - location.y;
}

// Handles the continuation of a touch.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{ 

    CGRect                bounds = [self bounds];
    UITouch*            touch = [[event touchesForView:self] anyObject];

    // Convert touch point from UIView referential to OpenGL one (upside-down flip)
    if (firstTouch) {
        firstTouch = NO;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
    } else {
        location = [touch locationInView:self];
        location.y = bounds.size.height - location.y;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
    }

    // Render the stroke
    [self renderLineFromPoint:previousLocation toPoint:location];
}

// Handles the end of a touch event when the touch is a tap.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    CGRect                bounds = [self bounds];
    UITouch*    touch = [[event touchesForView:self] anyObject];
    if (firstTouch) {
        firstTouch = NO;
        previousLocation = [touch previousLocationInView:self];
        previousLocation.y = bounds.size.height - previousLocation.y;
        [self renderLineFromPoint:previousLocation toPoint:location];
    }
}



// Draws a line onscreen based on where the user touches
- (void) renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
    static GLfloat*        vertexBuffer = NULL;
    static NSUInteger    vertexMax = 64;
    NSUInteger            vertexCount = 0,
                        count,
                        i;

    [EAGLContext setCurrentContext:context];
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);

    // Convert locations from Points to Pixels
    CGFloat scale = self.contentScaleFactor;
    start.x *= scale;
    start.y *= scale;
    end.x *= scale;
    end.y *= scale;

    // Allocate vertex array buffer
    if(vertexBuffer == NULL)
        vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));

    // Add points to the buffer so there are drawing points every X pixels
    count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / kBrushPixelStep), 1);
    for(i = 0; i < count; ++i) {
        if(vertexCount == vertexMax) {
            vertexMax = 2 * vertexMax;
            vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
        }

        vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
        vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
        vertexCount += 1;
    }

    // Render the vertex array
    glVertexPointer(2, GL_FLOAT, 0, vertexBuffer);
    glDrawArrays(GL_POINTS, 0, vertexCount);

    // Display the buffer
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}


// Erases the screen
- (void) erase
{
    [EAGLContext setCurrentContext:context];

    // Clear the buffer
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
    glClearColor(0.0, 0.0, 0.0, 0.0);
    glClear(GL_COLOR_BUFFER_BIT);

    // Display the buffer
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER_OES];
}



// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder {


    CGImageRef        brushImage;
    CGContextRef    brushContext;
    GLubyte            *brushData;
    size_t            width, height;

    if ((self = [super initWithCoder:coder])) {
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;

        eaglLayer.opaque = YES;
        // In this application, we want to retain the EAGLDrawable contents after a call to presentRenderbuffer.
        eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];

        context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];

        if (!context || ![EAGLContext setCurrentContext:context]) {
            [self release];
            return nil;
        }

        // Create a texture from an image
        // First create a UIImage object from the data in an image file, and then extract the Core Graphics image
        brushImage = [UIImage imageNamed:@"Particle.png"].CGImage;

        // Get the width and height of the image
        width = CGImageGetWidth(brushImage);
        height = CGImageGetHeight(brushImage);

        // Texture dimensions must be a power of 2. If you write an application that allows users to supply an image,
        // you'll want to add code that checks the dimensions and takes appropriate action if they are not a power of 2.
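        // (Illustrative sketch, not part of the original sample: one way to verify
        // that the brush texture dimensions are powers of two. A dimension is a
        // power of two exactly when it has a single bit set.)
        if (((width & (width - 1)) != 0) || ((height & (height - 1)) != 0)) {
            NSLog(@"Brush image is %zu x %zu; non-power-of-two textures may not render correctly under OpenGL ES 1.1.", width, height);
        }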

        // Make sure the image exists
        if(brushImage) {
            // Allocate  memory needed for the bitmap context
            brushData = (GLubyte *) calloc(width * height * 4, sizeof(GLubyte));
            // Use the bitmap creation function provided by the Core Graphics framework.
            brushContext = CGBitmapContextCreate(brushData, width, height, 8, width * 4, CGImageGetColorSpace(brushImage), kCGImageAlphaPremultipliedLast);
            // After you create the context, you can draw the  image to the context.
            CGContextDrawImage(brushContext, CGRectMake(0.0, 0.0, (CGFloat)width, (CGFloat)height), brushImage);
            // You don't need the context at this point, so you need to release it to avoid memory leaks.
            CGContextRelease(brushContext);
            // Use OpenGL ES to generate a name for the texture.
            glGenTextures(1, &brushTexture);
            // Bind the texture name.
            glBindTexture(GL_TEXTURE_2D, brushTexture);
            // Set the texture parameters to use a minifying filter and a linear filter (weighted average)
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            // Specify a 2D texture image, providing a pointer to the image data in memory
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, brushData);
            // Release  the image data; it's no longer needed
            free(brushData);
        }

        // Set the view's scale factor
        self.contentScaleFactor = 1.0;

        // Setup OpenGL states
        glMatrixMode(GL_PROJECTION);
        CGRect frame = self.bounds;
        CGFloat scale = self.contentScaleFactor;
        // Setup the view port in Pixels
        glOrthof(0, frame.size.width * scale, 0, frame.size.height * scale, -1, 1);
        glViewport(0, 0, frame.size.width * scale, frame.size.height * scale);
        glMatrixMode(GL_MODELVIEW);

        glDisable(GL_DITHER);
        glEnable(GL_TEXTURE_2D);
        glEnableClientState(GL_VERTEX_ARRAY);

        glEnable(GL_BLEND);
        // Set a blending function appropriate for premultiplied alpha pixel data
        glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);

        glEnable(GL_POINT_SPRITE_OES);
        glTexEnvf(GL_POINT_SPRITE_OES, GL_COORD_REPLACE_OES, GL_TRUE);
        glPointSize(width / kBrushScale);

        // Make sure to start with a cleared buffer
        needsErase = YES;



    }

    return self;
}

AppDelegate.h

PaintingWindow *window;        // a subclass of UIWindow
PaintingView *drawingView;

@property (nonatomic, retain) IBOutlet PaintingWindow *window;
@property (nonatomic, retain) IBOutlet PaintingView *drawingView;

AppDelegate.m

@synthesize window;
@synthesize drawingView;

- (void) applicationDidFinishLaunching:(UIApplication*)application
{
    CGRect                    rect = [[UIScreen mainScreen] applicationFrame];
    CGFloat                    components[3];

    // Create a segmented control so that the user can choose the brush color.
    UISegmentedControl *segmentedControl = [[UISegmentedControl alloc] initWithItems:
                                            [NSArray arrayWithObjects:
                                                [UIImage imageNamed:@"Red.png"],
                                                [UIImage imageNamed:@"Yellow.png"],
                                                [UIImage imageNamed:@"Green.png"],
                                                [UIImage imageNamed:@"Blue.png"],
                                                [UIImage imageNamed:@"Purple.png"],
                                                nil]];

    // Compute a rectangle that is positioned correctly for the segmented control you'll use as a brush color palette
    //CGRect frame = CGRectMake(rect.origin.x + kLeftMargin, rect.size.height - kPaletteHeight - kTopMargin, rect.size.width - (kLeftMargin + kRightMargin), kPaletteHeight);
    CGRect frame = CGRectMake(50, 22, (rect.size.width - (kLeftMargin + kRightMargin)) - 20, kPaletteHeight);
    segmentedControl.frame = frame;
    // When the user chooses a color, the method changeBrushColor: is called.
    [segmentedControl addTarget:self action:@selector(changeBrushColor:) forControlEvents:UIControlEventValueChanged];
    segmentedControl.segmentedControlStyle = UISegmentedControlStyleBar;
    // Make sure the tint color complements the black background
    segmentedControl.tintColor = [UIColor darkGrayColor];
    // Set the third color (index values start at 0)
    segmentedControl.selectedSegmentIndex = 2;

    // Add the control to the window
    [window addSubview:segmentedControl];
    // Now that the control is added, you can release it
    [segmentedControl release];


    [self addBackgroundSegmentControll];

    // Define a starting color
    HSL2RGB((CGFloat) 2.0 / (CGFloat)kPaletteSize, kSaturation, kLuminosity, &components[0], &components[1], &components[2]);
    // Defer to the OpenGL view to set the brush color
    [drawingView setBrushColorWithRed:components[0] green:components[1] blue:components[2]];

    // Look in the Info.plist file and you'll see the status bar is hidden
    // Set the style to black so it matches the background of the application
    [application setStatusBarStyle:UIStatusBarStyleBlackTranslucent animated:NO];
    // Now show the status bar, animating to the new style.
    [application setStatusBarHidden:NO withAnimation:YES];

    // Load the sounds
    NSBundle *mainBundle = [NSBundle mainBundle];   
    erasingSound = [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Erase" ofType:@"caf"]];
    selectSound =  [[SoundEffect alloc] initWithContentsOfFile:[mainBundle pathForResource:@"Select" ofType:@"caf"]];

    [window setFrame:CGRectMake(0, 0, 768, 1024)];
    drawingView.frame = CGRectMake(0, 0, 768, 1024);

    // Erase the view when receiving a notification named "shake" from the NSNotificationCenter object
    // The "shake" notification is posted by the PaintingWindow object when the user shakes the device
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(eraseView) name:@"shake" object:nil];
}

Answer 1:

First, GLPaint is a terrible application to start with if you are not at all familiar with OpenGL ES; it's far too complex a sample application for that.

That said, I can describe what I use for recording H.264 video from OpenGL ES in my GPUImage framework. If you care to see the full implementation of this, look at the GPUImageMovieWriter class. Note that my implementation of this is based on OpenGL ES 2.0, so you may need to make some adaptations to have this work in OpenGL ES 1.1 (used by GLPaint).

You'll use an AVAssetWriter for this. In order to get decent recording performance, you'll need to provide frames to the writer in BGRA format, not the RGBA you get from reading the screen using glReadPixels(). In my case, I used a color-swizzling shader to convert from RGBA to BGRA before reading, but you don't have that option with OpenGL ES 1.1. I'm not sure what you can do to work around this and still get decent recording speeds (with RGBA frames, I was seeing 3-5 FPS recording, whereas with BGRA I get a solid 30 FPS).
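
For reference, the swizzle itself is tiny under OpenGL ES 2.0. The sketch below is illustrative only (the varying and uniform names are assumptions, not the actual identifiers from GPUImageMovieWriter): a fragment shader that samples the rendered frame and reorders the channels so the subsequent glReadPixels() readback is effectively BGRA.

// Illustrative OpenGL ES 2.0 fragment shader, stored as an Objective-C string,
// that swizzles RGBA into BGRA before the frame is read back.
static NSString *const kColorSwizzlingFragmentShader = @"\
    varying highp vec2 textureCoordinate;\n\
    uniform sampler2D inputImageTexture;\n\
    void main()\n\
    {\n\
        gl_FragColor = texture2D(inputImageTexture, textureCoordinate).bgra;\n\
    }";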

I set up the writer using code like the following:

frameData = (GLubyte *) malloc((int)videoSize.width * (int)videoSize.height * 4);

NSError *error = nil;

assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:AVFileTypeAppleM4V error:&error];
if (error != nil)
{
    NSLog(@"Error: %@", error);
}


NSMutableDictionary * outputSettings = [[NSMutableDictionary alloc] init];
[outputSettings setObject: AVVideoCodecH264 forKey: AVVideoCodecKey];
[outputSettings setObject: [NSNumber numberWithInt: videoSize.width] forKey: AVVideoWidthKey];
[outputSettings setObject: [NSNumber numberWithInt: videoSize.height] forKey: AVVideoHeightKey];

assetWriterVideoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
assetWriterVideoInput.expectsMediaDataInRealTime = YES;

// You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA.
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                       [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                       [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                       nil];

assetWriterPixelBufferInput = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

[assetWriter addInput:assetWriterVideoInput];

and begin recording with the following:

startTime = [NSDate date];
[assetWriter startWriting];
[assetWriter startSessionAtSourceTime:kCMTimeZero];

I grab and encode a color-swizzled frame using the following:

CVPixelBufferRef pixel_buffer = NULL;

CVReturn status = CVPixelBufferPoolCreatePixelBuffer (NULL, [assetWriterPixelBufferInput pixelBufferPool], &pixel_buffer);
if ((pixel_buffer == NULL) || (status != kCVReturnSuccess))
{
    return;
}
else
{
    CVPixelBufferLockBaseAddress(pixel_buffer, 0);


    GLubyte *pixelBufferData = (GLubyte *)CVPixelBufferGetBaseAddress(pixel_buffer);
    glReadPixels(0, 0, videoSize.width, videoSize.height, GL_RGBA, GL_UNSIGNED_BYTE, pixelBufferData);
}

// May need to add a check here, because if two consecutive times with the same value are added to the movie, it aborts recording
CMTime currentTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:startTime],120);
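
// (Illustrative guard, not in the original answer: skip frames whose presentation
// time has not advanced, since appending two buffers with identical timestamps can
// abort the recording. previousFrameTime is assumed to be an instance variable of
// type CMTime, initialized to kCMTimeNegativeInfinity before recording starts.)
if (CMTimeCompare(currentTime, previousFrameTime) <= 0)
{
    CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
    CVPixelBufferRelease(pixel_buffer);
    return;
}
previousFrameTime = currentTime;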

if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:currentTime]) 
{
    NSLog(@"Problem appending pixel buffer at time: %lld", currentTime.value);
} 
else 
{
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

CVPixelBufferRelease(pixel_buffer);

and then when I'm done with it, I finish it off with the following:

[assetWriterVideoInput markAsFinished];
[assetWriter finishWriting];    

Again, you can see this in action in the framework linked above. You might be able to modify this for use with OpenGL ES 1.1 and the GLPaint sample, but it might not have the best recording performance. As I said at the start, GLPaint is a horrible place for a newcomer to OpenGL ES to begin, so you might want to try something a lot simpler first.
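
A closing note on the audio half of the question: everything above writes video only. Below is a rough sketch, not taken from GPUImage, of how an audio track could be added to the same AVAssetWriter, assuming the spoken instructions are captured elsewhere (for example with an AVCaptureSession and an AVCaptureAudioDataOutput) and arrive as CMSampleBuffers; assetWriterAudioInput is a hypothetical ivar and the settings are only an example.

// Sketch only: add an AAC audio input alongside the video input on the writer.
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(channelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSData dataWithBytes:&channelLayout length:sizeof(channelLayout)], AVChannelLayoutKey,
                               [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                               nil];

assetWriterAudioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
assetWriterAudioInput.expectsMediaDataInRealTime = YES;
[assetWriter addInput:assetWriterAudioInput];

// Each microphone sample buffer delivered by the capture callback is then appended
// (while assetWriterAudioInput.readyForMoreMediaData is YES) with:
//     [assetWriterAudioInput appendSampleBuffer:sampleBuffer];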