ios - 使用基于 AVCapture 的 viewController 崩溃

标签 ios opengl-es cadisplaylink wait-fences

我正在开发一个在 UIViewController 中使用 OpenGL VideoCapture 的应用

在这个 Controller 的第一次初始化时,我在控制台中收到了这条消息:

wait_fences: failed to receive reply: 10004003

消息在 viewController 初始化后立即显示

在那之后,如果我切换到另一个 Controller 并再次启动该 Controller ,则在使用 EXC_BAD_ACCESS 启动 ViewController 2/4 后会发生崩溃

asm 输出没有那么大的帮助

0x32f56a34:  bne    0x32f56ad4               ; memmove$VARIANT$CortexA9 + 276
0x32f56a38:  subs   r2, r2, #60
0x32f56a3c:  blo    0x32f56a84               ; memmove$VARIANT$CortexA9 + 196
0x32f56a40:  tst    r0, #28
0x32f56a44:  beq    0x32f56a5c               ; memmove$VARIANT$CortexA9 + 156
0x32f56a48:  ldr    r3, [r1], #4
0x32f56a4c:  subs   r2, r2, #4
0x32f56a50:  str    r3, [r0], #4
0x32f56a54:  bhs    0x32f56a40               ; memmove$VARIANT$CortexA9 + 128
0x32f56a58:  b      0x32f56a84               ; memmove$VARIANT$CortexA9 + 196
0x32f56a5c:  push   {r5, r6, r8, r10}
0x32f56a60:  ldm    r1!, {r3, r4, r5, r6, r8, r9, r10, r12}
0x32f56a64:  subs   r2, r2, #64

源代码的一些相关部分在这里:

- (id)init
{

    self = [super init];

    if (self)
    {

        ADLog(@"AR_ogl_ViewController init");

        animating = FALSE;

        EAGLContext *aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

        if (!aContext)
        {
            aContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
        }

        if (!aContext)
        {
            ADLog(@"Failed to create ES context");
        }
        else if (![EAGLContext setCurrentContext:aContext])
        {   
            ADLog(@"Failed to set ES context current");
        }

        self.context = aContext;

        ADSafeRelease(aContext);


        [(AR_EAGLView *)self.view setContext:context];
        [(AR_EAGLView *)self.view setFramebuffer];

        [self createVBO];

        if ([context API] == kEAGLRenderingAPIOpenGLES2)
            [self loadShaders];

        animating               = FALSE;
        displayLinkSupported    = FALSE;
        animationFrameInterval  = 1;
        displayLink             = nil;
        animationTimer          = nil;

        // Use of CADisplayLink requires iOS version 3.1 or greater.
        // The NSTimer object is used as fallback when it isn't available.
        /*  NSString *reqSysVer = @"3.1";

        NSString *currSysVer = [[UIDevice currentDevice] systemVersion];

        if ([currSysVer compare:reqSysVer options:NSNumericSearch] != NSOrderedAscending)*/

        displayLinkSupported = TRUE;

        // start capture
        [self setupCaptureSession];

        // initialize
        frameData = 0;
        pTexData = (GLuint*)malloc(TEX_SIZE*TEX_SIZE*sizeof(GLuint));

        incrementNeedsVideoFrame();

        float x = 0.0f;
        float y = 0.0f;
        float z = 1.0f;
        float angle = 90.0 * PI_OVER_180;

        afIdentity[0]  = 1+(1-cos(angle))*(x*x-1);
        afIdentity[1]  = -z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity[2]  = y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity[3]  = 0;
        afIdentity[4]  = z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity[5]  = 1+(1-cos(angle))*(y*y-1);
        afIdentity[6]  = -x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity[7]  = 0;
        afIdentity[8]  = -y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity[9]  = x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity[10] = 1+(1-cos(angle))*(z*z-1);
        afIdentity[11] = 0;
        afIdentity[12] = 0;
        afIdentity[13] = 0;
        afIdentity[14] = 0;
        afIdentity[15] = 1;

        y = 1.0f;
        z = 0.0f;
        angle = 180.0 * PI_OVER_180;

        afIdentity2[0]  = 1+(1-cos(angle))*(x*x-1);
        afIdentity2[1]  = -z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity2[2]  = y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity2[3]  = 0;
        afIdentity2[4]  = z*sin(angle)+(1-cos(angle))*x*y;
        afIdentity2[5]  = 1+(1-cos(angle))*(y*y-1);
        afIdentity2[6]  = -x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity2[7]  = 0;
        afIdentity2[8]  = -y*sin(angle)+(1-cos(angle))*x*z;
        afIdentity2[9]  = x*sin(angle)+(1-cos(angle))*y*z;
        afIdentity2[10] = 1+(1-cos(angle))*(z*z-1);
        afIdentity2[11] = 0;
        afIdentity2[12] = 0;
        afIdentity2[13] = 0;
        afIdentity2[14] = 0;
        afIdentity2[15] = 1;

    }

    ADLog(@"AR_ogl_ViewController init end");


    return self;

}

- (void)dealloc
{
    ADLog(@"dealloc");
    ADSafeRelease(session);

    //ADSafeRelease(input);
    //ADSafeRelease(output);

    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }

        if (m_ui32Vbo) 
    {
                glDeleteBuffers(1, &m_ui32Vbo);
                m_ui32Vbo = 0;
        }

        if (frameData) 
    {
                free(frameData);
        frameData = nil;
        }

    if (pTexData) 
    {
                free(pTexData);
        pTexData = nil;
        }

    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];

    [context release];
    context = nil;

    [super dealloc];

}

- (void)viewDidAppear:(BOOL)animated
{
    ADLog(@"viewDidAppear");
    [super viewDidAppear:animated];
    [self startAnimation];

}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [self stopAnimation];

}

- (void)viewDidUnload
{
        [super viewDidUnload];

    if (program)
    {
        glDeleteProgram(program);
        program = 0;
    }

    // Tear down context.
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];
        self.context = nil; 
}



- (void)startAnimation
{

    ADLog(@"startAnimation");

    if (displayLink)
    {
        [displayLink invalidate];
        displayLink = nil;
    }

    if (animating==FALSE && displayLink==nil)
    {
            ADLog(@"generating displayLink");
            displayLink = [NSClassFromString(@"CADisplayLink") displayLinkWithTarget:self selector:@selector(drawFrame)];
            [displayLink setFrameInterval:animationFrameInterval];
            [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            animating = TRUE;

    }

    ADLog(@"startAnimation end");

}

- (void)stopAnimation
{
    ADLog(@"stopAnimation");

    if (displayLink)
    {

        [displayLink invalidate];
        displayLink = nil;

        animating   = FALSE;
    }


}


// Create and configure a capture session and start it running
- (void)setupCaptureSession 
{
    NSError *error = nil;

    // Create the session
    session = [[AVCaptureSession alloc] init];

    // Configure the session to produce lower resolution video frames, if your 
    // processing algorithm can cope. We'll specify medium quality for the
    // chosen device.
        // Low   : 192 x 144
        // Medium: 480 x 320
        // High  : 1280 x 720
    session.sessionPreset = AVCaptureSessionPresetMedium;

    // Find a suitable AVCaptureDevice
    AVCaptureDevice *device = [AVCaptureDevice
                                                           defaultDeviceWithMediaType:AVMediaTypeVideo];

    // Create a device input with the device and add it to the session.
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];

    if (!input) 
    {
        // Handling the error appropriately.
    }

    [session addInput:input];

    // Create a VideoDataOutput and add it to the session
    output = [[AVCaptureVideoDataOutput alloc] init];

    [session addOutput:output];

    // Configure your output.
    CaptureQueue = dispatch_queue_create("CaptureQueue", NULL);
    [output setSampleBufferDelegate:self queue:CaptureQueue];
    dispatch_release(CaptureQueue);

    // Specify the pixel format
    output.videoSettings = 
        [NSDictionary dictionaryWithObject:
         [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] 
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        output.alwaysDiscardsLateVideoFrames = NO;

    //  If you wish to cap the frame rate to a known value, such as 15 fps, set 
    //  minFrameDuration.
    //  [output setVideoMinFrameDuration:CMTimeMake(1, 20)];

    // Start the session running to start the flow of data

    [session startRunning];

}



@end

有什么建议可以跟踪这次崩溃吗?

最佳答案

您设置 View 的时间过早。上面大部分代码应该在viewDidLoadviewDidAppear:,而不是init

另见 "wait_fences: failed to receive reply: 10004003"?

关于ios - 使用基于 AVCapture 的 viewController 崩溃,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/9991667/

相关文章:

iphone - 选择 OpenGL ES 1.1 还是 OpenGL ES 2.0?

ios - 为什么 UIScrollView 会暂停我的 CADisplayLink?

ios - 模拟器中的慢速 CADisplayLink 动画

ios - malloc 诊断和 malloc 相关环境变量之间有什么关系?

ios - 无法在 Xcode 中构建 React Native 项目

iOS : Create a simple audio waveform animation

android - eglCreateContext 中的属性列表

ios - 在 swift 4 中验证电话号码时,我从 firebase 收到 'invalid token'

opengl-es - GLSL 优化 : check if variable is within range

iphone - CADisplayLink 吞下异常