iphone - How to display a video from images

Tags: iphone objective-c ios xcode4.2

    #import "ViewController.h"
    #import "AVFoundation/AVAssetWriter.h"
    #import "AVFoundation/AVAssetWriterInput.h"
    #import "AVFoundation/AVMediaFormat.h"
    #import "AVFoundation/AVVideoSettings.h"


    @implementation ViewController

    - (void)viewDidLoad
    {
        [super viewDidLoad];

        NSArray *documentDirectories = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentDirectory = [documentDirectories objectAtIndex:0];

        // NSLog(@"Where it is %@ \n", documentDirectory);
        image1.image = [UIImage imageNamed:@"images1.jpg"];
        CGSize sizeOfImage = image1.image.size;
        // printf("Size of Image width = %f  height = %f\n", sizeOfImage.width, sizeOfImage.height);
        [self writeImageAsMovie:(UIImage *)image1.image toPath:(NSString *)documentDirectory size:(CGSize)sizeOfImage duration:6];

    }


    - (void)writeImageAsMovie:(UIImage*)image toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
    {
        NSError *error = nil;

        AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                      [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                                  error:&error];
        NSParameterAssert(videoWriter);


        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264, AVVideoCodecKey,
                                       [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                       [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                       nil];


        AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                           assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];




        AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                         assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                         sourcePixelBufferAttributes:nil];


        NSParameterAssert(writerInput);
        NSParameterAssert([videoWriter canAddInput:writerInput]);
        [videoWriter addInput:writerInput];

        //Start a session:
        [videoWriter startWriting];

        [videoWriter startSessionAtSourceTime:kCMTimeZero];

        //Write samples:
        //CVPixelBufferRef Utils;
        CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:image.CGImage size:size];
        [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];



        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)];


        //Finish the session:
        [writerInput markAsFinished];
        [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 2)];



       [videoWriter finishWriting]; 


    } 

    - (CVPixelBufferRef)newPixelBufferFromCGImage:(CGImageRef)image size:(CGSize)frameSize
    {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];

        CVPixelBufferRef pxbuffer = NULL;

        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, frameSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);

        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        status = CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
        NSParameterAssert(pxdata != NULL);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                                     frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                                     kCGImageAlphaNoneSkipFirst);
        NSParameterAssert(context);
        CGAffineTransform frameTransform;
        CGContextConcatCTM(context, frameTransform);
        CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                               CGImageGetHeight(image)), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        return pxbuffer;
    }

    @end

I am new to iOS, and I found this code on Stack Overflow for displaying a video built from images. When I run it there are no errors, but I do not get the expected output. I think the problem is in the last line of this method:

- (void)writeImageAsMovie:(UIImage *)image toPath:(NSString *)path size:(CGSize)size duration:(int)duration, that is, [videoWriter finishWriting]

Please help me.
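For reference, two things in the posted code commonly prevent AVAssetWriter from producing a playable file: the writer is pointed at the Documents directory itself rather than at a movie file inside it, and CGContextConcatCTM is called with an uninitialized CGAffineTransform. A minimal sketch of those fixes, assuming an illustrative file name of output.mov:

    // In viewDidLoad: pass a file path inside Documents, not the directory itself.
    NSString *moviePath = [documentDirectory stringByAppendingPathComponent:@"output.mov"];
    // AVAssetWriter will not overwrite an existing file, so clear any previous output first.
    [[NSFileManager defaultManager] removeItemAtPath:moviePath error:NULL];
    [self writeImageAsMovie:image1.image toPath:moviePath size:sizeOfImage duration:6];

    // In newPixelBufferFromCGImage:size:, concatenate a defined transform (or drop the call entirely).
    CGContextConcatCTM(context, CGAffineTransformIdentity);

    // In writeImageAsMovie:toPath:size:duration:, release the owned pixel buffer once it has been appended.
    CVPixelBufferRelease(buffer);

With those changes the writer should leave a short output.mov in the app's Documents directory after [videoWriter finishWriting] returns.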

Best Answer

If you have an array of images and simply want to play those images back like a video, you can use a UIImageView and update its image from the array with a for loop.

So if you have the images in an array, like this:

    for (int i = 0; i < [array count]; i++)
    {
        imageview.image = (UIImage *)[array objectAtIndex:i];
    }
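Note that a tight for loop like the one above finishes before the screen is redrawn, so only the last image ends up visible. To actually see the frames cycle, you can hand the array to UIImageView's built-in frame animation instead. A minimal sketch, assuming imageview is already on screen and array contains UIImage objects:

    imageview.animationImages = array;     // the frames to cycle through
    imageview.animationDuration = 6.0;     // seconds for one full pass of the array
    imageview.animationRepeatCount = 1;    // 0 means repeat forever
    [imageview startAnimating];

Alternatively, an NSTimer can swap imageview.image at a fixed interval if more per-frame control is needed.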

Regarding iphone - How to display a video from images, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/8635036/
