将视频保存到iPhone库中的问题.
我有一个UI Images的阵列,和两个按钮,“convertToVideo”&“savetoiPhoneLib”

/// Builds the video file path in the Caches directory and kicks off the
/// image-array → movie conversion with a fixed 3-second duration.
-(IBAction) convertToVideo
{
  NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);

  // First entry is the user's Caches directory; nil if the lookup failed.
  NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;

  NSString *savedVideoPath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput"];

  printf(" \n\n\n-Video file == %s--\n\n\n",[savedVideoPath UTF8String]);

  // imageArray is an ivar holding the UIImages; size is the view's frame size.
  [self writeImageAsMovie:imageArray toPath:savedVideoPath size:self.view.frame.size duration:3];
}


Here I'm passing the imageArray and savedVideoPath to the method below:


/// Writes the UIImages in `array` to `path` as an H.264 QuickTime movie.
/// NOTE(review): as posted, only the first frame is appended; the original
/// question elided the per-frame loop ("Write samples").
-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration
{
 NSError *error = nil;

 AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
          [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                 error:&error];

 NSParameterAssert(videoWriter);

 // H.264 output at the requested pixel dimensions.
 NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
           AVVideoCodecH264, AVVideoCodecKey,
           [NSNumber numberWithInt:size.width], AVVideoWidthKey,
           [NSNumber numberWithInt:size.height], AVVideoHeightKey,
           nil];
 AVAssetWriterInput* writerInput = [[AVAssetWriterInput
          assetWriterInputWithMediaType:AVMediaTypeVideo
          outputSettings:videoSettings] retain];

 // Adaptor lets us feed CVPixelBuffers with explicit presentation times.
 AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
              assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
              sourcePixelBufferAttributes:nil];

 NSParameterAssert(writerInput);
 NSParameterAssert([videoWriter canAddInput:writerInput]);
 [videoWriter addInput:writerInput];

 //Start a session:
 [videoWriter startWriting];
 [videoWriter startSessionAtSourceTime:kCMTimeZero];

 CVPixelBufferRef buffer = NULL;

 // Convert the first UIImage to a pixel buffer and append it at t = 0.
 buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
 [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

 //Write samples:
 // ... (remaining frames were appended here in the original question) ...

 //Finish the session:
 [writerInput markAsFinished];
 [videoWriter finishWriting];
}


generate a CVPixelBufferRef here


/// Creates a 32ARGB CVPixelBuffer sized to the view and draws `image` into it.
/// The caller owns the returned buffer and must release it (CVPixelBufferRelease).
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
        [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
        nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          self.view.frame.size.width,
                                          self.view.frame.size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate(data, width, height, bitsPerComponent, bytesPerRow, space, info)
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 self.view.frame.size.width,
                                                 self.view.frame.size.height,
                                                 8,
                                                 4 * self.view.frame.size.width,
                                                 rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    // Draw the image at its native size into the buffer.
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                       image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

保存到iPhone库


/// Saves the previously written video file to the device's photo album.
-(IBAction) savetoiPhoneLib
{
 // Restored the dropped NSUserDomainMask argument.
 NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);

 NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;

 NSString *getimagePath = [basePath stringByAppendingPathComponent:@"videoOutput"];

 printf(" \n\n\n-Video file == %s--\n\n\n",[getimagePath UTF8String]);

 // Selector names must not contain spaces.
 UISaveVideoAtPathToSavedPhotosAlbum(getimagePath, self,
                                     @selector(video:didFinishSavingWithError:contextInfo:), nil);
}


/// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum.
/// `error` is nil on success.
- (void) video: (NSString *) videoPath didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo {
 NSLog(@"Finished saving video with error: %@",error);
}

但是当我保存时收到如下错误信息:

Finished saving video with error: Error Domain=ALAssetsLibraryErrorDomain Code=-3302 "Invalid data"（数据无效，无法查看或播放，写入该资产时出现问题；建议尝试使用不同的数据。）

请让我知道我的错误.提前致谢

解决方法

/// Builds an MP4 movie from all bundled PNGs: each image becomes one frame
/// shown for `numberOfSecondsPerFrame` seconds, written to
/// Documents/test_output.mp4 via AVAssetWriter.
-(void)convertimagetoVideo
{
    ///////////// setup OR function def if we move this to a separate function ////////////
    // this should be moved to its own function,that can take an imageArray,videoOutputPath,etc...

NSError *error = nil;

// set up file manager,and file videoOutputPath,remove "test_output.mp4" if it exists...
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
                                stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    NSLog(@"Unable to delete file: %@",[error localizedDescription]);

CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;

// Load every PNG in the main bundle as a frame.
NSMutableArray *imageArray;
NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imagePaths.count= %lu",(unsigned long)imagePaths.count);
for (NSString* path in imagePaths)
{
    [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
}

//////////////     end setup    ///////////////////////////////////

NSLog(@"Start building video from defined frames.");

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

// Restored the dictionary keys the scrape dropped.
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
// frameDuration is expressed in `fps` timescale units, so each frame lasts
// numberOfSecondsPerFrame seconds.
double frameDuration = fps * numberOfSecondsPerFrame;

NSLog(@"**************************************************");
for(UIImage * img in imageArray)
{
    buffer = [self pixelBufferFromCGImage:[img CGImage]];

    // Retry up to 30 times while the writer input catches up.
    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status:
            NSLog(@"Processing video frame (%d,%lu)",frameCount,(unsigned long)[imageArray count]);

            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            NSLog(@"seconds = %f,%lu,%d",CMTimeGetSeconds(frameTime),(unsigned long)fps,j);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.",error,[error userInfo]);
                }
            }
        }
        else {
            // Writer input not ready yet; back off briefly.
            printf("adaptor not ready %d\n", j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d times, with error.\n", j);
    }
    frameCount++;
}
NSLog(@"**************************************************");

//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(@"Write Ended");

}


/// Muxes the just-written video (Documents/test_output.mp4) with a bundled
/// audio track (30secs.mp3) into Documents/final_video.mp4, then saves the
/// result to the photo album when the async export completes.
-(void)CompileFilestomakeVideo
{

    // set up file manager,remove "test_output.mp4" if it exists...
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    // audio input file...
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    // this is the video file that was just written above,full path to file is in --> videoOutputPath
    NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // create the final video output file as MOV file - may need to be MP4,but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    // Video track: insert the whole written movie at t = 0.
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    // Audio track: insert the whole mp3 at t = 0 alongside the video.
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero,audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;

    // Export is asynchronous; save to the album only once it completes.
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {
         [self saveVideoToAlbum:outputFilePath];
     }
     ];

    ///// THAT IS IT DONE... the final video file will be written here...
    NSLog(@"DONE.....outputFilePath--->%@",outputFilePath);
}
/// Saves the video at `path` to the Saved Photos album if the format is
/// compatible; completion is reported via video:didFinishSavingWithError:contextInfo:.
- (void) saveVideoToAlbum:(NSString*)path {

    NSLog(@"saveVideoToAlbum");

    if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){
        // Restored the target/selector arguments the scrape dropped.
        UISaveVideoAtPathToSavedPhotosAlbum(path, self,
                                            @selector(video:didFinishSavingWithError:contextInfo:), nil);
    }
}

/// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum; `error` is nil on success.
-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if(error)
        NSLog(@"error: %@",error);
    else
        NSLog(@" OK");
}



////////////////////////
////////////////////////
/// Creates a 400x200 32ARGB CVPixelBuffer and draws `image` into it.
/// Caller owns the returned buffer (CVPixelBufferRelease).
/// NOTE(review): the scraped original had several lines merged/garbled; this
/// reconstruction mirrors the intact version of the same method earlier in
/// the post, keeping this copy's fixed size and premultiplied-alpha format.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {

    CGSize size = CGSizeMake(400, 200);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess){
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer,0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // CGBitmapContextCreate(data, width, height, bitsPerComponent, bytesPerRow, space, info)
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 4*size.width,
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    //kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                       image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer,0);

    return pxbuffer;
}

ios4 – 从UIImage阵列创建视频并将视频保存到iPhone库. AVAssetLibrary AVFoundation的更多相关文章

  1. html5 canvas合成海报所遇问题及解决方案总结

    这篇文章主要介绍了html5 canvas合成海报所遇问题及解决方案总结,小编觉得挺不错的,现在分享给大家,也给大家做个参考。一起跟随小编过来看看吧

  2. Html5移动端适配IphoneX等机型的方法

    这篇文章主要介绍了Html5移动端适配IphoneX等机型的方法,文中通过示例代码介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一起学习学习吧

  3. 有关HTML5页面在iPhoneX适配问题

    这篇文章主要介绍了有关HTML5页面在iPhoneX适配问题,需要的朋友可以参考下

  4. Html5 页面适配iPhoneX(就是那么简单)

    这篇文章主要介绍了Html5 页面适配iPhoneX(就是那么简单),文中通过示例代码介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一起学习学习吧

  5. H5页面适配iPhoneX(就是那么简单)

    这篇文章主要介绍了H5页面适配iPhoneX(就是那么简单),文中通过示例代码介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一起学习学习吧

  6. Html5跳转到APP指定页面的实现

    这篇文章主要介绍了Html5跳转到APP指定页面的实现,文中通过示例代码介绍的非常详细,对大家的学习或者工作具有一定的参考学习价值,需要的朋友们下面随着小编来一起学习学习吧

  7. ios – 在没有iPhone6s或更新的情况下测试ARKit

    我在决定下载Xcode9之前.我想玩新的框架–ARKit.我知道要用ARKit运行app我需要一个带有A9芯片或更新版本的设备.不幸的是我有一个较旧的.我的问题是已经下载了新Xcode的人.在我的情况下有可能运行ARKit应用程序吗?那个或其他任何模拟器?任何想法或我将不得不购买新设备?解决方法任何iOS11设备都可以使用ARKit,但是具有高质量AR体验的全球跟踪功能需要使用A9或更高版本处理器的设备.使用iOS11测试版更新您的设备是必要的.

  8. ios – containerURLForSecurityApplicationGroupIdentifier:在iPhone和Watch模拟器上给出不同的结果

    我使用默认的XCode模板创建了一个WatchKit应用程序.我向iOSTarget,WatchkitAppTarget和WatchkitAppExtensionTarget添加了应用程序组权利.(这是应用程序组名称:group.com.lombax.fiveminutes)然后,我尝试使用iOSApp和WatchKitExtension访问共享文件夹URL:延期:iOS应用:但是,测试NSURL

  9. ios – Trello iPhone应用程序是如何开发的?

    我想知道Trelloiphone应用程序正在使用哪些库或框架.它是一个JS框架的webapp?我很好奇,因为我非常喜欢用户界面,并且在没有运气的情况下搜索了互联网.解决方法我在Trello团队中编写了iPhone应用程序.它是除了附件查看器之外的所有本机代码,它只是一个WebView.我们使用RestKit与我们的API进行通信,并帮助将数据本地缓存到CoreData.否则,它只是一堆自定义UIViews和UIViewControllers.

  10. 真正的iOS设备和Watch Simulator可以进行通信以进行测试

    我想为现有的iOS应用创建一个手表应用.但我处于一种情况,我没有苹果手表,我现有的iOS应用程序只能在不在模拟器上的真实设备上运行.是否可以在iPhone设备上运行应用程序并在手表模拟器中测试我的手表应用程序?解决方法至少在目前,不可能配对真正的iPhone和Watch模拟器.我得出这个结论有三个原因:>Watch模拟器在安装过程中自动与iPhone模拟器配对.>根本无法从界面取消配对Watch模拟器.>在模拟器上无法访问蓝牙以与真实设备进行通信.这是一个proof.

随机推荐

  1. iOS实现拖拽View跟随手指浮动效果

    这篇文章主要为大家详细介绍了iOS实现拖拽View跟随手指浮动,文中示例代码介绍的非常详细,具有一定的参考价值,感兴趣的小伙伴们可以参考一下

  2. iOS – genstrings:无法连接到输出目录en.lproj

    使用我桌面上的项目文件夹,我启动终端输入:cd然后将我的项目文件夹拖到终端,它给了我路径.然后我将这行代码粘贴到终端中找.-name*.m|xargsgenstrings-oen.lproj我在终端中收到此错误消息:genstrings:无法连接到输出目录en.lproj它多次打印这行,然后说我的项目是一个目录的路径?没有.strings文件.对我做错了什么的想法?

  3. iOS 7 UIButtonBarItem图像没有色调

    如何确保按钮图标采用全局色调?解决方法只是想将其转换为根注释,以便为“回答”复选标记提供更好的上下文,并提供更好的格式.我能想出这个!

  4. ios – 在自定义相机层的AVFoundation中自动对焦和自动曝光

    为AVFoundation定制图层相机创建精确的自动对焦和曝光的最佳方法是什么?

  5. ios – Xcode找不到Alamofire,错误:没有这样的模块’Alamofire’

    我正在尝试按照github(https://github.com/Alamofire/Alamofire#cocoapods)指令将Alamofire包含在我的Swift项目中.我创建了一个新项目,导航到项目目录并运行此命令sudogeminstallcocoapods.然后我面临以下错误:搜索后我设法通过运行此命令安装cocoapodssudogeminstall-n/usr/local/bin

  6. ios – 在没有iPhone6s或更新的情况下测试ARKit

    我在决定下载Xcode9之前.我想玩新的框架–ARKit.我知道要用ARKit运行app我需要一个带有A9芯片或更新版本的设备.不幸的是我有一个较旧的.我的问题是已经下载了新Xcode的人.在我的情况下有可能运行ARKit应用程序吗?那个或其他任何模拟器?任何想法或我将不得不购买新设备?解决方法任何iOS11设备都可以使用ARKit,但是具有高质量AR体验的全球跟踪功能需要使用A9或更高版本处理器的设备.使用iOS11测试版更新您的设备是必要的.

  7. 将iOS应用移植到Android

    我们制作了一个具有2000个目标c类的退出大型iOS应用程序.我想知道有一个最佳实践指南将其移植到Android?此外,由于我们的应用程序大量使用UINavigation和UIView控制器,我想知道在Android上有类似的模型和实现.谢谢到目前为止,guenter解决方法老实说,我认为你正在计划的只是制作难以维护的糟糕代码.我意识到这听起来像很多工作,但从长远来看它会更容易,我只是将应用程序的概念“移植”到android并从头开始编写.

  8. ios – 在Swift中覆盖Objective C类方法

    我是Swift的初学者,我正在尝试在Swift项目中使用JSONModel.我想从JSONModel覆盖方法keyMapper,但我没有找到如何覆盖模型类中的Objective-C类方法.该方法的签名是:我怎样才能做到这一点?解决方法您可以像覆盖实例方法一样执行此操作,但使用class关键字除外:

  9. ios – 在WKWebView中获取链接URL

    我想在WKWebView中获取tapped链接的url.链接采用自定义格式,可触发应用中的某些操作.例如HTTP://我的网站/帮助#深层链接对讲.我这样使用KVO:这在第一次点击链接时效果很好.但是,如果我连续两次点击相同的链接,它将不报告链接点击.是否有解决方法来解决这个问题,以便我可以检测每个点击并获取链接?任何关于这个的指针都会很棒!解决方法像这样更改addobserver在observeValue函数中,您可以获得两个值

  10. ios – 在Swift的UIView中找到UILabel

    我正在尝试在我的UIViewControllers的超级视图中找到我的UILabels.这是我的代码:这是在Objective-C中推荐的方式,但是在Swift中我只得到UIViews和CALayer.我肯定在提供给这个方法的视图中有UILabel.我错过了什么?我的UIViewController中的调用:解决方法使用函数式编程概念可以更轻松地实现这一目标.

返回
顶部