2012-05-01 3 views
0

У меня есть массив изображений, и я преобразовал их в видеоролик. Теперь я хочу узнать, как сохранить преобразованное видео на iPad. Как сохранить видео программно на iPad 1 (iOS SDK)?

Могу ли я сохранить это конвертированное видео в библиотеку фотографий iPad?

NSError *error = nil;

// Create a QuickTime movie writer targeting `path`.
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

// H.264 output sized to the source images.
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                               nil];

// BUG FIX: the original wrapped this factory call in an extra -retain,
// which leaks under MRC (nothing balances it) and is illegal under ARC.
// -addInput: retains the input for the writer's lifetime, so no extra
// ownership is needed here.
AVAssetWriterInput *writerInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:videoSettings];

// Adaptor lets us append CVPixelBuffers with explicit presentation times.
AVAssetWriterInputPixelBufferAdaptor *adaptor =
    [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                     sourcePixelBufferAttributes:nil];

NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
[videoWriter addInput:writerInput];

// Start a writing session at time zero.
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

// Convert the first image (stored as PNG data in `array`) to a CGImage.
CFDataRef imgData = (CFDataRef)[array objectAtIndex:0];
CGDataProviderRef imgDataProvider = CGDataProviderCreateWithCFData(imgData);
CGImageRef image1 = CGImageCreateWithPNGDataProvider(imgDataProvider, NULL, true, kCGRenderingIntentDefault);
CGDataProviderRelease(imgDataProvider); // BUG FIX: provider was leaked in the original

CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image1];
CGImageRelease(image1); // BUG FIX: CGImage was leaked in the original

if (buffer) {
    // BUG FIX: appending without checking readyForMoreMediaData can drop the
    // frame; poll briefly until the input will accept data.
    while (!writerInput.readyForMoreMediaData) {
        usleep(10000); // 10 ms
    }
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer); // BUG FIX: pixel buffer was leaked in the original
}

// ... append the remaining frames here with increasing presentation times ...

// Finish the session.
[writerInput markAsFinished];
// NOTE(review): -finishWriting is deprecated since iOS 6; prefer
// -finishWritingWithCompletionHandler: where available.
[videoWriter finishWriting];
+0

В каком формате вы конвертировали их в видео? Обычно это делается с помощью AVFoundation или API QTCaptureView – Amitg2k12

+0

@Rohan Я преобразовал это с помощью AVAssetWriter и отредактировал свой вопрос, добавив код, который я написал – user1300511

+0

У меня вопрос: как сохранить это преобразованное видео в библиотеку фотографий на iPad 1? – user1300511

ответ

0

Попробуйте использовать следующий код:

// Background job: reads numbered PNG frames ("0.png", "1.png", ...) from
// NSTemporaryDirectory(), appends each to the asset writer with a
// presentation time derived from the capture dates in _dates, deletes each
// frame file after use, then finalizes the movie and reports progress /
// completion on the main queue.
// NOTE(review): the closing `}];` of this block is not visible in this
// excerpt; the code below is documented as-is.
[_operationQueue addOperationWithBlock:^{ 

    // Frame index; doubles as the file-name stem for the next PNG.
    NSInteger i = 0; 

    NSString *path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",i]; 
    UIImage *image; 

    // Capture date of the first frame; all presentation times are offsets
    // from this date.
    NSDate *startDate; 

    // Loop while a frame file exists at `path`; loading failure (nil image)
    // terminates the sequence.
    while ((image = [UIImage imageWithContentsOfFile:path])) 
    { 
     while (1) 
     { 
      if (writerInput.readyForMoreMediaData == NO) 
      { 
       // BUG(review): sleep() takes whole seconds (unsigned int), so
       // sleep(0.01) truncates to sleep(0) — this is a pure busy-wait.
       // usleep(10000) was probably intended.
       sleep(0.01); 
       continue; 
      } 
      else 
      { 
       //First time only 
       if (buffer == NULL) 
       { 
        // NOTE(review): this pool-allocated buffer is immediately
        // overwritten below without being released — it appears to leak
        // once per run; only its side effect (non-NULL marker for the
        // first-time branch) is used.
        CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer); 
        startDate = [_dates objectAtIndex:i]; 
       } 

       // Convert the current frame to a pixel buffer for the adaptor.
       buffer = [IQProjectVideo pixelBufferFromCGImage:image.CGImage]; 

       if (buffer) 
       { 
        // Only frames with a matching capture date are appended.
        if(i<_dates.count){ 

        NSDate *currentDate = [_dates objectAtIndex:i]; 
        // Seconds elapsed since the first frame = presentation timestamp.
        Float64 interval = [currentDate timeIntervalSinceDate:startDate]; 

        int32_t timeScale; 

        // Derive the timescale from the spacing to the neighboring frame
        // (next frame for i==0, previous frame otherwise).
        // NOTE(review): 1.0/delta truncated to int32_t can be 0 for
        // deltas >= 1s; the MAX(33, ...) below clamps against that.
        if (i == 0) 
        { 
         timeScale = 1.0/([[_dates objectAtIndex:i+1] timeIntervalSinceDate:currentDate]); 
        } 
        else 
        { 
         timeScale = 1.0/([currentDate timeIntervalSinceDate:[_dates objectAtIndex:i-1]]); 
        } 

        /**/ 
        CMTime presentTime=CMTimeMakeWithSeconds(interval, MAX(33, timeScale)); 
        //      NSLog(@"presentTime:%@",(__bridge NSString *)CMTimeCopyDescription(kCFAllocatorDefault, presentTime)); 


        // Report fractional progress on the main queue.
        // NOTE(review): `currentIndex` is defined outside this excerpt —
        // presumably the total frame count; confirm against the caller.
        if (_progressBlock != NULL) 
        { 
         dispatch_sync(dispatch_get_main_queue(), ^{ 
          _progressBlock((CGFloat)i/(CGFloat)currentIndex); 
         }); 
        } 


        // append buffer 
        [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]; 
        CVPixelBufferRelease(buffer); 
        // NOTE(review): the indentation here is misleading — the brace
        // below closes the `if(i<_dates.count)` branch, and the next one
        // closes `if (buffer)`; frames with i >= _dates.count are
        // silently skipped (and their buffers leak).
       } 
       } 
       // Frame handled (or skipped); leave the ready-wait loop.
       break; 
      } 
     } 

     // Consume-and-delete: remove the frame file, then advance to the next
     // numbered PNG.
     [[NSFileManager defaultManager] removeItemAtPath:path error:nil]; 

     path = [NSTemporaryDirectory() stringByAppendingFormat:@"%d.png",++i]; 
    } 

    //Finish the session: 
    [writerInput markAsFinished]; 

    // Prefer the asynchronous finish API when available (iOS 6+); fall back
    // to the deprecated synchronous -finishWriting otherwise.
    if ([videoWriter respondsToSelector:@selector(finishWritingWithCompletionHandler:)]) 
    { 
     [videoWriter finishWritingWithCompletionHandler:^{ 
      CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 

     }]; 
    } 
    else 
    { 
     [videoWriter finishWriting]; 
     CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 
    } 

    // Collect output-file metadata (path, size, creation date) for the
    // completion callback.
    // NOTE(review): if the async finish branch ran above, the file may not
    // be fully written yet when these attributes are read — verify.
    NSDictionary *fileAttrubutes = [[NSFileManager defaultManager] attributesOfItemAtPath:_path error:nil]; 
    NSDictionary *dictInfo = [NSDictionary dictionaryWithObjectsAndKeys: 
           _path,IQFilePathKey, 
           [fileAttrubutes objectForKey:NSFileSize], IQFileSizeKey, 
           [fileAttrubutes objectForKey:NSFileCreationDate], IQFileCreateDateKey, 
           nil]; 

    // Deliver the result (or videoWriter.error on failure) on the main queue.
    if (_completionBlock != NULL) 
    { 
     dispatch_sync(dispatch_get_main_queue(), ^{ 
      _completionBlock(dictInfo,videoWriter.error); 
     }); 
    } 

    // NOTE(review): /usr/bin/open and system() only work on macOS (e.g. in
    // the Simulator); this does nothing useful on an iOS device.
    NSString *openCommand = [NSString stringWithFormat:@"/usr/bin/open \"%@\"", NSTemporaryDirectory()]; 
    system([openCommand fileSystemRepresentation]);