Create a movie file from an image array and a song file using AVAsset
I am trying to create a movie file from an array of images and an audio file. To build the movie from the picture array I used zoul's great post here. That part works fine and I get my movie with my pictures. However, when I try to add the audio track, I run into a lot of problems. To make things clear, here is my code:
When I call this method, the image array and the song file are already prepared:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSArray *dirContents = [[NSFileManager defaultManager] directoryContentsAtPath:documentsDirectoryPath];
for (NSString *tString in dirContents) {
if ([tString isEqualToString:@"essai.mp4"])
{
[[NSFileManager defaultManager]removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
}
}
NSLog(@"Write Started");
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
[NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
AVVideoCodecH264, AVVideoCodecKey,
[NSNumber numberWithInt:size.width], AVVideoWidthKey,
[NSNumber numberWithInt:size.height], AVVideoHeightKey,
nil];
AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
[NSNumber numberWithFloat:44100.0] ,AVSampleRateKey,
[NSNumber numberWithInt: 1] ,AVNumberOfChannelsKey,
[NSNumber numberWithInt:192000],AVEncoderBitRateKey,
[NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],AVChannelLayoutKey,
nil];
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeVideo
outputSettings:videoSettings] retain];
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
assetWriterInputWithMediaType:AVMediaTypeAudio
outputSettings:audioSettings] retain];
NSURL* fileURL = [[NSBundle mainBundle] URLForResource:@"Big_Voice_1" withExtension:@"caf"];
NSLog(@"%@",fileURL);
AVAsset *asset = [[AVURLAsset URLAssetWithURL:fileURL
options:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
sourcePixelBufferAttributes:nil];
AVAssetTrack* audioTrack = [asset.tracks objectAtIndex:0];
AVAssetReaderOutput *readerOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:readerOutput];
NSParameterAssert(videoWriterInput);
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = NO;
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
[videoWriter addInput:videoWriterInput];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
//Video encoding
CVPixelBufferRef buffer = NULL;
//convert uiimage to CGImage.
int frameCount = 0;
for(int i = 0; i<20; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
//CVPixelBufferPoolCreatePixelBuffer (kCFAllocatorDefault, adaptor.pixelBufferPool, &buffer);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish the session:
[videoWriterInput markAsFinished];
//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
CVPixelBufferRef buffer = NULL;
//Write all picture array in movie file.
int frameCount = 0;
for(int i = 0; i<[m_PictArray count]; i++)
{
buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];
BOOL append_ok = NO;
int j = 0;
while (!append_ok && j < 30)
{
if (adaptor.assetWriterInput.readyForMoreMediaData)
{
printf("appending %d attemp %d\n", frameCount, j);
CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
NSParameterAssert(bufferPool != NULL);
[NSThread sleepForTimeInterval:0.05];
}
else
{
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n", frameCount, j);
}
frameCount++;
}
//Finish writing picture:
[videoWriterInput markAsFinished];
I finish writing the pictures into the movie file, and then I want to copy the audio into the file, so I do this:
[audioReader startReading];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
{
NSLog(@"Request");
NSLog(@"Asset Writer ready :%d",audioWriterInput.readyForMoreMediaData);
while (audioWriterInput.readyForMoreMediaData) {
NSLog(@"Ready");
CMSampleBufferRef nextBuffer = [readerOutput copyNextSampleBuffer];
if (nextBuffer) {
NSLog(@"NextBuffer");
[audioWriterInput appendSampleBuffer:nextBuffer];
}
}
}
];
[audioWriterInput markAsFinished];
[videoWriter finishWriting];
However, readyForMoreMediaData on the audio AVAssetWriterInput is always NO.
My question: how do I add audio to a video file with AVFoundation?
So please, can somebody help me by telling me what I forgot or what I am doing wrong?
Thank you very much.
2 Answers
I finally found how to make a movie from a picture array and an audio file. So if you want to do the same thing, here is my code (be careful with the memory management):
First, create the movie file from the picture array, using zoul's post here:
-(void) writeImagesToMovieAtPath:(NSString *) path withSize:(CGSize) size
{
    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSArray *dirContents = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:documentsDirectoryPath error:nil];
    for (NSString *tString in dirContents) {
        if ([tString isEqualToString:@"essai.mp4"])
        {
            [[NSFileManager defaultManager] removeItemAtPath:[NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,tString] error:nil];
        }
    }

    NSLog(@"Write Started");

    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
                                             assetWriterInputWithMediaType:AVMediaTypeVideo
                                             outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = YES;

    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //Video encoding
    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.
    int frameCount = 0;

    for(int i = 0; i<[m_PictArray count]; i++)
    {
        buffer = [self pixelBufferFromCGImage:[[m_PictArray objectAtIndex:i] CGImage] andSize:size];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attemp %d\n", frameCount, j);

                CMTime frameTime = CMTimeMake(frameCount,(int32_t) 10);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                CVPixelBufferPoolRef bufferPool = adaptor.pixelBufferPool;
                NSParameterAssert(bufferPool != NULL);

                [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        frameCount++;
        CVBufferRelease(buffer);
    }

    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];

    [videoWriterInput release];
    [videoWriter release];

    [m_PictArray removeAllObjects];

    NSLog(@"Write Ended");
}
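The method above relies on a pixelBufferFromCGImage:andSize: helper that is not shown in this post (it comes from zoul's answer). As a rough sketch, assuming a 32-bit ARGB buffer and manual reference counting, it could look like this:

- (CVPixelBufferRef) pixelBufferFromCGImage:(CGImageRef)image andSize:(CGSize)size
{
    // Ask for a pixel buffer that Core Graphics can draw into.
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                        kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    // Draw the CGImage into the buffer's memory.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGContextRelease(context);
    CGColorSpaceRelease(rgbColorSpace);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer; // the caller releases it with CVBufferRelease(buffer)
}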
After that, you need to combine the movie file and the audio file. To do so, follow my code:
-(void)CompileFilesToMakeMovie
{
    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString* audio_inputFileName = @"deformed.caf";
    NSString* audio_inputFilePath = [Utilities documentsPath:audio_inputFileName];
    NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    NSString* video_inputFileName = @"essai.mp4";
    NSString* video_inputFilePath = [Utilities documentsPath:video_inputFileName];
    NSURL* video_inputFileUrl = [NSURL fileURLWithPath:video_inputFilePath];

    NSString* outputFileName = @"outputFile.mov";
    NSString* outputFilePath = [Utilities documentsPath:outputFileName];
    NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset* audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        [self saveVideoToAlbum:outputFilePath];
    }];
}
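The export completion handler calls a saveVideoToAlbum: helper that is not shown here; the following is only a guess at its body, using UIKit's saved-photos functions:

-(void) saveVideoToAlbum:(NSString *)path
{
    // Copy the exported movie into the Camera Roll (fire-and-forget: no completion callback).
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, nil, nil, nil);
    }
}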
I hope this helps. Sorry if there are any leaks, I am still working on memory optimization.
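For completeness, a hypothetical call sequence tying the two methods together might look like this (the 320×480 size and the hard-coded path are my assumptions and should match your own setup):

NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *moviePath = [documentsDirectoryPath stringByAppendingPathComponent:@"essai.mp4"];

// 1. Encode the picture array into a silent movie file.
[self writeImagesToMovieAtPath:moviePath withSize:CGSizeMake(320, 480)];

// 2. Mux the audio file and the freshly written movie into the final output.
[self CompileFilesToMakeMovie];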
AVChannelLayoutKey must point to an NSData instance containing an AudioChannelLayout.
Yours points to an NSNumber.
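In other words, the channel layout has to be wrapped in an NSData object, roughly like this (a sketch for a mono AAC track; adjust the layout tag and channel count to your material):

AudioChannelLayout channelLayout;
memset(&channelLayout, 0, sizeof(AudioChannelLayout));
channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                               [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                               [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                               [NSNumber numberWithInt:192000], AVEncoderBitRateKey,
                               // NSData wrapping the AudioChannelLayout struct, not an NSNumber:
                               [NSData dataWithBytes:&channelLayout length:sizeof(channelLayout)], AVChannelLayoutKey,
                               nil];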