2014-07-22 24 views
5

我有一个音频文件,并希望更改其专辑封面图稿。那么,有可能吗?而且,如何在iOS编程中将音频文件的封面设置为专辑封面?如何在音频文件中添加艺术作品,以在专辑封面中显示?

其实,我合并了两个音频文件,并且想为合并后的文件添加在 iTunes 中显示的专辑封面插图。

守则如下:

// Mixes the two source audio files (audioFileURL1 / audioFileURL2) into one
// M4A file named after textFieldMixFile.text in Library/Caches, applying the
// per-track start delays and preferred volumes, then starts an asynchronous
// AVAssetExportSession export.
//
// Returns NO immediately when an input/output URL is missing, a source asset
// has no audio track, a track cannot be inserted into the composition, or the
// export session cannot be created. Returns YES once the asynchronous export
// has been *started* — the export itself may still fail later; the completion
// handler reports that via an alert on the main thread.
- (BOOL)combineVoices1
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory = [paths objectAtIndex:0];
    libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
    NSString *outputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a", textFieldMixFile.text];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *audioFileInput1 = audioFileURL1; // <Path of original audio file>
    NSURL *audioFileInput2 = audioFileURL2; // <Path of original audio file>

    if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) {
        return NO;
    }

    // The export session refuses to overwrite; remove any stale output first.
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // --- Track 1 -----------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    // NOTE(review): CMTimeMake(delay, 1) truncates any fractional seconds in
    // the delay values — confirm the delays are whole seconds by design.
    CMTime nextClipStartTimeMix1 = (playbackDelayAfterTimeMix1 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix1, 1)
        : kCMTimeZero;
    CMTime startTimeMix1 = (playbackDelayMix1 > 0)
        ? CMTimeMake(playbackDelayMix1, 1)
        : kCMTimeZero;

    [compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];

    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audioFileInput1 options:nil];
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if (tracks.count == 0) {
        return NO; // source file carries no audio track
    }
    AVAssetTrack *clipAudioTrack = [tracks objectAtIndex:0];

    // Bail out instead of silently exporting an incomplete mix when the
    // insertion fails (the original code passed error:nil and ignored it).
    NSError *insertError = nil;
    if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration)
                                        ofTrack:clipAudioTrack
                                         atTime:nextClipStartTimeMix1
                                          error:&insertError]) {
        return NO;
    }

    // --- Track 2 -----------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack1 =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime nextClipStartTimeMix2 = (playbackDelayAfterTimeMix2 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix2, 1)
        : kCMTimeZero;
    CMTime startTimeMix2 = (playbackDelayMix2 > 0)
        ? CMTimeMake(playbackDelayMix2, 1)
        : kCMTimeZero;

    [compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];

    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audioFileInput2 options:nil];
    NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
    if (tracks1.count == 0) {
        return NO; // source file carries no audio track
    }
    AVAssetTrack *clipAudioTrack1 = [tracks1 objectAtIndex:0];

    insertError = nil;
    if (![compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration)
                                         ofTrack:clipAudioTrack1
                                          atTime:nextClipStartTimeMix2
                                           error:&insertError]) {
        return NO;
    }

    // --- Export ------------------------------------------------------------
    AVAssetExportSession *exportSession =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    if (exportSession == nil) {
        return NO;
    }

    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // The handler runs on an arbitrary queue; hop to the main thread
        // before touching UI.
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            [self performSelectorOnMainThread:@selector(performAction)
                                   withObject:nil
                                waitUntilDone:NO];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            [self performSelectorOnMainThread:@selector(hideSpinningWheel)
                                   withObject:nil
                                waitUntilDone:NO];
            [[NSTSharedData instance] showAlertForTitle:@"Error!"
                                             andMessage:[NSString stringWithFormat:@"%@", [[exportSession error] localizedDescription]]];
        }
    }];

    return YES;
}
+1

你都研究过哪些资料?有尝试过的代码吗? – ljacqu

+0

我想这是可能的,可能使用这个类'AVURLAsset','AVMetadataItem'。 –

+0

我合并了两个音频文件,并且想为该合并文件添加插图,这些插图(专辑封面)将在 iTunes 中展示。 – Rathore

回答

3

我解决了我的问题,现在它工作正常。我在上面的代码中、紧挨 “AVAssetExportSession” 之前加入了元数据相关代码。最终的方法如下:

// Builds one iTunes-keyspace metadata item (artist, album, title, cover art,
// ...) for attachment to an AVAssetExportSession. Extracted because the
// original repeated this four-line stanza once per metadata field.
static AVMutableMetadataItem *UTMiTunesMetadataItem(NSString *key, id<NSObject, NSCopying> value)
{
    AVMutableMetadataItem *item = [[AVMutableMetadataItem alloc] init];
    item.key = key;
    item.keySpace = AVMetadataKeySpaceiTunes;
    item.locale = [NSLocale currentLocale];
    item.value = value;
    return item;
}

// Mixes the two source audio files (audioFileURL1 / audioFileURL2) into one
// M4A file named after textFieldMixFile.text in Library/Caches, applying the
// per-track start delays and preferred volumes, attaching iTunes metadata
// (artist, album, song title, cover art), then starts an asynchronous export.
//
// Returns NO immediately when an input/output URL is missing, a source asset
// has no audio track, a track cannot be inserted into the composition, or the
// export session cannot be created. Returns YES once the asynchronous export
// has been *started* — the export itself may still fail later; the completion
// handler reports that via an alert on the main thread.
- (BOOL)combineVoices1
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSLibraryDirectory, NSUserDomainMask, YES);
    NSString *libraryCachesDirectory = [paths objectAtIndex:0];
    libraryCachesDirectory = [libraryCachesDirectory stringByAppendingPathComponent:@"Caches"];
    NSString *outputFilePath = [libraryCachesDirectory stringByAppendingFormat:@"/%@.m4a", textFieldMixFile.text];
    NSURL *audioFileOutput = [NSURL fileURLWithPath:outputFilePath];
    NSURL *audioFileInput1 = audioFileURL1; // <Path of original audio file>
    NSURL *audioFileInput2 = audioFileURL2; // <Path of original audio file>

    if (!audioFileInput1 || !audioFileInput2 || !audioFileOutput) {
        return NO;
    }

    // The export session refuses to overwrite; remove any stale output first.
    [[NSFileManager defaultManager] removeItemAtURL:audioFileOutput error:NULL];

    AVMutableComposition *composition = [[AVMutableComposition alloc] init];

    // --- Track 1 -----------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    // NOTE(review): CMTimeMake(delay, 1) truncates any fractional seconds in
    // the delay values — confirm the delays are whole seconds by design.
    CMTime nextClipStartTimeMix1 = (playbackDelayAfterTimeMix1 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix1, 1)
        : kCMTimeZero;
    CMTime startTimeMix1 = (playbackDelayMix1 > 0)
        ? CMTimeMake(playbackDelayMix1, 1)
        : kCMTimeZero;

    [compositionAudioTrack setPreferredVolume:[NSTSharedData instance].volumeOfMIX1];

    AVAsset *avAsset = [AVURLAsset URLAssetWithURL:audioFileInput1 options:nil];
    NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeAudio];
    if (tracks.count == 0) {
        return NO; // source file carries no audio track
    }
    AVAssetTrack *clipAudioTrack = [tracks objectAtIndex:0];

    // Bail out instead of silently exporting an incomplete mix when the
    // insertion fails (the original code passed error:nil and ignored it).
    NSError *insertError = nil;
    if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(startTimeMix1, avAsset.duration)
                                        ofTrack:clipAudioTrack
                                         atTime:nextClipStartTimeMix1
                                          error:&insertError]) {
        return NO;
    }

    // --- Track 2 -----------------------------------------------------------
    AVMutableCompositionTrack *compositionAudioTrack1 =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime nextClipStartTimeMix2 = (playbackDelayAfterTimeMix2 > 0)
        ? CMTimeMake(playbackDelayAfterTimeMix2, 1)
        : kCMTimeZero;
    CMTime startTimeMix2 = (playbackDelayMix2 > 0)
        ? CMTimeMake(playbackDelayMix2, 1)
        : kCMTimeZero;

    [compositionAudioTrack1 setPreferredVolume:[NSTSharedData instance].volumeOfMIX2];

    AVAsset *avAsset1 = [AVURLAsset URLAssetWithURL:audioFileInput2 options:nil];
    NSArray *tracks1 = [avAsset1 tracksWithMediaType:AVMediaTypeAudio];
    if (tracks1.count == 0) {
        return NO; // source file carries no audio track
    }
    AVAssetTrack *clipAudioTrack1 = [tracks1 objectAtIndex:0];

    insertError = nil;
    if (![compositionAudioTrack1 insertTimeRange:CMTimeRangeMake(startTimeMix2, avAsset1.duration)
                                         ofTrack:clipAudioTrack1
                                          atTime:nextClipStartTimeMix2
                                           error:&insertError]) {
        return NO;
    }

    // --- iTunes metadata (shown as album info / cover art in iTunes) --------
    NSArray *metadata = @[
        UTMiTunesMetadataItem(AVMetadataiTunesMetadataKeyArtist, uTakeTheMicArtist),
        UTMiTunesMetadataItem(AVMetadataiTunesMetadataKeyAlbum, uTakeTheMicAlbum),
        UTMiTunesMetadataItem(AVMetadataiTunesMetadataKeySongName, textFieldMixFile.text),
        // imageData is NSData of a UIImage; AVFoundation accepts raw image
        // bytes for the cover-art key.
        UTMiTunesMetadataItem(AVMetadataiTunesMetadataKeyCoverArt, imageData),
    ];

    // --- Export ------------------------------------------------------------
    AVAssetExportSession *exportSession =
        [AVAssetExportSession exportSessionWithAsset:composition
                                          presetName:AVAssetExportPresetAppleM4A];
    if (exportSession == nil) {
        return NO;
    }

    exportSession.metadata = metadata;
    exportSession.outputURL = audioFileOutput;
    exportSession.outputFileType = AVFileTypeAppleM4A;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // The handler runs on an arbitrary queue; hop to the main thread
        // before touching UI.
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            [self performSelectorOnMainThread:@selector(performAction)
                                   withObject:nil
                                waitUntilDone:NO];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            [self performSelectorOnMainThread:@selector(hideSpinningWheel)
                                   withObject:nil
                                waitUntilDone:NO];
            [[NSTSharedData instance] showAlertForTitle:@"Error!"
                                             andMessage:[NSString stringWithFormat:@"%@.", [[exportSession error] localizedDescription]]];
        }
    }];

    return YES;
}
相关问题