aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authortw2012-03-28 20:51:36 -0400
committertw2012-03-28 20:51:36 -0400
commit743790e954359d91d4d2497e80bb9404e2b959c8 (patch)
treea6315c12f5d968d54aabaf7012585493b2386d1c
parent183b9966dbfdd436006aae6f89b6cb98da9fb941 (diff)
parentaf028612ee7ac7326524a66751b2d11aa955d1a9 (diff)
downloadVideo-Tuneup-743790e954359d91d4d2497e80bb9404e2b959c8.tar.bz2
Merge branch 'master' of github.com:bcjordan/Video-Tuneup
-rw-r--r--.gitignore2
-rw-r--r--Video Tuneup/SimpleEditor.h63
-rw-r--r--Video Tuneup/SimpleEditor.m437
-rw-r--r--Video Tuneup/ViewController.m18
4 files changed, 61 insertions, 459 deletions
diff --git a/.gitignore b/.gitignore
index 7092fd1..15cb9c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,3 +19,5 @@ build/*
*.pbxuser
*.xcworkspace
xcuserdata
+
+.idea
diff --git a/Video Tuneup/SimpleEditor.h b/Video Tuneup/SimpleEditor.h
index 1809c44..f505beb 100644
--- a/Video Tuneup/SimpleEditor.h
+++ b/Video Tuneup/SimpleEditor.h
@@ -51,69 +51,48 @@
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CMTime.h>
+@interface SimpleEditor : NSObject
+{
+ // Assets
-typedef enum {
- SimpleEditorTransitionTypeNone,
- SimpleEditorTransitionTypeCrossFade,
- SimpleEditorTransitionTypePush
-} SimpleEditorTransitionType;
+ AVURLAsset *_video;
+ CMTime _videoStartTime;
+
+ AVURLAsset *_song;
+ CMTime _songStartTime;
+ // Composition objects
-@interface SimpleEditor : NSObject
-{
- // Configuration
-
- NSArray *_clips; // array of AVURLAssets
- NSArray *_clipTimeRanges; // array of CMTimeRanges stored in NSValues.
-
- AVURLAsset *_commentary;
- CMTime _commentaryStartTime;
-
- SimpleEditorTransitionType _transitionType;
- CMTime _transitionDuration;
-
- NSString *_titleText;
-
-
- // Composition objects.
-
AVComposition *_composition;
- AVVideoComposition *_videoComposition;
AVAudioMix *_audioMix;
- AVPlayerItem *_playerItem;
- AVSynchronizedLayer *_synchronizedLayer;
+ AVPlayerItem *_playerItem; // Reference to player of work-in-progress
}
// Set these properties before building the composition objects.
-@property (nonatomic, retain) NSArray *clips;
-@property (nonatomic, retain) NSArray *clipTimeRanges;
-
-@property (nonatomic, retain) AVURLAsset *commentary;
-@property (nonatomic) CMTime commentaryStartTime;
-@property (nonatomic) SimpleEditorTransitionType transitionType;
-@property (nonatomic) CMTime transitionDuration;
+@property (nonatomic, retain) AVURLAsset *video;
+@property (nonatomic) CMTime videoStartTime;
-@property (nonatomic, retain) NSString *titleText;
+@property (nonatomic, retain) AVURLAsset *song;
+@property (nonatomic) CMTime songStartTime;
-
-// Build the composition, videoComposition, and audioMix.
-// If the composition is being built for playback then a synchronized layer and player item are also constructed.
+// Build the composition, videoComposition, and audioMix.
+// If the composition is being built for playback then a player item is also constructed.
// All of these objects can be retrieved with the accessors below.
// Calling buildCompositionObjectsForPlayback: will get rid of any previously created composition objects.
- (void)buildCompositionObjectsForPlayback:(BOOL)forPlayback;
-@property (nonatomic, readonly, retain) AVComposition *composition;
-@property (nonatomic, readonly, retain) AVVideoComposition *videoComposition;
-@property (nonatomic, readonly, retain) AVAudioMix *audioMix;
+@property (nonatomic, retain) AVComposition *composition;
+//@property (nonatomic, readonly, retain) AVVideoComposition *videoComposition;
+@property (nonatomic, readwrite, retain) AVAudioMix *audioMix;
+@property (nonatomic, readwrite, retain) AVPlayerItem *playerItem;
+
- (void)getPlayerItem:(AVPlayerItem**)playerItemOut andSynchronizedLayer:(AVSynchronizedLayer**)synchronizedLayerOut;
// The synchronized layer contains a layer tree which is synchronized with the provided player item.
// Inside the layer tree there is a playerLayer along with other layers related to titling.
-- (AVAssetImageGenerator*)assetImageGenerator;
- (AVAssetExportSession*)assetExportSessionWithPreset:(NSString*)presetName;
-
@end
diff --git a/Video Tuneup/SimpleEditor.m b/Video Tuneup/SimpleEditor.m
index f666d6c..40e07f2 100644
--- a/Video Tuneup/SimpleEditor.m
+++ b/Video Tuneup/SimpleEditor.m
@@ -52,474 +52,109 @@
#import <CoreMedia/CoreMedia.h>
#import <AVFoundation/AVFoundation.h>
-@interface SimpleEditor ()
-@property (nonatomic, readwrite, retain) AVComposition *composition;
-@property (nonatomic, readwrite, retain) AVVideoComposition *videoComposition;
-@property (nonatomic, readwrite, retain) AVAudioMix *audioMix;
-@property (nonatomic, readwrite, retain) AVPlayerItem *playerItem;
-@property (nonatomic, readwrite, retain) AVSynchronizedLayer *synchronizedLayer;
-
-@end
-
-
@implementation SimpleEditor
- (id)init
{
if (self = [super init]) {
- _commentaryStartTime = CMTimeMake(2, 1); // Default start time for the commentary is two seconds.
-
- _transitionDuration = CMTimeMake(1, 1); // Default transition duration is one second.
-
- // just until we have the UI for this wired up
- NSMutableArray *clipTimeRanges = [[NSMutableArray alloc] initWithCapacity:3];
- CMTimeRange defaultTimeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMake(5, 1));
- NSValue *defaultTimeRangeValue = [NSValue valueWithCMTimeRange:defaultTimeRange];
- [clipTimeRanges addObject:defaultTimeRangeValue];
- [clipTimeRanges addObject:defaultTimeRangeValue];
- [clipTimeRanges addObject:defaultTimeRangeValue];
- _clipTimeRanges = clipTimeRanges;
+        _songStartTime = CMTimeMake(0, 1); // Default start time for the song is zero seconds.
+        _videoStartTime = CMTimeMake(0, 1); // Default start time for the video is zero seconds.
}
return self;
}
// Configuration
-@synthesize clips = _clips, clipTimeRanges = _clipTimeRanges;
-@synthesize commentary = _commentary, commentaryStartTime = _commentaryStartTime;
-@synthesize transitionType = _transitionType, transitionDuration = _transitionDuration;
-@synthesize titleText = _titleText;
+@synthesize video = _video, videoStartTime = _videoStartTime;
+@synthesize song = _song, songStartTime = _songStartTime;
// Composition objects.
@synthesize composition = _composition;
-@synthesize videoComposition =_videoComposition;
@synthesize audioMix = _audioMix;
@synthesize playerItem = _playerItem;
-@synthesize synchronizedLayer = _synchronizedLayer;
-
-static CGImageRef createStarImage(CGFloat radius)
-{
- int i, count = 5;
-#if TARGET_OS_IPHONE
- CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
-#else // not TARGET_OS_IPHONE
- CGColorSpaceRef colorspace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
-#endif // not TARGET_OS_IPHONE
- CGImageRef image = NULL;
- size_t width = 2*radius;
- size_t height = 2*radius;
- size_t bytesperrow = width * 4;
- CGContextRef context = CGBitmapContextCreate((void *)NULL, width, height, 8, bytesperrow, colorspace, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
- CGContextClearRect(context, CGRectMake(0, 0, 2*radius, 2*radius));
- CGContextSetLineWidth(context, radius / 15.0);
-
- for( i = 0; i < 2 * count; i++ ) {
- CGFloat angle = i * M_PI / count;
- CGFloat pointradius = (i % 2) ? radius * 0.37 : radius * 0.95;
- CGFloat x = radius + pointradius * cos(angle);
- CGFloat y = radius + pointradius * sin(angle);
- if (i == 0)
- CGContextMoveToPoint(context, x, y);
- else
- CGContextAddLineToPoint(context, x, y);
- }
- CGContextClosePath(context);
-
- CGContextSetRGBFillColor(context, 1.0, 1.0, 1.0, 1.0);
- CGContextSetRGBStrokeColor(context, 0.0, 0.0, 0.0, 1.0);
- CGContextDrawPath(context, kCGPathFillStroke);
- CGColorSpaceRelease(colorspace);
- image = CGBitmapContextCreateImage(context);
- CGContextRelease(context);
- return image;
-}
-
-- (void)buildSequenceComposition:(AVMutableComposition *)composition
-{
- CMTime nextClipStartTime = kCMTimeZero;
- NSInteger i;
-
- // No transitions: place clips into one video track and one audio track in composition.
- NSLog(@"Building sequence composition. Count is %i", [_clips count]);
-
- AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
- AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
-
- for (i = 0; i < [_clips count]; i++ ) {
- AVURLAsset *asset = [_clips objectAtIndex:i];
- NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
- CMTimeRange timeRangeInAsset;
- if (clipTimeRange)
- timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
- else
- timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
-
- AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
- [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
-
- NSLog(@"Composition audio?");
- if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
- AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
- NSLog(@"Composition audio!");
- [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
- }
-
- // Note: This is largely equivalent:
- // [composition insertTimeRange:timeRangeInAsset ofAsset:asset atTime:nextClipStartTime error:NULL];
- // except that if the video tracks dimensions do not match, additional video tracks will be added to the composition.
-
- nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
- }
-}
-- (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
+- (void)addSongTrackToComposition:(AVMutableComposition *)composition withAudioMix:(AVMutableAudioMix *)audioMix
{
- CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
-
- // Make transitionDuration no greater than half the shortest clip duration.
- CMTime transitionDuration = self.transitionDuration;
- for (i = 0; i < [_clips count]; i++ ) {
- NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
- if (clipTimeRange) {
- CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
- halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
- transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
- }
- }
-
- // Add two video tracks and two audio tracks.
- AVMutableCompositionTrack *compositionVideoTracks[2];
- AVMutableCompositionTrack *compositionAudioTracks[2];
- compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
- compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
- compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
- compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+ NSArray *tracksToDuck = [composition tracksWithMediaType:AVMediaTypeAudio]; // before we add the song
- CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
- CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
-
- // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
- for (i = 0; i < [_clips count]; i++ ) {
- NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
- AVURLAsset *asset = [_clips objectAtIndex:i];
- NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
- CMTimeRange timeRangeInAsset;
- if (clipTimeRange)
- timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
- else
- timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
-
- AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
- [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
-
- AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
- [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
-
- // Remember the time range in which this clip should pass through.
- // Every clip after the first begins with a transition.
- // Every clip before the last ends with a transition.
- // Exclude those transitions from the pass through time ranges.
- passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
- if (i > 0) {
- passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
- passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
- }
- if (i+1 < [_clips count]) {
- passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
- }
-
- // The end of this clip will overlap the start of the next by transitionDuration.
- // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
- nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
- nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);
-
- // Remember the time range for the transition to the next item.
- transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
- }
-
- // Set up the video composition if we are to perform crossfade or push transitions between clips.
- NSMutableArray *instructions = [NSMutableArray array];
-
- // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
- for (i = 0; i < [_clips count]; i++ ) {
- NSInteger alternatingIndex = i % 2; // alternating targets
-
- // Pass through clip i.
- AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
- passThroughInstruction.timeRange = passThroughTimeRanges[i];
- AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
-
- passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
- [instructions addObject:passThroughInstruction];
-
- if (i+1 < [_clips count]) {
- // Add transition from clip i to clip i+1.
-
- AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
- transitionInstruction.timeRange = transitionTimeRanges[i];
- AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
- AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];
-
- if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
- // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
- [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
- }
- else if (self.transitionType == SimpleEditorTransitionTypePush) {
- // Set a transform ramp on fromLayer from identity to all the way left of the screen.
- [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
- // Set a transform ramp on toLayer from all the way right of the screen to identity.
- [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
- }
-
- transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
- [instructions addObject:transitionInstruction];
- }
- }
-
- videoComposition.instructions = instructions;
-}
-
-- (void)addCommentaryTrackToComposition:(AVMutableComposition *)composition withAudioMix:(AVMutableAudioMix *)audioMix
-{
- NSInteger i;
- NSArray *tracksToDuck = [composition tracksWithMediaType:AVMediaTypeAudio]; // before we add the commentary
-
- // Clip commentary duration to composition duration.
- CMTimeRange commentaryTimeRange = CMTimeRangeMake(self.commentaryStartTime, self.commentary.duration);
- if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(commentaryTimeRange), >, [composition duration]))
- commentaryTimeRange.duration = CMTimeSubtract([composition duration], commentaryTimeRange.start);
-
- // Add the commentary track.
- AVMutableCompositionTrack *compositionCommentaryTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
- [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, commentaryTimeRange.duration) ofTrack:[[self.commentary tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:commentaryTimeRange.start error:nil];
+ // Clip song duration to composition duration.
+ CMTimeRange songTimeRange = CMTimeRangeMake(self.songStartTime, self.song.duration);
+ if (CMTIME_COMPARE_INLINE(CMTimeRangeGetEnd(songTimeRange), >, [composition duration]))
+ songTimeRange.duration = CMTimeSubtract([composition duration], songTimeRange.start);
+ // Add the song track.
+ AVMutableCompositionTrack *compositionSongTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
+ [compositionSongTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, songTimeRange.duration) ofTrack:[[self.song tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:songTimeRange.start error:nil];
+ // Ramp tracks down and up at beginning and end.
NSMutableArray *trackMixArray = [NSMutableArray array];
CMTime rampDuration = CMTimeMake(1, 2); // half-second ramps
for (i = 0; i < [tracksToDuck count]; i++) {
AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:[tracksToDuck objectAtIndex:i]];
- [trackMix setVolumeRampFromStartVolume:1.0 toEndVolume:0.2 timeRange:CMTimeRangeMake(CMTimeSubtract(commentaryTimeRange.start, rampDuration), rampDuration)];
- [trackMix setVolumeRampFromStartVolume:0.2 toEndVolume:1.0 timeRange:CMTimeRangeMake(CMTimeRangeGetEnd(commentaryTimeRange), rampDuration)];
+ [trackMix setVolumeRampFromStartVolume:1.0 toEndVolume:0.2 timeRange:CMTimeRangeMake(CMTimeSubtract(songTimeRange.start, rampDuration), rampDuration)];
+ [trackMix setVolumeRampFromStartVolume:0.2 toEndVolume:1.0 timeRange:CMTimeRangeMake(CMTimeRangeGetEnd(songTimeRange), rampDuration)];
[trackMixArray addObject:trackMix];
}
audioMix.inputParameters = trackMixArray;
}
-- (void)buildPassThroughVideoComposition:(AVMutableVideoComposition *)videoComposition forComposition:(AVMutableComposition *)composition
+- (void)addVideoTrackToComposition:(AVMutableComposition *)composition
{
- // Make a "pass through video track" video composition.
- AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
- passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [composition duration]);
-
- AVAssetTrack *videoTrack = [[composition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
- AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
-
- passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
- videoComposition.instructions = [NSArray arrayWithObject:passThroughInstruction];
-}
+ AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
+ CMTimeRange videoTimeRange = CMTimeRangeMake(self.videoStartTime, self.video.duration);
-- (CALayer *)buildAnimatedTitleLayerForSize:(CGSize)videoSize
-{
- // Create a layer for the overall title animation.
- CALayer *animatedTitleLayer = [CALayer layer];
-
- // Create a layer for the text of the title.
- CATextLayer *titleLayer = [CATextLayer layer];
- titleLayer.string = self.titleText;
- titleLayer.font = @"Helvetica";
- titleLayer.fontSize = videoSize.height / 6;
- //?? titleLayer.shadowOpacity = 0.5;
- titleLayer.alignmentMode = kCAAlignmentCenter;
- titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height / 6);
-
- // Add it to the overall layer.
- [animatedTitleLayer addSublayer:titleLayer];
-
- // Create a layer that contains a ring of stars.
- CALayer *ringOfStarsLayer = [CALayer layer];
-
- NSInteger starCount = 9, s;
- CGFloat starRadius = videoSize.height / 10;
- CGFloat ringRadius = videoSize.height * 0.8 / 2;
- CGImageRef starImage = createStarImage(starRadius);
- for (s = 0; s < starCount; s++) {
- CALayer *starLayer = [CALayer layer];
- CGFloat angle = s * 2 * M_PI / starCount;
- starLayer.bounds = CGRectMake(0, 0, 2 * starRadius, 2 * starRadius);
- starLayer.position = CGPointMake(ringRadius * cos(angle), ringRadius * sin(angle));
-// starLayer.contents = (id)starImage;
- starLayer.contents = (__bridge id)starImage;
- [ringOfStarsLayer addSublayer:starLayer];
- }
- CGImageRelease(starImage);
-
- // Rotate the ring of stars.
- CABasicAnimation *rotationAnimation = [CABasicAnimation animationWithKeyPath:@"transform.rotation"];
- rotationAnimation.repeatCount = 1e100; // forever
- rotationAnimation.fromValue = [NSNumber numberWithFloat:0.0];
- rotationAnimation.toValue = [NSNumber numberWithFloat:2 * M_PI];
- rotationAnimation.duration = 10.0; // repeat every 10 seconds
- rotationAnimation.additive = YES;
- rotationAnimation.removedOnCompletion = NO;
- rotationAnimation.beginTime = 1e-100; // CoreAnimation automatically replaces zero beginTime with CACurrentMediaTime(). The constant AVCoreAnimationBeginTimeAtZero is also available.
- [ringOfStarsLayer addAnimation:rotationAnimation forKey:nil];
-
- // Add the ring of stars to the overall layer.
- animatedTitleLayer.position = CGPointMake(videoSize.width / 2.0, videoSize.height / 2.0);
- [animatedTitleLayer addSublayer:ringOfStarsLayer];
-
- // Animate the opacity of the overall layer so that it fades out from 3 sec to 4 sec.
- CABasicAnimation *fadeAnimation = [CABasicAnimation animationWithKeyPath:@"opacity"];
- fadeAnimation.fromValue = [NSNumber numberWithFloat:1.0];
- fadeAnimation.toValue = [NSNumber numberWithFloat:0.0];
- fadeAnimation.additive = NO;
- fadeAnimation.removedOnCompletion = NO;
- fadeAnimation.beginTime = 10.0;
- fadeAnimation.duration = 2.0;
- fadeAnimation.fillMode = kCAFillModeBoth;
- [animatedTitleLayer addAnimation:fadeAnimation forKey:nil];
-
- return animatedTitleLayer;
+// [compositionVideoTrack insertTimeRange:videoTimeRange ofTrack: self.video atTime:videoTimeRange.start error:nil];
}
- (void)buildCompositionObjectsForPlayback:(BOOL)forPlayback
-{
- NSLog(@"Building. Count is %i", [_clips count]);
- CGSize videoSize = [[_clips objectAtIndex:0] naturalSize];
+{
AVMutableComposition *composition = [AVMutableComposition composition];
- AVMutableVideoComposition *videoComposition = nil;
AVMutableAudioMix *audioMix = nil;
- CALayer *animatedTitleLayer = nil;
-
+
+ CGSize videoSize = [self.video naturalSize];
composition.naturalSize = videoSize;
-
- if (self.transitionType == SimpleEditorTransitionTypeNone) {
- // No transitions: place clips into one video track and one audio track in composition.
-
- [self buildSequenceComposition:composition];
- }
- else {
- // With transitions:
- // Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
- // Set up the video composition to cycle between "pass through A", "transition from A to B",
- // "pass through B", "transition from B to A".
-
- videoComposition = [AVMutableVideoComposition videoComposition];
- [self buildTransitionComposition:composition andVideoComposition:videoComposition];
- }
-
- // If one is provided, add a commentary track and duck all other audio during it.
- if (self.commentary) {
- // Add the commentary track and duck all other audio during it.
-
+
+ if (self.video) {
+ [self addVideoTrackToComposition:composition];
+ }
+
+ if (self.song) {
+ // Add the song track and duck all other audio during it.
audioMix = [AVMutableAudioMix audioMix];
- [self addCommentaryTrackToComposition:composition withAudioMix:audioMix];
- }
-
- // Set up Core Animation layers to contribute a title animation overlay if we have a title set.
-// if (self.titleText) {
-// animatedTitleLayer = [self buildAnimatedTitleLayerForSize:videoSize];
-//
-// if (! forPlayback) {
-// // For export: build a Core Animation tree that contains both the animated title and the video.
-// CALayer *parentLayer = [CALayer layer];
-// CALayer *videoLayer = [CALayer layer];
-// parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
-// videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
-// [parentLayer addSublayer:videoLayer];
-// [parentLayer addSublayer:animatedTitleLayer];
-//
-// if (! videoComposition) {
-// // No transition set -- make a "pass through video track" video composition so we can include the Core Animation tree as a post-processing stage.
-// videoComposition = [AVMutableVideoComposition videoComposition];
-//
-// [self buildPassThroughVideoComposition:videoComposition forComposition:composition];
-// }
-//
-// videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
-// }
-// }
-
- if (videoComposition) {
- // Every videoComposition needs these properties to be set:
- videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
- videoComposition.renderSize = videoSize;
+ [self addSongTrackToComposition:composition withAudioMix:audioMix];
}
-
+
self.composition = composition;
- self.videoComposition = videoComposition;
self.audioMix = audioMix;
- self.synchronizedLayer = nil;
-
- NSLog(@"Almost done building");
-
if (forPlayback) {
#if TARGET_OS_EMBEDDED
// Render high-def movies at half scale for real-time playback (device-only).
if (videoSize.width > 640)
- videoComposition.renderScale = 0.5;
+ composition.renderScale = 0.5;
#endif // TARGET_OS_EMBEDDED
- AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:composition];
- playerItem.videoComposition = videoComposition;
+ AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:self.composition];
playerItem.audioMix = audioMix;
self.playerItem = playerItem;
-// if (animatedTitleLayer) {
-// // Build an AVSynchronizedLayer that contains the animated title.
-// self.synchronizedLayer = [AVSynchronizedLayer synchronizedLayerWithPlayerItem:self.playerItem];
-// self.synchronizedLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height);
-// [self.synchronizedLayer addSublayer:animatedTitleLayer];
-// }
- }
-}
-
-- (void)getPlayerItem:(AVPlayerItem**)playerItemOut andSynchronizedLayer:(AVSynchronizedLayer**)synchronizedLayerOut
-{
- if (playerItemOut) {
- *playerItemOut = _playerItem;
- }
- if (synchronizedLayerOut) {
- *synchronizedLayerOut = _synchronizedLayer;
}
}
- (AVAssetImageGenerator*)assetImageGenerator
{
AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:self.composition];
- generator.videoComposition = self.videoComposition;
return generator;
}
- (AVAssetExportSession*)assetExportSessionWithPreset:(NSString*)presetName
{
- AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:self.composition presetName:presetName];
- session.videoComposition = self.videoComposition;
+ AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:self.video presetName:presetName];
+// session.videoComposition = self.composition;
session.audioMix = self.audioMix;
-// return [session autorelease];
return session;
}
-//- (void)dealloc
-//{
-// [_clips release];
-// [_clipTimeRanges release];
-//
-// [_commentary release];
-// [_titleText release];
-//
-//
-// [_composition release];
-// [_videoComposition release];
-// [_audioMix release];
-//
-// [_playerItem release];
-// [_synchronizedLayer release];
-//
-// [super dealloc];
-//}
-
@end
diff --git a/Video Tuneup/ViewController.m b/Video Tuneup/ViewController.m
index fe509c2..4e01e93 100644
--- a/Video Tuneup/ViewController.m
+++ b/Video Tuneup/ViewController.m
@@ -119,23 +119,11 @@ static const NSString *ItemStatusContext;
// Initialize video editor
self.editor = [[SimpleEditor alloc] init];
-
- NSMutableArray *clips = [NSMutableArray arrayWithCapacity:3];
-
- if(asset) {
- [clips addObject:asset];
- [clips addObject:asset];
- [clips addObject:asset];
- } else {NSLog(@"Error! No Asset!"); return;}
-
- // Copy clips into editor
-// self.editor.clips = [clips copy];
- self.editor.clips = clips;
+ self.editor.video = asset;
- NSLog(@"Put clips in. Count is %i", [clips count]);
-
// Begin export
[self.editor buildCompositionObjectsForPlayback:NO];
+
NSLog(@"Put clips in. Build.");
AVAssetExportSession *session = [self.editor assetExportSessionWithPreset:AVAssetExportPresetHighestQuality];
NSLog(@"Session");
@@ -157,8 +145,6 @@ static const NSString *ItemStatusContext;
session.outputURL = [NSURL fileURLWithPath:filePath];
session.outputFileType = AVFileTypeQuickTimeMovie;
- NSLog(@"Exporting asynchronously %i.", [clips count]);
-
[session exportAsynchronouslyWithCompletionHandler:^
{
dispatch_async(dispatch_get_main_queue(), ^{