Commit 5a70876

improved performance and fixed a bug with hap movie export and playback, corresponds to build 1.2 of the batch exporter
mrRay committed Sep 29, 2015
1 parent b136e36 commit 5a70876
Showing 12 changed files with 163 additions and 71 deletions.
2 changes: 2 additions & 0 deletions AVF Batch Converter/DestinationController.m
@@ -20,6 +20,8 @@ - (void) awakeFromNib {
 
     // check the user defaults, populate the fields
     numPtr = [def objectForKey:@"sameAsOriginalVal"];
+    if (numPtr==nil)
+        numPtr = [NSNumber numberWithInteger:NSOnState];
     if (numPtr != nil)
         [sameAsOriginalToggle setIntValue:[numPtr intValue]];
     stringPtr = [def objectForKey:@"destPath"];
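
The hunk above guards against a first launch where the "sameAsOriginalVal" default has never been written, falling back to NSOnState. For comparison, the same behavior can be front-loaded with NSUserDefaults' registerDefaults: — a minimal sketch, not from the commit (only the key name is taken from the hunk above):

    // register a fallback value so -objectForKey: returns NSOnState until the
    // user actually changes the toggle; registered defaults are not persisted
    NSUserDefaults *def = [NSUserDefaults standardUserDefaults];
    [def registerDefaults:@{ @"sameAsOriginalVal": @(NSOnState) }];
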
6 changes: 3 additions & 3 deletions AVF Batch Converter/Info.plist
@@ -17,11 +17,11 @@
     <key>CFBundlePackageType</key>
     <string>APPL</string>
     <key>CFBundleShortVersionString</key>
-    <string>1.1</string>
+    <string>1.2</string>
     <key>CFBundleSignature</key>
     <string>????</string>
     <key>CFBundleVersion</key>
-    <string>1.1</string>
+    <string>1.2</string>
     <key>LSMinimumSystemVersion</key>
     <string>$(MACOSX_DEPLOYMENT_TARGET)</string>
     <key>NSHumanReadableCopyright</key>
@@ -31,6 +31,6 @@
     <key>NSPrincipalClass</key>
     <string>NSApplication</string>
     <key>CFBundleGetInfoString</key>
-    <string>1.1</string>
+    <string>1.2</string>
 </dict>
 </plist>
24 changes: 12 additions & 12 deletions AVF Batch Converter/VVAVFExportBasicSettingsCtrlr.m
@@ -444,7 +444,7 @@ - (void) populateUIWithVideoSettingsDict:(NSDictionary *)n {
         [h264KeyframesMatrix selectCellAtRow:0 column:0];
     else {
         [h264KeyframesMatrix selectCellAtRow:1 column:0];
-        [h264KeyframesField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+        [h264KeyframesField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
     }
     [self h264KeyframesMatrixUsed:h264KeyframesMatrix];
 
@@ -511,14 +511,14 @@ - (void) populateUIWithVideoSettingsDict:(NSDictionary *)n {
         [self noResizeVideoClicked:nil];
     else {
         [self resizeVideoTextFieldUsed:nil];
-        [vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+        [vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
     }
     tmpNum = [n objectForKey:AVVideoHeightKey];
     if (tmpNum==nil)
         [self noResizeVideoClicked:nil];
     else {
         [self resizeVideoTextFieldUsed:nil];
-        [vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+        [vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
     }
 }
 - (void) populateUIWithAudioSettingsDict:(NSDictionary *)n {
@@ -577,7 +577,7 @@ - (void) populateUIWithAudioSettingsDict:(NSDictionary *)n {
         [self noResampleAudioClicked:nil];
     else {
         [self resampleAudioClicked:nil];
-        [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+        [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
     }
 }
 }
@@ -654,7 +654,7 @@ - (NSString *) lengthyVideoDescription {
             break;
         // resizing
         case 1:
-            returnMe = [NSString stringWithFormat:@"%@ Sized to %ld x %ld.",returnMe,[vidWidthField integerValue],[vidHeightField integerValue]];
+            returnMe = [NSString stringWithFormat:@"%@ Sized to %ld x %ld.",returnMe,(long)[vidWidthField integerValue],(long)[vidHeightField integerValue]];
             break;
     }
 }
@@ -789,11 +789,11 @@ - (IBAction) resizeVideoTextFieldUsed:(id)sender {
     tmpString = [vidWidthField stringValue];
     tmpVal = (tmpString==nil) ? -1 : [tmpString integerValue];
     if (tmpVal<=0)
-        [vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(NSUInteger)displayVideoDims.width]];
+        [vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)displayVideoDims.width]];
     tmpString = [vidHeightField stringValue];
     tmpVal = (tmpString==nil) ? -1 : [tmpString integerValue];
     if (tmpVal<=0)
-        [vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(NSUInteger)displayVideoDims.height]];
+        [vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)displayVideoDims.height]];
 
 }
 
@@ -870,7 +870,7 @@ - (IBAction) audioResamplePUBUsed:(id)sender {
     NSNumber *tmpNum = [[audioResamplePUB selectedItem] representedObject];
     NSUInteger newSampleRate = (tmpNum==nil) ? 0.0 : [tmpNum unsignedLongValue];
     if (newSampleRate!=0)
-        [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",newSampleRate]];
+        [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)newSampleRate]];
 }
 - (IBAction) resampleAudioTextFieldUsed:(id)sender {
     //NSLog(@"%s",__func__);
@@ -897,7 +897,7 @@ - (IBAction) resampleAudioTextFieldUsed:(id)sender {
         intVal = 8000;
     else if (intVal>192000)
         intVal = 192000;
-    [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",intVal]];
+    [audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)intVal]];
     //if (intVal<8 || intVal>192)
     //    [self audioResamplePUBUsed:audioResamplePUB];
 }
@@ -913,15 +913,15 @@ - (void) populateMenu:(NSMenu *)popMenu withItemsForAudioProperty:(uint32_t)popQ
 
     OSStatus osErr = noErr;
     uint32_t replySize;
-    osErr = AudioFormatGetPropertyInfo(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, &replySize);
+    osErr = AudioFormatGetPropertyInfo(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, (UInt32 *)&replySize);
     if (osErr!=noErr) {
         NSLog(@"\t\terr %d at AudioFormatGetProperty() in %s",(int)osErr,__func__);
         NSLog(@"\t\tproperty is %c%c%c%c", (int)((popQueryProperty>>24)&0xFF), (int)((popQueryProperty>>16)&0xFF), (int)((popQueryProperty>>8)&0xFF), (int)((popQueryProperty>>0)&0xFF));
         NSLog(@"\t\tformat is %c%c%c%c", (int)((popAudioFormat>>24)&0xFF), (int)((popAudioFormat>>16)&0xFF), (int)((popAudioFormat>>8)&0xFF), (int)((popAudioFormat>>0)&0xFF));
     }
     else {
         void *replyData = malloc(replySize);
-        osErr = AudioFormatGetProperty(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, &replySize, replyData);
+        osErr = AudioFormatGetProperty(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, (UInt32 *)&replySize, replyData);
         if (osErr!=noErr) {
             NSLog(@"\t\terr %d at AudioFormatGetProperty() in %s",(int)osErr,__func__);
             NSLog(@"\t\tproperty is %c%c%c%c", (int)((popQueryProperty>>24)&0xFF), (int)((popQueryProperty>>16)&0xFF), (int)((popQueryProperty>>8)&0xFF), (int)((popQueryProperty>>0)&0xFF));
@@ -935,7 +935,7 @@ - (void) populateMenu:(NSMenu *)popMenu withItemsForAudioProperty:(uint32_t)popQ
     for (int i=0; i<rangeCount; ++i) {
         //NSLog(@"\t\trange %d is %f / %f",i,rangePtr->mMinimum,rangePtr->mMaximum);
         NSUInteger tmpInt = rangePtr->mMaximum;
-        NSMenuItem *tmpItem = [[[NSMenuItem alloc] initWithTitle:[NSString stringWithFormat:@"%ld",tmpInt] action:nil keyEquivalent:@""] autorelease];
+        NSMenuItem *tmpItem = [[[NSMenuItem alloc] initWithTitle:[NSString stringWithFormat:@"%ld",(unsigned long)tmpInt] action:nil keyEquivalent:@""] autorelease];
         [tmpItem setRepresentedObject:[NSNumber numberWithInteger:tmpInt]];
         [popMenu addItem:tmpItem];
         ++rangePtr;
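
A note on the pattern repeated through these hunks: NSInteger and NSUInteger are typedefs for long/unsigned long only in 64-bit code; in 32-bit code they are int/unsigned int, so passing them straight to a %ld specifier is incorrect on 32-bit builds — which this commit re-enables (see the ARCHS changes below). A minimal illustration, with assumed variable names:

    NSInteger count = 42;
    NSLog(@"%ld", count);           // wrong on 32-bit: %ld expects 'long', but NSInteger is 'int' there
    NSLog(@"%ld", (long)count);     // correct on both architectures: the cast matches the specifier
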
13 changes: 10 additions & 3 deletions HapInAVFoundation.xcodeproj/project.pbxproj
@@ -93,6 +93,7 @@
         1AC0CA1A1A437C2A00A9BC2C /* HapEncoderFrame.m in Sources */ = {isa = PBXBuildFile; fileRef = 1AC0CA181A437C2A00A9BC2C /* HapEncoderFrame.m */; };
         1ACF5DEC1A3E4127005DDF19 /* PixelFormats.c in Sources */ = {isa = PBXBuildFile; fileRef = 1ACF5DEB1A3E4127005DDF19 /* PixelFormats.c */; };
         1ACF5DED1A3E41FF005DDF19 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1A0518671A3E1DAB00FC80D2 /* CoreVideo.framework */; };
+        1AE092DC1AFE96F4004E866B /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
@@ -273,6 +274,7 @@
         1AC0CA171A437C2A00A9BC2C /* HapEncoderFrame.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HapEncoderFrame.h; sourceTree = "<group>"; };
         1AC0CA181A437C2A00A9BC2C /* HapEncoderFrame.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = HapEncoderFrame.m; sourceTree = "<group>"; };
         1ACF5DEB1A3E4127005DDF19 /* PixelFormats.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = PixelFormats.c; path = source/PixelFormats.c; sourceTree = SOURCE_ROOT; };
+        1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = ../../../../System/Library/Frameworks/VideoToolbox.framework; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -289,6 +291,7 @@
             isa = PBXFrameworksBuildPhase;
             buildActionMask = 2147483647;
             files = (
+                1AE092DC1AFE96F4004E866B /* VideoToolbox.framework in Frameworks */,
                 1A642D1C1A652CE900E2152D /* HapInAVFoundation.framework in Frameworks */,
                 1A642D181A65245900E2152D /* AVFoundation.framework in Frameworks */,
                 1A642D171A65245600E2152D /* CoreMedia.framework in Frameworks */,
@@ -441,6 +444,7 @@
                 1A0518671A3E1DAB00FC80D2 /* CoreVideo.framework */,
                 1A680E291A39F67E007678E5 /* CoreMedia.framework */,
                 1A680E231A39CFFF007678E5 /* AVFoundation.framework */,
+                1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */,
                 1A6060741A38D929008E693F /* Carbon.framework */,
                 1A60606F1A38D8DE008E693F /* Accelerate.framework */,
                 1A60606D1A38D8D9008E693F /* OpenGL.framework */,
@@ -981,7 +985,7 @@
         1A97F5BF1A38ABBF00E0DC74 /* Debug */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+                ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
                 CLANG_WARN_EMPTY_BODY = YES;
                 CLANG_WARN_OBJC_ROOT_CLASS = YES;
                 GCC_ENABLE_CPP_EXCEPTIONS = NO;
@@ -1002,15 +1006,15 @@
                 GCC_WARN_UNUSED_VARIABLE = YES;
                 MACOSX_DEPLOYMENT_TARGET = 10.10;
                 MTL_ENABLE_DEBUG_INFO = YES;
-                ONLY_ACTIVE_ARCH = YES;
+                ONLY_ACTIVE_ARCH = NO;
                 SDKROOT = macosx;
             };
             name = Debug;
         };
         1A97F5C01A38ABBF00E0DC74 /* Release */ = {
             isa = XCBuildConfiguration;
             buildSettings = {
-                ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+                ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
                 CLANG_WARN_EMPTY_BODY = YES;
                 CLANG_WARN_OBJC_ROOT_CLASS = YES;
                 GCC_ENABLE_CPP_EXCEPTIONS = NO;
@@ -1025,6 +1029,7 @@
                 GCC_WARN_UNUSED_VARIABLE = YES;
                 MACOSX_DEPLOYMENT_TARGET = 10.10;
                 MTL_ENABLE_DEBUG_INFO = NO;
+                ONLY_ACTIVE_ARCH = NO;
                 SDKROOT = macosx;
             };
             name = Release;
@@ -1047,6 +1052,7 @@
                 INFOPLIST_FILE = HapInAVFoundation/Info.plist;
                 INSTALL_PATH = "@rpath";
                 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks @loader_path/Frameworks";
+                ONLY_ACTIVE_ARCH = NO;
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 SKIP_INSTALL = YES;
             };
@@ -1070,6 +1076,7 @@
                 INFOPLIST_FILE = HapInAVFoundation/Info.plist;
                 INSTALL_PATH = "@rpath";
                 LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks @loader_path/Frameworks";
+                ONLY_ACTIVE_ARCH = NO;
                 PRODUCT_NAME = "$(TARGET_NAME)";
                 SKIP_INSTALL = YES;
             };
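
These build-setting changes switch the targets from 64-bit-only to fat 32/64-bit binaries and build every architecture even in Debug (ONLY_ACTIVE_ARCH = NO). A hedged sketch of how the two slices differ at compile time, which is what makes the (long)/(unsigned long) casts above necessary:

    #if __LP64__
    // compiled into the 64-bit slice: NSInteger is 'long'
    NSLog(@"64-bit slice");
    #else
    // compiled into the 32-bit slice: NSInteger is 'int'
    NSLog(@"32-bit slice");
    #endif
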
2 changes: 1 addition & 1 deletion — Xcode scheme (file name not shown)
@@ -44,7 +44,7 @@
         selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
         launchStyle = "0"
         useCustomWorkingDirectory = "NO"
-        buildConfiguration = "Release"
+        buildConfiguration = "Debug"
         ignoresPersistentStateOnLaunch = "NO"
         debugDocumentVersioning = "YES"
         allowLocationSimulation = "YES">
3 changes: 2 additions & 1 deletion HapInAVFoundation/AVAssetWriterHapInput.h
@@ -39,7 +39,7 @@ This class is the main interface for using AVFoundation to encode and output vid
     void *dxtEncoder; // actually a 'HapCodecDXTEncoderRef'
 
     OSSpinLock encoderProgressLock; // locks 'encoderProgressFrames' and 'encoderWaitingToRunOut'
-    NSMutableArray *encoderProgressFrames; // array of HapEncoderFrame instances. the frames are made when you append a pixel buffer, and are flagged as encoded and appended (as an encoded sample buffer) in the GCD-driven block that did the encoding
+    __block NSMutableArray *encoderProgressFrames; // array of HapEncoderFrame instances. the frames are made when you append a pixel buffer, and are flagged as encoded and appended (as an encoded sample buffer) in the GCD-driven block that did the encoding
     BOOL encoderWaitingToRunOut; // set to YES when the user marks this input as finished (the frames that are "in flight" via GCD need to finish up)
 
     CMTime lastEncodedDuration;
@@ -71,6 +71,7 @@ Begins encoding the passed pixel buffer and appends the encoded frame to this in
 It's not necessary to check this- but for best results, you should mark the AVAssetWriterHapInput as finished, wait until "finishedEncoding" returns a YES, and then tell your AVAssetWriter to finish writing. If you don't wait for this method to return YES, the last X pixel buffers may get dropped (depends how long it takes to wrap up, could be no dropped frames, could be a couple).
 */
 - (BOOL) finishedEncoding;
+- (void) finishEncoding;
 
 
 @end
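
The doc comment above describes the wrap-up sequence; here is a hedged sketch of a caller driving it with the newly added finishEncoding method (the variable names and the poll interval are assumptions, not from the source):

    // hypothetical wrap-up: mark the input finished, pump/poll until the
    // in-flight GCD encodes have drained, then finish the writer
    [hapInput markAsFinished];
    while (![hapInput finishedEncoding]) {
        [hapInput finishEncoding]; // added in this commit
        usleep(10000);             // 10 ms; arbitrary poll interval
    }
    [assetWriter finishWritingWithCompletionHandler:^{
        NSLog(@"export complete");
    }];
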
19 changes: 17 additions & 2 deletions HapInAVFoundation/AVAssetWriterHapInput.m
@@ -164,7 +164,7 @@ - (id) initWithOutputSettings:(NSDictionary *)n {
         encoderInputPxlFmtBytesPerRow = roundUpToMultipleOf16(((uint32_t)exportImgSize.width * 4));
         formatConvertPoolLength = encoderInputPxlFmtBytesPerRow*(NSUInteger)(exportImgSize.height);
 
-        //encodeQueue = dispatch_queue_create("HapEncode", dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, DISPATCH_QUEUE_PRIORITY_HIGH, -1));
+        //encodeQueue = dispatch_queue_create("HapEncode", dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, QOS_CLASS_USER_INITIATED, -1));
         encodeQueue = dispatch_queue_create("HapEncode", DISPATCH_QUEUE_CONCURRENT);
     }
     return self;
@@ -493,6 +493,16 @@ - (BOOL) finishedEncoding {
     OSSpinLockUnlock(&encoderProgressLock);
     return returnMe;
 }
+- (void) finishEncoding {
+    OSSpinLockLock(&encoderProgressLock);
+    BOOL needsToEncodeMore = NO;
+    if (encoderWaitingToRunOut || [encoderProgressFrames count]>0)
+        needsToEncodeMore = YES;
+    OSSpinLockUnlock(&encoderProgressLock);
+
+    if (needsToEncodeMore)
+        [self appendEncodedFrames];
+}
 - (void)markAsFinished {
     //NSLog(@"%s",__func__);
     OSSpinLockLock(&encoderProgressLock);
@@ -554,8 +564,13 @@ - (void) appendEncodedFrames {
 
 
     OSSpinLockLock(&encoderProgressLock);
+    if (![super isReadyForMoreMediaData]) {
+        NSLog(@"\t\terr: not ready for more media data, %s",__func__);
+        [encoderProgressFrames removeAllObjects];
+        [super markAsFinished];
+    }
     // first of all, if there's only one sample and i'm waiting to finish- append the last sample and then i'm done (yay!)
-    if (encoderWaitingToRunOut && [encoderProgressFrames count]<=1) {
+    else if (encoderWaitingToRunOut && [encoderProgressFrames count]<=1) {
         HapEncoderFrame *lastFrame = ([encoderProgressFrames count]<1) ? nil : [encoderProgressFrames objectAtIndex:0];
         if (lastFrame!=nil && [lastFrame encoded]) {
             //NSLog(@"\t\tone frame left and it's encoded, making a sample buffer and then appending it");
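
The commented-out line in the init hunk is also corrected: DISPATCH_QUEUE_PRIORITY_HIGH is a global-queue priority, not a QoS class, so the comment now references QOS_CLASS_USER_INITIATED. A sketch of what that line would do if enabled — the shipped code still creates a plain concurrent queue:

    // a concurrent queue whose blocks run at user-initiated QoS (OS X 10.10+);
    // the final argument is a relative priority within that QoS class
    dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, QOS_CLASS_USER_INITIATED, -1);
    encodeQueue = dispatch_queue_create("HapEncode", attr);
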
(Diffs for the remaining 5 changed files were not loaded.)
