diff --git a/AVF Batch Converter/DestinationController.m b/AVF Batch Converter/DestinationController.m
index b5731ee..aa96431 100644
--- a/AVF Batch Converter/DestinationController.m
+++ b/AVF Batch Converter/DestinationController.m
@@ -20,6 +20,8 @@ - (void) awakeFromNib {
 	// check the user defaults, populate the fields
 	numPtr = [def objectForKey:@"sameAsOriginalVal"];
+	if (numPtr==nil)
+		numPtr = [NSNumber numberWithInteger:NSOnState];
 	if (numPtr != nil)
 		[sameAsOriginalToggle setIntValue:[numPtr intValue]];
 	stringPtr = [def objectForKey:@"destPath"];
diff --git a/AVF Batch Converter/Info.plist b/AVF Batch Converter/Info.plist
index 414f9c9..61e0379 100644
--- a/AVF Batch Converter/Info.plist
+++ b/AVF Batch Converter/Info.plist
@@ -17,11 +17,11 @@
 	<key>CFBundlePackageType</key>
 	<string>APPL</string>
 	<key>CFBundleShortVersionString</key>
-	<string>1.1</string>
+	<string>1.2</string>
 	<key>CFBundleSignature</key>
 	<string>????</string>
 	<key>CFBundleVersion</key>
-	<string>1.1</string>
+	<string>1.2</string>
 	<key>LSMinimumSystemVersion</key>
 	<string>$(MACOSX_DEPLOYMENT_TARGET)</string>
 	<key>NSHumanReadableCopyright</key>
@@ -31,6 +31,6 @@
 	<key>NSPrincipalClass</key>
 	<string>NSApplication</string>
 	<key>CFBundleGetInfoString</key>
-	<string>1.1</string>
+	<string>1.2</string>
 </dict>
 </plist>
diff --git a/AVF Batch Converter/VVAVFExportBasicSettingsCtrlr.m b/AVF Batch Converter/VVAVFExportBasicSettingsCtrlr.m
index e507694..d29ee15 100644
--- a/AVF Batch Converter/VVAVFExportBasicSettingsCtrlr.m
+++ b/AVF Batch Converter/VVAVFExportBasicSettingsCtrlr.m
@@ -444,7 +444,7 @@ - (void) populateUIWithVideoSettingsDict:(NSDictionary *)n {
 		[h264KeyframesMatrix selectCellAtRow:0 column:0];
 	else {
 		[h264KeyframesMatrix selectCellAtRow:1 column:0];
-		[h264KeyframesField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+		[h264KeyframesField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
 	}
 	[self h264KeyframesMatrixUsed:h264KeyframesMatrix];
@@ -511,14 +511,14 @@ - (void) populateUIWithVideoSettingsDict:(NSDictionary *)n {
 		[self noResizeVideoClicked:nil];
 	else {
 		[self resizeVideoTextFieldUsed:nil];
-		[vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+		[vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
 	}
 	tmpNum = [n objectForKey:AVVideoHeightKey];
 	if (tmpNum==nil)
 		[self noResizeVideoClicked:nil];
 	else {
 		[self resizeVideoTextFieldUsed:nil];
-		[vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+		[vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
 	}
 }
 - (void) populateUIWithAudioSettingsDict:(NSDictionary *)n {
@@ -577,7 +577,7 @@ - (void) populateUIWithAudioSettingsDict:(NSDictionary *)n {
 			[self noResampleAudioClicked:nil];
 		else {
 			[self resampleAudioClicked:nil];
-			[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",[tmpNum integerValue]]];
+			[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(long)[tmpNum integerValue]]];
 		}
 	}
 }
@@ -654,7 +654,7 @@ - (NSString *) lengthyVideoDescription {
 			break;
 		//	resizing
 		case 1:
-			returnMe = [NSString stringWithFormat:@"%@ Sized to %ld x %ld.",returnMe,[vidWidthField integerValue],[vidHeightField integerValue]];
+			returnMe = [NSString stringWithFormat:@"%@ Sized to %ld x %ld.",returnMe,(long)[vidWidthField integerValue],(long)[vidHeightField integerValue]];
 			break;
 		}
 	}
@@ -789,11 +789,11 @@ - (IBAction) resizeVideoTextFieldUsed:(id)sender {
 	tmpString = [vidWidthField stringValue];
 	tmpVal = (tmpString==nil) ? -1 : [tmpString integerValue];
 	if (tmpVal<=0)
-		[vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(NSUInteger)displayVideoDims.width]];
+		[vidWidthField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)displayVideoDims.width]];
 	tmpString = [vidHeightField stringValue];
 	tmpVal = (tmpString==nil) ? -1 : [tmpString integerValue];
 	if (tmpVal<=0)
-		[vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(NSUInteger)displayVideoDims.height]];
+		[vidHeightField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)displayVideoDims.height]];
 }
@@ -870,7 +870,7 @@ - (IBAction) audioResamplePUBUsed:(id)sender {
 	NSNumber *tmpNum = [[audioResamplePUB selectedItem] representedObject];
 	NSUInteger newSampleRate = (tmpNum==nil) ? 0.0 : [tmpNum unsignedLongValue];
 	if (newSampleRate!=0)
-		[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",newSampleRate]];
+		[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)newSampleRate]];
 }
 - (IBAction) resampleAudioTextFieldUsed:(id)sender {
 	//NSLog(@"%s",__func__);
@@ -897,7 +897,7 @@ - (IBAction) resampleAudioTextFieldUsed:(id)sender {
 		intVal = 8000;
 	else if (intVal>192000)
 		intVal = 192000;
-	[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",intVal]];
+	[audioResampleField setStringValue:[NSString stringWithFormat:@"%ld",(unsigned long)intVal]];
 	//if (intVal<8 || intVal>192)
 	//	[self audioResamplePUBUsed:audioResamplePUB];
 }
@@ -913,7 +913,7 @@ - (void) populateMenu:(NSMenu *)popMenu withItemsForAudioProperty:(uint32_t)popQ
 	OSStatus osErr = noErr;
 	uint32_t replySize;
-	osErr = AudioFormatGetPropertyInfo(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, &replySize);
+	osErr = AudioFormatGetPropertyInfo(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, (UInt32 *)&replySize);
 	if (osErr!=noErr) {
 		NSLog(@"\t\terr %d at AudioFormatGetProperty() in %s",(int)osErr,__func__);
 		NSLog(@"\t\tproperty is %c%c%c%c", (int)((popQueryProperty>>24)&0xFF), (int)((popQueryProperty>>16)&0xFF), (int)((popQueryProperty>>8)&0xFF), (int)((popQueryProperty>>0)&0xFF));
@@ -921,7 +921,7 @@ - (void) populateMenu:(NSMenu *)popMenu withItemsForAudioProperty:(uint32_t)popQ
 	}
 	else {
 		void *replyData = malloc(replySize);
-		osErr = AudioFormatGetProperty(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, &replySize, replyData);
+		osErr = AudioFormatGetProperty(popQueryProperty, sizeof(popAudioFormat), &popAudioFormat, (UInt32 *)&replySize, replyData);
 		if (osErr!=noErr) {
 			NSLog(@"\t\terr %d at AudioFormatGetProperty() in %s",(int)osErr,__func__);
 			NSLog(@"\t\tproperty is %c%c%c%c", (int)((popQueryProperty>>24)&0xFF), (int)((popQueryProperty>>16)&0xFF), (int)((popQueryProperty>>8)&0xFF), (int)((popQueryProperty>>0)&0xFF));
@@ -935,7 +935,7 @@ - (void) populateMenu:(NSMenu *)popMenu withItemsForAudioProperty:(uint32_t)popQ
 		for (int i=0; i<(replySize/sizeof(AudioValueRange)); ++i) {
 			//NSLog(@"\t\trange is %f - %f",rangePtr->mMinimum,rangePtr->mMaximum);
 			NSUInteger tmpInt = rangePtr->mMaximum;
-			NSMenuItem *tmpItem = [[[NSMenuItem alloc] initWithTitle:[NSString stringWithFormat:@"%ld",tmpInt] action:nil keyEquivalent:@""] autorelease];
+			NSMenuItem *tmpItem = [[[NSMenuItem alloc] initWithTitle:[NSString stringWithFormat:@"%ld",(unsigned long)tmpInt] action:nil keyEquivalent:@""] autorelease];
 			[tmpItem setRepresentedObject:[NSNumber numberWithInteger:tmpInt]];
 			[popMenu addItem:tmpItem];
 			++rangePtr;
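Note: the repeated change above is one fix applied throughout VVAVFExportBasicSettingsCtrlr.m. NSInteger and NSUInteger are int/unsigned int under a 32-bit build but long/unsigned long under a 64-bit build, so formatting them with %ld needs an explicit cast. A minimal standalone sketch of the pattern (hypothetical values, not code from the file):

    // NSInteger is 'int' on i386 and 'long' on x86_64, so %ld without a cast
    // trips -Wformat warnings in a 32-bit build
    NSInteger width = 1920;
    NSUInteger sampleRate = 48000;
    // the casts make the conversions correct for both slices
    NSString *widthString = [NSString stringWithFormat:@"%ld",(long)width];
    NSString *rateString = [NSString stringWithFormat:@"%lu",(unsigned long)sampleRate];

This matters for this patch because the ARCHS changes below add a 32-bit slice to the builds; the casts keep the output correct on both architectures.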
diff --git a/HapInAVFoundation.xcodeproj/project.pbxproj b/HapInAVFoundation.xcodeproj/project.pbxproj
index aa10a3e..9afef74 100644
--- a/HapInAVFoundation.xcodeproj/project.pbxproj
+++ b/HapInAVFoundation.xcodeproj/project.pbxproj
@@ -93,6 +93,7 @@
 		1AC0CA1A1A437C2A00A9BC2C /* HapEncoderFrame.m in Sources */ = {isa = PBXBuildFile; fileRef = 1AC0CA181A437C2A00A9BC2C /* HapEncoderFrame.m */; };
 		1ACF5DEC1A3E4127005DDF19 /* PixelFormats.c in Sources */ = {isa = PBXBuildFile; fileRef = 1ACF5DEB1A3E4127005DDF19 /* PixelFormats.c */; };
 		1ACF5DED1A3E41FF005DDF19 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1A0518671A3E1DAB00FC80D2 /* CoreVideo.framework */; };
+		1AE092DC1AFE96F4004E866B /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
@@ -273,6 +274,7 @@
 		1AC0CA171A437C2A00A9BC2C /* HapEncoderFrame.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HapEncoderFrame.h; sourceTree = "<group>"; };
 		1AC0CA181A437C2A00A9BC2C /* HapEncoderFrame.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = HapEncoderFrame.m; sourceTree = "<group>"; };
 		1ACF5DEB1A3E4127005DDF19 /* PixelFormats.c */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.c; name = PixelFormats.c; path = source/PixelFormats.c; sourceTree = SOURCE_ROOT; };
+		1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = ../../../../System/Library/Frameworks/VideoToolbox.framework; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -289,6 +291,7 @@
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				1AE092DC1AFE96F4004E866B /* VideoToolbox.framework in Frameworks */,
 				1A642D1C1A652CE900E2152D /* HapInAVFoundation.framework in Frameworks */,
 				1A642D181A65245900E2152D /* AVFoundation.framework in Frameworks */,
 				1A642D171A65245600E2152D /* CoreMedia.framework in Frameworks */,
@@ -441,6 +444,7 @@
 				1A0518671A3E1DAB00FC80D2 /* CoreVideo.framework */,
 				1A680E291A39F67E007678E5 /* CoreMedia.framework */,
 				1A680E231A39CFFF007678E5 /* AVFoundation.framework */,
+				1AE092DB1AFE96F4004E866B /* VideoToolbox.framework */,
 				1A6060741A38D929008E693F /* Carbon.framework */,
 				1A60606F1A38D8DE008E693F /* Accelerate.framework */,
 				1A60606D1A38D8D9008E693F /* OpenGL.framework */,
@@ -981,7 +985,7 @@
 		1A97F5BF1A38ABBF00E0DC74 /* Debug */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_WARN_EMPTY_BODY = YES;
 				CLANG_WARN_OBJC_ROOT_CLASS = YES;
 				GCC_ENABLE_CPP_EXCEPTIONS = NO;
@@ -1002,7 +1006,7 @@
 				GCC_WARN_UNUSED_VARIABLE = YES;
 				MACOSX_DEPLOYMENT_TARGET = 10.10;
 				MTL_ENABLE_DEBUG_INFO = YES;
-				ONLY_ACTIVE_ARCH = YES;
+				ONLY_ACTIVE_ARCH = NO;
 				SDKROOT = macosx;
 			};
 			name = Debug;
@@ -1010,7 +1014,7 @@
 		1A97F5C01A38ABBF00E0DC74 /* Release */ = {
 			isa = XCBuildConfiguration;
 			buildSettings = {
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_WARN_EMPTY_BODY = YES;
 				CLANG_WARN_OBJC_ROOT_CLASS = YES;
 				GCC_ENABLE_CPP_EXCEPTIONS = NO;
@@ -1025,6 +1029,7 @@
 				GCC_WARN_UNUSED_VARIABLE = YES;
 				MACOSX_DEPLOYMENT_TARGET = 10.10;
 				MTL_ENABLE_DEBUG_INFO = NO;
+				ONLY_ACTIVE_ARCH = NO;
 				SDKROOT = macosx;
 			};
 			name = Release;
@@ -1047,6 +1052,7 @@
 				INFOPLIST_FILE = HapInAVFoundation/Info.plist;
 				INSTALL_PATH = "@rpath";
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks @loader_path/Frameworks";
+				ONLY_ACTIVE_ARCH = NO;
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				SKIP_INSTALL = YES;
 			};
@@ -1070,6 +1076,7 @@
 				INFOPLIST_FILE = HapInAVFoundation/Info.plist;
 				INSTALL_PATH = "@rpath";
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks @loader_path/Frameworks";
+				ONLY_ACTIVE_ARCH = NO;
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				SKIP_INSTALL = YES;
 			};
diff --git a/HapInAVFoundation.xcodeproj/xcshareddata/xcschemes/AVF Batch Converter.xcscheme b/HapInAVFoundation.xcodeproj/xcshareddata/xcschemes/AVF Batch Converter.xcscheme
index cffdbc6..351b7ee 100644
--- a/HapInAVFoundation.xcodeproj/xcshareddata/xcschemes/AVF Batch Converter.xcscheme
+++ b/HapInAVFoundation.xcodeproj/xcshareddata/xcschemes/AVF Batch Converter.xcscheme
@@ -44,7 +44,7 @@
       selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
       launchStyle = "0"
       useCustomWorkingDirectory = "NO"
-      buildConfiguration = "Release"
+      buildConfiguration = "Debug"
       ignoresPersistentStateOnLaunch = "NO"
      debugDocumentVersioning = "YES"
      allowLocationSimulation = "YES">
diff --git a/HapInAVFoundation/AVAssetWriterHapInput.h b/HapInAVFoundation/AVAssetWriterHapInput.h
index 5d55182..7f66eda 100644
--- a/HapInAVFoundation/AVAssetWriterHapInput.h
+++ b/HapInAVFoundation/AVAssetWriterHapInput.h
@@ -39,7 +39,7 @@ This class is the main interface for using AVFoundation to encode and output vid
 	void *dxtEncoder;	// actually a 'HapCodecDXTEncoderRef'
 	OSSpinLock encoderProgressLock;	// locks 'encoderProgressFrames' and 'encoderWaitingToRunOut'
-	NSMutableArray *encoderProgressFrames;	// array of HapEncoderFrame instances. the frames are made when you append a pixel buffer, and are flagged as encoded and appended (as an encoded sample buffer) in the GCD-driven block that did the encoding
+	__block NSMutableArray *encoderProgressFrames;	// array of HapEncoderFrame instances. the frames are made when you append a pixel buffer, and are flagged as encoded and appended (as an encoded sample buffer) in the GCD-driven block that did the encoding
 	BOOL encoderWaitingToRunOut;	// set to YES when the user marks this input as finished (the frames that are "in flight" via GCD need to finish up)
 	CMTime lastEncodedDuration;
@@ -71,6 +71,7 @@ Begins encoding the passed pixel buffer and appends the encoded frame to this in
 It's not necessary to check this- but for best results, you should mark the AVAssetWriterHapInput as finished, wait until "finishedEncoding" returns a YES, and then tell your AVAssetWriter to finish writing. If you don't wait for this method to return YES, the last X pixel buffers may get dropped (depends how long it takes to wrap up, could be no dropped frames, could be a couple).
 */
 - (BOOL) finishedEncoding;
+- (void) finishEncoding;
 
 @end
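Note: the header now pairs the existing -finishedEncoding poll with a new -finishEncoding method. A hedged sketch of how a caller might wind down an export with these two calls (hapInput and assetWriter are assumed to be an already-configured AVAssetWriterHapInput and AVAssetWriter; the polling interval is arbitrary):

    // mark the input as finished, then wait for the in-flight GCD encodes to drain
    [hapInput markAsFinished];
    while (![hapInput finishedEncoding]) {
        // nudge the input to append any frames whose encode has completed
        [hapInput finishEncoding];
        [NSThread sleepForTimeInterval:0.05];
    }
    // per the header comment above, only finish the writer once encoding has run out
    [assetWriter finishWritingWithCompletionHandler:^{
        NSLog(@"export complete");
    }];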
diff --git a/HapInAVFoundation/AVAssetWriterHapInput.m b/HapInAVFoundation/AVAssetWriterHapInput.m
index cf9f4d5..7c2ae0a 100644
--- a/HapInAVFoundation/AVAssetWriterHapInput.m
+++ b/HapInAVFoundation/AVAssetWriterHapInput.m
@@ -164,7 +164,7 @@ - (id) initWithOutputSettings:(NSDictionary *)n {
 		encoderInputPxlFmtBytesPerRow = roundUpToMultipleOf16(((uint32_t)exportImgSize.width * 4));
 		formatConvertPoolLength = encoderInputPxlFmtBytesPerRow*(NSUInteger)(exportImgSize.height);
 		
-		//encodeQueue = dispatch_queue_create("HapEncode", dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, DISPATCH_QUEUE_PRIORITY_HIGH, -1));
+		//encodeQueue = dispatch_queue_create("HapEncode", dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, QOS_CLASS_USER_INITIATED, -1));
 		encodeQueue = dispatch_queue_create("HapEncode", DISPATCH_QUEUE_CONCURRENT);
 	}
 	return self;
@@ -493,6 +493,16 @@ - (BOOL) finishedEncoding {
 	OSSpinLockUnlock(&encoderProgressLock);
 	return returnMe;
 }
+- (void) finishEncoding {
+	OSSpinLockLock(&encoderProgressLock);
+	BOOL needsToEncodeMore = NO;
+	if (encoderWaitingToRunOut || [encoderProgressFrames count]>0)
+		needsToEncodeMore = YES;
+	OSSpinLockUnlock(&encoderProgressLock);
+	
+	if (needsToEncodeMore)
+		[self appendEncodedFrames];
+}
 - (void)markAsFinished {
 	//NSLog(@"%s",__func__);
 	OSSpinLockLock(&encoderProgressLock);
@@ -554,8 +564,13 @@ - (void) appendEncodedFrames {
 	OSSpinLockLock(&encoderProgressLock);
+	if (![super isReadyForMoreMediaData]) {
+		NSLog(@"\t\terr: not ready for more media data, %s",__func__);
+		[encoderProgressFrames removeAllObjects];
+		[super markAsFinished];
+	}
 	// first of all, if there's only one sample and i'm waiting to finish- append the last sample and then i'm done (yay!)
-	if (encoderWaitingToRunOut && [encoderProgressFrames count]<=1) {
+	else if (encoderWaitingToRunOut && [encoderProgressFrames count]<=1) {
 		HapEncoderFrame *lastFrame = ([encoderProgressFrames count]<1) ? nil : [encoderProgressFrames objectAtIndex:0];
 		if (lastFrame!=nil && [lastFrame encoded]) {
 			//NSLog(@"\t\tone frame left and it's encoded, making a sample buffer and then appending it");
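Note: the commented-out line above now names QOS_CLASS_USER_INITIATED, which is the kind of value dispatch_queue_attr_make_with_qos_class() actually takes (the old comment passed a dispatch priority constant, which is a different type). If the queue were ever created with an explicit QoS, the call would look roughly like this (a sketch; the file currently uses the plain concurrent attribute):

    // build a concurrent-queue attribute tagged with user-initiated QoS (OS X 10.10+)
    dispatch_queue_attr_t attr = dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_CONCURRENT, QOS_CLASS_USER_INITIATED, -1);
    dispatch_queue_t encodeQueue = dispatch_queue_create("HapEncode", attr);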
diff --git a/HapInAVFoundation/AVPlayerItemHapDXTOutput.m b/HapInAVFoundation/AVPlayerItemHapDXTOutput.m
index 2b466ab..a5cbe08 100644
--- a/HapInAVFoundation/AVPlayerItemHapDXTOutput.m
+++ b/HapInAVFoundation/AVPlayerItemHapDXTOutput.m
@@ -122,57 +122,89 @@ - (void) dealloc {
 }
 - (HapDecoderFrame *) allocFrameClosestToTime:(CMTime)n {
 	HapDecoderFrame *returnMe = nil;
-	
+	BOOL foundExactMatchToTarget = NO;
+	BOOL exactMatchToTargetWasDecoded = NO;
 	OSSpinLockLock(&propertyLock);
 	if (track!=nil && gen!=nil) {
-		// check and see if any of my frames have finished decoding
+		// copy all the frames that have finished decoding into the 'decodedFrames' array
 		NSMutableArray *decodedFrames = nil;
 		for (HapDecoderFrame *framePtr in decompressedFrames) {
-			// if this frame has been decoded, i'll either be returning it or adding it to an array and returning something from the array
-			if ([framePtr decoded]) {
-				// if there's an array of completed frames, add this (decoded) frame to it
-				if (decodedFrames!=nil)
-					[decodedFrames addObject:framePtr];
-				// else there's no array of completed frames...
-				else {
-					// if i haven't found a frame to return yet, i'll be returning this one
-					if (returnMe==nil)
-						returnMe = framePtr;
-					// else i already found a frame to return- i need to start using the decodedFrames array!
-					else {
-						decodedFrames = [NSMutableArray arrayWithCapacity:0];
-						[decodedFrames addObject:returnMe];
-						returnMe = nil;
-						[decodedFrames addObject:framePtr];
-					}
-				}
-			}
+			BOOL decodedFrame = [framePtr decoded];
+			// i need to know if i encounter a frame that is being decompressed which contains the passed time (if not, i'll have to start decompressing one later)
+			if ([framePtr containsTime:n]) {
+				foundExactMatchToTarget = YES;
+				exactMatchToTargetWasDecoded = decodedFrame;
+			}
+			// if the frame is decoded, stick it in an array of decoded frames
+			if (decodedFrame) {
+				if (decodedFrames==nil)
+					decodedFrames = [NSMutableArray arrayWithCapacity:0];
+				[decodedFrames addObject:framePtr];
+			}
 		}
-		// if i have an array of decoded frames, sort it by the frame's time, find the frame i'll be returning, remove all of them from the array
-		if (decodedFrames!=nil) {
-			[decodedFrames sortUsingComparator:^(id obj1, id obj2) {
-				return (NSComparisonResult)CMTimeCompare(CMSampleBufferGetPresentationTimeStamp([obj1 hapSampleBuffer]), CMSampleBufferGetPresentationTimeStamp([obj2 hapSampleBuffer]));
-			}];
-			returnMe = [decodedFrames objectAtIndex:0];
-			[returnMe retain];
-			for (id anObj in decodedFrames)
-				[decompressedFrames removeObjectIdenticalTo:anObj];
+		
+		// now find either an exact match to the target time (if available) or the closest available decoded frame...
+		
+		// if i found an exact match and the exact match was already decoded, just run through and find it
+		if (foundExactMatchToTarget && exactMatchToTargetWasDecoded) {
+			for (HapDecoderFrame *decFrame in decodedFrames) {
+				if ([decFrame containsTime:n]) {
+					returnMe = [decFrame retain];
+				}
+			}
+		}
+		// else i either didn't find an exact match to the target time, or i did but it's not done being decoded yet- return the closest decoded frame
+		else {
+			// find the time of the target frame
+			AVSampleCursor *targetFrameCursor = [track makeSampleCursorWithPresentationTimeStamp:n];
+			CMTime targetFrameTime = [targetFrameCursor presentationTimeStamp];
+			// run through all the decoded frames, looking for the frame with the smallest delta > 0
+			double runningDelta = 9999.0;
+			for (HapDecoderFrame *framePtr in decodedFrames) {
+				CMSampleBufferRef sampleBuffer = [framePtr hapSampleBuffer];
+				CMTime frameTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+				double frameDeltaInSeconds = CMTimeGetSeconds(CMTimeSubtract(targetFrameTime, frameTime));
+				if (frameDeltaInSeconds>0.0 && frameDeltaInSeconds<runningDelta) {
+					runningDelta = frameDeltaInSeconds;
+					if (returnMe != nil)
+						[returnMe release];
+					returnMe = [framePtr retain];
+				}
+			}
+		}
+		// increment the age of the cached frames, removing any that haven't been used in a while
+		for (HapDecoderFrame *framePtr in [[decompressedFrames copy] autorelease]) {
+			[framePtr incrementAge];
+			if ([framePtr age]>5) {
+				[decompressedFrames removeObjectIdenticalTo:framePtr];
+			}
+		}
+	}
+	OSSpinLockUnlock(&propertyLock);
+	
+	// if i didn't find an exact match to the target then i need to start decompressing that frame (i know it's async but i'm going to do this outside the lock anyway)
+	if (!foundExactMatchToTarget) {
+		// now use GCD to start decoding the frame
 		dispatch_async(decodeQueue, ^{
 			[self _decodeFrameForTime:n];
 		});
-	}
-	OSSpinLockUnlock(&propertyLock);
-	return returnMe;
+	}
+	
+	return returnMe;
 }
 - (HapDecoderFrame *) allocFrameForTime:(CMTime)n {
@@ -225,10 +257,10 @@ - (void) _decodeSampleBuffer:(CMSampleBufferRef)newSample {
 	//NSLog(@"%s ... %p",__func__,newSample);
 	HapDecoderFrameAllocBlock localAllocFrameBlock = nil;
 	AVFHapDXTPostDecodeBlock localPostDecodeBlock = nil;
-	CMTime cursorTime = CMSampleBufferGetPresentationTimeStamp(newSample);
+	//CMTime cursorTime = CMSampleBufferGetPresentationTimeStamp(newSample);
 	OSSpinLockLock(&propertyLock);
-	lastGeneratedSampleTime = cursorTime;
+	//lastGeneratedSampleTime = cursorTime;
 	localAllocFrameBlock = (allocFrameBlock==nil) ? nil : [allocFrameBlock retain];
 	localPostDecodeBlock = (postDecodeBlock==nil) ? nil : [postDecodeBlock retain];
 	BOOL localOutputAsRGB = outputAsRGB;
@@ -293,6 +325,14 @@ - (void) _decodeSampleBuffer:(CMSampleBufferRef)newSample {
 	if (newDecoderFrame==nil)
 		NSLog(@"\t\terr: decoder frame nil, %s",__func__);
 	else {
+		
+		// add the frame i just decoded into the 'decompressedFrames' array immediately (so other stuff will "see" the frame and know it's being decoded)
+		if (newDecoderFrame!=nil) {
+			OSSpinLockLock(&propertyLock);
+			[decompressedFrames addObject:newDecoderFrame];
+			OSSpinLockUnlock(&propertyLock);
+		}
+		
 		// decode the frame (into DXT data)
 		NSSize imgSize = [newDecoderFrame imgSize];
 		NSSize dxtImgSize = [newDecoderFrame dxtImgSize];
@@ -409,14 +449,16 @@ - (void) _decodeSampleBuffer:(CMSampleBufferRef)newSample {
 	}
 	
-	OSSpinLockLock(&propertyLock);
+	
+	//OSSpinLockLock(&propertyLock);
 	// add the frame i just decoded into the 'decompressedFrames' array
 	if (newDecoderFrame!=nil) {
-		[decompressedFrames addObject:newDecoderFrame];
+		//[decompressedFrames addObject:newDecoderFrame];
 		[newDecoderFrame release];
 		newDecoderFrame = nil;
 	}
-	OSSpinLockUnlock(&propertyLock);
+	//OSSpinLockUnlock(&propertyLock);
+	
 	if (localAllocFrameBlock!=nil)
diff --git a/HapInAVFoundation/HapDecoderFrame.h b/HapInAVFoundation/HapDecoderFrame.h
index 85a0453..062f599 100644
--- a/HapInAVFoundation/HapDecoderFrame.h
+++ b/HapInAVFoundation/HapDecoderFrame.h
@@ -35,6 +35,7 @@ This object represents a frame, and holds all the values necessary to decode a h
 	id userInfo;	// RETAINED, arbitrary ptr used to keep a piece of user-specified data with the frame
 	BOOL decoded;	// when decoding is complete, this is set to YES.
+	int age;	// used by the output during decoding, once a frame is "too old" (hasn't been used in a while) it's removed from the output's local cache of decompressed frames
 }
 
 /**
@@ -74,6 +75,7 @@ Returns an "empty" decoder frame- all the fields except "dxtData" and "dxtDataSi
 @property (assign,readwrite,setter=setRGBImgSize:) NSSize rgbImgSize;
 @property (readonly) CMTime presentationTime;
+- (BOOL) containsTime:(CMTime)n;
 
 - (CMSampleBufferRef) allocCMSampleBufferFromRGBData;
 
@@ -81,5 +83,7 @@ Returns an "empty" decoder frame- all the fields except "dxtData" and "dxtDataSi
 @property (retain,readwrite) id userInfo;
 // Returns YES when the frame has been decoded
 @property (assign,readwrite) BOOL decoded;
+- (void) incrementAge;
+- (int) age;
 
 @end
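Note: with the containsTime: and age additions above, allocFrameClosestToTime: now prefers a decoded frame whose CMTimeRange contains the target, falls back to the decoded frame with the smallest positive delta behind the target, and evicts cached frames that go unused for several calls. A hedged sketch of a caller (playerItem and hapOutput are assumed locals; the release follows the alloc-prefix ownership convention this non-ARC codebase uses):

    // per display tick: ask the output for the best frame it currently has
    CMTime displayTime = [playerItem currentTime];
    HapDecoderFrame *frame = [hapOutput allocFrameClosestToTime:displayTime];
    if (frame != nil) {
        // upload [frame dxtData] to a texture, draw, etc.
        [frame release];    // 'alloc'-prefixed methods return a retained object
    }

If no cached frame contains the requested time, the output kicks off an async decode for it, so a subsequent call is likely to get the exact frame.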
diff --git a/HapInAVFoundation/HapDecoderFrame.m b/HapInAVFoundation/HapDecoderFrame.m
index e3b830d..c782f40 100644
--- a/HapInAVFoundation/HapDecoderFrame.m
+++ b/HapInAVFoundation/HapDecoderFrame.m
@@ -63,6 +63,7 @@ - (id) initEmptyWithHapSampleBuffer:(CMSampleBufferRef)sb {
 	atomicLock = OS_SPINLOCK_INIT;
 	userInfo = nil;
 	decoded = NO;
+	age = 0;
 	
 	hapSampleBuffer = sb;
 	if (hapSampleBuffer==NULL) {
@@ -197,6 +198,14 @@ - (NSSize) rgbImgSize {
 - (CMTime) presentationTime {
 	return ((hapSampleBuffer==NULL) ? kCMTimeInvalid : CMSampleBufferGetPresentationTimeStamp(hapSampleBuffer));
 }
+- (BOOL) containsTime:(CMTime)n {
+	if (hapSampleBuffer==NULL)
+		return NO;
+	CMTimeRange timeRange = CMTimeRangeMake(CMSampleBufferGetPresentationTimeStamp(hapSampleBuffer),CMSampleBufferGetDuration(hapSampleBuffer));
+	if (CMTimeRangeContainsTime(timeRange,n))
+		return YES;
+	return NO;
+}
 
 - (CMSampleBufferRef) allocCMSampleBufferFromRGBData {
@@ -267,7 +276,7 @@ - (CMSampleBufferRef) allocCMSampleBufferFromRGBData {
 		&timing,
 		&returnMe);
 	if (osErr!=noErr || returnMe==NULL)
-		NSLog(@"\t\terr %d at CMSampleBufferCreateForImageBuffer() in %s",osErr,__func__);
+		NSLog(@"\t\terr %d at CMSampleBufferCreateForImageBuffer() in %s",(int)osErr,__func__);
 	else {
 		//NSLog(@"\t\tsuccessfully allocated a CMSampleBuffer from the RGB data in me! %@/%s",self,__func__);
 	}
@@ -316,6 +325,18 @@ - (BOOL) decoded {
 	OSSpinLockUnlock(&atomicLock);
 	return returnMe;
 }
+- (void) incrementAge {
+	OSSpinLockLock(&atomicLock);
+	++age;
+	OSSpinLockUnlock(&atomicLock);
+}
+- (int) age {
+	int returnMe = 0;
+	OSSpinLockLock(&atomicLock);
+	returnMe = age;
+	OSSpinLockUnlock(&atomicLock);
+	return returnMe;
+}
 
 @end
diff --git a/external/snappy/snappy-mac/snappy.xcodeproj/project.pbxproj b/external/snappy/snappy-mac/snappy.xcodeproj/project.pbxproj
index dc0dbfa..5e63e1c 100644
--- a/external/snappy/snappy-mac/snappy.xcodeproj/project.pbxproj
+++ b/external/snappy/snappy-mac/snappy.xcodeproj/project.pbxproj
@@ -170,7 +170,7 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
 				CLANG_CXX_LIBRARY = "libstdc++";
 				CLANG_ENABLE_MODULES = YES;
@@ -201,7 +201,7 @@
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
 				MACOSX_DEPLOYMENT_TARGET = 10.6;
-				ONLY_ACTIVE_ARCH = YES;
+				ONLY_ACTIVE_ARCH = NO;
 				SDKROOT = macosx;
 			};
 			name = Debug;
@@ -210,7 +210,7 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
 				CLANG_CXX_LIBRARY = "libstdc++";
 				CLANG_ENABLE_MODULES = YES;
diff --git a/external/squish/squish.xcodeproj/project.pbxproj b/external/squish/squish.xcodeproj/project.pbxproj
index 2242aa0..1c02827 100644
--- a/external/squish/squish.xcodeproj/project.pbxproj
+++ b/external/squish/squish.xcodeproj/project.pbxproj
@@ -220,7 +220,7 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
 				CLANG_CXX_LIBRARY = "libstdc++";
 				CLANG_ENABLE_MODULES = YES;
@@ -250,7 +250,7 @@
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
 				MACOSX_DEPLOYMENT_TARGET = 10.6;
-				ONLY_ACTIVE_ARCH = YES;
+				ONLY_ACTIVE_ARCH = NO;
 				SDKROOT = macosx;
 			};
 			name = Debug;
@@ -259,7 +259,7 @@
 			isa = XCBuildConfiguration;
 			buildSettings = {
 				ALWAYS_SEARCH_USER_PATHS = NO;
-				ARCHS = "$(ARCHS_STANDARD_64_BIT)";
+				ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";
 				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
 				CLANG_CXX_LIBRARY = "libstdc++";
 				CLANG_ENABLE_MODULES = YES;
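Note: the ARCHS and ONLY_ACTIVE_ARCH changes in the three project files above switch the builds from 64-bit-only to fat 32/64-bit binaries, and force both slices to build even in Debug. A small diagnostic of the kind that can confirm which slice is running (hypothetical, not part of the patch):

    // __LP64__ is defined for the 64-bit slice; NSInteger tracks pointer width,
    // which is why the format-specifier casts earlier in this patch are needed
    #if __LP64__
    NSLog(@"64-bit slice, NSInteger is %zu bytes", sizeof(NSInteger));
    #else
    NSLog(@"32-bit slice, NSInteger is %zu bytes", sizeof(NSInteger));
    #endif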