author     Andrea Guzzo <xant@xant.net>  2017-01-05 19:44:51 (GMT)
committer  Andrea Guzzo <xant@xant.net>  2017-01-05 19:44:51 (GMT)
commit     87a2398e0f8b3b7ef6743a074cccc1821c38f3f5 (patch)
tree       a02b4e47e6b3e43fc4fe9298395cd62f355f139c
parent     3d9c4c5f6eaac7ae00ee035586c3c60e327b9f7e (diff)
some more work porting the code to use AVFoundation instead of QTKit (avfoundation)
still a work in progress and the project doesn't build
-rw-r--r--  core/JMXAudioBuffer.h               |  11
-rw-r--r--  core/JMXThreadedEntity.h            |   2
-rw-r--r--  entities/video/JMXQtMovieEntity.h   |   7
-rw-r--r--  entities/video/JMXQtMovieEntity.mm  | 147
4 files changed, 72 insertions, 95 deletions
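
The port replaces QTKit's random-access frame grabbing ([movie frameImageAtTime:withAttributes:error:], removed in the .mm diff below) with AVFoundation's sequential reader model. A minimal sketch of that model, independent of this commit (readVideoFrames and the standalone setup are illustrative assumptions, not code from the repository):

#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

// Minimal sketch of the sequential-reader model this commit ports to.
// Frames are pulled in decode order with copyNextSampleBuffer instead
// of being requested at an arbitrary time as QTKit allowed.
static void readVideoFrames(NSURL *url)
{
    NSError *error = nil;
    AVAsset *asset = [AVAsset assetWithURL:url];
    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:&error];
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetReaderTrackOutput *output =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[videoTracks objectAtIndex:0]
                                                   outputSettings:nil];
    [reader addOutput:output];
    [reader startReading];

    CMSampleBufferRef sample;
    while ((sample = [output copyNextSampleBuffer]) != NULL) {
        CIImage *frame = [CIImage imageWithCVPixelBuffer:
                             (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sample)];
        NSLog(@"decoded frame: %@", frame); // hand off to the render pipeline here
        CFRelease(sample);
    }
}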
diff --git a/core/JMXAudioBuffer.h b/core/JMXAudioBuffer.h
index fcec5df..0721e14 100644
--- a/core/JMXAudioBuffer.h
+++ b/core/JMXAudioBuffer.h
@@ -55,6 +55,7 @@
*/
+ (id)audioBufferWithCoreAudioBuffer:(AudioBuffer *)buffer andFormat:(AudioStreamBasicDescription *)format;
+
/*!
@method audioBufferWithCoreAudioBufferList:buffer andFormat:format;
@abstract create a new autoreleased instance
@@ -65,6 +66,7 @@
*/
+ (id)audioBufferWithCoreAudioBufferList:(AudioBufferList *)buffer andFormat:(AudioStreamBasicDescription *)format;
+
/*!
@method audioBufferWithCoreAudioBufferList:andFormat:copy:freeOnRelease:
@abstract create a new autoreleased instance
@@ -77,6 +79,7 @@
*/
+ (id)audioBufferWithCoreAudioBufferList:(AudioBufferList *)buffer andFormat:(AudioStreamBasicDescription *)format copy:(BOOL)wantsCopy freeOnRelease:(BOOL)wantsFree;
+
/*!
@method initWithCoreAudioBuffer:andFormat:
@abstract initialize a newly created instance
@@ -87,6 +90,7 @@
*/
- (id)initWithCoreAudioBuffer:(AudioBuffer *)buffer andFormat:(AudioStreamBasicDescription *)format;
+
/*!
@method initWithCoreAudioBufferList:andFormat:
@abstract initialize a newly created instance
@@ -116,37 +120,44 @@
@return the number of channels contained in the buffer
*/
- (NSUInteger)numChannels;
+
/*!
@method data
@return the raw audio buffer encapsulated in an NSData object
*/
- (NSData *)data;
+
/*!
@method numFrames
@return the number of frames contained in the buffer
*/
- (NSUInteger)numFrames;
+
/*!
@method bytesPerFrame
@return the number of bytes for each frame
*/
- (NSUInteger)bytesPerFrame;
+
/*!
@method bitsPerChannel
@return the number of bits for each channel
*/
- (NSUInteger)bitsPerChannel;
+
/*!
@method channelsPerFrame
@return the number of channels for each frame
*/
- (NSUInteger)channelsPerFrame;
+
/*!
@method sampleRate
@return the samplerate
*/
- (NSUInteger)sampleRate;
+
/*!
@method fillComplexBuffer:countPointer:offset:
@abstract fill the encapsulated buffer (or bufferlist) with the provided data
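
The blank lines added above only space out the doc comments; the API itself is unchanged. For orientation, a hedged usage sketch of the factory methods documented in this header (the AudioBufferList is assumed to be filled elsewhere, and the ASBD values are illustrative, mirroring the 44.1 kHz stereo float settings requested from AVFoundation in JMXQtMovieEntity.mm below):

#import <CoreAudio/CoreAudioTypes.h>
#import "JMXAudioBuffer.h"

// Illustrative only: wrap an already-filled interleaved buffer list
// in a JMXAudioBuffer.
static JMXAudioBuffer *wrapBufferList(AudioBufferList *bufferList)
{
    AudioStreamBasicDescription format = {0};
    format.mSampleRate       = 44100.0;
    format.mFormatID         = kAudioFormatLinearPCM;
    format.mFormatFlags      = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked;
    format.mChannelsPerFrame = 2;
    format.mBitsPerChannel   = 32;
    format.mBytesPerFrame    = format.mChannelsPerFrame * (format.mBitsPerChannel / 8);
    format.mFramesPerPacket  = 1;
    format.mBytesPerPacket   = format.mBytesPerFrame;

    // copy:YES gives the buffer its own storage; freeOnRelease:NO
    // leaves the caller's allocation alone.
    return [JMXAudioBuffer audioBufferWithCoreAudioBufferList:bufferList
                                                    andFormat:&format
                                                         copy:YES
                                                freeOnRelease:NO];
}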
diff --git a/core/JMXThreadedEntity.h b/core/JMXThreadedEntity.h
index 33849c5..904ffe8 100644
--- a/core/JMXThreadedEntity.h
+++ b/core/JMXThreadedEntity.h
@@ -61,7 +61,7 @@
/*!
@method tick:
- @param timeStamp the current timestamp
+ @param timeStamp the exact timestamp when the 'tick' was initiated
@abstract execute the entity 'runcycle'. Any entity encapsulated in a threaded entity
will have its 'tick:' method called at their configured 'frequency'
*/
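
The reworded doc comment pins down the tick: contract: the timestamp is captured when the runcycle is initiated, not when the entity gets around to running. A minimal sketch of an entity honoring that contract (the class names here are hypothetical; only the tick:/super pattern comes from this codebase):

#import "JMXEntity.h"

// Hypothetical subclass; a JMXThreadedEntity wrapper would invoke
// tick: at the configured frequency.
@interface JMXExampleEntity : JMXEntity
@end

@implementation JMXExampleEntity
- (void)tick:(uint64_t)timeStamp
{
    // timeStamp is the exact moment this runcycle was initiated, so
    // frame stepping can be derived from deltas between successive
    // timestamps rather than from wall-clock reads.
    // ... produce this cycle's output here ...
    [super tick:timeStamp]; // let super notify output pins
}
@end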
diff --git a/entities/video/JMXQtMovieEntity.h b/entities/video/JMXQtMovieEntity.h
index 2ffc3c3..de5b7fe 100644
--- a/entities/video/JMXQtMovieEntity.h
+++ b/entities/video/JMXQtMovieEntity.h
@@ -51,9 +51,10 @@
QTVisualContextRef qtVisualContext; // the context the movie is playing in
#endif
AVAssetReaderAudioMixOutput *audioOutput;
- AVAssetReader *audioReader;
- NSMutableArray *samples;
- int64_t sampleIndex;
+ AVAssetReaderVideoCompositionOutput *videoOutput;
+ AVAssetReader *mediaReader;
+ //NSMutableArray *samples;
+ //int64_t sampleIndex;
}
@property (copy) NSString *moviePath;
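
These ivars encode the new single-reader design: one AVAssetReader (mediaReader) owns both a video composition output and an audio mix output, replacing the separate audioReader plus per-frame QTKit access. Condensed from the .mm changes below, with error handling elided (note, as an aside, that AVAssetReaderVideoCompositionOutput also exposes a videoComposition property that normally has to be set before reading starts, which the work-in-progress code does not do yet):

NSError *error = nil;
mediaReader = [[AVAssetReader assetReaderWithAsset:movieAsset error:&error] retain];

// One output per media type, both attached to the same reader.
videoOutput = [AVAssetReaderVideoCompositionOutput
                  assetReaderVideoCompositionOutputWithVideoTracks:videoTracks
                                                     videoSettings:nil];
[mediaReader addOutput:videoOutput];

audioOutput = [[AVAssetReaderAudioMixOutput
                  assetReaderAudioMixOutputWithAudioTracks:audioTracks
                                             audioSettings:audioSettings] retain];
[mediaReader addOutput:audioOutput];

[mediaReader startReading]; // both outputs are then drained independently:
                            // video from tick:, audio from the audio method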
diff --git a/entities/video/JMXQtMovieEntity.mm b/entities/video/JMXQtMovieEntity.mm
index 4bf8850..3eb2a94 100644
--- a/entities/video/JMXQtMovieEntity.mm
+++ b/entities/video/JMXQtMovieEntity.mm
@@ -81,6 +81,7 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
}
}
+/*
- (void)fillAudioBuffer
{
CMSampleBufferRef sample = [audioOutput copyNextSampleBuffer];
@@ -119,6 +120,7 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
CFRelease(toRelease);
}
}
+ */
- (BOOL)_open:(NSString *)file
{
@@ -141,6 +143,13 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
return NO;
}
+ if (mediaReader) {
+ [mediaReader cancelReading];
+ [mediaReader release];
+ mediaReader = nil;
+ }
+ mediaReader = [[AVAssetReader assetReaderWithAsset:movieAsset error:&error] retain];
+
NSLog(@"movie: %@", movie);
NSArray* videoTracks = [movie tracksWithMediaType:AVMediaTypeVideo];
@@ -168,59 +177,49 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
NSArray *path = [file componentsSeparatedByString:@"/"];
self.label = [path lastObject];
- OSAtomicCompareAndSwap64Barrier(sampleIndex, -1, &sampleIndex);
- if (samples) {
- @synchronized(samples) {
- [samples removeAllObjects];
- }
- [samples release];
- samples = nil;
+ videoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks
+ videoSettings:nil];
+ [mediaReader addOutput:videoOutput];
+
+ if (audioOutput) {
+ [audioOutput release];
+ audioOutput = nil;
}
- if (file && [AVAssetReader class]) {
- if (audioReader) {
- [audioReader cancelReading];
- [audioReader release];
- audioReader = nil;
- }
- if (audioOutput) {
- [audioOutput release];
- audioOutput = nil;
- }
- AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:file]];
- audioReader = [[AVAssetReader assetReaderWithAsset:asset error:&error] retain];
- NSArray *audioTracks = [asset tracksWithMediaType:AVMediaTypeAudio];
- if (audioTracks.count) {
- NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
- [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
- [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
- [NSNumber numberWithInt:32], AVLinearPCMBitDepthKey,
- [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
- [NSNumber numberWithBool:YES], AVLinearPCMIsFloatKey,
- [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
- nil];
- NSArray *outputTracks = [NSArray arrayWithObject:[audioTracks objectAtIndex:0]];
- audioOutput = [[AVAssetReaderAudioMixOutput
- assetReaderAudioMixOutputWithAudioTracks:outputTracks
- audioSettings:audioSettings] retain];
- [audioReader addOutput:audioOutput];
- samples = [[NSMutableArray alloc] initWithCapacity:65535];
- [audioReader startReading];
- }
+ NSArray *audioTracks = [movieAsset tracksWithMediaType:AVMediaTypeAudio];
+ if (audioTracks.count) {
+ NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
+ [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
+ [NSNumber numberWithInt:2], AVNumberOfChannelsKey,
+ [NSNumber numberWithInt:32], AVLinearPCMBitDepthKey,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
+ [NSNumber numberWithBool:YES], AVLinearPCMIsFloatKey,
+ [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
+ nil];
+ NSArray *outputTracks = [NSArray arrayWithObject:[audioTracks objectAtIndex:0]];
+ audioOutput = [[AVAssetReaderAudioMixOutput
+ assetReaderAudioMixOutputWithAudioTracks:outputTracks
+ audioSettings:audioSettings] retain];
+ [mediaReader addOutput:audioOutput];
+ //samples = [[NSMutableArray alloc] initWithCapacity:65535];
}
+ [mediaReader startReading];
+
}
if (moviePath)
[moviePath release];
moviePath = [file copy];
self.active = YES;
+
NSXMLNode *attr = [self attributeForName:@"url"];
[attr setStringValue:moviePath];
+ /*
if (samples) {
[[NSOperationQueue mainQueue] addOperationWithBlock:^{
[self fillAudioBuffer];
}];
- }
+ }*/
return YES;
}
self.active = NO;
@@ -254,8 +253,8 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
- (void)dealloc {
if (movie)
[movie release];
- [samples release];
- [audioReader release];
+ //[samples release];
+ [mediaReader release];
[audioOutput release];
[super dealloc];
@@ -263,7 +262,7 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
- (void)tick:(uint64_t)timeStamp
{
- CIImage* frame;
+ CIImage* frame = nil;
NSError* error = nil;
CGImageRef pixelBuffer = NULL;
@synchronized(self) {
@@ -271,55 +270,18 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
/*
[QTMovie enterQTKitOnThread];
*/
- QTTime now = [movie currentTime];
+ // QTTime now = [movie currentTime];
+
if (!paused) {
if (currentFrame) {
[currentFrame release];
currentFrame = nil;
}
- if (absoluteTime) {
- now.timeValue = absoluteTime / 1e9 * now.timeScale;
- } else {
- uint64_t delta = self.previousTimeStamp
- ? (timeStamp - self.previousTimeStamp) / 1e9 * now.timeScale
- : (now.timeScale / [fps doubleValue]);
-
- uint64_t step = movieFrequency
- ? [fps doubleValue] * delta / movieFrequency
- : 0;
- step += (seekOffset / 1e9 * now.timeScale);
- // Calculate the next frame we need to provide.
- now.timeValue += step;
- }
- if (QTTimeCompare(now, [movie duration]) == NSOrderedAscending) {
- [movie setCurrentTime:now];
- } else { // the movie is ended
- if (repeat) { // check if we need to rewind and re-start extracting frames
- [movie gotoBeginning];
- now.timeValue = 0;
- } else {
- [self stop];
- return [super tick:timeStamp]; // we still want to propagate the signal
- }
- }
- if (now.timeValue == 0 || seekOffset || absoluteTime) {
- OSAtomicCompareAndSwap64Barrier(sampleIndex, -1, &sampleIndex);
+ CMSampleBufferRef sampleBuffer = [videoOutput copyNextSampleBuffer];
+ if (sampleBuffer != nil) {
+ frame = [CIImage imageWithCVPixelBuffer:(CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer)]; //options: [NSDictionary dictionaryWithObjectsAndKeys:[NSNull null], kCIImageColorSpace, nil]];
}
- OSAtomicCompareAndSwap64(absoluteTime, 0, &absoluteTime);
- OSAtomicCompareAndSwap64(seekOffset, 0, &seekOffset);
- NSDictionary *attrs = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSValue valueWithSize:self.size.nsSize],
- QTMovieFrameImageSize,
- QTMovieFrameImageTypeCGImageRef,
- QTMovieFrameImageType,
- [NSNumber numberWithBool:YES],
- QTMovieFrameImageSessionMode,
- nil];
- pixelBuffer = (CGImageRef)[movie frameImageAtTime:now
- withAttributes:attrs error:&error];
- frame = [CIImage imageWithCGImage:pixelBuffer];
-
if (frame)
currentFrame = [frame retain];
@@ -327,22 +289,25 @@ JMXV8_EXPORT_NODE_CLASS(JMXQtMovieEntity);
NSLog(@"%@\n", error);
}
}
- [QTMovie exitQTKitOnThread];
+ // [QTMovie exitQTKitOnThread];
}
[super tick:timeStamp]; // let super notify output pins
}
- (JMXAudioBuffer *)audio
{
- if (self.active && abs([self.fps doubleValue] - movieFrequency) < 0.1) {
+ // if (self.active && abs([self.fps doubleValue] - movieFrequency) < 0.1) {
+ CMSampleBufferRef sampleBuffer = [audioOutput copyNextSampleBuffer];
+ if (sampleBuffer == nil)
+ return nil;
+
+ CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer();
if (sampleIndex == -1) {
- [QTMovie enterQTKitOnThread];
- QTTime now = [movie currentTime];
- double nowSecs = now.timeValue / now.timeScale;
-
+ CMTime now = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
+ double nowSecs = now.value / now.timescale;
sampleIndex = 44100.0 * nowSecs / 512;
- [QTMovie exitQTKitOnThread];
}
+
//return currentAudioSample;
@synchronized(samples) {
if (samples && samples.count) {
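
The audio accessor is the least finished piece of the port: CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer() is invoked with no arguments (the real CoreMedia function takes eight), and the samples/sampleIndex bookkeeping still referenced here was commented out earlier in the commit. A hedged sketch of what the completed extraction might look like, combining the CoreMedia call's actual signature with the JMXAudioBuffer factory declared in core/JMXAudioBuffer.h:

- (JMXAudioBuffer *)audio
{
    // Pull the next decoded audio chunk from the reader's audio output.
    CMSampleBufferRef sampleBuffer = [audioOutput copyNextSampleBuffer];
    if (sampleBuffer == nil)
        return nil;

    // Interleaved LPCM (as requested in audioSettings) arrives in a
    // single AudioBuffer, so a one-entry AudioBufferList suffices.
    AudioBufferList bufferList;
    CMBlockBufferRef blockBuffer = NULL;
    OSStatus err = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
        sampleBuffer,
        NULL,                // bufferListSizeNeededOut: not needed here
        &bufferList,
        sizeof(bufferList),
        kCFAllocatorDefault, // allocator for the AudioBufferList struct
        kCFAllocatorDefault, // allocator for the block buffer memory
        0,                   // flags
        &blockBuffer);
    if (err != noErr) {
        CFRelease(sampleBuffer);
        return nil;
    }

    // The real format travels with the sample buffer, so no hardcoded ASBD.
    const AudioStreamBasicDescription *asbd =
        CMAudioFormatDescriptionGetStreamBasicDescription(
            (CMAudioFormatDescriptionRef)CMSampleBufferGetFormatDescription(sampleBuffer));

    // copy:YES because the data is owned by blockBuffer, which is
    // released below; freeOnRelease:NO since nothing here was malloc'd.
    JMXAudioBuffer *buf =
        [JMXAudioBuffer audioBufferWithCoreAudioBufferList:&bufferList
                                                 andFormat:(AudioStreamBasicDescription *)asbd
                                                      copy:YES
                                             freeOnRelease:NO];
    CFRelease(blockBuffer);
    CFRelease(sampleBuffer);
    return buf;
}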