5 // Created by 柳 on 09/09/23.
6 // Copyright 2009 __MyCompanyName__. All rights reserved.
9 #import "ElisWriterLegacy.h"
11 // Handle requests for information about the output video data
// Export-component "get property" callback registered for the video data
// source (see NewMovieExportGetPropertyUPP in -reallyExportMovie:toPod:).
// Only movieExportUseConfiguredSettings is answered explicitly (true, so the
// exporter uses the settings configured in the user dialog); for every other
// property it falls through to paramErr, telling the export component to use
// its own default value.
// NOTE(review): several lines (braces, the myErr declaration, the default
// case and the return) are not visible in this listing — verify against the
// full file.
12 static OSErr QTMoovProcs_VideoTrackPropertyProc (void *theRefcon, long theTrackID, OSType thePropertyType, void *thePropertyValue)
14 #pragma unused(theRefcon, theTrackID)
18 switch (thePropertyType) {
19 case movieExportUseConfiguredSettings:
20 *(Boolean *)thePropertyValue = true;
24 myErr = paramErr; // non-zero value means: use default value provided by export component
32 //--------------------------------------------------------------------------------------------------
34 // Provide the output video data.
// (The original comment said "audio", but this is the *video* track data
// proc: the export component calls it once per requested frame, and it simply
// forwards the request to -[ElisWriterLegacy exportFrame:], using the refcon
// as the writer instance.)
36 static OSErr QTMoovProcs_VideoTrackDataProc(void *theRefcon, MovieExportGetDataParams *theParams)
38 return [(ElisWriterLegacy*)theRefcon exportFrame:theParams];
42 @implementation ElisWriterLegacy
46 gamma_table = malloc(sizeof(unsigned char) * 256);
47 [NSBundle loadNibNamed:@"WriterProgress" owner:self];
// Public entry point: export the movie to the location chosen in the save
// panel. Always a plain QuickTime export (toPod:NO); the heavy lifting is in
// -reallyExportMovie:toPod:.
52 - (void)write:(NSSavePanel*)sp
54 [self reallyExportMovie:sp toPod:NO];
// Dependency injection: remember the main window so the export progress
// sheets ([NSApp beginSheet:... modalForWindow:_mainWindow]) can attach to it.
// NOTE(review): the method body is not visible in this listing — presumably
// just `_mainWindow = w;`; confirm against the full file.
57 - (void)setMainWindow:(NSWindow*)w
// Dependency injection: wire up the main controller and propagate it to the
// main view. NOTE(review): storing c into _mainController (which other methods
// read, e.g. [_mainController getHipTime]) happens on a line not visible in
// this listing — confirm against the full file.
62 - (void)setMainController:(id)c
65 [_mainView setMainController:c];
// Dependency injection: remember the view that renders frames and hands back
// pixel data during export (see -exportFrame:). NOTE(review): the body is not
// visible in this listing — presumably `_mainView = v;`; confirm.
68 - (void)setMainView:(id)v
// Core export routine: drives a QuickTime movie-export component in
// "export from procedures" mode. Video frames are pulled through the two
// static QTMoovProcs_* callbacks above; audio tracks are passed through
// directly from their source movies. Shows a progress sheet on _mainWindow
// while exporting. exportToPod:YES skips the exporter's user-settings dialog.
// NOTE(review): this listing is sampled — error-handling branches, early
// returns and several declarations (err, trackID, qtr, myDataRef, myDataType,
// floatTime, outputSize, closing braces) sit on lines that are not visible.
73 -(void)reallyExportMovie:(NSSavePanel *)savePanel toPod:(BOOL)exportToPod
75 MovieExportComponent myExporter = NULL;
76 ComponentDescription myCompDesc;
77 Boolean myCancelled = false;
79 MovieExportGetPropertyUPP theAudioPropProcUPP = nil;
80 MovieExportGetDataUPP theAudioDataProcUPP = nil;
81 TimeScale audioScale = 0;
82 void *audioRefCon = 0;
// Total duration: seconds (from the controller) converted to a QTTime at the
// project's default frame rate.
88 floatTime = [_mainController getHipTime];
89 movieDuration = QTMakeTime(floatTime*DEFAULT_FPS, DEFAULT_FPS);
// Build the gamma lookup table used per-frame by -gammaAdjust:size:.
91 [self readyGammmaTable];
92 // NSRect originFrame = [_mainView frame];
93 // [_mainView setFrame:NSMakeRect(originFrame.origin.x, originFrame.origin.y, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
94 // [_mainView setFrame:NSMakeRect(0, 0, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
95 [_mainView setHidden:YES];
96 // [_dummyWindow setFrame:*(NSRect*)&ProjectMovieSize display:YES];
97 // [NSApp beginSheet:_dummyWindow modalForWindow:_mainWindow modalDelegate:self
98 // didEndSelector:nil contextInfo:nil];
99 [_dummyWindow setHidesOnDeactivate:YES];
// Show the progress-bar sheet.
// NOTE(review): the same sheet is begun a second time further down (orig.
// line 227) but only one endSheet: is visible — verify this double
// beginSheet: is intentional.
102 [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
103 modalDelegate:self didEndSelector:nil contextInfo:NULL];
105 // Export into a Quicktime movie
106 myCompDesc.componentType = MovieExportType;
107 myCompDesc.componentSubType = MovieFileType;
108 myCompDesc.componentManufacturer = kAppleManufacturer;
// canMovieExportFromProcedures: we supply frames via callbacks instead of an
// existing Movie.
109 myCompDesc.componentFlags = canMovieExportFromProcedures;
110 myCompDesc.componentFlagsMask = canMovieExportFromProcedures;
112 // open the selected movie export component
113 myExporter = OpenComponent(FindNextComponent(NULL, &myCompDesc));
114 if (myExporter == NULL) {
115 NSLog(@"could not find export compontent !");
119 // Hey exporter, support modern audio features
120 Boolean useHighResolutionAudio = true;
121 QTSetComponentProperty(myExporter, kQTPropertyClass_MovieExporter,
122 kQTMovieExporterPropertyID_EnableHighResolutionAudioFeatures,
124 &useHighResolutionAudio);
126 // create UPPs for the two app-defined export functions
127 MovieExportGetPropertyUPP theVideoPropProcUPP = NewMovieExportGetPropertyUPP(QTMoovProcs_VideoTrackPropertyProc);
128 MovieExportGetDataUPP theVideoDataProcUPP = NewMovieExportGetDataUPP(QTMoovProcs_VideoTrackDataProc);
// Register the video data source with the exporter.
131 MovieExportAddDataSource(myExporter, VideoMediaType,
132 movieDuration.timeScale, // use the original timescale
// Audio: the controller returns a flat array of (movie, track, time) triples,
// hence the stride of 3 below.
// NOTE(review): audioTracks is alloc'd but no release is visible in this
// listing — under manual retain-release this looks like a leak; confirm.
140 NSMutableArray* audioTracks = [[NSMutableArray alloc] init];
141 [_mainController getSoundTrack:audioTracks];
142 int aui, tsize = [audioTracks count];
144 for(aui = 0; aui < tsize; aui += 3){
145 // we are setting up the audio for pass through
147 qtr = [[audioTracks objectAtIndex:aui+2] QTTimeValue];
148 err = MovieExportNewGetDataAndPropertiesProcs(myExporter, SoundMediaType,
150 [[audioTracks objectAtIndex:aui] quickTimeMovie],
151 [[audioTracks objectAtIndex:aui+1] quickTimeTrack], // we only use the first audio here
154 &theAudioPropProcUPP,
155 &theAudioDataProcUPP,
158 NSLog(@"Can't get audio for export");
160 MovieExportAddDataSource(myExporter, SoundMediaType, audioScale, &trackID, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
// Unless exporting for iPod, run the exporter's settings dialog. It must run
// on the main thread, so hop over via -movieExportDialogMainThread, passing
// state through the _myExporter/_myCancelled ivars.
165 if (NO == exportToPod) {
167 _myExporter = myExporter;
168 [self performSelectorOnMainThread:@selector(movieExportDialogMainThread) withObject:nil waitUntilDone:YES];
169 // MovieExportDoUserDialog(myExporter, NULL, NULL, 0, movieDuration.timeValue, &myCancelled);
170 myExporter = _myExporter;
171 myCancelled = _myCancelled;
// User cancelled: tear down the UPPs, audio procs and the component, then
// bail out (the early return is on a line not visible in this listing).
174 NSLog(@"User canceled export dialog");
175 DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
176 DisposeMovieExportGetDataUPP(theVideoDataProcUPP);
178 if (theAudioPropProcUPP && theAudioDataProcUPP) {
179 MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
182 CloseComponent(myExporter);
// Output geometry comes from the project movie size.
// NOTE(review): *(NSRect*)&ProjectMovieSize type-puns some rect-like global
// into an NSRect — fragile; confirm ProjectMovieSize's actual type.
194 NSRect frame = *(NSRect*)&ProjectMovieSize;
197 // create the readback and flipping buffers - see note about flipping in exportFrame method
199 outputWidth = frame.size.width;
200 outputHeight = frame.size.height;
202 //outputHeight = 480;
204 contextRowBytes = outputWidth * outputAlignment;
205 contextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
206 flippedContextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
208 // setup the image description for the frame compression
209 outputImageDescription = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
210 (*outputImageDescription)->idSize = sizeof(ImageDescription);
// Pixel format matches what the view's readback produces on each endianness.
211 #ifdef __BIG_ENDIAN__
212 (*outputImageDescription)->cType = k32ARGBPixelFormat;
214 (*outputImageDescription)->cType = k32BGRAPixelFormat;
216 (*outputImageDescription)->vendor = kAppleManufacturer;
217 (*outputImageDescription)->spatialQuality = codecLosslessQuality;
218 (*outputImageDescription)->width = outputWidth;
219 (*outputImageDescription)->height = outputHeight;
// 72 dpi expressed as 16.16 fixed point.
220 (*outputImageDescription)->hRes = 72L<<16;
221 (*outputImageDescription)->vRes = 72L<<16;
222 (*outputImageDescription)->dataSize = contextRowBytes * outputHeight;
223 (*outputImageDescription)->frameCount = 1;
224 (*outputImageDescription)->depth = 32;
225 (*outputImageDescription)->clutID = -1;
// NOTE(review): second beginSheet: for the same _barSheet (first one at orig.
// line 102) — verify.
227 [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
228 modalDelegate:self didEndSelector:nil contextInfo:NULL];
// NOTE(review): removeFileAtPath:handler: has been deprecated since
// Mac OS X 10.5 — removeItemAtPath:error: is the modern replacement.
230 [[NSFileManager defaultManager] removeFileAtPath:[savePanel filename] handler:nil];
231 // export the video data to the data reference
232 QTNewDataReferenceFromCFURL((CFURLRef)[savePanel URL], 0, &myDataRef, &myDataType );
// Synchronous export: this pulls every frame through exportFrame: before
// returning.
235 MovieExportFromProceduresToDataRef(myExporter, myDataRef, myDataType);
237 // we are done with the .mov export so lets clean up
// NOTE(review): only flippedContextPixels is visibly freed; the matching
// free(contextPixels) is presumably on a line not shown — confirm.
241 free(flippedContextPixels);
242 flippedContextPixels = nil;
243 DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
244 DisposeMovieExportGetDataUPP(theVideoDataProcUPP);
246 if (theAudioPropProcUPP && theAudioDataProcUPP) {
247 MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
250 if (outputImageDescription) DisposeHandle((Handle)outputImageDescription);
251 outputImageDescription = NULL;
253 CloseComponent(myExporter);
255 // dispose the original data reference
256 DisposeHandle(myDataRef);
// Tear down the progress UI and restore the hidden main view.
259 [NSApp endSheet:_barSheet];
261 [_mainView setHidden:NO];
262 [_dummyWindow close];
267 // This function should get called once per output frame.
// Export-component data callback (invoked via QTMoovProcs_VideoTrackDataProc).
// Renders the frame for the requested time into contextPixels, flips it
// vertically into flippedContextPixels, gamma-corrects it, and fills
// theParams with a pointer to the raw pixels plus the shared
// outputImageDescription. Returns userCanceledErr when the user aborted and
// eofErr once the requested time passes the movie duration (ends the export).
268 - (OSErr)exportFrame:(MovieExportGetDataParams *)theParams
270 if (cancelExport) return(userCanceledErr);
272 if (theParams->requestedTime > movieDuration.timeValue) return(eofErr);
274 NSAutoreleasePool *myPool = [[NSAutoreleasePool alloc] init]; // As the export is done in a tight loop it is a good idea to have an
275 // autorelease pool in the render frame call so we don't acumulate
276 // objects over the lengthy progress and therefore fill the memory
// Translate the exporter's requested time into a QTTime on the movie's scale.
279 currentTime.timeValue = theParams->requestedTime;
280 currentTime.timeScale = movieDuration.timeScale;
281 currentTime.flags = 0;
283 // [qtMovie setCurrentTime:currentTime];
284 // MoviesTask([qtMovie quickTimeMovie], 0); // QTKit is not doing this automatically
287 // [self updateCurrentFrame];
289 // [self performSelectorOnMainThread:@selector(renderFrame2:) withObject:[NSValue valueWithQTTime:currentTime] waitUntilDone:YES];
// Ask the view to render the frame for this timestamp...
290 [_mainView getFrameForQTTime:currentTime];
292 // read the frame from the context into our buffer
293 // if([self readbackFrameIntoBuffer:contextPixels alignment:outputAlignment width:outputWidth height:outputHeight offsetX:0 offsetY:0]) {
294 // NSLog(@"could not readback image!");
// ...then pull the rendered pixels into contextPixels.
296 [_mainView getCurrentPixelData:outputSize buffer:contextPixels];
298 /* WHY IS THIS memcpy ROUTINE HERE?
299 The way the pixels are read back through glReadPixels is flipped to what QuickTime expects.
300 This can easily be worked around by rendering upside down - just switch the minY and maxY in glOrtho.
301 But since we display the exported image during the export process in this example, we don't want to do this
302 for visual purposes (because the image on the screen would end up being upside down),
303 therefore we resort to flipping the image by hand.
// Row-by-row vertical flip: contextPixels -> flippedContextPixels.
// NOTE(review): the loop header (orig. line 306, presumably `while(i--)`) is
// not visible in this listing — confirm against the full file.
305 int i = outputHeight;
307 memcpy(flippedContextPixels + ((outputHeight - i - 1) * contextRowBytes), contextPixels + (i * contextRowBytes), contextRowBytes);
312 // Gamma-correct to compensate for the color shift. (Original author's note: "this is terrible".)
313 [self gammaAdjust:flippedContextPixels size:outputHeight*outputWidth*4];
315 // fill the return parameters for the compression
316 theParams->actualTime = theParams->requestedTime;
317 theParams->dataPtr = (void*)flippedContextPixels;
318 theParams->dataSize = (**(outputImageDescription)).dataSize;
319 theParams->desc = (SampleDescriptionHandle)outputImageDescription;
320 theParams->descType = VideoMediaType;
321 theParams->descSeed = 1;
322 theParams->actualSampleCount = 1;
// NOTE(review): hard-coded 30 fps here, while the duration was built with
// DEFAULT_FPS — if DEFAULT_FPS != 30 the per-sample duration is wrong; verify.
323 theParams->durationPerSample = currentTime.timeScale / 30;
324 theParams->sampleFlags = 0L;
328 // NSLog(@"%f [s]", (float)currentTime.timeValue/currentTime.timeScale);
// Update the progress bar on the main thread with the completed fraction.
329 [self performSelectorOnMainThread:@selector(changeBarValue:)
330 withObject:[NSNumber numberWithDouble:(double)currentTime.timeValue/movieDuration.timeValue]
// Main-thread helper: push the export progress fraction (0..1, boxed as an
// NSNumber) into the progress bar. Invoked from -exportFrame: via
// performSelectorOnMainThread:.
336 - (void)changeBarValue:(NSNumber*)v
338 [_bar setDoubleValue:[v doubleValue]];
// Main-thread helper: render the frame for a boxed QTTime. Currently only
// referenced from commented-out code in -exportFrame: (orig. line 289).
341 - (void)renderFrame2:(NSValue*)v
343 [_mainView getFrameForQTTime:[v QTTimeValue]];
// Build the 256-entry gamma lookup table used by -gammaAdjust:size:.
// Each entry maps v -> 255 * (v/255)^(1/GAMMA).
// NOTE(review): the cast below binds only to 255.0 (so the expression is
// 255 * pow(...), a double); the stored value is still correct because the
// assignment to the unsigned char truncates, but
// (unsigned char)(255.0 * pow(...)) would say what is meant. Also the selector
// is misspelled ("Gammma"), but the caller in -reallyExportMovie:toPod: uses
// the same spelling, so renaming would be a breaking change.
// NOTE(review): the declaration of i (orig. lines 353-355) is not visible in
// this listing.
352 - (void)readyGammmaTable
356 for(i = 0; i < 256; i++)
357 gamma_table[i] = (unsigned char)255.0 * pow(i/255.0, 1.0/GAMMA);
// Apply the precomputed gamma table in place to a 32-bit-per-pixel buffer of
// s bytes. Every byte where i%4 == 3 is skipped — presumably the alpha
// channel.
// NOTE(review): byte 3 is alpha for little-endian BGRA; for the big-endian
// ARGB layout set up in -reallyExportMovie:toPod:, alpha is byte 0 — confirm
// this skip is correct on big-endian builds.
360 - (void)gammaAdjust:(unsigned char*)pixels size:(int)s
363 for(i = 0; i < s; i++){
364 if(i%4 == 3) continue;
365 pixels[i] = gamma_table[pixels[i]];
// Main-thread trampoline for the exporter's user-settings dialog (AppKit UI
// must run on the main thread). Input and results are exchanged with the
// export thread through the _myExporter and _myCancelled ivars; see
// -reallyExportMovie:toPod:.
369 - (void)movieExportDialogMainThread
371 MovieExportDoUserDialog(_myExporter, NULL, NULL, 0, movieDuration.timeValue, &_myCancelled);
374 - (NSMutableArray*)getAudioTrack
376 NSMutableArray* soundTrack = [[NSMutableArray alloc] init];
377 QTTrack* track, *newTrack;
381 NSMutableArray* result = [[NSMutableArray alloc] init];
383 [_mainController getSoundTrack:soundTrack];
384 size = [soundTrack count];
386 for(i = 0; i < size; i += 4){
387 newTrack = [[QTTrack alloc] init];
388 track = [soundTrack objectAtIndex:i+1];
389 offset = [[soundTrack objectAtIndex:i+2] QTTimeValue];
390 mapping = [[soundTrack objectAtIndex:i+3] QTTimeRangeValue];
391 [newTrack insertSegmentOfTrack:track timeRange:QTMakeTimeRange(offset, mapping.duration) atTime:mapping.time];
392 [result addObject:[soundTrack objectAtIndex:i]];
393 [result addObject:newTrack];