5 // Created by 柳 on 09/09/23.
6 // Copyright 2009 __MyCompanyName__. All rights reserved.
9 #import "ElisWriterLegacy.h"
11 // Handle requests for information about the output video data
// Export-component callback (MovieExportGetPropertyUPP signature): answers
// property queries about the video track being exported.
// Returns noErr only for properties handled explicitly; paramErr tells the
// exporter to fall back to its own default for everything else.
12 static OSErr QTMoovProcs_VideoTrackPropertyProc (void *theRefcon, long theTrackID, OSType thePropertyType, void *thePropertyValue)
14 #pragma unused(theRefcon, theTrackID)
18 switch (thePropertyType) {
19 case movieExportUseConfiguredSettings:
// Tell the exporter to use the settings chosen in the user dialog.
20 *(Boolean *)thePropertyValue = true;
// Default path: paramErr means "use the component's own default value".
24 myErr = paramErr; // non-zero value means: use default value provided by export component
32 //--------------------------------------------------------------------------------------------------
34 // Provide the output video data.
// Export-component callback (MovieExportGetDataUPP signature): supplies each
// video frame by forwarding to the ElisWriterLegacy instance passed as the
// refcon when the data source was registered.
36 static OSErr QTMoovProcs_VideoTrackDataProc(void *theRefcon, MovieExportGetDataParams *theParams)
38 return [(ElisWriterLegacy*)theRefcon exportFrame:theParams];
42 @implementation ElisWriterLegacy
// NOTE(review): fragment of an initializer (signature not visible here) —
// allocates the 256-entry gamma lookup table used by gammaAdjust:size: and
// loads the export-progress sheet nib. Confirm gamma_table is freed in dealloc.
46 gamma_table = malloc(sizeof(unsigned char) * 256);
47 [NSBundle loadNibNamed:@"WriterProgress" owner:self];
// Public entry point: export the movie to the destination chosen in the
// save panel (regular .mov export, not the iPod variant).
52 - (void)write:(NSSavePanel*)sp
54 [self reallyExportMovie:sp toPod:NO];
// Dependency injection: the window used as parent for the export sheets.
57 - (void)setMainWindow:(NSWindow*)w
// Dependency injection: stores the controller and shares it with the view.
62 - (void)setMainController:(id)c
65 [_mainView setMainController:c];
// Dependency injection: the view that renders each frame during export.
68 - (void)setMainView:(id)v
73 -(void)reallyExportMovie:(NSSavePanel *)savePanel toPod:(BOOL)exportToPod
// Core export routine: pushes rendered frames (and pass-through audio)
// through a QuickTime movie export component using the
// "export from procedures" API. Blocks until the export finishes.
75 MovieExportComponent myExporter = NULL;
76 ComponentDescription myCompDesc;
77 Boolean myCancelled = false;
79 MovieExportGetPropertyUPP theAudioPropProcUPP = nil;
80 MovieExportGetDataUPP theAudioDataProcUPP = nil;
81 TimeScale audioScale = 0;
82 void *audioRefCon = 0;
// Total duration comes from the controller, converted to a QTTime at the
// project's default frame rate.
88 floatTime = [_mainController getHipTime];
89 movieDuration = QTMakeTime(floatTime*DEFAULT_FPS, DEFAULT_FPS);
// Precompute the gamma lookup table used per-frame in exportFrame:.
91 [self readyGammmaTable];
92 // NSRect originFrame = [_mainView frame];
93 // [_mainView setFrame:NSMakeRect(originFrame.origin.x, originFrame.origin.y, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
94 // [_mainView setFrame:NSMakeRect(0, 0, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
95 [_mainView setHidden:YES];
96 // [_dummyWindow setFrame:*(NSRect*)&ProjectMovieSize display:YES];
97 // [NSApp beginSheet:_dummyWindow modalForWindow:_mainWindow modalDelegate:self
98 // didEndSelector:nil contextInfo:nil];
99 // [_dummyWindow setHidesOnDeactivate:YES];
// Show the progress sheet for the duration of the export.
102 [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
103 modalDelegate:self didEndSelector:nil contextInfo:NULL];
105 // Export into a Quicktime movie
// Request a .mov exporter that supports export-from-procedures.
106 myCompDesc.componentType = MovieExportType;
107 myCompDesc.componentSubType = MovieFileType;
108 myCompDesc.componentManufacturer = kAppleManufacturer;
109 myCompDesc.componentFlags = canMovieExportFromProcedures;
110 myCompDesc.componentFlagsMask = canMovieExportFromProcedures;
112 // open the selected movie export component
113 myExporter = OpenComponent(FindNextComponent(NULL, &myCompDesc));
114 if (myExporter == NULL) {
115 NSLog(@"could not find export compontent !");
119 // Hey exporter, support modern audio features
120 Boolean useHighResolutionAudio = true;
121 QTSetComponentProperty(myExporter, kQTPropertyClass_MovieExporter,
122 kQTMovieExporterPropertyID_EnableHighResolutionAudioFeatures,
124 &useHighResolutionAudio);
126 // create UPPs for the two app-defined export functions
127 MovieExportGetPropertyUPP theVideoPropProcUPP = NewMovieExportGetPropertyUPP(QTMoovProcs_VideoTrackPropertyProc);
128 MovieExportGetDataUPP theVideoDataProcUPP = NewMovieExportGetDataUPP(QTMoovProcs_VideoTrackDataProc);
// Register this object as the video data source; the static callbacks
// above forward into exportFrame:.
131 MovieExportAddDataSource(myExporter, VideoMediaType,
132 movieDuration.timeScale, // use the original timescale
// Audio: the controller renders each audio source to a temp file, which is
// re-opened as a QTMovie for pass-through export.
139 NSMutableArray* audioPaths = [_mainController writeAudioFiles];
140 NSMutableArray* audioMovies = [[NSMutableArray alloc] init];
143 for(id audioPath in audioPaths){
144 QTMovie* m = [[QTMovie alloc] initWithFile:audioPath error:nil];
// NOTE(review): load failures are silently skipped — that track is dropped.
146 if(m == nil) continue;
147 [audioMovies addObject:m];
152 // NSMutableArray* audioTracks = [[NSMutableArray alloc] init];
153 // [_mainController getSoundTrack:audioTracks];
154 // int aui, tsize = [audioTracks count];
// Register one pass-through audio data source per loaded movie.
156 for(QTMovie* audioMovie in audioMovies){
157 // we are setting up the audio for pass through
159 QTTrack* t = [[audioMovie tracksOfMediaType:QTMediaTypeSound] objectAtIndex:0];
160 err = MovieExportNewGetDataAndPropertiesProcs(myExporter, SoundMediaType,
162 [audioMovie quickTimeMovie],
163 [t quickTimeTrack], // we only use the first audio here
165 [audioMovie duration].timeValue,
166 &theAudioPropProcUPP,
167 &theAudioDataProcUPP,
170 NSLog(@"Can't get audio for export");
172 MovieExportAddDataSource(myExporter, SoundMediaType, audioScale, &trackID, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
// Show the exporter's settings dialog (skipped for iPod export). The dialog
// must run on the main thread, hence the performSelector indirection;
// _myExporter/_myCancelled shuttle values to and from that thread.
177 if (NO == exportToPod) {
179 _myExporter = myExporter;
180 [self performSelectorOnMainThread:@selector(movieExportDialogMainThread) withObject:nil waitUntilDone:YES];
181 // MovieExportDoUserDialog(myExporter, NULL, NULL, 0, movieDuration.timeValue, &myCancelled);
182 myExporter = _myExporter;
183 myCancelled = _myCancelled;
// User cancelled: dispose the UPPs, audio procs and the component, dismiss
// the progress sheet, and bail out.
186 NSLog(@"User canceled export dialog");
187 DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
188 DisposeMovieExportGetDataUPP(theVideoDataProcUPP);
190 if (theAudioPropProcUPP && theAudioDataProcUPP) {
191 MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
194 CloseComponent(myExporter);
195 [NSApp endSheet:_barSheet];
197 // [_mainView setHidden:NO];
// Allocate readback and flip buffers sized to the project movie rect.
// NOTE(review): reinterpreting ProjectMovieSize via *(NSRect*)& assumes its
// layout matches NSRect — verify the declared type.
209 NSRect frame = *(NSRect*)&ProjectMovieSize;
212 // create the readback and flipping buffers - see note about flipping in exportFrame method
214 outputWidth = frame.size.width;
215 outputHeight = frame.size.height;
217 //outputHeight = 480;
219 contextRowBytes = outputWidth * outputAlignment;
220 contextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
221 flippedContextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
223 // setup the image description for the frame compression
// Uncompressed 32-bit frames: ARGB on big-endian, BGRA on little-endian.
224 outputImageDescription = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
225 (*outputImageDescription)->idSize = sizeof(ImageDescription);
226 #ifdef __BIG_ENDIAN__
227 (*outputImageDescription)->cType = k32ARGBPixelFormat;
229 (*outputImageDescription)->cType = k32BGRAPixelFormat;
231 (*outputImageDescription)->vendor = kAppleManufacturer;
232 (*outputImageDescription)->spatialQuality = codecLosslessQuality;
233 (*outputImageDescription)->width = outputWidth;
234 (*outputImageDescription)->height = outputHeight;
235 (*outputImageDescription)->hRes = 72L<<16;
236 (*outputImageDescription)->vRes = 72L<<16;
237 (*outputImageDescription)->dataSize = contextRowBytes * outputHeight;
238 (*outputImageDescription)->frameCount = 1;
239 (*outputImageDescription)->depth = 32;
240 (*outputImageDescription)->clutID = -1;
// NOTE(review): the progress sheet was already begun above — confirm this
// second beginSheet: is intentional.
242 [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
243 modalDelegate:self didEndSelector:nil contextInfo:NULL];
// Remove any existing file at the destination, then run the export.
// MovieExportFromProceduresToDataRef blocks until every frame has been
// pulled through exportFrame:.
245 [[NSFileManager defaultManager] removeFileAtPath:[savePanel filename] handler:nil];
246 // export the video data to the data reference
247 QTNewDataReferenceFromCFURL((CFURLRef)[savePanel URL], 0, &myDataRef, &myDataType );
250 MovieExportFromProceduresToDataRef(myExporter, myDataRef, myDataType);
252 // we are done with the .mov export so lets clean up
256 free(flippedContextPixels);
257 flippedContextPixels = nil;
258 DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
259 DisposeMovieExportGetDataUPP(theVideoDataProcUPP);
261 if (theAudioPropProcUPP && theAudioDataProcUPP) {
262 MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
265 if (outputImageDescription) DisposeHandle((Handle)outputImageDescription);
266 outputImageDescription = NULL;
268 CloseComponent(myExporter);
270 // dispose the original data reference
271 DisposeHandle(myDataRef);
// Delete the temp audio files written by the controller earlier.
273 for(NSString* audioPath in audioPaths)
274 [[NSFileManager defaultManager] removeFileAtPath:audioPath handler:nil];
// Restore the UI.
277 [NSApp endSheet:_barSheet];
279 [_mainView setHidden:NO];
280 [_dummyWindow close];
285 // This function should be called once for each output frame.
// Export-data callback body (invoked via QTMoovProcs_VideoTrackDataProc):
// renders the frame at the requested time, flips it vertically,
// gamma-corrects it, and fills theParams for the compressor.
// Returns userCanceledErr on cancel and eofErr past the movie's end.
286 - (OSErr)exportFrame:(MovieExportGetDataParams *)theParams
288 if (cancelExport) return(userCanceledErr);
// Signal end-of-stream once we pass the movie's duration.
290 if (theParams->requestedTime > movieDuration.timeValue) return(eofErr);
292 NSAutoreleasePool *myPool = [[NSAutoreleasePool alloc] init]; // As the export is done in a tight loop it is a good idea to have an
293 // autorelease pool in the render frame call so we don't acumulate
294 // objects over the lengthy progress and therefore fill the memory
// Build the QTTime for the requested frame in the movie's timescale.
297 currentTime.timeValue = theParams->requestedTime;
298 currentTime.timeScale = movieDuration.timeScale;
299 currentTime.flags = 0;
301 // [qtMovie setCurrentTime:currentTime];
302 // MoviesTask([qtMovie quickTimeMovie], 0); // QTKit is not doing this automatically
305 // [self updateCurrentFrame];
307 // [self performSelectorOnMainThread:@selector(renderFrame2:) withObject:[NSValue valueWithQTTime:currentTime] waitUntilDone:YES];
// Render the frame, then read its pixels back into contextPixels.
308 [_mainView getFrameForQTTime:currentTime];
310 // read the frame from the context into our buffer
311 // if([self readbackFrameIntoBuffer:contextPixels alignment:outputAlignment width:outputWidth height:outputHeight offsetX:0 offsetY:0]) {
312 // NSLog(@"could not readback image!");
314 [_mainView getCurrentPixelData:outputSize buffer:contextPixels];
316 /* WHY IS THIS memcpy ROUTINE HERE?
317 The way the pixels are read back through glReadPixels is flipped to what QuickTime expects.
318 This can easily be worked around by rendering upside down - just switch the minY and maxY in glOrtho.
319 But since we display the exported image during the export process in this example, we don't want to do this
320 for visual purposes (because the image on the screen would end up being upside down),
321 therefore we resort to flipping the image by hand.
// Row-by-row vertical flip from contextPixels into flippedContextPixels.
323 int i = outputHeight;
325 memcpy(flippedContextPixels + ((outputHeight - i - 1) * contextRowBytes), contextPixels + (i * contextRowBytes), contextRowBytes);
330 // Gamma-correct to compensate for the color shift. This is awful.
331 [self gammaAdjust:flippedContextPixels size:outputHeight*outputWidth*4];
333 // fill the return parameters for the compression
334 theParams->actualTime = theParams->requestedTime;
335 theParams->dataPtr = (void*)flippedContextPixels;
336 theParams->dataSize = (**(outputImageDescription)).dataSize;
337 theParams->desc = (SampleDescriptionHandle)outputImageDescription;
338 theParams->descType = VideoMediaType;
339 theParams->descSeed = 1;
340 theParams->actualSampleCount = 1;
// NOTE(review): 30 fps is hard-coded here while movieDuration was built
// with DEFAULT_FPS — confirm the two agree.
341 theParams->durationPerSample = currentTime.timeScale / 30;
342 theParams->sampleFlags = 0L;
346 // NSLog(@"%f [s]", (float)currentTime.timeValue/currentTime.timeScale);
// Update the progress bar on the main thread with the fraction complete.
347 [self performSelectorOnMainThread:@selector(changeBarValue:)
348 withObject:[NSNumber numberWithDouble:(double)currentTime.timeValue/movieDuration.timeValue]
// Progress-bar setter; invoked on the main thread from exportFrame: with
// the completed fraction (0.0–1.0) boxed in an NSNumber.
354 - (void)changeBarValue:(NSNumber*)v
356 [_bar setDoubleValue:[v doubleValue]];
// Renders the frame for the boxed QTTime on the main thread — alternative
// render path; its call site in exportFrame: is currently commented out.
359 - (void)renderFrame2:(NSValue*)v
361 [_mainView getFrameForQTTime:[v QTTimeValue]];
// Precompute the 256-entry gamma-correction lookup table used by
// gammaAdjust:size: — maps each byte value v to 255 * (v/255)^(1/GAMMA).
370 - (void)readyGammmaTable
374 for(i = 0; i < 256; i++)
// NOTE(review): the cast binds only to 255.0 (precedence), so this computes
// 255 * pow(...) as a double and relies on implicit truncation at the
// assignment — same value as (unsigned char)(255.0 * pow(...)), but the
// cast placement is misleading.
375 gamma_table[i] = (unsigned char)255.0 * pow(i/255.0, 1.0/GAMMA);
// Applies the precomputed gamma table in place to every color byte of the
// pixel buffer; s is the total byte count (width * height * 4).
378 - (void)gammaAdjust:(unsigned char*)pixels size:(int)s
381 for(i = 0; i < s; i++){
// Skip every fourth byte so alpha is untouched — assumes alpha is at byte
// offset 3 (BGRA). NOTE(review): on big-endian ARGB output the alpha byte
// would be at offset 0 — verify against the export pixel format.
382 if(i%4 == 3) continue;
383 pixels[i] = gamma_table[pixels[i]];
// Runs the exporter's settings dialog; must execute on the main thread, so
// reallyExportMovie:toPod: invokes it via performSelectorOnMainThread: and
// exchanges state through the _myExporter/_myCancelled ivars.
387 - (void)movieExportDialogMainThread
389 MovieExportDoUserDialog(_myExporter, NULL, NULL, 0, movieDuration.timeValue, &_myCancelled);
392 - (NSMutableArray*)getAudioTrack
394 NSMutableArray* soundTrack = [[NSMutableArray alloc] init];
395 QTTrack* track, *newTrack;
399 NSMutableArray* result = [[NSMutableArray alloc] init];
401 [_mainController getSoundTrack:soundTrack];
402 size = [soundTrack count];
404 for(i = 0; i < size; i += 4){
405 newTrack = [[QTTrack alloc] init];
406 track = [soundTrack objectAtIndex:i+1];
407 offset = [[soundTrack objectAtIndex:i+2] QTTimeValue];
408 mapping = [[soundTrack objectAtIndex:i+3] QTTimeRangeValue];
409 [newTrack insertSegmentOfTrack:track timeRange:QTMakeTimeRange(offset, mapping.duration) atTime:mapping.time];
410 [result addObject:[soundTrack objectAtIndex:i]];
411 [result addObject:newTrack];