//
//  ElisWriterLegacy.m
//  Elis Colors
//
//  Created by 柳 on 09/09/23.
//  Copyright 2009 __MyCompanyName__. All rights reserved.
//

#import "ElisWriterLegacy.h"

// Handle requests for information about the output video data.
static OSErr QTMoovProcs_VideoTrackPropertyProc(void *theRefcon, long theTrackID,
                                                OSType thePropertyType, void *thePropertyValue)
{
#pragma unused(theRefcon, theTrackID)

    OSErr myErr = noErr;

    switch (thePropertyType) {
        case movieExportUseConfiguredSettings:
            *(Boolean *)thePropertyValue = true;
            break;
        default:
            myErr = paramErr; // non-zero value means: use default value provided by export component
            break;
    }

    return(myErr);
}

//--------------------------------------------------------------------------------------------------
// Provide the output video data.
// Callback-style glue: forwards each frame request to the writer instance passed in as the refcon.
static OSErr QTMoovProcs_VideoTrackDataProc(void *theRefcon, MovieExportGetDataParams *theParams)
{
    return [(ElisWriterLegacy*)theRefcon exportFrame:theParams];
}

@implementation ElisWriterLegacy

- (id)init
{
    if((self = [super init])){
        gamma_table = malloc(sizeof(unsigned char) * 256);
        [NSBundle loadNibNamed:@"WriterProgress" owner:self];
    }
    return self;
}

- (void)write:(NSSavePanel*)sp
{
    [self reallyExportMovie:sp toPod:NO];
}

- (void)setMainWindow:(NSWindow*)w
{
    _mainWindow = w;
}

- (void)setMainController:(id)c
{
    _mainController = c;
    [_mainView setMainController:c];
}

- (void)setMainView:(id)v
{
    _mainView = v;
}
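// --------------------------------------------------------------------------
// A minimal usage sketch (not part of the original file): roughly how a
// caller is expected to wire the writer up before exporting, assuming the
// setters above are declared in ElisWriterLegacy.h. `window`, `controller`
// and `view` are hypothetical stand-ins for the caller's own objects.
__attribute__((unused))
static void ElisWriterLegacyUsageSketch(NSWindow *window, id controller, id view)
{
    ElisWriterLegacy *writer = [[ElisWriterLegacy alloc] init];
    [writer setMainWindow:window];
    [writer setMainController:controller];
    [writer setMainView:view];

    NSSavePanel *panel = [NSSavePanel savePanel];
    if([panel runModal] == NSOKButton)
        [writer write:panel];   // runs reallyExportMovie:toPod: with exportToPod == NO
}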
- (void)reallyExportMovie:(NSSavePanel *)savePanel toPod:(BOOL)exportToPod
{
    MovieExportComponent myExporter = NULL;
    ComponentDescription myCompDesc;
    Boolean myCancelled = false;
    long trackID;
    MovieExportGetPropertyUPP theAudioPropProcUPP = nil;
    MovieExportGetDataUPP theAudioDataProcUPP = nil;
    TimeScale audioScale = 0;
    void *audioRefCon = 0;
    OSErr err = noErr;

    // Groundwork.
    float floatTime = [_mainController getHipTime];
    movieDuration = QTMakeTime(floatTime*DEFAULT_FPS, DEFAULT_FPS);
    rendering = YES;

    [self readyGammmaTable];

//    NSRect originFrame = [_mainView frame];
//    [_mainView setFrame:NSMakeRect(originFrame.origin.x, originFrame.origin.y, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
//    [_mainView setFrame:NSMakeRect(0, 0, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
    [_mainView setHidden:YES];
//    [_dummyWindow setFrame:*(NSRect*)&ProjectMovieSize display:YES];
//    [NSApp beginSheet:_dummyWindow modalForWindow:_mainWindow modalDelegate:self
//           didEndSelector:nil contextInfo:nil];
//    [_dummyWindow setHidesOnDeactivate:YES];

    // Show the progress bar.
    [NSApp beginSheet:_barSheet modalForWindow:_mainWindow modalDelegate:self
       didEndSelector:nil contextInfo:NULL];

    // Export into a QuickTime movie.
    myCompDesc.componentType = MovieExportType;
    myCompDesc.componentSubType = MovieFileType;
    myCompDesc.componentManufacturer = kAppleManufacturer;
    myCompDesc.componentFlags = canMovieExportFromProcedures;
    myCompDesc.componentFlagsMask = canMovieExportFromProcedures;

    // Open the selected movie export component.
    myExporter = OpenComponent(FindNextComponent(NULL, &myCompDesc));
    if (myExporter == NULL) {
        NSLog(@"could not find export component!");
        return;
    }

    // Hey exporter, support modern audio features.
    Boolean useHighResolutionAudio = true;
    QTSetComponentProperty(myExporter, kQTPropertyClass_MovieExporter,
                           kQTMovieExporterPropertyID_EnableHighResolutionAudioFeatures,
                           sizeof(Boolean), &useHighResolutionAudio);

    // Create UPPs for the two app-defined export functions.
    MovieExportGetPropertyUPP theVideoPropProcUPP = NewMovieExportGetPropertyUPP(QTMoovProcs_VideoTrackPropertyProc);
    MovieExportGetDataUPP theVideoDataProcUPP = NewMovieExportGetDataUPP(QTMoovProcs_VideoTrackDataProc);

    // Note: self is passed as the refcon, so the callbacks work on instance variables!
    MovieExportAddDataSource(myExporter, VideoMediaType,
                             movieDuration.timeScale, // use the original timescale
                             &trackID,
                             theVideoPropProcUPP,
                             theVideoDataProcUPP,
                             self);

    // Write the audio out to separate files first.
    NSMutableArray* audioPaths = [_mainController writeAudioFiles];
    NSMutableArray* audioMovies = [[NSMutableArray alloc] init];

    // Materialize the audio files as QTMovie objects.
    for(id audioPath in audioPaths){
        QTMovie* m = [[QTMovie alloc] initWithFile:audioPath error:nil];
        NSLog(@"%@", audioPath); // never pass a non-literal as the format string
        if(m == nil) continue;
        [audioMovies addObject:m];
    }

    // Not needed for now.
    // setup audio
//    NSMutableArray* audioTracks = [[NSMutableArray alloc] init];
//    [_mainController getSoundTrack:audioTracks];
//    int aui, tsize = [audioTracks count];
//    QTTime qtr;

    for(QTMovie* audioMovie in audioMovies){
        // We are setting up the audio for pass-through.
        // This, too, works on instance variables!
        QTTrack* t = [[audioMovie tracksOfMediaType:QTMediaTypeSound] objectAtIndex:0];
        err = MovieExportNewGetDataAndPropertiesProcs(myExporter, SoundMediaType, &audioScale,
                                                      [audioMovie quickTimeMovie],
                                                      [t quickTimeTrack], // we only use the first audio track here
                                                      0, [audioMovie duration].timeValue,
                                                      &theAudioPropProcUPP,
                                                      &theAudioDataProcUPP,
                                                      &audioRefCon);
        if (err) {
            NSLog(@"Can't get audio for export");
        } else {
            MovieExportAddDataSource(myExporter, SoundMediaType, audioScale, &trackID,
                                     theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
        }
    }

    // This block is used.
    if (NO == exportToPod) {
        // The user dialog must run on the main thread; state goes through instance variables.
        _myExporter = myExporter;
        [self performSelectorOnMainThread:@selector(movieExportDialogMainThread) withObject:nil waitUntilDone:YES];
//        MovieExportDoUserDialog(myExporter, NULL, NULL, 0, movieDuration.timeValue, &myCancelled);
        myExporter = _myExporter;
        myCancelled = _myCancelled;

        if (myCancelled) {
            NSLog(@"User canceled export dialog");
            DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
            DisposeMovieExportGetDataUPP(theVideoDataProcUPP);
            if (theAudioPropProcUPP && theAudioDataProcUPP) {
                MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP,
                                                            theAudioDataProcUPP, audioRefCon);
            }
            CloseComponent(myExporter);
            [NSApp endSheet:_barSheet];
            [_barSheet close];
//            [_mainView setHidden:NO];
            return;
        }
    }

    isExporting = YES;
    cancelExport = NO;

    OSType myDataType;
    Handle myDataRef;

    NSRect frame = *(NSRect*)&ProjectMovieSize;
    outputSize = frame;

    // Create the readback and flipping buffers - see the note about flipping in exportFrame:.
    // A whole pile of instance variables from here on.
    outputWidth = frame.size.width;
    outputHeight = frame.size.height;
//    outputWidth = 720;
//    outputHeight = 480;
    outputAlignment = 4;
    contextRowBytes = outputWidth * outputAlignment;
    contextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
    flippedContextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));

    // Set up the image description for the frame compression.
    outputImageDescription = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
    (*outputImageDescription)->idSize = sizeof(ImageDescription);
#ifdef __BIG_ENDIAN__
    (*outputImageDescription)->cType = k32ARGBPixelFormat;
#else
    (*outputImageDescription)->cType = k32BGRAPixelFormat;
#endif
    (*outputImageDescription)->vendor = kAppleManufacturer;
    (*outputImageDescription)->spatialQuality = codecLosslessQuality;
    (*outputImageDescription)->width = outputWidth;
    (*outputImageDescription)->height = outputHeight;
    (*outputImageDescription)->hRes = 72L<<16;
    (*outputImageDescription)->vRes = 72L<<16;
    (*outputImageDescription)->dataSize = contextRowBytes * outputHeight;
    (*outputImageDescription)->frameCount = 1;
    (*outputImageDescription)->depth = 32;
    (*outputImageDescription)->clutID = -1;

    // (The progress sheet was already presented above.)
    [NSApp beginSheet:_barSheet modalForWindow:_mainWindow modalDelegate:self
       didEndSelector:nil contextInfo:NULL];

    [[NSFileManager defaultManager] removeFileAtPath:[savePanel filename] handler:nil];

    // Export the video data to the data reference.
    QTNewDataReferenceFromCFURL((CFURLRef)[savePanel URL], 0, &myDataRef, &myDataType);

    // The main work happens here.
    MovieExportFromProceduresToDataRef(myExporter, myDataRef, myDataType);

    // We are done with the .mov export, so clean up.
    free(contextPixels);
    contextPixels = nil;
    free(flippedContextPixels);
    flippedContextPixels = nil;

    DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
    DisposeMovieExportGetDataUPP(theVideoDataProcUPP);

    if (theAudioPropProcUPP && theAudioDataProcUPP) {
        MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP,
                                                    theAudioDataProcUPP, audioRefCon);
    }

    if (outputImageDescription) DisposeHandle((Handle)outputImageDescription);
    outputImageDescription = NULL;

    CloseComponent(myExporter);

    // Dispose of the data reference.
    DisposeHandle(myDataRef);

    // Remove the temporary audio files.
    for(NSString* audioPath in audioPaths)
        [[NSFileManager defaultManager] removeFileAtPath:audioPath handler:nil];

    [_barSheet close];
    [NSApp endSheet:_barSheet];
    [_mainView setHidden:NO];
    [_dummyWindow close];

    isExporting = NO;
    rendering = NO;
}
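// --------------------------------------------------------------------------
// A worked example (not in the original) of the timing used above and in
// exportFrame: below, assuming DEFAULT_FPS is 30 (its real value lives in
// the header). A 2.5 second composition becomes 75 ticks at timescale 30,
// and each exported sample is stamped with timeScale / 30 == 1 tick, i.e.
// exactly one frame at 30 fps.
__attribute__((unused))
static void ElisFrameTimingSketch(void)
{
    QTTime duration = QTMakeTime(75, 30);                // 2.5 s * 30 fps = 75 ticks at 30 ticks/s
    long ticksPerFrame = duration.timeScale / 30;        // 1 tick per frame
    NSCAssert(duration.timeValue == 75 && ticksPerFrame == 1, @"30 fps timing");
}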
// This method should be called once for every output frame.
- (OSErr)exportFrame:(MovieExportGetDataParams *)theParams
{
    if (cancelExport) return(userCanceledErr);
    if (theParams->requestedTime > movieDuration.timeValue) return(eofErr);

    // As the export is done in a tight loop, it is a good idea to have an
    // autorelease pool in the render-frame call so we don't accumulate
    // objects over the lengthy process and thereby fill up memory.
    NSAutoreleasePool *myPool = [[NSAutoreleasePool alloc] init];

    QTTime currentTime;
    currentTime.timeValue = theParams->requestedTime;
    currentTime.timeScale = movieDuration.timeScale;
    currentTime.flags = 0;

//    [qtMovie setCurrentTime:currentTime];
//    MoviesTask([qtMovie quickTimeMovie], 0); // QTKit is not doing this automatically

    // Render the frame.
//    [self updateCurrentFrame];
//    [self display];
//    [self performSelectorOnMainThread:@selector(renderFrame2:) withObject:[NSValue valueWithQTTime:currentTime] waitUntilDone:YES];
    [_mainView getFrameForQTTime:currentTime];

    // Read the frame from the context into our buffer.
//    if([self readbackFrameIntoBuffer:contextPixels alignment:outputAlignment width:outputWidth height:outputHeight offsetX:0 offsetY:0]) {
//        NSLog(@"could not readback image!");
//    }
    [_mainView getCurrentPixelData:outputSize buffer:contextPixels];

    /* WHY IS THIS memcpy ROUTINE HERE?
       The way the pixels are read back through glReadPixels is flipped relative to what
       QuickTime expects. This could easily be worked around by rendering upside down -
       just swap minY and maxY in the glOrtho call. But since we display the exported
       image during the export process in this example, we don't want to do that for
       visual reasons (the image on screen would end up upside down), so we resort to
       flipping the image by hand. */
    int i = outputHeight;
    while(--i >= 0) {
        memcpy(flippedContextPixels + ((outputHeight - i - 1) * contextRowBytes),
               contextPixels + (i * contextRowBytes),
               contextRowBytes);
    }
    // end flipping code

    // Gamma-correct to compensate for the color shift. This is awful.
    [self gammaAdjust:flippedContextPixels size:outputHeight*outputWidth*4];

    // Fill in the return parameters for the compression.
    theParams->actualTime = theParams->requestedTime;
    theParams->dataPtr = (void*)flippedContextPixels;
    theParams->dataSize = (**(outputImageDescription)).dataSize;
    theParams->desc = (SampleDescriptionHandle)outputImageDescription;
    theParams->descType = VideoMediaType;
    theParams->descSeed = 1;
    theParams->actualSampleCount = 1;
    theParams->durationPerSample = currentTime.timeScale / 30; // note: hardcodes 30 fps, i.e. assumes DEFAULT_FPS == 30
    theParams->sampleFlags = 0L;

    [myPool release];

//    NSLog(@"%f [s]", (float)currentTime.timeValue/currentTime.timeScale);
    [self performSelectorOnMainThread:@selector(changeBarValue:)
                           withObject:[NSNumber numberWithDouble:(double)currentTime.timeValue/movieDuration.timeValue]
                        waitUntilDone:YES];

    return noErr;
}

- (void)changeBarValue:(NSNumber*)v
{
    [_bar setDoubleValue:[v doubleValue]];
}

- (void)renderFrame2:(NSValue*)v
{
    [_mainView getFrameForQTTime:[v QTTimeValue]];
}

- (void)finalize
{
    free(gamma_table);
    [super finalize];
}

- (void)readyGammmaTable
{
    int i;
    for(i = 0; i < 256; i++)
        gamma_table[i] = (unsigned char)(255.0 * pow(i/255.0, 1.0/GAMMA));
}

- (void)gammaAdjust:(unsigned char*)pixels size:(int)s
{
    int i;
    for(i = 0; i < s; i++){
        if(i%4 == 3) continue; // skip every fourth byte (the alpha channel in BGRA)
        pixels[i] = gamma_table[pixels[i]];
    }
}
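// --------------------------------------------------------------------------
// A worked example (not part of the original) of the lookup table above,
// assuming GAMMA is 2.2 (its real value is defined in the header): mid-gray
// input 128 maps to 255 * pow(128/255.0, 1/2.2) ≈ 186, which brightens the
// image to undo the darkening color shift introduced during readback.
__attribute__((unused))
static void ElisGammaSketch(void)
{
    unsigned char out = (unsigned char)(255.0 * pow(128/255.0, 1.0/2.2));
    NSCAssert(out == 186, @"mid-gray 128 encodes to roughly 186 at gamma 2.2");
}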
// MovieExportDoUserDialog must run on the main thread.
- (void)movieExportDialogMainThread
{
    MovieExportDoUserDialog(_myExporter, NULL, NULL, 0, movieDuration.timeValue, &_myCancelled);
}

- (NSMutableArray*)getAudioTrack
{
    NSMutableArray* soundTrack = [[NSMutableArray alloc] init];
    QTTrack *track, *newTrack;
    int i, size;
    QTTime offset;
    QTTimeRange mapping;
    NSMutableArray* result = [[NSMutableArray alloc] init];

    [_mainController getSoundTrack:soundTrack];
    size = [soundTrack count];

    // soundTrack is a flat array of quadruples:
    // (source object, QTTrack, offset as QTTime, mapping as QTTimeRange).
    for(i = 0; i < size; i += 4){
        newTrack = [[QTTrack alloc] init];
        track = [soundTrack objectAtIndex:i+1];
        offset = [[soundTrack objectAtIndex:i+2] QTTimeValue];
        mapping = [[soundTrack objectAtIndex:i+3] QTTimeRangeValue];
        [newTrack insertSegmentOfTrack:track
                             timeRange:QTMakeTimeRange(offset, mapping.duration)
                                atTime:mapping.time];
        [result addObject:[soundTrack objectAtIndex:i]];
        [result addObject:newTrack];
    }

    return result;
}

@end
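// --------------------------------------------------------------------------
// An illustrative sketch (not part of the original file) of the flat
// quadruple layout that getAudioTrack expects from
// -[_mainController getSoundTrack:]. The element roles are inferred from the
// indices used above; the parameter names are hypothetical.
__attribute__((unused))
static void ElisSoundTrackLayoutSketch(NSMutableArray *soundTrack, id source,
                                       QTTrack *track, QTTime offset, QTTimeRange mapping)
{
    [soundTrack addObject:source];                                  // i+0: source object, copied into the result as-is
    [soundTrack addObject:track];                                   // i+1: QTTrack to take a segment from
    [soundTrack addObject:[NSValue valueWithQTTime:offset]];        // i+2: offset into the source track
    [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];  // i+3: where the segment lands on the timeline
}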