//
//  ElisWriterLegacy.m
//  Elis Colors
//
//  Created by 柳 on 09/09/23.
//  Copyright 2009 __MyCompanyName__. All rights reserved.
//

#import "ElisWriterLegacy.h"

// Handle requests for information about the output video data
static OSErr QTMoovProcs_VideoTrackPropertyProc(void *theRefcon, long theTrackID, OSType thePropertyType, void *thePropertyValue)
{
#pragma unused(theRefcon, theTrackID)

    OSErr myErr = noErr;

    switch (thePropertyType) {
        case movieExportUseConfiguredSettings:
            *(Boolean *)thePropertyValue = true;
            break;

        default:
            myErr = paramErr;   // non-zero value means: use default value provided by export component
            break;
    }

    return myErr;
}
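
// A hedged sketch (not how this writer is configured): the same property proc
// could also answer size queries itself instead of deferring to the exporter's
// defaults. The selector names below are assumed from QuickTimeComponents.h:
//
//     case movieExportWidth:
//         *(Fixed *)thePropertyValue = IntToFixed(640);    // example value only
//         break;
//     case movieExportHeight:
//         *(Fixed *)thePropertyValue = IntToFixed(480);    // example value only
//         break;
//
// Returning paramErr from the default case, as above, tells the export
// component to fall back to its own configured values.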

//--------------------------------------------------------------------------------------------------

// Provide the output video data.
// Something like a handler?
static OSErr QTMoovProcs_VideoTrackDataProc(void *theRefcon, MovieExportGetDataParams *theParams)
{
    return [(ElisWriterLegacy*)theRefcon exportFrame:theParams];
}

@implementation ElisWriterLegacy

- (id)init
{
    if ((self = [super init])) {
        gamma_table = malloc(sizeof(unsigned char) * 256);
        [NSBundle loadNibNamed:@"WriterProgress" owner:self];
    }
    return self;
}

- (void)write:(NSSavePanel*)sp
{
    [self reallyExportMovie:sp toPod:NO];
}

- (void)setMainWindow:(NSWindow*)w
{
    _mainWindow = w;
}

- (void)setMainController:(id)c
{
    _mainController = c;
    [_mainView setMainController:c];
}

- (void)setMainView:(id)v
{
    _mainView = v;
}

- (void)reallyExportMovie:(NSSavePanel *)savePanel toPod:(BOOL)exportToPod
{
    MovieExportComponent        myExporter = NULL;
    ComponentDescription        myCompDesc;
    Boolean                     myCancelled = false;
    long                        trackID;
    MovieExportGetPropertyUPP   theAudioPropProcUPP = nil;
    MovieExportGetDataUPP       theAudioDataProcUPP = nil;
    TimeScale                   audioScale = 0;
    void                        *audioRefCon = 0;

    OSErr                       err = noErr;

    // Preparation
    float floatTime;
    floatTime = [_mainController getHipTime];
    movieDuration = QTMakeTime(floatTime*DEFAULT_FPS, DEFAULT_FPS);
    rendering = YES;
    [self readyGammmaTable];
//    NSRect originFrame = [_mainView frame];
//    [_mainView setFrame:NSMakeRect(originFrame.origin.x, originFrame.origin.y, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
//    [_mainView setFrame:NSMakeRect(0, 0, ProjectMovieSize.size.width, ProjectMovieSize.size.height)];
    [_mainView setHidden:YES];
//    [_dummyWindow setFrame:*(NSRect*)&ProjectMovieSize display:YES];
//    [NSApp beginSheet:_dummyWindow modalForWindow:_mainWindow modalDelegate:self
//       didEndSelector:nil contextInfo:nil];
    [_dummyWindow setHidesOnDeactivate:YES];

    // Show the progress bar
    [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
        modalDelegate:self didEndSelector:nil contextInfo:NULL];

    // Export into a QuickTime movie
    myCompDesc.componentType = MovieExportType;
    myCompDesc.componentSubType = MovieFileType;
    myCompDesc.componentManufacturer = kAppleManufacturer;
    myCompDesc.componentFlags = canMovieExportFromProcedures;
    myCompDesc.componentFlagsMask = canMovieExportFromProcedures;

    // open the selected movie export component
    myExporter = OpenComponent(FindNextComponent(NULL, &myCompDesc));
    if (myExporter == NULL) {
        NSLog(@"could not find export component!");
        return;
    }

    // Hey exporter, support modern audio features
    Boolean useHighResolutionAudio = true;
    QTSetComponentProperty(myExporter, kQTPropertyClass_MovieExporter,
                           kQTMovieExporterPropertyID_EnableHighResolutionAudioFeatures,
                           sizeof(Boolean),
                           &useHighResolutionAudio);

    // create UPPs for the two app-defined export functions
    MovieExportGetPropertyUPP theVideoPropProcUPP = NewMovieExportGetPropertyUPP(QTMoovProcs_VideoTrackPropertyProc);
    MovieExportGetDataUPP     theVideoDataProcUPP = NewMovieExportGetDataUPP(QTMoovProcs_VideoTrackDataProc);

    // Note: this uses instance variables!
    MovieExportAddDataSource(myExporter, VideoMediaType,
                             movieDuration.timeScale,    // use the original timescale
                             &trackID,
                             theVideoPropProcUPP,
                             theVideoDataProcUPP,
                             self);

    // Not needed for now
    // setup audio
    // audioTracks is filled by getSoundTrack: in (QTMovie, QTTrack, QTTime) triples
    NSMutableArray* audioTracks = [[NSMutableArray alloc] init];
    [_mainController getSoundTrack:audioTracks];
    int aui, tsize = [audioTracks count];
    QTTime qtr;
    for(aui = 0; aui < tsize; aui += 3){
        // we are setting up the audio for pass through
        // Using instance variables!
        qtr = [[audioTracks objectAtIndex:aui+2] QTTimeValue];
        err = MovieExportNewGetDataAndPropertiesProcs(myExporter, SoundMediaType,
                                                      &audioScale,
                                                      [[audioTracks objectAtIndex:aui] quickTimeMovie],
                                                      [[audioTracks objectAtIndex:aui+1] quickTimeTrack],       // we only use the first audio here
                                                      0,
                                                      qtr.timeValue,
                                                      &theAudioPropProcUPP,
                                                      &theAudioDataProcUPP,
                                                      &audioRefCon);
        if (err) {
            NSLog(@"Can't get audio for export");
        } else {
            MovieExportAddDataSource(myExporter, SoundMediaType, audioScale, &trackID, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
        }
    }

    // This is used.
    if (NO == exportToPod) {
        // Using instance variables!
        _myExporter = myExporter;
        [self performSelectorOnMainThread:@selector(movieExportDialogMainThread) withObject:nil waitUntilDone:YES];
//        MovieExportDoUserDialog(myExporter, NULL, NULL, 0, movieDuration.timeValue, &myCancelled);
        myExporter = _myExporter;
        myCancelled = _myCancelled;

        if (myCancelled) {
            NSLog(@"User canceled export dialog");
            DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
            DisposeMovieExportGetDataUPP(theVideoDataProcUPP);

            if (theAudioPropProcUPP && theAudioDataProcUPP) {
                MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
            }

            CloseComponent(myExporter);

            return;
        }
    }

    isExporting = YES;
    cancelExport = NO;

    OSType myDataType;
    Handle myDataRef;

    NSRect      frame = *(NSRect*)&ProjectMovieSize;
    outputSize = frame;

    // create the readback and flipping buffers - see note about flipping in exportFrame method
    // A pile of instance variables
    outputWidth = frame.size.width;
    outputHeight = frame.size.height;
    //outputWidth = 720;
    //outputHeight = 480;
    outputAlignment = 4;
    contextRowBytes = outputWidth * outputAlignment;
    contextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));
    flippedContextPixels = calloc(contextRowBytes * outputHeight, sizeof(char));

    // setup the image description for the frame compression
    outputImageDescription = (ImageDescriptionHandle)NewHandleClear(sizeof(ImageDescription));
    (*outputImageDescription)->idSize = sizeof(ImageDescription);
#ifdef __BIG_ENDIAN__
    (*outputImageDescription)->cType = k32ARGBPixelFormat;
#else
    (*outputImageDescription)->cType = k32BGRAPixelFormat;
#endif
    (*outputImageDescription)->vendor = kAppleManufacturer;
    (*outputImageDescription)->spatialQuality = codecLosslessQuality;
    (*outputImageDescription)->width = outputWidth;
    (*outputImageDescription)->height = outputHeight;
    (*outputImageDescription)->hRes = 72L<<16;
    (*outputImageDescription)->vRes = 72L<<16;
    (*outputImageDescription)->dataSize = contextRowBytes * outputHeight;
    (*outputImageDescription)->frameCount = 1;
    (*outputImageDescription)->depth = 32;
    (*outputImageDescription)->clutID = -1;

    [NSApp beginSheet:_barSheet modalForWindow:_mainWindow
        modalDelegate:self didEndSelector:nil contextInfo:NULL];

    [[NSFileManager defaultManager] removeFileAtPath:[savePanel filename] handler:nil];
    // export the video data to the data reference
    QTNewDataReferenceFromCFURL((CFURLRef)[savePanel URL], 0, &myDataRef, &myDataType);

    // The main processing happens here.
    MovieExportFromProceduresToDataRef(myExporter, myDataRef, myDataType);

    // we are done with the .mov export so let's clean up
    // Cleanup
    free(contextPixels);
    contextPixels = nil;
    free(flippedContextPixels);
    flippedContextPixels = nil;
    DisposeMovieExportGetPropertyUPP(theVideoPropProcUPP);
    DisposeMovieExportGetDataUPP(theVideoDataProcUPP);

    if (theAudioPropProcUPP && theAudioDataProcUPP) {
        MovieExportDisposeGetDataAndPropertiesProcs(myExporter, theAudioPropProcUPP, theAudioDataProcUPP, audioRefCon);
    }

    if (outputImageDescription) DisposeHandle((Handle)outputImageDescription);
    outputImageDescription = NULL;

    CloseComponent(myExporter);

    // dispose the original data reference
    DisposeHandle(myDataRef);

    [_barSheet close];
    [NSApp endSheet:_barSheet];

    [_mainView setHidden:NO];
    [_dummyWindow close];
    isExporting = NO;
    rendering = NO;
}

// This function should be called once for every output frame.
- (OSErr)exportFrame:(MovieExportGetDataParams *)theParams
{
    if (cancelExport) return userCanceledErr;

    if (theParams->requestedTime > movieDuration.timeValue) return eofErr;

    // As the export is done in a tight loop it is a good idea to have an
    // autorelease pool in the render frame call so we don't accumulate
    // objects over the lengthy process and therefore fill the memory.
    NSAutoreleasePool *myPool = [[NSAutoreleasePool alloc] init];
    QTTime currentTime;

    currentTime.timeValue = theParams->requestedTime;
    currentTime.timeScale = movieDuration.timeScale;
    currentTime.flags = 0;

//    [qtMovie setCurrentTime:currentTime];
//    MoviesTask([qtMovie quickTimeMovie], 0);  // QTKit is not doing this automatically

    // render the frame
//    [self updateCurrentFrame];
//    [self display];
//    [self performSelectorOnMainThread:@selector(renderFrame2:) withObject:[NSValue valueWithQTTime:currentTime] waitUntilDone:YES];
    [_mainView getFrameForQTTime:currentTime];

    // read the frame from the context into our buffer
//    if([self readbackFrameIntoBuffer:contextPixels alignment:outputAlignment width:outputWidth height:outputHeight offsetX:0 offsetY:0]) {
//        NSLog(@"could not readback image!");
//    }
    [_mainView getCurrentPixelData:outputSize buffer:contextPixels];

    /* WHY IS THIS memcpy ROUTINE HERE?
     The way the pixels are read back through glReadPixels is flipped relative to what QuickTime expects.
     This can easily be worked around by rendering upside down - just switch the minY and maxY in glOrtho.
     But since we display the exported image during the export process in this example, we don't want to do this
     for visual purposes (because the image on the screen would end up being upside down),
     therefore we resort to flipping the image by hand.
     */
    int i = outputHeight;
    while(--i >= 0) {
        memcpy(flippedContextPixels + ((outputHeight - i - 1) * contextRowBytes), contextPixels + (i * contextRowBytes), contextRowBytes);
    }

    // end flipping code
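
    /* A hedged sketch of the glOrtho workaround mentioned above (deliberately not
       used here because the on-screen preview would then appear upside down):
       rendering with a flipped projection makes glReadPixels return rows in the
       order QuickTime expects, so the memcpy flip above becomes unnecessary.

           // hypothetical render setup - bottom/top swapped relative to the
           // usual glOrtho(0, outputWidth, 0, outputHeight, -1.0, 1.0)
           glMatrixMode(GL_PROJECTION);
           glLoadIdentity();
           glOrtho(0, outputWidth, outputHeight, 0, -1.0, 1.0);

       Whether _mainView's render path actually goes through glOrtho is an
       assumption about code that is not part of this file.
     */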

    // Gamma-correct to compensate for the color shift. This is awful.
    [self gammaAdjust:flippedContextPixels size:outputHeight*outputWidth*4];

    // fill the return parameters for the compression
    theParams->actualTime = theParams->requestedTime;
    theParams->dataPtr = (void*)flippedContextPixels;
    theParams->dataSize = (**(outputImageDescription)).dataSize;
    theParams->desc = (SampleDescriptionHandle)outputImageDescription;
    theParams->descType = VideoMediaType;
    theParams->descSeed = 1;
    theParams->actualSampleCount = 1;
    theParams->durationPerSample = currentTime.timeScale / 30;     // one frame, assuming a 30 fps timescale
    theParams->sampleFlags = 0L;

    [myPool release];

//    NSLog(@"%f [s]", (float)currentTime.timeValue/currentTime.timeScale);
    [self performSelectorOnMainThread:@selector(changeBarValue:)
                           withObject:[NSNumber numberWithDouble:(double)currentTime.timeValue/movieDuration.timeValue]
                        waitUntilDone:YES];

    return noErr;
}

- (void)changeBarValue:(NSNumber*)v
{
    [_bar setDoubleValue:[v doubleValue]];
}

- (void)renderFrame2:(NSValue*)v
{
    [_mainView getFrameForQTTime:[v QTTimeValue]];
}

- (void)finalize
{
    free(gamma_table);
    [super finalize];
}

// Build a 256-entry lookup table mapping v -> 255 * (v/255)^(1/GAMMA).
- (void)readyGammmaTable
{
    int i;

    for(i = 0; i < 256; i++)
        gamma_table[i] = (unsigned char)(255.0 * pow(i/255.0, 1.0/GAMMA));
}
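
/* For illustration (GAMMA is defined elsewhere in the project; 2.2 is assumed
   here purely as an example): with GAMMA = 2.2, a mid-gray input of 128 maps to
   255 * (128/255)^(1/2.2) ≈ 186, so the table brightens values to compensate
   for the color shift mentioned in exportFrame. */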

// Apply the gamma lookup table to every color byte, skipping every fourth byte
// (the alpha channel in the BGRA case).
- (void)gammaAdjust:(unsigned char*)pixels size:(int)s
{
    int i;
    for(i = 0; i < s; i++){
        if(i%4 == 3) continue;
        pixels[i] = gamma_table[pixels[i]];
    }
}

// Run the exporter's settings dialog on the main thread.
- (void)movieExportDialogMainThread
{
    MovieExportDoUserDialog(_myExporter, NULL, NULL, 0, movieDuration.timeValue, &_myCancelled);
}

// Expects getSoundTrack: to fill the array in groups of four
// (object, QTTrack, offset QTTime, mapping QTTimeRange) and returns
// (object, remapped QTTrack) pairs.
- (NSMutableArray*)getAudioTrack
{
    NSMutableArray* soundTrack = [[NSMutableArray alloc] init];
    QTTrack* track, *newTrack;
    int i, size;
    QTTime offset;
    QTTimeRange mapping;
    NSMutableArray* result = [[NSMutableArray alloc] init];

    [_mainController getSoundTrack:soundTrack];
    size = [soundTrack count];

    for(i = 0; i < size; i += 4){
        newTrack = [[QTTrack alloc] init];
        track = [soundTrack objectAtIndex:i+1];
        offset = [[soundTrack objectAtIndex:i+2] QTTimeValue];
        mapping = [[soundTrack objectAtIndex:i+3] QTTimeRangeValue];
        [newTrack insertSegmentOfTrack:track timeRange:QTMakeTimeRange(offset, mapping.duration) atTime:mapping.time];
        [result addObject:[soundTrack objectAtIndex:i]];
        [result addObject:newTrack];
    }

    return result;
}

@end