1 // Copyright (c) 2009 Yanagi Asakura
3 // This software is provided 'as-is', without any express or implied
4 // warranty. In no event will the authors be held liable for any damages
5 // arising from the use of this software.
7 // Permission is granted to anyone to use this software for any purpose,
8 // including commercial applications, and to alter it and redistribute it
9 // freely, subject to the following restrictions:
11 // 1. The origin of this software must not be misrepresented; you must not
12 // claim that you wrote the original software. If you use this software
13 // in a product, an acknowledgment in the product documentation would be
14 // appreciated but is not required.
16 // 2. Altered source versions must be plainly marked as such, and must not be
17 // misrepresented as being the original software.
19 // 3. This notice may not be removed or altered from any source
26 // Created by 柳 on 09/09/12.
27 // Copyright 2009 __MyCompanyName__. All rights reserved.
// Converts a QTTime to seconds as a float (timeValue / timeScale).
// NOTE(review): no guard for t.timeScale == 0 — would divide by zero for an
// uninitialized/invalid QTTime; confirm callers never pass one.
static float convertQTTimeToSecond(QTTime t)
return (float)t.timeValue/t.timeScale;
39 @implementation ElisLayer
// Initializer body (fragment — the enclosing method signature is not visible
// in this chunk). Creates the effect chain and the X/Y position keyframe
// tracks, seeding both positions with 0.0 at time zero, then installs the
// default filters every layer carries.
effects = [[NSMutableArray alloc] init];
pxKeyframe = [[ElisKeyframe alloc] init];
pyKeyframe = [[ElisKeyframe alloc] init];
[pxKeyframe setValueForTime:0.0 time:QTZeroTime];
[pyKeyframe setValueForTime:0.0 time:QTZeroTime];
[self addEffect:@"CIOpacity"]; // by default attach an opacity filter and
// [self addEffect:@"CILanczosScaleTransform"]; // a scaling filter and
[self addEffect:@"CIAffineTransform"]; // an affine transform filter.
// Associates a timeline CALayer with this ElisLayer: stores a back-pointer
// to self on the layer under the key @"ElisLayer" and registers self as a
// KVO observer on the layer (NSKeyValueObservingOptionNew).
// NOTE(review): the observed key path line is not visible in this chunk —
// presumably @"frame", matching the removeObserver:forKeyPath:@"frame" call
// elsewhere in this file; confirm against the full source.
- (void)setAlayer:(CALayer *)layer
[layer setValue:self forKey:@"ElisLayer"];
[layer addObserver:self
options:(NSKeyValueObservingOptionNew)
76 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
// Appends a new ElisEffect, built from the given Core Image filter name,
// to this layer's effect chain (applied in order by applyEffects:forTime:).
- (void)addEffect:(NSString*)name
[effects addObject:[[ElisEffect alloc] initWithName:name]];
// Recomputes mapping and trackNumber from the associated CALayer's frame
// (fragment — the method signature is not visible in this chunk).
// Timeline geometry: x position -> start time, width -> duration, both
// scaled by timeLineScale at DEFAULT_FPS; each track row is 51 px tall.
CGRect frame = alayer.frame;
QTTime begin = QTMakeTime(frame.origin.x*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
QTTime d = QTMakeTime(frame.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
// trackNumber = floor((frame.origin.y+1)/51.0);
// Snap the row's y to its 51-px slot, then round the slot center to a track index.
trackNumber = round((floor(frame.origin.y/51.0) * 51 + 1 + 25)/51.0);
mapping = QTMakeTimeRange(begin, d);
// Find which track band contains the layer's vertical center.
int i, center = frame.origin.y + frame.size.height/2;
for(i = 0; i < TRACK_SIZE; i++){ // original note: "why am I looping for something this trivial..."
if(51.0*i+1 <= center && center <= 51.0*(i+1)+1){
// Shifts the media's in-point by df timeline pixels, converted to time
// at DEFAULT_FPS via timeLineScale (fragment — the assignment back to
// `offset` is not visible in this chunk).
- (void)changeOffset:(float)df
QTTime new_offset = QTTimeIncrement(offset, QTMakeTime(df*DEFAULT_FPS/timeLineScale, DEFAULT_FPS));
// Returns whether the layer's timeline rect may be resized to `rect`:
// the requested duration must be at least 0.5 s and no longer than the
// media time remaining after the current offset. Still images and text
// have no intrinsic length, so they get a fixed 10-minute virtual duration.
// NOTE(review): an `else` line between the two duration assignments is not
// visible in this chunk — presumably present in the full source.
- (BOOL)canChangeMapping:(CGRect)rect
if([[media type] isEqualToString:@"image"] || [[media type] isEqualToString:@"text"])
duration = QTMakeTime(60*10, 1);
duration = [media duration];
QTTime wantDuration = QTMakeTime(rect.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
float d = convertQTTimeToSecond(duration) - convertQTTimeToSecond(offset);
float wd = convertQTTimeToSecond(wantDuration);
return 0.5 <= wd && wd <= d;
// Returns whether the offset may be shifted by df seconds: the resulting
// offset must stay within [0, media duration].
- (BOOL)canChangeOffset:(float)df
float now_offset = convertQTTimeToSecond(offset);
float duration = convertQTTimeToSecond([media duration]);
return 0.0 <= df + now_offset && df + now_offset <= duration;
// Returns YES if the global timeline time falls inside this layer's mapped
// range [begin, begin + duration], inclusive at both ends. Compares in
// float seconds rather than using QTTimeInTimeRange (see original note).
- (BOOL)isInclude:(QTTime)time
float t = convertQTTimeToSecond(time);
float begin = convertQTTimeToSecond(mapping.time);
return begin <= t && t <= begin + convertQTTimeToSecond(mapping.duration);
// return QTTimeInTimeRange(time, mapping); // original note: "wouldn't this prevent multithreading?"
// Records an X-position keyframe. Two setValueForTime: calls are visible;
// the branch condition between them is not in this chunk — presumably the
// value is keyed at `time` when keyframing is active, otherwise pinned at
// QTZeroTime. Confirm against the full source.
- (void)setPositionX:(float)x forTime:(QTTime)time
[pxKeyframe setValueForTime:x time:time];
[pxKeyframe setValueForTime:x time:QTZeroTime];
// Records a Y-position keyframe; mirrors setPositionX:forTime:. The branch
// condition between the two calls is not visible in this chunk — presumably
// keyed at `time` when keyframing, otherwise pinned at QTZeroTime.
- (void)setPositionY:(float)y forTime:(QTTime)time
[pyKeyframe setValueForTime:y time:time];
[pyKeyframe setValueForTime:y time:QTZeroTime];
// Produces this layer's effected frame for a global timeline time, using a
// CoreVideo timestamp when available. Converts to layer-local time, pulls a
// frame from the media, then runs it through the effect chain at the
// media-local time (innerTime + offset). Returns nil for sound media or
// when no frame is available.
// NOTE(review): a branch between the two frame-fetch paths (timestamp vs.
// setCurrentTime + nil fetch) is not visible in this chunk.
- (CIImage*)getEffectedImage:(CVTimeStamp*)timeStamp forTime:(QTTime)time
QTTime innerTime = QTTimeDecrement(time, mapping.time);
if([[media type] isEqualToString:@"sound"]) return nil;
image = [media getFrameForTime:timeStamp];
[media setCurrentTime:QTTimeIncrement(innerTime, offset)];
image = [media getFrameForTime:nil];
if(image == nil) return nil;
return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
// Produces this layer's effected frame for a global timeline time without a
// CoreVideo timestamp: seeks the media to innerTime + offset, grabs the
// current frame, and applies the effect chain. Returns nil if no frame.
- (CIImage*)getEffectedImageWithoutStamp:(QTTime)time
QTTime innerTime = QTTimeDecrement(time, mapping.time);
[media setCurrentTime:QTTimeIncrement(innerTime, offset)];
// CIImage* image = [media getFrameForQTTime:QTTimeIncrement(innerTime, offset)];
CIImage* image = [media getFrameForTime:nil];
if(image == nil) return nil;
return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
// Threads the image through every effect in order: each effect receives the
// previous stage's output as its input image and renders at `time`
// (media-local time, i.e. innerTime + offset).
- (CIImage*)applyEffects:(CIImage*)image forTime:(QTTime)time
int i, size = [effects count];
for(i = 0; i < size; i++){
ef = [effects objectAtIndex:i];
[ef setInputImage:image];
image = [ef getImage:time];
// Samples the X/Y position keyframe tracks at `time` and returns the
// interpolated point.
- (NSPoint)getPositionForTime:(QTTime)time
float x = [pxKeyframe getValueForTime:time];
float y = [pyKeyframe getValueForTime:time];
return NSMakePoint(x, y);
// Forwards context teardown to the underlying media object.
- (void)releaseContext
[media releaseContext];
231 - (QTTimeRange)mapping
// Seeks the media to match a global timeline time: inside the layer's range
// the media is positioned at layer-relative time plus offset; the trailing
// setCurrentTime:offset is the out-of-range fallback (its `else` line is
// not visible in this chunk).
- (void)seek:(QTTime)time
if([self isInclude:time]){
QTTime innerTime = QTTimeDecrement(time, mapping.time); // convert to layer-relative time
[media setCurrentTime:QTTimeIncrement(innerTime, offset)];
[media setCurrentTime:offset];
// for Property Table
// Populates three parallel arrays backing the property table UI:
//   t_effects       — the object that owns each row (self for position rows,
//                     the ElisEffect for filter-parameter rows)
//   t_propertyNames — the display name for each row
//   t_valueNames    — the effect parameter key ("" for position rows)
// Rows: "Position X" / "Position Y" first, then one row per parameter of
// each effect, parameters sorted by key.
- (void)createPropertyTableDataSource:(NSMutableArray*)t_effects
property:(NSMutableArray*)t_propertyNames
value:(NSMutableArray*)t_valueNames
NSMutableDictionary* params;
NSString* paramName, *effectName;
[t_propertyNames addObject:@"Position X"];
[t_effects addObject:self];
[t_valueNames addObject:@""];
[t_propertyNames addObject:@"Position Y"];
[t_effects addObject:self];
[t_valueNames addObject:@""];
int i, size = [effects count];
for(i = 0; i < size; i++){
params = [[effects objectAtIndex:i] getParamDictionary];
arr = [params allKeys];
arr = [arr sortedArrayUsingSelector:@selector(compare:)];
effectName = [[effects objectAtIndex:i] getName];
for(paramName in arr){
// substringFromIndex:5 drops the first five characters — presumably the
// "input" prefix of CIFilter parameter keys; confirm against ElisEffect.
[t_propertyNames addObject:[NSString stringWithFormat:@"%@ %@", effectName, [paramName substringFromIndex:5]]];
[t_effects addObject:[effects objectAtIndex:i]];
[t_valueNames addObject:paramName];
// Converts a global timeline time to media-local time:
// (globalTime - mapping.time) + offset.
// NOTE(review): selector misspells "Inner" as "Innner"; renaming would break
// existing callers, so it is left as-is.
- (QTTime)convertToInnnerTime:(QTTime)globalTime
return QTTimeIncrement(QTTimeDecrement(globalTime, mapping.time), offset);
// Returns `time` advanced by this layer's media offset.
- (QTTime)plusOffsetTime:(QTTime)time
return QTTimeIncrement(time, offset);
290 [alayer removeObserver:self forKeyPath:@"frame"];
// Discards all X-position keyframes by replacing the track with a fresh one
// seeded with 0.0 at time zero (matching the initializer's default).
- (void)removePositionXKeyframe
pxKeyframe = [[ElisKeyframe alloc] init];
[pxKeyframe setValueForTime:0.0 time:QTZeroTime];
// Discards all Y-position keyframes; mirrors removePositionXKeyframe.
// NOTE(review): selector misspells "Keyframe" as "Kerframe"; renaming would
// break existing callers, so it is left as-is.
- (void)removePositionYKerframe
pyKeyframe = [[ElisKeyframe alloc] init];
[pyKeyframe setValueForTime:0.0 time:QTZeroTime];
// Removes the given effect from this layer's effect chain.
- (void)removeEffect:(ElisEffect*)ef
[effects removeObject:ef];
// Appends this layer's sound movie, its track, and the layer's end time (as
// an NSValue-wrapped QTTime) to `soundTrack` for export/mixdown. Rescales
// `mapping` to the sound movie's time scale, trims the leading `offset`
// from the movie, then inserts empty time so the audio starts at the
// layer's timeline position.
// NOTE(review): this mutates both the sound movie and the `mapping` ivar as
// a side effect — callers presumably invoke it only during export.
- (void)getSoundTrack:(NSMutableArray*)soundTrack
t = [media getSoundTrack];
[(QTMovie*)[media getSoundMovie] setAttribute:[NSNumber numberWithBool:YES] forKey:QTMovieEditableAttribute];
qtr = [media duration];
mapping.time = QTMakeTime(convertQTTimeToSecond(mapping.time)*qtr.timeScale, qtr.timeScale);
mapping.duration = QTMakeTime(convertQTTimeToSecond(mapping.duration)*qtr.timeScale, qtr.timeScale);
[[media getSoundMovie] deleteSegment:QTMakeTimeRange(QTZeroTime, offset)];
[[media getSoundMovie] insertEmptySegmentAt:QTMakeTimeRange(QTZeroTime, mapping.time)];
// [[media getSoundMovie] scaleSegment:qtr newDuration:QTMakeTimeRange(offset, mapping.duration)];
[soundTrack addObject:[media getSoundMovie]];
[soundTrack addObject:t];
[soundTrack addObject:[NSValue valueWithQTTime:QTTimeIncrement(mapping.duration, mapping.time)]];
// [soundTrack addObject:[NSValue valueWithQTTime:offset]];
// [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
// NSCoding serialization: QTTime/QTTimeRange values are stored as their
// string forms (QTStringFromTime / QTStringFromTimeRange); everything else
// is archived as objects. Keys mirror initWithCoder:.
- (void)encodeWithCoder:(NSCoder*)encoder
[encoder encodeObject:media forKey:@"media"];
[encoder encodeInt:trackNumber forKey:@"trackNumber"];
[encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
[encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
[encoder encodeObject:effects forKey:@"effects"];
[encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
[encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
// NSCoding deserialization counterpart of encodeWithCoder:; time values are
// reconstructed from their string forms. (The `return self` / `[super init]`
// lines are not visible in this chunk.)
- (id)initWithCoder:(NSCoder*)coder
media = [coder decodeObjectForKey:@"media"];
trackNumber = [coder decodeIntForKey:@"trackNumber"];
mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
effects = [coder decodeObjectForKey:@"effects"];
pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
// Partial serialization used outside full NSCoding: identical to
// encodeWithCoder: except the media object itself is not archived.
- (void)saveToEncoder:(NSCoder*)encoder
[encoder encodeInt:trackNumber forKey:@"trackNumber"];
[encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
[encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
[encoder encodeObject:effects forKey:@"effects"];
[encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
[encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
// Partial deserialization counterpart of saveToEncoder: (media is left
// untouched). After restoring state it re-syncs the timeline CALayer's
// geometry via setLayer:.
- (void)loadFromDecoder:(NSCoder*)coder
trackNumber = [coder decodeIntForKey:@"trackNumber"];
mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
effects = [coder decodeObjectForKey:@"effects"];
pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
[self setLayer:alayer];
// Positions the timeline CALayer from this layer's state (inverse of the
// frame->mapping computation): x = start seconds * timeLineScale,
// y = trackNumber * 51 + 1, width = duration seconds * timeLineScale,
// height = 50 (one 51-px track row minus the 1-px gap). Then re-associates
// the layer via setAlayer:.
- (void)setLayer:(CALayer*)layer
layer.frame = CGRectMake(convertQTTimeToSecond(mapping.time)*timeLineScale, trackNumber*51+1,
(/*convertQTTimeToSecond(mapping.time) + */convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
[self setAlayer:layer];
387 return convertQTTimeToSecond(mapping.time) + convertQTTimeToSecond(mapping.duration);
392 return [[media path] lastPathComponent];