1 // Copyright (c) 2009 Yanagi Asakura
3 // This software is provided 'as-is', without any express or implied
4 // warranty. In no event will the authors be held liable for any damages
5 // arising from the use of this software.
7 // Permission is granted to anyone to use this software for any purpose,
8 // including commercial applications, and to alter it and redistribute it
9 // freely, subject to the following restrictions:
11 // 1. The origin of this software must not be misrepresented; you must not
12 // claim that you wrote the original software. If you use this software
13 // in a product, an acknowledgment in the product documentation would be
14 // appreciated but is not required.
16 // 2. Altered source versions must be plainly marked as such, and must not be
17 // misrepresented as being the original software.
19 // 3. This notice may not be removed or altered from any source
26 // Created by 柳 on 09/09/12.
27 // Copyright 2009 __MyCompanyName__. All rights reserved.
// Converts a QTTime into seconds as a float (timeValue / timeScale).
// NOTE(review): no guard for t.timeScale == 0 — an invalid/indefinite QTTime
// would divide by zero here; confirm callers never pass one.
34 static float convertQTTimeToSecond(QTTime t)
36 return (float)t.timeValue/t.timeScale;
39 @implementation ElisLayer
// (init — surrounding lines elided in this view) Default state for a new layer:
// an empty effect chain and X/Y position keyframe tracks pinned to 0 at time zero.
49 effects = [[NSMutableArray alloc] init];
50 pxKeyframe = [[ElisKeyframe alloc] init];
51 pyKeyframe = [[ElisKeyframe alloc] init];
52 [pxKeyframe setValueForTime:0.0 time:QTZeroTime];
53 [pyKeyframe setValueForTime:0.0 time:QTZeroTime];
// originSize is a rect (see -size, which returns originSize.size); it is filled
// in later from [media size] in -setAlayer:.
55 originSize = NSZeroRect;
// Build the default filter chain; order matters (opacity first, affine last).
57 [self addEffect:@"CIOpacity"]; // the opacity filter by default, and
58 // [self addEffect:@"CILanczosScaleTransform"]; // the scaling filter, and
59 [self addEffect:@"CIAffineTransform"]; // the affine transform filter are always attached.
// Attaches the timeline CALayer that represents this ElisLayer in the UI.
65 - (void)setAlayer:(CALayer *)layer
// Back-reference so the CALayer can be mapped back to its owning ElisLayer.
69 [layer setValue:self forKey:@"ElisLayer"];
// KVO on the layer (keyPath argument elided in this view; the matching
// removeObserver uses @"frame") so timeline drags update mapping/trackNumber.
72 [layer addObserver:self
74 options:(NSKeyValueObservingOptionNew)
// Cache the media's natural bounds for later size queries.
77 originSize = [media size];
// KVO callback for the layer registered in -setAlayer: (body elided in this view).
80 - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
// Appends a new ElisEffect wrapping the named Core Image filter to the chain.
85 - (void)addEffect:(NSString*)name
87 [effects addObject:[[ElisEffect alloc] initWithName:name]];
90 // Updates mapping and trackNumber from the layer's frame on the timeline.
// (method signature elided in this view)
93 CGRect frame = alayer.frame;
// Timeline x → start time, width → duration, both in DEFAULT_FPS timescale.
// NOTE(review): QTMakeTime takes an integer time value; the float product is
// implicitly truncated here — confirm that is intended.
94 QTTime begin = QTMakeTime(frame.origin.x*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
95 QTTime d = QTMakeTime(frame.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
97 // trackNumber = floor((frame.origin.y+1)/51.0);
// Each track row is 51 px tall; snap y to the nearest row index.
98 trackNumber = round((floor(frame.origin.y/51.0) * 51 + 1 + 25)/51.0);
99 mapping = QTMakeTimeRange(begin, d);
// Find which 51-px row contains the layer's vertical center.
101 int i, center = frame.origin.y + frame.size.height/2;
102 for(i = 0; i < TRACK_SIZE; i++){ // original note: "looping for something this trivial..."
103 if(51.0*i+1 <= center && center <= 51.0*(i+1)+1){
// Shifts the media start offset by df timeline pixels, converted to time
// at DEFAULT_FPS (rest of the body elided in this view).
110 - (void)changeOffset:(float)df
112 QTTime new_offset = QTTimeIncrement(offset, QTMakeTime(df*DEFAULT_FPS/timeLineScale, DEFAULT_FPS));
// Whether the timeline layer may be resized to `rect`.
117 - (BOOL)canChangeMapping:(CGRect)rect
// Still images and text have no intrinsic duration; cap them at 10 minutes.
121 if([[media type] isEqualToString:@"image"] || [[media type] isEqualToString:@"text"])
122 duration = QTMakeTime(60*10, 1);
124 duration = [media duration];
// Requested duration implied by the new width.
126 QTTime wantDuration = QTMakeTime(rect.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
// Seconds of media remaining past the current offset.
127 float d = convertQTTimeToSecond(duration) - convertQTTimeToSecond(offset);
128 float wd = convertQTTimeToSecond(wantDuration);
// Allow only widths of at least 0.5 s that fit inside the remaining media.
130 return 0.5 <= wd && wd <= d;
// Whether the media offset may be shifted by df seconds: the resulting
// offset must stay within [0, media duration].
133 - (BOOL)canChangeOffset:(float)df
135 float now_offset = convertQTTimeToSecond(offset);
136 float duration = convertQTTimeToSecond([media duration]);
139 return 0.0 <= df + now_offset && df + now_offset <= duration;
// True if the global timeline `time` falls inside this layer's mapped range.
// Implemented with float seconds rather than QTTimeInTimeRange — see note below.
142 - (BOOL)isInclude:(QTTime)time
144 float t = convertQTTimeToSecond(time);
145 float begin = convertQTTimeToSecond(mapping.time);
146 return begin <= t && t <= begin + convertQTTimeToSecond(mapping.duration);
147 // return QTTimeInTimeRange(time, mapping); // original note: "wouldn't this break multithreading?"
// Splits this layer at the global time `cutTime`: self keeps the front part,
// and a deep copy (made via keyed archive/unarchive round-trip) becomes the
// back part, returned to the caller (return statement elided in this view).
155 - (ElisLayer*)cutAtTime:(QTTime)cutTime
// Cut position expressed in media-local time (offset included).
157 QTTime innerCutTime = [self convertToInnnerTime:cutTime];
159 NSKeyedArchiver* corder;
160 NSKeyedUnarchiver* decoder;
161 NSMutableData* data = [NSMutableData data];
// Serialize self...
163 corder = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
164 [self encodeWithCoder:corder];
165 [corder finishEncoding];
// ...and deserialize into a fresh instance (`new` is a plain C identifier here).
167 new = [ElisLayer alloc];
168 decoder = [[NSKeyedUnarchiver alloc] initForReadingWithData:data];
169 new = [new initWithCoder:decoder];
170 [decoder finishDecoding];
// Trim self down to the part before the cut and refresh its timeline layer.
// NOTE(review): duration is decremented by the *global* cutTime, not by
// (cutTime - mapping.time); elided lines may compensate — verify.
172 mapping.duration = QTTimeDecrement(mapping.duration, cutTime);
173 [self setLayer:alayer];
// Trim the copy's front part off.
175 [new cutBack:innerCutTime];
// Trims this layer after a split (lines between the signature and the visible
// body are elided; they presumably adjust mapping.time/offset for the back half).
179 - (void)cutBack:(QTTime)time
183 mapping.duration = QTTimeDecrement(mapping.duration, time);
// Re-layout the timeline CALayer to match the new mapping.
184 [self setLayer:alayer];
// Records an X-position keyframe. An elided conditional chooses between the two
// calls below (presumably keyframe-recording mode vs. a single static value).
192 - (void)setPositionX:(float)x forTime:(QTTime)time
195 [pxKeyframe setValueForTime:x time:time];
197 [pxKeyframe setValueForTime:x time:QTZeroTime];
// Records a Y-position keyframe; mirrors -setPositionX:forTime: (the selecting
// conditional is elided in this view).
200 - (void)setPositionY:(float)y forTime:(QTTime)time
203 [pyKeyframe setValueForTime:y time:time];
205 [pyKeyframe setValueForTime:y time:QTZeroTime];
// Returns the effect-processed frame for global timeline `time`, or nil for
// audio-only media / missing frames. An elided conditional selects between the
// timestamp-driven fetch (214) and the seek-then-fetch path (216-217).
208 - (CIImage*)getEffectedImage:(CVTimeStamp*)timeStamp forTime:(QTTime)time
// Convert global time to layer-local time.
210 QTTime innerTime = QTTimeDecrement(time, mapping.time);
212 if([[media type] isEqualToString:@"sound"]) return nil;
214 image = [media getFrameForTime:timeStamp];
216 [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
217 image = [media getFrameForTime:nil];
219 if(image == nil) return nil;
// Run the frame through the effect chain at the media-local time.
221 return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
// Same as -getEffectedImage:forTime: but without a CoreVideo timestamp:
// explicitly seeks the media, then grabs the current frame.
224 - (CIImage*)getEffectedImageWithoutStamp:(QTTime)time
226 QTTime innerTime = QTTimeDecrement(time, mapping.time);
227 [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
228 // CIImage* image = [media getFrameForQTTime:QTTimeIncrement(innerTime, offset)];
229 CIImage* image = [media getFrameForTime:nil];
230 if(image == nil) return nil;
232 return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
// Pipes `image` through every effect in order: each effect receives the
// previous stage's output as its input image (trailing return elided in view).
235 - (CIImage*)applyEffects:(CIImage*)image forTime:(QTTime)time
237 int i, size = [effects count];
240 for(i = 0; i < size; i++){
241 ef = [effects objectAtIndex:i];
242 [ef setInputImage:image];
243 image = [ef getImage:time];
// Interpolated layer position at `time`, read from the X/Y keyframe tracks.
249 - (NSPoint)getPositionForTime:(QTTime)time
251 float x = [pxKeyframe getValueForTime:time];
252 float y = [pyKeyframe getValueForTime:time];
254 return NSMakePoint(x, y);
// Forwards rendering-context teardown to the underlying media object.
267 - (void)releaseContext
269 [media releaseContext];
// Accessor for the layer's timeline range (body elided in this view).
272 - (QTTimeRange)mapping
// Seeks the media to match a global timeline position: inside the mapped
// range seek to the corresponding local time, otherwise (else branch elided)
// rewind to the media's start offset.
277 - (void)seek:(QTTime)time
279 if([self isInclude:time]){
280 QTTime innerTime = QTTimeDecrement(time, mapping.time); // convert to layer-relative time
281 [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
283 [media setCurrentTime:offset];
288 // for Property Table
// Fills three parallel arrays backing the property-table UI: the target object
// (self for position rows, the effect otherwise), the display name, and the
// effect parameter key ("" for the position rows handled by this class).
289 - (void)createPropertyTableDataSource:(NSMutableArray*)t_effects
290 property:(NSMutableArray*)t_propertyNames
291 value:(NSMutableArray*)t_valueNames
293 NSMutableDictionary* params;
295 NSString* paramName, *effectName;
// The two built-in position rows come first.
298 [t_propertyNames addObject:@"Position X"];
299 [t_effects addObject:self];
300 [t_valueNames addObject:@""];
301 [t_propertyNames addObject:@"Position Y"];
302 [t_effects addObject:self];
303 [t_valueNames addObject:@""];
// Then one row per parameter of every attached effect, keys sorted for a
// stable display order.
305 int i, size = [effects count];
306 for(i = 0; i < size; i++){
307 params = [[effects objectAtIndex:i] getParamDictionary];
308 arr = [params allKeys];
309 arr = [arr sortedArrayUsingSelector:@selector(compare:)];
310 effectName = [[effects objectAtIndex:i] getName];
311 for(paramName in arr){
// substringFromIndex:5 strips a fixed-length prefix from the parameter key
// (presumably Core Image's "input" prefix — confirm against ElisEffect).
312 [t_propertyNames addObject:[NSString stringWithFormat:@"%@ %@", effectName, [paramName substringFromIndex:5]]];
313 [t_effects addObject:[effects objectAtIndex:i]];
314 [t_valueNames addObject:paramName];
// Maps a global timeline time into media-local time: subtract the layer's
// start, then add the media offset. (Selector typo "Innner" is kept — callers
// such as -cutAtTime: use it.)
319 - (QTTime)convertToInnnerTime:(QTTime)globalTime
321 return QTTimeIncrement(QTTimeDecrement(globalTime, mapping.time), offset);
// Convenience: `time` shifted forward by the media start offset.
324 - (QTTime)plusOffsetTime:(QTTime)time
326 return QTTimeIncrement(time, offset);
// Balances the addObserver: in -setAlayer: (enclosing method's signature is
// elided in this view — presumably teardown/dealloc).
331 [alayer removeObserver:self forKeyPath:@"frame"];
// Discards all X-position keyframes by replacing the track with a fresh one
// pinned to 0 at time zero (same default as init).
335 - (void)removePositionXKeyframe
337 pxKeyframe = [[ElisKeyframe alloc] init];
338 [pxKeyframe setValueForTime:0.0 time:QTZeroTime];
// Y-axis counterpart of -removePositionXKeyframe. (Selector typo "Kerframe"
// is kept — renaming would break existing callers.)
341 - (void)removePositionYKerframe
343 pyKeyframe = [[ElisKeyframe alloc] init];
344 [pyKeyframe setValueForTime:0.0 time:QTZeroTime];
// Detaches the given effect from this layer's effect chain.
347 - (void)removeEffect:(ElisEffect*)ef
349 [effects removeObject:ef];
// Prepares this layer's audio for export and appends [movie, track, end-time]
// to `soundTrack` (an early-exit guard for media without audio appears to be
// elided in this view).
352 - (void)getSoundTrack:(NSMutableArray*)soundTrack
356 t = [media getSoundTrack];
358 [(QTMovie*)[media getSoundMovie] setAttribute:[NSNumber numberWithBool:YES] forKey:QTMovieEditableAttribute];
// Rescale mapping into the movie's own timescale so the edits below line up.
359 qtr = [media duration];
360 mapping.time = QTMakeTime(convertQTTimeToSecond(mapping.time)*qtr.timeScale, qtr.timeScale);
361 mapping.duration = QTMakeTime(convertQTTimeToSecond(mapping.duration)*qtr.timeScale, qtr.timeScale);
// Drop the skipped head of the media, then pad with silence so the audio
// starts at the layer's timeline position.
362 [[media getSoundMovie] deleteSegment:QTMakeTimeRange(QTZeroTime, offset)];
363 [[media getSoundMovie] insertEmptySegmentAt:QTMakeTimeRange(QTZeroTime, mapping.time)];
364 // [[media getSoundMovie] scaleSegment:qtr newDuration:QTMakeTimeRange(offset, mapping.duration)];
365 [soundTrack addObject:[media getSoundMovie]];
366 [soundTrack addObject:t];
// Global end time of this layer's audio.
367 [soundTrack addObject:[NSValue valueWithQTTime:QTTimeIncrement(mapping.duration, mapping.time)]];
368 // [soundTrack addObject:[NSValue valueWithQTTime:offset]];
369 // [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
// NSCoding serialization. QTTime/QTTimeRange are not objects, so they are
// stored via their string representations; keys must match -initWithCoder:.
373 - (void)encodeWithCoder:(NSCoder*)encoder
375 [encoder encodeObject:media forKey:@"media"];
376 [encoder encodeInt:trackNumber forKey:@"trackNumber"];
377 [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
378 [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
379 [encoder encodeObject:effects forKey:@"effects"];
380 [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
381 [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
// NSCoding deserialization; inverse of -encodeWithCoder:.
// NOTE(review): no `self = [super init]` / `return self` visible — presumably
// on the elided lines; confirm.
384 - (id)initWithCoder:(NSCoder*)coder
386 media = [coder decodeObjectForKey:@"media"];
387 trackNumber = [coder decodeIntForKey:@"trackNumber"];
388 mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
389 offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
390 effects = [coder decodeObjectForKey:@"effects"];
391 pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
392 pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
// Project-file save: like -encodeWithCoder: but omits the media object itself
// and additionally persists the playback speed.
397 - (void)saveToEncoder:(NSCoder*)encoder
399 [encoder encodeInt:trackNumber forKey:@"trackNumber"];
400 [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
401 [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
402 [encoder encodeObject:effects forKey:@"effects"];
403 [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
404 [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
405 [encoder encodeFloat:[media speed] forKey:@"speed"];
// Project-file load: inverse of -saveToEncoder:; assumes `media` was attached
// separately, then re-lays out the timeline CALayer from the restored mapping.
408 - (void)loadFromDecoder:(NSCoder*)coder
410 trackNumber = [coder decodeIntForKey:@"trackNumber"];
411 mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
412 offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
413 effects = [coder decodeObjectForKey:@"effects"];
414 pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
415 pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
416 [media setSpeed:[coder decodeFloatForKey:@"speed"]];
418 [self setLayer:alayer];
// Positions the timeline CALayer from model state: x/width from mapping
// (seconds * timeLineScale px), y from trackNumber (51-px rows), height 50.
421 - (void)setLayer:(CALayer*)layer
423 layer.frame = CGRectMake(convertQTTimeToSecond(mapping.time)*timeLineScale, trackNumber*51+1,
424 (/*convertQTTimeToSecond(mapping.time) + */convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
// Re-register observers / back-reference for the (possibly new) layer.
425 [self setAlayer:layer];
// Fragment (signature elided): the layer's global end time in seconds.
430 return convertQTTimeToSecond(mapping.time) + convertQTTimeToSecond(mapping.duration);
// Fragment (signature elided — presumably -getPath): file name of the media.
435 return [[media path] lastPathComponent];
// Display name for the UI: text media gets special handling (branch body
// elided in this view); everything else shows its file name.
443 - (NSString*)printName
445 if([[media type] isEqualToString:@"text"])
448 return [self getPath];
// Fragment (signature elided — presumably -size): media's natural size.
453 return originSize.size;
// Fragment (signature elided — presumably -speed): media playback speed.
458 return [media speed];
// Changes playback speed (clamped to [0.01, 100]; the call that applies `s`
// to the media is elided in this view), then refreshes duration and layout.
461 - (void)setSpeed:(float)s
463 if(s < 0.01 || s > 100.0) return;
// NOTE(review): resets duration to the full media duration, ignoring offset
// and any prior trim — confirm this is the intended behavior on speed change.
465 mapping.duration = [media duration];
466 [self setLayer:alayer];