// Copyright (c) 2009 Yanagi Asakura // // This software is provided 'as-is', without any express or implied // warranty. In no event will the authors be held liable for any damages // arising from the use of this software. // // Permission is granted to anyone to use this software for any purpose, // including commercial applications, and to alter it and redistribute it // freely, subject to the following restrictions: // // 1. The origin of this software must not be misrepresented; you must not // claim that you wrote the original software. If you use this software // in a product, an acknowledgment in the product documentation would be // appreciated but is not required. // // 2. Altered source versions must be plainly marked as such, and must not be // misrepresented as being the original software. // // 3. This notice may not be removed or altered from any source // distribution. // // ElisLayer.m // Elis Colors // // Created by 柳 on 09/09/12. // Copyright 2009 __MyCompanyName__. All rights reserved. 
//

#import "ElisLayer.h"

#define TRACK_SIZE 32

// Converts a QTTime value to seconds as a float.
static float convertQTTimeToSecond(QTTime t)
{
    return (float)t.timeValue/t.timeScale;
}

@implementation ElisLayer

@synthesize media;

// Designated initializer: no media, zero offset, constant-zero position
// keyframes, and the default filter chain (opacity + affine transform).
- (id)init
{
    // FIX: the original returned self without ever calling -[super init];
    // initialize the superclass first and honor a nil result.
    self = [super init];
    if(self == nil) return nil;

    self.media = nil;
    offset = QTZeroTime;
    effects = [[NSMutableArray alloc] init];

    pxKeyframe = [[ElisKeyframe alloc] init];
    pyKeyframe = [[ElisKeyframe alloc] init];
    [pxKeyframe setValueForTime:0.0 time:QTZeroTime];
    [pyKeyframe setValueForTime:0.0 time:QTZeroTime];

    originSize = NSZeroRect;

    // Every layer always carries an opacity filter and an affine-transform
    // filter by default.
    [self addEffect:@"CIOpacity"];
    [self addEffect:@"CIAffineTransform"];

    return self;
}

// Attaches the timeline CALayer that represents this layer, tags it so this
// object can be found back from the CALayer, and observes its frame so that
// mapping/trackNumber follow UI edits.
// NOTE(review): this may be called repeatedly (e.g. via -setLayer:) and
// re-adds the "frame" observer each time without removing the previous
// registration — verify this cannot double-register.
- (void)setAlayer:(CALayer *)layer
{
    alayer = layer;
    [layer setValue:self forKey:@"ElisLayer"];
    [layer addObserver:self forKeyPath:@"frame" options:(NSKeyValueObservingOptionNew) context:NULL];
    originSize = [media size];
}

// KVO callback; the only observed key path is the timeline layer's "frame".
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    [self changeMapping];
}

// Appends a new ElisEffect built from the given Core Image filter name.
- (void)addEffect:(NSString*)name
{
    [effects addObject:[[ElisEffect alloc] initWithName:name]];
}

// Recomputes mapping (this layer's time range on the timeline) and
// trackNumber from the timeline layer's current frame.
- (void)changeMapping
{
    CGRect frame = alayer.frame;
    QTTime begin = QTMakeTime(frame.origin.x*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
    QTTime d = QTMakeTime(frame.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
    trackNumber = round((floor(frame.origin.y/51.0) * 51 + 1 + 25)/51.0);
    mapping = QTMakeTimeRange(begin, d);

    // Snap to the track whose 51-pixel band contains the frame's vertical center.
    int i, center = frame.origin.y + frame.size.height/2;
    for(i = 0; i < TRACK_SIZE; i++){
        if(51.0*i+1 <= center && center <= 51.0*(i+1)+1){
            trackNumber = i;
            return;
        }
    }
}

// Shifts the media in-point by df pixels of timeline movement.
- (void)changeOffset:(float)df
{
    QTTime new_offset = QTTimeIncrement(offset, QTMakeTime(df*DEFAULT_FPS/timeLineScale, DEFAULT_FPS));
    offset = new_offset;
}

// Whether the proposed timeline rect is a legal duration for this layer:
// at least 0.5 s and no longer than the media remaining past the offset.
- (BOOL)canChangeMapping:(CGRect)rect
{
    QTTime duration;

    // Stills and text have no intrinsic length; cap them at 10 minutes.
    if([[media type] isEqualToString:@"image"] || [[media type] isEqualToString:@"text"])
        duration = QTMakeTime(60*10, 1);
    else
        duration = [media duration];

    QTTime wantDuration = QTMakeTime(rect.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
    float d = convertQTTimeToSecond(duration) - convertQTTimeToSecond(offset);
    float wd = convertQTTimeToSecond(wantDuration);
    return 0.5 <= wd && wd <= d;
}

// Whether the in-point can move by df pixels and stay inside the media.
- (BOOL)canChangeOffset:(float)df
{
    float now_offset = convertQTTimeToSecond(offset);
    float duration = convertQTTimeToSecond([media duration]);
    df /= timeLineScale;
    return 0.0 <= df + now_offset && df + now_offset <= duration;
}

// YES if the global time falls inside this layer's mapped range.
// Compares float seconds instead of QTTimeInTimeRange so it stays safe to
// call from multiple threads (per the original author's note).
- (BOOL)isInclude:(QTTime)time
{
    float t = convertQTTimeToSecond(time);
    float begin = convertQTTimeToSecond(mapping.time);
    return begin <= t && t <= begin + convertQTTimeToSecond(mapping.duration);
}

- (int)trackNumber
{
    return trackNumber;
}

// Splits this layer at cutTime (global timeline time). Self keeps the front
// part; the returned clone — deep-copied through a keyed-archiving
// round-trip — becomes the back part.
- (ElisLayer*)cutAtTime:(QTTime)cutTime
{
    QTTime innerCutTime = [self convertToInnnerTime:cutTime];
    ElisLayer* newLayer;
    NSKeyedArchiver* archiver;
    NSKeyedUnarchiver* unarchiver;
    NSMutableData* data = [NSMutableData data];

    // Deep-copy self via NSCoding.
    archiver = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
    [self encodeWithCoder:archiver];
    [archiver finishEncoding];

    newLayer = [ElisLayer alloc];
    unarchiver = [[NSKeyedUnarchiver alloc] initForReadingWithData:data];
    newLayer = [newLayer initWithCoder:unarchiver];
    [unarchiver finishDecoding];

    mapping.duration = QTTimeDecrement(mapping.duration, cutTime);
    [self setLayer:alayer];
    [newLayer cutBack:innerCutTime];
    return newLayer;
}

// Turns a freshly-cloned layer into the back half of a cut: media offset,
// mapped start, and duration all move to the (media-local) cut time.
- (void)cutBack:(QTTime)time
{
    offset = time;
    mapping.time = time;
    mapping.duration = QTTimeDecrement(mapping.duration, time);
    [self setLayer:alayer];
}

- (CALayer*)alayer
{
    return alayer;
}

// Records a Position X keyframe. When not recording, the value is written
// at time zero so it acts as the constant position.
- (void)setPositionX:(float)x forTime:(QTTime)time
{
    if(recording)
        [pxKeyframe setValueForTime:x time:time];
    else
        [pxKeyframe setValueForTime:x time:QTZeroTime];
}

// Records a Position Y keyframe; same recording semantics as setPositionX:forTime:.
- (void)setPositionY:(float)y forTime:(QTTime)time
{
    if(recording)
        [pyKeyframe setValueForTime:y time:time];
    else
        [pyKeyframe setValueForTime:y time:QTZeroTime];
}

// Returns the media frame at global time `time` with all effects applied.
// Returns nil for sound-only media or when no frame is available. In stamp
// mode the frame is fetched by CVTimeStamp instead of seeking the media.
- (CIImage*)getEffectedImage:(CVTimeStamp*)timeStamp forTime:(QTTime)time
{
    QTTime innerTime = QTTimeDecrement(time, mapping.time);
    CIImage* image;

    if([[media type] isEqualToString:@"sound"]) return nil;

    if(usingStampMode){
        image = [media getFrameForTime:timeStamp];
    }else{
        [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
        image = [media getFrameForTime:nil];
    }

    if(image == nil) return nil;
    return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
}

// Like getEffectedImage:forTime: but always seeks the media (no CVTimeStamp).
- (CIImage*)getEffectedImageWithoutStamp:(QTTime)time
{
    QTTime innerTime = QTTimeDecrement(time, mapping.time);
    [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
    CIImage* image = [media getFrameForTime:nil];
    if(image == nil) return nil;
    return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
}

// Pipes the image through every effect in order; `time` is media-local.
- (CIImage*)applyEffects:(CIImage*)image forTime:(QTTime)time
{
    int i, size = [effects count];
    ElisEffect* ef;

    for(i = 0; i < size; i++){
        ef = [effects objectAtIndex:i];
        [ef setInputImage:image];
        image = [ef getImage:time];
    }

    return image;
}

// Keyframe-interpolated (x, y) position for the given time.
- (NSPoint)getPositionForTime:(QTTime)time
{
    float x = [pxKeyframe getValueForTime:time];
    float y = [pyKeyframe getValueForTime:time];
    return NSMakePoint(x, y);
}

- (void)play
{
    [media play];
}

- (void)stop
{
    [media stop];
}

- (void)releaseContext
{
    [media releaseContext];
}

- (QTTimeRange)mapping
{
    return mapping;
}

// Seeks the media to the position matching the global timeline time;
// outside the mapped range the media is parked at its in-point.
- (void)seek:(QTTime)time
{
    if([self isInclude:time]){
        QTTime innerTime = QTTimeDecrement(time, mapping.time);  // to layer-local time
        [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
    } else {
        [media setCurrentTime:offset];
    }
}

// for Property Table
// Fills the three parallel arrays backing the property table: the owning
// object, the display name, and the effect parameter key for each row.
- (void)createPropertyTableDataSource:(NSMutableArray*)t_effects property:(NSMutableArray*)t_propertyNames value:(NSMutableArray*)t_valueNames
{
    NSMutableDictionary* params;
    NSArray* arr;
    NSString* paramName, *effectName;

    // The built-in position properties are handled by the layer itself.
    [t_propertyNames addObject:@"Position X"];
    [t_effects addObject:self];
    [t_valueNames addObject:@""];
    [t_propertyNames addObject:@"Position Y"];
    [t_effects addObject:self];
    [t_valueNames addObject:@""];

    int i, size = [effects count];
    for(i = 0; i < size; i++){
        params = [[effects objectAtIndex:i] getParamDictionary];
        arr = [params allKeys];
        arr = [arr sortedArrayUsingSelector:@selector(compare:)];
        effectName = [[effects objectAtIndex:i] getName];
        for(paramName in arr){
            // Display name drops the 5-character "input" prefix of the CI key.
            [t_propertyNames addObject:[NSString stringWithFormat:@"%@ %@", effectName, [paramName substringFromIndex:5]]];
            [t_effects addObject:[effects objectAtIndex:i]];
            [t_valueNames addObject:paramName];
        }
    }
}

// Global timeline time -> media-local time (offset included).
// (The method name keeps the historical "Innner" spelling; callers use it.)
- (QTTime)convertToInnnerTime:(QTTime)globalTime
{
    return QTTimeIncrement(QTTimeDecrement(globalTime, mapping.time), offset);
}

- (QTTime)plusOffsetTime:(QTTime)time
{
    return QTTimeIncrement(time, offset);
}

// GC finalizer: tear down the KVO registration before the object goes away.
- (void)finalize
{
    [alayer removeObserver:self forKeyPath:@"frame"];
    [super finalize];
}

// Resets the Position X track to a single zero keyframe.
- (void)removePositionXKeyframe
{
    pxKeyframe = [[ElisKeyframe alloc] init];
    [pxKeyframe setValueForTime:0.0 time:QTZeroTime];
}

// Resets the Position Y track to a single zero keyframe. (The name keeps
// the historical "Kerframe" typo for compatibility with existing callers.)
- (void)removePositionYKerframe
{
    pyKeyframe = [[ElisKeyframe alloc] init];
    [pyKeyframe setValueForTime:0.0 time:QTZeroTime];
}

- (void)removeEffect:(ElisEffect*)ef
{
    [effects removeObject:ef];
}

// Prepares this layer's sound movie for export: rescales mapping to the
// movie's time scale, trims the head before the in-point, pads silence up
// to the mapped start, then appends [soundMovie, track, endTime] to soundTrack.
- (void)getSoundTrack:(NSMutableArray*)soundTrack
{
    QTTrack* t;
    QTTime qtr;

    t = [media getSoundTrack];
    if(t){
        [(QTMovie*)[media getSoundMovie] setAttribute:[NSNumber numberWithBool:YES] forKey:QTMovieEditableAttribute];
        qtr = [media duration];
        // Rescale mapping to the movie's own time scale before editing it.
        mapping.time = QTMakeTime(convertQTTimeToSecond(mapping.time)*qtr.timeScale, qtr.timeScale);
        mapping.duration = QTMakeTime(convertQTTimeToSecond(mapping.duration)*qtr.timeScale, qtr.timeScale);
        [[media getSoundMovie] deleteSegment:QTMakeTimeRange(QTZeroTime, offset)];
        [[media getSoundMovie] insertEmptySegmentAt:QTMakeTimeRange(QTZeroTime, mapping.time)];
        [soundTrack addObject:[media getSoundMovie]];
        [soundTrack addObject:t];
        [soundTrack addObject:[NSValue valueWithQTTime:QTTimeIncrement(mapping.duration, mapping.time)]];
    }
}

// NSCoding: full serialization including the media object (used by cutAtTime:).
- (void)encodeWithCoder:(NSCoder*)encoder
{
    [encoder encodeObject:media forKey:@"media"];
    [encoder encodeInt:trackNumber forKey:@"trackNumber"];
    [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
    [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
    [encoder encodeObject:effects forKey:@"effects"];
    [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
    [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
}

// NSCoding counterpart of encodeWithCoder:.
- (id)initWithCoder:(NSCoder*)coder
{
    // FIX: initialize the superclass before decoding (the original skipped it).
    self = [super init];
    if(self == nil) return nil;

    media = [coder decodeObjectForKey:@"media"];
    trackNumber = [coder decodeIntForKey:@"trackNumber"];
    mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
    offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
    effects = [coder decodeObjectForKey:@"effects"];
    pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
    pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
    return self;
}

// Project persistence: saves everything except the media object itself,
// plus the playback speed.
- (void)saveToEncoder:(NSCoder*)encoder
{
    [encoder encodeInt:trackNumber forKey:@"trackNumber"];
    [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
    [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
    [encoder encodeObject:effects forKey:@"effects"];
    [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
    [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
    [encoder encodeFloat:[media speed] forKey:@"speed"];
}

// Counterpart of saveToEncoder:; assumes media and alayer are already set.
- (void)loadFromDecoder:(NSCoder*)coder
{
    trackNumber = [coder decodeIntForKey:@"trackNumber"];
    mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
    offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
    effects = [coder decodeObjectForKey:@"effects"];
    pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
    pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
    [media setSpeed:[coder decodeFloatForKey:@"speed"]];
    [self setLayer:alayer];
}

// Positions the timeline CALayer from mapping/trackNumber (51 px per track,
// 50 px tall) and re-attaches it via setAlayer:.
- (void)setLayer:(CALayer*)layer
{
    layer.frame = CGRectMake(convertQTTimeToSecond(mapping.time)*timeLineScale,
                             trackNumber*51+1,
                             convertQTTimeToSecond(mapping.duration)*timeLineScale,
                             50);
    [self setAlayer:layer];
}

// End time of this layer on the global timeline, in seconds.
- (float)duration
{
    return convertQTTimeToSecond(mapping.time) + convertQTTimeToSecond(mapping.duration);
}

- (NSString*)getPath
{
    return [[media path] lastPathComponent];
}

- (NSString*)getType
{
    return [media type];
}

// Name shown in the UI: text layers display "text", others their file name.
- (NSString*)printName
{
    if([[media type] isEqualToString:@"text"]) return @"text";
    return [self getPath];
}

- (NSSize)originSize
{
    return originSize.size;
}

- (float)speed
{
    return [media speed];
}

// Sets playback speed, ignoring values outside [0.01, 100], then refreshes
// the mapped duration and the timeline layer.
- (void)setSpeed:(float)s
{
    if(s < 0.01 || s > 100.0)
        return;
    [media setSpeed:s];
    mapping.duration = [media duration];
    [self setLayer:alayer];
}

@end