+// Copyright (c) 2009 Yanagi Asakura
+//
+// This software is provided 'as-is', without any express or implied
+// warranty. In no event will the authors be held liable for any damages
+// arising from the use of this software.
+//
+// Permission is granted to anyone to use this software for any purpose,
+// including commercial applications, and to alter it and redistribute it
+// freely, subject to the following restrictions:
+//
+// 1. The origin of this software must not be misrepresented; you must not
+// claim that you wrote the original software. If you use this software
+// in a product, an acknowledgment in the product documentation would be
+// appreciated but is not required.
+//
+// 2. Altered source versions must be plainly marked as such, and must not be
+// misrepresented as being the original software.
+//
+// 3. This notice may not be removed or altered from any source
+// distribution.
+
//
// ElisLayer.m
// Elis Colors
#import "ElisLayer.h"
+#define TRACK_SIZE 32
+
static float convertQTTimeToSecond(QTTime t)
{
return (float)t.timeValue/t.timeScale;
[pxKeyframe setValueForTime:0.0 time:QTZeroTime];
[pyKeyframe setValueForTime:0.0 time:QTZeroTime];
- [self addEffect:@"CIOpacity"]; //デフォルトで透過度フィルタと
-// [self addEffect:@"CILanczosScaleTransform"]; // 拡大縮小フィルタと
- [self addEffect:@"CIAffineTransform"]; // アフィン変換フィルタはつけておく。
+ originSize = NSZeroRect;
-
return self;
}
+// Attaches the backing media object to this layer.
+// Sound media get a volume keyframe track (base volume 0.3 at time zero —
+// TODO confirm intended default); visual media instead get the default
+// opacity and affine-transform effects.
+- (void)setMedia:(ElisMedia *)m
+{
+ media = m;
+ if([[m type] isEqualToString:@"sound"]){
+ volumeKeyframe = [[ElisKeyframe alloc] init];
+ [volumeKeyframe setValueForTime:0.3 time:QTZeroTime];
+ }
+ else{
+ volumeKeyframe = nil;
+ [self addEffect:@"CIOpacity"]; // Attach the opacity filter and
+ [self addEffect:@"CIAffineTransform"]; // the affine transform filter by default.
+ }
+}
+
- (void)setAlayer:(CALayer *)layer
{
alayer = layer;
forKeyPath:@"frame"
options:(NSKeyValueObservingOptionNew)
context:NULL];
+
+ originSize = [media size];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
- (void)addEffect:(NSString*)name
{
- [effects addObject:[[ElisEffect alloc] initWithName:name]];
+ // Append a named effect wrapper to this layer's effect chain.
+ // Guard against a failed init so nil is never inserted into the array
+ // (NSMutableArray throws on nil).
+ ElisEffect* e = [[ElisEffect alloc] initWithName:name];
+ if(e)
+ [effects addObject:e];
}
// mappingとtrackNumberを変化させる。
QTTime begin = QTMakeTime(frame.origin.x*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
QTTime d = QTMakeTime(frame.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
- trackNumber = floor(frame.origin.y/51.0);
+// trackNumber = floor((frame.origin.y+1)/51.0);
+ trackNumber = round((floor(frame.origin.y/51.0) * 51 + 1 + 25)/51.0);
mapping = QTMakeTimeRange(begin, d);
+
+ int i, center = frame.origin.y + frame.size.height/2;
+ for(i = 0; i < TRACK_SIZE; i++){ // これしきのことにループ回すってどういうことなの...
+ if(51.0*i+1 <= center && center <= 51.0*(i+1)+1){
+ trackNumber = i;
+ return;
+ }
+ }
}
- (void)changeOffset:(float)df
{
QTTime duration;
- if([[media type] isEqualToString:@"image"])
+ if([[media type] isEqualToString:@"image"] || [[media type] isEqualToString:@"text"] || [media isQuartz])
duration = QTMakeTime(60*10, 1);
else
duration = [media duration];
return trackNumber;
}
+// Splits this layer in two at |cutTime| (global timeline time).
+// The receiver keeps the front portion; the returned layer is a deep copy
+// (round-tripped through NSKeyedArchiver) trimmed to the back portion
+// via -cutBack:.
+- (ElisLayer*)cutAtTime:(QTTime)cutTime
+{
+ QTTime innerCutTime = [self convertToInnnerTime:cutTime];
+ ElisLayer* new;
+ NSKeyedArchiver* corder;
+ NSKeyedUnarchiver* decoder;
+ NSMutableData* data = [NSMutableData data];
+
+ // Clone self by archiving and immediately unarchiving.
+ corder = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
+ [self encodeWithCoder:corder];
+ [corder finishEncoding];
+
+ new = [ElisLayer alloc];
+ decoder = [[NSKeyedUnarchiver alloc] initForReadingWithData:data];
+ new = [new initWithCoder:decoder];
+ [decoder finishDecoding];
+
+ // NOTE(review): decrementing the duration by the *global* cutTime looks
+ // suspicious — the front piece's duration would normally become the
+ // inner cut time (cutTime - mapping.time). Confirm against callers.
+ mapping.duration = QTTimeDecrement(mapping.duration, cutTime);
+ [self setLayer:alayer];
+
+ [new cutBack:innerCutTime];
+ return new;
+}
+
+// Trims the front of this layer: everything before |time| (layer-inner
+// time) is discarded. Applied to the back half produced by -cutAtTime:.
+- (void)cutBack:(QTTime)time
+{
+ offset = time;
+ // NOTE(review): mapping.time is assigned an *inner* time here although
+ // it otherwise holds a timeline position — verify this is intended.
+ mapping.time = time;
+ mapping.duration = QTTimeDecrement(mapping.duration, time);
+ [self setLayer:alayer];
+}
+
+// Accessor for the Core Animation layer representing this ElisLayer in
+// the timeline view.
+- (CALayer*)alayer
+{
+ return alayer;
+}
+
- (void)setPositionX:(float)x forTime:(QTTime)time
{
if(recording)
+// Returns the effects-processed frame for global timeline |time|.
+// In stamp mode the CoreVideo timestamp drives frame selection; otherwise
+// the media is seeked to the mapped media-local time first. Returns nil
+// for frameless (sound) media, after updating its volume from the keyframe.
- (CIImage*)getEffectedImage:(CVTimeStamp*)timeStamp forTime:(QTTime)time
{
QTTime innerTime = QTTimeDecrement(time, mapping.time);
- CIImage* image = [media getFrameForTime:timeStamp];
- if(image == nil) return nil;
+ CIImage* image;
+ if(usingStampMode){
+ image = [media getFrameForTime:timeStamp];
+ }else{
+ // Seek to media-local time: layer-relative time plus trim offset.
+ [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
+ image = [media getFrameForTime:nil];
+ }
+ if(image == nil){
+ // No video frame — keep the sound volume tracking its keyframe.
+ [media setVolume:[volumeKeyframe getValueForTime:QTTimeIncrement(innerTime, offset)]];
+ return nil;
+ }
return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
}
+// Like -getEffectedImage:forTime: but always seeks by QTTime (no
+// CoreVideo timestamp), e.g. for non-display-link rendering.
- (CIImage*)getEffectedImageWithoutStamp:(QTTime)time
{
QTTime innerTime = QTTimeDecrement(time, mapping.time);
- CIImage* image = [media getFrameForQTTime:QTTimeIncrement(innerTime, offset)];
- if(image == nil) return nil;
+ [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
+ CIImage* image = [media getFrameForTime:nil];
+ if(image == nil){
+ // Sound-only media: update the volume from the keyframe instead.
+ [media setVolume:[volumeKeyframe getValueForTime:QTTimeIncrement(innerTime, offset)]];
+ return nil;
+ }
return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
}
return NSMakePoint(x, y);
}
+// Volume at global timeline |time|: converts to media-local time
+// (subtract the layer start, add the trim offset), then samples the
+// volume keyframe.
+- (float)getVolumeForTime:(QTTime)time
+{
+ return [volumeKeyframe getValueForTime:QTTimeIncrement(QTTimeDecrement(time, mapping.time), offset)];
+}
+
+// Volume at a time already expressed in media-local coordinates.
+- (float)getVolumeForInnerTime:(QTTime)time
+{
+ return [volumeKeyframe getValueForTime:time];
+}
+
- (void)play
{
[media play];
NSString* paramName, *effectName;
NSDictionary* dict;
+ if(![[media type] isEqualToString:@"sound"]){
[t_propertyNames addObject:@"Position X"];
[t_effects addObject:self];
[t_valueNames addObject:@""];
[t_valueNames addObject:paramName];
}
}
+ }else{
+ [t_propertyNames addObject:@"Volume"];
+ [t_effects addObject:self];
+ [t_valueNames addObject:@""];
+ }
}
- (QTTime)convertToInnnerTime:(QTTime)globalTime
[pyKeyframe setValueForTime:0.0 time:QTZeroTime];
}
+// Resets the volume keyframe track to a fresh one holding only the
+// default volume (0.3, matching -setMedia:) at time zero.
+- (void)removeVolumeKeyframe
+{
+ volumeKeyframe = [[ElisKeyframe alloc] init];
+ [volumeKeyframe setValueForTime:0.3 time:QTZeroTime];
+}
+
- (void)removeEffect:(ElisEffect*)ef
{
[effects removeObject:ef];
- (void)getSoundTrack:(NSMutableArray*)soundTrack
{
QTTrack* t;
+ QTTime qtr;
t = [media getSoundTrack];
if(t){
+ // Collect this layer's sound movie, track, and timeline end point for
+ // mixdown. Make the movie editable, then rescale mapping into the
+ // movie's own timescale so segment edits line up.
+ // NOTE(review): this mutates the layer's mapping as a side effect —
+ // confirm callers expect that.
+ [(QTMovie*)[media getSoundMovie] setAttribute:[NSNumber numberWithBool:YES] forKey:QTMovieEditableAttribute];
+ qtr = [media duration];
+ mapping.time = QTMakeTime(convertQTTimeToSecond(mapping.time)*qtr.timeScale, qtr.timeScale);
+ mapping.duration = QTMakeTime(convertQTTimeToSecond(mapping.duration)*qtr.timeScale, qtr.timeScale);
+ // Drop the trimmed head, then pad with silence up to the layer start.
+ [[media getSoundMovie] deleteSegment:QTMakeTimeRange(QTZeroTime, offset)];
+ [[media getSoundMovie] insertEmptySegmentAt:QTMakeTimeRange(QTZeroTime, mapping.time)];
+// [[media getSoundMovie] scaleSegment:qtr newDuration:QTMakeTimeRange(offset, mapping.duration)];
+ [soundTrack addObject:[media getSoundMovie]];
[soundTrack addObject:t];
+ // Timeline end of this layer (start + duration).
+ [soundTrack addObject:[NSValue valueWithQTTime:QTTimeIncrement(mapping.duration, mapping.time)]];
- [soundTrack addObject:[NSValue valueWithQTTime:offset]];
- [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
+// [soundTrack addObject:[NSValue valueWithQTTime:offset]];
+// [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
}
}
[encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
[encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
[encoder encodeObject:effects forKey:@"effects"];
- [encoder encodeObject:pxKeyframe forKey:@"pxKerframe"];
+ [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
[encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
-
- NSLog(@"encoding layer");
+ [encoder encodeObject:volumeKeyframe forKey:@"volumeKeyframe"];
}
- (id)initWithCoder:(NSCoder*)coder
effects = [coder decodeObjectForKey:@"effects"];
pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
-
- NSLog(@"decoding layer");
+ volumeKeyframe = [coder decodeObjectForKey:@"volumeKeyframe"];
return self;
}
+// Serializes the layer's timeline state (track number, mapping, offset,
+// effects, position/volume keyframes) plus the media playback speed.
+// Counterpart of -loadFromDecoder:.
+- (void)saveToEncoder:(NSCoder*)encoder
+{
+ [encoder encodeInt:trackNumber forKey:@"trackNumber"];
+ [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
+ [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
+ [encoder encodeObject:effects forKey:@"effects"];
+ [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
+ [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
+ [encoder encodeObject:volumeKeyframe forKey:@"volumeKeyframe"];
+ [encoder encodeFloat:[media speed] forKey:@"speed"];
+}
+
+// Restores the state written by -saveToEncoder: into an existing layer.
+// The media must already be attached (speed is pushed onto it) and the
+// timeline CALayer geometry is refreshed at the end.
+- (void)loadFromDecoder:(NSCoder*)coder
+{
+ trackNumber = [coder decodeIntForKey:@"trackNumber"];
+ mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
+ offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
+ effects = [coder decodeObjectForKey:@"effects"];
+ pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
+ pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
+ [media setSpeed:[coder decodeFloatForKey:@"speed"]];
+ volumeKeyframe = [coder decodeObjectForKey:@"volumeKeyframe"];
+
+ [self setLayer:alayer];
+}
+
+// Positions the timeline CALayer: x/width from the mapping (seconds ×
+// timeLineScale), y from the track number (51 px per track, 50 px tall).
- (void)setLayer:(CALayer*)layer
{
layer.frame = CGRectMake(convertQTTimeToSecond(mapping.time)*timeLineScale, trackNumber*51+1,
- (convertQTTimeToSecond(mapping.time) + convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
+ // Width is the duration alone; the old code wrongly added the start time.
+ (/*convertQTTimeToSecond(mapping.time) + */convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
[self setAlayer:layer];
}
return [media type];
}
+// Human-readable name for UI display: text layers show the literal
+// "text"; everything else shows its file path.
+- (NSString*)printName
+{
+ if([[media type] isEqualToString:@"text"])
+ return @"text";
+
+ return [self getPath];
+}
+
+// Natural size of the attached media (captured in -setAlayer:); only the
+// size component of the stored rect is exposed.
+- (NSSize)originSize
+{
+ return originSize.size;
+}
+
+// Current playback speed, forwarded from the media.
+- (float)speed
+{
+ return [media speed];
+}
+
+// Sets playback speed, silently ignoring values outside [0.01, 100].
+// Changing speed changes the media's effective duration, so the mapping
+// and the timeline CALayer are refreshed.
+- (void)setSpeed:(float)s
+{
+ if(s < 0.01 || s > 100.0) return;
+ [media setSpeed:s];
+ mapping.duration = [media duration];
+ [self setLayer:alayer];
+}
+
+// Writes a volume value into the keyframe track. While recording the
+// value is keyed at |time|; otherwise it overwrites the base value at
+// time zero.
+- (void)setVolume:(float)v forTime:(QTTime)time
+{
+ if(recording)
+ [volumeKeyframe setValueForTime:v time:time];
+ else
+ [volumeKeyframe setValueForTime:v time:QTZeroTime];
+}
+
@end