
add Text Layer
[eliscolors/main.git] / ElisLayer.m
index 4108c9a..d9df22a 100644
@@ -1,3 +1,24 @@
+//  Copyright (c) 2009 Yanagi Asakura
+//
+//  This software is provided 'as-is', without any express or implied
+//  warranty. In no event will the authors be held liable for any damages
+//  arising from the use of this software.
+//
+//  Permission is granted to anyone to use this software for any purpose,
+//  including commercial applications, and to alter it and redistribute it
+//  freely, subject to the following restrictions:
+//
+//  1. The origin of this software must not be misrepresented; you must not
+//  claim that you wrote the original software. If you use this software
+//  in a product, an acknowledgment in the product documentation would be
+//  appreciated but is not required.
+//
+//  2. Altered source versions must be plainly marked as such, and must not be
+//  misrepresented as being the original software.
+//
+//  3. This notice may not be removed or altered from any source
+//  distribution.
+
 //
 //  ElisLayer.m
 //  Elis Colors
@@ -8,6 +29,8 @@
 
 #import "ElisLayer.h"
 
+#define TRACK_SIZE 32
+
 static float convertQTTimeToSecond(QTTime t)
 {
     return (float)t.timeValue/t.timeScale;
@@ -67,8 +90,17 @@ static float convertQTTimeToSecond(QTTime t)
     QTTime begin = QTMakeTime(frame.origin.x*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
     QTTime d = QTMakeTime(frame.size.width*DEFAULT_FPS/timeLineScale, DEFAULT_FPS);
     
-    trackNumber = floor(frame.origin.y/51.0);
+//    trackNumber = floor((frame.origin.y+1)/51.0);
+    trackNumber = round((floor(frame.origin.y/51.0) * 51 + 1 + 25)/51.0);
     mapping = QTMakeTimeRange(begin, d);
+    
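+    // Re-derive the track index from the layer's vertical center:
+    // each timeline track occupies a 51px-tall row.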
+    int i, center = frame.origin.y + frame.size.height/2;
+    for(i = 0; i < TRACK_SIZE; i++){ // why loop for something this trivial...
+        if(51.0*i+1 <= center && center <= 51.0*(i+1)+1){
+            trackNumber = i;
+            return;
+        }
+    }
 }
 
 - (void)changeOffset:(float)df
@@ -82,7 +114,7 @@ static float convertQTTimeToSecond(QTTime t)
 {
     QTTime duration;
     
-    if([[media type] isEqualToString:@"image"])
+    if([[media type] isEqualToString:@"image"] || [[media type] isEqualToString:@"text"])
         duration = QTMakeTime(60*10, 1);
     else
         duration = [media duration];
@@ -135,7 +167,14 @@ static float convertQTTimeToSecond(QTTime t)
 - (CIImage*)getEffectedImage:(CVTimeStamp*)timeStamp forTime:(QTTime)time
 {
     QTTime innerTime = QTTimeDecrement(time, mapping.time);
-    CIImage* image = [media getFrameForTime:timeStamp];
+    CIImage* image;
+    if([[media type] isEqualToString:@"sound"]) return nil;
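+    // With a CVTimeStamp available, let the media fetch its own frame;
+    // otherwise seek to the mapped time and read the current frame.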
+    if(usingStampMode){
+        image = [media getFrameForTime:timeStamp];
+    }else{
+        [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
+        image = [media getFrameForTime:nil];
+    }
     if(image == nil) return nil;
     
     return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];
@@ -144,7 +183,9 @@ static float convertQTTimeToSecond(QTTime t)
 - (CIImage*)getEffectedImageWithoutStamp:(QTTime)time
 {
     QTTime innerTime = QTTimeDecrement(time, mapping.time);
-    CIImage* image = [media getFrameForQTTime:QTTimeIncrement(innerTime, offset)];
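+    // Seek the media to the mapped time, then read back the current frame.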
+    [media setCurrentTime:QTTimeIncrement(innerTime, offset)];
+//    CIImage* image = [media getFrameForQTTime:QTTimeIncrement(innerTime, offset)];
+    CIImage* image = [media getFrameForTime:nil];
     if(image == nil) return nil;
     
     return [self applyEffects:image forTime:QTTimeIncrement(innerTime, offset)];    
@@ -270,11 +311,21 @@ static float convertQTTimeToSecond(QTTime t)
 - (void)getSoundTrack:(NSMutableArray*)soundTrack
 {
     QTTrack* t;
+    QTTime qtr;
     t = [media getSoundTrack];
     if(t){
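+        // Make the sound movie editable, rescale this layer's mapping to the
+        // movie's timescale, cut the leading offset, and insert leading silence
+        // so the audio starts where the layer begins on the timeline.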
+        [(QTMovie*)[media getSoundMovie] setAttribute:[NSNumber numberWithBool:YES] forKey:QTMovieEditableAttribute];
+        qtr = [media duration];
+        mapping.time = QTMakeTime(convertQTTimeToSecond(mapping.time)*qtr.timeScale, qtr.timeScale);
+        mapping.duration = QTMakeTime(convertQTTimeToSecond(mapping.duration)*qtr.timeScale, qtr.timeScale);
+        [[media getSoundMovie] deleteSegment:QTMakeTimeRange(QTZeroTime, offset)];
+        [[media getSoundMovie] insertEmptySegmentAt:QTMakeTimeRange(QTZeroTime, mapping.time)];
+//        [[media getSoundMovie] scaleSegment:qtr newDuration:QTMakeTimeRange(offset, mapping.duration)];
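+        // Hand back the movie, its audio track, and the layer's end time.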
+        [soundTrack addObject:[media getSoundMovie]];
         [soundTrack addObject:t];
-        [soundTrack addObject:[NSValue valueWithQTTime:offset]];
-        [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
+        [soundTrack addObject:[NSValue valueWithQTTime:QTTimeIncrement(mapping.duration, mapping.time)]];
+//        [soundTrack addObject:[NSValue valueWithQTTime:offset]];
+//        [soundTrack addObject:[NSValue valueWithQTTimeRange:mapping]];
     }
 }
 
@@ -285,10 +336,8 @@ static float convertQTTimeToSecond(QTTime t)
     [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
     [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
     [encoder encodeObject:effects forKey:@"effects"];
-    [encoder encodeObject:pxKeyframe forKey:@"pxKerframe"];
+    [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
     [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
-    
-    NSLog(@"encoding layer");
 }
 
 - (id)initWithCoder:(NSCoder*)coder
@@ -301,15 +350,35 @@ static float convertQTTimeToSecond(QTTime t)
     pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
     pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
     
-    NSLog(@"decoding layer");
-    
     return self;
 }
 
+- (void)saveToEncoder:(NSCoder*)encoder
+{
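+    // Mirrors encodeWithCoder:, writing the same keys.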
+    [encoder encodeInt:trackNumber forKey:@"trackNumber"];
+    [encoder encodeObject:QTStringFromTimeRange(mapping) forKey:@"mapping"];
+    [encoder encodeObject:QTStringFromTime(offset) forKey:@"offset"];
+    [encoder encodeObject:effects forKey:@"effects"];
+    [encoder encodeObject:pxKeyframe forKey:@"pxKeyframe"];
+    [encoder encodeObject:pyKeyframe forKey:@"pyKeyframe"];
+}
+
+- (void)loadFromDecoder:(NSCoder*)coder
+{
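+    // Mirrors initWithCoder:, reading the same keys into this instance and then
+    // repositioning the existing timeline layer (alayer).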
+    trackNumber = [coder decodeIntForKey:@"trackNumber"];
+    mapping = QTTimeRangeFromString([coder decodeObjectForKey:@"mapping"]);
+    offset = QTTimeFromString([coder decodeObjectForKey:@"offset"]);
+    effects = [coder decodeObjectForKey:@"effects"];
+    pxKeyframe = [coder decodeObjectForKey:@"pxKeyframe"];
+    pyKeyframe = [coder decodeObjectForKey:@"pyKeyframe"];
+    
+    [self setLayer:alayer];
+}
+
 - (void)setLayer:(CALayer*)layer
 {
     layer.frame = CGRectMake(convertQTTimeToSecond(mapping.time)*timeLineScale, trackNumber*51+1,
-                             (convertQTTimeToSecond(mapping.time) + convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
+                             (/*convertQTTimeToSecond(mapping.time) + */convertQTTimeToSecond(mapping.duration))*timeLineScale, 50);
     [self setAlayer:layer];
 }