/**
 * This package provides classes to handle video in Processing. The API is compatible with the built-in video library of Processing. 
 * GSVideo uses the multimedia toolkit GStreamer (http://www.gstreamer.net/) through the gstreamer-java bindings by Wayne Meissner:
 * http://code.google.com/p/gstreamer-java/ 
 * @author Andres Colubri
 * @version 0.8
 *
 * Copyright (c) 2008 Andres Colubri
 *
 * This source is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * 
 * This code is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 * 
 * A copy of the GNU General Public License is available on the World
 * Wide Web at <http://www.gnu.org/copyleft/gpl.html>. You can also
 * obtain it by writing to the Free Software Foundation,
 * Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 */

package codeanticode.gsvideo;

import processing.core.*;

import java.nio.*;
import java.lang.reflect.*;

import org.gstreamer.*;
import org.gstreamer.elements.*;

/**
 * This class allows to create a custom GStreamer pipeline.
 */
public class GSPipeline extends PImage implements PConstants {
  // Reflection handle to the handler object's pipelineEvent(GSPipeline) method.
  // Invoked from the sink thread when a new frame/buffer arrives; null if the
  // handler does not define one.
  protected Method pipelineEventMethod;
  // Full pipeline description actually launched (including any sink elements
  // this class appends to the user-supplied string).
  protected String pipeline;
  protected boolean playing = false;
  protected boolean paused = false;
  protected boolean repeat = false;  
  // True when a new frame/buffer has arrived and has not yet been read().
  protected boolean available;
  // Native frame dimensions reported by the video sink callback.
  protected int pipeWidth;
  protected int pipeHeight;
  // Object whose pipelineEvent method is called (usually the parent PApplet).
  protected Object eventHandler;
  protected RGBDataSink videoSink = null;
  // Staging buffer filled on the sink thread; swapped with pixels in read()
  // so no per-frame allocation is needed.
  protected int[] copyPixels = null;
  protected Pipeline gpipe;
  
  // True until the first video frame arrives; used to re-initialize the
  // backing PImage with the stream's real dimensions.
  protected boolean firstFrame = true;
  
  // One of GSVideo.VIDEO, GSVideo.AUDIO or GSVideo.RAW.
  protected int streamType;
  protected DataSink dataSink = null;
  // Raw buffer exposed to the sketch for RAW streams (valid after read()).
  public byte[] data = null;
  // Caps (format description) of the last raw buffer handed out by read().
  public String dataCaps;
  protected String tempDataCaps;
  // Staging buffer filled on the sink thread; swapped with data in read().
  protected byte[] copyData = null;  
  
  /**
   * Creates an instance of GSPipeline using the pipeline specified in the
   * string pipeStr, assuming a video stream.
   * 
   * @param parent
   *          PApplet
   * @param pipeStr
   *          String
   */
  public GSPipeline(PApplet parent, String pipeStr) {
    this(parent, pipeStr, GSVideo.VIDEO);
  }
  
  /**
   * Creates an instance of GSPipeline using the pipeline specified in the
   * string pipe.
   * 
   * @param parent
   *          PApplet
   * @param pipeStr
   *          String
   * @param type
   *          int    
   */
  public GSPipeline(PApplet parent, String pipeStr, int type) {
    // this creates a fake image so that the first time this
    // attempts to draw, something happens that's not an exception
    super(1, 1, RGB);

    this.parent = parent;

    gpipe = null;

    GSVideo.init();

    // register methods
    parent.registerDispose(this);

    setEventHandlerObject(parent);

    // Determining if the last element is fakesink or filesink.
    int idx;
    String lastElem, lastElemName;
    String[] parts;

    idx = pipeStr.lastIndexOf('!');
    lastElem = pipeStr.substring(idx + 1, pipeStr.length()).trim();

    parts = lastElem.split(" ");
    if (0 < parts.length)
      lastElemName = parts[0];
    else
      lastElemName = "";

    boolean fakeSink = lastElemName.equals("fakesink");
    boolean fileSink = lastElemName.equals("filesink");

    if (PApplet.platform == WINDOWS) {
      // Single backward slashes are replaced by double backward slashes,
      // otherwise gstreamer won't understand file paths.
      pipeStr = pipeStr.replace("\\", "\\\\");
    }
    
    if (fakeSink || fileSink) {
      // If the pipeline ends in a fakesink or filesink element, the RGBDataSink
      // is not added at the end of it...
      pipeline = pipeStr;
      gpipe = Pipeline.launch(pipeStr);    
    } else {
      if (type == GSVideo.VIDEO) {
        // For video pipelines, we add the RGBDataSink element at the end.
        
        // Making sure we are using the right color space and color masks:
        String caps = " ! ffmpegcolorspace ! video/x-raw-rgb, bpp=32, depth=24, endianness=(int)4321, ";
        // JNA creates ByteBuffer using native byte order, set masks according to that.
        if (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN)
          caps += "red_mask=(int)0xFF00, green_mask=(int)0xFF0000, blue_mask=(int)0xFF000000";
        else
          caps += "red_mask=(int)0xFF0000, green_mask=(int)0xFF00, blue_mask=(int)0xFF";
          
        StringBuilder finalPipeStr = new StringBuilder(pipeStr);
        finalPipeStr.append(caps);
        finalPipeStr.append(" ! fakesink name=VideoSink");

        pipeline = finalPipeStr.toString();
        gpipe = Pipeline.launch(pipeline);
        videoSink = new RGBDataSink("rgb", gpipe, new RGBDataSink.Listener() {
          public void rgbFrame(boolean pre, int w, int h, IntBuffer buffer) {
            invokeVideoEvent(w, h, buffer);
          }
        });
        
        // Setting direct buffer passing in the video sink, so no new buffers are created
        // and disposed by the GC on each frame (thanks to Octavi Estape for pointing 
        // out this one).            
        videoSink.setPassDirectBuffer(GSVideo.passDirectBuffer);    
        
        // The videoSink is explicitly disposed later in delete().
      } else if (type == GSVideo.AUDIO) {
        // For audio pipelines, we launch the pipeline as it is.
        pipeline = pipeStr;
        gpipe = Pipeline.launch(pipeStr);       
      } else if (type == GSVideo.RAW) {
        StringBuilder finalPipeStr = new StringBuilder(pipeStr);
        finalPipeStr.append(" ! fakesink name=DataSink");
        
        pipeline = finalPipeStr.toString();
        gpipe = Pipeline.launch(pipeline);
        dataSink = new DataSink("rgb", gpipe, new DataSink.Listener() {
          public void dataFrame(boolean pre, Caps caps, int size, ByteBuffer buffer) {
            invokeRawEvent(caps, size, buffer);
          }
        });
        dataSink.setPassDirectBuffer(GSVideo.passDirectBuffer);         
      } else {
        System.err.println("Unrecognized stream type: Please use VIDEO, AUDIO, or RAW.");
        return;
      }
    }
    
    // Creating bus to handle end-of-stream event.
    Bus bus = gpipe.getBus();
    bus.connect(new Bus.EOS() {
      public void endOfStream(GstObject element) {
        eosEvent();
      }
    });  
    
    streamType = type;
  }

  /**
   * Releases the gstreamer resources associated to this pipeline object.
   * It shouldn't be used after this.
   */
  public void delete() {
    if (gpipe != null) {
      try {
        if (gpipe.isPlaying()) {
          gpipe.stop();
        }
      } catch (IllegalStateException e) {
        // The underlying native object may already be gone; nothing to stop.
        System.err.println("error when deleting pipeline, maybe some native resource is already disposed"); 
      } catch (Exception e) {
        e.printStackTrace();
      }
      
      copyPixels = null;
      pixels = null;      
      
      if (videoSink != null) {
        videoSink.removeListener();
        videoSink.dispose();
        videoSink = null;
      }      
      
      copyData = null;
      data = null;
      
      if (dataSink != null) {
        dataSink.removeListener();
        dataSink.dispose();
        dataSink = null;
      }      
      
      gpipe.dispose();
      gpipe = null;
    }
  }
  
  /**
   * Same as delete. Called automatically by Processing on sketch shutdown
   * (registered via registerDispose in the constructor).
   */    
  public void dispose() {
    delete();
  }  
  
  /**
   * Uses a generic object as handler of the pipeline. This object should have a
   * pipelineEvent method that receives a GSPipeline argument. This method will
   * be called upon a new frame read event. 
   * 
   */
  public void setEventHandlerObject(Object obj) {
    eventHandler = obj;

    try {
      pipelineEventMethod = eventHandler.getClass().getMethod("pipelineEvent",
          new Class[] { GSPipeline.class });
    } catch (Exception e) {
      // no such method, or an error.. which is fine, just ignore
    }
  }
  
  /**
   * Get the full length of this movie (in seconds).
   * 
   * @return float
   */
  public float duration() {
    // Query once: two separate native queries could return slightly different
    // values, making whole-second and fractional parts inconsistent.
    ClockTime dur = gpipe.queryDuration();
    float sec = dur.toSeconds();
    float nanosec = dur.getNanoSeconds();
    return sec + GSVideo.nanoSecToSecFrac(nanosec);
  }  
  
  /**
   * Return the current time in seconds.
   * 
   * @return float
   */
  public float time() {
    // Query once for a self-consistent position (see duration()).
    ClockTime pos = gpipe.queryPosition();
    float sec = pos.toSeconds();
    float nanosec = pos.getNanoSeconds();
    return sec + GSVideo.nanoSecToSecFrac(nanosec);
  }  
  
  /**
   * Jump to a specific location (in seconds). The number is a float so
   * fractions of seconds can be used.
   * 
   * @param float where
   */
  public void jump(float where) {
    // Pause during the seek so playback resumes cleanly at the new position.
    if (playing) {
      gpipe.pause();
    }
    
    boolean res;
    long start = GSVideo.secToNanoLong(where);
    long stop = -1; // or whatever > new_pos
    
    res = gpipe.seek(1.0, Format.TIME, SeekFlags.FLUSH,
                     SeekType.SET, start, SeekType.SET, stop);
    
    if (!res) {
      System.err.println("Seek operation failed.");
    }    

    if (playing) {
      gpipe.play();
    }
  }  
  
  /**
   * Return the true or false depending on whether there is a new frame ready to
   * be read.
   * 
   * @return boolean
   */
  public boolean available() {
    return available;
  }
  
  /**
   * Returns whether the stream is playing or not.
   * 
   * @return boolean
   */
  public boolean isPlaying() {
    return playing;  
  }

  /**
   * Returns whether the stream is paused or not. If isPlaying() and isPaused()
   * both return false it means that the stream is stopped.
   * 
   * @return boolean
   */
  public boolean isPaused() {
    return paused;  
  }  
  
  /**
   * Returns whether the stream is looping or not.
   * 
   * @return boolean
   */
  public boolean isLooping() {
    return repeat;
  }
  
  /**
   * Begin playing the stream, with no repeat.
   */
  public void play() {
    playing = true;
    paused = false;
    gpipe.play();    
  }

  /**
   * Begin playing the stream, with repeat.
   */
  public void loop() {    
    repeat = true;
    play();
  }

  /**
   * Shut off the repeating loop.
   */
  public void noLoop() {
    repeat = false;
  }

  /**
   * Pause the stream at its current time.
   */
  public void pause() {
    playing = false;
    paused = true;
    gpipe.pause();
  }

  /**
   * Stop the stream, and rewind.
   */
  public void stop() {
    if (playing) {      
      goToBeginning();
      playing = false;
    }
    paused = false;    
    gpipe.stop();
  }
  
  /**
   * Reads the current video frame (or raw buffer, for RAW streams). For video,
   * swaps the staging buffer filled by the sink thread into this image's
   * pixels array. Synchronized against the sink callbacks so a frame is never
   * read while it is being written.
   */
  public synchronized void read() {
    if (streamType == GSVideo.VIDEO) {    
      // We loadPixels() first to ensure that at least we always have a non-null
      // pixels array, even if without any valid image inside.
      loadPixels();    
      
      if (copyPixels == null) {
        return;
      }
      
      if (firstFrame) {
        // Re-initialize the backing image with the stream's real size.
        super.init(pipeWidth, pipeHeight, RGB);
        loadPixels();
        firstFrame = false;
      }   
          
      // Swap buffers instead of copying: the sink thread will refill the
      // old pixels array on the next frame.
      int[] temp = pixels;
      pixels = copyPixels;
      updatePixels();
      copyPixels = temp;
    } else if (streamType == GSVideo.RAW) {
      if (copyData == null) {
        return;
      }        
      
      dataCaps = tempDataCaps;
      if (data == null) {
        data = new byte[copyData.length];
      }
      
      // Same buffer-swap scheme as the video case.
      byte[] temp = data;
      data = copyData;
      copyData = temp;      
    }
    
    available = false;
  }

  /**
   * Goes to the first frame of the stream.
   */
  public void goToBeginning() {
    boolean res = gpipe.seek(ClockTime.fromNanos(0));
    if (!res) {
      System.err.println("Seek operation failed.");
    }    
  }
  
  /**
   * Goes to the last frame of the stream.
   */
  public void goToEnd() {
    long nanos = gpipe.queryDuration().getNanoSeconds();
    boolean res = gpipe.seek(ClockTime.fromNanos(nanos));
    if (!res) {
      System.err.println("Seek operation failed.");
    }
  }
  
  /**
   * Get a float-value property from the pipeline. Returns 0 if the stream is
   * not currently playing.
   * 
   * @param String name
   * @return float 
   */
  public float getProperty(String name) {
    if (playing) {
      return ((Number)gpipe.get(name)).floatValue();
    }
    return 0;
  }    
  
  /**
   * Set a float-value property in the pipeline. Has no effect unless the
   * stream is currently playing.
   * 
   * @param String name
   * @param float v  
   */
  public void setProperty(String name, float v) {
    if (playing) {
      gpipe.set(name, v);
    }
  }  
  
  /**
   * Change the volume. Values are from 0 to 1. It will fail
   * if the pipeline doesn't have a volume property available.
   * 
   * @param float v   
   */
  public void volume(float v) {
    setProperty("volume", v);
  }
  
  /**
   * Returns the text string used to build the pipeline.
   * 
   * @return String 
   */
  public String getPipeline() {
    return pipeline;
  }  
  
  // Called from the bus when the pipeline reaches end-of-stream: either
  // rewind (looping) or mark the stream as no longer playing.
  protected void eosEvent() {    
    if (repeat) {
      goToBeginning();
    } else {
      playing = false;
    }
  }  
  
  // Sink-thread callback for video frames: copies the native buffer into the
  // staging array and notifies the handler. Synchronized against read().
  protected synchronized void invokeVideoEvent(int w, int h, IntBuffer buffer) {
    available = true;
    pipeWidth = w;
    pipeHeight = h;
    if (copyPixels == null) {
      copyPixels = new int[w * h];
    }
    buffer.rewind();    
    try {
      buffer.get(copyPixels);
    } catch (BufferUnderflowException e) {
      e.printStackTrace();
      copyPixels = null;
      return;
    }

    // Creates a pipelineEvent.
    if (pipelineEventMethod != null) {
      try {
        pipelineEventMethod.invoke(eventHandler, new Object[] { this });
      } catch (Exception e) {
        System.err.println("error, disabling pipelineEvent() for " + pipeline);
        e.printStackTrace();
        pipelineEventMethod = null;
      }
    }
  }
  
  // Sink-thread callback for raw buffers: copies the native buffer into the
  // staging array and notifies the handler. Synchronized against read().
  protected synchronized void invokeRawEvent(Caps caps, int n, ByteBuffer buffer) {
    available = true;
    
    tempDataCaps = caps.toString();
    
    if (copyData == null) {
      copyData = new byte[n];
    }
    buffer.rewind();    
    try {
      buffer.get(copyData);
    } catch (BufferUnderflowException e) {
      e.printStackTrace();
      copyData = null;
      return;
    }

    if (playing) {
      // Creates a playerEvent.
      if (pipelineEventMethod != null) {
        try {
          pipelineEventMethod.invoke(eventHandler, new Object[] { this });
        } catch (Exception e) {
          System.err.println("error, disabling pipelineEvent() for " + pipeline);
          e.printStackTrace();
          pipelineEventMethod = null;
        }
      }
    }  
  }  
}
