File: RawSyncBufferMux.java (API Doc, JMF 2.1.1e, 8364 bytes, Mon May 12 12:20:58 BST 2003)
Package: com.sun.media.multiplexer

RawSyncBufferMux

public class RawSyncBufferMux extends RawBufferMux

Fields Summary
boolean mpegBFrame
boolean mpegPFrame
protected boolean monoIncrTime
private long monoStartTime
private long monoTime
private Object waitLock
private boolean resetted
private boolean masterTrackEnded
static AudioFormat mpegAudio
static VideoFormat mpegVideo
static int THRESHOLD
static int LEEWAY
Constructors Summary
public RawSyncBufferMux()


	// Constructor
	super();
	timeBase = new RawMuxTimeBase();
	// Allow data to be dropped.
	allowDrop = true;
	clock = new BasicClock();
	try{
	    clock.setTimeBase(timeBase);
	} catch (Exception e){ }
    
Methods Summary
public java.lang.String getName()
Returns a descriptive name for the plug-in. This is a user readable string.

	return "Raw Sync Buffer Multiplexer";
    
public boolean initializeTracks(javax.media.Format[] trackFormats)


	if (!super.initializeTracks(trackFormats))
	    return false;

	masterTrackID = 0;
	for (int i = 0; i < trackFormats.length; i++) {
	    if (trackFormats[i] instanceof AudioFormat)
		masterTrackID = i;
	}

	return true;
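With this override, the last audio track found in the format array becomes the master track that the other tracks are paced against in waitForPT. A minimal sketch of the effect (the concrete formats below are assumptions for illustration, not taken from the source):

	import javax.media.Format;
	import javax.media.format.AudioFormat;
	import javax.media.format.VideoFormat;

	Format[] trackFormats = {
	    new VideoFormat(VideoFormat.JPEG),    // track 0
	    new AudioFormat(AudioFormat.LINEAR)   // track 1
	};
	RawSyncBufferMux mux = new RawSyncBufferMux();
	// Assuming the superclass accepts these formats, masterTrackID
	// ends up as 1, the index of the (last) audio track.
	mux.initializeTracks(trackFormats);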
    
public int process(javax.media.Buffer buffer, int trackID)
Process the buffer and multiplex it with data from other tracks. The multiplexed output is sent to the output DataSource.

param buffer  the input buffer
param trackID the index identifying the track to which the input buffer belongs
return BUFFER_PROCESSED_OK if the processing is successful; other possible return codes are defined in PlugIn
see PlugIn


	// If the processor starts out having RTP times, we should reset
	// the RTP flag and set it to RELATIVE time before the data comes
	// out of this processor.  Otherwise, the next stage in the
	// processing chain may compute the time incorrectly.
	if ((buffer.getFlags() & Buffer.FLAG_RTP_TIME) != 0) {
	    buffer.setFlags((buffer.getFlags() & ~Buffer.FLAG_RTP_TIME) |
				Buffer.FLAG_RELATIVE_TIME);
	}

	// If the monitor is enabled, we'll send the data to the monitor.
	if (mc[trackID] != null && mc[trackID].isEnabled())
	    mc[trackID].process(buffer);

	if ((streams == null) || (buffer == null) || (trackID >= streams.length)){
	    return PlugIn.BUFFER_PROCESSED_FAILED;
	}

	if (buffer.isDiscard())
	    return BUFFER_PROCESSED_OK;

	//
	// Unless the NO_WAIT flag is on, we'll need to wait for
	// the presentation time.
	//
	if ((buffer.getFlags() & Buffer.FLAG_NO_WAIT) == 0) {

	    if (buffer.getFormat() instanceof AudioFormat) {
		// Regular audio requires that we wait for the last
		// bit of audio to be done.  But MPEG's timestamp is
		// at the beginning of the chunk.
		if (mpegAudio.matches(buffer.getFormat()))
		    waitForPT(buffer.getTimeStamp(), trackID);
		else
		    waitForPT(mediaTime[trackID], trackID);
	    } else if (buffer.getTimeStamp() >= 0) {
		if (mpegVideo.matches(buffer.getFormat()) &&
			(buffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0) {
		    byte[] payload = (byte[])buffer.getData();
		    int offset = buffer.getOffset();
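		    // Per RFC 2250, the low three bits of the third byte of the
		    // MPEG video-specific RTP header carry the picture coding type
		    // (1 = I frame, 2 = P frame, 3 = B frame).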
		    int ptype = payload[offset+2] & 0x07;
		    if (ptype > 2) {
			// found a B frame
			mpegBFrame = true;
		    } else if (ptype == 2) {
			// found a P frame
			mpegPFrame = true;
		    }

		    /*
		     * For MPEG the timestamp is the time of the frame, but
		     * frames come out of order.  Since a stream may not
		     * contain every frame type, we have to allow for all
		     * mixes.  If B frames are present, wait only on them.
		     * If there are no B frames, wait on P frames.  If there
		     * are no B or P frames, wait on I frames.
		     */
		    if (ptype > 2 || (ptype == 2 && !mpegBFrame)
				|| (ptype == 1 && !(mpegBFrame | mpegPFrame))) {
			waitForPT(buffer.getTimeStamp(), trackID);
		    }
		} else {
		    waitForPT(buffer.getTimeStamp(), trackID);
		}
	    }
	}

	updateTime(buffer, trackID);
	    
	// We are doing the synchronization here so the downstream
	// components will not need to.
	buffer.setFlags(buffer.getFlags() | Buffer.FLAG_NO_SYNC);
	
	if (!(buffer.getFormat() instanceof AudioFormat) ||
	     mpegAudio.matches(buffer.getFormat())) {
	    // Convert the timestamps to be monotonically increasing.
	    if (monoIncrTime) {
		monoTime = monoStartTime +
			buffer.getTimeStamp() - mediaStartTime * 1000000;

	     /*
		if ((buffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0)
	    	    System.err.println("monoStartTime = " + monoStartTime +
			" mediaStartTime = " + mediaStartTime +
			" TS = " + buffer.getTimeStamp() +
			" mono TS = " + monoTime);
	      */
		buffer.setTimeStamp(monoTime);
	    }
	}

	if (buffer.isEOM() && trackID == masterTrackID)
	    masterTrackEnded = true;

	buffer.setHeader(new Long(System.currentTimeMillis()));

	return streams[trackID].process(buffer);
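In the monotonic-timestamp conversion above, buffer timestamps and monoStartTime are in nanoseconds while mediaStartTime is kept in milliseconds, which is why it is scaled by 1000000. A small illustrative calculation (the values are assumptions, not taken from the source):

	// mediaStartTime = 2000 ms, monoStartTime = 5,000,000,000 ns
	// incoming timestamp = 2,040,000,000 ns (40 ms past the segment start)
	// monoTime = 5,000,000,000 + 2,040,000,000 - 2000 * 1,000,000
	//          = 5,040,000,000 ns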
    
public void reset()

	super.reset();
	mpegBFrame = false;
	mpegPFrame = false;
	synchronized (waitLock) {
	    resetted = true;
	    waitLock.notify();
	}
    
public void setMediaTime(javax.media.Time now)

	super.setMediaTime(now);
	monoStartTime = monoTime + 10;	// This is so the next frame time
					// will not be exactly the same.
    
public void syncStart(javax.media.Time at)

	masterTrackEnded = false;
	super.syncStart(at);
    
protected void updateTime(javax.media.Buffer buf, int trackID)
Update the media time per track.


	if (buf.getFormat() instanceof AudioFormat) {

	    if (mpegAudio.matches(buf.getFormat())) {
		if (buf.getTimeStamp() < 0) {
		    if (systemStartTime >= 0)
			mediaTime[trackID] = (mediaStartTime + 
			    System.currentTimeMillis() - systemStartTime) * 1000000;
		} else
		    mediaTime[trackID] = buf.getTimeStamp();
	    } else {
		long t = ((AudioFormat)buf.getFormat()).computeDuration(buf.getLength());

		if (t >= 0)
		    mediaTime[trackID] += t;
		else
		    mediaTime[trackID] = buf.getTimeStamp();
	    }

	} else {
	    if (buf.getTimeStamp() < 0 && systemStartTime >= 0)
		mediaTime[trackID] = (mediaStartTime + 
			  System.currentTimeMillis() - systemStartTime) * 1000000;
	    else
		mediaTime[trackID] = buf.getTimeStamp();
	}

	//System.err.println("mediaTime = " + mediaTime[trackID]);

	timeBase.update();
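For non-MPEG audio, the per-track media time advances by the duration of each buffer rather than by its timestamp. A minimal sketch of that duration computation (the format and buffer length here are assumptions for illustration):

	import javax.media.format.AudioFormat;

	AudioFormat fmt = new AudioFormat(AudioFormat.LINEAR, 44100, 16, 2);
	// 4410 bytes of 16-bit stereo at 44.1 kHz is 25 ms of audio, so
	// computeDuration should return roughly 25,000,000 nanoseconds,
	// the amount by which mediaTime[trackID] is then advanced.
	long durationNanos = fmt.computeDuration(4410);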
    
private void waitForPT(long pt, int trackID)

	long delay;

	pt = pt / 1000000;	// To bring it to millisecs range.

	//System.err.println("MT = " + mediaStartTime + 
	//		" ST = " + systemStartTime + 
	//		" pt = " + pt + 
	//		" st = " + System.currentTimeMillis());

	if (masterTrackID == -1 || trackID == masterTrackID) {
	    if (systemStartTime < 0)
		delay = 0;
	    else
		delay = (pt-mediaStartTime) - 
			(System.currentTimeMillis()-systemStartTime); 
	} else {
	    delay = pt - mediaTime[masterTrackID]/1000000;
	}

	//System.err.println("delay = " + delay);

	// This is a workaround for now.  The video capture pipeline
	// is not fully flushed, causing wrong values in the timestamps.
	if (delay > 2000)
	    return;

        while (delay > LEEWAY && !masterTrackEnded) {
            if (delay > THRESHOLD)
                delay = THRESHOLD;

	    synchronized (waitLock) {
		try {
		    waitLock.wait(delay);
		} catch (Exception e) {
		    break;
		}
		if (resetted) {
		    resetted = false;
		    break;
		}
	    }

	    if (masterTrackID == -1 || trackID == masterTrackID)
		delay = (pt-mediaStartTime) - 
			(System.currentTimeMillis()-systemStartTime); 
	    else
		delay = pt - mediaTime[masterTrackID]/1000000;
        }
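The loop above paces a track by waiting in slices of at most THRESHOLD milliseconds until the remaining delay falls within LEEWAY, re-checking after every slice so that a reset (or the master track ending) can cut the wait short. A standalone sketch of the same chunked-wait pattern (the class name and constant values are assumptions, not taken from the JMF source):

	class PacingSketch {
	    static final int THRESHOLD = 100;  // longest single wait, in ms (assumed value)
	    static final int LEEWAY = 5;       // close enough to the target, in ms (assumed value)
	    private final Object waitLock = new Object();

	    void waitUntil(long targetMillis) throws InterruptedException {
	        long delay = targetMillis - System.currentTimeMillis();
	        while (delay > LEEWAY) {
	            synchronized (waitLock) {
	                // Wait in bounded slices so a notify (e.g. on reset) can
	                // end the pacing early instead of sleeping the full delay.
	                waitLock.wait(Math.min(delay, THRESHOLD));
	            }
	            delay = targetMillis - System.currentTimeMillis();
	        }
	    }
	}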