File: Jmpx.java
Doc: API Doc
Category: JMF 2.1.1e
Size: 57622
Date: Mon May 12 12:21:26 BST 2003
Package: com.sun.media.codec.video.jmpx

Jmpx

public final class Jmpx extends BasicController implements Runnable
A utility class to drive the mpx MPEG decoder.
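A minimal driver sketch (not from this listing): in practice Jmpx is created by the JMF handler machinery, but the public surface shown below — Jmpx(), setSource(), and the inherited Controller methods — is enough to sketch direct use. The locator string and the class name JmpxSketch are hypothetical placeholders.

	import javax.media.Manager;
	import javax.media.MediaLocator;
	import javax.media.protocol.DataSource;

	public class JmpxSketch {
	    public static void main(String[] args) throws Exception {
	        // Hypothetical file locator; setSource() accepts any Pull/PushDataSource.
	        DataSource ds = Manager.createDataSource(new MediaLocator("file:/tmp/clip.mpg"));
	        Jmpx player = new Jmpx();   // stream type starts out as MpxStrm_Unknown
	        player.setSource(ds);       // throws IncompatibleSourceException for other source types
	        player.realize();           // BasicController drives doRealize()/doPrefetch() from here
	    }
	}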

Fields Summary
private PipedInputStream cmdPipe
protected PipedOutputStream mpxCmdPipe
protected boolean killCommandThread
private Vector listeners
private MpxThread mpxThread
private DataThread dataThread
private int audioRate
private int audioChannels
private boolean sourceIsOn
private Component monitor
private long lastStatTime
private VideoRenderer renderer
private VideoFormat videoformat
protected JmpxAudio audio
public static boolean BIG_ENDIAN
public static boolean LITTLE_ENDIAN
public static final int MpxStrm_11172
public static final int MpxStrm_VSEQ
public static final int MpxStrm_ASEQ
public static final int MpxStrm_Unknown
public static final int MpxAudioLeft
public static final int MpxAudioRight
public static final int MpxAudioStereo
public static final int MpxAudioLowQ
public static final int MpxAudioMedQ
public static final int MpxAudioHiQ
protected static final int MCMD_NULL
protected static final int MCMD_EXIT
protected static final int MCMD_OPENSRC
protected static final int MCMD_CLOSESRC
protected static final int MCMD_REENTER
protected static final int MCMD_PLAYCTR
protected static final int MCMD_PRESCTR
protected static final int MCMD_STREAM
protected static final int MCMD_SENDSTAT
protected static final int MCMD_STATUS
protected static final int MCMD_ACK
protected static final int MCMD_SETRSRC
protected static final int MCMD_CAPTURE
protected static final int MCMD_CDOP
protected static final int MCMD_TEST
protected static final int MCMD_QSIZE
protected static final int MCMD_QSTATS
protected static final double MULF
protected static final int MCFL_SNDACK
protected static final int MCFL_NOACK
protected static final int MCFL_ORGMPX
protected static final int MCFL_MPXRSV1
protected static final int MSC_NONE
protected static final int MSC_FNAME
protected static final int MSC_CDFILE
protected static final int MSC_NETWORK
protected static final int MSC_FDSCP
protected static final int MSC_JAVASTRM
protected static final int MRE_FOFS
protected static final int MRE_RELOFS
protected static final int MRE_ASOPEN
protected static final int MRE_STRMS
protected static final int MRE_SEEKVSEQ
public static final int PC_PLAY
public static final int PC_FWDSPEED
public static final int PC_FWDSTEP
public static final int PC_PAUSE
public static final int PC_AUDMSK
protected static final int STRM_IGNOREID
protected static final int STRM_SBCOFF
protected static final int STRM_AUTOSBC
protected static final int STRM_IDBITS
protected static final int PCTR_VMD
protected static final int PCTR_AMD
protected static final int PCTR_AVOL
protected static final int PCTR_LUM
protected static final int PCTR_SAT
protected static final int PCTR_GAM
protected static final int PCTR_LSG
protected static final int VDM_NONE
protected static final int VDM_COL
protected static final int VDM_COLB
protected static final int VDM_COL8
protected static final int TRYSET_DONE
protected static final int TRYSET_CANT
protected static final int TRYSET_PASTEOM
protected MediaThread cmdInputThread
protected int ackSeq
protected int strmType
protected float fwdSpeed
protected int width
protected int height
protected int outWidth
protected int outHeight
protected int zoom
protected float java_zoom
protected int cmap
protected boolean interleaved
protected boolean muted
protected boolean reloaded
protected float fps
protected float kbps
protected float loc
protected float oldLoc
protected long time
protected long size
protected long frames
private long peer
protected int possibleLength
protected int possibleWidth
protected int possibleHeight
protected int useJavaRenderer
protected Object rgbBuffer
protected FrameRateControl frameRateControl
protected BitRateControl bitRateControl
protected GainControl gainControl
protected Control[] controls
protected int needYUVBuffer
protected int needXBGR
protected boolean windowAvailable
protected DataSource source
protected TimeBase timeBase
protected String filename
protected SourceStream sourceStream
protected byte[] tempArray
protected MemoryImageSource sourceImage
protected Image displayImage
int rMask
int gMask
int bMask
protected Time knownDuration
protected Time eomDuration
protected Time requestedMediaTime
protected boolean mediaTimeChanged
protected boolean outputSizeSet
protected int contentLength
private static JMFSecurity jmfSecurity
private static boolean securityPrivelege
private Method[] mSecurity
private Class[] clSecurity
private Object[] argsSecurity
private Buffer outBuffer
Constructors Summary
public Jmpx(int type)

	super();

	ackSeq = 0;
	strmType = type; 
    	width = 320; height = 240;
	outWidth = 320; outHeight = 240;
    	zoom = 1;
	java_zoom = (float) 1.0;
    	cmap = 0;
    	interleaved = true;
    	fps = 0; kbps = 0; loc = 0;
    	time = 0; size = 0;
    	frames = 0;
    	listeners = new Vector();
    
public Jmpx()

	this(MpxStrm_Unknown);
    
static 

	try {
	    jmfSecurity = JMFSecurityManager.getJMFSecurity();
	    securityPrivelege = true;
	} catch (SecurityException e) {
	}
    
Methods Summary
protected void abortPrefetch()

	// Free up mpx.
	closeMPX();
	reloaded = true;
	startSource(false);
    
protected void abortRealize()

	startSource(false);
    
public void addJmpxListener(JmpxListener cl)

	listeners.addElement(cl);
    
private native boolean bigEndian()

private native void broadcastMPX(byte[] command)

private native int checkMpegFile(java.lang.String file)

private native int checkMpegStream(javax.media.protocol.SourceStream m)

public boolean checkStreamType()

 
	// Return if the stream type is already determined.
	if (strmType != MpxStrm_Unknown) {
	    /* for an mp2 file */
	    possibleLength = -1;
	    if (filename != null)
		getMediaDuration(filename);
	    if (possibleLength > 0)
		eomDuration = new Time((double) possibleLength);
	    return true;
	}

	possibleLength = -1;
	
	if (filename != null) {
	    if ((strmType = checkMpegFile((String)filename)) == 0)
		return false;
	} else if (sourceStream != null) {
	    if ((strmType = checkMpegStream(sourceStream)) == 0)
		return false;
	    // Reset the stream back to 0.  This is essential for video-only
	    // MPEG streams to work.
	    if (sourceStream instanceof Seekable)
		((Seekable)sourceStream).seek(0);
	} else
	    return false;

	if (possibleWidth != -1 && possibleHeight != -1) {
	    width = possibleWidth;
	    height = possibleHeight;
	    outWidth = width;
	    outHeight = height;
	}
	if (possibleLength > 0)
	    eomDuration = new Time((double) possibleLength);
	return true;
    
protected void closeAudio()

	if (audio != null) {
	    audio.pause();
	    audio.flush();
	    audio.dispose();
	    audio = null;
	}
    
protected void closeMPX()

	if (isInitialized()) {
	    // Stop reading from the command pipe.
	    killCommandThread = true;

	    // Command mpx to exit.  Then free the associated native data
	    // structures.
	    exit();

	    try {
		mpxCmdPipe.flush();
		mpxCmdPipe.close();
		cmdPipe.close();
	    } catch (IOException e) {
		System.err.println("closing command pipe: " + e);
	    }
	}
    
private java.awt.Component createMonitor(int width, int height)

	Dimension size = new Dimension(width, height);
	// try YUV first
	videoformat = new YUVFormat(size, width * height * 2,
				    Format.byteArray, Format.NOT_SPECIFIED,
				    YUVFormat.YUV_422,
				    width, width / 2, 0, width * height,
				    width * height + width * height / 4);
	renderer = (VideoRenderer) SimpleGraphBuilder.findRenderer(videoformat);
	if (renderer != null) {
	    // found a YUV renderer
	    needYUVBuffer = 1;
	    rgbBuffer = new byte [ width * height * 2];
	    return renderer.getComponent();
	}

	needXBGR = 1;
	if (LITTLE_ENDIAN) {
	    rMask = 0xFF0000;
	    bMask = 0xFF;
	}
        ColorModel cm = Toolkit.getDefaultToolkit().getColorModel();
	if (cm instanceof DirectColorModel) {
	    DirectColorModel dcm = (DirectColorModel) cm;
	    if (dcm.getPixelSize() > 16) {
	        rMask = dcm.getRedMask();
	        gMask = dcm.getGreenMask();
	        bMask = dcm.getBlueMask();
	    }
	    if (rMask != 0xFF && !LITTLE_ENDIAN) {
		needXBGR = 0;
	    }
	}
	videoformat = new RGBFormat(size,
				  size.width * size.height,
				  Format.intArray,
				  30f, 32,
				  rMask, gMask, bMask,
				  1, (size.width + 7) & ~0x7,
				  RGBFormat.FALSE,
				  RGBFormat.NOT_SPECIFIED);

	rgbBuffer = new int[width * height];

	renderer = (VideoRenderer) SimpleGraphBuilder.findRenderer(videoformat);
	if (renderer == null) {
	    System.err.println("No renderer");
	    return null;
	} else
	    return renderer.getComponent();
    
public synchronized void displayImage()

    
        
	if (monitor == null)
	    return;
	synchronized (monitor) {
	    if (videoformat instanceof RGBFormat) {
		outBuffer.setData((int[]) rgbBuffer);
		outBuffer.setLength(width * height);
	    } else {
		outBuffer.setData((byte[]) rgbBuffer);
		// assumes YUV 422
		outBuffer.setLength(width * height + width * height/2);
	    }
	    outBuffer.setFormat(videoformat);
	    outBuffer.setTimeStamp(Buffer.TIME_UNKNOWN);
	    outBuffer.setFlags(0);
	    outBuffer.setHeader(null);

	    renderer.process(outBuffer);
	}
    
public synchronized void doClose()


	if (isInitialized()) {
	    windowAvailable = false;
	    closeMPX();

	    // finalize is not called as soon as it should be.  We'll attempt
	    // to clear references to these resource-consuming objects
	    // in the hope that the garbage collector will collect them
	    // right away...
/*
	    if (blitter != null) {
		blitter.close();
		blitter = null;
	    }
	    rgbBuffer = null;
	    yuvFrame = null;
	    pseudoFrame = null;
	    ditherer = null;
*/
	    mpxThread = null;
	    dataThread = null;
	    cmdInputThread = null;
	    monitor = null;

	}
	if (source != null) {
	    try {
		source.disconnect();
	    } catch (Exception e) {}
	}
    
protected boolean doPrefetch()


	if (isInitialized())
	    return true;

	if (strmType != Jmpx.MpxStrm_VSEQ) {
	    // Try to grab the audio device.
	    boolean gotAudio;

	    // If the rate and channels are not yet known, initialize
	    // it to some format first to claim the audio device.  The
	    // native code will later re-initialize it to the valid format.
	    if (audioRate == 0 || audioChannels == 0)
		gotAudio = setAudio(22050, 1);
	    else
		gotAudio = setAudio(audioRate, audioChannels);

	    if (!gotAudio) {
		sendEvent(new AudioDeviceUnavailableEvent(this));
		if (strmType == Jmpx.MpxStrm_ASEQ)
		    return false;
	    }
	}

    	// Initialize and connect the command pipes
    	cmdPipe = new PipedInputStream();
    	mpxCmdPipe = new PipedOutputStream();

    	try {
	   cmdPipe.connect(mpxCmdPipe);
	} catch (IOException e) {
	   System.err.println("Failed to connect the command pipes " + e);
	   return false;
	}

// // 	mpxThread = new MpxThread(this);
// // 	dataThread = new DataThread(this);

// // 	// Call a native method to start the mpx process.
// // 	if (!initMPX(monitor))
// // 	    return false;

// // 	cmdInputThread = new MediaThread(this);
// // 	cmdInputThread.setName("Jmpx command input thread");

// // 	// The decoding threads are running at Minimum priority.
// // 	// Set this thread to one level over that.
// // 	cmdInputThread.useControlPriority();
// // 	cmdInputThread.start();


	if ( /*securityPrivelege  && */ (jmfSecurity != null) ) {
	    String permission = null;
	    try {
		if (jmfSecurity.getName().startsWith("jmf-security")) {
		    permission = "thread";
		    jmfSecurity.requestPermission(mSecurity, clSecurity, argsSecurity,
						  JMFSecurity.THREAD);
		    mSecurity[0].invoke(clSecurity[0], argsSecurity[0]);
		    
		    permission = "thread group";
		    jmfSecurity.requestPermission(mSecurity, clSecurity, argsSecurity,
						  JMFSecurity.THREAD_GROUP);
		    mSecurity[0].invoke(clSecurity[0], argsSecurity[0]);
// 		} else if (jmfSecurity.getName().startsWith("internet")) {
// 		    PolicyEngine.checkPermission(PermissionID.THREAD);
// 		    PolicyEngine.assertPermission(PermissionID.THREAD);
// 		}
		}
	    } catch (Throwable e) {
		if (JMFSecurityManager.DEBUG) {
		    System.err.println( "Unable to get " + permission +
					" privilege  " + e);
		}
		securityPrivelege = false;
		// TODO: Do the right thing if permissions cannot be obtained.
		// User should be notified via an event
	    }
	}
	if ( (jmfSecurity != null) && (jmfSecurity.getName().startsWith("jdk12"))) {
	    try {
		Constructor cons = jdk12CreateThreadRunnableAction.cons;
		
		mpxThread = (MpxThread) jdk12.doPrivM.invoke(
                                           jdk12.ac,
					   new Object[] {
 					  cons.newInstance(
 					   new Object[] {
                                               MpxThread.class,
                                               this
                                           })});


		dataThread = (DataThread) jdk12.doPrivM.invoke(
                                           jdk12.ac,
					   new Object[] {
 					  cons.newInstance(
 					   new Object[] {
                                               DataThread.class,
                                               this
                                           })});
		
	    // Call a native method to start the mpx process.
	    if (!initMPX(monitor))
		return false;

	    cmdInputThread = (MediaThread) jdk12.doPrivM.invoke(
                                           jdk12.ac,
					   new Object[] {
 					  cons.newInstance(
 					   new Object[] {
                                               MediaThread.class,
                                               this
                                           })});

	    cmdInputThread.setName("Jmpx command input thread");
	    
	    // The decoding threads are running at Minimum priority.
	    // Set this thread to one level over that.
	    cons = jdk12PriorityAction.cons;
	    jdk12.doPrivM.invoke(
				     jdk12.ac,
				     new Object[] {
 					  cons.newInstance(
 					   new Object[] {
                                               cmdInputThread,
                                               new Integer(cmdInputThread.getControlPriority())
                                           })});

	    cmdInputThread.start();
	    } catch (Exception e) {
	    }
	} else {
	    mpxThread = new MpxThread(this);
	    dataThread = new DataThread(this);
	    
	    // Call a native method to start the mpx process.
	    if (!initMPX(monitor))
		return false;
	    
	    cmdInputThread = new MediaThread(this);
	    cmdInputThread.setName("Jmpx command input thread");
	    
	    // The decoding threads are running at Minimum priority.
	    // Set this thread to one level over that.
	    cmdInputThread.useControlPriority();
	    cmdInputThread.start();
	}
	return true;
    
protected boolean doRealize()


    	try {
    	    JMFSecurityManager.loadLibrary("jmutil");
    	    JMFSecurityManager.loadLibrary("jmmpx");
	} catch (UnsatisfiedLinkError e) {
	    return false;
	}

	// Initialize the segv trace.
	// Ema: The Segv handler causes a crash in Sol 2.6. Until we
	// get a new segv handler that works on 2.6, we will remove
	// this debug facility.  
	//new Segv();

	// First thing, check the byte order
	if (bigEndian()) {
	    BIG_ENDIAN = true;
	    LITTLE_ENDIAN = false;
	} else {
	    BIG_ENDIAN = false;
	    LITTLE_ENDIAN = true;
	}
	    
	if (source == null)
	    return false;

	mediaTimeChanged = false;

	startSource(true);
	
	// Determine the source type. 
	sourceStream = null;
	filename = null;
	if (source instanceof PullDataSource) {
	    PullDataSource pds = (PullDataSource)source;
	    URL url = null;
	    String protocol = null;
	    MediaLocator ml = pds.getLocator();
	    if (ml != null) {
		protocol = ml.getProtocol();
		try {
		    url = ml.getURL();
		} catch (MalformedURLException mue) {
		    url = null; // Could be a locator that's not a valid URL
		}
	    }
	    if (source instanceof com.sun.media.protocol.file.DataSource &&
	        protocol != null && protocol.equals("file") && url != null) {
		// It's the file protocol.  We'll just allow mpx to read
		// directly from the file to maximize performance.
		filename = url.getFile();
	    } else { 
		// simply use the source stream from the DataSource.
		// We support only the first stream.
		SourceStream ss[] = pds.getStreams();
		if (ss.length >= 1)
		    sourceStream = ss[0];
		contentLength = (int) sourceStream.getContentLength();
		if (contentLength <= 0)
		    contentLength = -1;
	    }
	} else {
	    PushDataSource pds = (PushDataSource)source;
	    // We support only the first stream.
	    SourceStream ss[] = pds.getStreams();
	    if (ss.length >= 1) {
		sourceStream = ss[0];
		((PushSourceStream)sourceStream).setTransferHandler(new PushStreamHandler());
	    }
	}

	// Something's wrong with the DataSource.
	if (sourceStream == null && filename == null) {
	    System.err.println("No source stream");
	    return false;
	}

	// Check the stream type.
	if (!checkStreamType()) {
	    System.err.println("The data is not an MPEG stream.\n"); 
	    return false;
	}

	reloaded = true;

	// Assign the time base.
	timeBase = new MPXTimeBase();

	// Create the graphics components.
	monitor = createMonitor(width, height);

/*
	if (useFastBlt) {
	    // Check if we have a fast blitter
	    useFastBlt = false;
	    try {
		String javaVendor = "Sun";
		try {
		    // Check for 2.6 and (netscape or green), disable xil.
		    JMFSecurity.enablePrivilege.invoke(JMFSecurity.privilegeManager,
						       JMFSecurity.readPropArgs);
		    javaVendor =
			System.getProperty("java.vendor", "Sun").toLowerCase();
		} catch (Exception anyE) {
		}
		int threads = com.sun.media.renderer.video.VideoRenderer.getThreadType();
		int badOS = com.sun.media.renderer.video.VideoRenderer.getBadOS();
		if (!(badOS == 1 &&
		      (javaVendor.indexOf("etscape") > 0 || threads == 1))) {
		    blitterClass = Class.forName("com.sun.media.blitter.xil.Blitter");
		    if (blitterClass != null) {
			Blitter tryBlt = (com.sun.media.Blitter) blitterClass.newInstance();
			useFastBlt = true;
			tryBlt.close();
			tryBlt = null;
		    }
		}
	    } catch (Exception e) {
	    } catch (Error er) {
	    }
	    if (!useFastBlt) {
		try {
		    blitterClass = Class.forName("com.sun.media.blitter.xlib.Blitter");
		    if (blitterClass != null) {
			Blitter tryBlt = (com.sun.media.Blitter) blitterClass.newInstance();
			useFastBlt = true;
			tryBlt.close();
			tryBlt = null;
		    }
		} catch (Exception e) {
		} catch (Error er) {
		}
	    }
	    if (!useFastBlt) {
		try {
		    blitterClass = Class.forName("com.sun.media.blitter.directx.Blitter");
		    if (blitterClass != null) {
			Blitter tryBlt = (com.sun.media.Blitter) blitterClass.newInstance();
			useFastBlt = true;
			tryBlt.close();
			tryBlt = null;
		    }
		} catch (Exception e) {
		} catch (Error er) {
		}
	    }
	    if (!useFastBlt) {
		try {
		    blitterClass = Class.forName("com.sun.media.blitter.gdi.Blitter");
		    if (blitterClass != null) {
			Blitter tryBlt = (com.sun.media.Blitter) blitterClass.newInstance();
			useFastBlt = true;
			tryBlt.close();
			tryBlt = null;
		    }
		} catch (Exception e) {
		} catch (Error er) {
		}
	    }
	}
*/
	/*
	if (Toolkit.getDefaultToolkit().getColorModel() instanceof IndexColorModel) {
	    screenDepth = 8;
	    if (useFastBlt)
		needYUVBuffer = 1;
	} else {
	    screenDepth = 24;
	    needYUVBuffer = 0;
	}
	*/
	//screenDepth = 24;
	startSource(false);

	return true;
    
protected float doSetRate(float r)

	if (strmType == MpxStrm_ASEQ)
	   return 1.0f;
	if (r < 0.1f) r = 0.1f;
	else if (r > 10.0f) r = 10.0f;
	fwdSpeed = r;
	return fwdSpeed;
    
protected void doStart()


	// Reset EOM threshold flag. This is not thread safe 
	oldLoc = 0f;
	
	if (timeBase instanceof MPXTimeBase)
	    ((MPXTimeBase)timeBase).reset();
	startSource(true);

	if (reloaded) {
	    if (filename != null)
		openFile(filename);
	    else if (sourceStream != null)
		openStream(sourceStream);
	    reloaded = false;
	}

	if (!mediaTimeChanged) {
	    play();
	    return;
	}

	// Can we set the media time ?
	int returnVal = trySetMediaTime(requestedMediaTime);
	if (returnVal == TRYSET_DONE) {
	    play();
	    mediaTimeChanged = false;
	} else if (returnVal == TRYSET_CANT) {
	    super.stop();
	    sendEvent((StopEvent) new SeekFailedEvent(this, Started, Prefetched,
							  getTargetState(),
							  getMediaTime()));
	} else if (returnVal == TRYSET_PASTEOM) {
	    super.stop();
	    sendEvent(new EndOfMediaEvent(this, Started, Prefetched,
					  getTargetState(), getMediaTime()));
	}
    
private synchronized void exit()

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.FLAGS, MCFL_SNDACK | MpxCmd.FLAGS_PAT);
	cmd.packInt(MpxCmd.ID, MCMD_EXIT);
	sendMPX(cmd.getBytes());
	peer = 0;
    
public void finalize()

	super.finalize();
	if (isInitialized())
	    close();
    
public synchronized void flush()

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_REENTER);
	cmd.packInt(MpxCmd.PARAMS, 0);
	cmd.packInt(MpxCmd.PARAMS+1, 0);
	cmd.packInt(MpxCmd.PARAMS+2, strmType);
	cmd.packInt(MpxCmd.PARAMS+3, 0x2020);
	cmd.packInt(MpxCmd.PARAMS+4, MRE_ASOPEN);
	sendMPX(cmd.getBytes());
    
public int getContentLength()

	return contentLength;
    
public javax.media.Control[] getControls()


	if (strmType != Jmpx.MpxStrm_ASEQ && frameRateControl == null) {
	    frameRateControl = new FrameRateAdapter(this, 0f, 0f, 30f, false) {
		    
		public float setFrameRate(float rate) {
		    this.value = rate;
		    return this.value;
		}
		    
		public Component getControlComponent() {
		    return null;
		}

		public Object getOwner() {
		    return Jmpx.this;
		}
	    };
	}

	if (strmType != Jmpx.MpxStrm_VSEQ && gainControl == null) {
	    gainControl = new GCA(this);
	}

	if (bitRateControl == null) {
	    bitRateControl = new BitRateAdapter(0, -1, -1, false) {
	    
	        public int setBitRate(int rate) {
		    this.value = rate;
		    return this.value;
		}
		    
		public Component getControlComponent() {
		    return null;
		}
	    };
	}

	int size = 0;

	if (frameRateControl != null) size++;
	if (gainControl != null) size++;
	if (bitRateControl != null) size++;

	controls = new Control[size];

	size = 0;
	if (frameRateControl != null)
	    controls[size++] = frameRateControl;
	if (gainControl != null)
	    controls[size++] = gainControl;
	if (bitRateControl != null)
	    controls[size++] = bitRateControl;

	return controls;
    
public int getDisplayHeight()

	if (useJavaRenderer == 1)
	    return outHeight;
	else
	    return height * zoom;
    
public int getDisplayWidth()

	if (useJavaRenderer == 1)
	    return outWidth;
	else
	    return width * zoom;
    
public javax.media.Time getDuration()
This is again a rough estimate, calculated from the current media time and the current offset into the media.

	// If we've hit the EOM then we know the duration for sure
	if (eomDuration != Duration.DURATION_UNKNOWN)
	    return eomDuration;

	if (source instanceof Duration) {
	    Time d = ((Duration)source).getDuration();
	    if (d != null && d != Duration.DURATION_UNKNOWN &&
		d != Duration.DURATION_UNBOUNDED)
		return d;
	}
	
	// If it's not a file, we can't estimate the duration
	/*
	if (sourceStream != null) {
	    return knownDuration;
	}
	*/
	
	if (mediaTimeChanged)
	    return knownDuration;
	
	long jtime = getMediaNanoseconds();
	double loc = getLoc();

	// Can't even give an estimate until we've played at least 3 seconds.
	if (loc == 0.0 || jtime < (long) 3E+9)
	    return knownDuration;

	long nanoseconds = (long)(jtime/loc);
	long knownNanoseconds = knownDuration.getNanoseconds();

	
	// If the difference is more than five seconds
	if (Math.abs(nanoseconds - knownNanoseconds) > 5E+9) {
	    knownDuration = new Time(nanoseconds);
	    sendEvent(new DurationUpdateEvent(this, knownDuration));
	}
	
	return knownDuration;
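
	The estimate above is just a proportion: getLoc() reports the fraction of the media consumed and getMediaNanoseconds() the time played so far, so the full duration is roughly elapsed / loc. A standalone sketch of that arithmetic (the helper name is hypothetical):

	// e.g. 12 s played at loc = 0.10 implies roughly a 120 s stream.
	static javax.media.Time estimateDuration(long elapsedNs, double loc) {
	    if (loc <= 0.0)
	        return javax.media.Duration.DURATION_UNKNOWN;
	    return new javax.media.Time((long) (elapsedNs / loc));   // Time(long) takes nanoseconds
	}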
    
public int getHeight()

	return height;
    
public java.awt.Component getImage(int w, int h)

	if (width != w || height != h || rgbBuffer == null) {
	    resizeWindow(w, h);
	}
	return monitor;
    
public double getLoc()
Get the current location into the media. (0.0 - 1.0)

	return loc;
    
private native int getMediaDuration(java.lang.String file)

public long getMediaNanoseconds()
Get the current media time in nanoseconds.

	return getMediaTime().getNanoseconds();
    
public int getStreamType()

	return strmType;
    
public javax.media.TimeBase getTimeBase()

	super.getTimeBase();
	return timeBase;
    
public java.awt.Component getVisualComponent()

	if (strmType == Jmpx.MpxStrm_ASEQ)
	    return null;
	return monitor;
    
public int getWidth()

	return width;
    
protected boolean initAudio()

	return (audio != null);
    
private native boolean initMPX(java.awt.Component c)

protected boolean isConfigurable()

	return false;
    
public boolean isInitialized()

	return (peer != 0);
    
public boolean isMuted()

	return muted;
    
protected void listenCommand()

	// byte	command[] = new byte[512];
	byte	command[] = new byte[128];
	int	len = 0;
	try {
	    if (!killCommandThread)
		len = cmdPipe.read(command);
	    //System.err.println("command len is " + len);
	} catch (IOException e) {
	    return;
	}
	if (len > 0)
	    processCommand(command, len);
    
public synchronized void mute(boolean m)

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_STREAM);
	if (m)
	    cmd.packInt(MpxCmd.PARAMS, STRM_IGNOREID | STRM_SBCOFF);
	else
	    cmd.packInt(MpxCmd.PARAMS, 0);
	sendMPX(cmd.getBytes());
	muted = m;
    
public synchronized void muteAll(boolean m)

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_STREAM);
	if (m)
	    cmd.packInt(MpxCmd.PARAMS, STRM_IGNOREID | STRM_SBCOFF);
	else
	    cmd.packInt(MpxCmd.PARAMS, 0);
	broadcastMPX(cmd.getBytes());
    
protected synchronized void notifyPushDataArrival()

	notifyAll();
    
private synchronized void openFile(java.lang.String path)


	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_OPENSRC);
	cmd.packInt(MpxCmd.PARAMS, 0);
	cmd.packInt(MpxCmd.PARAMS+1, 0);
	cmd.packInt(MpxCmd.PARAMS+2, strmType);
	cmd.packInt(MpxCmd.PARAMS+3, 0);
	cmd.packInt(MpxCmd.PARAMS+4, MRE_FOFS);
	cmd.packInt(MpxCmd.PARAMS+5, MSC_FNAME);
	cmd.packStr(MpxCmd.PARAMS+6, path);
	sendMPX(cmd.getBytes());
    
public synchronized void openStream(javax.media.protocol.SourceStream is)


	// Tell mpx to read from the given SourceStream.
	setStream(is);

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_OPENSRC);
	cmd.packInt(MpxCmd.PARAMS, 0);
	cmd.packInt(MpxCmd.PARAMS+1, 0);
	cmd.packInt(MpxCmd.PARAMS+2, strmType);
	cmd.packInt(MpxCmd.PARAMS+3, 0);
	cmd.packInt(MpxCmd.PARAMS+4, MRE_FOFS);
	cmd.packInt(MpxCmd.PARAMS+5, MSC_JAVASTRM);
	sendMPX(cmd.getBytes());
    
public void pause()

	setAction(PC_PAUSE);
    
public void play()

	if (fwdSpeed == 1.0f)
	    setAction(PC_PLAY);
	else
	    setAction(PC_FWDSPEED);
    
protected int processCommand(byte[] cmd, int len)

	int	cb[];
	int	i;
	int	flags, type, seq, id;

	// From the byte array convert into integer array.
	// Because byte is signed, simply cb[i] = cmd[i*4]
	// won't do.
	len /= 4;
	cb = new int[len];
	if (BIG_ENDIAN) {
	    for (i = 0; i < len; i++) {
		cb[i] = 0;
		cb[i] |= cmd[i*4] & 0x7f;
		cb[i] |= cmd[i*4] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+1] & 0x7f;
		cb[i] |= cmd[i*4+1] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+2] & 0x7f;
		cb[i] |= cmd[i*4+2] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+3] & 0x7f;
		cb[i] |= cmd[i*4+3] & 0x80;
	    }
	} else {
	    for (i = 0; i < len; i++) {
		cb[i] = 0;
		cb[i] |= cmd[i*4+3] & 0x7f;
		cb[i] |= cmd[i*4+3] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+2] & 0x7f;
		cb[i] |= cmd[i*4+2] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+1] & 0x7f;
		cb[i] |= cmd[i*4+1] & 0x80;
		cb[i] <<= 8;
		cb[i] |= cmd[i*4+0] & 0x7f;
		cb[i] |= cmd[i*4+0] & 0x80;
	    }
	}

	// Check for sync pattern
	for (i = 0; i < 4; i++) {
	    if (cb[i] != MpxCmd.SyncPat[i])
		return 0;
	}

	// Check for version #
	if (cb[MpxCmd.VERSION] != MpxCmd.Version)
	    return 0;

	// Parse command flags
	if ((cb[MpxCmd.FLAGS] >>> 16) != 0xcccc)
	    return 0;
	else
	    flags = cb[MpxCmd.FLAGS] & 0xffff;

	// Ignore if not from mpx.
	if ((flags & MCFL_ORGMPX) == 0)
	    return 0;

	if ((cb[MpxCmd.TYPE] >>> 16) != 0xdddd)
	    return 0;
	else
	    type = cb[MpxCmd.TYPE] & 0xffff;

	if (type != 1 && type != 2)
	    return 0;

	// Check for command size.
	if ((type == 1 && len != 32) || (type == 2 && len != 128))
	    return 0;

	seq = cb[MpxCmd.SEQUENCE];
	id = cb[MpxCmd.ID];

	switch (id & 0xff) {
	case /*MCMD_QSIZE*/30:
	    width = cb[MpxCmd.PARAMS];
	    height = cb[MpxCmd.PARAMS+1];
	    if (useJavaRenderer != 1)
		zoom = cb[MpxCmd.PARAMS+2];
	    cmap = cb[MpxCmd.PARAMS+3];

	    updateSize(width, height);
	    sendEvent(new SizeChangeEvent(this, width, height, 1.0f));

	    break;

	case /*MCMD_QSTATS*/31:
	    long now = System.currentTimeMillis();
	    float delta = (float)(now - lastStatTime)/1000f;
	    long frameCount;

	    lastStatTime = now;
	    size = cb[MpxCmd.PARAMS];
	    loc = (float)(cb[MpxCmd.PARAMS+1]/MULF);
	    time = cb[MpxCmd.PARAMS+2];
	    frameCount = (cb[MpxCmd.PARAMS+3] + cb[MpxCmd.PARAMS+4] +
					cb[MpxCmd.PARAMS+5]);
	    fps = (cb[MpxCmd.PARAMS+3] + cb[MpxCmd.PARAMS+4] +
					cb[MpxCmd.PARAMS+5])/delta;
	    fps = ((int)(fps * 10f))/10f;
	    kbps = ((cb[MpxCmd.PARAMS+6] * 8)/delta) / 1000;
	    kbps = ((int)(kbps * 10))/10f;
	    frames += cb[MpxCmd.PARAMS+3] + cb[MpxCmd.PARAMS+4] +
					cb[MpxCmd.PARAMS+5];
	    //update progress controls
	    if (frameRateControl != null)
		frameRateControl.setFrameRate(fps);
	    if (bitRateControl != null)
		bitRateControl.setBitRate((int)(kbps * 1000));

	    if (loc >= 1.0 && (frames != 0 || strmType == MpxStrm_ASEQ)) {
		pause();
		super.stop();
		startSource(false);
		if (oldLoc < 1.0) {
		    eomDuration = getMediaTime();
		    knownDuration = eomDuration;
		    sendEvent(new EndOfMediaEvent(this, Started, Prefetched,
						  getTargetState(), getMediaTime()));
		    sendEvent( new DurationUpdateEvent(this, eomDuration) );
		}
	    }
	    oldLoc = loc;
	    getDuration();
	    break;

	case /*MCMD_ACK*/10:
	    if (listeners.size() > 0) {
		JmpxAckNotifyEvent ack = new JmpxAckNotifyEvent();
		ack.sequence = seq;
		updateListeners(ack);
	    }
	    break;

	default:
	}

	return id;
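
	The sign-safe byte shuffling above predates java.nio; an equivalent sketch using ByteBuffer (an alternative, not what the shipped code does), honoring the byte order detected by bigEndian(). The class and method names are hypothetical.

	import java.nio.ByteBuffer;
	import java.nio.ByteOrder;

	// Unpack the raw command bytes into 32-bit words.
	final class CmdUnpack {
	    static int[] unpackWords(byte[] cmd, int len, boolean bigEndian) {
	        int words = len / 4;
	        int[] cb = new int[words];
	        ByteBuffer bb = ByteBuffer.wrap(cmd, 0, words * 4)
	            .order(bigEndian ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN);
	        for (int i = 0; i < words; i++)
	            cb[i] = bb.getInt();   // getInt() handles the sign bits that the loop above masks by hand
	        return cb;
	    }
	}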
    
protected int readFromPushStream(byte[] buf, int offset, int nbytes)

	PushSourceStream pss = (PushSourceStream)sourceStream;
	boolean starved = false, paused = false;
	int n;
	long now;

	synchronized (this) {

	    while ((n = pss.read(buf, offset, nbytes)) == 0) {
		// Check to see if there's data available.

		if (!starved) {
		    // Try to wait for a second to see if data will arrive.
		    try {
			wait(1000);
		    } catch (InterruptedException e) {}

		    starved = true;

		} else {
		    // We have already waited for one sec and no data.
		    // We'll need to pause the player and throw the 
		    // restarting event.
		    super.stop();
		    pause();
		    sendEvent(new RestartingEvent(this, Started, 
				Prefetching, Started, getMediaTime()));
		    paused = true;

		    try {
			wait();
		    } catch (InterruptedException e) {}
		}
	    }
	}

	if (paused) {
	    // restart the player if the player had been paused.
	    sendEvent(new StartEvent(this, Prefetched, Started,
			     Started, getMediaTime(),
			     getTimeBase().getTime()));
	}

	// -1 signifies the end of stream.
	if (n == -1)
	    return 0;
	return n;
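
	The PushStreamHandler installed on the push stream in doRealize() does not appear in this listing; a plausible minimal sketch, assuming its only job is to wake the reader blocked in the wait() calls above:

	// Hypothetical inner class of Jmpx (the real one is not shown in this listing).
	class PushStreamHandler implements javax.media.protocol.SourceTransferHandler {
	    public void transferData(javax.media.protocol.PushSourceStream stream) {
	        // New data has arrived; wake up readFromPushStream().
	        notifyPushDataArrival();
	    }
	}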
    
protected int readFromStream(int offset, int nbytes)

	if (tempArray == null)
	    tempArray = new byte[65536];
	return readFromStream(tempArray, offset, nbytes);
    
protected int readFromStream(byte[] buf, int offset, int nbytes)

	if (sourceStream == null) {
	    sendEvent(new ConnectionErrorEvent(this, "No source stream specified!")); 
	    return -1;
	}

	try {
	    if (sourceStream instanceof PullSourceStream)
		return ((PullSourceStream)sourceStream).read(buf, offset, nbytes);

	    else if (sourceStream instanceof PushSourceStream)
		return readFromPushStream(buf, offset, nbytes);

	} catch (IOException e) {
	    // The stream is failing.  Throw an error event.
	    sendEvent(new ConnectionErrorEvent(this, e.getMessage())); 
	}

	return -1;
    
public void removeJmpxListener(JmpxListener cl)

	listeners.removeElement(cl);
    
public synchronized void resizeWindow(int w, int h)

	if (monitor == null) return;
	width = w;
	height = h;
	if (videoformat instanceof RGBFormat) {
	    rgbBuffer = new int[width * height];
	    videoformat = new RGBFormat(new Dimension(width, height),
					width * height,
					Format.intArray,
					30f, 32,
					rMask, gMask, bMask,
					1, width,
					RGBFormat.FALSE,
					RGBFormat.BIG_ENDIAN);

	    outWidth = width;
	    outHeight = height;
	    monitor.setSize(outWidth, outHeight);
	} else {
	    // have to redefine the YUV format
	    videoformat = new YUVFormat(new Dimension(width, height),
				    width * height * 2,
				    Format.byteArray, Format.NOT_SPECIFIED,
				    YUVFormat.YUV_422,
				    width, width / 2, 0, width * height,
				    width * height + width * height / 4);
	    rgbBuffer = new byte[width * height * 2];
	    monitor.setSize(outWidth, outHeight);
	    //System.err.println("Jmpx resized: " + outWidth + "x" + outHeight);
	}
/*
	width = w;
	height = h;
	if (!outputSizeSet) {
	    outWidth = w;
	    outHeight = h;
	}
	monitor.setSize(outWidth, outHeight);
	if (useFastBlt) {
	    if (screenDepth == 8) {
		YUVFormat yuvFormat = new YUVFormat(width, height, width * height * 2, 1,
						    YUVFormat.YUV411);
		yuvFrame = new YUVData(yuvFormat);
		rgbBuffer = (byte []) yuvFrame.getBuffer();
		IndexColorFormat icf = new IndexColorFormat(width, height, width * height, 1,
							    0, null);
		pseudoFrame = new IndexColorData(icf);
		
		if (ditherer == null) {
		    ditherer = new YUVToPseudo();
		    ditherer.initialize( null , monitor);
		}
	    } else {
		rgbFormat = new RGBFormat(width, height, width * height, 4,
					  0x000000FF,
					  0x0000FF00,
					  0x00FF0000,
					  4);
		rgbData = new RGBData(rgbFormat);
		rgbBuffer = (int []) rgbData.getBuffer();
	    }
	} else {
	    rgbBuffer = new int[w * h];
	
	    
	    DirectColorModel cm = new DirectColorModel(24, 0x000000FF,
						       0x0000FF00,
						       0x00FF0000);
	    sourceImage = new MemoryImageSource(w, h, cm, (int [])rgbBuffer, 0, w);
	    sourceImage.setAnimated(true);
	    sourceImage.setFullBufferUpdates(true);
	    displayImage = (monitor).createImage(sourceImage);
	    monitor.prepareImage(displayImage, monitor);    
	}
	
	
	updateSizeChanged(width, height);
	String audio = "";
	if (strmType == MpxStrm_11172)  {
	    audio = JMFI18N.getResource("jmpx.MPEG1-Audio");
	}
	if (strmType == MpxStrm_VSEQ) {
	    audio = JMFI18N.getResource("mediaplayer.N/A");
	}
*/
    
public void run()

	while (!killCommandThread) {
	    listenCommand();
	}
    
public synchronized void seek(double loc)
Seek to an offset into the media. loc is given in the range 0.0 to 1.0.

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_REENTER);
	cmd.packInt(MpxCmd.PARAMS, 0);
	cmd.packInt(MpxCmd.PARAMS+1, (int)(loc * MULF));
	cmd.packInt(MpxCmd.PARAMS+2, strmType);
	cmd.packInt(MpxCmd.PARAMS+3, 0);
	cmd.packInt(MpxCmd.PARAMS+4, MRE_FOFS);
	sendMPX(cmd.getBytes());
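
	For example, a hypothetical call site that jumps to the midpoint of an already-open Jmpx instance:

	static void jumpToMiddle(Jmpx player) {
	    player.seek(0.5);   // 0.0 = start of media, 1.0 = end; mpx receives (int) (0.5 * MULF)
	}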
    
public synchronized void sendAck()

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.FLAGS, MCFL_SNDACK | MpxCmd.FLAGS_PAT);
	cmd.packInt(MpxCmd.ID, MCMD_ACK);
	cmd.packInt(MpxCmd.SEQUENCE, ackSeq);
	sendMPX(cmd.getBytes());
    
private native void sendMPX(byte[] command)

public synchronized void setAction(int act)
Set the action to one of the following types: PC_PLAY, PC_FWDSPEED, PC_FWDSTEP, PC_PAUSE

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_PLAYCTR);
	cmd.packInt(MpxCmd.PARAMS, act);
	cmd.packInt(MpxCmd.PARAMS+1, (int)(fwdSpeed * MULF));
	sendMPX(cmd.getBytes());
    
protected boolean setAudio(int rate, int channels)


	boolean isMuted = false;
	float gain = -1f;

	if (audio != null) {
	    // Return if there's no change in format.  Otherwise, 
	    // re-initialize the audio.
	    if (audioRate == rate && audioChannels == channels) {
		audio.flush();
		audio.resetSamples();
		return true;
	    }
	    isMuted = audio.getMute();
	    gain = (float)audio.getGain();
	    audio.pause();
	    audio.flush();
	    audio.dispose();
	}

	/*
	System.err.println("Jmpx.setAudio(): " + "\n" +
		"    rate = " + rate + "\n" +
		"    channels = " + channels);
	*/

	AudioFormat format = new AudioFormat(
			AudioFormat.LINEAR,
			rate, 
			16,
			channels,
			BIG_ENDIAN ? AudioFormat.BIG_ENDIAN : AudioFormat.LITTLE_ENDIAN,
			AudioFormat.SIGNED);
	audioRate = rate;
	audioChannels = channels;

        audio = new JmpxAudio();

	if (!audio.initialize(format)) {
	    audio = null;
	    return false;
	}

	audio.resume();

	if (timeBase instanceof MPXTimeBase)
	    ((MPXTimeBase)timeBase).reset();

	//--- added by hsy
	if ( gainControl != null ) {
	    audio.setMute(gainControl.getMute());
	    if ( gainControl.getDB() != -1)
		audio.setGain(gainControl.getDB());
	} else {
	    if (isMuted)
		audio.setMute(isMuted);
	    if (gain != -1)
		audio.setGain(gain);
	}

	return true;
    
public synchronized void setAudioMode(int m)

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_PRESCTR);
	cmd.packInt(MpxCmd.PARAMS, PCTR_AMD);
	if ((m & MpxAudioStereo) == MpxAudioStereo)
	    cmd.packInt(MpxCmd.PARAMS+2, 070);
	else if ((m & MpxAudioRight) == MpxAudioRight)
	    cmd.packInt(MpxCmd.PARAMS+2, 060);
	else
	    cmd.packInt(MpxCmd.PARAMS+2, 050);
	sendMPX(cmd.getBytes());
    
public synchronized void setAudioQuality(int q)

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_PRESCTR);
	cmd.packInt(MpxCmd.PARAMS, PCTR_AMD);
	if ((q & MpxAudioHiQ) == MpxAudioHiQ)
	    cmd.packInt(MpxCmd.PARAMS+2, 04);
	else if ((q & MpxAudioMedQ) == MpxAudioMedQ)
	    cmd.packInt(MpxCmd.PARAMS+2, 05);
	else
	    cmd.packInt(MpxCmd.PARAMS+2, 06);
	sendMPX(cmd.getBytes());
    
public void setGain(float g)

	if (audio != null)
	    audio.setGain(g);
    
public synchronized void setGamma(double g)

	MpxCmd	cmd = new MpxCmd();
	cmd.packInt(MpxCmd.ID, MCMD_PRESCTR);
	cmd.packInt(MpxCmd.PARAMS, PCTR_GAM);
	cmd.packInt(MpxCmd.PARAMS+6, (int)(g * MULF));
	sendMPX(cmd.getBytes());
    
public synchronized void setInterleave(boolean l)

	MpxCmd	cmd = new MpxCmd();
	int	arg;
	cmd.packInt(MpxCmd.ID, MCMD_PRESCTR);
	cmd.packInt(MpxCmd.PARAMS, PCTR_VMD);
	if (l)
	    arg = VDM_COLB;
	else
	    arg = VDM_COL;
	arg <<= 8;
	if (useJavaRenderer == 1)
	    arg |= 1;
	else
	    arg |= zoom;
	cmd.packInt(MpxCmd.PARAMS+1, arg);
	sendMPX(cmd.getBytes());
    
public void setMediaTime(javax.media.Time t)
This is based on a rough calculation: from the current media time and the current offset into the media, we can estimate the new offset for a given new media time.


	if ((getState() == Unrealized) || (getState() == Realizing))
	    throw new NotRealizedError("Cannot set media time on an unrealized controller");
	Time currentMediaTime = getMediaTime();
	requestedMediaTime = t;
	mediaTimeChanged = true;
	super.setMediaTime(t);
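
	The rough calculation referenced above is carried out in trySetMediaTime() further down: the requested time becomes a fraction of the (possibly estimated) duration. A worked sketch with hypothetical numbers and a hypothetical helper name:

	// Map a 30 s target into a fractional offset of a 120 s stream.
	static double toFraction(double secGoto, double secDuration) {
	    double newLoc = secGoto / secDuration;   // 30.0 / 120.0 = 0.25
	    if (newLoc > 1.0) newLoc = 1.0;          // same sanity check as trySetMediaTime()
	    return newLoc;                           // seek(newLoc) then asks mpx for that offset
	}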
    
public void setMute(boolean m)

	if (audio != null)
	    audio.setMute(m);
    
public void setSource(javax.media.protocol.DataSource source)

	if (!(source instanceof javax.media.protocol.PullDataSource) && 
	    !(source instanceof javax.media.protocol.PushDataSource)) {
	    System.out.println("Unsupported data source: " + source);
	    throw new IncompatibleSourceException(this + ": doesn't support " + source);
	} else
	    this.source = source;
    
private native void setStream(javax.media.protocol.SourceStream m)

public void setTimeBase(javax.media.TimeBase tb)

	super.setTimeBase(tb);

	// We can't support any time base other than the ones
	// that are based on JavaSound rendering.
	if (tb != null && tb != timeBase) {
	    /**
	     Allow this to silently go through so addController will be
	     slightly more useful.
	     --ivg
	    throw new IncompatibleTimeBaseException("the mpeg handler cannot handle the given timebase.");
	     */
	}
	timeBase = tb;
    
public boolean startSource(boolean on)

	if (sourceIsOn == on)
	    return true;
	try {
	    if (on)
		source.start();
	    else
		source.stop();
	} catch (Exception ge) {
	    System.err.println("Couldn't stop the data source");
	    return false;
	}
	sourceIsOn = on;
	return true;
    
public void stop()

	super.stop();
	pause();
	startSource(false);
	if (timeBase instanceof MPXTimeBase)
	    ((MPXTimeBase)timeBase).reset();
	sendEvent((StopEvent)new StopByRequestEvent(this, Started,
						    Prefetched,
						    getTargetState(),
						    getMediaTime()));
    
protected int trySetMediaTime(javax.media.Time t)


	Time duration = getDuration();
	double secGoto = t.getSeconds();
	double secDuration = duration.getSeconds();

	// Do we know the duration?
	if (duration != DURATION_UNKNOWN) {
	    // Are we seeking outside the media's duration?
	    if (secGoto > secDuration)
		return TRYSET_PASTEOM;
	} else if (secGoto != 0)
	    return TRYSET_CANT;
	
	// Streams are handled differently.
	if (sourceStream != null) {
	    if (sourceStream instanceof Seekable) {
		Seekable seeker = (Seekable) sourceStream;
		if (secGoto == 0.0) {
		    seeker.seek(0); // seek into the stream
		    seek(0);
		    return TRYSET_DONE;
		} else {
		    if (seeker.isRandomAccess() && getContentLength() > 0) {
			double newLoc = secGoto / secDuration;
			long   seekLoc = (long)(newLoc * getContentLength());

			seeker.seek(seekLoc);
			seek(newLoc);
			return TRYSET_DONE;
		    } else
			return TRYSET_CANT;
		}
	    } else
		return TRYSET_DONE;			      // Not seekable. just ignore the seek. ???
	} else {				      // File seek
	    
	    // Handles the obvious case.
	    if (secGoto == 0.0) {
		seek(0);
		return TRYSET_DONE;
	    }
	    
	    double newloc = secGoto / secDuration;
	    // System.err.println("Newloc = " + newloc);
	    // Sanity check
	    if (newloc > 1.0) newloc = 1.0;
	    
	    seek(newloc);
	    return TRYSET_DONE;
	}
    
public void updateListeners(JmpxEvent evt)

	for (int i = 0; i < listeners.size(); i++) {
	    ((JmpxListener)listeners.elementAt(i)).jmpxUpdate(evt);
	}
    
private void updateSize(int width, int height)


    
public void updateSizeChanged(int width, int height)

	sendEvent(new SizeChangeEvent(this, width, height, 1.0f));
    
public synchronized void zoom(int z)

	if (useJavaRenderer != 1) {
	    MpxCmd	cmd = new MpxCmd();
	    int	arg;
	    cmd.packInt(MpxCmd.ID, MCMD_PRESCTR);
	    cmd.packInt(MpxCmd.PARAMS, PCTR_VMD);
	    if (interleaved)
		arg = VDM_COLB;
	    else
		arg = VDM_COL;
	    arg <<= 8;
	    arg |= z;
	    cmd.packInt(MpxCmd.PARAMS+1, arg);
	    sendMPX(cmd.getBytes());
	} else {
	    java_zoom = (float) z;
	}
    
public synchronized void zoomIn()

	if (useJavaRenderer == 1) {
	    if (java_zoom >= (float) 2.0)
		return;
	    java_zoom = java_zoom * 2;
	    updateSizeChanged(width, height);
	} else {
	    zoom(zoom + 1);
	}
    
public synchronized void zoomOut()

	if (useJavaRenderer == 1) {
	    if (java_zoom <= (float) 0.25)
		return;
	    java_zoom = java_zoom / (float) 2.0;
	    updateSizeChanged(width, height);
	} else {
	    if (zoom <= 1)
		return;
	    zoom(zoom - 1);
	}