File: AudioRenderer.java (API Doc, JMF 2.1.1e, 13563 bytes, Mon May 12 12:20:48 BST 2003)
Package: com.sun.media.renderer.audio

AudioRenderer

public abstract class AudioRenderer extends BasicPlugIn implements Renderer, Prefetchable, Drainable, Clock
version 1.23, 98/12/23

Fields Summary
Format[] supportedFormats
protected AudioFormat inputFormat
protected AudioFormat devFormat
protected AudioOutput device
protected TimeBase timeBase
protected boolean started
protected boolean prefetched
protected boolean resetted
protected boolean devicePaused
protected GainControl gainControl
protected BufferControl bufferControl
protected Control peakVolumeMeter
protected long bytesWritten
protected int bytesPerSec
private Object writeLock
long mediaTimeAnchor
long startTime
long stopTime
long ticksSinceLastReset
float rate
TimeBase master
static int DefaultMinBufferSize
static int DefaultMaxBufferSize
long bufLenReq
Constructors Summary
public AudioRenderer()


      
        timeBase = new AudioTimeBase(this);
	bufferControl = new BC(this);
    
Methods Summary
protected boolean checkInput(javax.media.Buffer buffer)


	javax.media.Format format = buffer.getFormat();

	// Here comes the real JavaSound processing.

	// Initialize the device if it's not already initialized, or if the
	// input format has changed.
	if (device == null || devFormat == null || !devFormat.equals(format)) {
	    if (!initDevice((javax.media.format.AudioFormat)format)) {
		// failed to initialize the device.
		buffer.setDiscard(true);
		return false;
	    }
	    devFormat = (AudioFormat)format;
	}

	return true;
    
public void close()

	stop();
	if (device != null) {
	    pauseDevice();
	    device.flush();
	    mediaTimeAnchor = getMediaNanoseconds();
	    ticksSinceLastReset = 0;
	    device.dispose();
	}
	device = null;
    
public int computeBufferSize(javax.media.format.AudioFormat f)


	long bytesPerSecond = (long)(f.getSampleRate() * f.getChannels() * 
					f.getSampleSizeInBits() / 8);
	long bufSize;
	long bufLen;   // in milliseconds.

	// System.out.println("bytesPerSecond is " + bytesPerSecond);

	if (bufLenReq < DefaultMinBufferSize)
	    bufLen = DefaultMinBufferSize;
	else if (bufLenReq > DefaultMaxBufferSize)
	    bufLen = DefaultMaxBufferSize;
	else
	    bufLen = bufLenReq;

	float r = bufLen/1000f;

	bufSize = (long)(bytesPerSecond * r);

	//System.out.println("Render buffer size: " + bufSize);

	return (int)bufSize;
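The math here is just bytes-per-second times the requested buffer length in seconds, with the millisecond request clamped between the default minimum and maximum. A rough worked example, assuming 16-bit stereo linear audio at 44.1 kHz and a 250 ms request; renderer stands for any concrete AudioRenderer instance and the 250 ms value is made up for illustration:

	// Illustration only; assumes bufLenReq is 250 ms and inside the clamp bounds.
	javax.media.format.AudioFormat cd = new javax.media.format.AudioFormat(
		javax.media.format.AudioFormat.LINEAR, 44100, 16, 2);
	// bytesPerSecond = 44100 * 2 * 16 / 8 = 176400
	// bufLen = 250 ms  ->  r = 0.25
	// bufSize = 176400 * 0.25 = 44100 bytes
	int bytes = renderer.computeBufferSize(cd);   // ~44100 under these assumptions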
    
protected abstract com.sun.media.renderer.audio.device.AudioOutput createDevice(javax.media.format.AudioFormat format)
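Each concrete renderer supplies the platform audio device here. A minimal sketch of what an override might look like; HypotheticalOutput is a made-up AudioOutput implementation, not a real JMF class:

	protected com.sun.media.renderer.audio.device.AudioOutput
		createDevice(javax.media.format.AudioFormat format) {
	    // Sketch only: return an unopened device; initDevice() will call
	    // initialize(format, computeBufferSize(format)) on it afterwards.
	    return new HypotheticalOutput();
	}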

protected int doProcessData(javax.media.Buffer buffer)


	byte data[] = (byte [])buffer.getData();
	int remain = buffer.getLength();
	int off = buffer.getOffset();
	int len = 0;

	synchronized (this) {
	  if (!started) {

	    // If we are not marked as started, we'll pause the device here.
	    if (!devicePaused)
		pauseDevice();

	    // This is the prefetching state.
	    // We'll write only as much as the device can accept
	    // so we won't block.

	    // We are in the prefetching cycle now.  Turn off resetted.
	    resetted = false;

	    int available = device.bufferAvailable();

	    if (available > remain)
		available = remain;

	    if (available > 0) {
		len = device.write(data, off, available);
		bytesWritten += len;
	    }

	    buffer.setLength(remain - len);
	    if (buffer.getLength() > 0 || buffer.isEOM()) {
		buffer.setOffset(off + len);
		prefetched = true;
		return INPUT_BUFFER_NOT_CONSUMED;
	    } else {
		return BUFFER_PROCESSED_OK;
	    }
	  }
	}

	// Guard against pausing the device in the middle of a write
	// thus blocking the entire thread.
	synchronized (writeLock) {

	    if (devicePaused)
		return PlugIn.INPUT_BUFFER_NOT_CONSUMED;

	    try {
		while (remain > 0 && !resetted) {
		    // device.write is blocking.  If the device
		    // has not been started and the device's
		    // internal buffer is filled, then it will block.
		    len = device.write(data, off, remain);
		    bytesWritten += len;
		    off += len; remain -= len;
		}
	    } catch (NullPointerException e) {
		return BUFFER_PROCESSED_OK;
	    }
	}

	buffer.setLength(0);
	buffer.setOffset(0);

	return BUFFER_PROCESSED_OK;
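Since doProcessData can hand back INPUT_BUFFER_NOT_CONSUMED while prefetching or paused, a caller is expected to re-submit the same buffer (with its updated offset and length) until it is consumed. A simplified usage sketch, where renderer is assumed to be an opened AudioRenderer and buffer a filled input Buffer; a real player would normally wait or re-check its state between retries rather than spin:

	int rc;
	do {
	    rc = renderer.process(buffer);   // may return INPUT_BUFFER_NOT_CONSUMED
	} while (rc == javax.media.PlugIn.INPUT_BUFFER_NOT_CONSUMED);
	if (rc == javax.media.PlugIn.BUFFER_PROCESSED_FAILED) {
	    // give up on this buffer, e.g. close the renderer
	}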
    
public synchronized void drain()

	if (started && device != null)
	    device.drain();
	prefetched = false;
    
public java.lang.Object[] getControls()

	Control c[] = new Control[] { 
				gainControl,
				bufferControl
				};
	return c;
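Only the gain and buffer controls are published. A caller typically scans the returned array for the control type it needs; a small hedged sketch, with renderer standing for any AudioRenderer instance:

	Object[] controls = renderer.getControls();
	for (int i = 0; i < controls.length; i++) {
	    if (controls[i] instanceof javax.media.GainControl) {
	        ((javax.media.GainControl) controls[i]).setLevel(0.8f);   // 80% volume
	    }
	}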
    
public long getLatency()

	long ts = bytesWritten * 1000/bytesPerSec * 1000000;
	return ts - getMediaNanoseconds();
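The first term converts the bytes handed to the device into nanoseconds of audio (bytesWritten * 1000 / bytesPerSec gives milliseconds, times 1000000 for nanoseconds); subtracting the media time already rendered leaves the amount still queued in the device. With illustrative numbers for 16-bit stereo at 44.1 kHz:

	// bytesPerSec  = 176400
	// bytesWritten = 352800  ->  352800 * 1000 / 176400 = 2000 ms written
	//                        ->  ts = 2000 * 1000000 = 2 000 000 000 ns
	// getMediaNanoseconds() = 1 500 000 000 ns already played
	// latency = 2 000 000 000 - 1 500 000 000 = 500 000 000 ns (0.5 s buffered)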
    
public long getMediaNanoseconds()

	return mediaTimeAnchor + 
		(device != null ? device.getMediaNanoseconds() : 0) - 
		ticksSinceLastReset;
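Media time is the anchor recorded at the last start, seek, or reset, plus the device's own sample clock, minus whatever that clock already read at the last reset. A short worked example with made-up values:

	// mediaTimeAnchor              = 5 000 000 000 ns (anchored at 5 s)
	// device.getMediaNanoseconds() = 7 250 000 000 ns (device clock)
	// ticksSinceLastReset          = 7 000 000 000 ns (device clock at the reset)
	// media time = 5e9 + 7.25e9 - 7e9 = 5 250 000 000 ns, i.e. 5.25 s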
    
public javax.media.Time getMediaTime()

	return new Time(getMediaNanoseconds());
    
public float getRate()

	return rate;
    
public javax.media.Time getStopTime()

	return new Time(stopTime);
    
public javax.media.Format[] getSupportedInputFormats()

	return supportedFormats;
    
public javax.media.Time getSyncTime()

	return new Time(0);
    
public javax.media.TimeBase getTimeBase()

	if (master != null)
	    return master;
	else
	    return timeBase;
    
protected boolean initDevice(javax.media.format.AudioFormat format)


	if (format == null) {
	    System.err.println("AudioRenderer: ERROR: Unknown AudioFormat");
	    return false;
	}

	if (format.getSampleRate() == AudioFormat.NOT_SPECIFIED ||
	    format.getSampleSizeInBits() == AudioFormat.NOT_SPECIFIED) {
	    Log.error("Cannot initialize audio renderer with format: " + format);
	    return false;
	}

	// Close the old device.
	if (device != null) {

	    device.drain();
	    pauseDevice();

	    // Adjust the media time anchor, since the device (and with it
	    // the sample count) is re-initialized.
	    mediaTimeAnchor = getMediaNanoseconds();
	    ticksSinceLastReset = 0;

	    device.dispose();
	    device = null;
	}

	/*
	System.out.println("sampleRate is " + format.getSampleRate());
	System.out.println("sampleSize is " + sampleSize);
	System.out.println("SamplePerUnit is " + SamplePerUnit);
	System.out.println("channels is " + format.getChannels());
	System.out.println("encoding is " + format.getEncoding());
	System.out.println("bigendian is " + format.isBigEndian());
	System.out.println("signed is " + format.isSigned());
	*/

	// Create AudioPlay based on the format and the current platform.
	AudioFormat audioFormat = new
	    AudioFormat(
		   format.getEncoding(),
		   format.getSampleRate(),
		   format.getSampleSizeInBits(),
		   format.getChannels(),
		   format.getEndian(),
		   format.getSigned()
		 );

	device = createDevice(audioFormat);

	if (device == null || 
	    !device.initialize(audioFormat, computeBufferSize(audioFormat))) {
	    device = null;
	    return false;
	}

        device.setMute(gainControl.getMute());
	device.setGain(gainControl.getDB());

	if (rate != 1.0f) {
	    if (rate != device.setRate(rate)) {
		System.err.println("The AudioRenderer does not support the given rate: " + rate);
		device.setRate(1.0f);
	    }
	}

	if (started)
	    resumeDevice();

	bytesPerSec = (int)(format.getSampleRate() * format.getChannels() * 
			format.getSampleSizeInBits() / 8);

	return true;
    
public boolean isPrefetched()

	return prefetched;
    
public javax.media.Time mapToTimeBase(javax.media.Time t)

	return new Time((long)((t.getNanoseconds() - mediaTimeAnchor)/rate) + startTime);
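The media-time offset from the anchor is rescaled by the playback rate and shifted by the time-base time at which playback started. For example, with made-up values:

	// t (media time)  = 8 000 000 000 ns
	// mediaTimeAnchor = 5 000 000 000 ns, rate = 2.0
	// startTime       = 1 000 000 000 ns on the time base
	// mapped time     = (8e9 - 5e9) / 2.0 + 1e9 = 2 500 000 000 ns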
    
synchronized void pauseDevice()

	if (!devicePaused && device != null) {
	    device.pause();
	    devicePaused = true;
	}
	if (timeBase instanceof AudioTimeBase)
	    ((AudioTimeBase)timeBase).mediaStopped();
    
public int process(javax.media.Buffer buffer)

	int rtn = processData(buffer);
	if (buffer.isEOM() && rtn != INPUT_BUFFER_NOT_CONSUMED) {
	    // EOM.
	    drain();
	    pauseDevice();
	}
	return rtn;
    
protected void processByWaiting(javax.media.Buffer buffer)


	synchronized (this) {
	    // If not yet started, it's in the prefetching state,
	    // do not consume the data bits.
	    if (!started) {
		prefetched = true;
		return;
	    }
	}

	javax.media.format.AudioFormat format =
		(javax.media.format.AudioFormat)buffer.getFormat();

	int sampleRate = (int)format.getSampleRate();
	int sampleSize = format.getSampleSizeInBits();
	int channels = format.getChannels();
	int timeToWait;
	long duration;

	duration = buffer.getLength() * 1000 /
			((sampleSize/8) * sampleRate * channels);
	timeToWait = (int)((float)duration/getRate());
	/*
	System.err.println("sampleSize = " + sampleSize +
		" sampleRate = " + sampleRate +
		" channels = " + channels +
		" length = " + buffer.getLength() +
		" offset = " + buffer.getOffset() +
		" timeToWait = " + timeToWait);
	*/
	try {
	    Thread.sleep(timeToWait);
	} catch (Exception e) {}

	buffer.setLength(0);
	buffer.setOffset(0);

	mediaTimeAnchor += duration * 1000000;
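The duration is the buffer length divided by the byte rate, in milliseconds; the thread sleeps for that duration scaled by the playback rate, and the media-time anchor then advances by the full, unscaled duration. A worked example for 16-bit stereo at 44.1 kHz:

	// length = 88200 bytes, sampleSize = 16, channels = 2, sampleRate = 44100
	// duration   = 88200 * 1000 / ((16/8) * 44100 * 2) = 500 ms
	// timeToWait = 500 / rate  ->  250 ms at rate 2.0
	// mediaTimeAnchor += 500 * 1000000 = 500 000 000 ns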
    
protected int processData(javax.media.Buffer buffer)


	if (!checkInput(buffer))
	    return BUFFER_PROCESSED_FAILED;

	return doProcessData(buffer);
    
public void reset()

	resetted = true;

	// Mark the media time before reset.
	mediaTimeAnchor = getMediaNanoseconds();

	if (device != null) {
	    device.flush();
	    ticksSinceLastReset = device.getMediaNanoseconds();
	} else
	    ticksSinceLastReset = 0;

	prefetched = false;
    
synchronized void resumeDevice()

	if (timeBase instanceof AudioTimeBase)
	    ((AudioTimeBase)timeBase).mediaStarted();
	if (devicePaused && device != null) {
	    device.resume();
	    devicePaused = false;
	}
    
public javax.media.Format setInputFormat(javax.media.Format format)

	for (int i = 0; i < supportedFormats.length; i++) {
	    if (supportedFormats[i].matches(format)) {
		inputFormat = (AudioFormat)format;
		return format;
	    }
	}
	return null;
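A caller negotiates the input format by offering one that matches an entry in supportedFormats; a null return means the renderer cannot accept it. A hedged usage sketch, with renderer again standing for a concrete AudioRenderer:

	javax.media.Format offered = new javax.media.format.AudioFormat(
		javax.media.format.AudioFormat.LINEAR, 44100, 16, 2);
	if (renderer.setInputFormat(offered) == null) {
	    // format rejected; try another format or fail the connection
	}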
    
public void setMediaTime(javax.media.Time now)

	mediaTimeAnchor = now.getNanoseconds();
    
public float setRate(float factor)

	if (device != null)
	    rate = device.setRate(factor);
	else
	    rate = 1.0f;
	return rate;
    
public void setStopTime(javax.media.Time t)

	stopTime = t.getNanoseconds();
    
public void setTimeBase(javax.media.TimeBase master)


	if (!(master instanceof AudioTimeBase)) {
	    Log.warning("AudioRenderer cannot be controlled by time bases other than its own: " + master);
	   /**
	    Silently allows the time base to be set to make
	    addController slightly more useful.
	    --ivg
	    throw new IncompatibleTimeBaseException();
	    */
	}

	this.master = master;
    
public void start()

	syncStart(getTimeBase().getTime());
    
public synchronized void stop()

	started = false;
	prefetched = false;

	// Guard against pausing in the middle of a write.
	synchronized (writeLock) {
	    pauseDevice();
	}
    
public synchronized void syncStart(javax.media.Time at)

	// It doesn't really do syncStart right now.  It just starts
	// it right away.
	started = true;
	prefetched = true;
	resetted = false;
	resumeDevice();
	startTime = at.getNanoseconds();