Methods Summary |
---|
protected boolean | checkInput(javax.media.Buffer buffer)
javax.media.Format format = buffer.getFormat();
// Here comes the real JavaSound processing.
// Initialize the device if it's not already initialized or if the
// input format changes.
if (device == null || devFormat == null || !devFormat.equals(format)) {
    if (!initDevice((javax.media.format.AudioFormat)format)) {
        // Failed to initialize the device.
        buffer.setDiscard(true);
        return false;
    }
    devFormat = (AudioFormat)format;
}
return true;
|
public void | close()
stop();
if (device != null) {
    pauseDevice();
    device.flush();
    mediaTimeAnchor = getMediaNanoseconds();
    ticksSinceLastReset = 0;
    device.dispose();
}
device = null;
|
public int | computeBufferSize(javax.media.format.AudioFormat f)
long bytesPerSecond = (long)(f.getSampleRate() * f.getChannels() *
                             f.getSampleSizeInBits() / 8);
long bufSize;
long bufLen; // in milliseconds.
// System.out.println("bytesPerSecond is " + bytesPerSecond);
if (bufLenReq < DefaultMinBufferSize)
    bufLen = DefaultMinBufferSize;
else if (bufLenReq > DefaultMaxBufferSize)
    bufLen = DefaultMaxBufferSize;
else
    bufLen = bufLenReq;
float r = bufLen / 1000f;
bufSize = (long)(bytesPerSecond * r);
// System.out.println("Render buffer size: " + bufSize);
return (int)bufSize;
|
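A worked example of the buffer-size arithmetic above, as a minimal sketch; the 44.1 kHz, 16-bit stereo format and the 250 ms clamped buffer length are illustrative assumptions, not values taken from this class.

    public class BufferSizeExample {
        public static void main(String[] args) {
            long bytesPerSecond = (long)(44100 * 2 * 16 / 8);    // 176400 bytes per second
            long bufLenMillis = 250;                             // assumed result of the min/max clamp
            long bufSize = (long)(bytesPerSecond * (bufLenMillis / 1000f));
            System.out.println(bufSize);                         // prints 44100
        }
    }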
protected abstract com.sun.media.renderer.audio.device.AudioOutput | createDevice(javax.media.format.AudioFormat format)
|
protected int | doProcessData(javax.media.Buffer buffer)
byte data[] = (byte[])buffer.getData();
int remain = buffer.getLength();
int off = buffer.getOffset();
int len = 0;
synchronized (this) {
    if (!started) {
        // If we are not marked as started, we'll pause the device here.
        if (!devicePaused)
            pauseDevice();
        // This is the prefetching state.
        // Write only as much as the device can take so we won't block.
        // We are in a prefetching cycle now; turn off the resetted flag.
        resetted = false;
        int available = device.bufferAvailable();
        if (available > remain)
            available = remain;
        if (available > 0) {
            len = device.write(data, off, available);
            bytesWritten += len;
        }
        buffer.setLength(remain - len);
        if (buffer.getLength() > 0 || buffer.isEOM()) {
            buffer.setOffset(off + len);
            prefetched = true;
            return INPUT_BUFFER_NOT_CONSUMED;
        } else {
            return BUFFER_PROCESSED_OK;
        }
    }
}
// Guard against pausing the device in the middle of a write,
// thus blocking the entire thread.
synchronized (writeLock) {
    if (devicePaused)
        return PlugIn.INPUT_BUFFER_NOT_CONSUMED;
    try {
        while (remain > 0 && !resetted) {
            // device.write is blocking. If the device has not been
            // started and the device's internal buffer is filled,
            // then it will block.
            len = device.write(data, off, remain);
            bytesWritten += len;
            off += len; remain -= len;
        }
    } catch (NullPointerException e) {
        return BUFFER_PROCESSED_OK;
    }
}
buffer.setLength(0);
buffer.setOffset(0);
return BUFFER_PROCESSED_OK;
|
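A minimal, hypothetical sketch of the prefetch-time write pattern used by doProcessData: while the renderer is not yet started, only as many bytes as the device can accept without blocking are written, and the buffer's offset and length are advanced so the caller re-submits the remainder. The Sink interface and the constant values below are stand-ins for the real AudioOutput device and PlugIn constants, not the actual API.

    interface Sink {
        int bufferAvailable();                      // bytes the device can take without blocking
        int write(byte[] data, int off, int len);   // returns bytes actually written
    }

    class PrefetchWriteSketch {
        static final int BUFFER_PROCESSED_OK = 0;          // illustrative values only
        static final int INPUT_BUFFER_NOT_CONSUMED = 1;

        // Writes what fits, updates offset/length in place, and tells the caller
        // whether it needs to hand the same buffer back later.
        static int prefetchWrite(Sink device, byte[] data, int[] offLen) {
            int off = offLen[0], remain = offLen[1];
            int avail = Math.min(device.bufferAvailable(), remain);
            int len = (avail > 0) ? device.write(data, off, avail) : 0;
            offLen[0] = off + len;
            offLen[1] = remain - len;
            return (offLen[1] > 0) ? INPUT_BUFFER_NOT_CONSUMED : BUFFER_PROCESSED_OK;
        }
    }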
public synchronized void | drain()
if (started && device != null)
    device.drain();
prefetched = false;
|
public java.lang.Object[] | getControls()
Control c[] = new Control[] {
    gainControl,
    bufferControl
};
return c;
|
public long | getLatency()
long ts = bytesWritten * 1000/bytesPerSec * 1000000;
return ts - getMediaNanoseconds();
|
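A worked example of the latency arithmetic in getLatency, under assumed numbers: at 176400 bytes per second (44.1 kHz, 16-bit stereo), 882000 written bytes correspond to 5 s of queued audio; if 4.9 s have already been rendered, the latency is 100 ms.

    class LatencyExample {
        public static void main(String[] args) {
            long bytesWritten = 882000L;
            int  bytesPerSec  = 176400;                             // 44100 * 2 channels * 16 bits / 8
            long mediaNanos   = 4900000000L;                        // assumed getMediaNanoseconds() value
            long ts = bytesWritten * 1000 / bytesPerSec * 1000000;  // 5000000000 ns of audio queued
            System.out.println(ts - mediaNanos);                    // 100000000 ns, i.e. 100 ms
        }
    }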
public long | getMediaNanoseconds()
return mediaTimeAnchor +
(device != null ? device.getMediaNanoseconds() : 0) -
ticksSinceLastReset;
|
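A worked example of the bookkeeping in getMediaNanoseconds, with assumed values: the anchor was set to 3 s at the last reset, when the device had reported 4 s of rendered samples; once the device has reported 5 s in total, the media time is 3 + 5 - 4 = 4 s.

    class MediaTimeExample {
        public static void main(String[] args) {
            long mediaTimeAnchor     = 3000000000L;   // captured by reset()/setMediaTime()
            long deviceNanos         = 5000000000L;   // assumed device.getMediaNanoseconds()
            long ticksSinceLastReset = 4000000000L;   // device reading at the last reset
            System.out.println(mediaTimeAnchor + deviceNanos - ticksSinceLastReset);   // 4000000000
        }
    }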
public javax.media.Time | getMediaTime()
return new Time(getMediaNanoseconds());
|
public float | getRate()
return rate;
|
public javax.media.Time | getStopTime()
return new Time(stopTime);
|
public javax.media.Format[] | getSupportedInputFormats()
return supportedFormats;
|
public javax.media.Time | getSyncTime()
return new Time(0);
|
public javax.media.TimeBase | getTimeBase()
if (master != null)
    return master;
else
    return timeBase;
|
protected boolean | initDevice(javax.media.format.AudioFormat format)
if (format == null) {
    System.err.println("AudioRenderer: ERROR: Unknown AudioFormat");
    return false;
}
if (format.getSampleRate() == AudioFormat.NOT_SPECIFIED ||
    format.getSampleSizeInBits() == AudioFormat.NOT_SPECIFIED) {
    Log.error("Cannot initialize audio renderer with format: " + format);
    return false;
}
// Close the old device.
if (device != null) {
    device.drain();
    pauseDevice();
    // Adjust the media time anchor since the device, and with it the
    // sample count, is re-initialized.
    mediaTimeAnchor = getMediaNanoseconds();
    ticksSinceLastReset = 0;
    device.dispose();
    device = null;
}
/*
System.out.println("sampleRate is " + format.getSampleRate());
System.out.println("sampleSize is " + sampleSize);
System.out.println("SamplePerUnit is " + SamplePerUnit);
System.out.println("channels is " + format.getChannels());
System.out.println("encoding is " + format.getEncoding());
System.out.println("bigendian is " + format.isBigEndian());
System.out.println("signed is " + format.isSigned());
*/
// Create the output device based on the format and the current platform.
AudioFormat audioFormat = new AudioFormat(
    format.getEncoding(),
    format.getSampleRate(),
    format.getSampleSizeInBits(),
    format.getChannels(),
    format.getEndian(),
    format.getSigned()
);
device = createDevice(audioFormat);
if (device == null ||
    !device.initialize(audioFormat, computeBufferSize(audioFormat))) {
    device = null;
    return false;
}
device.setMute(gainControl.getMute());
device.setGain(gainControl.getDB());
if (rate != 1.0f) {
    if (rate != device.setRate(rate)) {
        System.err.println("The AudioRenderer does not support the given rate: " + rate);
        device.setRate(1.0f);
    }
}
if (started)
    resumeDevice();
bytesPerSec = (int)(format.getSampleRate() * format.getChannels() *
                    format.getSampleSizeInBits() / 8);
return true;
|
public boolean | isPrefetched()
return prefetched;
|
public javax.media.Time | mapToTimeBase(javax.media.Time t)
return new Time((long)((t.getNanoseconds() - mediaTimeAnchor)/rate) + startTime);
|
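A worked example of mapToTimeBase with assumed values: at rate 2.0, with mediaTimeAnchor at 2 s and startTime at 10 s, a media time of 6 s maps to 10 + (6 - 2) / 2 = 12 s on the time base.

    class MapToTimeBaseExample {
        public static void main(String[] args) {
            long  mediaTimeAnchor = 2000000000L;
            long  startTime       = 10000000000L;
            float rate            = 2.0f;
            long  t               = 6000000000L;    // media time to map
            long  tb = (long)((t - mediaTimeAnchor) / rate) + startTime;
            System.out.println(tb);                 // 12000000000
        }
    }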
synchronized void | pauseDevice()
if (!devicePaused && device != null) {
    device.pause();
    devicePaused = true;
}
if (timeBase instanceof AudioTimeBase)
    ((AudioTimeBase)timeBase).mediaStopped();
|
public int | process(javax.media.Buffer buffer)
int rtn = processData(buffer);
if (buffer.isEOM() && rtn != INPUT_BUFFER_NOT_CONSUMED) {
    // End of media: drain what has been written and pause the device.
    drain();
    pauseDevice();
}
return rtn;
|
protected void | processByWaiting(javax.media.Buffer buffer)
synchronized (this) {
    // If not yet started, we are in the prefetching state;
    // do not consume the data.
    if (!started) {
        prefetched = true;
        return;
    }
}
javax.media.format.AudioFormat format =
    (javax.media.format.AudioFormat)buffer.getFormat();
int sampleRate = (int)format.getSampleRate();
int sampleSize = format.getSampleSizeInBits();
int channels = format.getChannels();
int timeToWait;
long duration;
duration = buffer.getLength() * 1000 /
    ((sampleSize / 8) * sampleRate * channels);
timeToWait = (int)((float)duration / getRate());
/*
System.err.println("sampleSize = " + sampleSize +
                   " sampleRate = " + sampleRate +
                   " channels = " + channels +
                   " length = " + buffer.getLength() +
                   " offset = " + buffer.getOffset() +
                   " timeToWait = " + timeToWait);
*/
try {
    Thread.sleep(timeToWait);
} catch (Exception e) {}
buffer.setLength(0);
buffer.setOffset(0);
mediaTimeAnchor += duration * 1000000;
|
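A worked example of the wait-time arithmetic in processByWaiting, assuming a 17640-byte buffer of 44.1 kHz, 16-bit stereo audio at rate 1.0: the buffer holds 17640 / (2 * 44100 * 2) seconds, i.e. 100 ms, so the thread sleeps for roughly that long before discarding the data.

    class WaitTimeExample {
        public static void main(String[] args) {
            int sampleRate = 44100, sampleSize = 16, channels = 2;
            int length = 17640;                      // bytes in the buffer
            long duration = length * 1000L / ((sampleSize / 8) * sampleRate * channels);
            System.out.println(duration);            // 100 milliseconds
        }
    }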
protected int | processData(javax.media.Buffer buffer)
if (!checkInput(buffer))
    return BUFFER_PROCESSED_FAILED;
return doProcessData(buffer);
|
public void | reset()
resetted = true;
// Mark the media time before the reset.
mediaTimeAnchor = getMediaNanoseconds();
if (device != null) {
    device.flush();
    ticksSinceLastReset = device.getMediaNanoseconds();
} else
    ticksSinceLastReset = 0;
prefetched = false;
|
synchronized void | resumeDevice()
if (timeBase instanceof AudioTimeBase)
    ((AudioTimeBase)timeBase).mediaStarted();
if (devicePaused && device != null) {
    device.resume();
    devicePaused = false;
}
|
public javax.media.Format | setInputFormat(javax.media.Format format)
for (int i = 0; i < supportedFormats.length; i++) {
    if (supportedFormats[i].matches(format)) {
        inputFormat = (AudioFormat)format;
        return format;
    }
}
return null;
|
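A hypothetical usage sketch for setInputFormat: a caller proposes a linear PCM format and checks whether the renderer accepted it. The 44.1 kHz, 16-bit stereo format and the negotiate helper are assumptions for illustration, not part of this class.

    import javax.media.Format;
    import javax.media.Renderer;
    import javax.media.format.AudioFormat;

    class FormatNegotiationSketch {
        // Returns the accepted format, or null if nothing in
        // getSupportedInputFormats() matched the proposal.
        static Format negotiate(Renderer renderer) {
            AudioFormat linear = new AudioFormat(AudioFormat.LINEAR, 44100.0, 16, 2);
            return renderer.setInputFormat(linear);
        }
    }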
public void | setMediaTime(javax.media.Time now)
mediaTimeAnchor = now.getNanoseconds();
|
public float | setRate(float factor)
if (device != null)
    rate = device.setRate(factor);
else
    rate = 1.0f;
return rate;
|
public void | setStopTime(javax.media.Time t)
stopTime = t.getNanoseconds();
|
public void | setTimeBase(javax.media.TimeBase master)
if (!(master instanceof AudioTimeBase)) {
    Log.warning("AudioRenderer cannot be controlled by time bases other than its own: " + master);
    /*
     * Silently allow the time base to be set to make
     * addController slightly more useful.  --ivg
     * throw new IncompatibleTimeBaseException();
     */
}
this.master = master;
|
public void | start()
syncStart(getTimeBase().getTime());
|
public synchronized void | stop()
started = false;
prefetched = false;
// Guard against pausing in the middle of a write.
synchronized (writeLock) {
    pauseDevice();
}
|
public synchronized void | syncStart(javax.media.Time at)
// syncStart is not truly synchronized to the given time yet;
// playback simply starts right away.
started = true;
prefetched = true;
resetted = false;
resumeDevice();
startTime = at.getNanoseconds();
|