## Methods Summary
public void | close()Closes the plug-in component and releases resources. No more data
will be accepted by the plug-in after a call to this method. The
plug-in can be reinstated after being closed by calling
open .
// stop() and disconnect() datasource and set it to null
if (source != null){
try{
source.stop();
source.disconnect();
}catch (IOException e){
}
source = null;
}
for (int i = 0; i < mc.length; i++) {
if (mc[i] != null)
mc[i].close();
}
|
public javax.media.protocol.DataSource | getDataOutput()Get the output DataSource from this multiplexer.
The DataSource returned can be a push or pull datasource. i.e. a
Push[Pull]DataSource or
Push[Pull]BufferDataSource .
The datasource must be returned in the connected state.
return source;
|
public long | getMediaNanoseconds()
return clock.getMediaNanoseconds();
|
public javax.media.Time | getMediaTime()
return clock.getMediaTime();
|
public java.lang.String | getName()Returns a descriptive name for the plug-in.
This is a user readable string.
return "Raw Buffer Multiplexer";
|
public float | getRate()
return clock.getRate();
|
public javax.media.Time | getStopTime()
return clock.getStopTime();
|
public javax.media.Format[] | getSupportedInputFormats()
return new Format[] { new AudioFormat(null),
new VideoFormat(null) };
|
public javax.media.protocol.ContentDescriptor[] | getSupportedOutputContentDescriptors(javax.media.Format[] fmt)Lists the possible output contentdescriptors of the processed data.
If input is non-null, then it lists the possible
output contentdescriptors given that the input data are of the
formats specified by inputs . If
inputs is null, then it lists
all possible output content descriptors that this plug-in advertises.
// we support a raw format, so we dont really need to check the input
// formats here as we are just going to pass the stream on
// without looking at the format.
return supported;
|
public javax.media.Time | getSyncTime()
return clock.getSyncTime();
|
public javax.media.TimeBase | getTimeBase()
return clock.getTimeBase();
|
public boolean | initializeTracks(javax.media.Format[] trackFormats)Initialize the tracks in the multiplexer with formats given in an
array of track formats.
The indexes used in the format array are used subsequently
as keys to identify each individual track in the
process method.
This methods should be called only once. A java.lang.Error is
thrown if it's called more than once.
if( source.getStreams() != null)
throw new java.lang.Error("initializeTracks has been called previously. ");
source.initialize(trackFormats);
streams = (RawBufferSourceStream[])source.getStreams();
// we support any input format, so always return true
return true;
|
public javax.media.Time | mapToTimeBase(javax.media.Time t)
return clock.mapToTimeBase(t);
|
public void | open()Opens the plug-in software or hardware component and acquires
necessary resources. If all the needed resources could not be
acquired, it throws a ResourceUnavailableException. Data should not
be passed into the plug-in without first calling this method.
// the datasource must be created in
// setContentDescriptor & streams created in
// initializeTracks(). Make sure the source and streams were
// created and connect the source.
initializeTracks(trackFormats);
if ((source == null) || (source.getStreams() == null))
throw new ResourceUnavailableException("DataSource and SourceStreams were not created succesfully.");
try{
source.connect();
}catch (IOException e){
throw new ResourceUnavailableException(e.getMessage());
}
int len = 0;
int i;
mediaTime = new long[trackFormats.length];
mc = new MonitorAdapter[trackFormats.length];
for (i = 0; i < trackFormats.length; i++) {
mediaTime[i] = 0;
if (trackFormats[i] instanceof VideoFormat ||
trackFormats[i] instanceof AudioFormat) {
mc[i] = new MonitorAdapter(trackFormats[i], this);
if (mc[i] != null)
len++;
}
}
int j = 0;
controls = new Control[len];
for (i = 0; i < mc.length; i++) {
if (mc[i] != null)
controls[j++] = mc[i];
}
|
public int | process(javax.media.Buffer buffer, int trackID)Process the buffer and multiplex it with data from other
tracks. The multiplexed output is sent to the output
DataSource .
// If the processor starts out having RTP times, before the
// data comes out of this processor, we should reset the
// RTP flag and sets it to RELATIVE time. Otherwise, the
// next guy in the processing chain may compute the time
// incorrectly.
if ((buffer.getFlags() & Buffer.FLAG_RTP_TIME) != 0) {
buffer.setFlags((buffer.getFlags() & ~Buffer.FLAG_RTP_TIME) |
Buffer.FLAG_RELATIVE_TIME);
}
// If the monitor is enabled, we'll send the data to the monitor.
if (mc[trackID] != null && mc[trackID].isEnabled())
mc[trackID].process(buffer);
if ((streams == null) || (buffer == null) || (trackID >=
streams.length)){
return PlugIn.BUFFER_PROCESSED_FAILED;
}
updateTime(buffer, trackID);
return streams[trackID].process(buffer);
|
public void | reset()Resets the state of the plug-in. Typically at end of media or when media
is repositioned.
for (int i = 0; i < streams.length; i++) {
streams[i].reset();
if (mc[i] != null)
mc[i].reset();
}
|
public javax.media.protocol.ContentDescriptor | setContentDescriptor(javax.media.protocol.ContentDescriptor outputContentDescriptor)Set the output content-type.
// we support changes in contentdescriptor after it has
// been set, so no need to check to see if its set and no
// need to return FormatChangeException
if (matches(outputContentDescriptor, supported) == null)
return null;
// create the datasource and set its output
// contentdescriptor
contentDesc = outputContentDescriptor;
source = new RawBufferDataSource();
return contentDesc;
|
public javax.media.Format | setInputFormat(javax.media.Format input, int trackID)
if (trackID < numTracks)
trackFormats[trackID] = input;
for (int i = 0; i < numTracks; i++)
if (trackFormats[i] == null)
return input;
// all formats are set
//initializeTracks(trackFormats);
return input;
|
public void | setMediaTime(javax.media.Time now)
synchronized (timeSetSync) {
clock.setMediaTime(now);
for (int i = 0; i < mediaTime.length; i++)
mediaTime[i] = now.getNanoseconds();
timeBase.update();
systemStartTime = System.currentTimeMillis();
mediaStartTime = now.getNanoseconds() / 1000000;
}
|
public int | setNumTracks(int nTracks)
numTracks = nTracks;
trackFormats = new Format[nTracks];
for (int i = 0; i < nTracks; i++)
trackFormats[i] = null;
return nTracks;
|
public float | setRate(float factor)
if (factor == clock.getRate())
return factor;
return clock.setRate(1.0f);
|
public void | setStopTime(javax.media.Time stopTime)
clock.setStopTime(stopTime);
|
public void | setTimeBase(javax.media.TimeBase master)
if (master != timeBase)
throw new IncompatibleTimeBaseException();
|
public void | stop()
synchronized (timeSetSync){
if (!started) return;
started = false;
clock.stop();
timeBase.mediaStopped();
}
|
public void | syncStart(javax.media.Time at)
synchronized (timeSetSync){
if (started) return;
started = true;
clock.syncStart(at);
timeBase.mediaStarted();
systemStartTime = System.currentTimeMillis();
mediaStartTime = getMediaNanoseconds() / 1000000;
}
|
protected void | updateTime(javax.media.Buffer buf, int trackID)Update the media time per track.
if (buf.getFormat() instanceof AudioFormat) {
if (mpegAudio.matches(buf.getFormat())) {
if (buf.getTimeStamp() < 0) {
if (systemStartTime >= 0)
mediaTime[trackID] = (mediaStartTime +
System.currentTimeMillis() - systemStartTime)
* 1000000;
} else
mediaTime[trackID] = buf.getTimeStamp();
} else {
// If it's audio data and the time stamp is undefined,
// we'll compute from the audio duration.
long t = ((AudioFormat)buf.getFormat()).computeDuration(buf.getLength());
if (t >= 0)
mediaTime[trackID] += t;
else
mediaTime[trackID] = buf.getTimeStamp();
}
} else if (buf.getTimeStamp() < 0) {
// This is video with TIME_UNKNOWN.
if (systemStartTime >= 0)
mediaTime[trackID] = (mediaStartTime +
System.currentTimeMillis() - systemStartTime)
* 1000000;
} else
mediaTime[trackID] = buf.getTimeStamp();
timeBase.update();
|