Methods Summary

Modifier and Type | Method and Description
---|---
private native boolean | avClose(int peer, boolean freePeer)
|
private native double | avGetAudioBitRate(int peer)
|
private native int | avGetAudioChannelNumber(int peer)
|
private native double | avGetAudioSampleRate(int peer)
|
private native int | avGetAudioSampleSize(int peer)
|
private native double | avGetAudioSampleTimestamp(int peer)
|
private native double | avGetBitRate(int peer)
|
private native double | avGetDurationSec(int peer)
|
private native double | avGetFirstAudioTime(int peer)
|
private native double | avGetFirstVideoTime(int peer)
|
private native float | avGetFrameRate(int peer)
|
private native int | avGetHeight(int peer)
|
private native double | avGetNextFrameTime(int peer)
|
private native int | avGetWidth(int peer)
|
private native void | avInit(java.lang.String filename)
|
private native boolean | avIsAudioPresent(int peer)
|
public static native boolean | avIsBigEndian()
|
private native boolean | avIsSeekable(int peer)
|
private native boolean | avIsVideoPresent(int peer)
|
private native boolean | avOpen(int peer)
|
private native boolean | avProcess(int peer, java.lang.Object outData, long outDataBytes, long length, boolean useNativeBuffers, int frameFormat)
|
private native boolean | avProcessAudio(int peer, java.lang.Object outData, long outDataBytes, long length)
|
private native double | avSetPosition(int peer, double position)
|
public void | close()
System.out.println("Closing "+Thread.currentThread());
synchronized(peerSync) {avClose(peer, true);}
|
public int | getAudioSampleNumber()
return avGetAudioSampleSize(peer);
|
public double | getAudioSampleTimestamp()
return avGetAudioSampleTimestamp(peer);
|
public java.lang.Object | getControl(java.lang.String s)
return null;
|
public java.lang.Object[] | getControls()
return null;
|
public javax.media.Time | getDuration()
return duration;
|
public javax.media.Time | getMediaTime()
//System.out.println("PARSER getMediaTime");
return new Time(avGetNextFrameTime(peer));
|
public java.lang.String | getName()
return "FOBS PARSER";
|
public synchronized boolean | getNextAudioFrame(java.lang.Object outData, long outDataBytes, long length)
return avProcessAudio(peer, outData, outDataBytes, length);
|
public synchronized boolean | getNextFrame(java.lang.Object outData, long outDataBytes, long length)
return avProcess(peer, outData, outDataBytes, length, FobsConfiguration.useNativeBuffers, FobsConfiguration.videoFrameFormat);
|
public javax.media.protocol.ContentDescriptor[] | getSupportedInputContentDescriptors()
return inputContent;
|
public double | getTimestamp()
return avGetNextFrameTime(peer);
|
public javax.media.Track[] | getTracks()
return tracks;
|
public static boolean | isBigEndian()
try {
//System.loadLibrary("fobs4jmf");
NativeLibraryFinder.loadLibrary(Parser.class, "fobs4jmf");
System.out.println("Fobs4JMF - Native shared library found");
}
catch (UnsatisfiedLinkError e) {
System.out.println("Fobs4JMF - Native shared library NOT found");
e.printStackTrace();
throw new ExceptionInInitializerError(e.getMessage());
}
return avIsBigEndian();
|
public boolean | isPositionable()
return positionable;
|
public boolean | isRandomAccess()
return randomAccess;
|
public void | open()
System.out.println("Opening "+Thread.currentThread());
synchronized(peerSync) {avOpen(peer);}
|
public void | reset()
System.out.println("Resetting "+Thread.currentThread());
synchronized(peerSync) {avClose(peer, false);}
open();
|
public javax.media.Time | setPosition(javax.media.Time where, int rounding)
System.out.println("Setting position "+Thread.currentThread());
double newTime = 0.0;
synchronized(peerSync) {newTime = avSetPosition(peer, where.getSeconds());}
return new Time(newTime);
|
public void | setSource(javax.media.protocol.DataSource source)
// is it our DataSource?
//if (! (source instanceof com.omnividea.media.protocol.file.DataSource))
//System.out.println("Coming a " + source.getContentType() + " file.");
if(!source.getContentType().equals("video.ffmpeg"))
{
IncompatibleSourceException exp = new IncompatibleSourceException("Invalid DataSource");
exp.printStackTrace();
throw exp;
}
else {
dataSource = (com.omnividea.media.protocol.DataSource) source;
//System.out.println("\tPARSER URL: "+dataSource.getUrlName());
if (dataSource.getUrlName() == null) {
throw new IncompatibleSourceException("Invalid Datasource");
}
} // else is our DS
//Call ffmpeg for data
avInit(dataSource.getUrlName());
synchronized(peerSync) {
if(avOpen(peer) == false)
throw new IncompatibleSourceException("Fobs cannot read such url");
duration = new Time(avGetDurationSec(peer));
trackNumber = 0;
isVideoPresent = avIsVideoPresent(peer);
if(isVideoPresent)
{
trackNumber++;
videoWidth = avGetWidth(peer);
videoHeight = avGetHeight(peer);
videoBitRate = avGetBitRate(peer);
videoFrameRate = avGetFrameRate(peer);
}
isAudioPresent = avIsAudioPresent(peer);
if(isAudioPresent)
{
trackNumber++;
audioBitRate = avGetAudioBitRate(peer);
audioSampleRate = avGetAudioSampleRate(peer);
audioChannelNumber = avGetAudioChannelNumber(peer);
}
positionable = true;
randomAccess = avIsSeekable(peer);
//isAudioPresent = false;
tracks = new javax.media.Track[trackNumber];
int trackIndex = 0;
if(isVideoPresent)
{
Time firstVideoTime = new Time(avGetFirstVideoTime(peer));
tracks[trackIndex++] = new VideoTrack(videoWidth, videoHeight, videoFrameRate, duration, firstVideoTime, this);
}
if(isAudioPresent)
{
Time firstAudioTime = new Time(avGetFirstAudioTime(peer));
tracks[trackIndex++] = new AudioTrack(audioSampleRate, audioChannelNumber,
videoFrameRate, duration, firstAudioTime, this);
}
}//synchronized
|
public void | start()
|
public void | stop()
|