OPISourceStream

public class OPISourceStream extends BasicSourceStream implements PushBufferStream, Owned

SourceStream for the DataSource.
The SourceStream can be accessed with the URL sunvideoplus:
The URL has been extended to allow selecting some of the options:
sunvideoplus://card/port/compression/size/signal

where

card        = the sunvideoplus card to use when multiple cards are installed (default 0).
port        = the input port to use: s-vhs, 1, or 2 (default 1).
compression = h261, h263, rgb, yuv, or jpeg (default h261).
size        = fcif (full), cif, or qcif (default cif). The actual frame size
              depends on whether the camera is NTSC or PAL.
signal      = the source signal: NTSC or PAL (default NTSC).
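A minimal usage sketch (illustrative only; the class name SunVideoPlusLocatorExample is hypothetical) showing how such a locator is typically handed to the JMF Manager to obtain the DataSource that owns this stream:

import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.protocol.DataSource;

public class SunVideoPlusLocatorExample {
    public static void main(String[] args) throws Exception {
        // Card 0, port 1, H.263 compression, QCIF size, PAL signal.
        MediaLocator loc = new MediaLocator("sunvideoplus://0/1/h263/qcif/pal");
        DataSource ds = Manager.createDataSource(loc);
        ds.connect();      // the DataSource in turn connects its OPISourceStream
        // ... hand ds to a Player/Processor, or read from its stream directly ...
        ds.disconnect();
    }
}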
Fields Summary |
---|
private DataSource | dataSource
private MediaLocator | locator
int | maxDataSize
BufferTransferHandler | transferHandler
private byte[] | data
private int | length
private long | nextSequence
long | timeStamp
OPICapture | opiCap
private VideoFormat | capFormat
private Format[] | supported
SystemTimeBase | systemTimeBase
private static Format[] | supportedNTSC
private static Format[] | supportedPAL
private static Format[] | supportedCommon
private static final boolean | CONTROL_PANEL_HACK
private Frame | controlFrame
private static Integer | OPILock
Integer | readLock
private boolean | started
private boolean | connected
private boolean | connectedOK
private boolean | inUse
private int | cardNo
private static String[] | VALID_PORTS
private static final int | DEFAULT_PORT
private int | portNo
private static String[] | VALID_COMPRESS
private static String[] | VIDEO_COMPRESS
private static final int | DEFAULT_COMPRESS
private static final int | H261_COMPRESS
private static final int | H263_COMPRESS
private static final int | RGB_COMPRESS
private static final int | YUV_COMPRESS
private int | compressNo
private static String[] | VALID_SIGNAL
private static final int | DEFAULT_SIGNAL
private static final int | PAL_SIGNAL
private int | signalNo
private static String[] | VALID_SIZES
private static float[] | VALID_SIZES_FLOAT
private static int[] | VALID_SCALE
private static final int | DEFAULT_SIZE
private static float | SIZE_GRANULARITY
private int | sizeNo
private static final int | DEFAULT_RATE
private int | rateNo
private static final int | DEFAULT_QUALITY
private int | qualityNo
private static final int | LO_BITRATE
private static final int | HI_BITRATE
private static final int | DEFAULT_BITRATE
private int | bitrateNo
private static final int | H263_LO_BITRATE
private static final int | H263_HI_BITRATE
private static final int | H263_DEFAULT_BITRATE
private LocalPortControl | portControl
private SignalControl | signalControl
private RateControl | rateControl
private LocalQualityControl | qualityControl
private BitRateControl | bitrateControl
private LocalFormatControl | formatControl
private float | preferredFrameRate
private PushThread | pt
private static JMFSecurity | jmfSecurity
private static boolean | securityPrivelege
private Method[] | mSecurity
private Class[] | clSecurity
private Object[] | argsSecurity
Constructors Summary |
---|
public OPISourceStream(DataSource ds)
super(new ContentDescriptor(ContentDescriptor.RAW),
LENGTH_UNKNOWN);
this.dataSource = ds;
this.locator = ds.getLocator();
cardNo = 0;
String remainder = locator.getRemainder();
if (remainder != null && remainder.length() > 0) {
while (remainder.length() > 1 && remainder.charAt(0) == '/')
remainder = remainder.substring(1);
String cardStr, portStr, compStr, signalStr, sizeStr;
portStr = null; // assume no port specified
compStr = null; // assume no compress specified
signalStr = null; // assume no signal specified
sizeStr = null; // assume no size specified
// Now see if there's a port specified.
int off = remainder.indexOf('/');
if (off == -1) {
cardStr = remainder;
} else {
cardStr = remainder.substring(0, off);
remainder = remainder.substring(off + 1);
// Now see if there's a compression specified
off = remainder.indexOf('/');
if (off == -1) {
portStr = remainder;
} else {
portStr = remainder.substring(0, off);
remainder = remainder.substring(off + 1);
// Now see if there's a size specified
off = remainder.indexOf('/');
if (off == -1) {
compStr = remainder;
} else {
compStr = remainder.substring(0, off);
remainder = remainder.substring(off + 1);
// Now see if there's a signal specified
off = remainder.indexOf('/');
if (off == -1) {
sizeStr = remainder;
} else {
sizeStr = remainder.substring(0, off);
signalStr = remainder.substring(off + 1);
}
}
}
}
try {
Integer integer = Integer.valueOf(cardStr);
if (integer != null) {
cardNo = integer.intValue();
}
} catch (Throwable t) {
}
if (portStr != null && portStr.length() > 0) {
for (int i = 0; i < VALID_PORTS.length; i++) {
if (VALID_PORTS[i].equalsIgnoreCase(portStr)) {
portNo = i;
}
}
}
if (compStr != null && compStr.length() > 0) {
for (int i = 0; i < VALID_COMPRESS.length; i++) {
if (VALID_COMPRESS[i].equalsIgnoreCase(compStr)) {
compressNo = i;
}
}
}
if (signalStr != null && signalStr.length() > 0) {
for (int i = 0; i < VALID_SIGNAL.length; i++) {
if (VALID_SIGNAL[i].equalsIgnoreCase(signalStr)) {
signalNo = i;
}
}
signalToFormat(signalNo);
} else {
signalToFormat(-1);
}
if (sizeStr != null && sizeStr.length() > 0) {
for (int i = 0; i < VALID_SIZES.length; i++) {
if (VALID_SIZES[i].equalsIgnoreCase(sizeStr)) {
sizeNo = i;
}
}
}
}
capFormat = new javax.media.format.VideoFormat(
VIDEO_COMPRESS[compressNo],
getSizeDimension(),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
getRate());
opiCap = new OPICapture(this);
portControl = new LocalPortControl(this, VALID_PORTS, portNo);
signalControl = new SignalControl(this, VALID_SIGNAL, signalNo);
rateControl = new RateControl(this, (float)DEFAULT_RATE, 1f, 30f);
qualityControl = new LocalQualityControl(this,
((float)DEFAULT_QUALITY/100f),
0.0f, 1.0f);
bitrateControl = new BitRateControl(this, DEFAULT_BITRATE,
LO_BITRATE, HI_BITRATE);
formatControl = new LocalFormatControl(this);
controls = new Object[5];
controls[0] = portControl;
controls[1] = rateControl;
controls[2] = qualityControl;
controls[3] = bitrateControl;
controls[4] = formatControl;
|
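The constructor builds the initial capture VideoFormat and registers five controls (port, frame rate, quality, bit rate, and format) in the inherited controls array. A hedged sketch of reaching them through the standard PushBufferStream/Controls API; the helper class OpiStreamInspector is hypothetical, and it assumes the connected sunvideoplus DataSource from the earlier sketch is a PushBufferDataSource:

import javax.media.Format;
import javax.media.protocol.PushBufferDataSource;
import javax.media.protocol.PushBufferStream;

class OpiStreamInspector {
    // Lists the stream's format and controls; returns the stream for further use.
    static PushBufferStream inspect(PushBufferDataSource pbds) {
        PushBufferStream stream = pbds.getStreams()[0];   // the OPISourceStream
        Format fmt = stream.getFormat();                   // the capFormat built in the constructor
        Object[] ctrls = stream.getControls();             // port, rate, quality, bit-rate and format controls
        System.out.println("capture format: " + fmt);
        for (int i = 0; i < ctrls.length; i++) {
            System.out.println("control: " + ctrls[i].getClass().getName());
        }
        return stream;
    }
}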
Methods Summary |
---|
public void | connect()
synchronized (OPILock) {
if (inUse) {
throw new IOException("Capture device in use");
} else
inUse = true;
connected = false;
if (!doConnect()) {
inUse = false;
throw new IOException("Could not connect to capture device");
}
connected = true;
// Following only needed while the control frame hack is present...
if (CONTROL_PANEL_HACK)
doControlPanelHack();
// Preceding only needed while the control frame hack is present...
}
| synchronized void | disconnect()
// System.err.println("OPISourceStream.disconnect");
if (started) {
try {
stop();
} catch (IOException ioe) {
}
}
synchronized (OPILock) {
connected = false;
opiCap.disconnect();
if (pt != null)
pt.timeToQuit();
pt = null;
inUse = false;
// Following only needed while the control frame hack is present...
if(CONTROL_PANEL_HACK && controlFrame != null) {
controlFrame.setVisible(false);
controlFrame.removeAll();
controlFrame.dispose();
controlFrame = null;
}
// Preceding only needed while the control frame hack is present...
}
| private boolean | doConnect()
// System.err.println("OPISourceStream.doConnect");
if (!opiCap.connect(cardNo, portNo))
return false;
setSize(sizeNo); // set the scale
setSignal(signalNo); // set the signal format
setCompress(compressNo); // set the compression
data = new byte[maxDataSize]; // prime the data field for push
nextSequence = 1; // reset in case it's a reconnect
return true;
| private void | doControlPanelHack()
if (controlFrame != null) {
controlFrame.setVisible(true);
return;
}
controlFrame = new Frame("OPI Controls");
controlFrame.addWindowListener(new WindowAdapter() {
public void windowClosing(WindowEvent e) {
controlFrame.setVisible(false);
}
});
controlFrame.setLayout(new BorderLayout());
Panel p = new Panel();
p.setLayout(new FlowLayout(FlowLayout.LEFT, 1, 1));
// p.add(new LabelComponent("Port"));
p.add(portControl.getControlComponent());
// p.add(new LabelComponent("Format"));
p.add(formatControl.getControlComponent());
controlFrame.add(p, "North");
p = new Panel();
p.setLayout(new FlowLayout(FlowLayout.LEFT, 1, 1));
p.add(rateControl.getControlComponent());
p.add(qualityControl.getControlComponent());
p.add(bitrateControl.getControlComponent());
controlFrame.add(p, "South");
controlFrame.pack();
controlFrame.setVisible(true);
| public void | finalize()
if (connected)
disconnect();
| int | getBitRate()
return bitrateNo;
| public javax.media.CaptureDeviceInfo | getCaptureDeviceInfo()
// TODO - more useful descriptor of device
return new CaptureDeviceInfo("SunVideoPlus device " + cardNo,
locator, supported);
| boolean | getConnected()
return connected;
| byte[] | getData()
return data;
| public java.lang.Object | getDataType()
return Format.byteArray;
| public javax.media.Format | getFormat()
// System.err.println("OPISourceStream.getFormat");
return capFormat;
| public java.lang.Object | getOwner()
return dataSource;
| static | static initializer
try {
jmfSecurity = JMFSecurityManager.getJMFSecurity();
securityPrivelege = true;
} catch (SecurityException e) {
}
supportedCommon = new javax.media.Format[] {
// H.26x
// H.261 formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.H261,
new Dimension(352, 288),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.H261,
new Dimension(176, 144),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
// H.263 formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.H263,
new Dimension(352, 288),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.H263,
new Dimension(176, 144),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
};
supportedNTSC = new javax.media.Format[] {
// NTSC
// JPEG formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.JPEG,
new Dimension(320, 240),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.JPEG,
new Dimension(160, 120),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
// RGB formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(640, 480),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(320, 240),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(160, 120),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
// YUV formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(640, 480),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(320, 240),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(160, 120),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
};
supportedPAL = new javax.media.Format[] {
// PAL
// JPEG formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.JPEG,
new Dimension(384, 288),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.JPEG,
new Dimension(192, 144),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
// RGB formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(768, 576),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(384, 288),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.RGB,
new Dimension(192, 144),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
// YUV formats
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(768, 576),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(384, 288),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
new javax.media.format.VideoFormat(
javax.media.format.VideoFormat.YUV,
new Dimension(192, 144),
javax.media.format.VideoFormat.NOT_SPECIFIED,
Format.byteArray,
javax.media.format.VideoFormat.NOT_SPECIFIED),
};
| float | getQuality()
return ((float) qualityNo / 100f);
| float | getRate()
if (rateNo == 30)
return 29.97f; // NTSC standard broadcast frame rate
return (float) rateNo;
| int | getSize()
return sizeNo;
| java.awt.Dimension | getSizeDimension()
int scale = VALID_SCALE[sizeNo];
int w = 640;
int h = 480;
if (signalNo == PAL_SIGNAL) {
w = 768;
h = 576;
}
if ((compressNo == H263_COMPRESS) || (compressNo == H261_COMPRESS)) {
w = 704;
h = 576;
}
return new Dimension(w / scale, h / scale);
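As a worked example of the arithmetic above: an H.261/H.263 capture starts from a 704 x 576 base frame, so a scale of 2 yields 352 x 288 (CIF) and a scale of 4 yields 176 x 144 (QCIF); an NTSC RGB/YUV/JPEG capture starts from 640 x 480, giving 320 x 240 and 160 x 120 at the same scales, and a PAL capture starts from 768 x 576, giving 384 x 288 and 192 x 144.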
| float | getSizeFloat()
return VALID_SIZES_FLOAT[sizeNo];
| float[] | getSizesFloat()
return VALID_SIZES_FLOAT;
| boolean | getStarted()
return started;
| public javax.media.Format[] | getSupportedFormats()
// System.err.println("OPISourceStream.getSupportedFormats");
return supported;
| void | pushData(int length)
this.length = length;
if (transferHandler != null)
transferHandler.transferData(this);
| public void | read(javax.media.Buffer buffer)
// System.err.println("OPISourceStream.read");
if (!started) {
buffer.setDiscard(true);
length = 0;
// return 0;
return;
}
synchronized (readLock) {
int copyLength = length;
if (length > 0) {
byte [] outgoingData = data;
Object incomingData = buffer.getData();
if (incomingData instanceof byte[] &&
((byte[])incomingData).length >= maxDataSize) {
data = (byte []) incomingData;
} else {
data = new byte[maxDataSize];
}
buffer.setOffset(0);
buffer.setData(outgoingData);
buffer.setLength(length);
buffer.setSequenceNumber(nextSequence++);
buffer.setDiscard(false);
buffer.setTimeStamp(timeStamp);
buffer.setFlags(buffer.getFlags() |
buffer.FLAG_SYSTEM_TIME |
buffer.FLAG_KEY_FRAME |
buffer.FLAG_LIVE_DATA);
buffer.setFormat(capFormat);
} else
buffer.setDiscard(true);
length = 0;
// return copyLength;
return;
}
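read() hands off the most recent frame by swapping byte arrays with the caller's Buffer, so no copy is made, and pushData() drives whatever BufferTransferHandler has been registered. A hedged sketch of a consumer; the class FrameDumper and its names are hypothetical, not part of this source:

import javax.media.Buffer;
import javax.media.protocol.BufferTransferHandler;
import javax.media.protocol.PushBufferStream;

// Invoked from pushData(); calls read() on the same thread. The Buffer's byte[]
// is recycled by the stream, so the same Buffer instance is reused throughout.
class FrameDumper implements BufferTransferHandler {
    private final Buffer buffer = new Buffer();

    public void transferData(PushBufferStream stream) {
        try {
            stream.read(buffer);
            if (!buffer.isDiscard()) {
                byte[] frame = (byte[]) buffer.getData();
                int length = buffer.getLength();
                // process frame[0 .. length) here
            }
        } catch (java.io.IOException e) {
            // capture error; log or stop the stream as appropriate
        }
    }
}

Registration would be stream.setTransferHandler(new FrameDumper()), using the stream obtained in the earlier sketch.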
| void | setBitRate(int bitrate)
bitrateNo = bitrate;
opiCap.setBitRate(bitrateNo);
| private void | setCompress(java.lang.String compress)
if (compress != null && compress.length() > 0) {
for (int i = 0; i < VALID_COMPRESS.length; i++) {
if (VALID_COMPRESS[i].equalsIgnoreCase(compress)) {
compressNo = i;
if (connected)
setCompress(compressNo);
}
}
}
| private void | setCompress(int compress)
compressNo = compress;
opiCap.setCompress(VALID_COMPRESS[compressNo]);
if (compress == H263_COMPRESS) {
bitrateControl.setRange(H263_DEFAULT_BITRATE,
H263_LO_BITRATE, H263_HI_BITRATE);
setBitRate(H263_DEFAULT_BITRATE);
} else {
bitrateControl.setRange(DEFAULT_BITRATE,
LO_BITRATE, HI_BITRATE);
setBitRate(DEFAULT_BITRATE);
}
// also test for YUV when it is supported
if ((compress == RGB_COMPRESS) || (compress == YUV_COMPRESS)) {
qualityControl.setEnabled(false);
bitrateControl.setEnabled(false);
} else {
qualityControl.setEnabled(true);
bitrateControl.setEnabled(true);
}
| void | setData(byte[] buf)
data = buf;
| public javax.media.Format | setFormat(javax.media.Format fmt)
javax.media.Format f = null;
for (int i = 0; i < supported.length; i++) {
if (fmt.matches(supported[i]) &&
(f = fmt.intersects(supported[i])) != null) {
break;
}
}
if (f != null) {
VideoFormat format = (javax.media.format.VideoFormat)f;
if (format.getEncoding().equals(format.H261)) {
setCompress("H261");
} else if (format.getEncoding().equals(format.H263)) {
setCompress("H263");
} else if (format.getEncoding().equals(format.JPEG)) {
setCompress("Jpeg");
} else if (format.getEncoding().equals(format.YUV)) {
setCompress("YUV");
} else {
setCompress("RGB");
}
if (format.getFrameRate() !=
javax.media.format.VideoFormat.NOT_SPECIFIED) {
// rateControl will call back to setRate
rateControl.setFrameRate(format.getFrameRate());
}
setSize(format.getSize());
if (!connected) {
capFormat = format;
}
}
return capFormat;
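setFormat() picks the first supported format that matches the request and reconfigures compression, frame rate, and size accordingly. Assuming LocalFormatControl implements the standard javax.media.control.FormatControl (not confirmed by this listing), client code would typically drive it through the control rather than calling setFormat() directly; the helper class OpiFormatSelector below is hypothetical:

import javax.media.Format;
import javax.media.control.FormatControl;
import javax.media.format.VideoFormat;
import javax.media.protocol.PushBufferStream;

class OpiFormatSelector {
    // Requests H.263 at QCIF; returns the format actually selected, or null.
    static Format selectH263Qcif(PushBufferStream stream) {
        Object ctrl = stream.getControl("javax.media.control.FormatControl");
        if (!(ctrl instanceof FormatControl))
            return null;                                   // control not exposed as assumed
        Format wanted = new VideoFormat(VideoFormat.H263,
                new java.awt.Dimension(176, 144),          // QCIF
                Format.NOT_SPECIFIED, Format.byteArray,
                Format.NOT_SPECIFIED);                     // frame rate left unspecified
        return ((FormatControl) ctrl).setFormat(wanted);
    }
}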
| public void | setH261Format(int inWidth, int inHeight, int outWidth, int outHeight, int quality, int scale)
setVideoFormat(VideoFormat.H261, inWidth, inHeight,
outWidth, outHeight, quality, scale);
| public void | setH263Format(int inWidth, int inHeight, int outWidth, int outHeight, int quality, int scale)
setVideoFormat(VideoFormat.H263, inWidth, inHeight,
outWidth, outHeight, quality, scale);
| public void | setJpegFormat(int inWidth, int inHeight, int outWidth, int outHeight, int quality, int scale)
if (inWidth <= 0) inWidth = 640; // default to NTSC
if (inHeight <= 0) inHeight = 480; // default to NTSC
if (outWidth <= 0) outWidth = inWidth / VALID_SCALE[sizeNo];
if (outHeight <= 0) outHeight = inHeight / VALID_SCALE[sizeNo];
if (quality > 60)
maxDataSize = 3 * outWidth * outHeight;
else
maxDataSize = 2 * outWidth * outHeight;
// Note that quality is changed from the 1-100 range to 1-255
capFormat = new JPEGFormat(
new java.awt.Dimension(outWidth, outHeight),
maxDataSize, Format.byteArray,
getRate(),
qualityNo / 3,
VideoFormat.NOT_SPECIFIED);
setSizes(inWidth, inHeight, outWidth, outHeight, scale);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
| public void | setMpegFormat(int inWidth, int inHeight, int outWidth, int outHeight, int quality, int scale)
if (inWidth <= 0) inWidth = 640; // default to NTSC
if (inHeight <= 0) inHeight = 480; // default to NTSC
if (outWidth <= 0) outWidth = inWidth / VALID_SCALE[sizeNo];
if (outHeight <= 0) outHeight = inHeight / VALID_SCALE[sizeNo];
if (quality > 60)
maxDataSize = 3 * outWidth * outHeight;
else
maxDataSize = 2 * outWidth * outHeight;
capFormat = new VideoFormat(VideoFormat.MPEG,
new java.awt.Dimension(outWidth, outHeight),
maxDataSize, Format.byteArray,
getRate()); // frame rate
setSizes(inWidth, inHeight, outWidth, outHeight, scale);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
| private void | setPort(int port)
portNo = port;
opiCap.setPort(portNo);
| void | setQuality(float quality)
qualityNo = (int) ((quality * 100f) + 0.5f);
opiCap.setQuality(qualityNo);
if ((capFormat != null) && (capFormat instanceof JPEGFormat)) {
// Note that quality is changed from the 1-100 range to 1-255
capFormat = new JPEGFormat(
(capFormat == null ? null : capFormat.getSize()),
maxDataSize, Format.byteArray, getRate(),
qualityNo / 3,
Format.NOT_SPECIFIED);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
}
| public void | setRGBFormat(int inWidth, int inHeight, int outWidth, int outHeight, int scanLine, int scale)
if (inWidth <= 0) inWidth = 640; // default to NTSC
if (inHeight <= 0) inHeight = 480; // default to NTSC
if (outWidth <= 0) outWidth = inWidth / VALID_SCALE[sizeNo];
if (outHeight <= 0) outHeight = inHeight / VALID_SCALE[sizeNo];
Dimension dim = new java.awt.Dimension(outWidth, outHeight);
// media engine doesn't like NOT_SPECIFIED
if (scanLine == Format.NOT_SPECIFIED)
scanLine = 2 * outWidth;
maxDataSize = scanLine * outHeight;
capFormat = new RGBFormat(new java.awt.Dimension(outWidth, outHeight),
maxDataSize, Format.byteArray,
getRate(), // frame rate
16,
0xF800, 0x7E0, 0x1F,
2, scanLine,
Format.FALSE, // flipped
Format.NOT_SPECIFIED); // endian
setSizes(inWidth, inHeight, outWidth, outHeight, scale);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
| void | setRate(float rate)
rateNo = (int) (rate + 0.5);
if (rateNo <= 0)
rateNo = 1;
opiCap.setFrameRate(rateNo);
// Adjust frame rate in format
if (capFormat != null) {
if (VideoFormat.JPEG.equals(capFormat.getEncoding())) {
capFormat = new JPEGFormat(capFormat.getSize(),
maxDataSize, Format.byteArray, getRate(),
qualityNo / 3, JPEGFormat.NOT_SPECIFIED);
} else if (VideoFormat.H261.equals(capFormat.getEncoding())
|| VideoFormat.H263.equals(capFormat.getEncoding())
|| VideoFormat.MPEG.equals(capFormat.getEncoding())) {
capFormat = new VideoFormat(capFormat.getEncoding(),
capFormat.getSize(),
maxDataSize, Format.byteArray,
getRate()); // frame rate
} else if (VideoFormat.YUV.equals(capFormat.getEncoding())) {
capFormat = new YUVFormat(capFormat.getSize(),
maxDataSize, Format.byteArray,
getRate(), // frame rate
YUVFormat.YUV_YUYV,
((YUVFormat)capFormat).getStrideY(),
((YUVFormat)capFormat).getStrideUV(),
0, 3, 1);
} else if (VideoFormat.RGB.equals(capFormat.getEncoding())) {
capFormat = new RGBFormat(capFormat.getSize(),
maxDataSize, Format.byteArray,
getRate(), // frame rate
16,
0xF800, 0x7E0, 0x1F, 2,
((RGBFormat)capFormat).getLineStride(),
Format.FALSE, // flipped
Format.NOT_SPECIFIED); // endian
}
formatControl.setCurrentFormat(capFormat);
}
| private void | setSignal(int signal)
signalNo = signal;
opiCap.setSignal(VALID_SIGNAL[signalNo]);
signalToFormat(signalNo);
| void | setSize(java.awt.Dimension size)
int scale = 1;
// Handle both NTSC and PAL sizes
if (size.width > 384)
scale = 1;
else if (size.width >= 320)
scale = 2;
else
scale = 4;
for (int i = 0; i < VALID_SIZES.length; i++) {
if (VALID_SCALE[i] == scale) {
sizeNo = i;
if (connected)
setSize(sizeNo);
}
}
| void | setSize(int size)
int prevsize = sizeNo;
sizeNo = size;
// size may be rejected for current compression
if (!opiCap.setScale(VALID_SCALE[sizeNo]))
sizeNo = prevsize;
| void | setSize(float size)
if (size > VALID_SIZES_FLOAT[0]) {
sizeNo = 0;
} else {
for (int i = 1; i < VALID_SIZES_FLOAT.length; i++) {
sizeNo = i;
if (size > VALID_SIZES_FLOAT[i]) {
// Allow for the cases where size is not an exact match
if ((VALID_SIZES_FLOAT[i - 1] - size) <
(size - VALID_SIZES_FLOAT[i])) {
sizeNo = i - 1;
break;
}
break;
}
}
}
opiCap.setScale(VALID_SCALE[sizeNo]);
| private void | setSizes(int inWidth, int inHeight, int outWidth, int outHeight, int scale)
for (int i = 0; i < VALID_SCALE.length; i++) {
if (scale == VALID_SCALE[i]) {
if (sizeNo != i)
setSize(i);
break;
}
}
| public void | setTransferHandler(javax.media.protocol.BufferTransferHandler th)
transferHandler = th;
| public void | setVideoFormat(java.lang.String format, int inWidth, int inHeight, int outWidth, int outHeight, int quality, int scale)
if (inWidth <= 0) inWidth = 640; // default to NTSC
if (inHeight <= 0) inHeight = 480; // default to NTSC
if (outWidth <= 0) outWidth = inWidth / VALID_SCALE[sizeNo];
if (outHeight <= 0) outHeight = inHeight / VALID_SCALE[sizeNo];
if (quality > 60)
maxDataSize = 3 * outWidth * outHeight;
else
maxDataSize = 2 * outWidth * outHeight;
capFormat = new VideoFormat(format,
new java.awt.Dimension(outWidth, outHeight),
maxDataSize, Format.byteArray,
getRate()); // frame rate
setSizes(inWidth, inHeight, outWidth, outHeight, scale);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
| public void | setYUVFormat(int inWidth, int inHeight, int outWidth, int outHeight, int scanLine, int scale)
if (inWidth <= 0) inWidth = 640; // default to NTSC
if (inHeight <= 0) inHeight = 480; // default to NTSC
if (outWidth <= 0) outWidth = inWidth / VALID_SCALE[sizeNo];
if (outHeight <= 0) outHeight = inHeight / VALID_SCALE[sizeNo];
// media engine doesn't like NOT_SPECIFIED
scanLine = 2 * outWidth;
maxDataSize = scanLine * outHeight;
// The image coming from the Osprey card is YVYU.
capFormat = new YUVFormat(new java.awt.Dimension(outWidth, outHeight),
maxDataSize, Format.byteArray,
getRate(), // frame rate
YUVFormat.YUV_YUYV, scanLine,
scanLine, 0, 3, 1);
setSizes(inWidth, inHeight, outWidth, outHeight, scale);
if (formatControl != null)
formatControl.setCurrentFormat(capFormat);
| private void | signalToFormat(int s)
if (s == -1) {
// support both NTSC and PAL until a choice is made
supported = new javax.media.Format[supportedCommon.length
+ supportedNTSC.length
+ supportedPAL.length];
System.arraycopy(supportedCommon, 0, supported, 0,
supportedCommon.length);
System.arraycopy(supportedNTSC, 0, supported,
supportedCommon.length, supportedNTSC.length);
System.arraycopy(supportedPAL, 0, supported,
supportedCommon.length + supportedNTSC.length,
supportedPAL.length);
} else if (signalNo == PAL_SIGNAL) {
// support PAL formats
supported = new javax.media.Format[supportedCommon.length
+ supportedPAL.length];
System.arraycopy(supportedCommon, 0, supported, 0,
supportedCommon.length);
System.arraycopy(supportedPAL, 0, supported,
supportedCommon.length, supportedPAL.length);
} else {
// support NTSC formats
supported = new javax.media.Format[supportedCommon.length
+ supportedNTSC.length];
System.arraycopy(supportedCommon, 0, supported, 0,
supportedCommon.length);
System.arraycopy(supportedNTSC, 0, supported,
supportedCommon.length, supportedNTSC.length);
}
if (formatControl != null)
formatControl.setSupportedFormats(supported);
| void | start()
// System.err.println("OPISourceStream.start");
if (started)
return;
if (!opiCap.start()) {
// System.err.println("OPISourceStream.start failed");
throw (new IOException("OPIStart failed"));
}
synchronized (this) {
started = true;
// Start the video call back polling thread
if (pt == null) {
if ( /*securityPrivelege && */ (jmfSecurity != null) ) {
String permission = null;
try {
if (jmfSecurity.getName().startsWith("jmf-security")) {
permission = "thread";
jmfSecurity.requestPermission(mSecurity, clSecurity, argsSecurity,
JMFSecurity.THREAD);
mSecurity[0].invoke(clSecurity[0], argsSecurity[0]);
permission = "thread group";
jmfSecurity.requestPermission(mSecurity, clSecurity, argsSecurity,
JMFSecurity.THREAD_GROUP);
mSecurity[0].invoke(clSecurity[0], argsSecurity[0]);
} else if (jmfSecurity.getName().startsWith("internet")) {
PolicyEngine.checkPermission(PermissionID.THREAD);
PolicyEngine.assertPermission(PermissionID.THREAD);
}
} catch (Throwable e) {
if (JMFSecurityManager.DEBUG) {
System.err.println( "Unable to get " + permission +
" privilege " + e);
}
securityPrivelege = false;
// TODO: Do the right thing if permissions cannot be obtained.
// User should be notified via an event
}
}
if ( (jmfSecurity != null) && (jmfSecurity.getName().startsWith("jdk12"))) {
try {
Constructor cons = jdk12CreateThreadAction.cons;
pt = (PushThread) jdk12.doPrivM.invoke(
jdk12.ac,
new Object[] {
cons.newInstance(
new Object[] {
PushThread.class,
this
})});
} catch (Exception e) {
System.err.println("OPISourceStream: exception when creating thread");
}
} else {
pt = new PushThread(this);
}
if (pt != null) {
pt.start();
}
}
if (signalControl != null) signalControl.setEnabled(false);
if (formatControl != null) formatControl.getControlComponent().
setEnabled(false);
}
| void | stop()
// System.err.println("OPISourceStream.stop");
started = false;
opiCap.stop();
if (signalControl != null) signalControl.setEnabled(true);
if (formatControl != null) formatControl.getControlComponent().
setEnabled(true);
| public boolean | willReadBlock()
return true;
|
|