Methods Summary
---
public long | cancelRequest(int requestId) Cancel the repeating request with the given request id.
return mRequestThreadManager.cancelRepeating(requestId);
|
public void | close()
mRequestThreadManager.quit();
mCallbackHandlerThread.quitSafely();
mResultThread.quitSafely();
try {
mCallbackHandlerThread.join();
} catch (InterruptedException e) {
Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
mCallbackHandlerThread.getName(), mCallbackHandlerThread.getId()));
}
try {
mResultThread.join();
} catch (InterruptedException e) {
Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
mResultThread.getName(), mResultThread.getId()));
}
mClosed = true;
|
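close() asks each helper thread to quit and then joins it, logging and continuing if the join is interrupted so that shutdown always completes. Below is a minimal plain-Java sketch of the same quit-then-join order; the Worker class is a hypothetical stand-in for the callback and result HandlerThreads, not the real framework types.

import java.util.concurrent.atomic.AtomicBoolean;

// Sketch only: Worker stands in for the HandlerThreads quit by close().
public class ShutdownSketch {
    static class Worker extends Thread {
        private final AtomicBoolean quit = new AtomicBoolean(false);
        Worker(String name) { super(name); }
        void quitSafely() { quit.set(true); interrupt(); }
        @Override public void run() {
            while (!quit.get()) {
                try {
                    Thread.sleep(10); // stand-in for draining a message queue
                } catch (InterruptedException e) {
                    // ignored: the quit flag decides when to exit
                }
            }
        }
    }

    public static void main(String[] args) {
        Worker callbackThread = new Worker("CallbackThread");
        Worker resultThread = new Worker("ResultThread");
        callbackThread.start();
        resultThread.start();

        // Same order as close(): request quit first, then join each thread,
        // logging (here: printing) if the join itself is interrupted.
        callbackThread.quitSafely();
        resultThread.quitSafely();
        for (Worker w : new Worker[] { callbackThread, resultThread }) {
            try {
                w.join();
            } catch (InterruptedException e) {
                System.err.println(String.format("Thread %s (%d) interrupted while quitting.",
                        w.getName(), w.getId()));
            }
        }
    }
}
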
public int | configureOutputs(java.util.List&lt;Surface&gt; outputs) Configure the device with a set of output surfaces.
Using empty or {@code null} {@code outputs} is the same as unconfiguring.
Every surface in {@code outputs} must be non-{@code null}.
List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
if (outputs != null) {
for (Surface output : outputs) {
if (output == null) {
Log.e(TAG, "configureOutputs - null outputs are not allowed");
return BAD_VALUE;
}
StreamConfigurationMap streamConfigurations = mStaticCharacteristics.
get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// Validate surface size and format.
try {
Size s = getSurfaceSize(output);
int surfaceType = detectSurfaceType(output);
boolean flexibleConsumer = isFlexibleConsumer(output);
Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
if (sizes == null) {
// WAR: Override default format to IMPLEMENTATION_DEFINED for b/9487482
if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
// YUV_420_888 is always present in LEGACY for all
// IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
// API (i.e. {@code #getOutputSizes} works here).
sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
} else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
}
}
if (!ArrayUtils.contains(sizes, s)) {
if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
sizedSurfaces.add(new Pair<>(output, s));
} else {
String reason = (sizes == null) ? "format is invalid." :
("size not in valid set: " + Arrays.toString(sizes));
Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format " +
"0x%x is not valid, %s", s.getWidth(), s.getHeight(),
surfaceType, reason));
return BAD_VALUE;
}
} else {
sizedSurfaces.add(new Pair<>(output, s));
}
} catch (BufferQueueAbandonedException e) {
Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
return BAD_VALUE;
}
}
}
boolean success = false;
if (mDeviceState.setConfiguring()) {
mRequestThreadManager.configure(sizedSurfaces);
success = mDeviceState.setIdle();
}
if (success) {
mConfiguredSurfaces = outputs != null ? new ArrayList<>(outputs) : null;
} else {
return CameraBinderDecorator.INVALID_OPERATION;
}
return CameraBinderDecorator.NO_ERROR;
|
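The size check in configureOutputs() relies on a format fallback: when no output sizes are published for the format a surface reports, the sizes are looked up under a publicly visible format instead (YUV_420_888 for the RGBA..BGRA implementation-defined range, JPEG for BLOB). The sketch below illustrates that fallback only; the constant values are illustrative stand-ins, not the real HAL or ImageFormat definitions.

/**
 * Sketch of the format fallback in configureOutputs(). In the real code this fallback
 * only happens when getOutputSizes() returned null for the reported format.
 */
public class FormatFallbackSketch {
    // Hypothetical stand-ins for the HAL / ImageFormat constants used by the real code.
    static final int HAL_PIXEL_FORMAT_RGBA_8888 = 1;
    static final int HAL_PIXEL_FORMAT_BGRA_8888 = 5;
    static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
    static final int IMAGE_FORMAT_YUV_420_888 = 0x23;
    static final int IMAGE_FORMAT_JPEG = 0x100;

    /** Returns the format whose published output sizes should be consulted. */
    static int queryableFormat(int surfaceType) {
        if (surfaceType >= HAL_PIXEL_FORMAT_RGBA_8888 && surfaceType <= HAL_PIXEL_FORMAT_BGRA_8888) {
            // LEGACY devices publish YUV_420_888 sizes for implementation-defined outputs.
            return IMAGE_FORMAT_YUV_420_888;
        } else if (surfaceType == HAL_PIXEL_FORMAT_BLOB) {
            return IMAGE_FORMAT_JPEG;   // BLOB surfaces carry JPEG data
        }
        return surfaceType;             // format already has published sizes
    }

    public static void main(String[] args) {
        System.out.printf("0x%x%n", queryableFormat(HAL_PIXEL_FORMAT_BLOB));    // 0x100
        System.out.printf("0x%x%n", queryableFormat(IMAGE_FORMAT_YUV_420_888)); // 0x23 (unchanged)
    }
}
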
static void | configureSurface(android.view.Surface surface, int width, int height, int pixelFormat)
checkNotNull(surface);
checkArgumentPositive(width, "width must be positive.");
checkArgumentPositive(height, "height must be positive.");
LegacyExceptionUtils.throwOnError(nativeConfigureSurface(surface, width, height,
pixelFormat));
|
static boolean | containsSurfaceId(android.view.Surface s, java.util.Collection&lt;Long&gt; ids)
long id = getSurfaceId(s);
return ids.contains(id);
|
public static int | detectSurfaceType(android.view.Surface surface) Query the surface for its currently configured format.
checkNotNull(surface);
return LegacyExceptionUtils.throwOnError(nativeDetectSurfaceType(surface));
|
static int | detectSurfaceUsageFlags(android.view.Surface surface) Query the surface for its currently configured usage flags.
checkNotNull(surface);
return nativeDetectSurfaceUsageFlags(surface);
|
protected void | finalize()
try {
close();
} catch (CameraRuntimeException e) {
Log.e(TAG, "Got error while trying to finalize, ignoring: " + e.getMessage());
} finally {
super.finalize();
}
|
static android.util.Size | findClosestSize(android.util.Size size, android.util.Size[] supportedSizes)
if (size == null || supportedSizes == null) {
return null;
}
Size bestSize = null;
for (Size s : supportedSizes) {
if (s.equals(size)) {
return size;
} else if (s.getWidth() <= MAX_DIMEN_FOR_ROUNDING && (bestSize == null ||
LegacyCameraDevice.findEuclidDistSquare(size, s) <
LegacyCameraDevice.findEuclidDistSquare(bestSize, size))) {
bestSize = s;
}
}
return bestSize;
|
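findClosestSize() returns an exact match immediately and otherwise picks the supported size with the smallest squared Euclidean distance to the requested one, skipping candidates wider than MAX_DIMEN_FOR_ROUNDING. A worked, self-contained sketch follows; plain int pairs stand in for android.util.Size, and the 1920 rounding limit is an assumption made for the example.

import java.util.Arrays;

// Sketch of the closest-size rounding done by findClosestSize().
public class ClosestSizeSketch {
    static final int MAX_DIMEN_FOR_ROUNDING = 1920; // assumed limit for this sketch

    static long distSq(int[] a, int[] b) {
        long d0 = a[0] - b[0], d1 = a[1] - b[1];
        return d0 * d0 + d1 * d1;                   // same formula as findEuclidDistSquare
    }

    static int[] findClosest(int[] size, int[][] supported) {
        int[] best = null;
        for (int[] s : supported) {
            if (Arrays.equals(s, size)) {
                return size;                        // exact match wins
            }
            if (s[0] <= MAX_DIMEN_FOR_ROUNDING
                    && (best == null || distSq(size, s) < distSq(best, size))) {
                best = s;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        int[][] supported = { {1920, 1080}, {1280, 720}, {640, 480} };
        // (1300, 700): distance to 1280x720 is 20*20 + 20*20 = 800, far smaller than the others.
        System.out.println(Arrays.toString(findClosest(new int[] {1300, 700}, supported)));
        // -> [1280, 720]
    }
}
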
static long | findEuclidDistSquare(android.util.Size a, android.util.Size b)
long d0 = a.getWidth() - b.getWidth();
long d1 = a.getHeight() - b.getHeight();
return d0 * d0 + d1 * d1;
|
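For example, with a = 1920x1080 and b = 1280x720, d0 = 640 and d1 = 360, so the method returns 640*640 + 360*360 = 539200.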
public long | flush() Flush any pending requests.
long lastFrame = mRequestThreadManager.flush();
waitUntilIdle();
return lastFrame;
|
private android.hardware.camera2.impl.CaptureResultExtras | getExtrasFromRequest(RequestHolder holder)
if (holder == null) {
return new CaptureResultExtras(ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE,
ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE);
}
return new CaptureResultExtras(holder.getRequestId(), holder.getSubsequeceId(),
/*afTriggerId*/0, /*precaptureTriggerId*/0, holder.getFrameNumber(),
/*partialResultCount*/1);
|
static long | getSurfaceId(android.view.Surface surface)
checkNotNull(surface);
return nativeGetSurfaceId(surface);
|
static java.util.List&lt;Long&gt; | getSurfaceIds(java.util.Collection&lt;Surface&gt; surfaces)
if (surfaces == null) {
throw new NullPointerException("Null argument surfaces");
}
List<Long> surfaceIds = new ArrayList<>();
for (Surface s : surfaces) {
long id = getSurfaceId(s);
if (id == 0) {
throw new IllegalStateException(
"Configured surface had null native GraphicBufferProducer pointer!");
}
surfaceIds.add(id);
}
return surfaceIds;
|
public static android.util.Size | getSurfaceSize(android.view.Surface surface) Query the surface for its currently configured default buffer size.
checkNotNull(surface);
int[] dimens = new int[2];
LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDimens(surface, /*out*/dimens));
return new Size(dimens[0], dimens[1]);
|
static android.util.Size | getTextureSize(android.graphics.SurfaceTexture surfaceTexture)
checkNotNull(surfaceTexture);
int[] dimens = new int[2];
LegacyExceptionUtils.throwOnError(nativeDetectTextureDimens(surfaceTexture,
/*out*/dimens));
return new Size(dimens[0], dimens[1]);
|
public boolean | isClosed() Return {@code true} if the device has been closed.
return mClosed;
|
public static boolean | isFlexibleConsumer(android.view.Surface output)
int usageFlags = detectSurfaceUsageFlags(output);
// Keep up to date with allowed consumer types in
// frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT;
int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN |
GRALLOC_USAGE_HW_COMPOSER;
boolean flexibleConsumer = ((usageFlags & disallowedFlags) == 0 &&
(usageFlags & allowedFlags) != 0);
return flexibleConsumer;
|
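isFlexibleConsumer() treats a surface as safe to size-round only when none of the disallowed gralloc usage bits (video encoder, RenderScript) are set and at least one allowed bit (GPU texture, frequent CPU reads, hardware composer) is. A minimal sketch of that bit test follows; the numeric flag values are illustrative stand-ins, not the real gralloc constants.

// Sketch of the usage-flag test behind isFlexibleConsumer().
public class FlexibleConsumerSketch {
    static final int USAGE_HW_TEXTURE       = 1 << 8;
    static final int USAGE_HW_COMPOSER      = 1 << 11;
    static final int USAGE_HW_VIDEO_ENCODER = 1 << 16;
    static final int USAGE_RENDERSCRIPT     = 1 << 20;
    static final int USAGE_SW_READ_OFTEN    = 0x3;

    static boolean isFlexible(int usageFlags) {
        int disallowed = USAGE_HW_VIDEO_ENCODER | USAGE_RENDERSCRIPT;
        int allowed = USAGE_HW_TEXTURE | USAGE_SW_READ_OFTEN | USAGE_HW_COMPOSER;
        // All disallowed bits must be clear, and at least one allowed bit set.
        return (usageFlags & disallowed) == 0 && (usageFlags & allowed) != 0;
    }

    public static void main(String[] args) {
        System.out.println(isFlexible(USAGE_HW_TEXTURE));                          // true
        System.out.println(isFlexible(USAGE_HW_TEXTURE | USAGE_HW_VIDEO_ENCODER)); // false
    }
}
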
private static native int | nativeConfigureSurface(android.view.Surface surface, int width, int height, int pixelFormat)
|
private static native int | nativeDetectSurfaceDimens(android.view.Surface surface, int[] dimens)
|
private static native int | nativeDetectSurfaceType(android.view.Surface surface)
|
private static native int | nativeDetectSurfaceUsageFlags(android.view.Surface surface)
|
private static native int | nativeDetectTextureDimens(android.graphics.SurfaceTexture surfaceTexture, int[] dimens)
|
static native int | nativeGetJpegFooterSize()
|
private static native long | nativeGetSurfaceId(android.view.Surface surface)
|
private static native int | nativeProduceFrame(android.view.Surface surface, byte[] pixelBuffer, int width, int height, int pixelFormat)
|
private static native int | nativeSetNextTimestamp(android.view.Surface surface, long timestamp)
|
private static native int | nativeSetSurfaceDimens(android.view.Surface surface, int width, int height)
|
private static native int | nativeSetSurfaceFormat(android.view.Surface surface, int pixelFormat)
|
private static native int | nativeSetSurfaceOrientation(android.view.Surface surface, int facing, int sensorOrientation)
|
static boolean | needsConversion(android.view.Surface s) Check if a given surface uses {@link ImageFormat#YUV_420_888} or a format that can be readily
converted to it; YV12 and NV21 are the two currently supported formats.
int nativeType = detectSurfaceType(s);
return nativeType == ImageFormat.YUV_420_888 || nativeType == ImageFormat.YV12 ||
nativeType == ImageFormat.NV21;
|
static void | produceFrame(android.view.Surface surface, byte[] pixelBuffer, int width, int height, int pixelFormat)
checkNotNull(surface);
checkNotNull(pixelBuffer);
checkArgumentPositive(width, "width must be positive.");
checkArgumentPositive(height, "height must be positive.");
LegacyExceptionUtils.throwOnError(nativeProduceFrame(surface, pixelBuffer, width, height,
pixelFormat));
|
static void | setNextTimestamp(android.view.Surface surface, long timestamp)
checkNotNull(surface);
LegacyExceptionUtils.throwOnError(nativeSetNextTimestamp(surface, timestamp));
|
static void | setSurfaceDimens(android.view.Surface surface, int width, int height)
checkNotNull(surface);
checkArgumentPositive(width, "width must be positive.");
checkArgumentPositive(height, "height must be positive.");
LegacyExceptionUtils.throwOnError(nativeSetSurfaceDimens(surface, width, height));
|
static void | setSurfaceFormat(android.view.Surface surface, int pixelFormat)
checkNotNull(surface);
LegacyExceptionUtils.throwOnError(nativeSetSurfaceFormat(surface, pixelFormat));
|
static void | setSurfaceOrientation(android.view.Surface surface, int facing, int sensorOrientation)
checkNotNull(surface);
LegacyExceptionUtils.throwOnError(nativeSetSurfaceOrientation(surface, facing,
sensorOrientation));
|
public int | submitRequest(android.hardware.camera2.CaptureRequest request, boolean repeating, android.hardware.camera2.utils.LongParcelable frameNumber) Submit a single capture request.
ArrayList<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
requestList.add(request);
return submitRequestList(requestList, repeating, frameNumber);
|
public int | submitRequestList(java.util.List&lt;CaptureRequest&gt; requestList, boolean repeating, android.hardware.camera2.utils.LongParcelable frameNumber) Submit a burst of capture requests.
if (requestList == null || requestList.isEmpty()) {
Log.e(TAG, "submitRequestList - Empty/null requests are not allowed");
return BAD_VALUE;
}
List<Long> surfaceIds = (mConfiguredSurfaces == null) ? new ArrayList<Long>() :
getSurfaceIds(mConfiguredSurfaces);
// Make sure that all requests have at least 1 surface and that all surfaces are non-null
for (CaptureRequest request : requestList) {
if (request.getTargets().isEmpty()) {
Log.e(TAG, "submitRequestList - "
+ "Each request must have at least one Surface target");
return BAD_VALUE;
}
for (Surface surface : request.getTargets()) {
if (surface == null) {
Log.e(TAG, "submitRequestList - Null Surface targets are not allowed");
return BAD_VALUE;
} else if (mConfiguredSurfaces == null) {
Log.e(TAG, "submitRequestList - must configure " +
" device with valid surfaces before submitting requests");
return INVALID_OPERATION;
} else if (!containsSurfaceId(surface, surfaceIds)) {
Log.e(TAG, "submitRequestList - cannot use a surface that wasn't configured");
return BAD_VALUE;
}
}
}
// TODO: further validation of request here
mIdle.close();
return mRequestThreadManager.submitCaptureRequests(requestList, repeating,
frameNumber);
|
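submitRequestList() rejects a burst before it reaches the request thread if any request has no targets, any target is null, the device was never configured, or a target is not among the configured surfaces (matched by surface id). The sketch below mirrors that gatekeeping in simplified form; the Request record, the id-based targets, and the error-code values are stand-ins for the real CaptureRequest/Surface types and binder status codes.

import java.util.List;
import java.util.Set;

// Sketch of the validation order used by submitRequestList().
public class SubmitValidationSketch {
    static final int NO_ERROR = 0, BAD_VALUE = -22, INVALID_OPERATION = -38;

    record Request(List<Long> targetIds) {}     // stand-in: targets identified by surface id

    static int validate(List<Request> requests, Set<Long> configuredIds) {
        if (requests == null || requests.isEmpty()) {
            return BAD_VALUE;                   // empty/null bursts are not allowed
        }
        for (Request r : requests) {
            if (r.targetIds().isEmpty()) {
                return BAD_VALUE;               // each request needs at least one target
            }
            for (Long id : r.targetIds()) {
                if (id == null) {
                    return BAD_VALUE;           // null Surface targets are not allowed
                } else if (configuredIds == null) {
                    return INVALID_OPERATION;   // outputs were never configured
                } else if (!configuredIds.contains(id)) {
                    return BAD_VALUE;           // target was not part of configureOutputs()
                }
            }
        }
        return NO_ERROR;
    }

    public static void main(String[] args) {
        Set<Long> configured = Set.of(1L, 2L);
        System.out.println(validate(List.of(new Request(List.of(1L))), configured)); // 0
        System.out.println(validate(List.of(new Request(List.of(3L))), configured)); // -22
    }
}
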
public void | waitUntilIdle() Block until the {@link ICameraDeviceCallbacks#onCameraIdle()} callback is received.
mIdle.block();
|
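waitUntilIdle() blocks on the mIdle condition variable that submitRequestList() closes before handing requests to the request thread, and which is reopened once the device reports idle (per the onCameraIdle() description above). A plain-Java sketch of that gate pattern follows; the Gate class is a stand-in for android.os.ConditionVariable, and the worker thread stands in for the capture pipeline signalling idle.

// Sketch of the idle gate behind submitRequestList()/waitUntilIdle().
public class IdleGateSketch {
    static class Gate {
        private boolean open = true;
        synchronized void close() { open = false; }
        synchronized void open() { open = true; notifyAll(); }
        synchronized void block() throws InterruptedException {
            while (!open) {
                wait();
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Gate idle = new Gate();
        idle.close();                               // a request was submitted: no longer idle
        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(50);                   // stand-in for processing the capture
            } catch (InterruptedException ignored) { }
            idle.open();                            // stand-in for the idle callback arriving
        });
        worker.start();
        idle.block();                               // waitUntilIdle(): returns once reopened
        System.out.println("device idle");
        worker.join();
    }
}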