RequestThreadManager — public class RequestThreadManager extends Object. This class executes requests to the {@link Camera}.
The main components of this class are:
- A message queue of requests to the {@link Camera}.
- A thread that consumes requests to the {@link Camera} and executes them.
- A {@link GLThreadManager} that draws to the configured output {@link Surface}s.
- A {@link CameraDeviceState} state machine that manages the callbacks for various operations.
|
Fields Summary |
---|
private final String | TAG | private final int | mCameraId | private final RequestHandlerThread | mRequestThread | private static final boolean | DEBUG | private static final boolean | VERBOSE | private android.hardware.Camera | mCamera | private final android.hardware.camera2.CameraCharacteristics | mCharacteristics | private final CameraDeviceState | mDeviceState | private final CaptureCollector | mCaptureCollector | private final LegacyFocusStateMapper | mFocusStateMapper | private final LegacyFaceDetectMapper | mFaceDetectMapper | private static final int | MSG_CONFIGURE_OUTPUTS | private static final int | MSG_SUBMIT_CAPTURE_REQUEST | private static final int | MSG_CLEANUP | private static final int | MAX_IN_FLIGHT_REQUESTS | private static final int | PREVIEW_FRAME_TIMEOUT | private static final int | JPEG_FRAME_TIMEOUT | private static final int | REQUEST_COMPLETE_TIMEOUT | private static final float | ASPECT_RATIO_TOLERANCE | private boolean | mPreviewRunning | private final List | mPreviewOutputs | private final List | mCallbackOutputs | private GLThreadManager | mGLThreadManager | private android.graphics.SurfaceTexture | mPreviewTexture | private Camera.Parameters | mParams | private final List | mJpegSurfaceIds | private android.util.Size | mIntermediateBufferSize | private final RequestQueue | mRequestQueue | private LegacyRequest | mLastRequest | private android.graphics.SurfaceTexture | mDummyTexture | private android.view.Surface | mDummySurface | private final Object | mIdleLock | private final FpsCounter | mPrevCounter | private final FpsCounter | mRequestCounter | private final AtomicBoolean | mQuit | private static final boolean | USE_BLOB_FORMAT_OVERRIDE | private final Camera.ErrorCallback | mErrorCallback | private final android.os.ConditionVariable | mReceivedJpeg | private final Camera.PictureCallback | mJpegCallback | private final Camera.ShutterCallback | mJpegShutterCallback | private final SurfaceTexture.OnFrameAvailableListener | 
mPreviewCallback | private final Handler.Callback | mRequestHandlerCb |
Constructors Summary |
---|
public RequestThreadManager(int cameraId, android.hardware.Camera camera, android.hardware.camera2.CameraCharacteristics characteristics, CameraDeviceState deviceState)Create a new RequestThreadManager.
// Fail fast on null collaborators; this object is unusable without them.
mCamera = checkNotNull(camera, "camera must not be null");
mCameraId = cameraId;
mCharacteristics = checkNotNull(characteristics, "characteristics must not be null");
// Per-instance log tag so logs from different camera ids are distinguishable.
String name = String.format("RequestThread-%d", cameraId);
TAG = name;
mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
// Bounded in-flight capture window; completions are reported through mDeviceState.
mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
// The handler thread is created here but not started until start() is called.
mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
mCamera.setErrorCallback(mErrorCallback);
|
Methods Summary |
---|
private android.util.Size | calculatePictureSize(java.util.List callbackOutputs, java.util.List callbackSizes, Camera.Parameters params)Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
than all of the configured {@code JPEG} outputs (by both width and height).
If multiple supported JPEG sizes are larger, select the smallest of them which
still satisfies the above constraint.
As a result, the returned size is guaranteed to be usable without needing
to upscale any of the outputs. If only one {@code JPEG} surface is used,
then no scaling/cropping is necessary between the taken picture and
the {@code JPEG} output surface.
/*
* Find the largest JPEG size (if any), from the configured outputs:
* - the api1 picture size should be set to the smallest legal size that's at least as large
* as the largest configured JPEG size
*/
if (callbackOutputs.size() != callbackSizes.size()) {
throw new IllegalStateException("Input collections must be same length");
}
List<Size> configuredJpegSizes = new ArrayList<>();
Iterator<Size> sizeIterator = callbackSizes.iterator();
for (Surface callbackSurface : callbackOutputs) {
Size jpegSize = sizeIterator.next();
if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
continue; // Ignore non-JPEG callback formats
}
configuredJpegSizes.add(jpegSize);
}
if (!configuredJpegSizes.isEmpty()) {
/*
* Find the largest configured JPEG width, and height, independently
* of the rest.
*
* The rest of the JPEG streams can be cropped out of this smallest bounding
* rectangle.
*/
int maxConfiguredJpegWidth = -1;
int maxConfiguredJpegHeight = -1;
for (Size jpegSize : configuredJpegSizes) {
maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
jpegSize.getWidth() : maxConfiguredJpegWidth;
maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
jpegSize.getHeight() : maxConfiguredJpegHeight;
}
Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
params.getSupportedPictureSizes());
/*
* Find the smallest supported JPEG size that can fit the smallest bounding
* rectangle for the configured JPEG sizes.
*/
List<Size> candidateSupportedJpegSizes = new ArrayList<>();
for (Size supportedJpegSize : supportedJpegSizes) {
if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
candidateSupportedJpegSizes.add(supportedJpegSize);
}
}
if (candidateSupportedJpegSizes.isEmpty()) {
throw new AssertionError(
"Could not find any supported JPEG sizes large enough to fit " +
smallestBoundJpegSize);
}
Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
new SizeAreaComparator());
if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
Log.w(TAG,
String.format(
"configureOutputs - Will need to crop picture %s into "
+ "smallest bound size %s",
smallestSupportedJpegSize, smallestBoundJpegSize));
}
return smallestSupportedJpegSize;
}
return null;
| public long | cancelRepeating(int requestId)Cancel a repeating request.
return mRequestQueue.stopRepeating(requestId);
| private static boolean | checkAspectRatiosMatch(android.util.Size a, android.util.Size b)
float aAspect = a.getWidth() / (float) a.getHeight();
float bAspect = b.getWidth() / (float) b.getHeight();
return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
| public void | configure(java.util.Collection outputs)Configure with the current list of output Surfaces.
This operation blocks until the configuration is complete.
Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.
Handler handler = mRequestThread.waitAndGetHandler();
final ConditionVariable condition = new ConditionVariable(/*closed*/false);
ConfigureHolder holder = new ConfigureHolder(condition, outputs);
handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
condition.block();
| private void | configureOutputs(java.util.Collection outputs)
// Reconfigures the legacy camera for a new set of output surfaces: stops the
// preview, releases the old preview texture, classifies each output as
// JPEG-callback vs preview, selects preview/picture sizes, (re)starts the GL
// tee thread, and commits the new parameters to the camera.
// Elements are expected to be Pair<Surface, Size> (see the loop below).
if (DEBUG) {
String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
Log.d(TAG, "configureOutputs with " + outputsStr);
}
try {
stopPreview();
} catch (RuntimeException e) {
// A driver failure while stopping preview is fatal to the device session.
Log.e(TAG, "Received device exception in configure call: ", e);
mDeviceState.setError(
CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
return;
}
/*
* Try to release the previous preview's surface texture earlier if we end up
* using a different one; this also reduces the likelihood of getting into a deadlock
* when disconnecting from the old preview texture at a later time.
*/
try {
mCamera.setPreviewTexture(/*surfaceTexture*/null);
} catch (IOException e) {
// Non-fatal: log and continue with configuration.
Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
} catch (RuntimeException e) {
Log.e(TAG, "Received device exception in configure call: ", e);
mDeviceState.setError(
CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
return;
}
// Quiesce the GL thread before tearing down the previous configuration.
if (mGLThreadManager != null) {
mGLThreadManager.waitUntilStarted();
mGLThreadManager.ignoreNewFrames();
mGLThreadManager.waitUntilIdle();
}
// Undo any BLOB->RGBA format overrides, then drop all previous stream state.
resetJpegSurfaceFormats(mCallbackOutputs);
mPreviewOutputs.clear();
mCallbackOutputs.clear();
mJpegSurfaceIds.clear();
mPreviewTexture = null;
List<Size> previewOutputSizes = new ArrayList<>();
List<Size> callbackOutputSizes = new ArrayList<>();
// Used below to fix up each surface's orientation for this sensor.
int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
if (outputs != null) {
for (Pair<Surface, Size> outPair : outputs) {
Surface s = outPair.first;
Size outSize = outPair.second;
try {
int format = LegacyCameraDevice.detectSurfaceType(s);
LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
switch (format) {
case CameraMetadataNative.NATIVE_JPEG_FORMAT:
if (USE_BLOB_FORMAT_OVERRIDE) {
// Override to RGBA_8888 format.
LegacyCameraDevice.setSurfaceFormat(s,
LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
}
mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
mCallbackOutputs.add(s);
callbackOutputSizes.add(outSize);
break;
default:
// Everything that is not JPEG is treated as a preview output.
mPreviewOutputs.add(s);
previewOutputSizes.add(outSize);
break;
}
} catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
// A consumer that went away is simply dropped from the configuration.
Log.w(TAG, "Surface abandoned, skipping...", e);
}
}
}
try {
mParams = mCamera.getParameters();
} catch (RuntimeException e) {
Log.e(TAG, "Received device exception: ", e);
mDeviceState.setError(
CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
return;
}
List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
if (DEBUG) {
Log.d(TAG, "doPreviewCapture - Selected range [" +
bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
}
// NOTE(review): getPhotoPreviewFpsRange returns null when the supported list
// is empty; that would NPE here — confirm the HAL always reports ranges.
mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
if (previewOutputSizes.size() > 0) {
Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
// Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
mParams.getSupportedPreviewSizes());
// Use smallest preview dimension with same aspect ratio as sensor that is >= than all
// of the configured output dimensions. If none exists, fall back to using the largest
// supported preview size.
long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
for (Size s : supportedPreviewSizes) {
// NOTE(review): these two products are computed in int before widening to
// long; could overflow for very large sizes (unlike largestOutputArea
// above, which casts first) — confirm and consider casting.
long currArea = s.getWidth() * s.getHeight();
long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
if (checkAspectRatiosMatch(largestJpegDimen, s) && (currArea < bestArea &&
currArea >= largestOutputArea)) {
bestPreviewDimen = s;
}
}
mIntermediateBufferSize = bestPreviewDimen;
mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
mIntermediateBufferSize.getHeight());
if (DEBUG) {
Log.d(TAG, "Intermediate buffer selected with dimens: " +
bestPreviewDimen.toString());
}
} else {
// No preview outputs: no intermediate buffer is needed.
mIntermediateBufferSize = null;
if (DEBUG) {
Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
}
}
Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
callbackOutputSizes, mParams);
if (smallestSupportedJpegSize != null) {
/*
* Set takePicture size to the smallest supported JPEG size large enough
* to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
*/
Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
mParams.setPictureSize(
smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
}
// TODO: Detect and optimize single-output paths here to skip stream teeing.
if (mGLThreadManager == null) {
mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
mGLThreadManager.start();
}
mGLThreadManager.waitUntilStarted();
// Re-pair each preview surface with its size for the GL thread's configuration.
List<Pair<Surface, Size>> previews = new ArrayList<>();
Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
for (Surface p : mPreviewOutputs) {
previews.add(new Pair<>(p, previewSizeIter.next()));
}
mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
mGLThreadManager.allowNewFrames();
mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
if (mPreviewTexture != null) {
mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
}
try {
mCamera.setParameters(mParams);
} catch (RuntimeException e) {
Log.e(TAG, "Received device exception while configuring: ", e);
mDeviceState.setError(
CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
}
| private void | createDummySurface()Fake preview for jpeg captures when there is no active preview
if (mDummyTexture == null || mDummySurface == null) {
mDummyTexture = new SurfaceTexture(/*ignored*/0);
// TODO: use smallest default sizes
mDummyTexture.setDefaultBufferSize(640, 480);
mDummySurface = new Surface(mDummyTexture);
}
| private void | doJpegCapture(RequestHolder request)
if (DEBUG) Log.d(TAG, "doJpegCapturePrepare");
mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
mPreviewRunning = false;
| private void | doJpegCapturePrepare(RequestHolder request)
if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);
if (!mPreviewRunning) {
if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");
createDummySurface();
mCamera.setPreviewTexture(mDummyTexture);
startPreview();
}
| private void | doPreviewCapture(RequestHolder request)
if (VERBOSE) {
Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
}
if (mPreviewRunning) {
return; // Already running
}
if (mPreviewTexture == null) {
throw new IllegalStateException(
"Preview capture called with no preview surfaces configured.");
}
mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
mIntermediateBufferSize.getHeight());
mCamera.setPreviewTexture(mPreviewTexture);
startPreview();
| public long | flush()Flush any pending requests.
Log.i(TAG, "Flushing all pending requests.");
long lastFrame = mRequestQueue.stopRepeating();
mCaptureCollector.failAll();
return lastFrame;
| private int[] | getPhotoPreviewFpsRange(java.util.List frameRates)
if (frameRates.size() == 0) {
Log.e(TAG, "No supported frame rates returned!");
return null;
}
int bestMin = 0;
int bestMax = 0;
int bestIndex = 0;
int index = 0;
for (int[] rate : frameRates) {
int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
bestMin = minFps;
bestMax = maxFps;
bestIndex = index;
}
index++;
}
return frameRates.get(bestIndex);
| public void | quit()Quit the request thread, and clean up everything.
if (!mQuit.getAndSet(true)) { // Avoid sending messages on dead thread's handler.
Handler handler = mRequestThread.waitAndGetHandler();
handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
mRequestThread.quitSafely();
try {
mRequestThread.join();
} catch (InterruptedException e) {
Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
mRequestThread.getName(), mRequestThread.getId()));
}
}
| private void | resetJpegSurfaceFormats(java.util.Collection surfaces)
if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) {
return;
}
for(Surface s : surfaces) {
try {
LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB);
} catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
Log.w(TAG, "Surface abandoned, skipping...", e);
}
}
| public void | start()Start the request thread.
// Begins processing messages posted via submitCaptureRequests()/configure().
mRequestThread.start();
| private void | startPreview()
if (VERBOSE) {
Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
}
if (!mPreviewRunning) {
// XX: CameraClient:;startPreview is not getting called after a stop
mCamera.startPreview();
mPreviewRunning = true;
}
| private void | stopPreview()
if (VERBOSE) {
Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
}
if (mPreviewRunning) {
mCamera.stopPreview();
mPreviewRunning = false;
}
| public int | submitCaptureRequests(java.util.List requests, boolean repeating, android.hardware.camera2.utils.LongParcelable frameNumber)Submit the given burst of requests to be captured.
If the burst is repeating, replace the current repeating burst.
Handler handler = mRequestThread.waitAndGetHandler();
int ret;
synchronized (mIdleLock) {
ret = mRequestQueue.submit(requests, repeating, frameNumber);
handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
}
return ret;
|
|