Methods Summary |
---|
private static <T> boolean | arrayContains(T[] array, T element)
// A null array trivially contains nothing; guard before iterating.
if (array != null) {
    for (int i = 0; i < array.length; i++) {
        // Objects.equals tolerates null elements on either side.
        if (Objects.equals(array[i], element)) {
            return true;
        }
    }
}
return false;
|
static int | checkArgumentFormat(int format)Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
If a format has a different public representation than the internal representation,
passing in the internal representation here will fail.
For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
it has a different internal representation than the public representation
{@link ImageFormat#JPEG}, this check will fail.
Any invalid/undefined formats will raise an exception, including implementation-defined.
Note that {@code @hide} and deprecated formats will not pass this check.
// A format is legal if either ImageFormat or PixelFormat publicly defines it.
if (ImageFormat.isPublicFormat(format) || PixelFormat.isPublicFormat(format)) {
    return format;
}
throw new IllegalArgumentException(String.format(
        "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
|
static int | checkArgumentFormatInternal(int format)Ensures that the format is either user-defined or implementation defined.
If a format has a different internal representation than the public representation,
passing in the public representation here will fail.
For example if trying to use {@link ImageFormat#JPEG}:
it has a different public representation than the internal representation
{@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.
Any invalid/undefined formats will raise an exception.
// The three internal-only formats are accepted directly.
if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
        || format == HAL_PIXEL_FORMAT_BLOB
        || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
    return format;
}
// JPEG is the one public format whose internal representation differs (BLOB),
// so it is explicitly rejected here.
if (format == ImageFormat.JPEG) {
    throw new IllegalArgumentException(
            "ImageFormat.JPEG is an unknown internal format");
}
// Everything else must at least be a legal public format.
return checkArgumentFormat(format);
|
private int | checkArgumentFormatSupported(int format, boolean output)
// First reject formats that are not publicly defined at all.
checkArgumentFormat(format);

// Then require the format to be advertised for the requested direction.
int[] formats = output ? getOutputFormats() : getInputFormats();
for (int i = 0; i < formats.length; ++i) {
    if (format == formats[i]) {
        return format;
    }
}
// Use the "0x%x" spelling for consistency with checkArgumentFormat; a bare
// "%x" prints unprefixed hex, which reads as a decimal value.
throw new IllegalArgumentException(String.format(
        "format 0x%x is not supported by this stream configuration map", format));
|
public boolean | equals(java.lang.Object obj)Check if this {@link StreamConfigurationMap} is equal to another
{@link StreamConfigurationMap}.
Two maps are equal if and only if each of the respective underlying arrays is equal.
// Reflexive fast path: an instance always equals itself.
if (this == obj) {
    return true;
}
// instanceof is false for null, so this also covers the null check.
if (!(obj instanceof StreamConfigurationMap)) {
    return false;
}
final StreamConfigurationMap other = (StreamConfigurationMap) obj;
// XX: do we care about order?
// Element-wise, order-sensitive comparison of every backing array.
return Arrays.equals(mConfigurations, other.mConfigurations)
        && Arrays.equals(mMinFrameDurations, other.mMinFrameDurations)
        && Arrays.equals(mStallDurations, other.mStallDurations)
        && Arrays.equals(mHighSpeedVideoConfigurations,
                other.mHighSpeedVideoConfigurations);
|
private StreamConfigurationDuration[] | getDurations(int duration)Get the durations array for the kind of duration
// Map the duration kind onto its backing table.
if (duration == DURATION_MIN_FRAME) {
    return mMinFrameDurations;
}
if (duration == DURATION_STALL) {
    return mStallDurations;
}
throw new IllegalArgumentException("duration was invalid");
|
private java.util.HashMap | getFormatsMap(boolean output)Get the format -> size count map for either output or input formats
// Output and input configurations are tracked in separate format-count maps.
if (output) {
    return mOutputFormats;
}
return mInputFormats;
|
public android.util.Range[] | getHighSpeedVideoFpsRanges()Get a list of supported high speed video recording FPS ranges.
When HIGH_SPEED_VIDEO is supported in
{@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES available scene modes}, this
method will list the supported high speed video FPS range configurations. Application can
then use {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned
FPS range.
To enable high speed video recording, application must set
{@link CaptureRequest#CONTROL_SCENE_MODE} to
{@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
requests and select the video size from {@link #getHighSpeedVideoSizesFor} and
{@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
this method to configure the recording and preview streams and setup the recording requests.
For example, if the application intends to do high speed recording, it can select one FPS
range reported by this method, query the video sizes corresponding to this FPS range by
{@link #getHighSpeedVideoSizesFor} and select one of reported sizes to configure output
streams. Note that for the use case of multiple output streams, application must select one
unique size from {@link #getHighSpeedVideoSizesFor}, and use it for all output streams.
Otherwise a request error might occur when attempting to enable
{@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}.
Once the stream is configured, application can set the FPS range in the recording requests.
Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
return keySet.toArray(new Range[keySet.size()]);
|
public android.util.Range[] | getHighSpeedVideoFpsRangesFor(android.util.Size size)Get the frame per second ranges (fpsMin, fpsMax) for input high speed video size.
See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.
For normal video recording use case, where some application will NOT set
{@link CaptureRequest#CONTROL_SCENE_MODE} to
{@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
requests, the {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in
this method must not be used to setup capture requests, or it will cause request error.
// Count recorded for this size when the map was built; null or 0 means the
// size has no high speed video configuration.
Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
if (fpsRangeCount == null || fpsRangeCount == 0) {
throw new IllegalArgumentException(String.format(
"Size %s does not support high speed video recording", size));
}
// Raw Range array: generic array creation is not possible in Java.
@SuppressWarnings("unchecked")
Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
int i = 0;
// Collect every FPS range whose configuration matches the requested size.
// Assumes fpsRangeCount matches the number of matching configurations —
// otherwise entries stay null or the write overruns; TODO confirm invariant.
for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
if (size.equals(config.getSize())) {
fpsRanges[i++] = config.getFpsRange();
}
}
return fpsRanges;
|
public android.util.Size[] | getHighSpeedVideoSizes()Get a list of supported high speed video recording sizes.
When HIGH_SPEED_VIDEO is supported in
{@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES available scene modes}, this
method will list the supported high speed video size configurations. All the sizes listed
will be a subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling
formats (typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12)
To enable high speed video recording, application must set
{@link CaptureRequest#CONTROL_SCENE_MODE} to
{@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
requests and select the video size from this method and
{@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
{@link #getHighSpeedVideoFpsRangesFor} to configure the recording and preview streams and
setup the recording requests. For example, if the application intends to do high speed
recording, it can select the maximum size reported by this method to configure output
streams. Note that for the use case of multiple output streams, application must select one
unique size from this method to use. Otherwise a request error might occur. Once the size is
selected, application can get the supported FPS ranges by
{@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to setup the recording
requests.
Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
return keySet.toArray(new Size[keySet.size()]);
|
public android.util.Size[] | getHighSpeedVideoSizesFor(android.util.Range fpsRange)Get the supported video sizes for input FPS range.
See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
For normal video recording use case, where the application will NOT set
{@link CaptureRequest#CONTROL_SCENE_MODE} to
{@link CaptureRequest#CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO} in capture
requests, the {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in
this method must not be used to setup capture requests, or it will cause request error.
// Count recorded for this FPS range when the map was built; null or 0 means
// the range has no high speed video configuration.
Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
if (sizeCount == null || sizeCount == 0) {
throw new IllegalArgumentException(String.format(
"FpsRange %s does not support high speed video recording", fpsRange));
}
Size[] sizes = new Size[sizeCount];
int i = 0;
// Collect the size of every configuration advertising exactly this FPS range.
// Assumes sizeCount matches the number of matching configurations — otherwise
// entries stay null or the write overruns; TODO confirm invariant.
for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
if (fpsRange.equals(config.getFpsRange())) {
sizes[i++] = config.getSize();
}
}
return sizes;
|
public final int[] | getInputFormats()Get the image {@code format} input formats in this stream configuration.
All image formats returned by this function will be defined in either {@link ImageFormat}
or in {@link PixelFormat} (and there is no possibility of collision).
// Delegate to the shared helper, selecting the input (non-output) direction.
return getPublicFormats(/*output*/false);
|
public android.util.Size[] | getInputSizes(int format)Get the supported input sizes for this input format.
The format must have come from {@link #getInputFormats}; otherwise
{@code null} is returned.
// Returns null (rather than throwing) when the format is not an input format.
return getPublicFormatSizes(format, /*output*/false);
|
private long | getInternalFormatDuration(int format, android.util.Size size, int duration)
// Assume the format is already validated by the caller, since it's internal.
// Only output sizes are consulted: durations are published for outputs only.
if (!arrayContains(getInternalFormatSizes(format, /*output*/true), size)) {
throw new IllegalArgumentException("size was not supported");
}
// Pick the duration table (min-frame vs. stall) for the requested kind.
StreamConfigurationDuration[] durations = getDurations(duration);
// Linear search for an exact (format, width, height) match.
for (StreamConfigurationDuration configurationDuration : durations) {
if (configurationDuration.getFormat() == format &&
configurationDuration.getWidth() == size.getWidth() &&
configurationDuration.getHeight() == size.getHeight()) {
return configurationDuration.getDuration();
}
}
// Default duration is '0' (unsupported/no extra stall)
return 0;
|
private android.util.Size[] | getInternalFormatSizes(int format, boolean output)
HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
// The map stores, per internal format, the number of sizes counted at
// construction time; absence means the format is unavailable.
Integer sizesCount = formatsMap.get(format);
if (sizesCount == null) {
throw new IllegalArgumentException("format not available");
}
int len = sizesCount;
Size[] sizes = new Size[len];
int sizeIndex = 0;
// Collect the size of every configuration matching this format and direction.
for (StreamConfiguration config : mConfigurations) {
if (config.getFormat() == format && config.isOutput() == output) {
sizes[sizeIndex++] = config.getSize();
}
}
// Sanity check: the pre-computed count must agree with the actual scan.
if (sizeIndex != len) {
throw new AssertionError(
"Too few sizes (expected " + len + ", actual " + sizeIndex + ")");
}
return sizes;
|
public final int[] | getOutputFormats()Get the image {@code format} output formats in this stream configuration.
All image formats returned by this function will be defined in either {@link ImageFormat}
or in {@link PixelFormat} (and there is no possibility of collision).
Formats listed in this array are guaranteed to return true if queried with
{@link #isOutputSupportedFor(int)}.
// Delegate to the shared helper, selecting the output direction.
return getPublicFormats(/*output*/true);
|
public long | getOutputMinFrameDuration(int format, android.util.Size size)Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
for the format/size combination (in nanoseconds).
{@code format} should be one of the ones returned by {@link #getOutputFormats()}.
{@code size} should be one of the ones returned by
{@link #getOutputSizes(int)}.
This should correspond to the frame duration when only that stream is active, with all
processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
When multiple streams are used in a request, the minimum frame duration will be
{@code max(individual stream min durations)}.
For devices that do not support manual sensor control
({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
this function may return 0.
checkNotNull(size, "size must not be null");
// Validate the public format against the advertised output formats.
checkArgumentFormatSupported(format, /*output*/true);
// Durations are keyed by the internal representation (e.g. JPEG -> BLOB).
return getInternalFormatDuration(imageFormatToInternal(format), size, DURATION_MIN_FRAME);
|
public long | getOutputMinFrameDuration(java.lang.Class klass, android.util.Size size)Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
for the class/size combination (in nanoseconds).
This assumes the {@code klass} is set up to use an implementation-defined format.
For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.
{@code klass} should be one of the ones which is supported by
{@link #isOutputSupportedFor(Class)}.
{@code size} should be one of the ones returned by
{@link #getOutputSizes(int)}.
This should correspond to the frame duration when only that stream is active, with all
processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
When multiple streams are used in a request, the minimum frame duration will be
{@code max(individual stream min durations)}.
For devices that do not support manual sensor control
({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
this function may return 0.
// Classes are only served through the implementation-defined format.
if (!isOutputSupportedFor(klass)) {
throw new IllegalArgumentException("klass was not supported");
}
// NOTE(review): size is not null-checked here, unlike the (int, Size)
// overload; a null size fails later with "size was not supported".
return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
size, DURATION_MIN_FRAME);
|
public android.util.Size[] | getOutputSizes(java.lang.Class klass)Get a list of sizes compatible with {@code klass} to use as an output.
Since some of the supported classes may support additional formats beyond
an opaque/implementation-defined (under-the-hood) format; this function only returns
sizes for the implementation-defined format.
Some classes such as {@link android.media.ImageReader} may only support user-defined
formats; in particular {@link #isOutputSupportedFor(Class)} will return {@code true} for
that class and this method will return an empty array (but not {@code null}).
If a well-defined format such as {@code NV21} is required, use
{@link #getOutputSizes(int)} instead.
The {@code klass} should be a supported output, that querying
{@code #isOutputSupportedFor(Class)} should return {@code true}.
// Image reader is "supported", but never for implementation-defined formats; return empty
if (android.media.ImageReader.class.isAssignableFrom(klass)) {
    return new Size[0];
}
// Unsupported classes are documented to yield null rather than an exception.
// (Use the idiomatic '!' negation instead of comparing against 'false'.)
if (!isOutputSupportedFor(klass)) {
    return null;
}
// All remaining supported classes consume the implementation-defined format.
return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /*output*/true);
|
public android.util.Size[] | getOutputSizes(int format)Get a list of sizes compatible with the requested image {@code format}.
The {@code format} should be a supported format (one of the formats returned by
{@link #getOutputFormats}).
// Returns null (rather than throwing) when the format is not an output format.
return getPublicFormatSizes(format, /*output*/true);
|
public long | getOutputStallDuration(int format, android.util.Size size)Get the stall duration for the format/size combination (in nanoseconds).
{@code format} should be one of the ones returned by {@link #getOutputFormats()}.
{@code size} should be one of the ones returned by
{@link #getOutputSizes(int)}.
A stall duration is how much extra time would get added to the normal minimum frame duration
for a repeating request that has streams with non-zero stall.
For example, consider JPEG captures which have the following characteristics:
- JPEG streams act like processed YUV streams in requests for which they are not included;
in requests in which they are directly referenced, they act as JPEG streams.
This is because supporting a JPEG stream requires the underlying YUV data to always be ready
for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
requests that actually reference a JPEG stream.
- The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
process more than 1 capture at a time.
In other words, using a repeating YUV request would result in a steady frame rate
(let's say it's 30 FPS). If a single JPEG request is submitted periodically,
the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
30 FPS.
In general, submitting a new request with a non-0 stall time stream will not cause a
frame rate drop unless there are still outstanding buffers for that stream from previous
requests.
Submitting a repeating request with streams (call this {@code S}) is the same as setting
the minimum frame duration from the normal minimum frame duration corresponding to {@code S},
added with the maximum stall duration for {@code S}.
If interleaving requests with and without a stall duration, a request will stall by the
maximum of the remaining times for each can-stall stream with outstanding buffers.
This means that a stalling request will not have an exposure start until the stall has
completed.
This should correspond to the stall duration when only that stream is active, with all
processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
indeterminate stall duration for all streams in a request (the regular stall calculation
rules are ignored).
The following formats may always have a stall duration:
- {@link ImageFormat#JPEG JPEG}
- {@link ImageFormat#RAW_SENSOR RAW16}
The following formats will never have a stall duration:
- {@link ImageFormat#YUV_420_888 YUV_420_888}
- {@link #isOutputSupportedFor(Class) Implementation-Defined}
All other formats may or may not have an allowed stall duration on a per-capability basis;
refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
android.request.availableCapabilities} for more details.
See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
for more information about calculating the max frame rate (absent stalls).
// NOTE(review): unlike getOutputMinFrameDuration(int, Size), size is not
// null-checked here; a null size fails later with "size was not supported".
checkArgumentFormatSupported(format, /*output*/true);
// Stall durations are keyed by the internal representation of the format.
return getInternalFormatDuration(imageFormatToInternal(format),
size, DURATION_STALL);
|
public long | getOutputStallDuration(java.lang.Class klass, android.util.Size size)Get the stall duration for the class/size combination (in nanoseconds).
This assumes the {@code klass} is set up to use an implementation-defined format.
For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.
{@code klass} should be one of the ones with a non-empty array returned by
{@link #getOutputSizes(Class)}.
{@code size} should be one of the ones returned by
{@link #getOutputSizes(Class)}.
See {@link #getOutputStallDuration(int, Size)} for a definition of a
stall duration.
// Classes are only served through the implementation-defined format.
if (!isOutputSupportedFor(klass)) {
throw new IllegalArgumentException("klass was not supported");
}
// Look up the stall table under the implementation-defined internal format.
return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
size, DURATION_STALL);
|
private int | getPublicFormatCount(boolean output)Count the number of publicly-visible output formats
// Start from the raw count of distinct internal formats...
HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
int publicCount = formatsMap.size();
// ...then subtract the two internal-only formats, which are never exposed.
if (formatsMap.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
    publicCount--;
}
if (formatsMap.containsKey(HAL_PIXEL_FORMAT_RAW_OPAQUE)) {
    publicCount--;
}
return publicCount;
|
private android.util.Size[] | getPublicFormatSizes(int format, boolean output)
// Convert the "unsupported format" exception into a null return, matching
// the public contract of getOutputSizes/getInputSizes.
try {
checkArgumentFormatSupported(format, output);
} catch (IllegalArgumentException e) {
return null;
}
// Sizes are stored under the internal format representation.
format = imageFormatToInternal(format);
return getInternalFormatSizes(format, output);
|
private int[] | getPublicFormats(boolean output)Get the list of publicly visible output formats; does not include IMPL_DEFINED
int[] formats = new int[getPublicFormatCount(output)];
int i = 0;
// Copy every format except the two internal-only ones, which have no public
// representation.
for (int format : getFormatsMap(output).keySet()) {
if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
format != HAL_PIXEL_FORMAT_RAW_OPAQUE) {
formats[i++] = format;
}
}
// Sanity check: getPublicFormatCount must agree with the filtering above.
if (formats.length != i) {
throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
}
// Translate internal formats (e.g. BLOB) to their public equivalents (JPEG).
return imageFormatToPublic(formats);
|
public int | hashCode(){@inheritDoc}
// XX: do we care about order?
return HashCodeHelpers.hashCode(
mConfigurations, mMinFrameDurations,
mStallDurations, mHighSpeedVideoConfigurations);
|
static int | imageFormatToInternal(int format)Convert a public format compatible with {@code ImageFormat} to an internal format
from {@code graphics.h}.
In particular these formats are converted:
- ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
Passing in an implementation-defined format here will fail (it's not a public format);
as will passing in an internal format which has a different public format equivalent.
See {@link #checkArgumentFormat} for more details about a legal public format.
All other formats are returned as-is, no invalid check is performed.
This function is the dual of {@link #imageFormatToPublic}.
// JPEG is the only public format with a distinct internal representation.
if (format == ImageFormat.JPEG) {
    return HAL_PIXEL_FORMAT_BLOB;
}
// Implementation-defined is internal-only and must never arrive here.
if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    throw new IllegalArgumentException(
            "IMPLEMENTATION_DEFINED is not allowed via public API");
}
// Every other format shares its public and internal value; no further checks.
return format;
|
public static int[] | imageFormatToInternal(int[] formats)Convert image formats from public to internal formats (in-place).
// Null passes through so callers can forward optional arrays unchanged.
if (formats == null) {
return null;
}
// In-place conversion: the argument array itself is mutated and returned.
for (int i = 0; i < formats.length; ++i) {
formats[i] = imageFormatToInternal(formats[i]);
}
return formats;
|
static int | imageFormatToPublic(int format)Convert an internal format compatible with {@code graphics.h} into a
publicly-visible {@code ImageFormat}.
In particular these formats are converted:
- HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG
Passing in an implementation-defined format which has no public equivalent will fail;
as will passing in a public format which has a different internal format equivalent.
See {@link #checkArgumentFormat} for more details about a legal public format.
All other formats are returned as-is, no further invalid check is performed.
This function is the dual of {@link #imageFormatToInternal}.
// BLOB is the one internal format whose public representation differs (JPEG).
if (format == HAL_PIXEL_FORMAT_BLOB) {
    return ImageFormat.JPEG;
}
// The public JPEG constant is not a valid internal value.
if (format == ImageFormat.JPEG) {
    throw new IllegalArgumentException(
            "ImageFormat.JPEG is an unknown internal format");
}
// Implementation-defined has no public equivalent at all.
if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
    throw new IllegalArgumentException(
            "IMPLEMENTATION_DEFINED must not leak to public API");
}
// Every other format shares its public and internal value.
return format;
|
static int[] | imageFormatToPublic(int[] formats)Convert image formats from internal to public formats (in-place).
// Null passes through so callers can forward optional arrays unchanged.
if (formats == null) {
return null;
}
// In-place conversion: the argument array itself is mutated and returned.
for (int i = 0; i < formats.length; ++i) {
formats[i] = imageFormatToPublic(formats[i]);
}
return formats;
|
public boolean | isOutputSupportedFor(int format)Determine whether or not output surfaces with a particular user-defined format can be passed
{@link CameraDevice#createCaptureSession createCaptureSession}.
This method determines that the output {@code format} is supported by the camera device;
each output {@code surface} target may or may not itself support that {@code format}.
Refer to the class which provides the surface for additional documentation.
Formats for which this returns {@code true} are guaranteed to exist in the result
returned by {@link #getOutputSizes}.
// Reject formats that are not publicly defined (throws on invalid input).
checkArgumentFormat(format);
// The output map is keyed by the internal representation (JPEG -> BLOB).
format = imageFormatToInternal(format);
return getFormatsMap(/*output*/true).containsKey(format);
|
public static boolean | isOutputSupportedFor(java.lang.Class klass)Determine whether or not output streams can be configured with a particular class
as a consumer.
The following list is generally usable for outputs:
- {@link android.media.ImageReader} -
Recommended for image processing or streaming to external resources (such as a file or
network)
- {@link android.media.MediaRecorder} -
Recommended for recording video (simple to use)
- {@link android.media.MediaCodec} -
Recommended for recording video (more complicated to use, with more flexibility)
- {@link android.renderscript.Allocation} -
Recommended for image processing with {@link android.renderscript RenderScript}
- {@link android.view.SurfaceHolder} -
Recommended for low-power camera preview with {@link android.view.SurfaceView}
- {@link android.graphics.SurfaceTexture} -
Recommended for OpenGL-accelerated preview processing or compositing with
{@link android.view.TextureView}
Generally speaking this means that creating a {@link Surface} from that class may
provide a producer endpoint that is suitable to be used with
{@link CameraDevice#createCaptureSession}.
Since not all of the above classes support output of all format and size combinations,
the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.
checkNotNull(klass, "klass must not be null");
// Exact class identity check against the fixed list of supported consumers;
// subclasses intentionally do not match.
return klass == android.media.ImageReader.class
        || klass == android.media.MediaRecorder.class
        || klass == android.media.MediaCodec.class
        || klass == android.renderscript.Allocation.class
        || klass == android.view.SurfaceHolder.class
        || klass == android.graphics.SurfaceTexture.class;
|
public boolean | isOutputSupportedFor(android.view.Surface surface)Determine whether or not the {@code surface} in its current state is suitable to be included
in a {@link CameraDevice#createCaptureSession capture session} as an output.
Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
of that {@code surface} are compatible. Some classes that provide the {@code surface} are
compatible with the {@link CameraDevice} in general
(see {@link #isOutputSupportedFor(Class)}, but it is the caller's responsibility to put the
{@code surface} into a state that will be compatible with the {@link CameraDevice}.
Reasons for a {@code surface} being specifically incompatible might be:
- Using a format that's not listed by {@link #getOutputFormats}
- Using a format/size combination that's not listed by {@link #getOutputSizes}
- The {@code surface} itself is not in a state where it can service a new producer.
Surfaces from flexible sources will return true even if the exact size of the Surface does
not match a camera-supported size, as long as the format (or class) is supported and the
camera device supports a size that is equal to or less than 1080p in that format. If such a
Surface is used to create a capture session, it will have its size rounded to the nearest
supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
and ImageReader.
This is not an exhaustive list; see the particular class's documentation for further
possible reasons of incompatibility.
checkNotNull(surface, "surface must not be null");
Size surfaceSize;
int surfaceFormat = -1;
// Query the surface's current size and pixel format; an abandoned surface
// (no consumer) is reported as an illegal argument with the cause preserved.
try {
surfaceSize = LegacyCameraDevice.getSurfaceSize(surface);
surfaceFormat = LegacyCameraDevice.detectSurfaceType(surface);
} catch(BufferQueueAbandonedException e) {
throw new IllegalArgumentException("Abandoned surface", e);
}
// See if consumer is flexible.
boolean isFlexible = LegacyCameraDevice.isFlexibleConsumer(surface);
// Override RGB formats to IMPLEMENTATION_DEFINED, b/9487482
if ((surfaceFormat >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
surfaceFormat <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
surfaceFormat = LegacyMetadataMapper.HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
}
// Scan every output configuration for a usable (format, size) combination.
for (StreamConfiguration config : mConfigurations) {
if (config.getFormat() == surfaceFormat && config.isOutput()) {
// Matching format: either need an exact size match, or a flexible consumer
// and a size no bigger than MAX_DIMEN_FOR_ROUNDING
// NOTE(review): only the width is compared against the rounding limit;
// the height is not checked here — confirm this is intentional.
if (config.getSize().equals(surfaceSize)) {
return true;
} else if (isFlexible &&
(config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
return true;
}
}
}
return false;
|