public final class DngCreator extends Object implements AutoCloseable

The {@link DngCreator} class provides functions to write raw pixel data as a DNG file.
This class is designed to be used with the {@link android.graphics.ImageFormat#RAW_SENSOR}
buffers available from {@link android.hardware.camera2.CameraDevice}, or with Bayer-type raw
pixel data that is otherwise generated by an application. The DNG metadata tags will be
generated from a {@link android.hardware.camera2.CaptureResult} object or set directly.
The DNG file format is a cross-platform file format that is used to store pixel data from
camera sensors with minimal pre-processing applied. DNG files allow for pixel data to be
defined in a user-defined colorspace, and have associated metadata that allow for this
pixel data to be converted to the standard CIE XYZ colorspace during post-processing.
For more information on the DNG file format and associated metadata, please refer to the
Adobe DNG 1.4.0.0 specification.
|
Fields Summary |
---|
private static final String | TAG | public static final int | MAX_THUMBNAIL_DIMENSIONMax width or height dimension for thumbnails. | private static final String | GPS_LAT_REF_NORTH | private static final String | GPS_LAT_REF_SOUTH | private static final String | GPS_LONG_REF_EAST | private static final String | GPS_LONG_REF_WEST | private static final String | GPS_DATE_FORMAT_STR | private static final String | TIFF_DATETIME_FORMAT | private static final DateFormat | sExifGPSDateStamp | private static final DateFormat | sDateTimeStampFormat | private final Calendar | mGPSTimeStampCalendar | private static final int | DEFAULT_PIXEL_STRIDE | private static final int | BYTES_PER_RGB_PIX | private long | mNativeContextThis field is used by native code, do not access or modify. |
Constructors Summary |
---|
/**
 * Create a new DNG object.
 *
 * <p>It is not necessary to call any set methods to write a well-formatted DNG file.
 * DNG metadata tags will be generated from the corresponding parameters in the
 * {@link android.hardware.camera2.CaptureResult} object.</p>
 *
 * <p>For best quality DNG files, it is strongly recommended that lens shading map output is
 * enabled if supported. See {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE}.</p>
 *
 * @param characteristics the static camera metadata for the sensor that produced the image.
 * @param metadata the capture result metadata for the captured image.
 * @throws IllegalArgumentException if either argument is null.
 */
public DngCreator(CameraCharacteristics characteristics, CaptureResult metadata) {
    if (characteristics == null || metadata == null) {
        throw new IllegalArgumentException("Null argument to DngCreator constructor");
    }
    // Wall-clock "now", and the wall-clock instant corresponding to device boot.
    final long nowMillis = System.currentTimeMillis();
    final long bootTimeMillis = nowMillis - SystemClock.elapsedRealtime();
    // SENSOR_TIMESTAMP is nanoseconds since boot; translate to wall-clock millis.
    // Fall back to "now" when the timestamp is absent from the capture result.
    final Long sensorTimestamp = metadata.get(CaptureResult.SENSOR_TIMESTAMP);
    final long captureTime = (sensorTimestamp == null)
            ? nowMillis
            : sensorTimestamp / 1000000 + bootTimeMillis;
    final String formattedCaptureTime = sDateTimeStampFormat.format(captureTime);
    nativeInit(characteristics.getNativeCopy(), metadata.getNativeCopy(),
            formattedCaptureTime);
}
Methods Summary |
---|
/**
 * Release the native resources held by this {@link DngCreator} object.
 *
 * <p>Delegates directly to {@link #nativeDestroy}, which frees the native
 * writer context.</p>
 */
public void close() {
    nativeDestroy();
}
| private static void | colorToRgb(int color, int outOffset, byte[] rgbOut)Convert a single {@link Color} pixel to RGB.
rgbOut[outOffset] = (byte) Color.red(color);
rgbOut[outOffset + 1] = (byte) Color.green(color);
rgbOut[outOffset + 2] = (byte) Color.blue(color);
// Discards Alpha
/**
 * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
 *
 * <p>Output is packed 3-byte-per-pixel RGB, row-major, with no row padding,
 * rewound to position 0.</p>
 */
private static ByteBuffer convertToRGB(Image yuvImage) {
    // TODO: Optimize this with renderscript intrinsic.
    int width = yuvImage.getWidth();
    int height = yuvImage.getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
    // YUV_420_888 images carry three planes in Y, U (Cb), V (Cr) order.
    Image.Plane yPlane = yuvImage.getPlanes()[0];
    Image.Plane uPlane = yuvImage.getPlanes()[1];
    Image.Plane vPlane = yuvImage.getPlanes()[2];
    ByteBuffer yBuf = yPlane.getBuffer();
    ByteBuffer uBuf = uPlane.getBuffer();
    ByteBuffer vBuf = vPlane.getBuffer();
    // Start each plane buffer from position 0 regardless of prior reads.
    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();
    int yRowStride = yPlane.getRowStride();
    int vRowStride = vPlane.getRowStride();
    int uRowStride = uPlane.getRowStride();
    int yPixStride = yPlane.getPixelStride();
    int vPixStride = vPlane.getPixelStride();
    int uPixStride = uPlane.getPixelStride();
    // Scratch holding one Y/U/V sample triple at a time.
    byte[] yuvPixel = { 0, 0, 0 };
    // Row scratch buffers sized to reach exactly the last sample of a row:
    // pixStride * (samples - 1) + 1 bytes. The chroma planes carry width / 2
    // samples per row (2x2 subsampling).
    // NOTE(review): this assumes even width/height; with an odd width, halfW
    // below can index one past the end of uFullRow/vFullRow -- confirm callers
    // only pass even dimensions.
    byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
    byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
    byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
    byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
    for (int i = 0; i < height; i++) {
        int halfH = i / 2;
        // Read one full row from each plane; chroma rows advance every other
        // luma row (vertical 2x subsampling).
        yBuf.position(yRowStride * i);
        yBuf.get(yFullRow);
        uBuf.position(uRowStride * halfH);
        uBuf.get(uFullRow);
        vBuf.position(vRowStride * halfH);
        vBuf.get(vFullRow);
        for (int j = 0; j < width; j++) {
            int halfW = j / 2;
            // Gather the (possibly interleaved) samples for this pixel, then
            // convert to RGB into the output row.
            yuvPixel[0] = yFullRow[yPixStride * j];
            yuvPixel[1] = uFullRow[uPixStride * halfW];
            yuvPixel[2] = vFullRow[vPixStride * halfW];
            yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
        }
        buf.put(finalRow);
    }
    // Leave every buffer rewound for subsequent consumers.
    yBuf.rewind();
    uBuf.rewind();
    vBuf.rewind();
    buf.rewind();
    return buf;
}
| private static java.nio.ByteBuffer | convertToRGB(android.graphics.Bitmap argbBitmap)Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}.
// TODO: Optimize this.
int width = argbBitmap.getWidth();
int height = argbBitmap.getHeight();
ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
int[] pixelRow = new int[width];
byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
for (int i = 0; i < height; i++) {
argbBitmap.getPixels(pixelRow, /*offset*/0, /*stride*/width, /*x*/0, /*y*/i,
/*width*/width, /*height*/1);
for (int j = 0; j < width; j++) {
colorToRgb(pixelRow[j], j * BYTES_PER_RGB_PIX, /*out*/finalRow);
}
buf.put(finalRow);
}
buf.rewind();
return buf;
/**
 * Safety net: release native resources if {@link #close} was never called.
 */
protected void finalize() throws Throwable {
    try {
        close();
    } finally {
        // Always chain to the superclass finalizer, even if close() throws.
        super.finalize();
    }
}
/**
 * One-time JNI class setup (caches method/field IDs on the native side).
 * Presumably invoked from a static initializer -- not visible in this chunk.
 */
private static native void nativeClassInit();

/** Free the native writer context allocated by {@link #nativeInit}. */
private native synchronized void nativeDestroy();

/**
 * Allocate the native writer context from the camera characteristics, the
 * capture result metadata, and a pre-formatted capture time string.
 */
private native synchronized void nativeInit(
        android.hardware.camera2.impl.CameraMetadataNative nativeCharacteristics,
        android.hardware.camera2.impl.CameraMetadataNative nativeResult,
        java.lang.String captureTime);

/** Set the TIFF "ImageDescription" {@code (0x010E)} tag value. */
private native synchronized void nativeSetDescription(java.lang.String description);

/**
 * Set the EXIF GPS tags. lat/long are rational triples (numerator/denominator
 * pairs); refs are the hemisphere reference strings (see GPS_LAT_REF_* /
 * GPS_LONG_REF_* constants).
 */
private native synchronized void nativeSetGpsTags(int[] latTag, java.lang.String latRef,
        int[] longTag, java.lang.String longRef, java.lang.String dateTag, int[] timeTag);

/** Set the TIFF "Orientation" {@code (0x0112)} tag value. */
private native synchronized void nativeSetOrientation(int orientation);

/** Set the thumbnail pixel data (packed RGB, width x height pixels). */
private native synchronized void nativeSetThumbnail(java.nio.ByteBuffer buffer, int width,
        int height);

/**
 * Write a DNG to {@code out} from raw pixel data held in a ByteBuffer.
 * Strides and offset are in bytes; {@code isDirect} tells the native side
 * whether the buffer is a direct buffer.
 */
private native synchronized void nativeWriteImage(java.io.OutputStream out, int width,
        int height, java.nio.ByteBuffer rawBuffer, int rowStride, int pixStride, long offset,
        boolean isDirect);

/** Write a DNG to {@code out} from raw pixel data streamed from an InputStream. */
private native synchronized void nativeWriteInputStream(java.io.OutputStream out,
        java.io.InputStream rawStream, int width, int height, long offset);
| public android.hardware.camera2.DngCreator | setDescription(java.lang.String description)Set the user description string to write.
This is equivalent to setting the TIFF "ImageDescription" tag {@code (0x010E)}.
if (description == null) {
throw new IllegalArgumentException("Null description passed to setDescription.");
}
nativeSetDescription(description);
return this;
| public android.hardware.camera2.DngCreator | setLocation(android.location.Location location)Set image location metadata.
The given location object must contain at least a valid time, latitude, and longitude
(equivalent to the values returned by {@link android.location.Location#getTime()},
{@link android.location.Location#getLatitude()}, and
{@link android.location.Location#getLongitude()} methods).
if (location == null) {
throw new IllegalArgumentException("Null location passed to setLocation");
}
double latitude = location.getLatitude();
double longitude = location.getLongitude();
long time = location.getTime();
int[] latTag = toExifLatLong(latitude);
int[] longTag = toExifLatLong(longitude);
String latRef = latitude >= 0 ? GPS_LAT_REF_NORTH : GPS_LAT_REF_SOUTH;
String longRef = longitude >= 0 ? GPS_LONG_REF_EAST : GPS_LONG_REF_WEST;
String dateTag = sExifGPSDateStamp.format(time);
mGPSTimeStampCalendar.setTimeInMillis(time);
int[] timeTag = new int[] { mGPSTimeStampCalendar.get(Calendar.HOUR_OF_DAY), 1,
mGPSTimeStampCalendar.get(Calendar.MINUTE), 1,
mGPSTimeStampCalendar.get(Calendar.SECOND), 1 };
nativeSetGpsTags(latTag, latRef, longTag, longRef, dateTag, timeTag);
return this;
| public android.hardware.camera2.DngCreator | setOrientation(int orientation)Set the orientation value to write.
This will be written as the TIFF "Orientation" tag {@code (0x0112)}.
Calling this will override any prior settings for this tag.
if (orientation < ExifInterface.ORIENTATION_UNDEFINED ||
orientation > ExifInterface.ORIENTATION_ROTATE_270) {
throw new IllegalArgumentException("Orientation " + orientation +
" is not a valid EXIF orientation value");
}
nativeSetOrientation(orientation);
return this;
| public android.hardware.camera2.DngCreator | setThumbnail(android.graphics.Bitmap pixels)Set the thumbnail image.
Pixel data will be converted to a Baseline TIFF RGB image, with 8 bits per color channel.
The alpha channel will be discarded. Thumbnail images with a dimension larger than
{@link #MAX_THUMBNAIL_DIMENSION} will be rejected.
if (pixels == null) {
throw new IllegalArgumentException("Null argument to setThumbnail");
}
int width = pixels.getWidth();
int height = pixels.getHeight();
if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
"," + height + ") too large, dimensions must be smaller than " +
MAX_THUMBNAIL_DIMENSION);
}
ByteBuffer rgbBuffer = convertToRGB(pixels);
nativeSetThumbnail(rgbBuffer, width, height);
return this;
| public android.hardware.camera2.DngCreator | setThumbnail(android.media.Image pixels)Set the thumbnail image.
Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
rejected.
if (pixels == null) {
throw new IllegalArgumentException("Null argument to setThumbnail");
}
int format = pixels.getFormat();
if (format != ImageFormat.YUV_420_888) {
throw new IllegalArgumentException("Unsupported Image format " + format);
}
int width = pixels.getWidth();
int height = pixels.getHeight();
if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
"," + height + ") too large, dimensions must be smaller than " +
MAX_THUMBNAIL_DIMENSION);
}
ByteBuffer rgbBuffer = convertToRGB(pixels);
nativeSetThumbnail(rgbBuffer, width, height);
return this;
| private static int[] | toExifLatLong(double value)Convert coordinate to EXIF GPS tag format.
// convert to the format dd/1 mm/1 ssss/100
value = Math.abs(value);
int degrees = (int) value;
value = (value - degrees) * 60;
int minutes = (int) value;
value = (value - minutes) * 6000;
int seconds = (int) value;
return new int[] { degrees, 1, minutes, 1, seconds, 100 };
| private void | writeByteBuffer(int width, int height, java.nio.ByteBuffer pixels, java.io.OutputStream dngOutput, int pixelStride, int rowStride, long offset)Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels. // byts per pixel
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Image with invalid width, height: (" + width + "," +
height + ") passed to write");
}
long capacity = pixels.capacity();
long totalSize = rowStride * height + offset;
if (capacity < totalSize) {
throw new IllegalArgumentException("Image size " + capacity +
" is too small (must be larger than " + totalSize + ")");
}
int minRowStride = pixelStride * width;
if (minRowStride > rowStride) {
throw new IllegalArgumentException("Invalid image pixel stride, row byte width " +
minRowStride + " is too large, expecting " + rowStride);
}
pixels.clear(); // Reset mark and limit
nativeWriteImage(dngOutput, width, height, pixels, rowStride, pixelStride, offset,
pixels.isDirect());
pixels.clear();
| public void | writeByteBuffer(java.io.OutputStream dngOutput, android.util.Size size, java.nio.ByteBuffer pixels, long offset)Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
the currently configured metadata.
Raw pixel data must have 16 bits per pixel, and the input must contain at least
{@code offset + 2 * width * height)} bytes. The width and height of
the input are taken from the width and height set in the {@link DngCreator} metadata tags,
and will typically be equal to the width and height of
{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
The pixel layout in the input is determined from the reported color filter arrangement (CFA)
set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
metadata is available to write a well-formatted DNG file, an
{@link java.lang.IllegalStateException} will be thrown.
Any mark or limit set on this {@link ByteBuffer} is ignored, and will be cleared by this
method.
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput passed to writeByteBuffer");
} else if (size == null) {
throw new IllegalArgumentException("Null size passed to writeByteBuffer");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels passed to writeByteBuffer");
} else if (offset < 0) {
throw new IllegalArgumentException("Negative offset passed to writeByteBuffer");
}
int width = size.getWidth();
int height = size.getHeight();
writeByteBuffer(width, height, pixels, dngOutput, DEFAULT_PIXEL_STRIDE,
width * DEFAULT_PIXEL_STRIDE, offset);
| public void | writeImage(java.io.OutputStream dngOutput, android.media.Image pixels)Write the pixel data to a DNG file with the currently configured metadata.
For this method to succeed, the {@link android.media.Image} input must contain
{@link android.graphics.ImageFormat#RAW_SENSOR} pixel data, otherwise an
{@link java.lang.IllegalArgumentException} will be thrown.
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput to writeImage");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels to writeImage");
}
int format = pixels.getFormat();
if (format != ImageFormat.RAW_SENSOR) {
throw new IllegalArgumentException("Unsupported image format " + format);
}
Image.Plane[] planes = pixels.getPlanes();
if (planes == null || planes.length <= 0) {
throw new IllegalArgumentException("Image with no planes passed to writeImage");
}
ByteBuffer buf = planes[0].getBuffer();
writeByteBuffer(pixels.getWidth(), pixels.getHeight(), buf, dngOutput,
planes[0].getPixelStride(), planes[0].getRowStride(), 0);
| public void | writeInputStream(java.io.OutputStream dngOutput, android.util.Size size, java.io.InputStream pixels, long offset)Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
the currently configured metadata.
Raw pixel data must have 16 bits per pixel, and the input must contain at least
{@code offset + 2 * width * height)} bytes. The width and height of
the input are taken from the width and height set in the {@link DngCreator} metadata tags,
and will typically be equal to the width and height of
{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
The pixel layout in the input is determined from the reported color filter arrangement (CFA)
set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
metadata is available to write a well-formatted DNG file, an
{@link java.lang.IllegalStateException} will be thrown.
if (dngOutput == null) {
throw new IllegalArgumentException("Null dngOutput passed to writeInputStream");
} else if (size == null) {
throw new IllegalArgumentException("Null size passed to writeInputStream");
} else if (pixels == null) {
throw new IllegalArgumentException("Null pixels passed to writeInputStream");
} else if (offset < 0) {
throw new IllegalArgumentException("Negative offset passed to writeInputStream");
}
int width = size.getWidth();
int height = size.getHeight();
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Size with invalid width, height: (" + width + "," +
height + ") passed to writeInputStream");
}
nativeWriteInputStream(dngOutput, pixels, width, height, offset);
| private static void | yuvToRgb(byte[] yuvData, int outOffset, byte[] rgbOut)Convert a single YUV pixel to RGB.
final int COLOR_MAX = 255;
float y = yuvData[0] & 0xFF; // Y channel
float cb = yuvData[1] & 0xFF; // U channel
float cr = yuvData[2] & 0xFF; // V channel
// convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
float r = y + 1.402f * (cr - 128);
float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
float b = y + 1.772f * (cb - 128);
// clamp to [0,255]
rgbOut[outOffset] = (byte) Math.max(0, Math.min(COLOR_MAX, r));
rgbOut[outOffset + 1] = (byte) Math.max(0, Math.min(COLOR_MAX, g));
rgbOut[outOffset + 2] = (byte) Math.max(0, Math.min(COLOR_MAX, b));
|
|