/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.Nullable;
import android.annotation.TestApi;
import android.compat.annotation.UnsupportedAppUsage;
import android.graphics.Rect;
import android.hardware.HardwareBuffer;

import java.nio.ByteBuffer;

/**
 * <p>A single complete image buffer to use with a media source such as a
 * {@link MediaCodec} or a
 * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
 *
 * <p>This class allows for efficient direct application access to the pixel
 * data of the Image through one or more
 * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
 * {@link Plane} that describes the layout of the pixel data in that plane. Due
 * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
 * Images are not directly usable as UI resources.</p>
 *
 * <p>Since Images are often directly produced or consumed by hardware
 * components, they are a limited resource shared across the system, and should
 * be closed as soon as they are no longer needed.</p>
 *
 * <p>For example, when using the {@link ImageReader} class to read out Images
 * from various media sources, not closing old Image objects will prevent the
 * availability of new Images once
 * {@link ImageReader#getMaxImages the maximum outstanding image count} is
 * reached. When this happens, the function acquiring new Images will typically
 * throw an {@link IllegalStateException}.</p>
 *
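 * <p>Because {@code Image} implements {@link AutoCloseable}, a
 * try-with-resources block is a convenient way to make sure each acquired
 * image is released promptly. A minimal sketch, assuming an
 * {@link ImageReader} named {@code reader} has been configured elsewhere:</p>
 *
 * <pre>{@code
 * try (Image image = reader.acquireLatestImage()) {
 *     if (image != null) {
 *         // access image.getPlanes(), image.getTimestamp(), ...
 *     }
 * } // close() runs automatically here, freeing the buffer for reuse
 * }</pre>
 *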
 * @see ImageReader
 */
public abstract class Image implements AutoCloseable {
    /**
     * @hide
     */
    protected boolean mIsImageValid = false;

    /**
     * @hide
     */
    @UnsupportedAppUsage
    @TestApi
    protected Image() {
    }

    /**
     * Throw IllegalStateException if the image is invalid (already closed).
     *
     * @hide
     */
    protected void throwISEIfImageIsInvalid() {
        if (!mIsImageValid) {
            throw new IllegalStateException("Image is already closed");
        }
    }

    /**
     * Get the format for this image. This format determines the number of
     * ByteBuffers needed to represent the image, and the general layout of the
     * pixel data in each ByteBuffer.
     *
     * <p>
     * The format is one of the values from
     * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
     * formats and the planes is as follows:
     * </p>
     *
     * <table>
     *   <tr>
     *     <th>Format</th>
     *     <th>Plane count</th>
     *     <th>Layout details</th>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
     *     <td>1</td>
     *     <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *       {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have half the width and height of the luminance
     *       plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have half the width and the full height of the
     *       luminance plane (4:2:2 subsampling). Each pixel sample in each plane
     *       has 8 bits. Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
     *     <td>3</td>
     *     <td>A luminance plane followed by the Cb and Cr chroma planes.
     *       The chroma planes have the same width and height as the luminance
     *       plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
     *     <td>3</td>
     *     <td>An R (red) plane followed by the G (green) and B (blue) planes.
     *       All planes have the same width and height.
     *       Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
     *     <td>4</td>
     *     <td>An R (red) plane followed by the G (green), B (blue), and
     *       A (alpha) planes. All planes have the same width and height.
     *       Each pixel sample in each plane has 8 bits.
     *       Each plane has its own row stride and pixel stride.</td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
     *     <td>1</td>
     *     <td>A single plane of raw sensor image data, with 16 bits per color
     *       sample. The details of the layout need to be queried from the source of
     *       the raw sensor data, such as
     *       {@link android.hardware.camera2.CameraDevice CameraDevice}.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
     *     <td>1</td>
     *     <td>A single plane of raw sensor image data with a private layout.
     *       The details of the layout are implementation specific. Row stride and
     *       pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
     *       or {@link Plane#getPixelStride()} on a RAW_PRIVATE image will cause an
     *       UnsupportedOperationException to be thrown.
     *     </td>
     *   </tr>
     *   <tr>
     *     <td>{@link android.graphics.ImageFormat#HEIC HEIC}</td>
     *     <td>1</td>
     *     <td>Compressed data, so row and pixel strides are 0. To uncompress, use
     *       {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
     *     </td>
     *   </tr>
     * </table>
     *
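     * <p>A minimal sketch of branching on the format (the {@code image} instance
     * and its source are assumed to be set up elsewhere, and only two formats
     * are handled for illustration):</p>
     *
     * <pre>{@code
     * switch (image.getFormat()) {
     *     case ImageFormat.JPEG: {
     *         // One plane of compressed data; copy it out and decode it.
     *         ByteBuffer jpegData = image.getPlanes()[0].getBuffer();
     *         byte[] bytes = new byte[jpegData.remaining()];
     *         jpegData.get(bytes);
     *         Bitmap bitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
     *         break;
     *     }
     *     case ImageFormat.YUV_420_888: {
     *         // Three planes: Y, then Cb, then Cr, each with its own strides.
     *         Image.Plane[] planes = image.getPlanes();
     *         break;
     *     }
     *     default:
     *         break;
     * }
     * }</pre>
     *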
     * @see android.graphics.ImageFormat
     */
    public abstract int getFormat();

    /**
     * The width of the image in pixels. For formats where some color channels
     * are subsampled, this is the width of the largest-resolution plane.
     */
    public abstract int getWidth();

    /**
     * The height of the image in pixels. For formats where some color channels
     * are subsampled, this is the height of the largest-resolution plane.
     */
    public abstract int getHeight();

    /**
     * Get the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific
     * meaning and timebase of the timestamp depend on the source providing
     * images. See {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     */
    public abstract long getTimestamp();

    /**
     * Get the transformation associated with this frame.
     * @return The window transformation that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getTransform();

    /**
     * Get the scaling mode associated with this frame.
     * @return The scaling mode that needs to be applied for this frame.
     * @hide
     */
    @SuppressWarnings("HiddenAbstractMethod")
    public abstract int getScalingMode();

    /**
     * Get the fence file descriptor associated with this frame.
     * @return The fence file descriptor for this frame.
     * @hide
     */
    public int getFenceFd() {
        return -1;
    }

    /**
     * Get the number of planes.
     * @return The number of expected planes.
     * @hide
     */
    public int getPlaneCount() {
        return -1;
    }

    /**
     * Get the {@link android.hardware.HardwareBuffer HardwareBuffer} handle of the input image
     * intended for GPU and/or hardware access.
     * <p>
     * The returned {@link android.hardware.HardwareBuffer HardwareBuffer} shall not be used
     * after {@link Image#close Image.close()} has been called.
     * </p>
     * @return the HardwareBuffer associated with this Image, or null if this Image doesn't
     *         support this feature. (Unsupported use cases include Image instances obtained
     *         through {@link android.media.MediaCodec MediaCodec}, and on versions prior to
     *         Android P, {@link android.media.ImageWriter ImageWriter}.)
     */
    @Nullable
    public HardwareBuffer getHardwareBuffer() {
        throwISEIfImageIsInvalid();
        return null;
    }

    /**
     * Set the timestamp associated with this frame.
     * <p>
     * The timestamp is measured in nanoseconds, and is normally monotonically
     * increasing. The timestamps for images from different sources may have
     * different timebases and therefore may not be comparable. The specific
     * meaning and timebase of the timestamp depend on the source providing
     * images. See {@link android.hardware.Camera Camera},
     * {@link android.hardware.camera2.CameraDevice CameraDevice},
     * {@link MediaPlayer} and {@link MediaCodec} for more details.
     * </p>
     * <p>
     * For images dequeued from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
     * set the timestamps correctly before sending them back to the
     * {@link ImageWriter}, or the timestamp will be generated automatically when
     * {@link ImageWriter#queueInputImage queueInputImage()} is called.
     * </p>
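     * <p>
     * A minimal sketch of the {@link ImageWriter} case (the {@code writer} and
     * {@code timestampNs} values are assumed to be prepared elsewhere):
     * </p>
     *
     * <pre>{@code
     * Image input = writer.dequeueInputImage();
     * // ... fill the planes of "input" with pixel data ...
     * input.setTimestamp(timestampNs);
     * writer.queueInputImage(input); // queued images are closed automatically
     * }</pre>
     *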
     * @param timestamp The timestamp to be set for this image.
     */
    public void setTimestamp(long timestamp) {
        throwISEIfImageIsInvalid();
        return;
    }

    private Rect mCropRect;

    /**
     * Get the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public Rect getCropRect() {
        throwISEIfImageIsInvalid();

        if (mCropRect == null) {
            return new Rect(0, 0, getWidth(), getHeight());
        } else {
            return new Rect(mCropRect); // return a copy
        }
    }

    /**
     * Set the crop rectangle associated with this frame.
     * <p>
     * The crop rectangle specifies the region of valid pixels in the image,
     * using coordinates in the largest-resolution plane.
     */
    public void setCropRect(Rect cropRect) {
        throwISEIfImageIsInvalid();

        if (cropRect != null) {
            cropRect = new Rect(cropRect); // make a copy
            if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
                cropRect.setEmpty();
            }
        }
        mCropRect = cropRect;
    }

    /**
     * Get the array of pixel planes for this Image. The number of planes is
     * determined by the format of the Image. The application will get an empty
     * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
     * PRIVATE}, because the image pixel data is not directly accessible. The
     * application can check the image format by calling
     * {@link Image#getFormat()}.
     */
    public abstract Plane[] getPlanes();

    /**
     * Free up this frame for reuse.
     * <p>
     * After calling this method, calling any methods on this {@code Image} will
     * result in an {@link IllegalStateException}, and attempting to read from
     * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Plane#getBuffer} call will have undefined behavior. If the image
     * was obtained from {@link ImageWriter} via
     * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
     * image data filled by the application will be lost and the image will be
     * returned to {@link ImageWriter} for reuse. Images given to
     * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
     * closed.
     * </p>
     */
    @Override
    public abstract void close();

    /**
     * <p>
     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return true if the image is attachable to a new owner, false if the image is still
     *         attached to its current owner, or the image is a stand-alone image and is not
     *         attachable to a new owner.
     * @hide
     */
    public boolean isAttachable() {
        throwISEIfImageIsInvalid();

        return false;
    }

    /**
     * <p>
     * Get the owner of the {@link Image}.
     * </p>
     * <p>
     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
     * if the image is a stand-alone image or the owner is unknown.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @return The owner of the Image.
     */
    Object getOwner() {
        throwISEIfImageIsInvalid();

        return null;
    }

    /**
     * Get native context (buffer pointer) associated with this image.
     * <p>
     * This is a package private method that is only used internally. It can be
     * used to get the native buffer pointer, which may then be passed to native
     * code (for example, to {@link ImageWriter#attachAndQueueInputImage}) to
     * avoid a reverse JNI call.
     * </p>
     *
     * @return native context associated with this Image.
     */
    long getNativeContext() {
        throwISEIfImageIsInvalid();

        return 0;
    }

    /**
     * <p>A single color plane of image data.</p>
     *
     * <p>The number and meaning of the planes in an Image are determined by the
     * format of the Image.</p>
     *
     * <p>Once the Image has been closed, any access to the plane's
     * ByteBuffer will fail.</p>
     *
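     * <p>For 8-bit formats such as
     * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}, a sample at
     * column {@code x} and row {@code y} of a plane can typically be located
     * using the plane's strides, as in this sketch ({@code plane}, {@code x} and
     * {@code y} are assumed to be set up elsewhere, with the coordinates in range
     * for that plane's resolution):</p>
     *
     * <pre>{@code
     * ByteBuffer buffer = plane.getBuffer();
     * int rowStride = plane.getRowStride();
     * int pixelStride = plane.getPixelStride();
     * byte sample = buffer.get(y * rowStride + x * pixelStride);
     * }</pre>
     *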
     * @see #getFormat
     */
    public static abstract class Plane {
        /**
         * @hide
         */
        @UnsupportedAppUsage
        @TestApi
        protected Plane() {
        }

        /**
         * <p>The row stride for this color plane, in bytes.</p>
         *
         * <p>This is the distance between the start of two consecutive rows of
         * pixels in the image. Note that row stride is undefined for some formats
         * such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getRowStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where row stride is well defined, the row stride
         * is always greater than 0.</p>
         */
        public abstract int getRowStride();

        /**
         * <p>The distance between adjacent pixel samples, in bytes.</p>
         *
         * <p>This is the distance between two consecutive pixel values in a row
         * of pixels. It may be larger than the size of a single pixel to
         * account for interleaved image data or padded formats.
         * Note that pixel stride is undefined for some formats such as
         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
         * and calling getPixelStride on images of these formats will
         * cause an UnsupportedOperationException to be thrown.
         * For formats where pixel stride is well defined, the pixel stride
         * is always greater than 0.</p>
         */
        public abstract int getPixelStride();

        /**
         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
         * containing the frame data.</p>
         *
         * <p>In particular, the buffer returned will always have
         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
         * the underlying data could be mapped as a pointer in JNI without doing
         * any copies with {@code GetDirectBufferAddress}.</p>
         *
         * <p>For raw formats, each plane is only guaranteed to contain data
         * up to the last pixel in the last row. In other words, the stride
         * after the last row may not be mapped into the buffer. This is a
         * necessary requirement for any interleaved format.</p>
         *
         * @return the byte buffer containing the image data for this plane.
         */
        public abstract ByteBuffer getBuffer();
    }

}