xref: /aosp_15_r20/cts/tests/camera/utils/src/android/hardware/camera2/cts/CameraTestUtils.java (revision b7c941bb3fa97aba169d73cee0bed2de8ac964bf)
1 /*
2  * Copyright 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.hardware.camera2.cts;
18 
19 import static org.mockito.Mockito.*;
20 
21 import android.content.Context;
22 import android.content.pm.PackageManager;
23 import android.graphics.Bitmap;
24 import android.graphics.BitmapFactory;
25 import android.graphics.ColorSpace;
26 import android.graphics.Gainmap;
27 import android.graphics.ImageFormat;
28 import android.graphics.PointF;
29 import android.graphics.Rect;
30 import android.graphics.SurfaceTexture;
31 import android.hardware.camera2.CameraAccessException;
32 import android.hardware.camera2.CameraCaptureSession;
33 import android.hardware.camera2.CameraCharacteristics;
34 import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
35 import android.hardware.camera2.CameraDevice;
36 import android.hardware.camera2.CameraDevice.CameraDeviceSetup;
37 import android.hardware.camera2.CameraManager;
38 import android.hardware.camera2.CameraMetadata;
39 import android.hardware.camera2.CaptureFailure;
40 import android.hardware.camera2.CaptureRequest;
41 import android.hardware.camera2.CaptureResult;
42 import android.hardware.camera2.MultiResolutionImageReader;
43 import android.hardware.camera2.TotalCaptureResult;
44 import android.hardware.camera2.cts.helpers.CameraErrorCollector;
45 import android.hardware.camera2.cts.helpers.StaticMetadata;
46 import android.hardware.camera2.params.DynamicRangeProfiles;
47 import android.hardware.camera2.params.InputConfiguration;
48 import android.hardware.camera2.params.MandatoryStreamCombination.MandatoryStreamInformation;
49 import android.hardware.camera2.params.MeteringRectangle;
50 import android.hardware.camera2.params.MultiResolutionStreamConfigurationMap;
51 import android.hardware.camera2.params.MultiResolutionStreamInfo;
52 import android.hardware.camera2.params.OutputConfiguration;
53 import android.hardware.camera2.params.SessionConfiguration;
54 import android.hardware.camera2.params.StreamConfigurationMap;
55 import android.hardware.cts.helpers.CameraUtils;
56 import android.location.Location;
57 import android.location.LocationManager;
58 import android.media.CamcorderProfile;
59 import android.media.ExifInterface;
60 import android.media.Image;
61 import android.media.Image.Plane;
62 import android.media.ImageReader;
63 import android.media.ImageWriter;
64 import android.os.Build;
65 import android.os.ConditionVariable;
66 import android.os.Handler;
67 import android.os.Looper;
68 import android.util.Log;
69 import android.util.Pair;
70 import android.util.Range;
71 import android.util.Size;
72 import android.view.Surface;
73 import android.view.WindowManager;
74 import android.view.WindowMetrics;
75 
76 import androidx.annotation.NonNull;
77 
78 import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
79 import com.android.ex.camera2.blocking.BlockingSessionCallback;
80 import com.android.ex.camera2.blocking.BlockingStateCallback;
81 import com.android.ex.camera2.exceptions.TimeoutRuntimeException;
82 import com.android.internal.camera.flags.Flags;
83 
84 import junit.framework.Assert;
85 
86 import org.mockito.ArgumentCaptor;
87 import org.mockito.InOrder;
88 import org.mockito.Mockito;
89 
90 import java.io.FileOutputStream;
91 import java.io.IOException;
92 import java.lang.reflect.Array;
93 import java.nio.ByteBuffer;
94 import java.text.ParseException;
95 import java.text.SimpleDateFormat;
96 import java.util.ArrayList;
97 import java.util.Arrays;
98 import java.util.Collection;
99 import java.util.Collections;
100 import java.util.Comparator;
101 import java.util.Date;
102 import java.util.HashMap;
103 import java.util.HashSet;
104 import java.util.List;
105 import java.util.Map;
106 import java.util.Optional;
107 import java.util.Random;
108 import java.util.Set;
109 import java.util.concurrent.Executor;
110 import java.util.concurrent.LinkedBlockingQueue;
111 import java.util.concurrent.Semaphore;
112 import java.util.concurrent.TimeUnit;
113 import java.util.concurrent.atomic.AtomicLong;
114 
115 /**
116  * A package private utility class for wrapping up the camera2 cts test common utility functions
117  */
118 public class CameraTestUtils extends Assert {
119     private static final String TAG = "CameraTestUtils";
120     private static final boolean VERBOSE = Log.isLoggable(TAG, Log.VERBOSE);
121     private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
122     public static final Size SIZE_BOUND_720P = new Size(1280, 720);
123     public static final Size SIZE_BOUND_1080P = new Size(1920, 1088);
124     public static final Size SIZE_BOUND_2K = new Size(2048, 1088);
125     public static final Size SIZE_BOUND_QHD = new Size(2560, 1440);
126     public static final Size SIZE_BOUND_2160P = new Size(3840, 2160);
127     // Only test the preview size that is no larger than 1080p.
128     public static final Size PREVIEW_SIZE_BOUND = SIZE_BOUND_1080P;
129     // Default timeouts for reaching various states
130     public static final int CAMERA_OPEN_TIMEOUT_MS = 3000;
131     public static final int CAMERA_CLOSE_TIMEOUT_MS = 3000;
132     public static final int CAMERA_IDLE_TIMEOUT_MS = 3000;
133     public static final int CAMERA_ACTIVE_TIMEOUT_MS = 1000;
134     public static final int CAMERA_BUSY_TIMEOUT_MS = 1000;
135     public static final int CAMERA_UNCONFIGURED_TIMEOUT_MS = 1000;
136     public static final int CAMERA_CONFIGURE_TIMEOUT_MS = 3000;
137     public static final int CAPTURE_RESULT_TIMEOUT_MS = 3000;
138     public static final int CAPTURE_IMAGE_TIMEOUT_MS = 3000;
139 
140     public static final int SESSION_CONFIGURE_TIMEOUT_MS = 3000;
141     public static final int SESSION_CLOSE_TIMEOUT_MS = 3000;
142     public static final int SESSION_READY_TIMEOUT_MS = 5000;
143     public static final int SESSION_ACTIVE_TIMEOUT_MS = 1000;
144 
145     public static final int MAX_READER_IMAGES = 5;
146 
147     public static final int INDEX_ALGORITHM_AE = 0;
148     public static final int INDEX_ALGORITHM_AWB = 1;
149     public static final int INDEX_ALGORITHM_AF = 2;
150     public static final int NUM_ALGORITHMS = 3; // AE, AWB and AF
151 
152     // Compensate for the loss of "sensitivity" and "sensitivityBoost"
153     public static final int MAX_ISO_MISMATCH = 3;
154 
155     public static final String OFFLINE_CAMERA_ID = "offline_camera_id";
156     public static final String REPORT_LOG_NAME = "CtsCameraTestCases";
157 
158     private static final int EXIF_DATETIME_LENGTH = 19;
159     private static final int EXIF_DATETIME_ERROR_MARGIN_SEC = 60;
160     private static final float EXIF_FOCAL_LENGTH_ERROR_MARGIN = 0.001f;
161     private static final float EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO = 0.05f;
162     private static final float EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC = 0.002f;
163     private static final float EXIF_APERTURE_ERROR_MARGIN = 0.001f;
164 
165     private static final float ZOOM_RATIO_THRESHOLD = 0.01f;
166 
167     // Set such that 1920x1080 and 1920x1088 be treated as the same aspect ratio.
168     private static final float ASPECT_RATIO_MATCH_THRESHOLD = 0.014f;
169 
170     private static final int AVAILABILITY_TIMEOUT_MS = 10;
171 
172     private static final Location sTestLocation0 = new Location(LocationManager.GPS_PROVIDER);
173     private static final Location sTestLocation1 = new Location(LocationManager.GPS_PROVIDER);
174     private static final Location sTestLocation2 = new Location(LocationManager.NETWORK_PROVIDER);
175 
176     static {
177         sTestLocation0.setTime(1199145600000L);
178         sTestLocation0.setLatitude(37.736071);
179         sTestLocation0.setLongitude(-122.441983);
180         sTestLocation0.setAltitude(21.0);
181 
182         sTestLocation1.setTime(1199145601000L);
183         sTestLocation1.setLatitude(0.736071);
184         sTestLocation1.setLongitude(0.441983);
185         sTestLocation1.setAltitude(1.0);
186 
187         sTestLocation2.setTime(1199145602000L);
188         sTestLocation2.setLatitude(-89.736071);
189         sTestLocation2.setLongitude(-179.441983);
190         sTestLocation2.setAltitude(100000.0);
191     }
192 
193     // Exif test data vectors.
194     public static final ExifTestData[] EXIF_TEST_DATA = {
195             new ExifTestData(
196                     /*gpsLocation*/ sTestLocation0,
197                     /* orientation */90,
198                     /* jpgQuality */(byte) 80,
199                     /* thumbQuality */(byte) 75),
200             new ExifTestData(
201                     /*gpsLocation*/ sTestLocation1,
202                     /* orientation */180,
203                     /* jpgQuality */(byte) 90,
204                     /* thumbQuality */(byte) 85),
205             new ExifTestData(
206                     /*gpsLocation*/ sTestLocation2,
207                     /* orientation */270,
208                     /* jpgQuality */(byte) 100,
209                     /* thumbQuality */(byte) 80)
210     };
211 
212     /**
213      * Create an {@link android.media.ImageReader} object and get the surface.
214      *
215      * @param size The size of this ImageReader to be created.
216      * @param format The format of this ImageReader to be created
217      * @param maxNumImages The max number of images that can be acquired simultaneously.
218      * @param listener The listener used by this ImageReader to notify callbacks.
219      * @param handler The handler to use for any listener callbacks.
220      */
makeImageReader(Size size, int format, int maxNumImages, ImageReader.OnImageAvailableListener listener, Handler handler)221     public static ImageReader makeImageReader(Size size, int format, int maxNumImages,
222             ImageReader.OnImageAvailableListener listener, Handler handler) {
223         ImageReader reader;
224         reader = ImageReader.newInstance(size.getWidth(), size.getHeight(), format,
225                 maxNumImages);
226         reader.setOnImageAvailableListener(listener, handler);
227         if (VERBOSE) Log.v(TAG, "Created ImageReader size " + size);
228         return reader;
229     }
230 
231     /**
232      * Create an ImageWriter and hook up the ImageListener.
233      *
234      * @param inputSurface The input surface of the ImageWriter.
235      * @param maxImages The max number of Images that can be dequeued simultaneously.
236      * @param listener The listener used by this ImageWriter to notify callbacks
237      * @param handler The handler to post listener callbacks.
238      * @return ImageWriter object created.
239      */
makeImageWriter( Surface inputSurface, int maxImages, ImageWriter.OnImageReleasedListener listener, Handler handler)240     public static ImageWriter makeImageWriter(
241             Surface inputSurface, int maxImages,
242             ImageWriter.OnImageReleasedListener listener, Handler handler) {
243         ImageWriter writer = ImageWriter.newInstance(inputSurface, maxImages);
244         writer.setOnImageReleasedListener(listener, handler);
245         return writer;
246     }
247 
248     /**
249      * Utility class to store the targets for mandatory stream combination test.
250      */
251     public static class StreamCombinationTargets {
252         public List<SurfaceTexture> mPrivTargets = new ArrayList<>();
253         public List<ImageReader> mJpegTargets = new ArrayList<>();
254         public List<ImageReader> mYuvTargets = new ArrayList<>();
255         public List<ImageReader> mY8Targets = new ArrayList<>();
256         public List<ImageReader> mRawTargets = new ArrayList<>();
257         public List<ImageReader> mHeicTargets = new ArrayList<>();
258         public List<ImageReader> mDepth16Targets = new ArrayList<>();
259         public List<ImageReader> mP010Targets = new ArrayList<>();
260 
261 
262         public List<MultiResolutionImageReader> mPrivMultiResTargets = new ArrayList<>();
263         public List<MultiResolutionImageReader> mJpegMultiResTargets = new ArrayList<>();
264         public List<MultiResolutionImageReader> mYuvMultiResTargets = new ArrayList<>();
265         public List<MultiResolutionImageReader> mRawMultiResTargets = new ArrayList<>();
266 
close()267         public void close() {
268             for (SurfaceTexture target : mPrivTargets) {
269                 target.release();
270             }
271             for (ImageReader target : mJpegTargets) {
272                 target.close();
273             }
274             for (ImageReader target : mYuvTargets) {
275                 target.close();
276             }
277             for (ImageReader target : mY8Targets) {
278                 target.close();
279             }
280             for (ImageReader target : mRawTargets) {
281                 target.close();
282             }
283             for (ImageReader target : mHeicTargets) {
284                 target.close();
285             }
286             for (ImageReader target : mDepth16Targets) {
287                 target.close();
288             }
289             for (ImageReader target : mP010Targets) {
290                 target.close();
291             }
292 
293             for (MultiResolutionImageReader target : mPrivMultiResTargets) {
294                 target.close();
295             }
296             for (MultiResolutionImageReader target : mJpegMultiResTargets) {
297                 target.close();
298             }
299             for (MultiResolutionImageReader target : mYuvMultiResTargets) {
300                 target.close();
301             }
302             for (MultiResolutionImageReader target : mRawMultiResTargets) {
303                 target.close();
304             }
305         }
306     }
307 
configureTarget(StreamCombinationTargets targets, List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces, int format, Size targetSize, int numBuffers, String overridePhysicalCameraId, MultiResolutionStreamConfigurationMap multiResStreamConfig, boolean createMultiResiStreamConfig, ImageDropperListener listener, Handler handler, long dynamicRangeProfile, long streamUseCase)308     private static void configureTarget(StreamCombinationTargets targets,
309             List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces,
310             int format, Size targetSize, int numBuffers, String overridePhysicalCameraId,
311             MultiResolutionStreamConfigurationMap multiResStreamConfig,
312             boolean createMultiResiStreamConfig, ImageDropperListener listener, Handler handler,
313             long dynamicRangeProfile, long streamUseCase) {
314         if (createMultiResiStreamConfig) {
315             Collection<MultiResolutionStreamInfo> multiResolutionStreams =
316                     multiResStreamConfig.getOutputInfo(format);
317             MultiResolutionImageReader multiResReader = new MultiResolutionImageReader(
318                     multiResolutionStreams, format, numBuffers);
319             multiResReader.setOnImageAvailableListener(listener, new HandlerExecutor(handler));
320             Collection<OutputConfiguration> configs =
321                     OutputConfiguration.createInstancesForMultiResolutionOutput(multiResReader);
322             outputConfigs.addAll(configs);
323             outputSurfaces.add(multiResReader.getSurface());
324             switch (format) {
325                 case ImageFormat.PRIVATE:
326                     targets.mPrivMultiResTargets.add(multiResReader);
327                     break;
328                 case ImageFormat.JPEG:
329                     targets.mJpegMultiResTargets.add(multiResReader);
330                     break;
331                 case ImageFormat.YUV_420_888:
332                     targets.mYuvMultiResTargets.add(multiResReader);
333                     break;
334                 case ImageFormat.RAW_SENSOR:
335                     targets.mRawMultiResTargets.add(multiResReader);
336                     break;
337                 default:
338                     fail("Unknown/Unsupported output format " + format);
339             }
340         } else {
341             if (format == ImageFormat.PRIVATE) {
342                 SurfaceTexture target = new SurfaceTexture(/*random int*/1);
343                 target.setDefaultBufferSize(targetSize.getWidth(), targetSize.getHeight());
344                 OutputConfiguration config = new OutputConfiguration(new Surface(target));
345                 if (overridePhysicalCameraId != null) {
346                     config.setPhysicalCameraId(overridePhysicalCameraId);
347                 }
348                 config.setDynamicRangeProfile(dynamicRangeProfile);
349                 config.setStreamUseCase(streamUseCase);
350                 outputConfigs.add(config);
351                 outputSurfaces.add(config.getSurface());
352                 targets.mPrivTargets.add(target);
353             } else {
354                 ImageReader target = ImageReader.newInstance(targetSize.getWidth(),
355                         targetSize.getHeight(), format, numBuffers);
356                 target.setOnImageAvailableListener(listener, handler);
357                 OutputConfiguration config = new OutputConfiguration(target.getSurface());
358                 if (overridePhysicalCameraId != null) {
359                     config.setPhysicalCameraId(overridePhysicalCameraId);
360                 }
361                 config.setDynamicRangeProfile(dynamicRangeProfile);
362                 config.setStreamUseCase(streamUseCase);
363                 outputConfigs.add(config);
364                 outputSurfaces.add(config.getSurface());
365 
366                 switch (format) {
367                     case ImageFormat.JPEG:
368                       targets.mJpegTargets.add(target);
369                       break;
370                     case ImageFormat.YUV_420_888:
371                       targets.mYuvTargets.add(target);
372                       break;
373                     case ImageFormat.Y8:
374                       targets.mY8Targets.add(target);
375                       break;
376                     case ImageFormat.RAW_SENSOR:
377                       targets.mRawTargets.add(target);
378                       break;
379                     case ImageFormat.HEIC:
380                       targets.mHeicTargets.add(target);
381                       break;
382                     case ImageFormat.DEPTH16:
383                       targets.mDepth16Targets.add(target);
384                       break;
385                     case ImageFormat.YCBCR_P010:
386                       targets.mP010Targets.add(target);
387                       break;
388                     default:
389                       fail("Unknown/Unsupported output format " + format);
390                 }
391             }
392         }
393     }
394 
setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo, StreamCombinationTargets targets, List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces, int numBuffers, boolean substituteY8, boolean substituteHeic, String overridenPhysicalCameraId, MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler)395     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
396             StreamCombinationTargets targets,
397             List<OutputConfiguration> outputConfigs,
398             List<Surface> outputSurfaces, int numBuffers,
399             boolean substituteY8, boolean substituteHeic, String overridenPhysicalCameraId,
400             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
401             List<Surface> uhSurfaces = new ArrayList<Surface>();
402         setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
403             numBuffers, substituteY8, substituteHeic, overridenPhysicalCameraId,
404             multiResStreamConfig, handler);
405     }
406 
setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo, StreamCombinationTargets targets, List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers, boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId, MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler)407     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
408             StreamCombinationTargets targets,
409             List<OutputConfiguration> outputConfigs,
410             List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
411             boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
412             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler) {
413         setupConfigurationTargets(streamsInfo, targets, outputConfigs, outputSurfaces, uhSurfaces,
414                 numBuffers, substituteY8, substituteHeic, overridePhysicalCameraId,
415                 multiResStreamConfig, handler, /*dynamicRangeProfiles*/ null);
416     }
417 
setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo, StreamCombinationTargets targets, List<OutputConfiguration> outputConfigs, List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers, boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId, MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler, List<Long> dynamicRangeProfiles)418     public static void setupConfigurationTargets(List<MandatoryStreamInformation> streamsInfo,
419             StreamCombinationTargets targets,
420             List<OutputConfiguration> outputConfigs,
421             List<Surface> outputSurfaces, List<Surface> uhSurfaces, int numBuffers,
422             boolean substituteY8, boolean substituteHeic, String overridePhysicalCameraId,
423             MultiResolutionStreamConfigurationMap multiResStreamConfig, Handler handler,
424             List<Long> dynamicRangeProfiles) {
425 
426         Random rnd = new Random();
427         // 10-bit output capable streams will use a fixed dynamic range profile in case
428         // dynamicRangeProfiles.size() == 1 or random in case dynamicRangeProfiles.size() > 1
429         boolean use10BitRandomProfile = (dynamicRangeProfiles != null) &&
430                 (dynamicRangeProfiles.size() > 1);
431         if (use10BitRandomProfile) {
432             Long seed = rnd.nextLong();
433             Log.i(TAG, "Random seed used for selecting 10-bit output: " + seed);
434             rnd.setSeed(seed);
435         }
436         ImageDropperListener imageDropperListener = new ImageDropperListener();
437         List<Surface> chosenSurfaces;
438         for (MandatoryStreamInformation streamInfo : streamsInfo) {
439             if (streamInfo.isInput()) {
440                 continue;
441             }
442             chosenSurfaces = outputSurfaces;
443             if (streamInfo.isUltraHighResolution()) {
444                 chosenSurfaces = uhSurfaces;
445             }
446             int format = streamInfo.getFormat();
447             if (substituteY8 && (format == ImageFormat.YUV_420_888)) {
448                 format = ImageFormat.Y8;
449             } else if (substituteHeic && (format == ImageFormat.JPEG)) {
450                 format = ImageFormat.HEIC;
451             }
452 
453             long dynamicRangeProfile = DynamicRangeProfiles.STANDARD;
454             if (streamInfo.is10BitCapable() && use10BitRandomProfile) {
455                 boolean override10bit = rnd.nextBoolean();
456                 if (!override10bit) {
457                     dynamicRangeProfile = dynamicRangeProfiles.get(rnd.nextInt(
458                             dynamicRangeProfiles.size()));
459                     format = streamInfo.get10BitFormat();
460                 }
461             } else if (streamInfo.is10BitCapable() && (dynamicRangeProfiles != null)) {
462                 dynamicRangeProfile = dynamicRangeProfiles.get(0);
463                 format = streamInfo.get10BitFormat();
464             }
465             Size[] availableSizes = new Size[streamInfo.getAvailableSizes().size()];
466             availableSizes = streamInfo.getAvailableSizes().toArray(availableSizes);
467             Size targetSize = CameraTestUtils.getMaxSize(availableSizes);
468             boolean createMultiResReader =
469                     (multiResStreamConfig != null &&
470                      !multiResStreamConfig.getOutputInfo(format).isEmpty() &&
471                      streamInfo.isMaximumSize());
472             switch (format) {
473                 case ImageFormat.PRIVATE:
474                 case ImageFormat.JPEG:
475                 case ImageFormat.YUV_420_888:
476                 case ImageFormat.YCBCR_P010:
477                 case ImageFormat.YCBCR_P210:
478                 case ImageFormat.Y8:
479                 case ImageFormat.HEIC:
480                 case ImageFormat.DEPTH16:
481                 {
482                     configureTarget(targets, outputConfigs, chosenSurfaces, format,
483                             targetSize, numBuffers, overridePhysicalCameraId, multiResStreamConfig,
484                             createMultiResReader, imageDropperListener, handler,
485                             dynamicRangeProfile, streamInfo.getStreamUseCase());
486                     break;
487                 }
488                 case ImageFormat.RAW_SENSOR: {
489                     // targetSize could be null in the logical camera case where only
490                     // physical camera supports RAW stream.
491                     if (targetSize != null) {
492                         configureTarget(targets, outputConfigs, chosenSurfaces, format,
493                                 targetSize, numBuffers, overridePhysicalCameraId,
494                                 multiResStreamConfig, createMultiResReader, imageDropperListener,
495                                 handler, dynamicRangeProfile, streamInfo.getStreamUseCase());
496                     }
497                     break;
498                 }
499                 default:
500                     fail("Unknown output format " + format);
501             }
502         }
503     }
504 
505     /**
506      * Close pending images and clean up an {@link android.media.ImageReader} object.
507      * @param reader an {@link android.media.ImageReader} to close.
508      */
closeImageReader(ImageReader reader)509     public static void closeImageReader(ImageReader reader) {
510         if (reader != null) {
511             reader.close();
512         }
513     }
514 
515     /**
516      * Close the pending images then close current active {@link ImageReader} objects.
517      */
closeImageReaders(ImageReader[] readers)518     public static void closeImageReaders(ImageReader[] readers) {
519         if ((readers != null) && (readers.length > 0)) {
520             for (ImageReader reader : readers) {
521                 CameraTestUtils.closeImageReader(reader);
522             }
523         }
524     }
525 
526     /**
527      * Close pending images and clean up an {@link android.media.ImageWriter} object.
528      * @param writer an {@link android.media.ImageWriter} to close.
529      */
closeImageWriter(ImageWriter writer)530     public static void closeImageWriter(ImageWriter writer) {
531         if (writer != null) {
532             writer.close();
533         }
534     }
535 
536     /**
537      * Placeholder listener that release the image immediately once it is available.
538      *
539      * <p>
540      * It can be used for the case where we don't care the image data at all.
541      * </p>
542      */
543     public static class ImageDropperListener implements ImageReader.OnImageAvailableListener {
544         @Override
onImageAvailable(ImageReader reader)545         public synchronized void onImageAvailable(ImageReader reader) {
546             Image image = null;
547             try {
548                 image = reader.acquireNextImage();
549             } finally {
550                 if (image != null) {
551                     image.close();
552                     mImagesDropped++;
553                 }
554             }
555         }
556 
getImageCount()557         public synchronized int getImageCount() {
558             return mImagesDropped;
559         }
560 
resetImageCount()561         public synchronized void resetImageCount() {
562             mImagesDropped = 0;
563         }
564 
565         private int mImagesDropped = 0;
566     }
567 
568     /**
569      * Image listener that release the image immediately after validating the image
570      */
571     public static class ImageVerifierListener implements ImageReader.OnImageAvailableListener {
572         private Size mSize;
573         private int mFormat;
574         // Whether the parent ImageReader is valid or not. If the parent ImageReader
575         // is destroyed, the acquired Image may become invalid.
576         private boolean mReaderIsValid;
577 
ImageVerifierListener(Size sz, int format)578         public ImageVerifierListener(Size sz, int format) {
579             mSize = sz;
580             mFormat = format;
581             mReaderIsValid = true;
582         }
583 
onReaderDestroyed()584         public synchronized void onReaderDestroyed() {
585             mReaderIsValid = false;
586         }
587 
588         @Override
onImageAvailable(ImageReader reader)589         public synchronized void onImageAvailable(ImageReader reader) {
590             Image image = null;
591             try {
592                 image = reader.acquireNextImage();
593             } finally {
594                 if (image != null) {
595                     // Should only do some quick validity checks in callback, as the ImageReader
596                     // could be closed asynchronously, which will close all images acquired from
597                     // this ImageReader.
598                     checkImage(image, mSize.getWidth(), mSize.getHeight(), mFormat);
599                     // checkAndroidImageFormat calls into underlying Image object, which could
600                     // become invalid if the ImageReader is destroyed.
601                     if (mReaderIsValid) {
602                         checkAndroidImageFormat(image);
603                     }
604                     image.close();
605                 }
606             }
607         }
608     }
609 
610     public static class SimpleImageReaderListener
611             implements ImageReader.OnImageAvailableListener {
612         private final LinkedBlockingQueue<Image> mQueue =
613                 new LinkedBlockingQueue<Image>();
614         // Indicate whether this listener will drop images or not,
615         // when the queued images reaches the reader maxImages
616         private final boolean mAsyncMode;
617         // maxImages held by the queue in async mode.
618         private final int mMaxImages;
619 
620         /**
621          * Create a synchronous SimpleImageReaderListener that queues the images
622          * automatically when they are available, no image will be dropped. If
623          * the caller doesn't call getImage(), the producer will eventually run
624          * into buffer starvation.
625          */
SimpleImageReaderListener()626         public SimpleImageReaderListener() {
627             mAsyncMode = false;
628             mMaxImages = 0;
629         }
630 
631         /**
632          * Create a synchronous/asynchronous SimpleImageReaderListener that
633          * queues the images automatically when they are available. For
634          * asynchronous listener, image will be dropped if the queued images
635          * reach to maxImages queued. If the caller doesn't call getImage(), the
636          * producer will not be blocked. For synchronous listener, no image will
637          * be dropped. If the caller doesn't call getImage(), the producer will
638          * eventually run into buffer starvation.
639          *
640          * @param asyncMode If the listener is operating at asynchronous mode.
641          * @param maxImages The max number of images held by this listener.
642          */
643         /**
644          *
645          * @param asyncMode
646          */
SimpleImageReaderListener(boolean asyncMode, int maxImages)647         public SimpleImageReaderListener(boolean asyncMode, int maxImages) {
648             mAsyncMode = asyncMode;
649             mMaxImages = maxImages;
650         }
651 
652         @Override
onImageAvailable(ImageReader reader)653         public void onImageAvailable(ImageReader reader) {
654             try {
655                 Image imge = reader.acquireNextImage();
656                 if (imge == null) {
657                     return;
658                 }
659                 mQueue.put(imge);
660                 if (mAsyncMode && mQueue.size() >= mMaxImages) {
661                     Image img = mQueue.poll();
662                     img.close();
663                 }
664             } catch (InterruptedException e) {
665                 throw new UnsupportedOperationException(
666                         "Can't handle InterruptedException in onImageAvailable");
667             }
668         }
669 
670         /**
671          * Get an image from the image reader.
672          *
673          * @param timeout Timeout value for the wait.
674          * @return The image from the image reader.
675          */
getImage(long timeout)676         public Image getImage(long timeout) throws InterruptedException {
677             Image image = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
678             assertNotNull("Wait for an image timed out in " + timeout + "ms", image);
679             return image;
680         }
681 
682         /**
683          * Drain the pending images held by this listener currently.
684          *
685          */
drain()686         public void drain() {
687             while (!mQueue.isEmpty()) {
688                 Image image = mQueue.poll();
689                 assertNotNull("Unable to get an image", image);
690                 image.close();
691             }
692         }
693     }
694 
695     public static class SimpleImageWriterListener implements ImageWriter.OnImageReleasedListener {
696         private final Semaphore mImageReleasedSema = new Semaphore(0);
697         private final ImageWriter mWriter;
698         @Override
onImageReleased(ImageWriter writer)699         public void onImageReleased(ImageWriter writer) {
700             if (writer != mWriter) {
701                 return;
702             }
703 
704             if (VERBOSE) {
705                 Log.v(TAG, "Input image is released");
706             }
707             mImageReleasedSema.release();
708         }
709 
SimpleImageWriterListener(ImageWriter writer)710         public SimpleImageWriterListener(ImageWriter writer) {
711             if (writer == null) {
712                 throw new IllegalArgumentException("writer cannot be null");
713             }
714             mWriter = writer;
715         }
716 
waitForImageReleased(long timeoutMs)717         public void waitForImageReleased(long timeoutMs) throws InterruptedException {
718             if (!mImageReleasedSema.tryAcquire(timeoutMs, TimeUnit.MILLISECONDS)) {
719                 fail("wait for image available timed out after " + timeoutMs + "ms");
720             }
721         }
722     }
723 
724     public static class ImageAndMultiResStreamInfo {
725         public final Image image;
726         public final MultiResolutionStreamInfo streamInfo;
727 
ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo)728         public ImageAndMultiResStreamInfo(Image image, MultiResolutionStreamInfo streamInfo) {
729             this.image = image;
730             this.streamInfo = streamInfo;
731         }
732     }
733 
734     public static class SimpleMultiResolutionImageReaderListener
735             implements ImageReader.OnImageAvailableListener {
SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner, int maxBuffers, boolean acquireLatest)736         public SimpleMultiResolutionImageReaderListener(MultiResolutionImageReader owner,
737                 int maxBuffers, boolean acquireLatest) {
738             mOwner = owner;
739             mMaxBuffers = maxBuffers;
740             mAcquireLatest = acquireLatest;
741         }
742 
743         @Override
onImageAvailable(ImageReader reader)744         public void onImageAvailable(ImageReader reader) {
745             if (VERBOSE) Log.v(TAG, "new image available from reader " + reader.toString());
746 
747             if (mAcquireLatest) {
748                 synchronized (mLock) {
749                     // If there is switch of image readers, acquire and releases all images
750                     // from the previous image reader
751                     if (mLastReader != reader) {
752                         if (mLastReader != null) {
753                             Image image = mLastReader.acquireLatestImage();
754                             if (image != null) {
755                                 image.close();
756                             }
757                         }
758                         mLastReader = reader;
759                     }
760                 }
761                 mImageAvailable.open();
762             } else {
763                 if (mQueue.size() < mMaxBuffers) {
764                     Image image = reader.acquireNextImage();
765                     MultiResolutionStreamInfo multiResStreamInfo =
766                             mOwner.getStreamInfoForImageReader(reader);
767                     mQueue.offer(new ImageAndMultiResStreamInfo(image, multiResStreamInfo));
768                 }
769             }
770         }
771 
getAnyImageAndInfoAvailable(long timeoutMs)772         public ImageAndMultiResStreamInfo getAnyImageAndInfoAvailable(long timeoutMs)
773                 throws Exception {
774             if (mAcquireLatest) {
775                 Image image = null;
776                 if (mImageAvailable.block(timeoutMs)) {
777                     synchronized (mLock) {
778                         if (mLastReader != null) {
779                             image = mLastReader.acquireLatestImage();
780                             if (VERBOSE) Log.v(TAG, "acquireLatestImage from "
781                                     + mLastReader.toString() + " produces " + image);
782                         } else {
783                             fail("invalid image reader");
784                         }
785                     }
786                     mImageAvailable.close();
787                 } else {
788                     fail("wait for image available time out after " + timeoutMs + "ms");
789                 }
790                 return image == null ? null : new ImageAndMultiResStreamInfo(image,
791                         mOwner.getStreamInfoForImageReader(mLastReader));
792             } else {
793                 ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll(timeoutMs,
794                         java.util.concurrent.TimeUnit.MILLISECONDS);
795                 if (imageAndInfo == null) {
796                     fail("wait for image available timed out after " + timeoutMs + "ms");
797                 }
798                 return imageAndInfo;
799             }
800         }
801 
reset()802         public void reset() {
803             while (!mQueue.isEmpty()) {
804                 ImageAndMultiResStreamInfo imageAndInfo = mQueue.poll();
805                 assertNotNull("Acquired image is not valid", imageAndInfo.image);
806                 imageAndInfo.image.close();
807             }
808             mImageAvailable.close();
809             mLastReader = null;
810         }
811 
812         private LinkedBlockingQueue<ImageAndMultiResStreamInfo> mQueue =
813                 new LinkedBlockingQueue<ImageAndMultiResStreamInfo>();
814         private final MultiResolutionImageReader mOwner;
815         private final int mMaxBuffers;
816         private final boolean mAcquireLatest;
817         private ConditionVariable mImageAvailable = new ConditionVariable();
818         private ImageReader mLastReader = null;
819         private final Object mLock = new Object();
820     }
821 
822     public static class SimpleCaptureCallback extends CameraCaptureSession.CaptureCallback {
823         private final LinkedBlockingQueue<TotalCaptureResult> mQueue =
824                 new LinkedBlockingQueue<TotalCaptureResult>();
825         private final LinkedBlockingQueue<CaptureFailure> mFailureQueue =
826                 new LinkedBlockingQueue<>();
827         // (Surface, framenumber) pair for lost buffers
828         private final LinkedBlockingQueue<Pair<Surface, Long>> mBufferLostQueue =
829                 new LinkedBlockingQueue<>();
830         private final LinkedBlockingQueue<Integer> mAbortQueue =
831                 new LinkedBlockingQueue<>();
832         // Pair<CaptureRequest, Long> is a pair of capture request and start of exposure timestamp.
833         private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mCaptureStartQueue =
834                 new LinkedBlockingQueue<>();
835         // Pair<CaptureRequest, Long> is a pair of capture request and readout timestamp.
836         private final LinkedBlockingQueue<Pair<CaptureRequest, Long>> mReadoutStartQueue =
837                 new LinkedBlockingQueue<>();
838         // Pair<Int, Long> is a pair of sequence id and frame number
839         private final LinkedBlockingQueue<Pair<Integer, Long>> mCaptureSequenceCompletedQueue =
840                 new LinkedBlockingQueue<>();
841 
842         private AtomicLong mNumFramesArrived = new AtomicLong(0);
843 
844         @Override
onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber)845         public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
846                 long timestamp, long frameNumber) {
847             try {
848                 mCaptureStartQueue.put(new Pair(request, timestamp));
849             } catch (InterruptedException e) {
850                 throw new UnsupportedOperationException(
851                         "Can't handle InterruptedException in onCaptureStarted");
852             }
853         }
854 
855         @Override
onReadoutStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber)856         public void onReadoutStarted(CameraCaptureSession session, CaptureRequest request,
857                 long timestamp, long frameNumber) {
858             try {
859                 mReadoutStartQueue.put(new Pair(request, timestamp));
860             } catch (InterruptedException e) {
861                 throw new UnsupportedOperationException(
862                         "Can't handle InterruptedException in onReadoutStarted");
863             }
864         }
865 
866         @Override
onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result)867         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
868                 TotalCaptureResult result) {
869             try {
870                 mNumFramesArrived.incrementAndGet();
871                 mQueue.put(result);
872             } catch (InterruptedException e) {
873                 throw new UnsupportedOperationException(
874                         "Can't handle InterruptedException in onCaptureCompleted");
875             }
876         }
877 
878         @Override
onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure)879         public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
880                 CaptureFailure failure) {
881             try {
882                 mFailureQueue.put(failure);
883             } catch (InterruptedException e) {
884                 throw new UnsupportedOperationException(
885                         "Can't handle InterruptedException in onCaptureFailed");
886             }
887         }
888 
889         @Override
onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId)890         public void onCaptureSequenceAborted(CameraCaptureSession session, int sequenceId) {
891             try {
892                 mAbortQueue.put(sequenceId);
893             } catch (InterruptedException e) {
894                 throw new UnsupportedOperationException(
895                         "Can't handle InterruptedException in onCaptureAborted");
896             }
897         }
898 
899         @Override
onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId, long frameNumber)900         public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId,
901                 long frameNumber) {
902             try {
903                 mCaptureSequenceCompletedQueue.put(new Pair(sequenceId, frameNumber));
904             } catch (InterruptedException e) {
905                 throw new UnsupportedOperationException(
906                         "Can't handle InterruptedException in onCaptureSequenceCompleted");
907             }
908         }
909 
910         @Override
onCaptureBufferLost(CameraCaptureSession session, CaptureRequest request, Surface target, long frameNumber)911         public void onCaptureBufferLost(CameraCaptureSession session,
912                 CaptureRequest request, Surface target, long frameNumber) {
913             try {
914                 mBufferLostQueue.put(new Pair<>(target, frameNumber));
915             } catch (InterruptedException e) {
916                 throw new UnsupportedOperationException(
917                         "Can't handle InterruptedException in onCaptureBufferLost");
918             }
919         }
920 
getTotalNumFrames()921         public long getTotalNumFrames() {
922             return mNumFramesArrived.get();
923         }
924 
getCaptureResult(long timeout)925         public CaptureResult getCaptureResult(long timeout) {
926             return getTotalCaptureResult(timeout);
927         }
928 
getCaptureResult(long timeout, long timestamp)929         public TotalCaptureResult getCaptureResult(long timeout, long timestamp) {
930             try {
931                 long currentTs = -1L;
932                 TotalCaptureResult result;
933                 while (true) {
934                     result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
935                     if (result == null) {
936                         throw new RuntimeException(
937                                 "Wait for a capture result timed out in " + timeout + "ms");
938                     }
939                     currentTs = result.get(CaptureResult.SENSOR_TIMESTAMP);
940                     if (currentTs == timestamp) {
941                         return result;
942                     }
943                 }
944 
945             } catch (InterruptedException e) {
946                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
947             }
948         }
949 
getTotalCaptureResult(long timeout)950         public TotalCaptureResult getTotalCaptureResult(long timeout) {
951             try {
952                 TotalCaptureResult result = mQueue.poll(timeout, TimeUnit.MILLISECONDS);
953                 assertNotNull("Wait for a capture result timed out in " + timeout + "ms", result);
954                 return result;
955             } catch (InterruptedException e) {
956                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
957             }
958         }
959 
960         /**
961          * Get the {@link #CaptureResult capture result} for a given
962          * {@link #CaptureRequest capture request}.
963          *
964          * @param myRequest The {@link #CaptureRequest capture request} whose
965          *            corresponding {@link #CaptureResult capture result} was
966          *            being waited for
967          * @param numResultsWait Number of frames to wait for the capture result
968          *            before timeout.
969          * @throws TimeoutRuntimeException If more than numResultsWait results are
970          *            seen before the result matching myRequest arrives, or each
971          *            individual wait for result times out after
972          *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
973          */
getCaptureResultForRequest(CaptureRequest myRequest, int numResultsWait)974         public CaptureResult getCaptureResultForRequest(CaptureRequest myRequest,
975                 int numResultsWait) {
976             return getTotalCaptureResultForRequest(myRequest, numResultsWait);
977         }
978 
979         /**
980          * Get the {@link #TotalCaptureResult total capture result} for a given
981          * {@link #CaptureRequest capture request}.
982          *
983          * @param myRequest The {@link #CaptureRequest capture request} whose
984          *            corresponding {@link #TotalCaptureResult capture result} was
985          *            being waited for
986          * @param numResultsWait Number of frames to wait for the capture result
987          *            before timeout.
988          * @throws TimeoutRuntimeException If more than numResultsWait results are
989          *            seen before the result matching myRequest arrives, or each
990          *            individual wait for result times out after
991          *            {@value #CAPTURE_RESULT_TIMEOUT_MS}ms.
992          */
getTotalCaptureResultForRequest(CaptureRequest myRequest, int numResultsWait)993         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
994                 int numResultsWait) {
995             return getTotalCaptureResultForRequest(myRequest, numResultsWait,
996                     CAPTURE_RESULT_TIMEOUT_MS);
997         }
998 
999         /**
1000          * Get the {@link #TotalCaptureResult total capture result} for a given
1001          * {@link #CaptureRequest capture request}.
1002          *
1003          * @param myRequest The {@link #CaptureRequest capture request} whose
1004          *            corresponding {@link #TotalCaptureResult capture result} was
1005          *            being waited for
1006          * @param numResultsWait Number of frames to wait for the capture result
1007          *            before timeout.
1008          * @param timeoutForResult Timeout to wait for each capture result.
1009          * @throws TimeoutRuntimeException If more than numResultsWait results are
1010          *            seen before the result matching myRequest arrives, or each
1011          *            individual wait for result times out after
1012          *            timeoutForResult ms.
1013          */
getTotalCaptureResultForRequest(CaptureRequest myRequest, int numResultsWait, int timeoutForResult)1014         public TotalCaptureResult getTotalCaptureResultForRequest(CaptureRequest myRequest,
1015                 int numResultsWait, int timeoutForResult) {
1016             ArrayList<CaptureRequest> captureRequests = new ArrayList<>(1);
1017             captureRequests.add(myRequest);
1018             return getTotalCaptureResultsForRequests(
1019                     captureRequests, numResultsWait, timeoutForResult)[0];
1020         }
1021 
1022         /**
1023          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1024          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1025          * may not the same as the order of requests.
1026          *
1027          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1028          *            corresponding {@link #TotalCaptureResult capture results} are
1029          *            being waited for.
1030          * @param numResultsWait Number of frames to wait for the capture results
1031          *            before timeout.
1032          * @throws TimeoutRuntimeException If more than numResultsWait results are
1033          *            seen before all the results matching captureRequests arrives.
1034          */
getTotalCaptureResultsForRequests( List<CaptureRequest> captureRequests, int numResultsWait)1035         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1036                 List<CaptureRequest> captureRequests, int numResultsWait) {
1037             return getTotalCaptureResultsForRequests(captureRequests, numResultsWait,
1038                     CAPTURE_RESULT_TIMEOUT_MS);
1039         }
1040 
1041         /**
1042          * Get an array of {@link #TotalCaptureResult total capture results} for a given list of
1043          * {@link #CaptureRequest capture requests}. This can be used when the order of results
1044          * may not the same as the order of requests.
1045          *
1046          * @param captureRequests The list of {@link #CaptureRequest capture requests} whose
1047          *            corresponding {@link #TotalCaptureResult capture results} are
1048          *            being waited for.
1049          * @param numResultsWait Number of frames to wait for the capture results
1050          *            before timeout.
1051          * @param timeoutForResult Timeout to wait for each capture result.
1052          * @throws TimeoutRuntimeException If more than numResultsWait results are
1053          *            seen before all the results matching captureRequests arrives.
1054          */
getTotalCaptureResultsForRequests( List<CaptureRequest> captureRequests, int numResultsWait, int timeoutForResult)1055         public TotalCaptureResult[] getTotalCaptureResultsForRequests(
1056                 List<CaptureRequest> captureRequests, int numResultsWait, int timeoutForResult) {
1057             if (numResultsWait < 0) {
1058                 throw new IllegalArgumentException("numResultsWait must be no less than 0");
1059             }
1060             if (captureRequests == null || captureRequests.size() == 0) {
1061                 throw new IllegalArgumentException("captureRequests must have at least 1 request.");
1062             }
1063 
1064             // Create a request -> a list of result indices map that it will wait for.
1065             HashMap<CaptureRequest, ArrayList<Integer>> remainingResultIndicesMap = new HashMap<>();
1066             for (int i = 0; i < captureRequests.size(); i++) {
1067                 CaptureRequest request = captureRequests.get(i);
1068                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1069                 if (indices == null) {
1070                     indices = new ArrayList<>();
1071                     remainingResultIndicesMap.put(request, indices);
1072                 }
1073                 indices.add(i);
1074             }
1075 
1076             TotalCaptureResult[] results = new TotalCaptureResult[captureRequests.size()];
1077             int i = 0;
1078             do {
1079                 TotalCaptureResult result = getTotalCaptureResult(timeoutForResult);
1080                 CaptureRequest request = result.getRequest();
1081                 ArrayList<Integer> indices = remainingResultIndicesMap.get(request);
1082                 if (indices != null) {
1083                     results[indices.get(0)] = result;
1084                     indices.remove(0);
1085 
1086                     // Remove the entry if all results for this request has been fulfilled.
1087                     if (indices.isEmpty()) {
1088                         remainingResultIndicesMap.remove(request);
1089                     }
1090                 }
1091 
1092                 if (remainingResultIndicesMap.isEmpty()) {
1093                     return results;
1094                 }
1095             } while (i++ < numResultsWait);
1096 
1097             throw new TimeoutRuntimeException("Unable to get the expected capture result after "
1098                     + "waiting for " + numResultsWait + " results");
1099         }
1100 
1101         /**
1102          * Get an array list of {@link #CaptureFailure capture failure} with maxNumFailures entries
1103          * at most. If it times out before maxNumFailures failures are received, return the failures
1104          * received so far.
1105          *
1106          * @param maxNumFailures The maximal number of failures to return. If it times out before
1107          *                       the maximal number of failures are received, return the received
1108          *                       failures so far.
1109          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1110          */
getCaptureFailures(long maxNumFailures)1111         public ArrayList<CaptureFailure> getCaptureFailures(long maxNumFailures) {
1112             ArrayList<CaptureFailure> failures = new ArrayList<>();
1113             try {
1114                 for (int i = 0; i < maxNumFailures; i++) {
1115                     CaptureFailure failure = mFailureQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1116                             TimeUnit.MILLISECONDS);
1117                     if (failure == null) {
1118                         // If waiting on a failure times out, return the failures so far.
1119                         break;
1120                     }
1121                     failures.add(failure);
1122                 }
1123             }  catch (InterruptedException e) {
1124                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1125             }
1126 
1127             return failures;
1128         }
1129 
1130         /**
1131          * Get an array list of lost buffers with maxNumLost entries at most.
1132          * If it times out before maxNumLost buffer lost callbacks are received, return the
1133          * lost callbacks received so far.
1134          *
1135          * @param maxNumLost The maximal number of buffer lost failures to return. If it times out
1136          *                   before the maximal number of failures are received, return the received
1137          *                   buffer lost failures so far.
1138          * @throws UnsupportedOperationException If an error happens while waiting on the failure.
1139          */
getLostBuffers(long maxNumLost)1140         public ArrayList<Pair<Surface, Long>> getLostBuffers(long maxNumLost) {
1141             ArrayList<Pair<Surface, Long>> failures = new ArrayList<>();
1142             try {
1143                 for (int i = 0; i < maxNumLost; i++) {
1144                     Pair<Surface, Long> failure = mBufferLostQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1145                             TimeUnit.MILLISECONDS);
1146                     if (failure == null) {
1147                         // If waiting on a failure times out, return the failures so far.
1148                         break;
1149                     }
1150                     failures.add(failure);
1151                 }
1152             }  catch (InterruptedException e) {
1153                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1154             }
1155 
1156             return failures;
1157         }
1158 
1159         /**
1160          * Get an array list of aborted capture sequence ids with maxNumAborts entries
1161          * at most. If it times out before maxNumAborts are received, return the aborted sequences
1162          * received so far.
1163          *
1164          * @param maxNumAborts The maximal number of aborted sequences to return. If it times out
1165          *                     before the maximal number of aborts are received, return the received
1166          *                     failed sequences so far.
1167          * @throws UnsupportedOperationException If an error happens while waiting on the failed
1168          *                                       sequences.
1169          */
geAbortedSequences(long maxNumAborts)1170         public ArrayList<Integer> geAbortedSequences(long maxNumAborts) {
1171             ArrayList<Integer> abortList = new ArrayList<>();
1172             try {
1173                 for (int i = 0; i < maxNumAborts; i++) {
1174                     Integer abortSequence = mAbortQueue.poll(CAPTURE_RESULT_TIMEOUT_MS,
1175                             TimeUnit.MILLISECONDS);
1176                     if (abortSequence == null) {
1177                         break;
1178                     }
1179                     abortList.add(abortSequence);
1180                 }
1181             }  catch (InterruptedException e) {
1182                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1183             }
1184 
1185             return abortList;
1186         }
1187 
1188         /**
1189          * Wait until the capture start of a request and expected timestamp arrives or it times
1190          * out after a number of capture starts.
1191          *
1192          * @param request The request for the capture start to wait for.
1193          * @param timestamp The timestamp for the capture start to wait for.
1194          * @param numCaptureStartsWait The number of capture start events to wait for before timing
1195          *                             out.
1196          */
waitForCaptureStart(CaptureRequest request, Long timestamp, int numCaptureStartsWait)1197         public void waitForCaptureStart(CaptureRequest request, Long timestamp,
1198                 int numCaptureStartsWait) throws Exception {
1199             Pair<CaptureRequest, Long> expectedShutter = new Pair<>(request, timestamp);
1200 
1201             int i = 0;
1202             do {
1203                 Pair<CaptureRequest, Long> shutter = mCaptureStartQueue.poll(
1204                         CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1205 
1206                 if (shutter == null) {
1207                     throw new TimeoutRuntimeException("Unable to get any more capture start " +
1208                             "event after waiting for " + CAPTURE_RESULT_TIMEOUT_MS + " ms.");
1209                 } else if (expectedShutter.equals(shutter)) {
1210                     return;
1211                 }
1212 
1213             } while (i++ < numCaptureStartsWait);
1214 
1215             throw new TimeoutRuntimeException("Unable to get the expected capture start " +
1216                     "event after waiting for " + numCaptureStartsWait + " capture starts");
1217         }
1218 
1219         /**
1220          * Wait until it receives capture sequence completed callback for a given squence ID.
1221          *
1222          * @param sequenceId The sequence ID of the capture sequence completed callback to wait for.
1223          * @param timeoutMs Time to wait for each capture sequence complete callback before
1224          *                  timing out.
1225          */
getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs)1226         public long getCaptureSequenceLastFrameNumber(int sequenceId, long timeoutMs) {
1227             try {
1228                 while (true) {
1229                     Pair<Integer, Long> completedSequence =
1230                             mCaptureSequenceCompletedQueue.poll(timeoutMs, TimeUnit.MILLISECONDS);
1231                     assertNotNull("Wait for a capture sequence completed timed out in " +
1232                             timeoutMs + "ms", completedSequence);
1233 
1234                     if (completedSequence.first.equals(sequenceId)) {
1235                         return completedSequence.second.longValue();
1236                     }
1237                 }
1238             } catch (InterruptedException e) {
1239                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1240             }
1241         }
1242 
hasMoreResults()1243         public boolean hasMoreResults()
1244         {
1245             return !mQueue.isEmpty();
1246         }
1247 
hasMoreFailures()1248         public boolean hasMoreFailures()
1249         {
1250             return !mFailureQueue.isEmpty();
1251         }
1252 
getNumLostBuffers()1253         public int getNumLostBuffers()
1254         {
1255             return mBufferLostQueue.size();
1256         }
1257 
hasMoreAbortedSequences()1258         public boolean hasMoreAbortedSequences()
1259         {
1260             return !mAbortQueue.isEmpty();
1261         }
1262 
getCaptureStartTimestamps(int count)1263         public List<Long> getCaptureStartTimestamps(int count) {
1264             List<Long> timestamps = new ArrayList<Long>();
1265             try {
1266                 while (timestamps.size() < count) {
1267                     Pair<CaptureRequest, Long> captureStart = mCaptureStartQueue.poll(
1268                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1269                     assertNotNull("Wait for a capture start timed out in "
1270                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", captureStart);
1271 
1272                     timestamps.add(captureStart.second);
1273                 }
1274                 return timestamps;
1275             } catch (InterruptedException e) {
1276                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1277             }
1278         }
1279 
1280         /**
1281          * Get start of readout timestamps
1282          *
1283          * @param count The number of captures
1284          * @return The list of start of readout timestamps
1285          */
getReadoutStartTimestamps(int count)1286         public List<Long> getReadoutStartTimestamps(int count) {
1287             List<Long> timestamps = new ArrayList<Long>();
1288             try {
1289                 while (timestamps.size() < count) {
1290                     Pair<CaptureRequest, Long> readoutStart = mReadoutStartQueue.poll(
1291                             CAPTURE_RESULT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
1292                     assertNotNull("Wait for a readout start timed out in "
1293                             + CAPTURE_RESULT_TIMEOUT_MS + "ms", readoutStart);
1294 
1295                     timestamps.add(readoutStart.second);
1296                 }
1297                 return timestamps;
1298             } catch (InterruptedException e) {
1299                 throw new UnsupportedOperationException("Unhandled interrupted exception", e);
1300             }
1301         }
1302 
drain()1303         public void drain() {
1304             mQueue.clear();
1305             mNumFramesArrived.getAndSet(0);
1306             mFailureQueue.clear();
1307             mBufferLostQueue.clear();
1308             mCaptureStartQueue.clear();
1309             mReadoutStartQueue.clear();
1310             mAbortQueue.clear();
1311         }
1312     }
1313 
1314     private static class BlockingCameraManager
1315             extends com.android.ex.camera2.blocking.BlockingCameraManager {
1316 
BlockingCameraManager(CameraManager manager)1317         BlockingCameraManager(CameraManager manager) {
1318             super(manager);
1319         }
1320 
openCamera(String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)1321         public CameraDevice openCamera(String cameraId, boolean overrideToPortrait,
1322                 CameraDevice.StateCallback listener, Handler handler)
1323                 throws CameraAccessException, BlockingOpenException {
1324             if (handler == null) {
1325                 throw new IllegalArgumentException("handler must not be null");
1326             } else if (handler.getLooper() == Looper.myLooper()) {
1327                 throw new IllegalArgumentException(
1328                         "handler's looper must not be the current looper");
1329             }
1330 
1331             return (new OpenListener(mManager, cameraId, overrideToPortrait, listener, handler))
1332                     .blockUntilOpen();
1333         }
1334 
1335         protected class OpenListener
1336                 extends com.android.ex.camera2.blocking.BlockingCameraManager.OpenListener {
OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)1337             OpenListener(CameraManager manager, String cameraId, boolean overrideToPortrait,
1338                     CameraDevice.StateCallback listener, Handler handler)
1339                     throws CameraAccessException {
1340                 super(cameraId, listener);
1341                 manager.openCamera(cameraId, overrideToPortrait, handler, this);
1342             }
1343         }
1344     }
1345 
hasCapability(CameraCharacteristics characteristics, int capability)1346     public static boolean hasCapability(CameraCharacteristics characteristics, int capability) {
1347         int [] capabilities =
1348                 characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
1349         for (int c : capabilities) {
1350             if (c == capability) {
1351                 return true;
1352             }
1353         }
1354         return false;
1355     }
1356 
isSystemCamera(CameraManager manager, String cameraId)1357     public static boolean isSystemCamera(CameraManager manager, String cameraId)
1358             throws CameraAccessException {
1359         CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
1360         return hasCapability(characteristics,
1361                 CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA);
1362     }
1363 
getCameraIdListForTesting(CameraManager manager, boolean getSystemCameras)1364     public static String[] getCameraIdListForTesting(CameraManager manager,
1365             boolean getSystemCameras)
1366             throws CameraAccessException {
1367         String [] ids = manager.getCameraIdListNoLazy();
1368         List<String> idsForTesting = new ArrayList<String>();
1369         for (String id : ids) {
1370             boolean isSystemCamera = isSystemCamera(manager, id);
1371             if (getSystemCameras == isSystemCamera) {
1372                 idsForTesting.add(id);
1373             }
1374         }
1375         return idsForTesting.toArray(new String[idsForTesting.size()]);
1376     }
1377 
getConcurrentCameraIds(CameraManager manager, boolean getSystemCameras)1378     public static Set<Set<String>> getConcurrentCameraIds(CameraManager manager,
1379             boolean getSystemCameras)
1380             throws CameraAccessException {
1381         Set<String> cameraIds = new HashSet<String>(Arrays.asList(getCameraIdListForTesting(manager, getSystemCameras)));
1382         Set<Set<String>> combinations =  manager.getConcurrentCameraIds();
1383         Set<Set<String>> correctComb = new HashSet<Set<String>>();
1384         for (Set<String> comb : combinations) {
1385             Set<String> filteredIds = new HashSet<String>();
1386             for (String id : comb) {
1387                 if (cameraIds.contains(id)) {
1388                     filteredIds.add(id);
1389                 }
1390             }
1391             if (filteredIds.isEmpty()) {
1392                 continue;
1393             }
1394             correctComb.add(filteredIds);
1395         }
1396         return correctComb;
1397     }
1398 
1399     /**
1400      * Block until the camera is opened.
1401      *
1402      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1403      * an AssertionError if it fails to open the camera device.</p>
1404      *
1405      * @return CameraDevice opened camera device
1406      *
1407      * @throws IllegalArgumentException
1408      *            If the handler is null, or if the handler's looper is current.
1409      * @throws CameraAccessException
1410      *            If open fails immediately.
1411      * @throws BlockingOpenException
1412      *            If open fails after blocking for some amount of time.
1413      * @throws TimeoutRuntimeException
1414      *            If opening times out. Typically unrecoverable.
1415      */
openCamera(CameraManager manager, String cameraId, CameraDevice.StateCallback listener, Handler handler)1416     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1417             CameraDevice.StateCallback listener, Handler handler) throws CameraAccessException,
1418             BlockingOpenException {
1419 
1420         /**
1421          * Although camera2 API allows 'null' Handler (it will just use the current
1422          * thread's Looper), this is not what we want for CTS.
1423          *
1424          * In CTS the default looper is used only to process events in between test runs,
1425          * so anything sent there would not be executed inside a test and the test would fail.
1426          *
1427          * In this case, BlockingCameraManager#openCamera performs the check for us.
1428          */
1429         return (new CameraTestUtils.BlockingCameraManager(manager))
1430                 .openCamera(cameraId, listener, handler);
1431     }
1432 
1433     /**
1434      * Block until the camera is opened.
1435      *
1436      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1437      * an AssertionError if it fails to open the camera device.</p>
1438      *
1439      * @throws IllegalArgumentException
1440      *            If the handler is null, or if the handler's looper is current.
1441      * @throws CameraAccessException
1442      *            If open fails immediately.
1443      * @throws BlockingOpenException
1444      *            If open fails after blocking for some amount of time.
1445      * @throws TimeoutRuntimeException
1446      *            If opening times out. Typically unrecoverable.
1447      */
openCamera(CameraManager manager, String cameraId, boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)1448     public static CameraDevice openCamera(CameraManager manager, String cameraId,
1449             boolean overrideToPortrait, CameraDevice.StateCallback listener, Handler handler)
1450             throws CameraAccessException, BlockingOpenException {
1451         return (new CameraTestUtils.BlockingCameraManager(manager))
1452                 .openCamera(cameraId, overrideToPortrait, listener, handler);
1453     }
1454 
1455 
1456     /**
1457      * Block until the camera is opened.
1458      *
1459      * <p>Don't use this to test #onDisconnected/#onError since this will throw
1460      * an AssertionError if it fails to open the camera device.</p>
1461      *
1462      * @throws IllegalArgumentException
1463      *            If the handler is null, or if the handler's looper is current.
1464      * @throws CameraAccessException
1465      *            If open fails immediately.
1466      * @throws BlockingOpenException
1467      *            If open fails after blocking for some amount of time.
1468      * @throws TimeoutRuntimeException
1469      *            If opening times out. Typically unrecoverable.
1470      */
openCamera(CameraManager manager, String cameraId, Handler handler)1471     public static CameraDevice openCamera(CameraManager manager, String cameraId, Handler handler)
1472             throws CameraAccessException,
1473             BlockingOpenException {
1474         return openCamera(manager, cameraId, /*listener*/null, handler);
1475     }
1476 
1477     /**
1478      * Configure a new camera session with output surfaces and type.
1479      *
1480      * @param camera The CameraDevice to be configured.
1481      * @param outputSurfaces The surface list that used for camera output.
1482      * @param listener The callback CameraDevice will notify when capture results are available.
1483      */
configureCameraSession(CameraDevice camera, List<Surface> outputSurfaces, boolean isHighSpeed, CameraCaptureSession.StateCallback listener, Handler handler)1484     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1485             List<Surface> outputSurfaces, boolean isHighSpeed,
1486             CameraCaptureSession.StateCallback listener, Handler handler)
1487             throws CameraAccessException {
1488         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1489         if (isHighSpeed) {
1490             camera.createConstrainedHighSpeedCaptureSession(outputSurfaces,
1491                     sessionListener, handler);
1492         } else {
1493             camera.createCaptureSession(outputSurfaces, sessionListener, handler);
1494         }
1495         CameraCaptureSession session =
1496                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1497         assertFalse("Camera session should not be a reprocessable session",
1498                 session.isReprocessable());
1499         String sessionType = isHighSpeed ? "High Speed" : "Normal";
1500         assertTrue("Capture session type must be " + sessionType,
1501                 isHighSpeed ==
1502                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(session.getClass()));
1503 
1504         return session;
1505     }
1506 
1507     /**
1508      * Build a new constrained camera session with output surfaces, type and recording session
1509      * parameters.
1510      *
1511      * @param camera The CameraDevice to be configured.
1512      * @param outputSurfaces The surface list that used for camera output.
1513      * @param listener The callback CameraDevice will notify when capture results are available.
1514      * @param initialRequest Initial request settings to use as session parameters.
1515      */
buildConstrainedCameraSession(CameraDevice camera, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler, CaptureRequest initialRequest)1516     public static CameraCaptureSession buildConstrainedCameraSession(CameraDevice camera,
1517             List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener,
1518             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1519         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1520 
1521         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1522         for (Surface surface : outputSurfaces) {
1523             outConfigurations.add(new OutputConfiguration(surface));
1524         }
1525         SessionConfiguration sessionConfig = new SessionConfiguration(
1526                 SessionConfiguration.SESSION_HIGH_SPEED, outConfigurations,
1527                 new HandlerExecutor(handler), sessionListener);
1528         sessionConfig.setSessionParameters(initialRequest);
1529         camera.createCaptureSession(sessionConfig);
1530 
1531         CameraCaptureSession session =
1532                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1533         assertFalse("Camera session should not be a reprocessable session",
1534                 session.isReprocessable());
1535         assertTrue("Capture session type must be High Speed",
1536                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1537                         session.getClass()));
1538 
1539         return session;
1540     }
1541 
1542     /**
1543      * Configure a new camera session with output configurations.
1544      *
1545      * @param camera The CameraDevice to be configured.
1546      * @param outputs The OutputConfiguration list that is used for camera output.
1547      * @param listener The callback CameraDevice will notify when capture results are available.
1548      */
configureCameraSessionWithConfig(CameraDevice camera, List<OutputConfiguration> outputs, CameraCaptureSession.StateCallback listener, Handler handler)1549     public static CameraCaptureSession configureCameraSessionWithConfig(CameraDevice camera,
1550             List<OutputConfiguration> outputs,
1551             CameraCaptureSession.StateCallback listener, Handler handler)
1552             throws CameraAccessException {
1553         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1554         camera.createCaptureSessionByOutputConfigurations(outputs, sessionListener, handler);
1555         CameraCaptureSession session =
1556                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1557         assertFalse("Camera session should not be a reprocessable session",
1558                 session.isReprocessable());
1559         return session;
1560     }
1561 
1562     /**
1563      * Configure a new camera session with output configurations / a session color space.
1564      *
1565      * @param camera The CameraDevice to be configured.
1566      * @param outputs The OutputConfiguration list that is used for camera output.
1567      * @param listener The callback CameraDevice will notify when capture results are available.
1568      * @param colorSpace The ColorSpace for this session.
1569      */
configureCameraSessionWithColorSpace(CameraDevice camera, List<OutputConfiguration> outputs, CameraCaptureSession.StateCallback listener, Handler handler, ColorSpace.Named colorSpace)1570     public static CameraCaptureSession configureCameraSessionWithColorSpace(CameraDevice camera,
1571             List<OutputConfiguration> outputs,
1572             CameraCaptureSession.StateCallback listener, Handler handler,
1573             ColorSpace.Named colorSpace) throws CameraAccessException {
1574         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1575         SessionConfiguration sessionConfiguration = new SessionConfiguration(
1576                 SessionConfiguration.SESSION_REGULAR, outputs,
1577                 new HandlerExecutor(handler), sessionListener);
1578         sessionConfiguration.setColorSpace(colorSpace);
1579         camera.createCaptureSession(sessionConfiguration);
1580         CameraCaptureSession session =
1581                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1582         assertFalse("Camera session should not be a reprocessable session",
1583                 session.isReprocessable());
1584         return session;
1585     }
1586 
1587     /**
1588      * Try configure a new camera session with output configurations.
1589      *
1590      * @param camera The CameraDevice to be configured.
1591      * @param outputs The OutputConfiguration list that is used for camera output.
1592      * @param initialRequest The session parameters passed in during stream configuration
1593      * @param listener The callback CameraDevice will notify when capture results are available.
1594      */
tryConfigureCameraSessionWithConfig(CameraDevice camera, List<OutputConfiguration> outputs, CaptureRequest initialRequest, CameraCaptureSession.StateCallback listener, Handler handler)1595     public static CameraCaptureSession tryConfigureCameraSessionWithConfig(CameraDevice camera,
1596             List<OutputConfiguration> outputs, CaptureRequest initialRequest,
1597             CameraCaptureSession.StateCallback listener, Handler handler)
1598             throws CameraAccessException {
1599         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1600         SessionConfiguration sessionConfig = new SessionConfiguration(
1601                 SessionConfiguration.SESSION_REGULAR, outputs, new HandlerExecutor(handler),
1602                 sessionListener);
1603         sessionConfig.setSessionParameters(initialRequest);
1604         camera.createCaptureSession(sessionConfig);
1605 
1606         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1607                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1608         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1609                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1610 
1611         CameraCaptureSession session = null;
1612         if (state == BlockingSessionCallback.SESSION_READY) {
1613             session = sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1614             assertFalse("Camera session should not be a reprocessable session",
1615                     session.isReprocessable());
1616         }
1617         return session;
1618     }
1619 
1620     /**
1621      * Configure a new camera session with output surfaces and initial session parameters.
1622      *
1623      * @param camera The CameraDevice to be configured.
1624      * @param outputSurfaces The surface list that used for camera output.
1625      * @param listener The callback CameraDevice will notify when session is available.
1626      * @param handler The handler used to notify callbacks.
1627      * @param initialRequest Initial request settings to use as session parameters.
1628      */
configureCameraSessionWithParameters(CameraDevice camera, List<Surface> outputSurfaces, BlockingSessionCallback listener, Handler handler, CaptureRequest initialRequest)1629     public static CameraCaptureSession configureCameraSessionWithParameters(CameraDevice camera,
1630             List<Surface> outputSurfaces, BlockingSessionCallback listener,
1631             Handler handler, CaptureRequest initialRequest) throws CameraAccessException {
1632         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
1633         for (Surface surface : outputSurfaces) {
1634             outConfigurations.add(new OutputConfiguration(surface));
1635         }
1636         SessionConfiguration sessionConfig = new SessionConfiguration(
1637                 SessionConfiguration.SESSION_REGULAR, outConfigurations,
1638                 new HandlerExecutor(handler), listener);
1639         sessionConfig.setSessionParameters(initialRequest);
1640         camera.createCaptureSession(sessionConfig);
1641 
1642         CameraCaptureSession session = listener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1643         assertFalse("Camera session should not be a reprocessable session",
1644                 session.isReprocessable());
1645         assertFalse("Capture session type must be regular",
1646                 CameraConstrainedHighSpeedCaptureSession.class.isAssignableFrom(
1647                         session.getClass()));
1648 
1649         return session;
1650     }
1651 
1652     /**
1653      * Configure a new camera session with output surfaces.
1654      *
1655      * @param camera The CameraDevice to be configured.
1656      * @param outputSurfaces The surface list that used for camera output.
1657      * @param listener The callback CameraDevice will notify when capture results are available.
1658      */
configureCameraSession(CameraDevice camera, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler)1659     public static CameraCaptureSession configureCameraSession(CameraDevice camera,
1660             List<Surface> outputSurfaces,
1661             CameraCaptureSession.StateCallback listener, Handler handler)
1662             throws CameraAccessException {
1663 
1664         return configureCameraSession(camera, outputSurfaces, /*isHighSpeed*/false,
1665                 listener, handler);
1666     }
1667 
configureReprocessableCameraSession(CameraDevice camera, InputConfiguration inputConfiguration, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler)1668     public static CameraCaptureSession configureReprocessableCameraSession(CameraDevice camera,
1669             InputConfiguration inputConfiguration, List<Surface> outputSurfaces,
1670             CameraCaptureSession.StateCallback listener, Handler handler)
1671             throws CameraAccessException {
1672         List<OutputConfiguration> outputConfigs = new ArrayList<OutputConfiguration>();
1673         for (Surface surface : outputSurfaces) {
1674             outputConfigs.add(new OutputConfiguration(surface));
1675         }
1676         CameraCaptureSession session = configureReprocessableCameraSessionWithConfigurations(
1677                 camera, inputConfiguration, outputConfigs, listener, handler);
1678 
1679         return session;
1680     }
1681 
configureReprocessableCameraSessionWithConfigurations( CameraDevice camera, InputConfiguration inputConfiguration, List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener, Handler handler)1682     public static CameraCaptureSession configureReprocessableCameraSessionWithConfigurations(
1683             CameraDevice camera, InputConfiguration inputConfiguration,
1684             List<OutputConfiguration> outputConfigs, CameraCaptureSession.StateCallback listener,
1685             Handler handler) throws CameraAccessException {
1686         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1687         SessionConfiguration sessionConfig = new SessionConfiguration(
1688                 SessionConfiguration.SESSION_REGULAR, outputConfigs, new HandlerExecutor(handler),
1689                 sessionListener);
1690         sessionConfig.setInputConfiguration(inputConfiguration);
1691         camera.createCaptureSession(sessionConfig);
1692 
1693         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1694                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1695         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1696                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1697 
1698         assertTrue("Creating a reprocessable session failed.",
1699                 state == BlockingSessionCallback.SESSION_READY);
1700         CameraCaptureSession session =
1701                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1702         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1703 
1704         return session;
1705     }
1706 
1707     /**
1708      * Create a reprocessable camera session with input and output configurations.
1709      *
1710      * @param camera The CameraDevice to be configured.
1711      * @param inputConfiguration The input configuration used to create this session.
1712      * @param outputs The output configurations used to create this session.
1713      * @param listener The callback CameraDevice will notify when capture results are available.
1714      * @param handler The handler used to notify callbacks.
1715      * @return The session ready to use.
1716      * @throws CameraAccessException
1717      */
configureReprocCameraSessionWithConfig(CameraDevice camera, InputConfiguration inputConfiguration, List<OutputConfiguration> outputs, CameraCaptureSession.StateCallback listener, Handler handler)1718     public static CameraCaptureSession configureReprocCameraSessionWithConfig(CameraDevice camera,
1719             InputConfiguration inputConfiguration, List<OutputConfiguration> outputs,
1720             CameraCaptureSession.StateCallback listener, Handler handler)
1721             throws CameraAccessException {
1722         BlockingSessionCallback sessionListener = new BlockingSessionCallback(listener);
1723         camera.createReprocessableCaptureSessionByConfigurations(inputConfiguration, outputs,
1724                 sessionListener, handler);
1725 
1726         Integer[] sessionStates = {BlockingSessionCallback.SESSION_READY,
1727                                    BlockingSessionCallback.SESSION_CONFIGURE_FAILED};
1728         int state = sessionListener.getStateWaiter().waitForAnyOfStates(
1729                 Arrays.asList(sessionStates), SESSION_CONFIGURE_TIMEOUT_MS);
1730 
1731         assertTrue("Creating a reprocessable session failed.",
1732                 state == BlockingSessionCallback.SESSION_READY);
1733 
1734         CameraCaptureSession session =
1735                 sessionListener.waitAndGetSession(SESSION_CONFIGURE_TIMEOUT_MS);
1736         assertTrue("Camera session should be a reprocessable session", session.isReprocessable());
1737 
1738         return session;
1739     }
1740 
assertArrayNotEmpty(T arr, String message)1741     public static <T> void assertArrayNotEmpty(T arr, String message) {
1742         assertTrue(message, arr != null && Array.getLength(arr) > 0);
1743     }
1744 
1745     /**
1746      * Check if the format is a legal YUV format camera supported.
1747      */
checkYuvFormat(int format)1748     public static void checkYuvFormat(int format) {
1749         if ((format != ImageFormat.YUV_420_888) &&
1750                 (format != ImageFormat.NV21) &&
1751                 (format != ImageFormat.YV12)) {
1752             fail("Wrong formats: " + format);
1753         }
1754     }
1755 
1756     /**
1757      * Check if image size and format match given size and format.
1758      */
checkImage(Image image, int width, int height, int format)1759     public static void checkImage(Image image, int width, int height, int format) {
1760         checkImage(image, width, height, format, /*colorSpace*/null);
1761     }
1762 
1763     /**
1764      * Check if image size and format match given size and format.
1765      */
checkImage(Image image, int width, int height, int format, ColorSpace colorSpace)1766     public static void checkImage(Image image, int width, int height, int format,
1767             ColorSpace colorSpace) {
1768         // Image reader will wrap YV12/NV21 image by YUV_420_888
1769         if (format == ImageFormat.NV21 || format == ImageFormat.YV12) {
1770             format = ImageFormat.YUV_420_888;
1771         }
1772         assertNotNull("Input image is invalid", image);
1773         assertEquals("Format doesn't match", format, image.getFormat());
1774         assertEquals("Width doesn't match", width, image.getWidth());
1775         assertEquals("Height doesn't match", height, image.getHeight());
1776 
1777         if (colorSpace != null && format != ImageFormat.JPEG && format != ImageFormat.JPEG_R
1778                 && format != ImageFormat.HEIC) {
1779             int dataSpace = image.getDataSpace();
1780             ColorSpace actualColorSpace = ColorSpace.getFromDataSpace(dataSpace);
1781             assertNotNull("getFromDataSpace() returned null for format "
1782                     + format + ", dataSpace " + dataSpace, actualColorSpace);
1783             assertEquals("colorSpace " + actualColorSpace.getId()
1784                     + " does not match expected color space "
1785                     + colorSpace.getId(), colorSpace.getId(), actualColorSpace.getId());
1786         }
1787     }
1788 
1789     /**
1790      * <p>Read data from all planes of an Image into a contiguous unpadded, unpacked
1791      * 1-D linear byte array, such that it can be write into disk, or accessed by
1792      * software conveniently. It supports YUV_420_888/NV21/YV12 and JPEG input
1793      * Image format.</p>
1794      *
1795      * <p>For YUV_420_888/NV21/YV12/Y8/Y16, it returns a byte array that contains
1796      * the Y plane data first, followed by U(Cb), V(Cr) planes if there is any
1797      * (xstride = width, ystride = height for chroma and luma components).</p>
1798      *
1799      * <p>For JPEG, it returns a 1-D byte array contains a complete JPEG image.</p>
1800      *
1801      * <p>For YUV P010 and P210, it returns a byte array that contains Y plane first,
1802      * followed by the interleaved U(Cb)/V(Cr) plane.</p>
1803      */
getDataFromImage(Image image)1804     public static byte[] getDataFromImage(Image image) {
1805         assertNotNull("Invalid image:", image);
1806         int format = image.getFormat();
1807         int width = image.getWidth();
1808         int height = image.getHeight();
1809         int rowStride, pixelStride;
1810         byte[] data = null;
1811 
1812         // Read image data
1813         Plane[] planes = image.getPlanes();
1814         assertTrue("Fail to get image planes", planes != null && planes.length > 0);
1815 
1816         // Check image validity
1817         checkAndroidImageFormat(image);
1818 
1819         ByteBuffer buffer = null;
1820         // JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
1821         // Same goes for DEPTH_POINT_CLOUD, RAW_PRIVATE, DEPTH_JPEG, and HEIC
1822         if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD ||
1823                 format == ImageFormat.RAW_PRIVATE || format == ImageFormat.DEPTH_JPEG ||
1824                 format == ImageFormat.HEIC || format == ImageFormat.JPEG_R ||
1825                 format == ImageFormat.HEIC_ULTRAHDR) {
1826             buffer = planes[0].getBuffer();
1827             assertNotNull("Fail to get jpeg/depth/heic ByteBuffer", buffer);
1828             data = new byte[buffer.remaining()];
1829             buffer.get(data);
1830             buffer.rewind();
1831             return data;
1832         } else if (format == ImageFormat.YCBCR_P010) {
1833             // P010 samples are stored within 16 bit values
1834             int offset = 0;
1835             int bytesPerPixelRounded = (ImageFormat.getBitsPerPixel(format) + 7) / 8;
1836             data = new byte[width * height * bytesPerPixelRounded];
1837             assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
1838                     planes.length == 3);
1839             for (int i = 0; i < 2; i++) {
1840                 buffer = planes[i].getBuffer();
1841                 assertNotNull("Fail to get bytebuffer from plane", buffer);
1842                 buffer.rewind();
1843                 rowStride = planes[i].getRowStride();
1844                 if (VERBOSE) {
1845                     Log.v(TAG, "rowStride " + rowStride);
1846                     Log.v(TAG, "width " + width);
1847                     Log.v(TAG, "height " + height);
1848                 }
1849                 int h = (i == 0) ? height : height / 2;
1850                 for (int row = 0; row < h; row++) {
1851                     // Each 10-bit pixel occupies 2 bytes
1852                     int length = 2 * width;
1853                     buffer.get(data, offset, length);
1854                     offset += length;
1855                     if (row < h - 1) {
1856                         buffer.position(buffer.position() + rowStride - length);
1857                     }
1858                 }
1859                 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1860                 buffer.rewind();
1861             }
1862             return data;
1863         } else if (format == ImageFormat.YCBCR_P210) {
1864             // P210 samples are stored within 16 bit values
1865             int offset = 0;
1866             int bytesPerPixelRounded = ImageFormat.getBitsPerPixel(format) / 8;
1867             data = new byte[width * height * bytesPerPixelRounded];
1868             assertTrue("Unexpected number of planes, expected " + 3 + " actual " + planes.length,
1869                     planes.length == 3);
1870             for (int i = 0; i < 2; i++) {
1871                 buffer = planes[i].getBuffer();
1872                 assertNotNull("Fail to get bytebuffer from plane", buffer);
1873                 buffer.rewind();
1874                 rowStride = planes[i].getRowStride();
1875                 if (VERBOSE) {
1876                     Log.v(TAG, "rowStride " + rowStride);
1877                     Log.v(TAG, "width " + width);
1878                     Log.v(TAG, "height " + height);
1879                 }
1880                 for (int row = 0; row < height; row++) {
1881                     // Each 10-bit pixel occupies 2 bytes
1882                     int length = 2 * width;
1883                     buffer.get(data, offset, length);
1884                     offset += length;
1885                     if (row < height - 1) {
1886                         buffer.position(buffer.position() + rowStride - length);
1887                     }
1888                 }
1889                 if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1890                 buffer.rewind();
1891             }
1892             return data;
1893         }
1894 
1895 
1896         int offset = 0;
1897         data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
1898         int maxRowSize = planes[0].getRowStride();
1899         for (int i = 0; i < planes.length; i++) {
1900             if (maxRowSize < planes[i].getRowStride()) {
1901                 maxRowSize = planes[i].getRowStride();
1902             }
1903         }
1904         byte[] rowData = new byte[maxRowSize];
1905         if(VERBOSE) Log.v(TAG, "get data from " + planes.length + " planes");
1906         for (int i = 0; i < planes.length; i++) {
1907             buffer = planes[i].getBuffer();
1908             assertNotNull("Fail to get bytebuffer from plane", buffer);
1909             buffer.rewind();
1910             rowStride = planes[i].getRowStride();
1911             pixelStride = planes[i].getPixelStride();
1912             assertTrue("pixel stride " + pixelStride + " is invalid", pixelStride > 0);
1913             if (VERBOSE) {
1914                 Log.v(TAG, "pixelStride " + pixelStride);
1915                 Log.v(TAG, "rowStride " + rowStride);
1916                 Log.v(TAG, "width " + width);
1917                 Log.v(TAG, "height " + height);
1918             }
1919             // For multi-planar yuv images, assuming yuv420 with 2x2 chroma subsampling.
1920             int w = (i == 0) ? width : width / 2;
1921             int h = (i == 0) ? height : height / 2;
1922             assertTrue("rowStride " + rowStride + " should be >= width " + w , rowStride >= w);
1923             for (int row = 0; row < h; row++) {
1924                 int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
1925                 int length;
1926                 if (pixelStride == bytesPerPixel) {
1927                     // Special case: optimized read of the entire row
1928                     length = w * bytesPerPixel;
1929                     buffer.get(data, offset, length);
1930                     offset += length;
1931                 } else {
1932                     // Generic case: should work for any pixelStride but slower.
1933                     // Use intermediate buffer to avoid read byte-by-byte from
1934                     // DirectByteBuffer, which is very bad for performance
1935                     length = (w - 1) * pixelStride + bytesPerPixel;
1936                     buffer.get(rowData, 0, length);
1937                     for (int col = 0; col < w; col++) {
1938                         data[offset++] = rowData[col * pixelStride];
1939                     }
1940                 }
1941                 // Advance buffer the remainder of the row stride
1942                 if (row < h - 1) {
1943                     buffer.position(buffer.position() + rowStride - length);
1944                 }
1945             }
1946             if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
1947             buffer.rewind();
1948         }
1949         return data;
1950     }
1951 
1952     /**
1953      * <p>Check android image format validity for an image, only support below formats:</p>
1954      *
1955      * <p>YUV_420_888/NV21/YV12, can add more for future</p>
1956      */
checkAndroidImageFormat(Image image)1957     public static void checkAndroidImageFormat(Image image) {
1958         int format = image.getFormat();
1959         Plane[] planes = image.getPlanes();
1960         switch (format) {
1961             case ImageFormat.YUV_420_888:
1962             case ImageFormat.NV21:
1963             case ImageFormat.YV12:
1964             case ImageFormat.YCBCR_P010:
1965             case ImageFormat.YCBCR_P210:
1966                 assertEquals("YUV420 format Images should have 3 planes", 3, planes.length);
1967                 break;
1968             case ImageFormat.JPEG:
1969             case ImageFormat.RAW_SENSOR:
1970             case ImageFormat.RAW_PRIVATE:
1971             case ImageFormat.DEPTH16:
1972             case ImageFormat.DEPTH_POINT_CLOUD:
1973             case ImageFormat.DEPTH_JPEG:
1974             case ImageFormat.Y8:
1975             case ImageFormat.HEIC:
1976             case ImageFormat.HEIC_ULTRAHDR:
1977             case ImageFormat.JPEG_R:
1978                 assertEquals("JPEG/RAW/depth/Y8 Images should have one plane", 1, planes.length);
1979                 break;
1980             default:
1981                 fail("Unsupported Image Format: " + format);
1982         }
1983     }
1984 
dumpFile(String fileName, Bitmap data)1985     public static void dumpFile(String fileName, Bitmap data) {
1986         FileOutputStream outStream;
1987         try {
1988             Log.v(TAG, "output will be saved as " + fileName);
1989             outStream = new FileOutputStream(fileName);
1990         } catch (IOException ioe) {
1991             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
1992         }
1993 
1994         try {
1995             data.compress(Bitmap.CompressFormat.JPEG, /*quality*/90, outStream);
1996             outStream.close();
1997         } catch (IOException ioe) {
1998             throw new RuntimeException("failed writing data to file " + fileName, ioe);
1999         }
2000     }
2001 
dumpFile(String fileName, byte[] data)2002     public static void dumpFile(String fileName, byte[] data) {
2003         FileOutputStream outStream;
2004         try {
2005             Log.v(TAG, "output will be saved as " + fileName);
2006             outStream = new FileOutputStream(fileName);
2007         } catch (IOException ioe) {
2008             throw new RuntimeException("Unable to create debug output file " + fileName, ioe);
2009         }
2010 
2011         try {
2012             outStream.write(data);
2013             outStream.close();
2014         } catch (IOException ioe) {
2015             throw new RuntimeException("failed writing data to file " + fileName, ioe);
2016         }
2017     }
2018 
2019     /**
2020      * Get the available output sizes for the user-defined {@code format}.
2021      *
2022      * <p>Note that implementation-defined/hidden formats are not supported.</p>
2023      */
getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager)2024     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
2025             CameraManager cameraManager) throws CameraAccessException {
2026         return getSupportedSizeForFormat(format, cameraId, cameraManager,
2027                 /*maxResolution*/false);
2028     }
2029 
getSupportedSizeForFormat(int format, String cameraId, CameraManager cameraManager, boolean maxResolution)2030     public static Size[] getSupportedSizeForFormat(int format, String cameraId,
2031             CameraManager cameraManager, boolean maxResolution) throws CameraAccessException {
2032         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
2033         assertNotNull("Can't get camera characteristics!", properties);
2034         if (VERBOSE) {
2035             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
2036         }
2037         CameraCharacteristics.Key<StreamConfigurationMap> configMapTag = maxResolution ?
2038                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION :
2039                 CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP;
2040         StreamConfigurationMap configMap = properties.get(configMapTag);
2041         if (configMap == null) {
2042             assertTrue("SCALER_STREAM_CONFIGURATION_MAP is null!", maxResolution);
2043             return null;
2044         }
2045 
2046         Size[] availableSizes = configMap.getOutputSizes(format);
2047         if (!maxResolution) {
2048             assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for format: "
2049                     + format);
2050         }
2051         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(format);
2052         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
2053             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
2054             System.arraycopy(availableSizes, 0, allSizes, 0,
2055                     availableSizes.length);
2056             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
2057                     highResAvailableSizes.length);
2058             availableSizes = allSizes;
2059         }
2060         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
2061         return availableSizes;
2062     }
2063 
2064     /**
2065      * Get the available output sizes for the given class.
2066      *
2067      */
getSupportedSizeForClass(Class klass, String cameraId, CameraManager cameraManager)2068     public static Size[] getSupportedSizeForClass(Class klass, String cameraId,
2069             CameraManager cameraManager) throws CameraAccessException {
2070         CameraCharacteristics properties = cameraManager.getCameraCharacteristics(cameraId);
2071         assertNotNull("Can't get camera characteristics!", properties);
2072         if (VERBOSE) {
2073             Log.v(TAG, "get camera characteristics for camera: " + cameraId);
2074         }
2075         StreamConfigurationMap configMap =
2076                 properties.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
2077         Size[] availableSizes = configMap.getOutputSizes(klass);
2078         assertArrayNotEmpty(availableSizes, "availableSizes should not be empty for class: "
2079                 + klass);
2080         Size[] highResAvailableSizes = configMap.getHighResolutionOutputSizes(ImageFormat.PRIVATE);
2081         if (highResAvailableSizes != null && highResAvailableSizes.length > 0) {
2082             Size[] allSizes = new Size[availableSizes.length + highResAvailableSizes.length];
2083             System.arraycopy(availableSizes, 0, allSizes, 0,
2084                     availableSizes.length);
2085             System.arraycopy(highResAvailableSizes, 0, allSizes, availableSizes.length,
2086                     highResAvailableSizes.length);
2087             availableSizes = allSizes;
2088         }
2089         if (VERBOSE) Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(availableSizes));
2090         return availableSizes;
2091     }
2092 
2093     /**
2094      * Size comparator that compares the number of pixels it covers.
2095      *
2096      * <p>If two the areas of two sizes are same, compare the widths.</p>
2097      */
2098     public static class SizeComparator implements Comparator<Size> {
2099         @Override
compare(Size lhs, Size rhs)2100         public int compare(Size lhs, Size rhs) {
2101             return CameraUtils
2102                     .compareSizes(lhs.getWidth(), lhs.getHeight(), rhs.getWidth(), rhs.getHeight());
2103         }
2104     }
2105 
2106     /**
2107      * Get sorted size list in descending order. Remove the sizes larger than
2108      * the bound. If the bound is null, don't do the size bound filtering.
2109      */
getSupportedPreviewSizes(String cameraId, CameraManager cameraManager, Size bound)2110     static public List<Size> getSupportedPreviewSizes(String cameraId,
2111             CameraManager cameraManager, Size bound) throws CameraAccessException {
2112 
2113         Size[] rawSizes = getSupportedSizeForClass(android.view.SurfaceHolder.class, cameraId,
2114                 cameraManager);
2115         assertArrayNotEmpty(rawSizes,
2116                 "Available sizes for SurfaceHolder class should not be empty");
2117         if (VERBOSE) {
2118             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2119         }
2120 
2121         if (bound == null) {
2122             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2123         }
2124 
2125         List<Size> sizes = new ArrayList<Size>();
2126         for (Size sz: rawSizes) {
2127             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2128                 sizes.add(sz);
2129             }
2130         }
2131         return getAscendingOrderSizes(sizes, /*ascending*/false);
2132     }
2133 
2134     /**
2135      * Get a sorted list of sizes from a given size list.
2136      *
2137      * <p>
2138      * The size is compare by area it covers, if the areas are same, then
2139      * compare the widths.
2140      * </p>
2141      *
2142      * @param sizeList The input size list to be sorted
2143      * @param ascending True if the order is ascending, otherwise descending order
2144      * @return The ordered list of sizes
2145      */
getAscendingOrderSizes(final List<Size> sizeList, boolean ascending)2146     static public List<Size> getAscendingOrderSizes(final List<Size> sizeList, boolean ascending) {
2147         if (sizeList == null) {
2148             throw new IllegalArgumentException("sizeList shouldn't be null");
2149         }
2150 
2151         Comparator<Size> comparator = new SizeComparator();
2152         List<Size> sortedSizes = new ArrayList<Size>();
2153         sortedSizes.addAll(sizeList);
2154         Collections.sort(sortedSizes, comparator);
2155         if (!ascending) {
2156             Collections.reverse(sortedSizes);
2157         }
2158 
2159         return sortedSizes;
2160     }
2161     /**
2162      * Get sorted (descending order) size list for given format. Remove the sizes larger than
2163      * the bound. If the bound is null, don't do the size bound filtering.
2164      */
getSortedSizesForFormat(String cameraId, CameraManager cameraManager, int format, Size bound)2165     static public List<Size> getSortedSizesForFormat(String cameraId,
2166             CameraManager cameraManager, int format, Size bound) throws CameraAccessException {
2167         return getSortedSizesForFormat(cameraId, cameraManager, format, /*maxResolution*/false,
2168                 bound);
2169     }
2170 
2171     /**
2172      * Get sorted (descending order) size list for given format (with an option to get sizes from
2173      * the maximum resolution stream configuration map). Remove the sizes larger than
2174      * the bound. If the bound is null, don't do the size bound filtering.
2175      */
getSortedSizesForFormat(String cameraId, CameraManager cameraManager, int format, boolean maxResolution, Size bound)2176     static public List<Size> getSortedSizesForFormat(String cameraId,
2177             CameraManager cameraManager, int format, boolean maxResolution, Size bound)
2178             throws CameraAccessException {
2179         Comparator<Size> comparator = new SizeComparator();
2180         Size[] sizes = getSupportedSizeForFormat(format, cameraId, cameraManager, maxResolution);
2181         List<Size> sortedSizes = null;
2182         if (bound != null) {
2183             sortedSizes = new ArrayList<Size>(/*capacity*/1);
2184             for (Size sz : sizes) {
2185                 if (comparator.compare(sz, bound) <= 0) {
2186                     sortedSizes.add(sz);
2187                 }
2188             }
2189         } else {
2190             sortedSizes = Arrays.asList(sizes);
2191         }
2192         assertTrue("Supported size list should have at least one element",
2193                 sortedSizes.size() > 0);
2194 
2195         Collections.sort(sortedSizes, comparator);
2196         // Make it in descending order.
2197         Collections.reverse(sortedSizes);
2198         return sortedSizes;
2199     }
2200 
2201     /**
2202      * Get supported video size list for a given camera device.
2203      *
2204      * <p>
2205      * Filter out the sizes that are larger than the bound. If the bound is
2206      * null, don't do the size bound filtering.
2207      * </p>
2208      */
getSupportedVideoSizes(String cameraId, CameraManager cameraManager, Size bound)2209     static public List<Size> getSupportedVideoSizes(String cameraId,
2210             CameraManager cameraManager, Size bound) throws CameraAccessException {
2211 
2212         Size[] rawSizes = getSupportedSizeForClass(android.media.MediaRecorder.class,
2213                 cameraId, cameraManager);
2214         assertArrayNotEmpty(rawSizes,
2215                 "Available sizes for MediaRecorder class should not be empty");
2216         if (VERBOSE) {
2217             Log.v(TAG, "Supported sizes are: " + Arrays.deepToString(rawSizes));
2218         }
2219 
2220         if (bound == null) {
2221             return getAscendingOrderSizes(Arrays.asList(rawSizes), /*ascending*/false);
2222         }
2223 
2224         List<Size> sizes = new ArrayList<Size>();
2225         for (Size sz: rawSizes) {
2226             if (sz.getWidth() <= bound.getWidth() && sz.getHeight() <= bound.getHeight()) {
2227                 sizes.add(sz);
2228             }
2229         }
2230         return getAscendingOrderSizes(sizes, /*ascending*/false);
2231     }
2232 
2233     /**
2234      * Get supported video size list (descending order) for a given camera device.
2235      *
2236      * <p>
2237      * Filter out the sizes that are larger than the bound. If the bound is
2238      * null, don't do the size bound filtering.
2239      * </p>
2240      */
getSupportedStillSizes(String cameraId, CameraManager cameraManager, Size bound)2241     static public List<Size> getSupportedStillSizes(String cameraId,
2242             CameraManager cameraManager, Size bound) throws CameraAccessException {
2243         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.JPEG, bound);
2244     }
2245 
getSupportedHeicSizes(String cameraId, CameraManager cameraManager, Size bound)2246     static public List<Size> getSupportedHeicSizes(String cameraId,
2247             CameraManager cameraManager, Size bound) throws CameraAccessException {
2248         return getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.HEIC, bound);
2249     }
2250 
getMinPreviewSize(String cameraId, CameraManager cameraManager)2251     static public Size getMinPreviewSize(String cameraId, CameraManager cameraManager)
2252             throws CameraAccessException {
2253         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, null);
2254         return sizes.get(sizes.size() - 1);
2255     }
2256 
2257     /**
2258      * Get max supported preview size for a camera device.
2259      */
getMaxPreviewSize(String cameraId, CameraManager cameraManager)2260     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager)
2261             throws CameraAccessException {
2262         return getMaxPreviewSize(cameraId, cameraManager, /*bound*/null);
2263     }
2264 
2265     /**
2266      * Get max preview size for a camera device in the supported sizes that are no larger
2267      * than the bound.
2268      */
getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)2269     static public Size getMaxPreviewSize(String cameraId, CameraManager cameraManager, Size bound)
2270             throws CameraAccessException {
2271         List<Size> sizes = getSupportedPreviewSizes(cameraId, cameraManager, bound);
2272         return sizes.get(0);
2273     }
2274 
2275     /**
2276      * Get max depth size for a camera device.
2277      */
getMaxDepthSize(String cameraId, CameraManager cameraManager)2278     static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
2279             throws CameraAccessException {
2280         List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
2281                 /*bound*/ null);
2282         return sizes.get(0);
2283     }
2284 
2285     /**
2286      * Return the lower size
2287      * @param a first size
2288      *
2289      * @param b second size
2290      *
2291      * @return Size the smaller size
2292      *
2293      * @throws IllegalArgumentException if either param was null.
2294      *
2295      */
getMinSize(Size a, Size b)2296     @NonNull public static Size getMinSize(Size a, Size b) {
2297         if (a == null || b == null) {
2298             throw new IllegalArgumentException("sizes was empty");
2299         }
2300         if (a.getWidth() * a.getHeight() < b.getHeight() * b.getWidth()) {
2301             return a;
2302         }
2303         return b;
2304     }
2305 
2306     /**
2307      * Get the largest size by area.
2308      *
2309      * @param sizes an array of sizes, must have at least 1 element
2310      *
2311      * @return Largest Size
2312      *
2313      * @throws IllegalArgumentException if sizes was null or had 0 elements
2314      */
getMaxSize(Size... sizes)2315     public static Size getMaxSize(Size... sizes) {
2316         return getMaxSize(sizes, -1 /*aspectRatio*/);
2317     }
2318 
2319     /**
2320      * Get the largest size by area, and with given aspect ratio.
2321      *
2322      * @param sizes an array of sizes, must have at least 1 element
2323      * @param aspectRatio the aspect ratio to match. -1 if aspect ratio doesn't need to match.
2324      *
2325      * @return Largest Size. Null if no such size exists matching aspect ratio.
2326      *
2327      * @throws IllegalArgumentException if sizes was null or had 0 elements
2328      */
getMaxSize(Size[] sizes, float aspectRatio)2329     public static Size getMaxSize(Size[] sizes, float aspectRatio) {
2330         if (sizes == null || sizes.length == 0) {
2331             throw new IllegalArgumentException("sizes was empty");
2332         }
2333 
2334         Size sz = null;
2335         for (Size size : sizes) {
2336             float ar = 1.0f * size.getWidth() / size.getHeight();
2337             if (aspectRatio > 0 && Math.abs(ar - aspectRatio) > ASPECT_RATIO_MATCH_THRESHOLD) {
2338                 continue;
2339             }
2340 
2341             if (sz == null
2342                     || size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2343                 sz = size;
2344             }
2345         }
2346         return sz;
2347     }
2348 
2349     /**
2350      * Get the largest size by area within (less than) bound
2351      *
2352      * @param sizes an array of sizes, must have at least 1 element
2353      *
2354      * @return Largest Size. Null if no such size exists within bound.
2355      *
2356      * @throws IllegalArgumentException if sizes was null or had 0 elements, or bound is invalid.
2357      */
getMaxSizeWithBound(Size[] sizes, int bound)2358     public static Size getMaxSizeWithBound(Size[] sizes, int bound) {
2359         if (sizes == null || sizes.length == 0) {
2360             throw new IllegalArgumentException("sizes was empty");
2361         }
2362         if (bound <= 0) {
2363             throw new IllegalArgumentException("bound is invalid");
2364         }
2365 
2366         Size sz = null;
2367         for (Size size : sizes) {
2368             if (size.getWidth() * size.getHeight() >= bound) {
2369                 continue;
2370             }
2371 
2372             if (sz == null ||
2373                     size.getWidth() * size.getHeight() > sz.getWidth() * sz.getHeight()) {
2374                 sz = size;
2375             }
2376         }
2377 
2378         return sz;
2379     }
2380 
2381     /**
2382      * Get maximum size in list that's equal or smaller to than the bound.
2383      *
2384      * Returns null if no size is smaller than or equal to the bound.
2385      */
getMaxSizeWithBound(Size[] sizes, Size bound)2386     private static Size getMaxSizeWithBound(Size[] sizes, Size bound) {
2387         return getMaxSizeWithBound(sizes, bound, -1 /*aspectRatio*/);
2388     }
2389 
2390     /**
2391      * Get maximum size in list that's equal or smaller to than the bound and matching
2392      * the aspect ratio.
2393      *
2394      * Returns null if no size is smaller than or equal to the bound while matching aspect
2395      * ratio.
2396      */
getMaxSizeWithBound(Size[] sizes, Size bound, float aspectRatio)2397     private static Size getMaxSizeWithBound(Size[] sizes, Size bound, float aspectRatio) {
2398         if (sizes == null || sizes.length == 0) {
2399             throw new IllegalArgumentException("sizes was empty");
2400         }
2401 
2402         Size sz = null;
2403         for (Size size : sizes) {
2404             // If matching aspect ratio is needed, check aspect ratio
2405             float ar = 1.0f * size.getWidth() / size.getHeight();
2406             if (aspectRatio > 0 && Math.abs(ar - aspectRatio) > ASPECT_RATIO_MATCH_THRESHOLD) {
2407                 continue;
2408             }
2409 
2410             if (size.getWidth() <= bound.getWidth() && size.getHeight() <= bound.getHeight()) {
2411 
2412                 if (sz == null) {
2413                     sz = size;
2414                 } else {
2415                     long curArea = sz.getWidth() * (long) sz.getHeight();
2416                     long newArea = size.getWidth() * (long) size.getHeight();
2417                     if (newArea > curArea) {
2418                         sz = size;
2419                     }
2420                 }
2421             }
2422         }
2423 
2424         assertTrue("No size under bound found: " + Arrays.toString(sizes) + " bound " + bound,
2425                 sz != null);
2426 
2427         return sz;
2428     }
2429 
2430     /**
2431      * Returns true if the given {@code array} contains the given element.
2432      *
2433      * @param array {@code array} to check for {@code elem}
2434      * @param elem {@code elem} to test for
2435      * @return {@code true} if the given element is contained
2436      */
contains(int[] array, int elem)2437     public static boolean contains(int[] array, int elem) {
2438         if (array == null) return false;
2439         for (int i = 0; i < array.length; i++) {
2440             if (elem == array[i]) return true;
2441         }
2442         return false;
2443     }
2444 
contains(long[] array, long elem)2445     public static boolean contains(long[] array, long elem) {
2446         if (array == null) return false;
2447         for (int i = 0; i < array.length; i++) {
2448             if (elem == array[i]) return true;
2449         }
2450         return false;
2451     }
2452 
2453     /**
2454      * Get object array from byte array.
2455      *
2456      * @param array Input byte array to be converted
2457      * @return Byte object array converted from input byte array
2458      */
toObject(byte[] array)2459     public static Byte[] toObject(byte[] array) {
2460         return convertPrimitiveArrayToObjectArray(array, Byte.class);
2461     }
2462 
2463     /**
2464      * Get object array from int array.
2465      *
2466      * @param array Input int array to be converted
2467      * @return Integer object array converted from input int array
2468      */
toObject(int[] array)2469     public static Integer[] toObject(int[] array) {
2470         return convertPrimitiveArrayToObjectArray(array, Integer.class);
2471     }
2472 
2473     /**
2474      * Get object array from float array.
2475      *
2476      * @param array Input float array to be converted
2477      * @return Float object array converted from input float array
2478      */
toObject(float[] array)2479     public static Float[] toObject(float[] array) {
2480         return convertPrimitiveArrayToObjectArray(array, Float.class);
2481     }
2482 
2483     /**
2484      * Get object array from double array.
2485      *
2486      * @param array Input double array to be converted
2487      * @return Double object array converted from input double array
2488      */
toObject(double[] array)2489     public static Double[] toObject(double[] array) {
2490         return convertPrimitiveArrayToObjectArray(array, Double.class);
2491     }
2492 
2493     /**
2494      * Convert a primitive input array into its object array version (e.g. from int[] to Integer[]).
2495      *
2496      * @param array Input array object
2497      * @param wrapperClass The boxed class it converts to
2498      * @return Boxed version of primitive array
2499      */
convertPrimitiveArrayToObjectArray(final Object array, final Class<T> wrapperClass)2500     private static <T> T[] convertPrimitiveArrayToObjectArray(final Object array,
2501             final Class<T> wrapperClass) {
2502         // getLength does the null check and isArray check already.
2503         int arrayLength = Array.getLength(array);
2504         if (arrayLength == 0) {
2505             throw new IllegalArgumentException("Input array shouldn't be empty");
2506         }
2507 
2508         @SuppressWarnings("unchecked")
2509         final T[] result = (T[]) Array.newInstance(wrapperClass, arrayLength);
2510         for (int i = 0; i < arrayLength; i++) {
2511             Array.set(result, i, Array.get(array, i));
2512         }
2513         return result;
2514     }
2515 
2516     /**
2517      * Update one 3A region in capture request builder if that region is supported. Do nothing
2518      * if the specified 3A region is not supported by camera device.
2519      * @param requestBuilder The request to be updated
2520      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2521      * @param regions The 3A regions to be set
2522      * @param staticInfo static metadata characteristics
2523      */
update3aRegion( CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions, StaticMetadata staticInfo)2524     public static void update3aRegion(
2525             CaptureRequest.Builder requestBuilder, int algoIdx, MeteringRectangle[] regions,
2526             StaticMetadata staticInfo)
2527     {
2528         int maxRegions;
2529         CaptureRequest.Key<MeteringRectangle[]> key;
2530 
2531         if (regions == null || regions.length == 0 || staticInfo == null) {
2532             throw new IllegalArgumentException("Invalid input 3A region!");
2533         }
2534 
2535         switch (algoIdx) {
2536             case INDEX_ALGORITHM_AE:
2537                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2538                 key = CaptureRequest.CONTROL_AE_REGIONS;
2539                 break;
2540             case INDEX_ALGORITHM_AWB:
2541                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2542                 key = CaptureRequest.CONTROL_AWB_REGIONS;
2543                 break;
2544             case INDEX_ALGORITHM_AF:
2545                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2546                 key = CaptureRequest.CONTROL_AF_REGIONS;
2547                 break;
2548             default:
2549                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2550         }
2551 
2552         if (maxRegions >= regions.length) {
2553             requestBuilder.set(key, regions);
2554         }
2555     }
2556 
2557     /**
2558      * Validate one 3A region in capture result equals to expected region if that region is
2559      * supported. Do nothing if the specified 3A region is not supported by camera device.
2560      * @param result The capture result to be validated
2561      * @param partialResults The partial results to be validated
2562      * @param algoIdx The index to the algorithm. (AE: 0, AWB: 1, AF: 2)
2563      * @param expectRegions The 3A regions expected in capture result
2564      * @param scaleByZoomRatio whether to scale the error threshold by zoom ratio
2565      * @param staticInfo static metadata characteristics
2566      */
validate3aRegion( CaptureResult result, List<CaptureResult> partialResults, int algoIdx, MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)2567     public static void validate3aRegion(
2568             CaptureResult result, List<CaptureResult> partialResults, int algoIdx,
2569             MeteringRectangle[] expectRegions, boolean scaleByZoomRatio, StaticMetadata staticInfo)
2570     {
2571         // There are multiple cases where result 3A region could be slightly different than the
2572         // request:
2573         // 1. Distortion correction,
2574         // 2. Adding smaller 3a region in the test exposes existing devices' offset is larger
2575         //    than 1.
2576         // 3. Precision loss due to converting to HAL zoom ratio and back
2577         // 4. Error magnification due to active array scale-up when zoom ratio API is used.
2578         //
2579         // To handle all these scenarios, make the threshold larger, and scale the threshold based
2580         // on zoom ratio. The scaling factor should be relatively tight, and shouldn't be smaller
2581         // than 1x.
2582         final int maxCoordOffset = 5;
2583         int maxRegions;
2584         CaptureResult.Key<MeteringRectangle[]> key;
2585         MeteringRectangle[] actualRegion;
2586 
2587         switch (algoIdx) {
2588             case INDEX_ALGORITHM_AE:
2589                 maxRegions = staticInfo.getAeMaxRegionsChecked();
2590                 key = CaptureResult.CONTROL_AE_REGIONS;
2591                 break;
2592             case INDEX_ALGORITHM_AWB:
2593                 maxRegions = staticInfo.getAwbMaxRegionsChecked();
2594                 key = CaptureResult.CONTROL_AWB_REGIONS;
2595                 break;
2596             case INDEX_ALGORITHM_AF:
2597                 maxRegions = staticInfo.getAfMaxRegionsChecked();
2598                 key = CaptureResult.CONTROL_AF_REGIONS;
2599                 break;
2600             default:
2601                 throw new IllegalArgumentException("Unknown 3A Algorithm!");
2602         }
2603 
2604         int maxDist = maxCoordOffset;
2605         if (scaleByZoomRatio) {
2606             Float zoomRatio = result.get(CaptureResult.CONTROL_ZOOM_RATIO);
2607             for (CaptureResult partialResult : partialResults) {
2608                 Float zoomRatioInPartial = partialResult.get(CaptureResult.CONTROL_ZOOM_RATIO);
2609                 if (zoomRatioInPartial != null) {
2610                     assertEquals("CONTROL_ZOOM_RATIO in partial result must match"
2611                             + " that in final result", zoomRatio, zoomRatioInPartial);
2612                 }
2613             }
2614             maxDist = (int)Math.ceil(maxDist * Math.max(zoomRatio / 2, 1.0f));
2615         }
2616 
2617         if (maxRegions > 0)
2618         {
2619             actualRegion = getValueNotNull(result, key);
2620             for (CaptureResult partialResult : partialResults) {
2621                 MeteringRectangle[] actualRegionInPartial = partialResult.get(key);
2622                 if (actualRegionInPartial != null) {
2623                     assertEquals("Key " + key.getName() + " in partial result must match"
2624                             + " that in final result", actualRegionInPartial, actualRegion);
2625                 }
2626             }
2627 
2628             for (int i = 0; i < actualRegion.length; i++) {
2629                 // If the expected region's metering weight is 0, allow the camera device
2630                 // to override it.
2631                 if (expectRegions[i].getMeteringWeight() == 0) {
2632                     continue;
2633                 }
2634 
2635                 Rect a = actualRegion[i].getRect();
2636                 Rect e = expectRegions[i].getRect();
2637 
2638                 if (VERBOSE) {
2639                     Log.v(TAG, "Actual region " + actualRegion[i].toString() +
2640                             ", expected region " + expectRegions[i].toString() +
2641                             ", maxDist " + maxDist);
2642                 }
2643                 assertTrue(
2644                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2645                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2646                     maxDist >= Math.abs(a.left - e.left));
2647 
2648                 assertTrue(
2649                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2650                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2651                     maxDist >= Math.abs(a.right - e.right));
2652 
2653                 assertTrue(
2654                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2655                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2656                     maxDist >= Math.abs(a.top - e.top));
2657                 assertTrue(
2658                     "Expected 3A regions: " + Arrays.toString(expectRegions) +
2659                     " are not close enough to the actual one: " + Arrays.toString(actualRegion),
2660                     maxDist >= Math.abs(a.bottom - e.bottom));
2661             }
2662         }
2663     }
2664 
validateImage(Image image, int width, int height, int format, String filePath)2665     public static void validateImage(Image image, int width, int height, int format,
2666             String filePath) {
2667         validateImage(image, width, height, format, filePath, /*colorSpace*/ null);
2668     }
2669 
2670 
2671     /**
2672      * Validate image based on format and size.
2673      *
2674      * @param image The image to be validated.
2675      * @param width The image width.
2676      * @param height The image height.
2677      * @param format The image format.
2678      * @param filePath The debug dump file path, null if don't want to dump to
2679      *            file.
2680      * @param colorSpace The expected color space of the image, if desired (null otherwise).
2681      * @throws UnsupportedOperationException if calling with an unknown format
2682      */
validateImage(Image image, int width, int height, int format, String filePath, ColorSpace colorSpace)2683     public static void validateImage(Image image, int width, int height, int format,
2684             String filePath, ColorSpace colorSpace) {
2685         checkImage(image, width, height, format, colorSpace);
2686 
2687         if (format == ImageFormat.PRIVATE) {
2688             return;
2689         }
2690 
2691         /**
2692          * TODO: validate timestamp:
2693          * 1. capture result timestamp against the image timestamp (need
2694          * consider frame drops)
2695          * 2. timestamps should be monotonically increasing for different requests
2696          */
2697         if(VERBOSE) Log.v(TAG, "validating Image");
2698         byte[] data = getDataFromImage(image);
2699         assertTrue("Invalid image data", data != null && data.length > 0);
2700 
2701         switch (format) {
2702             // Clients must be able to process and handle depth jpeg images like any other
2703             // regular jpeg.
2704             case ImageFormat.DEPTH_JPEG:
2705             case ImageFormat.JPEG:
2706                 validateJpegData(data, width, height, filePath, colorSpace);
2707                 break;
2708             case ImageFormat.JPEG_R:
2709                 validateJpegData(data, width, height, filePath, null /*colorSpace*/,
2710                         true /*gainMapPresent*/);
2711                 break;
2712             case ImageFormat.YCBCR_P010:
2713                 validateP010Data(data, width, height, format, image.getTimestamp(), filePath);
2714                 break;
2715             case ImageFormat.YCBCR_P210:
2716                 validateP210Data(data, width, height, format, image.getTimestamp(), filePath);
2717                 break;
2718             case ImageFormat.YUV_420_888:
2719             case ImageFormat.YV12:
2720                 validateYuvData(data, width, height, format, image.getTimestamp(), filePath);
2721                 break;
2722             case ImageFormat.RAW_SENSOR:
2723                 validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
2724                 break;
2725             case ImageFormat.DEPTH16:
2726                 validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
2727                 break;
2728             case ImageFormat.DEPTH_POINT_CLOUD:
2729                 validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath);
2730                 break;
2731             case ImageFormat.RAW_PRIVATE:
2732                 validateRawPrivateData(data, width, height, image.getTimestamp(), filePath);
2733                 break;
2734             case ImageFormat.Y8:
2735                 validateY8Data(data, width, height, format, image.getTimestamp(), filePath);
2736                 break;
2737             case ImageFormat.HEIC:
2738                 validateHeicData(data, width, height, filePath, false /*gainmapPresent*/);
2739                 break;
2740             case ImageFormat.HEIC_ULTRAHDR:
2741                 validateHeicData(data, width, height, filePath, true /*gainmapPresent*/);
2742                 break;
2743             default:
2744                 throw new UnsupportedOperationException("Unsupported format for validation: "
2745                         + format);
2746         }
2747     }
2748 
2749     public static class HandlerExecutor implements Executor {
2750         private final Handler mHandler;
2751 
HandlerExecutor(Handler handler)2752         public HandlerExecutor(Handler handler) {
2753             assertNotNull("handler must be valid", handler);
2754             mHandler = handler;
2755         }
2756 
2757         @Override
execute(Runnable runCmd)2758         public void execute(Runnable runCmd) {
2759             mHandler.post(runCmd);
2760         }
2761     }
2762 
2763     /**
2764      * Provide a mock for {@link CameraDevice.StateCallback}.
2765      *
2766      * <p>Only useful because mockito can't mock {@link CameraDevice.StateCallback} which is an
2767      * abstract class.</p>
2768      *
2769      * <p>
2770      * Use this instead of other classes when needing to verify interactions, since
2771      * trying to spy on {@link BlockingStateCallback} (or others) will cause unnecessary extra
2772      * interactions which will cause false test failures.
2773      * </p>
2774      *
2775      */
2776     public static class MockStateCallback extends CameraDevice.StateCallback {
2777 
2778         @Override
onOpened(CameraDevice camera)2779         public void onOpened(CameraDevice camera) {
2780         }
2781 
2782         @Override
onDisconnected(CameraDevice camera)2783         public void onDisconnected(CameraDevice camera) {
2784         }
2785 
2786         @Override
onError(CameraDevice camera, int error)2787         public void onError(CameraDevice camera, int error) {
2788         }
2789 
MockStateCallback()2790         private MockStateCallback() {}
2791 
2792         /**
2793          * Create a Mockito-ready mocked StateCallback.
2794          */
mock()2795         public static MockStateCallback mock() {
2796             return Mockito.spy(new MockStateCallback());
2797         }
2798     }
2799 
validateJpegData(byte[] jpegData, int width, int height, String filePath)2800     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath) {
2801         validateJpegData(jpegData, width, height, filePath, /*colorSpace*/ null);
2802     }
2803 
validateJpegData(byte[] jpegData, int width, int height, String filePath, ColorSpace colorSpace)2804     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
2805             ColorSpace colorSpace) {
2806         validateJpegData(jpegData, width, height, filePath, colorSpace, false /*gainMapPresent*/);
2807     }
2808 
validateJpegData(byte[] jpegData, int width, int height, String filePath, ColorSpace colorSpace, boolean gainMapPresent)2809     public static void validateJpegData(byte[] jpegData, int width, int height, String filePath,
2810             ColorSpace colorSpace, boolean gainMapPresent) {
2811         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2812         // DecodeBound mode: only parse the frame header to get width/height.
2813         // it doesn't decode the pixel.
2814         bmpOptions.inJustDecodeBounds = true;
2815         BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, bmpOptions);
2816         assertEquals(width, bmpOptions.outWidth);
2817         assertEquals(height, bmpOptions.outHeight);
2818 
2819         // Pixel decoding mode: decode whole image. check if the image data
2820         // is decodable here.
2821         Bitmap bitmapImage = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
2822         assertNotNull("Decoding jpeg failed", bitmapImage);
2823         if (colorSpace != null) {
2824             ColorSpace bitmapColorSpace = bitmapImage.getColorSpace();
2825             boolean matchingColorSpace = colorSpace.equals(bitmapColorSpace);
2826             if (!matchingColorSpace) {
2827                 Log.e(TAG, "Expected color space:\n\t" + colorSpace);
2828                 Log.e(TAG, "Bitmap color space:\n\t" + bitmapColorSpace);
2829             }
2830             assertTrue("Color space mismatch in decoded jpeg!", matchingColorSpace);
2831         }
2832         if (gainMapPresent) {
2833             Gainmap gainMap = bitmapImage.getGainmap();
2834             assertNotNull(gainMap);
2835             assertNotNull(gainMap.getGainmapContents());
2836         }
2837         if (DEBUG && filePath != null) {
2838             String fileName =
2839                     filePath + "/" + width + "x" + height + ".jpeg";
2840             dumpFile(fileName, jpegData);
2841         }
2842     }
2843 
validateYuvData(byte[] yuvData, int width, int height, int format, long ts, String filePath)2844     private static void validateYuvData(byte[] yuvData, int width, int height, int format,
2845             long ts, String filePath) {
2846         checkYuvFormat(format);
2847         if (VERBOSE) Log.v(TAG, "Validating YUV data");
2848         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2849         assertEquals("Yuv data doesn't match", expectedSize, yuvData.length);
2850 
2851         // TODO: Can add data validation for test pattern.
2852 
2853         if (DEBUG && filePath != null) {
2854             String fileName =
2855                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".yuv";
2856             dumpFile(fileName, yuvData);
2857         }
2858     }
2859 
validateP010Data(byte[] p010Data, int width, int height, int format, long ts, String filePath)2860     private static void validateP010Data(byte[] p010Data, int width, int height, int format,
2861             long ts, String filePath) {
2862         if (VERBOSE) Log.v(TAG, "Validating P010 data");
2863         int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
2864         int expectedSize = width * height * bytesPerPixel;
2865         assertEquals("P010 data doesn't match", expectedSize, p010Data.length);
2866 
2867         if (DEBUG && filePath != null) {
2868             String fileName =
2869                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p010";
2870             dumpFile(fileName, p010Data);
2871         }
2872     }
2873 
validateP210Data(byte[] p210Data, int width, int height, int format, long ts, String filePath)2874     private static void validateP210Data(byte[] p210Data, int width, int height, int format,
2875             long ts, String filePath) {
2876         if (VERBOSE) Log.v(TAG, "Validating P210 data");
2877         int bytesPerPixel = ImageFormat.getBitsPerPixel(format) / 8;
2878         int expectedSize = width * height * bytesPerPixel;
2879         assertEquals("P210 data doesn't match", expectedSize, p210Data.length);
2880 
2881         if (DEBUG && filePath != null) {
2882             String fileName =
2883                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".p210";
2884             dumpFile(fileName, p210Data);
2885         }
2886     }
2887 
validateRaw16Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2888     private static void validateRaw16Data(byte[] rawData, int width, int height, int format,
2889             long ts, String filePath) {
2890         if (VERBOSE) Log.v(TAG, "Validating raw data");
2891         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2892         assertEquals("Raw data doesn't match", expectedSize, rawData.length);
2893 
2894         // TODO: Can add data validation for test pattern.
2895 
2896         if (DEBUG && filePath != null) {
2897             String fileName =
2898                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".raw16";
2899             dumpFile(fileName, rawData);
2900         }
2901 
2902         return;
2903     }
2904 
validateY8Data(byte[] rawData, int width, int height, int format, long ts, String filePath)2905     private static void validateY8Data(byte[] rawData, int width, int height, int format,
2906             long ts, String filePath) {
2907         if (VERBOSE) Log.v(TAG, "Validating Y8 data");
2908         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2909         assertEquals("Y8 data doesn't match", expectedSize, rawData.length);
2910 
2911         // TODO: Can add data validation for test pattern.
2912 
2913         if (DEBUG && filePath != null) {
2914             String fileName =
2915                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".y8";
2916             dumpFile(fileName, rawData);
2917         }
2918 
2919         return;
2920     }
2921 
validateRawPrivateData(byte[] rawData, int width, int height, long ts, String filePath)2922     private static void validateRawPrivateData(byte[] rawData, int width, int height,
2923             long ts, String filePath) {
2924         if (VERBOSE) Log.v(TAG, "Validating private raw data");
2925         // Expect each RAW pixel should occupy at least one byte and no more than 30 bytes
2926         int expectedSizeMin = width * height;
2927         int expectedSizeMax = width * height * 30;
2928 
2929         assertTrue("Opaque RAW size " + rawData.length + "out of normal bound [" +
2930                 expectedSizeMin + "," + expectedSizeMax + "]",
2931                 expectedSizeMin <= rawData.length && rawData.length <= expectedSizeMax);
2932 
2933         if (DEBUG && filePath != null) {
2934             String fileName =
2935                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".rawPriv";
2936             dumpFile(fileName, rawData);
2937         }
2938 
2939         return;
2940     }
2941 
validateDepth16Data(byte[] depthData, int width, int height, int format, long ts, String filePath)2942     private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
2943             long ts, String filePath) {
2944 
2945         if (VERBOSE) Log.v(TAG, "Validating depth16 data");
2946         int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
2947         assertEquals("Depth data doesn't match", expectedSize, depthData.length);
2948 
2949 
2950         if (DEBUG && filePath != null) {
2951             String fileName =
2952                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
2953             dumpFile(fileName, depthData);
2954         }
2955 
2956         return;
2957 
2958     }
2959 
validateDepthPointCloudData(byte[] depthData, int width, int height, int format, long ts, String filePath)2960     private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format,
2961             long ts, String filePath) {
2962 
2963         if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");
2964 
2965         // Can't validate size since it is variable
2966 
2967         if (DEBUG && filePath != null) {
2968             String fileName =
2969                     filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
2970             dumpFile(fileName, depthData);
2971         }
2972 
2973         return;
2974 
2975     }
2976 
validateHeicData(byte[] heicData, int width, int height, String filePath, boolean gainMapPresent)2977     private static void validateHeicData(byte[] heicData, int width, int height, String filePath,
2978             boolean gainMapPresent) {
2979         BitmapFactory.Options bmpOptions = new BitmapFactory.Options();
2980         // DecodeBound mode: only parse the frame header to get width/height.
2981         // it doesn't decode the pixel.
2982         bmpOptions.inJustDecodeBounds = true;
2983         BitmapFactory.decodeByteArray(heicData, 0, heicData.length, bmpOptions);
2984         assertEquals(width, bmpOptions.outWidth);
2985         assertEquals(height, bmpOptions.outHeight);
2986 
2987         // Pixel decoding mode: decode whole image. check if the image data
2988         // is decodable here.
2989         Bitmap bitmapImage = BitmapFactory.decodeByteArray(heicData, 0, heicData.length);
2990         assertNotNull("Decoding heic failed", bitmapImage);
2991 
2992         if (DEBUG && filePath != null) {
2993             String fileName =
2994                     filePath + "/" + width + "x" + height + ".heic";
2995             dumpFile(fileName, heicData);
2996         }
2997 
2998         if (gainMapPresent) {
2999             Gainmap gainMap = bitmapImage.getGainmap();
3000             assertNotNull(gainMap);
3001             assertNotNull(gainMap.getGainmapContents());
3002         }
3003     }
3004 
getValueNotNull(CaptureResult result, CaptureResult.Key<T> key)3005     public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
3006         if (result == null) {
3007             throw new IllegalArgumentException("Result must not be null");
3008         }
3009 
3010         T value = result.get(key);
3011         assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
3012         return value;
3013     }
3014 
getValueNotNull(CameraCharacteristics characteristics, CameraCharacteristics.Key<T> key)3015     public static <T> T getValueNotNull(CameraCharacteristics characteristics,
3016             CameraCharacteristics.Key<T> key) {
3017         if (characteristics == null) {
3018             throw new IllegalArgumentException("Camera characteristics must not be null");
3019         }
3020 
3021         T value = characteristics.get(key);
3022         assertNotNull("Value of Key " + key.getName() + "shouldn't be null", value);
3023         return value;
3024     }
3025 
3026     /**
3027      * Get a crop region for a given zoom factor and center position.
3028      * <p>
3029      * The center position is normalized position in range of [0, 1.0], where
3030      * (0, 0) represents top left corner, (1.0. 1.0) represents bottom right
3031      * corner. The center position could limit the effective minimal zoom
3032      * factor, for example, if the center position is (0.75, 0.75), the
3033      * effective minimal zoom position becomes 2.0. If the requested zoom factor
3034      * is smaller than 2.0, a crop region with 2.0 zoom factor will be returned.
3035      * </p>
3036      * <p>
3037      * The aspect ratio of the crop region is maintained the same as the aspect
3038      * ratio of active array.
3039      * </p>
3040      *
3041      * @param zoomFactor The zoom factor to generate the crop region, it must be
3042      *            >= 1.0
3043      * @param center The normalized zoom center point that is in the range of [0, 1].
3044      * @param maxZoom The max zoom factor supported by this device.
3045      * @param activeArray The active array size of this device.
3046      * @return crop region for the given normalized center and zoom factor.
3047      */
getCropRegionForZoom(float zoomFactor, final PointF center, final float maxZoom, final Rect activeArray)3048     public static Rect getCropRegionForZoom(float zoomFactor, final PointF center,
3049             final float maxZoom, final Rect activeArray) {
3050         if (zoomFactor < 1.0) {
3051             throw new IllegalArgumentException("zoom factor " + zoomFactor + " should be >= 1.0");
3052         }
3053         if (center.x > 1.0 || center.x < 0) {
3054             throw new IllegalArgumentException("center.x " + center.x
3055                     + " should be in range of [0, 1.0]");
3056         }
3057         if (center.y > 1.0 || center.y < 0) {
3058             throw new IllegalArgumentException("center.y " + center.y
3059                     + " should be in range of [0, 1.0]");
3060         }
3061         if (maxZoom < 1.0) {
3062             throw new IllegalArgumentException("max zoom factor " + maxZoom + " should be >= 1.0");
3063         }
3064         if (activeArray == null) {
3065             throw new IllegalArgumentException("activeArray must not be null");
3066         }
3067 
3068         float minCenterLength = Math.min(Math.min(center.x, 1.0f - center.x),
3069                 Math.min(center.y, 1.0f - center.y));
3070         float minEffectiveZoom =  0.5f / minCenterLength;
3071         if (minEffectiveZoom > maxZoom) {
3072             throw new IllegalArgumentException("Requested center " + center.toString() +
3073                     " has minimal zoomable factor " + minEffectiveZoom + ", which exceeds max"
3074                             + " zoom factor " + maxZoom);
3075         }
3076 
3077         if (zoomFactor < minEffectiveZoom) {
3078             Log.w(TAG, "Requested zoomFactor " + zoomFactor + " < minimal zoomable factor "
3079                     + minEffectiveZoom + ". It will be overwritten by " + minEffectiveZoom);
3080             zoomFactor = minEffectiveZoom;
3081         }
3082 
3083         int cropCenterX = (int)(activeArray.width() * center.x);
3084         int cropCenterY = (int)(activeArray.height() * center.y);
3085         int cropWidth = (int) (activeArray.width() / zoomFactor);
3086         int cropHeight = (int) (activeArray.height() / zoomFactor);
3087 
3088         return new Rect(
3089                 /*left*/cropCenterX - cropWidth / 2,
3090                 /*top*/cropCenterY - cropHeight / 2,
3091                 /*right*/ cropCenterX + cropWidth / 2,
3092                 /*bottom*/cropCenterY + cropHeight / 2);
3093     }
3094 
3095     /**
3096      * Get AeAvailableTargetFpsRanges and sort them in descending order by max fps
3097      *
3098      * @param staticInfo camera static metadata
3099      * @return AeAvailableTargetFpsRanges in descending order by max fps
3100      */
getDescendingTargetFpsRanges(StaticMetadata staticInfo)3101     public static Range<Integer>[] getDescendingTargetFpsRanges(StaticMetadata staticInfo) {
3102         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3103         Arrays.sort(fpsRanges, new Comparator<Range<Integer>>() {
3104             public int compare(Range<Integer> r1, Range<Integer> r2) {
3105                 return r2.getUpper() - r1.getUpper();
3106             }
3107         });
3108         return fpsRanges;
3109     }
3110 
3111     /**
3112      * Get AeAvailableTargetFpsRanges with max fps not exceeding 30
3113      *
3114      * @param staticInfo camera static metadata
3115      * @return AeAvailableTargetFpsRanges with max fps not exceeding 30
3116      */
getTargetFpsRangesUpTo30(StaticMetadata staticInfo)3117     public static List<Range<Integer>> getTargetFpsRangesUpTo30(StaticMetadata staticInfo) {
3118         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3119         ArrayList<Range<Integer>> fpsRangesUpTo30 = new ArrayList<Range<Integer>>();
3120         for (Range<Integer> fpsRange : fpsRanges) {
3121             if (fpsRange.getUpper() <= 30) {
3122                 fpsRangesUpTo30.add(fpsRange);
3123             }
3124         }
3125         return fpsRangesUpTo30;
3126     }
3127 
3128     /**
3129      * Get AeAvailableTargetFpsRanges with max fps greater than 30
3130      *
3131      * @param staticInfo camera static metadata
3132      * @return AeAvailableTargetFpsRanges with max fps greater than 30
3133      */
getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo)3134     public static List<Range<Integer>> getTargetFpsRangesGreaterThan30(StaticMetadata staticInfo) {
3135         Range<Integer>[] fpsRanges = staticInfo.getAeAvailableTargetFpsRangesChecked();
3136         ArrayList<Range<Integer>> fpsRangesGreaterThan30 = new ArrayList<Range<Integer>>();
3137         for (Range<Integer> fpsRange : fpsRanges) {
3138             if (fpsRange.getUpper() > 30) {
3139                 fpsRangesGreaterThan30.add(fpsRange);
3140             }
3141         }
3142         return fpsRangesGreaterThan30;
3143     }
3144 
3145     /**
3146      * Calculate output 3A region from the intersection of input 3A region and cropped region.
3147      *
3148      * @param requestRegions The input 3A regions
3149      * @param cropRect The cropped region
3150      * @return expected 3A regions output in capture result
3151      */
getExpectedOutputRegion( MeteringRectangle[] requestRegions, Rect cropRect)3152     public static MeteringRectangle[] getExpectedOutputRegion(
3153             MeteringRectangle[] requestRegions, Rect cropRect){
3154         MeteringRectangle[] resultRegions = new MeteringRectangle[requestRegions.length];
3155         for (int i = 0; i < requestRegions.length; i++) {
3156             Rect requestRect = requestRegions[i].getRect();
3157             Rect resultRect = new Rect();
3158             boolean intersect = resultRect.setIntersect(requestRect, cropRect);
3159             resultRegions[i] = new MeteringRectangle(
3160                     resultRect,
3161                     intersect ? requestRegions[i].getMeteringWeight() : 0);
3162         }
3163         return resultRegions;
3164     }
3165 
3166     /**
3167      * Copy source image data to destination image.
3168      *
3169      * @param src The source image to be copied from.
3170      * @param dst The destination image to be copied to.
3171      * @throws IllegalArgumentException If the source and destination images have
3172      *             different format, size, or one of the images is not copyable.
3173      */
imageCopy(Image src, Image dst)3174     public static void imageCopy(Image src, Image dst) {
3175         if (src == null || dst == null) {
3176             throw new IllegalArgumentException("Images should be non-null");
3177         }
3178         if (src.getFormat() != dst.getFormat()) {
3179             throw new IllegalArgumentException("Src and dst images should have the same format");
3180         }
3181         if (src.getFormat() == ImageFormat.PRIVATE ||
3182                 dst.getFormat() == ImageFormat.PRIVATE) {
3183             throw new IllegalArgumentException("PRIVATE format images are not copyable");
3184         }
3185 
3186         Size srcSize = new Size(src.getWidth(), src.getHeight());
3187         Size dstSize = new Size(dst.getWidth(), dst.getHeight());
3188         if (!srcSize.equals(dstSize)) {
3189             throw new IllegalArgumentException("source image size " + srcSize + " is different"
3190                     + " with " + "destination image size " + dstSize);
3191         }
3192 
3193         // TODO: check the owner of the dst image, it must be from ImageWriter, other source may
3194         // not be writable. Maybe we should add an isWritable() method in image class.
3195 
3196         Plane[] srcPlanes = src.getPlanes();
3197         Plane[] dstPlanes = dst.getPlanes();
3198         ByteBuffer srcBuffer = null;
3199         ByteBuffer dstBuffer = null;
3200         for (int i = 0; i < srcPlanes.length; i++) {
3201             srcBuffer = srcPlanes[i].getBuffer();
3202             dstBuffer = dstPlanes[i].getBuffer();
3203             int srcPos = srcBuffer.position();
3204             srcBuffer.rewind();
3205             dstBuffer.rewind();
3206             int srcRowStride = srcPlanes[i].getRowStride();
3207             int dstRowStride = dstPlanes[i].getRowStride();
3208             int srcPixStride = srcPlanes[i].getPixelStride();
3209             int dstPixStride = dstPlanes[i].getPixelStride();
3210 
3211             if (srcPixStride > 2 || dstPixStride > 2) {
3212                 throw new IllegalArgumentException("source pixel stride " + srcPixStride +
3213                         " with destination pixel stride " + dstPixStride +
3214                         " is not supported");
3215             }
3216 
3217             if (srcRowStride == dstRowStride && srcPixStride == dstPixStride &&
3218                     srcPixStride == 1) {
3219                 // Fast path, just copy the content in the byteBuffer all together.
3220                 dstBuffer.put(srcBuffer);
3221             } else {
3222                 Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
3223                 int srcRowByteCount = srcRowStride;
3224                 int dstRowByteCount = dstRowStride;
3225                 byte[] srcDataRow = new byte[Math.max(srcRowStride, dstRowStride)];
3226 
3227                 if (srcPixStride == dstPixStride && srcPixStride == 1) {
3228                     // Row by row copy case
3229                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
3230                         if (row == effectivePlaneSize.getHeight() - 1) {
3231                             // Special case for interleaved planes: need handle the last row
3232                             // carefully to avoid memory corruption. Check if we have enough bytes
3233                             // to copy.
3234                             srcRowByteCount = Math.min(srcRowByteCount, srcBuffer.remaining());
3235                             dstRowByteCount = Math.min(dstRowByteCount, dstBuffer.remaining());
3236                         }
3237                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
3238                         dstBuffer.put(srcDataRow, /*offset*/0, dstRowByteCount);
3239                     }
3240                 } else {
3241                     // Row by row per pixel copy case
3242                     byte[] dstDataRow = new byte[dstRowByteCount];
3243                     for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
3244                         if (row == effectivePlaneSize.getHeight() - 1) {
3245                             // Special case for interleaved planes: need handle the last row
3246                             // carefully to avoid memory corruption. Check if we have enough bytes
3247                             // to copy.
3248                             int remainingBytes = srcBuffer.remaining();
3249                             if (srcRowByteCount > remainingBytes) {
3250                                 srcRowByteCount = remainingBytes;
3251                             }
3252                             remainingBytes = dstBuffer.remaining();
3253                             if (dstRowByteCount > remainingBytes) {
3254                                 dstRowByteCount = remainingBytes;
3255                             }
3256                         }
3257                         srcBuffer.get(srcDataRow, /*offset*/0, srcRowByteCount);
3258                         int pos = dstBuffer.position();
3259                         dstBuffer.get(dstDataRow, /*offset*/0, dstRowByteCount);
3260                         dstBuffer.position(pos);
3261                         for (int x = 0; x < effectivePlaneSize.getWidth(); x++) {
3262                             dstDataRow[x * dstPixStride] = srcDataRow[x * srcPixStride];
3263                         }
3264                         dstBuffer.put(dstDataRow, /*offset*/0, dstRowByteCount);
3265                     }
3266                 }
3267             }
3268             srcBuffer.position(srcPos);
3269             dstBuffer.rewind();
3270         }
3271     }
3272 
getEffectivePlaneSizeForImage(Image image, int planeIdx)3273     private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
3274         switch (image.getFormat()) {
3275             case ImageFormat.YUV_420_888:
3276                 if (planeIdx == 0) {
3277                     return new Size(image.getWidth(), image.getHeight());
3278                 } else {
3279                     return new Size(image.getWidth() / 2, image.getHeight() / 2);
3280                 }
3281             case ImageFormat.JPEG:
3282             case ImageFormat.RAW_SENSOR:
3283             case ImageFormat.RAW10:
3284             case ImageFormat.RAW12:
3285             case ImageFormat.DEPTH16:
3286                 return new Size(image.getWidth(), image.getHeight());
3287             case ImageFormat.PRIVATE:
3288                 return new Size(0, 0);
3289             default:
3290                 throw new UnsupportedOperationException(
3291                         String.format("Invalid image format %d", image.getFormat()));
3292         }
3293     }
3294 
3295     /**
3296      * <p>
3297      * Checks whether the two images are strongly equal.
3298      * </p>
3299      * <p>
3300      * Two images are strongly equal if and only if the data, formats, sizes,
3301      * and timestamps are same. For {@link ImageFormat#PRIVATE PRIVATE} format
3302      * images, the image data is not accessible thus the data comparison is
3303      * effectively skipped as the number of planes is zero.
3304      * </p>
3305      * <p>
3306      * Note that this method compares the pixel data even outside of the crop
3307      * region, which may not be necessary for general use case.
3308      * </p>
3309      *
3310      * @param lhsImg First image to be compared with.
3311      * @param rhsImg Second image to be compared with.
3312      * @return true if the two images are equal, false otherwise.
3313      * @throws IllegalArgumentException If either of image is null.
3314      */
isImageStronglyEqual(Image lhsImg, Image rhsImg)3315     public static boolean isImageStronglyEqual(Image lhsImg, Image rhsImg) {
3316         if (lhsImg == null || rhsImg == null) {
3317             throw new IllegalArgumentException("Images should be non-null");
3318         }
3319 
3320         if (lhsImg.getFormat() != rhsImg.getFormat()) {
3321             Log.i(TAG, "lhsImg format " + lhsImg.getFormat() + " is different with rhsImg format "
3322                     + rhsImg.getFormat());
3323             return false;
3324         }
3325 
3326         if (lhsImg.getWidth() != rhsImg.getWidth()) {
3327             Log.i(TAG, "lhsImg width " + lhsImg.getWidth() + " is different with rhsImg width "
3328                     + rhsImg.getWidth());
3329             return false;
3330         }
3331 
3332         if (lhsImg.getHeight() != rhsImg.getHeight()) {
3333             Log.i(TAG, "lhsImg height " + lhsImg.getHeight() + " is different with rhsImg height "
3334                     + rhsImg.getHeight());
3335             return false;
3336         }
3337 
3338         if (lhsImg.getTimestamp() != rhsImg.getTimestamp()) {
3339             Log.i(TAG, "lhsImg timestamp " + lhsImg.getTimestamp()
3340                     + " is different with rhsImg timestamp " + rhsImg.getTimestamp());
3341             return false;
3342         }
3343 
3344         if (!lhsImg.getCropRect().equals(rhsImg.getCropRect())) {
3345             Log.i(TAG, "lhsImg crop rect " + lhsImg.getCropRect()
3346                     + " is different with rhsImg crop rect " + rhsImg.getCropRect());
3347             return false;
3348         }
3349 
3350         // Compare data inside of the image.
3351         Plane[] lhsPlanes = lhsImg.getPlanes();
3352         Plane[] rhsPlanes = rhsImg.getPlanes();
3353         ByteBuffer lhsBuffer = null;
3354         ByteBuffer rhsBuffer = null;
3355         for (int i = 0; i < lhsPlanes.length; i++) {
3356             lhsBuffer = lhsPlanes[i].getBuffer();
3357             rhsBuffer = rhsPlanes[i].getBuffer();
3358             lhsBuffer.rewind();
3359             rhsBuffer.rewind();
3360             // Special case for YUV420_888 buffer with different layout or
3361             // potentially differently interleaved U/V planes.
3362             if (lhsImg.getFormat() == ImageFormat.YUV_420_888 &&
3363                     (lhsPlanes[i].getPixelStride() != rhsPlanes[i].getPixelStride() ||
3364                      lhsPlanes[i].getRowStride() != rhsPlanes[i].getRowStride() ||
3365                      (lhsPlanes[i].getPixelStride() != 1))) {
3366                 int width = getEffectivePlaneSizeForImage(lhsImg, i).getWidth();
3367                 int height = getEffectivePlaneSizeForImage(lhsImg, i).getHeight();
3368                 int rowSizeL = lhsPlanes[i].getRowStride();
3369                 int rowSizeR = rhsPlanes[i].getRowStride();
3370                 byte[] lhsRow = new byte[rowSizeL];
3371                 byte[] rhsRow = new byte[rowSizeR];
3372                 int pixStrideL = lhsPlanes[i].getPixelStride();
3373                 int pixStrideR = rhsPlanes[i].getPixelStride();
3374                 for (int r = 0; r < height; r++) {
3375                     if (r == height -1) {
3376                         rowSizeL = lhsBuffer.remaining();
3377                         rowSizeR = rhsBuffer.remaining();
3378                     }
3379                     lhsBuffer.get(lhsRow, /*offset*/0, rowSizeL);
3380                     rhsBuffer.get(rhsRow, /*offset*/0, rowSizeR);
3381                     for (int c = 0; c < width; c++) {
3382                         if (lhsRow[c * pixStrideL] != rhsRow[c * pixStrideR]) {
3383                             Log.i(TAG, String.format(
3384                                     "byte buffers for plane %d row %d col %d don't match.",
3385                                     i, r, c));
3386                             return false;
3387                         }
3388                     }
3389                 }
3390             } else {
3391                 // Compare entire buffer directly
3392                 if (!lhsBuffer.equals(rhsBuffer)) {
3393                     Log.i(TAG, "byte buffers for plane " +  i + " don't match.");
3394                     return false;
3395                 }
3396             }
3397         }
3398 
3399         return true;
3400     }
3401 
3402     /**
3403      * Set jpeg related keys in a capture request builder.
3404      *
3405      * @param builder The capture request builder to set the keys inl
3406      * @param exifData The exif data to set.
3407      * @param thumbnailSize The thumbnail size to set.
3408      * @param collector The camera error collector to collect errors.
3409      */
setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData, Size thumbnailSize, CameraErrorCollector collector)3410     public static void setJpegKeys(CaptureRequest.Builder builder, ExifTestData exifData,
3411             Size thumbnailSize, CameraErrorCollector collector) {
3412         builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumbnailSize);
3413         builder.set(CaptureRequest.JPEG_GPS_LOCATION, exifData.gpsLocation);
3414         builder.set(CaptureRequest.JPEG_ORIENTATION, exifData.jpegOrientation);
3415         builder.set(CaptureRequest.JPEG_QUALITY, exifData.jpegQuality);
3416         builder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY,
3417                 exifData.thumbnailQuality);
3418 
3419         // Validate request set and get.
3420         collector.expectEquals("JPEG thumbnail size request set and get should match",
3421                 thumbnailSize, builder.get(CaptureRequest.JPEG_THUMBNAIL_SIZE));
3422         collector.expectTrue("GPS locations request set and get should match.",
3423                 areGpsFieldsEqual(exifData.gpsLocation,
3424                 builder.get(CaptureRequest.JPEG_GPS_LOCATION)));
3425         collector.expectEquals("JPEG orientation request set and get should match",
3426                 exifData.jpegOrientation,
3427                 builder.get(CaptureRequest.JPEG_ORIENTATION));
3428         collector.expectEquals("JPEG quality request set and get should match",
3429                 exifData.jpegQuality, builder.get(CaptureRequest.JPEG_QUALITY));
3430         collector.expectEquals("JPEG thumbnail quality request set and get should match",
3431                 exifData.thumbnailQuality,
3432                 builder.get(CaptureRequest.JPEG_THUMBNAIL_QUALITY));
3433     }
3434 
3435     /**
3436      * Simple validation of JPEG image size and format.
3437      * <p>
3438      * Only validate the image object basic correctness. It is fast, but doesn't actually
3439      * check the buffer data. Assert is used here as it make no sense to
3440      * continue the test if the jpeg image captured has some serious failures.
3441      * </p>
3442      *
3443      * @param image The captured JPEG/HEIC image
3444      * @param expectedSize Expected capture JEPG/HEIC size
3445      * @param format JPEG/HEIC image format
3446      */
basicValidateBlobImage(Image image, Size expectedSize, int format)3447     public static void basicValidateBlobImage(Image image, Size expectedSize, int format) {
3448         Size imageSz = new Size(image.getWidth(), image.getHeight());
3449         assertTrue(
3450                 String.format("Image size doesn't match (expected %s, actual %s) ",
3451                         expectedSize.toString(), imageSz.toString()), expectedSize.equals(imageSz));
3452         assertEquals("Image format should be " + ((format == ImageFormat.HEIC) ? "HEIC" : "JPEG"),
3453                 format, image.getFormat());
3454         assertNotNull("Image plane shouldn't be null", image.getPlanes());
3455         assertEquals("Image plane number should be 1", 1, image.getPlanes().length);
3456 
3457         // Jpeg/Heic decoding validate was done in ImageReaderTest,
3458         // no need to duplicate the test here.
3459     }
3460 
3461     /**
3462      * Verify the EXIF and JPEG related keys in a capture result are expected.
3463      * - Capture request get values are same as were set.
3464      * - capture result's exif data is the same as was set by
3465      *   the capture request.
3466      * - new tags in the result set by the camera service are
3467      *   present and semantically correct.
3468      *
3469      * @param image The output JPEG/HEIC image to verify.
3470      * @param captureResult The capture result to verify.
3471      * @param expectedSize The expected JPEG/HEIC size.
3472      * @param expectedThumbnailSize The expected thumbnail size.
3473      * @param expectedExifData The expected EXIF data
3474      * @param staticInfo The static metadata for the camera device.
3475      * @param allStaticInfo The camera Id to static metadata map for all cameras.
3476      * @param blobFilename The filename to dump the jpeg/heic to.
3477      * @param collector The camera error collector to collect errors.
3478      * @param format JPEG/HEIC format
3479      */
verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize, Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector, String debugFileNameBase, int format)3480     public static void verifyJpegKeys(Image image, CaptureResult captureResult, Size expectedSize,
3481             Size expectedThumbnailSize, ExifTestData expectedExifData, StaticMetadata staticInfo,
3482             HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector,
3483             String debugFileNameBase, int format) throws Exception {
3484 
3485         basicValidateBlobImage(image, expectedSize, format);
3486 
3487         byte[] blobBuffer = getDataFromImage(image);
3488         // Have to dump into a file to be able to use ExifInterface
3489         String filePostfix = (format == ImageFormat.HEIC ? ".heic" : ".jpeg");
3490         String blobFilename = debugFileNameBase + "/verifyJpegKeys" + filePostfix;
3491         dumpFile(blobFilename, blobBuffer);
3492         ExifInterface exif = new ExifInterface(blobFilename);
3493 
3494         if (expectedThumbnailSize.equals(new Size(0,0))) {
3495             collector.expectTrue("Jpeg shouldn't have thumbnail when thumbnail size is (0, 0)",
3496                     !exif.hasThumbnail());
3497         } else {
3498             collector.expectTrue("Jpeg must have thumbnail for thumbnail size " +
3499                     expectedThumbnailSize, exif.hasThumbnail());
3500         }
3501 
3502         // Validate capture result vs. request
3503         Size resultThumbnailSize = captureResult.get(CaptureResult.JPEG_THUMBNAIL_SIZE);
3504         int orientationTested = expectedExifData.jpegOrientation;
3505         // Legacy shim always doesn't rotate thumbnail size
3506         if ((orientationTested == 90 || orientationTested == 270) &&
3507                 staticInfo.isHardwareLevelAtLeastLimited()) {
3508             int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3509                     /*defaultValue*/-1);
3510             if (exifOrientation == ExifInterface.ORIENTATION_UNDEFINED) {
3511                 // Device physically rotated image+thumbnail data
3512                 // Expect thumbnail size to be also rotated
3513                 resultThumbnailSize = new Size(resultThumbnailSize.getHeight(),
3514                         resultThumbnailSize.getWidth());
3515             }
3516         }
3517 
3518         collector.expectEquals("JPEG thumbnail size result and request should match",
3519                 expectedThumbnailSize, resultThumbnailSize);
3520         if (collector.expectKeyValueNotNull(captureResult, CaptureResult.JPEG_GPS_LOCATION) !=
3521                 null) {
3522             collector.expectTrue("GPS location result and request should match.",
3523                     areGpsFieldsEqual(expectedExifData.gpsLocation,
3524                     captureResult.get(CaptureResult.JPEG_GPS_LOCATION)));
3525         }
3526         collector.expectEquals("JPEG orientation result and request should match",
3527                 expectedExifData.jpegOrientation,
3528                 captureResult.get(CaptureResult.JPEG_ORIENTATION));
3529         collector.expectEquals("JPEG quality result and request should match",
3530                 expectedExifData.jpegQuality, captureResult.get(CaptureResult.JPEG_QUALITY));
3531         collector.expectEquals("JPEG thumbnail quality result and request should match",
3532                 expectedExifData.thumbnailQuality,
3533                 captureResult.get(CaptureResult.JPEG_THUMBNAIL_QUALITY));
3534 
3535         // Validate other exif tags for all non-legacy devices
3536         if (!staticInfo.isHardwareLevelLegacy()) {
3537             verifyJpegExifExtraTags(exif, expectedSize, captureResult, staticInfo, allStaticInfo,
3538                     collector, expectedExifData);
3539         }
3540     }
3541 
getSurfaceUsage(Surface s)3542     public static Optional<Long> getSurfaceUsage(Surface s) {
3543         if (s == null || !s.isValid()) {
3544             Log.e(TAG, "Invalid Surface!");
3545             return Optional.empty();
3546         }
3547 
3548         long usage = 0;
3549         ImageWriter writer = ImageWriter.newInstance(s, /*maxImages*/1, ImageFormat.YUV_420_888);
3550         try {
3551             Image img = writer.dequeueInputImage();
3552             if (img != null) {
3553                 usage = img.getHardwareBuffer().getUsage();
3554                 img.close();
3555             } else {
3556                 Log.e(TAG, "Unable to dequeue ImageWriter buffer!");
3557                 return Optional.empty();
3558             }
3559         } finally {
3560             writer.close();
3561         }
3562 
3563         return Optional.of(usage);
3564     }
3565 
3566     /**
3567      * Get the degree of an EXIF orientation.
3568      */
getExifOrientationInDegree(int exifOrientation, CameraErrorCollector collector)3569     private static int getExifOrientationInDegree(int exifOrientation,
3570             CameraErrorCollector collector) {
3571         switch (exifOrientation) {
3572             case ExifInterface.ORIENTATION_NORMAL:
3573                 return 0;
3574             case ExifInterface.ORIENTATION_ROTATE_90:
3575                 return 90;
3576             case ExifInterface.ORIENTATION_ROTATE_180:
3577                 return 180;
3578             case ExifInterface.ORIENTATION_ROTATE_270:
3579                 return 270;
3580             default:
3581                 collector.addMessage("It is impossible to get non 0, 90, 180, 270 degress exif" +
3582                         "info based on the request orientation range");
3583                 return 0;
3584         }
3585     }
3586 
3587     /**
3588      * Get all of the supported focal lengths for capture result.
3589      *
3590      * If the camera is a logical camera, return the focal lengths of the logical camera
3591      * and its active physical camera.
3592      *
3593      * If the camera isn't a logical camera, return the focal lengths supported by the
3594      * single camera.
3595      */
getAvailableFocalLengthsForResult(CaptureResult result, StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo)3596     public static Set<Float> getAvailableFocalLengthsForResult(CaptureResult result,
3597             StaticMetadata staticInfo,
3598             HashMap<String, StaticMetadata> allStaticInfo) {
3599         Set<Float> focalLengths = new HashSet<Float>();
3600         float[] supportedFocalLengths = staticInfo.getAvailableFocalLengthsChecked();
3601         for (float focalLength : supportedFocalLengths) {
3602             focalLengths.add(focalLength);
3603         }
3604 
3605         if (staticInfo.isLogicalMultiCamera()) {
3606             boolean activePhysicalCameraIdSupported =
3607                     staticInfo.isActivePhysicalCameraIdSupported();
3608             Set<String> physicalCameraIds;
3609             if (activePhysicalCameraIdSupported) {
3610                 String activePhysicalCameraId = result.get(
3611                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3612                 physicalCameraIds = new HashSet<String>();
3613                 physicalCameraIds.add(activePhysicalCameraId);
3614             } else {
3615                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3616             }
3617 
3618             for (String physicalCameraId : physicalCameraIds) {
3619                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3620                 if (physicalStaticInfo != null) {
3621                     float[] focalLengthsArray =
3622                             physicalStaticInfo.getAvailableFocalLengthsChecked();
3623                     for (float focalLength: focalLengthsArray) {
3624                         focalLengths.add(focalLength);
3625                     }
3626                 }
3627             }
3628         }
3629 
3630         return focalLengths;
3631     }
3632 
3633     /**
3634      * Validate and return the focal length.
3635      *
3636      * @param result Capture result to get the focal length
3637      * @param supportedFocalLengths Valid focal lengths to check the result focal length against
3638      * @param collector The camera error collector
3639      * @return Focal length from capture result or -1 if focal length is not available.
3640      */
validateFocalLength(CaptureResult result, Set<Float> supportedFocalLengths, CameraErrorCollector collector)3641     private static float validateFocalLength(CaptureResult result,
3642             Set<Float> supportedFocalLengths, CameraErrorCollector collector) {
3643         Float resultFocalLength = result.get(CaptureResult.LENS_FOCAL_LENGTH);
3644         if (collector.expectTrue("Focal length is invalid",
3645                 resultFocalLength != null && resultFocalLength > 0)) {
3646             collector.expectTrue("Focal length should be one of the available focal length",
3647                     supportedFocalLengths.contains(resultFocalLength));
3648             return resultFocalLength;
3649         }
3650         return -1;
3651     }
3652 
3653     /**
3654      * Get all of the supported apertures for capture result.
3655      *
3656      * If the camera is a logical camera, return the apertures of the logical camera
3657      * and its active physical camera.
3658      *
3659      * If the camera isn't a logical camera, return the apertures supported by the
3660      * single camera.
3661      */
getAvailableAperturesForResult(CaptureResult result, StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo)3662     private static Set<Float> getAvailableAperturesForResult(CaptureResult result,
3663             StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo) {
3664         Set<Float> allApertures = new HashSet<Float>();
3665         float[] supportedApertures = staticInfo.getAvailableAperturesChecked();
3666         for (float aperture : supportedApertures) {
3667             allApertures.add(aperture);
3668         }
3669 
3670         if (staticInfo.isLogicalMultiCamera()) {
3671             boolean activePhysicalCameraIdSupported =
3672                     staticInfo.isActivePhysicalCameraIdSupported();
3673             Set<String> physicalCameraIds;
3674             if (activePhysicalCameraIdSupported) {
3675                 String activePhysicalCameraId = result.get(
3676                         CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
3677                 physicalCameraIds = new HashSet<String>();
3678                 physicalCameraIds.add(activePhysicalCameraId);
3679             } else {
3680                 physicalCameraIds = staticInfo.getCharacteristics().getPhysicalCameraIds();
3681             }
3682 
3683             for (String physicalCameraId : physicalCameraIds) {
3684                 StaticMetadata physicalStaticInfo = allStaticInfo.get(physicalCameraId);
3685                 if (physicalStaticInfo != null) {
3686                     float[] apertures = physicalStaticInfo.getAvailableAperturesChecked();
3687                     for (float aperture: apertures) {
3688                         allApertures.add(aperture);
3689                     }
3690                 }
3691             }
3692         }
3693 
3694         return allApertures;
3695     }
3696 
3697     /**
3698      * Validate and return the aperture.
3699      *
3700      * @param result Capture result to get the aperture
3701      * @return Aperture from capture result or -1 if aperture is not available.
3702      */
validateAperture(CaptureResult result, Set<Float> supportedApertures, CameraErrorCollector collector)3703     private static float validateAperture(CaptureResult result,
3704             Set<Float> supportedApertures, CameraErrorCollector collector) {
3705         Float resultAperture = result.get(CaptureResult.LENS_APERTURE);
3706         if (collector.expectTrue("Capture result aperture is invalid",
3707                 resultAperture != null && resultAperture > 0)) {
3708             collector.expectTrue("Aperture should be one of the available apertures",
3709                     supportedApertures.contains(resultAperture));
3710             return resultAperture;
3711         }
3712         return -1;
3713     }
3714 
3715     /**
3716      * Return the closest value in a Set of floats.
3717      */
getClosestValueInSet(Set<Float> values, float target)3718     private static float getClosestValueInSet(Set<Float> values, float target) {
3719         float minDistance = Float.MAX_VALUE;
3720         float closestValue = -1.0f;
3721         for(float value : values) {
3722             float distance = Math.abs(value - target);
3723             if (minDistance > distance) {
3724                 minDistance = distance;
3725                 closestValue = value;
3726             }
3727         }
3728 
3729         return closestValue;
3730     }
3731 
3732     /**
3733      * Return if two Location's GPS field are the same.
3734      */
areGpsFieldsEqual(Location a, Location b)3735     private static boolean areGpsFieldsEqual(Location a, Location b) {
3736         if (a == null || b == null) {
3737             return false;
3738         }
3739 
3740         return a.getTime() == b.getTime() && a.getLatitude() == b.getLatitude() &&
3741                 a.getLongitude() == b.getLongitude() && a.getAltitude() == b.getAltitude() &&
3742                 a.getProvider() == b.getProvider();
3743     }
3744 
3745     /**
3746      * Verify extra tags in JPEG EXIF
3747      */
verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize, CaptureResult result, StaticMetadata staticInfo, HashMap<String, StaticMetadata> allStaticInfo, CameraErrorCollector collector, ExifTestData expectedExifData)3748     private static void verifyJpegExifExtraTags(ExifInterface exif, Size jpegSize,
3749             CaptureResult result, StaticMetadata staticInfo,
3750             HashMap<String, StaticMetadata> allStaticInfo,
3751             CameraErrorCollector collector, ExifTestData expectedExifData)
3752             throws ParseException {
3753         /**
3754          * TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH and TAG_ORIENTATION.
3755          * Orientation and exif width/height need to be tested carefully, two cases:
3756          *
3757          * 1. Device rotate the image buffer physically, then exif width/height may not match
3758          * the requested still capture size, we need swap them to check.
3759          *
3760          * 2. Device use the exif tag to record the image orientation, it doesn't rotate
3761          * the jpeg image buffer itself. In this case, the exif width/height should always match
3762          * the requested still capture size, and the exif orientation should always match the
3763          * requested orientation.
3764          *
3765          */
3766         int exifWidth = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, /*defaultValue*/0);
3767         int exifHeight = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, /*defaultValue*/0);
3768         Size exifSize = new Size(exifWidth, exifHeight);
3769         // Orientation could be missing, which is ok, default to 0.
3770         int exifOrientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION,
3771                 /*defaultValue*/-1);
3772         // Get requested orientation from result, because they should be same.
3773         if (collector.expectKeyValueNotNull(result, CaptureResult.JPEG_ORIENTATION) != null) {
3774             int requestedOrientation = result.get(CaptureResult.JPEG_ORIENTATION);
3775             final int ORIENTATION_MIN = ExifInterface.ORIENTATION_UNDEFINED;
3776             final int ORIENTATION_MAX = ExifInterface.ORIENTATION_ROTATE_270;
3777             boolean orientationValid = collector.expectTrue(String.format(
3778                     "Exif orientation must be in range of [%d, %d]",
3779                     ORIENTATION_MIN, ORIENTATION_MAX),
3780                     exifOrientation >= ORIENTATION_MIN && exifOrientation <= ORIENTATION_MAX);
3781             if (orientationValid) {
3782                 /**
3783                  * Device captured image doesn't respect the requested orientation,
3784                  * which means it rotates the image buffer physically. Then we
3785                  * should swap the exif width/height accordingly to compare.
3786                  */
3787                 boolean deviceRotatedImage = exifOrientation == ExifInterface.ORIENTATION_UNDEFINED;
3788 
3789                 if (deviceRotatedImage) {
3790                     // Case 1.
3791                     boolean needSwap = (requestedOrientation % 180 == 90);
3792                     if (needSwap) {
3793                         exifSize = new Size(exifHeight, exifWidth);
3794                     }
3795                 } else {
3796                     // Case 2.
3797                     collector.expectEquals("Exif orientaiton should match requested orientation",
3798                             requestedOrientation, getExifOrientationInDegree(exifOrientation,
3799                             collector));
3800                 }
3801             }
3802         }
3803 
3804         /**
3805          * Ideally, need check exifSize == jpegSize == actual buffer size. But
3806          * jpegSize == jpeg decode bounds size(from jpeg jpeg frame
3807          * header, not exif) was validated in ImageReaderTest, no need to
3808          * validate again here.
3809          */
3810         collector.expectEquals("Exif size should match jpeg capture size", jpegSize, exifSize);
3811 
3812         // TAG_DATETIME, it should be local time
3813         long currentTimeInMs = System.currentTimeMillis();
3814         long currentTimeInSecond = currentTimeInMs / 1000;
3815         Date date = new Date(currentTimeInMs);
3816         String localDatetime = new SimpleDateFormat("yyyy:MM:dd HH:").format(date);
3817         String dateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3818         if (collector.expectTrue("Exif TAG_DATETIME shouldn't be null", dateTime != null)) {
3819             collector.expectTrue("Exif TAG_DATETIME is wrong",
3820                     dateTime.length() == EXIF_DATETIME_LENGTH);
3821             long exifTimeInSecond =
3822                     new SimpleDateFormat("yyyy:MM:dd HH:mm:ss").parse(dateTime).getTime() / 1000;
3823             long delta = currentTimeInSecond - exifTimeInSecond;
3824             collector.expectTrue("Capture time deviates too much from the current time",
3825                     Math.abs(delta) < EXIF_DATETIME_ERROR_MARGIN_SEC);
3826             // It should be local time.
3827             collector.expectTrue("Exif date time should be local time",
3828                     dateTime.startsWith(localDatetime));
3829         }
3830 
3831         boolean isExternalCamera = staticInfo.isExternalCamera();
3832         if (!isExternalCamera) {
3833             // TAG_FOCAL_LENGTH.
3834             Set<Float> focalLengths = getAvailableFocalLengthsForResult(
3835                     result, staticInfo, allStaticInfo);
3836             float exifFocalLength = (float)exif.getAttributeDouble(
3837                         ExifInterface.TAG_FOCAL_LENGTH, -1);
3838             collector.expectEquals("Focal length should match",
3839                     getClosestValueInSet(focalLengths, exifFocalLength),
3840                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3841             // More checks for focal length.
3842             collector.expectEquals("Exif focal length should match capture result",
3843                     validateFocalLength(result, focalLengths, collector),
3844                     exifFocalLength, EXIF_FOCAL_LENGTH_ERROR_MARGIN);
3845 
3846             // TAG_EXPOSURE_TIME
3847             // ExifInterface API gives exposure time value in the form of float instead of rational
3848             String exposureTime = exif.getAttribute(ExifInterface.TAG_EXPOSURE_TIME);
3849             collector.expectNotNull("Exif TAG_EXPOSURE_TIME shouldn't be null", exposureTime);
3850             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_EXPOSURE_TIME)) {
3851                 if (exposureTime != null) {
3852                     double exposureTimeValue = Double.parseDouble(exposureTime);
3853                     long expTimeResult = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
3854                     double expected = expTimeResult / 1e9;
3855                     double tolerance = expected * EXIF_EXPOSURE_TIME_ERROR_MARGIN_RATIO;
3856                     tolerance = Math.max(tolerance, EXIF_EXPOSURE_TIME_MIN_ERROR_MARGIN_SEC);
3857                     collector.expectEquals("Exif exposure time doesn't match", expected,
3858                             exposureTimeValue, tolerance);
3859                 }
3860             }
3861 
3862             // TAG_APERTURE
3863             // ExifInterface API gives aperture value in the form of float instead of rational
3864             String exifAperture = exif.getAttribute(ExifInterface.TAG_APERTURE);
3865             collector.expectNotNull("Exif TAG_APERTURE shouldn't be null", exifAperture);
3866             if (staticInfo.areKeysAvailable(CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES)) {
3867                 Set<Float> apertures = getAvailableAperturesForResult(
3868                         result, staticInfo, allStaticInfo);
3869                 if (exifAperture != null) {
3870                     float apertureValue = Float.parseFloat(exifAperture);
3871                     collector.expectEquals("Aperture value should match",
3872                             getClosestValueInSet(apertures, apertureValue),
3873                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3874                     // More checks for aperture.
3875                     collector.expectEquals("Exif aperture length should match capture result",
3876                             validateAperture(result, apertures, collector),
3877                             apertureValue, EXIF_APERTURE_ERROR_MARGIN);
3878                 }
3879             }
3880 
3881             // TAG_MAKE
3882             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3883             collector.expectEquals("Exif TAG_MAKE is incorrect", Build.MANUFACTURER, make);
3884 
3885             // TAG_MODEL
3886             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3887             collector.expectTrue("Exif TAG_MODEL is incorrect",
3888                     model.startsWith(Build.MODEL) || model.endsWith(Build.MODEL));
3889 
3890 
3891             // TAG_ISO
3892             int iso = exif.getAttributeInt(ExifInterface.TAG_ISO, /*defaultValue*/-1);
3893             if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY) ||
3894                     staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3895                 int expectedIso = 100;
3896                 if (staticInfo.areKeysAvailable(CaptureResult.SENSOR_SENSITIVITY)) {
3897                     expectedIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
3898                 }
3899                 if (staticInfo.areKeysAvailable(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
3900                     expectedIso = expectedIso *
3901                             result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST);
3902                 } else {
3903                     expectedIso *= 100;
3904                 }
3905                 collector.expectInRange("Exif TAG_ISO is incorrect", iso,
3906                         expectedIso/100,((expectedIso + 50)/100) + MAX_ISO_MISMATCH);
3907             }
3908         } else {
3909             // External camera specific checks
3910             // TAG_MAKE
3911             String make = exif.getAttribute(ExifInterface.TAG_MAKE);
3912             collector.expectNotNull("Exif TAG_MAKE is null", make);
3913 
3914             // TAG_MODEL
3915             String model = exif.getAttribute(ExifInterface.TAG_MODEL);
3916             collector.expectNotNull("Exif TAG_MODEL is nuill", model);
3917         }
3918 
3919 
3920         /**
3921          * TAG_FLASH. TODO: For full devices, can check a lot more info
3922          * (http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/EXIF.html#Flash)
3923          */
3924         String flash = exif.getAttribute(ExifInterface.TAG_FLASH);
3925         collector.expectNotNull("Exif TAG_FLASH shouldn't be null", flash);
3926 
3927         /**
3928          * TAG_WHITE_BALANCE. TODO: For full devices, with the DNG tags, we
3929          * should be able to cross-check android.sensor.referenceIlluminant.
3930          */
3931         String whiteBalance = exif.getAttribute(ExifInterface.TAG_WHITE_BALANCE);
3932         collector.expectNotNull("Exif TAG_WHITE_BALANCE shouldn't be null", whiteBalance);
3933 
3934         // TAG_DATETIME_DIGITIZED (a.k.a Create time for digital cameras).
3935         String digitizedTime = exif.getAttribute(ExifInterface.TAG_DATETIME_DIGITIZED);
3936         collector.expectNotNull("Exif TAG_DATETIME_DIGITIZED shouldn't be null", digitizedTime);
3937         if (digitizedTime != null) {
3938             String expectedDateTime = exif.getAttribute(ExifInterface.TAG_DATETIME);
3939             collector.expectNotNull("Exif TAG_DATETIME shouldn't be null", expectedDateTime);
3940             if (expectedDateTime != null) {
3941                 collector.expectEquals("dataTime should match digitizedTime",
3942                         expectedDateTime, digitizedTime);
3943             }
3944         }
3945 
3946         /**
3947          * TAG_SUBSEC_TIME. Since the sub second tag strings are truncated to at
3948          * most 9 digits in ExifInterface implementation, use getAttributeInt to
3949          * sanitize it. When the default value -1 is returned, it means that
3950          * this exif tag either doesn't exist or is a non-numerical invalid
3951          * string. Same rule applies to the rest of sub second tags.
3952          */
3953         int subSecTime = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME, /*defaultValue*/-1);
3954         collector.expectTrue("Exif TAG_SUBSEC_TIME value is null or invalid!", subSecTime >= 0);
3955 
3956         // TAG_SUBSEC_TIME_ORIG
3957         int subSecTimeOrig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_ORIG,
3958                 /*defaultValue*/-1);
3959         collector.expectTrue("Exif TAG_SUBSEC_TIME_ORIG value is null or invalid!",
3960                 subSecTimeOrig >= 0);
3961 
3962         // TAG_SUBSEC_TIME_DIG
3963         int subSecTimeDig = exif.getAttributeInt(ExifInterface.TAG_SUBSEC_TIME_DIG,
3964                 /*defaultValue*/-1);
3965         collector.expectTrue(
3966                 "Exif TAG_SUBSEC_TIME_DIG value is null or invalid!", subSecTimeDig >= 0);
3967 
3968         /**
3969          * TAG_GPS_DATESTAMP & TAG_GPS_TIMESTAMP.
3970          * The GPS timestamp information should be in seconds UTC time.
3971          */
3972         String gpsDatestamp = exif.getAttribute(ExifInterface.TAG_GPS_DATESTAMP);
3973         collector.expectNotNull("Exif TAG_GPS_DATESTAMP shouldn't be null", gpsDatestamp);
3974         String gpsTimestamp = exif.getAttribute(ExifInterface.TAG_GPS_TIMESTAMP);
3975         collector.expectNotNull("Exif TAG_GPS_TIMESTAMP shouldn't be null", gpsTimestamp);
3976 
3977         SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd hh:mm:ss z");
3978         String gpsExifTimeString = gpsDatestamp + " " + gpsTimestamp + " UTC";
3979         Date gpsDateTime = dateFormat.parse(gpsExifTimeString);
3980         Date expected = new Date(expectedExifData.gpsLocation.getTime());
3981         collector.expectEquals("Jpeg EXIF GPS time should match", expected, gpsDateTime);
3982     }
3983 
3984 
3985     /**
3986      * Immutable class wrapping the exif test data.
3987      */
3988     public static class ExifTestData {
3989         public final Location gpsLocation;
3990         public final int jpegOrientation;
3991         public final byte jpegQuality;
3992         public final byte thumbnailQuality;
3993 
ExifTestData(Location location, int orientation, byte jpgQuality, byte thumbQuality)3994         public ExifTestData(Location location, int orientation,
3995                 byte jpgQuality, byte thumbQuality) {
3996             gpsLocation = location;
3997             jpegOrientation = orientation;
3998             jpegQuality = jpgQuality;
3999             thumbnailQuality = thumbQuality;
4000         }
4001     }
4002 
getPreviewSizeBound(WindowManager windowManager, Size bound)4003     public static Size getPreviewSizeBound(WindowManager windowManager, Size bound) {
4004         WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
4005         Rect windowBounds = windowMetrics.getBounds();
4006 
4007         int windowHeight = windowBounds.height();
4008         int windowWidth = windowBounds.width();
4009 
4010         if (windowHeight > windowWidth) {
4011             windowHeight = windowWidth;
4012             windowWidth = windowBounds.height();
4013         }
4014 
4015         if (bound.getWidth() <= windowWidth
4016                 && bound.getHeight() <= windowHeight) {
4017             return bound;
4018         } else {
4019             return new Size(windowWidth, windowHeight);
4020         }
4021     }
4022 
4023     /**
4024      * Check if a particular stream configuration is supported by configuring it
4025      * to the device.
4026      */
isStreamConfigurationSupported(CameraDevice camera, List<Surface> outputSurfaces, CameraCaptureSession.StateCallback listener, Handler handler)4027     public static boolean isStreamConfigurationSupported(CameraDevice camera,
4028             List<Surface> outputSurfaces,
4029             CameraCaptureSession.StateCallback listener, Handler handler) {
4030         try {
4031             configureCameraSession(camera, outputSurfaces, listener, handler);
4032             return true;
4033         } catch (Exception e) {
4034             Log.i(TAG, "This stream configuration is not supported due to " + e.getMessage());
4035             return false;
4036         }
4037     }
4038 
4039     public final static class SessionConfigSupport {
4040         public final boolean error;
4041         public final boolean callSupported;
4042         public final boolean configSupported;
4043 
SessionConfigSupport(boolean error, boolean callSupported, boolean configSupported)4044         public SessionConfigSupport(boolean error,
4045                 boolean callSupported, boolean configSupported) {
4046             this.error = error;
4047             this.callSupported = callSupported;
4048             this.configSupported = configSupported;
4049         }
4050     }
4051 
4052     /**
4053      * Query whether a particular stream combination is supported.
4054      */
checkSessionConfigurationWithSurfaces(CameraDevice camera, Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig, int operatingMode, CameraManager manager, boolean defaultSupport, String msg)4055     public static void checkSessionConfigurationWithSurfaces(CameraDevice camera,
4056             Handler handler, List<Surface> outputSurfaces, InputConfiguration inputConfig,
4057             int operatingMode, CameraManager manager, boolean defaultSupport, String msg)
4058             throws Exception {
4059         List<OutputConfiguration> outConfigurations = new ArrayList<>(outputSurfaces.size());
4060         for (Surface surface : outputSurfaces) {
4061             outConfigurations.add(new OutputConfiguration(surface));
4062         }
4063 
4064         checkSessionConfigurationSupported(camera, handler, outConfigurations,
4065                 inputConfig, operatingMode, manager, defaultSupport, msg);
4066     }
4067 
checkSessionConfigurationSupported(CameraDevice camera, Handler handler, List<OutputConfiguration> outputConfigs, InputConfiguration inputConfig, int operatingMode, CameraManager manager, boolean defaultSupport, String msg)4068     public static void checkSessionConfigurationSupported(CameraDevice camera,
4069             Handler handler, List<OutputConfiguration> outputConfigs,
4070             InputConfiguration inputConfig, int operatingMode, CameraManager manager,
4071             boolean defaultSupport, String msg) throws Exception {
4072         SessionConfigSupport sessionConfigSupported =
4073                 isSessionConfigSupported(camera, handler, outputConfigs, inputConfig,
4074                 operatingMode, manager, defaultSupport);
4075 
4076         assertTrue(msg, !sessionConfigSupported.error && sessionConfigSupported.configSupported);
4077     }
4078 
4079     /**
4080      * Query whether a particular stream combination is supported.
4081      */
isSessionConfigSupported(CameraDevice camera, Handler handler, List<OutputConfiguration> outputConfigs, InputConfiguration inputConfig, int operatingMode, CameraManager manager, boolean defaultSupport)4082     public static SessionConfigSupport isSessionConfigSupported(CameraDevice camera,
4083             Handler handler, List<OutputConfiguration> outputConfigs,
4084             InputConfiguration inputConfig, int operatingMode,
4085             CameraManager manager, boolean defaultSupport)
4086             throws android.hardware.camera2.CameraAccessException {
4087         boolean ret;
4088         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
4089 
4090         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
4091                 new HandlerExecutor(handler), sessionListener);
4092         if (inputConfig != null) {
4093             sessionConfig.setInputConfiguration(inputConfig);
4094         }
4095 
4096         // Verify that the return value of CameraDevice.isSessionConfigurationSupported is the
4097         // same as CameraDeviceSetup.isSessionConfigurationSupported.
4098         // Note: This check only makes sense if targetSdkVersion and platform's SDK Version >= V
4099         boolean deviceSetupSupported = false;
4100         boolean configSupportedByDeviceSetup = false;
4101         String cameraId = camera.getId();
4102         if (Build.VERSION.SDK_INT > Build.VERSION_CODES.UPSIDE_DOWN_CAKE
4103                 && Flags.cameraDeviceSetup() && manager.isCameraDeviceSetupSupported(cameraId)) {
4104             CameraDeviceSetup deviceSetup = manager.getCameraDeviceSetup(cameraId);
4105             assertNotNull("Failed to get camera device setup for " + cameraId, deviceSetup);
4106             deviceSetupSupported = true;
4107 
4108             configSupportedByDeviceSetup = deviceSetup.isSessionConfigurationSupported(
4109                     sessionConfig);
4110         }
4111 
4112         try {
4113             ret = camera.isSessionConfigurationSupported(sessionConfig);
4114         } catch (UnsupportedOperationException e) {
4115             // Camera doesn't support session configuration query
4116             assertFalse("If device setup is supported, "
4117                     + "CameraDevice.isSessionConfigurationSupported cannot throw"
4118                     + "unsupportedOperationException", deviceSetupSupported);
4119             return new SessionConfigSupport(false/*error*/,
4120                     false/*callSupported*/, defaultSupport/*configSupported*/);
4121         } catch (IllegalArgumentException e) {
4122             return new SessionConfigSupport(true/*error*/,
4123                     false/*callSupported*/, false/*configSupported*/);
4124         } catch (android.hardware.camera2.CameraAccessException e) {
4125             return new SessionConfigSupport(true/*error*/,
4126                     false/*callSupported*/, false/*configSupported*/);
4127         }
4128 
4129         if (deviceSetupSupported) {
4130             assertEquals("CameraDeviceSetup and CameraDevice must return the same value "
4131                     + "for isSessionConfigurationSupported!", ret, configSupportedByDeviceSetup);
4132         }
4133         return new SessionConfigSupport(false/*error*/,
4134                 true/*callSupported*/, ret/*configSupported*/);
4135     }
4136 
4137     /**
4138      * Check if a session configuration with parameters is supported.
4139      *
4140      * All OutputConfigurations contains valid output surfaces.
4141      */
isSessionConfigWithParamsSupported( CameraDevice.CameraDeviceSetup cameraDeviceSetup, Handler handler, List<OutputConfiguration> outputConfigs, int operatingMode, CaptureRequest request)4142     public static boolean isSessionConfigWithParamsSupported(
4143             CameraDevice.CameraDeviceSetup cameraDeviceSetup,
4144             Handler handler, List<OutputConfiguration> outputConfigs,
4145             int operatingMode, CaptureRequest request) throws CameraAccessException {
4146         BlockingSessionCallback sessionListener = new BlockingSessionCallback();
4147         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs,
4148                 new HandlerExecutor(handler), sessionListener);
4149         sessionConfig.setSessionParameters(request);
4150 
4151         return cameraDeviceSetup.isSessionConfigurationSupported(sessionConfig);
4152     }
4153 
4154     /**
4155      * Check if a session configuration with parameters is supported.
4156      *
4157      * <p>OutputConfigurations do not contain the output surface. Additionally this function
4158      * checks the consistency of isSessionConfigurationSupported return value between the
4159      * incompleted SessionConfiguration and the completed SessionConfiguration after addSurface
4160      * is called.</p>
4161      */
isSessionConfigWithParamsSupportedChecked( CameraDevice.CameraDeviceSetup cameraDeviceSetup, List<Pair<OutputConfiguration, Surface>> outputConfigs2Steps, int operatingMode, CaptureRequest request)4162     public static boolean isSessionConfigWithParamsSupportedChecked(
4163             CameraDevice.CameraDeviceSetup cameraDeviceSetup,
4164             List<Pair<OutputConfiguration, Surface>> outputConfigs2Steps,
4165             int operatingMode, CaptureRequest request) throws CameraAccessException {
4166         List<OutputConfiguration> outputConfigs = new ArrayList<>();
4167         for (Pair<OutputConfiguration, Surface> c : outputConfigs2Steps) {
4168             outputConfigs.add(c.first);
4169         }
4170         SessionConfiguration sessionConfig = new SessionConfiguration(operatingMode, outputConfigs);
4171         sessionConfig.setSessionParameters(request);
4172         boolean sessionConfigNoSurfaceSupported = cameraDeviceSetup.isSessionConfigurationSupported(
4173                 sessionConfig);
4174 
4175         // Add surfaces for the OutputConfigurations
4176         for (Pair<OutputConfiguration, Surface> c : outputConfigs2Steps) {
4177             OutputConfiguration config = c.first;
4178             Surface surface = c.second;
4179             if (config.getSurface() == null) {
4180                 config.addSurface(surface);
4181             }
4182         }
4183         boolean sessionConfigWithSurfaceSupported =
4184                 cameraDeviceSetup.isSessionConfigurationSupported(sessionConfig);
4185         assertEquals("isSessionConfigurationSupported return value shouldn't change before and "
4186                 + "after surfaces are added to SessionConfiguration",
4187                 sessionConfigNoSurfaceSupported, sessionConfigWithSurfaceSupported);
4188 
4189         return sessionConfigWithSurfaceSupported;
4190     }
4191 
4192     /**
4193      * Wait for numResultWait frames
4194      *
4195      * @param resultListener The capture listener to get capture result back.
4196      * @param numResultsWait Number of frame to wait
4197      * @param timeout Wait timeout in ms.
4198      *
4199      * @return the last result, or {@code null} if there was none
4200      */
waitForNumResults(SimpleCaptureCallback resultListener, int numResultsWait, int timeout)4201     public static CaptureResult waitForNumResults(SimpleCaptureCallback resultListener,
4202             int numResultsWait, int timeout) {
4203         if (numResultsWait < 0 || resultListener == null) {
4204             throw new IllegalArgumentException(
4205                     "Input must be positive number and listener must be non-null");
4206         }
4207 
4208         CaptureResult result = null;
4209         for (int i = 0; i < numResultsWait; i++) {
4210             result = resultListener.getCaptureResult(timeout);
4211         }
4212 
4213         return result;
4214     }
4215 
4216     /**
4217      * Wait for any expected result key values available in a certain number of results.
4218      *
4219      * <p>
4220      * Check the result immediately if numFramesWait is 0.
4221      * </p>
4222      *
4223      * @param listener The capture listener to get capture result.
4224      * @param resultKey The capture result key associated with the result value.
4225      * @param expectedValues The list of result value need to be waited for,
4226      * return immediately if the list is empty.
4227      * @param numResultsWait Number of frame to wait before times out.
4228      * @param timeout result wait time out in ms.
4229      * @throws TimeoutRuntimeException If more than numResultsWait results are.
4230      * seen before the result matching myRequest arrives, or each individual wait
4231      * for result times out after 'timeout' ms.
4232      */
waitForAnyResultValue(SimpleCaptureCallback listener, CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait, int timeout)4233     public static <T> void waitForAnyResultValue(SimpleCaptureCallback listener,
4234             CaptureResult.Key<T> resultKey, List<T> expectedValues, int numResultsWait,
4235             int timeout) {
4236         if (numResultsWait < 0 || listener == null || expectedValues == null) {
4237             throw new IllegalArgumentException(
4238                     "Input must be non-negative number and listener/expectedValues "
4239                     + "must be non-null");
4240         }
4241 
4242         int i = 0;
4243         CaptureResult result;
4244         do {
4245             result = listener.getCaptureResult(timeout);
4246             T value = result.get(resultKey);
4247             for ( T expectedValue : expectedValues) {
4248                 if (VERBOSE) {
4249                     Log.v(TAG, "Current result value for key " + resultKey.getName() + " is: "
4250                             + value.toString());
4251                 }
4252                 if (value.equals(expectedValue)) {
4253                     return;
4254                 }
4255             }
4256         } while (i++ < numResultsWait);
4257 
4258         throw new TimeoutRuntimeException(
4259                 "Unable to get the expected result value " + expectedValues + " for key " +
4260                         resultKey.getName() + " after waiting for " + numResultsWait + " results");
4261     }
4262 
4263     /**
4264      * Wait for expected result key value available in a certain number of results.
4265      *
4266      * <p>
4267      * Check the result immediately if numFramesWait is 0.
4268      * </p>
4269      *
4270      * @param listener The capture listener to get capture result
4271      * @param resultKey The capture result key associated with the result value
4272      * @param expectedValue The result value need to be waited for
4273      * @param numResultsWait Number of frame to wait before times out
4274      * @param timeout Wait time out.
4275      * @throws TimeoutRuntimeException If more than numResultsWait results are
4276      * seen before the result matching myRequest arrives, or each individual wait
4277      * for result times out after 'timeout' ms.
4278      */
waitForResultValue(SimpleCaptureCallback listener, CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout)4279     public static <T> void waitForResultValue(SimpleCaptureCallback listener,
4280             CaptureResult.Key<T> resultKey, T expectedValue, int numResultsWait, int timeout) {
4281         List<T> expectedValues = new ArrayList<T>();
4282         expectedValues.add(expectedValue);
4283         waitForAnyResultValue(listener, resultKey, expectedValues, numResultsWait, timeout);
4284     }
4285 
4286     /**
4287      * Wait for AE to be stabilized before capture: CONVERGED or FLASH_REQUIRED.
4288      *
4289      * <p>Waits for {@code android.sync.maxLatency} number of results first, to make sure
4290      * that the result is synchronized (or {@code numResultWaitForUnknownLatency} if the latency
4291      * is unknown.</p>
4292      *
4293      * <p>This is a no-op for {@code LEGACY} devices since they don't report
4294      * the {@code aeState} result.</p>
4295      *
4296      * @param resultListener The capture listener to get capture result back.
4297      * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
4298      *                                       unknown.
4299      * @param staticInfo corresponding camera device static metadata.
4300      * @param settingsTimeout wait timeout for settings application in ms.
4301      * @param resultTimeout wait timeout for result in ms.
4302      * @param numResultsWait Number of frame to wait before times out.
4303      */
waitForAeStable(SimpleCaptureCallback resultListener, int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int settingsTimeout, int numResultWait)4304     public static void waitForAeStable(SimpleCaptureCallback resultListener,
4305             int numResultWaitForUnknownLatency, StaticMetadata staticInfo,
4306             int settingsTimeout, int numResultWait) {
4307         waitForSettingsApplied(resultListener, numResultWaitForUnknownLatency, staticInfo,
4308                 settingsTimeout);
4309 
4310         if (!staticInfo.isHardwareLevelAtLeastLimited()) {
4311             // No-op for metadata
4312             return;
4313         }
4314         List<Integer> expectedAeStates = new ArrayList<Integer>();
4315         expectedAeStates.add(new Integer(CaptureResult.CONTROL_AE_STATE_CONVERGED));
4316         expectedAeStates.add(new Integer(CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED));
4317         waitForAnyResultValue(resultListener, CaptureResult.CONTROL_AE_STATE, expectedAeStates,
4318                 numResultWait, settingsTimeout);
4319     }
4320 
4321     /**
4322      * Wait for enough results for settings to be applied
4323      *
4324      * @param resultListener The capture listener to get capture result back.
4325      * @param numResultWaitForUnknownLatency Number of frame to wait if camera device latency is
4326      *                                       unknown.
4327      * @param staticInfo corresponding camera device static metadata.
4328      * @param timeout wait timeout in ms.
4329      */
waitForSettingsApplied(SimpleCaptureCallback resultListener, int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout)4330     public static void waitForSettingsApplied(SimpleCaptureCallback resultListener,
4331             int numResultWaitForUnknownLatency, StaticMetadata staticInfo, int timeout) {
4332         int maxLatency = staticInfo.getSyncMaxLatency();
4333         if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
4334             maxLatency = numResultWaitForUnknownLatency;
4335         }
4336         // Wait for settings to take effect
4337         waitForNumResults(resultListener, maxLatency, timeout);
4338     }
4339 
getSuitableFpsRangeForDuration(String cameraId, long frameDuration, StaticMetadata staticInfo)4340     public static Range<Integer> getSuitableFpsRangeForDuration(String cameraId,
4341             long frameDuration, StaticMetadata staticInfo) {
4342         // Add 0.05 here so Fps like 29.99 evaluated to 30
4343         int minBurstFps = (int) Math.floor(1e9 / frameDuration + 0.05f);
4344         boolean foundConstantMaxYUVRange = false;
4345         boolean foundYUVStreamingRange = false;
4346         boolean isExternalCamera = staticInfo.isExternalCamera();
4347         boolean isNIR = staticInfo.isNIRColorFilter();
4348 
4349         // Find suitable target FPS range - as high as possible that covers the max YUV rate
4350         // Also verify that there's a good preview rate as well
4351         List<Range<Integer> > fpsRanges = Arrays.asList(
4352                 staticInfo.getAeAvailableTargetFpsRangesChecked());
4353         Range<Integer> targetRange = null;
4354         for (Range<Integer> fpsRange : fpsRanges) {
4355             if (fpsRange.getLower() == minBurstFps && fpsRange.getUpper() == minBurstFps) {
4356                 foundConstantMaxYUVRange = true;
4357                 targetRange = fpsRange;
4358             } else if (isExternalCamera && fpsRange.getUpper() == minBurstFps) {
4359                 targetRange = fpsRange;
4360             }
4361             if (fpsRange.getLower() <= 15 && fpsRange.getUpper() == minBurstFps) {
4362                 foundYUVStreamingRange = true;
4363             }
4364 
4365         }
4366 
4367         if (!isExternalCamera) {
4368             assertTrue(String.format("Cam %s: Target FPS range of (%d, %d) must be supported",
4369                     cameraId, minBurstFps, minBurstFps), foundConstantMaxYUVRange);
4370         }
4371 
4372         if (!isNIR) {
4373             assertTrue(String.format(
4374                     "Cam %s: Target FPS range of (x, %d) where x <= 15 must be supported",
4375                     cameraId, minBurstFps), foundYUVStreamingRange);
4376         }
4377         return targetRange;
4378     }
4379     /**
4380      * Get the candidate supported zoom ratios for testing
4381      *
4382      * <p>
4383      * This function returns the bounary values of supported zoom ratio range in addition to 1.0x
4384      * zoom ratio.
4385      * </p>
4386      */
getCandidateZoomRatios(StaticMetadata staticInfo)4387     public static List<Float> getCandidateZoomRatios(StaticMetadata staticInfo) {
4388         List<Float> zoomRatios = new ArrayList<Float>();
4389         Range<Float> zoomRatioRange = staticInfo.getZoomRatioRangeChecked();
4390         zoomRatios.add(zoomRatioRange.getLower());
4391         if (zoomRatioRange.contains(1.0f) &&
4392                 1.0f - zoomRatioRange.getLower() > ZOOM_RATIO_THRESHOLD &&
4393                 zoomRatioRange.getUpper() - 1.0f > ZOOM_RATIO_THRESHOLD) {
4394             zoomRatios.add(1.0f);
4395         }
4396         zoomRatios.add(zoomRatioRange.getUpper());
4397 
4398         return zoomRatios;
4399     }
4400 
4401     /**
4402      * Get the primary rear facing camera from an ID list
4403      */
getPrimaryRearCamera(CameraManager manager, String[] cameraIds)4404     public static String getPrimaryRearCamera(CameraManager manager, String[] cameraIds)
4405             throws Exception {
4406         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_BACK);
4407     }
4408 
4409     /**
4410      * Get the primary front facing camera from an ID list
4411      */
getPrimaryFrontCamera(CameraManager manager, String[] cameraIds)4412     public static String getPrimaryFrontCamera(CameraManager manager, String[] cameraIds)
4413             throws Exception {
4414         return getPrimaryCamera(manager, cameraIds, CameraCharacteristics.LENS_FACING_FRONT);
4415     }
4416 
getPrimaryCamera(CameraManager manager, String[] cameraIds, Integer facing)4417     private static String getPrimaryCamera(CameraManager manager,
4418             String[] cameraIds, Integer facing) throws Exception {
4419         if (cameraIds == null) {
4420             return null;
4421         }
4422 
4423         for (String id : cameraIds) {
4424             if (isPrimaryCamera(manager, id, facing)) {
4425                 return id;
4426             }
4427         }
4428 
4429         return null;
4430     }
4431 
4432     /**
4433      * Check whether a camera Id is a primary rear facing camera
4434      */
isPrimaryRearFacingCamera(CameraManager manager, String cameraId)4435     public static boolean isPrimaryRearFacingCamera(CameraManager manager, String cameraId)
4436             throws Exception {
4437         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_BACK);
4438     }
4439 
4440     /**
4441      * Check whether a camera Id is a primary front facing camera
4442      */
isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)4443     public static boolean isPrimaryFrontFacingCamera(CameraManager manager, String cameraId)
4444             throws Exception {
4445         return isPrimaryCamera(manager, cameraId, CameraCharacteristics.LENS_FACING_FRONT);
4446     }
4447 
isPrimaryCamera(CameraManager manager, String cameraId, Integer lensFacing)4448     private static boolean isPrimaryCamera(CameraManager manager, String cameraId,
4449             Integer lensFacing) throws Exception {
4450         CameraCharacteristics characteristics;
4451         Integer facing;
4452 
4453         String [] ids = manager.getCameraIdList();
4454         for (String id : ids) {
4455             characteristics = manager.getCameraCharacteristics(id);
4456             facing = characteristics.get(CameraCharacteristics.LENS_FACING);
4457             if (lensFacing.equals(facing)) {
4458                 if (cameraId.equals(id)) {
4459                     return true;
4460                 } else {
4461                     return false;
4462                 }
4463             }
4464         }
4465         return false;
4466     }
4467 
4468     /**
4469      * Verifies the camera in this listener was opened and then unconfigured exactly once.
4470      *
4471      * <p>This assumes that no other action to the camera has been done (e.g.
4472      * it hasn't been configured, or closed, or disconnected). Verification is
4473      * performed immediately without any timeouts.</p>
4474      *
4475      * <p>This checks that the state has previously changed first for opened and then unconfigured.
4476      * Any other state transitions will fail. A test failure is thrown if verification fails.</p>
4477      *
4478      * @param cameraId Camera identifier
4479      * @param listener Listener which was passed to {@link CameraManager#openCamera}
4480      *
4481      * @return The camera device (non-{@code null}).
4482      */
verifyCameraStateOpened(String cameraId, MockStateCallback listener)4483     public static CameraDevice verifyCameraStateOpened(String cameraId,
4484             MockStateCallback listener) {
4485         ArgumentCaptor<CameraDevice> argument =
4486                 ArgumentCaptor.forClass(CameraDevice.class);
4487         InOrder inOrder = inOrder(listener);
4488 
4489         /**
4490          * State transitions (in that order):
4491          *  1) onOpened
4492          *
4493          * No other transitions must occur for successful #openCamera
4494          */
4495         inOrder.verify(listener)
4496                 .onOpened(argument.capture());
4497 
4498         CameraDevice camera = argument.getValue();
4499         assertNotNull(
4500                 String.format("Failed to open camera device ID: %s", cameraId),
4501                 camera);
4502 
4503         // Do not use inOrder here since that would skip anything called before onOpened
4504         verifyNoMoreInteractions(listener);
4505 
4506         return camera;
4507     }
4508 
verifySingleAvailabilityCbsReceived( LinkedBlockingQueue<String> expectedEventQueue, LinkedBlockingQueue<String> unExpectedEventQueue, String expectedId, String expectedStr, String unExpectedStr)4509     public static void verifySingleAvailabilityCbsReceived(
4510             LinkedBlockingQueue<String> expectedEventQueue,
4511             LinkedBlockingQueue<String> unExpectedEventQueue, String expectedId,
4512             String expectedStr, String unExpectedStr) throws Exception {
4513         String candidateId = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4514                 java.util.concurrent.TimeUnit.MILLISECONDS);
4515         assertNotNull("No " + expectedStr + " notice for expected ID " + expectedId, candidateId);
4516         assertTrue("Received " + expectedStr + " notice for wrong ID, " + "expected "
4517                 + expectedId + ", got " + candidateId, expectedId.equals(candidateId));
4518         assertTrue("Received >  1 " + expectedStr + " callback for id " + expectedId,
4519                 expectedEventQueue.size() == 0);
4520         assertTrue(unExpectedStr + " events received unexpectedly",
4521                 unExpectedEventQueue.size() == 0);
4522     }
4523 
verifyAvailabilityCbsReceived(HashSet<T> expectedCameras, LinkedBlockingQueue<T> expectedEventQueue, LinkedBlockingQueue<T> unExpectedEventQueue, boolean available)4524     public static <T> void verifyAvailabilityCbsReceived(HashSet<T> expectedCameras,
4525             LinkedBlockingQueue<T> expectedEventQueue, LinkedBlockingQueue<T> unExpectedEventQueue,
4526             boolean available) throws Exception {
4527         while (expectedCameras.size() > 0) {
4528             T id = expectedEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4529                     java.util.concurrent.TimeUnit.MILLISECONDS);
4530             assertTrue("Did not receive initial " + (available ? "available" : "unavailable")
4531                     + " notices for some cameras", id != null);
4532             assertTrue("Received initial " + (available ? "available" : "unavailable")
4533                     + " notice for wrong camera " + id, expectedCameras.contains(id));
4534             expectedCameras.remove(id);
4535         }
4536         // Verify no unexpected unavailable/available cameras were reported
4537         if (unExpectedEventQueue != null) {
4538             assertTrue("Received unexpected initial "
4539                     + (available ? "unavailable" : "available"),
4540                     unExpectedEventQueue.size() == 0);
4541         }
4542     }
4543 
4544     /**
4545      * This function polls on the event queue to get unavailable physical camera IDs belonging
4546      * to a particular logical camera. The event queue is drained before the function returns.
4547      *
4548      * @param queue The event queue capturing unavailable physical cameras
4549      * @param cameraId The logical camera ID
4550      *
4551      * @return The currently unavailable physical cameras
4552      */
getUnavailablePhysicalCamerasAndDrain( LinkedBlockingQueue<Pair<String, String>> queue, String cameraId)4553     private static Set<String> getUnavailablePhysicalCamerasAndDrain(
4554             LinkedBlockingQueue<Pair<String, String>> queue, String cameraId) throws Exception {
4555         Set<String> unavailablePhysicalCameras = new HashSet<String>();
4556 
4557         while (true) {
4558             Pair<String, String> unavailableIdCombo = queue.poll(
4559                     AVAILABILITY_TIMEOUT_MS, java.util.concurrent.TimeUnit.MILLISECONDS);
4560             if (unavailableIdCombo == null) {
4561                 // No more entries in the queue. Break out of the loop and return.
4562                 break;
4563             }
4564 
4565             if (cameraId.equals(unavailableIdCombo.first)) {
4566                 unavailablePhysicalCameras.add(unavailableIdCombo.second);
4567             }
4568         }
4569 
4570         return unavailablePhysicalCameras;
4571     }
4572 
4573     /**
4574      * Get the unavailable physical cameras based on onPhysicalCameraUnavailable callback.
4575      */
getUnavailablePhysicalCameras(CameraManager manager, Handler handler)4576     public static Set<Pair<String, String>> getUnavailablePhysicalCameras(CameraManager manager,
4577             Handler handler) throws Exception {
4578         final Set<Pair<String, String>> ret = new HashSet<>();
4579         final ConditionVariable cv = new ConditionVariable();
4580 
4581         CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
4582             @Override
4583             public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
4584                 synchronized (ret) {
4585                     ret.add(new Pair<String, String>(cameraId, physicalCameraId));
4586                 }
4587                 cv.open();
4588             }
4589         };
4590         manager.registerAvailabilityCallback(ac, handler);
4591 
4592         // Wait for next physical camera availability callback
4593         while (cv.block(AVAILABILITY_TIMEOUT_MS)) {
4594             // cv.block() returns true when open() is called
4595             // false on timeout.
4596             cv.close();
4597         }
4598 
4599         manager.unregisterAvailabilityCallback(ac);
4600 
4601         synchronized (ret) {
4602             return ret;
4603         }
4604     }
4605 
testPhysicalCameraAvailabilityConsistencyHelper( String[] cameraIds, CameraManager manager, Handler handler, boolean expectInitialCallbackAfterOpen)4606     public static void testPhysicalCameraAvailabilityConsistencyHelper(
4607             String[] cameraIds, CameraManager manager,
4608             Handler handler, boolean expectInitialCallbackAfterOpen) throws Throwable {
4609         final LinkedBlockingQueue<String> availableEventQueue = new LinkedBlockingQueue<>();
4610         final LinkedBlockingQueue<String> unavailableEventQueue = new LinkedBlockingQueue<>();
4611         final LinkedBlockingQueue<Pair<String, String>> unavailablePhysicalCamEventQueue =
4612                 new LinkedBlockingQueue<>();
4613         CameraManager.AvailabilityCallback ac = new CameraManager.AvailabilityCallback() {
4614             @Override
4615             public void onCameraAvailable(String cameraId) {
4616                 super.onCameraAvailable(cameraId);
4617                 availableEventQueue.offer(cameraId);
4618             }
4619 
4620             @Override
4621             public void onCameraUnavailable(String cameraId) {
4622                 super.onCameraUnavailable(cameraId);
4623                 unavailableEventQueue.offer(cameraId);
4624             }
4625 
4626             @Override
4627             public void onPhysicalCameraAvailable(String cameraId, String physicalCameraId) {
4628                 super.onPhysicalCameraAvailable(cameraId, physicalCameraId);
4629                 unavailablePhysicalCamEventQueue.remove(new Pair<>(cameraId, physicalCameraId));
4630             }
4631 
4632             @Override
4633             public void onPhysicalCameraUnavailable(String cameraId, String physicalCameraId) {
4634                 super.onPhysicalCameraUnavailable(cameraId, physicalCameraId);
4635                 unavailablePhysicalCamEventQueue.offer(new Pair<>(cameraId, physicalCameraId));
4636             }
4637         };
4638 
4639         String[] cameras = cameraIds;
4640         if (cameras.length == 0) {
4641             Log.i(TAG, "Skipping testPhysicalCameraAvailabilityConsistency, no cameras");
4642             return;
4643         }
4644 
4645         for (String cameraId : cameras) {
4646             CameraCharacteristics ch = manager.getCameraCharacteristics(cameraId);
4647             StaticMetadata staticInfo = new StaticMetadata(ch);
4648             if (!staticInfo.isLogicalMultiCamera()) {
4649                 // Test is only applicable for logical multi-camera.
4650                 continue;
4651             }
4652 
4653             // Get initial physical unavailable callbacks without opening camera
4654             manager.registerAvailabilityCallback(ac, handler);
4655             Set<String> unavailablePhysicalCameras = getUnavailablePhysicalCamerasAndDrain(
4656                     unavailablePhysicalCamEventQueue, cameraId);
4657 
4658             // Open camera
4659             MockStateCallback mockListener = MockStateCallback.mock();
4660             BlockingStateCallback cameraListener = new BlockingStateCallback(mockListener);
4661             manager.openCamera(cameraId, cameraListener, handler);
4662             // Block until opened
4663             cameraListener.waitForState(BlockingStateCallback.STATE_OPENED,
4664                     CameraTestUtils.CAMERA_IDLE_TIMEOUT_MS);
4665             // Then verify only open happened, and get the camera handle
4666             CameraDevice camera = CameraTestUtils.verifyCameraStateOpened(cameraId, mockListener);
4667 
4668             // The camera should be in available->unavailable state.
4669             String candidateUnavailableId = unavailableEventQueue.poll(AVAILABILITY_TIMEOUT_MS,
4670                     java.util.concurrent.TimeUnit.MILLISECONDS);
4671             assertNotNull("No unavailable notice for expected ID " + cameraId,
4672                     candidateUnavailableId);
4673             assertTrue("Received unavailable notice for wrong ID, "
4674                     + "expected " + cameraId + ", got " + candidateUnavailableId,
4675                     cameraId.equals(candidateUnavailableId));
4676             assertTrue("Received >  1 unavailable callback for id " + cameraId,
4677                     unavailableEventQueue.size() == 0);
4678             availableEventQueue.clear();
4679             unavailableEventQueue.clear();
4680 
4681             manager.unregisterAvailabilityCallback(ac);
4682             // Get physical unavailable callbacks while camera is open
4683             manager.registerAvailabilityCallback(ac, handler);
4684             HashSet<String> expectedAvailableCameras = new HashSet<String>(Arrays.asList(cameras));
4685             expectedAvailableCameras.remove(cameraId);
4686             HashSet<String> expectedUnavailableCameras =
4687                     new HashSet<String>(Arrays.asList(cameraId));
4688             CameraTestUtils.verifyAvailabilityCbsReceived(expectedAvailableCameras,
4689                     availableEventQueue, null, /*available*/ true);
4690             CameraTestUtils.verifyAvailabilityCbsReceived(expectedUnavailableCameras,
4691                     unavailableEventQueue, null, /*available*/ false);
4692             Set<String> unavailablePhysicalCamerasWhileOpen = getUnavailablePhysicalCamerasAndDrain(
4693                     unavailablePhysicalCamEventQueue, cameraId);
4694             if (expectInitialCallbackAfterOpen) {
4695                 assertTrue("The unavailable physical cameras must be the same between before open "
4696                         + unavailablePhysicalCameras.toString()  + " and after open "
4697                         + unavailablePhysicalCamerasWhileOpen.toString(),
4698                         unavailablePhysicalCameras.equals(unavailablePhysicalCamerasWhileOpen));
4699             } else {
4700                 assertTrue("The physical camera unavailability callback must not be called when "
4701                         + "the logical camera is open",
4702                         unavailablePhysicalCamerasWhileOpen.isEmpty());
4703             }
4704 
4705             // Close camera device
4706             camera.close();
4707             cameraListener.waitForState(BlockingStateCallback.STATE_CLOSED,
4708                     CameraTestUtils.CAMERA_CLOSE_TIMEOUT_MS);
4709             CameraTestUtils.verifySingleAvailabilityCbsReceived(availableEventQueue,
4710                     unavailableEventQueue, cameraId, "availability", "Unavailability");
4711 
4712             // Get physical unavailable callbacks after opening and closing camera
4713             Set<String> unavailablePhysicalCamerasAfterClose =
4714                     getUnavailablePhysicalCamerasAndDrain(
4715                             unavailablePhysicalCamEventQueue, cameraId);
4716 
4717             assertTrue("The unavailable physical cameras must be the same between before open "
4718                     + unavailablePhysicalCameras.toString()  + " and after close "
4719                     + unavailablePhysicalCamerasAfterClose.toString(),
4720                     unavailablePhysicalCameras.equals(unavailablePhysicalCamerasAfterClose));
4721 
4722             manager.unregisterAvailabilityCallback(ac);
4723         }
4724     }
4725 
4726     /**
4727      * Simple holder for resolutions to use for different camera outputs and size limits.
4728      */
4729     public static class MaxStreamSizes {
4730         // Format shorthands
4731         static final int PRIV = ImageFormat.PRIVATE;
4732         static final int JPEG = ImageFormat.JPEG;
4733         static final int YUV  = ImageFormat.YUV_420_888;
4734         static final int RAW  = ImageFormat.RAW_SENSOR;
4735         static final int Y8   = ImageFormat.Y8;
4736         static final int HEIC = ImageFormat.HEIC;
4737         static final int JPEG_R = ImageFormat.JPEG_R;
4738 
4739         // Max resolution output indices
4740         static final int PREVIEW = 0;
4741         static final int RECORD  = 1;
4742         static final int MAXIMUM = 2;
4743         static final int VGA = 3;
4744         static final int VGA_FULL_FOV = 4;
4745         static final int MAX_30FPS = 5;
4746         static final int S720P = 6;
4747         static final int S1440P_4_3 = 7; // 4:3
4748         static final int MAX_RES = 8;
4749         static final int S1080P = 9;
4750         static final int S1080P_4_3 = 10;
4751         static final int S1440P_16_9 = 11;
4752         static final int XVGA = 12;
4753         static final int MAXIMUM_16_9 = 13;
4754         static final int MAXIMUM_4_3 = 14;
4755         static final int UHD = 15;
4756         static final int RESOLUTION_COUNT = 16;
4757 
4758         // Max resolution input indices
4759         static final int INPUT_MAXIMUM = 0;
4760         static final int INPUT_MAX_RES = 1;
4761         static final int INPUT_RESOLUTION_COUNT = 2;
4762 
4763         static final Size S_1280_720 = new Size(1280, 720);   // 16:9
4764 
4765         static final Size S_1024_768 = new Size(1024, 768);   // 4:3
4766 
4767         static final Size S_1920_1080 = new Size(1920, 1080); // 16:9
4768 
4769         static final Size S_1440_1080 = new Size(1440, 1080); // 4:3
4770 
4771         static final Size S_2560_1440 = new Size(2560, 1440); // 16:9
4772 
4773         static final Size S_1920_1440 = new Size(1920, 1440); // 4:3
4774 
4775         static final Size S_3840_2160 = new Size(3840, 2160); // 16:9
4776 
4777         static final long FRAME_DURATION_30FPS_NSEC = (long) 1e9 / 30;
4778 
4779         static final int USE_CASE_PREVIEW =
4780                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW;
4781         static final int USE_CASE_VIDEO_RECORD =
4782                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD;
4783         static final int USE_CASE_STILL_CAPTURE =
4784                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE;
4785         static final int USE_CASE_PREVIEW_VIDEO_STILL =
4786                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL;
4787         static final int USE_CASE_VIDEO_CALL =
4788                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL;
4789         static final int USE_CASE_CROPPED_RAW =
4790                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW;
4791 
4792         // Note: This must match the required stream combinations defined in
4793         // CameraCharacteristcs#INFO_SESSION_CONFIGURATION_QUERY_VERSION.
4794         private static final int[][] QUERY_COMBINATIONS = {
4795             {PRIV, S1080P},
4796             {PRIV, S720P},
4797             {PRIV, S1080P,  JPEG, MAXIMUM_16_9},
4798             {PRIV, S1080P,  JPEG, UHD},
4799             {PRIV, S1080P,  JPEG, S1440P_16_9},
4800             {PRIV, S1080P,  JPEG, S1080P},
4801             {PRIV, S1080P,  PRIV, UHD},
4802             {PRIV, S720P,   JPEG, MAXIMUM_16_9},
4803             {PRIV, S720P,   JPEG, UHD},
4804             {PRIV, S720P,   JPEG, S1080P},
4805             {PRIV, XVGA,    JPEG, MAXIMUM_4_3},
4806             {PRIV, S1080P_4_3, JPEG, MAXIMUM_4_3},
4807             {PRIV, S1080P,  JPEG_R, MAXIMUM_16_9},
4808             {PRIV, S1080P,  JPEG_R, UHD},
4809             {PRIV, S1080P,  JPEG_R, S1440P_16_9},
4810             {PRIV, S1080P,  JPEG_R, S1080P},
4811             {PRIV, S720P,   JPEG_R, MAXIMUM_16_9},
4812             {PRIV, S720P,   JPEG_R, UHD},
4813             {PRIV, S720P,   JPEG_R, S1080P},
4814             {PRIV, XVGA,    JPEG_R, MAXIMUM_4_3},
4815             {PRIV, S1080P_4_3, JPEG_R, MAXIMUM_4_3},
4816         };
4817 
4818         private final Size[] mMaxPrivSizes = new Size[RESOLUTION_COUNT];
4819         private final Size[] mMaxJpegSizes = new Size[RESOLUTION_COUNT];
4820         private final Size[] mMaxJpegRSizes = new Size[RESOLUTION_COUNT];
4821         private final Size[] mMaxYuvSizes = new Size[RESOLUTION_COUNT];
4822         private final Size[] mMaxY8Sizes = new Size[RESOLUTION_COUNT];
4823         private final Size[] mMaxHeicSizes = new Size[RESOLUTION_COUNT];
4824         private final Size mMaxRawSize;
4825         private final Size mMaxResolutionRawSize;
4826 
4827         private final Size[] mMaxPrivInputSizes = new Size[INPUT_RESOLUTION_COUNT];
4828         private final Size[] mMaxYuvInputSizes = new Size[INPUT_RESOLUTION_COUNT];
4829         private final Size mMaxInputY8Size;
4830         private int[][] mQueryableCombinations;
4831 
MaxStreamSizes(StaticMetadata sm, String cameraId, Context context)4832         public MaxStreamSizes(StaticMetadata sm, String cameraId, Context context) {
4833             this(sm, cameraId, context, /*matchSize*/false);
4834         }
4835 
MaxStreamSizes(StaticMetadata sm, String cameraId, Context context, boolean matchSize)4836         public MaxStreamSizes(StaticMetadata sm, String cameraId, Context context,
4837                 boolean matchSize) {
4838             Size[] privSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.PRIVATE,
4839                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4840             Size[] yuvSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.YUV_420_888,
4841                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4842 
4843             Size[] y8Sizes = sm.getAvailableSizesForFormatChecked(ImageFormat.Y8,
4844                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4845             Size[] jpegSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.JPEG,
4846                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4847             Size[] jpegRSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.JPEG_R,
4848                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4849             Size[] rawSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.RAW_SENSOR,
4850                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4851             Size[] heicSizes = sm.getAvailableSizesForFormatChecked(ImageFormat.HEIC,
4852                     StaticMetadata.StreamDirection.Output, /*fastSizes*/true, /*slowSizes*/false);
4853 
4854             Size maxPreviewSize = getMaxPreviewSize(context, cameraId);
4855 
4856             StreamConfigurationMap configs = sm.getCharacteristics().get(
4857                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
4858 
4859             StreamConfigurationMap maxResConfigs = sm.getCharacteristics().get(
4860                     CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
4861 
4862             mMaxRawSize = (rawSizes.length != 0) ? CameraTestUtils.getMaxSize(rawSizes) : null;
4863             mMaxResolutionRawSize = sm.isUltraHighResolutionSensor()
4864                     ? CameraTestUtils.getMaxSize(
4865                             maxResConfigs.getOutputSizes(ImageFormat.RAW_SENSOR))
4866                     : null;
4867 
4868             if (sm.isColorOutputSupported()) {
4869                 // We don't include JPEG sizes capped at PREVIEW since for MPC 12+ devices, JPEG
4870                 // sizes are necessarily > 1080p. Also the mandatory stream combinations have no
4871                 // JPEG streams capped at PREVIEW.
4872                 mMaxPrivSizes[PREVIEW] = CameraTestUtils.getMaxSizeWithBound(privSizes,
4873                         maxPreviewSize);
4874                 mMaxYuvSizes[PREVIEW]  = CameraTestUtils.getMaxSizeWithBound(yuvSizes,
4875                         maxPreviewSize);
4876 
4877                 if (sm.isExternalCamera()) {
4878                     mMaxPrivSizes[RECORD] = getMaxExternalRecordingSize(cameraId, configs);
4879                     mMaxYuvSizes[RECORD]  = getMaxExternalRecordingSize(cameraId, configs);
4880                     mMaxJpegSizes[RECORD] = getMaxExternalRecordingSize(cameraId, configs);
4881                 } else {
4882                     mMaxPrivSizes[RECORD] = getMaxRecordingSize(cameraId);
4883                     mMaxYuvSizes[RECORD]  = getMaxRecordingSize(cameraId);
4884                     mMaxJpegSizes[RECORD] = getMaxRecordingSize(cameraId);
4885                 }
4886 
4887                 if (sm.isUltraHighResolutionSensor()) {
4888                     mMaxYuvSizes[MAX_RES] = CameraTestUtils.getMaxSize(
4889                             maxResConfigs.getOutputSizes(ImageFormat.YUV_420_888));
4890                     mMaxJpegSizes[MAX_RES] = CameraTestUtils.getMaxSize(
4891                             maxResConfigs.getOutputSizes(ImageFormat.JPEG));
4892                 }
4893 
4894                 mMaxPrivSizes[MAXIMUM] = CameraTestUtils.getMaxSize(privSizes);
4895                 mMaxYuvSizes[MAXIMUM] = CameraTestUtils.getMaxSize(yuvSizes);
4896                 mMaxJpegSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegSizes);
4897 
4898                 float aspectRatio43 = 1.0f * 4 / 3;
4899                 mMaxPrivSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(privSizes, aspectRatio43);
4900                 mMaxYuvSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(yuvSizes, aspectRatio43);
4901                 mMaxJpegSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(jpegSizes, aspectRatio43);
4902 
4903                 float aspectRatio169 = 1.0f * 16 / 9;
4904                 mMaxPrivSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(privSizes, aspectRatio169);
4905                 mMaxYuvSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(yuvSizes, aspectRatio169);
4906                 mMaxJpegSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(jpegSizes, aspectRatio169);
4907 
4908                 // Must always be supported, add unconditionally
4909                 final Size vgaSize = new Size(640, 480);
4910                 mMaxPrivSizes[VGA] = vgaSize;
4911                 mMaxYuvSizes[VGA] = vgaSize;
4912                 mMaxJpegSizes[VGA] = vgaSize;
4913 
4914                 final Size s1440p43Size = S_1920_1440;
4915                 mMaxPrivSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4916                         configs.getOutputSizes(ImageFormat.PRIVATE), s1440p43Size);
4917                 mMaxYuvSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4918                         configs.getOutputSizes(ImageFormat.YUV_420_888), s1440p43Size);
4919                 mMaxJpegSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4920                         configs.getOutputSizes(ImageFormat.JPEG), s1440p43Size);
4921 
4922                 final Size s720pSize = S_1280_720;
4923                 final Size xvgaSize = S_1024_768;
4924                 final Size s1080pSize = S_1920_1080;
4925                 final Size s1080p43Size = S_1440_1080;
4926                 final Size s1440p169Size = S_2560_1440;
4927                 final Size uhdSize = S_3840_2160;
4928                 if (!matchSize) {
4929                     // Skip JPEG for 720p, XVGA, and S1080P_4_3, because those resolutions
4930                     // are not mandatory JPEG resolutions, and they could be filtered out
4931                     // for MediaPerformance class.
4932                     mMaxPrivSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4933                             configs.getOutputSizes(ImageFormat.PRIVATE), s720pSize);
4934                     mMaxYuvSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
4935                             configs.getOutputSizes(ImageFormat.YUV_420_888), s720pSize);
4936 
4937                     mMaxPrivSizes[XVGA] = CameraTestUtils.getMaxSizeWithBound(
4938                             configs.getOutputSizes(ImageFormat.PRIVATE), xvgaSize);
4939                     mMaxYuvSizes[XVGA] = CameraTestUtils.getMaxSizeWithBound(
4940                             configs.getOutputSizes(ImageFormat.YUV_420_888), xvgaSize);
4941 
4942                     mMaxPrivSizes[S1080P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4943                             configs.getOutputSizes(ImageFormat.PRIVATE), s1080p43Size);
4944                     mMaxYuvSizes[S1080P_4_3] = CameraTestUtils.getMaxSizeWithBound(
4945                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1080p43Size);
4946 
4947                     mMaxPrivSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4948                             configs.getOutputSizes(ImageFormat.PRIVATE), s1080pSize);
4949                     mMaxYuvSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4950                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1080pSize);
4951                     mMaxJpegSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
4952                             configs.getOutputSizes(ImageFormat.JPEG), s1080pSize);
4953 
4954                     mMaxPrivSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4955                             configs.getOutputSizes(ImageFormat.PRIVATE), s1440p169Size);
4956                     mMaxYuvSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4957                             configs.getOutputSizes(ImageFormat.YUV_420_888), s1440p169Size);
4958                     mMaxJpegSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
4959                             configs.getOutputSizes(ImageFormat.JPEG), s1440p169Size);
4960 
4961                     mMaxPrivSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4962                             configs.getOutputSizes(ImageFormat.PRIVATE), uhdSize);
4963                     mMaxYuvSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4964                             configs.getOutputSizes(ImageFormat.YUV_420_888), uhdSize);
4965                     mMaxJpegSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
4966                             configs.getOutputSizes(ImageFormat.JPEG), uhdSize);
4967                 } else {
4968                     mMaxPrivSizes[S720P] = s720pSize;
4969                     mMaxYuvSizes[S720P] = s720pSize;
4970                     mMaxJpegSizes[S720P] = s720pSize;
4971 
4972                     mMaxPrivSizes[XVGA] = xvgaSize;
4973                     mMaxYuvSizes[XVGA] = xvgaSize;
4974                     mMaxJpegSizes[XVGA] = xvgaSize;
4975 
4976                     mMaxPrivSizes[S1080P] = s1080pSize;
4977                     mMaxYuvSizes[S1080P] = s1080pSize;
4978                     mMaxJpegSizes[S1080P] = s1080pSize;
4979 
4980                     mMaxPrivSizes[S1080P_4_3] = s1080p43Size;
4981                     mMaxYuvSizes[S1080P_4_3] = s1080p43Size;
4982                     mMaxJpegSizes[S1080P_4_3] = s1080p43Size;
4983 
4984                     mMaxPrivSizes[S1440P_16_9] = s1440p169Size;
4985                     mMaxYuvSizes[S1440P_16_9] = s1440p169Size;
4986                     mMaxJpegSizes[S1440P_16_9] = s1440p169Size;
4987 
4988                     mMaxPrivSizes[UHD] = uhdSize;
4989                     mMaxYuvSizes[UHD] = uhdSize;
4990                     mMaxJpegSizes[UHD] = uhdSize;
4991                 }
4992                 if (sm.isJpegRSupported()) {
4993                     mMaxJpegRSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegRSizes);
4994                     mMaxJpegRSizes[MAXIMUM_4_3] = CameraTestUtils.getMaxSize(
4995                             jpegRSizes, aspectRatio43);
4996                     mMaxJpegRSizes[MAXIMUM_16_9] = CameraTestUtils.getMaxSize(
4997                             jpegRSizes, aspectRatio169);
4998                     if (!matchSize) {
4999                         mMaxJpegRSizes[S1080P] = CameraTestUtils.getMaxSizeWithBound(
5000                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1080pSize);
5001                         mMaxJpegRSizes[S1440P_16_9] = CameraTestUtils.getMaxSizeWithBound(
5002                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1440p169Size);
5003                         mMaxJpegRSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
5004                                 configs.getOutputSizes(ImageFormat.JPEG_R), s1440p43Size);
5005                         mMaxJpegRSizes[UHD] = CameraTestUtils.getMaxSizeWithBound(
5006                                 configs.getOutputSizes(ImageFormat.JPEG_R), uhdSize);
5007                     } else {
5008                         mMaxJpegRSizes[S720P] = s720pSize;
5009                         mMaxJpegRSizes[XVGA] = xvgaSize;
5010                         mMaxJpegRSizes[S1080P] = s1080pSize;
5011                         mMaxJpegRSizes[S1080P_4_3] = s1080p43Size;
5012                         mMaxJpegRSizes[S1440P_16_9] = s1440p169Size;
5013                         mMaxJpegRSizes[UHD] = uhdSize;
5014                     }
5015                     mQueryableCombinations = QUERY_COMBINATIONS;
5016                 } else {
5017                     // JPEG_R is not supported. Remove all combinations containing JPEG_R
5018                     List<int[]> combinationsMinusJpegR = new ArrayList<int[]>();
5019                     for (int i = 0; i < QUERY_COMBINATIONS.length; i++) {
5020                         boolean hasJpegR = false;
5021                         for (int j = 0; j < QUERY_COMBINATIONS[i].length; j += 2) {
5022                             if (QUERY_COMBINATIONS[i][j] == JPEG_R) {
5023                                 hasJpegR = true;
5024                                 break;
5025                             }
5026                         }
5027 
5028                         if (!hasJpegR) {
5029                             combinationsMinusJpegR.add(QUERY_COMBINATIONS[i]);
5030                         }
5031                     }
5032                     mQueryableCombinations = combinationsMinusJpegR.toArray(int[][]::new);
5033                 }
5034 
5035                 if (sm.isMonochromeWithY8()) {
5036                     mMaxY8Sizes[PREVIEW]  = CameraTestUtils.getMaxSizeWithBound(
5037                             y8Sizes, maxPreviewSize);
5038                     if (sm.isExternalCamera()) {
5039                         mMaxY8Sizes[RECORD]  = getMaxExternalRecordingSize(cameraId, configs);
5040                     } else {
5041                         mMaxY8Sizes[RECORD]  = getMaxRecordingSize(cameraId);
5042                     }
5043                     mMaxY8Sizes[MAXIMUM] = CameraTestUtils.getMaxSize(y8Sizes);
5044                     mMaxY8Sizes[VGA] = vgaSize;
5045                     mMaxY8Sizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
5046                             configs.getOutputSizes(ImageFormat.Y8), s720pSize);
5047                     mMaxY8Sizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
5048                             configs.getOutputSizes(ImageFormat.Y8), s1440p43Size);
5049                 }
5050 
5051                 if (sm.isHeicSupported()) {
5052                     mMaxHeicSizes[PREVIEW] = CameraTestUtils.getMaxSizeWithBound(
5053                             heicSizes, maxPreviewSize);
5054                     mMaxHeicSizes[RECORD] = getMaxRecordingSize(cameraId);
5055                     mMaxHeicSizes[MAXIMUM] = CameraTestUtils.getMaxSize(heicSizes);
5056                     mMaxHeicSizes[VGA] = vgaSize;
5057                     mMaxHeicSizes[S720P] = CameraTestUtils.getMaxSizeWithBound(
5058                             configs.getOutputSizes(ImageFormat.HEIC), s720pSize);
5059                     mMaxHeicSizes[S1440P_4_3] = CameraTestUtils.getMaxSizeWithBound(
5060                             configs.getOutputSizes(ImageFormat.HEIC), s1440p43Size);
5061                 }
5062             }
5063             if (sm.isColorOutputSupported() && !sm.isHardwareLevelLegacy()) {
5064                 // VGA resolution, but with aspect ratio matching full res FOV
5065                 float fullFovAspect = mMaxYuvSizes[MAXIMUM].getWidth()
5066                         / (float) mMaxYuvSizes[MAXIMUM].getHeight();
5067                 Size vgaFullFovSize = new Size(640, (int) (640 / fullFovAspect));
5068 
5069                 mMaxPrivSizes[VGA_FULL_FOV] = vgaFullFovSize;
5070                 mMaxYuvSizes[VGA_FULL_FOV] = vgaFullFovSize;
5071                 mMaxJpegSizes[VGA_FULL_FOV] = vgaFullFovSize;
5072                 if (sm.isMonochromeWithY8()) {
5073                     mMaxY8Sizes[VGA_FULL_FOV] = vgaFullFovSize;
5074                 }
5075 
5076                 // Max resolution that runs at 30fps
5077 
5078                 Size maxPriv30fpsSize = null;
5079                 Size maxYuv30fpsSize = null;
5080                 Size maxY830fpsSize = null;
5081                 Size maxJpeg30fpsSize = null;
5082                 Comparator<Size> comparator = new SizeComparator();
5083                 for (Map.Entry<Size, Long> e :
5084                              sm.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.PRIVATE)
5085                              .entrySet()) {
5086                     Size s = e.getKey();
5087                     Long minDuration = e.getValue();
5088                     Log.d(TAG, String.format("Priv Size: %s, duration %d limit %d", s, minDuration,
5089                                 FRAME_DURATION_30FPS_NSEC));
5090                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5091                         if (maxPriv30fpsSize == null
5092                                 || comparator.compare(maxPriv30fpsSize, s) < 0) {
5093                             maxPriv30fpsSize = s;
5094                         }
5095                     }
5096                 }
5097                 assertTrue("No PRIVATE resolution available at 30fps!", maxPriv30fpsSize != null);
5098 
5099                 for (Map.Entry<Size, Long> e :
5100                              sm.getAvailableMinFrameDurationsForFormatChecked(
5101                                      ImageFormat.YUV_420_888)
5102                              .entrySet()) {
5103                     Size s = e.getKey();
5104                     Long minDuration = e.getValue();
5105                     Log.d(TAG, String.format("YUV Size: %s, duration %d limit %d", s, minDuration,
5106                                 FRAME_DURATION_30FPS_NSEC));
5107                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5108                         if (maxYuv30fpsSize == null
5109                                 || comparator.compare(maxYuv30fpsSize, s) < 0) {
5110                             maxYuv30fpsSize = s;
5111                         }
5112                     }
5113                 }
5114                 assertTrue("No YUV_420_888 resolution available at 30fps!",
5115                         maxYuv30fpsSize != null);
5116 
5117                 if (sm.isMonochromeWithY8()) {
5118                     for (Map.Entry<Size, Long> e :
5119                                  sm.getAvailableMinFrameDurationsForFormatChecked(
5120                                          ImageFormat.Y8)
5121                                  .entrySet()) {
5122                         Size s = e.getKey();
5123                         Long minDuration = e.getValue();
5124                         Log.d(TAG, String.format("Y8 Size: %s, duration %d limit %d",
5125                                 s, minDuration, FRAME_DURATION_30FPS_NSEC));
5126                         if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5127                             if (maxY830fpsSize == null
5128                                     || comparator.compare(maxY830fpsSize, s) < 0) {
5129                                 maxY830fpsSize = s;
5130                             }
5131                         }
5132                     }
5133                     assertTrue("No Y8 resolution available at 30fps!", maxY830fpsSize != null);
5134                 }
5135 
5136                 for (Map.Entry<Size, Long> e :
5137                              sm.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.JPEG)
5138                              .entrySet()) {
5139                     Size s = e.getKey();
5140                     Long minDuration = e.getValue();
5141                     Log.d(TAG, String.format("JPEG Size: %s, duration %d limit %d", s, minDuration,
5142                                 FRAME_DURATION_30FPS_NSEC));
5143                     if (minDuration <= FRAME_DURATION_30FPS_NSEC) {
5144                         if (maxJpeg30fpsSize == null
5145                                 || comparator.compare(maxJpeg30fpsSize, s) < 0) {
5146                             maxJpeg30fpsSize = s;
5147                         }
5148                     }
5149                 }
5150                 assertTrue("No JPEG resolution available at 30fps!", maxJpeg30fpsSize != null);
5151 
5152                 mMaxPrivSizes[MAX_30FPS] = maxPriv30fpsSize;
5153                 mMaxYuvSizes[MAX_30FPS] = maxYuv30fpsSize;
5154                 mMaxY8Sizes[MAX_30FPS] = maxY830fpsSize;
5155                 mMaxJpegSizes[MAX_30FPS] = maxJpeg30fpsSize;
5156             }
5157 
5158             Size[] privInputSizes = configs.getInputSizes(ImageFormat.PRIVATE);
5159             mMaxPrivInputSizes[INPUT_MAXIMUM] = privInputSizes != null
5160                     ? CameraTestUtils.getMaxSize(privInputSizes)
5161                     : null;
5162             Size[] maxResPrivInputSizes =
5163                     sm.isUltraHighResolutionSensor()
5164                     ?  maxResConfigs.getInputSizes(ImageFormat.PRIVATE)
5165                     : null;
5166             mMaxPrivInputSizes[INPUT_MAX_RES] = maxResPrivInputSizes != null
5167                     ? CameraTestUtils.getMaxSize(maxResPrivInputSizes)
5168                     : null;
5169 
5170             Size[] yuvInputSizes = configs.getInputSizes(ImageFormat.YUV_420_888);
5171             mMaxYuvInputSizes[INPUT_MAXIMUM] = yuvInputSizes != null
5172                     ? CameraTestUtils.getMaxSize(yuvInputSizes)
5173                     : null;
5174             Size[] maxResYuvInputSizes = sm.isUltraHighResolutionSensor()
5175                     ?  maxResConfigs.getInputSizes(ImageFormat.YUV_420_888)
5176                     : null;
5177             mMaxYuvInputSizes[INPUT_MAX_RES] = maxResYuvInputSizes != null
5178                     ? CameraTestUtils.getMaxSize(maxResYuvInputSizes)
5179                     : null;
5180 
5181             Size[] y8InputSizes = configs.getInputSizes(ImageFormat.Y8);
5182             mMaxInputY8Size = y8InputSizes != null
5183                     ? CameraTestUtils.getMaxSize(y8InputSizes)
5184                     : null;
5185         }
5186 
getOutputSizeForFormat(int format, int resolutionIndex)5187         public final Size getOutputSizeForFormat(int format, int resolutionIndex) {
5188             if (resolutionIndex >= RESOLUTION_COUNT) {
5189                 return new Size(0, 0);
5190             }
5191 
5192             switch (format) {
5193                 case PRIV:
5194                     return mMaxPrivSizes[resolutionIndex];
5195                 case YUV:
5196                     return mMaxYuvSizes[resolutionIndex];
5197                 case JPEG:
5198                     return mMaxJpegSizes[resolutionIndex];
5199                 case JPEG_R:
5200                     return mMaxJpegRSizes[resolutionIndex];
5201                 case Y8:
5202                     return mMaxY8Sizes[resolutionIndex];
5203                 case HEIC:
5204                     return mMaxHeicSizes[resolutionIndex];
5205                 case RAW:
5206                     if (resolutionIndex == MAX_RES) {
5207                         return mMaxResolutionRawSize;
5208                     }
5209                     return mMaxRawSize;
5210                 default:
5211                     return new Size(0, 0);
5212             }
5213         }
5214 
getMaxInputSizeForFormat(int format, int resolutionIndex)5215         public final Size getMaxInputSizeForFormat(int format, int resolutionIndex) {
5216             int inputResolutionIndex = getInputResolutionIndex(resolutionIndex);
5217             if (inputResolutionIndex >= INPUT_RESOLUTION_COUNT || inputResolutionIndex == -1) {
5218                 return new Size(0, 0);
5219             }
5220             switch (format) {
5221                 case PRIV:
5222                     return mMaxPrivInputSizes[inputResolutionIndex];
5223                 case YUV:
5224                     return mMaxYuvInputSizes[inputResolutionIndex];
5225                 case Y8:
5226                     return mMaxInputY8Size;
5227                 case RAW:
5228                     return mMaxResolutionRawSize;
5229                 default:
5230                     return new Size(0, 0);
5231             }
5232         }
5233 
combinationToString(int[] combination)5234         public static String combinationToString(int[] combination) {
5235             return combinationToString(combination, /*useCaseSpecified*/ false);
5236         }
5237 
combinationToString(int[] combination, boolean useCaseSpecified)5238         public static String combinationToString(int[] combination, boolean useCaseSpecified) {
5239             StringBuilder b = new StringBuilder("{ ");
5240             int i = 0;
5241             while (i < combination.length) {
5242                 int format = combination[i];
5243                 int sizeLimit = combination[i + 1];
5244 
5245                 appendFormatSize(b, format, sizeLimit);
5246                 if (useCaseSpecified) {
5247                     int streamUseCase = combination[i + 2];
5248                     appendStreamUseCase(b, streamUseCase);
5249                     i += 1;
5250                 }
5251                 i += 2;
5252                 b.append(" ");
5253             }
5254             b.append("}");
5255             return b.toString();
5256         }
5257 
reprocessCombinationToString(int[] reprocessCombination)5258         public static String reprocessCombinationToString(int[] reprocessCombination) {
5259             // reprocessConfig[0..1] is the input configuration
5260             StringBuilder b = new StringBuilder("Input: ");
5261             appendFormatSize(b, reprocessCombination[0], reprocessCombination[1]);
5262 
5263             // reprocessCombnation[0..1] is also output combination to be captured as reprocess
5264             // input.
5265             b.append(", Outputs: { ");
5266             for (int i = 0; i < reprocessCombination.length; i += 2) {
5267                 int format = reprocessCombination[i];
5268                 int sizeLimit = reprocessCombination[i + 1];
5269 
5270                 appendFormatSize(b, format, sizeLimit);
5271                 b.append(" ");
5272             }
5273             b.append("}");
5274             return b.toString();
5275         }
5276 
getQueryableCombinations()5277         public final int[][] getQueryableCombinations() {
5278             return mQueryableCombinations;
5279         }
5280 
getInputResolutionIndex(int resolutionIndex)5281         int getInputResolutionIndex(int resolutionIndex) {
5282             switch (resolutionIndex) {
5283                 case MAXIMUM:
5284                     return INPUT_MAXIMUM;
5285                 case MAX_RES:
5286                     return INPUT_MAX_RES;
5287             }
5288             return -1;
5289         }
5290 
appendFormatSize(StringBuilder b, int format, int size)5291         private static void appendFormatSize(StringBuilder b, int format, int size) {
5292             switch (format) {
5293                 case PRIV:
5294                     b.append("[PRIV, ");
5295                     break;
5296                 case JPEG:
5297                     b.append("[JPEG, ");
5298                     break;
5299                 case JPEG_R:
5300                     b.append("[JPEG_R, ");
5301                     break;
5302                 case YUV:
5303                     b.append("[YUV, ");
5304                     break;
5305                 case Y8:
5306                     b.append("[Y8, ");
5307                     break;
5308                 case RAW:
5309                     b.append("[RAW, ");
5310                     break;
5311                 default:
5312                     b.append("[UNK, ");
5313                     break;
5314             }
5315 
5316             switch (size) {
5317                 case PREVIEW:
5318                     b.append("PREVIEW]");
5319                     break;
5320                 case RECORD:
5321                     b.append("RECORD]");
5322                     break;
5323                 case MAXIMUM:
5324                     b.append("MAXIMUM]");
5325                     break;
5326                 case VGA:
5327                     b.append("VGA]");
5328                     break;
5329                 case VGA_FULL_FOV:
5330                     b.append("VGA_FULL_FOV]");
5331                     break;
5332                 case MAX_30FPS:
5333                     b.append("MAX_30FPS]");
5334                     break;
5335                 case S720P:
5336                     b.append("S720P]");
5337                     break;
5338                 case S1440P_4_3:
5339                     b.append("S1440P_4_3]");
5340                     break;
5341                 case MAX_RES:
5342                     b.append("MAX_RES]");
5343                     break;
5344                 case S1080P:
5345                     b.append("S1080P]");
5346                     break;
5347                 case S1080P_4_3:
5348                     b.append("S1080P_4_3]");
5349                     break;
5350                 case S1440P_16_9:
5351                     b.append("S440P_16_9]");
5352                     break;
5353                 case XVGA:
5354                     b.append("XVGA]");
5355                     break;
5356                 case MAXIMUM_16_9:
5357                     b.append("MAXIMUM_16_9]");
5358                     break;
5359                 case MAXIMUM_4_3:
5360                     b.append("MAXIMUM_4_3]");
5361                     break;
5362                 case UHD:
5363                     b.append("UHD]");
5364                     break;
5365                 default:
5366                     b.append("UNK]");
5367                     break;
5368             }
5369         }
5370 
appendStreamUseCase(StringBuilder b, int streamUseCase)5371         private static void appendStreamUseCase(StringBuilder b, int streamUseCase) {
5372             b.append(", ");
5373             switch (streamUseCase) {
5374                 case USE_CASE_PREVIEW:
5375                     b.append("USE_CASE_PREVIEW");
5376                     break;
5377                 case USE_CASE_PREVIEW_VIDEO_STILL:
5378                     b.append("USE_CASE_PREVIEW_VIDEO_STILL");
5379                     break;
5380                 case USE_CASE_STILL_CAPTURE:
5381                     b.append("USE_CASE_STILL_CAPTURE");
5382                     break;
5383                 case USE_CASE_VIDEO_CALL:
5384                     b.append("USE_CASE_VIDEO_CALL");
5385                     break;
5386                 case USE_CASE_VIDEO_RECORD:
5387                     b.append("USE_CASE_VIDEO_RECORD");
5388                     break;
5389                 case USE_CASE_CROPPED_RAW:
5390                     b.append("USE_CASE_CROPPED_RAW");
5391                     break;
5392                 default:
5393                     b.append("UNK STREAM_USE_CASE");
5394                     break;
5395             }
5396             b.append(";");
5397         }
5398     }
5399 
getMaxRecordingSize(String cameraId)5400     private static Size getMaxRecordingSize(String cameraId) {
5401         int id = Integer.valueOf(cameraId);
5402 
5403         int quality =
5404                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_2160P)
5405                     ?  CamcorderProfile.QUALITY_2160P :
5406                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_1080P)
5407                     ?  CamcorderProfile.QUALITY_1080P :
5408                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_720P)
5409                     ?  CamcorderProfile.QUALITY_720P :
5410                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_480P)
5411                     ?  CamcorderProfile.QUALITY_480P :
5412                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_QVGA)
5413                     ?  CamcorderProfile.QUALITY_QVGA :
5414                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_CIF)
5415                     ?  CamcorderProfile.QUALITY_CIF :
5416                 CamcorderProfile.hasProfile(id, CamcorderProfile.QUALITY_QCIF)
5417                     ?  CamcorderProfile.QUALITY_QCIF :
5418                     -1;
5419 
5420         assertTrue("No recording supported for camera id " + cameraId, quality != -1);
5421 
5422         CamcorderProfile maxProfile = CamcorderProfile.get(id, quality);
5423         return new Size(maxProfile.videoFrameWidth, maxProfile.videoFrameHeight);
5424     }
5425 
getMaxExternalRecordingSize( String cameraId, StreamConfigurationMap config)5426     private static Size getMaxExternalRecordingSize(
5427             String cameraId, StreamConfigurationMap config) {
5428         final Size fullHD = new Size(1920, 1080);
5429 
5430         Size[] videoSizeArr = config.getOutputSizes(android.media.MediaRecorder.class);
5431         List<Size> sizes = new ArrayList<Size>();
5432         for (Size sz: videoSizeArr) {
5433             if (sz.getWidth() <= fullHD.getWidth() && sz.getHeight() <= fullHD.getHeight()) {
5434                 sizes.add(sz);
5435             }
5436         }
5437         List<Size> videoSizes = getAscendingOrderSizes(sizes, /*ascending*/false);
5438         for (Size sz : videoSizes) {
5439             long minFrameDuration = config.getOutputMinFrameDuration(
5440                     android.media.MediaRecorder.class, sz);
5441             // Give some margin for rounding error
5442             if (minFrameDuration < (1e9 / 29.9)) {
5443                 Log.i(TAG, "External camera " + cameraId + " has max video size:" + sz);
5444                 return sz;
5445             }
5446         }
5447         fail("Camera " + cameraId + " does not support any 30fps video output");
5448         return fullHD; // doesn't matter what size is returned here
5449     }
5450 
getMaxPreviewSize(Context context, String cameraId)5451     private static Size getMaxPreviewSize(Context context, String cameraId) {
5452         try {
5453             WindowManager windowManager = context.getSystemService(WindowManager.class);
5454             assertNotNull("Could not find WindowManager service.", windowManager);
5455 
5456             WindowMetrics windowMetrics = windowManager.getCurrentWindowMetrics();
5457             Rect windowBounds = windowMetrics.getBounds();
5458 
5459             int width = windowBounds.width();
5460             int height = windowBounds.height();
5461 
5462             if (height > width) {
5463                 height = width;
5464                 width = windowBounds.height();
5465             }
5466 
5467             CameraManager camMgr = context.getSystemService(CameraManager.class);
5468             List<Size> orderedPreviewSizes = CameraTestUtils.getSupportedPreviewSizes(
5469                     cameraId, camMgr, PREVIEW_SIZE_BOUND);
5470 
5471             if (orderedPreviewSizes != null) {
5472                 for (Size size : orderedPreviewSizes) {
5473                     if (width >= size.getWidth()
5474                             && height >= size.getHeight()) {
5475                         return size;
5476                     }
5477                 }
5478             }
5479         } catch (Exception e) {
5480             Log.e(TAG, "getMaxPreviewSize Failed. " + e);
5481         }
5482         return PREVIEW_SIZE_BOUND;
5483     }
5484 
5485     /**
5486      * Use the external feature flag to check if external camera is supported.
5487      * If it is, iterate through the camera ids under test to verify that an
5488      * external camera is connected.
5489      *
5490      * @param cameraIds list of camera ids under test
5491      * @param packageManager package manager instance for checking feature flag
5492      * @param cameraManager camera manager for getting camera characteristics
5493      *
5494      */
verifyExternalCameraConnected(String[] cameraIds, PackageManager packageManager, CameraManager cameraManager)5495     public static void verifyExternalCameraConnected(String[] cameraIds,
5496             PackageManager packageManager, CameraManager cameraManager) throws Exception {
5497         if (packageManager.hasSystemFeature(PackageManager.FEATURE_CAMERA_EXTERNAL)) {
5498             boolean externalCameraConnected = false;
5499             for (int i = 0; i < cameraIds.length; i++) {
5500                 CameraCharacteristics props =
5501                         cameraManager.getCameraCharacteristics(cameraIds[i]);
5502                 assertNotNull("Can't get camera characteristics for camera "
5503                         + cameraIds[i], props);
5504                 Integer lensFacing = props.get(CameraCharacteristics.LENS_FACING);
5505                 assertNotNull("Can't get lens facing info", lensFacing);
5506                 if (lensFacing == CameraCharacteristics.LENS_FACING_EXTERNAL) {
5507                     externalCameraConnected = true;
5508                 }
5509             }
5510             assertTrue("External camera is not connected on device with FEATURE_CAMERA_EXTERNAL",
5511                     externalCameraConnected);
5512         }
5513     }
5514 
5515     /**
5516      * Verifies the presence of keys in the supportedKeys list.
5517      *
5518      * @param keys list of keys to be checked
5519      * @param supportedKeys list utilized to verify presence of keys
5520      * @param expectedResult true if keys should be present, false if not
5521      *
5522      */
checkKeysAreSupported(T[] keys, Set<T> supportedKeys, boolean expectedResult)5523     public static <T> void checkKeysAreSupported(T[] keys, Set<T> supportedKeys,
5524             boolean expectedResult) {
5525         String errorMsg = expectedResult ? " key should be present "
5526                 : " key should not be present ";
5527         for (T currKey : keys) {
5528             assertTrue(currKey + errorMsg
5529                     + " among the supported keys!",
5530                     supportedKeys.contains(currKey) == expectedResult);
5531         }
5532     }
5533 
5534 
5535     /**
5536      * Verifies the presence of keys in the supportedKeys list.
5537      *
5538      * @param keys list of keys to be checked
5539      * @param supportedKeys list utilized to verify presence of keys
5540      * @param expectedResult true if keys should be present, false if not
5541      *
5542      */
checkKeysAreSupported(List<T[]> keys, Set<T> supportedKeys, boolean expectedResult)5543     public static <T> void checkKeysAreSupported(List<T[]> keys, Set<T> supportedKeys,
5544             boolean expectedResult) {
5545         for (T[] k : keys) {
5546             checkKeysAreSupported(k, supportedKeys, expectedResult);
5547         }
5548     }
5549 
5550     /**
5551      * Check if the camera device keeps stabilization off
5552      *
5553      * @param result The capture request builder
5554      * @return true if stabilization is OFF
5555      */
isStabilizationOff(CaptureRequest request)5556     public static boolean isStabilizationOff(CaptureRequest request) {
5557         Integer stabilizationMode = request.get(
5558                 CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE);
5559 
5560         return (stabilizationMode == null
5561                 || stabilizationMode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
5562     }
5563 }
5564