/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "VtsHalEvsTest"


// These values are called out in the EVS design doc (as of Mar 8, 2017)
static const int kMaxStreamStartMilliseconds = 500;
static const int kMinimumFramesPerSecond = 10;

static const int kSecondsToMilliseconds = 1000;
static const int kMillisecondsToMicroseconds = 1000;
static const float kNanoToMilliseconds = 0.000001f;
static const float kNanoToSeconds = 0.000000001f;


#include "FrameHandler.h"
#include "FrameHandlerUltrasonics.h"

#include <cstdio>
#include <cstring>
#include <cstdlib>
#include <thread>
#include <unordered_set>

#include <hidl/HidlTransportSupport.h>
#include <hwbinder/ProcessState.h>
#include <utils/Errors.h>
#include <utils/StrongPointer.h>

#include <android-base/logging.h>
#include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
#include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
#include <android/hardware/automotive/evs/1.1/IEvsDisplay.h>
#include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
#include <android/hardware/camera/device/3.2/ICameraDevice.h>
#include <system/camera_metadata.h>
#include <ui/DisplayMode.h>
#include <ui/DisplayState.h>
#include <ui/GraphicBuffer.h>
#include <ui/GraphicBufferAllocator.h>

#include <gtest/gtest.h>
#include <hidl/GtestPrinter.h>
#include <hidl/ServiceManagement.h>

using namespace ::android::hardware::automotive::evs::V1_1;
using namespace std::chrono_literals;

using ::android::hardware::Return;
using ::android::hardware::Void;
using ::android::hardware::hidl_vec;
using ::android::hardware::hidl_handle;
using ::android::hardware::hidl_string;
using ::android::sp;
using ::android::wp;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::automotive::evs::V1_1::BufferDesc;
using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
using ::android::hardware::automotive::evs::V1_0::DisplayState;
using ::android::hardware::graphics::common::V1_0::PixelFormat;
using ::android::frameworks::automotive::display::V1_0::HwDisplayConfig;
using ::android::frameworks::automotive::display::V1_0::HwDisplayState;
using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
using IEvsDisplay_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
using IEvsDisplay_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsDisplay;

namespace {

/*
 * Please note that this is different from what is defined in
 * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
 * field to store a framerate.
 */
typedef struct {
    int32_t id;
    int32_t width;
    int32_t height;
    int32_t format;
    int32_t direction;
    int32_t framerate;
} RawStreamConfig;
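// Number of int32_t entries in each RawStreamConfig record; used to step
// through the flattened stream configuration array read from camera metadata.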
constexpr const size_t kStreamCfgSz = sizeof(RawStreamConfig) / sizeof(int32_t);

} // anonymous namespace


// The main test class for EVS
class EvsHidlTest : public ::testing::TestWithParam<std::string> {
public:
    virtual void SetUp() override {
        // Make sure we can connect to the enumerator
        std::string service_name = GetParam();
        pEnumerator = IEvsEnumerator::getService(service_name);
        ASSERT_NE(pEnumerator.get(), nullptr);
        LOG(INFO) << "Test target service: " << service_name;

        mIsHwModule = pEnumerator->isHardware();
    }

    virtual void TearDown() override {
        // Attempt to close any active camera
        for (auto &&cam : activeCameras) {
            if (cam != nullptr) {
                pEnumerator->closeCamera(cam);
            }
        }
        activeCameras.clear();
    }

protected:
    void loadCameraList() {
        // SetUp() must run first!
        assert(pEnumerator != nullptr);

        // Get the camera list
        pEnumerator->getCameraList_1_1(
            [this](hidl_vec <CameraDesc> cameraList) {
                LOG(INFO) << "Camera list callback received "
                          << cameraList.size()
                          << " cameras";
                cameraInfo.reserve(cameraList.size());
                for (auto&& cam: cameraList) {
                    LOG(INFO) << "Found camera " << cam.v1.cameraId;
                    cameraInfo.push_back(cam);
                }
            }
        );
    }

    void loadUltrasonicsArrayList() {
        // SetUp() must run first!
        assert(pEnumerator != nullptr);

        // Get the ultrasonics array list
        pEnumerator->getUltrasonicsArrayList([this](hidl_vec<UltrasonicsArrayDesc> ultraList) {
            LOG(INFO) << "Ultrasonics array list callback received "
                      << ultraList.size()
                      << " arrays";
            ultrasonicsArraysInfo.reserve(ultraList.size());
            for (auto&& ultraArray : ultraList) {
                LOG(INFO) << "Found ultrasonics array " << ultraArray.ultrasonicsArrayId;
                ultrasonicsArraysInfo.push_back(ultraArray);
            }
        });
    }

    bool isLogicalCamera(const camera_metadata_t *metadata) {
        if (metadata == nullptr) {
            // A logical camera device must have a valid camera metadata.
            return false;
        }

        // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
        camera_metadata_ro_entry_t entry;
        int rc = find_camera_metadata_ro_entry(metadata,
                                               ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
                                               &entry);
        if (0 != rc) {
            // No capabilities are found.
            return false;
        }

        for (size_t i = 0; i < entry.count; ++i) {
            uint8_t cap = entry.data.u8[i];
            if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
                return true;
            }
        }

        return false;
    }

    std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
                                                         bool& flag) {
        std::unordered_set<std::string> physicalCameras;

        auto it = cameraInfo.begin();
        while (it != cameraInfo.end()) {
            if (it->v1.cameraId == id) {
                break;
            }
            ++it;
        }

        if (it == cameraInfo.end()) {
            // Unknown camera is requested.  Return an empty list.
            return physicalCameras;
        }

        const camera_metadata_t *metadata =
            reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
        flag = isLogicalCamera(metadata);
        if (!flag) {
            // EVS assumes that a device without valid metadata is a physical
            // device.
            LOG(INFO) << id << " is not a logical camera device.";
            physicalCameras.emplace(id);
            return physicalCameras;
        }

        // Look for physical camera identifiers
        camera_metadata_ro_entry entry;
        int rc = find_camera_metadata_ro_entry(metadata,
                                               ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                               &entry);
        if (rc != 0) {
            LOG(ERROR) << "No physical camera ID is found for a logical camera device";
        }

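        // The physical camera identifiers are packed into entry.data.u8 as a
        // sequence of NUL-terminated strings; split them on '\0' below.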
        const uint8_t *ids = entry.data.u8;
        size_t start = 0;
        for (size_t i = 0; i < entry.count; ++i) {
            if (ids[i] == '\0') {
                if (start != i) {
                    std::string id(reinterpret_cast<const char *>(ids + start));
                    physicalCameras.emplace(id);
                }
                start = i + 1;
            }
        }

        LOG(INFO) << id
                  << " consists of "
                  << physicalCameras.size()
                  << " physical camera devices";
        return physicalCameras;
    }

    Stream getFirstStreamConfiguration(camera_metadata_t* metadata) {
        Stream targetCfg = {};
        camera_metadata_entry_t streamCfgs;
        if (!find_camera_metadata_entry(metadata,
                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                 &streamCfgs)) {
            // Stream configurations are found in metadata
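            // Each configuration occupies kStreamCfgSz consecutive int32_t
            // values (see RawStreamConfig above); step one record at a time
            // and take the first RGBA_8888 output stream.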
            RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
            for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
                    targetCfg.width = ptr->width;
                    targetCfg.height = ptr->height;
                    targetCfg.format = static_cast<PixelFormat>(ptr->format);
                    break;
                }
                ++ptr;
            }
        }

        return targetCfg;
    }

    sp<IEvsEnumerator>              pEnumerator;   // Every test needs access to the service
    std::vector<CameraDesc>         cameraInfo;    // Empty unless/until loadCameraList() is called
    bool                            mIsHwModule;   // True if the module under test is a HW
                                                   // module implementation.
    std::deque<sp<IEvsCamera_1_1>>  activeCameras; // A list of active camera handles that are
                                                   // needed to be cleaned up.
    std::vector<UltrasonicsArrayDesc>
            ultrasonicsArraysInfo;                           // Empty unless/until
                                                             // loadUltrasonicsArrayList() is called
    std::deque<wp<IEvsCamera_1_1>> activeUltrasonicsArrays;  // A list of active ultrasonic array
                                                             // handles that are to be cleaned up.
};


// Test cases, their implementations, and corresponding requirements are
// documented at go/aae-evs-public-api-test.

/*
 * CameraOpenClean:
 * Opens each camera reported by the enumerator and then explicitly closes it via a
 * call to closeCamera.  Then repeats the test to ensure all cameras can be reopened.
 */
TEST_P(EvsHidlTest, CameraOpenClean) {
    LOG(INFO) << "Starting CameraOpenClean test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        for (int pass = 0; pass < 2; pass++) {
            sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
            ASSERT_NE(pCam, nullptr);

            for (auto&& devName : devices) {
                bool matched = false;
                pCam->getPhysicalCameraInfo(devName,
                                            [&devName, &matched](const CameraDesc& info) {
                                                matched = devName == info.v1.cameraId;
                                            });
                ASSERT_TRUE(matched);
            }

            // Store a camera handle for a clean-up
            activeCameras.push_back(pCam);

            // Verify that this camera self-identifies correctly
            pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                        LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
                                        EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
                                    }
            );

            // Verify methods for extended info
            const auto id = 0xFFFFFFFF; // meaningless id
            hidl_vec<uint8_t> values;
            auto err = pCam->setExtendedInfo_1_1(id, values);
            if (isLogicalCam) {
                // Logical camera device does not support setExtendedInfo
                // method.
                ASSERT_EQ(EvsResult::INVALID_ARG, err);
            } else {
                ASSERT_NE(EvsResult::INVALID_ARG, err);
            }


            pCam->getExtendedInfo_1_1(id, [&isLogicalCam](const auto& result, const auto& data) {
                if (isLogicalCam) {
                    ASSERT_EQ(EvsResult::INVALID_ARG, result);
                } else {
                    ASSERT_NE(EvsResult::INVALID_ARG, result);
                    ASSERT_EQ(0, data.size());
                }
            });

            // Explicitly close the camera so resources are released right away
            pEnumerator->closeCamera(pCam);
            activeCameras.clear();
        }
    }
}


/*
 * CameraOpenAggressive:
 * Opens each camera reported by the enumerator twice in a row without an intervening closeCamera
 * call.  This ensures that the intended "aggressive open" behavior works.  This is necessary for
 * the system to be tolerant of shutdown/restart race conditions.
 */
TEST_P(EvsHidlTest, CameraOpenAggressive) {
    LOG(INFO) << "Starting CameraOpenAggressive test";

    // Get the camera list
    loadCameraList();

    // Open and close each camera twice
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device, " << cam.v1.cameraId << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        activeCameras.clear();
        sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam);

        // Verify that this camera self-identifies correctly
        pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                    LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
                                    EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
                                }
        );

        sp<IEvsCamera_1_1> pCam2 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam2, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam2);

        ASSERT_NE(pCam, pCam2);

        Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
        if (mIsHwModule) {
            // Verify that the old camera rejects calls via HW module.
            EXPECT_EQ(EvsResult::OWNERSHIP_LOST, EvsResult(result));
        } else {
            // default implementation supports multiple clients.
            EXPECT_EQ(EvsResult::OK, EvsResult(result));
        }

        // Close the superseded camera
        pEnumerator->closeCamera(pCam);
        activeCameras.pop_front();

        // Verify that the second camera instance self-identifies correctly
        pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                     LOG(DEBUG) << "Found camera " << desc.v1.cameraId;
                                     EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
                                 }
        );

        // Close the second camera instance
        pEnumerator->closeCamera(pCam2);
        activeCameras.pop_front();
    }

    // Sleep here to ensure the destructor cleanup has time to run so we don't break follow-on tests
    sleep(1);   // I hate that this is an arbitrary time to wait.  :(  b/36122635
}


/*
 * CameraStreamPerformance:
 * Measure and qualify the stream start up time and streaming frame rate of each reported camera
 */
TEST_P(EvsHidlTest, CameraStreamPerformance) {
    LOG(INFO) << "Starting CameraStreamPerformance test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread
        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                         nullptr,
                                                         FrameHandler::eAutoReturn);

        // Start the camera's video stream
        nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);

        bool startResult = frameHandler->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the first frame arrived within the expected time
        frameHandler->waitForFrameCount(1);
        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
        nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;

        // Extra delays are expected when we attempt to start a video stream on
        // a logical camera device.  The delay is expected to be at most the
        // number of physical camera devices multiplied by
        // kMaxStreamStartMilliseconds.
        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
                  kMaxStreamStartMilliseconds * devices.size());
        printf("%s: Measured time to first frame %0.2f ms\n",
               cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
        LOG(INFO) << cam.v1.cameraId
                  << ": Measured time to first frame "
                  << std::scientific << timeToFirstFrame * kNanoToMilliseconds
                  << " ms.";

        // Check aspect ratio
        unsigned width = 0, height = 0;
        frameHandler->getFrameDimension(&width, &height);
        EXPECT_GE(width, height);

        // Wait a bit, then ensure we get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        framesReceived = framesReceived - 1;    // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond = framesReceived / (runTime * kNanoToSeconds);
        printf("Measured camera rate %3.2f fps\n", framesPerSecond);
        LOG(INFO) << "Measured camera rate "
                  << std::scientific << framesPerSecond
                  << " fps.";
        EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);

        // Explicitly release the camera
        pEnumerator->closeCamera(pCam);
        activeCameras.clear();
    }
}


/*
 * CameraStreamBuffering:
 * Ensure the camera implementation behaves properly when the client holds onto buffers for more
 * than one frame time.  The camera must cleanly skip frames until the client is ready again.
 */
TEST_P(EvsHidlTest, CameraStreamBuffering) {
    LOG(INFO) << "Starting CameraStreamBuffering test";

    // Arbitrary constant (should be > 1 and not too big)
    static const unsigned int kBuffersToHold = 6;

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam);

        // Ask for a very large number of buffers in flight to ensure it errors correctly
        Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
        EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);

        // Now ask for exactly kBuffersToHold buffers in flight, as we'll test behavior in that case
        Return<EvsResult> goodResult = pCam->setMaxFramesInFlight(kBuffersToHold);
        EXPECT_EQ(EvsResult::OK, goodResult);


        // Set up a frame receiver object which will fire up its own thread.
        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                         nullptr,
                                                         FrameHandler::eNoAutoReturn);

        // Start the camera's video stream
        bool startResult = frameHandler->startStream();
        ASSERT_TRUE(startResult);

        // Check that the video stream stalls once we've gotten exactly the number of buffers
        // we requested since we told the frameHandler not to return them.
        sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
        unsigned framesReceived = 0;
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";


        // Give back one buffer
        bool didReturnBuffer = frameHandler->returnHeldBuffer();
        EXPECT_TRUE(didReturnBuffer);

        // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
        // filled since we require 10fps minimum -- but give a 10% allowance just in case.
        usleep(110 * kMillisecondsToMicroseconds);
        frameHandler->getFramesCounters(&framesReceived, nullptr);
        EXPECT_EQ(kBuffersToHold+1, framesReceived) << "Stream should've resumed";

        // Even when the camera pointer goes out of scope, the FrameHandler object will
        // keep the stream alive unless we tell it to shutdown.
        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
        // we have to break that cycle in order for either of them to get cleaned up.
        frameHandler->shutdown();

        // Explicitly release the camera
        pEnumerator->closeCamera(pCam);
        activeCameras.clear();
    }
}


/*
 * CameraToDisplayRoundTrip:
 * End to end test of data flowing from the camera to the display.  Each delivered frame of camera
 * imagery is simply copied to the display buffer and presented on screen.  This is the one test
 * which a human could observe to see the operation of the system on the physical display.
 */
TEST_P(EvsHidlTest, CameraToDisplayRoundTrip) {
    LOG(INFO) << "Starting CameraToDisplayRoundTrip test";

    // Get the camera list
    loadCameraList();

    // Request available display IDs
    uint8_t targetDisplayId = 0;
    pEnumerator->getDisplayIdList([&targetDisplayId](auto ids) {
        ASSERT_GT(ids.size(), 0);
        targetDisplayId = ids[0];
    });

    // Request exclusive access to the first EVS display
    sp<IEvsDisplay_1_1> pDisplay = pEnumerator->openDisplay_1_1(targetDisplayId);
    ASSERT_NE(pDisplay, nullptr);
    LOG(INFO) << "Display " << targetDisplayId << " is already in use.";

    // Get the display descriptor
    pDisplay->getDisplayInfo_1_1([](const HwDisplayConfig& config, const HwDisplayState& state) {
        ASSERT_GT(config.size(), 0);
        ASSERT_GT(state.size(), 0);

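        // The returned byte vectors are reinterpreted below as
        // android::ui::DisplayMode and android::ui::DisplayState, respectively.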
        android::ui::DisplayMode* pConfig = (android::ui::DisplayMode*)config.data();
        const auto width = pConfig->resolution.getWidth();
        const auto height = pConfig->resolution.getHeight();
        LOG(INFO) << "    Resolution: " << width << "x" << height;
        ASSERT_GT(width, 0);
        ASSERT_GT(height, 0);

        android::ui::DisplayState* pState = (android::ui::DisplayState*)state.data();
        ASSERT_NE(pState->layerStack, -1);
    });

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (mIsHwModule && isLogicalCam) {
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId << " for HW target.";
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam);

        // Set up a frame receiver object which will fire up its own thread.
        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                         pDisplay,
                                                         FrameHandler::eAutoReturn);


        // Activate the display
        pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);

        // Start the camera's video stream
        bool startResult = frameHandler->startStream();
        ASSERT_TRUE(startResult);

        // Wait a while to let the data flow
        static const int kSecondsToWait = 5;
        const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
                                 kMaxStreamStartMilliseconds;
        const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
                                               kSecondsToMilliseconds;
        sleep(kSecondsToWait);
        unsigned framesReceived = 0;
        unsigned framesDisplayed = 0;
        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
        EXPECT_EQ(framesReceived, framesDisplayed);
        EXPECT_GE(framesDisplayed, minimumFramesExpected);

        // Turn off the display (yes, before the stream stops -- it should be handled)
        pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);

        // Shut down the streamer
        frameHandler->shutdown();

        // Explicitly release the camera
        pEnumerator->closeCamera(pCam);
        activeCameras.clear();
    }

    // Explicitly release the display
    pEnumerator->closeDisplay(pDisplay);
}


/*
 * MultiCameraStream:
 * Verify that each client can start and stop video streams on the same
 * underlying camera.
 */
TEST_P(EvsHidlTest, MultiCameraStream) {
    LOG(INFO) << "Starting MultiCameraStream test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam0, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam0);

        sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam1, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCam1);

        // Set up per-client frame receiver objects which will fire up their own threads
        sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
                                                          nullptr,
                                                          FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandler0, nullptr);

        sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
                                                          nullptr,
                                                          FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandler1, nullptr);

        // Start the camera's video stream via client 0
        bool startResult = false;
        startResult = frameHandler0->startStream() &&
                      frameHandler1->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandler0->waitForFrameCount(1);
        frameHandler1->waitForFrameCount(1);

        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);

        // Wait a bit, then ensure both clients get at least the required minimum number of frames
        sleep(5);
        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
        unsigned framesReceived0 = 0, framesReceived1 = 0;
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
        framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
        framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
        nsecs_t runTime = end - firstFrame;
        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
        LOG(INFO) << "Measured camera rate "
                  << std::scientific << framesPerSecond0 << " fps and "
                  << framesPerSecond1 << " fps";
        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);

        // Shutdown one client
        frameHandler0->shutdown();

        // Read frame counters again
        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
        frameHandler1->getFramesCounters(&framesReceived1, nullptr);

        // Wait a bit again
        sleep(5);
        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);

        // Shutdown another
        frameHandler1->shutdown();

        // Explicitly release the camera
        pEnumerator->closeCamera(pCam0);
        pEnumerator->closeCamera(pCam1);
        activeCameras.clear();

        // TODO(b/145459970, b/145457727): below sleep() is added to ensure the
        // destruction of active camera objects; this may be related with two
        // issues.
        sleep(1);
    }
}


/*
 * CameraParameter:
 * Verify that a client can adjust a camera parameter.
 */
TEST_P(EvsHidlTest, CameraParameter) {
    LOG(INFO) << "Starting CameraParameter test";

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    Return<EvsResult> result = EvsResult::OK;
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create a camera client
        sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCam, nullptr);

        // Store a camera
        activeCameras.push_back(pCam);

        // Get the parameter list
        std::vector<CameraParam> cmds;
        pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
                cmds.reserve(cmdList.size());
                for (auto &&cmd : cmdList) {
                    cmds.push_back(cmd);
                }
            }
        );

        if (cmds.size() < 1) {
            continue;
        }

        // Set up a frame receiver object which will fire up its own thread
        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                         nullptr,
                                                         FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandler, nullptr);

        // Start the camera's video stream
        bool startResult = frameHandler->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandler->waitForFrameCount(1);

        result = pCam->setMaster();
        ASSERT_EQ(EvsResult::OK, result);

        for (auto &cmd : cmds) {
            // Get a valid parameter value range
            int32_t minVal, maxVal, step;
            pCam->getIntParameterRange(
                cmd,
                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
                    minVal = val0;
                    maxVal = val1;
                    step   = val2;
                }
            );

            EvsResult result = EvsResult::OK;
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                std::vector<int32_t> values;
                pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
                                   [&result, &values](auto status, auto effectiveValues) {
                                       result = status;
                                       if (status == EvsResult::OK) {
                                          for (auto &&v : effectiveValues) {
                                              values.push_back(v);
                                          }
                                       }
                                   });
                ASSERT_EQ(EvsResult::OK, result);
                for (auto &&v : values) {
                    ASSERT_EQ(v, 0);
                }
            }

            // Try to program a parameter with a random value [minVal, maxVal]
            int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
            std::vector<int32_t> values;

            // Rounding down
            val0 = val0 - (val0 % step);
            pCam->setIntParameter(cmd, val0,
                               [&result, &values](auto status, auto effectiveValues) {
                                   result = status;
                                   if (status == EvsResult::OK) {
                                      for (auto &&v : effectiveValues) {
                                          values.push_back(v);
                                      }
                                   }
                               });

            ASSERT_EQ(EvsResult::OK, result);

            values.clear();
            pCam->getIntParameter(cmd,
                               [&result, &values](auto status, auto readValues) {
                                   result = status;
                                   if (status == EvsResult::OK) {
                                      for (auto &&v : readValues) {
                                          values.push_back(v);
                                      }
                                   }
                               });
            ASSERT_EQ(EvsResult::OK, result);
            for (auto &&v : values) {
                ASSERT_EQ(val0, v) << "Values are not matched.";
            }
        }

        result = pCam->unsetMaster();
        ASSERT_EQ(EvsResult::OK, result);

        // Shutdown
        frameHandler->shutdown();

        // Explicitly release the camera
        pEnumerator->closeCamera(pCam);
        activeCameras.clear();
    }
}


/*
 * CameraPrimaryClientRelease
 * Verify that the non-primary client gets notified when the primary client either
 * terminates or releases its role.
 */
TEST_P(EvsHidlTest, CameraPrimaryClientRelease) {
    LOG(INFO) << "Starting CameraPrimaryClientRelease test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCamPrimary, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCamPrimary);

        sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCamSecondary, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCamSecondary);

        // Set up per-client frame receiver objects which will fire up their own threads
        sp<FrameHandler> frameHandlerPrimary =
            new FrameHandler(pCamPrimary, cam,
                             nullptr,
                             FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandlerPrimary, nullptr);
        sp<FrameHandler> frameHandlerSecondary =
            new FrameHandler(pCamSecondary, cam,
                             nullptr,
                             FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client
        EvsResult result = pCamPrimary->setMaster();
        ASSERT_TRUE(result == EvsResult::OK);

        // Try to set another client as the primary client.
        result = pCamSecondary->setMaster();
        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);

        // Start the camera's video stream via the primary client.
        bool startResult = frameHandlerPrimary->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        startResult = frameHandlerSecondary->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        // The non-primary client expects to receive a notification when the
        // primary client role is released.
        EvsEventDesc aTargetEvent  = {};
        EvsEventDesc aNotification = {};

        bool listening = false;
        std::mutex eventLock;
        std::condition_variable eventCond;
        std::thread listener = std::thread(
            [&aNotification, &frameHandlerSecondary, &listening, &eventCond]() {
                // Notify that a listening thread is running.
                listening = true;
                eventCond.notify_all();

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification, true)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }

            }
        );

        // Wait until a listening thread starts.
        std::unique_lock<std::mutex> lock(eventLock);
        auto timer = std::chrono::system_clock::now();
        while (!listening) {
            timer += 1s;
            eventCond.wait_until(lock, timer);
        }
        lock.unlock();

        // Release a primary client role.
        pCamPrimary->unsetMaster();

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                  static_cast<EvsEventType>(aNotification.aType));

        // Non-primary becomes a primary client.
        result = pCamSecondary->setMaster();
        ASSERT_TRUE(result == EvsResult::OK);

        // Previous primary client fails to become a primary client.
        result = pCamPrimary->setMaster();
        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);

        listening = false;
        listener = std::thread(
            [&aNotification, &frameHandlerPrimary, &listening, &eventCond]() {
                // Notify that a listening thread is running.
                listening = true;
                eventCond.notify_all();

                EvsEventDesc aTargetEvent;
                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
                if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification, true)) {
                    LOG(WARNING) << "The timer expired before the target event was fired.";
                }

            }
        );

        // Wait until a listening thread starts.
        timer = std::chrono::system_clock::now();
        lock.lock();
        while (!listening) {
            eventCond.wait_until(lock, timer + 1s);
        }
        lock.unlock();

        // Closing current primary client.
        frameHandlerSecondary->shutdown();

        // Join a listening thread.
        if (listener.joinable()) {
            listener.join();
        }

        // Verify change notifications.
        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                  static_cast<EvsEventType>(aNotification.aType));

        // Closing streams.
        frameHandlerPrimary->shutdown();

        // Explicitly release the camera
        pEnumerator->closeCamera(pCamPrimary);
        pEnumerator->closeCamera(pCamSecondary);
        activeCameras.clear();
    }
}


/*
 * MultiCameraParameter:
 * Verify that primary and non-primary clients behave as expected when they try to adjust
 * camera parameters.
 */
TEST_P(EvsHidlTest, MultiCameraParameter) {
    LOG(INFO) << "Starting MultiCameraParameter test";

    if (mIsHwModule) {
        // This test is not for HW module implementation.
        return;
    }

    // Get the camera list
    loadCameraList();

    // Test each reported camera
    for (auto&& cam: cameraInfo) {
        bool isLogicalCam = false;
        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
        if (isLogicalCam) {
            // TODO(b/145465724): Support camera parameter programming on
            // logical devices.
            LOG(INFO) << "Skip a logical device " << cam.v1.cameraId;
            continue;
        }

        // Read a target resolution from the metadata
        Stream targetCfg =
            getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
        ASSERT_GT(targetCfg.width, 0);
        ASSERT_GT(targetCfg.height, 0);

        // Create two camera clients.
        sp<IEvsCamera_1_1> pCamPrimary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCamPrimary, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCamPrimary);

        sp<IEvsCamera_1_1> pCamSecondary = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
        ASSERT_NE(pCamSecondary, nullptr);

        // Store a camera handle for a clean-up
        activeCameras.push_back(pCamSecondary);

        // Get the parameter list
        std::vector<CameraParam> camPrimaryCmds, camSecondaryCmds;
        pCamPrimary->getParameterList([&camPrimaryCmds](hidl_vec<CameraParam> cmdList) {
                camPrimaryCmds.reserve(cmdList.size());
                for (auto &&cmd : cmdList) {
                    camPrimaryCmds.push_back(cmd);
                }
            }
        );

        pCamSecondary->getParameterList([&camSecondaryCmds](hidl_vec<CameraParam> cmdList) {
                camSecondaryCmds.reserve(cmdList.size());
                for (auto &&cmd : cmdList) {
                    camSecondaryCmds.push_back(cmd);
                }
            }
        );

        if (camPrimaryCmds.size() < 1 ||
            camSecondaryCmds.size() < 1) {
            // Skip a camera device if it does not support any parameter.
            continue;
        }

        // Set up per-client frame receiver objects which will fire up their own threads
        sp<FrameHandler> frameHandlerPrimary =
            new FrameHandler(pCamPrimary, cam,
                             nullptr,
                             FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandlerPrimary, nullptr);
        sp<FrameHandler> frameHandlerSecondary =
            new FrameHandler(pCamSecondary, cam,
                             nullptr,
                             FrameHandler::eAutoReturn);
        ASSERT_NE(frameHandlerSecondary, nullptr);

        // Set one client as the primary client.
        EvsResult result = pCamPrimary->setMaster();
        ASSERT_EQ(EvsResult::OK, result);

        // Try to set another client as the primary client.
        result = pCamSecondary->setMaster();
        ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);

        // Start the camera's video stream via the primary client.
        bool startResult = frameHandlerPrimary->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandlerPrimary->waitForFrameCount(1);

        // Start the camera's video stream via another client
        startResult = frameHandlerSecondary->startStream();
        ASSERT_TRUE(startResult);

        // Ensure the stream starts
        frameHandlerSecondary->waitForFrameCount(1);

        int32_t val0 = 0;
        std::vector<int32_t> values;
        EvsEventDesc aNotification0 = {};
        EvsEventDesc aNotification1 = {};
        for (auto &cmd : camPrimaryCmds) {
            // Get a valid parameter value range
            int32_t minVal, maxVal, step;
            pCamPrimary->getIntParameterRange(
                cmd,
                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
                    minVal = val0;
                    maxVal = val1;
                    step   = val2;
                }
            );

            EvsResult result = EvsResult::OK;
            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                // Try to turn off auto-focus
                values.clear();
                pCamPrimary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
                                   [&result, &values](auto status, auto effectiveValues) {
                                       result = status;
                                       if (status == EvsResult::OK) {
                                          for (auto &&v : effectiveValues) {
                                              values.push_back(v);
                                          }
                                       }
                                   });
                ASSERT_EQ(EvsResult::OK, result);
                for (auto &&v : values) {
                    ASSERT_EQ(v, 0);
                }
            }

            // Calculate a parameter value to program.
            val0 = minVal + (std::rand() % (maxVal - minVal));
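            // Round down to the closest multiple of the supported step size.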
            val0 = val0 - (val0 % step);

            // Prepare and start event listeners.
            bool listening0 = false;
            bool listening1 = false;
            std::condition_variable eventCond;
            std::thread listener0 = std::thread(
                [cmd, val0,
                 &aNotification0, &frameHandlerPrimary, &listening0, &listening1, &eventCond]() {
                    listening0 = true;
                    if (listening1) {
                        eventCond.notify_all();
                    }

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
                        LOG(WARNING) << "The timer expired before the target event was fired.";
                    }
                }
            );
            std::thread listener1 = std::thread(
                [cmd, val0,
                 &aNotification1, &frameHandlerSecondary, &listening0, &listening1, &eventCond]() {
                    listening1 = true;
                    if (listening0) {
                        eventCond.notify_all();
                    }

                    EvsEventDesc aTargetEvent;
                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
                    aTargetEvent.payload[1] = val0;
                    if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1329                         LOG(WARNING) << "The timer expired before the target event was fired.";
1330                     }
1331                 }
1332             );
1333 
1334             // Wait until both listening threads start.
1335             std::mutex eventLock;
1336             std::unique_lock<std::mutex> lock(eventLock);
1337             auto timer = std::chrono::system_clock::now();
1338             while (!listening0 || !listening1) {
1339                 eventCond.wait_until(lock, timer + 1s);
1340             }
1341             lock.unlock();
1342 
1343             // Try to program a parameter
1344             values.clear();
1345             pCamPrimary->setIntParameter(cmd, val0,
1346                                      [&result, &values](auto status, auto effectiveValues) {
1347                                          result = status;
1348                                          if (status == EvsResult::OK) {
1349                                             for (auto &&v : effectiveValues) {
1350                                                 values.push_back(v);
1351                                             }
1352                                          }
1353                                      });
1354 
1355             ASSERT_EQ(EvsResult::OK, result);
1356             for (auto &&v : values) {
1357                 ASSERT_EQ(val0, v) << "Values are not matched.";
1358             }
1359 
1360             // Join a listening thread.
1361             if (listener0.joinable()) {
1362                 listener0.join();
1363             }
1364             if (listener1.joinable()) {
1365                 listener1.join();
1366             }
1367 
1368             // Verify a change notification
1369             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1370                       static_cast<EvsEventType>(aNotification0.aType));
1371             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1372                       static_cast<EvsEventType>(aNotification1.aType));
1373             ASSERT_EQ(cmd,
1374                       static_cast<CameraParam>(aNotification0.payload[0]));
1375             ASSERT_EQ(cmd,
1376                       static_cast<CameraParam>(aNotification1.payload[0]));
1377             for (auto &&v : values) {
1378                 ASSERT_EQ(v,
1379                           static_cast<int32_t>(aNotification0.payload[1]));
1380                 ASSERT_EQ(v,
1381                           static_cast<int32_t>(aNotification1.payload[1]));
1382             }
1383 
1384             // Clients expect to receive a parameter change notification
1385             // whenever the primary client adjusts a parameter.
1386             values.clear();
1387             pCamPrimary->getIntParameter(cmd,
1388                                      [&result, &values](auto status, auto readValues) {
1389                                          result = status;
1390                                          if (status == EvsResult::OK) {
1391                                             for (auto &&v : readValues) {
1392                                                 values.push_back(v);
1393                                             }
1394                                          }
1395                                      });
1396             ASSERT_EQ(EvsResult::OK, result);
1397             for (auto &&v : values) {
1398                 ASSERT_EQ(val0, v) << "Values are not matched.";
1399             }
1400         }
1401 
1402         // Try to adjust a parameter via non-primary client
1403         values.clear();
1404         pCamSecondary->setIntParameter(camSecondaryCmds[0], val0,
1405                                     [&result, &values](auto status, auto effectiveValues) {
1406                                         result = status;
1407                                         if (status == EvsResult::OK) {
1408                                             for (auto &&v : effectiveValues) {
1409                                                 values.push_back(v);
1410                                             }
1411                                         }
1412                                     });
1413         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1414 
1415         // Non-primary client attempts to be a primary client
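        // This is expected to fail with OWNERSHIP_LOST while the current primary
        // client still holds its role.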
1416         result = pCamSecondary->setMaster();
1417         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
1418 
1419         // The primary client retires from its role
1420         bool listening = false;
1421         std::condition_variable eventCond;
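        // The secondary client listens for the MASTER_RELEASED notification that
        // should arrive when the primary client calls unsetMaster() below.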
1422         std::thread listener = std::thread(
1423             [&aNotification0, &frameHandlerSecondary, &listening, &eventCond]() {
1424                 listening = true;
1425                 eventCond.notify_all();
1426 
1427                 EvsEventDesc aTargetEvent;
1428                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1429                 if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification0, true)) {
1430                     LOG(WARNING) << "The timer expired before the target event was fired.";
1431                 }
1432             }
1433         );
1434 
1435         std::mutex eventLock;
1436         auto timer = std::chrono::system_clock::now();
1437         std::unique_lock<std::mutex> lock(eventLock);
1438         while (!listening) {
1439             eventCond.wait_until(lock, timer + 1s);
1440         }
1441         lock.unlock();
1442 
1443         result = pCamPrimary->unsetMaster();
1444         ASSERT_EQ(EvsResult::OK, result);
1445 
1446         if (listener.joinable()) {
1447             listener.join();
1448         }
1449         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
1450                   static_cast<EvsEventType>(aNotification0.aType));
1451 
1452         // Try to adjust a parameter after being retired
1453         values.clear();
1454         pCamPrimary->setIntParameter(camPrimaryCmds[0], val0,
1455                                  [&result, &values](auto status, auto effectiveValues) {
1456                                      result = status;
1457                                      if (status == EvsResult::OK) {
1458                                         for (auto &&v : effectiveValues) {
1459                                             values.push_back(v);
1460                                         }
1461                                      }
1462                                  });
1463         ASSERT_EQ(EvsResult::INVALID_ARG, result);
1464 
1465         // Non-primary client becomes a primary client
1466         result = pCamSecondary->setMaster();
1467         ASSERT_EQ(EvsResult::OK, result);
1468 
1469         // Try to adjust a parameter via new primary client
1470         for (auto &cmd : camSecondaryCmds) {
1471             // Get a valid parameter value range
1472             int32_t minVal, maxVal, step;
1473             pCamSecondary->getIntParameterRange(
1474                 cmd,
1475                 [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1476                     minVal = val0;
1477                     maxVal = val1;
1478                     step   = val2;
1479                 }
1480             );
1481 
1482             EvsResult result = EvsResult::OK;
1483             values.clear();
1484             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
1485                 // Try to turn off auto-focus
1486                 values.clear();
1487                 pCamSecondary->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1488                                    [&result, &values](auto status, auto effectiveValues) {
1489                                        result = status;
1490                                        if (status == EvsResult::OK) {
1491                                           for (auto &&v : effectiveValues) {
1492                                               values.push_back(v);
1493                                           }
1494                                        }
1495                                    });
1496                 ASSERT_EQ(EvsResult::OK, result);
1497                 for (auto &&v : values) {
1498                     ASSERT_EQ(v, 0);
1499                 }
1500             }
1501 
1502             // Calculate a parameter value to program; it is rounded down to the nearest step.
1503             val0 = minVal + (std::rand() % (maxVal - minVal));
1504             val0 = val0 - (val0 % step);
1505 
1506             // Prepare and start event listeners.
1507             bool listening0 = false;
1508             bool listening1 = false;
1509             std::condition_variable eventCond;
1510             std::thread listener0 = std::thread(
1511                 [&]() {
1512                     listening0 = true;
1513                     if (listening1) {
1514                         eventCond.notify_all();
1515                     }
1516 
1517                     EvsEventDesc aTargetEvent;
1518                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1519                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1520                     aTargetEvent.payload[1] = val0;
1521                     if (!frameHandlerPrimary->waitForEvent(aTargetEvent, aNotification0)) {
1522                         LOG(WARNING) << "The timer expired before the target event was fired.";
1523                     }
1524                 }
1525             );
1526             std::thread listener1 = std::thread(
1527                 [&]() {
1528                     listening1 = true;
1529                     if (listening0) {
1530                         eventCond.notify_all();
1531                     }
1532 
1533                     EvsEventDesc aTargetEvent;
1534                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1535                     aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
1536                     aTargetEvent.payload[1] = val0;
1537                     if (!frameHandlerSecondary->waitForEvent(aTargetEvent, aNotification1)) {
1538                         LOG(WARNING) << "The timer expired before the target event was fired.";
1539                     }
1540                 }
1541             );
1542 
1543             // Wait until both listening threads start.
1544             std::mutex eventLock;
1545             std::unique_lock<std::mutex> lock(eventLock);
1546             auto timer = std::chrono::system_clock::now();
1547             while (!listening0 || !listening1) {
1548                 eventCond.wait_until(lock, timer + 1s);
1549             }
1550             lock.unlock();
1551 
1552             // Try to program a parameter
1553             values.clear();
1554             pCamSecondary->setIntParameter(cmd, val0,
1555                                         [&result, &values](auto status, auto effectiveValues) {
1556                                             result = status;
1557                                             if (status == EvsResult::OK) {
1558                                                 for (auto &&v : effectiveValues) {
1559                                                     values.push_back(v);
1560                                                 }
1561                                             }
1562                                         });
1563             ASSERT_EQ(EvsResult::OK, result);
1564 
1565             // Clients expect to receive a parameter change notification
1566             // whenever the primary client adjusts a parameter.
1567             values.clear();
1568             pCamSecondary->getIntParameter(cmd,
1569                                         [&result, &values](auto status, auto readValues) {
1570                                             result = status;
1571                                             if (status == EvsResult::OK) {
1572                                                 for (auto &&v : readValues) {
1573                                                     values.push_back(v);
1574                                                 }
1575                                             }
1576                                         });
1577             ASSERT_EQ(EvsResult::OK, result);
1578             for (auto &&v : values) {
1579                 ASSERT_EQ(val0, v) << "Values are not matched.";
1580             }
1581 
1582             // Join a listening thread.
1583             if (listener0.joinable()) {
1584                 listener0.join();
1585             }
1586             if (listener1.joinable()) {
1587                 listener1.join();
1588             }
1589 
1590             // Verify a change notification
1591             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1592                       static_cast<EvsEventType>(aNotification0.aType));
1593             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
1594                       static_cast<EvsEventType>(aNotification1.aType));
1595             ASSERT_EQ(cmd,
1596                       static_cast<CameraParam>(aNotification0.payload[0]));
1597             ASSERT_EQ(cmd,
1598                       static_cast<CameraParam>(aNotification1.payload[0]));
1599             for (auto &&v : values) {
1600                 ASSERT_EQ(v,
1601                           static_cast<int32_t>(aNotification0.payload[1]));
1602                 ASSERT_EQ(v,
1603                           static_cast<int32_t>(aNotification1.payload[1]));
1604             }
1605         }
1606 
1607         // New primary client retires from the role
1608         result = pCamSecondary->unsetMaster();
1609         ASSERT_EQ(EvsResult::OK, result);
1610 
1611         // Shutdown
1612         frameHandlerPrimary->shutdown();
1613         frameHandlerSecondary->shutdown();
1614 
1615         // Explicitly release the camera
1616         pEnumerator->closeCamera(pCamPrimary);
1617         pEnumerator->closeCamera(pCamSecondary);
1618         activeCameras.clear();
1619     }
1620 }
1621 
1622 
1623 /*
1624  * HighPriorityCameraClient:
1625  * An EVS client that owns the display is prioritized and therefore can take over
1626  * the primary client role from other EVS clients that do not own the display.
1627  */
1628 TEST_P(EvsHidlTest, HighPriorityCameraClient) {
1629     LOG(INFO) << "Starting HighPriorityCameraClient test";
1630 
1631     if (mIsHwModule) {
1632         // This test is not for HW module implementation.
1633         return;
1634     }
1635 
1636     // Get the camera list
1637     loadCameraList();
1638 
1639     // Request exclusive access to the EVS display
1640     sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
1641     ASSERT_NE(pDisplay, nullptr);
1642 
1643     // Test each reported camera
1644     for (auto&& cam: cameraInfo) {
1645         // Read a target resolution from the metadata
1646         Stream targetCfg =
1647             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
1648         ASSERT_GT(targetCfg.width, 0);
1649         ASSERT_GT(targetCfg.height, 0);
1650 
1651         // Create two clients
1652         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1653         ASSERT_NE(pCam0, nullptr);
1654 
1655         // Store a camera handle for a clean-up
1656         activeCameras.push_back(pCam0);
1657 
1658         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
1659         ASSERT_NE(pCam1, nullptr);
1660 
1661         // Store a camera handle for a clean-up
1662         activeCameras.push_back(pCam1);
1663 
1664         // Get the parameter list; this test will use the first command in both
1665         // lists.
1666         std::vector<CameraParam> cam0Cmds, cam1Cmds;
1667         pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
1668                 cam0Cmds.reserve(cmdList.size());
1669                 for (auto &&cmd : cmdList) {
1670                     cam0Cmds.push_back(cmd);
1671                 }
1672             }
1673         );
1674 
1675         pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
1676                 cam1Cmds.reserve(cmdList.size());
1677                 for (auto &&cmd : cmdList) {
1678                     cam1Cmds.push_back(cmd);
1679                 }
1680             }
1681         );
1682         if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
1683             // Cannot execute this test.
1684             return;
1685         }
1686 
1687         // Set up a frame receiver object which will fire up its own thread.
1688         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
1689                                                           pDisplay,
1690                                                           FrameHandler::eAutoReturn);
1691         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
1692                                                           nullptr,
1693                                                           FrameHandler::eAutoReturn);
1694 
1695         // Activate the display
1696         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
1697 
1698         // Start the camera's video stream
1699         ASSERT_TRUE(frameHandler0->startStream());
1700         ASSERT_TRUE(frameHandler1->startStream());
1701 
1702         // Ensure the stream starts
1703         frameHandler0->waitForFrameCount(1);
1704         frameHandler1->waitForFrameCount(1);
1705 
1706         // Client 1 becomes a primary client and programs a parameter.
1707         EvsResult result = EvsResult::OK;
1708         // Get a valid parameter value range
1709         int32_t minVal, maxVal, step;
1710         pCam1->getIntParameterRange(
1711             cam1Cmds[0],
1712             [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
1713                 minVal = val0;
1714                 maxVal = val1;
1715                 step   = val2;
1716             }
1717         );
1718 
1719         // Client 1 becomes the primary client
1720         result = pCam1->setMaster();
1721         ASSERT_EQ(EvsResult::OK, result);
1722 
1723         std::vector<int32_t> values;
1724         EvsEventDesc aTargetEvent  = {};
1725         EvsEventDesc aNotification = {};
1726         bool listening = false;
1727         std::mutex eventLock;
1728         std::condition_variable eventCond;
1729         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1730             std::thread listener = std::thread(
1731                 [&frameHandler0, &aNotification, &listening, &eventCond] {
1732                     listening = true;
1733                     eventCond.notify_all();
1734 
1735                     EvsEventDesc aTargetEvent;
1736                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1737                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1738                     aTargetEvent.payload[1] = 0;
1739                     if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1740                         LOG(WARNING) << "The timer expired before the target event was fired.";
1741                     }
1742                 }
1743             );
1744 
1745             // Wait until the listener starts.
1746             std::unique_lock<std::mutex> lock(eventLock);
1747             auto timer = std::chrono::system_clock::now();
1748             while (!listening) {
1749                 eventCond.wait_until(lock, timer + 1s);
1750             }
1751             lock.unlock();
1752 
1753             // Try to turn off auto-focus
1754             pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1755                                [&result, &values](auto status, auto effectiveValues) {
1756                                    result = status;
1757                                    if (status == EvsResult::OK) {
1758                                       for (auto &&v : effectiveValues) {
1759                                           values.push_back(v);
1760                                       }
1761                                    }
1762                                });
1763             ASSERT_EQ(EvsResult::OK, result);
1764             for (auto &&v : values) {
1765                 ASSERT_EQ(v, 0);
1766             }
1767 
1768             // Join a listener
1769             if (listener.joinable()) {
1770                 listener.join();
1771             }
1772 
1773             // Make sure AUTO_FOCUS is off.
1774             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1775                       EvsEventType::PARAMETER_CHANGED);
1776         }
1777 
1778         // Try to program a parameter with a random value in [minVal, maxVal) after
1779         // rounding it down to the nearest step.
1780         int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
1781         val0 = val0 - (val0 % step);
1782 
1783         std::thread listener = std::thread(
1784             [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
1785                 listening = true;
1786                 eventCond.notify_all();
1787 
1788                 EvsEventDesc aTargetEvent;
1789                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1790                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
1791                 aTargetEvent.payload[1] = val0;
1792                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1793                     LOG(WARNING) << "The timer expired before the target event was fired.";
1794                 }
1795             }
1796         );
1797 
1798         // Wait until the listener starts.
1799         listening = false;
1800         std::unique_lock<std::mutex> lock(eventLock);
1801         auto timer = std::chrono::system_clock::now();
1802         while (!listening) {
1803             eventCond.wait_until(lock, timer + 1s);
1804         }
1805         lock.unlock();
1806 
1807         values.clear();
1808         pCam1->setIntParameter(cam1Cmds[0], val0,
1809                             [&result, &values](auto status, auto effectiveValues) {
1810                                 result = status;
1811                                 if (status == EvsResult::OK) {
1812                                     for (auto &&v : effectiveValues) {
1813                                         values.push_back(v);
1814                                     }
1815                                 }
1816                             });
1817         ASSERT_EQ(EvsResult::OK, result);
1818         for (auto &&v : values) {
1819             ASSERT_EQ(val0, v);
1820         }
1821 
1822         // Join a listener
1823         if (listener.joinable()) {
1824             listener.join();
1825         }
1826 
1827         // Verify a change notification
1828         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1829                   EvsEventType::PARAMETER_CHANGED);
1830         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1831                   cam1Cmds[0]);
1832         for (auto &&v : values) {
1833             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1834         }
1835 
1836         listener = std::thread(
1837             [&frameHandler1, &aNotification, &listening, &eventCond] {
1838                 listening = true;
1839                 eventCond.notify_all();
1840 
1841                 EvsEventDesc aTargetEvent;
1842                 aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
1843                 if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
1844                     LOG(WARNING) << "The timer expired before the target event was fired.";
1845                 }
1846             }
1847         );
1848 
1849         // Wait until the listener starts.
1850         listening = false;
1851         lock.lock();
1852         timer = std::chrono::system_clock::now();
1853         while (!listening) {
1854             eventCond.wait_until(lock, timer + 1s);
1855         }
1856         lock.unlock();
1857 
1858         // Client 0 steals the primary client role
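        // forceMaster() should succeed because client 0 owns the EVS display and is
        // therefore prioritized over the current primary client.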
1859         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
1860 
1861         // Join a listener
1862         if (listener.joinable()) {
1863             listener.join();
1864         }
1865 
1866         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1867                   EvsEventType::MASTER_RELEASED);
1868 
1869         // Client 0 programs a parameter
1870         val0 = minVal + (std::rand() % (maxVal - minVal));
1871 
1872         // Rounding down
1873         val0 = val0 - (val0 % step);
1874 
1875         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
1876             std::thread listener = std::thread(
1877                 [&frameHandler1, &aNotification, &listening, &eventCond] {
1878                     listening = true;
1879                     eventCond.notify_all();
1880 
1881                     EvsEventDesc aTargetEvent;
1882                     aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1883                     aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
1884                     aTargetEvent.payload[1] = 0;
1885                     if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
1886                         LOG(WARNING) << "The timer expired before the target event was fired.";
1887                     }
1888                 }
1889             );
1890 
1891             // Wait until the listener starts.
1892             std::unique_lock<std::mutex> lock(eventLock);
1893             auto timer = std::chrono::system_clock::now();
1894             while (!listening) {
1895                 eventCond.wait_until(lock, timer + 1s);
1896             }
1897             lock.unlock();
1898 
1899             // Try to turn off auto-focus
1900             values.clear();
1901             pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
1902                                [&result, &values](auto status, auto effectiveValues) {
1903                                    result = status;
1904                                    if (status == EvsResult::OK) {
1905                                       for (auto &&v : effectiveValues) {
1906                                           values.push_back(v);
1907                                       }
1908                                    }
1909                                });
1910             ASSERT_EQ(EvsResult::OK, result);
1911             for (auto &&v : values) {
1912                 ASSERT_EQ(v, 0);
1913             }
1914 
1915             // Join a listener
1916             if (listener.joinable()) {
1917                 listener.join();
1918             }
1919 
1920             // Make sure AUTO_FOCUS is off.
1921             ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1922                       EvsEventType::PARAMETER_CHANGED);
1923         }
1924 
1925         listener = std::thread(
1926             [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
1927                 listening = true;
1928                 eventCond.notify_all();
1929 
1930                 EvsEventDesc aTargetEvent;
1931                 aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
1932                 aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
1933                 aTargetEvent.payload[1] = val0;
1934                 if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
1935                     LOG(WARNING) << "The timer expired before the target event was fired.";
1936                 }
1937             }
1938         );
1939 
1940         // Wait until the listener starts.
1941         listening = false;
1942         timer = std::chrono::system_clock::now();
1943         lock.lock();
1944         while (!listening) {
1945             eventCond.wait_until(lock, timer + 1s);
1946         }
1947         lock.unlock();
1948 
1949         values.clear();
1950         pCam0->setIntParameter(cam0Cmds[0], val0,
1951                             [&result, &values](auto status, auto effectiveValues) {
1952                                 result = status;
1953                                 if (status == EvsResult::OK) {
1954                                     for (auto &&v : effectiveValues) {
1955                                         values.push_back(v);
1956                                     }
1957                                 }
1958                             });
1959         ASSERT_EQ(EvsResult::OK, result);
1960 
1961         // Join a listener
1962         if (listener.joinable()) {
1963             listener.join();
1964         }
1965         // Verify a change notification
1966         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
1967                   EvsEventType::PARAMETER_CHANGED);
1968         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
1969                   cam0Cmds[0]);
1970         for (auto &&v : values) {
1971             ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
1972         }
1973 
1974         // Turn off the display (yes, before the stream stops -- it should be handled)
1975         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
1976 
1977         // Shut down the streamer
1978         frameHandler0->shutdown();
1979         frameHandler1->shutdown();
1980 
1981         // Explicitly release the camera
1982         pEnumerator->closeCamera(pCam0);
1983         pEnumerator->closeCamera(pCam1);
1984         activeCameras.clear();
1985 
1986     }
1987 
1988     // Explicitly release the display
1989     pEnumerator->closeDisplay(pDisplay);
1990 }
1991 
1992 
1993 /*
1994  * CameraUseStreamConfigToDisplay:
1995  * End-to-end test of data flowing from the camera to the display.  Similar to
1996  * the CameraToDisplayRoundTrip test case, but this test retrieves available stream
1997  * configurations from EVS and uses one of them to start a video stream.
1998  */
1999 TEST_P(EvsHidlTest, CameraUseStreamConfigToDisplay) {
2000     LOG(INFO) << "Starting CameraUseStreamConfigToDisplay test";
2001 
2002     // Get the camera list
2003     loadCameraList();
2004 
2005     // Request exclusive access to the EVS display
2006     sp<IEvsDisplay_1_0> pDisplay = pEnumerator->openDisplay();
2007     ASSERT_NE(pDisplay, nullptr);
2008 
2009     // Test each reported camera
2010     for (auto&& cam: cameraInfo) {
2011         // Choose a configuration whose frame rate is at least minReqFps.
2012         Stream targetCfg = {};
2013         const int32_t minReqFps = 15;
2014         int32_t maxArea = 0;
2015         camera_metadata_entry_t streamCfgs;
2016         bool foundCfg = false;
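        // Scan the metadata for the largest RGBA_8888 output configuration that meets
        // the minimum frame rate; find_camera_metadata_entry() returns 0 when the
        // entry exists.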
2017         if (!find_camera_metadata_entry(
2018                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2019                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2020                  &streamCfgs)) {
2021             // Stream configurations are found in metadata
2022             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
2023             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2024                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
2025                     ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
2026 
2027                     if (ptr->width * ptr->height > maxArea &&
2028                         ptr->framerate >= minReqFps) {
2029                         targetCfg.width = ptr->width;
2030                         targetCfg.height = ptr->height;
2031 
2032                         maxArea = ptr->width * ptr->height;
2033                         foundCfg = true;
2034                     }
2035                 }
2036                 ++ptr;
2037             }
2038         }
2039         targetCfg.format =
2040             static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
2041 
2042         if (!foundCfg) {
2043             // Current EVS camera does not provide stream configurations in the
2044             // metadata.
2045             continue;
2046         }
2047 
2048         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2049         ASSERT_NE(pCam, nullptr);
2050 
2051         // Store a camera handle for a clean-up
2052         activeCameras.push_back(pCam);
2053 
2054         // Set up a frame receiver object which will fire up its own thread.
2055         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2056                                                          pDisplay,
2057                                                          FrameHandler::eAutoReturn);
2058 
2059 
2060         // Activate the display
2061         pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
2062 
2063         // Start the camera's video stream
2064         bool startResult = frameHandler->startStream();
2065         ASSERT_TRUE(startResult);
2066 
2067         // Wait a while to let the data flow
2068         static const int kSecondsToWait = 5;
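        // Compute the minimum number of frames expected at the required frame rate,
        // allowing for the permitted stream start-up latency.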
2069         const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
2070                                  kMaxStreamStartMilliseconds;
2071         const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
2072                                                kSecondsToMilliseconds;
2073         sleep(kSecondsToWait);
2074         unsigned framesReceived = 0;
2075         unsigned framesDisplayed = 0;
2076         frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
2077         EXPECT_EQ(framesReceived, framesDisplayed);
2078         EXPECT_GE(framesDisplayed, minimumFramesExpected);
2079 
2080         // Turn off the display (yes, before the stream stops -- it should be handled)
2081         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
2082 
2083         // Shut down the streamer
2084         frameHandler->shutdown();
2085 
2086         // Explicitly release the camera
2087         pEnumerator->closeCamera(pCam);
2088         activeCameras.clear();
2089     }
2090 
2091     // Explicitly release the display
2092     pEnumerator->closeDisplay(pDisplay);
2093 }
2094 
2095 
2096 /*
2097  * MultiCameraStreamUseConfig:
2098  * Verify that each client can start and stop video streams on the same
2099  * underlying camera with the same configuration.
2100  */
2101 TEST_P(EvsHidlTest, MultiCameraStreamUseConfig) {
2102     LOG(INFO) << "Starting MultiCameraStreamUseConfig test";
2103 
2104     if (mIsHwModule) {
2105         // This test is not for HW module implementation.
2106         return;
2107     }
2108 
2109     // Get the camera list
2110     loadCameraList();
2111 
2112     // Test each reported camera
2113     for (auto&& cam: cameraInfo) {
2114         // Choose a configuration whose frame rate is at least minReqFps.
2115         Stream targetCfg = {};
2116         const int32_t minReqFps = 15;
2117         int32_t maxArea = 0;
2118         camera_metadata_entry_t streamCfgs;
2119         bool foundCfg = false;
2120         if (!find_camera_metadata_entry(
2121                  reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
2122                  ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
2123                  &streamCfgs)) {
2124             // Stream configurations are found in metadata
2125             RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
2126             for (unsigned offset = 0; offset < streamCfgs.count; offset += kStreamCfgSz) {
2127                 if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
2128                     ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
2129 
2130                     if (ptr->width * ptr->height > maxArea &&
2131                         ptr->framerate >= minReqFps) {
2132                         targetCfg.width = ptr->width;
2133                         targetCfg.height = ptr->height;
2134 
2135                         maxArea = ptr->width * ptr->height;
2136                         foundCfg = true;
2137                     }
2138                 }
2139                 ++ptr;
2140             }
2141         }
2142         targetCfg.format =
2143             static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
2144 
2145         if (!foundCfg) {
2146             LOG(INFO) << "Device " << cam.v1.cameraId
2147                       << " does not provide a list of supported stream configurations, skipped";
2148             continue;
2149         }
2150 
2151         // Create the first camera client with a selected stream configuration.
2152         sp<IEvsCamera_1_1> pCam0 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2153         ASSERT_NE(pCam0, nullptr);
2154 
2155         // Store a camera handle for a clean-up
2156         activeCameras.push_back(pCam0);
2157 
2158         // Try to create the second camera client with a different stream
2159         // configuration.
2160         int32_t id = targetCfg.id;
2161         targetCfg.id += 1;  // EVS manager sees only the stream id.
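        // Opening the same camera with a different stream configuration is expected
        // to fail while the first client is still active.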
2162         sp<IEvsCamera_1_1> pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2163         ASSERT_EQ(pCam1, nullptr);
2164 
2165         // Try again with the same stream configuration.
2166         targetCfg.id = id;
2167         pCam1 = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2168         ASSERT_NE(pCam1, nullptr);
2169 
2170         // Store a camera handle for a clean-up
2171         activeCameras.push_back(pCam1);
2172 
2173         // Set up per-client frame receiver objects which will fire up their own threads
2174         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
2175                                                           nullptr,
2176                                                           FrameHandler::eAutoReturn);
2177         ASSERT_NE(frameHandler0, nullptr);
2178 
2179         sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
2180                                                           nullptr,
2181                                                           FrameHandler::eAutoReturn);
2182         ASSERT_NE(frameHandler1, nullptr);
2183 
2184         // Start the camera's video stream via client 0
2185         bool startResult = false;
2186         startResult = frameHandler0->startStream() &&
2187                       frameHandler1->startStream();
2188         ASSERT_TRUE(startResult);
2189 
2190         // Ensure the stream starts
2191         frameHandler0->waitForFrameCount(1);
2192         frameHandler1->waitForFrameCount(1);
2193 
2194         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
2195 
2196         // Wait a bit, then ensure both clients get at least the required minimum number of frames
2197         sleep(5);
2198         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
2199         unsigned framesReceived0 = 0, framesReceived1 = 0;
2200         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2201         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2202         framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
2203         framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
2204         nsecs_t runTime = end - firstFrame;
2205         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
2206         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
2207         LOG(INFO) << "Measured camera rate "
2208                   << std::scientific << framesPerSecond0 << " fps and "
2209                   << framesPerSecond1 << " fps";
2210         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
2211         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
2212 
2213         // Shutdown one client
2214         frameHandler0->shutdown();
2215 
2216         // Read frame counters again
2217         frameHandler0->getFramesCounters(&framesReceived0, nullptr);
2218         frameHandler1->getFramesCounters(&framesReceived1, nullptr);
2219 
2220         // Wait a bit again
2221         sleep(5);
2222         unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
2223         frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
2224         frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
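        // The stopped client must not receive any more frames, while the other client
        // continues to receive them.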
2225         EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
2226         EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
2227 
2228         // Shutdown another
2229         frameHandler1->shutdown();
2230 
2231         // Explicitly release the camera
2232         pEnumerator->closeCamera(pCam0);
2233         pEnumerator->closeCamera(pCam1);
2234         activeCameras.clear();
2235     }
2236 }
2237 
2238 
2239 /*
2240  * LogicalCameraMetadata:
2241  * Opens each logical camera reported by the enumerator and validates its metadata
2242  * by checking its capability and locating the supporting physical camera device
2243  * identifiers.
2244  */
2245 TEST_P(EvsHidlTest, LogicalCameraMetadata) {
2246     LOG(INFO) << "Starting LogicalCameraMetadata test";
2247 
2248     // Get the camera list
2249     loadCameraList();
2250 
2251     // Check the metadata of each reported camera
2252     for (auto&& cam: cameraInfo) {
2253         bool isLogicalCam = false;
2254         auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
2255         if (isLogicalCam) {
2256             ASSERT_GE(devices.size(), 1) <<
2257                 "Logical camera device must have at least one physical camera device ID in its metadata.";
2258         }
2259     }
2260 }
2261 
2262 
2263 /*
2264  * CameraStreamExternalBuffering:
2265  * This is the same as CameraStreamBuffering, except that the frame buffers are
2266  * allocated by the test client and then imported by the EVS framework.
2267  */
2268 TEST_P(EvsHidlTest, CameraStreamExternalBuffering) {
2269     LOG(INFO) << "Starting CameraStreamExternalBuffering test";
2270 
2271     // Arbitrary constant (should be > 1 and not too big)
2272     static const unsigned int kBuffersToHold = 3;
2273 
2274     // Get the camera list
2275     loadCameraList();
2276 
2277     // Acquire the graphics buffer allocator
2278     android::GraphicBufferAllocator& alloc(android::GraphicBufferAllocator::get());
2279     const auto usage =
2280             GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_OFTEN;
2281 
2282     // Test each reported camera
2283     for (auto&& cam : cameraInfo) {
2284         // Read a target resolution from the metadata
2285         Stream targetCfg =
2286             getFirstStreamConfiguration(reinterpret_cast<camera_metadata_t*>(cam.metadata.data()));
2287         ASSERT_GT(targetCfg.width, 0);
2288         ASSERT_GT(targetCfg.height, 0);
2289 
2290         // Allocate buffers to use
2291         hidl_vec<BufferDesc> buffers;
2292         buffers.resize(kBuffersToHold);
2293         for (auto i = 0; i < kBuffersToHold; ++i) {
2294             unsigned pixelsPerLine;
2295             buffer_handle_t memHandle = nullptr;
2296             android::status_t result =
2297                     alloc.allocate(targetCfg.width, targetCfg.height,
2298                                    (android::PixelFormat)targetCfg.format,
2299                                    /* layerCount = */ 1, usage, &memHandle, &pixelsPerLine,
2300                                    /* graphicBufferId = */ 0,
2301                                    /* requestorName = */ "CameraStreamExternalBufferingTest");
2302             if (result != android::NO_ERROR) {
2303                 LOG(ERROR) << __FUNCTION__ << " failed to allocate memory.";
2304                 // Release previously allocated buffers
2305                 for (auto j = 0; j < i; j++) {
2306                     alloc.free(buffers[j].buffer.nativeHandle);
2307                 }
2308                 return;
2309             } else {
2310                 BufferDesc buf;
2311                 AHardwareBuffer_Desc* pDesc =
2312                         reinterpret_cast<AHardwareBuffer_Desc*>(&buf.buffer.description);
2313                 pDesc->width = targetCfg.width;
2314                 pDesc->height = targetCfg.height;
2315                 pDesc->layers = 1;
2316                 pDesc->format = static_cast<uint32_t>(targetCfg.format);
2317                 pDesc->usage = usage;
2318                 pDesc->stride = pixelsPerLine;
2319                 buf.buffer.nativeHandle = memHandle;
2320                 buf.bufferId = i;  // Unique number to identify this buffer
2321                 buffers[i] = buf;
2322             }
2323         }
2324 
2325         bool isLogicalCam = false;
2326         getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
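        // Logical cameras are expected to reject imported external buffers;
        // isLogicalCam selects the expected result below.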
2327 
2328         sp<IEvsCamera_1_1> pCam = pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg);
2329         ASSERT_NE(pCam, nullptr);
2330 
2331         // Store a camera handle for a clean-up
2332         activeCameras.push_back(pCam);
2333 
2334         // Request to import buffers
2335         EvsResult result = EvsResult::OK;
2336         int delta = 0;
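        // Request the camera to import the externally allocated buffers; the callback
        // reports a status and a buffer-count delta, which must cover at least the
        // buffers being imported.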
2337         pCam->importExternalBuffers(buffers,
2338                                     [&] (auto _result, auto _delta) {
2339                                         result = _result;
2340                                         delta = _delta;
2341                                     });
2342         if (isLogicalCam) {
2343             EXPECT_EQ(result, EvsResult::UNDERLYING_SERVICE_ERROR);
2344             continue;
2345         }
2346 
2347         EXPECT_EQ(result, EvsResult::OK);
2348         EXPECT_GE(delta, kBuffersToHold);
2349 
2350         // Set up a frame receiver object which will fire up its own thread.
2351         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
2352                                                          nullptr,
2353                                                          FrameHandler::eNoAutoReturn);
2354 
2355         // Start the camera's video stream
2356         bool startResult = frameHandler->startStream();
2357         ASSERT_TRUE(startResult);
2358 
2359         // Check that the video stream stalls once we've received at least the number of
2360         // buffers we imported, since we told the frameHandler not to return them.
2361         sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
2362         unsigned framesReceived = 0;
2363         frameHandler->getFramesCounters(&framesReceived, nullptr);
2364         ASSERT_LE(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
2365 
2366 
2367         // Give back one buffer
2368         bool didReturnBuffer = frameHandler->returnHeldBuffer();
2369         EXPECT_TRUE(didReturnBuffer);
2370 
2371         // Once we return a buffer, it shouldn't take more than 1/10 second to get a new one
2372         // filled since we require 10fps minimum -- but give a 10% allowance just in case.
2373         unsigned framesReceivedAfter = 0;
2374         usleep(110 * kMillisecondsToMicroseconds);
2375         frameHandler->getFramesCounters(&framesReceivedAfter, nullptr);
2376         EXPECT_EQ(framesReceived + 1, framesReceivedAfter) << "Stream should've resumed";
2377 
2378         // Even when the camera pointer goes out of scope, the FrameHandler object will
2379         // keep the stream alive unless we tell it to shutdown.
2380         // Also note that the FrameHandle and the Camera have a mutual circular reference, so
2381         // we have to break that cycle in order for either of them to get cleaned up.
2382         frameHandler->shutdown();
2383 
2384         // Explicitly release the camera
2385         pEnumerator->closeCamera(pCam);
2386         activeCameras.clear();
2387         // Release buffers
2388         for (auto& b : buffers) {
2389             alloc.free(b.buffer.nativeHandle);
2390         }
2391         buffers.resize(0);
2392     }
2393 }
2394 
2395 
2396 /*
2397  * UltrasonicsArrayOpenClean:
2398  * Opens each ultrasonics array reported by the enumerator and then explicitly closes it via a
2399  * call to closeUltrasonicsArray. Then repeats the test to ensure all ultrasonics arrays
2400  * can be reopened.
2401  */
2402 TEST_P(EvsHidlTest, UltrasonicsArrayOpenClean) {
2403     LOG(INFO) << "Starting UltrasonicsArrayOpenClean test";
2404 
2405     // Get the ultrasonics array list
2406     loadUltrasonicsArrayList();
2407 
2408     // Open and close each ultrasonics array twice
2409     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2410         for (int pass = 0; pass < 2; pass++) {
2411             sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2412                     pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2413             ASSERT_NE(pUltrasonicsArray, nullptr);
2414 
2415             // Verify that this ultrasonics array self-identifies correctly
2416             pUltrasonicsArray->getUltrasonicArrayInfo([&ultraInfo](UltrasonicsArrayDesc desc) {
2417                 LOG(DEBUG) << "Found ultrasonics array " << ultraInfo.ultrasonicsArrayId;
2418                 EXPECT_EQ(ultraInfo.ultrasonicsArrayId, desc.ultrasonicsArrayId);
2419             });
2420 
2421             // Explicitly close the ultrasonics array so resources are released right away
2422             pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2423         }
2424     }
2425 }
2426 
2427 
2428 // Starts a stream and verifies all data received is valid.
2429 TEST_P(EvsHidlTest, UltrasonicsVerifyStreamData) {
2430     LOG(INFO) << "Starting UltrasonicsVerifyStreamData";
2431 
2432     // Get the ultrasonics array list
2433     loadUltrasonicsArrayList();
2434 
2435     // For each ultrasonics array.
2436     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2437         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2438 
2439         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2440                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2441         ASSERT_NE(pUltrasonicsArray, nullptr);
2442 
2443         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2444 
2445         // Start stream.
2446         EvsResult result = pUltrasonicsArray->startStream(frameHandler);
2447         ASSERT_EQ(result, EvsResult::OK);
2448 
2449         // Wait 5 seconds to receive frames.
2450         sleep(5);
2451 
2452         // Stop stream.
2453         pUltrasonicsArray->stopStream();
2454 
2455         EXPECT_GT(frameHandler->getReceiveFramesCount(), 0);
2456         EXPECT_TRUE(frameHandler->areAllFramesValid());
2457 
2458         // Explicitly close the ultrasonics array so resources are released right away
2459         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2460     }
2461 }
2462 
2463 
2464 // Sets the number of frames in flight before and after the stream starts and verifies success.
2465 TEST_P(EvsHidlTest, UltrasonicsSetFramesInFlight) {
2466     LOG(INFO) << "Starting UltrasonicsSetFramesInFlight";
2467 
2468     // Get the ultrasonics array list
2469     loadUltrasonicsArrayList();
2470 
2471     // For each ultrasonics array.
2472     for (auto&& ultraInfo : ultrasonicsArraysInfo) {
2473         LOG(DEBUG) << "Testing ultrasonics array: " << ultraInfo.ultrasonicsArrayId;
2474 
2475         sp<IEvsUltrasonicsArray> pUltrasonicsArray =
2476                 pEnumerator->openUltrasonicsArray(ultraInfo.ultrasonicsArrayId);
2477         ASSERT_NE(pUltrasonicsArray, nullptr);
2478 
2479         EvsResult result = pUltrasonicsArray->setMaxFramesInFlight(10);
2480         EXPECT_EQ(result, EvsResult::OK);
2481 
2482         sp<FrameHandlerUltrasonics> frameHandler = new FrameHandlerUltrasonics(pUltrasonicsArray);
2483 
2484         // Start stream.
2485         result = pUltrasonicsArray->startStream(frameHandler);
2486         ASSERT_EQ(result, EvsResult::OK);
2487 
2488         result = pUltrasonicsArray->setMaxFramesInFlight(5);
2489         EXPECT_EQ(result, EvsResult::OK);
2490 
2491         // Stop stream.
2492         pUltrasonicsArray->stopStream();
2493 
2494         // Explicitly close the ultrasonics array so resources are released right away
2495         pEnumerator->closeUltrasonicsArray(pUltrasonicsArray);
2496     }
2497 }
2498 
2499 GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(EvsHidlTest);
2500 INSTANTIATE_TEST_SUITE_P(
2501     PerInstance,
2502     EvsHidlTest,
2503     testing::ValuesIn(android::hardware::getAllHalInstanceNames(IEvsEnumerator::descriptor)),
2504     android::hardware::PrintInstanceNameToString);
2505 
2506