/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VirtualCamera.h"
#include "HalCamera.h"
#include "Enumerator.h"

#include <android/hardware_buffer.h>
#include <android-base/file.h>
#include <android-base/logging.h>
#include <android-base/stringprintf.h>

using ::android::base::StringAppendF;
using ::android::base::StringPrintf;
using ::android::base::WriteStringToFd;
using ::android::hardware::automotive::evs::V1_0::DisplayState;


namespace android {
namespace automotive {
namespace evs {
namespace V1_1 {
namespace implementation {

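// Constructs a virtual camera backed by one or more hardware cameras; a single
// backing camera represents a physical device, while more than one represents
// a logical camera device.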
VirtualCamera::VirtualCamera(const std::vector<sp<HalCamera>>& halCameras) :
    mStreamState(STOPPED) {
    for (auto&& cam : halCameras) {
        mHalCamera.try_emplace(cam->getId(), cam);
    }
}


VirtualCamera::~VirtualCamera() {
    shutdown();
}


void VirtualCamera::shutdown() {
    // In normal operation, the stream should already be stopped by the time we get here
    if (mStreamState == RUNNING) {
        // Note that if we hit this case, no terminating frame will be sent to the client,
        // but they're probably already dead anyway.
        LOG(WARNING) << "Virtual camera being shut down while stream is running";

        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake the capture thread so it can terminate.
        mFramesReadySignal.notify_all();

        // Return buffers held by this client
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera == nullptr) {
                LOG(WARNING) << "Camera device " << key << " is not alive.";
                continue;
            }

            if (mFramesHeld[key].size() > 0) {
                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";

                // Return to the underlying hardware camera any buffers the client was holding
                for (auto&& heldBuffer : mFramesHeld[key]) {
                    // Tell our parent that we're done with this buffer
                    pHwCamera->doneWithFrame(heldBuffer);
                }
                mFramesHeld[key].clear();
            }

            // Retire as a primary client
            pHwCamera->unsetMaster(this);

            // Give the underlying hardware camera the heads up that it might be time to stop
            pHwCamera->clientStreamEnding(this);

            // Retire from the participating HW camera's client list
            pHwCamera->disownVirtualCamera(this);
        }

        // Join the capture thread
        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }

        mFramesHeld.clear();

        // Drop our references to the associated hardware cameras
        mHalCamera.clear();
    }
}


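// Returns the backing hardware cameras that are still alive; expired entries
// are silently skipped.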
std::vector<sp<HalCamera>> VirtualCamera::getHalCameras() {
    std::vector<sp<HalCamera>> cameras;
    for (auto&& [key, cam] : mHalCamera) {
        auto ptr = cam.promote();
        if (ptr != nullptr) {
            cameras.emplace_back(ptr);
        }
    }

    return cameras;
}


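// Called by a backing HalCamera to hand a new frame to this client.  The frame is
// either queued for delivery or refused (and reported as a drop to v1.1 clients)
// when the client already holds its maximum number of buffers.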
bool VirtualCamera::deliverFrame(const BufferDesc_1_1& bufDesc) {
    if (mStreamState == STOPPED) {
        // A stopped stream gets no frames
        LOG(ERROR) << "A stopped stream should not get any frames";
        return false;
    } else if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
        // Indicate that we declined to send the frame to the client because they're at quota
        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
                  << " of " << mFramesAllowed;

        if (mStream_1_1 != nullptr) {
            // Report a frame drop to v1.1 client.
            EvsEventDesc event;
            event.deviceId = bufDesc.deviceId;
            event.aType = EvsEventType::FRAME_DROPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream event";
            }
        }

        // Marks that a new frame has arrived though it was not accepted
        {
            std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
            mSourceCameras.erase(bufDesc.deviceId);
            mFramesReadySignal.notify_all();
        }

        return false;
    } else {
        // Keep a record of this frame so we can clean up if we have to in case of client death
        mFramesHeld[bufDesc.deviceId].emplace_back(bufDesc);

        // v1.0 client uses an old frame-delivery mechanism.
        if (mStream_1_1 == nullptr) {
            // Forward a frame to v1.0 client
            BufferDesc_1_0 frame_1_0 = {};
            const AHardwareBuffer_Desc* pDesc =
                reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
            frame_1_0.width = pDesc->width;
            frame_1_0.height = pDesc->height;
            frame_1_0.format = pDesc->format;
            frame_1_0.usage = pDesc->usage;
            frame_1_0.stride = pDesc->stride;
            frame_1_0.memHandle = bufDesc.buffer.nativeHandle;
            frame_1_0.pixelSize = bufDesc.pixelSize;
            frame_1_0.bufferId = bufDesc.bufferId;

            mStream->deliverFrame(frame_1_0);
        } else if (mCaptureThread.joinable()) {
            // Keep forwarding frames as long as a capture thread is alive
            if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
                // Notify a new frame receipt
                std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
                mSourceCameras.erase(bufDesc.deviceId);
                mFramesReadySignal.notify_all();
            }
        }

        return true;
    }
}


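// Processes a stream event coming from a backing HalCamera.  Events are relayed to
// a v1.1 client; for a v1.0 client, only the end-of-stream condition is emulated
// with a null frame and everything else is consumed here.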
bool VirtualCamera::notify(const EvsEventDesc& event) {
    switch(event.aType) {
        case EvsEventType::STREAM_STOPPED:
            if (mStreamState != STOPPING) {
                // Warn if we got an unexpected stream termination
                LOG(WARNING) << "Stream unexpectedly stopped, current status "
                             << mStreamState;

                // Clean up the resource and forward an event to the client
                stopVideoStream();

                // This event is handled properly.
                return true;
            }

            if (mStream_1_1 == nullptr) {
                // Send a null frame instead, for v1.0 client
                auto result = mStream->deliverFrame({});
                if (!result.isOk()) {
                    LOG(ERROR) << "Error delivering end of stream marker";
                }
            }
            break;

        // v1.0 client will ignore all other events.
        case EvsEventType::PARAMETER_CHANGED:
            LOG(DEBUG) << "A camera parameter " << event.payload[0]
                       << " is set to " << event.payload[1];
            break;

        case EvsEventType::MASTER_RELEASED:
            LOG(DEBUG) << "The primary client has been released";
            break;

        default:
            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
            break;
    }

    if (mStream_1_1 != nullptr) {
        // Forward a received event to the v1.1 client
        auto result = mStream_1_1->notify(event);
        if (!result.isOk()) {
            LOG(ERROR) << "Failed to forward an event";
            return false;
        }
    }

    return true;
}


// Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
Return<void> VirtualCamera::getCameraInfo(getCameraInfo_cb info_cb) {
    // Straight pass through to hardware layer
    if (mHalCamera.size() > 1) {
        LOG(ERROR) << __FUNCTION__
                   << " must NOT be called on a logical camera object.";
        info_cb({});
        return Void();
    }

    auto halCamera = mHalCamera.begin()->second.promote();
    if (halCamera != nullptr) {
        return halCamera->getHwCamera()->getCameraInfo(info_cb);
    } else {
        info_cb({});
        return Void();
    }
}


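// Adjusts the number of buffers this client may hold concurrently.  The change is
// applied to every backing hardware camera and rolled back if any of them refuses.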
Return<EvsResult> VirtualCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    // How many buffers are we trying to add (or remove if negative)
    int bufferCountChange = bufferCount - mFramesAllowed;

    // Ask our parent for more buffers
    bool result = true;
    std::vector<sp<HalCamera>> changedCameras;
    for (auto&& [key, hwCamera] : mHalCamera) {
        auto pHwCam = hwCamera.promote();
        if (pHwCam == nullptr) {
            continue;
        }

        result = pHwCam->changeFramesInFlight(bufferCountChange);
        if (!result) {
            LOG(ERROR) << key
                       << ": Failed to change buffer count by " << bufferCountChange
                       << " to " << bufferCount;
            break;
        }

        changedCameras.emplace_back(pHwCam);
    }

    // Update our notion of how many frames we're allowed
    mFramesAllowed = bufferCount;

    if (!result) {
        // Roll back changes because we failed to update all cameras
        for (auto&& hwCamera : changedCameras) {
            LOG(WARNING) << "Rolling back a change on " << hwCamera->getId();
            hwCamera->changeFramesInFlight(-bufferCountChange);
        }

        // Restore the original buffer count
        mFramesAllowed -= bufferCountChange;
        return EvsResult::BUFFER_NOT_AVAILABLE;
    } else {
        return EvsResult::OK;
    }
}


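// Starts video delivery from every backing hardware camera and, for v1.1 clients,
// spawns a capture thread that collects one frame per camera before forwarding the
// batch via deliverFrame_1_1().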
Return<EvsResult> VirtualCamera::startVideoStream(const ::android::sp<IEvsCameraStream_1_0>& stream) {
    // We only support a single stream at a time
    if (mStreamState != STOPPED) {
        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
        return EvsResult::STREAM_ALREADY_RUNNING;
    }

    // Validate our held frame count is starting out at zero as we expect
    assert(mFramesHeld.size() == 0);

    // Record the user's callback for use when we have a frame ready
    mStream = stream;
    mStream_1_1 = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
    if (mStream_1_1 == nullptr) {
        LOG(INFO) << "Start video stream for v1.0 client.";
    } else {
        LOG(INFO) << "Start video stream for v1.1 client.";
    }

    mStreamState = RUNNING;

    // Tell the underlying camera hardware that we want to stream
    auto iter = mHalCamera.begin();
    while (iter != mHalCamera.end()) {
        auto pHwCamera = iter->second.promote();
        if (pHwCamera == nullptr) {
            LOG(ERROR) << "Failed to start a video stream on " << iter->first;
            // Skip the dead camera and advance the iterator to avoid spinning on it forever.
            ++iter;
            continue;
        }

        LOG(INFO) << __FUNCTION__
                  << " starts a video stream on " << iter->first;
        Return<EvsResult> result = pHwCamera->clientStreamStarting();
        if ((!result.isOk()) || (result != EvsResult::OK)) {
            // If we failed to start the underlying stream, then we're not actually running
            mStream = mStream_1_1 = nullptr;
            mStreamState = STOPPED;

            // Request to stop streams started by this client.
            auto rb = mHalCamera.begin();
            while (rb != iter) {
                auto ptr = rb->second.promote();
                if (ptr != nullptr) {
                    ptr->clientStreamEnding(this);
                }
                ++rb;
            }
            return EvsResult::UNDERLYING_SERVICE_ERROR;
        }
        ++iter;
    }

    // Start a thread that waits on the fence and forwards collected frames
    // to the v1.1 client.
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (mStream_1_1 != nullptr && pHwCamera != nullptr) {
        mCaptureThread = std::thread([this]() {
            // TODO(b/145466570): With a proper camera hang handler, we may want
            // to reduce an amount of timeout.
            constexpr auto kFrameTimeout = 5s; // timeout in seconds.
            int64_t lastFrameTimestamp = -1;
            while (mStreamState == RUNNING) {
                unsigned count = 0;
                for (auto&& [key, hwCamera] : mHalCamera) {
                    auto pHwCamera = hwCamera.promote();
                    if (pHwCamera == nullptr) {
                        LOG(WARNING) << "Invalid camera " << key << " is ignored.";
                        continue;
                    }

                    pHwCamera->requestNewFrame(this, lastFrameTimestamp);
                    {
                        std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
                        mSourceCameras.emplace(pHwCamera->getId());
                    }
                    ++count;
                }

                std::unique_lock<std::mutex> lock(mFrameDeliveryMutex);
                if (!mFramesReadySignal.wait_for(lock,
                                                 kFrameTimeout,
                                                 [this]() REQUIRES(mFrameDeliveryMutex) {
                                                     // Stops waiting if
                                                     // 1) we've requested to stop capturing
                                                     //    new frames
                                                     // 2) or, we've got all frames
                                                     return mStreamState != RUNNING ||
                                                            mSourceCameras.empty();
                                                 })) {
                    // This happens when either a new frame does not arrive
                    // before a timer expires or we're requested to stop
                    // capturing frames.
                    break;
                } else if (mStreamState == RUNNING) {
                    // Fetch frames and forward to the client
                    if (mFramesHeld.size() > 0 && mStream_1_1 != nullptr) {
                        // Pass this buffer through to our client
                        hardware::hidl_vec<BufferDesc_1_1> frames;
                        frames.resize(count);
                        unsigned i = 0;
                        for (auto&& [key, hwCamera] : mHalCamera) {
                            auto pHwCamera = hwCamera.promote();
                            if (pHwCamera == nullptr) {
                                continue;
                            }
                            if (mFramesHeld[key].size() == 0) {
                                continue;
                            }

                            const auto frame = mFramesHeld[key].back();
                            if (frame.timestamp > lastFrameTimestamp) {
                                lastFrameTimestamp = frame.timestamp;
                            }
                            frames[i++] = frame;
                        }

                        auto ret = mStream_1_1->deliverFrame_1_1(frames);
                        if (!ret.isOk()) {
                            LOG(WARNING) << "Failed to forward frames";
                        }
                    }
                } else if (mStreamState != RUNNING) {
                    LOG(DEBUG) << "Requested to stop capturing frames";
                }
            }

            LOG(DEBUG) << "Exiting a capture thread";
        });
    }

    // TODO(changyeon):
    // Detect and exit if we encounter a stalled stream or unresponsive driver?
    // Consider using a timer and watching for frame arrival?

    return EvsResult::OK;
}


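// Returns a single buffer to the sole backing hardware camera; valid only for
// clients of a physical camera device.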
Return<void> VirtualCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
    if (buffer.memHandle == nullptr) {
        LOG(ERROR) << "Ignoring doneWithFrame called with invalid handle";
    } else if (mFramesHeld.size() > 1) {
        LOG(ERROR) << __FUNCTION__
                   << " must NOT be called on a logical camera object.";
    } else {
        // Find this buffer in our "held" list
        auto& frameQueue = mFramesHeld.begin()->second;
        auto it = frameQueue.begin();
        while (it != frameQueue.end()) {
            if (it->bufferId == buffer.bufferId) {
                // found it!
                break;
            }
            ++it;
        }
        if (it == frameQueue.end()) {
            // We should always find the frame in our "held" list
            LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
                       << buffer.bufferId;
        } else {
            // Take this frame out of our "held" list
            frameQueue.erase(it);

            // Tell our parent that we're done with this buffer
            auto pHwCamera = mHalCamera.begin()->second.promote();
            if (pHwCamera != nullptr) {
                pHwCamera->doneWithFrame(buffer);
            } else {
                LOG(WARNING) << "Possible memory leak because a device "
                             << mHalCamera.begin()->first
                             << " is not valid.";
            }
        }
    }

    return Void();
}


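// Stops the video stream, delivers the end-of-stream event (or null frame for v1.0
// clients), notifies the backing hardware cameras, and joins the capture thread if
// one is running.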
Return<void> VirtualCamera::stopVideoStream() {
    if (mStreamState == RUNNING) {
        // Tell the frame delivery pipeline we don't want any more frames
        mStreamState = STOPPING;

        // Wake the capture thread; it will terminate.
        mFramesReadySignal.notify_all();

        // Deliver the stream-ending notification
        if (mStream_1_1 != nullptr) {
            // v1.1 client waits for a stream stopped event
            EvsEventDesc event;
            event.aType = EvsEventType::STREAM_STOPPED;
            auto result = mStream_1_1->notify(event);
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream event";
            }
        } else {
            // v1.0 client expects a null frame at the end of the stream
            auto result = mStream->deliverFrame({});
            if (!result.isOk()) {
                LOG(ERROR) << "Error delivering end of stream marker";
            }
        }

        // Since we are single threaded, no frame can be delivered while this function is running,
        // so we can go directly to the STOPPED state here on the server.
        // Note, however, that there still might be frames already queued that the client will see
        // after returning from the client side of this call.
        mStreamState = STOPPED;

        // Give the underlying hardware camera the heads up that it might be time to stop
        for (auto&& [key, hwCamera] : mHalCamera) {
            auto pHwCamera = hwCamera.promote();
            if (pHwCamera != nullptr) {
                pHwCamera->clientStreamEnding(this);
            }
        }

        // Signal a condition to unblock the capture thread and then join it
        {
            std::lock_guard<std::mutex> lock(mFrameDeliveryMutex);
            mSourceCameras.clear();
            mFramesReadySignal.notify_all();
        }

        if (mCaptureThread.joinable()) {
            mCaptureThread.join();
        }
    }

    return Void();
}


Return<int32_t> VirtualCamera::getExtendedInfo(uint32_t opaqueIdentifier) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return 0;
    }

    // Pass straight through to the hardware device
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier);
    } else {
        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
        return 0;
    }
}


Return<EvsResult> VirtualCamera::setExtendedInfo(uint32_t opaqueIdentifier, int32_t opaqueValue) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    // Pass straight through to the hardware device
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
    } else {
        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
        return EvsResult::INVALID_ARG;
    }
}


// Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
Return<void> VirtualCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb info_cb) {
    if (mHalCamera.size() > 1) {
        // Logical camera description is stored in VirtualCamera object.
        info_cb(*mDesc);
        return Void();
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        // Return an empty list
        info_cb({});
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getCameraInfo_1_1(info_cb);
    } else {
        // Return an empty list
        info_cb({});
        return Void();
    }
}


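// Returns the descriptor of a physical camera that backs this (possibly logical)
// device, or an empty descriptor if the requested device does not back it.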
Return<void> VirtualCamera::getPhysicalCameraInfo(const hidl_string& deviceId,
                                                  getPhysicalCameraInfo_cb info_cb) {
    auto device = mHalCamera.find(deviceId);
    if (device != mHalCamera.end()) {
        // Straight pass through to hardware layer
        auto pHwCamera = device->second.promote();
        if (pHwCamera != nullptr) {
            auto hwCamera_1_1 =
                IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
            if (hwCamera_1_1 != nullptr) {
                return hwCamera_1_1->getCameraInfo_1_1(info_cb);
            } else {
                LOG(WARNING) << "Failed to promote HW camera to v1.1.";
            }
        } else {
            LOG(WARNING) << "Camera device " << deviceId << " is not alive.";
        }
    } else {
        LOG(WARNING) << " Requested device " << deviceId
                     << " does not back this device.";
    }

    // Return an empty list
    info_cb({});
    return Void();
}


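// Returns a batch of buffers to their respective backing hardware cameras.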
Return<EvsResult> VirtualCamera::doneWithFrame_1_1(
        const hardware::hidl_vec<BufferDesc_1_1>& buffers) {

    for (auto&& buffer : buffers) {
        if (buffer.buffer.nativeHandle == nullptr) {
            LOG(WARNING) << "Ignoring doneWithFrame called with invalid handle";
        } else {
            // Find this buffer in our "held" list
            auto it = mFramesHeld[buffer.deviceId].begin();
            while (it != mFramesHeld[buffer.deviceId].end()) {
                if (it->bufferId == buffer.bufferId) {
                    // found it!
                    break;
                }
                ++it;
            }
            if (it == mFramesHeld[buffer.deviceId].end()) {
                // We should always find the frame in our "held" list
                LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
                           << buffer.bufferId;
            } else {
                // Take this frame out of our "held" list
                mFramesHeld[buffer.deviceId].erase(it);

                // Tell our parent that we're done with this buffer
                auto pHwCamera = mHalCamera[buffer.deviceId].promote();
                if (pHwCamera != nullptr) {
                    pHwCamera->doneWithFrame(buffer);
                } else {
                    LOG(WARNING) << "Possible memory leak; "
                                 << buffer.deviceId << " is not valid.";
                }
            }
        }
    }

    return EvsResult::OK;
}


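// Requests the primary client role on the backing hardware camera so that this
// client may change camera parameters.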
Return<EvsResult> VirtualCamera::setMaster() {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->setMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


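// Takes over the primary client role by presenting a valid, opened display handle
// as proof of display ownership.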
Return<EvsResult> VirtualCamera::forceMaster(const sp<IEvsDisplay_1_0>& display) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    if (display == nullptr) {
        LOG(ERROR) << __FUNCTION__
                   << ": Passed display is invalid";
        return EvsResult::INVALID_ARG;
    }

    DisplayState state = display->getDisplayState();
    if (state == DisplayState::NOT_OPEN ||
        state == DisplayState::DEAD ||
        state >= DisplayState::NUM_STATES) {
        LOG(ERROR) << __FUNCTION__
                   << ": Passed display is in invalid state";
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->forceMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


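// Gives up the primary client role on the backing hardware camera.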
Return<EvsResult> VirtualCamera::unsetMaster() {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera != nullptr) {
        return pHwCamera->unsetMaster(this);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    }
}


Return<void> VirtualCamera::getParameterList(getParameterList_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;

        // Return an empty list
        _hidl_cb({});
        return Void();
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";

        // Return an empty list
        _hidl_cb({});
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getParameterList(_hidl_cb);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
                     << " does not support parameter programming.";

        // Return an empty list
        _hidl_cb({});
        return Void();
    }
}


Return<void> VirtualCamera::getIntParameterRange(CameraParam id,
                                                 getIntParameterRange_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }

    // Straight pass through to hardware layer
    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }

    auto hwCamera_1_1 =
        IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
    if (hwCamera_1_1 != nullptr) {
        return hwCamera_1_1->getIntParameterRange(id, _hidl_cb);
    } else {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
                     << " does not support parameter programming.";

        // Return [0, 0, 0]
        _hidl_cb(0, 0, 0);
        return Void();
    }
}


Return<void> VirtualCamera::setIntParameter(CameraParam id,
                                            int32_t value,
                                            setIntParameter_cb _hidl_cb) {
    hardware::hidl_vec<int32_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
        return Void();
    }

    status = pHwCamera->setParameter(this, id, value);

    values.resize(1);
    values[0] = value;
    _hidl_cb(status, values);

    return Void();
}


Return<void> VirtualCamera::getIntParameter(CameraParam id,
                                            getIntParameter_cb _hidl_cb) {
    hardware::hidl_vec<int32_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
        return Void();
    }

    int32_t value;
    status = pHwCamera->getParameter(id, value);

    values.resize(1);
    values[0] = value;
    _hidl_cb(status, values);

    return Void();
}


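// Passes an opaque vendor-specific value through to the backing v1.1 hardware camera.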
Return<EvsResult> VirtualCamera::setExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                     const hidl_vec<uint8_t>& opaqueValue) {
    hardware::hidl_vec<int32_t> values;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        return EvsResult::INVALID_ARG;
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        return EvsResult::INVALID_ARG;
    } else {
        auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
        if (hwCamera != nullptr) {
            return hwCamera->setExtendedInfo_1_1(opaqueIdentifier, opaqueValue);
        } else {
            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
            return EvsResult::INVALID_ARG;
        }
    }
}


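// Reads an opaque vendor-specific value from the backing v1.1 hardware camera.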
Return<void> VirtualCamera::getExtendedInfo_1_1(uint32_t opaqueIdentifier,
                                                getExtendedInfo_1_1_cb _hidl_cb) {
    hardware::hidl_vec<uint8_t> values;
    EvsResult status = EvsResult::INVALID_ARG;
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(status, values);
        return Void();
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(status, values);
    } else {
        auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
        if (hwCamera != nullptr) {
            hwCamera->getExtendedInfo_1_1(opaqueIdentifier, _hidl_cb);
        } else {
            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
            _hidl_cb(status, values);
        }
    }

    return Void();
}


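// Registers externally allocated graphics buffers with the backing hardware camera
// and raises this client's frame quota by the number of buffers actually accepted.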
Return<void>
VirtualCamera::importExternalBuffers(const hidl_vec<BufferDesc_1_1>& buffers,
                                     importExternalBuffers_cb _hidl_cb) {
    if (mHalCamera.size() > 1) {
        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    auto pHwCamera = mHalCamera.begin()->second.promote();
    if (pHwCamera == nullptr) {
        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    int delta = 0;
    if (!pHwCamera->changeFramesInFlight(buffers, &delta)) {
        LOG(ERROR) << "Failed to add external capture buffers.";
        _hidl_cb(EvsResult::UNDERLYING_SERVICE_ERROR, 0);
        return {};
    }

    mFramesAllowed += delta;
    _hidl_cb(EvsResult::OK, delta);
    return {};
}


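// Generates a human-readable summary of this camera's state for debugging/dump output.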
std::string VirtualCamera::toString(const char* indent) const {
    std::string buffer;
    StringAppendF(&buffer, "%sLogical camera device: %s\n"
                           "%sFramesAllowed: %u\n"
                           "%sFrames in use:\n",
                           indent, mHalCamera.size() > 1 ? "T" : "F",
                           indent, mFramesAllowed,
                           indent);

    std::string next_indent(indent);
    next_indent += "\t";
    for (auto&& [id, queue] : mFramesHeld) {
        StringAppendF(&buffer, "%s%s: %d\n",
                      next_indent.c_str(),
                      id.c_str(),
                      static_cast<int>(queue.size()));
    }
    StringAppendF(&buffer, "%sCurrent stream state: %d\n",
                  indent, mStreamState);

    return buffer;
}


} // namespace implementation
} // namespace V1_1
} // namespace evs
} // namespace automotive
} // namespace android