1 /*
2 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are
6 * met:
7 * * Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * * Redistributions in binary form must reproduce the above
10 * copyright notice, this list of conditions and the following
11 * disclaimer in the documentation and/or other materials provided
12 * with the distribution.
13 * * Neither the name of The Linux Foundation nor the names of its
14 * contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 */
29
30 #define __STDC_FORMAT_MACROS
31
32 #include <ctype.h>
33 #include <time.h>
34 #include <drm/drm_fourcc.h>
35 #include <drm_lib_loader.h>
36 #include <drm_master.h>
37 #include <drm_res_mgr.h>
38 #include <fcntl.h>
39 #include <inttypes.h>
40 #include <linux/fb.h>
41 #include <math.h>
42 #include <stdio.h>
43 #include <string.h>
44 #include <sys/ioctl.h>
45 #include <sys/stat.h>
46 #include <sys/types.h>
47 #include <unistd.h>
48 #include <utils/constants.h>
49 #include <utils/debug.h>
50 #include <utils/formats.h>
51 #include <utils/sys.h>
52 #include <drm/sde_drm.h>
53 #include <private/color_params.h>
54 #include <utils/rect.h>
55 #include <utils/utils.h>
56 #include <utils/fence.h>
57
58 #include <sstream>
59 #include <ctime>
60 #include <algorithm>
61 #include <string>
62 #include <unordered_map>
63 #include <utility>
64 #include <vector>
65 #include <limits>
66
67 #include "hw_device_drm.h"
68 #include "hw_info_interface.h"
69
70 #define __CLASS__ "HWDeviceDRM"
71
72 #ifndef DRM_FORMAT_MOD_QCOM_COMPRESSED
73 #define DRM_FORMAT_MOD_QCOM_COMPRESSED fourcc_mod_code(QCOM, 1)
74 #endif
75 #ifndef DRM_FORMAT_MOD_QCOM_DX
76 #define DRM_FORMAT_MOD_QCOM_DX fourcc_mod_code(QCOM, 0x2)
77 #endif
78 #ifndef DRM_FORMAT_MOD_QCOM_TIGHT
79 #define DRM_FORMAT_MOD_QCOM_TIGHT fourcc_mod_code(QCOM, 0x4)
80 #endif
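// These fallback definitions are presumably here so the file keeps building against older kernel
// headers that do not export the Qualcomm format modifiers. The modifiers are bit flags that get
// OR'd together for combined features, e.g. TP10 UBWC below uses COMPRESSED | DX | TIGHT.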
81
82 using std::string;
83 using std::to_string;
84 using std::fstream;
85 using std::unordered_map;
86 using std::stringstream;
87 using std::ifstream;
88 using std::ofstream;
89 using drm_utils::DRMMaster;
90 using drm_utils::DRMResMgr;
91 using drm_utils::DRMLibLoader;
92 using drm_utils::DRMBuffer;
93 using sde_drm::GetDRMManager;
94 using sde_drm::DestroyDRMManager;
95 using sde_drm::DRMDisplayType;
96 using sde_drm::DRMDisplayToken;
97 using sde_drm::DRMConnectorInfo;
98 using sde_drm::DRMPPFeatureInfo;
99 using sde_drm::DRMRect;
100 using sde_drm::DRMRotation;
101 using sde_drm::DRMBlendType;
102 using sde_drm::DRMSrcConfig;
103 using sde_drm::DRMOps;
104 using sde_drm::DRMTopology;
105 using sde_drm::DRMPowerMode;
106 using sde_drm::DRMSecureMode;
107 using sde_drm::DRMSecurityLevel;
108 using sde_drm::DRMCscType;
109 using sde_drm::DRMMultiRectMode;
110 using sde_drm::DRMSSPPLayoutIndex;
111
112 namespace sdm {
113
114 std::atomic<uint32_t> HWDeviceDRM::hw_dest_scaler_blocks_used_(0);
115
116 static PPBlock GetPPBlock(const HWToneMapLut &lut_type) {
117 PPBlock pp_block = kPPBlockMax;
118 switch (lut_type) {
119 case kDma1dIgc:
120 case kDma1dGc:
121 pp_block = kDGM;
122 break;
123 case kVig1dIgc:
124 case kVig3dGamut:
125 pp_block = kVIG;
126 break;
127 default:
128 DLOGE("Unknown PP Block");
129 break;
130 }
131 return pp_block;
132 }
133
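// Maps an SDM LayerBufferFormat to the matching DRM fourcc code and, for UBWC/10-bit variants,
// the Qualcomm format modifier(s). Note the component-order swap between the SDM and DRM names,
// e.g. kFormatRGBA8888 maps to DRM_FORMAT_ABGR8888.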
134 static void GetDRMFormat(LayerBufferFormat format, uint32_t *drm_format,
135 uint64_t *drm_format_modifier) {
136 switch (format) {
137 case kFormatRGBA8888:
138 *drm_format = DRM_FORMAT_ABGR8888;
139 break;
140 case kFormatRGBA8888Ubwc:
141 *drm_format = DRM_FORMAT_ABGR8888;
142 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
143 break;
144 case kFormatRGBA5551:
145 *drm_format = DRM_FORMAT_ABGR1555;
146 break;
147 case kFormatRGBA4444:
148 *drm_format = DRM_FORMAT_ABGR4444;
149 break;
150 case kFormatBGRA8888:
151 *drm_format = DRM_FORMAT_ARGB8888;
152 break;
153 case kFormatRGBX8888:
154 *drm_format = DRM_FORMAT_XBGR8888;
155 break;
156 case kFormatRGBX8888Ubwc:
157 *drm_format = DRM_FORMAT_XBGR8888;
158 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
159 break;
160 case kFormatBGRX8888:
161 *drm_format = DRM_FORMAT_XRGB8888;
162 break;
163 case kFormatRGB888:
164 *drm_format = DRM_FORMAT_BGR888;
165 break;
166 case kFormatBGR888:
167 *drm_format = DRM_FORMAT_RGB888;
168 break;
169 case kFormatRGB565:
170 *drm_format = DRM_FORMAT_BGR565;
171 break;
172 case kFormatBGR565:
173 *drm_format = DRM_FORMAT_RGB565;
174 break;
175 case kFormatBGR565Ubwc:
176 *drm_format = DRM_FORMAT_BGR565;
177 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
178 break;
179 case kFormatRGBA1010102:
180 *drm_format = DRM_FORMAT_ABGR2101010;
181 break;
182 case kFormatRGBA1010102Ubwc:
183 *drm_format = DRM_FORMAT_ABGR2101010;
184 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
185 break;
186 case kFormatARGB2101010:
187 *drm_format = DRM_FORMAT_BGRA1010102;
188 break;
189 case kFormatRGBX1010102:
190 *drm_format = DRM_FORMAT_XBGR2101010;
191 break;
192 case kFormatRGBX1010102Ubwc:
193 *drm_format = DRM_FORMAT_XBGR2101010;
194 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
195 break;
196 case kFormatXRGB2101010:
197 *drm_format = DRM_FORMAT_BGRX1010102;
198 break;
199 case kFormatBGRA1010102:
200 *drm_format = DRM_FORMAT_ARGB2101010;
201 break;
202 case kFormatABGR2101010:
203 *drm_format = DRM_FORMAT_RGBA1010102;
204 break;
205 case kFormatBGRX1010102:
206 *drm_format = DRM_FORMAT_XRGB2101010;
207 break;
208 case kFormatXBGR2101010:
209 *drm_format = DRM_FORMAT_RGBX1010102;
210 break;
211 case kFormatYCbCr420SemiPlanar:
212 *drm_format = DRM_FORMAT_NV12;
213 break;
214 case kFormatYCbCr420SemiPlanarVenus:
215 *drm_format = DRM_FORMAT_NV12;
216 break;
217 case kFormatYCbCr420SPVenusUbwc:
218 *drm_format = DRM_FORMAT_NV12;
219 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED;
220 break;
221 case kFormatYCbCr420SPVenusTile:
222 *drm_format = DRM_FORMAT_NV12;
223 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE;
224 break;
225 case kFormatYCrCb420SemiPlanar:
226 *drm_format = DRM_FORMAT_NV21;
227 break;
228 case kFormatYCrCb420SemiPlanarVenus:
229 *drm_format = DRM_FORMAT_NV21;
230 break;
231 case kFormatYCbCr420P010:
232 case kFormatYCbCr420P010Venus:
233 *drm_format = DRM_FORMAT_NV12;
234 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_DX;
235 break;
236 case kFormatYCbCr420P010Ubwc:
237 *drm_format = DRM_FORMAT_NV12;
238 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED |
239 DRM_FORMAT_MOD_QCOM_DX;
240 break;
241 case kFormatYCbCr420P010Tile:
242 *drm_format = DRM_FORMAT_NV12;
243 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE |
244 DRM_FORMAT_MOD_QCOM_DX;
245 break;
246 case kFormatYCbCr420TP10Ubwc:
247 *drm_format = DRM_FORMAT_NV12;
248 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_COMPRESSED |
249 DRM_FORMAT_MOD_QCOM_DX | DRM_FORMAT_MOD_QCOM_TIGHT;
250 break;
251 case kFormatYCbCr420TP10Tile:
252 *drm_format = DRM_FORMAT_NV12;
253 *drm_format_modifier = DRM_FORMAT_MOD_QCOM_TILE |
254 DRM_FORMAT_MOD_QCOM_DX | DRM_FORMAT_MOD_QCOM_TIGHT;
255 break;
256 case kFormatYCbCr422H2V1SemiPlanar:
257 *drm_format = DRM_FORMAT_NV16;
258 break;
259 case kFormatYCrCb422H2V1SemiPlanar:
260 *drm_format = DRM_FORMAT_NV61;
261 break;
262 case kFormatYCrCb420PlanarStride16:
263 *drm_format = DRM_FORMAT_YVU420;
264 break;
265 default:
266 DLOGW("Unsupported format %s", GetFormatString(format));
267 }
268 }
269
270 class FrameBufferObject : public LayerBufferObject {
271 public:
272 explicit FrameBufferObject(uint32_t fb_id, LayerBufferFormat format,
273 uint32_t width, uint32_t height)
274 :fb_id_(fb_id), format_(format), width_(width), height_(height) {
275 }
276
277 ~FrameBufferObject() {
278 DRMMaster *master;
279 DRMMaster::GetInstance(&master);
280 int ret = master->RemoveFbId(fb_id_);
281 if (ret < 0) {
282 DLOGE("Removing fb_id %d failed with error %d", fb_id_, errno);
283 }
284 }
285 uint32_t GetFbId() { return fb_id_; }
286 bool IsEqual(LayerBufferFormat format, uint32_t width, uint32_t height) {
287 return (format == format_ && width == width_ && height == height_);
288 }
289
290 private:
291 uint32_t fb_id_;
292 LayerBufferFormat format_;
293 uint32_t width_;
294 uint32_t height_;
295 };
296
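// Registry caches DRM framebuffer IDs (fb_ids) keyed by the buffer's handle_id. An entry is
// dropped when the buffer's format or dimensions change, and a whole map is cleared when it
// reaches its cache limit (UI_FBID_LIMIT / VIDEO_FBID_LIMIT / OFFLINE_ROTATOR_FBID_LIMIT) or
// when caching is disabled through the DISABLE_FBID_CACHE debug property.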
297 HWDeviceDRM::Registry::Registry(BufferAllocator *buffer_allocator) :
298 buffer_allocator_(buffer_allocator) {
299 int value = 0;
300 if (Debug::GetProperty(DISABLE_FBID_CACHE, &value) == kErrorNone) {
301 disable_fbid_cache_ = (value == 1);
302 }
303 }
304
305 void HWDeviceDRM::Registry::Register(HWLayers *hw_layers) {
306 HWLayersInfo &hw_layer_info = hw_layers->info;
307 uint32_t hw_layer_count = UINT32(hw_layer_info.hw_layers.size());
308
309 for (uint32_t i = 0; i < hw_layer_count; i++) {
310 Layer &layer = hw_layer_info.hw_layers.at(i);
311 LayerBuffer input_buffer = layer.input_buffer;
312 HWRotatorSession *hw_rotator_session = &hw_layers->config[i].hw_rotator_session;
313 HWRotateInfo *hw_rotate_info = &hw_rotator_session->hw_rotate_info[0];
314 fbid_cache_limit_ = input_buffer.flags.video ? VIDEO_FBID_LIMIT : UI_FBID_LIMIT;
315
316 if (hw_rotator_session->mode == kRotatorOffline && hw_rotate_info->valid) {
317 input_buffer = hw_rotator_session->output_buffer;
318 fbid_cache_limit_ = OFFLINE_ROTATOR_FBID_LIMIT;
319 }
320
321 if (input_buffer.flags.interlace) {
322 input_buffer.width *= 2;
323 input_buffer.height /= 2;
324 }
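// Note: for interlaced content the buffer is registered at double width and half height, e.g. a
// hypothetical 1920x1080 interlaced buffer is mapped to the fb_id of a 3840x540 buffer.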
325 MapBufferToFbId(&layer, input_buffer);
326 }
327 }
328
329 int HWDeviceDRM::Registry::CreateFbId(const LayerBuffer &buffer, uint32_t *fb_id) {
330 DRMMaster *master = nullptr;
331 DRMMaster::GetInstance(&master);
332 int ret = -1;
333
334 if (!master) {
335 DLOGE("Failed to acquire DRM Master instance");
336 return ret;
337 }
338
339 DRMBuffer layout{};
340 AllocatedBufferInfo buf_info{};
341 buf_info.fd = layout.fd = buffer.planes[0].fd;
342 buf_info.aligned_width = layout.width = buffer.width;
343 buf_info.aligned_height = layout.height = buffer.height;
344 buf_info.format = buffer.format;
345 GetDRMFormat(buf_info.format, &layout.drm_format, &layout.drm_format_modifier);
346 buffer_allocator_->GetBufferLayout(buf_info, layout.stride, layout.offset, &layout.num_planes);
347 ret = master->CreateFbId(layout, fb_id);
348 if (ret < 0) {
349 DLOGE("CreateFbId failed. width %d, height %d, format: %s, stride %u, error %d",
350 layout.width, layout.height, GetFormatString(buf_info.format), layout.stride[0], errno);
351 }
352
353 return ret;
354 }
355
356 void HWDeviceDRM::Registry::MapBufferToFbId(Layer* layer, const LayerBuffer &buffer) {
357 if (buffer.planes[0].fd < 0) {
358 return;
359 }
360
361 uint64_t handle_id = buffer.handle_id;
362 if (!handle_id || disable_fbid_cache_) {
363 // In legacy path, clear fb_id map in each frame.
364 layer->buffer_map->buffer_map.clear();
365 } else {
366 auto it = layer->buffer_map->buffer_map.find(handle_id);
367 if (it != layer->buffer_map->buffer_map.end()) {
368 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
369 if (fb_obj->IsEqual(buffer.format, buffer.width, buffer.height)) {
370 // Found fb_id for given handle_id key
371 return;
372 } else {
373 // Erase from fb_id map if format or size have been modified
374 layer->buffer_map->buffer_map.erase(it);
375 }
376 }
377
378 if (layer->buffer_map->buffer_map.size() >= fbid_cache_limit_) {
379 // Clear fb_id map, if the size reaches cache limit.
380 layer->buffer_map->buffer_map.clear();
381 }
382 }
383
384 uint32_t fb_id = 0;
385 if (CreateFbId(buffer, &fb_id) >= 0) {
386 // Create and cache the fb_id in map
387 layer->buffer_map->buffer_map[handle_id] = std::make_shared<FrameBufferObject>(fb_id,
388 buffer.format, buffer.width, buffer.height);
389 }
390 }
391
392 void HWDeviceDRM::Registry::MapOutputBufferToFbId(LayerBuffer *output_buffer) {
393 if (output_buffer->planes[0].fd < 0) {
394 return;
395 }
396
397 uint64_t handle_id = output_buffer->handle_id;
398 if (!handle_id || disable_fbid_cache_) {
399 // In legacy path, clear output buffer map in each frame.
400 output_buffer_map_.clear();
401 } else {
402 auto it = output_buffer_map_.find(handle_id);
403 if (it != output_buffer_map_.end()) {
404 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
405 if (fb_obj->IsEqual(output_buffer->format, output_buffer->width, output_buffer->height)) {
406 return;
407 } else {
408 output_buffer_map_.erase(it);
409 }
410 }
411
412 if (output_buffer_map_.size() >= UI_FBID_LIMIT) {
413 // Clear output buffer map, if the size reaches cache limit.
414 output_buffer_map_.clear();
415 }
416 }
417
418 uint32_t fb_id = 0;
419 if (CreateFbId(*output_buffer, &fb_id) >= 0) {
420 output_buffer_map_[handle_id] = std::make_shared<FrameBufferObject>(fb_id,
421 output_buffer->format, output_buffer->width, output_buffer->height);
422 }
423 }
424
425 void HWDeviceDRM::Registry::Clear() {
426 output_buffer_map_.clear();
427 }
428
429 uint32_t HWDeviceDRM::Registry::GetFbId(Layer *layer, uint64_t handle_id) {
430 auto it = layer->buffer_map->buffer_map.find(handle_id);
431 if (it != layer->buffer_map->buffer_map.end()) {
432 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
433 return fb_obj->GetFbId();
434 }
435
436 return 0;
437 }
438
439 uint32_t HWDeviceDRM::Registry::GetOutputFbId(uint64_t handle_id) {
440 auto it = output_buffer_map_.find(handle_id);
441 if (it != output_buffer_map_.end()) {
442 FrameBufferObject *fb_obj = static_cast<FrameBufferObject*>(it->second.get());
443 return fb_obj->GetFbId();
444 }
445
446 return 0;
447 }
448
449 HWDeviceDRM::HWDeviceDRM(BufferAllocator *buffer_allocator, HWInfoInterface *hw_info_intf)
450 : hw_info_intf_(hw_info_intf), registry_(buffer_allocator) {
452 }
453
454 DisplayError HWDeviceDRM::Init() {
455 int ret = 0;
456 DRMMaster *drm_master = {};
457 DRMMaster::GetInstance(&drm_master);
458 drm_master->GetHandle(&dev_fd_);
459 DRMLibLoader::GetInstance()->FuncGetDRMManager()(dev_fd_, &drm_mgr_intf_);
460
461 if (-1 == display_id_) {
462 if (drm_mgr_intf_->RegisterDisplay(disp_type_, &token_)) {
463 DLOGE("RegisterDisplay (by type) failed for %s", device_name_);
464 return kErrorResources;
465 }
466 } else if (drm_mgr_intf_->RegisterDisplay(display_id_, &token_)) {
467 DLOGE("RegisterDisplay (by id) failed for %s - %d", device_name_, display_id_);
468 return kErrorResources;
469 }
470
471 if (token_.conn_id > INT32_MAX) {
472 DLOGE("Connector id %u beyond supported range", token_.conn_id);
473 drm_mgr_intf_->UnregisterDisplay(&token_);
474 return kErrorNotSupported;
475 }
476
477 display_id_ = static_cast<int32_t>(token_.conn_id);
478
479 ret = drm_mgr_intf_->CreateAtomicReq(token_, &drm_atomic_intf_);
480 if (ret) {
481 DLOGE("Failed creating atomic request for connector id %u. Error: %d.", token_.conn_id, ret);
482 drm_mgr_intf_->UnregisterDisplay(&token_);
483 return kErrorResources;
484 }
485
486 ret = drm_mgr_intf_->GetConnectorInfo(token_.conn_id, &connector_info_);
487 if (ret) {
488 DLOGE("Failed getting info for connector id %u. Error: %d.", token_.conn_id, ret);
489 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
490 drm_atomic_intf_ = {};
491 drm_mgr_intf_->UnregisterDisplay(&token_);
492 return kErrorHardware;
493 }
494
495 if (!connector_info_.is_connected || connector_info_.modes.empty()) {
496 DLOGW("Device removal detected on connector id %u. Connector status %s and %zu modes.",
497 token_.conn_id, connector_info_.is_connected ? "connected":"disconnected",
498 connector_info_.modes.size());
499 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
500 drm_atomic_intf_ = {};
501 drm_mgr_intf_->UnregisterDisplay(&token_);
502 return kErrorDeviceRemoved;
503 }
504
505 hw_info_intf_->GetHWResourceInfo(&hw_resource_);
506
507 InitializeConfigs();
508 PopulateHWPanelInfo();
509 UpdateMixerAttributes();
510
511 // TODO(user): In future, remove has_qseed3 member, add version and pass version to constructor
512 if (hw_resource_.has_qseed3) {
513 hw_scale_ = new HWScaleDRM(HWScaleDRM::Version::V2);
514 }
515
516 std::unique_ptr<HWColorManagerDrm> hw_color_mgr(new HWColorManagerDrm());
517 hw_color_mgr_ = std::move(hw_color_mgr);
518
519 return kErrorNone;
520 }
521
522 DisplayError HWDeviceDRM::Deinit() {
523 DisplayError err = kErrorNone;
524 // Power-on sets CRTC_SET_MODE to a valid mode on an external display. If the external display
525 // is disconnected before a first commit resets it, CRTC_SET_MODE is never set back to NULL,
526 // which leads to a synchronization issue and the external display staying blank for some time.
527 // So, after a successful power-on (i.e. NullCommit), set CRTC_SET_MODE to NULL for proper sync.
528 if (!first_cycle_ || null_display_commit_) {
529 // A null-commit is needed only if the first commit had gone through. e.g., If a pluggable
530 // display is plugged in and plugged out immediately, HWDeviceDRM::Deinit() may be called
531 // before any commit happened on the device. The driver may have removed any not-in-use
532 // connector (i.e., any connector which did not have a display commit on it and a crtc path
533 // setup), so token_.conn_id may have been removed if there was no commit, resulting in
534 // drmModeAtomicCommit() failure with ENOENT, 'No such file or directory'.
535 ClearSolidfillStages();
536 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, 0);
537 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::OFF);
538 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, nullptr);
539 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 0);
540 int ret = NullCommit(true /* synchronous */, false /* retain_planes */);
541 if (ret) {
542 DLOGE("Commit failed with error: %d", ret);
543 err = kErrorHardware;
544 }
545 }
546 delete hw_scale_;
547 registry_.Clear();
548 display_attributes_ = {};
549 drm_mgr_intf_->DestroyAtomicReq(drm_atomic_intf_);
550 drm_atomic_intf_ = {};
551 drm_mgr_intf_->UnregisterDisplay(&token_);
552 hw_dest_scaler_blocks_used_ -= dest_scaler_blocks_used_;
553 return err;
554 }
555
556 DisplayError HWDeviceDRM::GetDisplayId(int32_t *display_id) {
557 *display_id = display_id_;
558 return kErrorNone;
559 }
560
561 void HWDeviceDRM::InitializeConfigs() {
562 current_mode_index_ = 0;
563 // Update current mode with preferred mode
564 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
565 if (connector_info_.modes[mode_index].mode.type & DRM_MODE_TYPE_PREFERRED) {
566 DLOGI("Updating current display mode %d to preferred mode %d.", current_mode_index_,
567 mode_index);
568 current_mode_index_ = mode_index;
569 break;
570 }
571 }
572
573 display_attributes_.resize(connector_info_.modes.size());
574
575 uint32_t width = connector_info_.modes[current_mode_index_].mode.hdisplay;
576 uint32_t height = connector_info_.modes[current_mode_index_].mode.vdisplay;
577 for (uint32_t i = 0; i < connector_info_.modes.size(); i++) {
578 auto &mode = connector_info_.modes[i].mode;
579 if (mode.hdisplay != width || mode.vdisplay != height) {
580 resolution_switch_enabled_ = true;
581 }
582 PopulateDisplayAttributes(i);
583 }
584 SetDisplaySwitchMode(current_mode_index_);
585 }
586
587 DisplayError HWDeviceDRM::PopulateDisplayAttributes(uint32_t index) {
588 drmModeModeInfo mode = {};
589 uint32_t mm_width = 0;
590 uint32_t mm_height = 0;
591 DRMTopology topology = DRMTopology::SINGLE_LM;
592
593 if (default_mode_) {
594 DRMResMgr *res_mgr = nullptr;
595 int ret = DRMResMgr::GetInstance(&res_mgr);
596 if (ret < 0) {
597 DLOGE("Failed to acquire DRMResMgr instance");
598 return kErrorResources;
599 }
600
601 res_mgr->GetMode(&mode);
602 res_mgr->GetDisplayDimInMM(&mm_width, &mm_height);
603 } else {
604 mode = connector_info_.modes[index].mode;
605 mm_width = connector_info_.mmWidth;
606 mm_height = connector_info_.mmHeight;
607 topology = connector_info_.modes[index].topology;
608 if (mode.flags & DRM_MODE_FLAG_CMD_MODE_PANEL) {
609 display_attributes_[index].smart_panel = true;
610 }
611 }
612
613 display_attributes_[index].x_pixels = mode.hdisplay;
614 display_attributes_[index].y_pixels = mode.vdisplay;
615 display_attributes_[index].fps = mode.vrefresh;
616 display_attributes_[index].vsync_period_ns =
617 UINT32(1000000000L / display_attributes_[index].fps);
618
619 /*
620 Active Front Sync Back
621 Region Porch Porch
622 <-----------------------><----------------><-------------><-------------->
623 <----- [hv]display ----->
624 <------------- [hv]sync_start ------------>
625 <--------------------- [hv]sync_end --------------------->
626 <-------------------------------- [hv]total ----------------------------->
627 */
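// Worked example with hypothetical numbers: for a 1080x2400@60 mode with vsync_start = 2410,
// vsync_end = 2412 and vtotal = 2430, the assignments below give v_front_porch = 10,
// v_pulse_width = 2 and v_back_porch = 18; the vsync period computed above is
// 1000000000 / 60 ~= 16.67 ms.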
628
629 display_attributes_[index].v_front_porch = mode.vsync_start - mode.vdisplay;
630 display_attributes_[index].v_pulse_width = mode.vsync_end - mode.vsync_start;
631 display_attributes_[index].v_back_porch = mode.vtotal - mode.vsync_end;
632 display_attributes_[index].v_total = mode.vtotal;
633 display_attributes_[index].h_total = mode.htotal;
634 display_attributes_[index].is_device_split =
635 (topology == DRMTopology::DUAL_LM || topology == DRMTopology::DUAL_LM_MERGE ||
636 topology == DRMTopology::DUAL_LM_MERGE_DSC || topology == DRMTopology::DUAL_LM_DSC ||
637 topology == DRMTopology::DUAL_LM_DSCMERGE || topology == DRMTopology::QUAD_LM_MERGE ||
638 topology == DRMTopology::QUAD_LM_DSCMERGE || topology == DRMTopology::QUAD_LM_MERGE_DSC);
639 display_attributes_[index].clock_khz = mode.clock;
640
641 // If driver doesn't return panel width/height information, default to 320 dpi
642 if (INT(mm_width) <= 0 || INT(mm_height) <= 0) {
643 mm_width = UINT32(((FLOAT(mode.hdisplay) * 25.4f) / 320.0f) + 0.5f);
644 mm_height = UINT32(((FLOAT(mode.vdisplay) * 25.4f) / 320.0f) + 0.5f);
645 DLOGW("Driver doesn't report panel physical width and height - defaulting to 320dpi");
646 }
647
648 display_attributes_[index].x_dpi = (FLOAT(mode.hdisplay) * 25.4f) / FLOAT(mm_width);
649 display_attributes_[index].y_dpi = (FLOAT(mode.vdisplay) * 25.4f) / FLOAT(mm_height);
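// Example with hypothetical numbers: a 1080-pixel-wide panel reported as 68 mm wide gives
// x_dpi = (1080 * 25.4) / 68 ~= 403; the 320-dpi fallback above would instead report 86 mm.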
650 SetTopology(topology, &display_attributes_[index].topology);
651
652 DLOGI("Display attributes[%d]: WxH: %dx%d, DPI: %fx%f, FPS: %d, LM_SPLIT: %d, V_BACK_PORCH: %d," \
653 " V_FRONT_PORCH: %d, V_PULSE_WIDTH: %d, V_TOTAL: %d, H_TOTAL: %d, CLK: %dKHZ," \
654 " TOPOLOGY: %d, HW_SPLIT: %d", index, display_attributes_[index].x_pixels,
655 display_attributes_[index].y_pixels, display_attributes_[index].x_dpi,
656 display_attributes_[index].y_dpi, display_attributes_[index].fps,
657 display_attributes_[index].is_device_split, display_attributes_[index].v_back_porch,
658 display_attributes_[index].v_front_porch, display_attributes_[index].v_pulse_width,
659 display_attributes_[index].v_total, display_attributes_[index].h_total,
660 display_attributes_[index].clock_khz, display_attributes_[index].topology,
661 mixer_attributes_.split_type);
662
663 return kErrorNone;
664 }
665
666 void HWDeviceDRM::PopulateHWPanelInfo() {
667 hw_panel_info_ = {};
668
669 snprintf(hw_panel_info_.panel_name, sizeof(hw_panel_info_.panel_name), "%s",
670 connector_info_.panel_name.c_str());
671
672 uint32_t index = current_mode_index_;
673 hw_panel_info_.split_info.left_split = display_attributes_[index].x_pixels;
674 if (display_attributes_[index].is_device_split) {
675 hw_panel_info_.split_info.left_split = hw_panel_info_.split_info.right_split =
676 display_attributes_[index].x_pixels / 2;
677 }
678
679 hw_panel_info_.partial_update = connector_info_.modes[index].num_roi;
680 hw_panel_info_.left_roi_count = UINT32(connector_info_.modes[index].num_roi);
681 hw_panel_info_.right_roi_count = UINT32(connector_info_.modes[index].num_roi);
682 hw_panel_info_.left_align = connector_info_.modes[index].xstart;
683 hw_panel_info_.top_align = connector_info_.modes[index].ystart;
684 hw_panel_info_.width_align = connector_info_.modes[index].walign;
685 hw_panel_info_.height_align = connector_info_.modes[index].halign;
686 hw_panel_info_.min_roi_width = connector_info_.modes[index].wmin;
687 hw_panel_info_.min_roi_height = connector_info_.modes[index].hmin;
688 hw_panel_info_.needs_roi_merge = connector_info_.modes[index].roi_merge;
689 hw_panel_info_.transfer_time_us = connector_info_.modes[index].transfer_time_us;
690 hw_panel_info_.dynamic_fps = connector_info_.dynamic_fps;
691 hw_panel_info_.qsync_support = connector_info_.qsync_support;
692 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
693 if (hw_panel_info_.dynamic_fps) {
694 uint32_t min_fps = current_mode.vrefresh;
695 uint32_t max_fps = current_mode.vrefresh;
696 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
697 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
698 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay)) {
699 if (min_fps > connector_info_.modes[mode_index].mode.vrefresh) {
700 min_fps = connector_info_.modes[mode_index].mode.vrefresh;
701 }
702 if (max_fps < connector_info_.modes[mode_index].mode.vrefresh) {
703 max_fps = connector_info_.modes[mode_index].mode.vrefresh;
704 }
705 }
706 }
707 hw_panel_info_.min_fps = min_fps;
708 hw_panel_info_.max_fps = max_fps;
709 } else {
710 hw_panel_info_.min_fps = current_mode.vrefresh;
711 hw_panel_info_.max_fps = current_mode.vrefresh;
712 }
713
714 hw_panel_info_.is_primary_panel = connector_info_.is_primary;
715 hw_panel_info_.is_pluggable = 0;
716 hw_panel_info_.hdr_enabled = connector_info_.panel_hdr_prop.hdr_enabled;
717 // Convert the luminance values to cd/m^2 units.
718 hw_panel_info_.peak_luminance = FLOAT(connector_info_.panel_hdr_prop.peak_brightness) / 10000.0f;
719 hw_panel_info_.blackness_level = FLOAT(connector_info_.panel_hdr_prop.blackness_level) / 10000.0f;
720 hw_panel_info_.average_luminance = FLOAT(connector_info_.panel_hdr_prop.peak_brightness +
721 connector_info_.panel_hdr_prop.blackness_level) /
722 (2 * 10000.0f);
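// As implied by the divisions above, the driver reports brightness in units of 1/10000 cd/m^2;
// e.g. a reported peak_brightness of 4200000 corresponds to a peak_luminance of 420 cd/m^2, and
// average_luminance is taken as the midpoint of peak brightness and blackness level.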
723 hw_panel_info_.primaries.white_point[0] = connector_info_.panel_hdr_prop.display_primaries[0];
724 hw_panel_info_.primaries.white_point[1] = connector_info_.panel_hdr_prop.display_primaries[1];
725 hw_panel_info_.primaries.red[0] = connector_info_.panel_hdr_prop.display_primaries[2];
726 hw_panel_info_.primaries.red[1] = connector_info_.panel_hdr_prop.display_primaries[3];
727 hw_panel_info_.primaries.green[0] = connector_info_.panel_hdr_prop.display_primaries[4];
728 hw_panel_info_.primaries.green[1] = connector_info_.panel_hdr_prop.display_primaries[5];
729 hw_panel_info_.primaries.blue[0] = connector_info_.panel_hdr_prop.display_primaries[6];
730 hw_panel_info_.primaries.blue[1] = connector_info_.panel_hdr_prop.display_primaries[7];
731 hw_panel_info_.dyn_bitclk_support = connector_info_.dyn_bitclk_support;
732
733 // No support for 90 degree rotation; only flips or 180 degree rotation are supported.
734 hw_panel_info_.panel_orientation.rotation = 0;
735 hw_panel_info_.panel_orientation.flip_horizontal =
736 (connector_info_.panel_orientation == DRMRotation::FLIP_H) ||
737 (connector_info_.panel_orientation == DRMRotation::ROT_180);
738 hw_panel_info_.panel_orientation.flip_vertical =
739 (connector_info_.panel_orientation == DRMRotation::FLIP_V) ||
740 (connector_info_.panel_orientation == DRMRotation::ROT_180);
741
742 GetHWDisplayPortAndMode();
743 GetHWPanelMaxBrightness();
744
745 if (current_mode.flags & DRM_MODE_FLAG_CMD_MODE_PANEL) {
746 hw_panel_info_.mode = kModeCommand;
747 }
748 if (current_mode.flags & DRM_MODE_FLAG_VID_MODE_PANEL) {
749 hw_panel_info_.mode = kModeVideo;
750 }
751
752 DLOGI_IF(kTagDriverConfig, "%s, Panel Interface = %s, Panel Mode = %s, Is Primary = %d",
753 device_name_, interface_str_.c_str(),
754 hw_panel_info_.mode == kModeVideo ? "Video" : "Command",
755 hw_panel_info_.is_primary_panel);
756 DLOGI_IF(kTagDriverConfig, "Partial Update = %d, Dynamic FPS = %d, HDR Panel = %d QSync = %d",
757 hw_panel_info_.partial_update, hw_panel_info_.dynamic_fps, hw_panel_info_.hdr_enabled,
758 hw_panel_info_.qsync_support);
759 DLOGI_IF(kTagDriverConfig, "Align: left = %d, width = %d, top = %d, height = %d",
760 hw_panel_info_.left_align, hw_panel_info_.width_align, hw_panel_info_.top_align,
761 hw_panel_info_.height_align);
762 DLOGI_IF(kTagDriverConfig, "ROI: min_width = %d, min_height = %d, need_merge = %d",
763 hw_panel_info_.min_roi_width, hw_panel_info_.min_roi_height,
764 hw_panel_info_.needs_roi_merge);
765 DLOGI_IF(kTagDriverConfig, "FPS: min = %d, max = %d", hw_panel_info_.min_fps,
766 hw_panel_info_.max_fps);
767 DLOGI_IF(kTagDriverConfig, "Left Split = %d, Right Split = %d",
768 hw_panel_info_.split_info.left_split, hw_panel_info_.split_info.right_split);
769 DLOGI_IF(kTagDriverConfig, "Panel Transfer time = %d us", hw_panel_info_.transfer_time_us);
770 DLOGI_IF(kTagDriverConfig, "Dynamic Bit Clk Support = %d", hw_panel_info_.dyn_bitclk_support);
771 }
772
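// Standard two-call query pattern: when out_data is null, only the EDID blob size is returned so
// the caller can allocate a buffer; a second call with out_data set copies at most *out_data_size
// bytes of the cached EDID.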
773 DisplayError HWDeviceDRM::GetDisplayIdentificationData(uint8_t *out_port, uint32_t *out_data_size,
774 uint8_t *out_data) {
775 *out_port = token_.hw_port;
776 std::vector<uint8_t> &edid = connector_info_.edid;
777
778 if (out_data == nullptr) {
779 *out_data_size = (uint32_t)(edid.size());
780 if (*out_data_size == 0) {
781 DLOGE("EDID blob is empty, no data to return");
782 return kErrorDriverData;
783 }
784 } else {
785 *out_data_size = std::min(*out_data_size, (uint32_t)(edid.size()));
786 memcpy(out_data, edid.data(), *out_data_size);
787 }
788
789 return kErrorNone;
790 }
791
792 void HWDeviceDRM::GetHWDisplayPortAndMode() {
793 hw_panel_info_.port = kPortDefault;
794 hw_panel_info_.mode =
795 (connector_info_.panel_mode == sde_drm::DRMPanelMode::VIDEO) ? kModeVideo : kModeCommand;
796
797 if (default_mode_) {
798 return;
799 }
800
801 switch (connector_info_.type) {
802 case DRM_MODE_CONNECTOR_DSI:
803 hw_panel_info_.port = kPortDSI;
804 interface_str_ = "DSI";
805 break;
806 case DRM_MODE_CONNECTOR_LVDS:
807 hw_panel_info_.port = kPortLVDS;
808 interface_str_ = "LVDS";
809 break;
810 case DRM_MODE_CONNECTOR_eDP:
811 hw_panel_info_.port = kPortEDP;
812 interface_str_ = "EDP";
813 break;
814 case DRM_MODE_CONNECTOR_TV:
815 case DRM_MODE_CONNECTOR_HDMIA:
816 case DRM_MODE_CONNECTOR_HDMIB:
817 hw_panel_info_.port = kPortDTV;
818 interface_str_ = "HDMI";
819 break;
820 case DRM_MODE_CONNECTOR_VIRTUAL:
821 hw_panel_info_.port = kPortWriteBack;
822 interface_str_ = "Virtual";
823 break;
824 case DRM_MODE_CONNECTOR_DisplayPort:
825 hw_panel_info_.port = kPortDP;
826 interface_str_ = "DisplayPort";
827 break;
828 }
829
830 return;
831 }
832
833 DisplayError HWDeviceDRM::GetActiveConfig(uint32_t *active_config) {
834 *active_config = current_mode_index_;
835 return kErrorNone;
836 }
837
838 DisplayError HWDeviceDRM::GetNumDisplayAttributes(uint32_t *count) {
839 *count = UINT32(display_attributes_.size());
840 return kErrorNone;
841 }
842
843 DisplayError HWDeviceDRM::GetDisplayAttributes(uint32_t index,
844 HWDisplayAttributes *display_attributes) {
845 if (index >= display_attributes_.size()) {
846 return kErrorParameters;
847 }
848 *display_attributes = display_attributes_[index];
849 return kErrorNone;
850 }
851
852 DisplayError HWDeviceDRM::GetHWPanelInfo(HWPanelInfo *panel_info) {
853 *panel_info = hw_panel_info_;
854 return kErrorNone;
855 }
856
857 void HWDeviceDRM::SetDisplaySwitchMode(uint32_t index) {
858 uint32_t mode_flag = 0;
859 uint32_t curr_mode_flag = 0, switch_mode_flag = 0;
860 drmModeModeInfo to_set = connector_info_.modes[index].mode;
861 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
862 uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
863 uint32_t switch_index = 0;
864
865 if (to_set.flags & DRM_MODE_FLAG_CMD_MODE_PANEL) {
866 mode_flag = DRM_MODE_FLAG_CMD_MODE_PANEL;
867 switch_mode_flag = DRM_MODE_FLAG_VID_MODE_PANEL;
868 } else if (to_set.flags & DRM_MODE_FLAG_VID_MODE_PANEL) {
869 mode_flag = DRM_MODE_FLAG_VID_MODE_PANEL;
870 switch_mode_flag = DRM_MODE_FLAG_CMD_MODE_PANEL;
871 }
872
873 if (current_mode.flags & DRM_MODE_FLAG_CMD_MODE_PANEL) {
874 curr_mode_flag = DRM_MODE_FLAG_CMD_MODE_PANEL;
875 } else if (current_mode.flags & DRM_MODE_FLAG_VID_MODE_PANEL) {
876 curr_mode_flag = DRM_MODE_FLAG_VID_MODE_PANEL;
877 }
878
879 if (curr_mode_flag != mode_flag) {
880 panel_mode_changed_ = mode_flag;
881 }
882
883 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
884 if ((to_set.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
885 (to_set.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
886 (to_set.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
887 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
888 (mode_flag & connector_info_.modes[mode_index].mode.flags)) {
889 index = mode_index;
890 break;
891 }
892 }
893
894 current_mode_index_ = index;
895
896 switch_mode_valid_ = false;
897 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
898 if ((to_set.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
899 (to_set.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
900 (to_set.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
901 (switch_mode_flag & connector_info_.modes[mode_index].mode.flags)) {
902 switch_index = mode_index;
903 switch_mode_valid_ = true;
904 break;
905 }
906 }
907
908 if (!switch_mode_valid_) {
909 // If there is no corresponding switch mode with the same fps, fall back to the switch mode
910 // with the lowest fps. This handles cases where multiple video mode fps values exist but
911 // only one command mode (e.g. 30 fps) is available for doze.
912 uint32_t refresh_rate = 0;
913 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
914 if ((to_set.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
915 (to_set.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
916 (switch_mode_flag & connector_info_.modes[mode_index].mode.flags)) {
917 if (!refresh_rate || (refresh_rate > connector_info_.modes[mode_index].mode.vrefresh)) {
918 switch_index = mode_index;
919 switch_mode_valid_ = true;
920 refresh_rate = connector_info_.modes[mode_index].mode.vrefresh;
921 }
922 }
923 }
924 }
925
926 if (switch_mode_valid_) {
927 if (mode_flag & DRM_MODE_FLAG_VID_MODE_PANEL) {
928 video_mode_index_ = current_mode_index_;
929 cmd_mode_index_ = switch_index;
930 } else {
931 video_mode_index_ = switch_index;
932 cmd_mode_index_ = current_mode_index_;
933 }
934 }
935 }
936
937 DisplayError HWDeviceDRM::SetDisplayAttributes(uint32_t index) {
938 if (index >= display_attributes_.size()) {
939 DLOGE("Invalid mode index %d mode size %d", index, UINT32(display_attributes_.size()));
940 return kErrorParameters;
941 }
942
943 SetDisplaySwitchMode(index);
944 PopulateHWPanelInfo();
945 UpdateMixerAttributes();
946
947 DLOGI_IF(kTagDriverConfig,
948 "Display attributes[%d]: WxH: %dx%d, DPI: %fx%f, FPS: %d, LM_SPLIT: %d, V_BACK_PORCH: %d," \
949 " V_FRONT_PORCH: %d, V_PULSE_WIDTH: %d, V_TOTAL: %d, H_TOTAL: %d, CLK: %dKHZ, " \
950 "TOPOLOGY: %d, PanelMode %s", index, display_attributes_[index].x_pixels,
951 display_attributes_[index].y_pixels, display_attributes_[index].x_dpi,
952 display_attributes_[index].y_dpi, display_attributes_[index].fps,
953 display_attributes_[index].is_device_split, display_attributes_[index].v_back_porch,
954 display_attributes_[index].v_front_porch, display_attributes_[index].v_pulse_width,
955 display_attributes_[index].v_total, display_attributes_[index].h_total,
956 display_attributes_[index].clock_khz, display_attributes_[index].topology,
957 (connector_info_.modes[index].mode.flags & DRM_MODE_FLAG_VID_MODE_PANEL) ?
958 "Video" : "Command");
959
960 return kErrorNone;
961 }
962
963 DisplayError HWDeviceDRM::SetDisplayAttributes(const HWDisplayAttributes &display_attributes) {
964 return kErrorNotSupported;
965 }
966
967 DisplayError HWDeviceDRM::GetConfigIndex(char *mode, uint32_t *index) {
968 return kErrorNone;
969 }
970
971 DisplayError HWDeviceDRM::PowerOn(const HWQosData &qos_data, shared_ptr<Fence> *release_fence) {
972 SetQOSData(qos_data);
973
974 int64_t release_fence_fd = -1;
975 int64_t retire_fence_fd = -1;
976
977 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
978 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::ON);
979 if (release_fence) {
980 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_fd);
981 }
982 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, &retire_fence_fd);
983
984 int ret = NullCommit(false /* asynchronous */, true /* retain_planes */);
985 if (ret) {
986 DLOGE("Failed with error: %d", ret);
987 return kErrorHardware;
988 }
989
990 shared_ptr<Fence> retire_fence = Fence::Create(INT(retire_fence_fd), "retire_power_on");
991
992 if (release_fence) {
993 *release_fence = Fence::Create(INT(release_fence_fd), "release_power_on");
994 DLOGD_IF(kTagDriverConfig, "RELEASE fence: fd: %s", Fence::GetStr(*release_fence).c_str());
995 }
996 pending_doze_ = false;
997
998 Fence::Wait(retire_fence, kTimeoutMsPowerOn);
999
1000 last_power_mode_ = DRMPowerMode::ON;
1001
1002 return kErrorNone;
1003 }
1004
1005 DisplayError HWDeviceDRM::PowerOff(bool teardown) {
1006 DTRACE_SCOPED();
1007 if (!drm_atomic_intf_) {
1008 DLOGE("DRM Atomic Interface is null!");
1009 return kErrorUndefined;
1010 }
1011
1012 if (first_cycle_) {
1013 return kErrorNone;
1014 }
1015
1016 ResetROI();
1017 int64_t retire_fence_fd = -1;
1018 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1019 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
1020 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::OFF);
1021 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 0);
1022 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, &retire_fence_fd);
1023
1024 int ret = NullCommit(false /* asynchronous */, false /* retain_planes */);
1025 if (ret) {
1026 DLOGE("Failed with error: %d", ret);
1027 return kErrorHardware;
1028 }
1029
1030 shared_ptr<Fence> retire_fence = Fence::Create(INT(retire_fence_fd), "retire_power_off");
1031 pending_doze_ = false;
1032
1033 Fence::Wait(retire_fence, kTimeoutMsPowerOff);
1034
1035 last_power_mode_ = DRMPowerMode::OFF;
1036
1037 return kErrorNone;
1038 }
1039
1040 DisplayError HWDeviceDRM::Doze(const HWQosData &qos_data, shared_ptr<Fence> *release_fence) {
1041 DTRACE_SCOPED();
1042
1043 if (first_cycle_ || ((!switch_mode_valid_) && (last_power_mode_ != DRMPowerMode::OFF))) {
1044 pending_doze_ = true;
1045 return kErrorDeferred;
1046 }
1047
1048 SetQOSData(qos_data);
1049
1050 int64_t release_fence_fd = -1;
1051 int64_t retire_fence_fd = -1;
1052
1053 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
1054 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1055 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
1056
1057 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1058 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::DOZE);
1059 if (release_fence) {
1060 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_fd);
1061 }
1062 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, &retire_fence_fd);
1063
1064 int ret = NullCommit(false /* asynchronous */, true /* retain_planes */);
1065 if (ret) {
1066 DLOGE("Failed with error: %d", ret);
1067 return kErrorHardware;
1068 }
1069
1070 shared_ptr<Fence> retire_fence = Fence::Create(INT(retire_fence_fd), "retire_doze");
1071
1072 if (release_fence) {
1073 *release_fence = Fence::Create(release_fence_fd, "release_doze");
1074 DLOGD_IF(kTagDriverConfig, "RELEASE fence: fd: %s", Fence::GetStr(*release_fence).c_str());
1075 }
1076
1077 Fence::Wait(retire_fence, kTimeoutMsDoze);
1078
1079 last_power_mode_ = DRMPowerMode::DOZE;
1080
1081 return kErrorNone;
1082 }
1083
1084 DisplayError HWDeviceDRM::DozeSuspend(const HWQosData &qos_data,
1085 shared_ptr<Fence> *release_fence) {
1086 DTRACE_SCOPED();
1087
1088 SetQOSData(qos_data);
1089
1090 int64_t release_fence_fd = -1;
1091 int64_t retire_fence_fd = -1;
1092
1093 if (first_cycle_) {
1094 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
1095 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1096 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
1097 }
1098 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1099 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id,
1100 DRMPowerMode::DOZE_SUSPEND);
1101 if (release_fence) {
1102 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, &release_fence_fd);
1103 }
1104 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, &retire_fence_fd);
1105
1106 int ret = NullCommit(false /* asynchronous */, true /* retain_planes */);
1107 if (ret) {
1108 DLOGE("Failed with error: %d", ret);
1109 return kErrorHardware;
1110 }
1111
1112 shared_ptr<Fence> retire_fence = Fence::Create(INT(retire_fence_fd), "retire_doze_suspend");
1113
1114 if (release_fence) {
1115 *release_fence = Fence::Create(release_fence_fd, "release_doze_suspend");
1116 DLOGD_IF(kTagDriverConfig, "RELEASE fence: fd: %s", Fence::GetStr(*release_fence).c_str());
1117 }
1118 pending_doze_ = false;
1119
1120 Fence::Wait(retire_fence, kTimeoutMsDozeSuspend);
1121
1122 last_power_mode_ = DRMPowerMode::DOZE_SUSPEND;
1123
1124 return kErrorNone;
1125 }
1126
1127 void HWDeviceDRM::SetQOSData(const HWQosData &qos_data) {
1128 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_CLK, token_.crtc_id, qos_data.clock_hz);
1129 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_AB, token_.crtc_id, qos_data.core_ab_bps);
1130 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CORE_IB, token_.crtc_id, qos_data.core_ib_bps);
1131 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_LLCC_AB, token_.crtc_id, qos_data.llcc_ab_bps);
1132 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_LLCC_IB, token_.crtc_id, qos_data.llcc_ib_bps);
1133 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DRAM_AB, token_.crtc_id, qos_data.dram_ab_bps);
1134 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DRAM_IB, token_.crtc_id, qos_data.dram_ib_bps);
1135 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROT_PREFILL_BW, token_.crtc_id,
1136 qos_data.rot_prefill_bw_bps);
1137 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROT_CLK, token_.crtc_id, qos_data.rot_clock_hz);
1138 }
1139
1140 DisplayError HWDeviceDRM::Standby() {
1141 return kErrorNone;
1142 }
1143
1144 void HWDeviceDRM::SetupAtomic(Fence::ScopedRef &scoped_ref, HWLayers *hw_layers, bool validate,
1145 int64_t *release_fence_fd, int64_t *retire_fence_fd) {
1146 if (default_mode_) {
1147 return;
1148 }
1149
1150 DTRACE_SCOPED();
1151 HWLayersInfo &hw_layer_info = hw_layers->info;
1152 uint32_t hw_layer_count = UINT32(hw_layer_info.hw_layers.size());
1153 HWQosData &qos_data = hw_layers->qos_data;
1154 DRMSecurityLevel crtc_security_level = DRMSecurityLevel::SECURE_NON_SECURE;
1155 uint32_t index = current_mode_index_;
1156 drmModeModeInfo current_mode = connector_info_.modes[index].mode;
1157 uint64_t current_bit_clk = connector_info_.modes[index].bit_clk_rate;
1158
1159 solid_fills_.clear();
1160 bool resource_update = hw_layers->updates_mask.test(kUpdateResources);
1161 bool buffer_update = hw_layers->updates_mask.test(kSwapBuffers);
1162 bool update_config = resource_update || buffer_update ||
1163 hw_layer_info.stack->flags.geometry_changed;
1164
1165 if (hw_panel_info_.partial_update && update_config) {
1166 if (IsFullFrameUpdate(hw_layer_info)) {
1167 ResetROI();
1168 } else {
1169 const int kNumMaxROIs = 4;
1170 DRMRect crtc_rects[kNumMaxROIs] = {{0, 0, mixer_attributes_.width, mixer_attributes_.height}};
1171 DRMRect conn_rects[kNumMaxROIs] = {{0, 0, display_attributes_[index].x_pixels,
1172 display_attributes_[index].y_pixels}};
1173
1174 for (uint32_t i = 0; i < hw_layer_info.left_frame_roi.size(); i++) {
1175 auto &roi = hw_layer_info.left_frame_roi.at(i);
1176 // TODO(user): In multi PU, stitch vertically adjacent ROIs and update plane destination.
1177 crtc_rects[i].left = UINT32(roi.left);
1178 crtc_rects[i].right = UINT32(roi.right);
1179 crtc_rects[i].top = UINT32(roi.top);
1180 crtc_rects[i].bottom = UINT32(roi.bottom);
1181 conn_rects[i].left = UINT32(roi.left);
1182 conn_rects[i].right = UINT32(roi.right);
1183 conn_rects[i].top = UINT32(roi.top);
1184 conn_rects[i].bottom = UINT32(roi.bottom);
1185 }
1186
1187 uint32_t num_rects = std::max(1u, static_cast<uint32_t>(hw_layer_info.left_frame_roi.size()));
1188 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROI, token_.crtc_id, num_rects, crtc_rects);
1189 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_ROI, token_.conn_id, num_rects, conn_rects);
1190 }
1191 }
1192
1193 for (uint32_t i = 0; i < hw_layer_count; i++) {
1194 Layer &layer = hw_layer_info.hw_layers.at(i);
1195 LayerBuffer *input_buffer = &layer.input_buffer;
1196 HWPipeInfo *left_pipe = &hw_layers->config[i].left_pipe;
1197 HWPipeInfo *right_pipe = &hw_layers->config[i].right_pipe;
1198 HWLayerConfig &layer_config = hw_layers->config[i];
1199 HWRotatorSession *hw_rotator_session = &layer_config.hw_rotator_session;
1200
1201 if (hw_layers->config[i].use_solidfill_stage) {
1202 hw_layers->config[i].hw_solidfill_stage.solid_fill_info = layer.solid_fill_info;
1203 AddSolidfillStage(hw_layers->config[i].hw_solidfill_stage, layer.plane_alpha);
1204 continue;
1205 }
1206
1207 for (uint32_t count = 0; count < 2; count++) {
1208 HWPipeInfo *pipe_info = (count == 0) ? left_pipe : right_pipe;
1209 HWRotateInfo *hw_rotate_info = &hw_rotator_session->hw_rotate_info[count];
1210
1211 if (hw_rotator_session->mode == kRotatorOffline && hw_rotate_info->valid) {
1212 input_buffer = &hw_rotator_session->output_buffer;
1213 }
1214
1215 uint32_t fb_id = registry_.GetFbId(&layer, input_buffer->handle_id);
1216
1217 if (pipe_info->valid && fb_id) {
1218 uint32_t pipe_id = pipe_info->pipe_id;
1219
1220 if (update_config) {
1221 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ALPHA, pipe_id, layer.plane_alpha);
1222
1223 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ZORDER, pipe_id, pipe_info->z_order);
1224
1225 DRMBlendType blending = {};
1226 SetBlending(layer.blending, &blending);
1227 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_BLEND_TYPE, pipe_id, blending);
1228
1229 DRMRect src = {};
1230 SetRect(pipe_info->src_roi, &src);
1231 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SRC_RECT, pipe_id, src);
1232
1233 DRMRect dst = {};
1234 SetRect(pipe_info->dst_roi, &dst);
1235 LayerRect right_mixer = {FLOAT(mixer_attributes_.split_left), 0,
1236 FLOAT(mixer_attributes_.width), FLOAT(mixer_attributes_.height)};
1237 LayerRect dst_roi = pipe_info->dst_roi;
1238
1239 // For larger displays, i.e. 2 * 2k * 2k * 90 fps, 4 LMs get programmed.
1240 // Each pair of LMs drives an independent display.
1241 // Layout index indicates the panel onto which the pipe gets staged.
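// Illustration with hypothetical numbers: with split_left = 2048, a dst_roi of
// [2176, 0, 2432, 1080] intersects the right mixer, is repositioned to [128, 0, 384, 1080]
// and staged with layout index RIGHT; a dst_roi of [0, 0, 1920, 1080] stays on LEFT.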
1242 DRMSSPPLayoutIndex layout_index = DRMSSPPLayoutIndex::NONE;
1243 if (mixer_attributes_.split_type == kQuadSplit) {
1244 layout_index = DRMSSPPLayoutIndex::LEFT;
1245 if (IsValid(Intersection(dst_roi, right_mixer))) {
1246 dst_roi = Reposition(dst_roi, -INT(mixer_attributes_.split_left), 0);
1247 layout_index = DRMSSPPLayoutIndex::RIGHT;
1248 DLOGV_IF(kTagDriverConfig, "Layer index = %d sspp layout = RIGHT", i);
1249 DLOGV_IF(kTagDriverConfig, "Right dst_roi l = %f t = %f r = %f b = %f",
1250 dst_roi.left, dst_roi.top, dst_roi.right, dst_roi.bottom);
1251 } else {
1252 DLOGV_IF(kTagDriverConfig, "Layer index = %d sspp layout = LEFT", i);
1253 DLOGV_IF(kTagDriverConfig, "Left dst_roi l = %f t = %f r = %f b = %f",
1254 dst_roi.left, dst_roi.top, dst_roi.right, dst_roi.bottom);
1255 }
1256 }
1257 SetRect(dst_roi, &dst);
1258 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_DST_RECT, pipe_id, dst);
1259
1260 // Update Layout index.
1261 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SSPP_LAYOUT, pipe_id, layout_index);
1262
1263 DRMRect excl = {};
1264 SetRect(pipe_info->excl_rect, &excl);
1265 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_EXCL_RECT, pipe_id, excl);
1266
1267 uint32_t rot_bit_mask = 0;
1268 SetRotation(layer.transform, layer_config, &rot_bit_mask);
1269 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_ROTATION, pipe_id, rot_bit_mask);
1270
1271 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_H_DECIMATION, pipe_id,
1272 pipe_info->horizontal_decimation);
1273 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_V_DECIMATION, pipe_id,
1274 pipe_info->vertical_decimation);
1275
1276 DRMSecureMode fb_secure_mode;
1277 DRMSecurityLevel security_level;
1278 SetSecureConfig(layer.input_buffer, &fb_secure_mode, &security_level);
1279 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_FB_SECURE_MODE, pipe_id, fb_secure_mode);
1280 if (security_level > crtc_security_level) {
1281 crtc_security_level = security_level;
1282 }
1283
1284 uint32_t config = 0;
1285 SetSrcConfig(layer.input_buffer, hw_rotator_session->mode, &config);
1286 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SRC_CONFIG, pipe_id, config);
1287
1288 if (hw_scale_) {
1289 SDEScaler scaler_output = {};
1290 hw_scale_->SetScaler(pipe_info->scale_data, &scaler_output);
1291 // TODO(user): Remove qseed3 and add version check, then send appropriate scaler object
1292 if (hw_resource_.has_qseed3) {
1293 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_SCALER_CONFIG, pipe_id,
1294 reinterpret_cast<uint64_t>(&scaler_output.scaler_v2));
1295 }
1296 }
1297
1298 DRMCscType csc_type = DRMCscType::kCscTypeMax;
1299 SelectCscType(layer.input_buffer, &csc_type);
1300 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_CSC_CONFIG, pipe_id, &csc_type);
1301
1302 DRMMultiRectMode multirect_mode;
1303 SetMultiRectMode(pipe_info->flags, &multirect_mode);
1304 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_MULTIRECT_MODE, pipe_id, multirect_mode);
1305
1306 SetSsppTonemapFeatures(pipe_info);
1307 }
1308
1309 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_FB_ID, pipe_id, fb_id);
1310 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_CRTC, pipe_id, token_.crtc_id);
1311
1312 if (!validate && input_buffer->acquire_fence) {
1313 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_INPUT_FENCE, pipe_id,
1314 scoped_ref.Get(input_buffer->acquire_fence));
1315 }
1316 }
1317 }
1318 }
1319
1320 if (update_config) {
1321 SetSolidfillStages();
1322 SetQOSData(qos_data);
1323 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SECURITY_LEVEL, token_.crtc_id, crtc_security_level);
1324 }
1325
1326 if (hw_layers->hw_avr_info.update) {
1327 sde_drm::DRMQsyncMode mode = sde_drm::DRMQsyncMode::NONE;
1328 if (hw_layers->hw_avr_info.mode == kContinuousMode) {
1329 mode = sde_drm::DRMQsyncMode::CONTINUOUS;
1330 } else if (hw_layers->hw_avr_info.mode == kOneShotMode) {
1331 mode = sde_drm::DRMQsyncMode::ONESHOT;
1332 }
1333 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_QSYNC_MODE, token_.conn_id, mode);
1334 }
1335
1336 drm_atomic_intf_->Perform(DRMOps::DPPS_COMMIT_FEATURE, 0 /* argument is not used */);
1337
1338 if (reset_output_fence_offset_ && !validate) {
1339 // Change back the fence_offset
1340 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_OUTPUT_FENCE_OFFSET, token_.crtc_id, 0);
1341 reset_output_fence_offset_ = false;
1342 }
1343
1344 // Set panel mode
1345 if (panel_mode_changed_ & DRM_MODE_FLAG_VID_MODE_PANEL) {
1346 if (!validate) {
1347 // Switching to video mode; change the fence_offset accordingly.
1348 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_OUTPUT_FENCE_OFFSET, token_.crtc_id, 1);
1349 }
1350 ResetROI();
1351 }
1352
1353 if (!validate && release_fence_fd && retire_fence_fd) {
1354 drm_atomic_intf_->Perform(DRMOps::CRTC_GET_RELEASE_FENCE, token_.crtc_id, release_fence_fd);
1355 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE, token_.conn_id, retire_fence_fd);
1356 }
1357
1358 DLOGI_IF(kTagDriverConfig, "%s::%s System Clock=%d Hz, Core: AB=%f KBps, IB=%f Bps, " \
1359 "LLCC: AB=%f Bps, IB=%f Bps, DRAM AB=%f Bps, IB=%f Bps, "\
1360 "Rot: Bw=%f Bps, Clock=%d Hz", validate ? "Validate" : "Commit", device_name_,
1361 qos_data.clock_hz, qos_data.core_ab_bps / 1000.f, qos_data.core_ib_bps / 1000.f,
1362 qos_data.llcc_ab_bps / 1000.f, qos_data.llcc_ib_bps / 1000.f,
1363 qos_data.dram_ab_bps / 1000.f, qos_data.dram_ib_bps / 1000.f,
1364 qos_data.rot_prefill_bw_bps / 1000.f, qos_data.rot_clock_hz);
1365
1366 // Set refresh rate
1367 if (vrefresh_) {
1368 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1369 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1370 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1371 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
1372 (current_mode.flags == connector_info_.modes[mode_index].mode.flags) &&
1373 (vrefresh_ == connector_info_.modes[mode_index].mode.vrefresh)) {
1374 current_mode = connector_info_.modes[mode_index].mode;
1375 break;
1376 }
1377 }
1378 }
1379
1380 if (bit_clk_rate_) {
1381 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1382 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1383 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1384 (current_mode.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
1385 (current_mode.flags == connector_info_.modes[mode_index].mode.flags) &&
1386 (bit_clk_rate_ == connector_info_.modes[mode_index].bit_clk_rate)) {
1387 current_mode = connector_info_.modes[mode_index].mode;
1388 break;
1389 }
1390 }
1391 }
1392
1393 if (first_cycle_) {
1394 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_TOPOLOGY_CONTROL, token_.conn_id,
1395 topology_control_);
1396 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1397 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, token_.conn_id, token_.crtc_id);
1398 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::ON);
1399 last_power_mode_ = DRMPowerMode::ON;
1400 } else if (pending_doze_ && !validate) {
1401 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ACTIVE, token_.crtc_id, 1);
1402 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POWER_MODE, token_.conn_id, DRMPowerMode::DOZE);
1403 last_power_mode_ = DRMPowerMode::DOZE;
1404 }
1405
1406 // Set CRTC mode, only if display config changes
1407 if (first_cycle_ || vrefresh_ || update_mode_ || panel_mode_changed_) {
1408 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_MODE, token_.crtc_id, &current_mode);
1409 }
1410
1411 if (!validate && (hw_layer_info.set_idle_time_ms >= 0)) {
1412 DLOGI_IF(kTagDriverConfig, "Setting idle timeout to = %d ms",
1413 hw_layer_info.set_idle_time_ms);
1414 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_IDLE_TIMEOUT, token_.crtc_id,
1415 hw_layer_info.set_idle_time_ms);
1416 }
1417
1418 if (hw_panel_info_.mode == kModeCommand) {
1419 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_AUTOREFRESH, token_.conn_id, autorefresh_);
1420 }
1421 }
1422
1423 void HWDeviceDRM::AddSolidfillStage(const HWSolidfillStage &sf, uint32_t plane_alpha) {
1424 sde_drm::DRMSolidfillStage solidfill;
1425 solidfill.bounding_rect.left = UINT32(sf.roi.left);
1426 solidfill.bounding_rect.top = UINT32(sf.roi.top);
1427 solidfill.bounding_rect.right = UINT32(sf.roi.right);
1428 solidfill.bounding_rect.bottom = UINT32(sf.roi.bottom);
1429 solidfill.is_exclusion_rect = sf.is_exclusion_rect;
1430 solidfill.plane_alpha = plane_alpha;
1431 solidfill.z_order = sf.z_order;
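// When no explicit bit depth is given, sf.color is treated as packed 8-bit ARGB. For example,
// 0x80FF8040 splits into alpha 0x80, red 0xFF, green 0x80, blue 0x40.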
1432 if (!sf.solid_fill_info.bit_depth) {
1433 solidfill.color_bit_depth = 8;
1434 solidfill.alpha = (0xff000000 & sf.color) >> 24;
1435 solidfill.red = (0xff0000 & sf.color) >> 16;
1436 solidfill.green = (0xff00 & sf.color) >> 8;
1437 solidfill.blue = 0xff & sf.color;
1438 } else {
1439 solidfill.color_bit_depth = sf.solid_fill_info.bit_depth;
1440 solidfill.alpha = sf.solid_fill_info.alpha;
1441 solidfill.red = sf.solid_fill_info.red;
1442 solidfill.green = sf.solid_fill_info.green;
1443 solidfill.blue = sf.solid_fill_info.blue;
1444 }
1445 solid_fills_.push_back(solidfill);
1446 DLOGI_IF(kTagDriverConfig, "Add a solidfill stage at z_order:%d argb_color:%x plane_alpha:%x",
1447 solidfill.z_order, solidfill.color, solidfill.plane_alpha);
1448 }
1449
1450 void HWDeviceDRM::SetSolidfillStages() {
1451 if (hw_resource_.num_solidfill_stages) {
1452 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SOLIDFILL_STAGES, token_.crtc_id,
1453 reinterpret_cast<uint64_t> (&solid_fills_));
1454 }
1455 }
1456
1457 void HWDeviceDRM::ClearSolidfillStages() {
1458 solid_fills_.clear();
1459 SetSolidfillStages();
1460 }
1461
1462 DisplayError HWDeviceDRM::Validate(HWLayers *hw_layers) {
1463 DTRACE_SCOPED();
1464
1465 DisplayError err = kErrorNone;
1466 registry_.Register(hw_layers);
1467
1468 Fence::ScopedRef scoped_ref;
1469 SetupAtomic(scoped_ref, hw_layers, true /* validate */, nullptr, nullptr);
1470
1471 int ret = drm_atomic_intf_->Validate();
1472 if (ret) {
1473 DLOGE("failed with error %d for %s", ret, device_name_);
1474 DumpHWLayers(hw_layers);
1475 vrefresh_ = 0;
1476 panel_mode_changed_ = 0;
1477 err = kErrorHardware;
1478 }
1479
1480 return err;
1481 }
1482
1483 DisplayError HWDeviceDRM::Commit(HWLayers *hw_layers) {
1484 DTRACE_SCOPED();
1485
1486 DisplayError err = kErrorNone;
1487 registry_.Register(hw_layers);
1488
1489 if (default_mode_) {
1490 err = DefaultCommit(hw_layers);
1491 } else {
1492 err = AtomicCommit(hw_layers);
1493 }
1494
1495 return err;
1496 }
1497
1498 DisplayError HWDeviceDRM::DefaultCommit(HWLayers *hw_layers) {
1499 DTRACE_SCOPED();
1500
1501 HWLayersInfo &hw_layer_info = hw_layers->info;
1502
1503 for (Layer &layer : hw_layer_info.hw_layers) {
1504 layer.input_buffer.release_fence = nullptr;
1505 }
1506
1507 DRMMaster *master = nullptr;
1508 int ret = DRMMaster::GetInstance(&master);
1509 if (ret < 0) {
1510 DLOGE("Failed to acquire DRMMaster instance");
1511 return kErrorResources;
1512 }
1513
1514 DRMResMgr *res_mgr = nullptr;
1515 ret = DRMResMgr::GetInstance(&res_mgr);
1516 if (ret < 0) {
1517 DLOGE("Failed to acquire DRMResMgr instance");
1518 return kErrorResources;
1519 }
1520
1521 int dev_fd = -1;
1522 master->GetHandle(&dev_fd);
1523
1524 uint32_t connector_id = 0;
1525 res_mgr->GetConnectorId(&connector_id);
1526
1527 uint32_t crtc_id = 0;
1528 res_mgr->GetCrtcId(&crtc_id);
1529
1530 drmModeModeInfo mode;
1531 res_mgr->GetMode(&mode);
1532
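// Legacy (non-atomic) path: program the CRTC directly with the first layer's framebuffer,
// using the device fd from DRMMaster and the connector/CRTC/mode cached by DRMResMgr.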
1533 uint64_t handle_id = hw_layer_info.hw_layers.at(0).input_buffer.handle_id;
1534 uint32_t fb_id = registry_.GetFbId(&hw_layer_info.hw_layers.at(0), handle_id);
1535 ret = drmModeSetCrtc(dev_fd, crtc_id, fb_id, 0 /* x */, 0 /* y */, &connector_id,
1536 1 /* num_connectors */, &mode);
1537 if (ret < 0) {
1538 DLOGE("drmModeSetCrtc failed dev fd %d, fb_id %d, crtc id %d, connector id %d, %s", dev_fd,
1539 fb_id, crtc_id, connector_id, strerror(errno));
1540 return kErrorHardware;
1541 }
1542
1543 return kErrorNone;
1544 }
1545
1546 DisplayError HWDeviceDRM::AtomicCommit(HWLayers *hw_layers) {
1547 DTRACE_SCOPED();
1548
1549 int64_t release_fence_fd = -1;
1550 int64_t retire_fence_fd = -1;
1551
1552 // Scoped fence fds are closed automatically when this function returns; by then the
1553 // atomic commit has already handed them to the kernel.
1554 Fence::ScopedRef scoped_ref;
1555 SetupAtomic(scoped_ref, hw_layers, false /* validate */, &release_fence_fd, &retire_fence_fd);
1556
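// If the frame carries a future presentation timestamp, sleep until it elapses
// (CLOCK_MONOTONIC time, with the remaining nanoseconds converted to microseconds for usleep).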
1557 if (hw_layers->elapse_timestamp > 0) {
1558 struct timespec t = {0, 0};
1559 clock_gettime(CLOCK_MONOTONIC, &t);
1560 uint64_t current_time = (UINT64(t.tv_sec) * 1000000000LL + t.tv_nsec);
1561 if (current_time < hw_layers->elapse_timestamp) {
1562 usleep(UINT32((hw_layers->elapse_timestamp - current_time) / 1000));
1563 }
1564 }
1565
1566 int ret = drm_atomic_intf_->Commit(synchronous_commit_, false /* retain_planes*/);
1567 shared_ptr<Fence> release_fence = Fence::Create(INT(release_fence_fd), "release");
1568 shared_ptr<Fence> retire_fence = Fence::Create(INT(retire_fence_fd), "retire");
1569 if (ret) {
1570 DLOGE("%s failed with error %d crtc %d", __FUNCTION__, ret, token_.crtc_id);
1571 DumpHWLayers(hw_layers);
1572 vrefresh_ = 0;
1573 panel_mode_changed_ = 0;
1574 return kErrorHardware;
1575 }
1576
1577 DLOGD_IF(kTagDriverConfig, "RELEASE fence: fd: %s", Fence::GetStr(release_fence).c_str());
1578 DLOGD_IF(kTagDriverConfig, "RETIRE fence: fd: %s", Fence::GetStr(retire_fence).c_str());
1579
1580 HWLayersInfo &hw_layer_info = hw_layers->info;
1581 LayerStack *stack = hw_layer_info.stack;
1582 stack->retire_fence = retire_fence;
1583
1584 for (uint32_t i = 0; i < hw_layer_info.hw_layers.size(); i++) {
1585 Layer &layer = hw_layer_info.hw_layers.at(i);
1586 HWRotatorSession *hw_rotator_session = &hw_layers->config[i].hw_rotator_session;
1587 if (hw_rotator_session->mode == kRotatorOffline) {
1588 hw_rotator_session->output_buffer.release_fence = release_fence;
1589 } else {
1590 layer.input_buffer.release_fence = release_fence;
1591 }
1592 }
1593
1594 hw_layer_info.sync_handle = release_fence;
1595
1596 if (vrefresh_) {
1597 // Update current mode index if refresh rate is changed
1598 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1599 uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
1600 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1601 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1602 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1603 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
1604 (vrefresh_ == connector_info_.modes[mode_index].mode.vrefresh)) {
1605 current_mode_index_ = mode_index;
1606 SetDisplaySwitchMode(mode_index);
1607 break;
1608 }
1609 }
1610 vrefresh_ = 0;
1611 }
1612
1613 if (bit_clk_rate_) {
1614 // Update current mode index if bit clk rate is changed.
1615 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1616 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1617 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1618 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1619 (current_mode.vrefresh == connector_info_.modes[mode_index].mode.vrefresh) &&
1620 (bit_clk_rate_ == connector_info_.modes[mode_index].bit_clk_rate)) {
1621 current_mode_index_ = mode_index;
1622 SetDisplaySwitchMode(mode_index);
1623 break;
1624 }
1625 }
1626 bit_clk_rate_ = 0;
1627 }
1628
1629 if (panel_mode_changed_ & DRM_MODE_FLAG_CMD_MODE_PANEL) {
1630 panel_mode_changed_ = 0;
1631 synchronous_commit_ = false;
1632 } else if (panel_mode_changed_ & DRM_MODE_FLAG_VID_MODE_PANEL) {
1633 panel_mode_changed_ = 0;
1634 synchronous_commit_ = false;
1635 reset_output_fence_offset_ = true;
1636 }
1637
1638 first_cycle_ = false;
1639 update_mode_ = false;
1640 hw_layers->updates_mask = 0;
1641 pending_doze_ = false;
1642
1643 return kErrorNone;
1644 }
1645
1646 DisplayError HWDeviceDRM::Flush(HWLayers *hw_layers) {
1647 ClearSolidfillStages();
1648 ResetROI();
1649 int ret = NullCommit(secure_display_active_ /* synchronous */, false /* retain_planes*/);
1650 if (ret) {
1651 DLOGE("failed with error %d", ret);
1652 return kErrorHardware;
1653 }
1654
1655 return kErrorNone;
1656 }
1657
1658 void HWDeviceDRM::SetBlending(const LayerBlending &source, DRMBlendType *target) {
1659 switch (source) {
1660 case kBlendingPremultiplied:
1661 *target = DRMBlendType::PREMULTIPLIED;
1662 break;
1663 case kBlendingOpaque:
1664 *target = DRMBlendType::OPAQUE;
1665 break;
1666 case kBlendingCoverage:
1667 *target = DRMBlendType::COVERAGE;
1668 break;
1669 default:
1670 *target = DRMBlendType::UNDEFINED;
1671 }
1672 }
1673
1674 void HWDeviceDRM::SetSrcConfig(const LayerBuffer &input_buffer, const HWRotatorMode &mode,
1675 uint32_t *config) {
1676 // The offline rotator handles deinterlacing itself; set the plane DEINTERLACE flag only for inline rotation.
1677 if (mode == kRotatorInline) {
1678 if (input_buffer.flags.interlace) {
1679 *config |= (0x01 << UINT32(DRMSrcConfig::DEINTERLACE));
1680 }
1681 }
1682 }
1683
1684 void HWDeviceDRM::SelectCscType(const LayerBuffer &input_buffer, DRMCscType *type) {
1685 if (type == NULL) {
1686 return;
1687 }
1688
1689 *type = DRMCscType::kCscTypeMax;
1690 if (input_buffer.format < kFormatYCbCr420Planar) {
1691 return;
1692 }
1693
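// YUV sources pick a YUV-to-RGB CSC from the buffer's color primaries and range: for example,
// a full-range BT.601 buffer maps to kCscYuv2Rgb601FR, while BT.709 always maps to
// kCscYuv2Rgb709L here.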
1694 switch (input_buffer.color_metadata.colorPrimaries) {
1695 case ColorPrimaries_BT601_6_525:
1696 case ColorPrimaries_BT601_6_625:
1697 *type = ((input_buffer.color_metadata.range == Range_Full) ?
1698 DRMCscType::kCscYuv2Rgb601FR : DRMCscType::kCscYuv2Rgb601L);
1699 break;
1700 case ColorPrimaries_BT709_5:
1701 *type = DRMCscType::kCscYuv2Rgb709L;
1702 break;
1703 case ColorPrimaries_BT2020:
1704 *type = ((input_buffer.color_metadata.range == Range_Full) ?
1705 DRMCscType::kCscYuv2Rgb2020FR : DRMCscType::kCscYuv2Rgb2020L);
1706 break;
1707 default:
1708 break;
1709 }
1710 }
1711
1712 void HWDeviceDRM::SetRect(const LayerRect &source, DRMRect *target) {
1713 target->left = UINT32(source.left);
1714 target->top = UINT32(source.top);
1715 target->right = UINT32(source.right);
1716 target->bottom = UINT32(source.bottom);
1717 }
1718
1719 void HWDeviceDRM::SetRotation(LayerTransform transform, const HWLayerConfig &layer_config,
1720 uint32_t* rot_bit_mask) {
1721 HWRotatorMode mode = layer_config.hw_rotator_session.mode;
1722 // In the offline rotation case, the rotator handles flips set via the offline rotator interface.
1723 if (mode == kRotatorOffline) {
1724 *rot_bit_mask = 0;
1725 return;
1726 }
1727
1728 // In the no-rotation and inline rotation cases, the plane handles flips.
1729 // In the DRM framework, rotation is applied in the counter-clockwise direction.
1730 if (layer_config.use_inline_rot && transform.rotation == 90) {
1731 // a) rotate 90 clockwise = rotate 270 counter-clockwise in DRM
1732 // rotate 270 is translated as hflip + vflip + rotate90
1733 // b) rotate 270 clockwise = rotate 90 counter-clockwise in DRM
1734 // c) hflip + rotate 90 clockwise = vflip + rotate 90 counter-clockwise in DRM
1735 // d) vflip + rotate 90 clockwise = hflip + rotate 90 counter-clockwise in DRM
1736 *rot_bit_mask = UINT32(DRMRotation::ROT_90);
1737 transform.flip_horizontal = !transform.flip_horizontal;
1738 transform.flip_vertical = !transform.flip_vertical;
1739 }
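// Worked example of the mapping above: a clockwise 90 degree rotation with no flips on an
// inline-rotated layer ends up as ROT_90 | FLIP_H | FLIP_V in DRM terms.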
1740
1741 if (transform.flip_horizontal) {
1742 *rot_bit_mask |= UINT32(DRMRotation::FLIP_H);
1743 }
1744
1745 if (transform.flip_vertical) {
1746 *rot_bit_mask |= UINT32(DRMRotation::FLIP_V);
1747 }
1748 }
1749
1750 bool HWDeviceDRM::EnableHotPlugDetection(int enable) {
1751 return true;
1752 }
1753
1754 DisplayError HWDeviceDRM::SetCursorPosition(HWLayers *hw_layers, int x, int y) {
1755 DTRACE_SCOPED();
1756 return kErrorNone;
1757 }
1758
1759 DisplayError HWDeviceDRM::GetPPFeaturesVersion(PPFeatureVersion *vers) {
1760 struct DRMPPFeatureInfo info = {};
1761
1762 if (!hw_color_mgr_)
1763 return kErrorNotSupported;
1764
1765 for (uint32_t i = 0; i < kMaxNumPPFeatures; i++) {
1766 std::vector<DRMPPFeatureID> drm_id = {};
1767 memset(&info, 0, sizeof(struct DRMPPFeatureInfo));
1768 hw_color_mgr_->ToDrmFeatureId(kDSPP, i, &drm_id);
1769 if (drm_id.empty())
1770 continue;
1771
1772 info.id = drm_id.at(0);
1773
1774 drm_mgr_intf_->GetCrtcPPInfo(token_.crtc_id, &info);
1775 vers->version[i] = hw_color_mgr_->GetFeatureVersion(info);
1776 }
1777 return kErrorNone;
1778 }
1779
1780 DisplayError HWDeviceDRM::SetPPFeatures(PPFeaturesConfig *feature_list) {
1781 int ret = 0;
1782 PPFeatureInfo *feature = NULL;
1783
1784 if (!hw_color_mgr_)
1785 return kErrorNotSupported;
1786
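// Drain the feature list: each feature is translated to one or more DRM feature IDs and
// programmed on the CRTC, or on the connector when the CRTC does not expose the feature
// (GetCrtcPPInfo reports version == UINT32_MAX in that case).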
1787 while (true) {
1788 std::vector<DRMPPFeatureID> drm_id = {};
1789 DRMPPFeatureInfo kernel_params = {};
1790 bool crtc_feature = true;
1791
1792 ret = feature_list->RetrieveNextFeature(&feature);
1793 if (ret || !feature)
1794 break;
1795
1796 hw_color_mgr_->ToDrmFeatureId(kDSPP, feature->feature_id_, &drm_id);
1797 if (drm_id.empty())
1798 continue;
1799
1800 kernel_params.id = drm_id.at(0);
1801 drm_mgr_intf_->GetCrtcPPInfo(token_.crtc_id, &kernel_params);
1802 if (kernel_params.version == std::numeric_limits<uint32_t>::max()) {
1803 crtc_feature = false;
1804 }
1805 DLOGV_IF(kTagDriverConfig, "feature_id = %d", feature->feature_id_);
1806 for (DRMPPFeatureID id : drm_id) {
1807 if (id >= kPPFeaturesMax) {
1808 DLOGE("Invalid feature id %d", id);
1809 continue;
1810 }
1811 kernel_params.id = id;
1812 ret = hw_color_mgr_->GetDrmFeature(feature, &kernel_params);
1813 if (!ret && crtc_feature)
1814 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_POST_PROC,
1815 token_.crtc_id, &kernel_params);
1816 else if (!ret && !crtc_feature)
1817 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_POST_PROC,
1818 token_.conn_id, &kernel_params);
1819
1820 hw_color_mgr_->FreeDrmFeatureData(&kernel_params);
1821 }
1822 }
1823
1824 // Once all features have been consumed, destroy all feature instances in feature_list.
1825 feature_list->Reset();
1826
1827 return kErrorNone;
1828 }
1829
1830 DisplayError HWDeviceDRM::SetVSyncState(bool enable) {
1831 return kErrorNotSupported;
1832 }
1833
1834 void HWDeviceDRM::SetIdleTimeoutMs(uint32_t timeout_ms) {
1835 // TODO(user): This function can be removed after fb is deprecated
1836 }
1837
1838 DisplayError HWDeviceDRM::SetDisplayMode(const HWDisplayMode hw_display_mode) {
1839 if (!switch_mode_valid_) {
1840 return kErrorNotSupported;
1841 }
1842
1843 uint32_t mode_flag = 0;
1844
1845 if (hw_display_mode == kModeCommand) {
1846 mode_flag = DRM_MODE_FLAG_CMD_MODE_PANEL;
1847 current_mode_index_ = cmd_mode_index_;
1848 DLOGI_IF(kTagDriverConfig, "switch panel mode to command");
1849 } else if (hw_display_mode == kModeVideo) {
1850 mode_flag = DRM_MODE_FLAG_VID_MODE_PANEL;
1851 current_mode_index_ = video_mode_index_;
1852 DLOGI_IF(kTagDriverConfig, "switch panel mode to video");
1853 }
1854 PopulateHWPanelInfo();
1855 panel_mode_changed_ = mode_flag;
1856 synchronous_commit_ = true;
1857 return kErrorNone;
1858 }
1859
1860 DisplayError HWDeviceDRM::SetRefreshRate(uint32_t refresh_rate) {
1861 if (bit_clk_rate_) {
1862 // bit rate update pending.
1863 // Defer any refresh rate setting.
1864 return kErrorNotSupported;
1865 }
1866
1867 // Check if requested refresh rate is valid
1868 drmModeModeInfo current_mode = connector_info_.modes[current_mode_index_].mode;
1869 uint64_t current_bit_clk = connector_info_.modes[current_mode_index_].bit_clk_rate;
1870 for (uint32_t mode_index = 0; mode_index < connector_info_.modes.size(); mode_index++) {
1871 if ((current_mode.vdisplay == connector_info_.modes[mode_index].mode.vdisplay) &&
1872 (current_mode.hdisplay == connector_info_.modes[mode_index].mode.hdisplay) &&
1873 (current_bit_clk == connector_info_.modes[mode_index].bit_clk_rate) &&
1874 (current_mode.flags == connector_info_.modes[mode_index].mode.flags) &&
1875 (refresh_rate == connector_info_.modes[mode_index].mode.vrefresh)) {
1876 vrefresh_ = refresh_rate;
1877 DLOGV_IF(kTagDriverConfig, "Set refresh rate to %d", refresh_rate);
1878 return kErrorNone;
1879 }
1880 }
1881 return kErrorNotSupported;
1882 }
1883
1884
1885
1886 DisplayError HWDeviceDRM::GetHWScanInfo(HWScanInfo *scan_info) {
1887 return kErrorNotSupported;
1888 }
1889
1890 DisplayError HWDeviceDRM::GetVideoFormat(uint32_t config_index, uint32_t *video_format) {
1891 return kErrorNotSupported;
1892 }
1893
1894 DisplayError HWDeviceDRM::GetMaxCEAFormat(uint32_t *max_cea_format) {
1895 return kErrorNotSupported;
1896 }
1897
1898 DisplayError HWDeviceDRM::OnMinHdcpEncryptionLevelChange(uint32_t min_enc_level) {
1899 DisplayError error = kErrorNone;
1900 int fd = -1;
1901 char data[kMaxStringLength] = {'\0'};
1902
1903 snprintf(data, sizeof(data), "/sys/devices/virtual/hdcp/msm_hdcp/min_level_change");
1904
1905 fd = Sys::open_(data, O_WRONLY);
1906 if (fd < 0) {
1907 DLOGE("File '%s' could not be opened. errno = %d, desc = %s", data, errno, strerror(errno));
1908 return kErrorHardware;
1909 }
1910
1911 snprintf(data, sizeof(data), "%d", min_enc_level);
1912
1913 ssize_t err = Sys::pwrite_(fd, data, strlen(data), 0);
1914 if (err <= 0) {
1915 DLOGE("Write failed, Error = %s", strerror(errno));
1916 error = kErrorHardware;
1917 }
1918
1919 Sys::close_(fd);
1920
1921 return error;
1922 }
1923
1924 DisplayError HWDeviceDRM::SetScaleLutConfig(HWScaleLutInfo *lut_info) {
1925 sde_drm::DRMScalerLUTInfo drm_lut_info = {};
1926 drm_lut_info.cir_lut = lut_info->cir_lut;
1927 drm_lut_info.dir_lut = lut_info->dir_lut;
1928 drm_lut_info.sep_lut = lut_info->sep_lut;
1929 drm_lut_info.cir_lut_size = lut_info->cir_lut_size;
1930 drm_lut_info.dir_lut_size = lut_info->dir_lut_size;
1931 drm_lut_info.sep_lut_size = lut_info->sep_lut_size;
1932 drm_mgr_intf_->SetScalerLUT(drm_lut_info);
1933
1934 return kErrorNone;
1935 }
1936
1937 DisplayError HWDeviceDRM::UnsetScaleLutConfig() {
1938 drm_mgr_intf_->UnsetScalerLUT();
1939
1940 return kErrorNone;
1941 }
1942
1943 DisplayError HWDeviceDRM::SetMixerAttributes(const HWMixerAttributes &mixer_attributes) {
1944 if (IsResolutionSwitchEnabled()) {
1945 return kErrorNotSupported;
1946 }
1947
1948 if (!dest_scaler_blocks_used_) {
1949 return kErrorNotSupported;
1950 }
1951
1952 uint32_t index = current_mode_index_;
1953
1954 if (mixer_attributes.width > display_attributes_[index].x_pixels ||
1955 mixer_attributes.height > display_attributes_[index].y_pixels) {
1956 DLOGW("Input resolution exceeds display resolution! input: res %dx%d display: res %dx%d",
1957 mixer_attributes.width, mixer_attributes.height, display_attributes_[index].x_pixels,
1958 display_attributes_[index].y_pixels);
1959 return kErrorNotSupported;
1960 }
1961
1962 uint32_t max_input_width = hw_resource_.hw_dest_scalar_info.max_input_width;
1963 if (display_attributes_[index].is_device_split) {
1964 max_input_width *= 2;
1965 }
1966
1967 if (mixer_attributes.width > max_input_width) {
1968 DLOGW("Input width exceeds width limit! input_width %d width_limit %d", mixer_attributes.width,
1969 max_input_width);
1970 return kErrorNotSupported;
1971 }
1972
1973 float mixer_aspect_ratio = FLOAT(mixer_attributes.width) / FLOAT(mixer_attributes.height);
1974 float display_aspect_ratio =
1975 FLOAT(display_attributes_[index].x_pixels) / FLOAT(display_attributes_[index].y_pixels);
1976
1977 if (display_aspect_ratio != mixer_aspect_ratio) {
1978 DLOGW("Aspect ratio mismatch! input: res %dx%d display: res %dx%d", mixer_attributes.width,
1979 mixer_attributes.height, display_attributes_[index].x_pixels,
1980 display_attributes_[index].y_pixels);
1981 return kErrorNotSupported;
1982 }
1983
1984 float scale_x = FLOAT(display_attributes_[index].x_pixels) / FLOAT(mixer_attributes.width);
1985 float scale_y = FLOAT(display_attributes_[index].y_pixels) / FLOAT(mixer_attributes.height);
1986 float max_scale_up = hw_resource_.hw_dest_scalar_info.max_scale_up;
1987 if (scale_x > max_scale_up || scale_y > max_scale_up) {
1988 DLOGW(
1989 "Up scaling ratio exceeds for destination scalar upscale limit scale_x %f scale_y %f "
1990 "max_scale_up %f",
1991 scale_x, scale_y, max_scale_up);
1992 return kErrorNotSupported;
1993 }
1994
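// Preserve the previous left/right split ratio so split_left scales with the new mixer width
// on split panels.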
1995 float mixer_split_ratio = FLOAT(mixer_attributes_.split_left) / FLOAT(mixer_attributes_.width);
1996
1997 mixer_attributes_ = mixer_attributes;
1998 mixer_attributes_.split_left = mixer_attributes_.width;
1999 mixer_attributes_.split_type = kNoSplit;
2000 mixer_attributes_.dest_scaler_blocks_used = dest_scaler_blocks_used_; // No change.
2001 if (display_attributes_[index].is_device_split) {
2002 mixer_attributes_.split_left = UINT32(FLOAT(mixer_attributes.width) * mixer_split_ratio);
2003 mixer_attributes_.split_type = kDualSplit;
2004 if (display_attributes_[index].topology == kQuadLMMerge ||
2005 display_attributes_[index].topology == kQuadLMDSCMerge ||
2006 display_attributes_[index].topology == kQuadLMMergeDSC) {
2007 mixer_attributes_.split_type = kQuadSplit;
2008 }
2009 }
2010
2011 return kErrorNone;
2012 }
2013
2014 DisplayError HWDeviceDRM::GetMixerAttributes(HWMixerAttributes *mixer_attributes) {
2015 if (!mixer_attributes) {
2016 return kErrorParameters;
2017 }
2018
2019 *mixer_attributes = mixer_attributes_;
2020
2021 return kErrorNone;
2022 }
2023
2024 DisplayError HWDeviceDRM::DumpDebugData() {
2025 string dir_path = "/data/vendor/display/hw_recovery/";
2026 string device_str = device_name_;
2027
2028 // Attempt to create the hw_recovery directory; it may already exist.
2029 if (mkdir(dir_path.c_str(), 0777) != 0 && errno != EEXIST) {
2030 DLOGW("Failed to create %s directory errno = %d, desc = %s", dir_path.c_str(), errno,
2031 strerror(errno));
2032 return kErrorPermission;
2033 }
2034 // If it does exist, ensure permissions are fine
2035 if (errno == EEXIST && chmod(dir_path.c_str(), 0777) != 0) {
2036 DLOGW("Failed to change permissions on %s directory", dir_path.c_str());
2037 return kErrorPermission;
2038 }
2039
2040 string filename = dir_path+device_str+"_HWR_"+to_string(debug_dump_count_);
2041 ofstream dst(filename);
2042 debug_dump_count_++;
2043
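// Concatenate the driver's debugfs recovery nodes (event log, register dump, debug bus,
// VBIF debug bus) into a single hw_recovery file.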
2044 {
2045 ifstream src;
2046 src.open("/sys/kernel/debug/dri/0/debug/dump");
2047 dst << "---- Event Logs ----" << std::endl;
2048 dst << src.rdbuf() << std::endl;
2049 src.close();
2050 }
2051
2052 {
2053 ifstream src;
2054 src.open("/sys/kernel/debug/dri/0/debug/recovery_reg");
2055 dst << "---- All Registers ----" << std::endl;
2056 dst << src.rdbuf() << std::endl;
2057 src.close();
2058 }
2059
2060 {
2061 ifstream src;
2062 src.open("/sys/kernel/debug/dri/0/debug/recovery_dbgbus");
2063 dst << "---- Debug Bus ----" << std::endl;
2064 dst << src.rdbuf() << std::endl;
2065 src.close();
2066 }
2067
2068 {
2069 ifstream src;
2070 src.open("/sys/kernel/debug/dri/0/debug/recovery_vbif_dbgbus");
2071 dst << "---- VBIF Debug Bus ----" << std::endl;
2072 dst << src.rdbuf() << std::endl;
2073 src.close();
2074 }
2075
2076 dst.close();
2077 DLOGI("Wrote hw_recovery file %s", filename.c_str());
2078
2079 return kErrorNone;
2080 }
2081
2082 void HWDeviceDRM::GetDRMDisplayToken(sde_drm::DRMDisplayToken *token) const {
2083 *token = token_;
2084 }
2085
2086 void HWDeviceDRM::UpdateMixerAttributes() {
2087 uint32_t index = current_mode_index_;
2088
2089 mixer_attributes_.width = display_attributes_[index].x_pixels;
2090 mixer_attributes_.height = display_attributes_[index].y_pixels;
2091 mixer_attributes_.split_left = display_attributes_[index].is_device_split
2092 ? hw_panel_info_.split_info.left_split
2093 : mixer_attributes_.width;
2094 mixer_attributes_.split_type = kNoSplit;
2095 if (display_attributes_[index].is_device_split) {
2096 mixer_attributes_.split_type = kDualSplit;
2097 if (display_attributes_[index].topology == kQuadLMMerge ||
2098 display_attributes_[index].topology == kQuadLMDSCMerge ||
2099 display_attributes_[index].topology == kQuadLMMergeDSC) {
2100 mixer_attributes_.split_type = kQuadSplit;
2101 }
2102 }
2103
2104 DLOGI("Mixer WxH %dx%d-%d for %s", mixer_attributes_.width, mixer_attributes_.height,
2105 mixer_attributes_.split_type, device_name_);
2106 update_mode_ = true;
2107 }
2108
2109 void HWDeviceDRM::SetSecureConfig(const LayerBuffer &input_buffer, DRMSecureMode *fb_secure_mode,
2110 DRMSecurityLevel *security_level) {
2111 *fb_secure_mode = DRMSecureMode::NON_SECURE;
2112 *security_level = DRMSecurityLevel::SECURE_NON_SECURE;
2113
2114 if (input_buffer.flags.secure) {
2115 if (input_buffer.flags.secure_camera) {
2116 // IOMMU configuration for this framebuffer mode is secure domain & requires
2117 // only stage II translation, when this buffer is accessed by Display H/W.
2118 // Secure and non-secure planes can be attached to this CRTC.
2119 *fb_secure_mode = DRMSecureMode::SECURE_DIR_TRANSLATION;
2120 } else if (input_buffer.flags.secure_display) {
2121 // IOMMU configuration for this framebuffer mode is secure domain & requires
2122 // only stage II translation, when this buffer is accessed by Display H/W.
2123 // Only secure planes can be attached to this CRTC.
2124 *fb_secure_mode = DRMSecureMode::SECURE_DIR_TRANSLATION;
2125 *security_level = DRMSecurityLevel::SECURE_ONLY;
2126 } else {
2127 // IOMMU configuration for this framebuffer mode is secure domain & requires both
2128 // stage I and stage II translations, when this buffer is accessed by Display H/W.
2129 // Secure and non-secure planes can be attached to this CRTC.
2130 *fb_secure_mode = DRMSecureMode::SECURE;
2131 }
2132 }
2133 }
2134
2135 void HWDeviceDRM::SetTopology(sde_drm::DRMTopology drm_topology, HWTopology *hw_topology) {
2136 switch (drm_topology) {
2137 case DRMTopology::SINGLE_LM: *hw_topology = kSingleLM; break;
2138 case DRMTopology::SINGLE_LM_DSC: *hw_topology = kSingleLMDSC; break;
2139 case DRMTopology::DUAL_LM: *hw_topology = kDualLM; break;
2140 case DRMTopology::DUAL_LM_DSC: *hw_topology = kDualLMDSC; break;
2141 case DRMTopology::DUAL_LM_MERGE: *hw_topology = kDualLMMerge; break;
2142 case DRMTopology::DUAL_LM_MERGE_DSC: *hw_topology = kDualLMMergeDSC; break;
2143 case DRMTopology::DUAL_LM_DSCMERGE: *hw_topology = kDualLMDSCMerge; break;
2144 case DRMTopology::QUAD_LM_MERGE: *hw_topology = kQuadLMMerge; break;
2145 case DRMTopology::QUAD_LM_DSCMERGE: *hw_topology = kQuadLMDSCMerge; break;
2146 case DRMTopology::QUAD_LM_MERGE_DSC: *hw_topology = kQuadLMMergeDSC; break;
2147 case DRMTopology::PPSPLIT: *hw_topology = kPPSplit; break;
2148 default: *hw_topology = kUnknown; break;
2149 }
2150 }
2151
2152
2153 void HWDeviceDRM::SetMultiRectMode(const uint32_t flags, DRMMultiRectMode *target) {
2154 *target = DRMMultiRectMode::NONE;
2155 if (flags & kMultiRect) {
2156 *target = DRMMultiRectMode::SERIAL;
2157 if (flags & kMultiRectParallelMode) {
2158 *target = DRMMultiRectMode::PARALLEL;
2159 }
2160 }
2161 }
2162
2163 void HWDeviceDRM::SetSsppTonemapFeatures(HWPipeInfo *pipe_info) {
2164 if (pipe_info->dgm_csc_info.op != kNoOp) {
2165 SDECsc csc = {};
2166 SetDGMCsc(pipe_info->dgm_csc_info, &csc);
2167 DLOGV_IF(kTagDriverConfig, "Call Perform DGM CSC Op = %s",
2168 (pipe_info->dgm_csc_info.op == kSet) ? "Set" : "Reset");
2169 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_DGM_CSC_CONFIG, pipe_info->pipe_id,
2170 reinterpret_cast<uint64_t>(&csc.csc_v1));
2171 }
2172 if (pipe_info->inverse_pma_info.op != kNoOp) {
2173 DLOGV_IF(kTagDriverConfig, "Call Perform Inverse PMA Op = %s",
2174 (pipe_info->inverse_pma_info.op == kSet) ? "Set" : "Reset");
2175 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_INVERSE_PMA, pipe_info->pipe_id,
2176 (pipe_info->inverse_pma_info.inverse_pma) ? 1: 0);
2177 }
2178 SetSsppLutFeatures(pipe_info);
2179 }
2180
2181 void HWDeviceDRM::SetDGMCsc(const HWPipeCscInfo &dgm_csc_info, SDECsc *csc) {
2182 SetDGMCscV1(dgm_csc_info.csc, &csc->csc_v1);
2183 }
2184
2185 void HWDeviceDRM::SetDGMCscV1(const HWCsc &dgm_csc, sde_drm_csc_v1 *csc_v1) {
2186 uint32_t i = 0;
2187 for (i = 0; i < MAX_CSC_MATRIX_COEFF_SIZE; i++) {
2188 csc_v1->ctm_coeff[i] = dgm_csc.ctm_coeff[i];
2189 DLOGV_IF(kTagDriverConfig, " DGM csc_v1[%d] = %" PRId64, i, csc_v1->ctm_coeff[i]);
2190 }
2191 for (i = 0; i < MAX_CSC_BIAS_SIZE; i++) {
2192 csc_v1->pre_bias[i] = dgm_csc.pre_bias[i];
2193 csc_v1->post_bias[i] = dgm_csc.post_bias[i];
2194 }
2195 for (i = 0; i < MAX_CSC_CLAMP_SIZE; i++) {
2196 csc_v1->pre_clamp[i] = dgm_csc.pre_clamp[i];
2197 csc_v1->post_clamp[i] = dgm_csc.post_clamp[i];
2198 }
2199 }
2200
2201 void HWDeviceDRM::SetSsppLutFeatures(HWPipeInfo *pipe_info) {
2202 for (HWPipeTonemapLutInfo &lut_info : pipe_info->lut_info) {
2203 if (lut_info.op != kNoOp) {
2204 std::shared_ptr<PPFeatureInfo> feature = lut_info.pay_load;
2205 if (feature == nullptr) {
2206 DLOGE("Null Pointer for Op = %d lut type = %d", lut_info.op, lut_info.type);
2207 continue;
2208 }
2209 DRMPPFeatureInfo kernel_params = {};
2210 std::vector<DRMPPFeatureID> drm_id = {};
2211 PPBlock pp_block = GetPPBlock(lut_info.type);
2212 hw_color_mgr_->ToDrmFeatureId(pp_block, feature->feature_id_, &drm_id);
2213 for (DRMPPFeatureID id : drm_id) {
2214 if (id >= kPPFeaturesMax) {
2215 DLOGE("Invalid feature id %d", id);
2216 continue;
2217 }
2218 kernel_params.id = id;
2219 bool disable = (lut_info.op == kReset);
2220 DLOGV_IF(kTagDriverConfig, "Lut Type = %d PPBlock = %d Op = %s Disable = %d Feature = %p",
2221 lut_info.type, pp_block, (lut_info.op == kSet) ? "Set" : "Reset", disable,
2222 feature.get());
2223 int ret = hw_color_mgr_->GetDrmFeature(feature.get(), &kernel_params, disable);
2224 if (!ret) {
2225 drm_atomic_intf_->Perform(DRMOps::PLANE_SET_POST_PROC, pipe_info->pipe_id,
2226 &kernel_params);
2227 hw_color_mgr_->FreeDrmFeatureData(&kernel_params);
2228 } else {
2229 DLOGE("GetDrmFeature failed for Lut type = %d", lut_info.type);
2230 }
2231 }
2232 drm_id.clear();
2233 }
2234 }
2235 }
2236
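// When secure display is active, stage a full-screen solid fill at the reserved secure blend
// stage (plane alpha 0xFF), presumably to mask any non-secure content; when it is not active,
// restore the CRTC security level to SECURE_NON_SECURE.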
2237 void HWDeviceDRM::AddDimLayerIfNeeded() {
2238 if (secure_display_active_ && hw_resource_.secure_disp_blend_stage >= 0) {
2239 HWSolidfillStage sf = {};
2240 sf.z_order = UINT32(hw_resource_.secure_disp_blend_stage);
2241 sf.roi = { 0.0, 0.0, FLOAT(mixer_attributes_.width), FLOAT(mixer_attributes_.height) };
2242 solid_fills_.clear();
2243 AddSolidfillStage(sf, 0xFF);
2244 SetSolidfillStages();
2245 }
2246
2247 if (!secure_display_active_) {
2248 DRMSecurityLevel crtc_security_level = DRMSecurityLevel::SECURE_NON_SECURE;
2249 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_SECURITY_LEVEL, token_.crtc_id, crtc_security_level);
2250 }
2251 }
2252
2253 DisplayError HWDeviceDRM::NullCommit(bool synchronous, bool retain_planes) {
2254 DTRACE_SCOPED();
2255 AddDimLayerIfNeeded();
2256 int ret = drm_atomic_intf_->Commit(synchronous, retain_planes);
2257 if (ret) {
2258 DLOGE("failed with error %d", ret);
2259 return kErrorHardware;
2260 }
2261
2262 null_display_commit_ = true;
2263 return kErrorNone;
2264 }
2265
2266 void HWDeviceDRM::DumpConnectorModeInfo() {
2267 for (uint32_t i = 0; i < (uint32_t)connector_info_.modes.size(); i++) {
2268 DLOGI("Mode[%d] Name:%s vref:%d hdisp:%d hsync_s:%d hsync_e:%d htotal:%d " \
2269 "vdisp:%d vsync_s:%d vsync_e:%d vtotal:%d\n", i, connector_info_.modes[i].mode.name,
2270 connector_info_.modes[i].mode.vrefresh, connector_info_.modes[i].mode.hdisplay,
2271 connector_info_.modes[i].mode.hsync_start, connector_info_.modes[i].mode.hsync_end,
2272 connector_info_.modes[i].mode.htotal, connector_info_.modes[i].mode.vdisplay,
2273 connector_info_.modes[i].mode.vsync_start, connector_info_.modes[i].mode.vsync_end,
2274 connector_info_.modes[i].mode.vtotal);
2275 }
2276 }
2277
2278 void HWDeviceDRM::ResetROI() {
2279 drm_atomic_intf_->Perform(DRMOps::CRTC_SET_ROI, token_.crtc_id, 0, nullptr);
2280 drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_ROI, token_.conn_id, 0, nullptr);
2281 }
2282
2283 bool HWDeviceDRM::IsFullFrameUpdate(const HWLayersInfo &hw_layer_info) {
2284 LayerRect full_frame = {0, 0, FLOAT(mixer_attributes_.width), FLOAT(mixer_attributes_.height)};
2285
2286 const LayerRect &frame_roi = hw_layer_info.left_frame_roi.at(0);
2287 // If multiple ROIs are present, it is not a full-frame update.
2288 if (hw_layer_info.left_frame_roi.size() > 1 ||
2289 (IsValid(frame_roi) && !IsCongruent(full_frame, frame_roi))) {
2290 return false;
2291 }
2292
2293 return true;
2294 }
2295
2296 DisplayError HWDeviceDRM::SetDynamicDSIClock(uint64_t bit_clk_rate) {
2297 return kErrorNotSupported;
2298 }
2299
2300 DisplayError HWDeviceDRM::GetDynamicDSIClock(uint64_t *bit_clk_rate) {
2301 return kErrorNotSupported;
2302 }
2303
2304 void HWDeviceDRM::DumpHWLayers(HWLayers *hw_layers) {
2305 HWLayersInfo &hw_layer_info = hw_layers->info;
2306 DestScaleInfoMap &dest_scale_info_map = hw_layer_info.dest_scale_info_map;
2307 LayerStack *stack = hw_layer_info.stack;
2308 uint32_t hw_layer_count = UINT32(hw_layer_info.hw_layers.size());
2309 std::vector<LayerRect> &left_frame_roi = hw_layer_info.left_frame_roi;
2310 std::vector<LayerRect> &right_frame_roi = hw_layer_info.right_frame_roi;
2311 DLOGI("HWLayers Stack: layer_count: %d, app_layer_count: %d, gpu_target_index: %d",
2312 hw_layer_count, hw_layer_info.app_layer_count, hw_layer_info.gpu_target_index);
2313 DLOGI("LayerStackFlags = 0x%" PRIu32 ", blend_cs = {primaries = %d, transfer = %d}",
2314 UINT32(stack->flags.flags), UINT32(stack->blend_cs.primaries),
2315 UINT32(stack->blend_cs.transfer));
2316 for (uint32_t i = 0; i < left_frame_roi.size(); i++) {
2317 DLOGI("left_frame_roi: x = %d, y = %d, w = %d, h = %d", INT(left_frame_roi[i].left),
2318 INT(left_frame_roi[i].top), INT(left_frame_roi[i].right), INT(left_frame_roi[i].bottom));
2319 }
2320 for (uint32_t i = 0; i < right_frame_roi.size(); i++) {
2321 DLOGI("right_frame_roi: x = %d, y = %d, w = %d h = %d", INT(right_frame_roi[i].left),
2322 INT(right_frame_roi[i].top), INT(right_frame_roi[i].right),
2323 INT(right_frame_roi[i].bottom));
2324 }
2325
2326 for (uint32_t i = 0; i < dest_scale_info_map.size(); i++) {
2327 HWDestScaleInfo *dest_scalar_data = dest_scale_info_map[i];
2328 if (dest_scalar_data->scale_data.enable.scale) {
2329 HWScaleData &scale_data = dest_scalar_data->scale_data;
2330 DLOGI("Dest scalar index %d Mixer WxH %dx%d", i,
2331 dest_scalar_data->mixer_width, dest_scalar_data->mixer_height);
2332 DLOGI("Panel ROI [%d, %d, %d, %d]", INT(dest_scalar_data->panel_roi.left),
2333 INT(dest_scalar_data->panel_roi.top), INT(dest_scalar_data->panel_roi.right),
2334 INT(dest_scalar_data->panel_roi.bottom));
2335 DLOGI("Dest scalar Dst WxH %dx%d", scale_data.dst_width, scale_data.dst_height);
2336 }
2337 }
2338
2339 for (uint32_t i = 0; i < hw_layer_count; i++) {
2340 HWLayerConfig &hw_config = hw_layers->config[i];
2341 HWRotatorSession &hw_rotator_session = hw_config.hw_rotator_session;
2342 HWSessionConfig &hw_session_config = hw_rotator_session.hw_session_config;
2343 DLOGI("========================= HW_layer: %d =========================", i);
2344 DLOGI("src_width = %d, src_height = %d, src_format = %d, src_LayerBufferFlags = 0x%" PRIx32 ,
2345 hw_layer_info.hw_layers[i].input_buffer.width,
2346 hw_layer_info.hw_layers[i].input_buffer.height,
2347 hw_layer_info.hw_layers[i].input_buffer.format,
2348 hw_layer_info.hw_layers[i].input_buffer.flags.flags);
2349 if (hw_config.use_inline_rot) {
2350 DLOGI("rotator = %s, rotation = %d, flip_horizontal = %s, flip_vertical = %s",
2351 "inline rotator", INT(hw_session_config.transform.rotation),
2352 hw_session_config.transform.flip_horizontal ? "true" : "false",
2353 hw_session_config.transform.flip_vertical ? "true" : "false");
2354 } else if (hw_rotator_session.mode == kRotatorOffline) {
2355 DLOGI("rotator = %s, rotation = %d, flip_horizontal = %s, flip_vertical = %s",
2356 "offline rotator", INT(hw_session_config.transform.rotation),
2357 hw_session_config.transform.flip_horizontal ? "true" : "false",
2358 hw_session_config.transform.flip_vertical ? "true" : "false");
2359 }
2360 if (hw_config.use_solidfill_stage) {
2361 HWSolidfillStage &hw_solidfill_stage = hw_config.hw_solidfill_stage;
2362 LayerSolidFill &solid_fill_info = hw_solidfill_stage.solid_fill_info;
2363 DLOGI("HW Solid fill info: z_order = %d, color = %d", hw_solidfill_stage.z_order,
2364 hw_solidfill_stage.color);
2365 DLOGI("bit_depth = %d, red = %d, green = %d, blue = %d, alpha = %d",
2366 solid_fill_info.bit_depth, solid_fill_info.red, solid_fill_info.green,
2367 solid_fill_info.blue, solid_fill_info.alpha);
2368 }
2369 for (uint32_t count = 0; count < 2; count++) {
2370 HWPipeInfo &left_pipe = hw_config.left_pipe;
2371 HWPipeInfo &right_pipe = hw_config.right_pipe;
2372 HWPipeInfo &pipe_info = (count == 0) ? left_pipe : right_pipe;
2373 HWScaleData &scale_data = pipe_info.scale_data;
2374 if (!pipe_info.valid) {
2375 continue;
2376 }
2377 std::string pipe = (count == 0) ? "left_pipe" : "right_pipe";
2378 DLOGI("pipe = %s, pipe_id = %d, z_order = %d, flags = 0x%X",
2379 pipe.c_str(), pipe_info.pipe_id, pipe_info.z_order, pipe_info.flags);
2380 DLOGI("src_rect: x = %d, y = %d, w = %d, h = %d", INT(pipe_info.src_roi.left),
2381 INT(pipe_info.src_roi.top), INT(pipe_info.src_roi.right - pipe_info.src_roi.left),
2382 INT(pipe_info.src_roi.bottom - pipe_info.src_roi.top));
2383 DLOGI("dst_rect: x = %d, y = %d, w = %d, h = %d", INT(pipe_info.dst_roi.left),
2384 INT(pipe_info.dst_roi.top), INT(pipe_info.dst_roi.right - pipe_info.dst_roi.left),
2385 INT(pipe_info.dst_roi.bottom - pipe_info.dst_roi.top));
2386 DLOGI("excl_rect: left = %d, top = %d, right = %d, bottom = %d",
2387 INT(pipe_info.excl_rect.left), INT(pipe_info.excl_rect.top),
2388 INT(pipe_info.excl_rect.right), INT(pipe_info.excl_rect.bottom));
2389 if (scale_data.enable.scale) {
2390 DLOGI("HWScaleData enable flags: scale = %s, direction_detection = %s, detail_enhance = %s,"
2391 " dyn_exp_disable = %s", scale_data.enable.scale ? "true" : "false",
2392 scale_data.enable.direction_detection ? "true" : "false",
2393 scale_data.enable.detail_enhance ? "true" : "false",
2394 scale_data.enable.dyn_exp_disable ? "true" : "false");
2395 DLOGI("lut_flags: lut_swap = 0x%X, lut_dir_wr = 0x%X, lut_y_cir_wr = 0x%X, "
2396 "lut_uv_cir_wr = 0x%X, lut_y_sep_wr = 0x%X, lut_uv_sep_wr = 0x%X",
2397 scale_data.lut_flag.lut_swap, scale_data.lut_flag.lut_dir_wr,
2398 scale_data.lut_flag.lut_y_cir_wr, scale_data.lut_flag.lut_uv_cir_wr,
2399 scale_data.lut_flag.lut_y_sep_wr, scale_data.lut_flag.lut_uv_sep_wr);
2400 DLOGI("dir_lut_idx = %d, y_rgb_cir_lut_idx = %d, uv_cir_lut_idx = %d, "
2401 "y_rgb_sep_lut_idx = %d, uv_sep_lut_idx = %d", scale_data.dir_lut_idx,
2402 scale_data.y_rgb_cir_lut_idx, scale_data.uv_cir_lut_idx,
2403 scale_data.y_rgb_sep_lut_idx, scale_data.uv_sep_lut_idx);
2404 }
2405 }
2406 }
2407 }
2408
2409 DisplayError HWDeviceDRM::SetBlendSpace(const PrimariesTransfer &blend_space) {
2410 blend_space_ = blend_space;
2411 return kErrorNone;
2412 }
2413
2414 } // namespace sdm
2415