// SPDX-License-Identifier: GPL-2.0-only
/*
* Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
* Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
*/
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_kms.h"
#include "dpu_trace.h"
#include <linux/iopoll.h>
#include <drm/drm_managed.h>
#define INTF_TIMING_ENGINE_EN 0x000
#define INTF_CONFIG 0x004
#define INTF_HSYNC_CTL 0x008
#define INTF_VSYNC_PERIOD_F0 0x00C
#define INTF_VSYNC_PERIOD_F1 0x010
#define INTF_VSYNC_PULSE_WIDTH_F0 0x014
#define INTF_VSYNC_PULSE_WIDTH_F1 0x018
#define INTF_DISPLAY_V_START_F0 0x01C
#define INTF_DISPLAY_V_START_F1 0x020
#define INTF_DISPLAY_V_END_F0 0x024
#define INTF_DISPLAY_V_END_F1 0x028
#define INTF_ACTIVE_V_START_F0 0x02C
#define INTF_ACTIVE_V_START_F1 0x030
#define INTF_ACTIVE_V_END_F0 0x034
#define INTF_ACTIVE_V_END_F1 0x038
#define INTF_DISPLAY_HCTL 0x03C
#define INTF_ACTIVE_HCTL 0x040
#define INTF_BORDER_COLOR 0x044
#define INTF_UNDERFLOW_COLOR 0x048
#define INTF_HSYNC_SKEW 0x04C
#define INTF_POLARITY_CTL 0x050
#define INTF_TEST_CTL 0x054
#define INTF_TP_COLOR0 0x058
#define INTF_TP_COLOR1 0x05C
#define INTF_CONFIG2 0x060
#define INTF_DISPLAY_DATA_HCTL 0x064
#define INTF_ACTIVE_DATA_HCTL 0x068
#define INTF_DSI_CMD_MODE_TRIGGER_EN 0x084
#define INTF_PANEL_FORMAT 0x090
#define INTF_FRAME_LINE_COUNT_EN 0x0A8
#define INTF_FRAME_COUNT 0x0AC
#define INTF_LINE_COUNT 0x0B0
#define INTF_DEFLICKER_CONFIG 0x0F0
#define INTF_DEFLICKER_STRNG_COEFF 0x0F4
#define INTF_DEFLICKER_WEAK_COEFF 0x0F8
#define INTF_TPG_ENABLE 0x100
#define INTF_TPG_MAIN_CONTROL 0x104
#define INTF_TPG_VIDEO_CONFIG 0x108
#define INTF_TPG_COMPONENT_LIMITS 0x10C
#define INTF_TPG_RECTANGLE 0x110
#define INTF_TPG_INITIAL_VALUE 0x114
#define INTF_TPG_BLK_WHITE_PATTERN_FRAMES 0x118
#define INTF_TPG_RGB_MAPPING 0x11C
#define INTF_PROG_FETCH_START 0x170
#define INTF_PROG_ROT_START 0x174
#define INTF_MISR_CTRL 0x180
#define INTF_MISR_SIGNATURE 0x184
#define INTF_MUX 0x25C
#define INTF_STATUS 0x26C
#define INTF_AVR_CONTROL 0x270
#define INTF_AVR_MODE 0x274
#define INTF_AVR_TRIGGER 0x278
#define INTF_AVR_VTOTAL 0x27C
#define INTF_TEAR_MDP_VSYNC_SEL 0x280
#define INTF_TEAR_TEAR_CHECK_EN 0x284
#define INTF_TEAR_SYNC_CONFIG_VSYNC 0x288
#define INTF_TEAR_SYNC_CONFIG_HEIGHT 0x28C
#define INTF_TEAR_SYNC_WRCOUNT 0x290
#define INTF_TEAR_VSYNC_INIT_VAL 0x294
#define INTF_TEAR_INT_COUNT_VAL 0x298
#define INTF_TEAR_SYNC_THRESH 0x29C
#define INTF_TEAR_START_POS 0x2A0
#define INTF_TEAR_RD_PTR_IRQ 0x2A4
#define INTF_TEAR_WR_PTR_IRQ 0x2A8
#define INTF_TEAR_OUT_LINE_COUNT 0x2AC
#define INTF_TEAR_LINE_COUNT 0x2B0
#define INTF_TEAR_AUTOREFRESH_CONFIG 0x2B4
#define INTF_CFG_ACTIVE_H_EN BIT(29)
#define INTF_CFG_ACTIVE_V_EN BIT(30)
#define INTF_CFG2_DATABUS_WIDEN BIT(0)
#define INTF_CFG2_DATA_HCTL_EN BIT(4)
#define INTF_CFG2_DCE_DATA_COMPRESS BIT(12)
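/*
* The INTF_TEAR_* registers above implement the interface-level tear
* check; as dpu_hw_intf_init() below shows, the corresponding ops are
* only wired up for DSI interfaces on DPU core major version 5 and later.
*/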
static void dpu_hw_intf_setup_timing_engine(struct dpu_hw_intf *intf,
const struct dpu_hw_intf_timing_params *p,
const struct msm_format *fmt,
const struct dpu_mdss_version *mdss_ver)
{
struct dpu_hw_blk_reg_map *c = &intf->hw;
u32 hsync_period, vsync_period;
u32 display_v_start, display_v_end;
u32 hsync_start_x, hsync_end_x;
u32 hsync_data_start_x, hsync_data_end_x;
u32 active_h_start, active_h_end;
u32 active_v_start, active_v_end;
u32 active_hctl, display_hctl, hsync_ctl;
u32 polarity_ctl, den_polarity;
u32 panel_format;
u32 intf_cfg, intf_cfg2 = 0;
u32 display_data_hctl = 0, active_data_hctl = 0;
u32 data_width;
bool dp_intf = false;
/* read interface_cfg */
intf_cfg = DPU_REG_READ(c, INTF_CONFIG);
if (intf->cap->type == INTF_DP)
dp_intf = true;
hsync_period = p->hsync_pulse_width + p->h_back_porch + p->width +
p->h_front_porch;
vsync_period = p->vsync_pulse_width + p->v_back_porch + p->height +
p->v_front_porch;
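/*
* hsync_period is the total line length in pixel clocks and vsync_period
* is the total frame height in lines. As a hypothetical example, a
* 1920x1080 mode with hsync_pulse_width = 32, h_back_porch = 80 and
* h_front_porch = 48 gives hsync_period = 32 + 80 + 1920 + 48 = 2080
* pixel clocks.
*/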
display_v_start = ((p->vsync_pulse_width + p->v_back_porch) *
hsync_period) + p->hsync_skew;
display_v_end = ((vsync_period - p->v_front_porch) * hsync_period) +
p->hsync_skew - 1;
hsync_start_x = p->h_back_porch + p->hsync_pulse_width;
hsync_end_x = hsync_period - p->h_front_porch - 1;
if (p->width != p->xres) { /* border fill added */
active_h_start = hsync_start_x;
active_h_end = active_h_start + p->xres - 1;
} else {
active_h_start = 0;
active_h_end = 0;
}
if (p->height != p->yres) { /* border fill added */
active_v_start = display_v_start;
active_v_end = active_v_start + (p->yres * hsync_period) - 1;
} else {
active_v_start = 0;
active_v_end = 0;
}
if (active_h_end) {
active_hctl = (active_h_end << 16) | active_h_start;
intf_cfg |= INTF_CFG_ACTIVE_H_EN;
} else {
active_hctl = 0;
}
if (active_v_end)
intf_cfg |= INTF_CFG_ACTIVE_V_EN;
hsync_ctl = (hsync_period << 16) | p->hsync_pulse_width;
display_hctl = (hsync_end_x << 16) | hsync_start_x;
if (p->wide_bus_en)
intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN;
data_width = p->width;
/*
* If widebus is enabled, data is valid for only half the active window,
* since the data rate is doubled in this mode. For the DP compression
* case, however, p->width has already been adjusted in
* drm_mode_to_intf_timing_params().
*/
if (p->wide_bus_en && !dp_intf)
data_width = p->width >> 1;
/* TODO: handle DSC+DP case, we only handle DSC+DSI case so far */
if (p->compression_en && !dp_intf &&
mdss_ver->core_major_ver >= 7)
intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;
hsync_data_start_x = hsync_start_x;
hsync_data_end_x = hsync_start_x + data_width - 1;
display_data_hctl = (hsync_data_end_x << 16) | hsync_data_start_x;
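/*
* With the wide bus enabled on a non-DP interface, data_width is half of
* p->width, so (as a hypothetical example) a 1080-pixel-wide DSI panel
* programs a 540-clock data window here while the video timing above
* still spans the full 1080 pixels.
*/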
if (dp_intf) {
/* DP timing adjustment */
display_v_start += p->hsync_pulse_width + p->h_back_porch;
display_v_end -= p->h_front_porch;
active_h_start = hsync_start_x;
active_h_end = active_h_start + p->xres - 1;
active_v_start = display_v_start;
active_v_end = active_v_start + (p->yres * hsync_period) - 1;
active_hctl = (active_h_end << 16) | active_h_start;
display_hctl = active_hctl;
intf_cfg |= INTF_CFG_ACTIVE_H_EN | INTF_CFG_ACTIVE_V_EN;
}
den_polarity = 0;
polarity_ctl = (den_polarity << 2) | /* DEN Polarity */
(p->vsync_polarity << 1) | /* VSYNC Polarity */
(p->hsync_polarity << 0); /* HSYNC Polarity */
if (!MSM_FORMAT_IS_YUV(fmt))
panel_format = (fmt->bpc_g_y |
(fmt->bpc_b_cb << 2) |
(fmt->bpc_r_cr << 4) |
(0x21 << 8));
else
/* For YUV formats, the interface treats the pixel data as RGB888 */
panel_format = (BPC8 |
(BPC8 << 2) |
(BPC8 << 4) |
(0x21 << 8));
DPU_REG_WRITE(c, INTF_HSYNC_CTL, hsync_ctl);
DPU_REG_WRITE(c, INTF_VSYNC_PERIOD_F0, vsync_period * hsync_period);
DPU_REG_WRITE(c, INTF_VSYNC_PULSE_WIDTH_F0,
p->vsync_pulse_width * hsync_period);
DPU_REG_WRITE(c, INTF_DISPLAY_HCTL, display_hctl);
DPU_REG_WRITE(c, INTF_DISPLAY_V_START_F0, display_v_start);
DPU_REG_WRITE(c, INTF_DISPLAY_V_END_F0, display_v_end);
DPU_REG_WRITE(c, INTF_ACTIVE_HCTL, active_hctl);
DPU_REG_WRITE(c, INTF_ACTIVE_V_START_F0, active_v_start);
DPU_REG_WRITE(c, INTF_ACTIVE_V_END_F0, active_v_end);
DPU_REG_WRITE(c, INTF_BORDER_COLOR, p->border_clr);
DPU_REG_WRITE(c, INTF_UNDERFLOW_COLOR, p->underflow_clr);
DPU_REG_WRITE(c, INTF_HSYNC_SKEW, p->hsync_skew);
DPU_REG_WRITE(c, INTF_POLARITY_CTL, polarity_ctl);
DPU_REG_WRITE(c, INTF_FRAME_LINE_COUNT_EN, 0x3);
DPU_REG_WRITE(c, INTF_CONFIG, intf_cfg);
DPU_REG_WRITE(c, INTF_PANEL_FORMAT, panel_format);
if (intf->cap->features & BIT(DPU_DATA_HCTL_EN)) {
/*
* DATA_HCTL_EN controls the data timing, which can differ from the
* video timing. It is recommended to enable it in all cases, except
* when compression is enabled in 1 pixel per clock mode.
*/
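/*
* Concretely: the only combination that leaves DATA_HCTL_EN cleared is
* compression enabled with the wide bus disabled (1 pixel per clock);
* every other combination sets it.
*/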
if (!(p->compression_en && !p->wide_bus_en))
intf_cfg2 |= INTF_CFG2_DATA_HCTL_EN;
DPU_REG_WRITE(c, INTF_CONFIG2, intf_cfg2);
DPU_REG_WRITE(c, INTF_DISPLAY_DATA_HCTL, display_data_hctl);
DPU_REG_WRITE(c, INTF_ACTIVE_DATA_HCTL, active_data_hctl);
}
}
static void dpu_hw_intf_enable_timing_engine(
struct dpu_hw_intf *intf,
u8 enable)
{
struct dpu_hw_blk_reg_map *c = &intf->hw;
/* Note: Display interface select is handled in top block hw layer */
DPU_REG_WRITE(c, INTF_TIMING_ENGINE_EN, enable != 0);
}
static void dpu_hw_intf_setup_prg_fetch(
struct dpu_hw_intf *intf,
const struct dpu_hw_intf_prog_fetch *fetch)
{
struct dpu_hw_blk_reg_map *c = &intf->hw;
int fetch_enable;
/*
* Fetch should always start outside the active lines. If the fetch
* is programmed within the active region, hardware behavior is
* undefined.
*/
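/*
* The fetch_start value is supplied by the video-mode encoder, which
* typically programs it so that the fetch begins a few lines' worth of
* pixel clocks before the start of the next frame's active region.
*/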
fetch_enable = DPU_REG_READ(c, INTF_CONFIG);
if (fetch->enable) {
fetch_enable |= BIT(31);
DPU_REG_WRITE(c, INTF_PROG_FETCH_START,
fetch->fetch_start);
} else {
fetch_enable &= ~BIT(31);
}
DPU_REG_WRITE(c, INTF_CONFIG, fetch_enable);
}
static void dpu_hw_intf_bind_pingpong_blk(
struct dpu_hw_intf *intf,
const enum dpu_pingpong pp)
{
struct dpu_hw_blk_reg_map *c = &intf->hw;
u32 mux_cfg;
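/*
* The low nibble of INTF_MUX selects the PINGPONG block feeding this
* interface; 0xf (written below when no PINGPONG is passed) serves as
* the "none" encoding.
*/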
mux_cfg = DPU_REG_READ(c, INTF_MUX);
mux_cfg &= ~0xf;
if (pp)
mux_cfg |= (pp - PINGPONG_0) & 0x7;
else
mux_cfg |= 0xf;
DPU_REG_WRITE(c, INTF_MUX, mux_cfg);
}
static void dpu_hw_intf_get_status(
struct dpu_hw_intf *intf,
struct dpu_hw_intf_status *s)
{
struct dpu_hw_blk_reg_map *c = &intf->hw;
unsigned long cap = intf->cap->features;
if (cap & BIT(DPU_INTF_STATUS_SUPPORTED))
s->is_en = DPU_REG_READ(c, INTF_STATUS) & BIT(0);
else
s->is_en = DPU_REG_READ(c, INTF_TIMING_ENGINE_EN);
s->is_prog_fetch_en = !!(DPU_REG_READ(c, INTF_CONFIG) & BIT(31));
if (s->is_en) {
s->frame_count = DPU_REG_READ(c, INTF_FRAME_COUNT);
s->line_count = DPU_REG_READ(c, INTF_LINE_COUNT);
} else {
s->line_count = 0;
s->frame_count = 0;
}
}
static u32 dpu_hw_intf_get_line_count(struct dpu_hw_intf *intf)
{
struct dpu_hw_blk_reg_map *c;
if (!intf)
return 0;
c = &intf->hw;
return DPU_REG_READ(c, INTF_LINE_COUNT);
}
static void dpu_hw_intf_setup_misr(struct dpu_hw_intf *intf)
{
dpu_hw_setup_misr(&intf->hw, INTF_MISR_CTRL, 0x1);
}
static int dpu_hw_intf_collect_misr(struct dpu_hw_intf *intf, u32 *misr_value)
{
return dpu_hw_collect_misr(&intf->hw, INTF_MISR_CTRL, INTF_MISR_SIGNATURE, misr_value);
}
static int dpu_hw_intf_enable_te(struct dpu_hw_intf *intf,
struct dpu_hw_tear_check *te)
{
struct dpu_hw_blk_reg_map *c;
int cfg;
if (!intf)
return -EINVAL;
c = &intf->hw;
cfg = BIT(19); /* VSYNC_COUNTER_EN */
if (te->hw_vsync_mode)
cfg |= BIT(20);
cfg |= te->vsync_count;
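/*
* The config written below combines BIT(19) (vsync counter enable, set
* above), BIT(20) (hardware vsync mode, the same bit toggled by
* connect_external_te()), and te->vsync_count in the low bits.
*/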
DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_HEIGHT, te->sync_cfg_height);
DPU_REG_WRITE(c, INTF_TEAR_VSYNC_INIT_VAL, te->vsync_init_val);
DPU_REG_WRITE(c, INTF_TEAR_RD_PTR_IRQ, te->rd_ptr_irq);
DPU_REG_WRITE(c, INTF_TEAR_START_POS, te->start_pos);
DPU_REG_WRITE(c, INTF_TEAR_SYNC_THRESH,
((te->sync_threshold_continue << 16) |
te->sync_threshold_start));
DPU_REG_WRITE(c, INTF_TEAR_SYNC_WRCOUNT,
(te->start_pos + te->sync_threshold_start + 1));
DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 1);
return 0;
}
static void dpu_hw_intf_setup_autorefresh_config(struct dpu_hw_intf *intf,
u32 frame_count, bool enable)
{
struct dpu_hw_blk_reg_map *c;
u32 refresh_cfg;
c = &intf->hw;
refresh_cfg = DPU_REG_READ(c, INTF_TEAR_AUTOREFRESH_CONFIG);
if (enable)
refresh_cfg = BIT(31) | frame_count;
else
refresh_cfg &= ~BIT(31);
DPU_REG_WRITE(c, INTF_TEAR_AUTOREFRESH_CONFIG, refresh_cfg);
}
/*
* dpu_hw_intf_get_autorefresh_config - Get autorefresh config from HW
* @intf: DPU intf structure
* @frame_count: Used to return the current frame count from hw
*
* Returns: True if autorefresh enabled, false if disabled.
*/
static bool dpu_hw_intf_get_autorefresh_config(struct dpu_hw_intf *intf,
u32 *frame_count)
{
u32 val = DPU_REG_READ(&intf->hw, INTF_TEAR_AUTOREFRESH_CONFIG);
if (frame_count != NULL)
*frame_count = val & 0xffff;
return !!(val & BIT(31));
}
static int dpu_hw_intf_disable_te(struct dpu_hw_intf *intf)
{
struct dpu_hw_blk_reg_map *c;
if (!intf)
return -EINVAL;
c = &intf->hw;
DPU_REG_WRITE(c, INTF_TEAR_TEAR_CHECK_EN, 0);
return 0;
}
static int dpu_hw_intf_connect_external_te(struct dpu_hw_intf *intf,
bool enable_external_te)
{
struct dpu_hw_blk_reg_map *c;
u32 cfg;
int orig;
if (!intf)
return -EINVAL;
c = &intf->hw;
cfg = DPU_REG_READ(c, INTF_TEAR_SYNC_CONFIG_VSYNC);
orig = (bool)(cfg & BIT(20));
if (enable_external_te)
cfg |= BIT(20);
else
cfg &= ~BIT(20);
DPU_REG_WRITE(c, INTF_TEAR_SYNC_CONFIG_VSYNC, cfg);
trace_dpu_intf_connect_ext_te(intf->idx - INTF_0, cfg);
return orig;
}
static int dpu_hw_intf_get_vsync_info(struct dpu_hw_intf *intf,
struct dpu_hw_pp_vsync_info *info)
{
struct dpu_hw_blk_reg_map *c;
u32 val;
if (!intf || !info)
return -EINVAL;
c = &intf->hw;
val = DPU_REG_READ(c, INTF_TEAR_VSYNC_INIT_VAL);
info->rd_ptr_init_val = val & 0xffff;
val = DPU_REG_READ(c, INTF_TEAR_INT_COUNT_VAL);
info->rd_ptr_frame_count = (val & 0xffff0000) >> 16;
info->rd_ptr_line_count = val & 0xffff;
val = DPU_REG_READ(c, INTF_TEAR_LINE_COUNT);
info->wr_ptr_line_count = val & 0xffff;
val = DPU_REG_READ(c, INTF_FRAME_COUNT);
info->intf_frame_count = val;
return 0;
}
static void dpu_hw_intf_vsync_sel(struct dpu_hw_intf *intf,
enum dpu_vsync_source vsync_source)
{
struct dpu_hw_blk_reg_map *c;
if (!intf)
return;
c = &intf->hw;
DPU_REG_WRITE(c, INTF_TEAR_MDP_VSYNC_SEL, (vsync_source & 0xf));
}
static void dpu_hw_intf_disable_autorefresh(struct dpu_hw_intf *intf,
uint32_t encoder_id, u16 vdisplay)
{
struct dpu_hw_pp_vsync_info info;
int trial = 0;
/* If autorefresh is already disabled, we have nothing to do */
if (!dpu_hw_intf_get_autorefresh_config(intf, NULL))
return;
/*
* If autorefresh is enabled, disable it and make sure it is safe to
* proceed with the current frame commit/push. The sequence followed is:
* 1. Disable TE
* 2. Disable the autorefresh config
* 3. Poll until the frame transfer is no longer ongoing
* 4. Re-enable TE
*/
dpu_hw_intf_connect_external_te(intf, false);
dpu_hw_intf_setup_autorefresh_config(intf, 0, false);
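/*
* Poll the write-pointer line count: a value between 1 and vdisplay - 1
* means a frame transfer is still in progress, so keep waiting (bounded
* by the kickoff timeout) before re-enabling the external TE.
*/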
do {
udelay(DPU_ENC_MAX_POLL_TIMEOUT_US);
if ((trial * DPU_ENC_MAX_POLL_TIMEOUT_US)
> (KICKOFF_TIMEOUT_MS * USEC_PER_MSEC)) {
DPU_ERROR("enc%d intf%d disable autorefresh failed\n",
encoder_id, intf->idx - INTF_0);
break;
}
trial++;
dpu_hw_intf_get_vsync_info(intf, &info);
} while (info.wr_ptr_line_count > 0 &&
info.wr_ptr_line_count < vdisplay);
dpu_hw_intf_connect_external_te(intf, true);
DPU_DEBUG("enc%d intf%d disabled autorefresh\n",
encoder_id, intf->idx - INTF_0);
}
static void dpu_hw_intf_program_intf_cmd_cfg(struct dpu_hw_intf *intf,
struct dpu_hw_intf_cmd_mode_cfg *cmd_mode_cfg)
{
u32 intf_cfg2 = DPU_REG_READ(&intf->hw, INTF_CONFIG2);
if (cmd_mode_cfg->data_compress)
intf_cfg2 |= INTF_CFG2_DCE_DATA_COMPRESS;
if (cmd_mode_cfg->wide_bus_en)
intf_cfg2 |= INTF_CFG2_DATABUS_WIDEN;
DPU_REG_WRITE(&intf->hw, INTF_CONFIG2, intf_cfg2);
}
/**
* dpu_hw_intf_init() - Initializes the INTF driver for the passed
* interface catalog entry.
* @dev: Corresponding device for devres management
* @cfg: interface catalog entry for which driver object is required
* @addr: mapped register io address of MDP
* @mdss_rev: dpu core's major and minor versions
* Return: Error code or allocated dpu_hw_intf context
*/
struct dpu_hw_intf *dpu_hw_intf_init(struct drm_device *dev,
const struct dpu_intf_cfg *cfg,
void __iomem *addr,
const struct dpu_mdss_version *mdss_rev)
{
struct dpu_hw_intf *c;
if (cfg->type == INTF_NONE) {
DPU_DEBUG("Skip intf %d with type NONE\n", cfg->id - INTF_0);
return NULL;
}
c = drmm_kzalloc(dev, sizeof(*c), GFP_KERNEL);
if (!c)
return ERR_PTR(-ENOMEM);
c->hw.blk_addr = addr + cfg->base;
c->hw.log_mask = DPU_DBG_MASK_INTF;
/*
* Assign ops
*/
c->idx = cfg->id;
c->cap = cfg;
c->ops.setup_timing_gen = dpu_hw_intf_setup_timing_engine;
c->ops.setup_prg_fetch = dpu_hw_intf_setup_prg_fetch;
c->ops.get_status = dpu_hw_intf_get_status;
c->ops.enable_timing = dpu_hw_intf_enable_timing_engine;
c->ops.get_line_count = dpu_hw_intf_get_line_count;
c->ops.setup_misr = dpu_hw_intf_setup_misr;
c->ops.collect_misr = dpu_hw_intf_collect_misr;
if (cfg->features & BIT(DPU_INTF_INPUT_CTRL))
c->ops.bind_pingpong_blk = dpu_hw_intf_bind_pingpong_blk;
/* INTF TE is only for DSI interfaces */
if (mdss_rev->core_major_ver >= 5 && cfg->type == INTF_DSI) {
WARN_ON(!cfg->intr_tear_rd_ptr);
c->ops.enable_tearcheck = dpu_hw_intf_enable_te;
c->ops.disable_tearcheck = dpu_hw_intf_disable_te;
c->ops.connect_external_te = dpu_hw_intf_connect_external_te;
c->ops.vsync_sel = dpu_hw_intf_vsync_sel;
c->ops.disable_autorefresh = dpu_hw_intf_disable_autorefresh;
}
/* Technically, INTF_CONFIG2 is present for DPU 5.0+, but
* we can configure it for DPU 7.0+ since the wide bus and DSC flags
* would not be set for DPU < 7.0 anyway.
*/
if (mdss_rev->core_major_ver >= 7)
c->ops.program_intf_cmd_cfg = dpu_hw_intf_program_intf_cmd_cfg;
return c;
}
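/*
* Illustrative usage sketch (not part of this file; the caller-side
* variable names are assumptions): a resource-manager/encoder style
* caller is expected to do roughly the following for a video-mode
* interface:
*
*	struct dpu_hw_intf *intf;
*
*	intf = dpu_hw_intf_init(dev, cfg, addr, mdss_rev);
*	if (IS_ERR_OR_NULL(intf))
*		return;
*	if (intf->ops.bind_pingpong_blk)
*		intf->ops.bind_pingpong_blk(intf, PINGPONG_0);
*	intf->ops.setup_timing_gen(intf, &timing, fmt, mdss_rev);
*	intf->ops.enable_timing(intf, 1);
*/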