coreboot
coreboot is an Open Source project aimed at replacing the proprietary BIOS found in most computers.
dramc_pi_calibration_api.c
1 /* SPDX-License-Identifier: GPL-2.0-only */
2 
3 #include <assert.h>
4 #include <console/console.h>
5 #include <delay.h>
6 #include <device/mmio.h>
7 #include <soc/emi.h>
8 #include <soc/dramc_register.h>
9 #include <soc/dramc_param.h>
10 #include <soc/dramc_pi_api.h>
11 #include <soc/spm.h>
12 #include <timer.h>
13 
14 enum {
21 };
22 
23 enum {
28 };
29 
30 #define WRITE_LEVELING_MOVD_DQS 1
31 #define TEST2_1_CAL 0x55000000
32 #define TEST2_2_CAL 0xaa000400
33 
34 enum CAL_TYPE {
35  RX_WIN_RD_DQC,
36  RX_WIN_TEST_ENG,
37  TX_WIN_DQ_ONLY,
38  TX_WIN_DQ_DQM,
39 };
40 
41 enum RX_TYPE {
42  RX_DQ = 0,
43  RX_DQM,
44  RX_DQS,
45 };
46 
47 struct win_perbit_dly {
48  s16 first_pass;
49  s16 last_pass;
50  s16 best_first;
51  s16 best_last;
52  s16 win_center;
53  u16 best_dqdly;
54 };
55 
56 struct vref_perbit_dly {
57  u8 best_vref;
58  u32 max_win_sum;
59  struct win_perbit_dly perbit_dly[DQ_DATA_WIDTH];
60 };
61 
62 struct tx_dly_tune {
63  u8 fine_tune;
64  u8 coarse_tune_large;
65  u8 coarse_tune_small;
66  u8 coarse_tune_large_oen;
67  u8 coarse_tune_small_oen;
68 };
69 
70 struct per_byte_dly {
71  u16 max_center;
72  u16 min_center;
73  u16 final_dly;
74 };
75 
76 static const u8 lp4_ca_mapping_pop[CHANNEL_MAX][CA_NUM_LP4] = {
77  [CHANNEL_A] = {1, 4, 3, 2, 0, 5},
78  [CHANNEL_B] = {0, 3, 2, 4, 1, 5},
79 };
80 
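/*
 * Switch DRAM auto-refresh on or off. When disabling, the HW only stops
 * refreshing once the refresh queue has drained, so we wait ~4us per
 * queued refresh before returning.
 */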
81 static void dramc_auto_refresh_switch(u8 chn, bool option)
82 {
83  SET32_BITFIELDS(&ch[chn].ao.refctrl0, REFCTRL0_REFDIS, option ? 0 : 1);
84 
85  if (!option) {
86  /*
87  * Because the HW only disables auto-refresh after the
88  * refresh queue is empty, wait here until the queue drains.
89  */
90  udelay(READ32_BITFIELD(&ch[chn].nao.misc_statusa,
91  MISC_STATUSA_REFRESH_QUEUE_CNT) * 4);
92  }
93 }
94 
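/*
 * Read a DRAM mode register: select the MR index, fire an MRR special
 * command, poll for the MRR response, then latch the value from the
 * MRR status register.
 */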
95 static u16 dramc_mode_reg_read(u8 chn, u8 mr_idx)
96 {
97  u16 value;
98  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSMA, mr_idx);
99  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_MRREN, 1);
100 
101  /* Wait until the MRR command has fired */
102  while (READ32_BITFIELD(&ch[chn].nao.spcmdresp, SPCMDRESP_MRR_RESPONSE)
103  == 0)
104  udelay(1);
105 
106  value = READ32_BITFIELD(&ch[chn].nao.mrr_status, MRR_STATUS_MRR_REG);
107 
108  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_MRREN, 0);
109  dramc_dbg("Read MR%d =%#x\n", mr_idx, value);
110 
111  return value;
112 }
113 
114 void dramc_mode_reg_write(u8 chn, u8 mr_idx, u8 value)
115 {
116  u32 ckectrl_bak = read32(&ch[chn].ao.ckectrl);
117 
119  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSMA, mr_idx);
120  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSOP, value);
121  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_MRWEN, 1);
122 
123  /* Wait until the MRW command has fired */
124  while (READ32_BITFIELD(&ch[chn].nao.spcmdresp, SPCMDRESP_MRW_RESPONSE)
125  == 0)
126  udelay(1);
127 
128  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_MRWEN, 0);
129  write32(&ch[chn].ao.ckectrl, ckectrl_bak);
130  dramc_dbg("Write MR%d =0x%x\n", mr_idx, value);
131 }
132 
133 static u8 dramc_mode_reg_read_by_rank(u8 chn, u8 rank, u8 mr_idx)
134 {
135  u8 value;
136  u32 rk_bak = READ32_BITFIELD(&ch[chn].ao.mrs, MRS_MRRRK);
137 
138  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRRRK, rank);
139  value = dramc_mode_reg_read(chn, mr_idx);
140  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRRRK, rk_bak);
141 
142  dramc_dbg("Mode reg read rank%d MR%d = %#x\n", rank, mr_idx, value);
143  return value;
144 }
145 
146 static void dramc_mode_reg_write_by_rank(u8 chn, u8 rank,
147  u8 mr_idx, u8 value)
148 {
149  u32 mrs_bak = READ32_BITFIELD(&ch[chn].ao.mrs, MRS_MRSRK);
150  dramc_dbg("Mode reg write rank%d MR%d = 0x%x\n", rank, mr_idx, value);
151 
152  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, rank);
153  dramc_mode_reg_write(chn, mr_idx, value);
154  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, mrs_bak);
155 }
156 
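/*
 * Shift a TX delay by a signed number of coarse-tune steps. The delay is
 * stored as a 0.5T part (in reg_0) and a 2T part (in reg_1); both parts
 * are merged, shifted, clamped at zero and written back to the registers.
 */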
157 static void move_dramc_delay(u32 *reg_0, u32 *reg_1, u8 shift, s8 shift_coarse_tune)
158 {
159  s32 sum;
160  u32 tmp_0p5t, tmp_2t;
161 
162  tmp_0p5t = ((read32(reg_0) >> shift) & DQ_DIV_MASK) &
163  ~(1 << DQ_DIV_SHIFT);
164  tmp_2t = (read32(reg_1) >> shift) & DQ_DIV_MASK;
165 
166  sum = (tmp_2t << DQ_DIV_SHIFT) + tmp_0p5t + shift_coarse_tune;
167 
168  if (sum < 0) {
169  tmp_0p5t = 0;
170  tmp_2t = 0;
171  } else {
172  tmp_2t = sum >> DQ_DIV_SHIFT;
173  tmp_0p5t = sum - (tmp_2t << DQ_DIV_SHIFT);
174  }
175 
176  clrsetbits32(reg_0, DQ_DIV_MASK << shift, tmp_0p5t << shift);
177  clrsetbits32(reg_1, DQ_DIV_MASK << shift, tmp_2t << shift);
178 }
179 
180 static void move_dramc_tx_dqs(u8 chn, u8 byte, s8 shift_coarse_tune)
181 {
182  move_dramc_delay(&ch[chn].ao.shu[0].selph_dqs1,
183  &ch[chn].ao.shu[0].selph_dqs0, byte * 4, shift_coarse_tune);
184 }
185 
186 static void move_dramc_tx_dqs_oen(u8 chn, u8 byte,
187  s8 shift_coarse_tune)
188 {
189  move_dramc_delay(&ch[chn].ao.shu[0].selph_dqs1,
190  &ch[chn].ao.shu[0].selph_dqs0, byte * 4 + OEN_SHIFT, shift_coarse_tune);
191 }
192 
193 static void move_dramc_tx_dq(u8 chn, u8 rank, u8 byte, s8 shift_coarse_tune)
194 {
195  /* DQM0 */
196  move_dramc_delay(&ch[chn].ao.shu[0].rk[rank].selph_dq[3],
197  &ch[chn].ao.shu[0].rk[rank].selph_dq[1], byte * 4, shift_coarse_tune);
198 
199  /* DQ0 */
200  move_dramc_delay(&ch[chn].ao.shu[0].rk[rank].selph_dq[2],
201  &ch[chn].ao.shu[0].rk[rank].selph_dq[0], byte * 4, shift_coarse_tune);
202 }
203 
204 static void move_dramc_tx_dq_oen(u8 chn, u8 rank,
205  u8 byte, s8 shift_coarse_tune)
206 {
207  /* DQM_OEN_0 */
208  move_dramc_delay(&ch[chn].ao.shu[0].rk[rank].selph_dq[3],
209  &ch[chn].ao.shu[0].rk[rank].selph_dq[1],
210  byte * 4 + OEN_SHIFT, shift_coarse_tune);
211 
212  /* DQ_OEN_0 */
213  move_dramc_delay(&ch[chn].ao.shu[0].rk[rank].selph_dq[2],
214  &ch[chn].ao.shu[0].rk[rank].selph_dq[0],
215  byte * 4 + OEN_SHIFT, shift_coarse_tune);
216 }
217 
218 static void write_leveling_move_dqs_instead_of_clk(u8 chn)
219 {
220  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
221  move_dramc_tx_dqs(chn, byte, -WRITE_LEVELING_MOVD_DQS);
222  move_dramc_tx_dqs_oen(chn, byte, -WRITE_LEVELING_MOVD_DQS);
223 
224  for (u8 rk = RANK_0; rk < RANK_MAX; rk++) {
225  move_dramc_tx_dq(chn, rk, byte, -WRITE_LEVELING_MOVD_DQS);
226  move_dramc_tx_dq_oen(chn, rk, byte, -WRITE_LEVELING_MOVD_DQS);
227  }
228  }
229 }
230 
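/*
 * Apply pre-computed write-leveling results: program the per-byte DQS
 * fine-tune delay and, when the derived DQ delay overflows 0x40 taps,
 * carry the excess into the coarse-tune (SELPH) registers.
 */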
231 static void dramc_write_leveling(u8 chn, u8 rank, u8 freq_group,
232  const u8 wr_level[CHANNEL_MAX][RANK_MAX][DQS_NUMBER])
233 {
234  dramc_auto_refresh_switch(chn, false);
235 
236  if (rank == RANK_0)
237  write_leveling_move_dqs_instead_of_clk(chn);
238 
239  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].ca_cmd[9],
240  SHU1_R0_CA_CMD9_RG_RK0_ARPI_CLK, 0);
241 
242  for (size_t byte = 0; byte < DQS_NUMBER; byte++) {
243  u32 wrlevel_dq_delay = wr_level[chn][rank][byte] + 0x10;
244  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[7],
245  FINE_TUNE_PBYTE, wr_level[chn][rank][byte]);
246  if (wrlevel_dq_delay >= 0x40) {
247  wrlevel_dq_delay -= 0x40;
248  move_dramc_tx_dq(chn, rank, byte, 2);
249  move_dramc_tx_dq_oen(chn, rank, byte, 2);
250  }
251  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[7],
252  FINE_TUNE_DQM, wrlevel_dq_delay,
253  FINE_TUNE_DQ, wrlevel_dq_delay);
254  }
255 }
256 
257 static void cbt_set_perbit_delay_cell(u8 chn, u8 rank)
258 {
259  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].ca_cmd[0],
260  SHU1_R0_CA_CMD0_RK0_TX_ARCA0_DLY, 0,
261  SHU1_R0_CA_CMD0_RK0_TX_ARCA1_DLY, 0,
262  SHU1_R0_CA_CMD0_RK0_TX_ARCA2_DLY, 0,
263  SHU1_R0_CA_CMD0_RK0_TX_ARCA3_DLY, 0,
264  SHU1_R0_CA_CMD0_RK0_TX_ARCA4_DLY, 0,
265  SHU1_R0_CA_CMD0_RK0_TX_ARCA5_DLY, 0);
266 }
267 
268 static void set_dram_mr_cbt_on_off(u8 chn, u8 rank, u8 fsp,
269  bool cbt_on, struct mr_value *mr, u32 cbt_mode)
270 {
271  u8 MR13Value = mr->MR13Value;
272 
273  if (cbt_on) {
274  MR13Value |= 0x1;
275  if (fsp == FSP_1)
276  MR13Value &= 0x7f;
277  else
278  MR13Value |= 0x80;
279 
280  if (cbt_mode)
281  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_BYTEMODECBTEN, 1);
282  } else {
283  MR13Value &= 0xfe;
284  if (fsp == FSP_1)
285  MR13Value |= 0x80;
286  else
287  MR13Value &= 0x7f;
288  }
289 
290  dramc_mode_reg_write_by_rank(chn, rank, 13, MR13Value);
291  mr->MR13Value = MR13Value;
292 }
293 
294 static void cbt_set_fsp(u8 chn, u8 rank, u8 fsp, struct mr_value *mr)
295 {
296  u8 MR13Value = mr->MR13Value;
297 
298  if (fsp == FSP_0) {
299  MR13Value &= ~(BIT(6));
300  MR13Value &= 0x7f;
301  } else {
302  MR13Value |= BIT(6);
303  MR13Value |= 0x80;
304  }
305 
306  dramc_mode_reg_write_by_rank(chn, rank, 13, MR13Value);
307  mr->MR13Value = MR13Value;
308 }
309 
310 static void o1_path_on_off(u8 cbt_on)
311 {
312  u8 fix_dqien = (cbt_on == 1) ? 3 : 0;
313 
314  for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
315  SET32_BITFIELDS(&ch[chn].ao.padctrl, PADCTRL_FIXDQIEN, fix_dqien);
316  SET32_BITFIELDS(&ch[chn].phy.b[0].dq[5],
317  B0_DQ5_RG_RX_ARDQ_EYE_VREF_EN_B0, cbt_on);
318  SET32_BITFIELDS(&ch[chn].phy.b[1].dq[5],
319  B1_DQ5_RG_RX_ARDQ_EYE_VREF_EN_B1, cbt_on);
320  SET32_BITFIELDS(&ch[chn].phy.b[0].dq[3],
321  B0_DQ3_RG_RX_ARDQ_SMT_EN_B0, cbt_on);
322  SET32_BITFIELDS(&ch[chn].phy.b[1].dq[3],
323  B1_DQ3_RG_RX_ARDQ_SMT_EN_B1, cbt_on);
324  }
325  udelay(1);
326 }
327 
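/*
 * Enter command bus training mode: stop DRAMC clock gating, set the CBT
 * enable bit in MR13 for the target FSP, and turn on the O1 path so the
 * CA levels can be observed on the DQ pins.
 */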
328 static void cbt_entry(u8 chn, u8 rank, u8 fsp, struct mr_value *mr, u32 cbt_mode)
329 {
330  SET32_BITFIELDS(&ch[chn].ao.dramc_pd_ctrl,
331  DRAMC_PD_CTRL_PHYCLKDYNGEN, 0,
332  DRAMC_PD_CTRL_DCMEN, 0);
333  SET32_BITFIELDS(&ch[chn].ao.stbcal, STBCAL_DQSIENCG_NORMAL_EN, 0);
334  SET32_BITFIELDS(&ch[chn].ao.dramc_pd_ctrl, DRAMC_PD_CTRL_MIOCKCTRLOFF, 1);
335 
337  set_dram_mr_cbt_on_off(chn, rank, fsp, true, mr, cbt_mode);
338 
339  if (cbt_mode == 0)
340  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_WRITE_LEVEL_EN, 1);
341 
342  udelay(1);
344  o1_path_on_off(1);
345 }
346 
347 static void cbt_exit(u8 chn, u8 rank, u8 fsp, struct mr_value *mr, u32 cbt_mode)
348 {
350 
351  udelay(1);
352  set_dram_mr_cbt_on_off(chn, rank, fsp, false, mr, cbt_mode);
353  o1_path_on_off(0);
354 
355  if (cbt_mode)
356  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_BYTEMODECBTEN, 0);
357 }
358 
359 static void cbt_set_vref(u8 chn, u8 rank, u8 vref, bool is_final, u32 cbt_mode)
360 {
361  if (cbt_mode == 0 && !is_final) {
362  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_DMVREFCA, vref);
363  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_DQS_SEL, 1);
364  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_DQSBX_G, 0xa);
365  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_DQS_WLEV, 1);
366  udelay(1);
367  SET32_BITFIELDS(&ch[chn].ao.write_lev, WRITE_LEV_DQS_WLEV, 0);
368  } else {
369  vref |= BIT(6);
370  dramc_dbg("final_vref: %#x\n", vref);
371 
372  /* CBT set vref */
373  dramc_mode_reg_write_by_rank(chn, rank, 12, vref);
374  }
375 }
376 
377 static void cbt_set_ca_clk_result(u8 chn, u8 rank,
378  const struct sdram_params *params)
379 {
380  const u8 *perbit_dly;
381  u8 clk_dly = params->cbt_clk_dly[chn][rank];
382  u8 cmd_dly = params->cbt_cmd_dly[chn][rank];
383  const u8 *ca_mapping = lp4_ca_mapping_pop[chn];
384 
385  for (u8 rk = 0; rk < rank + 1; rk++) {
386  /* Set CLK and CA delay */
387  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rk].ca_cmd[9],
388  SHU1_R0_CA_CMD9_RG_RK0_ARPI_CMD, cmd_dly,
389  SHU1_R0_CA_CMD9_RG_RK0_ARPI_CLK, clk_dly);
390  udelay(1);
391 
392  perbit_dly = params->cbt_ca_perbit_delay[chn][rk];
393 
394  /* Set CA perbit delay line calibration results */
395  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rk].ca_cmd[0],
396  SHU1_R0_CA_CMD0_RK0_TX_ARCA0_DLY, perbit_dly[ca_mapping[0]],
397  SHU1_R0_CA_CMD0_RK0_TX_ARCA1_DLY, perbit_dly[ca_mapping[1]],
398  SHU1_R0_CA_CMD0_RK0_TX_ARCA2_DLY, perbit_dly[ca_mapping[2]],
399  SHU1_R0_CA_CMD0_RK0_TX_ARCA3_DLY, perbit_dly[ca_mapping[3]],
400  SHU1_R0_CA_CMD0_RK0_TX_ARCA4_DLY, perbit_dly[ca_mapping[4]],
401  SHU1_R0_CA_CMD0_RK0_TX_ARCA5_DLY, perbit_dly[ca_mapping[5]]);
402  }
403 }
404 
405 static u8 get_cbt_vref_pinmux_value(u8 chn, u8 vref_level, u32 cbt_mode)
406 {
407  u8 vref_bit, vref_new, vref_org;
408 
409  vref_new = 0;
410  vref_org = BIT(6) | (vref_level & 0x3f);
411 
412  if (cbt_mode) {
413  dramc_dbg("vref_org: %#x for byte mode\n", vref_org);
414 
415  return vref_org;
416  }
417  for (vref_bit = 0; vref_bit < 8; vref_bit++) {
418  if (vref_org & (1 << vref_bit))
419  vref_new |= (1 << phy_mapping[chn][vref_bit]);
420  }
421 
422  dramc_dbg("vref_new: %#x --> %#x\n", vref_org, vref_new);
423 
424  return vref_new;
425 }
426 
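/*
 * DFS shuffle jump used during CBT, ping-ponging between PHYPLL and
 * CLRPLL: disable the active PLL, program the target shuffle level into
 * the SPM, enable the other PLL, then complete the shuffle-enable
 * handshake (waiting for the ACK from all channels).
 */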
427 static void cbt_dramc_dfs_direct_jump(u8 shu_level, bool run_dvfs)
428 {
429  u8 shu_ack = 0;
430  static bool phy_pll_en = true;
431 
432  if (!run_dvfs)
433  return;
434 
435  for (u8 chn = 0; chn < CHANNEL_MAX; chn++)
436  shu_ack |= (0x1 << chn);
437 
438  if (phy_pll_en) {
439  dramc_dbg("Disable CLRPLL\n");
440  SET32_BITFIELDS(&ch[0].phy.pll2, PLL2_RG_RCLRPLL_EN, 0);
441  dramc_dbg("DFS jump to CLRPLL, shu lev=%d, ACK=%x\n",
442  shu_level, shu_ack);
443  } else {
444  dramc_dbg("Disable PHYPLL\n");
445  SET32_BITFIELDS(&ch[0].phy.pll1, PLL1_RG_RPHYPLL_EN, 0);
446  dramc_dbg("DFS jump to PHYPLL, shu lev=%d, ACK=%x\n",
447  shu_level, shu_ack);
448  }
449 
450  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
451  SPM_POWER_ON_VAL0_SC_PHYPLL1_SHU_EN_PCM, 0);
452  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
453  SPM_POWER_ON_VAL0_SC_PHYPLL2_SHU_EN_PCM, 0);
454  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
455  SPM_POWER_ON_VAL0_SC_DR_SHU_LEVEL_PCM, 0);
456  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
457  SPM_POWER_ON_VAL0_SC_DR_SHU_LEVEL_PCM, shu_level);
458 
459  if (phy_pll_en) {
460  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
461  SPM_POWER_ON_VAL0_SC_PHYPLL2_SHU_EN_PCM, 1);
462  udelay(1);
463  SET32_BITFIELDS(&ch[0].phy.pll2, PLL2_RG_RCLRPLL_EN, 1);
464  dramc_dbg("Enable CLRPLL\n");
465  } else {
466  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
467  SPM_POWER_ON_VAL0_SC_PHYPLL1_SHU_EN_PCM, 1);
468  udelay(1);
469  SET32_BITFIELDS(&ch[0].phy.pll1, PLL1_RG_RPHYPLL_EN, 1);
470  dramc_dbg("Enable PHYPLL\n");
471  }
472 
473  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
474  SPM_POWER_ON_VAL0_SC_TX_TRACKING_DIS, 3);
475 
476  udelay(20);
477  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
478  SPM_POWER_ON_VAL0_SC_DDRPHY_FB_CK_EN_PCM, 1);
479  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
480  SPM_POWER_ON_VAL0_SC_DPHY_RXDLY_TRACK_EN, 0);
481  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
482  SPM_POWER_ON_VAL0_SC_DR_SHU_EN_PCM, 1);
483 
484  while ((READ32_BITFIELD(&mtk_spm->dramc_dpy_clk_sw_con,
485  DRAMC_DPY_CLK_SW_CON_SC_DMDRAMCSHU_ACK) & shu_ack)
486  != shu_ack) {
487  dramc_dbg("wait shu_en ack.\n");
488  }
489  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
490  SPM_POWER_ON_VAL0_SC_DR_SHU_EN_PCM, 0);
491 
492  if (shu_level == 0)
493  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
494  SPM_POWER_ON_VAL0_SC_DPHY_RXDLY_TRACK_EN, 3);
495 
496  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
497  SPM_POWER_ON_VAL0_SC_TX_TRACKING_DIS, 0);
498  SET32_BITFIELDS(&mtk_spm->spm_power_on_val0,
499  SPM_POWER_ON_VAL0_SC_DDRPHY_FB_CK_EN_PCM, 0);
500 
501  if (phy_pll_en)
502  SET32_BITFIELDS(&ch[0].phy.pll1, PLL1_RG_RPHYPLL_EN, 0);
503  else
504  SET32_BITFIELDS(&ch[0].phy.pll2, PLL2_RG_RCLRPLL_EN, 0);
505  dramc_dbg("Shuffle flow complete\n");
506 
507  phy_pll_en = !phy_pll_en;
508 }
509 
510 static void cbt_switch_freq(cbt_freq freq, bool run_dvfs)
511 {
512  if (freq == CBT_LOW_FREQ)
513  cbt_dramc_dfs_direct_jump(DRAM_DFS_SHUFFLE_MAX - 1, run_dvfs);
514  else
515  cbt_dramc_dfs_direct_jump(DRAM_DFS_SHUFFLE_1, run_dvfs);
516 }
517 
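/*
 * LPDDR4 command bus training for one channel/rank: back up the touched
 * registers, drop to the low CBT frequency where FSP_1 requires it, apply
 * the CA/CLK/CS delays and CA Vref (MR12) from sdram_params, then restore
 * the previous state.
 */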
518 static void dramc_cmd_bus_training(u8 chn, u8 rank, u8 freq_group,
519  const struct sdram_params *params, struct mr_value *mr,
520  bool run_dvfs)
521 {
522  u8 final_vref, cs_dly;
523  u8 fsp = get_freq_fsq(freq_group);
524  u32 cbt_mode = params->cbt_mode_extern;
525 
526  cs_dly = params->cbt_cs_dly[chn][rank];
527  final_vref = params->cbt_final_vref[chn][rank];
528 
529  struct reg_value regs_bak[] = {
530  {&ch[chn].ao.dramc_pd_ctrl},
531  {&ch[chn].ao.stbcal},
532  {&ch[chn].ao.ckectrl},
533  {&ch[chn].ao.write_lev},
534  {&ch[chn].ao.refctrl0},
535  {&ch[chn].ao.spcmdctrl},
536  };
537 
538  for (int i = 0; i < ARRAY_SIZE(regs_bak); i++)
539  regs_bak[i].value = read32(regs_bak[i].addr);
540 
541  dramc_auto_refresh_switch(chn, false);
542 
543  if (rank == RANK_1) {
544  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, rank);
545  SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_TXRANK, rank);
546  SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_TXRANKFIX, 1);
547  SET32_BITFIELDS(&ch[chn].ao.mpc_option, MPC_OPTION_MPCRKEN, 0);
548  }
549 
550  cbt_set_perbit_delay_cell(chn, rank);
551 
552  if (cbt_mode == 0) {
554  if (fsp == FSP_1)
555  cbt_switch_freq(CBT_LOW_FREQ, run_dvfs);
556  cbt_entry(chn, rank, fsp, mr, cbt_mode);
557  udelay(1);
558  if (fsp == FSP_1)
559  cbt_switch_freq(CBT_HIGH_FREQ, run_dvfs);
560  }
561 
562  u8 new_vref = get_cbt_vref_pinmux_value(chn, final_vref, cbt_mode);
563 
564  if (cbt_mode) {
565  if (fsp == FSP_1)
566  cbt_switch_freq(CBT_LOW_FREQ, run_dvfs);
567 
568  cbt_set_fsp(chn, rank, fsp, mr);
569  cbt_set_vref(chn, rank, new_vref, true, cbt_mode);
570  cbt_entry(chn, rank, fsp, mr, cbt_mode);
571  udelay(1);
572 
573  if (fsp == FSP_1)
574  cbt_switch_freq(CBT_HIGH_FREQ, run_dvfs);
575  } else {
576  cbt_set_vref(chn, rank, new_vref, false, cbt_mode);
577  }
578 
579  cbt_set_ca_clk_result(chn, rank, params);
580  udelay(1);
581 
582  for (u8 rk = 0; rk < rank + 1; rk++) {
583  /* Set CLK and CS delay */
584  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rk].ca_cmd[9],
585  SHU1_R0_CA_CMD9_RG_RK0_ARPI_CS, cs_dly);
586  }
587 
588  if (fsp == FSP_1)
589  cbt_switch_freq(CBT_LOW_FREQ, run_dvfs);
590  cbt_exit(chn, rank, fsp, mr, cbt_mode);
591 
592  if (cbt_mode == 0) {
593  cbt_set_fsp(chn, rank, fsp, mr);
594  cbt_set_vref(chn, rank, final_vref, true, cbt_mode);
595  }
596 
597  if (fsp == FSP_1)
598  cbt_switch_freq(CBT_HIGH_FREQ, run_dvfs);
599 
600  /* restore MRR pinmux */
602  if (rank == RANK_1) {
603  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, 0);
604  SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_TXRANK, 0);
605  SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_TXRANKFIX, 0);
606  SET32_BITFIELDS(&ch[chn].ao.mpc_option, MPC_OPTION_MPCRKEN, 0x1);
607  }
608 
609  for (int i = 0; i < ARRAY_SIZE(regs_bak); i++)
610  write32(regs_bak[i].addr, regs_bak[i].value);
611 }
612 
613 static void dramc_read_dbi_onoff(size_t chn, bool on)
614 {
615  for (size_t b = 0; b < 2; b++)
616  SET32_BITFIELDS(&ch[chn].phy.shu[0].b[b].dq[7],
617  SHU1_B0_DQ7_R_DMDQMDBI_SHU_B0, on);
618 }
619 
620 static void dramc_write_dbi_onoff(size_t chn, bool onoff)
621 {
622  SET32_BITFIELDS(&ch[chn].ao.shu[0].wodt, SHU1_WODT_DBIWR, onoff);
623 }
624 
625 static void dramc_phy_dcm_2_channel(u8 chn, bool en)
626 {
627  clrsetbits32(&ch[chn].phy.misc_cg_ctrl0, (0x3 << 19) | (0x3ff << 8),
628  ((en ? 0 : 0x1) << 19) | ((en ? 0 : 0x1ff) << 9) | (1 << 8));
629 
630  for (size_t i = 0; i < DRAM_DFS_SHUFFLE_MAX; i++) {
631  struct ddrphy_ao_shu *shu = &ch[chn].phy.shu[i];
632  for (size_t b = 0; b < 2; b++)
633  clrsetbits32(&shu->b[b].dq[8], 0x1fff << 19,
634  ((en ? 0 : 0x7ff) << 22) | (0x1 << 21) |
635  ((en ? 0 : 0x3) << 19));
636  clrbits32(&shu->ca_cmd[8], 0x1fff << 19);
637  }
638  clrsetbits32(&ch[chn].phy.misc_cg_ctrl5, (0x7 << 16) | (0x7 << 20),
639  ((en ? 0x7 : 0) << 16) | ((en ? 0x7 : 0) << 20));
640 }
641 
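/*
 * Enable or disable dynamic clock management: toggles the gating enables
 * in dramc_pd_ctrl, the PHY misc_cg_ctrl registers and each shuffle's
 * DQ/CA clock-gating bits.
 */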
642 void dramc_enable_phy_dcm(u8 chn, bool en)
643 {
644  clrbits32(&ch[chn].phy.b[0].dll_fine_tune[1], 0x1 << 20);
645  clrbits32(&ch[chn].phy.b[1].dll_fine_tune[1], 0x1 << 20);
646  clrbits32(&ch[chn].phy.ca_dll_fine_tune[1], 0x1 << 20);
647 
648  for (size_t i = 0; i < DRAM_DFS_SHUFFLE_MAX; i++) {
649  struct ddrphy_ao_shu *shu = &ch[chn].phy.shu[i];
650  setbits32(&shu->b[0].dll[0], 0x1);
651  setbits32(&shu->b[1].dll[0], 0x1);
652  setbits32(&shu->ca_dll[0], 0x1);
653  }
654 
655  clrsetbits32(&ch[chn].ao.dramc_pd_ctrl,
656  (0x1 << 0) | (0x1 << 1) | (0x1 << 2) |
657  (0x1 << 5) | (0x1 << 26) | (0x1 << 30) | (0x1 << 31),
658  ((en ? 0x1 : 0) << 0) | ((en ? 0x1 : 0) << 1) |
659  ((en ? 0x1 : 0) << 2) | ((en ? 0 : 0x1) << 5) |
660  ((en ? 0 : 0x1) << 26) | ((en ? 0x1 : 0) << 30) |
661  ((en ? 0x1 : 0) << 31));
662 
663  /* DCM on: CHANNEL_EMI free run; DCM off: mem_dcm */
664  write32(&ch[chn].phy.misc_cg_ctrl2,
665  0x8060033e | (0x40 << (en ? 0x1 : 0)));
666  write32(&ch[chn].phy.misc_cg_ctrl2,
667  0x8060033f | (0x40 << (en ? 0x1 : 0)));
668  write32(&ch[chn].phy.misc_cg_ctrl2,
669  0x8060033e | (0x40 << (en ? 0x1 : 0)));
670 
671  clrsetbits32(&ch[chn].phy.misc_ctrl3, 0x3 << 26,
672  (en ? 0 : 0x3) << 26);
673  for (size_t i = 0; i < DRAM_DFS_SHUFFLE_MAX; i++) {
674  u32 mask = 0x7 << 17;
675  u32 value = (en ? 0x7 : 0) << 17;
676  struct ddrphy_ao_shu *shu = &ch[chn].phy.shu[i];
677 
678  clrsetbits32(&shu->b[0].dq[7], mask, value);
679  clrsetbits32(&shu->b[1].dq[7], mask, value);
680  clrsetbits32(&shu->ca_cmd[7], mask, value);
681  }
682 
683  dramc_phy_dcm_2_channel(chn, en);
684 }
685 
686 static void dramc_reset_delay_chain_before_calibration(u8 chn)
687 {
688  for (size_t rank = 0; rank < RANK_MAX; rank++) {
689  struct dramc_ddrphy_regs_shu_rk *rk =
690  &ch[chn].phy.shu[0].rk[rank];
691  clrbits32(&rk->ca_cmd[0], 0xffffff << 0);
692  clrbits32(&rk->b[0].dq[0], 0xfffffff << 0);
693  clrbits32(&rk->b[1].dq[0], 0xfffffff << 0);
694  clrbits32(&rk->b[0].dq[1], 0xf << 0);
695  clrbits32(&rk->b[1].dq[1], 0xf << 0);
696  }
697 }
698 
699 void dramc_hw_gating_onoff(u8 chn, bool on)
700 {
701  clrsetbits32(&ch[chn].ao.shuctrl2, 0x3 << 14,
702  (on ? 0x3 : 0) << 14);
703  clrsetbits32(&ch[chn].ao.stbcal2, 0x1 << 28, (on ? 0x1 : 0) << 28);
704  clrsetbits32(&ch[chn].ao.stbcal, 0x1 << 24, (on ? 0x1 : 0) << 24);
705  clrsetbits32(&ch[chn].ao.stbcal, 0x1 << 22, (on ? 0x1 : 0) << 22);
706 }
707 
708 static void dramc_rx_input_delay_tracking_init_by_freq(u8 chn, u8 freq_group)
709 {
710  u8 dvs_delay;
711 
712  struct ddrphy_ao_shu *shu = &ch[chn].phy.shu[0];
713 
714  switch (freq_group) {
715  case LP4X_DDR1600:
716  dvs_delay = 5;
717  break;
718  case LP4X_DDR2400:
719  dvs_delay = 4;
720  break;
721  case LP4X_DDR3200:
722  case LP4X_DDR3600:
723  dvs_delay = 3;
724  break;
725  default:
726  die("Invalid DDR frequency group %u\n", freq_group);
727  return;
728  }
729 
730  clrsetbits32(&shu->b[0].dq[5], 0x7 << 20, dvs_delay << 20);
731  clrsetbits32(&shu->b[1].dq[5], 0x7 << 20, dvs_delay << 20);
732  clrbits32(&shu->b[0].dq[7], (0x1 << 12) | (0x1 << 13));
733  clrbits32(&shu->b[1].dq[7], (0x1 << 12) | (0x1 << 13));
734 }
735 
736 void dramc_apply_config_before_calibration(u8 freq_group, u32 cbt_mode)
737 {
738  for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
739  dramc_enable_phy_dcm(chn, false);
740  dramc_reset_delay_chain_before_calibration(chn);
741 
742  setbits32(&ch[chn].ao.shu[0].conf[3], 0x1ff << 16);
743  setbits32(&ch[chn].ao.spcmdctrl, 0x1 << 24);
744  clrsetbits32(&ch[chn].ao.shu[0].scintv, 0x1f << 1, 0x1b << 1);
745 
746  for (u8 shu = DRAM_DFS_SHUFFLE_1; shu < DRAM_DFS_SHUFFLE_MAX;
747  shu++)
748  setbits32(&ch[chn].ao.shu[shu].conf[3], 0x1ff << 0);
749 
750  clrbits32(&ch[chn].ao.dramctrl, 0x1 << 18);
751  clrbits32(&ch[chn].ao.spcmdctrl, 0x1 << 31);
752  clrbits32(&ch[chn].ao.spcmdctrl, 0x1 << 30);
753 
754  if (cbt_mode == CBT_R0_R1_NORMAL) {
755  clrbits32(&ch[chn].ao.dqsoscr, 0x1 << 26);
756  clrbits32(&ch[chn].ao.dqsoscr, 0x1 << 25);
757  } else if (cbt_mode == CBT_R0_R1_BYTE) {
758  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 26);
759  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 25);
760  } else if (cbt_mode == CBT_R0_NORMAL_R1_BYTE) {
761  clrbits32(&ch[chn].ao.dqsoscr, 0x1 << 26);
762  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 25);
763  } else if (cbt_mode == CBT_R0_BYTE_R1_NORMAL) {
764  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 26);
765  clrbits32(&ch[chn].ao.dqsoscr, 0x1 << 25);
766  }
767 
768  dramc_write_dbi_onoff(chn, false);
769  dramc_read_dbi_onoff(chn, false);
770 
771  setbits32(&ch[chn].ao.spcmdctrl, 0x1 << 29);
772  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 24);
773  for (u8 shu = DRAM_DFS_SHUFFLE_1; shu < DRAM_DFS_SHUFFLE_MAX;
774  shu++)
775  setbits32(&ch[chn].ao.shu[shu].scintv, 0x1 << 30);
776 
777  clrbits32(&ch[chn].ao.dummy_rd, (0x1 << 7) | (0x7 << 20));
778  dramc_hw_gating_onoff(chn, false);
779  clrbits32(&ch[chn].ao.stbcal2, 0x1 << 28);
780 
781  for (size_t r = 0; r < 2; r++) {
782  for (size_t b = 0; b < 2; b++)
783  clrbits32(&ch[chn].phy.r[r].b[b].rxdvs[2],
784  (0x1 << 28) | (0x1 << 23) | (0x3 << 30));
785  clrbits32(&ch[chn].phy.r0_ca_rxdvs[2], 0x3 << 30);
786  }
787  setbits32(&ch[chn].phy.misc_ctrl1, 0x1 << 7);
788  clrbits32(&ch[chn].ao.refctrl0, 0x1 << 18);
789  clrbits32(&ch[chn].ao.mrs, 0x3 << 24);
790  setbits32(&ch[chn].ao.mpc_option, 0x1 << 17);
791  clrsetbits32(&ch[chn].phy.b[0].dq[6], 0x3 << 0, 0x1 << 0);
792  clrsetbits32(&ch[chn].phy.b[1].dq[6], 0x3 << 0, 0x1 << 0);
793  clrsetbits32(&ch[chn].phy.ca_cmd[6], 0x3 << 0, 0x1 << 0);
794 
795  dramc_rx_input_delay_tracking_init_by_freq(chn, freq_group);
796 
797  setbits32(&ch[chn].ao.dummy_rd, 0x1 << 25);
798  setbits32(&ch[chn].ao.drsctrl, 0x1 << 0);
799  if (freq_group == LP4X_DDR3200 || freq_group == LP4X_DDR3600)
800  clrbits32(&ch[chn].ao.shu[0].drving[0], 0x1 << 31);
801  else
802  setbits32(&ch[chn].ao.shu[0].drving[0], 0x1 << 31);
803  }
804 }
805 
806 static void dramc_set_mr13_vrcg_to_normal(u8 chn, const struct mr_value *mr, u32 rk_num)
807 {
808  for (u8 rank = 0; rank < rk_num; rank++)
809  dramc_mode_reg_write_by_rank(chn, rank, 13,
810  mr->MR13Value & ~(0x1 << 3));
811 
812  for (u8 shu = 0; shu < DRAM_DFS_SHUFFLE_MAX; shu++)
813  clrbits32(&ch[chn].ao.shu[shu].hwset_vrcg, 0x1 << 19);
814 }
815 
816 void dramc_apply_config_after_calibration(const struct mr_value *mr, u32 rk_num)
817 {
818  for (size_t chn = 0; chn < CHANNEL_MAX; chn++) {
819  write32(&ch[chn].phy.misc_cg_ctrl4, 0x11400000);
820  clrbits32(&ch[chn].ao.refctrl1, 0x1 << 7);
821  clrbits32(&ch[chn].ao.shuctrl, 0x1 << 2);
822  clrbits32(&ch[chn].phy.ca_cmd[6], 0x1 << 6);
823  dramc_set_mr13_vrcg_to_normal(chn, mr, rk_num);
824 
825  clrbits32(&ch[chn].phy.b[0].dq[6], 0x3);
826  clrbits32(&ch[chn].phy.b[1].dq[6], 0x3);
827  clrbits32(&ch[chn].phy.ca_cmd[6], 0x3);
828  setbits32(&ch[chn].phy.b[0].dq[6], 0x1 << 5);
829  setbits32(&ch[chn].phy.b[1].dq[6], 0x1 << 5);
830  setbits32(&ch[chn].phy.ca_cmd[6], 0x1 << 5);
831 
832  clrbits32(&ch[chn].ao.impcal, 0x3 << 24);
833  clrbits32(&ch[chn].phy.misc_imp_ctrl0, 0x4);
834  clrbits32(&ch[chn].phy.misc_cg_ctrl0, 0xf);
835 
836  clrbits32(&ch[chn].phy.misc_ctrl0, 0x1 << 31);
837  clrbits32(&ch[chn].phy.misc_ctrl1, 0x1 << 25);
838 
839  setbits32(&ch[chn].ao.spcmdctrl, 1 << 29);
840  setbits32(&ch[chn].ao.dqsoscr, 1 << 24);
841 
842  for (u8 shu = 0; shu < DRAM_DFS_SHUFFLE_MAX; shu++)
843  clrbits32(&ch[chn].ao.shu[shu].scintv, 0x1 << 30);
844 
845  clrbits32(&ch[chn].ao.dummy_rd, (0x7 << 20) | (0x1 << 7));
847  clrbits32(&ch[chn].ao.dramc_pd_ctrl, 0x1 << 26);
848 
849  clrbits32(&ch[chn].ao.eyescan, 0x7 << 8);
850  clrsetbits32(&ch[chn].ao.test2_4, 0x7 << 28, 0x4 << 28);
851  }
852 }
853 
854 static void dramc_rx_dqs_isi_pulse_cg_switch(u8 chn, bool flag)
855 {
856  for (size_t b = 0; b < 2; b++)
857  clrsetbits32(&ch[chn].phy.b[b].dq[6], 1 << 5,
858  (flag ? 1 : 0) << 5);
859 }
860 
861 static void dramc_set_rank_engine2(u8 chn, u8 rank)
862 {
863  /* Select CTO_AGENT1_RANK */
864  SET32_BITFIELDS(&ch[chn].ao.dramctrl, DRAMCTRL_ADRDECEN_TARKMODE, 1);
865  SET32_BITFIELDS(&ch[chn].ao.test2_4, TEST2_4_TESTAGENTRKSEL, 0);
866  SET32_BITFIELDS(&ch[chn].ao.test2_4, TEST2_4_TESTAGENTRK, rank);
867 }
868 
869 static void dramc_engine2_setpat(u8 chn, bool test_pat)
870 {
871  SET32_BITFIELDS(&ch[chn].ao.test2_4,
872  TEST2_4_TEST_REQ_LEN1, 0,
873  TEST2_4_TESTXTALKPAT, 0,
874  TEST2_4_TESTAUDMODE, 0,
875  TEST2_4_TESTAUDBITINV, 0);
876 
877  if (!test_pat) {
878  SET32_BITFIELDS(&ch[chn].ao.perfctl0, PERFCTL0_RWOFOEN, 1);
879  SET32_BITFIELDS(&ch[chn].ao.test2_4,
880  TEST2_4_TESTSSOPAT, 0,
881  TEST2_4_TESTSSOXTALKPAT, 0,
882  TEST2_4_TESTXTALKPAT, 1);
883  } else {
884  SET32_BITFIELDS(&ch[chn].ao.test2_4,
885  TEST2_4_TESTAUDINIT, 0x11,
886  TEST2_4_TESTAUDINC, 0xd,
887  TEST2_4_TESTAUDBITINV, 1);
888  }
889  SET32_BITFIELDS(&ch[chn].ao.test2_3,
890  TEST2_3_TESTAUDPAT, test_pat, TEST2_3_TESTCNT, 0);
891 }
892 
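/*
 * Prepare test engine 2 for the given rank: disable dummy reads, program
 * the test base address and offset (t2_1/t2_2), and select either the
 * xtalk pattern or the audio pattern via dramc_engine2_setpat().
 */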
893 static void dramc_engine2_init(u8 chn, u8 rank, u32 t2_1, u32 t2_2, bool test_pat)
894 {
895  dramc_set_rank_engine2(chn, rank);
896 
897  SET32_BITFIELDS(&ch[chn].ao.dummy_rd,
898  DUMMY_RD_DQSG_DMYRD_EN, 0,
899  DUMMY_RD_DQSG_DMYWR_EN, 0,
900  DUMMY_RD_DUMMY_RD_EN, 0,
901  DUMMY_RD_SREF_DMYRD_EN, 0,
902  DUMMY_RD_DMY_RD_DBG, 0,
903  DUMMY_RD_DMY_WR_DBG, 0);
904  SET32_BITFIELDS(&ch[chn].nao.testchip_dma1,
905  TESTCHIP_DMA1_DMA_LP4MATAB_OPT, 0);
906  SET32_BITFIELDS(&ch[chn].ao.test2_3,
907  TEST2_3_TEST2W, 0,
908  TEST2_3_TEST2R, 0,
909  TEST2_3_TEST1, 0);
910  SET32_BITFIELDS(&ch[chn].ao.test2_0, TEST2_0_PAT0, t2_1 >> 24,
911  TEST2_0_PAT1, t2_2 >> 24);
912  SET32_BITFIELDS(&ch[chn].ao.test2_1, TEST2_1_TEST2_BASE,
913  t2_1 & 0xffffff);
914  SET32_BITFIELDS(&ch[chn].ao.test2_2, TEST2_2_TEST2_OFF,
915  t2_2 & 0xffffff);
916 
917  dramc_engine2_setpat(chn, test_pat);
918 }
919 
920 static void dramc_engine2_check_complete(u8 chn, u8 status)
921 {
922  u32 loop = 0;
923  /* In some cases the test engine has finished but the complete
924  * signal arrives late, so the system could wait for a very long
925  * time; hence we set a timeout here. After the complete signal is
926  * received or the timeout expires, this returns and the caller
927  * checks the compare result to verify whether the engine succeeded.
928  */
929  while (wait_us(100, read32(&ch[chn].nao.testrpt) & status) != status) {
930  if (loop++ > 100)
931  dramc_dbg("MEASURE_A timeout\n");
932  }
933 }
934 
935 static void dramc_engine2_compare(u8 chn, enum dram_te_op wr)
936 {
937  u8 rank_status = ((read32(&ch[chn].ao.test2_3) & 0xf) == 1) ? 3 : 1;
938 
939  if (wr == TE_OP_WRITE_READ_CHECK) {
940  dramc_engine2_check_complete(chn, rank_status);
941 
942  SET32_BITFIELDS(&ch[chn].ao.test2_3, TEST2_3_TEST2W, 0,
943  TEST2_3_TEST2R, 0, TEST2_3_TEST1, 0);
944  udelay(1);
945  SET32_BITFIELDS(&ch[chn].ao.test2_3, TEST2_3_TEST2W, 1);
946  }
947 
948  dramc_engine2_check_complete(chn, rank_status);
949 }
950 
951 static u32 dramc_engine2_run(u8 chn, enum dram_te_op wr)
952 {
953  u32 result;
954 
955  if (wr == TE_OP_READ_CHECK) {
956  SET32_BITFIELDS(&ch[chn].ao.test2_4, TEST2_4_TESTAUDMODE, 0);
957  SET32_BITFIELDS(&ch[chn].ao.test2_3,
958  TEST2_3_TEST2W, 0, TEST2_3_TEST2R, 1, TEST2_3_TEST1, 0);
959  } else if (wr == TE_OP_WRITE_READ_CHECK) {
960  SET32_BITFIELDS(&ch[chn].ao.test2_3,
961  TEST2_3_TEST2W, 1, TEST2_3_TEST2R, 0, TEST2_3_TEST1, 0);
962  }
963 
964  dramc_engine2_compare(chn, wr);
965 
966  udelay(1);
967  result = read32(&ch[chn].nao.cmp_err);
968  SET32_BITFIELDS(&ch[chn].ao.test2_3,
969  TEST2_3_TEST2W, 0, TEST2_3_TEST2R, 0, TEST2_3_TEST1, 0);
970 
971  return result;
972 }
973 
974 static void dramc_engine2_end(u8 chn, u32 dummy_rd)
975 {
976  clrbits32(&ch[chn].ao.test2_4, 0x1 << 17);
977  write32(&ch[chn].ao.dummy_rd, dummy_rd);
978 }
979 
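/*
 * Evaluate one gating-delay sample per byte: a sample passes when both
 * rising and falling error masks are zero and the DQS counter matches the
 * golden count. Track where each byte's pass window begins and how long
 * it lasts, and request an early break once all bytes have a window
 * larger than one coarse tune.
 */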
980 static bool dramc_find_gating_window(u32 result_r, u32 result_f, u32 *debug_cnt,
981  u8 dly_coarse_large, u8 dly_coarse_0p5t, u8 *pass_begin, u8 *pass_count,
982  u8 *pass_count_1, u8 *dly_fine_xt, u8 *dqs_high, u8 *dqs_done)
983 {
984  bool find_tune = false;
985  u16 debug_cnt_perbyte, current_pass = 0, pass_byte_cnt = 0;
986 
987  for (u8 dqs = 0; dqs < DQS_NUMBER; dqs++) {
988  u8 dqs_result_r = (u8) ((result_r >> (8 * dqs)) & 0xff);
989  u8 dqs_result_f = (u8) ((result_f >> (8 * dqs)) & 0xff);
990 
991  if (pass_byte_cnt & (1 << dqs))
992  continue;
993  current_pass = 0;
994 
995  debug_cnt_perbyte = (u16) debug_cnt[dqs];
996  if (dqs_result_r == 0 && dqs_result_f == 0 &&
997  debug_cnt_perbyte == GATING_GOLDEND_DQSCNT)
998  current_pass = 1;
999 
1000  if (current_pass) {
1001  if (pass_begin[dqs] == 0) {
1002  pass_begin[dqs] = 1;
1003  pass_count_1[dqs] = 0;
1004  dramc_dbg("[Byte %d]First pass (%d, %d, %d)\n",
1005  dqs, dly_coarse_large, dly_coarse_0p5t, *dly_fine_xt);
1006  }
1007 
1008  if (pass_begin[dqs] == 1)
1009  pass_count_1[dqs]++;
1010 
1011  if (pass_begin[dqs] == 1 &&
1012  pass_count_1[dqs] * DQS_GW_FINE_STEP > DQS_GW_FINE_END) {
1013  dqs_high[dqs] = 0;
1014  dqs_done[dqs] = 1;
1015  }
1016 
1017  if (pass_count_1[0] * DQS_GW_FINE_STEP > DQS_GW_FINE_END &&
1018  pass_count_1[1] * DQS_GW_FINE_STEP > DQS_GW_FINE_END) {
1019  dramc_dbg("All bytes gating window > 1 coarse_tune, Early break\n");
1020  *dly_fine_xt = DQS_GW_FINE_END;
1021  find_tune = true;
1022  }
1023  } else {
1024  if (pass_begin[dqs] != 1)
1025  continue;
1026 
1027  dramc_dbg("[Byte %d] pass_begin[dqs]:%d, pass_count[dqs]:%d,pass_count_1:%d\n",
1028  dqs, pass_begin[dqs], pass_count[dqs], pass_count_1[dqs]);
1029 
1030  pass_begin[dqs] = 0;
1031  if (pass_count_1[dqs] > pass_count[dqs]) {
1032  pass_count[dqs] = pass_count_1[dqs];
1033  if (pass_count_1[dqs] * DQS_GW_FINE_STEP > 32 &&
1034  pass_count_1[dqs] * DQS_GW_FINE_STEP < 96)
1035  pass_byte_cnt |= (1 << dqs);
1036  if (pass_byte_cnt == 3) {
1037  *dly_fine_xt = DQS_GW_FINE_END;
1038  find_tune = true;
1039  }
1040  }
1041  }
1042  }
1043 
1044  return find_tune;
1045 }
1046 
1047 static void dramc_find_dly_tune(u8 chn, u8 dly_coarse_large, u8 dly_coarse_0p5t,
1048  u8 dly_fine_xt, u8 *dqs_high, u8 *dly_coarse_large_cnt,
1049  u8 *dly_coarse_0p5t_cnt, u8 *dly_fine_tune_cnt, u8 *dqs_trans, u8 *dqs_done)
1050 {
1051  for (size_t dqs = 0; dqs < DQS_NUMBER; dqs++) {
1052  u32 dqs_cnt = read32(&ch[chn].phy_nao.misc_phy_stben_b[dqs]);
1053  dqs_cnt = (dqs_cnt >> 16) & 3;
1054 
1055  if (dqs_done[dqs] == 1)
1056  continue;
1057 
1058  if (dqs_cnt == 3)
1059  dqs_high[dqs]++;
1060 
1061  if (dqs_high[dqs] * DQS_GW_FINE_STEP <= 16)
1062  continue;
1063 
1064  switch (dqs_cnt) {
1065  case 3:
1066  dly_coarse_large_cnt[dqs] = dly_coarse_large;
1067  dly_coarse_0p5t_cnt[dqs] = dly_coarse_0p5t;
1068  dly_fine_tune_cnt[dqs] = dly_fine_xt;
1069  dqs_trans[dqs] = 1;
1070  break;
1071  case 2:
1072  case 1:
1073  if (dqs_trans[dqs] == 1)
1074  dramc_dbg("[Byte %ld] Lead/lag falling Transition"
1075  " (%d, %d, %d)\n",
1076  dqs, dly_coarse_large_cnt[dqs],
1077  dly_coarse_0p5t_cnt[dqs], dly_fine_tune_cnt[dqs]);
1078  dqs_trans[dqs]++;
1079  break;
1080  case 0:
1081  dramc_dbg("[Byte %ld] Lead/lag Transition tap number (%d)\n",
1082  dqs, dqs_trans[dqs]);
1083  dqs_high[dqs] = 0;
1084  break;
1085  }
1086  }
1087 }
1088 
1089 static void dram_phy_reset(u8 chn)
1090 {
1091  SET32_BITFIELDS(&ch[chn].ao.ddrconf0, DDRCONF0_RDATRST, 1);
1092  SET32_BITFIELDS(&ch[chn].phy.misc_ctrl1, MISC_CTRL1_R_DMPHYRST, 1);
1093  clrbits32(&ch[chn].phy.b[0].dq[9], (1 << 4) | (1 << 0));
1094  clrbits32(&ch[chn].phy.b[1].dq[9], (1 << 4) | (1 << 0));
1095 
1096  udelay(1);
1097  setbits32(&ch[chn].phy.b[1].dq[9], (1 << 4) | (1 << 0));
1098  setbits32(&ch[chn].phy.b[0].dq[9], (1 << 4) | (1 << 0));
1099  SET32_BITFIELDS(&ch[chn].phy.misc_ctrl1, MISC_CTRL1_R_DMPHYRST, 0);
1100  SET32_BITFIELDS(&ch[chn].ao.ddrconf0, DDRCONF0_RDATRST, 0);
1101 }
1102 
1103 static void dramc_set_gating_mode(u8 chn, bool mode)
1104 {
1105  u8 vref = 0, burst = 0;
1106 
1107  if (mode) {
1108  vref = 2;
1109  burst = 1;
1110  }
1111 
1112  for (size_t b = 0; b < 2; b++) {
1113  clrsetbits32(&ch[chn].phy.b[b].dq[6], 0x3 << 14, vref << 14);
1114  setbits32(&ch[chn].phy.b[b].dq[9], 0x1 << 5);
1115  }
1116 
1117  clrsetbits32(&ch[chn].ao.stbcal1, 0x1 << 5, burst << 5);
1118  setbits32(&ch[chn].ao.stbcal, 0x1 << 30);
1119 
1120  clrbits32(&ch[chn].phy.b[0].dq[9], (0x1 << 4) | (0x1 << 0));
1121  clrbits32(&ch[chn].phy.b[1].dq[9], (0x1 << 4) | (0x1 << 0));
1122  udelay(1);
1123  setbits32(&ch[chn].phy.b[1].dq[9], (0x1 << 4) | (0x1 << 0));
1124  setbits32(&ch[chn].phy.b[0].dq[9], (0x1 << 4) | (0x1 << 0));
1125 }
1126 
1127 static void dramc_rx_dqs_gating_cal_pre(u8 chn, u8 rank)
1128 {
1129  SET32_BITFIELDS(&ch[chn].ao.refctrl0, REFCTRL0_PBREFEN, 0);
1130 
1131  dramc_hw_gating_onoff(chn, false);
1132 
1133  SET32_BITFIELDS(&ch[chn].ao.stbcal1, STBCAL1_STBENCMPEN, 1);
1134  SET32_BITFIELDS(&ch[chn].ao.stbcal1, STBCAL1_STBCNT_LATCH_EN, 1);
1135  SET32_BITFIELDS(&ch[chn].ao.ddrconf0, DDRCONF0_DM4TO1MODE, 0);
1136  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSGCNTEN, 1);
1137 
1138  udelay(4);
1139  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSGCNTRST, 1);
1140  udelay(1);
1141  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSGCNTRST, 0);
1142  SET32_BITFIELDS(&ch[chn].phy.misc_ctrl1, MISC_CTRL1_R_DMSTBENCMP_RK,
1143  rank);
1144 }
1145 
1146 static void set_selph_gating_value(uint32_t *addr, u8 dly, u8 dly_p1)
1147 {
1148  clrsetbits32(addr, 0x77777777,
1149  (dly << 0) | (dly << 8) | (dly << 16) | (dly << 24) |
1150  (dly_p1 << 4) | (dly_p1 << 12) | (dly_p1 << 20) | (dly_p1 << 28));
1151 }
1152 
1153 static void dramc_write_dqs_gating_result(u8 chn, u8 rank,
1154  u8 *best_coarse_tune2t, u8 *best_coarse_tune0p5t,
1155  u8 *best_coarse_tune2t_p1, u8 *best_coarse_tune0p5t_p1)
1156 {
1157  u8 best_coarse_rodt[DQS_NUMBER], best_coarse_0p5t_rodt[DQS_NUMBER];
1158  u8 best_coarse_rodt_p1[DQS_NUMBER];
1159  u8 best_coarse_0p5t_rodt_p1[DQS_NUMBER];
1160 
1162 
1163  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dqsg0,
1164  0x77777777,
1165  (best_coarse_tune2t[0] << 0) | (best_coarse_tune2t[1] << 8) |
1166  (best_coarse_tune2t_p1[0] << 4) | (best_coarse_tune2t_p1[1] << 12));
1167  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dqsg1,
1168  0x77777777,
1169  (best_coarse_tune0p5t[0] << 0) | (best_coarse_tune0p5t[1] << 8) |
1170  (best_coarse_tune0p5t_p1[0] << 4) | (best_coarse_tune0p5t_p1[1] << 12));
1171 
1172  for (size_t dqs = 0; dqs < DQS_NUMBER; dqs++) {
1173  u8 tmp_value = (best_coarse_tune2t[dqs] << 3)
1174  + best_coarse_tune0p5t[dqs];
1175 
1176  if (tmp_value >= 11) {
1177  tmp_value -= 11;
1178  best_coarse_rodt[dqs] = tmp_value >> 3;
1179  best_coarse_0p5t_rodt[dqs] =
1180  tmp_value - (best_coarse_rodt[dqs] << 3);
1181 
1182  tmp_value = (best_coarse_tune2t_p1[dqs] << 3) +
1183  best_coarse_tune0p5t_p1[dqs] - 11;
1184  best_coarse_rodt_p1[dqs] = tmp_value >> 3;
1185  best_coarse_0p5t_rodt_p1[dqs] =
1186  tmp_value - (best_coarse_rodt_p1[dqs] << 3);
1187 
1188  dramc_dbg("Best RODT dly(2T, 0.5T) = (%d, %d)\n",
1189  best_coarse_rodt[dqs],
1190  best_coarse_0p5t_rodt[dqs]);
1191  } else {
1192  best_coarse_rodt[dqs] = 0;
1193  best_coarse_0p5t_rodt[dqs] = 0;
1194  best_coarse_rodt_p1[dqs] = 4;
1195  best_coarse_0p5t_rodt_p1[dqs] = 4;
1196  dramc_dbg("RxdqsGatingCal error: best_coarse_tune2t:%zd"
1197  " is already 0. RODT cannot be -1 coarse\n",
1198  dqs);
1199  }
1200  }
1201 
1202  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_odten0,
1203  0x77777777,
1204  (best_coarse_rodt[0] << 0) | (best_coarse_rodt[1] << 8) |
1205  (best_coarse_rodt_p1[0] << 4) | (best_coarse_rodt_p1[1] << 12));
1206  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_odten1,
1207  0x77777777,
1208  (best_coarse_0p5t_rodt[0] << 0) | (best_coarse_0p5t_rodt[1] << 8) |
1209  (best_coarse_0p5t_rodt_p1[0] << 4) | (best_coarse_0p5t_rodt_p1[1] << 12));
1210 }
1211 
1212 static void dramc_rx_dqs_gating_cal_partial(u8 chn, u8 rank,
1213  u32 coarse_start, u32 coarse_end, u8 freqDiv,
1214  u8 *pass_begin, u8 *pass_count, u8 *pass_count_1, u8 *dqs_done,
1215  u8 *dqs_high, u8 *dqs_transition, u8 *dly_coarse_large_cnt,
1216  u8 *dly_coarse_0p5t_cnt, u8 *dly_fine_tune_cnt)
1217 {
1218  u8 dqs;
1219  u32 debug_cnt[DQS_NUMBER];
1220 
1221  for (u32 coarse_tune = coarse_start; coarse_tune < coarse_end;
1222  coarse_tune++) {
1223  u32 dly_coarse_large_rodt = 0, dly_coarse_0p5t_rodt = 0;
1224  u32 dly_coarse_large_rodt_p1 = 4, dly_coarse_0p5t_rodt_p1 = 4;
1225 
1226  u8 dly_coarse_large = coarse_tune / RX_DQS_CTL_LOOP;
1227  u8 dly_coarse_0p5t = coarse_tune % RX_DQS_CTL_LOOP;
1228  u32 dly_coarse_large_p1 = (coarse_tune + freqDiv) / RX_DQS_CTL_LOOP;
1229  u32 dly_coarse_0p5t_p1 = (coarse_tune + freqDiv) % RX_DQS_CTL_LOOP;
1230  u32 value = (dly_coarse_large << 3) + dly_coarse_0p5t;
1231 
1232  if (value >= 11) {
1233  value -= 11;
1234  dly_coarse_large_rodt = value >> 3;
1235  dly_coarse_0p5t_rodt =
1236  value - (dly_coarse_large_rodt << 3);
1237  value = (dly_coarse_large << 3) + dly_coarse_0p5t - 11;
1238  dly_coarse_large_rodt_p1 = value >> 3;
1239  dly_coarse_0p5t_rodt_p1 =
1240  value - (dly_coarse_large_rodt_p1 << 3);
1241  }
1242 
1243  set_selph_gating_value(&ch[chn].ao.shu[0].rk[rank].selph_dqsg0,
1244  dly_coarse_large, dly_coarse_large_p1);
1245  set_selph_gating_value(&ch[chn].ao.shu[0].rk[rank].selph_dqsg1,
1246  dly_coarse_0p5t, dly_coarse_0p5t_p1);
1247  set_selph_gating_value(&ch[chn].ao.shu[0].rk[rank].selph_odten0,
1248  dly_coarse_large_rodt, dly_coarse_large_rodt_p1);
1249  set_selph_gating_value(&ch[chn].ao.shu[0].rk[rank].selph_odten1,
1250  dly_coarse_0p5t_rodt, dly_coarse_0p5t_rodt_p1);
1251 
1252  for (u8 dly_fine_xt = 0; dly_fine_xt < DQS_GW_FINE_END;
1253  dly_fine_xt += 4) {
1254  dramc_set_gating_mode(chn, 0);
1255  WRITE32_BITFIELDS(&ch[chn].ao.shu[0].rk[rank].dqsien,
1256  SHURK_DQSIEN_DQS0IEN, dly_fine_xt,
1257  SHURK_DQSIEN_DQS1IEN, dly_fine_xt);
1258 
1259  dram_phy_reset(chn);
1260  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSGCNTRST, 1);
1261  udelay(1);
1262  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSGCNTRST, 0);
1263 
1264  dramc_engine2_run(chn, TE_OP_READ_CHECK);
1265 
1266  u32 result_r = READ32_BITFIELD(
1267  &ch[chn].phy.misc_stberr_rk0_r,
1268  MISC_STBERR_RK_R_STBERR_RK_R);
1269  u32 result_f = READ32_BITFIELD(
1270  &ch[chn].phy.misc_stberr_rk0_f,
1271  MISC_STBERR_RK_F_STBERR_RK_F);
1272 
1273  debug_cnt[0] = read32(&ch[chn].nao.dqsgnwcnt[0]);
1274  debug_cnt[1] = (debug_cnt[0] >> 16) & 0xffff;
1275  debug_cnt[0] &= 0xffff;
1276 
1277  dramc_set_gating_mode(chn, 1);
1279 
1280  dramc_find_dly_tune(chn, dly_coarse_large,
1281  dly_coarse_0p5t, dly_fine_xt, dqs_high,
1282  dly_coarse_large_cnt, dly_coarse_0p5t_cnt,
1283  dly_fine_tune_cnt, dqs_transition, dqs_done);
1284 
1285  dramc_dbg("%d %d %d |", dly_coarse_large,
1286  dly_coarse_0p5t, dly_fine_xt);
1287  for (dqs = 0; dqs < DQS_NUMBER; dqs++)
1288  dramc_dbg("%X ", debug_cnt[dqs]);
1289 
1290  dramc_dbg(" |");
1291  for (dqs = 0; dqs < DQS_NUMBER; dqs++) {
1292  dramc_dbg("(%X %X)",
1293  (result_f >> (DQS_BIT_NUMBER * dqs)) & 0xff,
1294  (result_r >> (DQS_BIT_NUMBER * dqs)) & 0xff);
1295  }
1296 
1297  dramc_dbg("\n");
1298  if (dramc_find_gating_window(result_r, result_f, debug_cnt,
1299  dly_coarse_large, dly_coarse_0p5t, pass_begin,
1300  pass_count, pass_count_1, &dly_fine_xt,
1301  dqs_high, dqs_done))
1302  coarse_tune = coarse_end;
1303  }
1304  }
1305 }
1306 
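/*
 * RX DQS gating calibration: with test engine 2 generating reads, sweep
 * the coarse (2T/0.5T) and fine gating delays to locate the gating
 * window, then program the best delays, their P1 (next-phase) values and
 * the derived RODT timings back into the shuffle registers.
 */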
1307 static void dramc_rx_dqs_gating_cal(u8 chn, u8 rank, u8 freq_group,
1308  const struct sdram_params *params, const bool fast_calib,
1309  const struct mr_value *mr)
1310 {
1311  u8 dqs, fsp, freqDiv = 4;
1312  u8 pass_begin[DQS_NUMBER] = {0}, pass_count[DQS_NUMBER] = {0},
1313  pass_count_1[DQS_NUMBER] = {0}, dqs_done[DQS_NUMBER] = {0};
1314  u8 min_coarse_tune2t[DQS_NUMBER], min_coarse_tune0p5t[DQS_NUMBER],
1315  min_fine_tune[DQS_NUMBER];
1316  u8 best_fine_tune[DQS_NUMBER], best_coarse_tune0p5t[DQS_NUMBER],
1317  best_coarse_tune2t[DQS_NUMBER];
1318  u8 best_coarse_tune0p5t_p1[DQS_NUMBER], best_coarse_tune2t_p1[DQS_NUMBER];
1319  u8 dqs_high[DQS_NUMBER] = {0}, dqs_transition[DQS_NUMBER] = {0};
1320  u8 dly_coarse_large_cnt[DQS_NUMBER] = {0}, dly_coarse_0p5t_cnt[DQS_NUMBER] = {0},
1321  dly_fine_tune_cnt[DQS_NUMBER] = {0};
1322  u32 coarse_start, coarse_end;
1323 
1324  struct reg_value regs_bak[] = {
1325  {&ch[chn].ao.stbcal},
1326  {&ch[chn].ao.stbcal1},
1327  {&ch[chn].ao.ddrconf0},
1328  {&ch[chn].ao.spcmd},
1329  {&ch[chn].ao.refctrl0},
1330  {&ch[chn].phy.b[0].dq[6]},
1331  {&ch[chn].phy.b[1].dq[6]},
1332  };
1333  for (size_t i = 0; i < ARRAY_SIZE(regs_bak); i++)
1334  regs_bak[i].value = read32(regs_bak[i].addr);
1335 
1336  fsp = get_freq_fsq(freq_group);
1338 
1339  dramc_mode_reg_write_by_rank(chn, rank, 0x1, mr->MR01Value[fsp] | 0x80);
1340  dramc_rx_dqs_gating_cal_pre(chn, rank);
1341 
1342  u32 dummy_rd_backup = read32(&ch[chn].ao.dummy_rd);
1343  dramc_engine2_init(chn, rank, TEST2_1_CAL, 0xaa000023, true);
1344 
1345  switch (freq_group) {
1346  case LP4X_DDR1600:
1347  coarse_start = 18;
1348  break;
1349  case LP4X_DDR2400:
1350  coarse_start = 25;
1351  break;
1352  case LP4X_DDR3200:
1353  coarse_start = 25;
1354  break;
1355  case LP4X_DDR3600:
1356  coarse_start = 21;
1357  break;
1358  default:
1359  die("Invalid DDR frequency group %u\n", freq_group);
1360  return;
1361  }
1362  coarse_end = coarse_start + 12;
1363 
1364  dramc_dbg("[Gating]\n");
1365 
1366  if (!fast_calib) {
1367  dramc_rx_dqs_gating_cal_partial(chn, rank,
1368  coarse_start, coarse_end,
1369  freqDiv, pass_begin, pass_count, pass_count_1, dqs_done,
1370  dqs_high, dqs_transition, dly_coarse_large_cnt,
1371  dly_coarse_0p5t_cnt, dly_fine_tune_cnt);
1372  dramc_engine2_end(chn, dummy_rd_backup);
1373  }
1374 
1375  for (dqs = 0; dqs < DQS_NUMBER; dqs++) {
1376  if (fast_calib) {
1377  dramc_dbg("[bypass Gating params] dqs: %d\n", dqs);
1378  pass_count[dqs] = params->gating_pass_count[chn][rank][dqs];
1379  min_fine_tune[dqs] = params->gating_fine_tune[chn][rank][dqs];
1380  min_coarse_tune0p5t[dqs] = params->gating05T[chn][rank][dqs];
1381  min_coarse_tune2t[dqs] = params->gating2T[chn][rank][dqs];
1382  } else {
1383  pass_count[dqs] = dqs_transition[dqs];
1384  min_fine_tune[dqs] = dly_fine_tune_cnt[dqs];
1385  min_coarse_tune0p5t[dqs] = dly_coarse_0p5t_cnt[dqs];
1386  min_coarse_tune2t[dqs] = dly_coarse_large_cnt[dqs];
1387  }
1388  u8 tmp_offset = pass_count[dqs] * DQS_GW_FINE_STEP / 2;
1389  u8 tmp_value = min_fine_tune[dqs] + tmp_offset;
1390  best_fine_tune[dqs] = tmp_value % RX_DLY_DQSIENSTB_LOOP;
1391 
1392  tmp_offset = tmp_value / RX_DLY_DQSIENSTB_LOOP;
1393  tmp_value = min_coarse_tune0p5t[dqs] + tmp_offset;
1394  best_coarse_tune0p5t[dqs] = tmp_value % RX_DQS_CTL_LOOP;
1395 
1396  tmp_offset = tmp_value / RX_DQS_CTL_LOOP;
1397  best_coarse_tune2t[dqs] = min_coarse_tune2t[dqs] + tmp_offset;
1398 
1399  tmp_value = best_coarse_tune0p5t[dqs] + freqDiv;
1400  best_coarse_tune0p5t_p1[dqs] = tmp_value % RX_DQS_CTL_LOOP;
1401 
1402  tmp_offset = tmp_value / RX_DQS_CTL_LOOP;
1403  best_coarse_tune2t_p1[dqs] =
1404  best_coarse_tune2t[dqs] + tmp_offset;
1405  }
1406 
1407  for (dqs = 0; dqs < DQS_NUMBER; dqs++)
1408  dramc_dbg("Best DQS%d dly(2T, 0.5T, fine tune)"
1409  " = (%d, %d, %d)\n", dqs, best_coarse_tune2t[dqs],
1410  best_coarse_tune0p5t[dqs], best_fine_tune[dqs]);
1411 
1412  for (dqs = 0; dqs < DQS_NUMBER; dqs++)
1413  dramc_dbg("Best DQS%d P1 dly(2T, 0.5T, fine tune)"
1414  " = (%d, %d, %d)\n", dqs, best_coarse_tune2t_p1[dqs],
1415  best_coarse_tune0p5t_p1[dqs], best_fine_tune[dqs]);
1416 
1417  for (size_t i = 0; i < ARRAY_SIZE(regs_bak); i++)
1418  write32(regs_bak[i].addr, regs_bak[i].value);
1419 
1420  dramc_mode_reg_write_by_rank(chn, rank, 0x1, mr->MR01Value[fsp]);
1421 
1422  dramc_write_dqs_gating_result(chn, rank, best_coarse_tune2t,
1423  best_coarse_tune0p5t, best_coarse_tune2t_p1, best_coarse_tune0p5t_p1);
1424 
1425  WRITE32_BITFIELDS(&ch[chn].ao.shu[0].rk[rank].dqsien,
1426  SHURK_DQSIEN_DQS0IEN, best_fine_tune[0],
1427  SHURK_DQSIEN_DQS1IEN, best_fine_tune[1]);
1428 
1429  dram_phy_reset(chn);
1430 }
1431 
1432 static void dramc_rx_rd_dqc_init(u8 chn, u8 rank)
1433 {
1434  const u8 *lpddr_phy_mapping = phy_mapping[chn];
1435  u16 temp_value = 0;
1436 
1437  for (size_t b = 0; b < 2; b++)
1438  clrbits32(&ch[chn].phy.shu[0].b[b].dq[7], 0x1 << 7);
1439 
1440  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, rank);
1441  SET32_BITFIELDS(&ch[chn].ao.mpc_option, MPC_OPTION_MPCRKEN, 1);
1442 
1443  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++)
1444  temp_value |= ((0x5555 >> bit) & 0x1) << lpddr_phy_mapping[bit];
1445 
1446  u16 mr15_golden_value = temp_value & 0xff;
1447  u16 mr20_golden_value = (temp_value >> 8) & 0xff;
1448  SET32_BITFIELDS(&ch[chn].ao.mr_golden,
1449  MR_GOLDEN_MR15_GOLDEN, mr15_golden_value,
1450  MR_GOLDEN_MR20_GOLDEN, mr20_golden_value);
1451 }
1452 
1453 static u32 dramc_rx_rd_dqc_run(u8 chn)
1454 {
1455  u32 loop = 0;
1456  SET32_BITFIELDS(&ch[chn].ao.spcmdctrl, SPCMDCTRL_RDDQCDIS, 1);
1457  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_RDDQCEN, 1);
1458 
1459  while (!wait_us(10, read32(&ch[chn].nao.spcmdresp) & (0x1 << 7))) {
1460  if (loop++ > 10)
1461  dramc_dbg("[RDDQC] resp fail (time out)\n");
1462  }
1463 
1464  u32 result = read32(&ch[chn].nao.rdqc_cmp);
1465  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_RDDQCEN, 0);
1466  SET32_BITFIELDS(&ch[chn].ao.spcmdctrl, SPCMDCTRL_RDDQCDIS, 0);
1467 
1468  return result;
1469 }
1470 
1471 static void dramc_rx_rd_dqc_end(u8 chn)
1472 {
1473  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, 0);
1474 }
1475 
1476 static void dramc_rx_vref_pre_setting(u8 chn)
1477 {
1478  SET32_BITFIELDS(&ch[chn].phy.b[0].dq[5], B0_DQ5_RG_RX_ARDQ_VREF_EN_B0, 1);
1479  SET32_BITFIELDS(&ch[chn].phy.b[1].dq[5], B1_DQ5_RG_RX_ARDQ_VREF_EN_B1, 1);
1480 }
1481 
1482 static void dramc_set_rx_vref(u8 chn, u8 vref)
1483 {
1484  for (size_t b = 0; b < 2; b++)
1485  SET32_BITFIELDS(&ch[chn].phy.shu[0].b[b].dq[5],
1486  SHU1_BX_DQ5_RG_RX_ARDQ_VREF_SEL_B0, vref);
1487  dramc_dbg("set rx vref :%d\n", vref);
1488 }
1489 
1490 static void dramc_set_tx_vref(u8 chn, u8 rank, u8 value)
1491 {
1492  dramc_mode_reg_write_by_rank(chn, rank, 14, value);
1493 }
1494 
1495 static void dramc_set_vref(u8 chn, u8 rank, enum CAL_TYPE type, u8 vref)
1496 {
1497  if (type == RX_WIN_TEST_ENG)
1498  dramc_set_rx_vref(chn, vref);
1499  else
1500  dramc_set_tx_vref(chn, rank, vref);
1501 }
1502 
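/*
 * Convert a flat TX delay (in fine-tune taps) into the coarse-large /
 * coarse-small / fine-tune triple used by the SELPH registers. The *_oen
 * values carry the same delay minus 3 steps for the output-enable path.
 */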
1503 static void dramc_transfer_dly_tune(u8 chn, u32 dly, u32 adjust_center,
1504  struct tx_dly_tune *dly_tune)
1505 {
1506  u8 tune = 3, fine_tune = 0;
1507  u16 tmp;
1508 
1509  fine_tune = dly & (TX_DQ_COARSE_TUNE_TO_FINE_TUNE_TAP - 1);
1510  tmp = (dly / TX_DQ_COARSE_TUNE_TO_FINE_TUNE_TAP) << 1;
1511 
1512  if (adjust_center) {
1513  if (fine_tune < 10) {
1514  fine_tune += TX_DQ_COARSE_TUNE_TO_FINE_TUNE_TAP >> 1;
1515  tmp--;
1516  } else if (fine_tune > TX_DQ_COARSE_TUNE_TO_FINE_TUNE_TAP - 10) {
1517  fine_tune -= TX_DQ_COARSE_TUNE_TO_FINE_TUNE_TAP >> 1;
1518  tmp++;
1519  }
1520  }
1521 
1522  dly_tune->fine_tune = fine_tune;
1523  dly_tune->coarse_tune_small = tmp - ((tmp >> tune) << tune);
1524  dly_tune->coarse_tune_large = tmp >> tune;
1525 
1526  tmp -= 3;
1527  dly_tune->coarse_tune_small_oen = tmp - ((tmp >> tune) << tune);
1528  dly_tune->coarse_tune_large_oen = tmp >> tune;
1529 }
1530 
1531 static void dramc_set_rx_dly_factor(u8 chn, u8 rank, enum RX_TYPE type, u32 val)
1532 {
1533  int b, dq;
1534 
1535  switch (type) {
1536  case RX_DQ:
1537  for (dq = 2; dq < 6; dq++)
1538  for (b = 0; b < 2; b++)
1539  WRITE32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[b].dq[dq],
1540  SHU1_R0_B0_DQ6_RK0_RX_ARDQM0_F_DLY_B0, val,
1541  SHU1_R0_B0_DQ6_RK0_RX_ARDQM0_R_DLY_B0, val,
1542  SHU1_R0_B0_DQ6_RK0_RX_ARDQS0_F_DLY_B0, val,
1543  SHU1_R0_B0_DQ6_RK0_RX_ARDQS0_R_DLY_B0, val);
1544  break;
1545 
1546  case RX_DQM:
1547  for (b = 0; b < 2; b++)
1548  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[b].dq[6],
1549  SHU1_R0_B0_DQ6_RK0_RX_ARDQM0_F_DLY_B0, val,
1550  SHU1_R0_B0_DQ6_RK0_RX_ARDQM0_R_DLY_B0, val);
1551  break;
1552 
1553  case RX_DQS:
1554  for (b = 0; b < 2; b++)
1555  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[b].dq[6],
1556  SHU1_R0_B0_DQ6_RK0_RX_ARDQS0_F_DLY_B0, val,
1557  SHU1_R0_B0_DQ6_RK0_RX_ARDQS0_R_DLY_B0, val);
1558  break;
1559  default:
1560  dramc_err("error calibration type: %d\n", type);
1561  break;
1562  }
1563 }
1564 
1565 static void dramc_set_tx_dly_factor(u8 chn, u8 rk,
1566  enum CAL_TYPE type, u8 *dq_small_reg, u32 dly)
1567 {
1568  struct tx_dly_tune dly_tune = {0};
1569  u32 dly_large = 0, dly_large_oen = 0, dly_small = 0, dly_small_oen = 0;
1570  u32 adjust_center = 0;
1571 
1572  dramc_transfer_dly_tune(chn, dly, adjust_center, &dly_tune);
1573 
1574  for (u8 i = 0; i < 4; i++) {
1575  dly_large += dly_tune.coarse_tune_large << (i * 4);
1576  dly_large_oen += dly_tune.coarse_tune_large_oen << (i * 4);
1577  dly_small += dly_tune.coarse_tune_small << (i * 4);
1578  dly_small_oen += dly_tune.coarse_tune_small_oen << (i * 4);
1579  }
1580 
1581  if (type == TX_WIN_DQ_DQM)
1582  dramc_dbg("%3d |%d %d %2d | [0]",
1583  dly, dly_tune.coarse_tune_large,
1584  dly_tune.coarse_tune_small, dly_tune.fine_tune);
1585 
1586  if (*dq_small_reg != dly_tune.coarse_tune_small) {
1587  if (type == TX_WIN_DQ_DQM || type == TX_WIN_DQ_ONLY) {
1588  clrsetbits32(&ch[chn].ao.shu[0].rk[rk].selph_dq[0],
1589  0x77777777, dly_large | (dly_large_oen << 16));
1590  clrsetbits32(&ch[chn].ao.shu[0].rk[rk].selph_dq[2],
1591  0x77777777, dly_small | (dly_small_oen << 16));
1592  }
1593 
1594  if (type == TX_WIN_DQ_DQM) {
1595  /* Large coarse_tune setting */
1596  clrsetbits32(&ch[chn].ao.shu[0].rk[rk].selph_dq[1],
1597  0x77777777, dly_large | (dly_large_oen << 16));
1598  clrsetbits32(&ch[chn].ao.shu[0].rk[rk].selph_dq[3],
1599  0x77777777, dly_small | (dly_small_oen << 16));
1600  }
1601  }
1602  *dq_small_reg = dly_tune.coarse_tune_small;
1603 
1604  if (type == TX_WIN_DQ_DQM || type == TX_WIN_DQ_ONLY) {
1605  for (size_t b = 0; b < 2; b++)
1606  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rk].b[b].dq[7],
1607  FINE_TUNE_DQ, dly_tune.fine_tune);
1608  }
1609  if (type == TX_WIN_DQ_DQM) {
1610  for (size_t b = 0; b < 2; b++)
1611  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rk].b[b].dq[7],
1612  FINE_TUNE_DQM, dly_tune.fine_tune);
1613  }
1614 }
1615 
1616 static u32 dramc_get_smallest_dqs_dly(u8 chn, u8 rank, const struct sdram_params *params)
1617 {
1618  const u8 mck = 3;
1619  u32 min_dly = 0xffff, virtual_delay = 0;
1620  u32 tx_dly = read32(&ch[chn].ao.shu[0].selph_dqs0);
1621  u32 dly = read32(&ch[chn].ao.shu[0].selph_dqs1);
1622  u32 tmp;
1623 
1624  for (size_t dqs = 0; dqs < DQS_NUMBER; dqs++) {
1625  tmp = ((tx_dly >> (dqs << 2) & 0x7) << mck) +
1626  (dly >> (dqs << 2) & 0x7);
1627  virtual_delay = (tmp << 5) + params->wr_level[chn][rank][dqs];
1628  min_dly = MIN(min_dly, virtual_delay);
1629  }
1630 
1631  return min_dly;
1632 }
1633 
1634 static void dramc_get_dly_range(u8 chn, u8 rank, enum CAL_TYPE type,
1635  u8 freq_group, u16 *pre_cal, s16 *begin, s16 *end,
1636  const struct sdram_params *params)
1637 {
1638  u16 pre_dq_dly;
1639  switch (type) {
1640  case RX_WIN_RD_DQC:
1641  case RX_WIN_TEST_ENG:
1642  switch (freq_group) {
1643  case LP4X_DDR1600:
1644  *begin = -48;
1645  break;
1646  case LP4X_DDR2400:
1647  *begin = -30;
1648  break;
1649  case LP4X_DDR3200:
1650  case LP4X_DDR3600:
1651  *begin = -26;
1652  break;
1653  default:
1654  die("Invalid DDR frequency group %u\n", freq_group);
1655  return;
1656  }
1657 
1658  *end = MAX_RX_DQDLY_TAPS;
1659  break;
1660 
1661  case TX_WIN_DQ_DQM:
1662  *begin = dramc_get_smallest_dqs_dly(chn, rank, params);
1663  *end = *begin + 256;
1664  break;
1665 
1666  case TX_WIN_DQ_ONLY:
1667  pre_dq_dly = MIN(pre_cal[0], pre_cal[1]);
1668  pre_dq_dly = (pre_dq_dly > 24) ? (pre_dq_dly - 24) : 0;
1669  *begin = pre_dq_dly;
1670  *end = *begin + 64;
1671  break;
1672  default:
1673  dramc_err("error calibration type: %d\n", type);
1674  break;
1675  }
1676 }
1677 
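/*
 * Track pass/fail transitions while sweeping a DQ/DQS delay for one bit:
 * record where a passing range starts and ends, keep the widest range
 * seen so far in best_first/best_last, and return the size of the pass
 * window that just closed (0 otherwise).
 */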
1678 static int dramc_check_dqdqs_win(struct win_perbit_dly *perbit_dly,
1679  s16 dly, s16 dly_end, bool fail_bit)
1680 {
1681  int pass_win = 0;
1682 
1683  if (perbit_dly->first_pass == PASS_RANGE_NA) {
1684  if (!fail_bit) /* compare correct: pass */
1685  perbit_dly->first_pass = dly;
1686  } else if (perbit_dly->last_pass == PASS_RANGE_NA) {
1687  if (fail_bit) /* compare error: fail */
1688  perbit_dly->last_pass = dly - 1;
1689  else if (dly == dly_end)
1690  perbit_dly->last_pass = dly;
1691 
1692  if (perbit_dly->last_pass != PASS_RANGE_NA) {
1693  pass_win = perbit_dly->last_pass - perbit_dly->first_pass;
1694  int best_pass_win = perbit_dly->best_last - perbit_dly->best_first;
1695  if (pass_win >= best_pass_win) {
1696  perbit_dly->best_last = perbit_dly->last_pass;
1697  perbit_dly->best_first = perbit_dly->first_pass;
1698  }
1699 
1700  /* Clear to find the next pass range, if any */
1701  perbit_dly->first_pass = PASS_RANGE_NA;
1702  perbit_dly->last_pass = PASS_RANGE_NA;
1703  }
1704  }
1705 
1706  return pass_win;
1707 }
1708 
1709 static void dramc_set_vref_dly(struct vref_perbit_dly *vref_dly, struct win_perbit_dly delay[])
1710 {
1711  struct win_perbit_dly *perbit_dly = vref_dly->perbit_dly;
1712 
1713  for (u8 bit = 0; bit < DQ_DATA_WIDTH; bit++) {
1714  delay[bit].win_center = (delay[bit].best_first + delay[bit].best_last) >> 1;
1715 
1716  perbit_dly[bit].best_first = delay[bit].best_first;
1717  perbit_dly[bit].best_last = delay[bit].best_last;
1718  perbit_dly[bit].win_center = delay[bit].win_center;
1719  perbit_dly[bit].best_dqdly = delay[bit].best_dqdly;
1720  }
1721 }
1722 
1723 static bool dramk_calc_best_vref(enum CAL_TYPE type, u8 vref,
1724  struct vref_perbit_dly *vref_dly, struct win_perbit_dly delay[],
1725  u32 *win_min_max)
1726 {
1727  u32 win_size, min_bit = 0xff, min_winsize = 0xffff, tmp_win_sum = 0;
1728 
1729  switch (type) {
1730  case RX_WIN_RD_DQC:
1731  case RX_WIN_TEST_ENG:
1732  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++) {
1733  win_size = delay[bit].best_last - delay[bit].best_first;
1734 
1735  if (win_size < min_winsize) {
1736  min_bit = bit;
1737  min_winsize = win_size;
1738  }
1739  tmp_win_sum += win_size;
1740  }
1741  dramc_dbg("type:%d vref:%d Min Bit=%d, min_winsize=%d, win sum:%d\n",
1742  type, vref, min_bit, min_winsize, tmp_win_sum);
1743 
1744  if (tmp_win_sum > vref_dly->max_win_sum) {
1745  *win_min_max = min_winsize;
1746  vref_dly->max_win_sum = tmp_win_sum;
1747 
1748  /* best vref */
1749  vref_dly->best_vref = vref;
1750  }
1751  dramc_dbg("type:%d vref:%d, win_sum_total:%d, tmp_win_sum:%d)\n",
1752  type, vref, vref_dly->max_win_sum, tmp_win_sum);
1753  dramc_set_vref_dly(vref_dly, delay);
1754 
1755  if (tmp_win_sum < vref_dly->max_win_sum * 95 / 100) {
1756  dramc_dbg("type:%d best vref found[%d], early break! (%d < %d)\n",
1757  type, vref_dly->best_vref, tmp_win_sum,
1758  vref_dly->max_win_sum * 95 / 100);
1759  return true;
1760  }
1761 
1762  break;
1763  case TX_WIN_DQ_ONLY:
1764  case TX_WIN_DQ_DQM:
1765  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++) {
1766  win_size = delay[bit].best_last - delay[bit].best_first;
1767 
1768  if (win_size < min_winsize) {
1769  min_bit = bit;
1770  min_winsize = win_size;
1771  }
1772  tmp_win_sum += win_size;
1773  }
1774  dramc_dbg("type:%d vref:%d Min Bit=%d, min_winsize=%d, win sum:%d\n",
1775  type, vref, min_bit, min_winsize, tmp_win_sum);
1776 
1777  if (min_winsize > *win_min_max ||
1778  (min_winsize == *win_min_max &&
1779  tmp_win_sum > vref_dly->max_win_sum)) {
1780  *win_min_max = min_winsize;
1781  vref_dly->max_win_sum = tmp_win_sum;
1782 
1783  /* best vref */
1784  vref_dly->best_vref = vref;
1785  }
1786  dramc_dbg("type:%d vref:%d, win_sum_total:%d, tmp_win_sum:%d\n",
1787  type, vref, vref_dly->max_win_sum, tmp_win_sum);
1788  dramc_set_vref_dly(vref_dly, delay);
1789 
1790  if (tmp_win_sum < vref_dly->max_win_sum * 95 / 100) {
1791  dramc_dbg("type:%d best vref found[%d], early break! (%d < %d)\n",
1792  type, vref_dly->best_vref, tmp_win_sum,
1793  vref_dly->max_win_sum * 95 / 100);
1794  return true;
1795  }
1796 
1797  break;
1798 
1799  default:
1800  dramc_err("error calibration type: %d\n", type);
1801  break;
1802  }
1803 
1804  return false;
1805 }
1806 
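/*
 * Apply one step of the RX sweep. Delays here are signed: a negative
 * value is programmed as a positive delay on the DQS strobe (in effect
 * probing the hold side), while a positive value delays DQM and DQ
 * instead (probing the setup side). Only one side is shifted at a time,
 * which is why the RX sweep can start from a negative delay.
 */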
1807 static void dramc_set_rx_dqdqs_dly(u8 chn, u8 rank, s32 dly)
1808 {
1809  if (dly <= 0) {
1810  /* Set DQS delay */
1811  dramc_set_rx_dly_factor(chn, rank, RX_DQS, -dly);
1812  dram_phy_reset(chn);
1813  } else {
1814  /* Setup time calibration */
1815  dramc_set_rx_dly_factor(chn, rank, RX_DQM, dly);
1816  dram_phy_reset(chn);
1817  dramc_set_rx_dly_factor(chn, rank, RX_DQ, dly);
1818  }
1819 }
1820 
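/*
 * Commit the chosen TX delays to hardware. dramc_transfer_dly_tune()
 * splits each byte lane's delay into coarse large/small UI steps plus a
 * PI fine-tune; the coarse parts are packed four bits per byte into the
 * selph_dq[0..3] registers (data in the low half, output enable in the
 * high half), and the fine-tune goes to the per-byte PHY registers. For
 * TX_WIN_DQ_ONLY the fine-tune values are also mirrored into the
 * dqs2dq_cal registers so DQS2DQ tracking starts from the calibrated
 * positions.
 */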
1821 static void dramc_set_tx_best_dly_factor(u8 chn, u8 rank_start, u8 type,
1822  struct per_byte_dly *tx_perbyte_dly, u16 *dq_precal_dly,
1823  u8 use_delay_cell, u32 *byte_dly_cell)
1824 {
1825  u32 dq_large = 0, dq_large_oen = 0, dq_small = 0, dq_small_oen = 0, adjust_center = 1;
1826  u32 dqm_large = 0, dqm_large_oen = 0, dqm_small = 0, dqm_small_oen = 0;
1827  u16 dq_oen[DQS_NUMBER] = {0}, dqm_oen[DQS_NUMBER] = {0};
1828  struct tx_dly_tune dqdly_tune[DQS_NUMBER] = {0};
1829  struct tx_dly_tune dqmdly_tune[DQS_NUMBER] = {0};
1830 
1831  for (size_t i = 0; i < DQS_NUMBER; i++) {
1832  dramc_transfer_dly_tune(chn, tx_perbyte_dly[i].final_dly,
1833  adjust_center, &dqdly_tune[i]);
1834  dramc_transfer_dly_tune(chn, dq_precal_dly[i],
1835  adjust_center, &dqmdly_tune[i]);
1836 
1837  dq_large += dqdly_tune[i].coarse_tune_large << (i * 4);
1838  dq_large_oen += dqdly_tune[i].coarse_tune_large_oen << (i * 4);
1839  dq_small += dqdly_tune[i].coarse_tune_small << (i * 4);
1840  dq_small_oen += dqdly_tune[i].coarse_tune_small_oen << (i * 4);
1841 
1842  dqm_large += dqmdly_tune[i].coarse_tune_large << (i * 4);
1843  dqm_large_oen += dqmdly_tune[i].coarse_tune_large_oen << (i * 4);
1844  dqm_small += dqmdly_tune[i].coarse_tune_small << (i * 4);
1845  dqm_small_oen += dqmdly_tune[i].coarse_tune_small_oen << (i * 4);
1846 
1847  dq_oen[i] = (dqdly_tune[i].coarse_tune_large_oen << 3) +
1848  (dqdly_tune[i].coarse_tune_small_oen << 5) + dqdly_tune[i].fine_tune;
1849  dqm_oen[i] = (dqmdly_tune[i].coarse_tune_large_oen << 3) +
1850  (dqmdly_tune[i].coarse_tune_small_oen << 5) +
1851  dqmdly_tune[i].fine_tune;
1852  }
1853 
1854  for (size_t rank = rank_start; rank < RANK_MAX; rank++) {
1855  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dq[0],
1856  0x77777777, dq_large | (dq_large_oen << 16));
1857  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dq[2],
1858  0x77777777, dq_small | (dq_small_oen << 16));
1859  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dq[1],
1860  0x77777777, dqm_large | (dqm_large_oen << 16));
1861  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dq[3],
1862  0x77777777, dqm_small | (dqm_small_oen << 16));
1863 
1864  for (size_t byte = 0; byte < 2; byte++)
1865  SET32_BITFIELDS(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[7],
1866  FINE_TUNE_DQ, dqdly_tune[byte].fine_tune,
1867  FINE_TUNE_DQM, dqmdly_tune[byte].fine_tune);
1868 
1869  if (use_delay_cell == 1) {
1870  for (size_t byte = 0; byte < DQS_NUMBER; byte++)
1871  write32(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[0],
1872  byte_dly_cell[byte]);
1873  }
1874 
1875  if (type != TX_WIN_DQ_ONLY)
1876  continue;
1877 
1878  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].fine_tune, 0x3f3f3f3f,
1879  (dqdly_tune[0].fine_tune << 8) | (dqdly_tune[1].fine_tune << 0) |
1880  (dqmdly_tune[0].fine_tune << 24) | (dqmdly_tune[1].fine_tune << 16));
1881 
1882  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].dqs2dq_cal1, 0x7ff | (0x7ff << 16),
1883  (dqdly_tune[0].fine_tune << 0) | (dqdly_tune[1].fine_tune << 16));
1884  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].dqs2dq_cal2, 0x7ff | (0x7ff << 16),
1885  (dqdly_tune[0].fine_tune << 0) | (dqdly_tune[1].fine_tune << 16));
1886  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].dqs2dq_cal5, 0x7ff | (0x7ff << 16),
1887  (dqmdly_tune[0].fine_tune << 0) | (dqmdly_tune[1].fine_tune << 16));
1888  }
1889 }
1890 
1891 static void dramc_set_rx_best_dly_factor(u8 chn, u8 rank,
1892  struct win_perbit_dly *dly, s32 *dqsdly_byte, s32 *dqmdly_byte)
1893 {
1894  u32 value;
1895 
1896  /* set dqs delay, (dqm delay) */
1897  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
1898  value = (dqsdly_byte[byte] << 24) | (dqsdly_byte[byte] << 16) |
1899  (dqmdly_byte[byte] << 8) | (dqmdly_byte[byte] << 0);
1900  clrsetbits32(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[6], 0x7f7f3f3f, value);
1901  }
1902  dram_phy_reset(chn);
1903 
1904  /* set dq delay */
1905  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
1906  for (u8 bit = 0; bit < DQS_BIT_NUMBER; bit += 2) {
1907  u8 index = bit + byte * DQS_BIT_NUMBER;
1908  u8 dq_num = 2 + bit / 2;
1909  value = (dly[index + 1].best_dqdly << 24) |
1910  (dly[index + 1].best_dqdly << 16) |
1911  (dly[index].best_dqdly << 8) | (dly[index].best_dqdly << 0);
1912 
1913  clrsetbits32(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[dq_num],
1914  0x3f3f3f3f, value);
1915  }
1916  }
1917 }
1918 
1919 static void dramc_set_dqdqs_dly(u8 chn, u8 rank, enum CAL_TYPE type, u8 *small_value, s32 dly)
1920 {
1921  if (type == RX_WIN_RD_DQC || type == RX_WIN_TEST_ENG)
1922  dramc_set_rx_dqdqs_dly(chn, rank, dly);
1923  else
1924  dramc_set_tx_dly_factor(chn, rank, type, small_value, dly);
1925 }
1926 
1927 static void dramc_set_tx_dly_center(struct per_byte_dly *center_dly,
1928  const struct win_perbit_dly *vref_dly)
1929 {
1930  int index;
1931  struct per_byte_dly *dly;
1932 
1933  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
1934  dly = &center_dly[byte];
1935  dly->min_center = 0xffff;
1936  dly->max_center = 0;
1937 
1938  for (u8 bit = 0; bit < DQS_BIT_NUMBER; bit++) {
1939  index = bit + 8 * byte;
1940  if (vref_dly[index].win_center < dly->min_center)
1941  dly->min_center = vref_dly[index].win_center;
1942  if (vref_dly[index].win_center > dly->max_center)
1943  dly->max_center = vref_dly[index].win_center;
1944  }
1945  dramc_dbg("center_dly[%d].min_center = %d, "
1946  "center_dly[%d].max_center = %d\n",
1947  byte, center_dly[byte].min_center,
1948  byte, center_dly[byte].max_center);
1949  }
1950 }
1951 
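/*
 * Map a frequency group to its actual clock rate in MHz. For example,
 * LP4X_DDR3200 runs at 3200 MT/s, so the nominal clock is 3200 / 2 =
 * 1600 MHz; a 4 MHz delta gives the 1596 returned below.
 */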
1952 static u32 get_freq_group_clock(u8 freq_group)
1953 {
1954  u32 clock_rate = 0;
1955 
1956  /*
1957  * The clock rate is usually (frequency / 2 - delta), where the delta
1958  * is introduced to avoid interference from RF peripherals like
1959  * modem, WiFi, and Bluetooth.
1960  */
1961  switch (freq_group) {
1962  case LP4X_DDR1600:
1963  clock_rate = 796;
1964  break;
1965  case LP4X_DDR2400:
1966  clock_rate = 1196;
1967  break;
1968  case LP4X_DDR3200:
1969  clock_rate = 1596;
1970  break;
1971  case LP4X_DDR3600:
1972  clock_rate = 1792;
1973  break;
1974  default:
1975  die("Invalid DDR frequency group %u\n", freq_group);
1976  break;
1977  }
1978 
1979  return clock_rate;
1980 }
1981 
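/*
 * Turn the per-bit TX window centers into per-byte delays. Without
 * delay cells, each byte simply uses the mean of its smallest and
 * largest bit centers. With delay cells (TX_WIN_DQ_ONLY at FSP_1), the
 * whole byte is moved to the smallest center and each bit receives the
 * remainder in delay cells: tune_diff PI corresponds to
 * tune_diff * tCK / 64 ps (tCK = 10^6 / clock_rate ps), and the extra
 * factor of 100 in the computation implies dly_cell_unit is expressed
 * in hundredths of a picosecond per cell.
 */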
1982 static void dramc_set_tx_best_dly(u8 chn, u8 rank, bool bypass_tx,
1983  struct win_perbit_dly *vref_dly,
1984  enum CAL_TYPE type, u8 freq_group,
1985  u16 *tx_dq_precal_result, u16 dly_cell_unit,
1986  const struct sdram_params *params,
1987  const bool fast_calib)
1988 {
1989  int index, clock_rate;
1990  u8 use_delay_cell;
1991  u32 byte_dly_cell[DQS_NUMBER] = { 0 };
1992  struct per_byte_dly center_dly[DQS_NUMBER];
1993  u16 tune_diff, dq_delay_cell[DQ_DATA_WIDTH];
1994 
1995  clock_rate = get_freq_group_clock(freq_group);
1996 
1997  if (type == TX_WIN_DQ_ONLY && get_freq_fsq(freq_group) == FSP_1)
1998  use_delay_cell = 1;
1999  else
2000  use_delay_cell = 0;
2001 
2002  if (fast_calib && bypass_tx) {
2003  dramc_dbg("bypass TX, clock_rate: %d\n", clock_rate);
2004  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
2005  center_dly[byte].min_center = params->tx_center_min[chn][rank][byte];
2006  center_dly[byte].max_center = params->tx_center_max[chn][rank][byte];
2007  for (u8 bit = 0; bit < DQS_BIT_NUMBER; bit++) {
2008  index = bit + 8 * byte;
2009  vref_dly[index].win_center =
2010  params->tx_win_center[chn][rank][index];
2011  vref_dly[index].best_first =
2012  params->tx_first_pass[chn][rank][index];
2013  vref_dly[index].best_last =
2014  params->tx_last_pass[chn][rank][index];
2015  }
2016  }
2017  } else {
2018  dramc_set_tx_dly_center(center_dly, vref_dly);
2019  }
2020 
2021  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
2022  if (use_delay_cell == 0) {
2023  center_dly[byte].final_dly = (center_dly[byte].min_center +
2024  center_dly[byte].max_center) >> 1;
2025  tx_dq_precal_result[byte] = center_dly[byte].final_dly;
2026  } else {
2027  center_dly[byte].final_dly = center_dly[byte].min_center;
2028  tx_dq_precal_result[byte] = (center_dly[byte].min_center
2029  + center_dly[byte].max_center) >> 1;
2030 
2031  for (u8 bit = 0; bit < DQS_BIT_NUMBER; bit++) {
2032  index = bit + 8 * byte;
2033  tune_diff = vref_dly[index].win_center -
2034  center_dly[byte].min_center;
2035  dq_delay_cell[index] = ((tune_diff * 100000000) /
2036  (clock_rate * 64)) / dly_cell_unit;
2037  byte_dly_cell[byte] |= (dq_delay_cell[index] << (bit * 4));
2038  dramc_dbg("u1DelayCellOfst[%d]=%d cells (%d PI)\n",
2039  index, dq_delay_cell[index], tune_diff);
2040  }
2041  }
2042  }
2043 
2044  dramc_set_tx_best_dly_factor(chn, rank, type, center_dly, tx_dq_precal_result,
2045  use_delay_cell, byte_dly_cell);
2046 }
2047 
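/*
 * Reduce the per-bit RX window centers to hardware settings. A bit with
 * a negative center needs its byte's shared DQS strobe delayed instead,
 * so the DQS delay becomes the magnitude of the most negative center in
 * the byte (0 if none), each DQ delay becomes its center plus that DQS
 * shift, and the byte's DQM delay is the average of the resulting DQ
 * delays.
 */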
2048 static int dramc_set_rx_best_dly(u8 chn, u8 rank, struct win_perbit_dly *perbit_dly)
2049 {
2050  u8 bit_first, bit_last;
2051  u16 u2TmpDQMSum;
2052  s32 dqsdly_byte[DQS_NUMBER] = {0x0}, dqm_dly_byte[DQS_NUMBER] = {0x0};
2053 
2054  for (u8 byte = 0; byte < DQS_NUMBER; byte++) {
2055  u2TmpDQMSum = 0;
2056 
2057  bit_first = DQS_BIT_NUMBER * byte;
2058  bit_last = DQS_BIT_NUMBER * byte + DQS_BIT_NUMBER - 1;
2059  dqsdly_byte[byte] = 64;
2060 
2061  for (u8 bit = bit_first; bit <= bit_last; bit++) {
2062  if (perbit_dly[bit].win_center < dqsdly_byte[byte])
2063  dqsdly_byte[byte] = perbit_dly[bit].win_center;
2064  }
2065  dqsdly_byte[byte] = (dqsdly_byte[byte] > 0) ? 0 : -dqsdly_byte[byte];
2066 
2067  for (u8 bit = bit_first; bit <= bit_last; bit++) {
2068  perbit_dly[bit].best_dqdly = dqsdly_byte[byte] +
2069  perbit_dly[bit].win_center;
2070  u2TmpDQMSum += perbit_dly[bit].best_dqdly;
2071  }
2072 
2073  dqm_dly_byte[byte] = u2TmpDQMSum / DQS_BIT_NUMBER;
2074  }
2075 
2076  dramc_set_rx_best_dly_factor(chn, rank, perbit_dly, dqsdly_byte, dqm_dly_byte);
2077  return 0;
2078 }
2079 
2080 static void dramc_get_vref_prop(u8 rank, enum CAL_TYPE type, u8 fsp,
2081  u8 *vref_scan_en, u8 *vref_begin, u8 *vref_end)
2082 {
2083  if (type == RX_WIN_TEST_ENG && rank == RANK_0) {
2084  *vref_scan_en = 1;
2085  if (fsp == FSP_0)
2086  *vref_begin = 0x18;
2087  else
2088  *vref_begin = 0;
2089  *vref_end = RX_VREF_END;
2090  } else if (type == TX_WIN_DQ_ONLY) {
2091  *vref_scan_en = 1;
2092  if (fsp == FSP_0) {
2093  *vref_begin = 27 - 5;
2094  *vref_end = 27 + 5;
2095  } else {
2096  *vref_begin = TX_VREF_BEGIN;
2097  *vref_end = TX_VREF_END;
2098  }
2099  } else {
2100  *vref_scan_en = 0;
2101  *vref_begin = 0;
2102  *vref_end = 1;
2103  }
2104 }
2105 
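/*
 * Run one compare pass and return a bitmap with one error bit per DQ.
 * RD DQC and the RX test engine need a single run; the TX types run the
 * engine twice, inverting the test pattern in between, and OR the
 * failures so a bit must pass with both polarities.
 */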
2106 static u32 dram_k_perbit(u8 chn, enum CAL_TYPE type)
2107 {
2108  u32 err_value;
2109 
2110  if (type == RX_WIN_RD_DQC) {
2111  err_value = dramc_rx_rd_dqc_run(chn);
2112  } else if (type == RX_WIN_TEST_ENG) {
2113  err_value = dramc_engine2_run(chn, TE_OP_WRITE_READ_CHECK);
2114  } else {
2115  dramc_engine2_setpat(chn, true);
2116  err_value = dramc_engine2_run(chn, TE_OP_WRITE_READ_CHECK);
2117  dramc_engine2_setpat(chn, false);
2118  err_value |= dramc_engine2_run(chn, TE_OP_WRITE_READ_CHECK);
2119  }
2120  return err_value;
2121 }
2122 
2123 static void dramc_window_perbit_cal_partial(u8 chn, u8 rank,
2124  s16 dly_begin, s16 dly_end, s16 dly_step,
2125  enum CAL_TYPE type, u8 *small_value, u8 vref_scan_enable,
2126  struct win_perbit_dly *win_perbit)
2127 {
2128  u32 finish_bit = 0;
2129 
2130  for (s16 dly = dly_begin; dly < dly_end; dly += dly_step) {
2131  dramc_set_dqdqs_dly(chn, rank, type, small_value, dly);
2132 
2133  u32 err_value = dram_k_perbit(chn, type);
2134  if (!vref_scan_enable)
2135  dramc_dbg("%d ", dly);
2136 
2137  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++) {
2138  bool bit_fail = (err_value & ((u32) 1 << bit)) != 0;
2139 
2140  /* A pass window wider than 7 taps
2141  * is considered a real pass window.
2142  */
2143  if (dramc_check_dqdqs_win(&(win_perbit[bit]),
2144  dly, dly_end, bit_fail) > 7)
2145  finish_bit |= (1 << bit);
2146 
2147  if (vref_scan_enable)
2148  continue;
2149 
2150  dramc_dbg("%s", bit_fail ? "x" : "o");
2151  if (bit % DQS_BIT_NUMBER == 7)
2152  dramc_dbg(" ");
2153  }
2154 
2155  if (!vref_scan_enable)
2156  dramc_dbg(" [MSB]\n");
2157 
2158  if (finish_bit == 0xffff && (err_value & 0xffff) == 0xffff) {
2159  dramc_dbg("all bits window found, "
2160  "early break! delay=%#x\n", dly);
2161  break;
2162  }
2163  }
2164 }
2165 
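/*
 * Full window search for one (channel, rank, calibration type): for
 * each candidate Vref, reset the per-bit windows, sweep the delay
 * range, then let dramk_calc_best_vref() keep the best Vref. The
 * winning Vref and its per-bit windows are finally committed through
 * dramc_set_rx_best_dly() or dramc_set_tx_best_dly(). With fast_calib,
 * both the Vref and the windows can be restored from cached parameters
 * instead of being re-measured.
 */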
2166 static u8 dramc_window_perbit_cal(u8 chn, u8 rank, u8 freq_group,
2167  enum CAL_TYPE type, const struct sdram_params *params,
2168  const bool fast_calib)
2169 {
2170  u8 vref = 0, vref_begin = 0, vref_end = 1, vref_step = 1, vref_use = 0;
2171  u8 vref_scan_enable = 0, small_reg_value = 0xff;
2172  s16 dly_begin = 0, dly_end = 0, dly_step = 1;
2173  u32 dummy_rd_bak_engine2 = 0, finish_bit, win_min_max = 0;
2174  static u16 dq_precal_result[DQS_NUMBER];
2175  struct vref_perbit_dly vref_dly;
2176  struct win_perbit_dly win_perbit[DQ_DATA_WIDTH];
2177  u16 dly_cell_unit = params->delay_cell_unit;
2178 
2179  u8 fsp = get_freq_fsq(freq_group);
2180  u8 vref_range = !fsp;
2181  bool bypass_tx_rx = !fsp;
2182 
2183  dramc_dbg("bypass TX RX window: %s\n", bypass_tx_rx ? "Yes" : "No");
2184  dramc_get_vref_prop(rank, type, fsp,
2185  &vref_scan_enable, &vref_begin, &vref_end);
2186  dramc_get_dly_range(chn, rank, type, freq_group, dq_precal_result,
2187  &dly_begin, &dly_end, params);
2188 
2189  if (fast_calib) {
2190  if (type == RX_WIN_TEST_ENG && vref_scan_enable == 1) {
2191  vref_begin = params->rx_vref[chn];
2192  vref_end = vref_begin + 1;
2193  dramc_dbg("bypass RX vref: %d\n", vref_begin);
2194  } else if (type == TX_WIN_DQ_ONLY) {
2195  vref_begin = params->tx_vref[chn][rank] | (vref_range << 6);
2196  vref_end = vref_begin + 1;
2197  dramc_dbg("bypass TX vref: %d\n", vref_begin);
2198  }
2199  vref_dly.best_vref = vref_begin;
2200  }
2201 
2202  if ((type == RX_WIN_RD_DQC || type == RX_WIN_TEST_ENG) && fsp == FSP_0)
2203  dly_step = 2;
2204 
2205  dramc_dbg("[channel %d] [rank %d] type: %d, vref_enable: %d, vref range[%d : %d]\n",
2206  chn, rank, type, vref_scan_enable, vref_begin, vref_end);
2207 
2208  if (type == TX_WIN_DQ_ONLY || type == TX_WIN_DQ_DQM) {
2209  for (size_t byte = 0; byte < 2; byte++) {
2210  write32(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[0], 0);
2211  clrbits32(&ch[chn].phy.shu[0].rk[rank].b[byte].dq[1],
2212  0xf);
2213  }
2214  setbits32(&ch[chn].phy.misc_ctrl1, 0x1 << 7);
2215  setbits32(&ch[chn].ao.dqsoscr, 0x1 << 7);
2216  if (fsp == FSP_1)
2217  vref_step = 2;
2218  }
2219 
2220  if (fast_calib && bypass_tx_rx &&
2221  (type == TX_WIN_DQ_ONLY || type == TX_WIN_DQ_DQM)) {
2222  dramc_set_tx_best_dly(chn, rank, bypass_tx_rx, vref_dly.perbit_dly,
2223  type, freq_group, dq_precal_result, dly_cell_unit,
2224  params, fast_calib);
2225 
2226  if (vref_scan_enable)
2227  dramc_set_vref(chn, rank, type, vref_dly.best_vref);
2228  return 0;
2229  }
2230 
2231  if (type == RX_WIN_RD_DQC) {
2232  dramc_rx_rd_dqc_init(chn, rank);
2233  } else {
2234  if (type == RX_WIN_TEST_ENG)
2235  dramc_rx_vref_pre_setting(chn);
2236  dummy_rd_bak_engine2 = read32(&ch[chn].ao.dummy_rd);
2237  dramc_engine2_init(chn, rank, TEST2_1_CAL, TEST2_2_CAL, false);
2238  }
2239 
2240  vref_dly.max_win_sum = 0;
2241  for (vref = vref_begin; vref < vref_end; vref += vref_step) {
2242  small_reg_value = 0xff;
2243  finish_bit = 0;
2244  if (type == TX_WIN_DQ_ONLY)
2245  vref_use = vref | (vref_range << 6);
2246  else
2247  vref_use = vref;
2248 
2249  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++) {
2250  win_perbit[bit].first_pass = PASS_RANGE_NA;
2251  win_perbit[bit].last_pass = PASS_RANGE_NA;
2252  win_perbit[bit].best_first = PASS_RANGE_NA;
2253  win_perbit[bit].best_last = PASS_RANGE_NA;
2254  }
2255 
2256  if (vref_scan_enable)
2257  dramc_set_vref(chn, rank, type, vref_use);
2258 
2259  if (type == RX_WIN_RD_DQC || type == RX_WIN_TEST_ENG) {
2260  dramc_set_rx_dly_factor(chn, rank,
2260  dramc_set_rx_dly_factor(chn, rank,
2261  RX_DQM, 0);
2262  dramc_set_rx_dly_factor(chn, rank,
2263  RX_DQ, 0);
2265 
2266  if (fast_calib && bypass_tx_rx &&
2267  (type == RX_WIN_RD_DQC || type == RX_WIN_TEST_ENG)) {
2268  dramc_dbg("bypass RX params\n");
2269  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++) {
2270  win_perbit[bit].best_first =
2271  params->rx_firspass[chn][rank][bit];
2272  win_perbit[bit].best_last =
2273  params->rx_lastpass[chn][rank][bit];
2274  }
2275  } else {
2276  dramc_window_perbit_cal_partial(chn, rank,
2277  dly_begin, dly_end, dly_step,
2278  type, &small_reg_value,
2279  vref_scan_enable, win_perbit);
2280  }
2281 
2282  for (size_t bit = 0; bit < DQ_DATA_WIDTH; bit++)
2283  dramc_dbg("Dq[%zd] win width (%d ~ %d) %d\n", bit,
2284  win_perbit[bit].best_first, win_perbit[bit].best_last,
2285  win_perbit[bit].best_last - win_perbit[bit].best_first);
2286 
2287  if (dramk_calc_best_vref(type, vref_use, &vref_dly,
2288  win_perbit, &win_min_max))
2289  break;
2290  }
2291 
2292  if (type == RX_WIN_RD_DQC)
2293  dramc_rx_rd_dqc_end(chn);
2294  else
2295  dramc_engine2_end(chn, dummy_rd_bak_engine2);
2296 
2297  if (vref_scan_enable && type == RX_WIN_TEST_ENG)
2298  dramc_set_vref(chn, rank, type, vref_dly.best_vref);
2299 
2300  if (type == RX_WIN_RD_DQC || type == RX_WIN_TEST_ENG)
2301  dramc_set_rx_best_dly(chn, rank, vref_dly.perbit_dly);
2302  else
2303  dramc_set_tx_best_dly(chn, rank, false,
2304  vref_dly.perbit_dly, type, freq_group,
2305  dq_precal_result, dly_cell_unit, params, fast_calib);
2306 
2307  if (vref_scan_enable && type == TX_WIN_DQ_ONLY)
2308  dramc_set_vref(chn, rank, type, vref_dly.best_vref);
2309 
2310  return 0;
2311 }
2312 
2313 static void dramc_dle_factor_handler(u8 chn, u8 val, u8 freq_group)
2314 {
2315  u8 start_ext2 = 0, start_ext3 = 0, last_ext2 = 0, last_ext3 = 0;
2316 
2317  val = MAX(val, 2);
2318  SET32_BITFIELDS(&ch[chn].ao.shu[0].conf[1],
2319  SHU_CONF1_DATLAT, val,
2320  SHU_CONF1_DATLAT_DSEL, val - 2,
2321  SHU_CONF1_DATLAT_DSEL_PHY, val - 2);
2322 
2323  if (freq_group == LP4X_DDR3200 || freq_group == LP4X_DDR3600)
2324  start_ext2 = 1;
2325 
2326  if (val >= 24)
2327  last_ext2 = last_ext3 = 1;
2328  else if (val >= 18)
2329  last_ext2 = 1;
2330 
2331  SET32_BITFIELDS(&ch[chn].ao.shu[0].pipe,
2332  SHU_PIPE_READ_START_EXTEND1, 1,
2333  SHU_PIPE_DLE_LAST_EXTEND1, 1,
2334  SHU_PIPE_READ_START_EXTEND2, start_ext2,
2335  SHU_PIPE_DLE_LAST_EXTEND2, last_ext2,
2336  SHU_PIPE_READ_START_EXTEND3, start_ext3,
2337  SHU_PIPE_DLE_LAST_EXTEND3, last_ext3);
2338  dram_phy_reset(chn);
2339 }
2340 
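/*
 * Calibrate the read-data latch latency (DATLAT). The tap is swept
 * upward from 7; `first` records the tap where reads begin to pass and
 * `sum` counts consecutive passing taps (the scan stops after 5). The
 * final tap is placed inside the passing region: first + sum/2 for
 * short runs, first + 2 otherwise.
 */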
2341 static u8 dramc_rx_datlat_cal(u8 chn, u8 rank, u8 freq_group,
2342  const struct sdram_params *params, const bool fast_calib,
2343  bool *test_passed)
2344 {
2345  u32 datlat, begin = 0, first = 0, sum = 0, best_step;
2346  u32 datlat_start = 7;
2347 
2348  *test_passed = true;
2349  best_step = READ32_BITFIELD(&ch[chn].ao.shu[0].conf[1], SHU_CONF1_DATLAT);
2350 
2351  dramc_dbg("[DATLAT] start. CH%d RK%d DATLAT Default: 0x%x\n",
2352  chn, rank, best_step);
2353 
2354  u32 dummy_rd_backup = read32(&ch[chn].ao.dummy_rd);
2355  dramc_engine2_init(chn, rank, TEST2_1_CAL, TEST2_2_CAL, false);
2356 
2357  if (fast_calib) {
2358  best_step = params->rx_datlat[chn][rank];
2359  dramc_dbg("bypass DATLAT, best_step: %d\n", best_step);
2360  } else {
2361  for (datlat = datlat_start; datlat < DATLAT_TAP_NUMBER; datlat++) {
2362  dramc_dle_factor_handler(chn, datlat, freq_group);
2363 
2364  u32 err = dramc_engine2_run(chn, TE_OP_READ_CHECK);
2365  if (err == 0) {
2366  if (begin == 0) {
2367  first = datlat;
2368  begin = 1;
2369  }
2370  if (begin == 1) {
2371  sum++;
2372  if (sum > 4)
2373  break;
2374  }
2375  } else {
2376  if (begin == 1)
2377  begin = 0xff;
2378  }
2379 
2380  dramc_dbg("Datlat=%2d, err_value=0x%4x, sum=%d\n", datlat, err, sum);
2381  }
2382 
2383  dramc_engine2_end(chn, dummy_rd_backup);
2384 
2385  *test_passed = (sum != 0);
2386  if (!*test_passed) {
2387  dramc_err("DRAM memory test failed\n");
2388  return 0;
2389  }
2390 
2391  if (sum <= 3)
2392  best_step = first + (sum >> 1);
2393  else
2394  best_step = first + 2;
2395  dramc_dbg("First_step=%d, total pass=%d, best_step=%d\n",
2396  first, sum, best_step);
2397  }
2398 
2399  dramc_dle_factor_handler(chn, best_step, freq_group);
2400 
2401  SET32_BITFIELDS(&ch[chn].ao.padctrl,
2402  PADCTRL_DQIENQKEND, 1, PADCTRL_DQIENLATEBEGIN, 1);
2403 
2404  return (u8) best_step;
2405 }
2406 
2407 static void dramc_dual_rank_rx_datlat_cal(u8 chn, u8 freq_group, u8 datlat0, u8 datlat1)
2408 {
2409  u8 final_datlat = MAX(datlat0, datlat1);
2410  dramc_dle_factor_handler(chn, final_datlat, freq_group);
2411 }
2412 
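/*
 * Align the gating results of all ranks. DQSINCTL is adjusted so the
 * smallest calibrated coarse gating delay meets the per-frequency floor
 * (2 for DDR3200/DDR3600, else 1): every rank's coarse delays are
 * shifted by the same amount to compensate, and the derived value is
 * written back to DQSINCTL, RANKINCTL and the read-ODT stable window.
 */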
2413 static void dramc_rx_dqs_gating_post_process(u8 chn, u8 freq_group, u32 rk_num)
2414 {
2415  s8 dqsinctl;
2416  u32 read_dqsinctl, rankinctl_root, reg_tx_dly_dqsgated_min = 3;
2417  u8 txdly_cal_min = 0xff, txdly_cal_max = 0, tx_dly_dqs_gated = 0;
2418  u32 best_coarse_tune2t[RANK_MAX][DQS_NUMBER];
2419  u32 best_coarse_tune2t_p1[RANK_MAX][DQS_NUMBER];
2420 
2421  if (freq_group == LP4X_DDR3200 || freq_group == LP4X_DDR3600)
2422  reg_tx_dly_dqsgated_min = 2;
2423  else
2424  reg_tx_dly_dqsgated_min = 1;
2425 
2426  /* get TXDLY_Cal_min and TXDLY_Cal_max value */
2427  for (size_t rank = 0; rank < rk_num; rank++) {
2428  u32 dqsg0 = read32(&ch[chn].ao.shu[0].rk[rank].selph_dqsg0);
2429  for (size_t dqs = 0; dqs < DQS_NUMBER; dqs++) {
2430  best_coarse_tune2t[rank][dqs] = (dqsg0 >> (dqs * 8)) & 0x7;
2431  best_coarse_tune2t_p1[rank][dqs] = (dqsg0 >> (dqs * 8 + 4)) & 0x7;
2432  dramc_dbg("Rank%zd best DQS%zd dly(2T,(P1)2T)=(%d, %d)\n",
2433  rank, dqs, best_coarse_tune2t[rank][dqs],
2434  best_coarse_tune2t_p1[rank][dqs]);
2435 
2436  tx_dly_dqs_gated = best_coarse_tune2t[rank][dqs];
2437  txdly_cal_min = MIN(txdly_cal_min, tx_dly_dqs_gated);
2438 
2439  tx_dly_dqs_gated = best_coarse_tune2t_p1[rank][dqs];
2440  txdly_cal_max = MAX(txdly_cal_max, tx_dly_dqs_gated);
2441  }
2442  }
2443 
2444  dqsinctl = reg_tx_dly_dqsgated_min - txdly_cal_min;
2445  dramc_dbg("Dqsinctl:%d, dqsgated_min %d, txdly_cal_min %d, txdly_cal_max %d\n",
2446  dqsinctl, reg_tx_dly_dqsgated_min, txdly_cal_min, txdly_cal_max);
2447 
2448  if (dqsinctl != 0) {
2449  txdly_cal_min += dqsinctl;
2450  txdly_cal_max += dqsinctl;
2451 
2452  for (size_t rank = 0; rank < rk_num; rank++) {
2453  dramc_dbg("Rank: %zd\n", rank);
2454  for (size_t dqs = 0; dqs < DQS_NUMBER; dqs++) {
2455  best_coarse_tune2t[rank][dqs] += dqsinctl;
2456  best_coarse_tune2t_p1[rank][dqs] += dqsinctl;
2457 
2458  dramc_dbg("Best DQS%zd dly(2T) = (%d)\n",
2459  dqs, best_coarse_tune2t[rank][dqs]);
2460  dramc_dbg("Best DQS%zd P1 dly(2T) = (%d)\n",
2461  dqs, best_coarse_tune2t_p1[rank][dqs]);
2462  }
2463 
2464  clrsetbits32(&ch[chn].ao.shu[0].rk[rank].selph_dqsg0,
2465  0x77777777,
2466  (best_coarse_tune2t[rank][0] << 0) |
2467  (best_coarse_tune2t[rank][1] << 8) |
2468  (best_coarse_tune2t_p1[rank][0] << 4) |
2469  (best_coarse_tune2t_p1[rank][1] << 12));
2470  }
2471  }
2472 
2473  read_dqsinctl = READ32_BITFIELD(&ch[chn].ao.shu[0].rk[0].dqsctl,
2474  SHURK_DQSCTL_DQSINCTL) - dqsinctl;
2475  rankinctl_root = (read_dqsinctl >= 2) ? (read_dqsinctl - 2) : 0;
2476 
2477  SET32_BITFIELDS(&ch[chn].ao.shu[0].rk[0].dqsctl, SHURK_DQSCTL_DQSINCTL, read_dqsinctl);
2478  SET32_BITFIELDS(&ch[chn].ao.shu[0].rk[1].dqsctl, SHURK_DQSCTL_DQSINCTL, read_dqsinctl);
2479  clrsetbits32(&ch[chn].ao.shu[0].rankctl,
2480  (0xf << 28) | (0xf << 20) | (0xf << 24) | 0xf,
2481  (read_dqsinctl << 28) | (rankinctl_root << 20) |
2482  (rankinctl_root << 24) | rankinctl_root);
2483 
2484  u8 ROEN = read32(&ch[chn].ao.shu[0].odtctrl) & 0x1;
2485  clrsetbits32(&ch[chn].ao.shu[0].rodtenstb, (0xffff << 8) | (0x3f << 2) | (0x1),
2486  (0xff << 8) | (0x9 << 2) | ROEN);
2487 }
2488 
2489 static void start_dqsosc(u8 chn)
2490 {
2491  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSOSCENEN, 1);
2492  if (!wait_us(100, READ32_BITFIELD(&ch[chn].nao.spcmdresp,
2493  SPCMDRESP_DQSOSCEN_RESPONSE))) {
2494  dramc_err("start dqsosc timed out\n");
2495  return;
2496  }
2497  SET32_BITFIELDS(&ch[chn].ao.spcmd, SPCMD_DQSOSCENEN, 0);
2498 }
2499 
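/*
 * Measure the DQS oscillator for one rank. MR23 sets the oscillator run
 * time in units of 16 clocks and MR18/MR19 return the per-byte counts,
 * so the oscillator delay is dqsosc = MR23 * 16 * tCK / (2 * count) ps,
 * with tCK = 10^6 / clock_rate. The INC/DEC thresholds derived from it
 * decide how much drift triggers a tracking update; the 0x6/0x4
 * fallbacks cover a zero count.
 */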
2500 static void dqsosc_auto(u8 chn, u8 rank, u8 freq_group,
2501  u16 *osc_thrd_inc, u16 *osc_thrd_dec)
2502 {
2503  u8 mr23 = MR23_DEFAULT_VALUE;
2504  u16 mr18, mr19;
2505  u16 dqsosc_cnt[2], dqs_cnt, dqsosc, thrd_inc, thrd_dec;
2506  u32 clock_rate, tck;
2507 
2508  struct reg_value regs_bak[] = {
2509  {&ch[chn].ao.mrs},
2510  {&ch[chn].ao.dramc_pd_ctrl},
2511  {&ch[chn].ao.ckectrl},
2512  };
2513 
2514  for (size_t i = 0; i < ARRAY_SIZE(regs_bak); i++)
2515  regs_bak[i].value = read32(regs_bak[i].addr);
2516 
2517  SET32_BITFIELDS(&ch[chn].ao.rkcfg, RKCFG_DQSOSC2RK, 0);
2518  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, rank);
2519  SET32_BITFIELDS(&ch[chn].ao.mpc_option, MPC_OPTION_MPCRKEN, 1);
2520 
2521  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRSRK, rank);
2522  dramc_mode_reg_write(chn, 23, mr23);
2523 
2524  for (u8 shu = 0; shu < DRAM_DFS_SHUFFLE_MAX; shu++)
2525  SET32_BITFIELDS(&ch[chn].ao.shu[shu].scintv,
2526  SHU_SCINTV_DQSOSCENDIS, 1);
2527 
2528  SET32_BITFIELDS(&ch[chn].ao.dramc_pd_ctrl,
2529  DRAMC_PD_CTRL_MIOCKCTRLOFF, 1);
2530  dramc_cke_fix_onoff(CKE_FIXON, chn);
2531 
2532  start_dqsosc(chn);
2533  udelay(1);
2534  SET32_BITFIELDS(&ch[chn].ao.mrs, MRS_MRRRK, rank);
2535 
2536  mr18 = dramc_mode_reg_read(chn, 18);
2537  mr19 = dramc_mode_reg_read(chn, 19);
2538  dqsosc_cnt[0] = (mr18 & 0xff) | ((mr19 & 0xff) << 8);
2539  dqsosc_cnt[1] = (mr18 >> 8) | (mr19 & 0xff00);
2540  dramc_dbg("DQSOscCnt B0=%#x, B1=%#x\n", dqsosc_cnt[0], dqsosc_cnt[1]);
2541 
2542  /* get the INC and DEC values */
2543  clock_rate = get_freq_group_clock(freq_group);
2544  tck = 1000000 / clock_rate;
2545 
2546  dqs_cnt = (mr18 & 0xff) | ((mr19 & 0xff) << 8);
2547  if (dqs_cnt != 0) {
2548  dqsosc = mr23 * 16 * 1000000 / (2 * dqs_cnt * clock_rate);
2549  thrd_inc = mr23 * tck * tck / (dqsosc * dqsosc * 10);
2550  thrd_dec = 3 * mr23 * tck * tck / (dqsosc * dqsosc * 20);
2551  } else {
2552  dqsosc = 0;
2553  thrd_inc = 0x6;
2554  thrd_dec = 0x4;
2555  }
2556  osc_thrd_inc[rank] = thrd_inc;
2557  osc_thrd_dec[rank] = thrd_dec;
2558  dramc_dbg("CH%d_RK%d: MR18=%#x, MR19=%#x, DQSOSC=%d, MR23=%d, "
2559  "INC=%d, DEC=%d\n",
2560  chn, rank, mr18, mr19, dqsosc, mr23, thrd_inc, thrd_dec);
2561 
2562  for (size_t i = 0; i < ARRAY_SIZE(regs_bak); i++)
2563  write32(regs_bak[i].addr, regs_bak[i].value);
2564 
2565  SET32_BITFIELDS(&ch[chn].ao.shu[0].rk[rank].dqsosc,
2566  SHU1RK0_DQSOSC_DQSOSC_BASE_RK0, dqsosc_cnt[0],
2567  SHU1RK0_DQSOSC_DQSOSC_BASE_RK0_B1, dqsosc_cnt[1]);
2568 }
2569 
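/*
 * Hand DQS2DQ tracking over to hardware. The shuffle-to-shuffle
 * frequency ratios are programmed in 1/8 steps (f_target * 8 /
 * f_current) so the tracked TX delay can be rescaled on a DVFS switch,
 * then oscillator-based updates are enabled per rank.
 */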
2570 void dramc_hw_dqsosc(u8 chn, u32 rk_num)
2571 {
2572  u32 freq_shu1 = get_shu_freq(DRAM_DFS_SHUFFLE_1);
2573  u32 freq_shu2 = get_shu_freq(DRAM_DFS_SHUFFLE_2);
2574  u32 freq_shu3 = get_shu_freq(DRAM_DFS_SHUFFLE_3);
2575 
2576  SET32_BITFIELDS(&ch[chn].ao.rk[2].dqsosc,
2577  RK2_DQSOSC_FREQ_RATIO_TX_0, freq_shu2 * 8 / freq_shu1,
2578  RK2_DQSOSC_FREQ_RATIO_TX_1, freq_shu3 * 8 / freq_shu1);
2579  SET32_BITFIELDS(&ch[chn].ao.rk[2].dqsosc,
2580  RK2_DQSOSC_FREQ_RATIO_TX_3, freq_shu1 * 8 / freq_shu2,
2581  RK2_DQSOSC_FREQ_RATIO_TX_4, freq_shu3 * 8 / freq_shu2);
2582  SET32_BITFIELDS(&ch[chn].ao.rk[2].dummy_rd_bk,
2583  RK2_DUMMY_RD_BK_FREQ_RATIO_TX_6,
2584  freq_shu1 * 8 / freq_shu3,
2585  RK2_DUMMY_RD_BK_FREQ_RATIO_TX_7,
2586  freq_shu2 * 8 / freq_shu3);
2587 
2588  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[0],
2589  PRE_TDQSCK1_SHU_PRELOAD_TX_HW, 1,
2590  PRE_TDQSCK1_SHU_PRELOAD_TX_START, 0,
2591  PRE_TDQSCK1_SW_UP_TX_NOW_CASE, 0);
2592 
2593  SET32_BITFIELDS(&ch[chn].ao.mpc_option, MPC_OPTION_MPC_BLOCKALE_OPT, 0);
2594  SET32_BITFIELDS(&ch[chn].phy.misc_ctrl1, MISC_CTRL1_R_DMARPIDQ_SW, 1);
2595  SET32_BITFIELDS(&ch[chn].ao.dqsoscr, DQSOSCR_ARUIDQ_SW, 1);
2596  SET32_BITFIELDS(&ch[chn].ao.dqsoscr, DQSOSCR_DQSOSCRDIS, 1);
2597 
2598  SET32_BITFIELDS(&ch[chn].ao.rk[0].dqsosc, RK0_DQSOSC_DQSOSCR_RK0EN, 1);
2599 
2600  if (rk_num == RANK_MAX)
2601  SET32_BITFIELDS(&ch[chn].ao.rk[1].dqsosc, RK1_DQSOSC_DQSOSCR_RK1EN, 1);
2602 
2603  SET32_BITFIELDS(&ch[chn].ao.dqsoscr, DQSOSCR_DQSOSC_CALEN, 1);
2604 
2605  for (u8 shu = 0; shu < DRAM_DFS_SHUFFLE_MAX; shu++)
2606  SET32_BITFIELDS(&ch[chn].ao.shu[shu].scintv,
2607  SHU_SCINTV_DQSOSCENDIS, 1);
2608 }
2609 
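/*
 * Per-shuffle DQS oscillator configuration: the frequency-dependent
 * filter threshold and TX-update timing, the oscillator run length
 * (MR23 / 4 + 3), and the per-rank INC/DEC thresholds measured by
 * dqsosc_auto(). Rank 1's 12-bit INC threshold is split across two
 * registers.
 */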
2610 static void dqsosc_shu_settings(u8 chn, u8 freq_group,
2611  u16 *osc_thrd_inc, u16 *osc_thrd_dec)
2612 {
2613  u8 filt_pithrd, w2r_sel, upd_sel;
2614  u8 mr23 = MR23_DEFAULT_VALUE;
2615  u16 prd_cnt, thrd_inc, thrd_dec;
2616 
2617  SET32_BITFIELDS(&ch[chn].ao.shu[0].scintv,
2618  SHU_SCINTV_DQS2DQ_SHU_PITHRD, 0);
2619  SET32_BITFIELDS(&ch[chn].ao.rk[0].dqsosc,
2620  RK0_DQSOSC_R_DMDQS2DQ_FILT_OPT, 0);
2621 
2622  switch (freq_group) {
2623  case LP4X_DDR1600:
2624  filt_pithrd = 0x5;
2625  w2r_sel = 0x5;
2626  upd_sel = 0x0;
2627  break;
2628  case LP4X_DDR2400:
2629  filt_pithrd = 0x8;
2630  w2r_sel = 0x2;
2631  upd_sel = 0x0;
2632  break;
2633  case LP4X_DDR3200:
2634  filt_pithrd = 0xA;
2635  w2r_sel = 0x2;
2636  upd_sel = 0x0;
2637  break;
2638  case LP4X_DDR3600:
2639  filt_pithrd = 0xB;
2640  w2r_sel = 0x2;
2641  upd_sel = 0x0;
2642  break;
2643  default:
2644  die("Invalid DDR frequency group %u\n", freq_group);
2645  return;
2646  }
2647 
2648  SET32_BITFIELDS(&ch[chn].ao.shu[0].scintv,
2649  SHU_SCINTV_DQS2DQ_FILT_PITHRD, filt_pithrd);
2650  SET32_BITFIELDS(&ch[chn].ao.shu[0].wodt,
2651  SHU1_WODT_TXUPD_W2R_SEL, w2r_sel,
2652  SHU1_WODT_TXUPD_SEL, upd_sel);
2653 
2654  prd_cnt = mr23 / 4 + 3;
2655  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsosc_prd,
2656  SHU1_DQSOSC_PRD_DQSOSC_PRDCNT, prd_cnt);
2657  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsoscr,
2658  SHU_DQSOSCR_DQSOSCRCNT, 0x40);
2659 
2660  for (u8 rk = RANK_0; rk < RANK_MAX; rk++) {
2661  thrd_inc = osc_thrd_inc[rk];
2662  thrd_dec = osc_thrd_dec[rk];
2663 
2664  if (rk == RANK_0) {
2665  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsoscthrd,
2666  SHU_DQSOSCTHRD_DQSOSCTHRD_INC_RK0,
2667  thrd_inc);
2668  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsoscthrd,
2669  SHU_DQSOSCTHRD_DQSOSCTHRD_DEC_RK0,
2670  thrd_dec);
2671  } else {
2672  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsoscthrd,
2673  SHU_DQSOSCTHRD_DQSOSCTHRD_INC_RK1_7TO0,
2674  thrd_inc & 0xFF);
2675  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsosc_prd,
2676  SHU1_DQSOSC_PRD_DQSOSCTHRD_INC_RK1_11TO8,
2677  (thrd_inc & 0xF00) >> 8);
2678  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsosc_prd,
2679  SHU1_DQSOSC_PRD_DQSOSCTHRD_DEC_RK1,
2680  thrd_dec);
2681  }
2682  }
2683 
2684  SET32_BITFIELDS(&ch[chn].ao.shu[0].dqsoscr2,
2685  SHU_DQSOSCR2_DQSOSCENCNT, 0x1FF);
2686 }
2687 
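/*
 * Preload the per-shuffle tDQSCK values so DVFS switches do not have to
 * re-measure gating. The jump ratios are destination/source clock
 * ratios in 1/32 steps: halving the frequency, for instance, gives
 * DIV_ROUND_CLOSEST(32 * f/2, f) = 16, i.e. a ratio of 0.5. The loop
 * below then copies each shuffle's gating position (coarse UI and fine
 * PI, P0 and P1, per byte and rank) into the pre_tdqsck registers.
 */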
2688 void dramc_dqs_precalculation_preset(void)
2689 {
2690  u32 jump_ratio_index = 0;
2691  u16 jump_ratio[DRAM_DFS_SHUFFLE_MAX * HW_REG_SHUFFLE_MAX] = {0};
2692  u32 u4value = 0, u4value1 = 0;
2693 
2694  for (u8 shu_src = 0; shu_src < HW_REG_SHUFFLE_MAX; shu_src++)
2695  for (u8 shu_dst = 0; shu_dst < HW_REG_SHUFFLE_MAX; shu_dst++) {
2696  if (shu_src == shu_dst)
2697  continue;
2698  if (shu_src >= DRAM_DFS_SHUFFLE_MAX ||
2699  shu_dst >= DRAM_DFS_SHUFFLE_MAX) {
2700  jump_ratio_index++;
2701  continue;
2702  }
2703 
2704  jump_ratio[jump_ratio_index] = DIV_ROUND_CLOSEST(
2705  (get_shu_freq(shu_dst) >> 1) * 32,
2706  get_shu_freq(shu_src) >> 1);
2707  dramc_dbg("Jump_RATIO [%d]: %x Freq %d -> %d DDR%d ->"
2708  " DDR%d\n",
2709  jump_ratio_index,
2710  jump_ratio[jump_ratio_index],
2711  shu_src + 1, shu_dst + 1,
2712  get_shu_freq(shu_src), get_shu_freq(shu_dst));
2713  jump_ratio_index++;
2714  }
2715 
2716  for (size_t chn = 0; chn < CHANNEL_MAX; chn++) {
2717  struct dramc_ao_regs_shu *shu = &ch[chn].ao.shu[0];
2718  struct dramc_ao_regs_rk *rk = &ch[chn].ao.rk[0];
2719  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[0],
2720  PRE_TDQSCK1_TDQSCK_PRECAL_HW, 1);
2721  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[1],
2722  PRE_TDQSCK2_TDDQSCK_JUMP_RATIO0, jump_ratio[0],
2723  PRE_TDQSCK2_TDDQSCK_JUMP_RATIO1, jump_ratio[1],
2724  PRE_TDQSCK2_TDDQSCK_JUMP_RATIO2, jump_ratio[2],
2725  PRE_TDQSCK2_TDDQSCK_JUMP_RATIO3, jump_ratio[3]);
2726  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[2],
2727  PRE_TDQSCK3_TDDQSCK_JUMP_RATIO4, jump_ratio[4],
2728  PRE_TDQSCK3_TDDQSCK_JUMP_RATIO5, jump_ratio[5],
2729  PRE_TDQSCK3_TDDQSCK_JUMP_RATIO6, jump_ratio[6],
2730  PRE_TDQSCK3_TDDQSCK_JUMP_RATIO7, jump_ratio[7]);
2731  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[3],
2732  PRE_TDQSCK4_TDDQSCK_JUMP_RATIO8, jump_ratio[8],
2733  PRE_TDQSCK4_TDDQSCK_JUMP_RATIO9, jump_ratio[9],
2734  PRE_TDQSCK4_TDDQSCK_JUMP_RATIO10, jump_ratio[10],
2735  PRE_TDQSCK4_TDDQSCK_JUMP_RATIO11, jump_ratio[11]);
2736 
2737  for (u8 rnk = RANK_0; rnk < RANK_MAX; rnk++) {
2738  /* Shuffle 0 */
2739  u4value = READ32_BITFIELD(
2740  &shu[0].rk[rnk].selph_dqsg0,
2741  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED);
2742  u4value1 = READ32_BITFIELD(
2743  &shu[0].rk[rnk].selph_dqsg1,
2744  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED);
2745  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[0],
2746  RK0_PRE_TDQSCK1_TDQSCK_UIFREQ1_B0R0,
2747  (u4value << 3) | u4value1);
2748  u4value = READ32_BITFIELD(
2749  &shu[0].rk[rnk].dqsien,
2750  SHURK0_DQSIEN_R0DQS0IEN);
2751  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[0],
2752  RK0_PRE_TDQSCK1_TDQSCK_PIFREQ1_B0R0,
2753  u4value);
2754  u4value = READ32_BITFIELD(
2755  &shu[0].rk[rnk].selph_dqsg0,
2756  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED_P1);
2757  u4value1 = READ32_BITFIELD(
2758  &shu[0].rk[rnk].selph_dqsg1,
2759  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED_P1);
2760  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[2],
2761  RK0_PRE_TDQSCK3_TDQSCK_UIFREQ1_P1_B0R0,
2762  (u4value << 3) | u4value1);
2763  /* Shuffle 1 */
2764  u4value = READ32_BITFIELD(
2765  &shu[1].rk[rnk].selph_dqsg0,
2766  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED);
2767  u4value1 = READ32_BITFIELD(
2768  &shu[1].rk[rnk].selph_dqsg1,
2769  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED);
2770  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[0],
2771  RK0_PRE_TDQSCK1_TDQSCK_UIFREQ2_B0R0,
2772  (u4value << 3) | u4value1);
2773  u4value = READ32_BITFIELD(
2774  &shu[1].rk[rnk].dqsien,
2775  SHURK0_DQSIEN_R0DQS0IEN);
2776  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[0],
2777  RK0_PRE_TDQSCK1_TDQSCK_PIFREQ2_B0R0, u4value);
2778  u4value = READ32_BITFIELD(
2779  &shu[1].rk[rnk].selph_dqsg0,
2780  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED_P1);
2781  u4value1 = READ32_BITFIELD(
2782  &shu[1].rk[rnk].selph_dqsg1,
2783  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED_P1);
2784  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[2],
2785  RK0_PRE_TDQSCK3_TDQSCK_UIFREQ2_P1_B0R0,
2786  (u4value << 3) | u4value1);
2787  /* Shuffle 2 */
2788  u4value = READ32_BITFIELD(
2789  &shu[2].rk[rnk].selph_dqsg0,
2790  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED);
2791  u4value1 = READ32_BITFIELD(
2792  &shu[2].rk[rnk].selph_dqsg1,
2793  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED);
2794  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[1],
2795  RK0_PRE_TDQSCK2_TDQSCK_UIFREQ3_B0R0,
2796  (u4value << 3) | u4value1);
2797  u4value = READ32_BITFIELD(
2798  &shu[2].rk[rnk].dqsien,
2799  SHURK0_DQSIEN_R0DQS0IEN);
2800  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[1],
2801  RK0_PRE_TDQSCK2_TDQSCK_PIFREQ3_B0R0,
2802  u4value);
2803  u4value = READ32_BITFIELD(
2804  &shu[2].rk[rnk].selph_dqsg0,
2805  SHURK0_SELPH_DQSG0_TX_DLY_DQS0_GATED_P1);
2806  u4value1 = READ32_BITFIELD(
2807  &shu[2].rk[rnk].selph_dqsg1,
2808  SHURK0_SELPH_DQSG1_REG_DLY_DQS0_GATED_P1);
2809  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[2],
2810  RK0_PRE_TDQSCK3_TDQSCK_UIFREQ3_P1_B0R0,
2811  (u4value << 3) | u4value1);
2812 
2813  /* Byte 1 */
2814  /* Shuffle 0 */
2815  u4value = READ32_BITFIELD(
2816  &shu[0].rk[rnk].selph_dqsg0,
2817  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED);
2818  u4value1 = READ32_BITFIELD(
2819  &shu[0].rk[rnk].selph_dqsg1,
2820  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED);
2821  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[3],
2822  RK0_PRE_TDQSCK4_TDQSCK_UIFREQ1_B1R0,
2823  (u4value << 3) | u4value1);
2824  u4value = READ32_BITFIELD(
2825  &shu[0].rk[rnk].dqsien,
2826  SHURK0_DQSIEN_R0DQS1IEN);
2827  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[3],
2828  RK0_PRE_TDQSCK4_TDQSCK_PIFREQ1_B1R0,
2829  u4value);
2830  u4value = READ32_BITFIELD(
2831  &shu[0].rk[rnk].selph_dqsg0,
2832  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED_P1);
2833  u4value1 = READ32_BITFIELD(
2834  &shu[0].rk[rnk].selph_dqsg1,
2835  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED_P1);
2836  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[5],
2837  RK0_PRE_TDQSCK6_TDQSCK_UIFREQ1_P1_B1R0,
2838  (u4value << 3) | u4value1);
2839  /* Shuffle 1 */
2840  u4value = READ32_BITFIELD(
2841  &shu[1].rk[rnk].selph_dqsg0,
2842  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED);
2843  u4value1 = READ32_BITFIELD(
2844  &shu[1].rk[rnk].selph_dqsg1,
2845  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED);
2846  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[3],
2847  RK0_PRE_TDQSCK4_TDQSCK_UIFREQ2_B1R0,
2848  (u4value << 3) | u4value1);
2849  u4value = READ32_BITFIELD(
2850  &shu[1].rk[rnk].dqsien,
2851  SHURK0_DQSIEN_R0DQS1IEN);
2852  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[3],
2853  RK0_PRE_TDQSCK4_TDQSCK_PIFREQ2_B1R0,
2854  u4value);
2855  u4value = READ32_BITFIELD(
2856  &shu[1].rk[rnk].selph_dqsg0,
2857  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED_P1);
2858  u4value1 = READ32_BITFIELD(
2859  &shu[1].rk[rnk].selph_dqsg1,
2860  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED_P1);
2861  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[5],
2862  RK0_PRE_TDQSCK6_TDQSCK_UIFREQ2_P1_B1R0,
2863  (u4value << 3) | u4value1);
2864  /* Shuffle 2 */
2865  u4value = READ32_BITFIELD(
2866  &shu[2].rk[rnk].selph_dqsg0,
2867  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED);
2868  u4value1 = READ32_BITFIELD(
2869  &shu[2].rk[rnk].selph_dqsg1,
2870  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED);
2871  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[4],
2872  RK0_PRE_TDQSCK5_TDQSCK_UIFREQ3_B1R0,
2873  (u4value << 3) | u4value1);
2874  u4value = READ32_BITFIELD(
2875  &shu[2].rk[rnk].dqsien,
2876  SHURK0_DQSIEN_R0DQS1IEN);
2877  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[4],
2878  RK0_PRE_TDQSCK5_TDQSCK_PIFREQ3_B1R0,
2879  u4value);
2880  u4value = READ32_BITFIELD(
2881  &shu[2].rk[rnk].selph_dqsg0,
2882  SHURK0_SELPH_DQSG0_TX_DLY_DQS1_GATED_P1);
2883  u4value1 = READ32_BITFIELD(
2884  &shu[2].rk[rnk].selph_dqsg1,
2885  SHURK0_SELPH_DQSG1_REG_DLY_DQS1_GATED_P1);
2886  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[5],
2887  RK0_PRE_TDQSCK6_TDQSCK_UIFREQ3_P1_B1R0,
2888  (u4value << 3) | u4value1);
2889 
2890  /* Byte 2 */
2891  /* Shuffle 0 */
2892  u4value = READ32_BITFIELD(
2893  &shu[0].rk[rnk].selph_dqsg0,
2894  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED);
2895  u4value1 = READ32_BITFIELD(
2896  &shu[0].rk[rnk].selph_dqsg1,
2897  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED);
2898  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[6],
2899  RK0_PRE_TDQSCK7_TDQSCK_UIFREQ1_B2R0,
2900  (u4value << 3) | u4value1);
2901  u4value = READ32_BITFIELD(
2902  &shu[0].rk[rnk].dqsien,
2903  SHURK0_DQSIEN_R0DQS2IEN);
2904  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[6],
2905  RK0_PRE_TDQSCK7_TDQSCK_PIFREQ1_B2R0,
2906  u4value);
2907  u4value = READ32_BITFIELD(
2908  &shu[0].rk[rnk].selph_dqsg0,
2909  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED_P1);
2910  u4value1 = READ32_BITFIELD(
2911  &shu[0].rk[rnk].selph_dqsg1,
2912  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED_P1);
2913  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[8],
2914  RK0_PRE_TDQSCK9_TDQSCK_UIFREQ1_P1_B2R0,
2915  (u4value << 3) | u4value1);
2916  /* Shuffle 1 */
2917  u4value = READ32_BITFIELD(
2918  &shu[1].rk[rnk].selph_dqsg0,
2919  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED);
2920  u4value1 = READ32_BITFIELD(
2921  &shu[1].rk[rnk].selph_dqsg1,
2922  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED);
2923  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[6],
2924  RK0_PRE_TDQSCK7_TDQSCK_UIFREQ2_B2R0,
2925  (u4value << 3) | u4value1);
2926  u4value = READ32_BITFIELD(
2927  &shu[1].rk[rnk].dqsien,
2928  SHURK0_DQSIEN_R0DQS2IEN);
2929  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[6],
2930  RK0_PRE_TDQSCK7_TDQSCK_PIFREQ2_B2R0,
2931  u4value);
2932  u4value = READ32_BITFIELD(
2933  &shu[1].rk[rnk].selph_dqsg0,
2934  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED_P1);
2935  u4value1 = READ32_BITFIELD(
2936  &shu[1].rk[rnk].selph_dqsg1,
2937  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED_P1);
2938  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[8],
2939  RK0_PRE_TDQSCK9_TDQSCK_UIFREQ2_P1_B2R0,
2940  (u4value << 3) | u4value1);
2941  /* Shuffle 2 */
2942  u4value = READ32_BITFIELD(
2943  &shu[2].rk[rnk].selph_dqsg0,
2944  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED);
2945  u4value1 = READ32_BITFIELD(
2946  &shu[2].rk[rnk].selph_dqsg1,
2947  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED);
2948  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[7],
2949  RK0_PRE_TDQSCK8_TDQSCK_UIFREQ3_B2R0,
2950  (u4value << 3) | u4value1);
2951  u4value = READ32_BITFIELD(
2952  &shu[2].rk[rnk].dqsien,
2953  SHURK0_DQSIEN_R0DQS2IEN);
2954  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[7],
2955  RK0_PRE_TDQSCK8_TDQSCK_PIFREQ3_B2R0,
2956  u4value);
2957  u4value = READ32_BITFIELD(
2958  &shu[2].rk[rnk].selph_dqsg0,
2959  SHURK0_SELPH_DQSG0_TX_DLY_DQS2_GATED_P1);
2960  u4value1 = READ32_BITFIELD(
2961  &shu[2].rk[rnk].selph_dqsg1,
2962  SHURK0_SELPH_DQSG1_REG_DLY_DQS2_GATED_P1);
2963  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[8],
2964  RK0_PRE_TDQSCK9_TDQSCK_UIFREQ3_P1_B2R0,
2965  (u4value << 3) | u4value1);
2966 
2967  /* Byte 3 */
2968  /* Shuffle 0 */
2969  u4value = READ32_BITFIELD(
2970  &shu[0].rk[rnk].selph_dqsg0,
2971  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED);
2972  u4value1 = READ32_BITFIELD(
2973  &shu[0].rk[rnk].selph_dqsg1,
2974  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED);
2975  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[9],
2976  RK0_PRE_TDQSCK10_TDQSCK_UIFREQ1_B3R0,
2977  (u4value << 3) | u4value1);
2978  u4value = READ32_BITFIELD(
2979  &shu[0].rk[rnk].dqsien,
2980  SHURK0_DQSIEN_R0DQS3IEN);
2981  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[9],
2982  RK0_PRE_TDQSCK10_TDQSCK_PIFREQ1_B3R0,
2983  u4value);
2984  u4value = READ32_BITFIELD(
2985  &shu[0].rk[rnk].selph_dqsg0,
2986  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED_P1);
2987  u4value1 = READ32_BITFIELD(
2988  &shu[0].rk[rnk].selph_dqsg1,
2989  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED_P1);
2990  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[11],
2991  RK0_PRE_TDQSCK12_TDQSCK_UIFREQ1_P1_B3R0,
2992  (u4value << 3) | u4value1);
2993  /* Shuffle 1 */
2994  u4value = READ32_BITFIELD(
2995  &shu[1].rk[rnk].selph_dqsg0,
2996  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED);
2997  u4value1 = READ32_BITFIELD(
2998  &shu[1].rk[rnk].selph_dqsg1,
2999  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED);
3000  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[9],
3001  RK0_PRE_TDQSCK10_TDQSCK_UIFREQ2_B3R0,
3002  (u4value << 3) | u4value1);
3003  u4value = READ32_BITFIELD(
3004  &shu[1].rk[rnk].dqsien,
3005  SHURK0_DQSIEN_R0DQS3IEN);
3006  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[9],
3007  RK0_PRE_TDQSCK10_TDQSCK_PIFREQ2_B3R0,
3008  u4value);
3009  u4value = READ32_BITFIELD(
3010  &shu[1].rk[rnk].selph_dqsg0,
3011  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED_P1);
3012  u4value1 = READ32_BITFIELD(
3013  &shu[1].rk[rnk].selph_dqsg1,
3014  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED_P1);
3015  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[11],
3016  RK0_PRE_TDQSCK12_TDQSCK_UIFREQ2_P1_B3R0,
3017  (u4value << 3) | u4value1);
3018  /* Shuffle 2 */
3019  u4value = READ32_BITFIELD(
3020  &shu[2].rk[rnk].selph_dqsg0,
3021  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED);
3022  u4value1 = READ32_BITFIELD(
3023  &shu[2].rk[rnk].selph_dqsg1,
3024  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED);
3025  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[10],
3026  RK0_PRE_TDQSCK11_TDQSCK_UIFREQ3_B3R0,
3027  (u4value << 3) | u4value1);
3028  u4value = READ32_BITFIELD(
3029  &shu[2].rk[rnk].dqsien,
3030  SHURK0_DQSIEN_R0DQS3IEN);
3031  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[10],
3032  RK0_PRE_TDQSCK11_TDQSCK_PIFREQ3_B3R0,
3033  u4value);
3034  u4value = READ32_BITFIELD(
3035  &shu[2].rk[rnk].selph_dqsg0,
3036  SHURK0_SELPH_DQSG0_TX_DLY_DQS3_GATED_P1);
3037  u4value1 = READ32_BITFIELD(
3038  &shu[2].rk[rnk].selph_dqsg1,
3039  SHURK0_SELPH_DQSG1_REG_DLY_DQS3_GATED_P1);
3040  SET32_BITFIELDS(&rk[rnk].pre_tdqsck[11],
3041  RK0_PRE_TDQSCK12_TDQSCK_UIFREQ3_P1_B3R0,
3042  (u4value << 3) | u4value1);
3043  }
3044 
3045  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[0],
3046  PRE_TDQSCK1_TDQSCK_REG_DVFS, 0x1);
3047  SET32_BITFIELDS(&ch[chn].ao.pre_tdqsck[0],
3048  PRE_TDQSCK1_TDQSCK_HW_SW_UP_SEL, 1);
3049  }
3050 }
3051 
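/*
 * Read back identity information after calibration. MR5 carries the
 * vendor ID; MR8 bits [5:2] encode the per-rank density, decoded below
 * into gigabits (0x0 = 4 Gb up to 0x6 = 32 Gb). The density code of the
 * largest rank is reported back to the caller.
 */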
3052 void get_dram_info_after_cal(u8 *density_result, u32 rk_num)
3053 {
3054  u8 vendor_id, density, max_density = 0;
3055  u32 ddr_size, max_size = 0;
3056 
3057  vendor_id = dramc_mode_reg_read_by_rank(CHANNEL_A, RANK_0, 5);
3058  dramc_show("Vendor id is %#x\n", vendor_id);
3059 
3060  for (u8 rk = RANK_0; rk < rk_num; rk++) {
3061  density = dramc_mode_reg_read_by_rank(CHANNEL_A, rk, 8);
3062  dramc_dbg("MR8 %#x\n", density);
3063  density = (density >> 2) & 0xf;
3064 
3065  switch (density) {
3066  case 0x0:
3067  ddr_size = 4;
3068  break;
3069  case 0x1:
3070  ddr_size = 6;
3071  break;
3072  case 0x2:
3073  ddr_size = 8;
3074  break;
3075  case 0x3:
3076  ddr_size = 12;
3077  break;
3078  case 0x4:
3079  ddr_size = 16;
3080  break;
3081  case 0x5:
3082  ddr_size = 24;
3083  break;
3084  case 0x6:
3085  ddr_size = 32;
3086  break;
3087  default:
3088  ddr_size = 0;
3089  break;
3090  }
3091  if (ddr_size > max_size) {
3092  max_size = ddr_size;
3093  max_density = density;
3094  }
3095  dramc_dbg("RK%d size %dGb, density:%d\n", rk, ddr_size, max_density);
3096  }
3097 
3098  *density_result = max_density;
3099 }
3100 
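/*
 * Top-level calibration entry point. For every channel and rank this
 * runs, in order: command-bus training, write leveling, RX DQS gating,
 * the RX RD-DQC and TX per-bit window searches, DATLAT, the RX
 * test-engine window search, and the DQS oscillator measurement.
 * fast_calib selects whether results come from a fresh search or from
 * parameters cached in flash. Returns 0 on success, negative on
 * failure.
 */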
3101 int dramc_calibrate_all_channels(const struct sdram_params *pams,
3102  u8 freq_group, struct mr_value *mr, bool run_dvfs)
3103 {
3104  bool fast_calib;
3105  switch (pams->source) {
3106  case DRAMC_PARAM_SOURCE_SDRAM_CONFIG:
3107  fast_calib = false;
3108  break;
3109  case DRAMC_PARAM_SOURCE_FLASH:
3110  fast_calib = true;
3111  break;
3112  default:
3113  die("Invalid DRAM param source %u\n", pams->source);
3114  return -1;
3115  }
3116 
3117  bool test_passed;
3118  u8 rx_datlat[RANK_MAX] = {0};
3119  u16 osc_thrd_inc[RANK_MAX];
3120  u16 osc_thrd_dec[RANK_MAX];
3121  for (u8 chn = 0; chn < CHANNEL_MAX; chn++) {
3122  for (u8 rk = RANK_0; rk < pams->rank_num; rk++) {
3123  dramc_dbg("Start K: freq=%d, ch=%d, rank=%d\n",
3124  freq_group, chn, rk);
3125  dramc_cmd_bus_training(chn, rk, freq_group, pams,
3126  mr, run_dvfs);
3127  dramc_write_leveling(chn, rk, freq_group, pams->wr_level);
3128  dramc_auto_refresh_switch(chn, true);
3129 
3130  dramc_rx_dqs_gating_cal(chn, rk, freq_group, pams,
3131  fast_calib, mr);
3132  dramc_window_perbit_cal(chn, rk, freq_group,
3133  RX_WIN_RD_DQC, pams, fast_calib);
3134  dramc_window_perbit_cal(chn, rk, freq_group,
3135  TX_WIN_DQ_DQM, pams, fast_calib);
3136  dramc_window_perbit_cal(chn, rk, freq_group,
3137  TX_WIN_DQ_ONLY, pams, fast_calib);
3138  rx_datlat[rk] = dramc_rx_datlat_cal(chn, rk, freq_group,
3139  pams, fast_calib, &test_passed);
3140  if (!test_passed)
3141  return -2;
3142  dramc_window_perbit_cal(chn, rk, freq_group,
3143  RX_WIN_TEST_ENG, pams, fast_calib);
3144  dramc_auto_refresh_switch(chn, false);
3145 
3146  dqsosc_auto(chn, rk, freq_group, osc_thrd_inc, osc_thrd_dec);
3147  }
3148 
3149  dqsosc_shu_settings(chn, freq_group, osc_thrd_inc, osc_thrd_dec);
3150  dramc_rx_dqs_gating_post_process(chn, freq_group, pams->rank_num);
3151  dramc_dual_rank_rx_datlat_cal(chn, freq_group, rx_datlat[0], rx_datlat[1]);
3152  }
3153  return 0;
3154 }