/*
 * Copyright (c) 2015, The Linux Foundation. All rights reserved.
 *
 * This software is licensed under the terms of the GNU General Public
 * License version 2, as published by the Free Software Foundation, and
 * may be copied, distributed, and modified under those terms.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/clk-provider.h>
#include <linux/regmap.h>
#include <linux/delay.h>

#include "clk-alpha-pll.h"
#include "common.h"

#define PLL_MODE		0x00
# define PLL_OUTCTRL		BIT(0)
# define PLL_BYPASSNL		BIT(1)
# define PLL_RESET_N		BIT(2)
# define PLL_OFFLINE_REQ	BIT(7)
# define PLL_LOCK_COUNT_SHIFT	8
# define PLL_LOCK_COUNT_MASK	0x3f
# define PLL_BIAS_COUNT_SHIFT	14
# define PLL_BIAS_COUNT_MASK	0x3f
# define PLL_VOTE_FSM_ENA	BIT(20)
# define PLL_FSM_ENA		BIT(20)
# define PLL_VOTE_FSM_RESET	BIT(21)
# define PLL_OFFLINE_ACK	BIT(28)
# define PLL_ACTIVE_FLAG	BIT(30)
# define PLL_LOCK_DET		BIT(31)

#define PLL_L_VAL		0x04
#define PLL_ALPHA_VAL		0x08
#define PLL_ALPHA_VAL_U		0x0c

#define PLL_USER_CTL		0x10
# define PLL_POST_DIV_SHIFT	8
# define PLL_POST_DIV_MASK	0xf
# define PLL_ALPHA_EN		BIT(24)
# define PLL_VCO_SHIFT		20
# define PLL_VCO_MASK		0x3

#define PLL_USER_CTL_U		0x14

#define PLL_CONFIG_CTL		0x18
#define PLL_CONFIG_CTL_U	0x20
#define PLL_TEST_CTL		0x1c
#define PLL_TEST_CTL_U		0x20
#define PLL_STATUS		0x24

/*
 * Even though 40 bits are present, use only 32 for ease of calculation.
 */
#define ALPHA_REG_BITWIDTH	40
#define ALPHA_BITWIDTH		32
#define ALPHA_16BIT_MASK	0xffff

#define to_clk_alpha_pll(_hw) container_of(to_clk_regmap(_hw), \
					   struct clk_alpha_pll, clkr)

#define to_clk_alpha_pll_postdiv(_hw) container_of(to_clk_regmap(_hw), \
					   struct clk_alpha_pll_postdiv, clkr)

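/*
 * Poll PLL_MODE until all bits in @mask are set (or, when @inverse is
 * true, until they are all clear). The PLL gets roughly 100us
 * (100 iterations x 1us) to react before we warn and return -ETIMEDOUT.
 */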
static int wait_for_pll(struct clk_alpha_pll *pll, u32 mask, bool inverse,
			const char *action)
{
	u32 val, off;
	int count;
	int ret;
	const char *name = clk_hw_get_name(&pll->clkr.hw);

	off = pll->offset;
	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return ret;

	for (count = 100; count > 0; count--) {
		ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
		if (ret)
			return ret;
		if (inverse && !(val & mask))
			return 0;
		else if ((val & mask) == mask)
			return 0;

		udelay(1);
	}

	WARN(1, "%s failed to %s!\n", name, action);
	return -ETIMEDOUT;
}

#define wait_for_pll_enable_active(pll) \
	wait_for_pll(pll, PLL_ACTIVE_FLAG, 0, "enable")

#define wait_for_pll_enable_lock(pll) \
	wait_for_pll(pll, PLL_LOCK_DET, 0, "enable")

#define wait_for_pll_disable(pll) \
	wait_for_pll(pll, PLL_ACTIVE_FLAG, 1, "disable")

#define wait_for_pll_offline(pll) \
	wait_for_pll(pll, PLL_OFFLINE_ACK, 0, "offline")

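/*
 * One-time configuration from an alpha_pll_config: program the integer (L)
 * and fractional (alpha) multipliers and the CONFIG_CTL words, then update
 * the output-enable, divider and VCO fields in USER_CTL. PLLs flagged with
 * SUPPORTS_FSM_MODE are additionally handed over to the voting FSM.
 */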
void clk_alpha_pll_configure(struct clk_alpha_pll *pll, struct regmap *regmap,
			     const struct alpha_pll_config *config)
{
	u32 val, mask;
	u32 off = pll->offset;

	regmap_write(regmap, off + PLL_L_VAL, config->l);
	regmap_write(regmap, off + PLL_ALPHA_VAL, config->alpha);
	regmap_write(regmap, off + PLL_CONFIG_CTL, config->config_ctl_val);
	regmap_write(regmap, off + PLL_CONFIG_CTL_U, config->config_ctl_hi_val);

	val = config->main_output_mask;
	val |= config->aux_output_mask;
	val |= config->aux2_output_mask;
	val |= config->early_output_mask;
	val |= config->pre_div_val;
	val |= config->post_div_val;
	val |= config->vco_val;

	mask = config->main_output_mask;
	mask |= config->aux_output_mask;
	mask |= config->aux2_output_mask;
	mask |= config->early_output_mask;
	mask |= config->pre_div_mask;
	mask |= config->post_div_mask;
	mask |= config->vco_mask;

	regmap_update_bits(regmap, off + PLL_USER_CTL, mask, val);

	if (pll->flags & SUPPORTS_FSM_MODE)
		qcom_pll_set_fsm_mode(regmap, off + PLL_MODE, 6, 0);
}

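/*
 * In HW FSM mode the hardware state machine takes care of the bring-up
 * sequencing; software only sets PLL_FSM_ENA (clearing any pending offline
 * request) and then waits for the PLL to report itself active.
 */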
static int clk_alpha_pll_hwfsm_enable(struct clk_hw *hw)
{
	int ret;
	u32 val, off;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);

	off = pll->offset;
	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return ret;

	val |= PLL_FSM_ENA;

	if (pll->flags & SUPPORTS_OFFLINE_REQ)
		val &= ~PLL_OFFLINE_REQ;

	ret = regmap_write(pll->clkr.regmap, off + PLL_MODE, val);
	if (ret)
		return ret;

	/* Make sure enable request goes through before waiting for update */
	mb();

	return wait_for_pll_enable_active(pll);
}

static void clk_alpha_pll_hwfsm_disable(struct clk_hw *hw)
{
	int ret;
	u32 val, off;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);

	off = pll->offset;
	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return;

	if (pll->flags & SUPPORTS_OFFLINE_REQ) {
		ret = regmap_update_bits(pll->clkr.regmap, off + PLL_MODE,
					 PLL_OFFLINE_REQ, PLL_OFFLINE_REQ);
		if (ret)
			return;

		ret = wait_for_pll_offline(pll);
		if (ret)
			return;
	}

	/* Disable hwfsm */
	ret = regmap_update_bits(pll->clkr.regmap, off + PLL_MODE,
				 PLL_FSM_ENA, 0);
	if (ret)
		return;

	wait_for_pll_disable(pll);
}

static int pll_is_enabled(struct clk_hw *hw, u32 mask)
{
	int ret;
	u32 val, off;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);

	off = pll->offset;
	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return ret;

	return !!(val & mask);
}

static int clk_alpha_pll_hwfsm_is_enabled(struct clk_hw *hw)
{
	return pll_is_enabled(hw, PLL_ACTIVE_FLAG);
}

static int clk_alpha_pll_is_enabled(struct clk_hw *hw)
{
	return pll_is_enabled(hw, PLL_LOCK_DET);
}

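/*
 * Software-controlled enable sequence: release the bypass, wait the
 * required 5us, de-assert reset, wait for lock detection and only then
 * enable the output. PLLs already under FSM control are simply voted on.
 */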
static int clk_alpha_pll_enable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val, mask, off;

	off = pll->offset;

	mask = PLL_OUTCTRL | PLL_RESET_N | PLL_BYPASSNL;
	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return ret;

	/* If in FSM mode, just vote for it */
	if (val & PLL_VOTE_FSM_ENA) {
		ret = clk_enable_regmap(hw);
		if (ret)
			return ret;
		return wait_for_pll_enable_active(pll);
	}

	/* Skip if already enabled */
	if ((val & mask) == mask)
		return 0;

	ret = regmap_update_bits(pll->clkr.regmap, off + PLL_MODE,
				 PLL_BYPASSNL, PLL_BYPASSNL);
	if (ret)
		return ret;

	/*
	 * H/W requires a 5us delay between disabling the bypass and
	 * de-asserting the reset.
	 */
	mb();
	udelay(5);

	ret = regmap_update_bits(pll->clkr.regmap, off + PLL_MODE,
				 PLL_RESET_N, PLL_RESET_N);
	if (ret)
		return ret;

	ret = wait_for_pll_enable_lock(pll);
	if (ret)
		return ret;

	ret = regmap_update_bits(pll->clkr.regmap, off + PLL_MODE,
				 PLL_OUTCTRL, PLL_OUTCTRL);

	/* Ensure that the write above goes through before returning. */
	mb();
	return ret;
}

static void clk_alpha_pll_disable(struct clk_hw *hw)
{
	int ret;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 val, mask, off;

	off = pll->offset;

	ret = regmap_read(pll->clkr.regmap, off + PLL_MODE, &val);
	if (ret)
		return;

	/* If in FSM mode, just unvote it */
	if (val & PLL_VOTE_FSM_ENA) {
		clk_disable_regmap(hw);
		return;
	}

	mask = PLL_OUTCTRL;
	regmap_update_bits(pll->clkr.regmap, off + PLL_MODE, mask, 0);

	/* Delay of 2 output clock ticks required until output is disabled */
	mb();
	udelay(1);

	mask = PLL_RESET_N | PLL_BYPASSNL;
	regmap_update_bits(pll->clkr.regmap, off + PLL_MODE, mask, 0);
}

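/*
 * output rate = parent_rate * (l + a / 2^ALPHA_BITWIDTH)
 *
 * e.g. with a 19.2 MHz parent, l = 62 and a = 0x80000000 (i.e. 0.5),
 * the PLL runs at 19.2 MHz * 62.5 = 1200 MHz.
 */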
static unsigned long alpha_pll_calc_rate(u64 prate, u32 l, u32 a)
{
	return (prate * l) + ((prate * a) >> ALPHA_BITWIDTH);
}

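/*
 * Split the requested rate into the integer multiplier l (rate / prate)
 * and a 32-bit fraction, rounding the fraction up so the rate computed
 * from l and a never falls below the request.
 */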
static unsigned long
alpha_pll_round_rate(unsigned long rate, unsigned long prate, u32 *l, u64 *a)
{
	u64 remainder;
	u64 quotient;

	quotient = rate;
	remainder = do_div(quotient, prate);
	*l = quotient;

	if (!remainder) {
		*a = 0;
		return rate;
	}

	/* Upper ALPHA_BITWIDTH bits of Alpha */
	quotient = remainder << ALPHA_BITWIDTH;
	remainder = do_div(quotient, prate);

	if (remainder)
		quotient++;

	*a = quotient;
	return alpha_pll_calc_rate(prate, *l, *a);
}

static const struct pll_vco *
alpha_pll_find_vco(const struct clk_alpha_pll *pll, unsigned long rate)
{
	const struct pll_vco *v = pll->vco_table;
	const struct pll_vco *end = v + pll->num_vco;

	for (; v < end; v++)
		if (rate >= v->min_freq && rate <= v->max_freq)
			return v;

	return NULL;
}

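/*
 * Read l and, when fractional mode is enabled, alpha back from the
 * hardware. PLLs flagged with SUPPORTS_16BIT_ALPHA keep the fraction in
 * the low 16 bits of PLL_ALPHA_VAL; otherwise the full 40-bit value is
 * read and reduced to the upper ALPHA_BITWIDTH bits used by the driver.
 */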
static unsigned long
clk_alpha_pll_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	u32 l, low, high, ctl;
	u64 a = 0, prate = parent_rate;
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 off = pll->offset;

	regmap_read(pll->clkr.regmap, off + PLL_L_VAL, &l);

	regmap_read(pll->clkr.regmap, off + PLL_USER_CTL, &ctl);
	if (ctl & PLL_ALPHA_EN) {
		regmap_read(pll->clkr.regmap, off + PLL_ALPHA_VAL, &low);
		if (pll->flags & SUPPORTS_16BIT_ALPHA) {
			a = low & ALPHA_16BIT_MASK;
		} else {
			regmap_read(pll->clkr.regmap, off + PLL_ALPHA_VAL_U,
				    &high);
			a = (u64)high << 32 | low;
			a >>= ALPHA_REG_BITWIDTH - ALPHA_BITWIDTH;
		}
	}

	return alpha_pll_calc_rate(prate, l, a);
}

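/*
 * Program a new rate: write l and alpha for the VCO range covering the
 * requested rate, select that range in USER_CTL and enable fractional
 * (alpha) mode. Rates outside every entry of the VCO table are rejected.
 */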
static int clk_alpha_pll_set_rate(struct clk_hw *hw, unsigned long rate,
				  unsigned long prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	const struct pll_vco *vco;
	u32 l, off = pll->offset;
	u64 a;

	rate = alpha_pll_round_rate(rate, prate, &l, &a);
	vco = alpha_pll_find_vco(pll, rate);
	if (!vco) {
		pr_err("alpha pll not in a valid vco range\n");
		return -EINVAL;
	}

	regmap_write(pll->clkr.regmap, off + PLL_L_VAL, l);

	if (pll->flags & SUPPORTS_16BIT_ALPHA) {
		regmap_write(pll->clkr.regmap, off + PLL_ALPHA_VAL,
			     a & ALPHA_16BIT_MASK);
	} else {
		a <<= (ALPHA_REG_BITWIDTH - ALPHA_BITWIDTH);
		regmap_write(pll->clkr.regmap, off + PLL_ALPHA_VAL_U, a >> 32);
	}

	regmap_update_bits(pll->clkr.regmap, off + PLL_USER_CTL,
			   PLL_VCO_MASK << PLL_VCO_SHIFT,
			   vco->val << PLL_VCO_SHIFT);

	regmap_update_bits(pll->clkr.regmap, off + PLL_USER_CTL, PLL_ALPHA_EN,
			   PLL_ALPHA_EN);

	return 0;
}

static long clk_alpha_pll_round_rate(struct clk_hw *hw, unsigned long rate,
				     unsigned long *prate)
{
	struct clk_alpha_pll *pll = to_clk_alpha_pll(hw);
	u32 l;
	u64 a;
	unsigned long min_freq, max_freq;

	rate = alpha_pll_round_rate(rate, *prate, &l, &a);
	if (alpha_pll_find_vco(pll, rate))
		return rate;

	min_freq = pll->vco_table[0].min_freq;
	max_freq = pll->vco_table[pll->num_vco - 1].max_freq;

	return clamp(rate, min_freq, max_freq);
}

const struct clk_ops clk_alpha_pll_ops = {
	.enable = clk_alpha_pll_enable,
	.disable = clk_alpha_pll_disable,
	.is_enabled = clk_alpha_pll_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_alpha_pll_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_ops);

const struct clk_ops clk_alpha_pll_hwfsm_ops = {
	.enable = clk_alpha_pll_hwfsm_enable,
	.disable = clk_alpha_pll_hwfsm_disable,
	.is_enabled = clk_alpha_pll_hwfsm_is_enabled,
	.recalc_rate = clk_alpha_pll_recalc_rate,
	.round_rate = clk_alpha_pll_round_rate,
	.set_rate = clk_alpha_pll_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_hwfsm_ops);

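/*
 * The post divider is encoded as a bit mask in USER_CTL (0x0 = /1,
 * 0x1 = /2, 0x3 = /4, 0x7 = /8, 0xf = /16), so fls() of the field gives
 * log2 of the divider.
 */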
static unsigned long
clk_alpha_pll_postdiv_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	u32 ctl;

	regmap_read(pll->clkr.regmap, pll->offset + PLL_USER_CTL, &ctl);

	ctl >>= PLL_POST_DIV_SHIFT;
	ctl &= PLL_POST_DIV_MASK;

	return parent_rate >> fls(ctl);
}

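/* Register value to divider mapping; only power-of-two dividers exist. */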
static const struct clk_div_table clk_alpha_div_table[] = {
	{ 0x0, 1 },
	{ 0x1, 2 },
	{ 0x3, 4 },
	{ 0x7, 8 },
	{ 0xf, 16 },
	{ }
};

static long
clk_alpha_pll_postdiv_round_rate(struct clk_hw *hw, unsigned long rate,
				 unsigned long *prate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);

	return divider_round_rate(hw, rate, prate, clk_alpha_div_table,
				  pll->width, CLK_DIVIDER_POWER_OF_TWO);
}

static int clk_alpha_pll_postdiv_set_rate(struct clk_hw *hw, unsigned long rate,
					  unsigned long parent_rate)
{
	struct clk_alpha_pll_postdiv *pll = to_clk_alpha_pll_postdiv(hw);
	int div;

	/* 16 -> 0xf, 8 -> 0x7, 4 -> 0x3, 2 -> 0x1, 1 -> 0x0 */
	div = DIV_ROUND_UP_ULL((u64)parent_rate, rate) - 1;

	return regmap_update_bits(pll->clkr.regmap, pll->offset + PLL_USER_CTL,
				  PLL_POST_DIV_MASK << PLL_POST_DIV_SHIFT,
				  div << PLL_POST_DIV_SHIFT);
}

const struct clk_ops clk_alpha_pll_postdiv_ops = {
	.recalc_rate = clk_alpha_pll_postdiv_recalc_rate,
	.round_rate = clk_alpha_pll_postdiv_round_rate,
	.set_rate = clk_alpha_pll_postdiv_set_rate,
};
EXPORT_SYMBOL_GPL(clk_alpha_pll_postdiv_ops);