author     Stephen Boyd <sboyd@codeaurora.org>     2014-01-15 10:47:25 -0800
committer  Mike Turquette <mturquette@linaro.org>  2014-01-16 12:01:00 -0800
commit     bcd61c0f535a04a2aaa3f3ba27e60fae681fc88f (patch)
tree       81fbb7830130ed91c207a3f088a9b5683a9f089e /drivers/clk/qcom/clk-rcg.c
parent     9e2631313c463c11645db046beb9bdecaf28b62f (diff)
clk: qcom: Add support for root clock generators (RCGs)
Add support for the root clock generators on Qualcomm devices. RCGs are
highly customizable mux/divider/counter clocks that can be used to
generate almost any desired rate from an input source that is faster
than that rate.

Signed-off-by: Stephen Boyd <sboyd@codeaurora.org>
Signed-off-by: Mike Turquette <mturquette@linaro.org>
Diffstat (limited to 'drivers/clk/qcom/clk-rcg.c')
-rw-r--r--  drivers/clk/qcom/clk-rcg.c  517
1 file changed, 517 insertions, 0 deletions
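As a rough illustration of what this patch enables, here is a hedged sketch of how a consumer driver might request a rate from an RCG-backed clock through the common clock framework. It is a hypothetical usage example only: the "core" connection id and the 48 MHz target are made up and do not come from this patch.

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>

/* Hypothetical consumer; the con_id "core" and the 48 MHz rate are illustrative. */
static int example_enable_core_clk(struct device *dev)
{
	struct clk *clk;
	int ret;

	clk = devm_clk_get(dev, "core");
	if (IS_ERR(clk))
		return PTR_ERR(clk);

	/* The clock framework asks the RCG to pick a source, pre-divider and M/N values. */
	ret = clk_set_rate(clk, 48000000);
	if (ret)
		return ret;

	return clk_prepare_enable(clk);
}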
diff --git a/drivers/clk/qcom/clk-rcg.c b/drivers/clk/qcom/clk-rcg.c
new file mode 100644
index 00000000000..abfc2b675ae
--- /dev/null
+++ b/drivers/clk/qcom/clk-rcg.c
@@ -0,0 +1,517 @@
+/*
+ * Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * This software is licensed under the terms of the GNU General Public
+ * License version 2, as published by the Free Software Foundation, and
+ * may be copied, distributed, and modified under those terms.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ */
+
+#include <linux/kernel.h>
+#include <linux/bitops.h>
+#include <linux/err.h>
+#include <linux/export.h>
+#include <linux/clk-provider.h>
+#include <linux/regmap.h>
+
+#include <asm/div64.h>
+
+#include "clk-rcg.h"
+
+static u32 ns_to_src(struct src_sel *s, u32 ns)
+{
+ ns >>= s->src_sel_shift;
+ ns &= SRC_SEL_MASK;
+ return ns;
+}
+
+static u32 src_to_ns(struct src_sel *s, u8 src, u32 ns)
+{
+ u32 mask;
+
+ mask = SRC_SEL_MASK;
+ mask <<= s->src_sel_shift;
+ ns &= ~mask;
+
+ ns |= src << s->src_sel_shift;
+ return ns;
+}
+
+static u8 clk_rcg_get_parent(struct clk_hw *hw)
+{
+ struct clk_rcg *rcg = to_clk_rcg(hw);
+ int num_parents = __clk_get_num_parents(hw->clk);
+ u32 ns;
+ int i;
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ ns = ns_to_src(&rcg->s, ns);
+ for (i = 0; i < num_parents; i++)
+ if (ns == rcg->s.parent_map[i])
+ return i;
+
+ return -EINVAL;
+}
+
+static int reg_to_bank(struct clk_dyn_rcg *rcg, u32 bank)
+{
+ bank &= BIT(rcg->mux_sel_bit);
+ return !!bank;
+}
+
+static u8 clk_dyn_rcg_get_parent(struct clk_hw *hw)
+{
+ struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
+ int num_parents = __clk_get_num_parents(hw->clk);
+ u32 ns, ctl;
+ int bank;
+ int i;
+ struct src_sel *s;
+
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
+ bank = reg_to_bank(rcg, ctl);
+ s = &rcg->s[bank];
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ ns = ns_to_src(s, ns);
+
+ for (i = 0; i < num_parents; i++)
+ if (ns == s->parent_map[i])
+ return i;
+
+ return -EINVAL;
+}
+
+static int clk_rcg_set_parent(struct clk_hw *hw, u8 index)
+{
+ struct clk_rcg *rcg = to_clk_rcg(hw);
+ u32 ns;
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ ns = src_to_ns(&rcg->s, rcg->s.parent_map[index], ns);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+
+ return 0;
+}
+
+static u32 md_to_m(struct mn *mn, u32 md)
+{
+ md >>= mn->m_val_shift;
+ md &= BIT(mn->width) - 1;
+ return md;
+}
+
+static u32 ns_to_pre_div(struct pre_div *p, u32 ns)
+{
+ ns >>= p->pre_div_shift;
+ ns &= BIT(p->pre_div_width) - 1;
+ return ns;
+}
+
+static u32 pre_div_to_ns(struct pre_div *p, u8 pre_div, u32 ns)
+{
+ u32 mask;
+
+ mask = BIT(p->pre_div_width) - 1;
+ mask <<= p->pre_div_shift;
+ ns &= ~mask;
+
+ ns |= pre_div << p->pre_div_shift;
+ return ns;
+}
+
+static u32 mn_to_md(struct mn *mn, u32 m, u32 n, u32 md)
+{
+ u32 mask, mask_w;
+
+ mask_w = BIT(mn->width) - 1;
+ mask = (mask_w << mn->m_val_shift) | mask_w;
+ md &= ~mask;
+
+ if (n) {
+ m <<= mn->m_val_shift;
+ md |= m;
+ md |= ~n & mask_w;
+ }
+
+ return md;
+}
+
+static u32 ns_m_to_n(struct mn *mn, u32 ns, u32 m)
+{
+ ns = ~ns >> mn->n_val_shift;
+ ns &= BIT(mn->width) - 1;
+ return ns + m;
+}
+
+static u32 reg_to_mnctr_mode(struct mn *mn, u32 val)
+{
+ val >>= mn->mnctr_mode_shift;
+ val &= MNCTR_MODE_MASK;
+ return val;
+}
+
+static u32 mn_to_ns(struct mn *mn, u32 m, u32 n, u32 ns)
+{
+ u32 mask;
+
+ mask = BIT(mn->width) - 1;
+ mask <<= mn->n_val_shift;
+ ns &= ~mask;
+
+ if (n) {
+ n = n - m;
+ n = ~n;
+ n &= BIT(mn->width) - 1;
+ n <<= mn->n_val_shift;
+ ns |= n;
+ }
+
+ return ns;
+}
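+
+/*
+ * Worked example of the N encoding above (values are illustrative, not
+ * taken from any real frequency table): with width = 8, m = 1 and n = 4,
+ * mn_to_ns() stores ~(n - m) = ~3 = 0xfc in the N field of the NS
+ * register, and ns_m_to_n() recovers n as (~0xfc & 0xff) + m = 3 + 1 = 4.
+ */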
+
+static u32 mn_to_reg(struct mn *mn, u32 m, u32 n, u32 val)
+{
+ u32 mask;
+
+ mask = MNCTR_MODE_MASK << mn->mnctr_mode_shift;
+ mask |= BIT(mn->mnctr_en_bit);
+ val &= ~mask;
+
+ if (n) {
+ val |= BIT(mn->mnctr_en_bit);
+ val |= MNCTR_MODE_DUAL << mn->mnctr_mode_shift;
+ }
+
+ return val;
+}
+
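+/*
+ * Program a register bank with the source select and M/N or pre-divider
+ * values from @f.  If the clock is currently running, the inactive bank
+ * is programmed and the mux select bit is then toggled, so the new
+ * settings take effect without stopping the clock; otherwise the active
+ * bank is updated in place.
+ */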
+static void configure_bank(struct clk_dyn_rcg *rcg, const struct freq_tbl *f)
+{
+ u32 ns, md, ctl, *regp;
+ int bank, new_bank;
+ struct mn *mn;
+ struct pre_div *p;
+ struct src_sel *s;
+ bool enabled;
+ u32 md_reg;
+ u32 bank_reg;
+ bool banked_mn = !!rcg->mn[1].width;
+ struct clk_hw *hw = &rcg->clkr.hw;
+
+ enabled = __clk_is_enabled(hw->clk);
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
+
+ if (banked_mn) {
+ regp = &ctl;
+ bank_reg = rcg->clkr.enable_reg;
+ } else {
+ regp = &ns;
+ bank_reg = rcg->ns_reg;
+ }
+
+ bank = reg_to_bank(rcg, *regp);
+ new_bank = enabled ? !bank : bank;
+
+ if (banked_mn) {
+ mn = &rcg->mn[new_bank];
+ md_reg = rcg->md_reg[new_bank];
+
+ ns |= BIT(mn->mnctr_reset_bit);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+
+ regmap_read(rcg->clkr.regmap, md_reg, &md);
+ md = mn_to_md(mn, f->m, f->n, md);
+ regmap_write(rcg->clkr.regmap, md_reg, md);
+
+ ns = mn_to_ns(mn, f->m, f->n, ns);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+
+ ctl = mn_to_reg(mn, f->m, f->n, ctl);
+ regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
+
+ ns &= ~BIT(mn->mnctr_reset_bit);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+ } else {
+ p = &rcg->p[new_bank];
+ ns = pre_div_to_ns(p, f->pre_div - 1, ns);
+ }
+
+ s = &rcg->s[new_bank];
+ ns = src_to_ns(s, s->parent_map[f->src], ns);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+
+ if (enabled) {
+ *regp ^= BIT(rcg->mux_sel_bit);
+ regmap_write(rcg->clkr.regmap, bank_reg, *regp);
+ }
+}
+
+static int clk_dyn_rcg_set_parent(struct clk_hw *hw, u8 index)
+{
+ struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
+ u32 ns, ctl, md, reg;
+ int bank;
+ struct freq_tbl f = { 0 };
+ bool banked_mn = !!rcg->mn[1].width;
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
+ reg = banked_mn ? ctl : ns;
+
+ bank = reg_to_bank(rcg, reg);
+
+ if (banked_mn) {
+ regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
+ f.m = md_to_m(&rcg->mn[bank], md);
+ f.n = ns_m_to_n(&rcg->mn[bank], ns, f.m);
+ } else {
+ f.pre_div = ns_to_pre_div(&rcg->p[bank], ns) + 1;
+ }
+ f.src = index;
+
+ configure_bank(rcg, &f);
+
+ return 0;
+}
+
+/*
+ * Calculate m/n:d rate
+ *
+ *          parent_rate     m
+ *   rate = ----------- x  ---
+ *            pre_div       n
+ */
+static unsigned long
+calc_rate(unsigned long rate, u32 m, u32 n, u32 mode, u32 pre_div)
+{
+ if (pre_div)
+ rate /= pre_div + 1;
+
+ if (mode) {
+ u64 tmp = rate;
+ tmp *= m;
+ do_div(tmp, n);
+ rate = tmp;
+ }
+
+ return rate;
+}
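+
+/*
+ * Worked example with made-up values: calc_rate(800000000, 1, 4, 1, 3)
+ * divides the parent by pre_div + 1 = 4 and then applies m/n = 1/4,
+ * returning 50000000.
+ */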
+
+static unsigned long
+clk_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
+{
+ struct clk_rcg *rcg = to_clk_rcg(hw);
+ u32 pre_div, m = 0, n = 0, ns, md, mode = 0;
+ struct mn *mn = &rcg->mn;
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ pre_div = ns_to_pre_div(&rcg->p, ns);
+
+ if (rcg->mn.width) {
+ regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
+ m = md_to_m(mn, md);
+ n = ns_m_to_n(mn, ns, m);
+ /* MN counter mode is in hw.enable_reg sometimes */
+ if (rcg->clkr.enable_reg != rcg->ns_reg)
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &mode);
+ else
+ mode = ns;
+ mode = reg_to_mnctr_mode(mn, mode);
+ }
+
+ return calc_rate(parent_rate, m, n, mode, pre_div);
+}
+
+static unsigned long
+clk_dyn_rcg_recalc_rate(struct clk_hw *hw, unsigned long parent_rate)
+{
+ struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
+ u32 m, n, pre_div, ns, md, mode, reg;
+ int bank;
+ struct mn *mn;
+ bool banked_mn = !!rcg->mn[1].width;
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+
+ if (banked_mn)
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &reg);
+ else
+ reg = ns;
+
+ bank = reg_to_bank(rcg, reg);
+
+ if (banked_mn) {
+ mn = &rcg->mn[bank];
+ regmap_read(rcg->clkr.regmap, rcg->md_reg[bank], &md);
+ m = md_to_m(mn, md);
+ n = ns_m_to_n(mn, ns, m);
+ mode = reg_to_mnctr_mode(mn, reg);
+ return calc_rate(parent_rate, m, n, mode, 0);
+ } else {
+ pre_div = ns_to_pre_div(&rcg->p[bank], ns);
+ return calc_rate(parent_rate, 0, 0, 0, pre_div);
+ }
+}
+
+static const
+struct freq_tbl *find_freq(const struct freq_tbl *f, unsigned long rate)
+{
+ if (!f)
+ return NULL;
+
+ for (; f->freq; f++)
+ if (rate <= f->freq)
+ return f;
+
+ return NULL;
+}
+
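+/*
+ * Find the first frequency table entry at or above @rate and return its
+ * frequency.  If CLK_SET_RATE_PARENT is set, invert the pre-divider and
+ * M/N math to report the parent rate needed for the request; otherwise
+ * report the parent's current rate.
+ */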
+static long _freq_tbl_determine_rate(struct clk_hw *hw,
+ const struct freq_tbl *f, unsigned long rate,
+ unsigned long *p_rate, struct clk **p)
+{
+ unsigned long clk_flags;
+
+ f = find_freq(f, rate);
+ if (!f)
+ return -EINVAL;
+
+ clk_flags = __clk_get_flags(hw->clk);
+ *p = clk_get_parent_by_index(hw->clk, f->src);
+ if (clk_flags & CLK_SET_RATE_PARENT) {
+ rate = rate * f->pre_div;
+ if (f->n) {
+ u64 tmp = rate;
+ tmp = tmp * f->n;
+ do_div(tmp, f->m);
+ rate = tmp;
+ }
+ } else {
+ rate = __clk_get_rate(*p);
+ }
+ *p_rate = rate;
+
+ return f->freq;
+}
+
+static long clk_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
+ unsigned long *p_rate, struct clk **p)
+{
+ struct clk_rcg *rcg = to_clk_rcg(hw);
+
+ return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, p_rate, p);
+}
+
+static long clk_dyn_rcg_determine_rate(struct clk_hw *hw, unsigned long rate,
+ unsigned long *p_rate, struct clk **p)
+{
+ struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
+
+ return _freq_tbl_determine_rate(hw, rcg->freq_tbl, rate, p_rate, p);
+}
+
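+/*
+ * Programming sequence: assert the M/N counter reset, program the MD
+ * register, update the counter mode/enable bits and the NS register,
+ * then release the reset.  Clocks without an M/N counter only have
+ * their pre-divider and source updated in the NS register.
+ */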
+static int clk_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
+ unsigned long parent_rate)
+{
+ struct clk_rcg *rcg = to_clk_rcg(hw);
+ const struct freq_tbl *f;
+ u32 ns, md, ctl;
+ struct mn *mn = &rcg->mn;
+ u32 mask = 0;
+ unsigned int reset_reg;
+
+ f = find_freq(rcg->freq_tbl, rate);
+ if (!f)
+ return -EINVAL;
+
+ if (rcg->mn.reset_in_cc)
+ reset_reg = rcg->clkr.enable_reg;
+ else
+ reset_reg = rcg->ns_reg;
+
+ if (rcg->mn.width) {
+ mask = BIT(mn->mnctr_reset_bit);
+ regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, mask);
+
+ regmap_read(rcg->clkr.regmap, rcg->md_reg, &md);
+ md = mn_to_md(mn, f->m, f->n, md);
+ regmap_write(rcg->clkr.regmap, rcg->md_reg, md);
+
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ /* MN counter mode is in hw.enable_reg sometimes */
+ if (rcg->clkr.enable_reg != rcg->ns_reg) {
+ regmap_read(rcg->clkr.regmap, rcg->clkr.enable_reg, &ctl);
+ ctl = mn_to_reg(mn, f->m, f->n, ctl);
+ regmap_write(rcg->clkr.regmap, rcg->clkr.enable_reg, ctl);
+ } else {
+ ns = mn_to_reg(mn, f->m, f->n, ns);
+ }
+ ns = mn_to_ns(mn, f->m, f->n, ns);
+ } else {
+ regmap_read(rcg->clkr.regmap, rcg->ns_reg, &ns);
+ }
+
+ ns = pre_div_to_ns(&rcg->p, f->pre_div - 1, ns);
+ regmap_write(rcg->clkr.regmap, rcg->ns_reg, ns);
+
+ regmap_update_bits(rcg->clkr.regmap, reset_reg, mask, 0);
+
+ return 0;
+}
+
+static int __clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate)
+{
+ struct clk_dyn_rcg *rcg = to_clk_dyn_rcg(hw);
+ const struct freq_tbl *f;
+
+ f = find_freq(rcg->freq_tbl, rate);
+ if (!f)
+ return -EINVAL;
+
+ configure_bank(rcg, f);
+
+ return 0;
+}
+
+static int clk_dyn_rcg_set_rate(struct clk_hw *hw, unsigned long rate,
+ unsigned long parent_rate)
+{
+ return __clk_dyn_rcg_set_rate(hw, rate);
+}
+
+static int clk_dyn_rcg_set_rate_and_parent(struct clk_hw *hw,
+ unsigned long rate, unsigned long parent_rate, u8 index)
+{
+ return __clk_dyn_rcg_set_rate(hw, rate);
+}
+
+const struct clk_ops clk_rcg_ops = {
+ .enable = clk_enable_regmap,
+ .disable = clk_disable_regmap,
+ .get_parent = clk_rcg_get_parent,
+ .set_parent = clk_rcg_set_parent,
+ .recalc_rate = clk_rcg_recalc_rate,
+ .determine_rate = clk_rcg_determine_rate,
+ .set_rate = clk_rcg_set_rate,
+};
+EXPORT_SYMBOL_GPL(clk_rcg_ops);
+
+const struct clk_ops clk_dyn_rcg_ops = {
+ .enable = clk_enable_regmap,
+ .is_enabled = clk_is_enabled_regmap,
+ .disable = clk_disable_regmap,
+ .get_parent = clk_dyn_rcg_get_parent,
+ .set_parent = clk_dyn_rcg_set_parent,
+ .recalc_rate = clk_dyn_rcg_recalc_rate,
+ .determine_rate = clk_dyn_rcg_determine_rate,
+ .set_rate = clk_dyn_rcg_set_rate,
+ .set_rate_and_parent = clk_dyn_rcg_set_rate_and_parent,
+};
+EXPORT_SYMBOL_GPL(clk_dyn_rcg_ops);