/*
 * Clock and PLL control for DaVinci devices
 *
 * Copyright (C) 2006-2007 Texas Instruments.
 * Copyright (C) 2008-2009 Deep Root Systems, LLC
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/clk.h>
#include <linux/err.h>
#include <linux/mutex.h>
#include <linux/io.h>
#include <linux/delay.h>

#include <mach/hardware.h>

#include <mach/psc.h>
#include <mach/cputype.h>
#include "clock.h"

static LIST_HEAD(clocks);
static DEFINE_MUTEX(clocks_mutex);
static DEFINE_SPINLOCK(clockfw_lock);

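/*
 * Return the GPSC power domain a clock's module lives in: the DSP
 * domain for clocks flagged PSC_DSP, the ARM domain otherwise.
 */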
static unsigned psc_domain(struct clk *clk)
{
	return (clk->flags & PSC_DSP)
		? DAVINCI_GPSC_DSPDOMAIN
		: DAVINCI_GPSC_ARMDOMAIN;
}

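/*
 * Enable/disable helpers: walk up the parent chain and, when a clock's
 * use count crosses zero, gate its module clock through the PSC.
 * Callers hold clockfw_lock.
 */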
static void __clk_enable(struct clk *clk)
{
	if (clk->parent)
		__clk_enable(clk->parent);
	if (clk->usecount++ == 0 && (clk->flags & CLK_PSC))
		davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, 1);
}

static void __clk_disable(struct clk *clk)
{
	if (WARN_ON(clk->usecount == 0))
		return;
	if (--clk->usecount == 0 && !(clk->flags & CLK_PLL))
		davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, 0);
	if (clk->parent)
		__clk_disable(clk->parent);
}

int clk_enable(struct clk *clk)
{
	unsigned long flags;

	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	spin_lock_irqsave(&clockfw_lock, flags);
	__clk_enable(clk);
	spin_unlock_irqrestore(&clockfw_lock, flags);

	return 0;
}
EXPORT_SYMBOL(clk_enable);

void clk_disable(struct clk *clk)
{
	unsigned long flags;

	if (clk == NULL || IS_ERR(clk))
		return;

	spin_lock_irqsave(&clockfw_lock, flags);
	__clk_disable(clk);
	spin_unlock_irqrestore(&clockfw_lock, flags);
}
EXPORT_SYMBOL(clk_disable);

unsigned long clk_get_rate(struct clk *clk)
{
	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	return clk->rate;
}
EXPORT_SYMBOL(clk_get_rate);

long clk_round_rate(struct clk *clk, unsigned long rate)
{
	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	if (clk->round_rate)
		return clk->round_rate(clk, rate);

	return clk->rate;
}
EXPORT_SYMBOL(clk_round_rate);

/* Propagate rate to children */
static void propagate_rate(struct clk *root)
{
	struct clk *clk;

	list_for_each_entry(clk, &root->children, childnode) {
		if (clk->recalc)
			clk->rate = clk->recalc(clk);
		propagate_rate(clk);
	}
}

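/*
 * Change a clock's rate via its set_rate hook, then recalculate the
 * clock and propagate the new rate to all of its children.
 */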
int clk_set_rate(struct clk *clk, unsigned long rate)
{
	unsigned long flags;
	int ret = -EINVAL;

	if (clk == NULL || IS_ERR(clk))
		return ret;

	spin_lock_irqsave(&clockfw_lock, flags);
	if (clk->set_rate)
		ret = clk->set_rate(clk, rate);
	if (ret == 0) {
		if (clk->recalc)
			clk->rate = clk->recalc(clk);
		propagate_rate(clk);
	}
	spin_unlock_irqrestore(&clockfw_lock, flags);

	return ret;
}
EXPORT_SYMBOL(clk_set_rate);

int clk_set_parent(struct clk *clk, struct clk *parent)
{
	unsigned long flags;

	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	/* Cannot change parent on enabled clock */
	if (WARN_ON(clk->usecount))
		return -EINVAL;

	mutex_lock(&clocks_mutex);
	clk->parent = parent;
	list_del_init(&clk->childnode);
	list_add(&clk->childnode, &clk->parent->children);
	mutex_unlock(&clocks_mutex);

	spin_lock_irqsave(&clockfw_lock, flags);
	if (clk->recalc)
		clk->rate = clk->recalc(clk);
	propagate_rate(clk);
	spin_unlock_irqrestore(&clockfw_lock, flags);

	return 0;
}
EXPORT_SYMBOL(clk_set_parent);

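/*
 * Add a clock to the global clock list and to its parent's list of
 * children.  The initial rate is taken from ->rate if already set,
 * else from ->recalc(), else from the parent.
 */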
int clk_register(struct clk *clk)
{
	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	if (WARN(clk->parent && !clk->parent->rate,
		 "CLK: %s parent %s has no rate!\n",
		 clk->name, clk->parent->name))
		return -EINVAL;

	INIT_LIST_HEAD(&clk->children);

	mutex_lock(&clocks_mutex);
	list_add_tail(&clk->node, &clocks);
	if (clk->parent)
		list_add_tail(&clk->childnode, &clk->parent->children);
	mutex_unlock(&clocks_mutex);

	/* If rate is already set, use it */
	if (clk->rate)
		return 0;

	/* Else, see if there is a way to calculate it */
	if (clk->recalc)
		clk->rate = clk->recalc(clk);

	/* Otherwise, default to parent rate */
	else if (clk->parent)
		clk->rate = clk->parent->rate;

	return 0;
}
EXPORT_SYMBOL(clk_register);

void clk_unregister(struct clk *clk)
{
	if (clk == NULL || IS_ERR(clk))
		return;

	mutex_lock(&clocks_mutex);
	list_del(&clk->node);
	list_del(&clk->childnode);
	mutex_unlock(&clocks_mutex);
}
EXPORT_SYMBOL(clk_unregister);

#ifdef CONFIG_DAVINCI_RESET_CLOCKS
/*
 * Disable any unused clocks left on by the bootloader
 */
static int __init clk_disable_unused(void)
{
	struct clk *ck;

	spin_lock_irq(&clockfw_lock);
	list_for_each_entry(ck, &clocks, node) {
		if (ck->usecount > 0)
			continue;
		if (!(ck->flags & CLK_PSC))
			continue;

		/* ignore if in Disabled or SwRstDisable states */
		if (!davinci_psc_is_clk_active(ck->gpsc, ck->lpsc))
			continue;

		pr_info("Clocks: disable unused %s\n", ck->name);
		davinci_psc_config(psc_domain(ck), ck->gpsc, ck->lpsc, 0);
	}
	spin_unlock_irq(&clockfw_lock);

	return 0;
}
late_initcall(clk_disable_unused);
#endif

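/*
 * Recalculate the rate of a PLL-derived SYSCLK divider output: start
 * from the parent PLL's rate (or the raw input rate for PRE_PLL
 * clocks) and apply the ratio programmed in the clock's divider
 * register, if one is enabled.
 */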
static unsigned long clk_sysclk_recalc(struct clk *clk)
{
	u32 v, plldiv;
	struct pll_data *pll;
	unsigned long rate = clk->rate;

	/* If this is the PLL base clock, no more calculations needed */
	if (clk->pll_data)
		return rate;

	if (WARN_ON(!clk->parent))
		return rate;

	rate = clk->parent->rate;

	/* Otherwise, the parent must be a PLL */
	if (WARN_ON(!clk->parent->pll_data))
		return rate;

	pll = clk->parent->pll_data;

	/* If pre-PLL, source clock is before the multiplier and divider(s) */
	if (clk->flags & PRE_PLL)
		rate = pll->input_rate;

	if (!clk->div_reg)
		return rate;

	v = __raw_readl(pll->base + clk->div_reg);
	if (v & PLLDIV_EN) {
		plldiv = (v & PLLDIV_RATIO_MASK) + 1;
		if (plldiv)
			rate /= plldiv;
	}

	return rate;
}

static unsigned long clk_leafclk_recalc(struct clk *clk)
{
	if (WARN_ON(!clk->parent))
		return clk->rate;

	return clk->parent->rate;
}

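/*
 * Recalculate a PLL's output rate from PLLCTL, PLLM, PREDIV and
 * POSTDIV.  When the PLL is bypassed, the input rate is passed
 * through unchanged.
 */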
static unsigned long clk_pllclk_recalc(struct clk *clk)
{
	u32 ctrl, mult = 1, prediv = 1, postdiv = 1;
	u8 bypass;
	struct pll_data *pll = clk->pll_data;
	unsigned long rate = clk->rate;

	pll->base = IO_ADDRESS(pll->phys_base);
	ctrl = __raw_readl(pll->base + PLLCTL);
	rate = pll->input_rate = clk->parent->rate;

	if (ctrl & PLLCTL_PLLEN) {
		bypass = 0;
		mult = __raw_readl(pll->base + PLLM);
		if (cpu_is_davinci_dm365())
			mult = 2 * (mult & PLLM_PLLM_MASK);
		else
			mult = (mult & PLLM_PLLM_MASK) + 1;
	} else
		bypass = 1;

	if (pll->flags & PLL_HAS_PREDIV) {
		prediv = __raw_readl(pll->base + PREDIV);
		if (prediv & PLLDIV_EN)
			prediv = (prediv & PLLDIV_RATIO_MASK) + 1;
		else
			prediv = 1;
	}

	/* pre-divider is fixed, but (some?) chips won't report that */
	if (cpu_is_davinci_dm355() && pll->num == 1)
		prediv = 8;

	if (pll->flags & PLL_HAS_POSTDIV) {
		postdiv = __raw_readl(pll->base + POSTDIV);
		if (postdiv & PLLDIV_EN)
			postdiv = (postdiv & PLLDIV_RATIO_MASK) + 1;
		else
			postdiv = 1;
	}

	if (!bypass) {
		rate /= prediv;
		rate *= mult;
		rate /= postdiv;
	}

	pr_debug("PLL%d: input = %lu MHz [ ",
		 pll->num, clk->parent->rate / 1000000);
	if (bypass)
		pr_debug("bypass ");
	if (prediv > 1)
		pr_debug("/ %d ", prediv);
	if (mult > 1)
		pr_debug("* %d ", mult);
	if (postdiv > 1)
		pr_debug("/ %d ", postdiv);
	pr_debug("] --> %lu MHz output.\n", rate / 1000000);

	return rate;
}

/**
 * davinci_set_pllrate - set the output rate of a given PLL.
 *
 * Note: Currently tested to work with OMAP-L138 only.
 *
 * @pll: pll whose rate needs to be changed.
 * @prediv: The pre divider value. Passing 0 disables the pre-divider.
 * @mult: The multiplier value. Passing 0 leads to multiply-by-one.
 * @postdiv: The post divider value. Passing 0 disables the post-divider.
 */
int davinci_set_pllrate(struct pll_data *pll, unsigned int prediv,
			unsigned int mult, unsigned int postdiv)
{
	u32 ctrl;
	unsigned int locktime;

	if (pll->base == NULL)
		return -EINVAL;

	/*
	 * PLL lock time required per OMAP-L138 datasheet is
	 * (2000 * prediv)/sqrt(pllm) OSCIN cycles. We approximate sqrt(pllm)
	 * as 4 and OSCIN cycle as 25 MHz.
	 */
	if (prediv) {
		locktime = ((2000 * prediv) / 100);
		prediv = (prediv - 1) | PLLDIV_EN;
	} else {
		locktime = 20;
	}
	if (postdiv)
		postdiv = (postdiv - 1) | PLLDIV_EN;
	if (mult)
		mult = mult - 1;

	ctrl = __raw_readl(pll->base + PLLCTL);

	/* Switch the PLL to bypass mode */
	ctrl &= ~(PLLCTL_PLLENSRC | PLLCTL_PLLEN);
	__raw_writel(ctrl, pll->base + PLLCTL);

	/*
	 * Wait for 4 OSCIN/CLKIN cycles to ensure that the PLLC has switched
	 * to bypass mode. Delay of 1us ensures we are good for all > 4MHz
	 * OSCIN/CLKIN inputs. Typically the input is ~25MHz.
	 */
	udelay(1);

	/* Reset and enable PLL */
	ctrl &= ~(PLLCTL_PLLRST | PLLCTL_PLLDIS);
	__raw_writel(ctrl, pll->base + PLLCTL);

	if (pll->flags & PLL_HAS_PREDIV)
		__raw_writel(prediv, pll->base + PREDIV);

	__raw_writel(mult, pll->base + PLLM);

	if (pll->flags & PLL_HAS_POSTDIV)
		__raw_writel(postdiv, pll->base + POSTDIV);

	/*
	 * Wait for PLL to reset properly, OMAP-L138 datasheet says
	 * 'min' time = 125ns
	 */
	udelay(1);

	/* Bring PLL out of reset */
	ctrl |= PLLCTL_PLLRST;
	__raw_writel(ctrl, pll->base + PLLCTL);

	udelay(locktime);

	/* Remove PLL from bypass mode */
	ctrl |= PLLCTL_PLLEN;
	__raw_writel(ctrl, pll->base + PLLCTL);

	return 0;
}
EXPORT_SYMBOL(davinci_set_pllrate);

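/*
 * Register the SoC clock table: pick a recalc helper for each clock,
 * compute its initial rate, add its clkdev entry, and enable any
 * clocks that Linux does not otherwise manage.
 */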
int __init davinci_clk_init(struct davinci_clk *clocks)
{
	struct davinci_clk *c;
	struct clk *clk;

	for (c = clocks; c->lk.clk; c++) {
		clk = c->lk.clk;

		if (!clk->recalc) {

			/* Check if clock is a PLL */
			if (clk->pll_data)
				clk->recalc = clk_pllclk_recalc;

			/* Else, if it is a PLL-derived clock */
			else if (clk->flags & CLK_PLL)
				clk->recalc = clk_sysclk_recalc;

			/* Otherwise, it is a leaf clock (PSC clock) */
			else if (clk->parent)
				clk->recalc = clk_leafclk_recalc;
		}

		if (clk->recalc)
			clk->rate = clk->recalc(clk);

		if (clk->lpsc)
			clk->flags |= CLK_PSC;

		clkdev_add(&c->lk);
		clk_register(clk);

		/* Turn on clocks that Linux doesn't otherwise manage */
		if (clk->flags & ALWAYS_ENABLED)
			clk_enable(clk);
	}

	return 0;
}

#ifdef CONFIG_PROC_FS
#include <linux/proc_fs.h>
#include <linux/seq_file.h>

static void *davinci_ck_start(struct seq_file *m, loff_t *pos)
{
	return *pos < 1 ? (void *)1 : NULL;
}

static void *davinci_ck_next(struct seq_file *m, void *v, loff_t *pos)
{
	++*pos;
	return NULL;
}

static void davinci_ck_stop(struct seq_file *m, void *v)
{
}

#define CLKNAME_MAX	10		/* longest clock name */
#define NEST_DELTA	2
#define NEST_MAX	4

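/* Print one clock, then recurse into its children with extra indent. */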
static void
dump_clock(struct seq_file *s, unsigned nest, struct clk *parent)
{
	char		*state;
	char		buf[CLKNAME_MAX + NEST_DELTA * NEST_MAX];
	struct clk	*clk;
	unsigned	i;

	if (parent->flags & CLK_PLL)
		state = "pll";
	else if (parent->flags & CLK_PSC)
		state = "psc";
	else
		state = "";

	/* <nest spaces> name <pad to end> */
	memset(buf, ' ', sizeof(buf) - 1);
	buf[sizeof(buf) - 1] = 0;
	i = strlen(parent->name);
	memcpy(buf + nest, parent->name,
	       min(i, (unsigned)(sizeof(buf) - 1 - nest)));

	seq_printf(s, "%s users=%2d %-3s %9ld Hz\n",
		   buf, parent->usecount, state, clk_get_rate(parent));
	/* REVISIT show device associations too */

	/* cost is now small, but not linear... */
	list_for_each_entry(clk, &parent->children, childnode) {
		dump_clock(s, nest + NEST_DELTA, clk);
	}
}

static int davinci_ck_show(struct seq_file *m, void *v)
{
	/* Show clock tree; we know the main oscillator is first.
	 * We trust nonzero usecounts equate to PSC enables...
	 */
	mutex_lock(&clocks_mutex);
	if (!list_empty(&clocks))
		dump_clock(m, 0, list_first_entry(&clocks, struct clk, node));
	mutex_unlock(&clocks_mutex);

	return 0;
}

static const struct seq_operations davinci_ck_op = {
	.start	= davinci_ck_start,
	.next	= davinci_ck_next,
	.stop	= davinci_ck_stop,
	.show	= davinci_ck_show
};

static int davinci_ck_open(struct inode *inode, struct file *file)
{
	return seq_open(file, &davinci_ck_op);
}

static const struct file_operations proc_davinci_ck_operations = {
	.open		= davinci_ck_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= seq_release,
};

static int __init davinci_ck_proc_init(void)
{
	proc_create("davinci_clocks", 0, NULL, &proc_davinci_ck_operations);
	return 0;
}
__initcall(davinci_ck_proc_init);
#endif	/* CONFIG_PROC_FS */