| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 1 | /* | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 2 | * Clock and PLL control for DaVinci devices | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 3 | * | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 4 | * Copyright (C) 2006-2007 Texas Instruments. | 
|  | 5 | * Copyright (C) 2008-2009 Deep Root Systems, LLC | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 6 | * | 
|  | 7 | * This program is free software; you can redistribute it and/or modify | 
|  | 8 | * it under the terms of the GNU General Public License as published by | 
|  | 9 | * the Free Software Foundation; either version 2 of the License, or | 
|  | 10 | * (at your option) any later version. | 
|  | 11 | */ | 
|  | 12 |  | 
|  | 13 | #include <linux/module.h> | 
|  | 14 | #include <linux/kernel.h> | 
|  | 15 | #include <linux/init.h> | 
|  | 16 | #include <linux/errno.h> | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 17 | #include <linux/clk.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 18 | #include <linux/err.h> | 
|  | 19 | #include <linux/mutex.h> | 
| Russell King | fced80c | 2008-09-06 12:10:45 +0100 | [diff] [blame] | 20 | #include <linux/io.h> | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 21 | #include <linux/delay.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 22 |  | 
| Russell King | a09e64f | 2008-08-05 16:14:15 +0100 | [diff] [blame] | 23 | #include <mach/hardware.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 24 |  | 
| Russell King | a09e64f | 2008-08-05 16:14:15 +0100 | [diff] [blame] | 25 | #include <mach/psc.h> | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 26 | #include <mach/cputype.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 27 | #include "clock.h" | 
|  | 28 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 29 | static LIST_HEAD(clocks); | 
|  | 30 | static DEFINE_MUTEX(clocks_mutex); | 
|  | 31 | static DEFINE_SPINLOCK(clockfw_lock); | 
|  | 32 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 33 | static unsigned psc_domain(struct clk *clk) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 34 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 35 | return (clk->flags & PSC_DSP) | 
|  | 36 | ? DAVINCI_GPSC_DSPDOMAIN | 
|  | 37 | : DAVINCI_GPSC_ARMDOMAIN; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 38 | } | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 39 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 40 | static void __clk_enable(struct clk *clk) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 41 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 42 | if (clk->parent) | 
|  | 43 | __clk_enable(clk->parent); | 
|  | 44 | if (clk->usecount++ == 0 && (clk->flags & CLK_PSC)) | 
| Sergei Shtylyov | 789a785 | 2009-09-30 19:48:03 +0400 | [diff] [blame] | 45 | davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, 1); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 46 | } | 
|  | 47 |  | 
|  | 48 | static void __clk_disable(struct clk *clk) | 
|  | 49 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 50 | if (WARN_ON(clk->usecount == 0)) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 51 | return; | 
| Chaithrika U S | 679f921 | 2009-12-15 18:02:58 +0530 | [diff] [blame] | 52 | if (--clk->usecount == 0 && !(clk->flags & CLK_PLL) && | 
|  | 53 | (clk->flags & CLK_PSC)) | 
| Sergei Shtylyov | 789a785 | 2009-09-30 19:48:03 +0400 | [diff] [blame] | 54 | davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, 0); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 55 | if (clk->parent) | 
|  | 56 | __clk_disable(clk->parent); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 57 | } | 
|  | 58 |  | 
|  | 59 | int clk_enable(struct clk *clk) | 
|  | 60 | { | 
|  | 61 | unsigned long flags; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 62 |  | 
|  | 63 | if (clk == NULL || IS_ERR(clk)) | 
|  | 64 | return -EINVAL; | 
|  | 65 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 66 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 67 | __clk_enable(clk); | 
|  | 68 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 69 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 70 | return 0; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 71 | } | 
|  | 72 | EXPORT_SYMBOL(clk_enable); | 
|  | 73 |  | 
|  | 74 | void clk_disable(struct clk *clk) | 
|  | 75 | { | 
|  | 76 | unsigned long flags; | 
|  | 77 |  | 
|  | 78 | if (clk == NULL || IS_ERR(clk)) | 
|  | 79 | return; | 
|  | 80 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 81 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 82 | __clk_disable(clk); | 
|  | 83 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 84 | } | 
|  | 85 | EXPORT_SYMBOL(clk_disable); | 
|  | 86 |  | 
/*
 * clk_get_rate - return the cached rate of a clock in Hz.
 *
 * NOTE(review): -EINVAL is returned through an unsigned long, so a bad
 * pointer yields a huge positive value to the caller — long-standing
 * clk API convention, kept as-is.
 */
unsigned long clk_get_rate(struct clk *clk)
{
	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	return clk->rate;
}
EXPORT_SYMBOL(clk_get_rate);
|  | 95 |  | 
|  | 96 | long clk_round_rate(struct clk *clk, unsigned long rate) | 
|  | 97 | { | 
|  | 98 | if (clk == NULL || IS_ERR(clk)) | 
|  | 99 | return -EINVAL; | 
|  | 100 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 101 | if (clk->round_rate) | 
|  | 102 | return clk->round_rate(clk, rate); | 
|  | 103 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 104 | return clk->rate; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 105 | } | 
|  | 106 | EXPORT_SYMBOL(clk_round_rate); | 
|  | 107 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 108 | /* Propagate rate to children */ | 
|  | 109 | static void propagate_rate(struct clk *root) | 
|  | 110 | { | 
|  | 111 | struct clk *clk; | 
|  | 112 |  | 
|  | 113 | list_for_each_entry(clk, &root->children, childnode) { | 
|  | 114 | if (clk->recalc) | 
|  | 115 | clk->rate = clk->recalc(clk); | 
|  | 116 | propagate_rate(clk); | 
|  | 117 | } | 
|  | 118 | } | 
|  | 119 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 120 | int clk_set_rate(struct clk *clk, unsigned long rate) | 
|  | 121 | { | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 122 | unsigned long flags; | 
|  | 123 | int ret = -EINVAL; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 124 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 125 | if (clk == NULL || IS_ERR(clk)) | 
|  | 126 | return ret; | 
|  | 127 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 128 | if (clk->set_rate) | 
|  | 129 | ret = clk->set_rate(clk, rate); | 
| Sekhar Nori | 3b43cd6 | 2010-01-12 18:55:35 +0530 | [diff] [blame] | 130 |  | 
|  | 131 | spin_lock_irqsave(&clockfw_lock, flags); | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 132 | if (ret == 0) { | 
|  | 133 | if (clk->recalc) | 
|  | 134 | clk->rate = clk->recalc(clk); | 
|  | 135 | propagate_rate(clk); | 
|  | 136 | } | 
|  | 137 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
|  | 138 |  | 
|  | 139 | return ret; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 140 | } | 
|  | 141 | EXPORT_SYMBOL(clk_set_rate); | 
|  | 142 |  | 
| Sekhar Nori | b82a51e | 2009-08-31 15:48:04 +0530 | [diff] [blame] | 143 | int clk_set_parent(struct clk *clk, struct clk *parent) | 
|  | 144 | { | 
|  | 145 | unsigned long flags; | 
|  | 146 |  | 
|  | 147 | if (clk == NULL || IS_ERR(clk)) | 
|  | 148 | return -EINVAL; | 
|  | 149 |  | 
|  | 150 | /* Cannot change parent on enabled clock */ | 
|  | 151 | if (WARN_ON(clk->usecount)) | 
|  | 152 | return -EINVAL; | 
|  | 153 |  | 
|  | 154 | mutex_lock(&clocks_mutex); | 
|  | 155 | clk->parent = parent; | 
|  | 156 | list_del_init(&clk->childnode); | 
|  | 157 | list_add(&clk->childnode, &clk->parent->children); | 
|  | 158 | mutex_unlock(&clocks_mutex); | 
|  | 159 |  | 
|  | 160 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 161 | if (clk->recalc) | 
|  | 162 | clk->rate = clk->recalc(clk); | 
|  | 163 | propagate_rate(clk); | 
|  | 164 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
|  | 165 |  | 
|  | 166 | return 0; | 
|  | 167 | } | 
|  | 168 | EXPORT_SYMBOL(clk_set_parent); | 
|  | 169 |  | 
/*
 * clk_register - add a clock to the global clock list.
 *
 * A clock whose parent has no rate yet is rejected (-EINVAL).  When
 * the clock's own rate is unset, it is derived via ->recalc() if
 * available, otherwise inherited from the parent.  Returns 0 on
 * success.
 */
int clk_register(struct clk *clk)
{
	if (clk == NULL || IS_ERR(clk))
		return -EINVAL;

	if (WARN(clk->parent && !clk->parent->rate,
			"CLK: %s parent %s has no rate!\n",
			clk->name, clk->parent->name))
		return -EINVAL;

	INIT_LIST_HEAD(&clk->children);

	mutex_lock(&clocks_mutex);
	list_add_tail(&clk->node, &clocks);
	/* Also hang the clock off its parent's child list for rate
	 * propagation and the debugfs tree dump. */
	if (clk->parent)
		list_add_tail(&clk->childnode, &clk->parent->children);
	mutex_unlock(&clocks_mutex);

	/* If rate is already set, use it */
	if (clk->rate)
		return 0;

	/* Else, see if there is a way to calculate it */
	if (clk->recalc)
		clk->rate = clk->recalc(clk);

	/* Otherwise, default to parent rate */
	else if (clk->parent)
		clk->rate = clk->parent->rate;

	return 0;
}
EXPORT_SYMBOL(clk_register);
|  | 203 |  | 
|  | 204 | void clk_unregister(struct clk *clk) | 
|  | 205 | { | 
|  | 206 | if (clk == NULL || IS_ERR(clk)) | 
|  | 207 | return; | 
|  | 208 |  | 
|  | 209 | mutex_lock(&clocks_mutex); | 
|  | 210 | list_del(&clk->node); | 
| Sekhar Nori | f02bf3b | 2009-08-31 15:48:01 +0530 | [diff] [blame] | 211 | list_del(&clk->childnode); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 212 | mutex_unlock(&clocks_mutex); | 
|  | 213 | } | 
|  | 214 | EXPORT_SYMBOL(clk_unregister); | 
|  | 215 |  | 
#ifdef CONFIG_DAVINCI_RESET_CLOCKS
/*
 * Disable any unused clocks left on by the bootloader
 *
 * Walks every registered PSC-backed clock and powers down modules
 * that Linux has not taken a reference on.  Runs as a late_initcall
 * so all drivers have had a chance to claim their clocks first.
 */
static int __init clk_disable_unused(void)
{
	struct clk *ck;

	spin_lock_irq(&clockfw_lock);
	list_for_each_entry(ck, &clocks, node) {
		/* Skip clocks someone is actually using. */
		if (ck->usecount > 0)
			continue;
		/* Only PSC-backed clocks can be gated here. */
		if (!(ck->flags & CLK_PSC))
			continue;

		/* ignore if in Disabled or SwRstDisable states */
		if (!davinci_psc_is_clk_active(ck->gpsc, ck->lpsc))
			continue;

		pr_info("Clocks: disable unused %s\n", ck->name);
		davinci_psc_config(psc_domain(ck), ck->gpsc, ck->lpsc, 0);
	}
	spin_unlock_irq(&clockfw_lock);

	return 0;
}
late_initcall(clk_disable_unused);
#endif
|  | 244 |  | 
/*
 * Recompute the rate of a PLL-derived (SYSCLK-style) clock from its
 * parent PLL's rate and this clock's own divider register.
 */
static unsigned long clk_sysclk_recalc(struct clk *clk)
{
	u32 v, plldiv;
	struct pll_data *pll;
	unsigned long rate = clk->rate;

	/* If this is the PLL base clock, no more calculations needed */
	if (clk->pll_data)
		return rate;

	if (WARN_ON(!clk->parent))
		return rate;

	rate = clk->parent->rate;

	/* Otherwise, the parent must be a PLL */
	if (WARN_ON(!clk->parent->pll_data))
		return rate;

	pll = clk->parent->pll_data;

	/* If pre-PLL, source clock is before the multiplier and divider(s) */
	if (clk->flags & PRE_PLL)
		rate = pll->input_rate;

	/* No divider register: clock runs at the source rate. */
	if (!clk->div_reg)
		return rate;

	v = __raw_readl(pll->base + clk->div_reg);
	if (v & PLLDIV_EN) {
		plldiv = (v & PLLDIV_RATIO_MASK) + 1;
		/* NOTE(review): plldiv >= 1 after the +1 above, so this
		 * check is always true — redundant but harmless. */
		if (plldiv)
			rate /= plldiv;
	}

	return rate;
}
|  | 282 |  | 
| Sekhar Nori | de381a9 | 2009-08-31 15:48:02 +0530 | [diff] [blame] | 283 | static unsigned long clk_leafclk_recalc(struct clk *clk) | 
|  | 284 | { | 
|  | 285 | if (WARN_ON(!clk->parent)) | 
|  | 286 | return clk->rate; | 
|  | 287 |  | 
|  | 288 | return clk->parent->rate; | 
|  | 289 | } | 
|  | 290 |  | 
/*
 * Recompute a PLL's output rate by reading its control, multiplier,
 * and divider registers.  Also caches the PLL's iomapped base address
 * and its input rate as a side effect.
 */
static unsigned long clk_pllclk_recalc(struct clk *clk)
{
	u32 ctrl, mult = 1, prediv = 1, postdiv = 1;
	u8 bypass;
	struct pll_data *pll = clk->pll_data;
	unsigned long rate = clk->rate;

	pll->base = IO_ADDRESS(pll->phys_base);
	ctrl = __raw_readl(pll->base + PLLCTL);
	/* rate starts from the reference input (overwrites the initial
	 * clk->rate value above). */
	rate = pll->input_rate = clk->parent->rate;

	if (ctrl & PLLCTL_PLLEN) {
		bypass = 0;
		mult = __raw_readl(pll->base + PLLM);
		/* DM365 doubles the multiplier field instead of adding 1. */
		if (cpu_is_davinci_dm365())
			mult = 2 * (mult & PLLM_PLLM_MASK);
		else
			mult = (mult & PLLM_PLLM_MASK) + 1;
	} else
		bypass = 1;

	if (pll->flags & PLL_HAS_PREDIV) {
		prediv = __raw_readl(pll->base + PREDIV);
		if (prediv & PLLDIV_EN)
			prediv = (prediv & PLLDIV_RATIO_MASK) + 1;
		else
			prediv = 1;
	}

	/* pre-divider is fixed, but (some?) chips won't report that */
	if (cpu_is_davinci_dm355() && pll->num == 1)
		prediv = 8;

	if (pll->flags & PLL_HAS_POSTDIV) {
		postdiv = __raw_readl(pll->base + POSTDIV);
		if (postdiv & PLLDIV_EN)
			postdiv = (postdiv & PLLDIV_RATIO_MASK) + 1;
		else
			postdiv = 1;
	}

	/* output = input / prediv * mult / postdiv, unless bypassed */
	if (!bypass) {
		rate /= prediv;
		rate *= mult;
		rate /= postdiv;
	}

	pr_debug("PLL%d: input = %lu MHz [ ",
		 pll->num, clk->parent->rate / 1000000);
	if (bypass)
		pr_debug("bypass ");
	if (prediv > 1)
		pr_debug("/ %d ", prediv);
	if (mult > 1)
		pr_debug("* %d ", mult);
	if (postdiv > 1)
		pr_debug("/ %d ", postdiv);
	pr_debug("] --> %lu MHz output.\n", rate / 1000000);

	return rate;
}
|  | 352 |  | 
/**
 * davinci_set_pllrate - set the output rate of a given PLL.
 *
 * Note: Currently tested to work with OMAP-L138 only.
 *
 * @pll: pll whose rate needs to be changed.
 * @prediv: The pre divider value. Passing 0 disables the pre-divider.
 * @mult: The multiplier value. Passing 0 leads to multiply-by-one.
 * @postdiv: The post divider value. Passing 0 disables the post-divider.
 *
 * Returns 0 on success, -EINVAL if the PLL has not been iomapped yet.
 */
int davinci_set_pllrate(struct pll_data *pll, unsigned int prediv,
					unsigned int mult, unsigned int postdiv)
{
	u32 ctrl;
	unsigned int locktime;
	unsigned long flags;

	if (pll->base == NULL)
		return -EINVAL;

	/*
	 *  PLL lock time required per OMAP-L138 datasheet is
	 * (2000 * prediv)/sqrt(pllm) OSCIN cycles. We approximate sqrt(pllm)
	 * as 4 and OSCIN cycle as 25 MHz.
	 */
	if (prediv) {
		locktime = ((2000 * prediv) / 100);
		/* Hardware encodes divider as (value - 1) plus enable bit. */
		prediv = (prediv - 1) | PLLDIV_EN;
	} else {
		locktime = PLL_LOCK_TIME;
	}
	if (postdiv)
		postdiv = (postdiv - 1) | PLLDIV_EN;
	if (mult)
		mult = mult - 1;

	/* Protect against simultaneous calls to PLL setting sequence */
	spin_lock_irqsave(&clockfw_lock, flags);

	ctrl = __raw_readl(pll->base + PLLCTL);

	/* Switch the PLL to bypass mode */
	ctrl &= ~(PLLCTL_PLLENSRC | PLLCTL_PLLEN);
	__raw_writel(ctrl, pll->base + PLLCTL);

	/* Wait for the switch to bypass to take effect. */
	udelay(PLL_BYPASS_TIME);

	/* Reset and enable PLL */
	ctrl &= ~(PLLCTL_PLLRST | PLLCTL_PLLDIS);
	__raw_writel(ctrl, pll->base + PLLCTL);

	if (pll->flags & PLL_HAS_PREDIV)
		__raw_writel(prediv, pll->base + PREDIV);

	__raw_writel(mult, pll->base + PLLM);

	if (pll->flags & PLL_HAS_POSTDIV)
		__raw_writel(postdiv, pll->base + POSTDIV);

	udelay(PLL_RESET_TIME);

	/* Bring PLL out of reset */
	ctrl |= PLLCTL_PLLRST;
	__raw_writel(ctrl, pll->base + PLLCTL);

	/* Wait for the PLL to lock before leaving bypass. */
	udelay(locktime);

	/* Remove PLL from bypass mode */
	ctrl |= PLLCTL_PLLEN;
	__raw_writel(ctrl, pll->base + PLLCTL);

	spin_unlock_irqrestore(&clockfw_lock, flags);

	return 0;
}
EXPORT_SYMBOL(davinci_set_pllrate);
|  | 429 |  | 
/*
 * davinci_clk_init - register a SoC's clock table.
 *
 * For each entry: pick a recalc hook (PLL, PLL-derived, or leaf) when
 * none was supplied, compute the initial rate, tag PSC-backed clocks,
 * register the clock, and enable clocks Linux does not otherwise
 * manage.  Finally publishes the whole table to clkdev.
 */
int __init davinci_clk_init(struct clk_lookup *clocks)
{
	struct clk_lookup *c;
	struct clk *clk;
	size_t num_clocks = 0;

	for (c = clocks; c->clk; c++) {
		clk = c->clk;

		if (!clk->recalc) {

			/* Check if clock is a PLL */
			if (clk->pll_data)
				clk->recalc = clk_pllclk_recalc;

			/* Else, if it is a PLL-derived clock */
			else if (clk->flags & CLK_PLL)
				clk->recalc = clk_sysclk_recalc;

			/* Otherwise, it is a leaf clock (PSC clock) */
			else if (clk->parent)
				clk->recalc = clk_leafclk_recalc;
		}

		if (clk->recalc)
			clk->rate = clk->recalc(clk);

		/* A nonzero LPSC number marks the clock as PSC-gated. */
		if (clk->lpsc)
			clk->flags |= CLK_PSC;

		clk_register(clk);
		num_clocks++;

		/* Turn on clocks that Linux doesn't otherwise manage */
		if (clk->flags & ALWAYS_ENABLED)
			clk_enable(clk);
	}

	clkdev_add_table(clocks, num_clocks);

	return 0;
}
|  | 472 |  | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 473 | #ifdef CONFIG_DEBUG_FS | 
|  | 474 |  | 
|  | 475 | #include <linux/debugfs.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 476 | #include <linux/seq_file.h> | 
|  | 477 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 478 | #define CLKNAME_MAX	10		/* longest clock name */ | 
|  | 479 | #define NEST_DELTA	2 | 
|  | 480 | #define NEST_MAX	4 | 
|  | 481 |  | 
/*
 * Print one clock and, recursively, its children into the debugfs
 * seq_file, indenting NEST_DELTA spaces per tree level.  Names longer
 * than the padded column are truncated by the bounded memcpy below.
 */
static void
dump_clock(struct seq_file *s, unsigned nest, struct clk *parent)
{
	char		*state;
	char		buf[CLKNAME_MAX + NEST_DELTA * NEST_MAX];
	struct clk	*clk;
	unsigned	i;

	if (parent->flags & CLK_PLL)
		state = "pll";
	else if (parent->flags & CLK_PSC)
		state = "psc";
	else
		state = "";

	/* <nest spaces> name <pad to end> */
	memset(buf, ' ', sizeof(buf) - 1);
	buf[sizeof(buf) - 1] = 0;
	i = strlen(parent->name);
	memcpy(buf + nest, parent->name,
	       min(i, (unsigned)(sizeof(buf) - 1 - nest)));

	seq_printf(s, "%s users=%2d %-3s %9ld Hz\n",
		   buf, parent->usecount, state, clk_get_rate(parent));
	/* REVISIT show device associations too */

	/* cost is now small, but not linear... */
	list_for_each_entry(clk, &parent->children, childnode) {
		dump_clock(s, nest + NEST_DELTA, clk);
	}
}
|  | 513 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 514 | static int davinci_ck_show(struct seq_file *m, void *v) | 
|  | 515 | { | 
| Sekhar Nori | f979aa6 | 2009-12-03 15:36:51 +0530 | [diff] [blame] | 516 | struct clk *clk; | 
|  | 517 |  | 
|  | 518 | /* | 
|  | 519 | * Show clock tree; We trust nonzero usecounts equate to PSC enables... | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 520 | */ | 
|  | 521 | mutex_lock(&clocks_mutex); | 
| Sekhar Nori | f979aa6 | 2009-12-03 15:36:51 +0530 | [diff] [blame] | 522 | list_for_each_entry(clk, &clocks, node) | 
|  | 523 | if (!clk->parent) | 
|  | 524 | dump_clock(m, 0, clk); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 525 | mutex_unlock(&clocks_mutex); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 526 |  | 
|  | 527 | return 0; | 
|  | 528 | } | 
|  | 529 |  | 
/* debugfs open hook: wire reads through davinci_ck_show via seq_file. */
static int davinci_ck_open(struct inode *inode, struct file *file)
{
	return single_open(file, davinci_ck_show, NULL);
}
|  | 534 |  | 
/* File operations for the read-only davinci_clocks debugfs entry. */
static const struct file_operations davinci_ck_operations = {
	.open		= davinci_ck_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= single_release,
};
|  | 541 |  | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 542 | static int __init davinci_clk_debugfs_init(void) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 543 | { | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 544 | debugfs_create_file("davinci_clocks", S_IFREG | S_IRUGO, NULL, NULL, | 
|  | 545 | &davinci_ck_operations); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 546 | return 0; | 
|  | 547 |  | 
|  | 548 | } | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 549 | device_initcall(davinci_clk_debugfs_init); | 
|  | 550 | #endif /* CONFIG_DEBUG_FS */ |