| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 1 | /* | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 2 | * Clock and PLL control for DaVinci devices | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 3 | * | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 4 | * Copyright (C) 2006-2007 Texas Instruments. | 
|  | 5 | * Copyright (C) 2008-2009 Deep Root Systems, LLC | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 6 | * | 
|  | 7 | * This program is free software; you can redistribute it and/or modify | 
|  | 8 | * it under the terms of the GNU General Public License as published by | 
|  | 9 | * the Free Software Foundation; either version 2 of the License, or | 
|  | 10 | * (at your option) any later version. | 
|  | 11 | */ | 
|  | 12 |  | 
|  | 13 | #include <linux/module.h> | 
|  | 14 | #include <linux/kernel.h> | 
|  | 15 | #include <linux/init.h> | 
|  | 16 | #include <linux/errno.h> | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 17 | #include <linux/clk.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 18 | #include <linux/err.h> | 
|  | 19 | #include <linux/mutex.h> | 
| Russell King | fced80c | 2008-09-06 12:10:45 +0100 | [diff] [blame] | 20 | #include <linux/io.h> | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 21 | #include <linux/delay.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 22 |  | 
| Russell King | a09e64f | 2008-08-05 16:14:15 +0100 | [diff] [blame] | 23 | #include <mach/hardware.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 24 |  | 
| Kevin Hilman | 28552c2 | 2010-02-25 15:36:38 -0800 | [diff] [blame] | 25 | #include <mach/clock.h> | 
| Russell King | a09e64f | 2008-08-05 16:14:15 +0100 | [diff] [blame] | 26 | #include <mach/psc.h> | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 27 | #include <mach/cputype.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 28 | #include "clock.h" | 
|  | 29 |  | 
/* All clocks registered via clk_register(), linked through clk->node. */
static LIST_HEAD(clocks);
/* Serializes clock-tree topology changes (register/unregister/reparent). */
static DEFINE_MUTEX(clocks_mutex);
/* Protects usecounts, cached rates and PSC transitions; taken IRQ-off. */
static DEFINE_SPINLOCK(clockfw_lock);
|  | 33 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 34 | static unsigned psc_domain(struct clk *clk) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 35 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 36 | return (clk->flags & PSC_DSP) | 
|  | 37 | ? DAVINCI_GPSC_DSPDOMAIN | 
|  | 38 | : DAVINCI_GPSC_ARMDOMAIN; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 39 | } | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 40 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 41 | static void __clk_enable(struct clk *clk) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 42 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 43 | if (clk->parent) | 
|  | 44 | __clk_enable(clk->parent); | 
|  | 45 | if (clk->usecount++ == 0 && (clk->flags & CLK_PSC)) | 
| Cyril Chemparathy | 52958be | 2010-03-25 17:43:47 -0400 | [diff] [blame] | 46 | davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, | 
|  | 47 | PSC_STATE_ENABLE); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 48 | } | 
|  | 49 |  | 
|  | 50 | static void __clk_disable(struct clk *clk) | 
|  | 51 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 52 | if (WARN_ON(clk->usecount == 0)) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 53 | return; | 
| Chaithrika U S | 679f921 | 2009-12-15 18:02:58 +0530 | [diff] [blame] | 54 | if (--clk->usecount == 0 && !(clk->flags & CLK_PLL) && | 
|  | 55 | (clk->flags & CLK_PSC)) | 
| Cyril Chemparathy | 52958be | 2010-03-25 17:43:47 -0400 | [diff] [blame] | 56 | davinci_psc_config(psc_domain(clk), clk->gpsc, clk->lpsc, | 
|  | 57 | (clk->flags & PSC_SWRSTDISABLE) ? | 
|  | 58 | PSC_STATE_SWRSTDISABLE : PSC_STATE_DISABLE); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 59 | if (clk->parent) | 
|  | 60 | __clk_disable(clk->parent); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 61 | } | 
|  | 62 |  | 
|  | 63 | int clk_enable(struct clk *clk) | 
|  | 64 | { | 
|  | 65 | unsigned long flags; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 66 |  | 
|  | 67 | if (clk == NULL || IS_ERR(clk)) | 
|  | 68 | return -EINVAL; | 
|  | 69 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 70 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 71 | __clk_enable(clk); | 
|  | 72 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 73 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 74 | return 0; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 75 | } | 
|  | 76 | EXPORT_SYMBOL(clk_enable); | 
|  | 77 |  | 
|  | 78 | void clk_disable(struct clk *clk) | 
|  | 79 | { | 
|  | 80 | unsigned long flags; | 
|  | 81 |  | 
|  | 82 | if (clk == NULL || IS_ERR(clk)) | 
|  | 83 | return; | 
|  | 84 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 85 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 86 | __clk_disable(clk); | 
|  | 87 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 88 | } | 
|  | 89 | EXPORT_SYMBOL(clk_disable); | 
|  | 90 |  | 
|  | 91 | unsigned long clk_get_rate(struct clk *clk) | 
|  | 92 | { | 
|  | 93 | if (clk == NULL || IS_ERR(clk)) | 
|  | 94 | return -EINVAL; | 
|  | 95 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 96 | return clk->rate; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 97 | } | 
|  | 98 | EXPORT_SYMBOL(clk_get_rate); | 
|  | 99 |  | 
|  | 100 | long clk_round_rate(struct clk *clk, unsigned long rate) | 
|  | 101 | { | 
|  | 102 | if (clk == NULL || IS_ERR(clk)) | 
|  | 103 | return -EINVAL; | 
|  | 104 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 105 | if (clk->round_rate) | 
|  | 106 | return clk->round_rate(clk, rate); | 
|  | 107 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 108 | return clk->rate; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 109 | } | 
|  | 110 | EXPORT_SYMBOL(clk_round_rate); | 
|  | 111 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 112 | /* Propagate rate to children */ | 
|  | 113 | static void propagate_rate(struct clk *root) | 
|  | 114 | { | 
|  | 115 | struct clk *clk; | 
|  | 116 |  | 
|  | 117 | list_for_each_entry(clk, &root->children, childnode) { | 
|  | 118 | if (clk->recalc) | 
|  | 119 | clk->rate = clk->recalc(clk); | 
|  | 120 | propagate_rate(clk); | 
|  | 121 | } | 
|  | 122 | } | 
|  | 123 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 124 | int clk_set_rate(struct clk *clk, unsigned long rate) | 
|  | 125 | { | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 126 | unsigned long flags; | 
|  | 127 | int ret = -EINVAL; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 128 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 129 | if (clk == NULL || IS_ERR(clk)) | 
|  | 130 | return ret; | 
|  | 131 |  | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 132 | if (clk->set_rate) | 
|  | 133 | ret = clk->set_rate(clk, rate); | 
| Sekhar Nori | 3b43cd6 | 2010-01-12 18:55:35 +0530 | [diff] [blame] | 134 |  | 
|  | 135 | spin_lock_irqsave(&clockfw_lock, flags); | 
| Sekhar Nori | d6a6156 | 2009-08-31 15:48:03 +0530 | [diff] [blame] | 136 | if (ret == 0) { | 
|  | 137 | if (clk->recalc) | 
|  | 138 | clk->rate = clk->recalc(clk); | 
|  | 139 | propagate_rate(clk); | 
|  | 140 | } | 
|  | 141 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
|  | 142 |  | 
|  | 143 | return ret; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 144 | } | 
|  | 145 | EXPORT_SYMBOL(clk_set_rate); | 
|  | 146 |  | 
| Sekhar Nori | b82a51e | 2009-08-31 15:48:04 +0530 | [diff] [blame] | 147 | int clk_set_parent(struct clk *clk, struct clk *parent) | 
|  | 148 | { | 
|  | 149 | unsigned long flags; | 
|  | 150 |  | 
|  | 151 | if (clk == NULL || IS_ERR(clk)) | 
|  | 152 | return -EINVAL; | 
|  | 153 |  | 
|  | 154 | /* Cannot change parent on enabled clock */ | 
|  | 155 | if (WARN_ON(clk->usecount)) | 
|  | 156 | return -EINVAL; | 
|  | 157 |  | 
|  | 158 | mutex_lock(&clocks_mutex); | 
|  | 159 | clk->parent = parent; | 
|  | 160 | list_del_init(&clk->childnode); | 
|  | 161 | list_add(&clk->childnode, &clk->parent->children); | 
|  | 162 | mutex_unlock(&clocks_mutex); | 
|  | 163 |  | 
|  | 164 | spin_lock_irqsave(&clockfw_lock, flags); | 
|  | 165 | if (clk->recalc) | 
|  | 166 | clk->rate = clk->recalc(clk); | 
|  | 167 | propagate_rate(clk); | 
|  | 168 | spin_unlock_irqrestore(&clockfw_lock, flags); | 
|  | 169 |  | 
|  | 170 | return 0; | 
|  | 171 | } | 
|  | 172 | EXPORT_SYMBOL(clk_set_parent); | 
|  | 173 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 174 | int clk_register(struct clk *clk) | 
|  | 175 | { | 
|  | 176 | if (clk == NULL || IS_ERR(clk)) | 
|  | 177 | return -EINVAL; | 
|  | 178 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 179 | if (WARN(clk->parent && !clk->parent->rate, | 
|  | 180 | "CLK: %s parent %s has no rate!\n", | 
|  | 181 | clk->name, clk->parent->name)) | 
|  | 182 | return -EINVAL; | 
|  | 183 |  | 
| Sekhar Nori | f02bf3b | 2009-08-31 15:48:01 +0530 | [diff] [blame] | 184 | INIT_LIST_HEAD(&clk->children); | 
|  | 185 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 186 | mutex_lock(&clocks_mutex); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 187 | list_add_tail(&clk->node, &clocks); | 
| Sekhar Nori | f02bf3b | 2009-08-31 15:48:01 +0530 | [diff] [blame] | 188 | if (clk->parent) | 
|  | 189 | list_add_tail(&clk->childnode, &clk->parent->children); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 190 | mutex_unlock(&clocks_mutex); | 
|  | 191 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 192 | /* If rate is already set, use it */ | 
|  | 193 | if (clk->rate) | 
|  | 194 | return 0; | 
|  | 195 |  | 
| Sekhar Nori | de381a9 | 2009-08-31 15:48:02 +0530 | [diff] [blame] | 196 | /* Else, see if there is a way to calculate it */ | 
|  | 197 | if (clk->recalc) | 
|  | 198 | clk->rate = clk->recalc(clk); | 
|  | 199 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 200 | /* Otherwise, default to parent rate */ | 
| Sekhar Nori | de381a9 | 2009-08-31 15:48:02 +0530 | [diff] [blame] | 201 | else if (clk->parent) | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 202 | clk->rate = clk->parent->rate; | 
|  | 203 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 204 | return 0; | 
|  | 205 | } | 
|  | 206 | EXPORT_SYMBOL(clk_register); | 
|  | 207 |  | 
|  | 208 | void clk_unregister(struct clk *clk) | 
|  | 209 | { | 
|  | 210 | if (clk == NULL || IS_ERR(clk)) | 
|  | 211 | return; | 
|  | 212 |  | 
|  | 213 | mutex_lock(&clocks_mutex); | 
|  | 214 | list_del(&clk->node); | 
| Sekhar Nori | f02bf3b | 2009-08-31 15:48:01 +0530 | [diff] [blame] | 215 | list_del(&clk->childnode); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 216 | mutex_unlock(&clocks_mutex); | 
|  | 217 | } | 
|  | 218 | EXPORT_SYMBOL(clk_unregister); | 
|  | 219 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 220 | #ifdef CONFIG_DAVINCI_RESET_CLOCKS | 
|  | 221 | /* | 
|  | 222 | * Disable any unused clocks left on by the bootloader | 
|  | 223 | */ | 
|  | 224 | static int __init clk_disable_unused(void) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 225 | { | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 226 | struct clk *ck; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 227 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 228 | spin_lock_irq(&clockfw_lock); | 
|  | 229 | list_for_each_entry(ck, &clocks, node) { | 
|  | 230 | if (ck->usecount > 0) | 
|  | 231 | continue; | 
|  | 232 | if (!(ck->flags & CLK_PSC)) | 
|  | 233 | continue; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 234 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 235 | /* ignore if in Disabled or SwRstDisable states */ | 
| Sergei Shtylyov | 789a785 | 2009-09-30 19:48:03 +0400 | [diff] [blame] | 236 | if (!davinci_psc_is_clk_active(ck->gpsc, ck->lpsc)) | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 237 | continue; | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 238 |  | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 239 | pr_info("Clocks: disable unused %s\n", ck->name); | 
| Cyril Chemparathy | 52958be | 2010-03-25 17:43:47 -0400 | [diff] [blame] | 240 |  | 
|  | 241 | davinci_psc_config(psc_domain(ck), ck->gpsc, ck->lpsc, | 
|  | 242 | (ck->flags & PSC_SWRSTDISABLE) ? | 
|  | 243 | PSC_STATE_SWRSTDISABLE : PSC_STATE_DISABLE); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 244 | } | 
|  | 245 | spin_unlock_irq(&clockfw_lock); | 
|  | 246 |  | 
|  | 247 | return 0; | 
|  | 248 | } | 
|  | 249 | late_initcall(clk_disable_unused); | 
|  | 250 | #endif | 
|  | 251 |  | 
/*
 * Recompute the rate of a PLL-derived "sysclk".
 *
 * The result is the parent PLL's output (or, for PRE_PLL clocks, the
 * raw PLL input rate) divided by this clock's PLLDIV register value
 * when that divider is enabled.  Clocks with no div_reg pass the
 * source rate through unchanged.
 */
static unsigned long clk_sysclk_recalc(struct clk *clk)
{
	u32 v, plldiv;
	struct pll_data *pll;
	unsigned long rate = clk->rate;

	/* If this is the PLL base clock, no more calculations needed */
	if (clk->pll_data)
		return rate;

	if (WARN_ON(!clk->parent))
		return rate;

	rate = clk->parent->rate;

	/* Otherwise, the parent must be a PLL */
	if (WARN_ON(!clk->parent->pll_data))
		return rate;

	pll = clk->parent->pll_data;

	/* If pre-PLL, source clock is before the multiplier and divider(s) */
	if (clk->flags & PRE_PLL)
		rate = pll->input_rate;

	if (!clk->div_reg)
		return rate;

	/* Divider fields are N-1 encoded; only applied when enabled */
	v = __raw_readl(pll->base + clk->div_reg);
	if (v & PLLDIV_EN) {
		plldiv = (v & pll->div_ratio_mask) + 1;
		if (plldiv)
			rate /= plldiv;
	}

	return rate;
}
|  | 289 |  | 
| Sekhar Nori | de381a9 | 2009-08-31 15:48:02 +0530 | [diff] [blame] | 290 | static unsigned long clk_leafclk_recalc(struct clk *clk) | 
|  | 291 | { | 
|  | 292 | if (WARN_ON(!clk->parent)) | 
|  | 293 | return clk->rate; | 
|  | 294 |  | 
|  | 295 | return clk->parent->rate; | 
|  | 296 | } | 
|  | 297 |  | 
/*
 * Recompute a PLL's output rate from its control registers.
 *
 * Reads PLLCTL to determine whether the PLL is enabled or in bypass,
 * then applies the multiplier (PLLM) and the optional pre/post
 * dividers to the parent (reference) rate.  Also caches the reference
 * rate in pll->input_rate for use by clk_sysclk_recalc() on PRE_PLL
 * clocks.
 */
static unsigned long clk_pllclk_recalc(struct clk *clk)
{
	u32 ctrl, mult = 1, prediv = 1, postdiv = 1;
	u8 bypass;
	struct pll_data *pll = clk->pll_data;
	unsigned long rate = clk->rate;

	ctrl = __raw_readl(pll->base + PLLCTL);
	rate = pll->input_rate = clk->parent->rate;

	if (ctrl & PLLCTL_PLLEN) {
		bypass = 0;
		mult = __raw_readl(pll->base + PLLM);
		/* DM365's PLLM field is interpreted as a x2 multiplier */
		if (cpu_is_davinci_dm365())
			mult = 2 * (mult & PLLM_PLLM_MASK);
		else
			mult = (mult & PLLM_PLLM_MASK) + 1;
	} else
		bypass = 1;

	/* Divider fields are N-1 encoded and gated by PLLDIV_EN */
	if (pll->flags & PLL_HAS_PREDIV) {
		prediv = __raw_readl(pll->base + PREDIV);
		if (prediv & PLLDIV_EN)
			prediv = (prediv & pll->div_ratio_mask) + 1;
		else
			prediv = 1;
	}

	/* pre-divider is fixed, but (some?) chips won't report that */
	if (cpu_is_davinci_dm355() && pll->num == 1)
		prediv = 8;

	if (pll->flags & PLL_HAS_POSTDIV) {
		postdiv = __raw_readl(pll->base + POSTDIV);
		if (postdiv & PLLDIV_EN)
			postdiv = (postdiv & pll->div_ratio_mask) + 1;
		else
			postdiv = 1;
	}

	/* In bypass mode the reference rate passes straight through */
	if (!bypass) {
		rate /= prediv;
		rate *= mult;
		rate /= postdiv;
	}

	pr_debug("PLL%d: input = %lu MHz [ ",
		 pll->num, clk->parent->rate / 1000000);
	if (bypass)
		pr_debug("bypass ");
	if (prediv > 1)
		pr_debug("/ %d ", prediv);
	if (mult > 1)
		pr_debug("* %d ", mult);
	if (postdiv > 1)
		pr_debug("/ %d ", postdiv);
	pr_debug("] --> %lu MHz output.\n", rate / 1000000);

	return rate;
}
|  | 358 |  | 
/**
 * davinci_set_pllrate - set the output rate of a given PLL.
 *
 * Note: Currently tested to work with OMAP-L138 only.
 *
 * @pll: pll whose rate needs to be changed.
 * @prediv: The pre divider value. Passing 0 disables the pre-divider.
 * @mult: The multiplier value. Passing 0 leads to multiply-by-one.
 * @postdiv: The post divider value. Passing 0 disables the post-divider.
 *
 * Returns 0 on success, -EINVAL if the PLL registers are not mapped.
 * The whole bypass/reprogram/relock sequence runs under clockfw_lock
 * with interrupts disabled.
 */
int davinci_set_pllrate(struct pll_data *pll, unsigned int prediv,
			unsigned int mult, unsigned int postdiv)
{
	u32 ctrl;
	unsigned int locktime;
	unsigned long flags;

	if (pll->base == NULL)
		return -EINVAL;

	/*
	 *  PLL lock time required per OMAP-L138 datasheet is
	 * (2000 * prediv)/sqrt(pllm) OSCIN cycles. We approximate sqrt(pllm)
	 * as 4 and OSCIN cycle as 25 MHz.
	 */
	if (prediv) {
		locktime = ((2000 * prediv) / 100);
		/* Divider fields are N-1 encoded, plus an enable bit */
		prediv = (prediv - 1) | PLLDIV_EN;
	} else {
		locktime = PLL_LOCK_TIME;
	}
	if (postdiv)
		postdiv = (postdiv - 1) | PLLDIV_EN;
	if (mult)
		mult = mult - 1;

	/* Protect against simultaneous calls to PLL setting sequence */
	spin_lock_irqsave(&clockfw_lock, flags);

	ctrl = __raw_readl(pll->base + PLLCTL);

	/* Switch the PLL to bypass mode */
	ctrl &= ~(PLLCTL_PLLENSRC | PLLCTL_PLLEN);
	__raw_writel(ctrl, pll->base + PLLCTL);

	udelay(PLL_BYPASS_TIME);

	/* Reset and enable PLL */
	ctrl &= ~(PLLCTL_PLLRST | PLLCTL_PLLDIS);
	__raw_writel(ctrl, pll->base + PLLCTL);

	if (pll->flags & PLL_HAS_PREDIV)
		__raw_writel(prediv, pll->base + PREDIV);

	__raw_writel(mult, pll->base + PLLM);

	if (pll->flags & PLL_HAS_POSTDIV)
		__raw_writel(postdiv, pll->base + POSTDIV);

	udelay(PLL_RESET_TIME);

	/* Bring PLL out of reset */
	ctrl |= PLLCTL_PLLRST;
	__raw_writel(ctrl, pll->base + PLLCTL);

	/* Wait for the PLL to lock before leaving bypass */
	udelay(locktime);

	/* Remove PLL from bypass mode */
	ctrl |= PLLCTL_PLLEN;
	__raw_writel(ctrl, pll->base + PLLCTL);

	spin_unlock_irqrestore(&clockfw_lock, flags);

	return 0;
}
|  | 434 | EXPORT_SYMBOL(davinci_set_pllrate); | 
|  | 435 |  | 
/*
 * davinci_clk_init - register a platform's clk_lookup table.
 *
 * For each entry (terminated by a NULL ->clk) this picks a recalc
 * method if none was preset (PLL, PLL-derived sysclk, or leaf), fills
 * in PLL defaults (divider-ratio mask, register mapping), computes the
 * initial rate, registers the clock, and enables ALWAYS_ENABLED
 * clocks.  Finally the whole table is handed to clkdev for clk_get()
 * lookups.  Always returns 0.
 */
int __init davinci_clk_init(struct clk_lookup *clocks)
{
	struct clk_lookup *c;
	struct clk *clk;
	size_t num_clocks = 0;

	for (c = clocks; c->clk; c++) {
		clk = c->clk;

		if (!clk->recalc) {

			/* Check if clock is a PLL */
			if (clk->pll_data)
				clk->recalc = clk_pllclk_recalc;

			/* Else, if it is a PLL-derived clock */
			else if (clk->flags & CLK_PLL)
				clk->recalc = clk_sysclk_recalc;

			/* Otherwise, it is a leaf clock (PSC clock) */
			else if (clk->parent)
				clk->recalc = clk_leafclk_recalc;
		}

		if (clk->pll_data) {
			struct pll_data *pll = clk->pll_data;

			/* Default to the common divider-ratio field mask */
			if (!pll->div_ratio_mask)
				pll->div_ratio_mask = PLLDIV_RATIO_MASK;

			/* Map the PLL controller registers if not done yet */
			if (pll->phys_base && !pll->base) {
				pll->base = ioremap(pll->phys_base, SZ_4K);
				WARN_ON(!pll->base);
			}
		}

		if (clk->recalc)
			clk->rate = clk->recalc(clk);

		/* A local PSC module id means the clock is PSC-gated */
		if (clk->lpsc)
			clk->flags |= CLK_PSC;

		clk_register(clk);
		num_clocks++;

		/* Turn on clocks that Linux doesn't otherwise manage */
		if (clk->flags & ALWAYS_ENABLED)
			clk_enable(clk);
	}

	clkdev_add_table(clocks, num_clocks);

	return 0;
}
|  | 490 |  | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 491 | #ifdef CONFIG_DEBUG_FS | 
|  | 492 |  | 
|  | 493 | #include <linux/debugfs.h> | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 494 | #include <linux/seq_file.h> | 
|  | 495 |  | 
#define CLKNAME_MAX	10		/* longest clock name */
#define NEST_DELTA	2		/* indent step per tree level */
#define NEST_MAX	4		/* deepest level the pad buffer allows */

/*
 * Print one clock line (indented name, use count, pll/psc tag, rate)
 * into the seq_file, then recurse over the clock's children.
 * Caller must hold clocks_mutex.
 */
static void
dump_clock(struct seq_file *s, unsigned nest, struct clk *parent)
{
	char		*state;
	char		buf[CLKNAME_MAX + NEST_DELTA * NEST_MAX];
	struct clk	*clk;
	unsigned	i;

	if (parent->flags & CLK_PLL)
		state = "pll";
	else if (parent->flags & CLK_PSC)
		state = "psc";
	else
		state = "";

	/* <nest spaces> name <pad to end> */
	memset(buf, ' ', sizeof(buf) - 1);
	buf[sizeof(buf) - 1] = 0;
	i = strlen(parent->name);
	memcpy(buf + nest, parent->name,
			min(i, (unsigned)(sizeof(buf) - 1 - nest)));

	seq_printf(s, "%s users=%2d %-3s %9ld Hz\n",
			buf, parent->usecount, state, clk_get_rate(parent));
	/* REVISIT show device associations too */

	/* cost is now small, but not linear... */
	list_for_each_entry(clk, &parent->children, childnode) {
		dump_clock(s, nest + NEST_DELTA, clk);
	}
}
|  | 531 |  | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 532 | static int davinci_ck_show(struct seq_file *m, void *v) | 
|  | 533 | { | 
| Sekhar Nori | f979aa6 | 2009-12-03 15:36:51 +0530 | [diff] [blame] | 534 | struct clk *clk; | 
|  | 535 |  | 
|  | 536 | /* | 
|  | 537 | * Show clock tree; We trust nonzero usecounts equate to PSC enables... | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 538 | */ | 
|  | 539 | mutex_lock(&clocks_mutex); | 
| Sekhar Nori | f979aa6 | 2009-12-03 15:36:51 +0530 | [diff] [blame] | 540 | list_for_each_entry(clk, &clocks, node) | 
|  | 541 | if (!clk->parent) | 
|  | 542 | dump_clock(m, 0, clk); | 
| Kevin Hilman | c5b736d | 2009-03-20 17:29:01 -0700 | [diff] [blame] | 543 | mutex_unlock(&clocks_mutex); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 544 |  | 
|  | 545 | return 0; | 
|  | 546 | } | 
|  | 547 |  | 
/* debugfs open: bind the whole-tree dump to this file via seq_file. */
static int davinci_ck_open(struct inode *inode, struct file *file)
{
	return single_open(file, davinci_ck_show, NULL);
}
|  | 552 |  | 
/* Read-only seq_file ops backing the davinci_clocks debugfs entry. */
static const struct file_operations davinci_ck_operations = {
	.open		= davinci_ck_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= single_release,
};
|  | 559 |  | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 560 | static int __init davinci_clk_debugfs_init(void) | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 561 | { | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 562 | debugfs_create_file("davinci_clocks", S_IFREG | S_IRUGO, NULL, NULL, | 
|  | 563 | &davinci_ck_operations); | 
| Vladimir Barinov | 3e062b0 | 2007-06-05 16:36:55 +0100 | [diff] [blame] | 564 | return 0; | 
|  | 565 |  | 
|  | 566 | } | 
| Sekhar Nori | 2f72e8d | 2009-12-03 15:36:52 +0530 | [diff] [blame] | 567 | device_initcall(davinci_clk_debugfs_init); | 
|  | 568 | #endif /* CONFIG_DEBUG_FS */ |