| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 1 | /* | 
|  | 2 | * Defines an spu hypervisor abstraction layer. | 
|  | 3 | * | 
|  | 4 | *  Copyright 2006 Sony Corp. | 
|  | 5 | * | 
|  | 6 | *  This program is free software; you can redistribute it and/or modify | 
|  | 7 | *  it under the terms of the GNU General Public License as published by | 
|  | 8 | *  the Free Software Foundation; version 2 of the License. | 
|  | 9 | * | 
|  | 10 | *  This program is distributed in the hope that it will be useful, | 
|  | 11 | *  but WITHOUT ANY WARRANTY; without even the implied warranty of | 
|  | 12 | *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | 
|  | 13 | *  GNU General Public License for more details. | 
|  | 14 | * | 
|  | 15 | *  You should have received a copy of the GNU General Public License | 
|  | 16 | *  along with this program; if not, write to the Free Software | 
|  | 17 | *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA | 
|  | 18 | */ | 
|  | 19 |  | 
|  | 20 | #if !defined(_SPU_PRIV1_H) | 
|  | 21 | #define _SPU_PRIV1_H | 
|  | 22 | #if defined(__KERNEL__) | 
|  | 23 |  | 
| Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 24 | #include <linux/types.h> | 
|  | 25 |  | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 26 | struct spu; | 
|  | 27 |  | 
|  | 28 | /* access to priv1 registers */ | 
|  | 29 |  | 
/*
 * Operations vector for privileged (priv1) SPU register access.
 * A platform backend (e.g. spu_priv1_mmio_ops or spu_priv1_beat_ops,
 * declared at the bottom of this file) supplies one implementation and
 * the active one is published through the spu_priv1_ops pointer.
 */
struct spu_priv1_ops {
	/* Interrupt mask and status accessors, one per interrupt class. */
	void (*int_mask_and) (struct spu *spu, int class, u64 mask);
	void (*int_mask_or) (struct spu *spu, int class, u64 mask);
	void (*int_mask_set) (struct spu *spu, int class, u64 mask);
	u64 (*int_mask_get) (struct spu *spu, int class);
	void (*int_stat_clear) (struct spu *spu, int class, u64 stat);
	u64 (*int_stat_get) (struct spu *spu, int class);
	/* Bind the SPU's interrupt delivery to a cpu. */
	void (*cpu_affinity_set) (struct spu *spu, int cpu);
	/* MFC (memory flow controller) register accessors. */
	u64 (*mfc_dar_get) (struct spu *spu);
	u64 (*mfc_dsisr_get) (struct spu *spu);
	void (*mfc_dsisr_set) (struct spu *spu, u64 dsisr);
	void (*mfc_sdr_setup) (struct spu *spu);
	void (*mfc_sr1_set) (struct spu *spu, u64 sr1);
	u64 (*mfc_sr1_get) (struct spu *spu);
	void (*mfc_tclass_id_set) (struct spu *spu, u64 tclass_id);
	u64 (*mfc_tclass_id_get) (struct spu *spu);
	/* Invalidate the SPU's TLB. */
	void (*tlb_invalidate) (struct spu *spu);
	/* Resource allocation group accessors. */
	void (*resource_allocation_groupID_set) (struct spu *spu, u64 id);
	u64 (*resource_allocation_groupID_get) (struct spu *spu);
	void (*resource_allocation_enable_set) (struct spu *spu, u64 enable);
	u64 (*resource_allocation_enable_get) (struct spu *spu);
};
|  | 52 |  | 
|  | 53 | extern const struct spu_priv1_ops* spu_priv1_ops; | 
|  | 54 |  | 
|  | 55 | static inline void | 
|  | 56 | spu_int_mask_and (struct spu *spu, int class, u64 mask) | 
|  | 57 | { | 
|  | 58 | spu_priv1_ops->int_mask_and(spu, class, mask); | 
|  | 59 | } | 
|  | 60 |  | 
|  | 61 | static inline void | 
|  | 62 | spu_int_mask_or (struct spu *spu, int class, u64 mask) | 
|  | 63 | { | 
|  | 64 | spu_priv1_ops->int_mask_or(spu, class, mask); | 
|  | 65 | } | 
|  | 66 |  | 
|  | 67 | static inline void | 
|  | 68 | spu_int_mask_set (struct spu *spu, int class, u64 mask) | 
|  | 69 | { | 
|  | 70 | spu_priv1_ops->int_mask_set(spu, class, mask); | 
|  | 71 | } | 
|  | 72 |  | 
|  | 73 | static inline u64 | 
|  | 74 | spu_int_mask_get (struct spu *spu, int class) | 
|  | 75 | { | 
|  | 76 | return spu_priv1_ops->int_mask_get(spu, class); | 
|  | 77 | } | 
|  | 78 |  | 
|  | 79 | static inline void | 
|  | 80 | spu_int_stat_clear (struct spu *spu, int class, u64 stat) | 
|  | 81 | { | 
|  | 82 | spu_priv1_ops->int_stat_clear(spu, class, stat); | 
|  | 83 | } | 
|  | 84 |  | 
|  | 85 | static inline u64 | 
|  | 86 | spu_int_stat_get (struct spu *spu, int class) | 
|  | 87 | { | 
|  | 88 | return spu_priv1_ops->int_stat_get (spu, class); | 
|  | 89 | } | 
|  | 90 |  | 
|  | 91 | static inline void | 
| Geoff Levand | a91942a | 2006-06-19 20:33:30 +0200 | [diff] [blame] | 92 | spu_cpu_affinity_set (struct spu *spu, int cpu) | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 93 | { | 
| Geoff Levand | a91942a | 2006-06-19 20:33:30 +0200 | [diff] [blame] | 94 | spu_priv1_ops->cpu_affinity_set(spu, cpu); | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 95 | } | 
|  | 96 |  | 
|  | 97 | static inline u64 | 
|  | 98 | spu_mfc_dar_get (struct spu *spu) | 
|  | 99 | { | 
|  | 100 | return spu_priv1_ops->mfc_dar_get(spu); | 
|  | 101 | } | 
|  | 102 |  | 
|  | 103 | static inline u64 | 
|  | 104 | spu_mfc_dsisr_get (struct spu *spu) | 
|  | 105 | { | 
|  | 106 | return spu_priv1_ops->mfc_dsisr_get(spu); | 
|  | 107 | } | 
|  | 108 |  | 
|  | 109 | static inline void | 
|  | 110 | spu_mfc_dsisr_set (struct spu *spu, u64 dsisr) | 
|  | 111 | { | 
|  | 112 | spu_priv1_ops->mfc_dsisr_set(spu, dsisr); | 
|  | 113 | } | 
|  | 114 |  | 
|  | 115 | static inline void | 
| Masato Noguchi | 24f43b3 | 2006-10-24 18:31:14 +0200 | [diff] [blame] | 116 | spu_mfc_sdr_setup (struct spu *spu) | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 117 | { | 
| Masato Noguchi | 24f43b3 | 2006-10-24 18:31:14 +0200 | [diff] [blame] | 118 | spu_priv1_ops->mfc_sdr_setup(spu); | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 119 | } | 
|  | 120 |  | 
|  | 121 | static inline void | 
|  | 122 | spu_mfc_sr1_set (struct spu *spu, u64 sr1) | 
|  | 123 | { | 
|  | 124 | spu_priv1_ops->mfc_sr1_set(spu, sr1); | 
|  | 125 | } | 
|  | 126 |  | 
|  | 127 | static inline u64 | 
|  | 128 | spu_mfc_sr1_get (struct spu *spu) | 
|  | 129 | { | 
|  | 130 | return spu_priv1_ops->mfc_sr1_get(spu); | 
|  | 131 | } | 
|  | 132 |  | 
|  | 133 | static inline void | 
|  | 134 | spu_mfc_tclass_id_set (struct spu *spu, u64 tclass_id) | 
|  | 135 | { | 
|  | 136 | spu_priv1_ops->mfc_tclass_id_set(spu, tclass_id); | 
|  | 137 | } | 
|  | 138 |  | 
|  | 139 | static inline u64 | 
|  | 140 | spu_mfc_tclass_id_get (struct spu *spu) | 
|  | 141 | { | 
|  | 142 | return spu_priv1_ops->mfc_tclass_id_get(spu); | 
|  | 143 | } | 
|  | 144 |  | 
|  | 145 | static inline void | 
|  | 146 | spu_tlb_invalidate (struct spu *spu) | 
|  | 147 | { | 
|  | 148 | spu_priv1_ops->tlb_invalidate(spu); | 
|  | 149 | } | 
|  | 150 |  | 
|  | 151 | static inline void | 
|  | 152 | spu_resource_allocation_groupID_set (struct spu *spu, u64 id) | 
|  | 153 | { | 
|  | 154 | spu_priv1_ops->resource_allocation_groupID_set(spu, id); | 
|  | 155 | } | 
|  | 156 |  | 
|  | 157 | static inline u64 | 
|  | 158 | spu_resource_allocation_groupID_get (struct spu *spu) | 
|  | 159 | { | 
|  | 160 | return spu_priv1_ops->resource_allocation_groupID_get(spu); | 
|  | 161 | } | 
|  | 162 |  | 
|  | 163 | static inline void | 
|  | 164 | spu_resource_allocation_enable_set (struct spu *spu, u64 enable) | 
|  | 165 | { | 
|  | 166 | spu_priv1_ops->resource_allocation_enable_set(spu, enable); | 
|  | 167 | } | 
|  | 168 |  | 
|  | 169 | static inline u64 | 
|  | 170 | spu_resource_allocation_enable_get (struct spu *spu) | 
|  | 171 | { | 
|  | 172 | return spu_priv1_ops->resource_allocation_enable_get(spu); | 
|  | 173 | } | 
|  | 174 |  | 
| Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 175 | /* spu management abstraction */ | 
|  | 176 |  | 
/*
 * Operations vector for SPU lifetime management (discovery, creation,
 * teardown, affinity setup).  A platform backend (e.g.
 * spu_management_of_ops, declared at the bottom of this file) supplies
 * the implementation published through the spu_management_ops pointer.
 */
struct spu_management_ops {
	/* Call fn once per discovered SPU; semantics of the return
	 * value are defined by the backend. */
	int (*enumerate_spus)(int (*fn)(void *data));
	int (*create_spu)(struct spu *spu, void *data);
	int (*destroy_spu)(struct spu *spu);
	int (*init_affinity)(void);
};
|  | 183 |  | 
|  | 184 | extern const struct spu_management_ops* spu_management_ops; | 
|  | 185 |  | 
|  | 186 | static inline int | 
|  | 187 | spu_enumerate_spus (int (*fn)(void *data)) | 
|  | 188 | { | 
|  | 189 | return spu_management_ops->enumerate_spus(fn); | 
|  | 190 | } | 
|  | 191 |  | 
|  | 192 | static inline int | 
|  | 193 | spu_create_spu (struct spu *spu, void *data) | 
|  | 194 | { | 
|  | 195 | return spu_management_ops->create_spu(spu, data); | 
|  | 196 | } | 
|  | 197 |  | 
|  | 198 | static inline int | 
|  | 199 | spu_destroy_spu (struct spu *spu) | 
|  | 200 | { | 
|  | 201 | return spu_management_ops->destroy_spu(spu); | 
|  | 202 | } | 
|  | 203 |  | 
| Andre Detsch | f599644 | 2007-08-03 18:53:46 -0700 | [diff] [blame] | 204 | static inline int | 
|  | 205 | spu_init_affinity (void) | 
|  | 206 | { | 
|  | 207 | return spu_management_ops->init_affinity(); | 
|  | 208 | } | 
|  | 209 |  | 
/*
 * The following declarations are put here for convenience
 * and are only intended to be used by the platform setup code.
 */
|  | 214 |  | 
|  | 215 | extern const struct spu_priv1_ops spu_priv1_mmio_ops; | 
| Ishizaki Kou | c9868fe | 2007-02-02 16:45:33 +0900 | [diff] [blame] | 216 | extern const struct spu_priv1_ops spu_priv1_beat_ops; | 
|  | 217 |  | 
| Geoff Levand | e28b003 | 2006-11-23 00:46:49 +0100 | [diff] [blame] | 218 | extern const struct spu_management_ops spu_management_of_ops; | 
| Geoff Levand | 540270d8 | 2006-06-19 20:33:29 +0200 | [diff] [blame] | 219 |  | 
|  | 220 | #endif /* __KERNEL__ */ | 
|  | 221 | #endif |