/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;
	/* XXX */
	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	/* XXX */
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	/* XXX */
}


void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	/* XXX */
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	/* XXX */
}

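/*
 * Poll the memory controller busy bits in SRBM_STATUS until they clear,
 * waiting up to rdev->usec_timeout microseconds.  Returns 0 once the MC
 * is idle, -1 on timeout.
 */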
static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
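/*
 * Request a VM context 0 TLB flush and poll VM_CONTEXT0_REQUEST_RESPONSE
 * until the hardware answers; a response type of 2 means the flush failed.
 */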
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the flush response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

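/*
 * Bring up the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLBs, point VM context 0 at the GTT range and the page
 * table, then flush the TLB.  Unmapped accesses fall through to the
 * dummy page.
 */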
int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

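/*
 * Disable VM contexts 0 and 1 and the L1 TLBs, then unmap and unpin the
 * page table object.
 */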
void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}


void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

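/*
 * Save the VGA and CRTC controller state, then disable all six display
 * controllers so the memory controller can be reprogrammed safely.
 */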
static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
	save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
	save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

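/*
 * Point every CRTC and the VGA aperture at the (possibly relocated) VRAM
 * base, then restore the display state saved by evergreen_mc_stop().
 */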
static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

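/*
 * Reprogram the memory controller: stop the displays, set up the system
 * aperture and FB location (and the AGP aperture when present), then
 * restore the displays.  The VGA renderer is left disabled afterwards so
 * it cannot touch VRAM behind the driver's back.
 */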
static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, (rdev->mc.mc_vram_size - 1) | 0x3FF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

#if 0
/*
 * CP.
 */
static void evergreen_cp_stop(struct radeon_device *rdev)
{
	/* XXX */
}


static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	/* XXX */

	return 0;
}


/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;

	return backend_map;
}
#endif

static void evergreen_gpu_init(struct radeon_device *rdev)
{
	/* XXX */
}

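/*
 * Read back the VRAM configuration: bus width from the channel size and
 * channel count, size from CONFIG_MEMSIZE (in MB), then lay out the GPU
 * address space via r600_vram_gtt_location().
 */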
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = drm_get_resource_start(rdev->ddev, 0);
	rdev->mc.aper_size = drm_get_resource_len(rdev->ddev, 0);
	/* Setup GPU memory space */
	/* size in MB on evergreen */
	rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	/* FIXME remove this once we support unmappable VRAM */
	if (rdev->mc.mc_vram_size > rdev->mc.aper_size) {
		rdev->mc.mc_vram_size = rdev->mc.aper_size;
		rdev->mc.real_vram_size = rdev->mc.aper_size;
	}
	r600_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}

bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return false;
}

int evergreen_asic_reset(struct radeon_device *rdev)
{
	/* FIXME: implement for evergreen */
	return 0;
}

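/*
 * Common hardware bring-up used by both init and resume: program the MC,
 * enable AGP or the PCIE GART, and run the GPU init stub.  Firmware load,
 * blitter, IRQ, CP and writeback setup are still under #if 0 at this
 * stage of evergreen support.
 */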
static int evergreen_startup(struct radeon_device *rdev)
{
	int r;

#if 0
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
		r = r600_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}
#endif
	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);
#if 0
	if (!rdev->r600_blit.shader_obj) {
		r = r600_blit_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed blitter (%d).\n", r);
			return r;
		}
	}

	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (unlikely(r != 0))
		return r;
	r = radeon_bo_pin(rdev->r600_blit.shader_obj, RADEON_GEM_DOMAIN_VRAM,
			  &rdev->r600_blit.shader_gpu_addr);
	radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	if (r) {
		DRM_ERROR("failed to pin blit object %d\n", r);
		return r;
	}

	/* Enable IRQ */
	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	r600_irq_set(rdev);

	r = radeon_ring_init(rdev, rdev->cp.ring_size);
	if (r)
		return r;
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = r600_cp_resume(rdev);
	if (r)
		return r;
	/* write back buffers are not vital so don't worry about failure */
	r600_wb_enable(rdev);
#endif
	return 0;
}

int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset the GPU before posting: on rv770 hw, unlike r500 hw,
	 * posting will perform the necessary tasks to bring the GPU back
	 * into good shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);
	/* Initialize clocks */
	r = radeon_clocks_init(rdev);
	if (r) {
		return r;
	}

	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		return r;
	}
#if 0
	r = r600_ib_test(rdev);
	if (r) {
		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
		return r;
	}
#endif
	return r;

}

int evergreen_suspend(struct radeon_device *rdev)
{
#if 0
	int r;

	/* FIXME: we should wait for ring to be empty */
	r700_cp_stop(rdev);
	rdev->cp.ready = false;
	r600_wb_disable(rdev);
#endif

	evergreen_pcie_gart_disable(rdev);
#if 0
	/* unpin shaders bo */
	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
	if (likely(r == 0)) {
		radeon_bo_unpin(rdev->r600_blit.shader_obj);
		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
	}
#endif
	return 0;
}

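/*
 * A card is considered POSTed if any CRTC is enabled or if the VBIOS has
 * already programmed the memory size register.
 */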
static bool evergreen_card_posted(struct radeon_device *rdev)
{
	u32 reg;

	/* first check CRTCs */
	reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
		RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	if (reg & EVERGREEN_CRTC_MASTER_EN)
		return true;

	/* then check MEM_SIZE, in case the crtcs are off */
	if (RREG32(CONFIG_MEMSIZE))
		return true;

	return false;
}

/* The plan is to move initialization into this function and use helper
 * functions so that radeon_device_init does little more than call
 * asic-specific functions.  This should also allow us to remove a bunch
 * of callback functions like vram_info.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	r = radeon_dummy_page_init(rdev);
	if (r)
		return r;
	/* This doesn't do much */
	r = radeon_gem_init(rdev);
	if (r)
		return r;
	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* Post card if necessary */
	if (!evergreen_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	r = radeon_clocks_init(rdev);
	if (r)
		return r;
	/* Initialize power management */
	radeon_pm_init(rdev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;
#if 0
	r = radeon_irq_kms_init(rdev);
	if (r)
		return r;

	rdev->cp.ring_obj = NULL;
	r600_ring_init(rdev, 1024 * 1024);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);
#endif
	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = false;
	r = evergreen_startup(rdev);
	if (r) {
		evergreen_suspend(rdev);
		/*r600_wb_fini(rdev);*/
		/*radeon_ring_fini(rdev);*/
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}
	if (rdev->accel_working) {
		r = radeon_ib_pool_init(rdev);
		if (r) {
			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
			rdev->accel_working = false;
		}
		r = r600_ib_test(rdev);
		if (r) {
			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
			rdev->accel_working = false;
		}
	}
	return 0;
}

void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	evergreen_suspend(rdev);
#if 0
	r600_blit_fini(rdev);
	r600_irq_fini(rdev);
	radeon_irq_kms_fini(rdev);
	radeon_ring_fini(rdev);
	r600_wb_fini(rdev);
#endif
	evergreen_pcie_gart_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_clocks_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
	radeon_dummy_page_fini(rdev);
}