/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);

/* get temperature in millidegrees */
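/*
 * Illustrative note (inferred from the decode below, not from a register
 * spec): the ASIC_T field appears to carry a limit/sign bit pair above an
 * 8-bit magnitude in half-degree steps, e.g. a raw field of 90 (0x5a)
 * would decode as (90 >> 1) & 0xff = 45, i.e. 45000 millidegrees.
 */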
u32 evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
		ASIC_T_SHIFT;
	u32 actual_temp = 0;

	if ((temp >> 10) & 1)
		actual_temp = 0;
	else if ((temp >> 9) & 1)
		actual_temp = 255;
	else
		actual_temp = (temp >> 1) & 0xff;

	return actual_temp * 1000;
}

void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if ((voltage->type == VOLTAGE_SW) && voltage->voltage) {
		if (voltage->voltage != rdev->pm.current_vddc) {
			radeon_atom_set_voltage(rdev, voltage->voltage);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: v: %d\n", voltage->voltage);
		}
	}
}

void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			rdev->irq.hpd[0] = true;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			rdev->irq.hpd[1] = true;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			rdev->irq.hpd[2] = true;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			rdev->irq.hpd[3] = true;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			rdev->irq.hpd[4] = true;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			rdev->irq.hpd[5] = true;
			break;
		default:
			break;
		}
	}
	if (rdev->irq.installed)
		evergreen_irq_set(rdev);
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			rdev->irq.hpd[0] = false;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			rdev->irq.hpd[1] = false;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			rdev->irq.hpd[2] = false;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			rdev->irq.hpd[3] = false;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			rdev->irq.hpd[4] = false;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			rdev->irq.hpd[5] = false;
			break;
		default:
			break;
		}
	}
}

/* watermark setup */

static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp = 0;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers. The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2)
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2)
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	if (mode && other_mode) {
		if (mode->hdisplay > other_mode->hdisplay) {
			if (mode->hdisplay > 2560)
				tmp = 1; /* 3/4 */
			else
				tmp = 0; /* 1/2 */
		} else if (other_mode->hdisplay > mode->hdisplay) {
			if (other_mode->hdisplay > 2560)
				tmp = 3; /* 1/4 */
			else
				tmp = 0; /* 1/2 */
		} else
			tmp = 0; /* 1/2 */
	} else if (mode)
		tmp = 2; /* whole */
	else if (other_mode)
		tmp = 3; /* 1/4 */

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	switch (tmp) {
	case 0:
	case 4:
	default:
		return 3840 * 2;
	case 1:
	case 5:
		return 5760 * 2;
	case 2:
	case 6:
		return 7680 * 2;
	case 3:
	case 7:
		return 1920 * 2;
	}
}
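/*
 * Worked example (illustrative): a 3840-wide mode paired with a 1920-wide
 * mode on the other controller of the pair selects allocation 1 (first 3/4
 * of the lb) for the wider head, returning 5760 * 2 pixels of line buffer
 * for it, while the narrower head gets allocation 3 (1/4, 1920 * 2).
 */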

static u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
{
	u32 tmp = RREG32(MC_SHARED_CHMAP);

	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	}
}

struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};

static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate raw DRAM Bandwidth */
	fixed20_12 dram_efficiency; /* 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}
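/*
 * Illustrative numbers: the formula above is bandwidth = (yclk / 1000) *
 * (channels * 4 bytes) * 0.7. With yclk = 1000000 kHz (1 GHz effective)
 * and 2 channels that gives 1000 * 8 * 0.7 = 5600 MB/s of raw DRAM
 * bandwidth.
 */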

static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst-case value 0.3 */
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display Data return Bandwidth */
	fixed20_12 return_efficiency; /* 0.8 */
	fixed20_12 sclk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	sclk.full = dfixed_const(wm->sclk);
	sclk.full = dfixed_div(sclk, a);
	a.full = dfixed_const(10);
	return_efficiency.full = dfixed_const(8);
	return_efficiency.full = dfixed_div(return_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, sclk);
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the DMIF Request Bandwidth */
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
	fixed20_12 disp_clk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	disp_clk.full = dfixed_const(wm->disp_clk);
	disp_clk.full = dfixed_div(disp_clk, a);
	a.full = dfixed_const(10);
	disp_clk_request_efficiency.full = dfixed_const(8);
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, disp_clk);
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);

	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}

static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display mode Average Bandwidth
	 * DisplayMode should contain the source and destination dimensions,
	 * timing, etc.
	 */
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}
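/*
 * Illustrative numbers: average bandwidth = src_width * bytes_per_pixel *
 * vsc / line_time(us). A 1920-wide 32bpp head with vsc = 1 and a 14.8 us
 * line time needs roughly 1920 * 4 / 14.8 ~= 519 MB/s on average.
 */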

static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);
}
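/*
 * Summary of the calculation above (derived from the code, not a hw doc):
 * the watermark is the total request latency (MC latency, plus time for
 * the other heads' chunk and cursor returns, plus display pipe latency),
 * extended by any shortfall when the line buffer fill rate cannot keep
 * up with the active display time of a line.
 */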

static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
{
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (evergreen_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}
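/*
 * Illustrative reading: with no downscaling and enough line buffer for
 * more than vtaps + 1 lines, the pipe can tolerate two full line times
 * plus the blank period before underflowing, so the computed latency
 * watermark must fit inside that window.
 */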

static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_INFO("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
}
704
Alex Deucher0ca2ab52010-02-26 13:57:45 -0500705void evergreen_bandwidth_update(struct radeon_device *rdev)
706{
Alex Deucherf9d9c362010-10-22 02:51:05 -0400707 struct drm_display_mode *mode0 = NULL;
708 struct drm_display_mode *mode1 = NULL;
709 u32 num_heads = 0, lb_size;
710 int i;
711
712 radeon_update_display_priority(rdev);
713
714 for (i = 0; i < rdev->num_crtc; i++) {
715 if (rdev->mode_info.crtcs[i]->base.enabled)
716 num_heads++;
717 }
718 for (i = 0; i < rdev->num_crtc; i += 2) {
719 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
720 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
721 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
722 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
723 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
724 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
725 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500726}
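/*
 * Note: crtcs are walked in pairs above because each pair shares one line
 * buffer (see evergreen_line_buffer_adjust), so each head's allocation
 * depends on the mode of its partner.
 */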

static int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read SRBM_STATUS and check the MC busy bits */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the VM context0 response until the flush completes */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
	       RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}
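/*
 * Note on the sequence above: the page table and aperture registers take
 * page numbers (byte addresses shifted right by 12), VM context 0 is the
 * GART context while context 1 stays disabled, and faulting translations
 * are redirected to the dummy page before the TLB is flushed.
 */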

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
	      SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	      SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
	      EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

static void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
	save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
	save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
	save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	}
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	WREG32(EVERGREEN_D3VGA_CONTROL, 0);
	WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	WREG32(EVERGREEN_D5VGA_CONTROL, 0);
	WREG32(EVERGREEN_D6VGA_CONTROL, 0);
}

static void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);

		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);

		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);

		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
	}

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
	WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
	WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	}
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	}
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (!(rdev->flags & RADEON_IS_IGP)) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

static void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			       rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			       rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
		       rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
		       rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
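/*
 * Note: MC reprogramming above follows a stop/program/resume bracket:
 * display fetch is quiesced (evergreen_mc_stop), the MC is polled idle,
 * the apertures are rewritten, and then the saved display state is
 * restored (evergreen_mc_resume).
 */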

/*
 * CP.
 */

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL, RB_NO_UPDATE | (15 << 8) | (3 << 0));

	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

static int evergreen_cp_start(struct radeon_device *rdev)
{
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(rdev, 0x1);
	radeon_ring_write(rdev, 0x0);
	radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(rdev, 0);
	radeon_ring_write(rdev, 0);
	radeon_ring_unlock_commit(rdev);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, evergreen_default_size + 15);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(rdev, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(rdev, evergreen_default_state[i]);

	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(rdev, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(rdev, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(rdev, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(rdev, 0xc0026f00);
	radeon_ring_write(rdev, 0x00000000);
	radeon_ring_write(rdev, 0x00000000);
	radeon_ring_write(rdev, 0x00000000);

	/* Clear consts */
	radeon_ring_write(rdev, 0xc0036f00);
	radeon_ring_write(rdev, 0x00000bc4);
	radeon_ring_write(rdev, 0xffffffff);
	radeon_ring_write(rdev, 0xffffffff);
	radeon_ring_write(rdev, 0xffffffff);

	radeon_ring_unlock_commit(rdev);

	return 0;
}
1189
Alex Deucherfe251e22010-03-24 13:36:43 -04001190int evergreen_cp_resume(struct radeon_device *rdev)
1191{
1192 u32 tmp;
1193 u32 rb_bufsz;
1194 int r;
1195
1196 /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
1197 WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
1198 SOFT_RESET_PA |
1199 SOFT_RESET_SH |
1200 SOFT_RESET_VGT |
1201 SOFT_RESET_SX));
1202 RREG32(GRBM_SOFT_RESET);
1203 mdelay(15);
1204 WREG32(GRBM_SOFT_RESET, 0);
1205 RREG32(GRBM_SOFT_RESET);
1206
1207 /* Set ring buffer size */
1208 rb_bufsz = drm_order(rdev->cp.ring_size / 8);
Alex Deucher724c80e2010-08-27 18:25:25 -04001209 tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
Alex Deucherfe251e22010-03-24 13:36:43 -04001210#ifdef __BIG_ENDIAN
1211 tmp |= BUF_SWAP_32BIT;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001212#endif
Alex Deucherfe251e22010-03-24 13:36:43 -04001213 WREG32(CP_RB_CNTL, tmp);
1214 WREG32(CP_SEM_WAIT_TIMER, 0x4);
1215
1216 /* Set the write pointer delay */
1217 WREG32(CP_RB_WPTR_DELAY, 0);
1218
1219 /* Initialize the ring buffer's read and write pointers */
1220 WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
1221 WREG32(CP_RB_RPTR_WR, 0);
1222 WREG32(CP_RB_WPTR, 0);
Alex Deucher724c80e2010-08-27 18:25:25 -04001223
1224 /* set the wb address wether it's enabled or not */
1225 WREG32(CP_RB_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
1226 WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
1227 WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
1228
1229 if (rdev->wb.enabled)
1230 WREG32(SCRATCH_UMSK, 0xff);
1231 else {
1232 tmp |= RB_NO_UPDATE;
1233 WREG32(SCRATCH_UMSK, 0);
1234 }
1235
Alex Deucherfe251e22010-03-24 13:36:43 -04001236 mdelay(1);
1237 WREG32(CP_RB_CNTL, tmp);
1238
1239 WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
1240 WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
1241
1242 rdev->cp.rptr = RREG32(CP_RB_RPTR);
1243 rdev->cp.wptr = RREG32(CP_RB_WPTR);
1244
Alex Deucher7e7b41d2010-09-02 21:32:32 -04001245 evergreen_cp_start(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04001246 rdev->cp.ready = true;
1247 r = radeon_ring_test(rdev);
1248 if (r) {
1249 rdev->cp.ready = false;
1250 return r;
1251 }
1252 return 0;
1253}
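/*
 * Illustrative decode of CP_RB_CNTL as programmed above: the low field
 * takes log2(ring_size / 8) (a 1 MB ring gives rb_bufsz = 17) and the
 * field at bit 8 takes log2(page_size / 8) as the read-pointer update
 * granularity; RB_NO_UPDATE is set only when writeback is disabled.
 */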

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
						  u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	default:
		force_no_swizzle = true;
		break;
	}
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

	switch (num_tile_pipes) {
	case 1:
	case 3:
	case 5:
	case 7:
		DRM_ERROR("odd number of pipes!\n");
		break;
	case 2:
		swizzle_pipe[0] = 0;
		swizzle_pipe[1] = 1;
		break;
	case 4:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 1;
			swizzle_pipe[3] = 3;
		}
		break;
	case 6:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 1;
			swizzle_pipe[4] = 3;
			swizzle_pipe[5] = 5;
		}
		break;
	case 8:
		if (force_no_swizzle) {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 1;
			swizzle_pipe[2] = 2;
			swizzle_pipe[3] = 3;
			swizzle_pipe[4] = 4;
			swizzle_pipe[5] = 5;
			swizzle_pipe[6] = 6;
			swizzle_pipe[7] = 7;
		} else {
			swizzle_pipe[0] = 0;
			swizzle_pipe[1] = 2;
			swizzle_pipe[2] = 4;
			swizzle_pipe[3] = 6;
			swizzle_pipe[4] = 1;
			swizzle_pipe[5] = 3;
			swizzle_pipe[6] = 5;
			swizzle_pipe[7] = 7;
		}
		break;
	}

	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
		while (((1 << cur_backend) & enabled_backends_mask) == 0)
			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;

		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));

		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
	}

	return backend_map;
}
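/*
 * Note on the packing above: backend_map holds one 4-bit backend id per
 * tile pipe, indexed by the (possibly swizzled) pipe number; e.g. two
 * pipes on backends 0 and 1 pack as 0x10.
 */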

static void evergreen_program_channel_remap(struct radeon_device *rdev)
{
	u32 tcp_chan_steer_lo, tcp_chan_steer_hi, mc_shared_chremap, tmp;

	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	case 1:
	case 2:
	case 3:
	default:
		/* default mapping */
		mc_shared_chremap = 0x00fac688;
		break;
	}

	switch (rdev->family) {
	case CHIP_HEMLOCK:
	case CHIP_CYPRESS:
		tcp_chan_steer_lo = 0x54763210;
		tcp_chan_steer_hi = 0x0000ba98;
		break;
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	default:
		tcp_chan_steer_lo = 0x76543210;
		tcp_chan_steer_hi = 0x0000ba98;
		break;
	}

	WREG32(TCP_CHAN_STEER_LO, tcp_chan_steer_lo);
	WREG32(TCP_CHAN_STEER_HI, tcp_chan_steer_hi);
	WREG32(MC_SHARED_CHREMAP, mc_shared_chremap);
}
1436
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001437static void evergreen_gpu_init(struct radeon_device *rdev)
1438{
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001439 u32 cc_rb_backend_disable = 0;
1440 u32 cc_gc_shader_pipe_config;
1441 u32 gb_addr_config = 0;
1442 u32 mc_shared_chmap, mc_arb_ramcfg;
1443 u32 gb_backend_map;
1444 u32 grbm_gfx_index;
1445 u32 sx_debug_1;
1446 u32 smx_dc_ctl0;
1447 u32 sq_config;
1448 u32 sq_lds_resource_mgmt;
1449 u32 sq_gpr_resource_mgmt_1;
1450 u32 sq_gpr_resource_mgmt_2;
1451 u32 sq_gpr_resource_mgmt_3;
1452 u32 sq_thread_resource_mgmt;
1453 u32 sq_thread_resource_mgmt_2;
1454 u32 sq_stack_resource_mgmt_1;
1455 u32 sq_stack_resource_mgmt_2;
1456 u32 sq_stack_resource_mgmt_3;
1457 u32 vgt_cache_invalidation;
1458 u32 hdp_host_path_cntl;
1459 int i, j, num_shader_engines, ps_thread_count;
1460
1461 switch (rdev->family) {
1462 case CHIP_CYPRESS:
1463 case CHIP_HEMLOCK:
1464 rdev->config.evergreen.num_ses = 2;
1465 rdev->config.evergreen.max_pipes = 4;
1466 rdev->config.evergreen.max_tile_pipes = 8;
1467 rdev->config.evergreen.max_simds = 10;
1468 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1469 rdev->config.evergreen.max_gprs = 256;
1470 rdev->config.evergreen.max_threads = 248;
1471 rdev->config.evergreen.max_gs_threads = 32;
1472 rdev->config.evergreen.max_stack_entries = 512;
1473 rdev->config.evergreen.sx_num_of_sets = 4;
1474 rdev->config.evergreen.sx_max_export_size = 256;
1475 rdev->config.evergreen.sx_max_export_pos_size = 64;
1476 rdev->config.evergreen.sx_max_export_smx_size = 192;
1477 rdev->config.evergreen.max_hw_contexts = 8;
1478 rdev->config.evergreen.sq_num_cf_insts = 2;
1479
1480 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1481 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1482 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1483 break;
1484 case CHIP_JUNIPER:
1485 rdev->config.evergreen.num_ses = 1;
1486 rdev->config.evergreen.max_pipes = 4;
1487 rdev->config.evergreen.max_tile_pipes = 4;
1488 rdev->config.evergreen.max_simds = 10;
1489 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1490 rdev->config.evergreen.max_gprs = 256;
1491 rdev->config.evergreen.max_threads = 248;
1492 rdev->config.evergreen.max_gs_threads = 32;
1493 rdev->config.evergreen.max_stack_entries = 512;
1494 rdev->config.evergreen.sx_num_of_sets = 4;
1495 rdev->config.evergreen.sx_max_export_size = 256;
1496 rdev->config.evergreen.sx_max_export_pos_size = 64;
1497 rdev->config.evergreen.sx_max_export_smx_size = 192;
1498 rdev->config.evergreen.max_hw_contexts = 8;
1499 rdev->config.evergreen.sq_num_cf_insts = 2;
1500
1501 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1502 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1503 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1504 break;
1505 case CHIP_REDWOOD:
1506 rdev->config.evergreen.num_ses = 1;
1507 rdev->config.evergreen.max_pipes = 4;
1508 rdev->config.evergreen.max_tile_pipes = 4;
1509 rdev->config.evergreen.max_simds = 5;
1510 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1511 rdev->config.evergreen.max_gprs = 256;
1512 rdev->config.evergreen.max_threads = 248;
1513 rdev->config.evergreen.max_gs_threads = 32;
1514 rdev->config.evergreen.max_stack_entries = 256;
1515 rdev->config.evergreen.sx_num_of_sets = 4;
1516 rdev->config.evergreen.sx_max_export_size = 256;
1517 rdev->config.evergreen.sx_max_export_pos_size = 64;
1518 rdev->config.evergreen.sx_max_export_smx_size = 192;
1519 rdev->config.evergreen.max_hw_contexts = 8;
1520 rdev->config.evergreen.sq_num_cf_insts = 2;
1521
1522 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1523 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1524 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1525 break;
1526 case CHIP_CEDAR:
1527 default:
1528 rdev->config.evergreen.num_ses = 1;
1529 rdev->config.evergreen.max_pipes = 2;
1530 rdev->config.evergreen.max_tile_pipes = 2;
1531 rdev->config.evergreen.max_simds = 2;
1532 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1533 rdev->config.evergreen.max_gprs = 256;
1534 rdev->config.evergreen.max_threads = 192;
1535 rdev->config.evergreen.max_gs_threads = 16;
1536 rdev->config.evergreen.max_stack_entries = 256;
1537 rdev->config.evergreen.sx_num_of_sets = 4;
1538 rdev->config.evergreen.sx_max_export_size = 128;
1539 rdev->config.evergreen.sx_max_export_pos_size = 32;
1540 rdev->config.evergreen.sx_max_export_smx_size = 96;
1541 rdev->config.evergreen.max_hw_contexts = 4;
1542 rdev->config.evergreen.sq_num_cf_insts = 1;
1543
1544 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1545 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1546 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1547 break;
1548 }
1549
1550 /* Initialize HDP */
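	/* As on r600/r700, zero 32 register groups at a 0x18 stride starting at
	 * 0x2c14; the registers are programmed by raw address here rather than by
	 * name, presumably per-context HDP state. */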
1551 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1552 WREG32((0x2c14 + j), 0x00000000);
1553 WREG32((0x2c18 + j), 0x00000000);
1554 WREG32((0x2c1c + j), 0x00000000);
1555 WREG32((0x2c20 + j), 0x00000000);
1556 WREG32((0x2c24 + j), 0x00000000);
1557 }
1558
1559 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1560
1561 cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
1562
1563 cc_gc_shader_pipe_config |=
1564 INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
1565 & EVERGREEN_MAX_PIPES_MASK);
1566 cc_gc_shader_pipe_config |=
1567 INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
1568 & EVERGREEN_MAX_SIMDS_MASK);
1569
1570 cc_rb_backend_disable =
1571 BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
1572 & EVERGREEN_MAX_BACKENDS_MASK);
1573
1574
1575 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1576 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
1577
1578 switch (rdev->config.evergreen.max_tile_pipes) {
1579 case 1:
1580 default:
1581 gb_addr_config |= NUM_PIPES(0);
1582 break;
1583 case 2:
1584 gb_addr_config |= NUM_PIPES(1);
1585 break;
1586 case 4:
1587 gb_addr_config |= NUM_PIPES(2);
1588 break;
1589 case 8:
1590 gb_addr_config |= NUM_PIPES(3);
1591 break;
1592 }
1593
1594 gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1595 gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
1596 gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
1597 gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
1598 gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
1599 gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
1600
1601 if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
1602 gb_addr_config |= ROW_SIZE(2);
1603 else
1604 gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
1605
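	/* Certain device IDs (presumably harvested SKUs with some render backends
	 * fused off) derive the backend map from the RCU efuse straps below; all
	 * other boards fall back to a per-family default or a computed map. */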
1606 if (rdev->ddev->pdev->device == 0x689e) {
1607 u32 efuse_straps_4;
1608 u32 efuse_straps_3;
1609 u8 efuse_box_bit_131_124;
1610
1611 WREG32(RCU_IND_INDEX, 0x204);
1612 efuse_straps_4 = RREG32(RCU_IND_DATA);
1613 WREG32(RCU_IND_INDEX, 0x203);
1614 efuse_straps_3 = RREG32(RCU_IND_DATA);
1615 efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
1616
1617 switch(efuse_box_bit_131_124) {
1618 case 0x00:
1619 gb_backend_map = 0x76543210;
1620 break;
1621 case 0x55:
1622 gb_backend_map = 0x77553311;
1623 break;
1624 case 0x56:
1625 gb_backend_map = 0x77553300;
1626 break;
1627 case 0x59:
1628 gb_backend_map = 0x77552211;
1629 break;
1630 case 0x66:
1631 gb_backend_map = 0x77443300;
1632 break;
1633 case 0x99:
1634 gb_backend_map = 0x66552211;
1635 break;
1636 case 0x5a:
1637 gb_backend_map = 0x77552200;
1638 break;
1639 case 0xaa:
1640 gb_backend_map = 0x66442200;
1641 break;
1642 case 0x95:
1643 gb_backend_map = 0x66553311;
1644 break;
1645 default:
1646 DRM_ERROR("bad backend map, using default\n");
1647 gb_backend_map =
1648 evergreen_get_tile_pipe_to_backend_map(rdev,
1649 rdev->config.evergreen.max_tile_pipes,
1650 rdev->config.evergreen.max_backends,
1651 ((EVERGREEN_MAX_BACKENDS_MASK <<
1652 rdev->config.evergreen.max_backends) &
1653 EVERGREEN_MAX_BACKENDS_MASK));
1654 break;
1655 }
1656 } else if (rdev->ddev->pdev->device == 0x68b9) {
1657 u32 efuse_straps_3;
1658 u8 efuse_box_bit_127_124;
1659
1660 WREG32(RCU_IND_INDEX, 0x203);
1661 efuse_straps_3 = RREG32(RCU_IND_DATA);
Alex Deucherd31dba52010-10-11 12:41:32 -04001662 efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001663
1664 switch(efuse_box_bit_127_124) {
1665 case 0x0:
1666 gb_backend_map = 0x00003210;
1667 break;
1668 case 0x5:
1669 case 0x6:
1670 case 0x9:
1671 case 0xa:
1672 gb_backend_map = 0x00003311;
1673 break;
1674 default:
1675 DRM_ERROR("bad backend map, using default\n");
1676 gb_backend_map =
1677 evergreen_get_tile_pipe_to_backend_map(rdev,
1678 rdev->config.evergreen.max_tile_pipes,
1679 rdev->config.evergreen.max_backends,
1680 ((EVERGREEN_MAX_BACKENDS_MASK <<
1681 rdev->config.evergreen.max_backends) &
1682 EVERGREEN_MAX_BACKENDS_MASK));
1683 break;
1684 }
Alex Deucherb741be82010-09-09 19:15:23 -04001685 } else {
1686 switch (rdev->family) {
1687 case CHIP_CYPRESS:
1688 case CHIP_HEMLOCK:
1689 gb_backend_map = 0x66442200;
1690 break;
1691 case CHIP_JUNIPER:
1692 gb_backend_map = 0x00006420;
1693 break;
1694 default:
1695 gb_backend_map =
1696 evergreen_get_tile_pipe_to_backend_map(rdev,
1697 rdev->config.evergreen.max_tile_pipes,
1698 rdev->config.evergreen.max_backends,
1699 ((EVERGREEN_MAX_BACKENDS_MASK <<
1700 rdev->config.evergreen.max_backends) &
1701 EVERGREEN_MAX_BACKENDS_MASK));
1702 }
1703 }
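	/* gb_backend_map appears to be a nibble-per-pipe mapping: each 4-bit field
	 * selects which render backend services the corresponding tile pipe, so
	 * e.g. 0x76543210 is the identity mapping across eight pipes. */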
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001704
Alex Deucher1aa52bd2010-11-17 12:11:03 -05001705 /* setup tiling info dword. gb_addr_config is not adequate since it does
1706 * not have bank info, so create a custom tiling dword.
1707 * bits 3:0 num_pipes
1708 * bits 7:4 num_banks
1709 * bits 11:8 group_size
1710 * bits 15:12 row_size
1711 */
1712 rdev->config.evergreen.tile_config = 0;
1713 switch (rdev->config.evergreen.max_tile_pipes) {
1714 case 1:
1715 default:
1716 rdev->config.evergreen.tile_config |= (0 << 0);
1717 break;
1718 case 2:
1719 rdev->config.evergreen.tile_config |= (1 << 0);
1720 break;
1721 case 4:
1722 rdev->config.evergreen.tile_config |= (2 << 0);
1723 break;
1724 case 8:
1725 rdev->config.evergreen.tile_config |= (3 << 0);
1726 break;
1727 }
1728 rdev->config.evergreen.tile_config |=
1729 ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) << 4;
1730 rdev->config.evergreen.tile_config |=
1731 ((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT) << 8;
1732 rdev->config.evergreen.tile_config |=
1733 ((gb_addr_config & 0x30000000) >> 28) << 12;
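	/* Illustrative example (hypothetical values, not read from hardware):
	 * 8 pipes (encoded 3), NOOFBANK field 1, BURSTLENGTH field 1 and a
	 * gb_addr_config row size of 2 would give tile_config = 0x2113. */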
1734
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001735 WREG32(GB_BACKEND_MAP, gb_backend_map);
1736 WREG32(GB_ADDR_CONFIG, gb_addr_config);
1737 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
1738 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
1739
Alex Deucher9535ab72010-11-22 17:56:18 -05001740 evergreen_program_channel_remap(rdev);
1741
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001742 num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
1743 grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
1744
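	/* Program the backend and SIMD disable masks per shader engine: select
	 * each SE through GRBM_GFX_INDEX/RLC_GFX_INDEX, write its masks, then
	 * restore broadcast writes once all engines are configured. */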
1745 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
1746 u32 rb = cc_rb_backend_disable | (0xf0 << 16);
1747 u32 sp = cc_gc_shader_pipe_config;
1748 u32 gfx = grbm_gfx_index | SE_INDEX(i);
1749
1750 if (i == num_shader_engines) {
1751 rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
1752 sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
1753 }
1754
1755 WREG32(GRBM_GFX_INDEX, gfx);
1756 WREG32(RLC_GFX_INDEX, gfx);
1757
1758 WREG32(CC_RB_BACKEND_DISABLE, rb);
1759 WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
1760 WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
1761 WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
1762 }
1763
1764 grbm_gfx_index |= SE_BROADCAST_WRITES;
1765 WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
1766 WREG32(RLC_GFX_INDEX, grbm_gfx_index);
1767
1768 WREG32(CGTS_SYS_TCC_DISABLE, 0);
1769 WREG32(CGTS_TCC_DISABLE, 0);
1770 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
1771 WREG32(CGTS_USER_TCC_DISABLE, 0);
1772
1773 /* set HW defaults for 3D engine */
1774 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
1775 ROQ_IB2_START(0x2b)));
1776
1777 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
1778
1779 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
1780 SYNC_GRADIENT |
1781 SYNC_WALKER |
1782 SYNC_ALIGNER));
1783
1784 sx_debug_1 = RREG32(SX_DEBUG_1);
1785 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
1786 WREG32(SX_DEBUG_1, sx_debug_1);
1787
1788
1789 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
1790 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
1791 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
1792 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
1793
1794 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
1795 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
1796 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
1797
1798 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
1799 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
1800 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
1801
1802 WREG32(VGT_NUM_INSTANCES, 1);
1803 WREG32(SPI_CONFIG_CNTL, 0);
1804 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
1805 WREG32(CP_PERFMON_CNTL, 0);
1806
1807 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
1808 FETCH_FIFO_HIWATER(0x4) |
1809 DONE_FIFO_HIWATER(0xe0) |
1810 ALU_UPDATE_FIFO_HIWATER(0x8)));
1811
1812 sq_config = RREG32(SQ_CONFIG);
1813 sq_config &= ~(PS_PRIO(3) |
1814 VS_PRIO(3) |
1815 GS_PRIO(3) |
1816 ES_PRIO(3));
1817 sq_config |= (VC_ENABLE |
1818 EXPORT_SRC_C |
1819 PS_PRIO(0) |
1820 VS_PRIO(1) |
1821 GS_PRIO(2) |
1822 ES_PRIO(3));
1823
1824 if (rdev->family == CHIP_CEDAR)
1825 /* no vertex cache */
1826 sq_config &= ~VC_ENABLE;
1827
1828 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
1829
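	/* Split the GPR pool across shader stages: eight GPRs are held back for
	 * clause temporaries (NUM_CLAUSE_TEMP_GPRS(4)), and the remainder is
	 * divided roughly 12:6:4:4:3:3 out of 32 between PS, VS, GS, ES, HS and LS. */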
1830 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
1831 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
1832 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
1833 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1834 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
1835 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1836 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
1837
1838 if (rdev->family == CHIP_CEDAR)
1839 ps_thread_count = 96;
1840 else
1841 ps_thread_count = 128;
1842
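	/* The PS gets a fixed thread budget; the remaining hardware threads are
	 * divided by six and rounded down to a multiple of eight for each of the
	 * VS, GS, ES, HS and LS stages. */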
1843 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
Alex Deucherf96b35c2010-06-16 12:24:07 -04001844 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1845 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1846 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1847 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
1848 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001849
1850 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1851 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1852 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1853 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1854 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1855 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
1856
1857 WREG32(SQ_CONFIG, sq_config);
1858 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
1859 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
1860 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
1861 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
1862 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
1863 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
1864 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
1865 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
1866 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
1867 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
1868
1869 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
1870 FORCE_EOV_MAX_REZ_CNT(255)));
1871
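	/* Cedar has no vertex cache (VC_ENABLE was cleared above), so only the
	 * texture cache is invalidated on it. */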
1872 if (rdev->family == CHIP_CEDAR)
1873 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
1874 else
1875 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
1876 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
1877 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
1878
1879 WREG32(VGT_GS_VERTEX_REUSE, 16);
1880 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
1881
Alex Deucher60a4a3e2010-06-29 17:03:35 -04001882 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
1883 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
1884
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001885 WREG32(CB_PERF_CTR0_SEL_0, 0);
1886 WREG32(CB_PERF_CTR0_SEL_1, 0);
1887 WREG32(CB_PERF_CTR1_SEL_0, 0);
1888 WREG32(CB_PERF_CTR1_SEL_1, 0);
1889 WREG32(CB_PERF_CTR2_SEL_0, 0);
1890 WREG32(CB_PERF_CTR2_SEL_1, 0);
1891 WREG32(CB_PERF_CTR3_SEL_0, 0);
1892 WREG32(CB_PERF_CTR3_SEL_1, 0);
1893
Alex Deucher60a4a3e2010-06-29 17:03:35 -04001894 /* clear render buffer base addresses */
1895 WREG32(CB_COLOR0_BASE, 0);
1896 WREG32(CB_COLOR1_BASE, 0);
1897 WREG32(CB_COLOR2_BASE, 0);
1898 WREG32(CB_COLOR3_BASE, 0);
1899 WREG32(CB_COLOR4_BASE, 0);
1900 WREG32(CB_COLOR5_BASE, 0);
1901 WREG32(CB_COLOR6_BASE, 0);
1902 WREG32(CB_COLOR7_BASE, 0);
1903 WREG32(CB_COLOR8_BASE, 0);
1904 WREG32(CB_COLOR9_BASE, 0);
1905 WREG32(CB_COLOR10_BASE, 0);
1906 WREG32(CB_COLOR11_BASE, 0);
1907
1908 /* set the shader const cache sizes to 0 */
1909 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
1910 WREG32(i, 0);
1911 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
1912 WREG32(i, 0);
1913
Alex Deucher32fcdbf2010-03-24 13:33:47 -04001914 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
1915 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
1916
1917 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
1918
1919 udelay(50);
1920
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001921}
1922
1923int evergreen_mc_init(struct radeon_device *rdev)
1924{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001925 u32 tmp;
1926 int chansize, numchan;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001927
1928 	/* Get VRAM information */
1929 rdev->mc.vram_is_ddr = true;
1930 tmp = RREG32(MC_ARB_RAMCFG);
1931 if (tmp & CHANSIZE_OVERRIDE) {
1932 chansize = 16;
1933 } else if (tmp & CHANSIZE_MASK) {
1934 chansize = 64;
1935 } else {
1936 chansize = 32;
1937 }
1938 tmp = RREG32(MC_SHARED_CHMAP);
1939 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1940 case 0:
1941 default:
1942 numchan = 1;
1943 break;
1944 case 1:
1945 numchan = 2;
1946 break;
1947 case 2:
1948 numchan = 4;
1949 break;
1950 case 3:
1951 numchan = 8;
1952 break;
1953 }
1954 rdev->mc.vram_width = numchan * chansize;
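	/* e.g. four channels of 64-bit width give an effective 256-bit memory bus */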
1955 	/* Could the aperture size report 0? */
Jordan Crouse01d73a62010-05-27 13:40:24 -06001956 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
1957 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001958 /* Setup GPU memory space */
Alex Deucher6eb18f82010-11-22 17:56:27 -05001959 if (rdev->flags & RADEON_IS_IGP) {
1960 /* size in bytes on fusion */
1961 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
1962 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
1963 } else {
1964 /* size in MB on evergreen */
1965 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1966 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
1967 }
Jerome Glisse51e5fcd2010-02-19 14:33:54 +00001968 rdev->mc.visible_vram_size = rdev->mc.aper_size;
Jerome Glissec919b372010-08-10 17:41:31 -04001969 rdev->mc.active_vram_size = rdev->mc.visible_vram_size;
Alex Deucher0ef0c1f2010-11-22 17:56:26 -05001970 r700_vram_gtt_location(rdev, &rdev->mc);
Alex Deucherf47299c2010-03-16 20:54:38 -04001971 radeon_update_bandwidth_info(rdev);
1972
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001973 return 0;
1974}
Jerome Glissed594e462010-02-17 21:54:29 +00001975
Jerome Glisse225758d2010-03-09 14:45:10 +00001976bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
1977{
1978 /* FIXME: implement for evergreen */
1979 return false;
1980}
1981
Alex Deucher747943e2010-03-24 13:26:36 -04001982static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
1983{
1984 struct evergreen_mc_save save;
1985 u32 srbm_reset = 0;
1986 u32 grbm_reset = 0;
1987
1988 	dev_info(rdev->dev, "GPU softreset\n");
1989 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
1990 RREG32(GRBM_STATUS));
1991 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
1992 RREG32(GRBM_STATUS_SE0));
1993 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
1994 RREG32(GRBM_STATUS_SE1));
1995 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
1996 RREG32(SRBM_STATUS));
1997 evergreen_mc_stop(rdev, &save);
1998 if (evergreen_mc_wait_for_idle(rdev)) {
1999 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2000 }
2001 /* Disable CP parsing/prefetching */
2002 WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
2003
2004 /* reset all the gfx blocks */
2005 grbm_reset = (SOFT_RESET_CP |
2006 SOFT_RESET_CB |
2007 SOFT_RESET_DB |
2008 SOFT_RESET_PA |
2009 SOFT_RESET_SC |
2010 SOFT_RESET_SPI |
2011 SOFT_RESET_SH |
2012 SOFT_RESET_SX |
2013 SOFT_RESET_TC |
2014 SOFT_RESET_TA |
2015 SOFT_RESET_VC |
2016 SOFT_RESET_VGT);
2017
2018 dev_info(rdev->dev, " GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
2019 WREG32(GRBM_SOFT_RESET, grbm_reset);
2020 (void)RREG32(GRBM_SOFT_RESET);
2021 udelay(50);
2022 WREG32(GRBM_SOFT_RESET, 0);
2023 (void)RREG32(GRBM_SOFT_RESET);
2024
2025 /* reset all the system blocks */
2026 srbm_reset = SRBM_SOFT_RESET_ALL_MASK;
2027
2028 dev_info(rdev->dev, " SRBM_SOFT_RESET=0x%08X\n", srbm_reset);
2029 WREG32(SRBM_SOFT_RESET, srbm_reset);
2030 (void)RREG32(SRBM_SOFT_RESET);
2031 udelay(50);
2032 WREG32(SRBM_SOFT_RESET, 0);
2033 (void)RREG32(SRBM_SOFT_RESET);
2034 /* Wait a little for things to settle down */
2035 udelay(50);
2036 dev_info(rdev->dev, " GRBM_STATUS=0x%08X\n",
2037 RREG32(GRBM_STATUS));
2038 dev_info(rdev->dev, " GRBM_STATUS_SE0=0x%08X\n",
2039 RREG32(GRBM_STATUS_SE0));
2040 dev_info(rdev->dev, " GRBM_STATUS_SE1=0x%08X\n",
2041 RREG32(GRBM_STATUS_SE1));
2042 dev_info(rdev->dev, " SRBM_STATUS=0x%08X\n",
2043 RREG32(SRBM_STATUS));
2044 	/* After reset we need to reinit the asic as the GPU often ends up in an
2045 * incoherent state.
2046 */
2047 atom_asic_init(rdev->mode_info.atom_context);
2048 evergreen_mc_resume(rdev, &save);
2049 return 0;
2050}
2051
Jerome Glissea2d07b72010-03-09 14:45:11 +00002052int evergreen_asic_reset(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002053{
Alex Deucher747943e2010-03-24 13:26:36 -04002054 return evergreen_gpu_soft_reset(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002055}
2056
Alex Deucher45f9a392010-03-24 13:55:51 -04002057/* Interrupts */
2058
2059u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2060{
2061 switch (crtc) {
2062 case 0:
2063 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
2064 case 1:
2065 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
2066 case 2:
2067 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
2068 case 3:
2069 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
2070 case 4:
2071 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
2072 case 5:
2073 return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
2074 default:
2075 return 0;
2076 }
2077}
2078
2079void evergreen_disable_interrupt_state(struct radeon_device *rdev)
2080{
2081 u32 tmp;
2082
Alex Deucher3555e532010-10-08 12:09:12 -04002083 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
Alex Deucher45f9a392010-03-24 13:55:51 -04002084 WREG32(GRBM_INT_CNTL, 0);
2085 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2086 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
Alex Deucher18007402010-11-22 17:56:28 -05002087 if (!(rdev->flags & RADEON_IS_IGP)) {
2088 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2089 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2090 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2091 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2092 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002093
2094 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2095 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
Alex Deucher18007402010-11-22 17:56:28 -05002096 if (!(rdev->flags & RADEON_IS_IGP)) {
2097 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2098 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2099 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2100 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2101 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002102
2103 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2104 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2105
2106 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2107 WREG32(DC_HPD1_INT_CONTROL, tmp);
2108 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2109 WREG32(DC_HPD2_INT_CONTROL, tmp);
2110 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2111 WREG32(DC_HPD3_INT_CONTROL, tmp);
2112 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2113 WREG32(DC_HPD4_INT_CONTROL, tmp);
2114 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2115 WREG32(DC_HPD5_INT_CONTROL, tmp);
2116 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2117 WREG32(DC_HPD6_INT_CONTROL, tmp);
2118
2119}
2120
2121int evergreen_irq_set(struct radeon_device *rdev)
2122{
2123 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2124 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2125 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
Alex Deucher2031f772010-04-22 12:52:11 -04002126 u32 grbm_int_cntl = 0;
Alex Deucher45f9a392010-03-24 13:55:51 -04002127
2128 if (!rdev->irq.installed) {
Joe Perchesfce7d612010-10-30 21:08:30 +00002129 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
Alex Deucher45f9a392010-03-24 13:55:51 -04002130 return -EINVAL;
2131 }
2132 /* don't enable anything if the ih is disabled */
2133 if (!rdev->ih.enabled) {
2134 r600_disable_interrupts(rdev);
2135 /* force the active interrupt state to all disabled */
2136 evergreen_disable_interrupt_state(rdev);
2137 return 0;
2138 }
2139
2140 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2141 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2142 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2143 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2144 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2145 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2146
2147 if (rdev->irq.sw_int) {
2148 DRM_DEBUG("evergreen_irq_set: sw int\n");
2149 cp_int_cntl |= RB_INT_ENABLE;
Alex Deucherd0f8a852010-09-04 05:04:34 -04002150 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
Alex Deucher45f9a392010-03-24 13:55:51 -04002151 }
2152 if (rdev->irq.crtc_vblank_int[0]) {
2153 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2154 crtc1 |= VBLANK_INT_MASK;
2155 }
2156 if (rdev->irq.crtc_vblank_int[1]) {
2157 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2158 crtc2 |= VBLANK_INT_MASK;
2159 }
2160 if (rdev->irq.crtc_vblank_int[2]) {
2161 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2162 crtc3 |= VBLANK_INT_MASK;
2163 }
2164 if (rdev->irq.crtc_vblank_int[3]) {
2165 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2166 crtc4 |= VBLANK_INT_MASK;
2167 }
2168 if (rdev->irq.crtc_vblank_int[4]) {
2169 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2170 crtc5 |= VBLANK_INT_MASK;
2171 }
2172 if (rdev->irq.crtc_vblank_int[5]) {
2173 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2174 crtc6 |= VBLANK_INT_MASK;
2175 }
2176 if (rdev->irq.hpd[0]) {
2177 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
2178 hpd1 |= DC_HPDx_INT_EN;
2179 }
2180 if (rdev->irq.hpd[1]) {
2181 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
2182 hpd2 |= DC_HPDx_INT_EN;
2183 }
2184 if (rdev->irq.hpd[2]) {
2185 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
2186 hpd3 |= DC_HPDx_INT_EN;
2187 }
2188 if (rdev->irq.hpd[3]) {
2189 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
2190 hpd4 |= DC_HPDx_INT_EN;
2191 }
2192 if (rdev->irq.hpd[4]) {
2193 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
2194 hpd5 |= DC_HPDx_INT_EN;
2195 }
2196 if (rdev->irq.hpd[5]) {
2197 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2198 hpd6 |= DC_HPDx_INT_EN;
2199 }
Alex Deucher2031f772010-04-22 12:52:11 -04002200 if (rdev->irq.gui_idle) {
2201 DRM_DEBUG("gui idle\n");
2202 grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
2203 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002204
2205 WREG32(CP_INT_CNTL, cp_int_cntl);
Alex Deucher2031f772010-04-22 12:52:11 -04002206 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
Alex Deucher45f9a392010-03-24 13:55:51 -04002207
2208 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
2209 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
Alex Deucher18007402010-11-22 17:56:28 -05002210 if (!(rdev->flags & RADEON_IS_IGP)) {
2211 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
2212 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
2213 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
2214 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
2215 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002216
2217 WREG32(DC_HPD1_INT_CONTROL, hpd1);
2218 WREG32(DC_HPD2_INT_CONTROL, hpd2);
2219 WREG32(DC_HPD3_INT_CONTROL, hpd3);
2220 WREG32(DC_HPD4_INT_CONTROL, hpd4);
2221 WREG32(DC_HPD5_INT_CONTROL, hpd5);
2222 WREG32(DC_HPD6_INT_CONTROL, hpd6);
2223
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002224 return 0;
2225}
2226
Alex Deucher45f9a392010-03-24 13:55:51 -04002227static inline void evergreen_irq_ack(struct radeon_device *rdev,
2228 u32 *disp_int,
2229 u32 *disp_int_cont,
2230 u32 *disp_int_cont2,
2231 u32 *disp_int_cont3,
2232 u32 *disp_int_cont4,
2233 u32 *disp_int_cont5)
2234{
2235 u32 tmp;
2236
2237 *disp_int = RREG32(DISP_INTERRUPT_STATUS);
2238 *disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2239 *disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2240 *disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2241 *disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2242 *disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2243
2244 if (*disp_int & LB_D1_VBLANK_INTERRUPT)
2245 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
2246 if (*disp_int & LB_D1_VLINE_INTERRUPT)
2247 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
2248
2249 if (*disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2250 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
2251 if (*disp_int_cont & LB_D2_VLINE_INTERRUPT)
2252 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
2253
2254 if (*disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2255 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
2256 if (*disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2257 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
2258
2259 if (*disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2260 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
2261 if (*disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2262 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
2263
2264 if (*disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2265 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
2266 if (*disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2267 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
2268
2269 if (*disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2270 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
2271 if (*disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2272 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
2273
2274 if (*disp_int & DC_HPD1_INTERRUPT) {
2275 tmp = RREG32(DC_HPD1_INT_CONTROL);
2276 tmp |= DC_HPDx_INT_ACK;
2277 WREG32(DC_HPD1_INT_CONTROL, tmp);
2278 }
2279 if (*disp_int_cont & DC_HPD2_INTERRUPT) {
2280 tmp = RREG32(DC_HPD2_INT_CONTROL);
2281 tmp |= DC_HPDx_INT_ACK;
2282 WREG32(DC_HPD2_INT_CONTROL, tmp);
2283 }
2284 if (*disp_int_cont2 & DC_HPD3_INTERRUPT) {
2285 tmp = RREG32(DC_HPD3_INT_CONTROL);
2286 tmp |= DC_HPDx_INT_ACK;
2287 WREG32(DC_HPD3_INT_CONTROL, tmp);
2288 }
2289 if (*disp_int_cont3 & DC_HPD4_INTERRUPT) {
2290 tmp = RREG32(DC_HPD4_INT_CONTROL);
2291 tmp |= DC_HPDx_INT_ACK;
2292 WREG32(DC_HPD4_INT_CONTROL, tmp);
2293 }
2294 if (*disp_int_cont4 & DC_HPD5_INTERRUPT) {
2295 tmp = RREG32(DC_HPD5_INT_CONTROL);
2296 tmp |= DC_HPDx_INT_ACK;
2297 WREG32(DC_HPD5_INT_CONTROL, tmp);
2298 }
2299 if (*disp_int_cont5 & DC_HPD6_INTERRUPT) {
2300 		tmp = RREG32(DC_HPD6_INT_CONTROL);
2301 tmp |= DC_HPDx_INT_ACK;
2302 WREG32(DC_HPD6_INT_CONTROL, tmp);
2303 }
2304}
2305
2306void evergreen_irq_disable(struct radeon_device *rdev)
2307{
2308 u32 disp_int, disp_int_cont, disp_int_cont2;
2309 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2310
2311 r600_disable_interrupts(rdev);
2312 /* Wait and acknowledge irq */
2313 mdelay(1);
2314 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
2315 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2316 evergreen_disable_interrupt_state(rdev);
2317}
2318
2319static void evergreen_irq_suspend(struct radeon_device *rdev)
2320{
2321 evergreen_irq_disable(rdev);
2322 r600_rlc_stop(rdev);
2323}
2324
2325static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2326{
2327 u32 wptr, tmp;
2328
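	/* With writeback enabled the IH write pointer is snooped from the WB
	 * buffer in system memory instead of paying for an MMIO read. */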
Alex Deucher724c80e2010-08-27 18:25:25 -04002329 if (rdev->wb.enabled)
2330 wptr = rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4];
2331 else
2332 wptr = RREG32(IH_RB_WPTR);
Alex Deucher45f9a392010-03-24 13:55:51 -04002333
2334 if (wptr & RB_OVERFLOW) {
2335 		/* When a ring buffer overflow happens, start parsing interrupts
2336 		 * from the last vector that was not overwritten (wptr + 16). Hopefully
2337 		 * this should allow us to catch up.
2338 */
2339 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
2340 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
2341 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
2342 tmp = RREG32(IH_RB_CNTL);
2343 tmp |= IH_WPTR_OVERFLOW_CLEAR;
2344 WREG32(IH_RB_CNTL, tmp);
2345 }
2346 return (wptr & rdev->ih.ptr_mask);
2347}
2348
2349int evergreen_irq_process(struct radeon_device *rdev)
2350{
2351 u32 wptr = evergreen_get_ih_wptr(rdev);
2352 u32 rptr = rdev->ih.rptr;
2353 u32 src_id, src_data;
2354 u32 ring_index;
2355 u32 disp_int, disp_int_cont, disp_int_cont2;
2356 u32 disp_int_cont3, disp_int_cont4, disp_int_cont5;
2357 unsigned long flags;
2358 bool queue_hotplug = false;
2359
2360 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
2361 if (!rdev->ih.enabled)
2362 return IRQ_NONE;
2363
2364 spin_lock_irqsave(&rdev->ih.lock, flags);
2365
2366 if (rptr == wptr) {
2367 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2368 return IRQ_NONE;
2369 }
2370 if (rdev->shutdown) {
2371 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2372 return IRQ_NONE;
2373 }
2374
2375restart_ih:
2376 /* display interrupts */
2377 evergreen_irq_ack(rdev, &disp_int, &disp_int_cont, &disp_int_cont2,
2378 &disp_int_cont3, &disp_int_cont4, &disp_int_cont5);
2379
2380 rdev->ih.wptr = wptr;
2381 while (rptr != wptr) {
2382 /* wptr/rptr are in bytes! */
2383 ring_index = rptr / 4;
2384 src_id = rdev->ih.ring[ring_index] & 0xff;
2385 src_data = rdev->ih.ring[ring_index + 1] & 0xfffffff;
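		/* each IH vector is 16 bytes (4 dwords): dword 0 carries the source
		 * id and dword 1 the source data, hence the rptr += 16 below */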
2386
2387 switch (src_id) {
2388 case 1: /* D1 vblank/vline */
2389 switch (src_data) {
2390 case 0: /* D1 vblank */
2391 if (disp_int & LB_D1_VBLANK_INTERRUPT) {
2392 drm_handle_vblank(rdev->ddev, 0);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002393 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002394 wake_up(&rdev->irq.vblank_queue);
2395 disp_int &= ~LB_D1_VBLANK_INTERRUPT;
2396 DRM_DEBUG("IH: D1 vblank\n");
2397 }
2398 break;
2399 case 1: /* D1 vline */
2400 if (disp_int & LB_D1_VLINE_INTERRUPT) {
2401 disp_int &= ~LB_D1_VLINE_INTERRUPT;
2402 DRM_DEBUG("IH: D1 vline\n");
2403 }
2404 break;
2405 default:
2406 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2407 break;
2408 }
2409 break;
2410 case 2: /* D2 vblank/vline */
2411 switch (src_data) {
2412 case 0: /* D2 vblank */
2413 if (disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
2414 drm_handle_vblank(rdev->ddev, 1);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002415 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002416 wake_up(&rdev->irq.vblank_queue);
2417 disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
2418 DRM_DEBUG("IH: D2 vblank\n");
2419 }
2420 break;
2421 case 1: /* D2 vline */
2422 if (disp_int_cont & LB_D2_VLINE_INTERRUPT) {
2423 disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
2424 DRM_DEBUG("IH: D2 vline\n");
2425 }
2426 break;
2427 default:
2428 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2429 break;
2430 }
2431 break;
2432 case 3: /* D3 vblank/vline */
2433 switch (src_data) {
2434 case 0: /* D3 vblank */
2435 if (disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
2436 drm_handle_vblank(rdev->ddev, 2);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002437 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002438 wake_up(&rdev->irq.vblank_queue);
2439 disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
2440 DRM_DEBUG("IH: D3 vblank\n");
2441 }
2442 break;
2443 case 1: /* D3 vline */
2444 if (disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
2445 disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
2446 DRM_DEBUG("IH: D3 vline\n");
2447 }
2448 break;
2449 default:
2450 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2451 break;
2452 }
2453 break;
2454 case 4: /* D4 vblank/vline */
2455 switch (src_data) {
2456 case 0: /* D4 vblank */
2457 if (disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
2458 drm_handle_vblank(rdev->ddev, 3);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002459 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002460 wake_up(&rdev->irq.vblank_queue);
2461 disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
2462 DRM_DEBUG("IH: D4 vblank\n");
2463 }
2464 break;
2465 case 1: /* D4 vline */
2466 if (disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
2467 disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
2468 DRM_DEBUG("IH: D4 vline\n");
2469 }
2470 break;
2471 default:
2472 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2473 break;
2474 }
2475 break;
2476 case 5: /* D5 vblank/vline */
2477 switch (src_data) {
2478 case 0: /* D5 vblank */
2479 if (disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
2480 drm_handle_vblank(rdev->ddev, 4);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002481 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002482 wake_up(&rdev->irq.vblank_queue);
2483 disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
2484 DRM_DEBUG("IH: D5 vblank\n");
2485 }
2486 break;
2487 case 1: /* D5 vline */
2488 if (disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
2489 disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
2490 DRM_DEBUG("IH: D5 vline\n");
2491 }
2492 break;
2493 default:
2494 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2495 break;
2496 }
2497 break;
2498 case 6: /* D6 vblank/vline */
2499 switch (src_data) {
2500 case 0: /* D6 vblank */
2501 if (disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
2502 drm_handle_vblank(rdev->ddev, 5);
Alex Deucherf5d8e0e2010-10-28 19:00:24 -04002503 rdev->pm.vblank_sync = true;
Alex Deucher45f9a392010-03-24 13:55:51 -04002504 wake_up(&rdev->irq.vblank_queue);
2505 disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
2506 DRM_DEBUG("IH: D6 vblank\n");
2507 }
2508 break;
2509 case 1: /* D6 vline */
2510 if (disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
2511 disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
2512 DRM_DEBUG("IH: D6 vline\n");
2513 }
2514 break;
2515 default:
2516 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2517 break;
2518 }
2519 break;
2520 case 42: /* HPD hotplug */
2521 switch (src_data) {
2522 case 0:
2523 if (disp_int & DC_HPD1_INTERRUPT) {
2524 disp_int &= ~DC_HPD1_INTERRUPT;
2525 queue_hotplug = true;
2526 DRM_DEBUG("IH: HPD1\n");
2527 }
2528 break;
2529 case 1:
2530 if (disp_int_cont & DC_HPD2_INTERRUPT) {
2531 disp_int_cont &= ~DC_HPD2_INTERRUPT;
2532 queue_hotplug = true;
2533 DRM_DEBUG("IH: HPD2\n");
2534 }
2535 break;
2536 case 2:
2537 if (disp_int_cont2 & DC_HPD3_INTERRUPT) {
2538 disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
2539 queue_hotplug = true;
2540 DRM_DEBUG("IH: HPD3\n");
2541 }
2542 break;
2543 case 3:
2544 if (disp_int_cont3 & DC_HPD4_INTERRUPT) {
2545 disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
2546 queue_hotplug = true;
2547 DRM_DEBUG("IH: HPD4\n");
2548 }
2549 break;
2550 case 4:
2551 if (disp_int_cont4 & DC_HPD5_INTERRUPT) {
2552 disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
2553 queue_hotplug = true;
2554 DRM_DEBUG("IH: HPD5\n");
2555 }
2556 break;
2557 case 5:
2558 if (disp_int_cont5 & DC_HPD6_INTERRUPT) {
2559 disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
2560 queue_hotplug = true;
2561 DRM_DEBUG("IH: HPD6\n");
2562 }
2563 break;
2564 default:
2565 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2566 break;
2567 }
2568 break;
2569 case 176: /* CP_INT in ring buffer */
2570 case 177: /* CP_INT in IB1 */
2571 case 178: /* CP_INT in IB2 */
2572 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2573 radeon_fence_process(rdev);
2574 break;
2575 case 181: /* CP EOP event */
2576 DRM_DEBUG("IH: CP EOP\n");
Alex Deucherd0f8a852010-09-04 05:04:34 -04002577 radeon_fence_process(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04002578 break;
Alex Deucher2031f772010-04-22 12:52:11 -04002579 case 233: /* GUI IDLE */
2580 DRM_DEBUG("IH: CP EOP\n");
2581 rdev->pm.gui_idle = true;
2582 wake_up(&rdev->irq.idle_queue);
2583 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04002584 default:
2585 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2586 break;
2587 }
2588
2589 /* wptr/rptr are in bytes! */
2590 rptr += 16;
2591 rptr &= rdev->ih.ptr_mask;
2592 }
2593 /* make sure wptr hasn't changed while processing */
2594 wptr = evergreen_get_ih_wptr(rdev);
2595 if (wptr != rdev->ih.wptr)
2596 goto restart_ih;
2597 if (queue_hotplug)
2598 queue_work(rdev->wq, &rdev->hotplug_work);
2599 rdev->ih.rptr = rptr;
2600 WREG32(IH_RB_RPTR, rdev->ih.rptr);
2601 spin_unlock_irqrestore(&rdev->ih.lock, flags);
2602 return IRQ_HANDLED;
2603}
2604
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002605static int evergreen_startup(struct radeon_device *rdev)
2606{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002607 int r;
2608
2609 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
2610 r = r600_init_microcode(rdev);
2611 if (r) {
2612 DRM_ERROR("Failed to load firmware!\n");
2613 return r;
2614 }
2615 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002616
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002617 evergreen_mc_program(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002618 if (rdev->flags & RADEON_IS_AGP) {
Alex Deucher0fcdb612010-03-24 13:20:41 -04002619 evergreen_agp_enable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002620 } else {
2621 r = evergreen_pcie_gart_enable(rdev);
2622 if (r)
2623 return r;
2624 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002625 evergreen_gpu_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002626
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002627 r = evergreen_blit_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002628 if (r) {
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002629 evergreen_blit_fini(rdev);
2630 rdev->asic->copy = NULL;
2631 		dev_warn(rdev->dev, "failed blitter (%d), falling back to memcpy\n", r);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002632 }
2633
Alex Deucher724c80e2010-08-27 18:25:25 -04002634 /* allocate wb buffer */
2635 r = radeon_wb_init(rdev);
2636 if (r)
2637 return r;
2638
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002639 /* Enable IRQ */
2640 r = r600_irq_init(rdev);
2641 if (r) {
2642 DRM_ERROR("radeon: IH init failed (%d).\n", r);
2643 radeon_irq_kms_fini(rdev);
2644 return r;
2645 }
Alex Deucher45f9a392010-03-24 13:55:51 -04002646 evergreen_irq_set(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002647
2648 r = radeon_ring_init(rdev, rdev->cp.ring_size);
2649 if (r)
2650 return r;
2651 r = evergreen_cp_load_microcode(rdev);
2652 if (r)
2653 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04002654 r = evergreen_cp_resume(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002655 if (r)
2656 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04002657
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002658 return 0;
2659}
2660
2661int evergreen_resume(struct radeon_device *rdev)
2662{
2663 int r;
2664
2665 	/* Do not reset the GPU before posting; on rv770 hw, unlike on r500 hw,
2666 	 * posting will perform the necessary tasks to bring the GPU back into
2667 	 * good shape.
2668 */
2669 /* post card */
2670 atom_asic_init(rdev->mode_info.atom_context);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002671
2672 r = evergreen_startup(rdev);
2673 if (r) {
2674 		DRM_ERROR("evergreen startup failed on resume\n");
2675 return r;
2676 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002677
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002678 r = r600_ib_test(rdev);
2679 if (r) {
2680 DRM_ERROR("radeon: failled testing IB (%d).\n", r);
2681 return r;
2682 }
Alex Deucherfe251e22010-03-24 13:36:43 -04002683
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002684 return r;
2685
2686}
2687
2688int evergreen_suspend(struct radeon_device *rdev)
2689{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002690 int r;
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002691
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002692 /* FIXME: we should wait for ring to be empty */
2693 r700_cp_stop(rdev);
2694 rdev->cp.ready = false;
Alex Deucher45f9a392010-03-24 13:55:51 -04002695 evergreen_irq_suspend(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002696 radeon_wb_disable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002697 evergreen_pcie_gart_disable(rdev);
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002698
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002699 /* unpin shaders bo */
2700 r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
2701 if (likely(r == 0)) {
2702 radeon_bo_unpin(rdev->r600_blit.shader_obj);
2703 radeon_bo_unreserve(rdev->r600_blit.shader_obj);
2704 }
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002705
2706 return 0;
2707}
2708
2709int evergreen_copy_blit(struct radeon_device *rdev,
2710 uint64_t src_offset, uint64_t dst_offset,
2711 unsigned num_pages, struct radeon_fence *fence)
2712{
2713 int r;
2714
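	/* num_pages is a count of GPU pages; the blit helpers take byte sizes,
	 * hence the RADEON_GPU_PAGE_SIZE scaling below. */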
2715 mutex_lock(&rdev->r600_blit.mutex);
2716 rdev->r600_blit.vb_ib = NULL;
2717 r = evergreen_blit_prepare_copy(rdev, num_pages * RADEON_GPU_PAGE_SIZE);
2718 if (r) {
2719 if (rdev->r600_blit.vb_ib)
2720 radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
2721 mutex_unlock(&rdev->r600_blit.mutex);
2722 return r;
2723 }
2724 evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_pages * RADEON_GPU_PAGE_SIZE);
2725 evergreen_blit_done_copy(rdev, fence);
2726 mutex_unlock(&rdev->r600_blit.mutex);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002727 return 0;
2728}
2729
2730static bool evergreen_card_posted(struct radeon_device *rdev)
2731{
2732 u32 reg;
2733
2734 /* first check CRTCs */
Alex Deucher18007402010-11-22 17:56:28 -05002735 if (rdev->flags & RADEON_IS_IGP)
2736 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2737 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
2738 else
2739 reg = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) |
2740 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) |
2741 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) |
2742 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) |
2743 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) |
2744 RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002745 if (reg & EVERGREEN_CRTC_MASTER_EN)
2746 return true;
2747
2748 /* then check MEM_SIZE, in case the crtcs are off */
2749 if (RREG32(CONFIG_MEMSIZE))
2750 return true;
2751
2752 return false;
2753}
2754
2755/* Plan is to move initialization in that function and use
2756 * helper function so that radeon_device_init pretty much
2757 * do nothing more than calling asic specific function. This
2758 * should also allow to remove a bunch of callback function
2759 * like vram_info.
2760 */
2761int evergreen_init(struct radeon_device *rdev)
2762{
2763 int r;
2764
2765 r = radeon_dummy_page_init(rdev);
2766 if (r)
2767 return r;
2768 /* This don't do much */
2769 r = radeon_gem_init(rdev);
2770 if (r)
2771 return r;
2772 /* Read BIOS */
2773 if (!radeon_get_bios(rdev)) {
2774 if (ASIC_IS_AVIVO(rdev))
2775 return -EINVAL;
2776 }
2777 /* Must be an ATOMBIOS */
2778 if (!rdev->is_atom_bios) {
2779 		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
2780 return -EINVAL;
2781 }
2782 r = radeon_atombios_init(rdev);
2783 if (r)
2784 return r;
2785 /* Post card if necessary */
2786 if (!evergreen_card_posted(rdev)) {
2787 if (!rdev->bios) {
2788 dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
2789 return -EINVAL;
2790 }
2791 DRM_INFO("GPU not posted. posting now...\n");
2792 atom_asic_init(rdev->mode_info.atom_context);
2793 }
2794 /* Initialize scratch registers */
2795 r600_scratch_init(rdev);
2796 /* Initialize surface registers */
2797 radeon_surface_init(rdev);
2798 /* Initialize clocks */
2799 radeon_get_clock_info(rdev->ddev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002800 /* Fence driver */
2801 r = radeon_fence_driver_init(rdev);
2802 if (r)
2803 return r;
Jerome Glissed594e462010-02-17 21:54:29 +00002804 /* initialize AGP */
2805 if (rdev->flags & RADEON_IS_AGP) {
2806 r = radeon_agp_init(rdev);
2807 if (r)
2808 radeon_agp_disable(rdev);
2809 }
2810 /* initialize memory controller */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002811 r = evergreen_mc_init(rdev);
2812 if (r)
2813 return r;
2814 /* Memory manager */
2815 r = radeon_bo_init(rdev);
2816 if (r)
2817 return r;
Alex Deucher45f9a392010-03-24 13:55:51 -04002818
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002819 r = radeon_irq_kms_init(rdev);
2820 if (r)
2821 return r;
2822
2823 rdev->cp.ring_obj = NULL;
2824 r600_ring_init(rdev, 1024 * 1024);
2825
2826 rdev->ih.ring_obj = NULL;
2827 r600_ih_ring_init(rdev, 64 * 1024);
2828
2829 r = r600_pcie_gart_init(rdev);
2830 if (r)
2831 return r;
Alex Deucher0fcdb612010-03-24 13:20:41 -04002832
Alex Deucher148a03b2010-06-03 19:00:03 -04002833 rdev->accel_working = true;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002834 r = evergreen_startup(rdev);
2835 if (r) {
Alex Deucherfe251e22010-03-24 13:36:43 -04002836 dev_err(rdev->dev, "disabling GPU acceleration\n");
2837 r700_cp_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002838 r600_irq_fini(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002839 radeon_wb_fini(rdev);
Alex Deucherfe251e22010-03-24 13:36:43 -04002840 radeon_irq_kms_fini(rdev);
Alex Deucher0fcdb612010-03-24 13:20:41 -04002841 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002842 rdev->accel_working = false;
2843 }
2844 if (rdev->accel_working) {
2845 r = radeon_ib_pool_init(rdev);
2846 if (r) {
2847 DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
2848 rdev->accel_working = false;
2849 }
2850 r = r600_ib_test(rdev);
2851 if (r) {
2852 DRM_ERROR("radeon: failed testing IB (%d).\n", r);
2853 rdev->accel_working = false;
2854 }
2855 }
2856 return 0;
2857}
2858
2859void evergreen_fini(struct radeon_device *rdev)
2860{
Alex Deucherd7ccd8f2010-09-09 11:33:36 -04002861 evergreen_blit_fini(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04002862 r700_cp_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002863 r600_irq_fini(rdev);
Alex Deucher724c80e2010-08-27 18:25:25 -04002864 radeon_wb_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002865 radeon_irq_kms_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002866 evergreen_pcie_gart_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002867 radeon_gem_fini(rdev);
2868 radeon_fence_driver_fini(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002869 radeon_agp_fini(rdev);
2870 radeon_bo_fini(rdev);
2871 radeon_atombios_fini(rdev);
2872 kfree(rdev->bios);
2873 rdev->bios = NULL;
2874 radeon_dummy_page_fini(rdev);
2875}