/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
24#include <linux/firmware.h>
25#include <linux/platform_device.h>
Tejun Heo5a0e3ad2010-03-24 17:04:11 +090026#include <linux/slab.h>
David Howells760285e2012-10-02 18:01:07 +010027#include <drm/drmP.h>
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050028#include "radeon.h"
Daniel Vettere6990372010-03-11 21:19:17 +000029#include "radeon_asic.h"
David Howells760285e2012-10-02 18:01:07 +010030#include <drm/radeon_drm.h>
Alex Deucher0fcdb612010-03-24 13:20:41 -040031#include "evergreend.h"
Alex Deucherbcc1c2a2010-01-12 17:54:34 -050032#include "atom.h"
33#include "avivod.h"
34#include "evergreen_reg.h"
Alex Deucher2281a372010-10-21 13:31:38 -040035#include "evergreen_blit_shaders.h"
Alex Deucher138e4e12013-01-11 15:33:13 -050036#include "radeon_ucode.h"
Alex Deucherfe251e22010-03-24 13:36:43 -040037
Alex Deucher4a159032012-08-15 17:13:53 -040038static const u32 crtc_offsets[6] =
39{
40 EVERGREEN_CRTC0_REGISTER_OFFSET,
41 EVERGREEN_CRTC1_REGISTER_OFFSET,
42 EVERGREEN_CRTC2_REGISTER_OFFSET,
43 EVERGREEN_CRTC3_REGISTER_OFFSET,
44 EVERGREEN_CRTC4_REGISTER_OFFSET,
45 EVERGREEN_CRTC5_REGISTER_OFFSET
46};
47
Alex Deucher2948f5e2013-04-12 13:52:52 -040048#include "clearstate_evergreen.h"
49
Alex Deucher1fd11772013-04-17 17:53:50 -040050static const u32 sumo_rlc_save_restore_register_list[] =
Alex Deucher2948f5e2013-04-12 13:52:52 -040051{
52 0x98fc,
53 0x9830,
54 0x9834,
55 0x9838,
56 0x9870,
57 0x9874,
58 0x8a14,
59 0x8b24,
60 0x8bcc,
61 0x8b10,
62 0x8d00,
63 0x8d04,
64 0x8c00,
65 0x8c04,
66 0x8c08,
67 0x8c0c,
68 0x8d8c,
69 0x8c20,
70 0x8c24,
71 0x8c28,
72 0x8c18,
73 0x8c1c,
74 0x8cf0,
75 0x8e2c,
76 0x8e38,
77 0x8c30,
78 0x9508,
79 0x9688,
80 0x9608,
81 0x960c,
82 0x9610,
83 0x9614,
84 0x88c4,
85 0x88d4,
86 0xa008,
87 0x900c,
88 0x9100,
89 0x913c,
90 0x98f8,
91 0x98f4,
92 0x9b7c,
93 0x3f8c,
94 0x8950,
95 0x8954,
96 0x8a18,
97 0x8b28,
98 0x9144,
99 0x9148,
100 0x914c,
101 0x3f90,
102 0x3f94,
103 0x915c,
104 0x9160,
105 0x9178,
106 0x917c,
107 0x9180,
108 0x918c,
109 0x9190,
110 0x9194,
111 0x9198,
112 0x919c,
113 0x91a8,
114 0x91ac,
115 0x91b0,
116 0x91b4,
117 0x91b8,
118 0x91c4,
119 0x91c8,
120 0x91cc,
121 0x91d0,
122 0x91d4,
123 0x91e0,
124 0x91e4,
125 0x91ec,
126 0x91f0,
127 0x91f4,
128 0x9200,
129 0x9204,
130 0x929c,
131 0x9150,
132 0x802c,
133};
Alex Deucher2948f5e2013-04-12 13:52:52 -0400134
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500135static void evergreen_gpu_init(struct radeon_device *rdev);
136void evergreen_fini(struct radeon_device *rdev);
Ilija Hadzicb07759b2011-09-20 10:22:58 -0400137void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
Alex Deucherf52382d2013-02-15 11:02:50 -0500138void evergreen_program_aspm(struct radeon_device *rdev);
Alex Deucher1b370782011-11-17 20:13:28 -0500139extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140 int ring, u32 cp_int_cntl);
Alex Deucher54e2e492013-06-13 18:26:25 -0400141extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142 u32 status, u32 addr);
Alex Deucher22c775c2013-07-23 09:41:05 -0400143void cik_init_cp_pg_table(struct radeon_device *rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -0500144
Alex Deucher59a82d02013-08-13 12:48:06 -0400145extern u32 si_get_csb_size(struct radeon_device *rdev);
146extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
Alex Deuchera0f38602013-08-22 11:57:46 -0400147extern u32 cik_get_csb_size(struct radeon_device *rdev);
148extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
Alex Deucher59a82d02013-08-13 12:48:06 -0400149
Alex Deucherd4788db2013-02-28 14:40:09 -0500150static const u32 evergreen_golden_registers[] =
151{
152 0x3f90, 0xffff0000, 0xff000000,
153 0x9148, 0xffff0000, 0xff000000,
154 0x3f94, 0xffff0000, 0xff000000,
155 0x914c, 0xffff0000, 0xff000000,
156 0x9b7c, 0xffffffff, 0x00000000,
157 0x8a14, 0xffffffff, 0x00000007,
158 0x8b10, 0xffffffff, 0x00000000,
159 0x960c, 0xffffffff, 0x54763210,
160 0x88c4, 0xffffffff, 0x000000c2,
161 0x88d4, 0xffffffff, 0x00000010,
162 0x8974, 0xffffffff, 0x00000000,
163 0xc78, 0x00000080, 0x00000080,
164 0x5eb4, 0xffffffff, 0x00000002,
165 0x5e78, 0xffffffff, 0x001000f0,
166 0x6104, 0x01000300, 0x00000000,
167 0x5bc0, 0x00300000, 0x00000000,
168 0x7030, 0xffffffff, 0x00000011,
169 0x7c30, 0xffffffff, 0x00000011,
170 0x10830, 0xffffffff, 0x00000011,
171 0x11430, 0xffffffff, 0x00000011,
172 0x12030, 0xffffffff, 0x00000011,
173 0x12c30, 0xffffffff, 0x00000011,
174 0xd02c, 0xffffffff, 0x08421000,
175 0x240c, 0xffffffff, 0x00000380,
176 0x8b24, 0xffffffff, 0x00ff0fff,
177 0x28a4c, 0x06000000, 0x06000000,
178 0x10c, 0x00000001, 0x00000001,
179 0x8d00, 0xffffffff, 0x100e4848,
180 0x8d04, 0xffffffff, 0x00164745,
181 0x8c00, 0xffffffff, 0xe4000003,
182 0x8c04, 0xffffffff, 0x40600060,
183 0x8c08, 0xffffffff, 0x001c001c,
184 0x8cf0, 0xffffffff, 0x08e00620,
185 0x8c20, 0xffffffff, 0x00800080,
186 0x8c24, 0xffffffff, 0x00800080,
187 0x8c18, 0xffffffff, 0x20202078,
188 0x8c1c, 0xffffffff, 0x00001010,
189 0x28350, 0xffffffff, 0x00000000,
190 0xa008, 0xffffffff, 0x00010000,
191 0x5cc, 0xffffffff, 0x00000001,
192 0x9508, 0xffffffff, 0x00000002,
193 0x913c, 0x0000000f, 0x0000000a
194};
195
196static const u32 evergreen_golden_registers2[] =
197{
198 0x2f4c, 0xffffffff, 0x00000000,
199 0x54f4, 0xffffffff, 0x00000000,
200 0x54f0, 0xffffffff, 0x00000000,
201 0x5498, 0xffffffff, 0x00000000,
202 0x549c, 0xffffffff, 0x00000000,
203 0x5494, 0xffffffff, 0x00000000,
204 0x53cc, 0xffffffff, 0x00000000,
205 0x53c8, 0xffffffff, 0x00000000,
206 0x53c4, 0xffffffff, 0x00000000,
207 0x53c0, 0xffffffff, 0x00000000,
208 0x53bc, 0xffffffff, 0x00000000,
209 0x53b8, 0xffffffff, 0x00000000,
210 0x53b4, 0xffffffff, 0x00000000,
211 0x53b0, 0xffffffff, 0x00000000
212};
213
214static const u32 cypress_mgcg_init[] =
215{
216 0x802c, 0xffffffff, 0xc0000000,
217 0x5448, 0xffffffff, 0x00000100,
218 0x55e4, 0xffffffff, 0x00000100,
219 0x160c, 0xffffffff, 0x00000100,
220 0x5644, 0xffffffff, 0x00000100,
221 0xc164, 0xffffffff, 0x00000100,
222 0x8a18, 0xffffffff, 0x00000100,
223 0x897c, 0xffffffff, 0x06000100,
224 0x8b28, 0xffffffff, 0x00000100,
225 0x9144, 0xffffffff, 0x00000100,
226 0x9a60, 0xffffffff, 0x00000100,
227 0x9868, 0xffffffff, 0x00000100,
228 0x8d58, 0xffffffff, 0x00000100,
229 0x9510, 0xffffffff, 0x00000100,
230 0x949c, 0xffffffff, 0x00000100,
231 0x9654, 0xffffffff, 0x00000100,
232 0x9030, 0xffffffff, 0x00000100,
233 0x9034, 0xffffffff, 0x00000100,
234 0x9038, 0xffffffff, 0x00000100,
235 0x903c, 0xffffffff, 0x00000100,
236 0x9040, 0xffffffff, 0x00000100,
237 0xa200, 0xffffffff, 0x00000100,
238 0xa204, 0xffffffff, 0x00000100,
239 0xa208, 0xffffffff, 0x00000100,
240 0xa20c, 0xffffffff, 0x00000100,
241 0x971c, 0xffffffff, 0x00000100,
242 0x977c, 0xffffffff, 0x00000100,
243 0x3f80, 0xffffffff, 0x00000100,
244 0xa210, 0xffffffff, 0x00000100,
245 0xa214, 0xffffffff, 0x00000100,
246 0x4d8, 0xffffffff, 0x00000100,
247 0x9784, 0xffffffff, 0x00000100,
248 0x9698, 0xffffffff, 0x00000100,
249 0x4d4, 0xffffffff, 0x00000200,
250 0x30cc, 0xffffffff, 0x00000100,
251 0xd0c0, 0xffffffff, 0xff000100,
252 0x802c, 0xffffffff, 0x40000000,
253 0x915c, 0xffffffff, 0x00010000,
254 0x9160, 0xffffffff, 0x00030002,
255 0x9178, 0xffffffff, 0x00070000,
256 0x917c, 0xffffffff, 0x00030002,
257 0x9180, 0xffffffff, 0x00050004,
258 0x918c, 0xffffffff, 0x00010006,
259 0x9190, 0xffffffff, 0x00090008,
260 0x9194, 0xffffffff, 0x00070000,
261 0x9198, 0xffffffff, 0x00030002,
262 0x919c, 0xffffffff, 0x00050004,
263 0x91a8, 0xffffffff, 0x00010006,
264 0x91ac, 0xffffffff, 0x00090008,
265 0x91b0, 0xffffffff, 0x00070000,
266 0x91b4, 0xffffffff, 0x00030002,
267 0x91b8, 0xffffffff, 0x00050004,
268 0x91c4, 0xffffffff, 0x00010006,
269 0x91c8, 0xffffffff, 0x00090008,
270 0x91cc, 0xffffffff, 0x00070000,
271 0x91d0, 0xffffffff, 0x00030002,
272 0x91d4, 0xffffffff, 0x00050004,
273 0x91e0, 0xffffffff, 0x00010006,
274 0x91e4, 0xffffffff, 0x00090008,
275 0x91e8, 0xffffffff, 0x00000000,
276 0x91ec, 0xffffffff, 0x00070000,
277 0x91f0, 0xffffffff, 0x00030002,
278 0x91f4, 0xffffffff, 0x00050004,
279 0x9200, 0xffffffff, 0x00010006,
280 0x9204, 0xffffffff, 0x00090008,
281 0x9208, 0xffffffff, 0x00070000,
282 0x920c, 0xffffffff, 0x00030002,
283 0x9210, 0xffffffff, 0x00050004,
284 0x921c, 0xffffffff, 0x00010006,
285 0x9220, 0xffffffff, 0x00090008,
286 0x9224, 0xffffffff, 0x00070000,
287 0x9228, 0xffffffff, 0x00030002,
288 0x922c, 0xffffffff, 0x00050004,
289 0x9238, 0xffffffff, 0x00010006,
290 0x923c, 0xffffffff, 0x00090008,
291 0x9240, 0xffffffff, 0x00070000,
292 0x9244, 0xffffffff, 0x00030002,
293 0x9248, 0xffffffff, 0x00050004,
294 0x9254, 0xffffffff, 0x00010006,
295 0x9258, 0xffffffff, 0x00090008,
296 0x925c, 0xffffffff, 0x00070000,
297 0x9260, 0xffffffff, 0x00030002,
298 0x9264, 0xffffffff, 0x00050004,
299 0x9270, 0xffffffff, 0x00010006,
300 0x9274, 0xffffffff, 0x00090008,
301 0x9278, 0xffffffff, 0x00070000,
302 0x927c, 0xffffffff, 0x00030002,
303 0x9280, 0xffffffff, 0x00050004,
304 0x928c, 0xffffffff, 0x00010006,
305 0x9290, 0xffffffff, 0x00090008,
306 0x9294, 0xffffffff, 0x00000000,
307 0x929c, 0xffffffff, 0x00000001,
308 0x802c, 0xffffffff, 0x40010000,
309 0x915c, 0xffffffff, 0x00010000,
310 0x9160, 0xffffffff, 0x00030002,
311 0x9178, 0xffffffff, 0x00070000,
312 0x917c, 0xffffffff, 0x00030002,
313 0x9180, 0xffffffff, 0x00050004,
314 0x918c, 0xffffffff, 0x00010006,
315 0x9190, 0xffffffff, 0x00090008,
316 0x9194, 0xffffffff, 0x00070000,
317 0x9198, 0xffffffff, 0x00030002,
318 0x919c, 0xffffffff, 0x00050004,
319 0x91a8, 0xffffffff, 0x00010006,
320 0x91ac, 0xffffffff, 0x00090008,
321 0x91b0, 0xffffffff, 0x00070000,
322 0x91b4, 0xffffffff, 0x00030002,
323 0x91b8, 0xffffffff, 0x00050004,
324 0x91c4, 0xffffffff, 0x00010006,
325 0x91c8, 0xffffffff, 0x00090008,
326 0x91cc, 0xffffffff, 0x00070000,
327 0x91d0, 0xffffffff, 0x00030002,
328 0x91d4, 0xffffffff, 0x00050004,
329 0x91e0, 0xffffffff, 0x00010006,
330 0x91e4, 0xffffffff, 0x00090008,
331 0x91e8, 0xffffffff, 0x00000000,
332 0x91ec, 0xffffffff, 0x00070000,
333 0x91f0, 0xffffffff, 0x00030002,
334 0x91f4, 0xffffffff, 0x00050004,
335 0x9200, 0xffffffff, 0x00010006,
336 0x9204, 0xffffffff, 0x00090008,
337 0x9208, 0xffffffff, 0x00070000,
338 0x920c, 0xffffffff, 0x00030002,
339 0x9210, 0xffffffff, 0x00050004,
340 0x921c, 0xffffffff, 0x00010006,
341 0x9220, 0xffffffff, 0x00090008,
342 0x9224, 0xffffffff, 0x00070000,
343 0x9228, 0xffffffff, 0x00030002,
344 0x922c, 0xffffffff, 0x00050004,
345 0x9238, 0xffffffff, 0x00010006,
346 0x923c, 0xffffffff, 0x00090008,
347 0x9240, 0xffffffff, 0x00070000,
348 0x9244, 0xffffffff, 0x00030002,
349 0x9248, 0xffffffff, 0x00050004,
350 0x9254, 0xffffffff, 0x00010006,
351 0x9258, 0xffffffff, 0x00090008,
352 0x925c, 0xffffffff, 0x00070000,
353 0x9260, 0xffffffff, 0x00030002,
354 0x9264, 0xffffffff, 0x00050004,
355 0x9270, 0xffffffff, 0x00010006,
356 0x9274, 0xffffffff, 0x00090008,
357 0x9278, 0xffffffff, 0x00070000,
358 0x927c, 0xffffffff, 0x00030002,
359 0x9280, 0xffffffff, 0x00050004,
360 0x928c, 0xffffffff, 0x00010006,
361 0x9290, 0xffffffff, 0x00090008,
362 0x9294, 0xffffffff, 0x00000000,
363 0x929c, 0xffffffff, 0x00000001,
364 0x802c, 0xffffffff, 0xc0000000
365};
366
367static const u32 redwood_mgcg_init[] =
368{
369 0x802c, 0xffffffff, 0xc0000000,
370 0x5448, 0xffffffff, 0x00000100,
371 0x55e4, 0xffffffff, 0x00000100,
372 0x160c, 0xffffffff, 0x00000100,
373 0x5644, 0xffffffff, 0x00000100,
374 0xc164, 0xffffffff, 0x00000100,
375 0x8a18, 0xffffffff, 0x00000100,
376 0x897c, 0xffffffff, 0x06000100,
377 0x8b28, 0xffffffff, 0x00000100,
378 0x9144, 0xffffffff, 0x00000100,
379 0x9a60, 0xffffffff, 0x00000100,
380 0x9868, 0xffffffff, 0x00000100,
381 0x8d58, 0xffffffff, 0x00000100,
382 0x9510, 0xffffffff, 0x00000100,
383 0x949c, 0xffffffff, 0x00000100,
384 0x9654, 0xffffffff, 0x00000100,
385 0x9030, 0xffffffff, 0x00000100,
386 0x9034, 0xffffffff, 0x00000100,
387 0x9038, 0xffffffff, 0x00000100,
388 0x903c, 0xffffffff, 0x00000100,
389 0x9040, 0xffffffff, 0x00000100,
390 0xa200, 0xffffffff, 0x00000100,
391 0xa204, 0xffffffff, 0x00000100,
392 0xa208, 0xffffffff, 0x00000100,
393 0xa20c, 0xffffffff, 0x00000100,
394 0x971c, 0xffffffff, 0x00000100,
395 0x977c, 0xffffffff, 0x00000100,
396 0x3f80, 0xffffffff, 0x00000100,
397 0xa210, 0xffffffff, 0x00000100,
398 0xa214, 0xffffffff, 0x00000100,
399 0x4d8, 0xffffffff, 0x00000100,
400 0x9784, 0xffffffff, 0x00000100,
401 0x9698, 0xffffffff, 0x00000100,
402 0x4d4, 0xffffffff, 0x00000200,
403 0x30cc, 0xffffffff, 0x00000100,
404 0xd0c0, 0xffffffff, 0xff000100,
405 0x802c, 0xffffffff, 0x40000000,
406 0x915c, 0xffffffff, 0x00010000,
407 0x9160, 0xffffffff, 0x00030002,
408 0x9178, 0xffffffff, 0x00070000,
409 0x917c, 0xffffffff, 0x00030002,
410 0x9180, 0xffffffff, 0x00050004,
411 0x918c, 0xffffffff, 0x00010006,
412 0x9190, 0xffffffff, 0x00090008,
413 0x9194, 0xffffffff, 0x00070000,
414 0x9198, 0xffffffff, 0x00030002,
415 0x919c, 0xffffffff, 0x00050004,
416 0x91a8, 0xffffffff, 0x00010006,
417 0x91ac, 0xffffffff, 0x00090008,
418 0x91b0, 0xffffffff, 0x00070000,
419 0x91b4, 0xffffffff, 0x00030002,
420 0x91b8, 0xffffffff, 0x00050004,
421 0x91c4, 0xffffffff, 0x00010006,
422 0x91c8, 0xffffffff, 0x00090008,
423 0x91cc, 0xffffffff, 0x00070000,
424 0x91d0, 0xffffffff, 0x00030002,
425 0x91d4, 0xffffffff, 0x00050004,
426 0x91e0, 0xffffffff, 0x00010006,
427 0x91e4, 0xffffffff, 0x00090008,
428 0x91e8, 0xffffffff, 0x00000000,
429 0x91ec, 0xffffffff, 0x00070000,
430 0x91f0, 0xffffffff, 0x00030002,
431 0x91f4, 0xffffffff, 0x00050004,
432 0x9200, 0xffffffff, 0x00010006,
433 0x9204, 0xffffffff, 0x00090008,
434 0x9294, 0xffffffff, 0x00000000,
435 0x929c, 0xffffffff, 0x00000001,
436 0x802c, 0xffffffff, 0xc0000000
437};
438
439static const u32 cedar_golden_registers[] =
440{
441 0x3f90, 0xffff0000, 0xff000000,
442 0x9148, 0xffff0000, 0xff000000,
443 0x3f94, 0xffff0000, 0xff000000,
444 0x914c, 0xffff0000, 0xff000000,
445 0x9b7c, 0xffffffff, 0x00000000,
446 0x8a14, 0xffffffff, 0x00000007,
447 0x8b10, 0xffffffff, 0x00000000,
448 0x960c, 0xffffffff, 0x54763210,
449 0x88c4, 0xffffffff, 0x000000c2,
450 0x88d4, 0xffffffff, 0x00000000,
451 0x8974, 0xffffffff, 0x00000000,
452 0xc78, 0x00000080, 0x00000080,
453 0x5eb4, 0xffffffff, 0x00000002,
454 0x5e78, 0xffffffff, 0x001000f0,
455 0x6104, 0x01000300, 0x00000000,
456 0x5bc0, 0x00300000, 0x00000000,
457 0x7030, 0xffffffff, 0x00000011,
458 0x7c30, 0xffffffff, 0x00000011,
459 0x10830, 0xffffffff, 0x00000011,
460 0x11430, 0xffffffff, 0x00000011,
461 0xd02c, 0xffffffff, 0x08421000,
462 0x240c, 0xffffffff, 0x00000380,
463 0x8b24, 0xffffffff, 0x00ff0fff,
464 0x28a4c, 0x06000000, 0x06000000,
465 0x10c, 0x00000001, 0x00000001,
466 0x8d00, 0xffffffff, 0x100e4848,
467 0x8d04, 0xffffffff, 0x00164745,
468 0x8c00, 0xffffffff, 0xe4000003,
469 0x8c04, 0xffffffff, 0x40600060,
470 0x8c08, 0xffffffff, 0x001c001c,
471 0x8cf0, 0xffffffff, 0x08e00410,
472 0x8c20, 0xffffffff, 0x00800080,
473 0x8c24, 0xffffffff, 0x00800080,
474 0x8c18, 0xffffffff, 0x20202078,
475 0x8c1c, 0xffffffff, 0x00001010,
476 0x28350, 0xffffffff, 0x00000000,
477 0xa008, 0xffffffff, 0x00010000,
478 0x5cc, 0xffffffff, 0x00000001,
479 0x9508, 0xffffffff, 0x00000002
480};
481
482static const u32 cedar_mgcg_init[] =
483{
484 0x802c, 0xffffffff, 0xc0000000,
485 0x5448, 0xffffffff, 0x00000100,
486 0x55e4, 0xffffffff, 0x00000100,
487 0x160c, 0xffffffff, 0x00000100,
488 0x5644, 0xffffffff, 0x00000100,
489 0xc164, 0xffffffff, 0x00000100,
490 0x8a18, 0xffffffff, 0x00000100,
491 0x897c, 0xffffffff, 0x06000100,
492 0x8b28, 0xffffffff, 0x00000100,
493 0x9144, 0xffffffff, 0x00000100,
494 0x9a60, 0xffffffff, 0x00000100,
495 0x9868, 0xffffffff, 0x00000100,
496 0x8d58, 0xffffffff, 0x00000100,
497 0x9510, 0xffffffff, 0x00000100,
498 0x949c, 0xffffffff, 0x00000100,
499 0x9654, 0xffffffff, 0x00000100,
500 0x9030, 0xffffffff, 0x00000100,
501 0x9034, 0xffffffff, 0x00000100,
502 0x9038, 0xffffffff, 0x00000100,
503 0x903c, 0xffffffff, 0x00000100,
504 0x9040, 0xffffffff, 0x00000100,
505 0xa200, 0xffffffff, 0x00000100,
506 0xa204, 0xffffffff, 0x00000100,
507 0xa208, 0xffffffff, 0x00000100,
508 0xa20c, 0xffffffff, 0x00000100,
509 0x971c, 0xffffffff, 0x00000100,
510 0x977c, 0xffffffff, 0x00000100,
511 0x3f80, 0xffffffff, 0x00000100,
512 0xa210, 0xffffffff, 0x00000100,
513 0xa214, 0xffffffff, 0x00000100,
514 0x4d8, 0xffffffff, 0x00000100,
515 0x9784, 0xffffffff, 0x00000100,
516 0x9698, 0xffffffff, 0x00000100,
517 0x4d4, 0xffffffff, 0x00000200,
518 0x30cc, 0xffffffff, 0x00000100,
519 0xd0c0, 0xffffffff, 0xff000100,
520 0x802c, 0xffffffff, 0x40000000,
521 0x915c, 0xffffffff, 0x00010000,
522 0x9178, 0xffffffff, 0x00050000,
523 0x917c, 0xffffffff, 0x00030002,
524 0x918c, 0xffffffff, 0x00010004,
525 0x9190, 0xffffffff, 0x00070006,
526 0x9194, 0xffffffff, 0x00050000,
527 0x9198, 0xffffffff, 0x00030002,
528 0x91a8, 0xffffffff, 0x00010004,
529 0x91ac, 0xffffffff, 0x00070006,
530 0x91e8, 0xffffffff, 0x00000000,
531 0x9294, 0xffffffff, 0x00000000,
532 0x929c, 0xffffffff, 0x00000001,
533 0x802c, 0xffffffff, 0xc0000000
534};
535
536static const u32 juniper_mgcg_init[] =
537{
538 0x802c, 0xffffffff, 0xc0000000,
539 0x5448, 0xffffffff, 0x00000100,
540 0x55e4, 0xffffffff, 0x00000100,
541 0x160c, 0xffffffff, 0x00000100,
542 0x5644, 0xffffffff, 0x00000100,
543 0xc164, 0xffffffff, 0x00000100,
544 0x8a18, 0xffffffff, 0x00000100,
545 0x897c, 0xffffffff, 0x06000100,
546 0x8b28, 0xffffffff, 0x00000100,
547 0x9144, 0xffffffff, 0x00000100,
548 0x9a60, 0xffffffff, 0x00000100,
549 0x9868, 0xffffffff, 0x00000100,
550 0x8d58, 0xffffffff, 0x00000100,
551 0x9510, 0xffffffff, 0x00000100,
552 0x949c, 0xffffffff, 0x00000100,
553 0x9654, 0xffffffff, 0x00000100,
554 0x9030, 0xffffffff, 0x00000100,
555 0x9034, 0xffffffff, 0x00000100,
556 0x9038, 0xffffffff, 0x00000100,
557 0x903c, 0xffffffff, 0x00000100,
558 0x9040, 0xffffffff, 0x00000100,
559 0xa200, 0xffffffff, 0x00000100,
560 0xa204, 0xffffffff, 0x00000100,
561 0xa208, 0xffffffff, 0x00000100,
562 0xa20c, 0xffffffff, 0x00000100,
563 0x971c, 0xffffffff, 0x00000100,
564 0xd0c0, 0xffffffff, 0xff000100,
565 0x802c, 0xffffffff, 0x40000000,
566 0x915c, 0xffffffff, 0x00010000,
567 0x9160, 0xffffffff, 0x00030002,
568 0x9178, 0xffffffff, 0x00070000,
569 0x917c, 0xffffffff, 0x00030002,
570 0x9180, 0xffffffff, 0x00050004,
571 0x918c, 0xffffffff, 0x00010006,
572 0x9190, 0xffffffff, 0x00090008,
573 0x9194, 0xffffffff, 0x00070000,
574 0x9198, 0xffffffff, 0x00030002,
575 0x919c, 0xffffffff, 0x00050004,
576 0x91a8, 0xffffffff, 0x00010006,
577 0x91ac, 0xffffffff, 0x00090008,
578 0x91b0, 0xffffffff, 0x00070000,
579 0x91b4, 0xffffffff, 0x00030002,
580 0x91b8, 0xffffffff, 0x00050004,
581 0x91c4, 0xffffffff, 0x00010006,
582 0x91c8, 0xffffffff, 0x00090008,
583 0x91cc, 0xffffffff, 0x00070000,
584 0x91d0, 0xffffffff, 0x00030002,
585 0x91d4, 0xffffffff, 0x00050004,
586 0x91e0, 0xffffffff, 0x00010006,
587 0x91e4, 0xffffffff, 0x00090008,
588 0x91e8, 0xffffffff, 0x00000000,
589 0x91ec, 0xffffffff, 0x00070000,
590 0x91f0, 0xffffffff, 0x00030002,
591 0x91f4, 0xffffffff, 0x00050004,
592 0x9200, 0xffffffff, 0x00010006,
593 0x9204, 0xffffffff, 0x00090008,
594 0x9208, 0xffffffff, 0x00070000,
595 0x920c, 0xffffffff, 0x00030002,
596 0x9210, 0xffffffff, 0x00050004,
597 0x921c, 0xffffffff, 0x00010006,
598 0x9220, 0xffffffff, 0x00090008,
599 0x9224, 0xffffffff, 0x00070000,
600 0x9228, 0xffffffff, 0x00030002,
601 0x922c, 0xffffffff, 0x00050004,
602 0x9238, 0xffffffff, 0x00010006,
603 0x923c, 0xffffffff, 0x00090008,
604 0x9240, 0xffffffff, 0x00070000,
605 0x9244, 0xffffffff, 0x00030002,
606 0x9248, 0xffffffff, 0x00050004,
607 0x9254, 0xffffffff, 0x00010006,
608 0x9258, 0xffffffff, 0x00090008,
609 0x925c, 0xffffffff, 0x00070000,
610 0x9260, 0xffffffff, 0x00030002,
611 0x9264, 0xffffffff, 0x00050004,
612 0x9270, 0xffffffff, 0x00010006,
613 0x9274, 0xffffffff, 0x00090008,
614 0x9278, 0xffffffff, 0x00070000,
615 0x927c, 0xffffffff, 0x00030002,
616 0x9280, 0xffffffff, 0x00050004,
617 0x928c, 0xffffffff, 0x00010006,
618 0x9290, 0xffffffff, 0x00090008,
619 0x9294, 0xffffffff, 0x00000000,
620 0x929c, 0xffffffff, 0x00000001,
621 0x802c, 0xffffffff, 0xc0000000,
622 0x977c, 0xffffffff, 0x00000100,
623 0x3f80, 0xffffffff, 0x00000100,
624 0xa210, 0xffffffff, 0x00000100,
625 0xa214, 0xffffffff, 0x00000100,
626 0x4d8, 0xffffffff, 0x00000100,
627 0x9784, 0xffffffff, 0x00000100,
628 0x9698, 0xffffffff, 0x00000100,
629 0x4d4, 0xffffffff, 0x00000200,
630 0x30cc, 0xffffffff, 0x00000100,
631 0x802c, 0xffffffff, 0xc0000000
632};
633
634static const u32 supersumo_golden_registers[] =
635{
636 0x5eb4, 0xffffffff, 0x00000002,
637 0x5cc, 0xffffffff, 0x00000001,
638 0x7030, 0xffffffff, 0x00000011,
639 0x7c30, 0xffffffff, 0x00000011,
640 0x6104, 0x01000300, 0x00000000,
641 0x5bc0, 0x00300000, 0x00000000,
642 0x8c04, 0xffffffff, 0x40600060,
643 0x8c08, 0xffffffff, 0x001c001c,
644 0x8c20, 0xffffffff, 0x00800080,
645 0x8c24, 0xffffffff, 0x00800080,
646 0x8c18, 0xffffffff, 0x20202078,
647 0x8c1c, 0xffffffff, 0x00001010,
648 0x918c, 0xffffffff, 0x00010006,
649 0x91a8, 0xffffffff, 0x00010006,
650 0x91c4, 0xffffffff, 0x00010006,
651 0x91e0, 0xffffffff, 0x00010006,
652 0x9200, 0xffffffff, 0x00010006,
653 0x9150, 0xffffffff, 0x6e944040,
654 0x917c, 0xffffffff, 0x00030002,
655 0x9180, 0xffffffff, 0x00050004,
656 0x9198, 0xffffffff, 0x00030002,
657 0x919c, 0xffffffff, 0x00050004,
658 0x91b4, 0xffffffff, 0x00030002,
659 0x91b8, 0xffffffff, 0x00050004,
660 0x91d0, 0xffffffff, 0x00030002,
661 0x91d4, 0xffffffff, 0x00050004,
662 0x91f0, 0xffffffff, 0x00030002,
663 0x91f4, 0xffffffff, 0x00050004,
664 0x915c, 0xffffffff, 0x00010000,
665 0x9160, 0xffffffff, 0x00030002,
666 0x3f90, 0xffff0000, 0xff000000,
667 0x9178, 0xffffffff, 0x00070000,
668 0x9194, 0xffffffff, 0x00070000,
669 0x91b0, 0xffffffff, 0x00070000,
670 0x91cc, 0xffffffff, 0x00070000,
671 0x91ec, 0xffffffff, 0x00070000,
672 0x9148, 0xffff0000, 0xff000000,
673 0x9190, 0xffffffff, 0x00090008,
674 0x91ac, 0xffffffff, 0x00090008,
675 0x91c8, 0xffffffff, 0x00090008,
676 0x91e4, 0xffffffff, 0x00090008,
677 0x9204, 0xffffffff, 0x00090008,
678 0x3f94, 0xffff0000, 0xff000000,
679 0x914c, 0xffff0000, 0xff000000,
680 0x929c, 0xffffffff, 0x00000001,
681 0x8a18, 0xffffffff, 0x00000100,
682 0x8b28, 0xffffffff, 0x00000100,
683 0x9144, 0xffffffff, 0x00000100,
684 0x5644, 0xffffffff, 0x00000100,
685 0x9b7c, 0xffffffff, 0x00000000,
686 0x8030, 0xffffffff, 0x0000100a,
687 0x8a14, 0xffffffff, 0x00000007,
688 0x8b24, 0xffffffff, 0x00ff0fff,
689 0x8b10, 0xffffffff, 0x00000000,
690 0x28a4c, 0x06000000, 0x06000000,
691 0x4d8, 0xffffffff, 0x00000100,
692 0x913c, 0xffff000f, 0x0100000a,
693 0x960c, 0xffffffff, 0x54763210,
694 0x88c4, 0xffffffff, 0x000000c2,
695 0x88d4, 0xffffffff, 0x00000010,
696 0x8974, 0xffffffff, 0x00000000,
697 0xc78, 0x00000080, 0x00000080,
698 0x5e78, 0xffffffff, 0x001000f0,
699 0xd02c, 0xffffffff, 0x08421000,
700 0xa008, 0xffffffff, 0x00010000,
701 0x8d00, 0xffffffff, 0x100e4848,
702 0x8d04, 0xffffffff, 0x00164745,
703 0x8c00, 0xffffffff, 0xe4000003,
704 0x8cf0, 0x1fffffff, 0x08e00620,
705 0x28350, 0xffffffff, 0x00000000,
706 0x9508, 0xffffffff, 0x00000002
707};
708
709static const u32 sumo_golden_registers[] =
710{
711 0x900c, 0x00ffffff, 0x0017071f,
712 0x8c18, 0xffffffff, 0x10101060,
713 0x8c1c, 0xffffffff, 0x00001010,
714 0x8c30, 0x0000000f, 0x00000005,
715 0x9688, 0x0000000f, 0x00000007
716};
717
718static const u32 wrestler_golden_registers[] =
719{
720 0x5eb4, 0xffffffff, 0x00000002,
721 0x5cc, 0xffffffff, 0x00000001,
722 0x7030, 0xffffffff, 0x00000011,
723 0x7c30, 0xffffffff, 0x00000011,
724 0x6104, 0x01000300, 0x00000000,
725 0x5bc0, 0x00300000, 0x00000000,
726 0x918c, 0xffffffff, 0x00010006,
727 0x91a8, 0xffffffff, 0x00010006,
728 0x9150, 0xffffffff, 0x6e944040,
729 0x917c, 0xffffffff, 0x00030002,
730 0x9198, 0xffffffff, 0x00030002,
731 0x915c, 0xffffffff, 0x00010000,
732 0x3f90, 0xffff0000, 0xff000000,
733 0x9178, 0xffffffff, 0x00070000,
734 0x9194, 0xffffffff, 0x00070000,
735 0x9148, 0xffff0000, 0xff000000,
736 0x9190, 0xffffffff, 0x00090008,
737 0x91ac, 0xffffffff, 0x00090008,
738 0x3f94, 0xffff0000, 0xff000000,
739 0x914c, 0xffff0000, 0xff000000,
740 0x929c, 0xffffffff, 0x00000001,
741 0x8a18, 0xffffffff, 0x00000100,
742 0x8b28, 0xffffffff, 0x00000100,
743 0x9144, 0xffffffff, 0x00000100,
744 0x9b7c, 0xffffffff, 0x00000000,
745 0x8030, 0xffffffff, 0x0000100a,
746 0x8a14, 0xffffffff, 0x00000001,
747 0x8b24, 0xffffffff, 0x00ff0fff,
748 0x8b10, 0xffffffff, 0x00000000,
749 0x28a4c, 0x06000000, 0x06000000,
750 0x4d8, 0xffffffff, 0x00000100,
751 0x913c, 0xffff000f, 0x0100000a,
752 0x960c, 0xffffffff, 0x54763210,
753 0x88c4, 0xffffffff, 0x000000c2,
754 0x88d4, 0xffffffff, 0x00000010,
755 0x8974, 0xffffffff, 0x00000000,
756 0xc78, 0x00000080, 0x00000080,
757 0x5e78, 0xffffffff, 0x001000f0,
758 0xd02c, 0xffffffff, 0x08421000,
759 0xa008, 0xffffffff, 0x00010000,
760 0x8d00, 0xffffffff, 0x100e4848,
761 0x8d04, 0xffffffff, 0x00164745,
762 0x8c00, 0xffffffff, 0xe4000003,
763 0x8cf0, 0x1fffffff, 0x08e00410,
764 0x28350, 0xffffffff, 0x00000000,
765 0x9508, 0xffffffff, 0x00000002,
766 0x900c, 0xffffffff, 0x0017071f,
767 0x8c18, 0xffffffff, 0x10101060,
768 0x8c1c, 0xffffffff, 0x00001010
769};
770
771static const u32 barts_golden_registers[] =
772{
773 0x5eb4, 0xffffffff, 0x00000002,
774 0x5e78, 0x8f311ff1, 0x001000f0,
775 0x3f90, 0xffff0000, 0xff000000,
776 0x9148, 0xffff0000, 0xff000000,
777 0x3f94, 0xffff0000, 0xff000000,
778 0x914c, 0xffff0000, 0xff000000,
779 0xc78, 0x00000080, 0x00000080,
780 0xbd4, 0x70073777, 0x00010001,
781 0xd02c, 0xbfffff1f, 0x08421000,
782 0xd0b8, 0x03773777, 0x02011003,
783 0x5bc0, 0x00200000, 0x50100000,
784 0x98f8, 0x33773777, 0x02011003,
785 0x98fc, 0xffffffff, 0x76543210,
786 0x7030, 0x31000311, 0x00000011,
787 0x2f48, 0x00000007, 0x02011003,
788 0x6b28, 0x00000010, 0x00000012,
789 0x7728, 0x00000010, 0x00000012,
790 0x10328, 0x00000010, 0x00000012,
791 0x10f28, 0x00000010, 0x00000012,
792 0x11b28, 0x00000010, 0x00000012,
793 0x12728, 0x00000010, 0x00000012,
794 0x240c, 0x000007ff, 0x00000380,
795 0x8a14, 0xf000001f, 0x00000007,
796 0x8b24, 0x3fff3fff, 0x00ff0fff,
797 0x8b10, 0x0000ff0f, 0x00000000,
798 0x28a4c, 0x07ffffff, 0x06000000,
799 0x10c, 0x00000001, 0x00010003,
800 0xa02c, 0xffffffff, 0x0000009b,
801 0x913c, 0x0000000f, 0x0100000a,
802 0x8d00, 0xffff7f7f, 0x100e4848,
803 0x8d04, 0x00ffffff, 0x00164745,
804 0x8c00, 0xfffc0003, 0xe4000003,
805 0x8c04, 0xf8ff00ff, 0x40600060,
806 0x8c08, 0x00ff00ff, 0x001c001c,
807 0x8cf0, 0x1fff1fff, 0x08e00620,
808 0x8c20, 0x0fff0fff, 0x00800080,
809 0x8c24, 0x0fff0fff, 0x00800080,
810 0x8c18, 0xffffffff, 0x20202078,
811 0x8c1c, 0x0000ffff, 0x00001010,
812 0x28350, 0x00000f01, 0x00000000,
813 0x9508, 0x3700001f, 0x00000002,
814 0x960c, 0xffffffff, 0x54763210,
815 0x88c4, 0x001f3ae3, 0x000000c2,
816 0x88d4, 0x0000001f, 0x00000010,
817 0x8974, 0xffffffff, 0x00000000
818};
819
820static const u32 turks_golden_registers[] =
821{
822 0x5eb4, 0xffffffff, 0x00000002,
823 0x5e78, 0x8f311ff1, 0x001000f0,
824 0x8c8, 0x00003000, 0x00001070,
825 0x8cc, 0x000fffff, 0x00040035,
826 0x3f90, 0xffff0000, 0xfff00000,
827 0x9148, 0xffff0000, 0xfff00000,
828 0x3f94, 0xffff0000, 0xfff00000,
829 0x914c, 0xffff0000, 0xfff00000,
830 0xc78, 0x00000080, 0x00000080,
831 0xbd4, 0x00073007, 0x00010002,
832 0xd02c, 0xbfffff1f, 0x08421000,
833 0xd0b8, 0x03773777, 0x02010002,
834 0x5bc0, 0x00200000, 0x50100000,
835 0x98f8, 0x33773777, 0x00010002,
836 0x98fc, 0xffffffff, 0x33221100,
837 0x7030, 0x31000311, 0x00000011,
838 0x2f48, 0x33773777, 0x00010002,
839 0x6b28, 0x00000010, 0x00000012,
840 0x7728, 0x00000010, 0x00000012,
841 0x10328, 0x00000010, 0x00000012,
842 0x10f28, 0x00000010, 0x00000012,
843 0x11b28, 0x00000010, 0x00000012,
844 0x12728, 0x00000010, 0x00000012,
845 0x240c, 0x000007ff, 0x00000380,
846 0x8a14, 0xf000001f, 0x00000007,
847 0x8b24, 0x3fff3fff, 0x00ff0fff,
848 0x8b10, 0x0000ff0f, 0x00000000,
849 0x28a4c, 0x07ffffff, 0x06000000,
850 0x10c, 0x00000001, 0x00010003,
851 0xa02c, 0xffffffff, 0x0000009b,
852 0x913c, 0x0000000f, 0x0100000a,
853 0x8d00, 0xffff7f7f, 0x100e4848,
854 0x8d04, 0x00ffffff, 0x00164745,
855 0x8c00, 0xfffc0003, 0xe4000003,
856 0x8c04, 0xf8ff00ff, 0x40600060,
857 0x8c08, 0x00ff00ff, 0x001c001c,
858 0x8cf0, 0x1fff1fff, 0x08e00410,
859 0x8c20, 0x0fff0fff, 0x00800080,
860 0x8c24, 0x0fff0fff, 0x00800080,
861 0x8c18, 0xffffffff, 0x20202078,
862 0x8c1c, 0x0000ffff, 0x00001010,
863 0x28350, 0x00000f01, 0x00000000,
864 0x9508, 0x3700001f, 0x00000002,
865 0x960c, 0xffffffff, 0x54763210,
866 0x88c4, 0x001f3ae3, 0x000000c2,
867 0x88d4, 0x0000001f, 0x00000010,
868 0x8974, 0xffffffff, 0x00000000
869};
870
871static const u32 caicos_golden_registers[] =
872{
873 0x5eb4, 0xffffffff, 0x00000002,
874 0x5e78, 0x8f311ff1, 0x001000f0,
875 0x8c8, 0x00003420, 0x00001450,
876 0x8cc, 0x000fffff, 0x00040035,
877 0x3f90, 0xffff0000, 0xfffc0000,
878 0x9148, 0xffff0000, 0xfffc0000,
879 0x3f94, 0xffff0000, 0xfffc0000,
880 0x914c, 0xffff0000, 0xfffc0000,
881 0xc78, 0x00000080, 0x00000080,
882 0xbd4, 0x00073007, 0x00010001,
883 0xd02c, 0xbfffff1f, 0x08421000,
884 0xd0b8, 0x03773777, 0x02010001,
885 0x5bc0, 0x00200000, 0x50100000,
886 0x98f8, 0x33773777, 0x02010001,
887 0x98fc, 0xffffffff, 0x33221100,
888 0x7030, 0x31000311, 0x00000011,
889 0x2f48, 0x33773777, 0x02010001,
890 0x6b28, 0x00000010, 0x00000012,
891 0x7728, 0x00000010, 0x00000012,
892 0x10328, 0x00000010, 0x00000012,
893 0x10f28, 0x00000010, 0x00000012,
894 0x11b28, 0x00000010, 0x00000012,
895 0x12728, 0x00000010, 0x00000012,
896 0x240c, 0x000007ff, 0x00000380,
897 0x8a14, 0xf000001f, 0x00000001,
898 0x8b24, 0x3fff3fff, 0x00ff0fff,
899 0x8b10, 0x0000ff0f, 0x00000000,
900 0x28a4c, 0x07ffffff, 0x06000000,
901 0x10c, 0x00000001, 0x00010003,
902 0xa02c, 0xffffffff, 0x0000009b,
903 0x913c, 0x0000000f, 0x0100000a,
904 0x8d00, 0xffff7f7f, 0x100e4848,
905 0x8d04, 0x00ffffff, 0x00164745,
906 0x8c00, 0xfffc0003, 0xe4000003,
907 0x8c04, 0xf8ff00ff, 0x40600060,
908 0x8c08, 0x00ff00ff, 0x001c001c,
909 0x8cf0, 0x1fff1fff, 0x08e00410,
910 0x8c20, 0x0fff0fff, 0x00800080,
911 0x8c24, 0x0fff0fff, 0x00800080,
912 0x8c18, 0xffffffff, 0x20202078,
913 0x8c1c, 0x0000ffff, 0x00001010,
914 0x28350, 0x00000f01, 0x00000000,
915 0x9508, 0x3700001f, 0x00000002,
916 0x960c, 0xffffffff, 0x54763210,
917 0x88c4, 0x001f3ae3, 0x000000c2,
918 0x88d4, 0x0000001f, 0x00000010,
919 0x8974, 0xffffffff, 0x00000000
920};
921
922static void evergreen_init_golden_registers(struct radeon_device *rdev)
923{
924 switch (rdev->family) {
925 case CHIP_CYPRESS:
926 case CHIP_HEMLOCK:
927 radeon_program_register_sequence(rdev,
928 evergreen_golden_registers,
929 (const u32)ARRAY_SIZE(evergreen_golden_registers));
930 radeon_program_register_sequence(rdev,
931 evergreen_golden_registers2,
932 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
933 radeon_program_register_sequence(rdev,
934 cypress_mgcg_init,
935 (const u32)ARRAY_SIZE(cypress_mgcg_init));
936 break;
937 case CHIP_JUNIPER:
938 radeon_program_register_sequence(rdev,
939 evergreen_golden_registers,
940 (const u32)ARRAY_SIZE(evergreen_golden_registers));
941 radeon_program_register_sequence(rdev,
942 evergreen_golden_registers2,
943 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
944 radeon_program_register_sequence(rdev,
945 juniper_mgcg_init,
946 (const u32)ARRAY_SIZE(juniper_mgcg_init));
947 break;
948 case CHIP_REDWOOD:
949 radeon_program_register_sequence(rdev,
950 evergreen_golden_registers,
951 (const u32)ARRAY_SIZE(evergreen_golden_registers));
952 radeon_program_register_sequence(rdev,
953 evergreen_golden_registers2,
954 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
955 radeon_program_register_sequence(rdev,
956 redwood_mgcg_init,
957 (const u32)ARRAY_SIZE(redwood_mgcg_init));
958 break;
959 case CHIP_CEDAR:
960 radeon_program_register_sequence(rdev,
961 cedar_golden_registers,
962 (const u32)ARRAY_SIZE(cedar_golden_registers));
963 radeon_program_register_sequence(rdev,
964 evergreen_golden_registers2,
965 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
966 radeon_program_register_sequence(rdev,
967 cedar_mgcg_init,
968 (const u32)ARRAY_SIZE(cedar_mgcg_init));
969 break;
970 case CHIP_PALM:
971 radeon_program_register_sequence(rdev,
972 wrestler_golden_registers,
973 (const u32)ARRAY_SIZE(wrestler_golden_registers));
974 break;
975 case CHIP_SUMO:
976 radeon_program_register_sequence(rdev,
977 supersumo_golden_registers,
978 (const u32)ARRAY_SIZE(supersumo_golden_registers));
979 break;
980 case CHIP_SUMO2:
981 radeon_program_register_sequence(rdev,
982 supersumo_golden_registers,
983 (const u32)ARRAY_SIZE(supersumo_golden_registers));
984 radeon_program_register_sequence(rdev,
985 sumo_golden_registers,
986 (const u32)ARRAY_SIZE(sumo_golden_registers));
987 break;
988 case CHIP_BARTS:
989 radeon_program_register_sequence(rdev,
990 barts_golden_registers,
991 (const u32)ARRAY_SIZE(barts_golden_registers));
992 break;
993 case CHIP_TURKS:
994 radeon_program_register_sequence(rdev,
995 turks_golden_registers,
996 (const u32)ARRAY_SIZE(turks_golden_registers));
997 break;
998 case CHIP_CAICOS:
999 radeon_program_register_sequence(rdev,
1000 caicos_golden_registers,
1001 (const u32)ARRAY_SIZE(caicos_golden_registers));
1002 break;
1003 default:
1004 break;
1005 }
1006}
1007
Jerome Glisse285484e2011-12-16 17:03:42 -05001008void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009 unsigned *bankh, unsigned *mtaspect,
1010 unsigned *tile_split)
1011{
1012 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016 switch (*bankw) {
1017 default:
1018 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022 }
1023 switch (*bankh) {
1024 default:
1025 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029 }
1030 switch (*mtaspect) {
1031 default:
1032 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036 }
1037}
1038
Alex Deucher23d33ba2013-04-08 12:41:32 +02001039static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040 u32 cntl_reg, u32 status_reg)
1041{
1042 int r, i;
1043 struct atom_clock_dividers dividers;
1044
1045 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046 clock, false, &dividers);
1047 if (r)
1048 return r;
1049
1050 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052 for (i = 0; i < 100; i++) {
1053 if (RREG32(status_reg) & DCLK_STATUS)
1054 break;
1055 mdelay(10);
1056 }
1057 if (i == 100)
1058 return -ETIMEDOUT;
1059
1060 return 0;
1061}
1062
1063int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064{
1065 int r = 0;
1066 u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069 if (r)
1070 goto done;
1071 cg_scratch &= 0xffff0000;
1072 cg_scratch |= vclk / 100; /* Mhz */
1073
1074 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075 if (r)
1076 goto done;
1077 cg_scratch &= 0x0000ffff;
1078 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079
1080done:
1081 WREG32(CG_SCRATCH1, cg_scratch);
1082
1083 return r;
1084}
1085
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL)
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 leaves the PLL asleep in bypass)
 * @dclk: requested UVD decode clock (0 leaves the PLL asleep in bypass)
 *
 * Switches VCLK/DCLK to the bypass source, then walks the UPLL through
 * the required reset/settle handshake with the newly computed dividers
 * before selecting the PLL output again.  The write ordering below is
 * part of the hardware sequence - do not reorder.
 * Returns 0 on success, negative error code on divider-calculation or
 * PLL-handshake failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* NOTE(review): the numeric arguments are presumably the VCO/fb
	 * divider limits for this ASIC - see radeon_uvd_calc_upll_dividers
	 * for their exact meaning */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* pick the ISPARE9 setting based on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1174
Alex Deucherd054ac12011-09-01 17:46:15 +00001175void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1176{
1177 u16 ctl, v;
Jiang Liu32195ae2012-07-24 17:20:30 +08001178 int err;
Alex Deucherd054ac12011-09-01 17:46:15 +00001179
Jiang Liu32195ae2012-07-24 17:20:30 +08001180 err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001181 if (err)
1182 return;
1183
1184 v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1185
1186 /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1187 * to avoid hangs or perfomance issues
1188 */
1189 if ((v == 0) || (v == 6) || (v == 7)) {
1190 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1191 ctl |= (2 << 12);
Jiang Liu32195ae2012-07-24 17:20:30 +08001192 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
Alex Deucherd054ac12011-09-01 17:46:15 +00001193 }
1194}
1195
Alex Deucher10257a62013-04-09 18:49:59 -04001196static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1197{
1198 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1199 return true;
1200 else
1201 return false;
1202}
1203
1204static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1205{
1206 u32 pos1, pos2;
1207
1208 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1209 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1210
1211 if (pos1 != pos2)
1212 return true;
1213 else
1214 return false;
1215}
1216
Alex Deucher377edc82012-07-17 14:02:42 -04001217/**
1218 * dce4_wait_for_vblank - vblank wait asic callback.
1219 *
1220 * @rdev: radeon_device pointer
1221 * @crtc: crtc to wait for vblank on
1222 *
1223 * Wait for vblank on the requested crtc (evergreen+).
1224 */
Alex Deucher3ae19b72012-02-23 17:53:37 -05001225void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1226{
Alex Deucher10257a62013-04-09 18:49:59 -04001227 unsigned i = 0;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001228
Alex Deucher4a159032012-08-15 17:13:53 -04001229 if (crtc >= rdev->num_crtc)
1230 return;
1231
Alex Deucher10257a62013-04-09 18:49:59 -04001232 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1233 return;
1234
1235 /* depending on when we hit vblank, we may be close to active; if so,
1236 * wait for another frame.
1237 */
1238 while (dce4_is_in_vblank(rdev, crtc)) {
1239 if (i++ % 100 == 0) {
1240 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001241 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001242 }
Alex Deucher10257a62013-04-09 18:49:59 -04001243 }
1244
1245 while (!dce4_is_in_vblank(rdev, crtc)) {
1246 if (i++ % 100 == 0) {
1247 if (!dce4_is_counter_moving(rdev, crtc))
Alex Deucher3ae19b72012-02-23 17:53:37 -05001248 break;
Alex Deucher3ae19b72012-02-23 17:53:37 -05001249 }
1250 }
1251}
1252
/**
 * evergreen_pre_page_flip - pre-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to prepare for pageflip on
 *
 * Pre-pageflip callback (evergreen+).
 * Enables the pageflip irq (vblank irq).
 */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1267
/**
 * evergreen_post_page_flip - post-pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to cleanup pageflip on
 *
 * Post-pageflip callback (evergreen+).
 * Disables the pageflip irq (vblank irq).
 */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
1282
/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to do the pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 *
 * Does the actual pageflip (evergreen+).
 * During vblank we take the crtc lock and wait for the update_pending
 * bit to go high, when it does, we release the lock, and allow the
 * double buffered update to take place.
 * Returns the current update pending status.
 */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses (secondary first, then primary,
	 * each as a high/low 64-bit pair) */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1332
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper exposes a raw ADC reading plus a calibration
		 * offset (TOFFSET); bit 8 of the offset acts as its sign. */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* negative offset: subtract its two's-complement magnitude */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		/* other evergreen parts report a signed reading in ASIC_T:
		 * bit 10 set -> clamp low, bit 9 set -> clamp high,
		 * bit 8 set -> sign-extend the 9-bit value */
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degree units, hence the /2 */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1371
Alex Deucher20d391d2011-02-01 16:12:34 -05001372int sumo_get_temp(struct radeon_device *rdev)
Alex Deuchere33df252010-11-22 17:56:32 -05001373{
1374 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
Alex Deucher20d391d2011-02-01 16:12:34 -05001375 int actual_temp = temp - 49;
Alex Deuchere33df252010-11-22 17:56:32 -05001376
1377 return actual_temp * 1000;
1378}
1379
Alex Deucher377edc82012-07-17 14:02:42 -04001380/**
1381 * sumo_pm_init_profile - Initialize power profiles callback.
1382 *
1383 * @rdev: radeon_device pointer
1384 *
1385 * Initialize the power states used in profile mode
1386 * (sumo, trinity, SI).
1387 * Used for profile mode only.
1388 */
Alex Deuchera4c9e2e2011-11-04 10:09:41 -04001389void sumo_pm_init_profile(struct radeon_device *rdev)
1390{
1391 int idx;
1392
1393 /* default */
1394 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1395 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1396 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1397 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1398
1399 /* low,mid sh/mh */
1400 if (rdev->flags & RADEON_IS_MOBILITY)
1401 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1402 else
1403 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1404
1405 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1406 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1407 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1408 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1409
1410 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1411 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1412 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1413 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1414
1415 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1416 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1417 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1418 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1419
1420 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1421 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1422 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1423 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1424
1425 /* high sh/mh */
1426 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1427 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1428 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1429 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1430 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1431 rdev->pm.power_state[idx].num_clock_modes - 1;
1432
1433 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1434 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1435 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1436 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1437 rdev->pm.power_state[idx].num_clock_modes - 1;
1438}
1439
Alex Deucher377edc82012-07-17 14:02:42 -04001440/**
Alex Deucher27810fb2012-10-01 19:25:11 -04001441 * btc_pm_init_profile - Initialize power profiles callback.
1442 *
1443 * @rdev: radeon_device pointer
1444 *
1445 * Initialize the power states used in profile mode
1446 * (BTC, cayman).
1447 * Used for profile mode only.
1448 */
1449void btc_pm_init_profile(struct radeon_device *rdev)
1450{
1451 int idx;
1452
1453 /* default */
1454 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1455 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1456 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1457 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1458 /* starting with BTC, there is one state that is used for both
1459 * MH and SH. Difference is that we always use the high clock index for
1460 * mclk.
1461 */
1462 if (rdev->flags & RADEON_IS_MOBILITY)
1463 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1464 else
1465 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1466 /* low sh */
1467 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1468 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1469 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1470 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1471 /* mid sh */
1472 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1473 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1474 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1475 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1476 /* high sh */
1477 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1478 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1479 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1480 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1481 /* low mh */
1482 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1483 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1484 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1485 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1486 /* mid mh */
1487 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1488 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1489 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1490 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1491 /* high mh */
1492 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1493 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1494 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1495 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1496}
1497
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when vddc actually changes */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH. Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when vddci actually changes */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1545
Alex Deucher377edc82012-07-17 14:02:42 -04001546/**
1547 * evergreen_pm_prepare - pre-power state change callback.
1548 *
1549 * @rdev: radeon_device pointer
1550 *
1551 * Prepare for a power state change (evergreen+).
1552 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001553void evergreen_pm_prepare(struct radeon_device *rdev)
1554{
1555 struct drm_device *ddev = rdev->ddev;
1556 struct drm_crtc *crtc;
1557 struct radeon_crtc *radeon_crtc;
1558 u32 tmp;
1559
1560 /* disable any active CRTCs */
1561 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1562 radeon_crtc = to_radeon_crtc(crtc);
1563 if (radeon_crtc->enabled) {
1564 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1565 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1566 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1567 }
1568 }
1569}
1570
Alex Deucher377edc82012-07-17 14:02:42 -04001571/**
1572 * evergreen_pm_finish - post-power state change callback.
1573 *
1574 * @rdev: radeon_device pointer
1575 *
1576 * Clean up after a power state change (evergreen+).
1577 */
Alex Deucher49e02b72010-04-23 17:57:27 -04001578void evergreen_pm_finish(struct radeon_device *rdev)
1579{
1580 struct drm_device *ddev = rdev->ddev;
1581 struct drm_crtc *crtc;
1582 struct radeon_crtc *radeon_crtc;
1583 u32 tmp;
1584
1585 /* enable any active CRTCs */
1586 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1587 radeon_crtc = to_radeon_crtc(crtc);
1588 if (radeon_crtc->enabled) {
1589 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1590 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1591 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1592 }
1593 }
1594}
1595
Alex Deucher377edc82012-07-17 14:02:42 -04001596/**
1597 * evergreen_hpd_sense - hpd sense callback.
1598 *
1599 * @rdev: radeon_device pointer
1600 * @hpd: hpd (hotplug detect) pin
1601 *
1602 * Checks if a digital monitor is connected (evergreen+).
1603 * Returns true if connected, false if not connected.
1604 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001605bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1606{
1607 bool connected = false;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001608
1609 switch (hpd) {
1610 case RADEON_HPD_1:
1611 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1612 connected = true;
1613 break;
1614 case RADEON_HPD_2:
1615 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1616 connected = true;
1617 break;
1618 case RADEON_HPD_3:
1619 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1620 connected = true;
1621 break;
1622 case RADEON_HPD_4:
1623 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1624 connected = true;
1625 break;
1626 case RADEON_HPD_5:
1627 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1628 connected = true;
1629 break;
1630 case RADEON_HPD_6:
1631 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1632 connected = true;
1633 break;
1634 default:
1635 break;
1636 }
1637
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001638 return connected;
1639}
1640
Alex Deucher377edc82012-07-17 14:02:42 -04001641/**
1642 * evergreen_hpd_set_polarity - hpd set polarity callback.
1643 *
1644 * @rdev: radeon_device pointer
1645 * @hpd: hpd (hotplug detect) pin
1646 *
1647 * Set the polarity of the hpd pin (evergreen+).
1648 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001649void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1650 enum radeon_hpd_id hpd)
1651{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001652 u32 tmp;
1653 bool connected = evergreen_hpd_sense(rdev, hpd);
1654
1655 switch (hpd) {
1656 case RADEON_HPD_1:
1657 tmp = RREG32(DC_HPD1_INT_CONTROL);
1658 if (connected)
1659 tmp &= ~DC_HPDx_INT_POLARITY;
1660 else
1661 tmp |= DC_HPDx_INT_POLARITY;
1662 WREG32(DC_HPD1_INT_CONTROL, tmp);
1663 break;
1664 case RADEON_HPD_2:
1665 tmp = RREG32(DC_HPD2_INT_CONTROL);
1666 if (connected)
1667 tmp &= ~DC_HPDx_INT_POLARITY;
1668 else
1669 tmp |= DC_HPDx_INT_POLARITY;
1670 WREG32(DC_HPD2_INT_CONTROL, tmp);
1671 break;
1672 case RADEON_HPD_3:
1673 tmp = RREG32(DC_HPD3_INT_CONTROL);
1674 if (connected)
1675 tmp &= ~DC_HPDx_INT_POLARITY;
1676 else
1677 tmp |= DC_HPDx_INT_POLARITY;
1678 WREG32(DC_HPD3_INT_CONTROL, tmp);
1679 break;
1680 case RADEON_HPD_4:
1681 tmp = RREG32(DC_HPD4_INT_CONTROL);
1682 if (connected)
1683 tmp &= ~DC_HPDx_INT_POLARITY;
1684 else
1685 tmp |= DC_HPDx_INT_POLARITY;
1686 WREG32(DC_HPD4_INT_CONTROL, tmp);
1687 break;
1688 case RADEON_HPD_5:
1689 tmp = RREG32(DC_HPD5_INT_CONTROL);
1690 if (connected)
1691 tmp &= ~DC_HPDx_INT_POLARITY;
1692 else
1693 tmp |= DC_HPDx_INT_POLARITY;
1694 WREG32(DC_HPD5_INT_CONTROL, tmp);
1695 break;
1696 case RADEON_HPD_6:
1697 tmp = RREG32(DC_HPD6_INT_CONTROL);
1698 if (connected)
1699 tmp &= ~DC_HPDx_INT_POLARITY;
1700 else
1701 tmp |= DC_HPDx_INT_POLARITY;
1702 WREG32(DC_HPD6_INT_CONTROL, tmp);
1703 break;
1704 default:
1705 break;
1706 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001707}
1708
Alex Deucher377edc82012-07-17 14:02:42 -04001709/**
1710 * evergreen_hpd_init - hpd setup callback.
1711 *
1712 * @rdev: radeon_device pointer
1713 *
1714 * Setup the hpd pins used by the card (evergreen+).
1715 * Enable the pin, set the polarity, and enable the hpd interrupts.
1716 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001717void evergreen_hpd_init(struct radeon_device *rdev)
1718{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001719 struct drm_device *dev = rdev->ddev;
1720 struct drm_connector *connector;
Christian Koenigfb982572012-05-17 01:33:30 +02001721 unsigned enabled = 0;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001722 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1723 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001724
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001725 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1726 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
Alex Deucher2e97be72013-04-11 12:45:34 -04001727
1728 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1729 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1730 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1731 * aux dp channel on imac and help (but not completely fix)
1732 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1733 * also avoid interrupt storms during dpms.
1734 */
1735 continue;
1736 }
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001737 switch (radeon_connector->hpd.hpd) {
1738 case RADEON_HPD_1:
1739 WREG32(DC_HPD1_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001740 break;
1741 case RADEON_HPD_2:
1742 WREG32(DC_HPD2_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001743 break;
1744 case RADEON_HPD_3:
1745 WREG32(DC_HPD3_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001746 break;
1747 case RADEON_HPD_4:
1748 WREG32(DC_HPD4_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001749 break;
1750 case RADEON_HPD_5:
1751 WREG32(DC_HPD5_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001752 break;
1753 case RADEON_HPD_6:
1754 WREG32(DC_HPD6_CONTROL, tmp);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001755 break;
1756 default:
1757 break;
1758 }
Alex Deucher64912e92011-11-03 11:21:39 -04001759 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
Christian Koenigfb982572012-05-17 01:33:30 +02001760 enabled |= 1 << radeon_connector->hpd.hpd;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001761 }
Christian Koenigfb982572012-05-17 01:33:30 +02001762 radeon_irq_kms_enable_hpd(rdev, enabled);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001763}
1764
Alex Deucher377edc82012-07-17 14:02:42 -04001765/**
1766 * evergreen_hpd_fini - hpd tear down callback.
1767 *
1768 * @rdev: radeon_device pointer
1769 *
1770 * Tear down the hpd pins used by the card (evergreen+).
1771 * Disable the hpd interrupts.
1772 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05001773void evergreen_hpd_fini(struct radeon_device *rdev)
1774{
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001775 struct drm_device *dev = rdev->ddev;
1776 struct drm_connector *connector;
Christian Koenigfb982572012-05-17 01:33:30 +02001777 unsigned disabled = 0;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001778
1779 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1780 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1781 switch (radeon_connector->hpd.hpd) {
1782 case RADEON_HPD_1:
1783 WREG32(DC_HPD1_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001784 break;
1785 case RADEON_HPD_2:
1786 WREG32(DC_HPD2_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001787 break;
1788 case RADEON_HPD_3:
1789 WREG32(DC_HPD3_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001790 break;
1791 case RADEON_HPD_4:
1792 WREG32(DC_HPD4_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001793 break;
1794 case RADEON_HPD_5:
1795 WREG32(DC_HPD5_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001796 break;
1797 case RADEON_HPD_6:
1798 WREG32(DC_HPD6_CONTROL, 0);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001799 break;
1800 default:
1801 break;
1802 }
Christian Koenigfb982572012-05-17 01:33:30 +02001803 disabled |= 1 << radeon_connector->hpd.hpd;
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001804 }
Christian Koenigfb982572012-05-17 01:33:30 +02001805 radeon_irq_kms_disable_hpd(rdev, disabled);
Alex Deucher0ca2ab52010-02-26 13:57:45 -05001806}
1807
Alex Deucherf9d9c362010-10-22 02:51:05 -04001808/* watermark setup */
1809
1810static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1811 struct radeon_crtc *radeon_crtc,
1812 struct drm_display_mode *mode,
1813 struct drm_display_mode *other_mode)
1814{
Alex Deucher12dfc842011-04-14 19:07:34 -04001815 u32 tmp;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001816 /*
1817 * Line Buffer Setup
1818 * There are 3 line buffers, each one shared by 2 display controllers.
1819 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1820 * the display controllers. The paritioning is done via one of four
1821 * preset allocations specified in bits 2:0:
1822 * first display controller
1823 * 0 - first half of lb (3840 * 2)
1824 * 1 - first 3/4 of lb (5760 * 2)
Alex Deucher12dfc842011-04-14 19:07:34 -04001825 * 2 - whole lb (7680 * 2), other crtc must be disabled
Alex Deucherf9d9c362010-10-22 02:51:05 -04001826 * 3 - first 1/4 of lb (1920 * 2)
1827 * second display controller
1828 * 4 - second half of lb (3840 * 2)
1829 * 5 - second 3/4 of lb (5760 * 2)
Alex Deucher12dfc842011-04-14 19:07:34 -04001830 * 6 - whole lb (7680 * 2), other crtc must be disabled
Alex Deucherf9d9c362010-10-22 02:51:05 -04001831 * 7 - last 1/4 of lb (1920 * 2)
1832 */
Alex Deucher12dfc842011-04-14 19:07:34 -04001833 /* this can get tricky if we have two large displays on a paired group
1834 * of crtcs. Ideally for multiple large displays we'd assign them to
1835 * non-linked crtcs for maximum line buffer allocation.
1836 */
1837 if (radeon_crtc->base.enabled && mode) {
1838 if (other_mode)
Alex Deucherf9d9c362010-10-22 02:51:05 -04001839 tmp = 0; /* 1/2 */
Alex Deucher12dfc842011-04-14 19:07:34 -04001840 else
1841 tmp = 2; /* whole */
1842 } else
1843 tmp = 0;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001844
1845 /* second controller of the pair uses second half of the lb */
1846 if (radeon_crtc->crtc_id % 2)
1847 tmp += 4;
1848 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1849
Alex Deucher12dfc842011-04-14 19:07:34 -04001850 if (radeon_crtc->base.enabled && mode) {
1851 switch (tmp) {
1852 case 0:
1853 case 4:
1854 default:
1855 if (ASIC_IS_DCE5(rdev))
1856 return 4096 * 2;
1857 else
1858 return 3840 * 2;
1859 case 1:
1860 case 5:
1861 if (ASIC_IS_DCE5(rdev))
1862 return 6144 * 2;
1863 else
1864 return 5760 * 2;
1865 case 2:
1866 case 6:
1867 if (ASIC_IS_DCE5(rdev))
1868 return 8192 * 2;
1869 else
1870 return 7680 * 2;
1871 case 3:
1872 case 7:
1873 if (ASIC_IS_DCE5(rdev))
1874 return 2048 * 2;
1875 else
1876 return 1920 * 2;
1877 }
Alex Deucherf9d9c362010-10-22 02:51:05 -04001878 }
Alex Deucher12dfc842011-04-14 19:07:34 -04001879
1880 /* controller not enabled, so no lb used */
1881 return 0;
Alex Deucherf9d9c362010-10-22 02:51:05 -04001882}
1883
Alex Deucherca7db222012-03-20 17:18:30 -04001884u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
Alex Deucherf9d9c362010-10-22 02:51:05 -04001885{
1886 u32 tmp = RREG32(MC_SHARED_CHMAP);
1887
1888 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1889 case 0:
1890 default:
1891 return 1;
1892 case 1:
1893 return 2;
1894 case 2:
1895 return 4;
1896 case 3:
1897 return 8;
1898 }
1899}
1900
/* Inputs for the evergreen display watermark calculations below —
 * one instance describes a single display head at a given clock level.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};
1916
1917static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1918{
1919 /* Calculate DRAM Bandwidth and the part allocated to display. */
1920 fixed20_12 dram_efficiency; /* 0.7 */
1921 fixed20_12 yclk, dram_channels, bandwidth;
1922 fixed20_12 a;
1923
1924 a.full = dfixed_const(1000);
1925 yclk.full = dfixed_const(wm->yclk);
1926 yclk.full = dfixed_div(yclk, a);
1927 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1928 a.full = dfixed_const(10);
1929 dram_efficiency.full = dfixed_const(7);
1930 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1931 bandwidth.full = dfixed_mul(dram_channels, yclk);
1932 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1933
1934 return dfixed_trunc(bandwidth);
1935}
1936
1937static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1938{
1939 /* Calculate DRAM Bandwidth and the part allocated to display. */
1940 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1941 fixed20_12 yclk, dram_channels, bandwidth;
1942 fixed20_12 a;
1943
1944 a.full = dfixed_const(1000);
1945 yclk.full = dfixed_const(wm->yclk);
1946 yclk.full = dfixed_div(yclk, a);
1947 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1948 a.full = dfixed_const(10);
1949 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1950 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1951 bandwidth.full = dfixed_mul(dram_channels, yclk);
1952 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1953
1954 return dfixed_trunc(bandwidth);
1955}
1956
1957static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1958{
1959 /* Calculate the display Data return Bandwidth */
1960 fixed20_12 return_efficiency; /* 0.8 */
1961 fixed20_12 sclk, bandwidth;
1962 fixed20_12 a;
1963
1964 a.full = dfixed_const(1000);
1965 sclk.full = dfixed_const(wm->sclk);
1966 sclk.full = dfixed_div(sclk, a);
1967 a.full = dfixed_const(10);
1968 return_efficiency.full = dfixed_const(8);
1969 return_efficiency.full = dfixed_div(return_efficiency, a);
1970 a.full = dfixed_const(32);
1971 bandwidth.full = dfixed_mul(a, sclk);
1972 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1973
1974 return dfixed_trunc(bandwidth);
1975}
1976
1977static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1978{
1979 /* Calculate the DMIF Request Bandwidth */
1980 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1981 fixed20_12 disp_clk, bandwidth;
1982 fixed20_12 a;
1983
1984 a.full = dfixed_const(1000);
1985 disp_clk.full = dfixed_const(wm->disp_clk);
1986 disp_clk.full = dfixed_div(disp_clk, a);
1987 a.full = dfixed_const(10);
1988 disp_clk_request_efficiency.full = dfixed_const(8);
1989 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1990 a.full = dfixed_const(32);
1991 bandwidth.full = dfixed_mul(a, disp_clk);
1992 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1993
1994 return dfixed_trunc(bandwidth);
1995}
1996
1997static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1998{
1999 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2000 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2001 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2002 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2003
2004 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2005}
2006
2007static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2008{
2009 /* Calculate the display mode Average Bandwidth
2010 * DisplayMode should contain the source and destination dimensions,
2011 * timing, etc.
2012 */
2013 fixed20_12 bpp;
2014 fixed20_12 line_time;
2015 fixed20_12 src_width;
2016 fixed20_12 bandwidth;
2017 fixed20_12 a;
2018
2019 a.full = dfixed_const(1000);
2020 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2021 line_time.full = dfixed_div(line_time, a);
2022 bpp.full = dfixed_const(wm->bytes_per_pixel);
2023 src_width.full = dfixed_const(wm->src_width);
2024 bandwidth.full = dfixed_mul(src_width, bpp);
2025 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2026 bandwidth.full = dfixed_div(bandwidth, line_time);
2027
2028 return dfixed_trunc(bandwidth);
2029}
2030
2031static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2032{
2033 /* First calcualte the latency in ns */
2034 u32 mc_latency = 2000; /* 2000 ns. */
2035 u32 available_bandwidth = evergreen_available_bandwidth(wm);
2036 u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2037 u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2038 u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2039 u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2040 (wm->num_heads * cursor_line_pair_return_time);
2041 u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2042 u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2043 fixed20_12 a, b, c;
2044
2045 if (wm->num_heads == 0)
2046 return 0;
2047
2048 a.full = dfixed_const(2);
2049 b.full = dfixed_const(1);
2050 if ((wm->vsc.full > a.full) ||
2051 ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2052 (wm->vtaps >= 5) ||
2053 ((wm->vsc.full >= a.full) && wm->interlaced))
2054 max_src_lines_per_dst_line = 4;
2055 else
2056 max_src_lines_per_dst_line = 2;
2057
2058 a.full = dfixed_const(available_bandwidth);
2059 b.full = dfixed_const(wm->num_heads);
2060 a.full = dfixed_div(a, b);
2061
2062 b.full = dfixed_const(1000);
2063 c.full = dfixed_const(wm->disp_clk);
2064 b.full = dfixed_div(c, b);
2065 c.full = dfixed_const(wm->bytes_per_pixel);
2066 b.full = dfixed_mul(b, c);
2067
2068 lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2069
2070 a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2071 b.full = dfixed_const(1000);
2072 c.full = dfixed_const(lb_fill_bw);
2073 b.full = dfixed_div(c, b);
2074 a.full = dfixed_div(a, b);
2075 line_fill_time = dfixed_trunc(a);
2076
2077 if (line_fill_time < wm->active_time)
2078 return latency;
2079 else
2080 return latency + (line_fill_time - wm->active_time);
2081
2082}
2083
2084static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2085{
2086 if (evergreen_average_bandwidth(wm) <=
2087 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2088 return true;
2089 else
2090 return false;
2091};
2092
2093static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2094{
2095 if (evergreen_average_bandwidth(wm) <=
2096 (evergreen_available_bandwidth(wm) / wm->num_heads))
2097 return true;
2098 else
2099 return false;
2100};
2101
2102static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2103{
2104 u32 lb_partitions = wm->lb_size / wm->src_width;
2105 u32 line_time = wm->active_time + wm->blank_time;
2106 u32 latency_tolerant_lines;
2107 u32 latency_hiding;
2108 fixed20_12 a;
2109
2110 a.full = dfixed_const(1);
2111 if (wm->vsc.full > a.full)
2112 latency_tolerant_lines = 1;
2113 else {
2114 if (lb_partitions <= (wm->vtaps + 1))
2115 latency_tolerant_lines = 1;
2116 else
2117 latency_tolerant_lines = 2;
2118 }
2119
2120 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2121
2122 if (evergreen_latency_watermark(wm) <= latency_hiding)
2123 return true;
2124 else
2125 return false;
2126}
2127
/**
 * evergreen_program_watermarks - program display watermarks for one crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer space (pixels) allocated to this crtc
 * @num_heads: number of active display heads
 *
 * Builds watermark parameters for high (A) and low (B) clock levels from
 * the current mode, computes the latency watermarks, programs them into
 * the per-pipe DMIF arbitration registers, and sets the priority marks.
 * Forces display priority to high when the bandwidth checks fail.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; mode->clock is in kHz */
		pixel_period = 1000000 / (u32)mode->clock;
		/* line time in ns, clamped to the 16-bit register field */
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark converted to lines, scaled by hsc,
		 * in units of 16 pixels
		 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same computation for the low-clock watermark */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2282
/**
 * evergreen_bandwidth_update - update display watermarks callback.
 *
 * @rdev: radeon_device pointer
 *
 * Update the display watermarks based on the requested mode(s)
 * (evergreen+).
 */
void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	/* count enabled heads; feeds the per-head bandwidth split */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	/* crtcs come in pairs sharing a line buffer; adjust the split for
	 * each pair, then program both members' watermarks.
	 * NOTE(review): indexes crtcs[i+1], so this assumes num_crtc is
	 * even — confirm for all ASICs routed through this callback.
	 */
	for (i = 0; i < rdev->num_crtc; i += 2) {
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}
2313
Alex Deucher377edc82012-07-17 14:02:42 -04002314/**
2315 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2316 *
2317 * @rdev: radeon_device pointer
2318 *
2319 * Wait for the MC (memory controller) to be idle.
2320 * (evergreen+).
2321 * Returns 0 if the MC is idle, -1 if not.
2322 */
Alex Deucherb9952a82011-03-02 20:07:33 -05002323int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002324{
2325 unsigned i;
2326 u32 tmp;
2327
2328 for (i = 0; i < rdev->usec_timeout; i++) {
2329 /* read MC_STATUS */
2330 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2331 if (!tmp)
2332 return 0;
2333 udelay(1);
2334 }
2335 return -1;
2336}
2337
2338/*
2339 * GART
2340 */
/* Flush the HDP cache, then request a VM context 0 TLB flush via MMIO
 * and poll the response register until it completes or the usec timeout
 * expires.
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache before flushing the TLB */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* request the flush on VM context 0 */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* response type 2 = flush failed.
			 * NOTE(review): message says "r600" but this path
			 * runs on evergreen hardware.
			 */
			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
			return;
		}
		/* any other non-zero response = flush done */
		if (tmp) {
			return;
		}
		udelay(1);
	}
}
2363
/* Bring up the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLB controls, point VM context 0 at the GTT range and
 * page table, and flush the TLB. Returns 0 on success or a negative
 * error code (-EINVAL if no table object, or the pin failure code).
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts have different TLB register names */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point context 0 at the GTT aperture and the page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
	       RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults on context 0 hit the dummy page instead of random memory */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2422
/* Disable the PCIE GART: turn off both VM contexts, drop the L2 cache
 * enable, shrink the TLB controls to defaults, and unpin the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2447
/* Full GART teardown: disable the hardware, free the page table VRAM,
 * then release the GART bookkeeping. Order matters: hardware must stop
 * referencing the table before it is freed.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2454
2455
/* AGP mode setup: program the VM L2/L1 TLB controls like the GART path
 * but leave both VM contexts disabled (no page table translation).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table: both contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2481
/* Quiesce the display side before reprogramming the memory controller:
 * save VGA state, blank/disable every active crtc, wait for the MC to
 * idle, black out the MC, and lock the double-buffered registers.
 * State needed by evergreen_mc_resume() is stored in @save.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): clearing crtc_enabled here means the
			 * "lock double buffered regs" loop below never runs
			 * for crtcs handled by this hack — confirm intended.
			 */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2567
/**
 * evergreen_mc_resume - restore display access after an MC reconfiguration
 * @rdev: radeon_device pointer
 * @save: display state saved by the matching evergreen_mc_stop() call
 *
 * Re-points every CRTC at the (possibly relocated) start of VRAM, releases
 * the double-buffered register locks taken in evergreen_mc_stop(), lifts the
 * MC blackout, and re-enables scanout on each CRTC that was active before
 * the stop.  The ordering of the register writes below is deliberate: the
 * surface addresses must be programmed while the update locks are still
 * held, and the MC must be unblacked out before the CRTCs start fetching.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	/* chips with no display hardware have no VGA aperture to re-point */
	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force the update mode back to "update at vblank" (0) */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			/* drop the graphics surface update lock taken in mc_stop */
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			/* drop the master update lock taken in mc_stop */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* busy-wait (up to usec_timeout us) for the pending
			 * surface address update to latch */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL,
				 * bracketed by the CRTC update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2655
/**
 * evergreen_mc_program - program the memory controller aperture registers
 * @rdev: radeon_device pointer
 *
 * Programs the VRAM and (optionally) AGP aperture locations into the MC,
 * with display and CPU access stopped around the update so nothing touches
 * VRAM while the apertures move.  Sequence: quiesce (evergreen_mc_stop),
 * wait for MC idle, reprogram apertures, wait for idle again, then restore
 * display state (evergreen_mc_resume).
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both VRAM and GTT, whichever
		 * order they sit in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* faults outside the aperture are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: end in bits 31:16, start in bits 15:0, 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* disable the AGP aperture: BOT > TOP */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2732
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002733/*
2734 * CP.
2735 */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the GFX ring
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Writes the PM4 packets that make the CP fetch and execute @ib.  Also
 * emits a write of the predicted read pointer (either via the rptr save
 * register or a writeback memory write) so the rptr can be tracked.  The
 * "+ 3 + 4" / "+ 5 + 4" offsets are the dword size of the rptr packet
 * itself plus the 4-dword INDIRECT_BUFFER packet that follows — they must
 * stay in sync with the packets emitted below.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* record next_rptr in the dedicated scratch register */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* no scratch reg: record next_rptr via a CP memory write */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* hi address bits plus DATA32 (1 << 18) */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2769
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002770
2771static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2772{
Alex Deucherfe251e22010-03-24 13:36:43 -04002773 const __be32 *fw_data;
2774 int i;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002775
Alex Deucherfe251e22010-03-24 13:36:43 -04002776 if (!rdev->me_fw || !rdev->pfp_fw)
2777 return -EINVAL;
2778
2779 r700_cp_stop(rdev);
Alex Deucher0f234f5f2011-02-13 19:06:33 -05002780 WREG32(CP_RB_CNTL,
2781#ifdef __BIG_ENDIAN
2782 BUF_SWAP_32BIT |
2783#endif
2784 RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
Alex Deucherfe251e22010-03-24 13:36:43 -04002785
2786 fw_data = (const __be32 *)rdev->pfp_fw->data;
2787 WREG32(CP_PFP_UCODE_ADDR, 0);
2788 for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2789 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2790 WREG32(CP_PFP_UCODE_ADDR, 0);
2791
2792 fw_data = (const __be32 *)rdev->me_fw->data;
2793 WREG32(CP_ME_RAM_WADDR, 0);
2794 for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2795 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2796
2797 WREG32(CP_PFP_UCODE_ADDR, 0);
2798 WREG32(CP_ME_RAM_WADDR, 0);
2799 WREG32(CP_ME_RAM_RADDR, 0);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002800 return 0;
2801}
2802
/**
 * evergreen_cp_start - initialize the command processor
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, un-halts the CP, then emits the golden
 * register state (clear-state preamble) so rendering starts from a known
 * context.  The dword counts passed to radeon_ring_lock() (7, then
 * evergreen_default_size + 19) must match the packets written below.
 *
 * Returns 0 on success, negative error code if the ring cannot be locked.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* ME_INITIALIZE: 5 payload dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* clear the halt bits to start the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	/* golden register state from evergreen_blit_shaders.h */
	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2868
/**
 * evergreen_cp_resume - bring up the CP ring buffer
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP and dependent graphics blocks, programs the ring
 * buffer size, pointers, base address and writeback locations, then starts
 * the CP (evergreen_cp_start) and ring-tests it.  The CP_RB_CNTL writes
 * bracket the pointer initialization: RB_RPTR_WR_ENA is set while the
 * read/write pointers are forced to zero, then dropped.
 *
 * Returns 0 on success, negative error code if the ring test fails.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: keep the CP from updating memory copies */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* drop RB_RPTR_WR_ENA now that the pointers are initialized */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002937
2938/*
2939 * Core functions
2940 */
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05002941static void evergreen_gpu_init(struct radeon_device *rdev)
2942{
Alex Deucher416a2bd2012-05-31 19:00:25 -04002943 u32 gb_addr_config;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002944 u32 mc_shared_chmap, mc_arb_ramcfg;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002945 u32 sx_debug_1;
2946 u32 smx_dc_ctl0;
2947 u32 sq_config;
2948 u32 sq_lds_resource_mgmt;
2949 u32 sq_gpr_resource_mgmt_1;
2950 u32 sq_gpr_resource_mgmt_2;
2951 u32 sq_gpr_resource_mgmt_3;
2952 u32 sq_thread_resource_mgmt;
2953 u32 sq_thread_resource_mgmt_2;
2954 u32 sq_stack_resource_mgmt_1;
2955 u32 sq_stack_resource_mgmt_2;
2956 u32 sq_stack_resource_mgmt_3;
2957 u32 vgt_cache_invalidation;
Alex Deucherf25a5c62011-05-19 11:07:57 -04002958 u32 hdp_host_path_cntl, tmp;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002959 u32 disabled_rb_mask;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002960 int i, j, num_shader_engines, ps_thread_count;
2961
2962 switch (rdev->family) {
2963 case CHIP_CYPRESS:
2964 case CHIP_HEMLOCK:
2965 rdev->config.evergreen.num_ses = 2;
2966 rdev->config.evergreen.max_pipes = 4;
2967 rdev->config.evergreen.max_tile_pipes = 8;
2968 rdev->config.evergreen.max_simds = 10;
2969 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2970 rdev->config.evergreen.max_gprs = 256;
2971 rdev->config.evergreen.max_threads = 248;
2972 rdev->config.evergreen.max_gs_threads = 32;
2973 rdev->config.evergreen.max_stack_entries = 512;
2974 rdev->config.evergreen.sx_num_of_sets = 4;
2975 rdev->config.evergreen.sx_max_export_size = 256;
2976 rdev->config.evergreen.sx_max_export_pos_size = 64;
2977 rdev->config.evergreen.sx_max_export_smx_size = 192;
2978 rdev->config.evergreen.max_hw_contexts = 8;
2979 rdev->config.evergreen.sq_num_cf_insts = 2;
2980
2981 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2982 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2983 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04002984 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04002985 break;
2986 case CHIP_JUNIPER:
2987 rdev->config.evergreen.num_ses = 1;
2988 rdev->config.evergreen.max_pipes = 4;
2989 rdev->config.evergreen.max_tile_pipes = 4;
2990 rdev->config.evergreen.max_simds = 10;
2991 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2992 rdev->config.evergreen.max_gprs = 256;
2993 rdev->config.evergreen.max_threads = 248;
2994 rdev->config.evergreen.max_gs_threads = 32;
2995 rdev->config.evergreen.max_stack_entries = 512;
2996 rdev->config.evergreen.sx_num_of_sets = 4;
2997 rdev->config.evergreen.sx_max_export_size = 256;
2998 rdev->config.evergreen.sx_max_export_pos_size = 64;
2999 rdev->config.evergreen.sx_max_export_smx_size = 192;
3000 rdev->config.evergreen.max_hw_contexts = 8;
3001 rdev->config.evergreen.sq_num_cf_insts = 2;
3002
3003 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3004 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3005 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003006 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003007 break;
3008 case CHIP_REDWOOD:
3009 rdev->config.evergreen.num_ses = 1;
3010 rdev->config.evergreen.max_pipes = 4;
3011 rdev->config.evergreen.max_tile_pipes = 4;
3012 rdev->config.evergreen.max_simds = 5;
3013 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3014 rdev->config.evergreen.max_gprs = 256;
3015 rdev->config.evergreen.max_threads = 248;
3016 rdev->config.evergreen.max_gs_threads = 32;
3017 rdev->config.evergreen.max_stack_entries = 256;
3018 rdev->config.evergreen.sx_num_of_sets = 4;
3019 rdev->config.evergreen.sx_max_export_size = 256;
3020 rdev->config.evergreen.sx_max_export_pos_size = 64;
3021 rdev->config.evergreen.sx_max_export_smx_size = 192;
3022 rdev->config.evergreen.max_hw_contexts = 8;
3023 rdev->config.evergreen.sq_num_cf_insts = 2;
3024
3025 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3026 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3027 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003028 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003029 break;
3030 case CHIP_CEDAR:
3031 default:
3032 rdev->config.evergreen.num_ses = 1;
3033 rdev->config.evergreen.max_pipes = 2;
3034 rdev->config.evergreen.max_tile_pipes = 2;
3035 rdev->config.evergreen.max_simds = 2;
3036 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3037 rdev->config.evergreen.max_gprs = 256;
3038 rdev->config.evergreen.max_threads = 192;
3039 rdev->config.evergreen.max_gs_threads = 16;
3040 rdev->config.evergreen.max_stack_entries = 256;
3041 rdev->config.evergreen.sx_num_of_sets = 4;
3042 rdev->config.evergreen.sx_max_export_size = 128;
3043 rdev->config.evergreen.sx_max_export_pos_size = 32;
3044 rdev->config.evergreen.sx_max_export_smx_size = 96;
3045 rdev->config.evergreen.max_hw_contexts = 4;
3046 rdev->config.evergreen.sq_num_cf_insts = 1;
3047
3048 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3049 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3050 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003051 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003052 break;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003053 case CHIP_PALM:
3054 rdev->config.evergreen.num_ses = 1;
3055 rdev->config.evergreen.max_pipes = 2;
3056 rdev->config.evergreen.max_tile_pipes = 2;
3057 rdev->config.evergreen.max_simds = 2;
3058 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3059 rdev->config.evergreen.max_gprs = 256;
3060 rdev->config.evergreen.max_threads = 192;
3061 rdev->config.evergreen.max_gs_threads = 16;
3062 rdev->config.evergreen.max_stack_entries = 256;
3063 rdev->config.evergreen.sx_num_of_sets = 4;
3064 rdev->config.evergreen.sx_max_export_size = 128;
3065 rdev->config.evergreen.sx_max_export_pos_size = 32;
3066 rdev->config.evergreen.sx_max_export_smx_size = 96;
3067 rdev->config.evergreen.max_hw_contexts = 4;
3068 rdev->config.evergreen.sq_num_cf_insts = 1;
3069
3070 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3071 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3072 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003073 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003074 break;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003075 case CHIP_SUMO:
3076 rdev->config.evergreen.num_ses = 1;
3077 rdev->config.evergreen.max_pipes = 4;
Jerome Glissebd25f072012-12-11 11:56:52 -05003078 rdev->config.evergreen.max_tile_pipes = 4;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003079 if (rdev->pdev->device == 0x9648)
3080 rdev->config.evergreen.max_simds = 3;
3081 else if ((rdev->pdev->device == 0x9647) ||
3082 (rdev->pdev->device == 0x964a))
3083 rdev->config.evergreen.max_simds = 4;
3084 else
3085 rdev->config.evergreen.max_simds = 5;
3086 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3087 rdev->config.evergreen.max_gprs = 256;
3088 rdev->config.evergreen.max_threads = 248;
3089 rdev->config.evergreen.max_gs_threads = 32;
3090 rdev->config.evergreen.max_stack_entries = 256;
3091 rdev->config.evergreen.sx_num_of_sets = 4;
3092 rdev->config.evergreen.sx_max_export_size = 256;
3093 rdev->config.evergreen.sx_max_export_pos_size = 64;
3094 rdev->config.evergreen.sx_max_export_smx_size = 192;
3095 rdev->config.evergreen.max_hw_contexts = 8;
3096 rdev->config.evergreen.sq_num_cf_insts = 2;
3097
3098 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3099 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3100 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05003101 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003102 break;
3103 case CHIP_SUMO2:
3104 rdev->config.evergreen.num_ses = 1;
3105 rdev->config.evergreen.max_pipes = 4;
3106 rdev->config.evergreen.max_tile_pipes = 4;
3107 rdev->config.evergreen.max_simds = 2;
3108 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3109 rdev->config.evergreen.max_gprs = 256;
3110 rdev->config.evergreen.max_threads = 248;
3111 rdev->config.evergreen.max_gs_threads = 32;
3112 rdev->config.evergreen.max_stack_entries = 512;
3113 rdev->config.evergreen.sx_num_of_sets = 4;
3114 rdev->config.evergreen.sx_max_export_size = 256;
3115 rdev->config.evergreen.sx_max_export_pos_size = 64;
3116 rdev->config.evergreen.sx_max_export_smx_size = 192;
3117 rdev->config.evergreen.max_hw_contexts = 8;
3118 rdev->config.evergreen.sq_num_cf_insts = 2;
3119
3120 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3121 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3122 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Jerome Glissebd25f072012-12-11 11:56:52 -05003123 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
Alex Deucherd5c5a722011-05-31 15:42:48 -04003124 break;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003125 case CHIP_BARTS:
3126 rdev->config.evergreen.num_ses = 2;
3127 rdev->config.evergreen.max_pipes = 4;
3128 rdev->config.evergreen.max_tile_pipes = 8;
3129 rdev->config.evergreen.max_simds = 7;
3130 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3131 rdev->config.evergreen.max_gprs = 256;
3132 rdev->config.evergreen.max_threads = 248;
3133 rdev->config.evergreen.max_gs_threads = 32;
3134 rdev->config.evergreen.max_stack_entries = 512;
3135 rdev->config.evergreen.sx_num_of_sets = 4;
3136 rdev->config.evergreen.sx_max_export_size = 256;
3137 rdev->config.evergreen.sx_max_export_pos_size = 64;
3138 rdev->config.evergreen.sx_max_export_smx_size = 192;
3139 rdev->config.evergreen.max_hw_contexts = 8;
3140 rdev->config.evergreen.sq_num_cf_insts = 2;
3141
3142 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3143 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3144 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003145 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003146 break;
3147 case CHIP_TURKS:
3148 rdev->config.evergreen.num_ses = 1;
3149 rdev->config.evergreen.max_pipes = 4;
3150 rdev->config.evergreen.max_tile_pipes = 4;
3151 rdev->config.evergreen.max_simds = 6;
3152 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3153 rdev->config.evergreen.max_gprs = 256;
3154 rdev->config.evergreen.max_threads = 248;
3155 rdev->config.evergreen.max_gs_threads = 32;
3156 rdev->config.evergreen.max_stack_entries = 256;
3157 rdev->config.evergreen.sx_num_of_sets = 4;
3158 rdev->config.evergreen.sx_max_export_size = 256;
3159 rdev->config.evergreen.sx_max_export_pos_size = 64;
3160 rdev->config.evergreen.sx_max_export_smx_size = 192;
3161 rdev->config.evergreen.max_hw_contexts = 8;
3162 rdev->config.evergreen.sq_num_cf_insts = 2;
3163
3164 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3165 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3166 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003167 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003168 break;
3169 case CHIP_CAICOS:
3170 rdev->config.evergreen.num_ses = 1;
Jerome Glissebd25f072012-12-11 11:56:52 -05003171 rdev->config.evergreen.max_pipes = 2;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003172 rdev->config.evergreen.max_tile_pipes = 2;
3173 rdev->config.evergreen.max_simds = 2;
3174 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3175 rdev->config.evergreen.max_gprs = 256;
3176 rdev->config.evergreen.max_threads = 192;
3177 rdev->config.evergreen.max_gs_threads = 16;
3178 rdev->config.evergreen.max_stack_entries = 256;
3179 rdev->config.evergreen.sx_num_of_sets = 4;
3180 rdev->config.evergreen.sx_max_export_size = 128;
3181 rdev->config.evergreen.sx_max_export_pos_size = 32;
3182 rdev->config.evergreen.sx_max_export_smx_size = 96;
3183 rdev->config.evergreen.max_hw_contexts = 4;
3184 rdev->config.evergreen.sq_num_cf_insts = 1;
3185
3186 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3187 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3188 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
Alex Deucher416a2bd2012-05-31 19:00:25 -04003189 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
Alex Deucheradb68fa2011-01-06 21:19:24 -05003190 break;
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003191 }
3192
3193 /* Initialize HDP */
3194 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3195 WREG32((0x2c14 + j), 0x00000000);
3196 WREG32((0x2c18 + j), 0x00000000);
3197 WREG32((0x2c1c + j), 0x00000000);
3198 WREG32((0x2c20 + j), 0x00000000);
3199 WREG32((0x2c24 + j), 0x00000000);
3200 }
3201
3202 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3203
Alex Deucherd054ac12011-09-01 17:46:15 +00003204 evergreen_fix_pci_max_read_req_size(rdev);
3205
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003206 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
Alex Deucher05b3ef62012-03-20 17:18:37 -04003207 if ((rdev->family == CHIP_PALM) ||
3208 (rdev->family == CHIP_SUMO) ||
3209 (rdev->family == CHIP_SUMO2))
Alex Deucherd9282fc2011-05-11 03:15:24 -04003210 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3211 else
3212 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003213
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003214 /* setup tiling info dword. gb_addr_config is not adequate since it does
3215 * not have bank info, so create a custom tiling dword.
3216 * bits 3:0 num_pipes
3217 * bits 7:4 num_banks
3218 * bits 11:8 group_size
3219 * bits 15:12 row_size
3220 */
3221 rdev->config.evergreen.tile_config = 0;
3222 switch (rdev->config.evergreen.max_tile_pipes) {
3223 case 1:
3224 default:
3225 rdev->config.evergreen.tile_config |= (0 << 0);
3226 break;
3227 case 2:
3228 rdev->config.evergreen.tile_config |= (1 << 0);
3229 break;
3230 case 4:
3231 rdev->config.evergreen.tile_config |= (2 << 0);
3232 break;
3233 case 8:
3234 rdev->config.evergreen.tile_config |= (3 << 0);
3235 break;
3236 }
Alex Deucherd698a342011-06-23 00:49:29 -04003237 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
Alex Deucher5bfa4872011-05-20 12:35:22 -04003238 if (rdev->flags & RADEON_IS_IGP)
Alex Deucherd698a342011-06-23 00:49:29 -04003239 rdev->config.evergreen.tile_config |= 1 << 4;
Alex Deucher29d65402012-05-31 18:53:36 -04003240 else {
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003241 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3242 case 0: /* four banks */
Alex Deucher29d65402012-05-31 18:53:36 -04003243 rdev->config.evergreen.tile_config |= 0 << 4;
Alex Deucherc8d15ed2012-07-31 11:01:10 -04003244 break;
3245 case 1: /* eight banks */
3246 rdev->config.evergreen.tile_config |= 1 << 4;
3247 break;
3248 case 2: /* sixteen banks */
3249 default:
3250 rdev->config.evergreen.tile_config |= 2 << 4;
3251 break;
3252 }
Alex Deucher29d65402012-05-31 18:53:36 -04003253 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003254 rdev->config.evergreen.tile_config |= 0 << 8;
Alex Deucher1aa52bd2010-11-17 12:11:03 -05003255 rdev->config.evergreen.tile_config |=
3256 ((gb_addr_config & 0x30000000) >> 28) << 12;
3257
Alex Deucher416a2bd2012-05-31 19:00:25 -04003258 num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3259
3260 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3261 u32 efuse_straps_4;
3262 u32 efuse_straps_3;
3263
Alex Deucherff82bbc2013-04-12 11:27:20 -04003264 efuse_straps_4 = RREG32_RCU(0x204);
3265 efuse_straps_3 = RREG32_RCU(0x203);
Alex Deucher416a2bd2012-05-31 19:00:25 -04003266 tmp = (((efuse_straps_4 & 0xf) << 4) |
3267 ((efuse_straps_3 & 0xf0000000) >> 28));
3268 } else {
3269 tmp = 0;
3270 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3271 u32 rb_disable_bitmap;
3272
3273 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3274 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3275 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3276 tmp <<= 4;
3277 tmp |= rb_disable_bitmap;
3278 }
3279 }
3280 /* enabled rb are just the one not disabled :) */
3281 disabled_rb_mask = tmp;
Alex Deuchercedb6552013-04-09 10:13:22 -04003282 tmp = 0;
3283 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3284 tmp |= (1 << i);
3285 /* if all the backends are disabled, fix it up here */
3286 if ((disabled_rb_mask & tmp) == tmp) {
3287 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3288 disabled_rb_mask &= ~(1 << i);
3289 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003290
3291 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3292 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3293
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003294 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3295 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3296 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
Alex Deucher233d1ad2012-12-04 15:25:59 -05003297 WREG32(DMA_TILING_CONFIG, gb_addr_config);
Christian König9a210592013-04-08 12:41:37 +02003298 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3299 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3300 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003301
Alex Deucherf7eb9732013-01-30 13:57:40 -05003302 if ((rdev->config.evergreen.max_backends == 1) &&
3303 (rdev->flags & RADEON_IS_IGP)) {
3304 if ((disabled_rb_mask & 3) == 1) {
3305 /* RB0 disabled, RB1 enabled */
3306 tmp = 0x11111111;
3307 } else {
3308 /* RB1 disabled, RB0 enabled */
3309 tmp = 0x00000000;
3310 }
3311 } else {
3312 tmp = gb_addr_config & NUM_PIPES_MASK;
3313 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3314 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3315 }
Alex Deucher416a2bd2012-05-31 19:00:25 -04003316 WREG32(GB_BACKEND_MAP, tmp);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003317
3318 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3319 WREG32(CGTS_TCC_DISABLE, 0);
3320 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3321 WREG32(CGTS_USER_TCC_DISABLE, 0);
3322
3323 /* set HW defaults for 3D engine */
3324 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3325 ROQ_IB2_START(0x2b)));
3326
3327 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3328
3329 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3330 SYNC_GRADIENT |
3331 SYNC_WALKER |
3332 SYNC_ALIGNER));
3333
3334 sx_debug_1 = RREG32(SX_DEBUG_1);
3335 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3336 WREG32(SX_DEBUG_1, sx_debug_1);
3337
3338
3339 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3340 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3341 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3342 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3343
Alex Deucherb866d132012-06-14 22:06:36 +02003344 if (rdev->family <= CHIP_SUMO2)
3345 WREG32(SMX_SAR_CTL0, 0x00010000);
3346
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003347 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3348 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3349 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3350
3351 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3352 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3353 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3354
3355 WREG32(VGT_NUM_INSTANCES, 1);
3356 WREG32(SPI_CONFIG_CNTL, 0);
3357 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3358 WREG32(CP_PERFMON_CNTL, 0);
3359
3360 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3361 FETCH_FIFO_HIWATER(0x4) |
3362 DONE_FIFO_HIWATER(0xe0) |
3363 ALU_UPDATE_FIFO_HIWATER(0x8)));
3364
3365 sq_config = RREG32(SQ_CONFIG);
3366 sq_config &= ~(PS_PRIO(3) |
3367 VS_PRIO(3) |
3368 GS_PRIO(3) |
3369 ES_PRIO(3));
3370 sq_config |= (VC_ENABLE |
3371 EXPORT_SRC_C |
3372 PS_PRIO(0) |
3373 VS_PRIO(1) |
3374 GS_PRIO(2) |
3375 ES_PRIO(3));
3376
Alex Deucherd5e455e2010-11-22 17:56:29 -05003377 switch (rdev->family) {
3378 case CHIP_CEDAR:
3379 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003380 case CHIP_SUMO:
3381 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003382 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003383 /* no vertex cache */
3384 sq_config &= ~VC_ENABLE;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003385 break;
3386 default:
3387 break;
3388 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003389
3390 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3391
3392 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3393 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3394 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3395 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3396 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3397 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3398 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3399
Alex Deucherd5e455e2010-11-22 17:56:29 -05003400 switch (rdev->family) {
3401 case CHIP_CEDAR:
3402 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003403 case CHIP_SUMO:
3404 case CHIP_SUMO2:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003405 ps_thread_count = 96;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003406 break;
3407 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003408 ps_thread_count = 128;
Alex Deucherd5e455e2010-11-22 17:56:29 -05003409 break;
3410 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003411
3412 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
Alex Deucherf96b35c2010-06-16 12:24:07 -04003413 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3414 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3415 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3416 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3417 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003418
3419 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3420 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3421 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3422 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3423 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3424 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3425
3426 WREG32(SQ_CONFIG, sq_config);
3427 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3428 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3429 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3430 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3431 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3432 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3433 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3434 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3435 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3436 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3437
3438 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3439 FORCE_EOV_MAX_REZ_CNT(255)));
3440
Alex Deucherd5e455e2010-11-22 17:56:29 -05003441 switch (rdev->family) {
3442 case CHIP_CEDAR:
3443 case CHIP_PALM:
Alex Deucherd5c5a722011-05-31 15:42:48 -04003444 case CHIP_SUMO:
3445 case CHIP_SUMO2:
Alex Deucheradb68fa2011-01-06 21:19:24 -05003446 case CHIP_CAICOS:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003447 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003448 break;
3449 default:
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003450 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
Alex Deucherd5e455e2010-11-22 17:56:29 -05003451 break;
3452 }
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003453 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3454 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3455
3456 WREG32(VGT_GS_VERTEX_REUSE, 16);
Alex Deucher12920592011-02-02 12:37:40 -05003457 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003458 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3459
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003460 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3461 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3462
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003463 WREG32(CB_PERF_CTR0_SEL_0, 0);
3464 WREG32(CB_PERF_CTR0_SEL_1, 0);
3465 WREG32(CB_PERF_CTR1_SEL_0, 0);
3466 WREG32(CB_PERF_CTR1_SEL_1, 0);
3467 WREG32(CB_PERF_CTR2_SEL_0, 0);
3468 WREG32(CB_PERF_CTR2_SEL_1, 0);
3469 WREG32(CB_PERF_CTR3_SEL_0, 0);
3470 WREG32(CB_PERF_CTR3_SEL_1, 0);
3471
Alex Deucher60a4a3e2010-06-29 17:03:35 -04003472 /* clear render buffer base addresses */
3473 WREG32(CB_COLOR0_BASE, 0);
3474 WREG32(CB_COLOR1_BASE, 0);
3475 WREG32(CB_COLOR2_BASE, 0);
3476 WREG32(CB_COLOR3_BASE, 0);
3477 WREG32(CB_COLOR4_BASE, 0);
3478 WREG32(CB_COLOR5_BASE, 0);
3479 WREG32(CB_COLOR6_BASE, 0);
3480 WREG32(CB_COLOR7_BASE, 0);
3481 WREG32(CB_COLOR8_BASE, 0);
3482 WREG32(CB_COLOR9_BASE, 0);
3483 WREG32(CB_COLOR10_BASE, 0);
3484 WREG32(CB_COLOR11_BASE, 0);
3485
3486 /* set the shader const cache sizes to 0 */
3487 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3488 WREG32(i, 0);
3489 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3490 WREG32(i, 0);
3491
Alex Deucherf25a5c62011-05-19 11:07:57 -04003492 tmp = RREG32(HDP_MISC_CNTL);
3493 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3494 WREG32(HDP_MISC_CNTL, tmp);
3495
Alex Deucher32fcdbf2010-03-24 13:33:47 -04003496 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3497 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3498
3499 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3500
3501 udelay(50);
3502
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003503}
3504
3505int evergreen_mc_init(struct radeon_device *rdev)
3506{
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003507 u32 tmp;
3508 int chansize, numchan;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003509
3510 /* Get VRAM informations */
3511 rdev->mc.vram_is_ddr = true;
Alex Deucher05b3ef62012-03-20 17:18:37 -04003512 if ((rdev->family == CHIP_PALM) ||
3513 (rdev->family == CHIP_SUMO) ||
3514 (rdev->family == CHIP_SUMO2))
Alex Deucher82084412011-07-01 13:18:28 -04003515 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3516 else
3517 tmp = RREG32(MC_ARB_RAMCFG);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003518 if (tmp & CHANSIZE_OVERRIDE) {
3519 chansize = 16;
3520 } else if (tmp & CHANSIZE_MASK) {
3521 chansize = 64;
3522 } else {
3523 chansize = 32;
3524 }
3525 tmp = RREG32(MC_SHARED_CHMAP);
3526 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3527 case 0:
3528 default:
3529 numchan = 1;
3530 break;
3531 case 1:
3532 numchan = 2;
3533 break;
3534 case 2:
3535 numchan = 4;
3536 break;
3537 case 3:
3538 numchan = 8;
3539 break;
3540 }
3541 rdev->mc.vram_width = numchan * chansize;
3542 /* Could aper size report 0 ? */
Jordan Crouse01d73a62010-05-27 13:40:24 -06003543 rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3544 rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003545 /* Setup GPU memory space */
Alex Deucher05b3ef62012-03-20 17:18:37 -04003546 if ((rdev->family == CHIP_PALM) ||
3547 (rdev->family == CHIP_SUMO) ||
3548 (rdev->family == CHIP_SUMO2)) {
Alex Deucher6eb18f82010-11-22 17:56:27 -05003549 /* size in bytes on fusion */
3550 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3551 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3552 } else {
Alex Deucher05b3ef62012-03-20 17:18:37 -04003553 /* size in MB on evergreen/cayman/tn */
Niels Ole Salscheiderfc986032013-05-18 21:19:23 +02003554 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3555 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
Alex Deucher6eb18f82010-11-22 17:56:27 -05003556 }
Jerome Glisse51e5fcd2010-02-19 14:33:54 +00003557 rdev->mc.visible_vram_size = rdev->mc.aper_size;
Alex Deucher0ef0c1f2010-11-22 17:56:26 -05003558 r700_vram_gtt_location(rdev, &rdev->mc);
Alex Deucherf47299c2010-03-16 20:54:38 -04003559 radeon_update_bandwidth_info(rdev);
3560
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003561 return 0;
3562}
Jerome Glissed594e462010-02-17 21:54:29 +00003563
Alex Deucher187e3592013-01-18 14:51:38 -05003564void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
Alex Deucher747943e2010-03-24 13:26:36 -04003565{
Jerome Glisse64c56e82013-01-02 17:30:35 -05003566 dev_info(rdev->dev, " GRBM_STATUS = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003567 RREG32(GRBM_STATUS));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003568 dev_info(rdev->dev, " GRBM_STATUS_SE0 = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003569 RREG32(GRBM_STATUS_SE0));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003570 dev_info(rdev->dev, " GRBM_STATUS_SE1 = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003571 RREG32(GRBM_STATUS_SE1));
Jerome Glisse64c56e82013-01-02 17:30:35 -05003572 dev_info(rdev->dev, " SRBM_STATUS = 0x%08X\n",
Alex Deucher747943e2010-03-24 13:26:36 -04003573 RREG32(SRBM_STATUS));
Alex Deuchera65a4362013-01-18 18:55:54 -05003574 dev_info(rdev->dev, " SRBM_STATUS2 = 0x%08X\n",
3575 RREG32(SRBM_STATUS2));
Jerome Glisse440a7cd2012-06-27 12:25:01 -04003576 dev_info(rdev->dev, " R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3577 RREG32(CP_STALLED_STAT1));
3578 dev_info(rdev->dev, " R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3579 RREG32(CP_STALLED_STAT2));
3580 dev_info(rdev->dev, " R_00867C_CP_BUSY_STAT = 0x%08X\n",
3581 RREG32(CP_BUSY_STAT));
3582 dev_info(rdev->dev, " R_008680_CP_STAT = 0x%08X\n",
3583 RREG32(CP_STAT));
Alex Deucher0ecebb92013-01-03 12:40:13 -05003584 dev_info(rdev->dev, " R_00D034_DMA_STATUS_REG = 0x%08X\n",
3585 RREG32(DMA_STATUS_REG));
Alex Deucher168757e2013-01-18 19:17:22 -05003586 if (rdev->family >= CHIP_CAYMAN) {
3587 dev_info(rdev->dev, " R_00D834_DMA_STATUS_REG = 0x%08X\n",
3588 RREG32(DMA_STATUS_REG + 0x800));
3589 }
Alex Deucher0ecebb92013-01-03 12:40:13 -05003590}
3591
Alex Deucher168757e2013-01-18 19:17:22 -05003592bool evergreen_is_display_hung(struct radeon_device *rdev)
Alex Deuchera65a4362013-01-18 18:55:54 -05003593{
3594 u32 crtc_hung = 0;
3595 u32 crtc_status[6];
3596 u32 i, j, tmp;
3597
3598 for (i = 0; i < rdev->num_crtc; i++) {
3599 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3600 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3601 crtc_hung |= (1 << i);
3602 }
3603 }
3604
3605 for (j = 0; j < 10; j++) {
3606 for (i = 0; i < rdev->num_crtc; i++) {
3607 if (crtc_hung & (1 << i)) {
3608 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3609 if (tmp != crtc_status[i])
3610 crtc_hung &= ~(1 << i);
3611 }
3612 }
3613 if (crtc_hung == 0)
3614 return false;
3615 udelay(100);
3616 }
3617
3618 return true;
3619}
3620
Christian König2483b4e2013-08-13 11:56:54 +02003621u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
Alex Deuchera65a4362013-01-18 18:55:54 -05003622{
3623 u32 reset_mask = 0;
3624 u32 tmp;
3625
3626 /* GRBM_STATUS */
3627 tmp = RREG32(GRBM_STATUS);
3628 if (tmp & (PA_BUSY | SC_BUSY |
3629 SH_BUSY | SX_BUSY |
3630 TA_BUSY | VGT_BUSY |
3631 DB_BUSY | CB_BUSY |
3632 SPI_BUSY | VGT_BUSY_NO_DMA))
3633 reset_mask |= RADEON_RESET_GFX;
3634
3635 if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3636 CP_BUSY | CP_COHERENCY_BUSY))
3637 reset_mask |= RADEON_RESET_CP;
3638
3639 if (tmp & GRBM_EE_BUSY)
3640 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3641
3642 /* DMA_STATUS_REG */
3643 tmp = RREG32(DMA_STATUS_REG);
3644 if (!(tmp & DMA_IDLE))
3645 reset_mask |= RADEON_RESET_DMA;
3646
3647 /* SRBM_STATUS2 */
3648 tmp = RREG32(SRBM_STATUS2);
3649 if (tmp & DMA_BUSY)
3650 reset_mask |= RADEON_RESET_DMA;
3651
3652 /* SRBM_STATUS */
3653 tmp = RREG32(SRBM_STATUS);
3654 if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3655 reset_mask |= RADEON_RESET_RLC;
3656
3657 if (tmp & IH_BUSY)
3658 reset_mask |= RADEON_RESET_IH;
3659
3660 if (tmp & SEM_BUSY)
3661 reset_mask |= RADEON_RESET_SEM;
3662
3663 if (tmp & GRBM_RQ_PENDING)
3664 reset_mask |= RADEON_RESET_GRBM;
3665
3666 if (tmp & VMC_BUSY)
3667 reset_mask |= RADEON_RESET_VMC;
3668
3669 if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3670 MCC_BUSY | MCD_BUSY))
3671 reset_mask |= RADEON_RESET_MC;
3672
3673 if (evergreen_is_display_hung(rdev))
3674 reset_mask |= RADEON_RESET_DISPLAY;
3675
3676 /* VM_L2_STATUS */
3677 tmp = RREG32(VM_L2_STATUS);
3678 if (tmp & L2_BUSY)
3679 reset_mask |= RADEON_RESET_VMC;
3680
Alex Deucherd808fc82013-02-28 10:03:08 -05003681 /* Skip MC reset as it's mostly likely not hung, just busy */
3682 if (reset_mask & RADEON_RESET_MC) {
3683 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3684 reset_mask &= ~RADEON_RESET_MC;
3685 }
3686
Alex Deuchera65a4362013-01-18 18:55:54 -05003687 return reset_mask;
3688}
3689
/**
 * evergreen_gpu_soft_reset - soft reset the GPU blocks selected in the mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* flags of the blocks to reset
 *
 * Halts the CP (and DMA if requested), stops the MC and waits for it to
 * idle, then pulses the corresponding GRBM/SRBM soft-reset bits (assert,
 * 50us delay, deassert) before restoring the MC.  The status registers
 * are dumped before and after for diagnosis.  The statement order here
 * follows the required hardware reset sequence — do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	/* nothing to do */
	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the memory controller so in-flight traffic settles first */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC soft reset is only valid on discrete parts, never on IGPs */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse GRBM resets: assert, read back to post, delay, deassert */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* pulse SRBM resets the same way */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3803
Jerome Glissea2d07b72010-03-09 14:45:11 +00003804int evergreen_asic_reset(struct radeon_device *rdev)
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003805{
Alex Deuchera65a4362013-01-18 18:55:54 -05003806 u32 reset_mask;
3807
3808 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3809
3810 if (reset_mask)
3811 r600_set_bios_scratch_engine_hung(rdev, true);
3812
3813 evergreen_gpu_soft_reset(rdev, reset_mask);
3814
3815 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3816
3817 if (!reset_mask)
3818 r600_set_bios_scratch_engine_hung(rdev, false);
3819
3820 return 0;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05003821}
3822
Alex Deucher123bc182013-01-24 11:37:19 -05003823/**
3824 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3825 *
3826 * @rdev: radeon_device pointer
3827 * @ring: radeon_ring structure holding ring information
3828 *
3829 * Check if the GFX engine is locked up.
3830 * Returns true if the engine appears to be locked up, false if not.
3831 */
3832bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3833{
3834 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3835
3836 if (!(reset_mask & (RADEON_RESET_GFX |
3837 RADEON_RESET_COMPUTE |
3838 RADEON_RESET_CP))) {
3839 radeon_ring_lockup_update(ring);
3840 return false;
3841 }
3842 /* force CP activities */
3843 radeon_ring_force_activity(rdev, ring);
3844 return radeon_ring_test_lockup(rdev, ring);
3845}
3846
Alex Deucher2948f5e2013-04-12 13:52:52 -04003847/*
3848 * RLC
3849 */
3850#define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
3851#define RLC_CLEAR_STATE_END_MARKER 0x00000001
3852
3853void sumo_rlc_fini(struct radeon_device *rdev)
3854{
3855 int r;
3856
3857 /* save restore block */
3858 if (rdev->rlc.save_restore_obj) {
3859 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3860 if (unlikely(r != 0))
3861 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3862 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3863 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3864
3865 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3866 rdev->rlc.save_restore_obj = NULL;
3867 }
3868
3869 /* clear state block */
3870 if (rdev->rlc.clear_state_obj) {
3871 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3872 if (unlikely(r != 0))
3873 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3874 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3875 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3876
3877 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3878 rdev->rlc.clear_state_obj = NULL;
3879 }
Alex Deucher22c775c2013-07-23 09:41:05 -04003880
3881 /* clear state block */
3882 if (rdev->rlc.cp_table_obj) {
3883 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3884 if (unlikely(r != 0))
3885 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3886 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3887 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
3888
3889 radeon_bo_unref(&rdev->rlc.cp_table_obj);
3890 rdev->rlc.cp_table_obj = NULL;
3891 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003892}
3893
Alex Deucher22c775c2013-07-23 09:41:05 -04003894#define CP_ME_TABLE_SIZE 96
3895
Alex Deucher2948f5e2013-04-12 13:52:52 -04003896int sumo_rlc_init(struct radeon_device *rdev)
3897{
Alex Deucher1fd11772013-04-17 17:53:50 -04003898 const u32 *src_ptr;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003899 volatile u32 *dst_ptr;
3900 u32 dws, data, i, j, k, reg_num;
Alex Deucher59a82d02013-08-13 12:48:06 -04003901 u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003902 u64 reg_list_mc_addr;
Alex Deucher1fd11772013-04-17 17:53:50 -04003903 const struct cs_section_def *cs_data;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003904 int r;
3905
3906 src_ptr = rdev->rlc.reg_list;
3907 dws = rdev->rlc.reg_list_size;
Alex Deuchera0f38602013-08-22 11:57:46 -04003908 if (rdev->family >= CHIP_BONAIRE) {
3909 dws += (5 * 16) + 48 + 48 + 64;
3910 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003911 cs_data = rdev->rlc.cs_data;
3912
Alex Deucher10b7ca72013-04-17 17:22:05 -04003913 if (src_ptr) {
3914 /* save restore block */
3915 if (rdev->rlc.save_restore_obj == NULL) {
3916 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3917 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
3918 if (r) {
3919 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
3920 return r;
3921 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003922 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003923
Alex Deucher10b7ca72013-04-17 17:22:05 -04003924 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3925 if (unlikely(r != 0)) {
Alex Deucher2948f5e2013-04-12 13:52:52 -04003926 sumo_rlc_fini(rdev);
3927 return r;
3928 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003929 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
3930 &rdev->rlc.save_restore_gpu_addr);
3931 if (r) {
3932 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3933 dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
3934 sumo_rlc_fini(rdev);
3935 return r;
Alex Deucher2948f5e2013-04-12 13:52:52 -04003936 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04003937
Alex Deucher10b7ca72013-04-17 17:22:05 -04003938 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
3939 if (r) {
3940 dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
3941 sumo_rlc_fini(rdev);
3942 return r;
3943 }
3944 /* write the sr buffer */
3945 dst_ptr = rdev->rlc.sr_ptr;
Alex Deucher1fd11772013-04-17 17:53:50 -04003946 if (rdev->family >= CHIP_TAHITI) {
3947 /* SI */
Alex Deucher59a82d02013-08-13 12:48:06 -04003948 for (i = 0; i < rdev->rlc.reg_list_size; i++)
Alex Deucher1fd11772013-04-17 17:53:50 -04003949 dst_ptr[i] = src_ptr[i];
3950 } else {
3951 /* ON/LN/TN */
3952 /* format:
3953 * dw0: (reg2 << 16) | reg1
3954 * dw1: reg1 save space
3955 * dw2: reg2 save space
3956 */
3957 for (i = 0; i < dws; i++) {
3958 data = src_ptr[i] >> 2;
3959 i++;
3960 if (i < dws)
3961 data |= (src_ptr[i] >> 2) << 16;
3962 j = (((i - 1) * 3) / 2);
3963 dst_ptr[j] = data;
3964 }
3965 j = ((i * 3) / 2);
3966 dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
Alex Deucher10b7ca72013-04-17 17:22:05 -04003967 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003968 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
3969 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3970 }
3971
3972 if (cs_data) {
3973 /* clear state block */
Alex Deuchera0f38602013-08-22 11:57:46 -04003974 if (rdev->family >= CHIP_BONAIRE) {
3975 rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
3976 } else if (rdev->family >= CHIP_TAHITI) {
Alex Deucher59a82d02013-08-13 12:48:06 -04003977 rdev->rlc.clear_state_size = si_get_csb_size(rdev);
3978 dws = rdev->rlc.clear_state_size + (256 / 4);
3979 } else {
3980 reg_list_num = 0;
3981 dws = 0;
3982 for (i = 0; cs_data[i].section != NULL; i++) {
3983 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
3984 reg_list_num++;
3985 dws += cs_data[i].section[j].reg_count;
3986 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003987 }
Alex Deucher59a82d02013-08-13 12:48:06 -04003988 reg_list_blk_index = (3 * reg_list_num + 2);
3989 dws += reg_list_blk_index;
3990 rdev->rlc.clear_state_size = dws;
Alex Deucher10b7ca72013-04-17 17:22:05 -04003991 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04003992
3993 if (rdev->rlc.clear_state_obj == NULL) {
Alex Deucher59a82d02013-08-13 12:48:06 -04003994 r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
Alex Deucher10b7ca72013-04-17 17:22:05 -04003995 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
3996 if (r) {
3997 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
3998 sumo_rlc_fini(rdev);
3999 return r;
4000 }
4001 }
4002 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4003 if (unlikely(r != 0)) {
4004 sumo_rlc_fini(rdev);
4005 return r;
4006 }
4007 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4008 &rdev->rlc.clear_state_gpu_addr);
4009 if (r) {
4010 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4011 dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4012 sumo_rlc_fini(rdev);
4013 return r;
4014 }
4015
4016 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4017 if (r) {
4018 dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4019 sumo_rlc_fini(rdev);
4020 return r;
4021 }
4022 /* set up the cs buffer */
4023 dst_ptr = rdev->rlc.cs_ptr;
Alex Deuchera0f38602013-08-22 11:57:46 -04004024 if (rdev->family >= CHIP_BONAIRE) {
4025 cik_get_csb_buffer(rdev, dst_ptr);
4026 } else if (rdev->family >= CHIP_TAHITI) {
Alex Deucher59a82d02013-08-13 12:48:06 -04004027 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4028 dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
4029 dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
4030 dst_ptr[2] = rdev->rlc.clear_state_size;
4031 si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4032 } else {
4033 reg_list_hdr_blk_index = 0;
4034 reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4035 data = upper_32_bits(reg_list_mc_addr);
4036 dst_ptr[reg_list_hdr_blk_index] = data;
4037 reg_list_hdr_blk_index++;
4038 for (i = 0; cs_data[i].section != NULL; i++) {
4039 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4040 reg_num = cs_data[i].section[j].reg_count;
4041 data = reg_list_mc_addr & 0xffffffff;
4042 dst_ptr[reg_list_hdr_blk_index] = data;
4043 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004044
Alex Deucher59a82d02013-08-13 12:48:06 -04004045 data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4046 dst_ptr[reg_list_hdr_blk_index] = data;
4047 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004048
Alex Deucher59a82d02013-08-13 12:48:06 -04004049 data = 0x08000000 | (reg_num * 4);
4050 dst_ptr[reg_list_hdr_blk_index] = data;
4051 reg_list_hdr_blk_index++;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004052
Alex Deucher59a82d02013-08-13 12:48:06 -04004053 for (k = 0; k < reg_num; k++) {
4054 data = cs_data[i].section[j].extent[k];
4055 dst_ptr[reg_list_blk_index + k] = data;
4056 }
4057 reg_list_mc_addr += reg_num * 4;
4058 reg_list_blk_index += reg_num;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004059 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04004060 }
Alex Deucher59a82d02013-08-13 12:48:06 -04004061 dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
Alex Deucher10b7ca72013-04-17 17:22:05 -04004062 }
Alex Deucher10b7ca72013-04-17 17:22:05 -04004063 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4064 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4065 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04004066
Alex Deucher22c775c2013-07-23 09:41:05 -04004067 if (rdev->rlc.cp_table_size) {
4068 if (rdev->rlc.cp_table_obj == NULL) {
4069 r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
4070 RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
4071 if (r) {
4072 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4073 sumo_rlc_fini(rdev);
4074 return r;
4075 }
4076 }
4077
4078 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4079 if (unlikely(r != 0)) {
4080 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4081 sumo_rlc_fini(rdev);
4082 return r;
4083 }
4084 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4085 &rdev->rlc.cp_table_gpu_addr);
4086 if (r) {
4087 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4088 dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4089 sumo_rlc_fini(rdev);
4090 return r;
4091 }
4092 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4093 if (r) {
4094 dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4095 sumo_rlc_fini(rdev);
4096 return r;
4097 }
4098
4099 cik_init_cp_pg_table(rdev);
4100
4101 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4102 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4103
4104 }
4105
Alex Deucher2948f5e2013-04-12 13:52:52 -04004106 return 0;
4107}
4108
4109static void evergreen_rlc_start(struct radeon_device *rdev)
4110{
Alex Deucher8ba10462013-02-15 16:26:33 -05004111 u32 mask = RLC_ENABLE;
4112
4113 if (rdev->flags & RADEON_IS_IGP) {
4114 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
Alex Deucher8ba10462013-02-15 16:26:33 -05004115 }
4116
4117 WREG32(RLC_CNTL, mask);
Alex Deucher2948f5e2013-04-12 13:52:52 -04004118}
4119
4120int evergreen_rlc_resume(struct radeon_device *rdev)
4121{
4122 u32 i;
4123 const __be32 *fw_data;
4124
4125 if (!rdev->rlc_fw)
4126 return -EINVAL;
4127
4128 r600_rlc_stop(rdev);
4129
4130 WREG32(RLC_HB_CNTL, 0);
4131
4132 if (rdev->flags & RADEON_IS_IGP) {
Alex Deucher8ba10462013-02-15 16:26:33 -05004133 if (rdev->family == CHIP_ARUBA) {
4134 u32 always_on_bitmap =
4135 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4136 /* find out the number of active simds */
4137 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4138 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4139 tmp = hweight32(~tmp);
4140 if (tmp == rdev->config.cayman.max_simds_per_se) {
4141 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4142 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4143 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4144 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4145 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4146 }
4147 } else {
4148 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4149 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4150 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04004151 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4152 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4153 } else {
4154 WREG32(RLC_HB_BASE, 0);
4155 WREG32(RLC_HB_RPTR, 0);
4156 WREG32(RLC_HB_WPTR, 0);
Alex Deucher8ba10462013-02-15 16:26:33 -05004157 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4158 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
Alex Deucher2948f5e2013-04-12 13:52:52 -04004159 }
Alex Deucher2948f5e2013-04-12 13:52:52 -04004160 WREG32(RLC_MC_CNTL, 0);
4161 WREG32(RLC_UCODE_CNTL, 0);
4162
4163 fw_data = (const __be32 *)rdev->rlc_fw->data;
4164 if (rdev->family >= CHIP_ARUBA) {
4165 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4166 WREG32(RLC_UCODE_ADDR, i);
4167 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4168 }
4169 } else if (rdev->family >= CHIP_CAYMAN) {
4170 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4171 WREG32(RLC_UCODE_ADDR, i);
4172 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4173 }
4174 } else {
4175 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4176 WREG32(RLC_UCODE_ADDR, i);
4177 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4178 }
4179 }
4180 WREG32(RLC_UCODE_ADDR, 0);
4181
4182 evergreen_rlc_start(rdev);
4183
4184 return 0;
4185}
4186
Alex Deucher45f9a392010-03-24 13:55:51 -04004187/* Interrupts */
4188
4189u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4190{
Alex Deucher46437052012-08-15 17:10:32 -04004191 if (crtc >= rdev->num_crtc)
Alex Deucher45f9a392010-03-24 13:55:51 -04004192 return 0;
Alex Deucher46437052012-08-15 17:10:32 -04004193 else
4194 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
Alex Deucher45f9a392010-03-24 13:55:51 -04004195}
4196
4197void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4198{
4199 u32 tmp;
4200
Alex Deucher1b370782011-11-17 20:13:28 -05004201 if (rdev->family >= CHIP_CAYMAN) {
4202 cayman_cp_int_cntl_setup(rdev, 0,
4203 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4204 cayman_cp_int_cntl_setup(rdev, 1, 0);
4205 cayman_cp_int_cntl_setup(rdev, 2, 0);
Alex Deucherf60cbd12012-12-04 15:27:33 -05004206 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4207 WREG32(CAYMAN_DMA1_CNTL, tmp);
Alex Deucher1b370782011-11-17 20:13:28 -05004208 } else
4209 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
Alex Deucher233d1ad2012-12-04 15:25:59 -05004210 tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4211 WREG32(DMA_CNTL, tmp);
Alex Deucher45f9a392010-03-24 13:55:51 -04004212 WREG32(GRBM_INT_CNTL, 0);
4213 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4214 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
Alex Deucherb7eff392011-07-08 11:44:56 -04004215 if (rdev->num_crtc >= 4) {
Alex Deucher18007402010-11-22 17:56:28 -05004216 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4217 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
Alex Deucherb7eff392011-07-08 11:44:56 -04004218 }
4219 if (rdev->num_crtc >= 6) {
Alex Deucher18007402010-11-22 17:56:28 -05004220 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4221 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4222 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004223
4224 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4225 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
Alex Deucherb7eff392011-07-08 11:44:56 -04004226 if (rdev->num_crtc >= 4) {
Alex Deucher18007402010-11-22 17:56:28 -05004227 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4228 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
Alex Deucherb7eff392011-07-08 11:44:56 -04004229 }
4230 if (rdev->num_crtc >= 6) {
Alex Deucher18007402010-11-22 17:56:28 -05004231 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4232 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4233 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004234
Alex Deucher05b3ef62012-03-20 17:18:37 -04004235 /* only one DAC on DCE6 */
4236 if (!ASIC_IS_DCE6(rdev))
4237 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
Alex Deucher45f9a392010-03-24 13:55:51 -04004238 WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4239
4240 tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4241 WREG32(DC_HPD1_INT_CONTROL, tmp);
4242 tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4243 WREG32(DC_HPD2_INT_CONTROL, tmp);
4244 tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4245 WREG32(DC_HPD3_INT_CONTROL, tmp);
4246 tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4247 WREG32(DC_HPD4_INT_CONTROL, tmp);
4248 tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4249 WREG32(DC_HPD5_INT_CONTROL, tmp);
4250 tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4251 WREG32(DC_HPD6_INT_CONTROL, tmp);
4252
4253}
4254
/**
 * evergreen_irq_set - program the hardware interrupt enable registers
 * @rdev: radeon_device pointer
 *
 * Builds enable masks for every interrupt source tracked in rdev->irq
 * (CP rings, DMA engines, per-crtc vblank/pageflip, hotplug pins,
 * HDMI/AFMT audio and dpm thermal) and then writes them all to the
 * hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no IRQ handler has been installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	/* grph/afmt masks stay 0 unless explicitly enabled below */
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read current control registers with the enable bits cleared;
	 * the per-source enables are OR'd back in below
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* the thermal interrupt control lives in a TN-specific register
	 * on ARUBA parts
	 */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* cayman and up have a second DMA engine */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank interrupts are enabled when either the vblank irq or a
	 * pending pageflip needs them
	 */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks built; now push them to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4476
Andi Kleencbdd4502011-10-13 16:08:46 -07004477static void evergreen_irq_ack(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004478{
4479 u32 tmp;
4480
Alex Deucher6f34be52010-11-21 10:59:01 -05004481 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4482 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4483 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4484 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4485 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4486 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4487 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4488 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
Alex Deucherb7eff392011-07-08 11:44:56 -04004489 if (rdev->num_crtc >= 4) {
4490 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4491 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4492 }
4493 if (rdev->num_crtc >= 6) {
4494 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4495 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4496 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004497
Alex Deucherf122c612012-03-30 08:59:57 -04004498 rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4499 rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4500 rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4501 rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4502 rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4503 rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4504
Alex Deucher6f34be52010-11-21 10:59:01 -05004505 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4506 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4507 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4508 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
Alex Deucher6f34be52010-11-21 10:59:01 -05004509 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004510 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004511 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004512 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004513 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004514 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
Alex Deucher6f34be52010-11-21 10:59:01 -05004515 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
Alex Deucher45f9a392010-03-24 13:55:51 -04004516 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4517
Alex Deucherb7eff392011-07-08 11:44:56 -04004518 if (rdev->num_crtc >= 4) {
4519 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4520 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4521 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4522 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4523 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4524 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4525 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4526 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4527 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4528 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4529 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4530 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4531 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004532
Alex Deucherb7eff392011-07-08 11:44:56 -04004533 if (rdev->num_crtc >= 6) {
4534 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4535 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4536 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4537 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4538 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4539 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4540 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4541 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4542 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4543 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4544 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4545 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4546 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004547
Alex Deucher6f34be52010-11-21 10:59:01 -05004548 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004549 tmp = RREG32(DC_HPD1_INT_CONTROL);
4550 tmp |= DC_HPDx_INT_ACK;
4551 WREG32(DC_HPD1_INT_CONTROL, tmp);
4552 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004553 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004554 tmp = RREG32(DC_HPD2_INT_CONTROL);
4555 tmp |= DC_HPDx_INT_ACK;
4556 WREG32(DC_HPD2_INT_CONTROL, tmp);
4557 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004558 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004559 tmp = RREG32(DC_HPD3_INT_CONTROL);
4560 tmp |= DC_HPDx_INT_ACK;
4561 WREG32(DC_HPD3_INT_CONTROL, tmp);
4562 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004563 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004564 tmp = RREG32(DC_HPD4_INT_CONTROL);
4565 tmp |= DC_HPDx_INT_ACK;
4566 WREG32(DC_HPD4_INT_CONTROL, tmp);
4567 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004568 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004569 tmp = RREG32(DC_HPD5_INT_CONTROL);
4570 tmp |= DC_HPDx_INT_ACK;
4571 WREG32(DC_HPD5_INT_CONTROL, tmp);
4572 }
Alex Deucher6f34be52010-11-21 10:59:01 -05004573 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
Alex Deucher45f9a392010-03-24 13:55:51 -04004574 tmp = RREG32(DC_HPD5_INT_CONTROL);
4575 tmp |= DC_HPDx_INT_ACK;
4576 WREG32(DC_HPD6_INT_CONTROL, tmp);
4577 }
Alex Deucherf122c612012-03-30 08:59:57 -04004578 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4579 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4580 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4581 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4582 }
4583 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4584 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4585 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4586 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4587 }
4588 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4589 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4590 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4591 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4592 }
4593 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4594 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4595 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4596 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4597 }
4598 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4599 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4600 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4601 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4602 }
4603 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4604 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4605 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4606 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4607 }
Alex Deucher45f9a392010-03-24 13:55:51 -04004608}
4609
/* Fully quiesce interrupts: disable generation at the IH, give any
 * in-flight interrupt time to land, ack everything outstanding, then
 * force all source enables to their disabled state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4618
Alex Deucher755d8192011-03-02 20:07:34 -05004619void evergreen_irq_suspend(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004620{
4621 evergreen_irq_disable(rdev);
4622 r600_rlc_stop(rdev);
4623}
4624
Andi Kleencbdd4502011-10-13 16:08:46 -07004625static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
Alex Deucher45f9a392010-03-24 13:55:51 -04004626{
4627 u32 wptr, tmp;
4628
Alex Deucher724c80e2010-08-27 18:25:25 -04004629 if (rdev->wb.enabled)
Cédric Cano204ae242011-04-19 11:07:13 -04004630 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
Alex Deucher724c80e2010-08-27 18:25:25 -04004631 else
4632 wptr = RREG32(IH_RB_WPTR);
Alex Deucher45f9a392010-03-24 13:55:51 -04004633
4634 if (wptr & RB_OVERFLOW) {
4635 /* When a ring buffer overflow happen start parsing interrupt
4636 * from the last not overwritten vector (wptr + 16). Hopefully
4637 * this should allow us to catchup.
4638 */
4639 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4640 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4641 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4642 tmp = RREG32(IH_RB_CNTL);
4643 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4644 WREG32(IH_RB_CNTL, tmp);
4645 }
4646 return (wptr & rdev->ih.ptr_mask);
4647}
4648
4649int evergreen_irq_process(struct radeon_device *rdev)
4650{
Dave Airlie682f1a52011-06-18 03:59:51 +00004651 u32 wptr;
4652 u32 rptr;
Alex Deucher45f9a392010-03-24 13:55:51 -04004653 u32 src_id, src_data;
4654 u32 ring_index;
Alex Deucher45f9a392010-03-24 13:55:51 -04004655 bool queue_hotplug = false;
Alex Deucherf122c612012-03-30 08:59:57 -04004656 bool queue_hdmi = false;
Alex Deucherdc50ba72013-06-26 00:33:35 -04004657 bool queue_thermal = false;
Alex Deucher54e2e492013-06-13 18:26:25 -04004658 u32 status, addr;
Alex Deucher45f9a392010-03-24 13:55:51 -04004659
Dave Airlie682f1a52011-06-18 03:59:51 +00004660 if (!rdev->ih.enabled || rdev->shutdown)
Alex Deucher45f9a392010-03-24 13:55:51 -04004661 return IRQ_NONE;
4662
Dave Airlie682f1a52011-06-18 03:59:51 +00004663 wptr = evergreen_get_ih_wptr(rdev);
Christian Koenigc20dc362012-05-16 21:45:24 +02004664
4665restart_ih:
4666 /* is somebody else already processing irqs? */
4667 if (atomic_xchg(&rdev->ih.lock, 1))
4668 return IRQ_NONE;
4669
Dave Airlie682f1a52011-06-18 03:59:51 +00004670 rptr = rdev->ih.rptr;
4671 DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
Alex Deucher45f9a392010-03-24 13:55:51 -04004672
Benjamin Herrenschmidt964f6642011-07-13 16:28:19 +10004673 /* Order reading of wptr vs. reading of IH ring data */
4674 rmb();
4675
Alex Deucher45f9a392010-03-24 13:55:51 -04004676 /* display interrupts */
Alex Deucher6f34be52010-11-21 10:59:01 -05004677 evergreen_irq_ack(rdev);
Alex Deucher45f9a392010-03-24 13:55:51 -04004678
Alex Deucher45f9a392010-03-24 13:55:51 -04004679 while (rptr != wptr) {
4680 /* wptr/rptr are in bytes! */
4681 ring_index = rptr / 4;
Alex Deucher0f234f5f2011-02-13 19:06:33 -05004682 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4683 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
Alex Deucher45f9a392010-03-24 13:55:51 -04004684
4685 switch (src_id) {
4686 case 1: /* D1 vblank/vline */
4687 switch (src_data) {
4688 case 0: /* D1 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004689 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004690 if (rdev->irq.crtc_vblank_int[0]) {
4691 drm_handle_vblank(rdev->ddev, 0);
4692 rdev->pm.vblank_sync = true;
4693 wake_up(&rdev->irq.vblank_queue);
4694 }
Christian Koenig736fc372012-05-17 19:52:00 +02004695 if (atomic_read(&rdev->irq.pflip[0]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004696 radeon_crtc_handle_flip(rdev, 0);
Alex Deucher6f34be52010-11-21 10:59:01 -05004697 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004698 DRM_DEBUG("IH: D1 vblank\n");
4699 }
4700 break;
4701 case 1: /* D1 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004702 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4703 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004704 DRM_DEBUG("IH: D1 vline\n");
4705 }
4706 break;
4707 default:
4708 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4709 break;
4710 }
4711 break;
4712 case 2: /* D2 vblank/vline */
4713 switch (src_data) {
4714 case 0: /* D2 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004715 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
Alex Deucher6f34be52010-11-21 10:59:01 -05004716 if (rdev->irq.crtc_vblank_int[1]) {
4717 drm_handle_vblank(rdev->ddev, 1);
4718 rdev->pm.vblank_sync = true;
4719 wake_up(&rdev->irq.vblank_queue);
4720 }
Christian Koenig736fc372012-05-17 19:52:00 +02004721 if (atomic_read(&rdev->irq.pflip[1]))
Mario Kleiner3e4ea742010-11-21 10:59:02 -05004722 radeon_crtc_handle_flip(rdev, 1);
Alex Deucher6f34be52010-11-21 10:59:01 -05004723 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004724 DRM_DEBUG("IH: D2 vblank\n");
4725 }
4726 break;
4727 case 1: /* D2 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004728 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4729 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004730 DRM_DEBUG("IH: D2 vline\n");
4731 }
4732 break;
4733 default:
4734 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4735 break;
4736 }
4737 break;
4738 case 3: /* D3 vblank/vline */
4739 switch (src_data) {
4740 case 0: /* D3 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004741 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4742 if (rdev->irq.crtc_vblank_int[2]) {
4743 drm_handle_vblank(rdev->ddev, 2);
4744 rdev->pm.vblank_sync = true;
4745 wake_up(&rdev->irq.vblank_queue);
4746 }
Christian Koenig736fc372012-05-17 19:52:00 +02004747 if (atomic_read(&rdev->irq.pflip[2]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004748 radeon_crtc_handle_flip(rdev, 2);
4749 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004750 DRM_DEBUG("IH: D3 vblank\n");
4751 }
4752 break;
4753 case 1: /* D3 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004754 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4755 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004756 DRM_DEBUG("IH: D3 vline\n");
4757 }
4758 break;
4759 default:
4760 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4761 break;
4762 }
4763 break;
4764 case 4: /* D4 vblank/vline */
4765 switch (src_data) {
4766 case 0: /* D4 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004767 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4768 if (rdev->irq.crtc_vblank_int[3]) {
4769 drm_handle_vblank(rdev->ddev, 3);
4770 rdev->pm.vblank_sync = true;
4771 wake_up(&rdev->irq.vblank_queue);
4772 }
Christian Koenig736fc372012-05-17 19:52:00 +02004773 if (atomic_read(&rdev->irq.pflip[3]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004774 radeon_crtc_handle_flip(rdev, 3);
4775 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004776 DRM_DEBUG("IH: D4 vblank\n");
4777 }
4778 break;
4779 case 1: /* D4 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004780 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4781 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004782 DRM_DEBUG("IH: D4 vline\n");
4783 }
4784 break;
4785 default:
4786 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4787 break;
4788 }
4789 break;
4790 case 5: /* D5 vblank/vline */
4791 switch (src_data) {
4792 case 0: /* D5 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004793 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4794 if (rdev->irq.crtc_vblank_int[4]) {
4795 drm_handle_vblank(rdev->ddev, 4);
4796 rdev->pm.vblank_sync = true;
4797 wake_up(&rdev->irq.vblank_queue);
4798 }
Christian Koenig736fc372012-05-17 19:52:00 +02004799 if (atomic_read(&rdev->irq.pflip[4]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004800 radeon_crtc_handle_flip(rdev, 4);
4801 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004802 DRM_DEBUG("IH: D5 vblank\n");
4803 }
4804 break;
4805 case 1: /* D5 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004806 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4807 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004808 DRM_DEBUG("IH: D5 vline\n");
4809 }
4810 break;
4811 default:
4812 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4813 break;
4814 }
4815 break;
4816 case 6: /* D6 vblank/vline */
4817 switch (src_data) {
4818 case 0: /* D6 vblank */
Alex Deucher6f34be52010-11-21 10:59:01 -05004819 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4820 if (rdev->irq.crtc_vblank_int[5]) {
4821 drm_handle_vblank(rdev->ddev, 5);
4822 rdev->pm.vblank_sync = true;
4823 wake_up(&rdev->irq.vblank_queue);
4824 }
Christian Koenig736fc372012-05-17 19:52:00 +02004825 if (atomic_read(&rdev->irq.pflip[5]))
Alex Deucher6f34be52010-11-21 10:59:01 -05004826 radeon_crtc_handle_flip(rdev, 5);
4827 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004828 DRM_DEBUG("IH: D6 vblank\n");
4829 }
4830 break;
4831 case 1: /* D6 vline */
Alex Deucher6f34be52010-11-21 10:59:01 -05004832 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4833 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004834 DRM_DEBUG("IH: D6 vline\n");
4835 }
4836 break;
4837 default:
4838 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4839 break;
4840 }
4841 break;
4842 case 42: /* HPD hotplug */
4843 switch (src_data) {
4844 case 0:
Alex Deucher6f34be52010-11-21 10:59:01 -05004845 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4846 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004847 queue_hotplug = true;
4848 DRM_DEBUG("IH: HPD1\n");
4849 }
4850 break;
4851 case 1:
Alex Deucher6f34be52010-11-21 10:59:01 -05004852 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4853 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004854 queue_hotplug = true;
4855 DRM_DEBUG("IH: HPD2\n");
4856 }
4857 break;
4858 case 2:
Alex Deucher6f34be52010-11-21 10:59:01 -05004859 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4860 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004861 queue_hotplug = true;
4862 DRM_DEBUG("IH: HPD3\n");
4863 }
4864 break;
4865 case 3:
Alex Deucher6f34be52010-11-21 10:59:01 -05004866 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4867 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004868 queue_hotplug = true;
4869 DRM_DEBUG("IH: HPD4\n");
4870 }
4871 break;
4872 case 4:
Alex Deucher6f34be52010-11-21 10:59:01 -05004873 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4874 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004875 queue_hotplug = true;
4876 DRM_DEBUG("IH: HPD5\n");
4877 }
4878 break;
4879 case 5:
Alex Deucher6f34be52010-11-21 10:59:01 -05004880 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4881 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
Alex Deucher45f9a392010-03-24 13:55:51 -04004882 queue_hotplug = true;
4883 DRM_DEBUG("IH: HPD6\n");
4884 }
4885 break;
4886 default:
4887 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4888 break;
4889 }
4890 break;
Alex Deucherf122c612012-03-30 08:59:57 -04004891 case 44: /* hdmi */
4892 switch (src_data) {
4893 case 0:
4894 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4895 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4896 queue_hdmi = true;
4897 DRM_DEBUG("IH: HDMI0\n");
4898 }
4899 break;
4900 case 1:
4901 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4902 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4903 queue_hdmi = true;
4904 DRM_DEBUG("IH: HDMI1\n");
4905 }
4906 break;
4907 case 2:
4908 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4909 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4910 queue_hdmi = true;
4911 DRM_DEBUG("IH: HDMI2\n");
4912 }
4913 break;
4914 case 3:
4915 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4916 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4917 queue_hdmi = true;
4918 DRM_DEBUG("IH: HDMI3\n");
4919 }
4920 break;
4921 case 4:
4922 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4923 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4924 queue_hdmi = true;
4925 DRM_DEBUG("IH: HDMI4\n");
4926 }
4927 break;
4928 case 5:
4929 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4930 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4931 queue_hdmi = true;
4932 DRM_DEBUG("IH: HDMI5\n");
4933 }
4934 break;
4935 default:
4936 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4937 break;
4938 }
Christian Königf2ba57b2013-04-08 12:41:29 +02004939 case 124: /* UVD */
4940 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4941 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
Alex Deucherf122c612012-03-30 08:59:57 -04004942 break;
Christian Königae133a12012-09-18 15:30:44 -04004943 case 146:
4944 case 147:
Alex Deucher54e2e492013-06-13 18:26:25 -04004945 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4946 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
Christian Königae133a12012-09-18 15:30:44 -04004947 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4948 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
Alex Deucher54e2e492013-06-13 18:26:25 -04004949 addr);
Christian Königae133a12012-09-18 15:30:44 -04004950 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
Alex Deucher54e2e492013-06-13 18:26:25 -04004951 status);
4952 cayman_vm_decode_fault(rdev, status, addr);
Christian Königae133a12012-09-18 15:30:44 -04004953 /* reset addr and status */
4954 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4955 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04004956 case 176: /* CP_INT in ring buffer */
4957 case 177: /* CP_INT in IB1 */
4958 case 178: /* CP_INT in IB2 */
4959 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
Alex Deucher74652802011-08-25 13:39:48 -04004960 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004961 break;
4962 case 181: /* CP EOP event */
4963 DRM_DEBUG("IH: CP EOP\n");
Alex Deucher1b370782011-11-17 20:13:28 -05004964 if (rdev->family >= CHIP_CAYMAN) {
4965 switch (src_data) {
4966 case 0:
4967 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4968 break;
4969 case 1:
4970 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4971 break;
4972 case 2:
4973 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4974 break;
4975 }
4976 } else
4977 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
Alex Deucher45f9a392010-03-24 13:55:51 -04004978 break;
Alex Deucher233d1ad2012-12-04 15:25:59 -05004979 case 224: /* DMA trap event */
4980 DRM_DEBUG("IH: DMA trap\n");
4981 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4982 break;
Alex Deucherdc50ba72013-06-26 00:33:35 -04004983 case 230: /* thermal low to high */
4984 DRM_DEBUG("IH: thermal low to high\n");
4985 rdev->pm.dpm.thermal.high_to_low = false;
4986 queue_thermal = true;
4987 break;
4988 case 231: /* thermal high to low */
4989 DRM_DEBUG("IH: thermal high to low\n");
4990 rdev->pm.dpm.thermal.high_to_low = true;
4991 queue_thermal = true;
4992 break;
Alex Deucher2031f772010-04-22 12:52:11 -04004993 case 233: /* GUI IDLE */
Ilija Hadzic303c8052011-06-07 14:54:48 -04004994 DRM_DEBUG("IH: GUI idle\n");
Alex Deucher2031f772010-04-22 12:52:11 -04004995 break;
Alex Deucherf60cbd12012-12-04 15:27:33 -05004996 case 244: /* DMA trap event */
4997 if (rdev->family >= CHIP_CAYMAN) {
4998 DRM_DEBUG("IH: DMA1 trap\n");
4999 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5000 }
5001 break;
Alex Deucher45f9a392010-03-24 13:55:51 -04005002 default:
5003 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5004 break;
5005 }
5006
5007 /* wptr/rptr are in bytes! */
5008 rptr += 16;
5009 rptr &= rdev->ih.ptr_mask;
5010 }
Alex Deucher45f9a392010-03-24 13:55:51 -04005011 if (queue_hotplug)
Tejun Heo32c87fc2011-01-03 14:49:32 +01005012 schedule_work(&rdev->hotplug_work);
Alex Deucherf122c612012-03-30 08:59:57 -04005013 if (queue_hdmi)
5014 schedule_work(&rdev->audio_work);
Alex Deucherdc50ba72013-06-26 00:33:35 -04005015 if (queue_thermal && rdev->pm.dpm_enabled)
5016 schedule_work(&rdev->pm.dpm.thermal.work);
Alex Deucher45f9a392010-03-24 13:55:51 -04005017 rdev->ih.rptr = rptr;
5018 WREG32(IH_RB_RPTR, rdev->ih.rptr);
Christian Koenigc20dc362012-05-16 21:45:24 +02005019 atomic_set(&rdev->ih.lock, 0);
5020
5021 /* make sure wptr hasn't changed while processing */
5022 wptr = evergreen_get_ih_wptr(rdev);
5023 if (wptr != rptr)
5024 goto restart_ih;
5025
Alex Deucher45f9a392010-03-24 13:55:51 -04005026 return IRQ_HANDLED;
5027}
5028
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005029static int evergreen_startup(struct radeon_device *rdev)
5030{
Christian Königf2ba57b2013-04-08 12:41:29 +02005031 struct radeon_ring *ring;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005032 int r;
5033
Alex Deucher9e46a482011-01-06 18:49:35 -05005034 /* enable pcie gen2 link */
Ilija Hadziccd540332011-09-20 10:22:57 -04005035 evergreen_pcie_gen2_enable(rdev);
Alex Deucherf52382d2013-02-15 11:02:50 -05005036 /* enable aspm */
5037 evergreen_program_aspm(rdev);
Alex Deucher9e46a482011-01-06 18:49:35 -05005038
Alex Deucher6fab3fe2013-08-04 12:13:17 -04005039 evergreen_mc_program(rdev);
5040
Alex Deucher0af62b02011-01-06 21:19:31 -05005041 if (ASIC_IS_DCE5(rdev)) {
5042 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5043 r = ni_init_microcode(rdev);
5044 if (r) {
5045 DRM_ERROR("Failed to load firmware!\n");
5046 return r;
5047 }
5048 }
Alex Deucher755d8192011-03-02 20:07:34 -05005049 r = ni_mc_load_microcode(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005050 if (r) {
Alex Deucher0af62b02011-01-06 21:19:31 -05005051 DRM_ERROR("Failed to load MC firmware!\n");
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005052 return r;
5053 }
Alex Deucher0af62b02011-01-06 21:19:31 -05005054 } else {
5055 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5056 r = r600_init_microcode(rdev);
5057 if (r) {
5058 DRM_ERROR("Failed to load firmware!\n");
5059 return r;
5060 }
5061 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005062 }
Alex Deucherfe251e22010-03-24 13:36:43 -04005063
Alex Deucher16cdf042011-10-28 10:30:02 -04005064 r = r600_vram_scratch_init(rdev);
5065 if (r)
5066 return r;
5067
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005068 if (rdev->flags & RADEON_IS_AGP) {
Alex Deucher0fcdb612010-03-24 13:20:41 -04005069 evergreen_agp_enable(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005070 } else {
5071 r = evergreen_pcie_gart_enable(rdev);
5072 if (r)
5073 return r;
5074 }
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005075 evergreen_gpu_init(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005076
Alex Deucher2948f5e2013-04-12 13:52:52 -04005077 /* allocate rlc buffers */
5078 if (rdev->flags & RADEON_IS_IGP) {
5079 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
Alex Deucher1fd11772013-04-17 17:53:50 -04005080 rdev->rlc.reg_list_size =
5081 (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
Alex Deucher2948f5e2013-04-12 13:52:52 -04005082 rdev->rlc.cs_data = evergreen_cs_data;
5083 r = sumo_rlc_init(rdev);
5084 if (r) {
5085 DRM_ERROR("Failed to init rlc BOs!\n");
5086 return r;
5087 }
5088 }
5089
Alex Deucher724c80e2010-08-27 18:25:25 -04005090 /* allocate wb buffer */
5091 r = radeon_wb_init(rdev);
5092 if (r)
5093 return r;
5094
Jerome Glisse30eb77f2011-11-20 20:45:34 +00005095 r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5096 if (r) {
5097 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5098 return r;
5099 }
5100
Alex Deucher233d1ad2012-12-04 15:25:59 -05005101 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5102 if (r) {
5103 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5104 return r;
5105 }
5106
Christian Könige409b122013-08-13 11:56:53 +02005107 r = uvd_v2_2_resume(rdev);
Christian Königf2ba57b2013-04-08 12:41:29 +02005108 if (!r) {
5109 r = radeon_fence_driver_start_ring(rdev,
5110 R600_RING_TYPE_UVD_INDEX);
5111 if (r)
5112 dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5113 }
5114
5115 if (r)
5116 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5117
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005118 /* Enable IRQ */
Adis Hamziće49f3952013-06-02 16:47:54 +02005119 if (!rdev->irq.installed) {
5120 r = radeon_irq_kms_init(rdev);
5121 if (r)
5122 return r;
5123 }
5124
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005125 r = r600_irq_init(rdev);
5126 if (r) {
5127 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5128 radeon_irq_kms_fini(rdev);
5129 return r;
5130 }
Alex Deucher45f9a392010-03-24 13:55:51 -04005131 evergreen_irq_set(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005132
Christian Königf2ba57b2013-04-08 12:41:29 +02005133 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
Christian Könige32eb502011-10-23 12:56:27 +02005134 r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
Alex Deucher78c55602011-11-17 14:25:56 -05005135 R600_CP_RB_RPTR, R600_CP_RB_WPTR,
Christian König2e1e6da2013-08-13 11:56:52 +02005136 RADEON_CP_PACKET2);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005137 if (r)
5138 return r;
Alex Deucher233d1ad2012-12-04 15:25:59 -05005139
5140 ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5141 r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5142 DMA_RB_RPTR, DMA_RB_WPTR,
Christian König2e1e6da2013-08-13 11:56:52 +02005143 DMA_PACKET(DMA_PACKET_NOP, 0, 0));
Alex Deucher233d1ad2012-12-04 15:25:59 -05005144 if (r)
5145 return r;
5146
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005147 r = evergreen_cp_load_microcode(rdev);
5148 if (r)
5149 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04005150 r = evergreen_cp_resume(rdev);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005151 if (r)
5152 return r;
Alex Deucher233d1ad2012-12-04 15:25:59 -05005153 r = r600_dma_resume(rdev);
5154 if (r)
5155 return r;
Alex Deucherfe251e22010-03-24 13:36:43 -04005156
Christian Königf2ba57b2013-04-08 12:41:29 +02005157 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5158 if (ring->ring_size) {
Christian König02c9f7f2013-08-13 11:56:51 +02005159 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
Christian Königf2ba57b2013-04-08 12:41:29 +02005160 UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
Christian König2e1e6da2013-08-13 11:56:52 +02005161 RADEON_CP_PACKET2);
Christian Königf2ba57b2013-04-08 12:41:29 +02005162 if (!r)
Christian Könige409b122013-08-13 11:56:53 +02005163 r = uvd_v1_0_init(rdev);
Christian Königf2ba57b2013-04-08 12:41:29 +02005164
5165 if (r)
5166 DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5167 }
5168
Christian König2898c342012-07-05 11:55:34 +02005169 r = radeon_ib_pool_init(rdev);
5170 if (r) {
5171 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
Jerome Glisseb15ba512011-11-15 11:48:34 -05005172 return r;
Christian König2898c342012-07-05 11:55:34 +02005173 }
Jerome Glisseb15ba512011-11-15 11:48:34 -05005174
Rafał Miłecki69d2ae52011-12-07 23:32:24 +01005175 r = r600_audio_init(rdev);
5176 if (r) {
5177 DRM_ERROR("radeon: audio init failed\n");
Jerome Glisseb15ba512011-11-15 11:48:34 -05005178 return r;
5179 }
5180
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005181 return 0;
5182}
5183
5184int evergreen_resume(struct radeon_device *rdev)
5185{
5186 int r;
5187
Alex Deucher86f5c9e2010-12-20 12:35:04 -05005188 /* reset the asic, the gfx blocks are often in a bad state
5189 * after the driver is unloaded or after a resume
5190 */
5191 if (radeon_asic_reset(rdev))
5192 dev_warn(rdev->dev, "GPU reset failed !\n");
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005193 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5194 * posting will perform necessary task to bring back GPU into good
5195 * shape.
5196 */
5197 /* post card */
5198 atom_asic_init(rdev->mode_info.atom_context);
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005199
Alex Deucherd4788db2013-02-28 14:40:09 -05005200 /* init golden registers */
5201 evergreen_init_golden_registers(rdev);
5202
Jerome Glisseb15ba512011-11-15 11:48:34 -05005203 rdev->accel_working = true;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005204 r = evergreen_startup(rdev);
5205 if (r) {
Alex Deucher755d8192011-03-02 20:07:34 -05005206 DRM_ERROR("evergreen startup failed on resume\n");
Jerome Glisse6b7746e2012-02-20 17:57:20 -05005207 rdev->accel_working = false;
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005208 return r;
5209 }
Alex Deucherfe251e22010-03-24 13:36:43 -04005210
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005211 return r;
5212
5213}
5214
/* Quiesce the asic for suspend.  Teardown order mirrors startup in
 * reverse: audio, UVD, CP and DMA engines, interrupts, writeback, and
 * finally the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5228
Alex Deucherbcc1c2a2010-01-12 17:54:34 -05005229/* Plan is to move initialization in that function and use
5230 * helper function so that radeon_device_init pretty much
5231 * do nothing more than calling asic specific function. This
5232 * should also allow to remove a bunch of callback function
5233 * like vram_info.
5234 */
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/* One-time driver init for evergreen-class asics: BIOS fetch, atombios
 * setup, posting, clocks, fences, AGP/MC/BO init, ring bookkeeping,
 * GART init and finally evergreen_startup().  Returns 0 on success or
 * a negative error code; on startup failure, acceleration is disabled
 * but init still succeeds (modesetting keeps working).
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, AGP is just disabled */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* record ring sizes; the rings themselves are set up in startup */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init failure is non-fatal; its ring simply stays unused */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* unwind everything startup may have set up */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5342
/* Tear down everything evergreen_init()/evergreen_startup() created,
 * in reverse dependency order, and release the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
Alex Deucher9e46a482011-01-06 18:49:35 -05005366
Ilija Hadzicb07759b2011-09-20 10:22:58 -04005367void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
Alex Deucher9e46a482011-01-06 18:49:35 -05005368{
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005369 u32 link_width_cntl, speed_cntl;
Alex Deucher9e46a482011-01-06 18:49:35 -05005370
Alex Deucherd42dd572011-01-12 20:05:11 -05005371 if (radeon_pcie_gen2 == 0)
5372 return;
5373
Alex Deucher9e46a482011-01-06 18:49:35 -05005374 if (rdev->flags & RADEON_IS_IGP)
5375 return;
5376
5377 if (!(rdev->flags & RADEON_IS_PCIE))
5378 return;
5379
5380 /* x2 cards have a special sequence */
5381 if (ASIC_IS_X2(rdev))
5382 return;
5383
Kleber Sacilotto de Souza7e0e4192013-05-03 19:43:13 -03005384 if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5385 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
Dave Airlie197bbb32012-06-27 08:35:54 +01005386 return;
5387
Alex Deucher492d2b62012-10-25 16:06:59 -04005388 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher3691fee2012-10-08 17:46:27 -04005389 if (speed_cntl & LC_CURRENT_DATA_RATE) {
5390 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5391 return;
5392 }
5393
Dave Airlie197bbb32012-06-27 08:35:54 +01005394 DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5395
Alex Deucher9e46a482011-01-06 18:49:35 -05005396 if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5397 (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5398
Alex Deucher492d2b62012-10-25 16:06:59 -04005399 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005400 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005401 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005402
Alex Deucher492d2b62012-10-25 16:06:59 -04005403 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005404 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
Alex Deucher492d2b62012-10-25 16:06:59 -04005405 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005406
Alex Deucher492d2b62012-10-25 16:06:59 -04005407 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005408 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005409 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005410
Alex Deucher492d2b62012-10-25 16:06:59 -04005411 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005412 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
Alex Deucher492d2b62012-10-25 16:06:59 -04005413 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005414
Alex Deucher492d2b62012-10-25 16:06:59 -04005415 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005416 speed_cntl |= LC_GEN2_EN_STRAP;
Alex Deucher492d2b62012-10-25 16:06:59 -04005417 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005418
5419 } else {
Alex Deucher492d2b62012-10-25 16:06:59 -04005420 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
Alex Deucher9e46a482011-01-06 18:49:35 -05005421 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5422 if (1)
5423 link_width_cntl |= LC_UPCONFIGURE_DIS;
5424 else
5425 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
Alex Deucher492d2b62012-10-25 16:06:59 -04005426 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
Alex Deucher9e46a482011-01-06 18:49:35 -05005427 }
5428}
Alex Deucherf52382d2013-02-15 11:02:50 -05005429
/**
 * evergreen_program_aspm - configure PCIE Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * Programs the PCIE link-controller and PIF PHY registers for ASPM
 * (L0s/L1 low-power link states) on evergreen and newer parts.  L0s is
 * disabled on the chip families listed in the switch below; L1 and
 * PLL-off-in-L1 remain enabled.  Bails out when ASPM is disabled via
 * the radeon.aspm module parameter or the part is not PCIE.
 * Every register write is guarded by a data != orig check to avoid
 * redundant writes.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* user explicitly disabled ASPM via module parameter */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled; everything else allows it */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* select single- vs multi-PIF pairing depending on platform type */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* program L0s/L1 inactivity timeouts; values differ pre/post BARTS */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* let the PLL power fully off while in L1 on both PHY pads */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS and newer also need the PLL ramp-up time tuned */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			/* enable dynamic lane power state management */
			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}