/* Copyright (c) 2008-2012, Code Aurora Forum. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */
#ifndef __ADRENO_H
#define __ADRENO_H

#include "kgsl_device.h"
#include "adreno_drawctxt.h"
#include "adreno_ringbuffer.h"
#include "kgsl_iommu.h"

#define DEVICE_3D_NAME "kgsl-3d"
#define DEVICE_3D0_NAME "kgsl-3d0"

#define ADRENO_DEVICE(device) \
		KGSL_CONTAINER_OF(device, struct adreno_device, dev)

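/*
 * Usage sketch (illustrative): ADRENO_DEVICE() maps a generic kgsl_device
 * pointer back to its containing adreno_device, e.g.
 *
 *        struct adreno_device *adreno_dev = ADRENO_DEVICE(device);
 */
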
/* Flags to control command packet settings */
#define KGSL_CMD_FLAGS_NONE             0x00000000
#define KGSL_CMD_FLAGS_PMODE            0x00000001
#define KGSL_CMD_FLAGS_NO_TS_CMP        0x00000002
#define KGSL_CMD_FLAGS_NOT_KERNEL_CMD   0x00000004

/* Command identifiers */
#define KGSL_CONTEXT_TO_MEM_IDENTIFIER  0x2EADBEEF
#define KGSL_CMD_IDENTIFIER             0x2EEDFACE
#define KGSL_START_OF_IB_IDENTIFIER     0x2EADEABE
#define KGSL_END_OF_IB_IDENTIFIER       0x2ABEDEAD

#ifdef CONFIG_MSM_SCM
#define ADRENO_DEFAULT_PWRSCALE_POLICY  (&kgsl_pwrscale_policy_tz)
#elif defined CONFIG_MSM_SLEEP_STATS_DEVICE
#define ADRENO_DEFAULT_PWRSCALE_POLICY  (&kgsl_pwrscale_policy_idlestats)
#else
#define ADRENO_DEFAULT_PWRSCALE_POLICY  NULL
#endif

#define ADRENO_ISTORE_START 0x5000 /* Istore offset */

#define ADRENO_NUM_CTX_SWITCH_ALLOWED_BEFORE_DRAW 50

enum adreno_gpurev {
        ADRENO_REV_UNKNOWN = 0,
        ADRENO_REV_A200 = 200,
        ADRENO_REV_A203 = 203,
        ADRENO_REV_A205 = 205,
        ADRENO_REV_A220 = 220,
        ADRENO_REV_A225 = 225,
        ADRENO_REV_A305 = 305,
        ADRENO_REV_A320 = 320,
};

struct adreno_gpudev;

struct adreno_device {
        struct kgsl_device dev;    /* Must be first field in this struct */
        unsigned int chip_id;
        enum adreno_gpurev gpurev;
        unsigned long gmem_base;
        unsigned int gmem_size;
        struct adreno_context *drawctxt_active;
        const char *pfp_fwfile;
        unsigned int *pfp_fw;
        size_t pfp_fw_size;
        const char *pm4_fwfile;
        unsigned int *pm4_fw;
        size_t pm4_fw_size;
        struct adreno_ringbuffer ringbuffer;
        unsigned int mharb;
        struct adreno_gpudev *gpudev;
        unsigned int wait_timeout;
        unsigned int istore_size;
        unsigned int pix_shader_start;
        unsigned int instruction_size;
        unsigned int ib_check_level;
        unsigned int fast_hang_detect;
};

struct adreno_gpudev {
        /*
         * These registers are in a different location on A3XX, so define
         * them in the structure and use them as variables.
         */
        unsigned int reg_rbbm_status;
        unsigned int reg_cp_pfp_ucode_data;
        unsigned int reg_cp_pfp_ucode_addr;
        /* keeps track of when we need to execute the draw workaround code */
        int ctx_switches_since_last_draw;

        /* GPU specific function hooks */
        int (*ctxt_create)(struct adreno_device *, struct adreno_context *);
        void (*ctxt_save)(struct adreno_device *, struct adreno_context *);
        void (*ctxt_restore)(struct adreno_device *, struct adreno_context *);
        void (*ctxt_draw_workaround)(struct adreno_device *,
                                        struct adreno_context *);
        irqreturn_t (*irq_handler)(struct adreno_device *);
        void (*irq_control)(struct adreno_device *, int);
        void * (*snapshot)(struct adreno_device *, void *, int *, int);
        void (*rb_init)(struct adreno_device *, struct adreno_ringbuffer *);
        void (*start)(struct adreno_device *);
        unsigned int (*busy_cycles)(struct adreno_device *);
};

extern struct adreno_gpudev adreno_a2xx_gpudev;
extern struct adreno_gpudev adreno_a3xx_gpudev;

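/*
 * Each core file (adreno_a2xx.c, adreno_a3xx.c) supplies one of the hook
 * tables declared above; sketch only, the register value and hook names
 * below are placeholders rather than the driver's real symbols:
 *
 *        struct adreno_gpudev adreno_a2xx_gpudev = {
 *                .reg_rbbm_status = 0x05d0,          (hypothetical offset)
 *                .ctxt_save = a2xx_ctxt_save,        (hypothetical hook)
 *                .irq_handler = a2xx_irq_handler,    (hypothetical hook)
 *                .rb_init = a2xx_rb_init,            (hypothetical hook)
 *        };
 */
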
/* A2XX register sets defined in adreno_a2xx.c */
extern const unsigned int a200_registers[];
extern const unsigned int a220_registers[];
extern const unsigned int a225_registers[];
extern const unsigned int a200_registers_count;
extern const unsigned int a220_registers_count;
extern const unsigned int a225_registers_count;

/* A3XX register set defined in adreno_a3xx.c */
extern const unsigned int a3xx_registers[];
extern const unsigned int a3xx_registers_count;

extern unsigned int hang_detect_regs[];
extern const unsigned int hang_detect_regs_count;


int adreno_idle(struct kgsl_device *device, unsigned int timeout);
void adreno_regread(struct kgsl_device *device, unsigned int offsetwords,
                                unsigned int *value);
void adreno_regwrite(struct kgsl_device *device, unsigned int offsetwords,
                                unsigned int value);

struct kgsl_memdesc *adreno_find_region(struct kgsl_device *device,
                                                unsigned int pt_base,
                                                unsigned int gpuaddr,
                                                unsigned int size);

uint8_t *adreno_convertaddr(struct kgsl_device *device,
        unsigned int pt_base, unsigned int gpuaddr, unsigned int size);

struct kgsl_memdesc *adreno_find_ctxtmem(struct kgsl_device *device,
        unsigned int pt_base, unsigned int gpuaddr, unsigned int size);

void *adreno_snapshot(struct kgsl_device *device, void *snapshot, int *remain,
                int hang);

int adreno_dump_and_recover(struct kgsl_device *device);

unsigned int adreno_hang_detect(struct kgsl_device *device,
                                                unsigned int *prev_reg_val);

static inline int adreno_is_a200(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A200);
}

static inline int adreno_is_a203(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A203);
}

static inline int adreno_is_a205(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A205);
}

static inline int adreno_is_a20x(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev <= 209);
}

static inline int adreno_is_a220(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A220);
}

static inline int adreno_is_a225(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A225);
}

static inline int adreno_is_a22x(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A220 ||
                adreno_dev->gpurev == ADRENO_REV_A225);
}

static inline int adreno_is_a2xx(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev <= 299);
}

static inline int adreno_is_a3xx(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev >= 300);
}

static inline int adreno_is_a305(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A305);
}

static inline int adreno_is_a320(struct adreno_device *adreno_dev)
{
        return (adreno_dev->gpurev == ADRENO_REV_A320);
}

static inline int adreno_rb_ctxtswitch(unsigned int *cmd)
{
        return (cmd[0] == cp_nop_packet(1) &&
                cmd[1] == KGSL_CONTEXT_TO_MEM_IDENTIFIER);
}

/**
 * adreno_encode_istore_size - encode istore size in CP format
 * @adreno_dev - The 3D device.
 *
 * Encode the istore size into the format expected by the
 * CP_SET_SHADER_BASES and CP_ME_INIT commands:
 * bits 31:29 - istore size as encoded by this function
 * bits 27:16 - vertex shader start offset in instructions
 * bits 11:0 - pixel shader start offset in instructions.
 */
static inline int adreno_encode_istore_size(struct adreno_device *adreno_dev)
{
        unsigned int size;
        /*
         * On a225 the CP microcode multiplies the encoded
         * value by 3 while decoding.
         */
        if (adreno_is_a225(adreno_dev))
                size = adreno_dev->istore_size/3;
        else
                size = adreno_dev->istore_size;

        return (ilog2(size) - 5) << 29;
}

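/*
 * Worked example for the encoding above (the numbers are illustrative, not
 * taken from any specific core): with istore_size = 1024 on a non-A225
 * part, ilog2(1024) - 5 = 5, so the encoded size field is 5 << 29
 * (0xA0000000); on an A225 with istore_size = 1536, the microcode's
 * divide-by-3 gives 512, ilog2(512) - 5 = 4, i.e. 4 << 29 (0x80000000).
 */
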
static inline int __adreno_add_idle_indirect_cmds(unsigned int *cmds,
                                                unsigned int nop_gpuaddr)
{
        /*
         * Adding an indirect buffer ensures that the prefetch stalls until
         * the commands in the indirect buffer have completed. We need to
         * stall the prefetch with a nop indirect buffer when updating
         * pagetables because it provides more stable synchronization.
         */
        *cmds++ = CP_HDR_INDIRECT_BUFFER_PFD;
        *cmds++ = nop_gpuaddr;
        *cmds++ = 2;
        *cmds++ = cp_type3_packet(CP_WAIT_FOR_IDLE, 1);
        *cmds++ = 0x00000000;
        return 5;
}

static inline int adreno_add_change_mh_phys_limit_cmds(unsigned int *cmds,
                                                unsigned int new_phys_limit,
                                                unsigned int nop_gpuaddr)
{
        unsigned int *start = cmds;

        cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
        *cmds++ = cp_type0_packet(MH_MMU_MPU_END, 1);
        *cmds++ = new_phys_limit;
        cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
        return cmds - start;
}

static inline int adreno_add_bank_change_cmds(unsigned int *cmds,
                                        int cur_ctx_bank,
                                        unsigned int nop_gpuaddr)
{
        unsigned int *start = cmds;

        cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
        *cmds++ = cp_type0_packet(REG_CP_STATE_DEBUG_INDEX, 1);
        *cmds++ = (cur_ctx_bank ? 0 : 0x20);
        cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
        return cmds - start;
}

/*
 * adreno_add_read_cmds - Add pm4 packets that wait on a memory location
 * @device - Pointer to the device structure
 * @cmds - Pointer to memory where the read commands need to be added
 * @addr - GPU address to be read
 * @val - The GPU will wait until the data at address addr becomes
 * equal to val
 * @nop_gpuaddr - GPU address of a nop indirect buffer
 * (see __adreno_add_idle_indirect_cmds)
 */
static inline int adreno_add_read_cmds(struct kgsl_device *device,
                                unsigned int *cmds, unsigned int addr,
                                unsigned int val, unsigned int nop_gpuaddr)
{
        unsigned int *start = cmds;

        *cmds++ = cp_type3_packet(CP_WAIT_REG_MEM, 5);
        /* MEM SPACE = memory, FUNCTION = equals */
        *cmds++ = 0x13;
        *cmds++ = addr;
        *cmds++ = val;
        *cmds++ = 0xFFFFFFFF;
        *cmds++ = 0xFFFFFFFF;
        cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
        return cmds - start;
}
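
/*
 * Illustrative use of the helpers above (sketch only; the buffer and
 * address names are placeholders): the caller owns a scratch command
 * buffer, advances the write pointer by each helper's return value, and
 * then submits link for (cmds - link) dwords to the ringbuffer code.
 *
 *        unsigned int link[64], *cmds = link;
 *
 *        cmds += adreno_add_read_cmds(device, cmds, flag_gpuaddr, 1,
 *                                        nop_gpuaddr);
 *        cmds += adreno_add_bank_change_cmds(cmds, cur_ctx_bank,
 *                                        nop_gpuaddr);
 */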

#endif /*__ADRENO_H */