/* Copyright (c) 2008-2012, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 */
#ifndef __ADRENO_H
#define __ADRENO_H

#include "kgsl_device.h"
#include "adreno_drawctxt.h"
#include "adreno_ringbuffer.h"
#include "kgsl_iommu.h"
#include <mach/ocmem.h>

#define DEVICE_3D_NAME "kgsl-3d"
#define DEVICE_3D0_NAME "kgsl-3d0"

#define ADRENO_DEVICE(device) \
		KGSL_CONTAINER_OF(device, struct adreno_device, dev)
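
/*
 * Usage sketch (illustrative only, not part of the original interface):
 * the container-of wrapper above recovers the adreno_device that embeds a
 * given kgsl_device, e.g.
 *
 *	struct adreno_device *adreno_dev = ADRENO_DEVICE(device);
 */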

#define ADRENO_CHIPID_CORE(_id) (((_id) >> 24) & 0xFF)
#define ADRENO_CHIPID_MAJOR(_id) (((_id) >> 16) & 0xFF)
#define ADRENO_CHIPID_MINOR(_id) (((_id) >> 8) & 0xFF)
#define ADRENO_CHIPID_PATCH(_id) ((_id) & 0xFF)
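
/*
 * Worked example (illustrative only): a hypothetical chip id of 0x03000520
 * decodes with the macros above to core 0x03, major 0x00, minor 0x05 and
 * patch 0x20.
 */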

/* Flags to control command packet settings */
#define KGSL_CMD_FLAGS_NONE		0x00000000
#define KGSL_CMD_FLAGS_PMODE		0x00000001
#define KGSL_CMD_FLAGS_INTERNAL_ISSUE	0x00000002
#define KGSL_CMD_FLAGS_EOF		0x00000100

/* Command identifiers */
#define KGSL_CONTEXT_TO_MEM_IDENTIFIER	0x2EADBEEF
#define KGSL_CMD_IDENTIFIER		0x2EEDFACE
#define KGSL_CMD_INTERNAL_IDENTIFIER	0x2EEDD00D
#define KGSL_START_OF_IB_IDENTIFIER	0x2EADEABE
#define KGSL_END_OF_IB_IDENTIFIER	0x2ABEDEAD
#define KGSL_END_OF_FRAME_IDENTIFIER	0x2E0F2E0F
#define KGSL_NOP_IB_IDENTIFIER		0x20F20F20

#ifdef CONFIG_MSM_SCM
#define ADRENO_DEFAULT_PWRSCALE_POLICY  (&kgsl_pwrscale_policy_tz)
#elif defined CONFIG_MSM_SLEEP_STATS_DEVICE
#define ADRENO_DEFAULT_PWRSCALE_POLICY  (&kgsl_pwrscale_policy_idlestats)
#else
#define ADRENO_DEFAULT_PWRSCALE_POLICY  NULL
#endif

void adreno_debugfs_init(struct kgsl_device *device);

#define ADRENO_ISTORE_START 0x5000 /* Istore offset */

#define ADRENO_NUM_CTX_SWITCH_ALLOWED_BEFORE_DRAW	50

/*
 * We cannot wait forever for the core to go idle, so set an upper limit on
 * the amount of time to wait for it.
 */

#define ADRENO_IDLE_TIMEOUT (20 * 1000)

enum adreno_gpurev {
	ADRENO_REV_UNKNOWN = 0,
	ADRENO_REV_A200 = 200,
	ADRENO_REV_A203 = 203,
	ADRENO_REV_A205 = 205,
	ADRENO_REV_A220 = 220,
	ADRENO_REV_A225 = 225,
	ADRENO_REV_A305 = 305,
	ADRENO_REV_A320 = 320,
	ADRENO_REV_A330 = 330,
};

struct adreno_gpudev;

struct adreno_device {
	struct kgsl_device dev;    /* Must be first field in this struct */
	unsigned int chip_id;
	enum adreno_gpurev gpurev;
	unsigned long gmem_base;
	unsigned int gmem_size;
	struct adreno_context *drawctxt_active;
	const char *pfp_fwfile;
	unsigned int *pfp_fw;
	size_t pfp_fw_size;
	unsigned int pfp_fw_version;
	const char *pm4_fwfile;
	unsigned int *pm4_fw;
	size_t pm4_fw_size;
	unsigned int pm4_fw_version;
	struct adreno_ringbuffer ringbuffer;
	unsigned int mharb;
	struct adreno_gpudev *gpudev;
	unsigned int wait_timeout;
	unsigned int istore_size;
	unsigned int pix_shader_start;
	unsigned int instruction_size;
	unsigned int ib_check_level;
	unsigned int fast_hang_detect;
	unsigned int ft_policy;
	unsigned int ft_user_control;
	unsigned int long_ib_detect;
	unsigned int long_ib;
	unsigned int long_ib_ts;
	unsigned int ft_pf_policy;
	unsigned int gpulist_index;
	struct ocmem_buf *ocmem_hdl;
	unsigned int ocmem_base;
};

struct adreno_gpudev {
	/*
	 * These registers are in a different location on A3XX, so define
	 * them in the structure and use them as variables.
	 */
	unsigned int reg_rbbm_status;
	unsigned int reg_cp_pfp_ucode_data;
	unsigned int reg_cp_pfp_ucode_addr;
	/* keeps track of when we need to execute the draw workaround code */
	int ctx_switches_since_last_draw;

	/* GPU specific function hooks */
	int (*ctxt_create)(struct adreno_device *, struct adreno_context *);
	void (*ctxt_save)(struct adreno_device *, struct adreno_context *);
	void (*ctxt_restore)(struct adreno_device *, struct adreno_context *);
	void (*ctxt_draw_workaround)(struct adreno_device *,
					struct adreno_context *);
	irqreturn_t (*irq_handler)(struct adreno_device *);
	void (*irq_control)(struct adreno_device *, int);
	unsigned int (*irq_pending)(struct adreno_device *);
	void * (*snapshot)(struct adreno_device *, void *, int *, int);
	void (*rb_init)(struct adreno_device *, struct adreno_ringbuffer *);
	void (*start)(struct adreno_device *);
	unsigned int (*busy_cycles)(struct adreno_device *);
};

/*
 * struct adreno_ft_data - Structure that contains all information needed to
 * perform GPU fault tolerance
 * @ib1 - IB1 that the GPU was executing when the hang happened
 * @context_id - Context which caused the hang
 * @global_eop - eoptimestamp at the time of the hang
 * @rb_buffer - Buffer that holds the commands from good contexts
 * @rb_size - Number of valid dwords in rb_buffer
 * @bad_rb_buffer - Buffer that holds commands from the hanging context
 * @bad_rb_size - Number of valid dwords in bad_rb_buffer
 * @good_rb_buffer - Buffer that holds commands from good contexts
 * @good_rb_size - Number of valid dwords in good_rb_buffer
 * @last_valid_ctx_id - The last context from which commands were placed in
 * the ringbuffer before the GPU hung
 * @status - Status of the fault tolerance attempt
 * @ft_policy - Fault tolerance policy being applied
 * @err_code - Fault tolerance error code
 * @start_of_replay_cmds - Offset in the ringbuffer from where commands can be
 * replayed during fault tolerance
 * @replay_for_snapshot - Offset in the ringbuffer where IBs can be saved for
 * replaying with the snapshot
 */
struct adreno_ft_data {
	unsigned int ib1;
	unsigned int context_id;
	unsigned int global_eop;
	unsigned int *rb_buffer;
	unsigned int rb_size;
	unsigned int *bad_rb_buffer;
	unsigned int bad_rb_size;
	unsigned int *good_rb_buffer;
	unsigned int good_rb_size;
	unsigned int last_valid_ctx_id;
	unsigned int status;
	unsigned int ft_policy;
	unsigned int err_code;
	unsigned int start_of_replay_cmds;
	unsigned int replay_for_snapshot;
};

/* Fault Tolerance policy flags */
#define KGSL_FT_DISABLE			BIT(0)
#define KGSL_FT_REPLAY			BIT(1)
#define KGSL_FT_SKIPIB			BIT(2)
#define KGSL_FT_SKIPFRAME		BIT(3)
#define KGSL_FT_TEMP_DISABLE		BIT(4)
#define KGSL_FT_DEFAULT_POLICY		(KGSL_FT_REPLAY + KGSL_FT_SKIPIB)

/* Pagefault policy flags */
#define KGSL_FT_PAGEFAULT_INT_ENABLE		0x00000001
#define KGSL_FT_PAGEFAULT_GPUHALT_ENABLE	0x00000002
#define KGSL_FT_PAGEFAULT_LOG_ONE_PER_PAGE	0x00000004
#define KGSL_FT_PAGEFAULT_LOG_ONE_PER_INT	0x00000008
#define KGSL_FT_PAGEFAULT_DEFAULT_POLICY	(KGSL_FT_PAGEFAULT_INT_ENABLE + \
					KGSL_FT_PAGEFAULT_GPUHALT_ENABLE)
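
/*
 * Usage sketch (illustrative only; the variable name is hypothetical): the
 * policy values above are plain bitmasks, so individual flags are combined
 * and tested with the usual bit operations, e.g.
 *
 *	unsigned int policy = KGSL_FT_REPLAY | KGSL_FT_SKIPIB;
 *
 *	if (policy & KGSL_FT_SKIPIB)
 *		... skip the faulting IB instead of replaying it ...
 */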

extern struct adreno_gpudev adreno_a2xx_gpudev;
extern struct adreno_gpudev adreno_a3xx_gpudev;

/* A2XX register sets defined in adreno_a2xx.c */
extern const unsigned int a200_registers[];
extern const unsigned int a220_registers[];
extern const unsigned int a225_registers[];
extern const unsigned int a200_registers_count;
extern const unsigned int a220_registers_count;
extern const unsigned int a225_registers_count;

/* A3XX register set defined in adreno_a3xx.c */
extern const unsigned int a3xx_registers[];
extern const unsigned int a3xx_registers_count;

extern const unsigned int a3xx_hlsq_registers[];
extern const unsigned int a3xx_hlsq_registers_count;

extern const unsigned int a330_registers[];
extern const unsigned int a330_registers_count;

extern unsigned int ft_detect_regs[];
extern const unsigned int ft_detect_regs_count;

int adreno_idle(struct kgsl_device *device);
void adreno_regread(struct kgsl_device *device, unsigned int offsetwords,
				unsigned int *value);
void adreno_regwrite(struct kgsl_device *device, unsigned int offsetwords,
				unsigned int value);

int adreno_dump(struct kgsl_device *device, int manual);

struct kgsl_memdesc *adreno_find_region(struct kgsl_device *device,
						unsigned int pt_base,
						unsigned int gpuaddr,
						unsigned int size);

uint8_t *adreno_convertaddr(struct kgsl_device *device,
	unsigned int pt_base, unsigned int gpuaddr, unsigned int size);

struct kgsl_memdesc *adreno_find_ctxtmem(struct kgsl_device *device,
	unsigned int pt_base, unsigned int gpuaddr, unsigned int size);

void *adreno_snapshot(struct kgsl_device *device, void *snapshot, int *remain,
		int hang);

int adreno_dump_and_exec_ft(struct kgsl_device *device);

void adreno_dump_rb(struct kgsl_device *device, const void *buf,
			size_t len, int start, int size);

unsigned int adreno_ft_detect(struct kgsl_device *device,
				unsigned int *prev_reg_val);

static inline int adreno_is_a200(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A200);
}

static inline int adreno_is_a203(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A203);
}

static inline int adreno_is_a205(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A205);
}

static inline int adreno_is_a20x(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev <= 209);
}

static inline int adreno_is_a220(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A220);
}

static inline int adreno_is_a225(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A225);
}

static inline int adreno_is_a22x(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A220 ||
		adreno_dev->gpurev == ADRENO_REV_A225);
}

static inline int adreno_is_a2xx(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev <= 299);
}

static inline int adreno_is_a3xx(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev >= 300);
}

static inline int adreno_is_a305(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A305);
}

static inline int adreno_is_a320(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A320);
}

static inline int adreno_is_a330(struct adreno_device *adreno_dev)
{
	return (adreno_dev->gpurev == ADRENO_REV_A330);
}

static inline int adreno_rb_ctxtswitch(unsigned int *cmd)
{
	return (cmd[0] == cp_nop_packet(1) &&
		cmd[1] == KGSL_CONTEXT_TO_MEM_IDENTIFIER);
}

/**
 * adreno_encode_istore_size - encode istore size in CP format
 * @adreno_dev - The 3D device.
 *
 * Encode the istore size into the format expected by the
 * CP_SET_SHADER_BASES and CP_ME_INIT commands:
 * bits 31:29 - istore size as encoded by this function
 * bits 27:16 - vertex shader start offset in instructions
 * bits 11:0 - pixel shader start offset in instructions.
 */
static inline int adreno_encode_istore_size(struct adreno_device *adreno_dev)
{
	unsigned int size;
	/*
	 * On A225 the CP microcode multiplies the encoded
	 * value by 3 while decoding.
	 */
	if (adreno_is_a225(adreno_dev))
		size = adreno_dev->istore_size/3;
	else
		size = adreno_dev->istore_size;

	return (ilog2(size) - 5) << 29;
}
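
/*
 * Worked example (illustrative only; the size is hypothetical): for an
 * istore_size of 2048 instructions on a non-A225 core, ilog2(2048) - 5 = 6,
 * so adreno_encode_istore_size() returns 6 << 29 = 0xC0000000, placing the
 * encoded size in bits 31:29 as described above.
 */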

static inline int __adreno_add_idle_indirect_cmds(unsigned int *cmds,
						unsigned int nop_gpuaddr)
{
	/*
	 * Adding an indirect buffer ensures that the prefetch stalls until
	 * the commands in the indirect buffer have completed. We need to
	 * stall prefetch with a nop indirect buffer when updating pagetables
	 * because it provides more stable synchronization.
	 */
	*cmds++ = CP_HDR_INDIRECT_BUFFER_PFD;
	*cmds++ = nop_gpuaddr;
	*cmds++ = 2;
	*cmds++ = cp_type3_packet(CP_WAIT_FOR_IDLE, 1);
	*cmds++ = 0x00000000;
	return 5;
}

static inline int adreno_add_change_mh_phys_limit_cmds(unsigned int *cmds,
						unsigned int new_phys_limit,
						unsigned int nop_gpuaddr)
{
	unsigned int *start = cmds;

	*cmds++ = cp_type0_packet(MH_MMU_MPU_END, 1);
	*cmds++ = new_phys_limit;
	cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
	return cmds - start;
}

static inline int adreno_add_bank_change_cmds(unsigned int *cmds,
					int cur_ctx_bank,
					unsigned int nop_gpuaddr)
{
	unsigned int *start = cmds;

	*cmds++ = cp_type0_packet(REG_CP_STATE_DEBUG_INDEX, 1);
	*cmds++ = (cur_ctx_bank ? 0 : 0x20);
	cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
	return cmds - start;
}

/*
 * adreno_add_read_cmds - Add PM4 packets to perform a read
 * @device - Pointer to the device structure
 * @cmds - Pointer to memory where the read commands need to be added
 * @addr - GPU address to read
 * @val - The GPU will wait until the data at address addr becomes
 * equal to value
 * @nop_gpuaddr - GPU address of a nop indirect buffer used to stall prefetch
 */
static inline int adreno_add_read_cmds(struct kgsl_device *device,
				unsigned int *cmds, unsigned int addr,
				unsigned int val, unsigned int nop_gpuaddr)
{
	unsigned int *start = cmds;

	*cmds++ = cp_type3_packet(CP_WAIT_REG_MEM, 5);
	/* MEM SPACE = memory, FUNCTION = equals */
	*cmds++ = 0x13;
	*cmds++ = addr;
	*cmds++ = val;
	*cmds++ = 0xFFFFFFFF;
	*cmds++ = 0xFFFFFFFF;
	cmds += __adreno_add_idle_indirect_cmds(cmds, nop_gpuaddr);
	return cmds - start;
}
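
/*
 * Usage sketch (illustrative only; the buffer and values are hypothetical):
 * each helper above returns the number of dwords it wrote, so callers chain
 * them by advancing the command pointer, e.g.
 *
 *	unsigned int *cmds = start;
 *	cmds += adreno_add_read_cmds(device, cmds, gpuaddr, 1, nop_gpuaddr);
 */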

/*
 * adreno_add_idle_cmds - Add PM4 packets to wait for the GPU to go idle
 * @adreno_dev - Pointer to the adreno device structure
 * @cmds - Pointer to memory where the idle commands need to be added
 */
static inline int adreno_add_idle_cmds(struct adreno_device *adreno_dev,
							unsigned int *cmds)
{
	unsigned int *start = cmds;

	*cmds++ = cp_type3_packet(CP_WAIT_FOR_IDLE, 1);
	*cmds++ = 0x00000000;

	if ((adreno_dev->gpurev == ADRENO_REV_A305) ||
		(adreno_dev->gpurev == ADRENO_REV_A320)) {
		*cmds++ = cp_type3_packet(CP_WAIT_FOR_ME, 1);
		*cmds++ = 0x00000000;
	}

	return cmds - start;
}

#endif /* __ADRENO_H */