/*
 * AMD CPU Microcode Update Driver for Linux
 * Copyright (C) 2008 Advanced Micro Devices Inc.
 *
 * Author: Peter Oruba <peter.oruba@amd.com>
 *
 * Based on work by:
 * Tigran Aivazian <tigran@aivazian.fsnet.co.uk>
 *
 * This driver allows updating of microcode on AMD
 * family 0x10 and 0x11 processors.
 *
 * Licensed under the terms of the GNU General Public
 * License version 2. See file COPYING for details.
 */
#include <linux/firmware.h>
#include <linux/pci_ids.h>
#include <linux/uaccess.h>
#include <linux/vmalloc.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pci.h>

#include <asm/microcode.h>
#include <asm/processor.h>
#include <asm/msr.h>

MODULE_DESCRIPTION("AMD Microcode Update Driver");
MODULE_AUTHOR("Peter Oruba");
MODULE_LICENSE("GPL v2");

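/*
 * Layout of a microcode container file as parsed by this driver:
 * a 12-byte file header (UCODE_MAGIC, section type, section size),
 * followed by the CPU equivalence table and then one or more patch
 * sections, each preceded by an 8-byte section header (type, size).
 */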
#define UCODE_MAGIC                0x00414d44
#define UCODE_EQUIV_CPU_TABLE_TYPE 0x00000000
#define UCODE_UCODE_TYPE           0x00000001

static const struct firmware *firmware;
static int supported_cpu;

struct equiv_cpu_entry {
        u32     installed_cpu;
        u32     fixed_errata_mask;
        u32     fixed_errata_compare;
        u16     equiv_cpu;
        u16     res;
} __attribute__((packed));

struct microcode_header_amd {
        u32     data_code;
        u32     patch_id;
        u16     mc_patch_data_id;
        u8      mc_patch_data_len;
        u8      init_flag;
        u32     mc_patch_data_checksum;
        u32     nb_dev_id;
        u32     sb_dev_id;
        u16     processor_rev_id;
        u8      nb_rev_id;
        u8      sb_rev_id;
        u8      bios_api_rev;
        u8      reserved1[3];
        u32     match_reg[8];
} __attribute__((packed));

struct microcode_amd {
        struct microcode_header_amd hdr;
        unsigned int mpb[0];
};

#define UCODE_MAX_SIZE                  2048
#define UCODE_CONTAINER_SECTION_HDR     8
#define UCODE_CONTAINER_HEADER_SIZE     12

static struct equiv_cpu_entry *equiv_cpu_table;

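/* Read the patch level currently installed on the CPU from its MSR. */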
static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig)
{
        u32 dummy;

        if (!supported_cpu)
                return -1;

        memset(csig, 0, sizeof(*csig));
        rdmsr(MSR_AMD64_PATCH_LEVEL, csig->rev, dummy);
        pr_info("microcode: CPU%d: patch_level=0x%x\n", cpu, csig->rev);
        return 0;
}

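/*
 * Check whether a patch applies to this CPU: its processor_rev_id must
 * match the CPU's entry in the equivalence table and its patch_id must
 * be newer than the revision passed in.
 */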
static int get_matching_microcode(int cpu, void *mc, int rev)
{
        struct microcode_header_amd *mc_header = mc;
        unsigned int current_cpu_id;
        u16 equiv_cpu_id = 0;
        unsigned int i = 0;

        BUG_ON(equiv_cpu_table == NULL);
        current_cpu_id = cpuid_eax(0x00000001);

        while (equiv_cpu_table[i].installed_cpu != 0) {
                if (current_cpu_id == equiv_cpu_table[i].installed_cpu) {
                        equiv_cpu_id = equiv_cpu_table[i].equiv_cpu;
                        break;
                }
                i++;
        }

        if (!equiv_cpu_id)
                return 0;

        if (mc_header->processor_rev_id != equiv_cpu_id)
                return 0;

        /* ucode might be chipset specific -- currently we don't support this */
        if (mc_header->nb_dev_id || mc_header->sb_dev_id) {
                pr_err("microcode: CPU%d: loading of chipset "
                       "specific code not yet supported\n", cpu);
                return 0;
        }

        if (mc_header->patch_id <= rev)
                return 0;

        return 1;
}

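/*
 * Point MSR_AMD64_PATCH_LOADER at the patch and verify that the CPU's
 * patch level afterwards matches the patch_id we tried to load.
 */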
static int apply_microcode_amd(int cpu)
{
        u32 rev, dummy;
        int cpu_num = raw_smp_processor_id();
        struct ucode_cpu_info *uci = ucode_cpu_info + cpu_num;
        struct microcode_amd *mc_amd = uci->mc;

        /* We should bind the task to the CPU */
        BUG_ON(cpu_num != cpu);

        if (mc_amd == NULL)
                return 0;

        wrmsrl(MSR_AMD64_PATCH_LOADER, (u64)(long)&mc_amd->hdr.data_code);
        /* get patch id after patching */
        rdmsr(MSR_AMD64_PATCH_LEVEL, rev, dummy);

        /* check current patch id and patch's id for match */
        if (rev != mc_amd->hdr.patch_id) {
                pr_err("microcode: CPU%d: update failed "
                       "(for patch_level=0x%x)\n", cpu, mc_amd->hdr.patch_id);
                return -1;
        }

        pr_info("microcode: CPU%d: updated (new patch_level=0x%x)\n", cpu, rev);
        uci->cpu_sig.rev = rev;

        return 0;
}

static int get_ucode_data(void *to, const u8 *from, size_t n)
{
        memcpy(to, from, n);
        return 0;
}

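/*
 * Parse the next patch section: an 8-byte section header (type in byte 0,
 * 16-bit payload size in bytes 4-5) followed by the patch data, which is
 * returned in a freshly vmalloc()ed buffer.
 */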
static void *
get_next_ucode(const u8 *buf, unsigned int size, unsigned int *mc_size)
{
        unsigned int total_size;
        u8 section_hdr[UCODE_CONTAINER_SECTION_HDR];
        void *mc;

        if (get_ucode_data(section_hdr, buf, UCODE_CONTAINER_SECTION_HDR))
                return NULL;

        if (section_hdr[0] != UCODE_UCODE_TYPE) {
                pr_err("microcode: error: invalid type field in "
                       "container file section header\n");
                return NULL;
        }

        total_size = (unsigned long) (section_hdr[4] + (section_hdr[5] << 8));

        if (total_size > size || total_size > UCODE_MAX_SIZE) {
                pr_err("microcode: error: size mismatch\n");
                return NULL;
        }

        mc = vmalloc(UCODE_MAX_SIZE);
        if (mc) {
                memset(mc, 0, UCODE_MAX_SIZE);
                if (get_ucode_data(mc, buf + UCODE_CONTAINER_SECTION_HDR,
                                   total_size)) {
                        vfree(mc);
                        mc = NULL;
                } else
                        *mc_size = total_size + UCODE_CONTAINER_SECTION_HDR;
        }
        return mc;
}

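/*
 * Read the equivalence table (installed CPUID value -> equivalent processor
 * rev id used in the patch headers) from the start of the container file.
 * Returns the table size including the container header, or 0 on error.
 */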
static int install_equiv_cpu_table(const u8 *buf)
{
        u8 container_hdr[UCODE_CONTAINER_HEADER_SIZE];
        unsigned int *buf_pos = (unsigned int *)container_hdr;
        unsigned long size;

        if (get_ucode_data(&container_hdr, buf, UCODE_CONTAINER_HEADER_SIZE))
                return 0;

        size = buf_pos[2];

        if (buf_pos[1] != UCODE_EQUIV_CPU_TABLE_TYPE || !size) {
                pr_err("microcode: error: invalid type field in "
                       "container file section header\n");
                return 0;
        }

        equiv_cpu_table = (struct equiv_cpu_entry *) vmalloc(size);
        if (!equiv_cpu_table) {
                pr_err("microcode: failed to allocate equivalent CPU table\n");
                return 0;
        }

        buf += UCODE_CONTAINER_HEADER_SIZE;
        if (get_ucode_data(equiv_cpu_table, buf, size)) {
                vfree(equiv_cpu_table);
                return 0;
        }

        return size + UCODE_CONTAINER_HEADER_SIZE; /* add header length */
}

static void free_equiv_cpu_table(void)
{
        vfree(equiv_cpu_table);
        equiv_cpu_table = NULL;
}

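/*
 * Walk all patch sections in the container and keep the newest patch that
 * matches this CPU; it is stored in uci->mc for apply_microcode_amd().
 */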
static enum ucode_state
generic_load_microcode(int cpu, const u8 *data, size_t size)
{
        struct ucode_cpu_info *uci = ucode_cpu_info + cpu;
        const u8 *ucode_ptr = data;
        void *new_mc = NULL;
        void *mc;
        int new_rev = uci->cpu_sig.rev;
        unsigned int leftover;
        unsigned long offset;
        enum ucode_state state = UCODE_OK;

        offset = install_equiv_cpu_table(ucode_ptr);
        if (!offset) {
                pr_err("microcode: failed to create equivalent cpu table\n");
                return UCODE_ERROR;
        }

        ucode_ptr += offset;
        leftover = size - offset;

        while (leftover) {
                unsigned int uninitialized_var(mc_size);
                struct microcode_header_amd *mc_header;

                mc = get_next_ucode(ucode_ptr, leftover, &mc_size);
                if (!mc)
                        break;

                mc_header = (struct microcode_header_amd *)mc;
                if (get_matching_microcode(cpu, mc, new_rev)) {
                        vfree(new_mc);
                        new_rev = mc_header->patch_id;
                        new_mc = mc;
                } else
                        vfree(mc);

                ucode_ptr += mc_size;
                leftover -= mc_size;
        }

        if (new_mc) {
                if (!leftover) {
                        vfree(uci->mc);
                        uci->mc = new_mc;
                        pr_debug("microcode: CPU%d found a matching microcode "
                                 "update with version 0x%x (current=0x%x)\n",
                                 cpu, new_rev, uci->cpu_sig.rev);
                } else {
                        vfree(new_mc);
                        state = UCODE_ERROR;
                }
        } else
                state = UCODE_NFOUND;

        free_equiv_cpu_table();

        return state;
}

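/*
 * The firmware blob was requested once in init_microcode_amd(); validate
 * its magic word here and hand it to generic_load_microcode().
 */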
static enum ucode_state request_microcode_fw(int cpu, struct device *device)
{
        enum ucode_state ret;

        if (firmware == NULL)
                return UCODE_NFOUND;

        if (*(u32 *)firmware->data != UCODE_MAGIC) {
                pr_err("microcode: invalid UCODE_MAGIC (0x%08x)\n",
                       *(u32 *)firmware->data);
                return UCODE_ERROR;
        }

        ret = generic_load_microcode(cpu, firmware->data, firmware->size);

        return ret;
}

static enum ucode_state
request_microcode_user(int cpu, const void __user *buf, size_t size)
{
        pr_info("microcode: AMD microcode update via "
                "/dev/cpu/microcode not supported\n");
        return UCODE_ERROR;
}

static void microcode_fini_cpu_amd(int cpu)
{
        struct ucode_cpu_info *uci = ucode_cpu_info + cpu;

        vfree(uci->mc);
        uci->mc = NULL;
}

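/*
 * Check once at init time that the CPU family is supported and request
 * the microcode firmware image a single time, at driver init.
 */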
void init_microcode_amd(struct device *device)
{
        const char *fw_name = "amd-ucode/microcode_amd.bin";
        struct cpuinfo_x86 *c = &boot_cpu_data;

        WARN_ON(c->x86_vendor != X86_VENDOR_AMD);

        if (c->x86 < 0x10) {
                pr_warning("microcode: AMD CPU family 0x%x not supported\n",
                           c->x86);
                return;
        }
        supported_cpu = 1;

        if (request_firmware(&firmware, fw_name, device))
                pr_err("microcode: failed to load file %s\n", fw_name);
}

void fini_microcode_amd(void)
{
        release_firmware(firmware);
}

static struct microcode_ops microcode_amd_ops = {
        .init                   = init_microcode_amd,
        .fini                   = fini_microcode_amd,
        .request_microcode_user = request_microcode_user,
        .request_microcode_fw   = request_microcode_fw,
        .collect_cpu_info       = collect_cpu_info_amd,
        .apply_microcode        = apply_microcode_amd,
        .microcode_fini_cpu     = microcode_fini_cpu_amd,
};

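/* Entry point for the generic microcode core to obtain the AMD-specific ops. */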
struct microcode_ops * __init init_amd_microcode(void)
{
        return &microcode_amd_ops;
}