/*
 *  AMD CPU Microcode Update Driver for Linux
 *  Copyright (C) 2008 Advanced Micro Devices Inc.
 *
 *  Author: Peter Oruba <peter.oruba@amd.com>
 *
 *  Based on work by:
 *  Tigran Aivazian <tigran@aivazian.fsnet.co.uk>
 *
 *  This driver allows upgrading microcode on AMD
 *  family 0x10 and 0x11 processors.
 *
 *  Licensed under the terms of the GNU General Public
 *  License version 2. See file COPYING for details.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/firmware.h>
#include <linux/pci_ids.h>
#include <linux/uaccess.h>
#include <linux/vmalloc.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pci.h>

#include <asm/microcode.h>
#include <asm/processor.h>
#include <asm/msr.h>

MODULE_DESCRIPTION("AMD Microcode Update Driver");
MODULE_AUTHOR("Peter Oruba");
MODULE_LICENSE("GPL v2");

#define UCODE_MAGIC                0x00414d44
#define UCODE_EQUIV_CPU_TABLE_TYPE 0x00000000
#define UCODE_UCODE_TYPE           0x00000001

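/*
 * One entry of the CPU equivalence table that starts a microcode
 * container file: it maps the CPUID signature of an installed CPU
 * (installed_cpu) to the equivalence id (equiv_cpu) that microcode
 * patch headers carry in their processor_rev_id field.
 */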
struct equiv_cpu_entry {
	u32	installed_cpu;
	u32	fixed_errata_mask;
	u32	fixed_errata_compare;
	u16	equiv_cpu;
	u16	res;
} __attribute__((packed));

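/*
 * Header of a single microcode patch, i.e. of a container file section
 * of type UCODE_UCODE_TYPE.  patch_id is the patch level that
 * MSR_AMD64_PATCH_LEVEL reports once the patch has been applied;
 * nb_dev_id/sb_dev_id are non-zero for chipset-specific patches, which
 * this driver does not apply.
 */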
struct microcode_header_amd {
	u32	data_code;
	u32	patch_id;
	u16	mc_patch_data_id;
	u8	mc_patch_data_len;
	u8	init_flag;
	u32	mc_patch_data_checksum;
	u32	nb_dev_id;
	u32	sb_dev_id;
	u16	processor_rev_id;
	u8	nb_rev_id;
	u8	sb_rev_id;
	u8	bios_api_rev;
	u8	reserved1[3];
	u32	match_reg[8];
} __attribute__((packed));

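/* A complete patch: the header immediately followed by the patch data. */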
struct microcode_amd {
	struct microcode_header_amd	hdr;
	unsigned int			mpb[0];
};

#define UCODE_MAX_SIZE			2048
#define UCODE_CONTAINER_SECTION_HDR	8
#define UCODE_CONTAINER_HEADER_SIZE	12

static struct equiv_cpu_entry *equiv_cpu_table;

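/*
 * Read the CPU signature: verify this is an AMD family 0x10+ CPU and
 * record its current patch level from MSR_AMD64_PATCH_LEVEL.
 */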
static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig)
{
	struct cpuinfo_x86 *c = &cpu_data(cpu);
	u32 dummy;

	memset(csig, 0, sizeof(*csig));
	if (c->x86_vendor != X86_VENDOR_AMD || c->x86 < 0x10) {
		pr_warning("CPU%d: AMD CPU family 0x%x not supported\n",
			   cpu, c->x86);
		return -1;
	}
	rdmsr(MSR_AMD64_PATCH_LEVEL, csig->rev, dummy);
	pr_info("CPU%d: patch_level=0x%x\n", cpu, csig->rev);
	return 0;
}

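/*
 * Check whether the patch at @mc is applicable: the CPU's signature must
 * be listed in the equivalence table, the patch must target the matching
 * equivalence id, it must not be chipset specific, and its patch_id must
 * be newer than the currently applied revision @rev.
 */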
static int get_matching_microcode(int cpu, void *mc, int rev)
{
	struct microcode_header_amd *mc_header = mc;
	unsigned int current_cpu_id;
	u16 equiv_cpu_id = 0;
	unsigned int i = 0;

	BUG_ON(equiv_cpu_table == NULL);
	current_cpu_id = cpuid_eax(0x00000001);

	while (equiv_cpu_table[i].installed_cpu != 0) {
		if (current_cpu_id == equiv_cpu_table[i].installed_cpu) {
			equiv_cpu_id = equiv_cpu_table[i].equiv_cpu;
			break;
		}
		i++;
	}

	if (!equiv_cpu_id)
		return 0;

	if (mc_header->processor_rev_id != equiv_cpu_id)
		return 0;

	/* ucode might be chipset specific -- currently we don't support this */
	if (mc_header->nb_dev_id || mc_header->sb_dev_id) {
		pr_err("CPU%d: loading of chipset specific code not yet supported\n",
		       cpu);
		return 0;
	}

	if (mc_header->patch_id <= rev)
		return 0;

	return 1;
}

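/*
 * Apply the patch cached in uci->mc on the current CPU by writing its
 * address to MSR_AMD64_PATCH_LOADER, then confirm success by reading the
 * new patch level back from MSR_AMD64_PATCH_LEVEL.
 */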
static int apply_microcode_amd(int cpu)
{
	u32 rev, dummy;
	int cpu_num = raw_smp_processor_id();
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu_num;
	struct microcode_amd *mc_amd = uci->mc;

	/* We should bind the task to the CPU */
	BUG_ON(cpu_num != cpu);

	if (mc_amd == NULL)
		return 0;

	wrmsrl(MSR_AMD64_PATCH_LOADER, (u64)(long)&mc_amd->hdr.data_code);
	/* get patch id after patching */
	rdmsr(MSR_AMD64_PATCH_LEVEL, rev, dummy);

	/* check current patch id and patch's id for match */
	if (rev != mc_amd->hdr.patch_id) {
		pr_err("CPU%d: update failed (for patch_level=0x%x)\n",
		       cpu, mc_amd->hdr.patch_id);
		return -1;
	}

	pr_info("CPU%d: updated (new patch_level=0x%x)\n", cpu, rev);
	uci->cpu_sig.rev = rev;

	return 0;
}

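/* Copy @n bytes of container data from @from to @to; always succeeds. */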
static int get_ucode_data(void *to, const u8 *from, size_t n)
{
	memcpy(to, from, n);
	return 0;
}

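/*
 * Parse the next UCODE_UCODE_TYPE section at @buf, validate its size
 * against what is left in the container and against UCODE_MAX_SIZE, and
 * return a vmalloc'ed copy of the patch (or NULL on error).  *mc_size is
 * set to the number of container bytes consumed, including the section
 * header.
 */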
static void *
get_next_ucode(const u8 *buf, unsigned int size, unsigned int *mc_size)
{
	unsigned int total_size;
	u8 section_hdr[UCODE_CONTAINER_SECTION_HDR];
	void *mc;

	if (get_ucode_data(section_hdr, buf, UCODE_CONTAINER_SECTION_HDR))
		return NULL;

	if (section_hdr[0] != UCODE_UCODE_TYPE) {
		pr_err("error: invalid type field in container file section header\n");
		return NULL;
	}

	total_size = (unsigned long) (section_hdr[4] + (section_hdr[5] << 8));

	if (total_size > size || total_size > UCODE_MAX_SIZE) {
		pr_err("error: size mismatch\n");
		return NULL;
	}

	mc = vmalloc(UCODE_MAX_SIZE);
	if (mc) {
		memset(mc, 0, UCODE_MAX_SIZE);
		if (get_ucode_data(mc, buf + UCODE_CONTAINER_SECTION_HDR,
				   total_size)) {
			vfree(mc);
			mc = NULL;
		} else
			*mc_size = total_size + UCODE_CONTAINER_SECTION_HDR;
	}
	return mc;
}

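/*
 * Read the equivalence table that follows the container header at @buf
 * into equiv_cpu_table.  Returns the number of bytes consumed (header
 * plus table), or 0 on error.
 */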
static int install_equiv_cpu_table(const u8 *buf)
{
	u8 container_hdr[UCODE_CONTAINER_HEADER_SIZE];
	unsigned int *buf_pos = (unsigned int *)container_hdr;
	unsigned long size;

	if (get_ucode_data(container_hdr, buf, UCODE_CONTAINER_HEADER_SIZE))
		return 0;

	size = buf_pos[2];

	if (buf_pos[1] != UCODE_EQUIV_CPU_TABLE_TYPE || !size) {
		pr_err("error: invalid type field in container file section header\n");
		return 0;
	}

	equiv_cpu_table = (struct equiv_cpu_entry *) vmalloc(size);
	if (!equiv_cpu_table) {
		pr_err("failed to allocate equivalent CPU table\n");
		return 0;
	}

	buf += UCODE_CONTAINER_HEADER_SIZE;
	if (get_ucode_data(equiv_cpu_table, buf, size)) {
		vfree(equiv_cpu_table);
		return 0;
	}

	return size + UCODE_CONTAINER_HEADER_SIZE; /* add header length */
}

static void free_equiv_cpu_table(void)
{
	vfree(equiv_cpu_table);
	equiv_cpu_table = NULL;
}

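/*
 * Scan a whole container image: install the equivalence table, walk the
 * patch sections and cache the newest matching patch in uci->mc for
 * apply_microcode_amd() to use.
 */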
static enum ucode_state
generic_load_microcode(int cpu, const u8 *data, size_t size)
{
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu;
	const u8 *ucode_ptr = data;
	void *new_mc = NULL;
	void *mc;
	int new_rev = uci->cpu_sig.rev;
	unsigned int leftover;
	unsigned long offset;
	enum ucode_state state = UCODE_OK;

	offset = install_equiv_cpu_table(ucode_ptr);
	if (!offset) {
		pr_err("failed to create equivalent cpu table\n");
		return UCODE_ERROR;
	}

	ucode_ptr += offset;
	leftover = size - offset;

	while (leftover) {
		unsigned int uninitialized_var(mc_size);
		struct microcode_header_amd *mc_header;

		mc = get_next_ucode(ucode_ptr, leftover, &mc_size);
		if (!mc)
			break;

		mc_header = (struct microcode_header_amd *)mc;
		if (get_matching_microcode(cpu, mc, new_rev)) {
			vfree(new_mc);
			new_rev = mc_header->patch_id;
			new_mc  = mc;
		} else
			vfree(mc);

		ucode_ptr += mc_size;
		leftover  -= mc_size;
	}

	if (new_mc) {
		if (!leftover) {
			vfree(uci->mc);
			uci->mc = new_mc;
			pr_debug("CPU%d found a matching microcode update with version 0x%x (current=0x%x)\n",
				 cpu, new_rev, uci->cpu_sig.rev);
		} else {
			vfree(new_mc);
			state = UCODE_ERROR;
		}
	} else
		state = UCODE_NFOUND;

	free_equiv_cpu_table();

	return state;
}

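/*
 * Fetch "amd-ucode/microcode_amd.bin" via the firmware loader, sanity
 * check the UCODE_MAGIC, and hand the image to generic_load_microcode().
 */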
static enum ucode_state request_microcode_fw(int cpu, struct device *device)
{
	const char *fw_name = "amd-ucode/microcode_amd.bin";
	const struct firmware *firmware;
	enum ucode_state ret;

	if (request_firmware(&firmware, fw_name, device)) {
		pr_err("failed to load file %s\n", fw_name);
		return UCODE_NFOUND;
	}

	if (*(u32 *)firmware->data != UCODE_MAGIC) {
		pr_err("invalid UCODE_MAGIC (0x%08x)\n",
		       *(u32 *)firmware->data);
		release_firmware(firmware);
		return UCODE_ERROR;
	}

	ret = generic_load_microcode(cpu, firmware->data, firmware->size);

	release_firmware(firmware);

	return ret;
}

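/* Loading via the /dev/cpu/microcode interface is not implemented for AMD. */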
static enum ucode_state
request_microcode_user(int cpu, const void __user *buf, size_t size)
{
	pr_info("AMD microcode update via /dev/cpu/microcode not supported\n");
	return UCODE_ERROR;
}

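/* Drop the patch cached for @cpu by generic_load_microcode(). */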
static void microcode_fini_cpu_amd(int cpu)
{
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu;

	vfree(uci->mc);
	uci->mc = NULL;
}

static struct microcode_ops microcode_amd_ops = {
	.request_microcode_user           = request_microcode_user,
	.request_microcode_fw             = request_microcode_fw,
	.collect_cpu_info                 = collect_cpu_info_amd,
	.apply_microcode                  = apply_microcode_amd,
	.microcode_fini_cpu               = microcode_fini_cpu_amd,
};

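/* Called by the arch-independent microcode core to obtain these ops. */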
struct microcode_ops * __init init_amd_microcode(void)
{
	return &microcode_amd_ops;
}