/*
 *  AMD CPU Microcode Update Driver for Linux
 *  Copyright (C) 2008 Advanced Micro Devices Inc.
 *
 *  Author: Peter Oruba <peter.oruba@amd.com>
 *
 *  Based on work by:
 *  Tigran Aivazian <tigran@aivazian.fsnet.co.uk>
 *
 *  This driver allows upgrading the microcode on AMD
 *  family 0x10 and 0x11 processors.
 *
 *  Licensed under the terms of the GNU General Public
 *  License version 2. See file COPYING for details.
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/firmware.h>
#include <linux/pci_ids.h>
#include <linux/uaccess.h>
#include <linux/vmalloc.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/pci.h>

#include <asm/microcode.h>
#include <asm/processor.h>
#include <asm/msr.h>

MODULE_DESCRIPTION("AMD Microcode Update Driver");
MODULE_AUTHOR("Peter Oruba");
MODULE_LICENSE("GPL v2");

#define UCODE_MAGIC                0x00414d44
#define UCODE_EQUIV_CPU_TABLE_TYPE 0x00000000
#define UCODE_UCODE_TYPE           0x00000001

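/*
 * Layout of the microcode container file as parsed by this driver: a
 * 12-byte container header (UCODE_MAGIC, the equivalence-table section
 * type and the table size), followed by the equivalence table itself,
 * followed by one or more patch sections.  Each patch section starts
 * with an 8-byte section header (type and size) and carries a single
 * microcode patch.  The equivalence table maps the installed CPU's
 * signature (CPUID function 1) to the "equivalent CPU" id that the
 * patch headers refer to.
 */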
struct equiv_cpu_entry {
	u32	installed_cpu;
	u32	fixed_errata_mask;
	u32	fixed_errata_compare;
	u16	equiv_cpu;
	u16	res;
} __attribute__((packed));

struct microcode_header_amd {
	u32	data_code;
	u32	patch_id;
	u16	mc_patch_data_id;
	u8	mc_patch_data_len;
	u8	init_flag;
	u32	mc_patch_data_checksum;
	u32	nb_dev_id;
	u32	sb_dev_id;
	u16	processor_rev_id;
	u8	nb_rev_id;
	u8	sb_rev_id;
	u8	bios_api_rev;
	u8	reserved1[3];
	u32	match_reg[8];
} __attribute__((packed));

struct microcode_amd {
	struct microcode_header_amd	hdr;
	unsigned int			mpb[0];
};

#define UCODE_MAX_SIZE			2048
#define UCODE_CONTAINER_SECTION_HDR	8
#define UCODE_CONTAINER_HEADER_SIZE	12

static struct equiv_cpu_entry *equiv_cpu_table;

static int collect_cpu_info_amd(int cpu, struct cpu_signature *csig)
{
	struct cpuinfo_x86 *c = &cpu_data(cpu);
	u32 dummy;

	memset(csig, 0, sizeof(*csig));
	if (c->x86_vendor != X86_VENDOR_AMD || c->x86 < 0x10) {
		pr_warning("CPU%d: AMD CPU family 0x%x not supported\n",
			   cpu, c->x86);
		return -1;
	}
	rdmsr(MSR_AMD64_PATCH_LEVEL, csig->rev, dummy);
	pr_info("CPU%d: patch_level=0x%x\n", cpu, csig->rev);
	return 0;
}

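/*
 * Check whether a candidate patch applies to this CPU: look up the
 * running CPU's signature (CPUID function 1) in the equivalence table,
 * require that the patch header's processor_rev_id matches the
 * equivalent CPU id, and only accept the patch if it is newer than the
 * revision passed in.  Chipset-specific patches (non-zero northbridge
 * or southbridge device ids) are rejected.  Returns 1 if the patch
 * should be used, 0 otherwise.
 */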
static int get_matching_microcode(int cpu, void *mc, int rev)
{
	struct microcode_header_amd *mc_header = mc;
	unsigned int current_cpu_id;
	u16 equiv_cpu_id = 0;
	unsigned int i = 0;

	BUG_ON(equiv_cpu_table == NULL);
	current_cpu_id = cpuid_eax(0x00000001);

	while (equiv_cpu_table[i].installed_cpu != 0) {
		if (current_cpu_id == equiv_cpu_table[i].installed_cpu) {
			equiv_cpu_id = equiv_cpu_table[i].equiv_cpu;
			break;
		}
		i++;
	}

	if (!equiv_cpu_id)
		return 0;

	if (mc_header->processor_rev_id != equiv_cpu_id)
		return 0;

	/* ucode might be chipset specific -- currently we don't support this */
	if (mc_header->nb_dev_id || mc_header->sb_dev_id) {
		pr_err("CPU%d: loading of chipset specific code not yet supported\n",
		       cpu);
		return 0;
	}

	if (mc_header->patch_id <= rev)
		return 0;

	return 1;
}

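/*
 * Apply the cached patch on the current CPU: hand the address of the
 * patch header to the patch-loader MSR, then read MSR_AMD64_PATCH_LEVEL
 * back to verify that the expected patch id is now active.
 */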
static int apply_microcode_amd(int cpu)
{
	u32 rev, dummy;
	int cpu_num = raw_smp_processor_id();
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu_num;
	struct microcode_amd *mc_amd = uci->mc;

	/* We should bind the task to the CPU */
	BUG_ON(cpu_num != cpu);

	if (mc_amd == NULL)
		return 0;

	wrmsrl(MSR_AMD64_PATCH_LOADER, (u64)(long)&mc_amd->hdr.data_code);
	/* get patch id after patching */
	rdmsr(MSR_AMD64_PATCH_LEVEL, rev, dummy);

	/* check current patch id and patch's id for match */
	if (rev != mc_amd->hdr.patch_id) {
		pr_err("CPU%d: update failed (for patch_level=0x%x)\n",
		       cpu, mc_amd->hdr.patch_id);
		return -1;
	}

	pr_info("CPU%d: updated (new patch_level=0x%x)\n", cpu, rev);
	uci->cpu_sig.rev = rev;

	return 0;
}

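/*
 * Extract the next patch from the container: validate the 8-byte
 * section header (type plus size) and copy the patch into a freshly
 * allocated buffer of UCODE_MAX_SIZE bytes.  *mc_size is set to the
 * number of container bytes consumed, i.e. section header plus patch.
 */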
static void *
get_next_ucode(const u8 *buf, unsigned int size, unsigned int *mc_size)
{
	unsigned int total_size;
	u8 section_hdr[UCODE_CONTAINER_SECTION_HDR];
	void *mc;

	get_ucode_data(section_hdr, buf, UCODE_CONTAINER_SECTION_HDR);

	if (section_hdr[0] != UCODE_UCODE_TYPE) {
		pr_err("error: invalid type field in container file section header\n");
		return NULL;
	}

	total_size = (unsigned long) (section_hdr[4] + (section_hdr[5] << 8));

	if (total_size > size || total_size > UCODE_MAX_SIZE) {
		pr_err("error: size mismatch\n");
		return NULL;
	}

	mc = vzalloc(UCODE_MAX_SIZE);
	if (!mc)
		return NULL;

	get_ucode_data(mc, buf + UCODE_CONTAINER_SECTION_HDR, total_size);
	*mc_size = total_size + UCODE_CONTAINER_SECTION_HDR;

	return mc;
}

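/*
 * Read the 12-byte container header and copy the equivalence table that
 * follows it into a vmalloc'd buffer.  Returns the number of bytes
 * consumed (header plus table), or 0 on failure.
 */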
static int install_equiv_cpu_table(const u8 *buf)
{
	u8 container_hdr[UCODE_CONTAINER_HEADER_SIZE];
	unsigned int *buf_pos = (unsigned int *)container_hdr;
	unsigned long size;

	get_ucode_data(container_hdr, buf, UCODE_CONTAINER_HEADER_SIZE);

	size = buf_pos[2];

	if (buf_pos[1] != UCODE_EQUIV_CPU_TABLE_TYPE || !size) {
		pr_err("error: invalid type field in container file section header\n");
		return 0;
	}

	equiv_cpu_table = vmalloc(size);
	if (!equiv_cpu_table) {
		pr_err("failed to allocate equivalent CPU table\n");
		return 0;
	}

	buf += UCODE_CONTAINER_HEADER_SIZE;
	get_ucode_data(equiv_cpu_table, buf, size);

	return size + UCODE_CONTAINER_HEADER_SIZE; /* add header length */
}

static void free_equiv_cpu_table(void)
{
	vfree(equiv_cpu_table);
	equiv_cpu_table = NULL;
}

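/*
 * Walk all patch sections in the container and remember the newest
 * patch that matches this CPU.  The winner replaces the previously
 * cached image in uci->mc, but only if the whole container parsed
 * cleanly; otherwise UCODE_ERROR is returned.  If no suitable patch
 * was found the result is UCODE_NFOUND.
 */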
static enum ucode_state
generic_load_microcode(int cpu, const u8 *data, size_t size)
{
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu;
	const u8 *ucode_ptr = data;
	void *new_mc = NULL;
	void *mc;
	int new_rev = uci->cpu_sig.rev;
	unsigned int leftover;
	unsigned long offset;
	enum ucode_state state = UCODE_OK;

	offset = install_equiv_cpu_table(ucode_ptr);
	if (!offset) {
		pr_err("failed to create equivalent cpu table\n");
		return UCODE_ERROR;
	}

	ucode_ptr += offset;
	leftover = size - offset;

	while (leftover) {
		unsigned int uninitialized_var(mc_size);
		struct microcode_header_amd *mc_header;

		mc = get_next_ucode(ucode_ptr, leftover, &mc_size);
		if (!mc)
			break;

		mc_header = (struct microcode_header_amd *)mc;
		if (get_matching_microcode(cpu, mc, new_rev)) {
			vfree(new_mc);
			new_rev = mc_header->patch_id;
			new_mc  = mc;
		} else
			vfree(mc);

		ucode_ptr += mc_size;
		leftover  -= mc_size;
	}

	if (new_mc) {
		if (!leftover) {
			vfree(uci->mc);
			uci->mc = new_mc;
			pr_debug("CPU%d found a matching microcode update with version 0x%x (current=0x%x)\n",
				 cpu, new_rev, uci->cpu_sig.rev);
		} else {
			vfree(new_mc);
			state = UCODE_ERROR;
		}
	} else
		state = UCODE_NFOUND;

	free_equiv_cpu_table();

	return state;
}

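/*
 * AMD microcode is distributed as a single container file,
 * amd-ucode/microcode_amd.bin, fetched through the firmware loader.
 * The leading u32 of the file must be UCODE_MAGIC before the container
 * is handed to generic_load_microcode().
 */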
static enum ucode_state request_microcode_fw(int cpu, struct device *device)
{
	const char *fw_name = "amd-ucode/microcode_amd.bin";
	const struct firmware *firmware;
	enum ucode_state ret;

	if (request_firmware(&firmware, fw_name, device)) {
		pr_err("failed to load file %s\n", fw_name);
		return UCODE_NFOUND;
	}

	if (*(u32 *)firmware->data != UCODE_MAGIC) {
		pr_err("invalid UCODE_MAGIC (0x%08x)\n",
		       *(u32 *)firmware->data);
		release_firmware(firmware);
		return UCODE_ERROR;
	}

	ret = generic_load_microcode(cpu, firmware->data, firmware->size);

	release_firmware(firmware);

	return ret;
}

static enum ucode_state
request_microcode_user(int cpu, const void __user *buf, size_t size)
{
	pr_info("AMD microcode update via /dev/cpu/microcode not supported\n");
	return UCODE_ERROR;
}

static void microcode_fini_cpu_amd(int cpu)
{
	struct ucode_cpu_info *uci = ucode_cpu_info + cpu;

	vfree(uci->mc);
	uci->mc = NULL;
}

static struct microcode_ops microcode_amd_ops = {
	.request_microcode_user           = request_microcode_user,
	.request_microcode_fw             = request_microcode_fw,
	.collect_cpu_info                 = collect_cpu_info_amd,
	.apply_microcode                  = apply_microcode_amd,
	.microcode_fini_cpu               = microcode_fini_cpu_amd,
};

struct microcode_ops * __init init_amd_microcode(void)
{
	return &microcode_amd_ops;
}