| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | /* Copyright 2002 Andi Kleen */ | 
| Dave Jones | 038b0a6 | 2006-10-04 03:38:54 -0400 | [diff] [blame] | 2 |  | 
| Jan Beulich | 8d379da | 2006-09-26 10:52:32 +0200 | [diff] [blame] | 3 | #include <linux/linkage.h> | 
| Ingo Molnar | f3b6eaf | 2009-03-12 12:20:17 +0100 | [diff] [blame] | 4 |  | 
| Jan Beulich | 8d379da | 2006-09-26 10:52:32 +0200 | [diff] [blame] | 5 | #include <asm/cpufeature.h> | 
| Ingo Molnar | f3b6eaf | 2009-03-12 12:20:17 +0100 | [diff] [blame] | 6 | #include <asm/dwarf2.h> | 
| Jan Beulich | 8d379da | 2006-09-26 10:52:32 +0200 | [diff] [blame] | 7 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 8 | /* | 
 | 9 |  * memcpy - Copy a memory block. | 
 | 10 |  * | 
| Ingo Molnar | f3b6eaf | 2009-03-12 12:20:17 +0100 | [diff] [blame] | 11 |  * Input: | 
 | 12 |  *  rdi destination | 
 | 13 |  *  rsi source | 
 | 14 |  *  rdx count | 
 | 15 |  * | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 16 |  * Output: | 
 | 17 |  * rax original destination | 
| Ingo Molnar | f3b6eaf | 2009-03-12 12:20:17 +0100 | [diff] [blame] | 18 |  */ | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 19 |  | 
| Ingo Molnar | f3b6eaf | 2009-03-12 12:20:17 +0100 | [diff] [blame] | 20 | /* | 
 | 21 |  * memcpy_c() - fast string ops (REP MOVSQ) based variant. | 
 | 22 |  * | 
 | 23 |  * Calls to this get patched into the kernel image via the | 
 | 24 |  * alternative instructions framework: | 
 | 25 |  */ | 
/*
 * memcpy_c - "fast string" memcpy variant (REP MOVSQ + REP MOVSB).
 *
 * In:  %rdi = destination, %rsi = source, %rdx = byte count
 * Out: %rax = original destination
 *
 * Never called directly: the alternatives framework patches the first
 * bytes of memcpy below into a short jump here on CPUs that set
 * X86_FEATURE_REP_GOOD (see the .altinstructions entry at the end of
 * this file).
 */
	ALIGN
memcpy_c:
	CFI_STARTPROC
	movq %rdi, %rax		/* return value: original destination */

	movl %edx, %ecx		/* only the low 32 bits of the count are used */
	shrl $3, %ecx		/* %ecx = number of whole 8-byte words */
	andl $7, %edx		/* %edx = leftover tail bytes (0..7) */
	rep movsq		/* bulk copy, one quadword per iteration */
	movl %edx, %ecx
	rep movsb		/* copy the 0..7 remaining bytes */
	ret
	CFI_ENDPROC
ENDPROC(memcpy_c)
 | 40 |  | 
/*
 * memcpy / __memcpy - open-coded unrolled copy, the generic fallback.
 *
 * In:  %rdi = destination, %rsi = source, %rdx = byte count
 * Out: %rax = original destination
 *
 * Clobbers %rcx, %r8-%r11 and advances %rsi/%rdi past the copied
 * region; only the low 32 bits of the count are examined (movl below).
 * NOTE(review): source and destination are assumed non-overlapping
 * (forward copy) - confirm against memmove for overlapping callers.
 *
 * The first two instructions must stay at the very top: the
 * alternatives entry at the end of this file overwrites exactly the
 * first (2b - 1b) bytes of this function with a jump to memcpy_c.
 */
ENTRY(__memcpy)
ENTRY(memcpy)
	CFI_STARTPROC

	/*
	 * Put the number of full 64-byte blocks into %ecx.
	 * Tail portion is handled at the end:
	 */
	movq %rdi, %rax		/* return value: original destination */
	movl %edx, %ecx
	shrl   $6, %ecx		/* %ecx = count / 64 */
	jz .Lhandle_tail	/* less than one full 64-byte block */

	.p2align 4
.Lloop_64:
	/*
	 * We decrement the loop index here - and the zero-flag is
	 * checked at the end of the loop (instructions in between do
	 * not change the zero flag - mov and lea preserve flags):
	 */
	decl %ecx

	/*
	 * Move in blocks of 4x16 bytes, interleaving loads and stores
	 * in pairs to overlap memory latency:
	 */
	movq 0*8(%rsi),		%r11
	movq 1*8(%rsi),		%r8
	movq %r11,		0*8(%rdi)
	movq %r8,		1*8(%rdi)

	movq 2*8(%rsi),		%r9
	movq 3*8(%rsi),		%r10
	movq %r9,		2*8(%rdi)
	movq %r10,		3*8(%rdi)

	movq 4*8(%rsi),		%r11
	movq 5*8(%rsi),		%r8
	movq %r11,		4*8(%rdi)
	movq %r8,		5*8(%rdi)

	movq 6*8(%rsi),		%r9
	movq 7*8(%rsi),		%r10
	movq %r9,		6*8(%rdi)
	movq %r10,		7*8(%rdi)

	/* advance both pointers; lea does not disturb the zero flag */
	leaq 64(%rsi), %rsi
	leaq 64(%rdi), %rdi

	jnz  .Lloop_64		/* tests the ZF set by the decl above */

.Lhandle_tail:
	/* copy the remaining 0..63 bytes, 8 at a time first */
	movl %edx, %ecx
	andl  $63, %ecx		/* %ecx = count % 64 */
	shrl   $3, %ecx		/* %ecx = number of leftover quadwords */
	jz   .Lhandle_7

	.p2align 4
.Lloop_8:
	decl %ecx		/* same trick: ZF survives the mov/lea below */
	movq (%rsi),		%r8
	movq %r8,		(%rdi)
	leaq 8(%rdi),		%rdi
	leaq 8(%rsi),		%rsi
	jnz  .Lloop_8

.Lhandle_7:
	/* final 0..7 bytes, one at a time */
	movl %edx, %ecx
	andl $7, %ecx		/* %ecx = count % 8 */
	jz .Lend

	.p2align 4
.Lloop_1:
	movb (%rsi), %r8b
	movb %r8b, (%rdi)
	incq %rdi
	incq %rsi
	decl %ecx
	jnz .Lloop_1

.Lend:
	ret
	CFI_ENDPROC
ENDPROC(memcpy)
ENDPROC(__memcpy)
| Andi Kleen | 7bcd3f3 | 2006-02-03 21:51:02 +0100 | [diff] [blame] | 125 |  | 
	/*
	 * Some CPUs run faster using the string copy instructions.
	 * It is also a lot simpler. Use this when possible:
	 */

	/*
	 * Replacement code: a 2-byte short jump redirecting memcpy to
	 * memcpy_c.  The disp8 is relative to the END of the jump as it
	 * will sit at memcpy's entry, hence the (2f - 1b) correction for
	 * the jump instruction's own length.
	 */
	.section .altinstr_replacement, "ax"
1:	.byte 0xeb				/* jmp <disp8> */
	.byte (memcpy_c - memcpy) - (2f - 1b)	/* offset */
2:
	.previous

	/*
	 * One struct alt_instr record consumed by apply_alternatives().
	 * NOTE(review): field order below (instr, replacement, cpuid
	 * feature, instrlen, replacementlen) presumed to match
	 * <asm/alternative.h> - confirm against that header.
	 */
	.section .altinstructions, "a"
	.align 8
	.quad memcpy			/* site to patch */
	.quad 1b			/* replacement bytes above */
	.byte X86_FEATURE_REP_GOOD	/* apply only on REP_GOOD CPUs */

	/*
	 * Replace only beginning, memcpy is used to apply alternatives,
	 * so it is silly to overwrite itself with nops - reboot is the
	 * only outcome...
	 */
	.byte 2b - 1b			/* patch length at the site: just the jmp */
	.byte 2b - 1b			/* replacement length: same two bytes */
	.previous