/*
 * Optimized version of the strlen_user() function
 *
 * Inputs:
 *	in0	address of buffer
 *
 * Outputs:
 *	ret0	0 in case of fault, strlen(buffer)+1 otherwise
 *
 * Copyright (C) 1998, 1999, 2001 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 *	Stephane Eranian <eranian@hpl.hp.com>
 *
 * 01/19/99 S.Eranian heavily enhanced version (see details below)
 * 09/24/99 S.Eranian added speculation recovery code
 */

#include <asm/asmmacro.h>

//
// int strlen_user(char *)
// ------------------------
// Returns:
//	- length of string + 1
//	- 0 in case an exception is raised
//
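// An illustrative caller-side sketch (not part of this file; it assumes a
// strlen_user() wrapper ends up in this routine, and -EFAULT is just an
// example error code):
//
//	int len = strlen_user(ubuf);	/* ubuf: user-space string pointer */
//	if (len == 0)
//		return -EFAULT;		/* the scan faulted */
//	/* otherwise len - 1 bytes precede the terminating '\0' */
//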
// This is an enhanced version of the basic strlen_user. It includes a
// combination of compute zero index (czx), parallel comparisons, speculative
// loads and loop unrolling using rotating registers.
//
// General ideas about the algorithm:
//	  The goal is to look at the string in chunks of 8 bytes, so we need
//	  to do a few extra checks at the beginning because the string may
//	  not be 8-byte aligned. In that case we load the 8-byte quantity
//	  which includes the start of the string and mask the unused leading
//	  bytes with 0xff so they cannot confuse czx (see the C sketch below).
//	  We use speculative loads and software pipelining to hide memory
//	  latency and do read-ahead safely. This way we defer any exception.
//
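//	  A rough C sketch of that scan (illustrative only -- it ignores the
//	  speculation, software pipelining and NaT handling the assembly
//	  below depends on; first_zero_byte() is a hypothetical stand-in for
//	  czx1.r, sketched after the Remarks section):
//
//		const char *str = ...;	/* string start */
//		uint64_t *p = (uint64_t *)((uintptr_t)str & ~7UL); /* align down */
//		uint64_t w = *p++ | ((1ULL << (((uintptr_t)str & 7) * 8)) - 1);
//					/* 0xff over the bytes before the string */
//		int idx;
//		while ((idx = first_zero_byte(w)) == 8)	/* 8 == no zero byte */
//			w = *p++;
//		/* length including the terminating '\0': */
//		size_t len_plus_1 = ((const char *)p - str) - (7 - idx);
//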
//	  Because we don't want the kernel to rely on particular settings of
//	  the DCR register, we provide recovery code in case speculation
//	  fails. The recovery code "redoes" the work using only normal loads.
//	  If we still get a fault, we return an error (ret0=0). Otherwise we
//	  return strlen+1 as usual.
//	  Speculation may fail, for instance, because the DCR.dm bit is set.
//	  In that case TLB misses are deferred, i.e., a NaT bit is set if the
//	  translation is not present. A normal load, on the other hand,
//	  causes the translation to be inserted if the mapping exists.
//
//	  Note that we execute the recovery code only when we need to use the
//	  data that has been speculatively loaded: we don't execute recovery
//	  code on pure read-ahead data.
//
// Remarks:
//	- the cmp r0,r0 is used as a fast way to initialize a predicate
//	  register to 1. This is required to make sure that we get the
//	  parallel compare correct.
//
//	- we don't use the epilogue counter to exit the loop but we need to
//	  set it to zero beforehand.
//
//	- after the loop we must test for NaT values because neither the
//	  czx nor the cmp instruction raises a NaT consumption fault. We must
//	  be careful not to look for a NaT we don't care about: for instance,
//	  we don't need to look at a NaT in val2 if the zero byte was in val1.
//
//	- Clearly performance tuning is required.
//
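// For reference, a hedged C model of what czx1.r computes on the 8-byte
// chunks used here (this helper is hypothetical and exists only for the
// sketch above; byte index 0 is the least significant, i.e. lowest-addressed,
// byte):
//
//	static inline int first_zero_byte(uint64_t w)
//	{
//		int i;
//
//		for (i = 0; i < 8; i++)
//			if (((w >> (i * 8)) & 0xff) == 0)
//				return i;	/* index of first zero byte */
//		return 8;			/* no zero byte in this word */
//	}
//
// This is why the main loop below keeps iterating exactly while val1 == 8
// and val2 == 8, i.e. while no zero byte has been seen yet.
//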

#define saved_pfs	r11
#define	tmp		r10
#define base		r16
#define orig		r17
#define saved_pr	r18
#define src		r19
#define mask		r20
#define val		r21
#define val1		r22
#define val2		r23

GLOBAL_ENTRY(__strlen_user)
	.prologue
	.save ar.pfs, saved_pfs
	alloc saved_pfs=ar.pfs,11,0,0,8

	.rotr v[2], w[2]	// declares our 4 rotating-register aliases

	extr.u tmp=in0,0,3	// tmp=least significant 3 bits
	mov orig=in0		// keep track of initial byte address
	dep src=0,in0,0,3	// src=8-byte-aligned in0 address
	.save pr, saved_pr
	mov saved_pr=pr		// preserve predicates (rotation)
	;;

	.body

	ld8.s v[1]=[src],8	// load the initial 8 bytes (must speculate)
	shl tmp=tmp,3		// multiply by 8 bits/byte
	mov mask=-1		// our mask
	;;
	ld8.s w[1]=[src],8	// load next 8 bytes in 2nd pipeline
	cmp.eq p6,p0=r0,r0	// sets p6 to 1 (required by the parallel cmp.and)
	sub tmp=64,tmp		// how many bits to shift our mask to the right
	;;
	shr.u	mask=mask,tmp	// 0xff over the bytes before the string, 0 over the valid part of v[1]
	mov ar.ec=r0		// clear epilogue counter (saved in ar.pfs)
	;;
	add base=-16,src	// keep track of aligned base
	chk.s v[1], .recover	// if already NaT, then directly skip to recover
	or v[1]=v[1],mask	// now we have a safe initial byte pattern
	;;
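	//
	// Main scan loop: v[] holds the even-numbered 8-byte chunks and w[]
	// the odd-numbered ones. Each iteration examines 16 bytes (v[1] and
	// w[1]) while speculatively loading the next 16 (v[0] and w[0]) for
	// the following iteration; br.wtop rotates the registers.
	//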
1:
	ld8.s v[0]=[src],8	// speculatively load next
	czx1.r val1=v[1]	// search 0 byte from right
	czx1.r val2=w[1]	// search 0 byte from right in following 8 bytes
	;;
	ld8.s w[0]=[src],8	// speculatively load next to next
	cmp.eq.and p6,p0=8,val1	// p6 = p6 and val1==8
	cmp.eq.and p6,p0=8,val2	// p6 = p6 and val2==8
(p6)	br.wtop.dptk.few 1b	// loop until p6 == 0
	;;
	//
	// We must try the recovery code iff
	// val1_is_nat || (val1==8 && val2_is_nat)
	//
	// XXX Fixme
	//	- there must be a better way of doing the test
	//
	cmp.eq  p8,p9=8,val1	// p8 = no zero in val1 (it is in val2), p9 = zero in val1
	tnat.nz p6,p7=val1	// test NaT on val1
(p6)	br.cond.spnt .recover	// jump to recovery if val1 is NaT
	;;
	//
	// if we get here, p7 is true, i.e., initialized for the parallel compare
	//
	cmp.eq.and  p7,p0=8,val1	// val1==8?
	tnat.nz.and p7,p0=val2	// test NaT on val2
(p7)	br.cond.spnt .recover	// jump to recovery if val2 is NaT
	;;
(p8)	mov val1=val2		// val2 contains the value
(p8)	adds src=-16,src	// correct position when 3 ahead
(p9)	adds src=-24,src	// correct position when 4 ahead
	;;
	sub ret0=src,orig	// distance from origin
	sub tmp=7,val1		// 7=8-1 because this strlen returns strlen+1
	mov pr=saved_pr,0xffffffffffff0000
	;;
	sub ret0=ret0,tmp	// length=now - back -1
	mov ar.pfs=saved_pfs	// because of ar.ec, restore no matter what
	br.ret.sptk.many rp	// end of normal execution
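
//
// Why the arithmetic above yields strlen+1: after the (p8)/(p9) correction,
// src points exactly 8 bytes past the 8-byte chunk that contains the
// terminating zero, and val1 is the byte index (0-7) of that zero within
// the chunk, so:
//
//	ret0 = (src - orig) - (7 - val1)
//	     = (chunk_addr + 8 - orig) - 7 + val1
//	     = (chunk_addr + val1 - orig) + 1
//	     = (address of the zero byte - orig) + 1
//	     = strlen(orig) + 1
//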

//
// Outlined recovery code when speculation failed
//
// This time we don't use speculation and rely on the normal exception
// mechanism. That's why the loop is not as good as the one above: read-ahead
// is not possible.
//
// XXX Fixme
//	- today we restart from the beginning of the string instead
//	  of trying to continue where we left off.
//
.recover:
	EX(.Lexit1, ld8 val=[base],8)	// load the initial bytes
	;;
	or val=val,mask			// remask first bytes
	cmp.eq p0,p6=r0,r0		// nullify first ld8 in loop
	;;
	//
	// ar.ec is still zero here
	//
2:
	EX(.Lexit1, (p6) ld8 val=[base],8)
	;;
	czx1.r val1=val		// search 0 byte from right
	;;
	cmp.eq p6,p0=8,val1	// val1==8 ?
(p6)	br.wtop.dptk.few 2b	// loop until p6 == 0
	;;
	sub ret0=base,orig	// distance from origin
	sub tmp=7,val1		// 7=8-1 because this strlen returns strlen+1
	mov pr=saved_pr,0xffffffffffff0000
	;;
	sub ret0=ret0,tmp	// length=now - back -1
	mov ar.pfs=saved_pfs	// because of ar.ec, restore no matter what
	br.ret.sptk.many rp	// end of successful recovery code

//
// We failed even on the normal load (called from exception handler)
//
.Lexit1:
	mov ret0=0
	mov pr=saved_pr,0xffffffffffff0000
	mov ar.pfs=saved_pfs	// because of ar.ec, restore no matter what
	br.ret.sptk.many rp
END(__strlen_user)