Fix IA-64 memchr read-ahead.

The latest stratcliff extension exposed a bug in the IA-64 memchr which
uses non-speculative loads to prefetch data.  Change the code to use
speculative loads with appropriate fixup.  Fixes BZ 10162.
This commit is contained in:
Ulrich Drepper 2009-05-21 18:47:59 -07:00
parent d2812fc6d2
commit fa64b7f76b
2 changed files with 21 additions and 1 deletion

View file

@@ -1,3 +1,8 @@
2009-05-21 H.J. Lu <hongjiu.lu@intel.com>
[BZ #10162]
* sysdeps/ia64/memchr.S: Use speculative load.
2009-05-21 H.J. Lu <hongjiu.lu@intel.com>
* sysdeps/unix/sysv/linux/ia64/____longjmp_chk.S: New file.

View file

@@ -96,7 +96,8 @@ ENTRY(__memchr)
mov pr.rot = 1 << 16 ;;
.l2:
(p[0]) mov addr[0] = ret0
(p[0]) ld8 value[0] = [ret0], 8
(p[0]) ld8.s value[0] = [ret0], 8 // speculative load
(p[MEMLAT]) chk.s value[MEMLAT], .recovery // check and recovery
(p[MEMLAT]) xor aux[0] = value[MEMLAT], chrx8
(p[MEMLAT+1]) czx1.r poschr[0] = aux[1]
(p[MEMLAT+2]) cmp.ne p7, p0 = 8, poschr[1]
@@ -124,6 +125,20 @@ ENTRY(__memchr)
mov ar.lc = saved_lc
br.ret.sptk.many b0
.recovery:
adds ret0 = -((MEMLAT + 1) * 8), ret0;;
(p[MEMLAT+1]) add ret0 = -8, ret0;;
(p[MEMLAT+2]) add ret0 = -8, ret0;;
.l4:
mov addr[MEMLAT+2] = ret0
ld8 tmp = [ret0];; // load the first unchecked 8byte
xor aux[1] = tmp, chrx8;;
czx1.r poschr[1] = aux[1];;
cmp.ne p7, p0 = 8, poschr[1]
(p7) br.cond.spnt .foundit;;
adds ret0 = 8, ret0 // load the next unchecked 8byte
br.sptk .l4;;
END(__memchr)
weak_alias (__memchr, memchr)