#ifndef __amd64_MEMSTR_H__
#define __amd64_MEMSTR_H__

/* __native, __address, __u8 and __u16 are assumed to be declared in the
 * architecture type definitions; the explicit include keeps this header
 * self-contained. */
#include <arch/types.h>
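
/** Copy a block of memory.
 *
 * Copy cnt bytes from src to dst. Whole quadwords are copied with
 * rep movsq first; the remaining cnt % 8 bytes are copied with
 * rep movsb. The regions must not overlap.
 *
 * @param dst Destination address.
 * @param src Source address.
 * @param cnt Number of bytes to copy.
 *
 * @return dst
 */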
static inline void * memcpy(void * dst, const void * src, size_t cnt)
{
	__native d0, d1, d2;

	__asm__ __volatile__(
		/* copy cnt / 8 quadwords, then the cnt % 8 trailing bytes */
		"rep movsq\n\t"
		"movq %4, %%rcx\n\t"
		"andq $7, %%rcx\n\t"
		"jz 1f\n\t"
		"rep movsb\n\t"
		"1:\n"
		: "=&c" (d0), "=&D" (d1), "=&S" (d2)
		: "0" ((__native)(cnt / 8)), "g" ((__native)cnt), "1" ((__native) dst), "2" ((__native) src)
		: "memory");

	return dst;
}
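
/** Compare two memory regions.
 *
 * Compare cnt bytes at src and dst using repe cmpsb.
 *
 * @param src First memory region.
 * @param dst Second memory region.
 * @param cnt Number of bytes to compare.
 *
 * @return Zero if the regions are identical; otherwise a non-zero
 *         value equal to the number of bytes left after the first
 *         difference, plus one.
 */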
static inline int memcmp(const void * src, const void * dst, size_t cnt)
{
	__native d0, d1, d2;
	__native ret;

	__asm__ (
		"repe cmpsb\n\t"
		"je 1f\n\t"
		/* mismatch: return the remaining byte count plus one (non-zero) */
		"movq %3, %0\n\t"
		"addq $1, %0\n\t"
		"1:\n"
		: "=a" (ret), "=&S" (d0), "=&D" (d1), "=&c" (d2)
		: "0" (0), "1" (src), "2" (dst), "3" ((__native)cnt)
		: "memory");

	return ret;
}
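
/** Fill memory with words.
 *
 * Fill cnt 16-bit words starting at dst with the value x,
 * using rep stosw.
 *
 * @param dst Destination address.
 * @param cnt Number of words.
 * @param x   Value to fill with.
 */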
static inline void memsetw(__address dst, size_t cnt, __u16 x)
{
	__native d0, d1;

	__asm__ __volatile__ (
		"rep stosw\n\t"
		: "=&D" (d0), "=&c" (d1), "=a" (x)
		: "0" (dst), "1" ((__native)cnt), "2" (x)
		: "memory"
	);
}
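
/** Fill memory with bytes.
 *
 * Fill cnt bytes starting at dst with the value x, using rep stosb.
 *
 * @param dst Destination address.
 * @param cnt Number of bytes.
 * @param x   Value to fill with.
 */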
static inline void memsetb(__address dst, size_t cnt, __u8 x)
{
	__native d0, d1;

	__asm__ __volatile__ (
		"rep stosb\n\t"
		: "=&D" (d0), "=&c" (d1), "=a" (x)
		: "0" (dst), "1" ((__native)cnt), "2" (x)
		: "memory"
	);
}

#endif /* __amd64_MEMSTR_H__ */