/* Select big-endian data order for this hash implementation. */
#define DATA_ORDER_IS_BIG_ENDIAN

/*
 * Engage compiler specific rotate intrinsic function if available.
 * ROTATE(a,n) is a 32-bit left-rotate of `a` by `n` bits.
 */
#undef ROTATE
#ifndef PEDANTIC
# if defined(_MSC_VER) || defined(__ICC)
#  define ROTATE(a,n)   _lrotl(a,n)
# elif defined(__MWERKS__)
#  if defined(__POWERPC__)
#   define ROTATE(a,n)  __rlwinm(a,n,0,31)
#  elif defined(__MC68K__)
    /* Motorola specific tweak. <appro@fy.chalmers.se> */
#   define ROTATE(a,n)  ( n<24 ? __rol(a,n) : __ror(a,32-n) )
#  else
#   define ROTATE(a,n)  __rol(a,n)
#  endif
# elif defined(__GNUC__) && __GNUC__>=2 && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
  /*
   * Some GNU C inline assembler templates. Note that these are
   * rotates by *constant* number of bits! But that's exactly
   * what we need here...
   * <appro@fy.chalmers.se>
   */
#  if defined(__i386) || defined(__i386__) || defined(__x86_64) || defined(__x86_64__)
#   define ROTATE(a,n)  ({ register unsigned int ret;   \
                                asm (                   \
                                "roll %1,%0"            \
                                : "=r"(ret)             \
                                : "I"(n), "0"(a)        \
                                : "cc");                \
                           ret;                         \
                        })
#  elif defined(_ARCH_PPC) || defined(_ARCH_PPC64) || \
        defined(__powerpc) || defined(__ppc__) || defined(__powerpc64__)
#   define ROTATE(a,n)  ({ register unsigned int ret;   \
                                asm (                   \
                                "rlwinm %0,%1,%2,0,31"  \
                                : "=r"(ret)             \
                                : "r"(a), "I"(n));      \
                           ret;                         \
                        })
#  elif defined(__s390x__)
#   define ROTATE(a,n) ({ register unsigned int ret;    \
                                asm ("rll %0,%1,%2"     \
                                : "=r"(ret)             \
                                : "r"(a), "I"(n));      \
                          ret;                          \
                        })
#  endif
# endif
#endif                          /* PEDANTIC */

#ifndef ROTATE
/*
 * Portable fallback rotate.  NOTE(review): assumes 0 < n < 32 (a
 * right shift by 32-n with n==0 would be undefined behaviour); the
 * MD32-family hash rounds only rotate by constants in that range —
 * confirm at call sites.  The &0xffffffff mask keeps the result
 * correct when `unsigned long`/wider operands are passed.
 */
# define ROTATE(a,n)    (((a)<<(n))|(((a)&0xffffffff)>>(32-(n))))
#endif
/*
 * HOST_c2l(c,l): read a 32-bit word `l` from byte stream `c`
 * (unsigned char pointer) and advance `c` by 4.
 * HOST_l2c(l,c): write 32-bit word `l` to byte stream `c` and
 * advance `c` by 4.  The byte order is chosen by the
 * DATA_ORDER_IS_BIG_ENDIAN / DATA_ORDER_IS_LITTLE_ENDIAN macro.
 */
#if defined(DATA_ORDER_IS_BIG_ENDIAN)

#ifndef PEDANTIC
# if defined(__GNUC__) && __GNUC__>=2 && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
#  if ((defined(__i386) || defined(__i386__)) && !defined(I386_ONLY)) || \
      (defined(__x86_64) || defined(__x86_64__))
#   if !defined(B_ENDIAN)
    /*
     * This gives ~30-40% performance improvement in SHA-256 compiled
     * with gcc [on P4]. Well, first macro to be frank. We can pull
     * this trick on x86* platforms only, because these CPUs can fetch
     * unaligned data without raising an exception.
     */
#    define HOST_c2l(c,l)  ({ unsigned int r=*((const unsigned int *)(c)); \
                                   asm ("bswapl %0":"=r"(r):"0"(r));    \
                                   (c)+=4; (l)=r; })
#    define HOST_l2c(l,c)  ({ unsigned int r=(l);                       \
                                   asm ("bswapl %0":"=r"(r):"0"(r));    \
                                   *((unsigned int *)(c))=r; (c)+=4; r; })
#   endif
#  endif
# endif
#endif
/* s390 is natively big-endian: direct word access needs no swap. */
#if defined(__s390__) || defined(__s390x__)
# define HOST_c2l(c,l) ((l)=*((const unsigned int *)(c)), (c)+=4, (l))
# define HOST_l2c(l,c) (*((unsigned int *)(c))=(l), (c)+=4, (l))
#endif

#ifndef HOST_c2l
/* Portable big-endian read: most significant byte first. */
# define HOST_c2l(c,l)  (l =(((unsigned long)(*((c)++)))<<24),          \
                         l|=(((unsigned long)(*((c)++)))<<16),          \
                         l|=(((unsigned long)(*((c)++)))<< 8),          \
                         l|=(((unsigned long)(*((c)++)))    ),          \
                         l)
#endif
#ifndef HOST_l2c
/* Portable big-endian write: most significant byte first. */
# define HOST_l2c(l,c)  (*((c)++)=(unsigned char)(((l)>>24)&0xff),      \
                         *((c)++)=(unsigned char)(((l)>>16)&0xff),      \
                         *((c)++)=(unsigned char)(((l)>> 8)&0xff),      \
                         *((c)++)=(unsigned char)(((l)    )&0xff),      \
                         l)
#endif

#elif defined(DATA_ORDER_IS_LITTLE_ENDIAN)

#ifndef PEDANTIC
# if defined(__GNUC__) && __GNUC__>=2 && !defined(OPENSSL_NO_ASM) && !defined(OPENSSL_NO_INLINE_ASM)
#  if defined(__s390x__)
    /* lrv/strv load/store a word with byte reversal on s390x. */
#   define HOST_c2l(c,l)  ({ asm ("lrv %0,0(%1)"                        \
                                  :"=r"(l) : "r"(c));                   \
                              (c)+=4; (l); })
#   define HOST_l2c(l,c)  ({ asm ("strv %0,0(%1)"                       \
                                  : : "r"(l),"r"(c) : "memory");        \
                              (c)+=4; (l); })
#  endif
# endif
#endif
#if defined(__i386) || defined(__i386__) || defined(__x86_64) || defined(__x86_64__)
# ifndef B_ENDIAN
   /* See comment in DATA_ORDER_IS_BIG_ENDIAN section. */
#  define HOST_c2l(c,l) ((l)=*((const unsigned int *)(c)), (c)+=4, l)
#  define HOST_l2c(l,c) (*((unsigned int *)(c))=(l), (c)+=4, l)
# endif
#endif

#ifndef HOST_c2l
/* Portable little-endian read: least significant byte first. */
# define HOST_c2l(c,l)  (l =(((unsigned long)(*((c)++)))    ),          \
                         l|=(((unsigned long)(*((c)++)))<< 8),          \
                         l|=(((unsigned long)(*((c)++)))<<16),          \
                         l|=(((unsigned long)(*((c)++)))<<24),          \
                         l)
#endif
#ifndef HOST_l2c
/* Portable little-endian write: least significant byte first. */
# define HOST_l2c(l,c)  (*((c)++)=(unsigned char)(((l)    )&0xff),      \
                         *((c)++)=(unsigned char)(((l)>> 8)&0xff),      \
                         *((c)++)=(unsigned char)(((l)>>16)&0xff),      \
                         *((c)++)=(unsigned char)(((l)>>24)&0xff),      \
                         l)
#endif

#endif