/* arm-crypto-lib.git — bmw/f1_autogen_large.i
 * BMW message expansion f1, including the even/odd-trick.
 */
/* BEGIN of automatic generated code */

/* Message expansion f1 for BMW-512 (Blue Midnight Wish, large/64-bit variant).
 *
 * Computes q[16..31] from the 16-word message block m, the chaining value h
 * and the first expansion stage q[0..15] (assumed already filled by the
 * caller — presumably by f0; TODO confirm against caller).
 *
 * q: in/out — q[0..15] read, q[16..31] written.
 * m: 16 x uint64_t message block (read only).
 * h: 16 x uint64_t chaining value (read only).
 *
 * Round i (0..15) shares a common "tweak" term:
 *     (ROTL64(m[i],i+1) + ROTL64(m[(i+3)%16],(i+3)%16+1)
 *      - ROTL64(m[(i+10)%16],(i+10)%16+1)
 *      + (i+16)*0x0555555555555555) ^ h[(i+7)%16]
 * Rounds 0..1 use expand_1 (full S-box sum over the previous 16 q words);
 * rounds 2..15 use expand_2 with the even/odd-trick: the plain sum of the
 * seven same-parity q words is kept in a running accumulator that is updated
 * with one add and one subtract per round instead of being recomputed.
 */
static inline
void bmw_large_f1(uint64_t* q, const void* m, const void* h){
	/* const-correct word views of the message block and chaining value
	 * (the generated casts dropped the const qualifier) */
	const uint64_t *mv = (const uint64_t*)m;
	const uint64_t *hv = (const uint64_t*)h;
	uint64_t even, odd;
	unsigned i;

	/* expand_1 rounds (i = 0, 1): tweak term plus the full S-box sum
	 * S64_1,S64_2,S64_3,S64_0 cycling over q[i .. i+15] */
	for(i = 0; i < 2; ++i){
		uint64_t s = 0;
		unsigned j;
		for(j = 0; j < 16; j += 4){
			s += S64_1(q[i + j])     + S64_2(q[i + j + 1])
			   + S64_3(q[i + j + 2]) + S64_0(q[i + j + 3]);
		}
		/* indices i, i+3, i+10, i+7 need no reduction mod 16 for i < 2 */
		q[16 + i] = ((  ROTL64(mv[i],      i +  1)
		              + ROTL64(mv[i +  3], i +  4)
		              - ROTL64(mv[i + 10], i + 11)
		              + ((uint64_t)i + 16) * 0x0555555555555555ULL
		             ) ^ hv[i + 7])
		            + s;
	}

	/* initial 7-term windows of the even/odd-trick:
	 * even = q[2]+q[4]+...+q[14] (round 2), odd = q[3]+...+q[15] (round 3) */
	even = q[2] + q[4] + q[6] + q[ 8] + q[10] + q[12] + q[14];
	odd  = q[3] + q[5] + q[7] + q[ 9] + q[11] + q[13] + q[15];

	/* expand_2 rounds (i = 2 .. 15) */
	for(i = 2; i < 16; ++i){
		/* rounds of even index use `even`, odd index use `odd` */
		uint64_t *acc = (i & 1) ? &odd : &even;
		if(i >= 4){
			/* slide the 7-term window two positions:
			 * drop q[i-2], take in the freshly computed q[i+12] */
			*acc += q[i + 12] - q[i - 2];
		}
		q[16 + i] = ((  ROTL64(mv[i],             i + 1)
		              + ROTL64(mv[(i +  3) & 15], ((i +  3) & 15) + 1)
		              - ROTL64(mv[(i + 10) & 15], ((i + 10) & 15) + 1)
		              + ((uint64_t)i + 16) * 0x0555555555555555ULL
		             ) ^ hv[(i + 7) & 15])
		            + *acc
		            + R64_1(q[i +  1]) + R64_2(q[i +  3]) + R64_3(q[i +  5])
		            + R64_4(q[i +  7]) + R64_5(q[i +  9]) + R64_6(q[i + 11])
		            + R64_7(q[i + 13]) + S64_4(q[i + 14]) + S64_5(q[i + 15]);
	}
}

/* END of automatic generated code */