1 /* BEGIN of automatic generated code */
4 void bmw_large_f1(uint64_t* q, const void* m, const void* h){
/*
 * NOTE(review): fragment of an automatically generated 64-bit message
 * expansion function.  The S64_*/R64_* macro names, the additive round
 * constants (0x5555555555555550 + i * 0x0555555555555555), and the XOR
 * with ((uint64_t*)h)[..] look like the Blue Midnight Wish (BMW-512)
 * f1 expansion -- TODO confirm against the generator / BMW reference.
 *
 * This chunk is incomplete as extracted: each line carries a stray
 * leading line number, the `q[NN] =` assignment lines and (for the
 * first eight rounds) the closing `) ^ h[..]` lines are missing, and
 * the closing brace is not visible.  Code is left byte-identical;
 * comments only.
 *
 * NOTE(review): m and h are read through (uint64_t*) casts -- possible
 * strict-aliasing / alignment hazard unless callers always pass 8-byte
 * aligned uint64_t data; verify at the call sites.
 */
/* Rounds 1-2 ("expand1"-style): rotate three message words, add the
   round constant, then sum S64_1/S64_2/S64_3/S64_0 over a sliding
   16-word window of q[] (q[0..15], then q[1..16]).  Presumably these
   feed q[16] and q[17] -- assignment lines not visible here. */
8 (( ROTL64(((uint64_t*)m)[ 0], 1)
9 + ROTL64(((uint64_t*)m)[ 3], 4)
10 - ROTL64(((uint64_t*)m)[10], 11)
11 + 0x5555555555555550ULL
14 + S64_1(q[ 0]) + S64_2(q[ 1]) + S64_3(q[ 2]) + S64_0(q[ 3])
15 + S64_1(q[ 4]) + S64_2(q[ 5]) + S64_3(q[ 6]) + S64_0(q[ 7])
16 + S64_1(q[ 8]) + S64_2(q[ 9]) + S64_3(q[10]) + S64_0(q[11])
17 + S64_1(q[12]) + S64_2(q[13]) + S64_3(q[14]) + S64_0(q[15]);
20 (( ROTL64(((uint64_t*)m)[ 1], 2)
21 + ROTL64(((uint64_t*)m)[ 4], 5)
22 - ROTL64(((uint64_t*)m)[11], 12)
23 + 0x5aaaaaaaaaaaaaa5ULL
26 + S64_1(q[ 1]) + S64_2(q[ 2]) + S64_3(q[ 3]) + S64_0(q[ 4])
27 + S64_1(q[ 5]) + S64_2(q[ 6]) + S64_3(q[ 7]) + S64_0(q[ 8])
28 + S64_1(q[ 9]) + S64_2(q[10]) + S64_3(q[11]) + S64_0(q[12])
29 + S64_1(q[13]) + S64_2(q[14]) + S64_3(q[15]) + S64_0(q[16]);
/* Round 3 ("expand2"-style): seeds the rolling sum `even` with the
   even-indexed words q[2],q[4],...,q[14]; odd-indexed words go through
   the R64_1..R64_7 rotations, and the two newest q words through
   S64_4/S64_5.  `even`/`odd` are presumably declared in lines not
   visible in this fragment. */
32 (( ROTL64(((uint64_t*)m)[ 2], 3)
33 + ROTL64(((uint64_t*)m)[ 5], 6)
34 - ROTL64(((uint64_t*)m)[12], 13)
35 + 0x5ffffffffffffffaULL
38 + ( even = q[ 2] + q[ 4] + q[ 6]
39 + q[ 8] + q[10] + q[12] + q[14] )
40 + R64_1(q[ 3]) + R64_2(q[ 5]) + R64_3(q[ 7])
41 + R64_4(q[ 9]) + R64_5(q[11]) + R64_6(q[13])
42 + R64_7(q[15]) + S64_4(q[16]) + S64_5(q[17]);
/* Round 4: same shape, seeding the odd-indexed rolling sum `odd`. */
45 (( ROTL64(((uint64_t*)m)[ 3], 4)
46 + ROTL64(((uint64_t*)m)[ 6], 7)
47 - ROTL64(((uint64_t*)m)[13], 14)
48 + 0x655555555555554fULL
51 + ( odd = q[ 3] + q[ 5] + q[ 7]
52 + q[ 9] + q[11] + q[13] + q[15] )
53 + R64_1(q[ 4]) + R64_2(q[ 6]) + R64_3(q[ 8])
54 + R64_4(q[10]) + R64_5(q[12]) + R64_6(q[14])
55 + R64_7(q[16]) + S64_4(q[17]) + S64_5(q[18]);
/* Remaining rounds alternate even/odd and update the rolling sum
   incrementally as a sliding window: `even += q[new] - q[old]`
   (resp. `odd += ...`), with the R64 and S64_4/S64_5 windows shifted
   up by one each round.  Message-word indices and rotation amounts
   advance mod 16 per round. */
58 (( ROTL64(((uint64_t*)m)[ 4], 5)
59 + ROTL64(((uint64_t*)m)[ 7], 8)
60 - ROTL64(((uint64_t*)m)[14], 15)
61 + 0x6aaaaaaaaaaaaaa4ULL
64 + ( even += q[16] - q[ 2] )
65 + R64_1(q[ 5]) + R64_2(q[ 7]) + R64_3(q[ 9])
66 + R64_4(q[11]) + R64_5(q[13]) + R64_6(q[15])
67 + R64_7(q[17]) + S64_4(q[18]) + S64_5(q[19]);
70 (( ROTL64(((uint64_t*)m)[ 5], 6)
71 + ROTL64(((uint64_t*)m)[ 8], 9)
72 - ROTL64(((uint64_t*)m)[15], 16)
73 + 0x6ffffffffffffff9ULL
76 + ( odd += q[17] - q[ 3] )
77 + R64_1(q[ 6]) + R64_2(q[ 8]) + R64_3(q[10])
78 + R64_4(q[12]) + R64_5(q[14]) + R64_6(q[16])
79 + R64_7(q[18]) + S64_4(q[19]) + S64_5(q[20]);
82 (( ROTL64(((uint64_t*)m)[ 6], 7)
83 + ROTL64(((uint64_t*)m)[ 9], 10)
84 - ROTL64(((uint64_t*)m)[ 0], 1)
85 + 0x755555555555554eULL
88 + ( even += q[18] - q[ 4] )
89 + R64_1(q[ 7]) + R64_2(q[ 9]) + R64_3(q[11])
90 + R64_4(q[13]) + R64_5(q[15]) + R64_6(q[17])
91 + R64_7(q[19]) + S64_4(q[20]) + S64_5(q[21]);
94 (( ROTL64(((uint64_t*)m)[ 7], 8)
95 + ROTL64(((uint64_t*)m)[10], 11)
96 - ROTL64(((uint64_t*)m)[ 1], 2)
97 + 0x7aaaaaaaaaaaaaa3ULL
100 + ( odd += q[19] - q[ 5] )
101 + R64_1(q[ 8]) + R64_2(q[10]) + R64_3(q[12])
102 + R64_4(q[14]) + R64_5(q[16]) + R64_6(q[18])
103 + R64_7(q[20]) + S64_4(q[21]) + S64_5(q[22]);
/* From here on, the line that XORs the message-word mix with a word of
   the chaining value h[] is visible: h[15], then h[0], h[1], ... in
   sequence.  The earlier rounds almost certainly have matching
   `) ^ h[..]` lines (h[7]..h[14]) that were lost in extraction --
   TODO confirm against the full generated file. */
106 (( ROTL64(((uint64_t*)m)[ 8], 9)
107 + ROTL64(((uint64_t*)m)[11], 12)
108 - ROTL64(((uint64_t*)m)[ 2], 3)
109 + 0x7ffffffffffffff8ULL
110 )^ ((uint64_t*)h)[15]
112 + ( even += q[20] - q[ 6] )
113 + R64_1(q[ 9]) + R64_2(q[11]) + R64_3(q[13])
114 + R64_4(q[15]) + R64_5(q[17]) + R64_6(q[19])
115 + R64_7(q[21]) + S64_4(q[22]) + S64_5(q[23]);
118 (( ROTL64(((uint64_t*)m)[ 9], 10)
119 + ROTL64(((uint64_t*)m)[12], 13)
120 - ROTL64(((uint64_t*)m)[ 3], 4)
121 + 0x855555555555554dULL
122 )^ ((uint64_t*)h)[ 0]
124 + ( odd += q[21] - q[ 7] )
125 + R64_1(q[10]) + R64_2(q[12]) + R64_3(q[14])
126 + R64_4(q[16]) + R64_5(q[18]) + R64_6(q[20])
127 + R64_7(q[22]) + S64_4(q[23]) + S64_5(q[24]);
130 (( ROTL64(((uint64_t*)m)[10], 11)
131 + ROTL64(((uint64_t*)m)[13], 14)
132 - ROTL64(((uint64_t*)m)[ 4], 5)
133 + 0x8aaaaaaaaaaaaaa2ULL
134 )^ ((uint64_t*)h)[ 1]
136 + ( even += q[22] - q[ 8] )
137 + R64_1(q[11]) + R64_2(q[13]) + R64_3(q[15])
138 + R64_4(q[17]) + R64_5(q[19]) + R64_6(q[21])
139 + R64_7(q[23]) + S64_4(q[24]) + S64_5(q[25]);
142 (( ROTL64(((uint64_t*)m)[11], 12)
143 + ROTL64(((uint64_t*)m)[14], 15)
144 - ROTL64(((uint64_t*)m)[ 5], 6)
145 + 0x8ffffffffffffff7ULL
146 )^ ((uint64_t*)h)[ 2]
148 + ( odd += q[23] - q[ 9] )
149 + R64_1(q[12]) + R64_2(q[14]) + R64_3(q[16])
150 + R64_4(q[18]) + R64_5(q[20]) + R64_6(q[22])
151 + R64_7(q[24]) + S64_4(q[25]) + S64_5(q[26]);
154 (( ROTL64(((uint64_t*)m)[12], 13)
155 + ROTL64(((uint64_t*)m)[15], 16)
156 - ROTL64(((uint64_t*)m)[ 6], 7)
157 + 0x955555555555554cULL
158 )^ ((uint64_t*)h)[ 3]
160 + ( even += q[24] - q[10] )
161 + R64_1(q[13]) + R64_2(q[15]) + R64_3(q[17])
162 + R64_4(q[19]) + R64_5(q[21]) + R64_6(q[23])
163 + R64_7(q[25]) + S64_4(q[26]) + S64_5(q[27]);
166 (( ROTL64(((uint64_t*)m)[13], 14)
167 + ROTL64(((uint64_t*)m)[ 0], 1)
168 - ROTL64(((uint64_t*)m)[ 7], 8)
169 + 0x9aaaaaaaaaaaaaa1ULL
170 )^ ((uint64_t*)h)[ 4]
172 + ( odd += q[25] - q[11] )
173 + R64_1(q[14]) + R64_2(q[16]) + R64_3(q[18])
174 + R64_4(q[20]) + R64_5(q[22]) + R64_6(q[24])
175 + R64_7(q[26]) + S64_4(q[27]) + S64_5(q[28]);
178 (( ROTL64(((uint64_t*)m)[14], 15)
179 + ROTL64(((uint64_t*)m)[ 1], 2)
180 - ROTL64(((uint64_t*)m)[ 8], 9)
181 + 0x9ffffffffffffff6ULL
182 )^ ((uint64_t*)h)[ 5]
184 + ( even += q[26] - q[12] )
185 + R64_1(q[15]) + R64_2(q[17]) + R64_3(q[19])
186 + R64_4(q[21]) + R64_5(q[23]) + R64_6(q[25])
187 + R64_7(q[27]) + S64_4(q[28]) + S64_5(q[29]);
/* Final visible round; the expansion presumably continues/ends with
   the last q word and the closing brace beyond this fragment. */
190 (( ROTL64(((uint64_t*)m)[15], 16)
191 + ROTL64(((uint64_t*)m)[ 2], 3)
192 - ROTL64(((uint64_t*)m)[ 9], 10)
193 + 0xa55555555555554bULL
194 )^ ((uint64_t*)h)[ 6]
196 + ( odd += q[27] - q[13] )
197 + R64_1(q[16]) + R64_2(q[18]) + R64_3(q[20])
198 + R64_4(q[22]) + R64_5(q[24]) + R64_6(q[26])
199 + R64_7(q[28]) + S64_4(q[29]) + S64_5(q[30]);
202 /* END of automatic generated code */