[arm-crypto-lib.git] / bmw / f1_autogen.i
/* BEGIN of automatic generated code */

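/*
 * bmw_small_f1() is the message-expansion step of the BMW small variant
 * (BMW224/BMW256).  q[0..15] have already been filled by the f0 step; this
 * function derives q[16..31] from them, from the 16-word message block m and
 * from the 16-word chaining state h.  The first two new words use expand_1,
 * the remaining fourteen use expand_2, and the additive constant of word
 * q[16+i] is (16+i)*0x05555555.
 */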
static inline
void bmw_small_f1(uint32_t* q, const void* m, const void* h){
  uint32_t even, odd;
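  /* even/odd hold running sums of the un-rotated q-words with even resp. odd
     indices that every expand_2 step reuses; after being seeded in the first
     two expand_2 steps they are updated incrementally (add the newest word,
     subtract the oldest) instead of being recomputed from scratch. */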
/* expand_1( 0) */
        q[16] =
                ((  ROTL32(((uint32_t*)m)[ 0], 1)
                  + ROTL32(((uint32_t*)m)[ 3], 4)
                  - ROTL32(((uint32_t*)m)[10], 11)
                  + 0x55555550UL
                 )^ ((uint32_t*)h)[ 7]
                )
                + S32_1(q[ 0]) + S32_2(q[ 1]) + S32_3(q[ 2]) + S32_0(q[ 3])
                + S32_1(q[ 4]) + S32_2(q[ 5]) + S32_3(q[ 6]) + S32_0(q[ 7])
                + S32_1(q[ 8]) + S32_2(q[ 9]) + S32_3(q[10]) + S32_0(q[11])
                + S32_1(q[12]) + S32_2(q[13]) + S32_3(q[14]) + S32_0(q[15]);
/* expand_1( 1) */
        q[17] =
                ((  ROTL32(((uint32_t*)m)[ 1], 2)
                  + ROTL32(((uint32_t*)m)[ 4], 5)
                  - ROTL32(((uint32_t*)m)[11], 12)
                  + 0x5aaaaaa5UL
                 )^ ((uint32_t*)h)[ 8]
                )
                + S32_1(q[ 1]) + S32_2(q[ 2]) + S32_3(q[ 3]) + S32_0(q[ 4])
                + S32_1(q[ 5]) + S32_2(q[ 6]) + S32_3(q[ 7]) + S32_0(q[ 8])
                + S32_1(q[ 9]) + S32_2(q[10]) + S32_3(q[11]) + S32_0(q[12])
                + S32_1(q[13]) + S32_2(q[14]) + S32_3(q[15]) + S32_0(q[16]);
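/* expand_2 steps: q[18] seeds `even` with q[2]+q[4]+...+q[14] and q[19] seeds
   `odd` with q[3]+q[5]+...+q[15]; every later step only slides the window,
   e.g. even += q[16] - q[2] for q[20]. */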
/* expand_2( 2) */
        q[18] =
                ((  ROTL32(((uint32_t*)m)[ 2], 3)
                  + ROTL32(((uint32_t*)m)[ 5], 6)
                  - ROTL32(((uint32_t*)m)[12], 13)
                  + 0x5ffffffaUL
                 )^ ((uint32_t*)h)[ 9]
                )
                 + ( even =  q[ 2] + q[ 4] + q[ 6]
                           + q[ 8] + q[10] + q[12] + q[14] )
                 + R32_1(q[ 3]) + R32_2(q[ 5]) + R32_3(q[ 7])
                 + R32_4(q[ 9]) + R32_5(q[11]) + R32_6(q[13])
                 + R32_7(q[15]) + S32_4(q[16]) + S32_5(q[17]);
/* expand_2( 3) */
        q[19] =
                ((  ROTL32(((uint32_t*)m)[ 3], 4)
                  + ROTL32(((uint32_t*)m)[ 6], 7)
                  - ROTL32(((uint32_t*)m)[13], 14)
                  + 0x6555554fUL
                 )^ ((uint32_t*)h)[10]
                )
                 + ( odd  =  q[ 3] + q[ 5] + q[ 7]
                           + q[ 9] + q[11] + q[13] + q[15] )
                 + R32_1(q[ 4]) + R32_2(q[ 6]) + R32_3(q[ 8])
                 + R32_4(q[10]) + R32_5(q[12]) + R32_6(q[14])
                 + R32_7(q[16]) + S32_4(q[17]) + S32_5(q[18]);
/* expand_2( 4) */
        q[20] =
                ((  ROTL32(((uint32_t*)m)[ 4], 5)
                  + ROTL32(((uint32_t*)m)[ 7], 8)
                  - ROTL32(((uint32_t*)m)[14], 15)
                  + 0x6aaaaaa4UL
                 )^ ((uint32_t*)h)[11]
                )
                 + ( even  +=  q[16] - q[ 2] )
                 + R32_1(q[ 5]) + R32_2(q[ 7]) + R32_3(q[ 9])
                 + R32_4(q[11]) + R32_5(q[13]) + R32_6(q[15])
                 + R32_7(q[17]) + S32_4(q[18]) + S32_5(q[19]);
/* expand_2( 5) */
        q[21] =
                ((  ROTL32(((uint32_t*)m)[ 5], 6)
                  + ROTL32(((uint32_t*)m)[ 8], 9)
                  - ROTL32(((uint32_t*)m)[15], 16)
                  + 0x6ffffff9UL
                 )^ ((uint32_t*)h)[12]
                )
                 + ( odd   +=  q[17] - q[ 3] )
                 + R32_1(q[ 6]) + R32_2(q[ 8]) + R32_3(q[10])
                 + R32_4(q[12]) + R32_5(q[14]) + R32_6(q[16])
                 + R32_7(q[18]) + S32_4(q[19]) + S32_5(q[20]);
/* expand_2( 6) */
        q[22] =
                ((  ROTL32(((uint32_t*)m)[ 6], 7)
                  + ROTL32(((uint32_t*)m)[ 9], 10)
                  - ROTL32(((uint32_t*)m)[ 0], 1)
                  + 0x7555554eUL
                 )^ ((uint32_t*)h)[13]
                )
                 + ( even  +=  q[18] - q[ 4] )
                 + R32_1(q[ 7]) + R32_2(q[ 9]) + R32_3(q[11])
                 + R32_4(q[13]) + R32_5(q[15]) + R32_6(q[17])
                 + R32_7(q[19]) + S32_4(q[20]) + S32_5(q[21]);
/* expand_2( 7) */
        q[23] =
                ((  ROTL32(((uint32_t*)m)[ 7], 8)
                  + ROTL32(((uint32_t*)m)[10], 11)
                  - ROTL32(((uint32_t*)m)[ 1], 2)
                  + 0x7aaaaaa3UL
                 )^ ((uint32_t*)h)[14]
                )
                 + ( odd   +=  q[19] - q[ 5] )
                 + R32_1(q[ 8]) + R32_2(q[10]) + R32_3(q[12])
                 + R32_4(q[14]) + R32_5(q[16]) + R32_6(q[18])
                 + R32_7(q[20]) + S32_4(q[21]) + S32_5(q[22]);
/* expand_2( 8) */
        q[24] =
                ((  ROTL32(((uint32_t*)m)[ 8], 9)
                  + ROTL32(((uint32_t*)m)[11], 12)
                  - ROTL32(((uint32_t*)m)[ 2], 3)
                  + 0x7ffffff8UL
                 )^ ((uint32_t*)h)[15]
                )
                 + ( even  +=  q[20] - q[ 6] )
                 + R32_1(q[ 9]) + R32_2(q[11]) + R32_3(q[13])
                 + R32_4(q[15]) + R32_5(q[17]) + R32_6(q[19])
                 + R32_7(q[21]) + S32_4(q[22]) + S32_5(q[23]);
/* expand_2( 9) */
        q[25] =
                ((  ROTL32(((uint32_t*)m)[ 9], 10)
                  + ROTL32(((uint32_t*)m)[12], 13)
                  - ROTL32(((uint32_t*)m)[ 3], 4)
                  + 0x8555554dUL
                 )^ ((uint32_t*)h)[ 0]
                )
                 + ( odd   +=  q[21] - q[ 7] )
                 + R32_1(q[10]) + R32_2(q[12]) + R32_3(q[14])
                 + R32_4(q[16]) + R32_5(q[18]) + R32_6(q[20])
                 + R32_7(q[22]) + S32_4(q[23]) + S32_5(q[24]);
/* expand_2(10) */
        q[26] =
                ((  ROTL32(((uint32_t*)m)[10], 11)
                  + ROTL32(((uint32_t*)m)[13], 14)
                  - ROTL32(((uint32_t*)m)[ 4], 5)
                  + 0x8aaaaaa2UL
                 )^ ((uint32_t*)h)[ 1]
                )
                 + ( even  +=  q[22] - q[ 8] )
                 + R32_1(q[11]) + R32_2(q[13]) + R32_3(q[15])
                 + R32_4(q[17]) + R32_5(q[19]) + R32_6(q[21])
                 + R32_7(q[23]) + S32_4(q[24]) + S32_5(q[25]);
/* expand_2(11) */
        q[27] =
                ((  ROTL32(((uint32_t*)m)[11], 12)
                  + ROTL32(((uint32_t*)m)[14], 15)
                  - ROTL32(((uint32_t*)m)[ 5], 6)
                  + 0x8ffffff7UL
                 )^ ((uint32_t*)h)[ 2]
                )
                 + ( odd   +=  q[23] - q[ 9] )
                 + R32_1(q[12]) + R32_2(q[14]) + R32_3(q[16])
                 + R32_4(q[18]) + R32_5(q[20]) + R32_6(q[22])
                 + R32_7(q[24]) + S32_4(q[25]) + S32_5(q[26]);
/* expand_2(12) */
        q[28] =
                ((  ROTL32(((uint32_t*)m)[12], 13)
                  + ROTL32(((uint32_t*)m)[15], 16)
                  - ROTL32(((uint32_t*)m)[ 6], 7)
                  + 0x9555554cUL
                 )^ ((uint32_t*)h)[ 3]
                )
                 + ( even  +=  q[24] - q[10] )
                 + R32_1(q[13]) + R32_2(q[15]) + R32_3(q[17])
                 + R32_4(q[19]) + R32_5(q[21]) + R32_6(q[23])
                 + R32_7(q[25]) + S32_4(q[26]) + S32_5(q[27]);
/* expand_2(13) */
        q[29] =
                ((  ROTL32(((uint32_t*)m)[13], 14)
                  + ROTL32(((uint32_t*)m)[ 0], 1)
                  - ROTL32(((uint32_t*)m)[ 7], 8)
                  + 0x9aaaaaa1UL
                 )^ ((uint32_t*)h)[ 4]
                )
                 + ( odd   +=  q[25] - q[11] )
                 + R32_1(q[14]) + R32_2(q[16]) + R32_3(q[18])
                 + R32_4(q[20]) + R32_5(q[22]) + R32_6(q[24])
                 + R32_7(q[26]) + S32_4(q[27]) + S32_5(q[28]);
/* expand_2(14) */
        q[30] =
                ((  ROTL32(((uint32_t*)m)[14], 15)
                  + ROTL32(((uint32_t*)m)[ 1], 2)
                  - ROTL32(((uint32_t*)m)[ 8], 9)
                  + 0x9ffffff6UL
                 )^ ((uint32_t*)h)[ 5]
                )
                 + ( even  +=  q[26] - q[12] )
                 + R32_1(q[15]) + R32_2(q[17]) + R32_3(q[19])
                 + R32_4(q[21]) + R32_5(q[23]) + R32_6(q[25])
                 + R32_7(q[27]) + S32_4(q[28]) + S32_5(q[29]);
/* expand_2(15) */
        q[31] =
                ((  ROTL32(((uint32_t*)m)[15], 16)
                  + ROTL32(((uint32_t*)m)[ 2], 3)
                  - ROTL32(((uint32_t*)m)[ 9], 10)
                  + 0xa555554bUL
                 )^ ((uint32_t*)h)[ 6]
                )
                 + ( odd   +=  q[27] - q[13] )
                 + R32_1(q[16]) + R32_2(q[18]) + R32_3(q[20])
                 + R32_4(q[22]) + R32_5(q[24]) + R32_6(q[26])
                 + R32_7(q[28]) + S32_4(q[29]) + S32_5(q[30]);
}

/* END of automatic generated code */
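/*
 * Reference sketch (not part of the generated code above): the helper macros
 * that this file expects from the translation unit which #includes it.  The
 * definitions below follow the Blue Midnight Wish specification for the
 * 32-bit word size; the macro bodies and the SHL32/SHR32 names are written
 * out here as an assumption for readability only, the library's own
 * definitions remain authoritative.  The block is disabled with #if 0 so it
 * can never clash with those definitions.
 */
#if 0
#include <stdint.h>

#define SHL32(a, n)  ((uint32_t)((a) << (n)))
#define SHR32(a, n)  ((uint32_t)((a) >> (n)))
#define ROTL32(a, n) (((uint32_t)(a) << (n)) | ((uint32_t)(a) >> (32 - (n))))

/* s0 .. s5 of the BMW specification */
#define S32_0(x) (SHR32((x), 1) ^ SHL32((x), 3) ^ ROTL32((x),  4) ^ ROTL32((x), 19))
#define S32_1(x) (SHR32((x), 1) ^ SHL32((x), 2) ^ ROTL32((x),  8) ^ ROTL32((x), 23))
#define S32_2(x) (SHR32((x), 2) ^ SHL32((x), 1) ^ ROTL32((x), 12) ^ ROTL32((x), 25))
#define S32_3(x) (SHR32((x), 2) ^ SHL32((x), 2) ^ ROTL32((x), 15) ^ ROTL32((x), 29))
#define S32_4(x) (SHR32((x), 1) ^ (x))
#define S32_5(x) (SHR32((x), 2) ^ (x))

/* r1 .. r7 of the BMW specification: fixed left rotations */
#define R32_1(x) ROTL32((x),  3)
#define R32_2(x) ROTL32((x),  7)
#define R32_3(x) ROTL32((x), 13)
#define R32_4(x) ROTL32((x), 16)
#define R32_5(x) ROTL32((x), 19)
#define R32_6(x) ROTL32((x), 23)
#define R32_7(x) ROTL32((x), 27)
#endif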