1 | /* |
2 | * Copyright (c) 2012 Apple Computer, Inc. All rights reserved. |
3 | * |
4 | * @APPLE_OSREFERENCE_LICENSE_HEADER_START@ |
5 | * |
6 | * This file contains Original Code and/or Modifications of Original Code |
7 | * as defined in and that are subject to the Apple Public Source License |
8 | * Version 2.0 (the 'License'). You may not use this file except in |
9 | * compliance with the License. The rights granted to you under the License |
10 | * may not be used to create, or enable the creation or redistribution of, |
11 | * unlawful or unlicensed copies of an Apple operating system, or to |
12 | * circumvent, violate, or enable the circumvention or violation of, any |
13 | * terms of an Apple operating system software license agreement. |
14 | * |
15 | * Please obtain a copy of the License at |
16 | * http://www.opensource.apple.com/apsl/ and read it before using this file. |
17 | * |
18 | * The Original Code and all software distributed under the License are |
19 | * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER |
20 | * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES, |
21 | * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY, |
22 | * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT. |
23 | * Please see the License for the specific language governing rights and |
24 | * limitations under the License. |
25 | * |
26 | * @APPLE_OSREFERENCE_LICENSE_HEADER_END@ |
27 | */ |
28 | |
29 | #include <libkern/crypto/crypto_internal.h> |
30 | #include <libkern/crypto/aes.h> |
31 | #include <corecrypto/ccmode.h> |
32 | #include <corecrypto/ccaes.h> |
33 | #include <kern/debug.h> |
34 | |
35 | aes_rval |
36 | aes_encrypt_key(const unsigned char *key, int key_len, aes_encrypt_ctx cx[1]) |
37 | { |
38 | const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_encrypt; |
39 | |
40 | /* Make sure the context size for the mode fits in the one we have */ |
41 | if (cbc->size > sizeof(aes_encrypt_ctx)) { |
42 | panic("%s: inconsistent size for AES encrypt context" , __FUNCTION__); |
43 | } |
44 | |
	return cccbc_init(cbc, cx[0].ctx, key_len, key);
46 | } |
47 | |
48 | aes_rval |
49 | aes_encrypt_cbc(const unsigned char *in_blk, const unsigned char *in_iv, unsigned int num_blk, |
50 | unsigned char *out_blk, aes_encrypt_ctx cx[1]) |
51 | { |
52 | const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_encrypt; |
53 | cccbc_iv_decl(cbc->block_size, ctx_iv); |
54 | |
	int rc = cccbc_set_iv(cbc, ctx_iv, in_iv);
56 | if (rc) { |
57 | return rc; |
58 | } |
59 | |
	return cccbc_update(cbc, cx[0].ctx, ctx_iv, num_blk, in_blk, out_blk); // Actually CBC encrypt.
61 | } |
62 | |
63 | #if defined (__i386__) || defined (__x86_64__) || defined (__arm64__) |
/* This does one block of ECB, using the CBC implementation; this allows the same context to be used for both CBC and ECB. */
65 | aes_rval |
66 | aes_encrypt(const unsigned char *in_blk, unsigned char *out_blk, aes_encrypt_ctx cx[1]) |
67 | { |
	return aes_encrypt_cbc(in_blk, NULL, 1, out_blk, cx);
69 | } |
70 | #endif |
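
/*
 * Illustrative sketch only (not part of the original file): one possible caller
 * of the one-block ECB path above, assuming a 128-bit key and 16-byte blocks.
 * The function name and buffer contents are hypothetical; the block is guarded
 * out so it is never compiled.
 */
#if 0
static void
aes_ecb_encrypt_example(void)
{
	aes_encrypt_ctx ectx[1];
	const unsigned char key[16] = { 0 };   /* example 128-bit key */
	const unsigned char pt[16] = { 0 };    /* one plaintext block */
	unsigned char ct[16];

	/* Expand the key into the shared CBC/ECB context. */
	aes_encrypt_key128(key, ectx);

	/* Encrypt a single block; internally this runs CBC with a NULL (zero) IV. */
	aes_encrypt(pt, ct, ectx);
}
#endif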
71 | |
72 | aes_rval |
73 | aes_decrypt_key(const unsigned char *key, int key_len, aes_decrypt_ctx cx[1]) |
74 | { |
75 | const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_decrypt; |
76 | |
77 | /* Make sure the context size for the mode fits in the one we have */ |
78 | if (cbc->size > sizeof(aes_decrypt_ctx)) { |
79 | panic("%s: inconsistent size for AES decrypt context" , __FUNCTION__); |
80 | } |
81 | |
	return cccbc_init(cbc, cx[0].ctx, key_len, key);
83 | } |
84 | |
85 | aes_rval |
86 | aes_decrypt_cbc(const unsigned char *in_blk, const unsigned char *in_iv, unsigned int num_blk, |
87 | unsigned char *out_blk, aes_decrypt_ctx cx[1]) |
88 | { |
89 | const struct ccmode_cbc *cbc = g_crypto_funcs->ccaes_cbc_decrypt; |
90 | cccbc_iv_decl(cbc->block_size, ctx_iv); |
91 | |
	int rc = cccbc_set_iv(cbc, ctx_iv, in_iv);
93 | if (rc) { |
94 | return rc; |
95 | } |
96 | |
	return cccbc_update(cbc, cx[0].ctx, ctx_iv, num_blk, in_blk, out_blk); // Actually CBC decrypt.
98 | } |
99 | |
100 | #if defined (__i386__) || defined (__x86_64__) || defined (__arm64__) |
/* This does one block of ECB, using the CBC implementation; this allows the same context to be used for both CBC and ECB. */
102 | aes_rval |
103 | aes_decrypt(const unsigned char *in_blk, unsigned char *out_blk, aes_decrypt_ctx cx[1]) |
104 | { |
	return aes_decrypt_cbc(in_blk, NULL, 1, out_blk, cx);
106 | } |
107 | #endif |
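
/*
 * Illustrative sketch only (not part of the original file): the decrypt side of
 * the same one-block ECB trick, completing the round trip from the encrypt
 * sketch above. Names are hypothetical; guarded out so it is never compiled.
 */
#if 0
static void
aes_ecb_decrypt_example(const unsigned char key[16], const unsigned char ct[16])
{
	aes_decrypt_ctx dctx[1];
	unsigned char pt[16];

	/* Expand the same 128-bit key into a decrypt context. */
	aes_decrypt_key128(key, dctx);

	/* Recover the plaintext block; again CBC with a NULL (zero) IV underneath. */
	aes_decrypt(ct, pt, dctx);
}
#endif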
108 | |
109 | aes_rval |
110 | aes_encrypt_key128(const unsigned char *key, aes_encrypt_ctx cx[1]) |
111 | { |
	return aes_encrypt_key(key, 16, cx);
113 | } |
114 | |
115 | aes_rval |
116 | aes_decrypt_key128(const unsigned char *key, aes_decrypt_ctx cx[1]) |
117 | { |
	return aes_decrypt_key(key, 16, cx);
119 | } |
120 | |
121 | |
122 | aes_rval |
123 | aes_encrypt_key256(const unsigned char *key, aes_encrypt_ctx cx[1]) |
124 | { |
	return aes_encrypt_key(key, 32, cx);
126 | } |
127 | |
128 | aes_rval |
129 | aes_decrypt_key256(const unsigned char *key, aes_decrypt_ctx cx[1]) |
130 | { |
	return aes_decrypt_key(key, 32, cx);
132 | } |
133 | |
134 | aes_rval |
135 | aes_encrypt_key_gcm(const unsigned char *key, int key_len, ccgcm_ctx *ctx) |
136 | { |
137 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
138 | if (!gcm) { |
139 | return aes_error; |
140 | } |
141 | |
	return ccgcm_init(gcm, ctx, key_len, key);
143 | } |
144 | |
145 | aes_rval |
146 | aes_encrypt_key_with_iv_gcm(const unsigned char *key, int key_len, const unsigned char *in_iv, ccgcm_ctx *ctx) |
147 | { |
148 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
149 | if (!gcm) { |
150 | return aes_error; |
151 | } |
152 | |
153 | return g_crypto_funcs->ccgcm_init_with_iv_fn(gcm, ctx, key_len, key, in_iv); |
154 | } |
155 | |
156 | aes_rval |
157 | aes_encrypt_set_iv_gcm(const unsigned char *in_iv, unsigned int len, ccgcm_ctx *ctx) |
158 | { |
159 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
160 | if (!gcm) { |
161 | return aes_error; |
162 | } |
163 | |
	return ccgcm_set_iv(gcm, ctx, len, in_iv);
165 | } |
166 | |
167 | aes_rval |
168 | aes_encrypt_reset_gcm(ccgcm_ctx *ctx) |
169 | { |
170 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
171 | if (!gcm) { |
172 | return aes_error; |
173 | } |
174 | |
	return ccgcm_reset(gcm, ctx);
176 | } |
177 | |
178 | aes_rval |
179 | aes_encrypt_inc_iv_gcm(unsigned char *out_iv, ccgcm_ctx *ctx) |
180 | { |
181 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
182 | if (!gcm) { |
183 | return aes_error; |
184 | } |
185 | |
186 | return g_crypto_funcs->ccgcm_inc_iv_fn(gcm, ctx, out_iv); |
187 | } |
188 | |
189 | aes_rval |
190 | aes_encrypt_aad_gcm(const unsigned char *aad, unsigned int aad_bytes, ccgcm_ctx *ctx) |
191 | { |
192 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
193 | if (!gcm) { |
194 | return aes_error; |
195 | } |
196 | |
	return ccgcm_aad(gcm, ctx, aad_bytes, aad);
198 | } |
199 | |
200 | aes_rval |
201 | aes_encrypt_gcm(const unsigned char *in_blk, unsigned int num_bytes, |
202 | unsigned char *out_blk, ccgcm_ctx *ctx) |
203 | { |
204 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
205 | if (!gcm) { |
206 | return aes_error; |
207 | } |
208 | |
	return ccgcm_update(gcm, ctx, num_bytes, in_blk, out_blk); // Actually GCM encrypt.
210 | } |
211 | |
212 | aes_rval |
213 | aes_encrypt_finalize_gcm(unsigned char *tag, size_t tag_bytes, ccgcm_ctx *ctx) |
214 | { |
215 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
216 | if (!gcm) { |
217 | return aes_error; |
218 | } |
219 | |
	int rc = ccgcm_finalize(gcm, ctx, tag_bytes, tag);
221 | if (rc) { |
222 | return rc; |
223 | } |
224 | |
	return ccgcm_reset(gcm, ctx);
226 | } |
227 | |
228 | aes_rval |
229 | aes_decrypt_key_gcm(const unsigned char *key, int key_len, ccgcm_ctx *ctx) |
230 | { |
231 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
232 | if (!gcm) { |
233 | return aes_error; |
234 | } |
235 | |
	return ccgcm_init(gcm, ctx, key_len, key);
237 | } |
238 | |
239 | aes_rval |
240 | aes_decrypt_key_with_iv_gcm(const unsigned char *key, int key_len, const unsigned char *in_iv, ccgcm_ctx *ctx) |
241 | { |
242 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
243 | if (!gcm) { |
244 | return aes_error; |
245 | } |
246 | |
247 | return g_crypto_funcs->ccgcm_init_with_iv_fn(gcm, ctx, key_len, key, in_iv); |
248 | } |
249 | |
250 | aes_rval |
251 | aes_decrypt_set_iv_gcm(const unsigned char *in_iv, size_t len, ccgcm_ctx *ctx) |
252 | { |
253 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
254 | if (!gcm) { |
255 | return aes_error; |
256 | } |
257 | |
	int rc = ccgcm_reset(gcm, ctx);
259 | if (rc) { |
260 | return rc; |
261 | } |
262 | |
	return ccgcm_set_iv(gcm, ctx, len, in_iv);
264 | } |
265 | |
266 | aes_rval |
267 | aes_decrypt_reset_gcm(ccgcm_ctx *ctx) |
268 | { |
269 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
270 | if (!gcm) { |
271 | return aes_error; |
272 | } |
273 | |
	return ccgcm_reset(gcm, ctx);
275 | } |
276 | |
277 | aes_rval |
278 | aes_decrypt_inc_iv_gcm(unsigned char *out_iv, ccgcm_ctx *ctx) |
279 | { |
280 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
281 | if (!gcm) { |
282 | return aes_error; |
283 | } |
284 | |
285 | return g_crypto_funcs->ccgcm_inc_iv_fn(gcm, ctx, out_iv); |
286 | } |
287 | |
288 | aes_rval |
289 | aes_decrypt_aad_gcm(const unsigned char *aad, unsigned int aad_bytes, ccgcm_ctx *ctx) |
290 | { |
291 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
292 | if (!gcm) { |
293 | return aes_error; |
294 | } |
295 | |
	return ccgcm_aad(gcm, ctx, aad_bytes, aad);
297 | } |
298 | |
299 | aes_rval |
300 | aes_decrypt_gcm(const unsigned char *in_blk, unsigned int num_bytes, |
301 | unsigned char *out_blk, ccgcm_ctx *ctx) |
302 | { |
303 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
304 | if (!gcm) { |
305 | return aes_error; |
306 | } |
307 | |
	return ccgcm_update(gcm, ctx, num_bytes, in_blk, out_blk); // Actually GCM decrypt.
309 | } |
310 | |
311 | aes_rval |
312 | aes_decrypt_finalize_gcm(unsigned char *tag, size_t tag_bytes, ccgcm_ctx *ctx) |
313 | { |
314 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
315 | if (!gcm) { |
316 | return aes_error; |
317 | } |
318 | |
	int rc = ccgcm_finalize(gcm, ctx, tag_bytes, tag);
320 | if (rc) { |
321 | return rc; |
322 | } |
323 | |
	return ccgcm_reset(gcm, ctx);
325 | } |
326 | |
327 | size_t |
328 | aes_encrypt_get_ctx_size_gcm(void) |
329 | { |
330 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_encrypt; |
331 | if (!gcm) { |
332 | return 0; |
333 | } |
334 | return cc_ctx_sizeof(ccgcm_ctx, gcm->size); |
335 | } |
336 | |
337 | size_t |
338 | aes_decrypt_get_ctx_size_gcm(void) |
339 | { |
340 | const struct ccmode_gcm *gcm = g_crypto_funcs->ccaes_gcm_decrypt; |
341 | if (!gcm) { |
342 | return 0; |
343 | } |
344 | return cc_ctx_sizeof(ccgcm_ctx, gcm->size); |
345 | } |
346 | |