Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/mono/boringssl.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
path: root/crypto
diff options
context:
space:
mode:
authorDavid Benjamin <davidben@google.com>2016-09-12 22:25:05 +0300
committerCQ bot account: commit-bot@chromium.org <commit-bot@chromium.org>2016-09-14 02:50:05 +0300
commita97919791b9cb2bb87692e02b4c4d768f4834a00 (patch)
tree2269bf2c959bb459c48a8358464687bbb84c7e55 /crypto
parent6855f773296c8bddd31da6439956b84615b7a059 (diff)
Use C99 for size_t loops in gcm.c.
The stuff around i being reused for |len| rounded down to a whole number of blocks is a little weird.

Change-Id: I6f07a82fe84d077062e5b34ce75cc68250be8a4a
Reviewed-on: https://boringssl-review.googlesource.com/10802
Reviewed-by: Adam Langley <agl@google.com>
Commit-Queue: Adam Langley <agl@google.com>
CQ-Verified: CQ bot account: commit-bot@chromium.org <commit-bot@chromium.org>
Diffstat (limited to 'crypto')
-rw-r--r--crypto/modes/gcm.c51
1 file changed, 25 insertions(+), 26 deletions(-)
diff --git a/crypto/modes/gcm.c b/crypto/modes/gcm.c
index 04c32116..1b43cd4b 100644
--- a/crypto/modes/gcm.c
+++ b/crypto/modes/gcm.c
@@ -569,7 +569,6 @@ void CRYPTO_gcm128_setiv(GCM128_CONTEXT *ctx, const void *key,
}
int CRYPTO_gcm128_aad(GCM128_CONTEXT *ctx, const uint8_t *aad, size_t len) {
- size_t i;
unsigned int n;
uint64_t alen = ctx->len.u[0];
#ifdef GCM_FUNCREF_4BIT
@@ -605,16 +604,17 @@ int CRYPTO_gcm128_aad(GCM128_CONTEXT *ctx, const uint8_t *aad, size_t len) {
}
}
+ /* Process a whole number of blocks. */
#ifdef GHASH
- i = len & kSizeTWithoutLower4Bits;
- if (i != 0) {
- GHASH(ctx, aad, i);
- aad += i;
- len -= i;
+ size_t len_blocks = len & kSizeTWithoutLower4Bits;
+ if (len_blocks != 0) {
+ GHASH(ctx, aad, len_blocks);
+ aad += len_blocks;
+ len -= len_blocks;
}
#else
while (len >= 16) {
- for (i = 0; i < 16; ++i) {
+ for (size_t i = 0; i < 16; ++i) {
ctx->Xi.c[i] ^= aad[i];
}
GCM_MUL(ctx, Xi);
@@ -622,9 +622,11 @@ int CRYPTO_gcm128_aad(GCM128_CONTEXT *ctx, const uint8_t *aad, size_t len) {
len -= 16;
}
#endif
- if (len) {
+
+ /* Process the remainder. */
+ if (len != 0) {
n = (unsigned int)len;
- for (i = 0; i < len; ++i) {
+ for (size_t i = 0; i < len; ++i) {
ctx->Xi.c[i] ^= aad[i];
}
}
@@ -641,7 +643,6 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
char little;
} is_endian = {1};
unsigned int n, ctr;
- size_t i;
uint64_t mlen = ctx->len.u[1];
block128_f block = ctx->block;
#ifdef GCM_FUNCREF_4BIT
@@ -686,7 +687,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
}
}
if (STRICT_ALIGNMENT && ((size_t)in | (size_t)out) % sizeof(size_t) != 0) {
- for (i = 0; i < len; ++i) {
+ for (size_t i = 0; i < len; ++i) {
if (n == 0) {
(*block)(ctx->Yi.c, ctx->EKi.c, key);
++ctr;
@@ -721,7 +722,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
out_t[i] = in_t[i] ^ ctx->EKi.t[i];
}
out += 16;
@@ -731,9 +732,8 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
GHASH(ctx, out - GHASH_CHUNK, GHASH_CHUNK);
len -= GHASH_CHUNK;
}
- if ((i = (len & (size_t) - 16))) {
- size_t j = i;
-
+ size_t len_blocks = len & kSizeTWithoutLower4Bits;
+ if (len_blocks != 0) {
while (len >= 16) {
size_t *out_t = (size_t *)out;
const size_t *in_t = (const size_t *)in;
@@ -745,14 +745,14 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
out_t[i] = in_t[i] ^ ctx->EKi.t[i];
}
out += 16;
in += 16;
len -= 16;
}
- GHASH(ctx, out - j, j);
+ GHASH(ctx, out - len_blocks, len_blocks);
}
#else
while (len >= 16) {
@@ -766,7 +766,7 @@ int CRYPTO_gcm128_encrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
ctx->Xi.t[i] ^= out_t[i] = in_t[i] ^ ctx->EKi.t[i];
}
GCM_MUL(ctx, Xi);
@@ -801,7 +801,6 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
char little;
} is_endian = {1};
unsigned int n, ctr;
- size_t i;
uint64_t mlen = ctx->len.u[1];
block128_f block = ctx->block;
#ifdef GCM_FUNCREF_4BIT
@@ -848,7 +847,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
}
}
if (STRICT_ALIGNMENT && ((size_t)in | (size_t)out) % sizeof(size_t) != 0) {
- for (i = 0; i < len; ++i) {
+ for (size_t i = 0; i < len; ++i) {
uint8_t c;
if (n == 0) {
(*block)(ctx->Yi.c, ctx->EKi.c, key);
@@ -887,7 +886,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
out_t[i] = in_t[i] ^ ctx->EKi.t[i];
}
out += 16;
@@ -896,9 +895,9 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
}
len -= GHASH_CHUNK;
}
- i = len & kSizeTWithoutLower4Bits;
- if (i != 0) {
- GHASH(ctx, in, i);
+ size_t len_blocks = len & kSizeTWithoutLower4Bits;
+ if (len_blocks != 0) {
+ GHASH(ctx, in, len_blocks);
while (len >= 16) {
size_t *out_t = (size_t *)out;
const size_t *in_t = (const size_t *)in;
@@ -910,7 +909,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
out_t[i] = in_t[i] ^ ctx->EKi.t[i];
}
out += 16;
@@ -930,7 +929,7 @@ int CRYPTO_gcm128_decrypt(GCM128_CONTEXT *ctx, const void *key,
} else {
ctx->Yi.d[3] = ctr;
}
- for (i = 0; i < 16 / sizeof(size_t); ++i) {
+ for (size_t i = 0; i < 16 / sizeof(size_t); ++i) {
size_t c = in_t[i];
out_t[i] = c ^ ctx->EKi.t[i];
ctx->Xi.t[i] ^= c;