static int magma_cipher_init_ctr_acpkm_omac(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc);
/* Handles block of data in CBC mode */
+/* Handles block of data in ECB mode */
+static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t inl);
static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl);
static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out,
* Note: that you cannot template 0 value.
*/
/* Portable replacement for the GNU "?:" (Elvis) extension: field is named
 * twice, which is safe here because (st)->field is a plain, side-effect-free
 * struct member access. */
#define TPL(st,field) ( \
- ((st)->field) ?: TPL_VAL(st,field) \
+ ((st)->field) ? ((st)->field) : TPL_VAL(st,field) \
)
#define TPL_VAL(st,field) ( \
static GOST_cipher magma_template_cipher = {
.block_size = 8,
.key_len = 32,
- .iv_len = 8,
- .flags = EVP_CIPH_CUSTOM_IV |
+ .flags =
EVP_CIPH_RAND_KEY |
EVP_CIPH_ALWAYS_CALL_INIT,
.cleanup = gost_cipher_cleanup,
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING,
.init = magma_cipher_init,
};
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING,
.init = magma_cipher_init,
};
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING |
EVP_CIPH_CUSTOM_COPY |
EVP_CIPH_FLAG_CUSTOM_CIPHER |
.ctrl = magma_cipher_ctl_acpkm_omac,
};
+/* Magma in ECB mode. Fields not set here (block_size, key_len, flags such
+ * as EVP_CIPH_RAND_KEY, cleanup, ...) are presumably filled in from
+ * magma_template_cipher via the TPL()/template mechanism — no iv_len is
+ * set, consistent with ECB needing no IV. */
+GOST_cipher magma_ecb_cipher = {
+ .nid = NID_magma_ecb,
+ .template = &magma_template_cipher,
+ .flags = EVP_CIPH_ECB_MODE,
+ .init = magma_cipher_init,
+ .do_cipher = magma_cipher_do_ecb,
+};
+
/* Magma in CBC mode: now declares its own 8-byte IV and takes over IV
 * handling from the EVP layer (EVP_CIPH_CUSTOM_IV).
 * NOTE(review): this uses gost_template_cipher while the new ECB entry uses
 * magma_template_cipher — verify the different template is intentional. */
GOST_cipher magma_cbc_cipher = {
.nid = NID_magma_cbc,
.template = &gost_template_cipher,
- .flags = EVP_CIPH_CBC_MODE,
+ .iv_len = 8,
+ .flags = EVP_CIPH_CBC_MODE |
+ EVP_CIPH_CUSTOM_IV,
.init = magma_cipher_init,
.do_cipher = magma_cipher_do_cbc,
};
return 1;
}
+/* MAGMA encryption in ECB mode */
+static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t inl)
+{
+ struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+ /* Assumes inl is a multiple of the 8-byte Magma block: the EVP layer
+  * enforces this for a block_size-8 cipher without
+  * EVP_CIPH_FLAG_CUSTOM_CIPHER. A partial tail would make the size_t
+  * "inl -= 8" wrap and overrun the buffers — TODO confirm the flag set
+  * inherited from the template keeps that guarantee. */
+ if (EVP_CIPHER_CTX_encrypting(ctx)) {
+ while (inl > 0) {
+ magmacrypt(&(c->cctx), in, out);
+ out += 8;
+ in += 8;
+ inl -= 8;
+ }
+ } else {
+ while (inl > 0) {
+ magmadecrypt(&(c->cctx), in, out);
+ out += 8;
+ in += 8;
+ inl -= 8;
+ }
+ }
+ /* 1 == success, per the EVP do_cipher convention. */
+ return 1;
+}
+
/* MAGMA encryption in CBC mode */
static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl)
while (inl > 0) {
for (i = 0; i < 8; i++) {
- b[7 - i] = iv[i] ^ in_ptr[i];
- }
- gostcrypt(&(c->cctx), b, d);
-
- for (i = 0; i < 8; i++) {
- out_ptr[7 - i] = d[i];
+ out_ptr[i] = iv[i] ^ in_ptr[i];
}
+ magmacrypt(&(c->cctx), out_ptr, out_ptr);
memcpy(iv, out_ptr, 8);
out_ptr += 8;
in_ptr += 8;
}
} else {
while (inl > 0) {
- for (i = 0; i < 8; i++) {
- d[7 - i] = in_ptr[i];
- }
- gostdecrypt(&(c->cctx), d, b);
+ magmadecrypt(&(c->cctx), in_ptr, b);
memcpy(d, in_ptr, 8);
for (i = 0; i < 8; i++) {
- out_ptr[i] = iv[i] ^ b[7 - i];
+ out_ptr[i] = iv[i] ^ b[i];
}
memcpy(iv, d, 8);
out_ptr += 8;
inc_counter(counter, 8);
}
+#define MAGMA_BLOCK_SIZE 8
+#define MAGMA_BLOCK_MASK (MAGMA_BLOCK_SIZE - 1)
+/* ACPKM key meshing (CTR-ACPKM): once *num — the count of keystream bytes
+ * consumed since the last re-key — reaches the configured section size
+ * (ctx->key_meshing, in bytes), derive the next section key. Afterwards
+ * *num is reduced to its low 3 bits, keeping only the intra-block offset.
+ * No-op when key meshing is disabled (ctx->key_meshing == 0). */
+static inline void apply_acpkm_magma(struct ossl_gost_cipher_ctx *
+ ctx, unsigned int *num)
+{
+ if (!ctx->key_meshing || (*num < (unsigned int)ctx->key_meshing))
+ return;
+ acpkm_magma_key_meshing(&ctx->cctx);
+ *num &= MAGMA_BLOCK_MASK;
+}
+
/* MAGMA encryption in CTR mode */
static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl)
{
const unsigned char *in_ptr = in;
unsigned char *out_ptr = out;
- size_t i = 0;
size_t j;
struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
- unsigned char b[8];
+ /* num: running keystream byte counter; its low 3 bits are the offset
+  * into the cached keystream block in buf[]. Persisted across calls via
+  * EVP_CIPHER_CTX_num so streaming (partial-block) input works. */
+ unsigned int num = EVP_CIPHER_CTX_num(ctx);
+ size_t blocks, i, lasted = inl;
/* Process partial blocks */
- if (EVP_CIPHER_CTX_num(ctx)) {
- for (j = EVP_CIPHER_CTX_num(ctx), i = 0; j < 8 && i < inl;
- j++, i++, in_ptr++, out_ptr++) {
- *out_ptr = buf[7 - j] ^ (*in_ptr);
- }
- if (j == 8) {
- EVP_CIPHER_CTX_set_num(ctx, 0);
- } else {
- EVP_CIPHER_CTX_set_num(ctx, j);
- return inl;
- }
+ /* Drain leftover keystream bytes from the previous call first. */
+ while ((num & MAGMA_BLOCK_MASK) && lasted) {
+ *out_ptr++ = *in_ptr++ ^ buf[num & MAGMA_BLOCK_MASK];
+ --lasted;
+ num++;
}
+ blocks = lasted / MAGMA_BLOCK_SIZE;
/* Process full blocks */
- for (; i + 8 <= inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (i = 0; i < blocks; i++) {
+ /* Re-key per ACPKM before generating each keystream block. */
+ apply_acpkm_magma(c, &num);
+ magmacrypt(&(c->cctx), iv, buf);
for (j = 0; j < 8; j++) {
- b[7 - j] = iv[j];
- }
- gostcrypt(&(c->cctx), b, buf);
- for (j = 0; j < 8; j++) {
- out_ptr[j] = buf[7 - j] ^ in_ptr[j];
+ out_ptr[j] = buf[j] ^ in_ptr[j];
}
ctr64_inc(iv);
- c->count += 8;
- if (c->key_meshing && (c->count % c->key_meshing == 0))
- acpkm_magma_key_meshing(&(c->cctx));
+ c->count += MAGMA_BLOCK_SIZE;
+ in_ptr += MAGMA_BLOCK_SIZE;
+ out_ptr += MAGMA_BLOCK_SIZE;
+ num += MAGMA_BLOCK_SIZE;
+ lasted -= MAGMA_BLOCK_SIZE;
}
/* Process the rest of plaintext */
- if (i < inl) {
- for (j = 0; j < 8; j++) {
- b[7 - j] = iv[j];
- }
- gostcrypt(&(c->cctx), b, buf);
-
- for (j = 0; i < inl; j++, i++) {
- out_ptr[j] = buf[7 - j] ^ in_ptr[j];
- }
+ if (lasted > 0) {
+ apply_acpkm_magma(c, &num);
+ /* Cache a full keystream block in buf; unused bytes are consumed
+  * by the partial-block loop on the next call. */
+ magmacrypt(&(c->cctx), iv, buf);
+ for (i = 0; i < lasted; i++)
+ out_ptr[i] = buf[i] ^ in_ptr[i];
ctr64_inc(iv);
+ /* NOTE(review): a full block (8) is counted even though only
+  * "lasted" keystream bytes were emitted — confirm this matches the
+  * intended ACPKM section accounting. */
c->count += 8;
- if (c->key_meshing && (c->count % c->key_meshing == 0))
- acpkm_magma_key_meshing(&(c->cctx));
-
- EVP_CIPHER_CTX_set_num(ctx, j);
- } else {
- EVP_CIPHER_CTX_set_num(ctx, 0);
+ num += lasted;
}
+ EVP_CIPHER_CTX_set_num(ctx, num);
return inl;
}
}
}
- for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) {
/*
* block cipher current iv
*/
}
}
- for (; i + 8 < inl; i += 8, in_ptr += 8, out_ptr += 8) {
+ for (; (inl - i) >= 8; i += 8, in_ptr += 8, out_ptr += 8) {
/*
* block cipher current iv
*/