static int magma_cipher_init_ctr_acpkm_omac(EVP_CIPHER_CTX *ctx, const unsigned char *key,
const unsigned char *iv, int enc);
/* Handles block of data in ECB mode */
static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl);
/* Handles block of data in CBC mode */
static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl);
static int magma_cipher_do_ctr(EVP_CIPHER_CTX *ctx, unsigned char *out,
static GOST_cipher magma_template_cipher = {
.block_size = 8,
.key_len = 32,
- .iv_len = 8,
- .flags = EVP_CIPH_CUSTOM_IV |
+ .flags =
EVP_CIPH_RAND_KEY |
EVP_CIPH_ALWAYS_CALL_INIT,
.cleanup = gost_cipher_cleanup,
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING,
.init = magma_cipher_init,
};
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING,
.init = magma_cipher_init,
};
.block_size = 1,
.iv_len = 4,
.flags = EVP_CIPH_CTR_MODE |
+ EVP_CIPH_CUSTOM_IV |
EVP_CIPH_NO_PADDING |
EVP_CIPH_CUSTOM_COPY |
EVP_CIPH_FLAG_CUSTOM_CIPHER |
.ctrl = magma_cipher_ctl_acpkm_omac,
};
/*
 * Magma cipher in ECB mode.  Block size (8), key length (32) and the
 * RAND_KEY/ALWAYS_CALL_INIT flags are inherited from magma_template_cipher;
 * ECB uses no IV, so no iv_len or CUSTOM_IV is set here.
 */
GOST_cipher magma_ecb_cipher = {
    .nid = NID_magma_ecb,
    .template = &magma_template_cipher,
    .flags = EVP_CIPH_ECB_MODE,
    .init = magma_cipher_init,
    .do_cipher = magma_cipher_do_ecb,
};

/*
 * Magma cipher in CBC mode: 8-byte IV, engine-managed (CUSTOM_IV).
 * NOTE(review): template is gost_template_cipher while the ECB/CTR Magma
 * ciphers use magma_template_cipher — confirm this is intentional.
 */
GOST_cipher magma_cbc_cipher = {
    .nid = NID_magma_cbc,
    .template = &gost_template_cipher,
    .iv_len = 8,
    .flags = EVP_CIPH_CBC_MODE |
        EVP_CIPH_CUSTOM_IV,
    .init = magma_cipher_init,
    .do_cipher = magma_cipher_do_cbc,
};
return 1;
}
+/* MAGMA encryption in ECB mode */
+static int magma_cipher_do_ecb(EVP_CIPHER_CTX *ctx, unsigned char *out,
+ const unsigned char *in, size_t inl)
+{
+ struct ossl_gost_cipher_ctx *c = EVP_CIPHER_CTX_get_cipher_data(ctx);
+ if (EVP_CIPHER_CTX_encrypting(ctx)) {
+ while (inl > 0) {
+ magmacrypt(&(c->cctx), in, out);
+ out += 8;
+ in += 8;
+ inl -= 8;
+ }
+ } else {
+ while (inl > 0) {
+ magmadecrypt(&(c->cctx), in, out);
+ out += 8;
+ in += 8;
+ inl -= 8;
+ }
+ }
+ return 1;
+}
+
/* MAGMA encryption in CBC mode */
static int magma_cipher_do_cbc(EVP_CIPHER_CTX *ctx, unsigned char *out,
const unsigned char *in, size_t inl)
while (inl > 0) {
for (i = 0; i < 8; i++) {
- b[7 - i] = iv[i] ^ in_ptr[i];
- }
- gostcrypt(&(c->cctx), b, d);
-
- for (i = 0; i < 8; i++) {
- out_ptr[7 - i] = d[i];
+ out_ptr[i] = iv[i] ^ in_ptr[i];
}
+ magmacrypt(&(c->cctx), out_ptr, out_ptr);
memcpy(iv, out_ptr, 8);
out_ptr += 8;
in_ptr += 8;
}
} else {
while (inl > 0) {
- for (i = 0; i < 8; i++) {
- d[7 - i] = in_ptr[i];
- }
- gostdecrypt(&(c->cctx), d, b);
+ magmadecrypt(&(c->cctx), in_ptr, b);
memcpy(d, in_ptr, 8);
for (i = 0; i < 8; i++) {
- out_ptr[i] = iv[i] ^ b[7 - i];
+ out_ptr[i] = iv[i] ^ b[i];
}
memcpy(iv, d, 8);
out_ptr += 8;
unsigned char *iv = EVP_CIPHER_CTX_iv_noconst(ctx);
unsigned int num = EVP_CIPHER_CTX_num(ctx);
size_t blocks, i, lasted = inl;
- unsigned char b[8];
/* Process partial blocks */
while ((num & MAGMA_BLOCK_MASK) && lasted) {
- *out_ptr++ = *in_ptr++ ^ buf[7 - (num & MAGMA_BLOCK_MASK)];
+ *out_ptr++ = *in_ptr++ ^ buf[num & MAGMA_BLOCK_MASK];
--lasted;
num++;
}
/* Process full blocks */
for (i = 0; i < blocks; i++) {
apply_acpkm_magma(c, &num);
+ magmacrypt(&(c->cctx), iv, buf);
for (j = 0; j < 8; j++) {
- b[7 - j] = iv[j];
- }
- gostcrypt(&(c->cctx), b, buf);
- for (j = 0; j < 8; j++) {
- out_ptr[j] = buf[7 - j] ^ in_ptr[j];
+ out_ptr[j] = buf[j] ^ in_ptr[j];
}
ctr64_inc(iv);
c->count += MAGMA_BLOCK_SIZE;
/* Process the rest of plaintext */
if (lasted > 0) {
apply_acpkm_magma(c, &num);
- for (j = 0; j < 8; j++) {
- b[7 - j] = iv[j];
- }
- gostcrypt(&(c->cctx), b, buf);
+ magmacrypt(&(c->cctx), iv, buf);
for (i = 0; i < lasted; i++)
- out_ptr[i] = buf[7 - i] ^ in_ptr[i];
+ out_ptr[i] = buf[i] ^ in_ptr[i];
ctr64_inc(iv);
- c->count += j;
+ c->count += 8;
num += lasted;
}
EVP_CIPHER_CTX_set_num(ctx, num);