Diffstat (limited to 'src/include/s390_gcm.h')
-rw-r--r--  src/include/s390_gcm.h  250
 1 file changed, 203 insertions(+), 47 deletions(-)
diff --git a/src/include/s390_gcm.h b/src/include/s390_gcm.h
index 0c03137..3236d46 100644
--- a/src/include/s390_gcm.h
+++ b/src/include/s390_gcm.h
@@ -319,36 +319,77 @@ static inline int s390_gcm(unsigned int function_code,
/* prepare initial counter for cipher */
memcpy(tmp_ctr, j0, AES_BLOCK_SIZE);
- __inc_aes_ctr((struct uint128 *)tmp_ctr, GCM_CTR_WIDTH);
- /* en-/decrypt payload */
- if (function_code % 2) {
- /* decrypt */
- rc = s390_aes_ctr(UNDIRECTED_FC(function_code),
- ciphertext, plaintext, text_length,
- key, tmp_ctr, GCM_CTR_WIDTH);
- if (rc)
- return rc;
+ if (!msa8_switch) {
+
+ /**
+ * simulate aes-gcm with aes-ctr and ghash.
+ */
+
+ __inc_aes_ctr((struct uint128 *)tmp_ctr, GCM_CTR_WIDTH);
+
+ if (function_code % 2) {
+ /* mac */
+ rc = s390_gcm_authenticate(ciphertext, text_length,
+ aad, aad_length,
+ subkey_h, tmp_tag);
+ if (rc)
+ return rc;
+
+ /* decrypt */
+ rc = s390_aes_ctr(UNDIRECTED_FC(function_code),
+ ciphertext, plaintext, text_length,
+ key, tmp_ctr, GCM_CTR_WIDTH);
+ if (rc)
+ return rc;
+ } else {
+ /* encrypt */
+ rc = s390_aes_ctr(UNDIRECTED_FC(function_code),
+ plaintext, ciphertext, text_length,
+ key, tmp_ctr, GCM_CTR_WIDTH);
+ if (rc)
+ return rc;
+
+ /* mac */
+ rc = s390_gcm_authenticate(ciphertext, text_length,
+ aad, aad_length,
+ subkey_h, tmp_tag);
+ if (rc)
+ return rc;
+ }
+
+ /* encrypt tag */
+ return s390_aes_ctr(UNDIRECTED_FC(function_code),
+ tmp_tag, tag, tag_length,
+ key, j0, GCM_CTR_WIDTH);
+
} else {
- /* encrypt */
- rc = s390_aes_ctr(UNDIRECTED_FC(function_code),
- plaintext, ciphertext, text_length,
- key, tmp_ctr, GCM_CTR_WIDTH);
- if (rc)
- return rc;
- }
- /* generate authentication tag */
- rc = s390_gcm_authenticate(ciphertext, text_length,
- aad, aad_length,
- subkey_h, tmp_tag);
- if (rc)
- return rc;
+ /**
+ * use the aes-gcm support via CPACF.
+ */
+
+ if (function_code % 2) {
+ /* decrypt */
+ rc = s390_aes_gcm(function_code,
+ ciphertext, plaintext, text_length,
+ key, j0, GCM_CTR_WIDTH,
+ tmp_ctr, GCM_CTR_WIDTH,
+ aad, aad_length, subkey_h,
+ tag, tag_length, 1, 1);
+ } else {
+ /* encrypt */
+ memset(tag, 0, AES_BLOCK_SIZE);
+ rc = s390_aes_gcm(function_code,
+ plaintext, ciphertext, text_length,
+ key, j0, GCM_CTR_WIDTH,
+ tmp_ctr, GCM_CTR_WIDTH,
+ aad, aad_length, subkey_h,
+ tag, tag_length, 1, 1);
+ }
- /* encrypt tag */
- return s390_aes_ctr(UNDIRECTED_FC(function_code),
- tmp_tag, tag, tag_length,
- key, j0, GCM_CTR_WIDTH);
+ return rc;
+ }
}
static inline int s390_gcm_initialize(unsigned int function_code,
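A short illustration of the fallback composition above may help: without
MSA8, AES-GCM is assembled by hand from AES-CTR and GHASH. The payload is
processed under counter J0 + 1, the ciphertext is hashed, and the raw tag
is then encrypted under J0 itself. The sketch below mirrors the encrypt
branch using hypothetical aes_ctr() and ghash() primitives (not the
library API); AAD and the final length block are left out for brevity.

#include <string.h>

#define BLOCK 16

/* hypothetical primitives, assumed for illustration only */
int aes_ctr(const unsigned char *in, unsigned char *out, unsigned long len,
            const unsigned char *key, unsigned char *ctr);
int ghash(const unsigned char *data, unsigned long len,
          const unsigned char *subkey_h, unsigned char *tag);

static int gcm_encrypt_sketch(const unsigned char *pt, unsigned char *ct,
                              unsigned long len, const unsigned char *key,
                              const unsigned char *j0,
                              const unsigned char *subkey_h,
                              unsigned char *tag, unsigned long tag_len)
{
        unsigned char ctr[BLOCK], tmp_tag[BLOCK] = { 0 };
        int rc;

        /* payload uses J0 + 1; J0 itself is reserved for the tag */
        memcpy(ctr, j0, BLOCK);
        ctr[BLOCK - 1]++;       /* simplified; the real code increments a
                                   GCM_CTR_WIDTH-wide counter field */

        rc = aes_ctr(pt, ct, len, key, ctr);    /* encrypt ... */
        if (rc)
                return rc;
        rc = ghash(ct, len, subkey_h, tmp_tag); /* ... then MAC */
        if (rc)
                return rc;

        /* tag = E_K(J0) XOR GHASH, realized as one CTR pass over the tag */
        memcpy(ctr, j0, BLOCK);
        return aes_ctr(tmp_tag, tag, tag_len, key, ctr);
}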
@@ -373,14 +414,29 @@ static inline int s390_gcm_initialize(unsigned int function_code,
/* calculate initial counter, based on iv */
__compute_j0(iv, iv_length, subkey, icb);
- /* prepate usage counter for cipher */
+ /* prepare usage counter for cipher */
memcpy(ucb, icb, AES_BLOCK_SIZE);
- __inc_aes_ctr((struct uint128 *)ucb, GCM_CTR_WIDTH);
+
+ if (!msa8_switch) /* KMA increments the counter internally */
+ __inc_aes_ctr((struct uint128 *)ucb, GCM_CTR_WIDTH);
return 0;
}
-static inline int s390_gcm_intermediate(unsigned int function_code,
+static inline void inc_ctr(unsigned char *ctr)
+{
+ unsigned int *cv;
+
+ cv = (unsigned int *)&ctr[12];
+ *cv = *cv + 1;
+}
+
+/**
+ * Processes the last partial plaintext/ciphertext block (< 16 bytes) and
+ * computes the last intermediate tag via the old code path. KMA cannot
+ * do this, because it cannot process partial blocks before s390_gcm_last.
+ */
+static inline int s390_gcm_last_intermediate(unsigned int function_code,
unsigned char *plaintext, unsigned long text_length,
unsigned char *ciphertext,
unsigned char *ctr,
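The helper inc_ctr() introduced above bumps the 32-bit counter field in
bytes 12..15 through a plain integer store, which is correct on s390
because the machine is big-endian, matching the GCM counter layout. For
comparison, an endian-independent variant would have to carry byte by
byte; the sketch below is illustrative only and not part of the patch.

/* Endian-independent counter increment, for illustration only. */
static void inc_ctr_portable(unsigned char *ctr)
{
        int i;

        /* bytes 12..15 hold the big-endian 32-bit GCM counter */
        for (i = 15; i >= 12; i--)
                if (++ctr[i] != 0)      /* stop unless the byte wrapped */
                        break;
}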
@@ -389,30 +445,114 @@ static inline int s390_gcm_intermediate(unsigned int function_code,
unsigned char *key, unsigned char *subkey)
{
unsigned int rc;
+ unsigned char tmp_ctr[16];
- if (!msa4_switch)
- return EPERM;
+ /*
+ * The old code path needs ctr + 1. Work on a copy so the
+ * caller's ctr is not modified.
+ */
+ memcpy(tmp_ctr, ctr, sizeof(tmp_ctr));
+ inc_ctr(tmp_ctr);
- /* en-/decrypt payload */
if (function_code % 2) {
+ /* mac */
+ rc = s390_gcm_authenticate_intermediate(ciphertext, text_length, aad,
+ aad_length, subkey, tag);
+ if (rc)
+ return rc;
/* decrypt */
rc = s390_aes_ctr(UNDIRECTED_FC(function_code), ciphertext, plaintext,
- text_length, key, ctr, GCM_CTR_WIDTH);
+ text_length, key, tmp_ctr, GCM_CTR_WIDTH);
if (rc)
return rc;
} else {
/* encrypt */
rc = s390_aes_ctr(UNDIRECTED_FC(function_code), plaintext, ciphertext,
- text_length, key, ctr, GCM_CTR_WIDTH);
+ text_length, key, tmp_ctr, GCM_CTR_WIDTH);
+ if (rc)
+ return rc;
+ /* mac */
+ rc = s390_gcm_authenticate_intermediate(ciphertext, text_length, aad,
+ aad_length, subkey, tag);
if (rc)
return rc;
}
- /* generate authentication tag */
- rc = s390_gcm_authenticate_intermediate(ciphertext, text_length, aad,
- aad_length, subkey, tag);
- if (rc)
- return rc;
+ return 0;
+}
+
+static inline int s390_gcm_intermediate(unsigned int function_code,
+ unsigned char *plaintext, unsigned long text_length,
+ unsigned char *ciphertext,
+ unsigned char *ctr,
+ unsigned char *aad, unsigned long aad_length,
+ unsigned char *tag, unsigned long tag_length,
+ unsigned char *key, unsigned char *subkey)
+{
+ unsigned long bulk;
+ unsigned int rc, laad;
+ unsigned char *in, *out;
+
+ if (!msa4_switch)
+ return EPERM;
+
+ if (!msa8_switch) {
+ if (function_code % 2) {
+ /* mac */
+ rc = s390_gcm_authenticate_intermediate(ciphertext, text_length, aad,
+ aad_length, subkey, tag);
+ if (rc)
+ return rc;
+
+ /* decrypt */
+ rc = s390_aes_ctr(UNDIRECTED_FC(function_code), ciphertext, plaintext,
+ text_length, key, ctr, GCM_CTR_WIDTH);
+ if (rc)
+ return rc;
+ } else {
+ /* encrypt */
+ rc = s390_aes_ctr(UNDIRECTED_FC(function_code), plaintext, ciphertext,
+ text_length, key, ctr, GCM_CTR_WIDTH);
+ if (rc)
+ return rc;
+
+ /* mac */
+ rc = s390_gcm_authenticate_intermediate(ciphertext, text_length, aad,
+ aad_length, subkey, tag);
+ if (rc)
+ return rc;
+ }
+ } else {
+ if ((text_length > 0) || (aad_length % AES_BLOCK_SIZE))
+ laad = 1;
+ else
+ laad = 0;
+
+ bulk = (text_length / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;
+ text_length %= AES_BLOCK_SIZE;
+
+ if (bulk || aad_length) {
+ in = (function_code % 2) ? ciphertext : plaintext;
+ out = (function_code % 2) ? plaintext : ciphertext;
+
+ rc = s390_aes_gcm(function_code,
+ in, out, bulk, key,
+ NULL, 0, /* j0, j0_length not used here */
+ ctr, GCM_CTR_WIDTH,
+ aad, aad_length, subkey,
+ tag, tag_length, laad, 0);
+ if (rc)
+ return rc;
+ }
+ if (text_length) {
+ rc = s390_gcm_last_intermediate(function_code,
+ plaintext + bulk, text_length,
+ ciphertext + bulk, ctr, NULL,
+ 0, tag, tag_length, key, subkey);
+ if (rc)
+ return rc;
+ }
+ }
return 0;
}
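The reworked s390_gcm_intermediate() splits every update into a
block-aligned bulk that KMA can process and a trailing partial block
(< 16 bytes) that is diverted to s390_gcm_last_intermediate(). The laad
flag signals KMA that no further AAD will follow, which must hold as soon
as payload data appears or the AAD itself is not block-aligned. Below is a
standalone sketch of that split arithmetic with illustrative lengths; it
is not the library API.

#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Sketch of the bulk/remainder split used above. */
static void split_update(unsigned long text_length, unsigned long aad_length)
{
        /* AAD is final once payload starts or the AAD is partial */
        int laad = (text_length > 0 || aad_length % AES_BLOCK_SIZE) ? 1 : 0;

        unsigned long bulk = (text_length / AES_BLOCK_SIZE) * AES_BLOCK_SIZE;
        unsigned long rest = text_length % AES_BLOCK_SIZE;

        printf("KMA bulk: %lu bytes (laad=%d), fallback rest: %lu bytes\n",
               bulk, laad, rest);
}

int main(void)
{
        split_update(70, 20);   /* KMA bulk: 64 (laad=1), rest: 6 */
        return 0;
}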
@@ -423,16 +563,32 @@ static inline int s390_gcm_last(unsigned int function_code, unsigned char *icb,
unsigned char *key, unsigned char *subkey)
{
unsigned char tmp_tag[AES_BLOCK_SIZE];
+ unsigned char tmp_icb[AES_BLOCK_SIZE];
int rc;
- /* generate authentication tag */
- memcpy(tmp_tag, tag, tag_length);
- rc = s390_gcm_authenticate_last(aad_length, ciph_length, subkey, tmp_tag);
- if (rc)
- return rc;
+ /* don't modify the icb buffer */
+ memcpy(tmp_icb, icb, sizeof(tmp_icb));
- /* encrypt tag */
- return s390_aes_ctr(UNDIRECTED_FC(function_code), tmp_tag, tag, tag_length,
- key, icb, GCM_CTR_WIDTH);
+ if (!msa8_switch) {
+
+ /* generate authentication tag */
+ memcpy(tmp_tag, tag, tag_length);
+ rc = s390_gcm_authenticate_last(aad_length, ciph_length, subkey, tmp_tag);
+ if (rc)
+ return rc;
+
+ /* encrypt tag */
+ return s390_aes_ctr(UNDIRECTED_FC(function_code), tmp_tag, tag, tag_length,
+ key, tmp_icb, GCM_CTR_WIDTH);
+
+ } else {
+
+ return s390_aes_gcm(function_code,
+ NULL, NULL, ciph_length,
+ key, tmp_icb, GCM_CTR_WIDTH,
+ NULL, 0,
+ NULL, aad_length, subkey,
+ tag, tag_length, 1, 1);
+ }
}
#endif
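Taken together, the streaming entry points are driven as initialize, then
one or more intermediate updates, then last. The sketch below shows one
plausible call sequence; the s390_gcm_intermediate() signature is taken
from the hunk above, while the parameter order of s390_gcm_initialize()
and s390_gcm_last() is inferred from their hunk context and should be
checked against the full header.

#include "s390_gcm.h"   /* assumed include path */

/* Hedged usage sketch: encrypt one buffer, then finalize the tag. */
static int gcm_stream_sketch(unsigned int fc,   /* even fc = encrypt */
                             unsigned char *key)
{
        unsigned char iv[12] = { 0 };   /* 96-bit IV, example only */
        unsigned char subkey[16], icb[16], ucb[16];
        unsigned char tag[16] = { 0 };  /* running tag starts as zero */
        unsigned char pt[32] = { 0 }, ct[32];
        int rc;

        /* parameter order inferred from the initialize hunk above */
        rc = s390_gcm_initialize(fc, iv, sizeof(iv), key, icb, ucb, subkey);
        if (rc)
                return rc;

        /* one update; further intermediate calls could follow */
        rc = s390_gcm_intermediate(fc, pt, sizeof(pt), ct, ucb,
                                   NULL, 0, tag, sizeof(tag), key, subkey);
        if (rc)
                return rc;

        /* aad_length = 0, ciph_length = payload bytes (assumed order) */
        return s390_gcm_last(fc, icb, 0, sizeof(pt), tag, sizeof(tag),
                             key, subkey);
}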