Diffstat (limited to 'arch/s390/crypto/crypt_s390.h')
 arch/s390/crypto/crypt_s390.h | 540
 1 files changed, 290 insertions, 250 deletions
diff --git a/arch/s390/crypto/crypt_s390.h b/arch/s390/crypto/crypt_s390.h
index d1c259a7fe3..6c5cc6da711 100644
--- a/arch/s390/crypto/crypt_s390.h
+++ b/arch/s390/crypto/crypt_s390.h
@@ -3,8 +3,9 @@
*
* Support for s390 cryptographic instructions.
*
- * Copyright (C) 2003 IBM Deutschland GmbH, IBM Corporation
- * Author(s): Thomas Spatzier (tspat@de.ibm.com)
+ * Copyright IBM Corp. 2003, 2007
+ * Author(s): Thomas Spatzier
+ * Jan Glauber (jan.glauber@de.ibm.com)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
@@ -16,20 +17,30 @@
#define _CRYPTO_ARCH_S390_CRYPT_S390_H
#include <asm/errno.h>
+#include <asm/facility.h>
#define CRYPT_S390_OP_MASK 0xFF00
#define CRYPT_S390_FUNC_MASK 0x00FF
-/* s930 cryptographic operations */
+#define CRYPT_S390_PRIORITY 300
+#define CRYPT_S390_COMPOSITE_PRIORITY 400
+
+#define CRYPT_S390_MSA 0x1
+#define CRYPT_S390_MSA3 0x2
+#define CRYPT_S390_MSA4 0x4
+
+/* s390 cryptographic operations */
enum crypt_s390_operations {
CRYPT_S390_KM = 0x0100,
CRYPT_S390_KMC = 0x0200,
CRYPT_S390_KIMD = 0x0300,
CRYPT_S390_KLMD = 0x0400,
- CRYPT_S390_KMAC = 0x0500
+ CRYPT_S390_KMAC = 0x0500,
+ CRYPT_S390_KMCTR = 0x0600
};
-/* function codes for KM (CIPHER MESSAGE) instruction
+/*
+ * function codes for KM (CIPHER MESSAGE) instruction
* 0x80 is the decipher modifier bit
*/
enum crypt_s390_km_func {
@@ -46,9 +57,14 @@ enum crypt_s390_km_func {
KM_AES_192_DECRYPT = CRYPT_S390_KM | 0x13 | 0x80,
KM_AES_256_ENCRYPT = CRYPT_S390_KM | 0x14,
KM_AES_256_DECRYPT = CRYPT_S390_KM | 0x14 | 0x80,
+ KM_XTS_128_ENCRYPT = CRYPT_S390_KM | 0x32,
+ KM_XTS_128_DECRYPT = CRYPT_S390_KM | 0x32 | 0x80,
+ KM_XTS_256_ENCRYPT = CRYPT_S390_KM | 0x34,
+ KM_XTS_256_DECRYPT = CRYPT_S390_KM | 0x34 | 0x80,
};
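
A quick worked example of how a function code is assembled from the constants above (this only restates values already defined in this header):

/*
 * KM_AES_128_DECRYPT = CRYPT_S390_KM | 0x12 | 0x80
 *                    = 0x0100        | 0x12 | 0x80 = 0x0192
 * Masking with CRYPT_S390_FUNC_MASK (0x00ff) leaves 0x92, the function
 * code loaded into general register 0; the 0x0100 operation part is only
 * used by the inline wrappers below to select the right instruction.
 */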
-/* function codes for KMC (CIPHER MESSAGE WITH CHAINING)
+/*
+ * function codes for KMC (CIPHER MESSAGE WITH CHAINING)
* instruction
*/
enum crypt_s390_kmc_func {
@@ -65,27 +81,54 @@ enum crypt_s390_kmc_func {
KMC_AES_192_DECRYPT = CRYPT_S390_KMC | 0x13 | 0x80,
KMC_AES_256_ENCRYPT = CRYPT_S390_KMC | 0x14,
KMC_AES_256_DECRYPT = CRYPT_S390_KMC | 0x14 | 0x80,
+ KMC_PRNG = CRYPT_S390_KMC | 0x43,
+};
+
+/*
+ * function codes for KMCTR (CIPHER MESSAGE WITH COUNTER)
+ * instruction
+ */
+enum crypt_s390_kmctr_func {
+ KMCTR_QUERY = CRYPT_S390_KMCTR | 0x0,
+ KMCTR_DEA_ENCRYPT = CRYPT_S390_KMCTR | 0x1,
+ KMCTR_DEA_DECRYPT = CRYPT_S390_KMCTR | 0x1 | 0x80,
+ KMCTR_TDEA_128_ENCRYPT = CRYPT_S390_KMCTR | 0x2,
+ KMCTR_TDEA_128_DECRYPT = CRYPT_S390_KMCTR | 0x2 | 0x80,
+ KMCTR_TDEA_192_ENCRYPT = CRYPT_S390_KMCTR | 0x3,
+ KMCTR_TDEA_192_DECRYPT = CRYPT_S390_KMCTR | 0x3 | 0x80,
+ KMCTR_AES_128_ENCRYPT = CRYPT_S390_KMCTR | 0x12,
+ KMCTR_AES_128_DECRYPT = CRYPT_S390_KMCTR | 0x12 | 0x80,
+ KMCTR_AES_192_ENCRYPT = CRYPT_S390_KMCTR | 0x13,
+ KMCTR_AES_192_DECRYPT = CRYPT_S390_KMCTR | 0x13 | 0x80,
+ KMCTR_AES_256_ENCRYPT = CRYPT_S390_KMCTR | 0x14,
+ KMCTR_AES_256_DECRYPT = CRYPT_S390_KMCTR | 0x14 | 0x80,
};
-/* function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
+/*
+ * function codes for KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST)
* instruction
*/
enum crypt_s390_kimd_func {
KIMD_QUERY = CRYPT_S390_KIMD | 0,
KIMD_SHA_1 = CRYPT_S390_KIMD | 1,
KIMD_SHA_256 = CRYPT_S390_KIMD | 2,
+ KIMD_SHA_512 = CRYPT_S390_KIMD | 3,
+ KIMD_GHASH = CRYPT_S390_KIMD | 65,
};
-/* function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
+/*
+ * function codes for KLMD (COMPUTE LAST MESSAGE DIGEST)
* instruction
*/
enum crypt_s390_klmd_func {
KLMD_QUERY = CRYPT_S390_KLMD | 0,
KLMD_SHA_1 = CRYPT_S390_KLMD | 1,
KLMD_SHA_256 = CRYPT_S390_KLMD | 2,
+ KLMD_SHA_512 = CRYPT_S390_KLMD | 3,
};
-/* function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
+/*
+ * function codes for KMAC (COMPUTE MESSAGE AUTHENTICATION CODE)
* instruction
*/
enum crypt_s390_kmac_func {
@@ -95,303 +138,300 @@ enum crypt_s390_kmac_func {
KMAC_TDEA_192 = CRYPT_S390_KMAC | 3
};
-/* status word for s390 crypto instructions' QUERY functions */
-struct crypt_s390_query_status {
- u64 high;
- u64 low;
-};
-
-/*
- * Standard fixup and ex_table sections for crypt_s390 inline functions.
- * label 0: the s390 crypto operation
- * label 1: just after 1 to catch illegal operation exception
- * (unsupported model)
- * label 6: the return point after fixup
- * label 7: set error value if exception _in_ crypto operation
- * label 8: set error value if illegal operation exception
- * [ret] is the variable to receive the error code
- * [ERR] is the error code value
- */
-#ifndef CONFIG_64BIT
-#define __crypt_s390_fixup \
- ".section .fixup,\"ax\" \n" \
- "7: lhi %0,%h[e1] \n" \
- " bras 1,9f \n" \
- " .long 6b \n" \
- "8: lhi %0,%h[e2] \n" \
- " bras 1,9f \n" \
- " .long 6b \n" \
- "9: l 1,0(1) \n" \
- " br 1 \n" \
- ".previous \n" \
- ".section __ex_table,\"a\" \n" \
- " .align 4 \n" \
- " .long 0b,7b \n" \
- " .long 1b,8b \n" \
- ".previous"
-#else /* CONFIG_64BIT */
-#define __crypt_s390_fixup \
- ".section .fixup,\"ax\" \n" \
- "7: lhi %0,%h[e1] \n" \
- " jg 6b \n" \
- "8: lhi %0,%h[e2] \n" \
- " jg 6b \n" \
- ".previous\n" \
- ".section __ex_table,\"a\" \n" \
- " .align 8 \n" \
- " .quad 0b,7b \n" \
- " .quad 1b,8b \n" \
- ".previous"
-#endif /* CONFIG_64BIT */
-
-/*
- * Standard code for setting the result of s390 crypto instructions.
- * %0: the register which will receive the result
- * [result]: the register containing the result (e.g. second operand length
- * to compute number of processed bytes].
- */
-#ifndef CONFIG_64BIT
-#define __crypt_s390_set_result \
- " lr %0,%[result] \n"
-#else /* CONFIG_64BIT */
-#define __crypt_s390_set_result \
- " lgr %0,%[result] \n"
-#endif
-
-/*
+/**
+ * crypt_s390_km:
+ * @func: the function code passed to KM; see crypt_s390_km_func
+ * @param: address of parameter block; see POP for details on each func
+ * @dest: address of destination memory area
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ *
* Executes the KM (CIPHER MESSAGE) operation of the CPU.
- * @param func: the function code passed to KM; see crypt_s390_km_func
- * @param param: address of parameter block; see POP for details on each func
- * @param dest: address of destination memory area
- * @param src: address of source memory area
- * @param src_len: length of src operand in bytes
- * @returns < zero for failure, 0 for the query func, number of processed bytes
- * for encryption/decryption funcs
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for encryption/decryption funcs
*/
-static inline int
-crypt_s390_km(long func, void* param, u8* dest, const u8* src, long src_len)
+static inline int crypt_s390_km(long func, void *param,
+ u8 *dest, const u8 *src, long src_len)
{
register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
- register void* __param asm("1") = param;
- register u8* __dest asm("4") = dest;
- register const u8* __src asm("2") = src;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
register long __src_len asm("3") = src_len;
+ register u8 *__dest asm("4") = dest;
int ret;
- ret = 0;
- __asm__ __volatile__ (
- "0: .insn rre,0xB92E0000,%1,%2 \n" /* KM opcode */
+ asm volatile(
+ "0: .insn rre,0xb92e0000,%3,%1 \n" /* KM opcode */
"1: brc 1,0b \n" /* handle partial completion */
- __crypt_s390_set_result
- "6: \n"
- __crypt_s390_fixup
- : "+d" (ret), "+a" (__dest), "+a" (__src),
- [result] "+d" (__src_len)
- : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
- "a" (__param)
- : "cc", "memory"
- );
- if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
- ret = src_len - ret;
- }
- return ret;
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "=d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest)
+ : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
}
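
A minimal usage sketch for the KM wrapper (not part of the patch), assuming the KM-AES-128 parameter block is just the 16-byte key as described in the POP; the example_ name and error handling are illustrative only:

static int example_km_aes128_ecb_encrypt(const u8 key[16], u8 *out,
					 const u8 *in, long len)
{
	u8 param[16];				/* KM-AES-128: the key itself */
	int processed;

	memcpy(param, key, sizeof(param));	/* needs <linux/string.h> */
	/* len must be a multiple of the 16-byte AES block size */
	processed = crypt_s390_km(KM_AES_128_ENCRYPT, param, out, in, len);
	return processed == len ? 0 : -EIO;
}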
-/*
+/**
+ * crypt_s390_kmc:
+ * @func: the function code passed to KMC; see crypt_s390_kmc_func
+ * @param: address of parameter block; see POP for details on each func
+ * @dest: address of destination memory area
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ *
* Executes the KMC (CIPHER MESSAGE WITH CHAINING) operation of the CPU.
- * @param func: the function code passed to KM; see crypt_s390_kmc_func
- * @param param: address of parameter block; see POP for details on each func
- * @param dest: address of destination memory area
- * @param src: address of source memory area
- * @param src_len: length of src operand in bytes
- * @returns < zero for failure, 0 for the query func, number of processed bytes
- * for encryption/decryption funcs
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for encryption/decryption funcs
*/
-static inline int
-crypt_s390_kmc(long func, void* param, u8* dest, const u8* src, long src_len)
+static inline int crypt_s390_kmc(long func, void *param,
+ u8 *dest, const u8 *src, long src_len)
{
register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
- register void* __param asm("1") = param;
- register u8* __dest asm("4") = dest;
- register const u8* __src asm("2") = src;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
register long __src_len asm("3") = src_len;
+ register u8 *__dest asm("4") = dest;
int ret;
- ret = 0;
- __asm__ __volatile__ (
- "0: .insn rre,0xB92F0000,%1,%2 \n" /* KMC opcode */
+ asm volatile(
+ "0: .insn rre,0xb92f0000,%3,%1 \n" /* KMC opcode */
"1: brc 1,0b \n" /* handle partial completion */
- __crypt_s390_set_result
- "6: \n"
- __crypt_s390_fixup
- : "+d" (ret), "+a" (__dest), "+a" (__src),
- [result] "+d" (__src_len)
- : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
- "a" (__param)
- : "cc", "memory"
- );
- if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
- ret = src_len - ret;
- }
- return ret;
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "=d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest)
+ : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
}
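
A corresponding sketch for KMC, assuming the KMC-AES-128 parameter block layout of chaining value (IV) followed by the key; names are illustrative:

struct example_kmc_aes128_param {
	u8 iv[16];		/* chaining value, updated in place */
	u8 key[16];
};

static int example_kmc_aes128_cbc_encrypt(struct example_kmc_aes128_param *p,
					  u8 *out, const u8 *in, long len)
{
	int processed;

	processed = crypt_s390_kmc(KMC_AES_128_ENCRYPT, p, out, in, len);
	return processed == len ? 0 : -EIO;
}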
-/*
+/**
+ * crypt_s390_kimd:
+ * @func: the function code passed to KIMD; see crypt_s390_kimd_func
+ * @param: address of parameter block; see POP for details on each func
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ *
* Executes the KIMD (COMPUTE INTERMEDIATE MESSAGE DIGEST) operation
* of the CPU.
- * @param func: the function code passed to KM; see crypt_s390_kimd_func
- * @param param: address of parameter block; see POP for details on each func
- * @param src: address of source memory area
- * @param src_len: length of src operand in bytes
- * @returns < zero for failure, 0 for the query func, number of processed bytes
- * for digest funcs
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for digest funcs
*/
-static inline int
-crypt_s390_kimd(long func, void* param, const u8* src, long src_len)
+static inline int crypt_s390_kimd(long func, void *param,
+ const u8 *src, long src_len)
{
register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
- register void* __param asm("1") = param;
- register const u8* __src asm("2") = src;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
register long __src_len asm("3") = src_len;
int ret;
- ret = 0;
- __asm__ __volatile__ (
- "0: .insn rre,0xB93E0000,%1,%1 \n" /* KIMD opcode */
- "1: brc 1,0b \n" /* handle partical completion */
- __crypt_s390_set_result
- "6: \n"
- __crypt_s390_fixup
- : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
- : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
- "a" (__param)
- : "cc", "memory"
- );
- if (ret >= 0 && (func & CRYPT_S390_FUNC_MASK)){
- ret = src_len - ret;
- }
- return ret;
+ asm volatile(
+ "0: .insn rre,0xb93e0000,%1,%1 \n" /* KIMD opcode */
+ "1: brc 1,0b \n" /* handle partial completion */
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "=d" (ret), "+a" (__src), "+d" (__src_len)
+ : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
}
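
A sketch of a KIMD-based digest update, assuming the KIMD-SHA-1 parameter block is the 20-byte intermediate hash value; only complete blocks may be passed here:

static int example_kimd_sha1_update(u8 state[20], const u8 *data, long len)
{
	/* len is assumed to be a multiple of the 64-byte SHA-1 block size */
	return crypt_s390_kimd(KIMD_SHA_1, state, data, len) == len ? 0 : -EIO;
}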
-/*
+/**
+ * crypt_s390_klmd:
+ * @func: the function code passed to KLMD; see crypt_s390_klmd_func
+ * @param: address of parameter block; see POP for details on each func
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ *
* Executes the KLMD (COMPUTE LAST MESSAGE DIGEST) operation of the CPU.
- * @param func: the function code passed to KM; see crypt_s390_klmd_func
- * @param param: address of parameter block; see POP for details on each func
- * @param src: address of source memory area
- * @param src_len: length of src operand in bytes
- * @returns < zero for failure, 0 for the query func, number of processed bytes
- * for digest funcs
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for digest funcs
*/
-static inline int
-crypt_s390_klmd(long func, void* param, const u8* src, long src_len)
+static inline int crypt_s390_klmd(long func, void *param,
+ const u8 *src, long src_len)
{
register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
- register void* __param asm("1") = param;
- register const u8* __src asm("2") = src;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
register long __src_len asm("3") = src_len;
int ret;
- ret = 0;
- __asm__ __volatile__ (
- "0: .insn rre,0xB93F0000,%1,%1 \n" /* KLMD opcode */
- "1: brc 1,0b \n" /* handle partical completion */
- __crypt_s390_set_result
- "6: \n"
- __crypt_s390_fixup
- : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
- : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
- "a" (__param)
- : "cc", "memory"
- );
- if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
- ret = src_len - ret;
- }
- return ret;
+ asm volatile(
+ "0: .insn rre,0xb93f0000,%1,%1 \n" /* KLMD opcode */
+ "1: brc 1,0b \n" /* handle partial completion */
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "=d" (ret), "+a" (__src), "+d" (__src_len)
+ : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
}
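
A matching KLMD sketch; the assumption here is the POP layout of intermediate hash followed by the 64-bit message bit length, with KLMD applying the final padding itself:

struct example_klmd_sha1_param {
	u8 state[20];
	u64 mbl;		/* total message length in bits */
};

static int example_klmd_sha1_final(struct example_klmd_sha1_param *p,
				   const u8 *tail, long tail_len)
{
	int processed;

	processed = crypt_s390_klmd(KLMD_SHA_1, p, tail, tail_len);
	return processed == tail_len ? 0 : -EIO;
}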
-/*
+/**
+ * crypt_s390_kmac:
+ * @func: the function code passed to KMAC; see crypt_s390_kmac_func
+ * @param: address of parameter block; see POP for details on each func
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ *
* Executes the KMAC (COMPUTE MESSAGE AUTHENTICATION CODE) operation
* of the CPU.
- * @param func: the function code passed to KM; see crypt_s390_klmd_func
- * @param param: address of parameter block; see POP for details on each func
- * @param src: address of source memory area
- * @param src_len: length of src operand in bytes
- * @returns < zero for failure, 0 for the query func, number of processed bytes
- * for digest funcs
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for digest funcs
*/
-static inline int
-crypt_s390_kmac(long func, void* param, const u8* src, long src_len)
+static inline int crypt_s390_kmac(long func, void *param,
+ const u8 *src, long src_len)
{
register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
- register void* __param asm("1") = param;
- register const u8* __src asm("2") = src;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
register long __src_len asm("3") = src_len;
int ret;
- ret = 0;
- __asm__ __volatile__ (
- "0: .insn rre,0xB91E0000,%5,%5 \n" /* KMAC opcode */
- "1: brc 1,0b \n" /* handle partical completion */
- __crypt_s390_set_result
- "6: \n"
- __crypt_s390_fixup
- : "+d" (ret), "+a" (__src), [result] "+d" (__src_len)
- : [e1] "K" (-EFAULT), [e2] "K" (-ENOSYS), "d" (__func),
- "a" (__param)
- : "cc", "memory"
- );
- if (ret >= 0 && func & CRYPT_S390_FUNC_MASK){
- ret = src_len - ret;
- }
- return ret;
+ asm volatile(
+ "0: .insn rre,0xb91e0000,%1,%1 \n" /* KLAC opcode */
+ "1: brc 1,0b \n" /* handle partial completion */
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "=d" (ret), "+a" (__src), "+d" (__src_len)
+ : "d" (__func), "a" (__param), "0" (-1) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
}
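
A KMAC sketch, assuming the KMAC-DEA parameter block is the 8-byte chaining value (which accumulates the MAC) followed by the 8-byte key; full 8-byte blocks only:

struct example_kmac_dea_param {
	u8 icv[8];		/* running MAC / chaining value */
	u8 key[8];
};

static int example_kmac_dea_update(struct example_kmac_dea_param *p,
				   const u8 *data, long len)
{
	return crypt_s390_kmac(KMAC_DEA, p, data, len) == len ? 0 : -EIO;
}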
/**
+ * crypt_s390_kmctr:
+ * @func: the function code passed to KMCTR; see crypt_s390_kmctr_func
+ * @param: address of parameter block; see POP for details on each func
+ * @dest: address of destination memory area
+ * @src: address of source memory area
+ * @src_len: length of src operand in bytes
+ * @counter: address of counter value
+ *
+ * Executes the KMCTR (CIPHER MESSAGE WITH COUNTER) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for the query func, number of processed
+ * bytes for encryption/decryption funcs
+ */
+static inline int crypt_s390_kmctr(long func, void *param, u8 *dest,
+ const u8 *src, long src_len, u8 *counter)
+{
+ register long __func asm("0") = func & CRYPT_S390_FUNC_MASK;
+ register void *__param asm("1") = param;
+ register const u8 *__src asm("2") = src;
+ register long __src_len asm("3") = src_len;
+ register u8 *__dest asm("4") = dest;
+ register u8 *__ctr asm("6") = counter;
+ int ret = -1;
+
+ asm volatile(
+ "0: .insn rrf,0xb92d0000,%3,%1,%4,0 \n" /* KMCTR opcode */
+ "1: brc 1,0b \n" /* handle partial completion */
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "+d" (ret), "+a" (__src), "+d" (__src_len), "+a" (__dest),
+ "+a" (__ctr)
+ : "d" (__func), "a" (__param) : "cc", "memory");
+ if (ret < 0)
+ return ret;
+ return (func & CRYPT_S390_FUNC_MASK) ? src_len - __src_len : __src_len;
+}
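
A KMCTR sketch, assuming the KMCTR-AES-128 parameter block is the 16-byte key and that the counter operand points to one pre-computed 16-byte counter block per data block (how the in-kernel CTR users are expected to drive it); names are illustrative:

static int example_kmctr_aes128(const u8 key[16], u8 *out, const u8 *in,
				long len, u8 *ctrblocks)
{
	u8 param[16];
	int processed;

	memcpy(param, key, sizeof(param));	/* needs <linux/string.h> */
	processed = crypt_s390_kmctr(KMCTR_AES_128_ENCRYPT, param, out, in,
				     len, ctrblocks);
	return processed == len ? 0 : -EIO;
}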
+
+/**
+ * crypt_s390_func_available:
+ * @func: the function code of the specific function; 0 if op in general
+ *
* Tests if a specific crypto function is implemented on the machine.
- * @param func: the function code of the specific function; 0 if op in general
- * @return 1 if func available; 0 if func or op in general not available
+ *
+ * Returns 1 if func available; 0 if func or op in general not available
*/
-static inline int
-crypt_s390_func_available(int func)
+static inline int crypt_s390_func_available(int func,
+ unsigned int facility_mask)
{
+ unsigned char status[16];
int ret;
- struct crypt_s390_query_status status = {
- .high = 0,
- .low = 0
- };
- switch (func & CRYPT_S390_OP_MASK){
- case CRYPT_S390_KM:
- ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
- break;
- case CRYPT_S390_KMC:
- ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
- break;
- case CRYPT_S390_KIMD:
- ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
- break;
- case CRYPT_S390_KLMD:
- ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
- break;
- case CRYPT_S390_KMAC:
- ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
- break;
- default:
- ret = 0;
- return ret;
- }
- if (ret >= 0){
- func &= CRYPT_S390_FUNC_MASK;
- func &= 0x7f; //mask modifier bit
- if (func < 64){
- ret = (status.high >> (64 - func - 1)) & 0x1;
- } else {
- ret = (status.low >> (128 - func - 1)) & 0x1;
- }
- } else {
- ret = 0;
+ if (facility_mask & CRYPT_S390_MSA && !test_facility(17))
+ return 0;
+
+ if (facility_mask & CRYPT_S390_MSA3 &&
+ (!test_facility(2) || !test_facility(76)))
+ return 0;
+ if (facility_mask & CRYPT_S390_MSA4 &&
+ (!test_facility(2) || !test_facility(77)))
+ return 0;
+
+ switch (func & CRYPT_S390_OP_MASK) {
+ case CRYPT_S390_KM:
+ ret = crypt_s390_km(KM_QUERY, &status, NULL, NULL, 0);
+ break;
+ case CRYPT_S390_KMC:
+ ret = crypt_s390_kmc(KMC_QUERY, &status, NULL, NULL, 0);
+ break;
+ case CRYPT_S390_KIMD:
+ ret = crypt_s390_kimd(KIMD_QUERY, &status, NULL, 0);
+ break;
+ case CRYPT_S390_KLMD:
+ ret = crypt_s390_klmd(KLMD_QUERY, &status, NULL, 0);
+ break;
+ case CRYPT_S390_KMAC:
+ ret = crypt_s390_kmac(KMAC_QUERY, &status, NULL, 0);
+ break;
+ case CRYPT_S390_KMCTR:
+ ret = crypt_s390_kmctr(KMCTR_QUERY, &status, NULL, NULL, 0,
+ NULL);
+ break;
+ default:
+ return 0;
}
+ if (ret < 0)
+ return 0;
+ func &= CRYPT_S390_FUNC_MASK;
+ func &= 0x7f; /* mask modifier bit */
+ return (status[func >> 3] & (0x80 >> (func & 7))) != 0;
+}
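
A sketch of how a cipher module would typically probe for support at init time with the new facility_mask argument (function codes and masks are taken from this header; the probe function name is illustrative):

static int example_probe_hw_aes(void)
{
	if (!crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;
	if (!crypt_s390_func_available(KMCTR_AES_128_ENCRYPT, CRYPT_S390_MSA4))
		return -EOPNOTSUPP;
	return 0;
}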
+
+/**
+ * crypt_s390_pcc:
+ * @func: the function code passed to PCC; see crypt_s390_km_func
+ * @param: address of parameter block; see POP for details on each func
+ *
+ * Executes the PCC (PERFORM CRYPTOGRAPHIC COMPUTATION) operation of the CPU.
+ *
+ * Returns -1 for failure, 0 for success.
+ */
+static inline int crypt_s390_pcc(long func, void *param)
+{
+ register long __func asm("0") = func & 0x7f; /* encrypt or decrypt */
+ register void *__param asm("1") = param;
+ int ret = -1;
+
+ asm volatile(
+ "0: .insn rre,0xb92c0000,0,0 \n" /* PCC opcode */
+ "1: brc 1,0b \n" /* handle partial completion */
+ " la %0,0\n"
+ "2:\n"
+ EX_TABLE(0b,2b) EX_TABLE(1b,2b)
+ : "+d" (ret)
+ : "d" (__func), "a" (__param) : "cc", "memory");
return ret;
}
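
A PCC sketch: for XTS, the same KM function code (with the modifier bit stripped by the helper) can be used to pre-compute the XTS parameter inside a caller-supplied parameter block whose exact layout is defined by the POP; the wrapper below is illustrative only:

static int example_pcc_xts128(void *xts_pcc_param_block)
{
	/* the PCC-Compute-XTS-Parameter block layout is defined in the
	 * POP; it is filled in place on success */
	return crypt_s390_pcc(KM_XTS_128_ENCRYPT, xts_pcc_param_block);
}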
-#endif // _CRYPTO_ARCH_S390_CRYPT_S390_H
+
+#endif /* _CRYPTO_ARCH_S390_CRYPT_S390_H */