Diffstat (limited to 'arch/x86/crypto/cast6_avx_glue.c')
-rw-r--r--  arch/x86/crypto/cast6_avx_glue.c | 24 +++++++++++++++++++-----
1 file changed, 19 insertions(+), 5 deletions(-)
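As the two hunks below show, the unconditional cast6_fpu_begin() at the top of encrypt_callback() and decrypt_callback() is removed; the FPU section is instead opened only inside the 8-way fast path, and the new helper cast6_fpu_end_rt() closes it again immediately after the 8-way call when CONFIG_PREEMPT_RT_FULL is set (it compiles to a no-op otherwise). The per-block fallback loop therefore no longer runs with the kernel FPU claimed, which keeps the FPU-enabled, non-preemptible region confined to the 8-way operation on RT kernels.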
diff --git a/arch/x86/crypto/cast6_avx_glue.c b/arch/x86/crypto/cast6_avx_glue.c
index 50e684768c55..8caf9ba8c1da 100644
--- a/arch/x86/crypto/cast6_avx_glue.c
+++ b/arch/x86/crypto/cast6_avx_glue.c
@@ -205,19 +205,33 @@ struct crypt_priv {
bool fpu_enabled;
};
+#ifdef CONFIG_PREEMPT_RT_FULL
+static void cast6_fpu_end_rt(struct crypt_priv *ctx)
+{
+ bool fpu_enabled = ctx->fpu_enabled;
+
+ if (!fpu_enabled)
+ return;
+ cast6_fpu_end(fpu_enabled);
+ ctx->fpu_enabled = false;
+}
+
+#else
+static void cast6_fpu_end_rt(struct crypt_priv *ctx) { }
+#endif
+
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
const unsigned int bsize = CAST6_BLOCK_SIZE;
struct crypt_priv *ctx = priv;
int i;
- ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
-
if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
+ ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
+ cast6_fpu_end_rt(ctx);
return;
}
-
for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
__cast6_encrypt(ctx->ctx, srcdst, srcdst);
}
@@ -228,10 +242,10 @@ static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
struct crypt_priv *ctx = priv;
int i;
- ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
-
if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
+ ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);
cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
+ cast6_fpu_end_rt(ctx);
return;
}
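
For illustration only, here is a self-contained user-space sketch of the pattern the patch introduces, assuming nothing beyond the hunks above: the FPU section is modelled by printf stubs, SIMULATE_PREEMPT_RT_FULL stands in for CONFIG_PREEMPT_RT_FULL, and every other name is a hypothetical stand-in, not the kernel's API. Only the control flow (claim the FPU just for the 8-way batch, release it immediately on RT) mirrors the real change.

#include <stdbool.h>
#include <stdio.h>

#define BLOCK_SIZE      16  /* stand-in for CAST6_BLOCK_SIZE */
#define PARALLEL_BLOCKS  8  /* stand-in for CAST6_PARALLEL_BLOCKS */

struct crypt_priv {
	bool fpu_enabled;
};

/* Stub for cast6_fpu_begin(): claim the "FPU" once, report state. */
static bool fpu_begin(bool enabled, unsigned int nbytes)
{
	if (!enabled)
		printf("FPU section begin (%u bytes)\n", nbytes);
	return true;
}

/* Stub for cast6_fpu_end(): release the "FPU" if it was claimed. */
static void fpu_end(bool enabled)
{
	if (enabled)
		printf("FPU section end\n");
}

/* Mirrors cast6_fpu_end_rt(): on RT, close the section right away. */
static void fpu_end_rt(struct crypt_priv *ctx)
{
#ifdef SIMULATE_PREEMPT_RT_FULL
	if (!ctx->fpu_enabled)
		return;
	fpu_end(ctx->fpu_enabled);
	ctx->fpu_enabled = false;
#else
	(void)ctx;
#endif
}

static void encrypt_callback(void *priv, unsigned char *srcdst,
			     unsigned int nbytes)
{
	struct crypt_priv *ctx = priv;
	unsigned int i;

	if (nbytes == BLOCK_SIZE * PARALLEL_BLOCKS) {
		/* Fast path: the FPU is claimed only around the batch. */
		ctx->fpu_enabled = fpu_begin(ctx->fpu_enabled, nbytes);
		printf("8-way batch encrypt, %u bytes\n", nbytes);
		fpu_end_rt(ctx);
		return;
	}

	/* Fallback: scalar per-block path, no FPU section opened here. */
	for (i = 0; i < nbytes / BLOCK_SIZE; i++, srcdst += BLOCK_SIZE)
		printf("scalar encrypt, block %u\n", i);
}

int main(void)
{
	struct crypt_priv ctx = { .fpu_enabled = false };
	unsigned char buf[BLOCK_SIZE * PARALLEL_BLOCKS] = { 0 };

	encrypt_callback(&ctx, buf, sizeof buf);  /* hits the fast path */
	encrypt_callback(&ctx, buf, BLOCK_SIZE);  /* hits the fallback  */
	fpu_end(ctx.fpu_enabled);  /* non-RT case: caller ends the section */
	return 0;
}

Built without SIMULATE_PREEMPT_RT_FULL, the run shows one FPU section spanning both calls, ended by the caller; built with it, the section opens and closes entirely inside the fast path, matching the RT behaviour the patch is after.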