author     Joakim Bech <joakim.xx.bech@stericsson.com>   2010-12-06 14:42:38 +0100
committer  Jonas ABERG <jonas.aberg@stericsson.com>      2011-02-21 13:50:13 +0100
commit     ea29c03fbe127c447edac6d5b6b5ac941f534de4 (patch)
tree       25f52d6506b17e43646a60e11b963f1364633b4c
parent     68ab02401e62cac6825d524607a1ff80785857d9 (diff)
crypto: ux500: AES ECB converted to ablk_cipher and supports DMA. (tag: u8500-android-2.3_v0.18)
- DMA support for AES_ECB added.
- ablk_cipher support added to the driver. In this commit AES_ECB uses this
  asynchronous API. This is a must, since the crypto testmgr, which runs
  sanity tests when a module is loaded, will otherwise emit miscellaneous
  sleep warning/error messages when running in synchronous mode using DMA.
  DMA operations should therefore use the ablk_cipher (asynchronous) API.
- Added a scatterlist walk function for ablk_cipher for the non-DMA version.
- Added power awareness to DMA-related code in this cryp driver.
- Refactored code in hw_crypt_noxts into function calls for getting the
  device and for setting up the context.
- Renamed registers so they correspond to the names in the design spec.

ST-Ericsson ID: AP277474
ST-Ericsson Linux next: ER320876, v-ape regulator missing.

Signed-off-by: Joakim Bech <joakim.xx.bech@stericsson.com>
Change-Id: I989c82cc10f74ce0907dcc255406165c14846683
Reviewed-on: http://gerrit.lud.stericsson.com/gerrit/15007
Reviewed-by: Niklas HERNAEUS <niklas.hernaeus@stericsson.com>
Reviewed-by: Linus WALLEIJ <linus.walleij@stericsson.com>
-rw-r--r--  arch/arm/mach-ux500/devices.c                    |  29
-rw-r--r--  arch/arm/mach-ux500/dma-db8500.c                 |   4
-rw-r--r--  arch/arm/mach-ux500/include/mach/crypto-ux500.h  |  17
-rw-r--r--  arch/arm/mach-ux500/include/mach/hardware.h      |   2
-rw-r--r--  drivers/crypto/ux500/Kconfig                     |   4
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.c                 |  61
-rw-r--r--  drivers/crypto/ux500/cryp/cryp.h                 |  37
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_core.c            | 704
-rw-r--r--  drivers/crypto/ux500/cryp/cryp_p.h               |   8
9 files changed, 653 insertions, 213 deletions
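Why the asynchronous API matters here: a synchronous blkcipher handler must finish before it returns, but a DMA transfer completes in a callback, so the driver would have to sleep inside the synchronous path, which is exactly what testmgr warns about. With ablk_cipher the request may return -EINPROGRESS and the caller sleeps instead. Below is a minimal consumer sketch against the ablkcipher API of this kernel generation; the helper names and result struct are illustrative only and not part of this patch.

#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypt_result {
        struct completion completion;
        int err;
};

/* Runs from the driver's completion path (e.g. its DMA callback). */
static void crypt_done(struct crypto_async_request *req, int err)
{
        struct crypt_result *res = req->data;

        if (err == -EINPROGRESS)
                return;
        res->err = err;
        complete(&res->completion);
}

static int ecb_aes_encrypt_once(struct scatterlist *src,
                                struct scatterlist *dst,
                                unsigned int nbytes,
                                const u8 *key, unsigned int keylen)
{
        struct crypto_ablkcipher *tfm;
        struct ablkcipher_request *req;
        struct crypt_result res;
        int ret;

        tfm = crypto_alloc_ablkcipher("ecb(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_tfm;
        }

        init_completion(&res.completion);
        ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                        crypt_done, &res);

        ret = crypto_ablkcipher_setkey(tfm, key, keylen);
        if (ret)
                goto out_req;

        ablkcipher_request_set_crypt(req, src, dst, nbytes, NULL);

        ret = crypto_ablkcipher_encrypt(req);
        if (ret == -EINPROGRESS || ret == -EBUSY) {
                /* The caller sleeps here; the driver itself never has to. */
                wait_for_completion(&res.completion);
                ret = res.err;
        }

out_req:
        ablkcipher_request_free(req);
out_tfm:
        crypto_free_ablkcipher(tfm);
        return ret;
}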
diff --git a/arch/arm/mach-ux500/devices.c b/arch/arm/mach-ux500/devices.c
index 891a391b37f..2ca79df69a9 100644
--- a/arch/arm/mach-ux500/devices.c
+++ b/arch/arm/mach-ux500/devices.c
@@ -25,12 +25,15 @@
#include <asm/mach/map.h>
#include <asm/setup.h>
+#include <mach/crypto-ux500.h>
#include <mach/irqs.h>
#include <mach/hardware.h>
#include <mach/devices.h>
#include <mach/setup.h>
#include <linux/hwmem.h>
+#include <plat/ste_dma40.h>
+
#ifdef CONFIG_STE_TRACE_MODEM
#include <linux/db8500-modem-trace.h>
#endif
@@ -197,9 +200,35 @@ static struct resource ux500_cryp1_resources[] = {
}
};
+static struct cryp_platform_data cryp1_platform_data = {
+ .mem_to_engine = {
+ .dir = STEDMA40_MEM_TO_PERIPH,
+ .src_dev_type = STEDMA40_DEV_SRC_MEMORY,
+ .dst_dev_type = DB8500_DMA_DEV48_CAC1_TX,
+ .src_info.data_width = STEDMA40_WORD_WIDTH,
+ .dst_info.data_width = STEDMA40_WORD_WIDTH,
+ .mode = STEDMA40_MODE_LOGICAL,
+ .src_info.psize = STEDMA40_PSIZE_LOG_4,
+ .dst_info.psize = STEDMA40_PSIZE_LOG_4,
+ },
+ .engine_to_mem = {
+ .dir = STEDMA40_PERIPH_TO_MEM,
+ .src_dev_type = DB8500_DMA_DEV48_CAC1_RX,
+ .dst_dev_type = STEDMA40_DEV_DST_MEMORY,
+ .src_info.data_width = STEDMA40_WORD_WIDTH,
+ .dst_info.data_width = STEDMA40_WORD_WIDTH,
+ .mode = STEDMA40_MODE_LOGICAL,
+ .src_info.psize = STEDMA40_PSIZE_LOG_4,
+ .dst_info.psize = STEDMA40_PSIZE_LOG_4,
+ }
+};
+
struct platform_device ux500_cryp1_device = {
.name = "cryp1",
.id = -1,
+ .dev = {
+ .platform_data = &cryp1_platform_data
+ },
.num_resources = ARRAY_SIZE(ux500_cryp1_resources),
.resource = ux500_cryp1_resources
};
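For reference, this platform data is picked up in u8500_cryp_probe and handed to the DMA40 driver in cryp_dma_setup_channel, both later in this patch. A condensed sketch of that hand-off, using the same calls the driver makes (illustrative only):

        dma_cap_mask_t mask;
        struct dma_chan *chan;

        dma_cap_zero(mask);
        dma_cap_set(DMA_SLAVE, mask);

        /* stedma40_filter() matches a DMA40 channel that can serve the
         * stedma40_chan_cfg passed as the filter parameter. */
        chan = dma_request_channel(mask, stedma40_filter,
                                   &cryp1_platform_data.mem_to_engine);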
diff --git a/arch/arm/mach-ux500/dma-db8500.c b/arch/arm/mach-ux500/dma-db8500.c
index 66c03c3f7ba..79a5b2eff9d 100644
--- a/arch/arm/mach-ux500/dma-db8500.c
+++ b/arch/arm/mach-ux500/dma-db8500.c
@@ -125,7 +125,7 @@ static dma_addr_t dma40_rx_map[DB8500_DMA_NR_DEV] = {
[DB8500_DMA_DEV45_SRC_SXA5_RX_TX] = 0,
[DB8500_DMA_DEV46_SLIM0_CH8_RX_SRC_SXA6_RX_TX] = 0,
[DB8500_DMA_DEV47_SLIM0_CH9_RX_SRC_SXA7_RX_TX] = 0,
- [DB8500_DMA_DEV48_CAC1_RX] = 0,
+ [DB8500_DMA_DEV48_CAC1_RX] = U8500_CRYP1_BASE + CRYP1_RX_REG_OFFSET,
/* 49, 50 and 51 are not used */
[DB8500_DMA_DEV52_SLIM0_CH4_RX_HSI_RX_CH4] = 0,
[DB8500_DMA_DEV53_SLIM0_CH5_RX_HSI_RX_CH5] = 0,
@@ -186,7 +186,7 @@ static const dma_addr_t dma40_tx_map[DB8500_DMA_NR_DEV] = {
[DB8500_DMA_DEV45_DST_SXA5_RX_TX] = 0,
[DB8500_DMA_DEV46_SLIM0_CH8_TX_DST_SXA6_RX_TX] = 0,
[DB8500_DMA_DEV47_SLIM0_CH9_TX_DST_SXA7_RX_TX] = 0,
- [DB8500_DMA_DEV48_CAC1_TX] = 0,
+ [DB8500_DMA_DEV48_CAC1_TX] = U8500_CRYP1_BASE + CRYP1_TX_REG_OFFSET,
[DB8500_DMA_DEV49_CAC1_TX_HAC1_TX] = 0,
[DB8500_DMA_DEV50_HAC1_TX] = 0,
[DB8500_DMA_MEMCPY_TX_0] = 0,
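With the offsets added to hardware.h below, these two map entries resolve to the physical addresses the DMA40 controller targets during slave transfers: U8500_CRYP1_BASE + 0x10 for the RX entry and U8500_CRYP1_BASE + 0x8 for the TX entry, presumably the CRYP DOUT and DIN data-FIFO registers respectively.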
diff --git a/arch/arm/mach-ux500/include/mach/crypto-ux500.h b/arch/arm/mach-ux500/include/mach/crypto-ux500.h
new file mode 100644
index 00000000000..57da88398d5
--- /dev/null
+++ b/arch/arm/mach-ux500/include/mach/crypto-ux500.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (C) ST-Ericsson SA 2011
+ *
+ * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson
+ * License terms: GNU General Public License (GPL) version 2
+ */
+#ifndef _CRYPTO_UX500_H
+#define _CRYPTO_UX500_H
+#include <plat/ste_dma40.h>
+#include <mach/ste-dma40-db8500.h>
+
+struct cryp_platform_data {
+ struct stedma40_chan_cfg mem_to_engine;
+ struct stedma40_chan_cfg engine_to_mem;
+};
+
+#endif
diff --git a/arch/arm/mach-ux500/include/mach/hardware.h b/arch/arm/mach-ux500/include/mach/hardware.h
index 90ae97594e4..7bb75c7838e 100644
--- a/arch/arm/mach-ux500/include/mach/hardware.h
+++ b/arch/arm/mach-ux500/include/mach/hardware.h
@@ -62,6 +62,8 @@
#define SSP_TX_RX_REG_OFFSET (0x8)
#define SPI_TX_RX_REG_OFFSET (0x8)
#define SD_MMC_TX_RX_REG_OFFSET (0x80)
+#define CRYP1_RX_REG_OFFSET (0x10)
+#define CRYP1_TX_REG_OFFSET (0x8)
#define MSP_0_CONTROLLER 1
#define MSP_1_CONTROLLER 2
diff --git a/drivers/crypto/ux500/Kconfig b/drivers/crypto/ux500/Kconfig
index 86f7ca49637..165a03d46c0 100644
--- a/drivers/crypto/ux500/Kconfig
+++ b/drivers/crypto/ux500/Kconfig
@@ -6,7 +6,7 @@
config CRYPTO_DEV_UX500_CRYP
tristate "UX500 crypto driver for CRYP block"
- depends on CRYPTO_DEV_UX500
+ depends on CRYPTO_DEV_UX500
select CRYPTO_DES
help
This is the driver for the crypto block CRYP.
@@ -21,7 +21,7 @@ config CRYPTO_DEV_UX500_HASH
Depends on U8500/STM DMA if running in DMA mode.
config CRYPTO_DEV_UX500_DEBUG
- bool "Activate ux500 platform debug-mode"
+ bool "Activate ux500 platform debug-mode for crypto and hash block"
depends on CRYPTO_DEV_UX500_CRYP || CRYPTO_DEV_UX500_HASH
default n
help
diff --git a/drivers/crypto/ux500/cryp/cryp.c b/drivers/crypto/ux500/cryp/cryp.c
index d8f6333e675..94928f7efce 100644
--- a/drivers/crypto/ux500/cryp/cryp.c
+++ b/drivers/crypto/ux500/cryp/cryp.c
@@ -8,13 +8,13 @@
* License terms: GNU General Public License (GPL) version 2
*/
-#include <linux/kernel.h>
+#include <linux/delay.h>
#include <linux/device.h>
-#include <linux/types.h>
#include <linux/errno.h>
#include <linux/io.h>
+#include <linux/kernel.h>
#include <linux/spinlock.h>
-#include <linux/delay.h>
+#include <linux/types.h>
#include "cryp_p.h"
#include "cryp.h"
@@ -32,7 +32,6 @@ void cryp_wait_until_done(struct cryp_device_data *device_data)
* cryp_check - This routine checks Peripheral and PCell Id
* @device_data: Pointer to the device data struct for base address.
*/
-
int cryp_check(struct cryp_device_data *device_data)
{
if (NULL == device_data)
@@ -61,7 +60,6 @@ void cryp_reset(struct cryp_device_data *device_data)
{
writel(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
writel(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
- writel(CRYP_DIN_DEFAULT, &device_data->base->din);
writel(CRYP_KEY_DEFAULT, &device_data->base->key_1_l);
writel(CRYP_KEY_DEFAULT, &device_data->base->key_1_r);
@@ -94,12 +92,12 @@ void cryp_reset(struct cryp_device_data *device_data)
- * @cryp_activity: Enable/Disable functionality
+ * @cryp_crypen: Enable/Disable functionality
*/
void cryp_activity(struct cryp_device_data *device_data,
- enum cryp_activity cryp_activity)
+ enum cryp_crypen cryp_crypen)
{
CRYP_PUT_BITS(&device_data->base->cr,
- cryp_activity,
- CRYP_ACTIVITY_POS,
- CRYP_ACTIVITY_MASK);
+ cryp_crypen,
+ CRYP_CRYPEN_POS,
+ CRYP_CRYPEN_MASK);
}
/**
@@ -183,9 +181,9 @@ void cryp_cen_flush(struct cryp_device_data *device_data)
CRYP_SET_BITS(&device_data->base->cr,
CRYP_FIFO_FLUSH_MASK);
CRYP_PUT_BITS(&device_data->base->cr,
- CRYP_ACTIVITY_ENABLE,
- CRYP_ACTIVITY_POS,
- CRYP_ACTIVITY_MASK);
+ CRYP_CRYPEN_ENABLE,
+ CRYP_CRYPEN_POS,
+ CRYP_CRYPEN_MASK);
}
/**
@@ -221,12 +219,12 @@ int cryp_set_configuration(struct cryp_device_data *device_data,
(CRYP_ALGO_AES_CBC == p_cryp_config->algo_mode))) {
CRYP_PUT_BITS(&device_data->base->cr,
CRYP_ALGO_AES_ECB,
- CRYP_ALGO_POS,
- CRYP_ALGO_MASK);
+ CRYP_ALGOMODE_POS,
+ CRYP_ALGOMODE_MASK);
CRYP_PUT_BITS(&device_data->base->cr,
- CRYP_ACTIVITY_ENABLE,
- CRYP_ACTIVITY_POS,
- CRYP_ACTIVITY_MASK);
+ CRYP_CRYPEN_ENABLE,
+ CRYP_CRYPEN_POS,
+ CRYP_CRYPEN_MASK);
CRYP_PUT_BITS(&device_data->base->cr,
KSE_ENABLED,
CRYP_KSE_POS,
@@ -235,14 +233,19 @@ int cryp_set_configuration(struct cryp_device_data *device_data,
cryp_wait_until_done(device_data);
CRYP_PUT_BITS(&device_data->base->cr,
- CRYP_ACTIVITY_DISABLE,
- CRYP_ACTIVITY_POS,
- CRYP_ACTIVITY_MASK);
+ CRYP_CRYPEN_DISABLE,
+ CRYP_CRYPEN_POS,
+ CRYP_CRYPEN_MASK);
}
+
+ CRYP_PUT_BITS(&device_data->base->cr,
+ CRYP_CRYPEN_ENABLE,
+ CRYP_CRYPEN_POS,
+ CRYP_CRYPEN_MASK);
CRYP_PUT_BITS(&device_data->base->cr,
p_cryp_config->algo_mode,
- CRYP_ALGO_POS,
- CRYP_ALGO_MASK);
+ CRYP_ALGOMODE_POS,
+ CRYP_ALGOMODE_MASK);
CRYP_PUT_BITS(&device_data->base->cr,
p_cryp_config->encrypt_or_decrypt,
CRYP_ENC_DEC_POS,
@@ -279,8 +282,8 @@ int cryp_get_configuration(struct cryp_device_data *device_data,
((readl(&device_data->base->cr) & CRYP_DATA_TYPE_MASK) >>
CRYP_DATA_TYPE_POS);
p_cryp_config->algo_mode =
- ((readl(&device_data->base->cr) & CRYP_ALGO_MASK) >>
- CRYP_ALGO_POS);
+ ((readl(&device_data->base->cr) & CRYP_ALGOMODE_MASK) >>
+ CRYP_ALGOMODE_POS);
return 0;
}
@@ -334,13 +337,11 @@ int cryp_get_status(struct cryp_device_data *device_data)
* @device_data: Pointer to the device data struct for base address.
* @dma_req: Specifies the DMA request type value.
*/
-int cryp_configure_for_dma(struct cryp_device_data *device_data,
- enum cryp_dma_req_type dma_req)
+void cryp_configure_for_dma(struct cryp_device_data *device_data,
+ enum cryp_dma_req_type dma_req)
{
CRYP_SET_BITS(&device_data->base->dmacr,
(u32) dma_req);
-
- return 0;
}
/**
@@ -431,7 +432,7 @@ int cryp_configure_init_vector(struct cryp_device_data *device_data,
static void cryp_prep_ctx_mgmt(struct cryp_device_data *device_data)
{
cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);
- cryp_activity(device_data, CRYP_ACTIVITY_DISABLE);
+ cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
cryp_wait_until_done(device_data);
}
@@ -472,7 +473,7 @@ void cryp_save_device_context(struct cryp_device_data *device_data,
}
/**
- * cryp_restore_device_context - Retore hardware registers and
+ * cryp_restore_device_context - Restore hardware registers and
* other device context parameter
* @device_data: Pointer to the device data struct for base address.
* @ctx: Crypto device context
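A note on the bit helpers used throughout cryp.c: CRYP_SET_BITS and CRYP_PUT_BITS come from cryp_p.h, of which this diff only shows the renamed constants. Presumably they are plain read-modify-write helpers along these lines; this is an assumption for readability, not code from the patch:

/* Assumed shape of the helpers; the real definitions live in cryp_p.h. */
#define CRYP_SET_BITS(reg, mask) \
        writel(readl(reg) | (mask), (reg))

#define CRYP_PUT_BITS(reg, val, pos, mask) \
        writel((readl(reg) & ~(mask)) | \
               (((u32)(val) << (pos)) & (mask)), (reg))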
diff --git a/drivers/crypto/ux500/cryp/cryp.h b/drivers/crypto/ux500/cryp/cryp.h
index 1d098b6ae15..58ddb20442f 100644
--- a/drivers/crypto/ux500/cryp/cryp.h
+++ b/drivers/crypto/ux500/cryp/cryp.h
@@ -12,6 +12,7 @@
#define _CRYP_H_
#include <linux/completion.h>
+#include <linux/dmaengine.h>
#include <linux/klist.h>
#include <linux/mutex.h>
@@ -21,9 +22,9 @@
#define DEV_DBG_NAME "crypX crypX:"
/* CRYP enable/disable */
-enum cryp_activity {
- CRYP_ACTIVITY_DISABLE = 0,
- CRYP_ACTIVITY_ENABLE = 1
+enum cryp_crypen {
+ CRYP_CRYPEN_DISABLE = 0,
+ CRYP_CRYPEN_ENABLE = 1
};
/* CRYP Start Computation enable/disable */
@@ -130,7 +131,12 @@ enum cryp_dma_req_type {
CRYP_DMA_DISABLE_BOTH,
CRYP_DMA_ENABLE_IN_DATA,
CRYP_DMA_ENABLE_OUT_DATA,
- CRYP_DMA_ENABLE_BOTH_DIRECTION
+ CRYP_DMA_ENABLE_BOTH_DIRECTIONS
+};
+
+enum cryp_dma_channel {
+ CRYP_DMA_RX = 0,
+ CRYP_DMA_TX
};
/* Key registers */
@@ -207,6 +213,21 @@ struct cryp_device_context {
u32 dout;
};
+struct cryp_dma {
+ dma_cap_mask_t mask;
+ struct completion cryp_dma_complete;
+ struct dma_chan *chan_cryp2mem;
+ struct dma_chan *chan_mem2cryp;
+ struct stedma40_chan_cfg *cfg_cryp2mem;
+ struct stedma40_chan_cfg *cfg_mem2cryp;
+ int sg_src_len;
+ int sg_dst_len;
+ struct scatterlist *sg_src;
+ struct scatterlist *sg_dst;
+ int nents_src;
+ int nents_dst;
+};
+
/**
* struct cryp_device_data - structure for a cryp device.
* @base: Pointer to the hardware base address.
@@ -219,6 +240,7 @@ struct cryp_device_context {
* @ctx_lock: Lock for current_ctx.
* @current_ctx: Pointer to the currently allocated context.
* @list_node: For inclusion into a klist.
+ * @dma: The dma structure holding channel configuration.
* @power_state: TRUE = power state on, FALSE = power state off.
* @power_state_mutex: Mutex for power_state.
* @restore_dev_ctx: TRUE = saved ctx, FALSE = no saved ctx.
@@ -233,6 +255,7 @@ struct cryp_device_data {
struct spinlock ctx_lock;
struct cryp_ctx *current_ctx;
struct klist_node list_node;
+ struct cryp_dma dma;
bool power_state;
struct mutex power_state_mutex;
bool restore_dev_ctx;
@@ -247,7 +270,7 @@ int cryp_check(struct cryp_device_data *device_data);
void cryp_reset(struct cryp_device_data *device_data);
void cryp_activity(struct cryp_device_data *device_data,
- enum cryp_activity cryp_activity);
+ enum cryp_crypen cryp_crypen);
void cryp_start(struct cryp_device_data *device_data);
@@ -269,8 +292,8 @@ int cryp_set_configuration(struct cryp_device_data *device_data,
int cryp_get_configuration(struct cryp_device_data *device_data,
struct cryp_config *p_cryp_config);
-int cryp_configure_for_dma(struct cryp_device_data *device_data,
- enum cryp_dma_req_type dma_req);
+void cryp_configure_for_dma(struct cryp_device_data *device_data,
+ enum cryp_dma_req_type dma_req);
int cryp_configure_key_values(struct cryp_device_data *device_data,
enum cryp_key_reg_index key_reg_index,
diff --git a/drivers/crypto/ux500/cryp/cryp_core.c b/drivers/crypto/ux500/cryp/cryp_core.c
index 2ea7b0464b0..aa6d594602d 100644
--- a/drivers/crypto/ux500/cryp/cryp_core.c
+++ b/drivers/crypto/ux500/cryp/cryp_core.c
@@ -8,27 +8,32 @@
* License terms: GNU General Public License (GPL) version 2
*/
-#include <linux/module.h>
+#include <linux/clk.h>
+#include <linux/completion.h>
+#include <linux/crypto.h>
+#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
-#include <linux/clk.h>
+#include <linux/interrupt.h>
#include <linux/io.h>
+#include <linux/irqreturn.h>
+#include <linux/klist.h>
+#include <linux/module.h>
#include <linux/platform_device.h>
-#include <linux/interrupt.h>
-#include <linux/crypto.h>
-#include <linux/completion.h>
#include <linux/regulator/consumer.h>
-#include <linux/klist.h>
#include <linux/semaphore.h>
-#include <linux/mutex.h>
-#include <linux/irqreturn.h>
-#include <crypto/algapi.h>
#include <crypto/aes.h>
-#include <crypto/des.h>
+#include <crypto/algapi.h>
#include <crypto/ctr.h>
+#include <crypto/des.h>
+#include <crypto/scatterwalk.h>
+
+#include <plat/ste_dma40.h>
+#include <mach/crypto-ux500.h>
#include <mach/hardware.h>
+#include <mach/ste-dma40-db8500.h>
#include "cryp_p.h"
#include "cryp.h"
@@ -36,12 +41,13 @@
#define CRYP_MAX_KEY_SIZE 32
#define BYTES_PER_WORD 4
-static int debug;
-
static int cryp_mode;
static DEFINE_KLIST(cryp_device_list, NULL, NULL);
+static struct stedma40_chan_cfg *mem_to_engine;
+static struct stedma40_chan_cfg *engine_to_mem;
+
/**
* struct cryp_driver_data - data specific to the driver.
*
@@ -378,6 +384,270 @@ static int cfg_keys(struct cryp_ctx *ctx)
return cryp_error;
}
+static int cryp_setup_context(struct cryp_ctx *ctx,
+ struct cryp_device_data *device_data)
+{
+ if (ctx->updated)
+ cryp_restore_device_context(device_data, &ctx->dev_ctx);
+ else {
+ cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
+
+ if (cfg_keys(ctx) != 0) {
+ dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
+ __func__);
+ return -EPERM;
+ }
+
+ if ((ctx->iv) &&
+ (CRYP_ALGO_AES_ECB != ctx->config.algo_mode) &&
+ (CRYP_ALGO_DES_ECB != ctx->config.algo_mode) &&
+ (CRYP_ALGO_TDES_ECB != ctx->config.algo_mode)) {
+ if (cfg_ivs(device_data, ctx) != 0)
+ return -EPERM;
+ }
+
+ cryp_set_configuration(device_data, &ctx->config);
+ }
+
+ return 0;
+}
+
+
+static int cryp_get_device_data(struct cryp_ctx *ctx,
+ struct cryp_device_data **device_data)
+{
+ int ret;
+ struct klist_iter device_iterator;
+ struct klist_node *device_node;
+ struct cryp_device_data *local_device_data;
+ pr_debug(DEV_DBG_NAME " [%s]", __func__);
+
+ /* Wait until a device is available */
+ ret = down_interruptible(&driver_data.device_allocation);
+ if (ret)
+ return ret; /* Interrupted */
+
+ /* Select a device */
+ klist_iter_init(&driver_data.device_list, &device_iterator);
+
+ device_node = klist_next(&device_iterator);
+ while (device_node) {
+ local_device_data = container_of(device_node,
+ struct cryp_device_data, list_node);
+ spin_lock(&local_device_data->ctx_lock);
+ /* current_ctx allocates a device, NULL = unallocated */
+ if (local_device_data->current_ctx) {
+ device_node = klist_next(&device_iterator);
+ } else {
+ local_device_data->current_ctx = ctx;
+ ctx->device = local_device_data;
+ spin_unlock(&local_device_data->ctx_lock);
+ break;
+ }
+ spin_unlock(&local_device_data->ctx_lock);
+ }
+ klist_iter_exit(&device_iterator);
+
+ if (!device_node) {
+ /*
+ * No free device found.
+ * Since we succeeded in taking device_allocation with
+ * down_interruptible, this should not be able to happen.
+ * The number of available devices, held in device_allocation,
+ * is therefore intentionally left decremented by not calling
+ * up(device_allocation) here.
+ */
+ return -EBUSY;
+ }
+
+ *device_data = local_device_data;
+
+ return 0;
+}
+
+static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
+ struct device *dev)
+{
+ dma_cap_zero(device_data->dma.mask);
+ dma_cap_set(DMA_SLAVE, device_data->dma.mask);
+
+ device_data->dma.cfg_mem2cryp = mem_to_engine;
+ device_data->dma.chan_mem2cryp =
+ dma_request_channel(device_data->dma.mask,
+ stedma40_filter,
+ device_data->dma.cfg_mem2cryp);
+
+ device_data->dma.cfg_cryp2mem = engine_to_mem;
+ device_data->dma.chan_cryp2mem =
+ dma_request_channel(device_data->dma.mask,
+ stedma40_filter,
+ device_data->dma.cfg_cryp2mem);
+
+ init_completion(&device_data->dma.cryp_dma_complete);
+}
+
+static void cryp_dma_out_callback(void *data)
+{
+ struct cryp_ctx *ctx = (struct cryp_ctx *) data;
+ dev_dbg(ctx->device->dev, "[%s]: ", __func__);
+
+ complete(&ctx->device->dma.cryp_dma_complete);
+}
+
+static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
+ struct scatterlist *sg,
+ int len,
+ enum dma_data_direction direction)
+{
+ struct dma_async_tx_descriptor *desc;
+ struct dma_chan *channel = NULL;
+ dma_cookie_t cookie;
+
+ dev_dbg(ctx->device->dev, "[%s]: ", __func__);
+
+ switch (direction) {
+ case DMA_TO_DEVICE:
+ channel = ctx->device->dma.chan_mem2cryp;
+ ctx->device->dma.sg_src = sg;
+ ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
+ ctx->device->dma.sg_src,
+ ctx->device->dma.nents_src,
+ direction);
+
+ if (!ctx->device->dma.sg_src_len) {
+ dev_dbg(ctx->device->dev,
+ "[%s]: Could not map the sg list (TO_DEVICE)",
+ __func__);
+ return -EFAULT;
+ }
+
+ dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
+ "(TO_DEVICE)", __func__);
+
+ desc = channel->device->device_prep_slave_sg(channel,
+ ctx->device->dma.sg_src,
+ ctx->device->dma.sg_src_len,
+ direction,
+ DMA_CTRL_ACK);
+ break;
+
+ case DMA_FROM_DEVICE:
+ channel = ctx->device->dma.chan_cryp2mem;
+ ctx->device->dma.sg_dst = sg;
+
+ ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
+ ctx->device->dma.sg_dst,
+ ctx->device->dma.nents_dst,
+ direction);
+
+ if (!ctx->device->dma.sg_dst_len) {
+ dev_dbg(ctx->device->dev,
+ "[%s]: Could not map the sg list "
+ "(FROM_DEVICE)", __func__);
+ return -EFAULT;
+ }
+
+ dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
+ "(FROM_DEVICE)", __func__);
+
+ desc = channel->device->device_prep_slave_sg(channel,
+ ctx->device->dma.sg_dst,
+ ctx->device->dma.sg_dst_len,
+ direction,
+ DMA_CTRL_ACK |
+ DMA_PREP_INTERRUPT);
+
+ desc->callback = cryp_dma_out_callback;
+ desc->callback_param = ctx;
+ break;
+
+ default:
+ dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
+ __func__);
+ return -EFAULT;
+ }
+
+ cookie = desc->tx_submit(desc);
+ dma_async_issue_pending(channel);
+
+ return 0;
+}
+
+static void cryp_dma_done(struct cryp_ctx *ctx)
+{
+ struct dma_chan *chan;
+
+ dev_dbg(ctx->device->dev, "[%s]: ", __func__);
+
+ chan = ctx->device->dma.chan_mem2cryp;
+ chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+ dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
+ ctx->device->dma.sg_src_len, DMA_TO_DEVICE);
+
+ chan = ctx->device->dma.chan_cryp2mem;
+ chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
+ dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
+ ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
+}
+
+static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
+ int len)
+{
+ int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
+ dev_dbg(ctx->device->dev, "[%s]: ", __func__);
+
+ if (error) {
+ dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
+ "failed", __func__);
+ return error;
+ }
+
+ return len;
+}
+
+static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
+{
+ int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
+ if (error) {
+ dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
+ "failed", __func__);
+ return error;
+ }
+
+ return len;
+}
+
+static int cryp_polling_mode(struct cryp_ctx *ctx,
+ struct cryp_device_data *device_data)
+{
+ int i;
+ int ret = 0;
+ int remaining_length = ctx->datalen;
+
+ cryp_activity(device_data, CRYP_CRYPEN_ENABLE);
+ while (remaining_length > 0) {
+ for (i = 0; i < ctx->blocksize / BYTES_PER_WORD; i++) {
+ ret = cryp_write_indata(device_data,
+ *((u32 *)ctx->indata));
+ if (ret)
+ goto out;
+ ctx->indata += BYTES_PER_WORD;
+ remaining_length -= BYTES_PER_WORD;
+ }
+ cryp_wait_until_done(device_data);
+ for (i = 0; i < ctx->blocksize / BYTES_PER_WORD; i++) {
+ ret = cryp_read_outdata(device_data,
+ (u32 *)ctx->outdata);
+ if (ret)
+ goto out;
+ ctx->outdata += BYTES_PER_WORD;
+ }
+ cryp_wait_until_done(device_data);
+ }
+out:
+ return ret;
+}
+
static int cryp_disable_power(
struct device *dev,
struct cryp_device_data *device_data,
@@ -460,99 +730,34 @@ out:
static int hw_crypt_noxts(struct cryp_ctx *ctx)
{
int ret;
- int i;
- int cryp_error = -EINVAL;
-
- const u8 *indata;
- u8 *outdata;
- u32 datalen;
- u32 outlen;
-
- struct klist_iter device_iterator;
- struct klist_node *device_node;
struct cryp_device_data *device_data = NULL;
- indata = ctx->indata;
- outdata = ctx->outdata;
- datalen = ctx->datalen;
- outlen = datalen;
+ const u8 *indata = ctx->indata;
+ u8 *outdata = ctx->outdata;
+ u32 datalen = ctx->datalen;
+ u32 outlen = datalen;
pr_debug(DEV_DBG_NAME " [%s]", __func__);
- pr_debug(DEV_DBG_NAME " [%s] key size: %d", __func__,
- ctx->config.key_size);
ctx->outlen = ctx->datalen;
ctx->config.key_access = CRYP_STATE_ENABLE;
ctx->config.data_type = CRYP_DATA_TYPE_8BIT_SWAP;
- /* Wait until a device is available */
- ret = down_interruptible(&driver_data.device_allocation);
+ ret = cryp_get_device_data(ctx, &device_data);
if (ret)
- return ret; /* Interrupted */
-
- /* Select a device */
- klist_iter_init(&driver_data.device_list, &device_iterator);
-
- device_node = klist_next(&device_iterator);
- while (device_node) {
- device_data = container_of(device_node,
- struct cryp_device_data, list_node);
- spin_lock(&device_data->ctx_lock);
- /* current_ctx allocates a device, NULL = unallocated */
- if (device_data->current_ctx) {
- device_node = klist_next(&device_iterator);
- } else {
- device_data->current_ctx = ctx;
- ctx->device = device_data;
- spin_unlock(&device_data->ctx_lock);
- break;
- }
- spin_unlock(&device_data->ctx_lock);
- }
- klist_iter_exit(&device_iterator);
-
- if (!device_node) {
- /* No free device found */
- up(&driver_data.device_allocation);
- return -EBUSY;
- }
+ return ret;
- /* Enable device power (and clock) */
ret = cryp_enable_power(device_data->dev, device_data, false);
if (ret) {
dev_err(device_data->dev, "[%s]: "
- "cryp_enable_power() failed!", __func__);
+ "cryp_enable_power() failed!", __func__);
goto out;
}
cryp_reset(device_data);
- if (ctx->updated)
- cryp_restore_device_context(device_data, &ctx->dev_ctx);
- else {
- cryp_activity(device_data, CRYP_ACTIVITY_DISABLE);
-
- cryp_error = cfg_keys(ctx);
- if (cryp_error != 0) {
- dev_err(ctx->device->dev, "[%s]: CRYP_Configure Keys "
- "failed!", __func__);
- ret = -EPERM;
- goto out_power;
- }
-
- if ((ctx->iv) &&
- (CRYP_ALGO_AES_ECB != ctx->config.algo_mode) &&
- (CRYP_ALGO_DES_ECB != ctx->config.algo_mode) &&
- (CRYP_ALGO_TDES_ECB != ctx->config.algo_mode)) {
-
- cryp_error = cfg_ivs(device_data, ctx);
- if (cryp_error != 0) {
- ret = -EPERM;
- goto out_power;
- }
- }
-
- cryp_set_configuration(device_data, &ctx->config);
- }
+ ret = cryp_setup_context(ctx, device_data);
+ if (ret)
+ goto out_power;
cryp_flush_inoutfifo(device_data);
@@ -562,46 +767,22 @@ static int hw_crypt_noxts(struct cryp_ctx *ctx)
cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO);
cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO);
- cryp_activity(device_data, CRYP_ACTIVITY_ENABLE);
+ cryp_activity(device_data, CRYP_CRYPEN_ENABLE);
wait_for_completion(&ctx->device->cryp_irq_complete);
- } else if (cryp_mode == CRYP_MODE_POLLING) {
- cryp_activity(device_data, CRYP_ACTIVITY_ENABLE);
-
- while (ctx->datalen > 0) {
- for (i = 0; i < ctx->blocksize / 4; i++) {
- ret = cryp_write_indata(device_data,
- *((u32 *)ctx->indata));
-
- /* Catch errors */
- if (ret != 0)
- goto out_power;
-
- ctx->indata += 4;
- ctx->datalen -= 4;
- }
-
- cryp_wait_until_done(device_data);
-
- for (i = 0; i < ctx->blocksize / 4; i++) {
- ret = cryp_read_outdata(device_data,
- (u32 *)ctx->outdata);
-
- /* Catch errors */
- if (ret != 0)
- goto out_power;
-
- ctx->outdata += 4;
- ctx->outlen -= 4;
- }
- cryp_wait_until_done(device_data);
- }
-
- } else if (cryp_mode == CRYP_MODE_DMA) {
- dev_err(ctx->device->dev, "[%s]: DMA is not supported yet!",
- __func__);
- ret = -EPERM;
- goto out_power;
+ } else if (cryp_mode == CRYP_MODE_POLLING ||
+ cryp_mode == CRYP_MODE_DMA) {
+ /*
+ * DMA is handled in this case as well, because even with
+ * cryp_mode = 2 (DMA) the separate DMA routines are only used
+ * for cipher-/plaintext larger than the blocksize. For the
+ * normal CRYPTO_ALG_TYPE_CIPHER we still use polling mode,
+ * since the overhead of setting up a DMA transfer eats up the
+ * benefit of using it.
+ */
+ ret = cryp_polling_mode(ctx, device_data);
+ if (ret)
+ goto out_power;
} else {
dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
__func__);
@@ -615,16 +796,20 @@ static int hw_crypt_noxts(struct cryp_ctx *ctx)
ctx->updated = 1;
out_power:
- /* Disable power (and clock) */
if (cryp_disable_power(device_data->dev, device_data, false))
dev_err(device_data->dev, "[%s]: "
- "cryp_disable_power() failed!", __func__);
+ "cryp_disable_power() failed!", __func__);
out:
/* Release the device */
spin_lock(&device_data->ctx_lock);
device_data->current_ctx = NULL;
ctx->device = NULL;
spin_unlock(&device_data->ctx_lock);
+
+ /*
+ * The down_interruptible part for this semaphore is called in
+ * cryp_get_device_data.
+ */
up(&driver_data.device_allocation);
ctx->indata = indata;
@@ -635,6 +820,92 @@ out:
return ret;
}
+static int get_nents(struct scatterlist *sg, int nbytes)
+{
+ int nents = 0;
+
+ while (nbytes > 0) {
+ nbytes -= sg->length;
+ sg = scatterwalk_sg_next(sg);
+ nents++;
+ }
+
+ return nents;
+}
+
+static int ablk_dma_crypt(struct ablkcipher_request *areq)
+{
+ struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
+ struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+ struct cryp_device_data *device_data;
+
+ int bytes_written = 0;
+ int bytes_read = 0;
+ int ret;
+
+ pr_debug(DEV_DBG_NAME " [%s]", __func__);
+
+ ctx->config.key_access = CRYP_STATE_ENABLE;
+ ctx->config.data_type = CRYP_DATA_TYPE_8BIT_SWAP;
+ ctx->datalen = areq->nbytes;
+ ctx->outlen = areq->nbytes;
+
+ ret = cryp_get_device_data(ctx, &device_data);
+ if (ret)
+ return ret;
+
+ ret = cryp_enable_power(device_data->dev, device_data, false);
+ if (ret) {
+ dev_err(device_data->dev, "[%s]: "
+ "cryp_enable_power() failed!", __func__);
+ goto out;
+ }
+
+ cryp_reset(device_data);
+
+ ret = cryp_setup_context(ctx, device_data);
+ if (ret)
+ goto out_power;
+
+ /* We have the device now, so store the nents in the dma struct. */
+ ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
+ ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
+
+ /* Enable DMA in- and output. */
+ cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);
+
+ bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
+ bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);
+
+ wait_for_completion(&ctx->device->dma.cryp_dma_complete);
+ cryp_dma_done(ctx);
+
+ cryp_save_device_context(device_data, &ctx->dev_ctx);
+ ctx->updated = 1;
+
+out_power:
+ if (cryp_disable_power(device_data->dev, device_data, false))
+ dev_err(device_data->dev, "[%s]: "
+ "cryp_disable_power() failed!", __func__);
+
+out:
+ spin_lock(&device_data->ctx_lock);
+ device_data->current_ctx = NULL;
+ ctx->device = NULL;
+ spin_unlock(&device_data->ctx_lock);
+
+ /*
+ * The down_interruptible part for this semaphore is called in
+ * cryp_get_device_data.
+ */
+ up(&driver_data.device_allocation);
+
+ if (unlikely(bytes_written != bytes_read))
+ ret = -EPERM;
+
+ return ret;
+}
+
static int blk_crypt(struct blkcipher_desc *desc,
struct scatterlist *dst, struct scatterlist *src,
unsigned int nbytes)
@@ -675,6 +946,84 @@ out:
return ret;
}
+static int ablk_crypt(struct ablkcipher_request *areq)
+{
+ struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
+ struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+ struct ablkcipher_walk walk;
+ unsigned long src_paddr;
+ unsigned long dst_paddr;
+ int ret;
+ int nbytes;
+
+ ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
+ ret = ablkcipher_walk_phys(areq, &walk);
+
+ if (ret) {
+ pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
+ __func__);
+ goto out;
+ }
+
+ while ((nbytes = walk.nbytes) > 0) {
+ ctx->iv = walk.iv;
+ src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
+ ctx->indata = phys_to_virt(src_paddr);
+
+ dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
+ ctx->outdata = phys_to_virt(dst_paddr);
+
+ ctx->datalen = nbytes - (nbytes % ctx->blocksize);
+ ret = hw_crypt_noxts(ctx);
+
+ if (ret)
+ goto out;
+
+ nbytes -= ctx->datalen;
+ ret = ablkcipher_walk_done(areq, &walk, nbytes);
+ }
+
+ ablkcipher_walk_complete(&walk);
+out:
+
+ return ret;
+}
+
+static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
+ const u8 *key, unsigned int keylen)
+{
+ struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
+ u32 *flags = &cipher->base.crt_flags;
+
+ pr_debug(DEV_DBG_NAME " [%s]", __func__);
+
+ switch (keylen) {
+ case AES_KEYSIZE_128:
+ ctx->config.key_size = CRYP_KEY_SIZE_128;
+ break;
+
+ case AES_KEYSIZE_192:
+ ctx->config.key_size = CRYP_KEY_SIZE_192;
+ break;
+
+ case AES_KEYSIZE_256:
+ ctx->config.key_size = CRYP_KEY_SIZE_256;
+ break;
+
+ default:
+ pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
+ *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
+ return -EINVAL;
+ }
+
+ memcpy(ctx->key, key, keylen);
+ ctx->keylen = keylen;
+
+ ctx->updated = 0;
+
+ return 0;
+}
+
static int aes_setkey(struct crypto_tfm *tfm, const u8 *key,
unsigned int keylen)
{
@@ -902,30 +1251,41 @@ static void des3_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
__func__);
}
-static int aes_ecb_encrypt(struct blkcipher_desc *desc,
- struct scatterlist *dst, struct scatterlist *src,
- unsigned int nbytes)
+
+static int aes_ecb_encrypt(struct ablkcipher_request *areq)
{
- struct cryp_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+ struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
+ struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
pr_debug(DEV_DBG_NAME " [%s]", __func__);
ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_ENCRYPT;
ctx->config.algo_mode = CRYP_ALGO_AES_ECB;
- return blk_crypt(desc, dst, src, nbytes);
+ ctx->blocksize = AES_BLOCK_SIZE;
+
+ if (cryp_mode == CRYP_MODE_DMA)
+ return ablk_dma_crypt(areq);
+
+ /* For everything except DMA, we run the non-DMA version. */
+ return ablk_crypt(areq);
}
-static int aes_ecb_decrypt(struct blkcipher_desc *desc,
- struct scatterlist *dst, struct scatterlist *src,
- unsigned int nbytes)
+static int aes_ecb_decrypt(struct ablkcipher_request *areq)
{
- struct cryp_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
+ struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
+ struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
pr_debug(DEV_DBG_NAME " [%s]", __func__);
ctx->config.encrypt_or_decrypt = CRYP_ALGORITHM_DECRYPT;
ctx->config.algo_mode = CRYP_ALGO_AES_ECB;
- return blk_crypt(desc, dst, src, nbytes);
+ ctx->blocksize = AES_BLOCK_SIZE;
+
+ if (cryp_mode == CRYP_MODE_DMA)
+ return ablk_dma_crypt(areq);
+
+ /* For everything except DMA, we run the non-DMA version. */
+ return ablk_crypt(areq);
}
static int aes_cbc_encrypt(struct blkcipher_desc *desc,
@@ -1163,18 +1523,19 @@ static struct crypto_alg aes_ecb_alg = {
.cra_name = "ecb(aes)",
.cra_driver_name = "ecb-aes-u8500",
.cra_priority = 100,
- .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
+ .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
+ CRYPTO_ALG_ASYNC,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct cryp_ctx),
.cra_alignmask = 3,
- .cra_type = &crypto_blkcipher_type,
+ .cra_type = &crypto_ablkcipher_type,
.cra_module = THIS_MODULE,
.cra_list = LIST_HEAD_INIT(aes_ecb_alg.cra_list),
.cra_u = {
- .blkcipher = {
+ .ablkcipher = {
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
- .setkey = aes_setkey,
+ .setkey = ablkcipher_setkey,
.encrypt = aes_ecb_encrypt,
.decrypt = aes_ecb_decrypt,
}
@@ -1414,6 +1775,12 @@ static int u8500_cryp_probe(struct platform_device *pdev)
device_data->dev = dev;
device_data->current_ctx = NULL;
+ /* Grab the DMA configuration from platform data. */
+ mem_to_engine = &((struct cryp_platform_data *)
+ dev->platform_data)->mem_to_engine;
+ engine_to_mem = &((struct cryp_platform_data *)
+ dev->platform_data)->engine_to_mem;
+
res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
if (!res) {
dev_err(dev, "[%s]: platform_get_resource() failed",
@@ -1496,6 +1863,11 @@ static int u8500_cryp_probe(struct platform_device *pdev)
goto out_power;
}
+ init_completion(&device_data->cryp_irq_complete);
+
+ if (cryp_mode == CRYP_MODE_DMA)
+ cryp_dma_setup_channel(device_data, dev);
+
platform_set_drvdata(pdev, device_data);
/* Put the new device into the device list... */
@@ -1512,8 +1884,7 @@ static int u8500_cryp_probe(struct platform_device *pdev)
}
if (cryp_disable_power(&pdev->dev, device_data, false))
- dev_err(dev, "[u8500_cryp]: cryp_disable_power()"
- " failed!");
+ dev_err(dev, "[%s]: cryp_disable_power() failed!", __func__);
return 0;
@@ -1547,8 +1918,8 @@ static int u8500_cryp_remove(struct platform_device *pdev)
dev_dbg(&pdev->dev, "[%s]", __func__);
device_data = platform_get_drvdata(pdev);
if (!device_data) {
- dev_err(&pdev->dev, "[%s]: "
- "platform_get_drvdata() failed!", __func__);
+ dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
+ __func__);
return -ENOMEM;
}
@@ -1579,16 +1950,16 @@ static int u8500_cryp_remove(struct platform_device *pdev)
res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
if (!res_irq)
- dev_err(&pdev->dev, "[%s]: "
- "IORESOURCE_IRQ, unavailable", __func__);
+ dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
+ __func__);
else {
disable_irq(res_irq->start);
free_irq(res_irq->start, device_data);
}
if (cryp_disable_power(&pdev->dev, device_data, false))
- dev_err(&pdev->dev, "[%s]: "
- "cryp_disable_power() failed", __func__);
+ dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
+ __func__);
clk_put(device_data->clk);
regulator_put(device_data->pwr_regulator);
@@ -1613,8 +1984,8 @@ static void u8500_cryp_shutdown(struct platform_device *pdev)
device_data = platform_get_drvdata(pdev);
if (!device_data) {
- dev_err(&pdev->dev, "[%s]: "
- "platform_get_drvdata() failed!", __func__);
+ dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
+ __func__);
return;
}
@@ -1624,7 +1995,7 @@ static void u8500_cryp_shutdown(struct platform_device *pdev)
if (!device_data->current_ctx) {
if (down_trylock(&driver_data.device_allocation))
dev_dbg(&pdev->dev, "[%s]: Cryp still in use!"
- "Shutting down anyway...", __func__);
+ "Shutting down anyway...", __func__);
/**
* (Allocate the device)
* Need to set this to non-null (dummy) value,
@@ -1644,16 +2015,16 @@ static void u8500_cryp_shutdown(struct platform_device *pdev)
res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
if (!res_irq)
- dev_err(&pdev->dev, "[%s]: "
- "IORESOURCE_IRQ, unavailable", __func__);
+ dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
+ __func__);
else {
disable_irq(res_irq->start);
free_irq(res_irq->start, device_data);
}
if (cryp_disable_power(&pdev->dev, device_data, false))
- dev_err(&pdev->dev, "[%s]: "
- "cryp_disable_power() failed", __func__);
+ dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
+ __func__);
}
@@ -1669,15 +2040,15 @@ static int u8500_cryp_suspend(struct platform_device *pdev, pm_message_t state)
/* Handle state? */
device_data = platform_get_drvdata(pdev);
if (!device_data) {
- dev_err(&pdev->dev, "[%s]: "
- "platform_get_drvdata() failed!", __func__);
+ dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
+ __func__);
return -ENOMEM;
}
res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
if (!res_irq)
- dev_err(&pdev->dev, "[%s]: "
- "IORESOURCE_IRQ, unavailable", __func__);
+ dev_err(&pdev->dev, "[%s]: IORESOURCE_IRQ, unavailable",
+ __func__);
else
disable_irq(res_irq->start);
@@ -1696,9 +2067,7 @@ static int u8500_cryp_suspend(struct platform_device *pdev, pm_message_t state)
ret = cryp_disable_power(&pdev->dev, device_data, true);
if (ret)
- dev_err(&pdev->dev, "[%s]: "
- "cryp_disable_power() failed", __func__);
-
+ dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed", __func__);
return ret;
}
@@ -1714,8 +2083,8 @@ static int u8500_cryp_resume(struct platform_device *pdev)
device_data = platform_get_drvdata(pdev);
if (!device_data) {
- dev_err(&pdev->dev, "[%s]: "
- "platform_get_drvdata() failed!", __func__);
+ dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
+ __func__);
return -ENOMEM;
}
@@ -1731,8 +2100,8 @@ static int u8500_cryp_resume(struct platform_device *pdev)
ret = cryp_enable_power(&pdev->dev, device_data, true);
if (ret)
- dev_err(&pdev->dev, "[%s]: "
- "cryp_enable_power() failed!", __func__);
+ dev_err(&pdev->dev, "[%s]: cryp_enable_power() failed!",
+ __func__);
else {
res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
if (res_irq)
@@ -1775,7 +2144,6 @@ module_init(u8500_cryp_mod_init);
module_exit(u8500_cryp_mod_fini);
module_param(cryp_mode, int, 0);
-module_param(debug, int, 0);
MODULE_DESCRIPTION("Driver for ST-Ericsson U8500 CRYP crypto engine.");
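Usage note: cryp_mode remains a module parameter (defaulting to 0). Judging from the comment in hw_crypt_noxts above, cryp_mode = 2 selects CRYP_MODE_DMA, so loading the module with cryp_mode=2 (for example, insmod cryp.ko cryp_mode=2; the module file name is assumed here) exercises the new DMA paths, while other values keep the interrupt or polling modes.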
diff --git a/drivers/crypto/ux500/cryp/cryp_p.h b/drivers/crypto/ux500/cryp/cryp_p.h
index 7d72b7b5651..966de4633cc 100644
--- a/drivers/crypto/ux500/cryp/cryp_p.h
+++ b/drivers/crypto/ux500/cryp/cryp_p.h
@@ -71,9 +71,9 @@
#define CRYP_START_MASK BIT(12)
#define CRYP_INIT_MASK BIT(13)
#define CRYP_FIFO_FLUSH_MASK BIT(14)
-#define CRYP_ACTIVITY_MASK BIT(15)
+#define CRYP_CRYPEN_MASK BIT(15)
#define CRYP_INFIFO_READY_MASK (BIT(0) | BIT(1))
-#define CRYP_ALGO_MASK (BIT(5) | BIT(4) | BIT(3))
+#define CRYP_ALGOMODE_MASK (BIT(5) | BIT(4) | BIT(3))
#define CRYP_DATA_TYPE_MASK (BIT(7) | BIT(6))
#define CRYP_KEY_SIZE_MASK (BIT(9) | BIT(8))
@@ -82,7 +82,7 @@
*/
#define CRYP_PRLG_POS 1
#define CRYP_ENC_DEC_POS 2
-#define CRYP_ALGO_POS 3
+#define CRYP_ALGOMODE_POS 3
#define CRYP_SR_BUSY_POS 4
#define CRYP_DATA_TYPE_POS 6
#define CRYP_KEY_SIZE_POS 8
@@ -90,7 +90,7 @@
#define CRYP_KSE_POS 11
#define CRYP_START_POS 12
#define CRYP_INIT_POS 13
-#define CRYP_ACTIVITY_POS 15
+#define CRYP_CRYPEN_POS 15
/**
* CRYP Status register