Diffstat (limited to 'include/crypto/algapi.h')
-rw-r--r--	include/crypto/algapi.h	| 92
1 file changed, 74 insertions(+), 18 deletions(-)
diff --git a/include/crypto/algapi.h b/include/crypto/algapi.h
index c9fe145f7dd3..eeafd21afb44 100644
--- a/include/crypto/algapi.h
+++ b/include/crypto/algapi.h
@@ -15,6 +15,7 @@
 #include <linux/crypto.h>
 #include <linux/list.h>
 #include <linux/kernel.h>
+#include <linux/kthread.h>
 #include <linux/skbuff.h>
 
 struct crypto_aead;
@@ -128,6 +129,75 @@ struct ablkcipher_walk {
 	unsigned int		blocksize;
 };
 
+#define ENGINE_NAME_LEN	30
+/*
+ * struct crypto_engine - crypto hardware engine
+ * @name: the engine name
+ * @idling: the engine is entering idle state
+ * @busy: request pump is busy
+ * @running: the engine is running
+ * @cur_req_prepared: current request is prepared
+ * @list: link with the global crypto engine list
+ * @queue_lock: spinlock to synchronise access to the request queue
+ * @queue: the crypto queue of the engine
+ * @rt: whether this queue is set to run as a realtime task
+ * @prepare_crypt_hardware: a request will soon arrive from the queue
+ * so the subsystem requests the driver to prepare the hardware
+ * by issuing this call
+ * @unprepare_crypt_hardware: there are currently no more requests on the
+ * queue so the subsystem notifies the driver that it may relax the
+ * hardware by issuing this call
+ * @prepare_request: do any preparation needed before handling the current request
+ * @unprepare_request: undo any work done by prepare_request()
+ * @crypt_one_request: do the encryption for the current request
+ * @kworker: thread struct for the request pump
+ * @kworker_task: pointer to the task for the request pump kworker thread
+ * @pump_requests: work struct for scheduling work to the request pump
+ * @priv_data: the engine private data
+ * @cur_req: the request currently being processed
+ */
+struct crypto_engine {
+	char			name[ENGINE_NAME_LEN];
+	bool			idling;
+	bool			busy;
+	bool			running;
+	bool			cur_req_prepared;
+
+	struct list_head	list;
+	spinlock_t		queue_lock;
+	struct crypto_queue	queue;
+
+	bool			rt;
+
+	int (*prepare_crypt_hardware)(struct crypto_engine *engine);
+	int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
+
+	int (*prepare_request)(struct crypto_engine *engine,
+			       struct ablkcipher_request *req);
+	int (*unprepare_request)(struct crypto_engine *engine,
+				 struct ablkcipher_request *req);
+	int (*crypt_one_request)(struct crypto_engine *engine,
+				 struct ablkcipher_request *req);
+
+	struct kthread_worker           kworker;
+	struct task_struct              *kworker_task;
+	struct kthread_work             pump_requests;
+
+	void				*priv_data;
+	struct ablkcipher_request	*cur_req;
+};
+
+int crypto_transfer_request(struct crypto_engine *engine,
+			    struct ablkcipher_request *req, bool need_pump);
+int crypto_transfer_request_to_engine(struct crypto_engine *engine,
+				      struct ablkcipher_request *req);
+void crypto_finalize_request(struct crypto_engine *engine,
+			     struct ablkcipher_request *req, int err);
+int crypto_engine_start(struct crypto_engine *engine);
+int crypto_engine_stop(struct crypto_engine *engine);
+struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
+int crypto_engine_exit(struct crypto_engine *engine);
+
 extern const struct crypto_type crypto_ablkcipher_type;
 extern const struct crypto_type crypto_blkcipher_type;
 
@@ -184,6 +254,10 @@ int crypto_enqueue_request(struct crypto_queue *queue,
 			   struct crypto_async_request *request);
 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
+static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
+{
+	return queue->qlen;
+}
 
 /* These functions require the input/output to be aligned as u32. */
 void crypto_inc(u8 *a, unsigned int size);
@@ -275,24 +349,6 @@ static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
 	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
 }
 
-static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
-{
-	u32 type = CRYPTO_ALG_TYPE_HASH;
-	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;
-
-	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
-}
-
-static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
-{
-	return crypto_tfm_ctx(&tfm->base);
-}
-
-static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
-{
-	return crypto_tfm_ctx_aligned(&tfm->base);
-}
-
 static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
 				       struct scatterlist *dst,
 				       struct scatterlist *src,
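
For readers new to this interface, here is a minimal sketch of how a driver could hook into it, based only on the declarations above. The my_* names and struct my_engine_priv are invented for illustration, and assigning the callbacks directly on the struct crypto_engine fields is an assumption drawn from the fields being exposed in this header, not a claim about any particular driver.

/* Illustration only: the my_* identifiers are hypothetical. */
#include <linux/device.h>
#include <crypto/algapi.h>

struct my_engine_priv {
	struct device		*dev;
	struct crypto_engine	*engine;
};

/* Called from the engine's pump thread for each dequeued request. */
static int my_crypt_one_request(struct crypto_engine *engine,
				struct ablkcipher_request *req)
{
	int err = 0;

	/*
	 * Program the hardware here.  Real drivers typically start a DMA
	 * transfer and call crypto_finalize_request() from the completion
	 * interrupt; doing it synchronously keeps the sketch short.
	 */
	crypto_finalize_request(engine, req, err);
	return 0;
}

static int my_engine_setup(struct my_engine_priv *priv, struct device *dev)
{
	priv->dev = dev;

	/* rt = false: no realtime priority needed for the pump thread. */
	priv->engine = crypto_engine_alloc_init(dev, false);
	if (!priv->engine)
		return -ENOMEM;

	priv->engine->priv_data = priv;
	priv->engine->crypt_one_request = my_crypt_one_request;

	/* Start the pump so queued requests get processed. */
	return crypto_engine_start(priv->engine);
}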

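The submission side, sketched under the same assumptions, is equally small: the algorithm's asynchronous entry point only hands the request over to the engine, and the kthread_worker pump takes care of calling prepare_request(), crypt_one_request() and, through crypto_finalize_request(), the request's completion callback.

/* Hypothetical .encrypt entry point; my_priv is assumed to be set in probe. */
static struct my_engine_priv *my_priv;

static int my_aes_encrypt(struct ablkcipher_request *req)
{
	/*
	 * Queue the request and kick the pump.  Like other asynchronous
	 * crypto entry points this is expected to return -EINPROGRESS,
	 * or -EBUSY when the request was backlogged.
	 */
	return crypto_transfer_request_to_engine(my_priv->engine, req);
}

The crypto_queue_len() helper added above simply exposes the queue's current length (qlen), so callers can check how much work is still pending on the engine's queue.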