Skip to content

Commit

Permalink
Add SHA256 and SHA512 fast implementations
Browse files Browse the repository at this point in the history
Signed-off-by: Jan Kasiak <[email protected]>
  • Loading branch information
cybojanek committed Oct 17, 2021
1 parent 5f42c34 commit 89e103b
Show file tree
Hide file tree
Showing 9 changed files with 3,186 additions and 1 deletion.
8 changes: 7 additions & 1 deletion lib/libicp/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,13 @@ ASM_SOURCES_AS = \
asm-x86_64/modes/ghash-x86_64.S \
asm-x86_64/sha1/sha1-x86_64.S \
asm-x86_64/sha2/sha256_impl.S \
asm-x86_64/sha2/sha512_impl.S
asm-x86_64/sha2/sha256_avx.S \
asm-x86_64/sha2/sha256_ssse3.S \
asm-x86_64/sha2/sha256_ni.S \
asm-x86_64/sha2/sha512_impl.S \
asm-x86_64/sha2/sha512_avx.S \
asm-x86_64/sha2/sha512_avx2.S \
asm-x86_64/sha2/sha512_ssse3.S
else
ASM_SOURCES_C =
ASM_SOURCES_AS =
Expand Down
6 changes: 6 additions & 0 deletions module/icp/Makefile.in
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,13 @@ $(MODULE)-$(CONFIG_X86_64) += asm-x86_64/modes/aesni-gcm-x86_64.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/modes/ghash-x86_64.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha1/sha1-x86_64.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha256_impl.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha256_avx.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha256_ssse3.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha256_ni.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha512_impl.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha512_avx.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha512_avx2.o
$(MODULE)-$(CONFIG_X86_64) += asm-x86_64/sha2/sha512_ssse3.o

$(MODULE)-$(CONFIG_X86) += algs/modes/gcm_pclmulqdq.o
$(MODULE)-$(CONFIG_X86) += algs/aes/aes_impl_aesni.o
Expand Down
92 changes: 92 additions & 0 deletions module/icp/algs/sha2/sha2.c
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ static void Encode64(uint8_t *, uint64_t *, size_t);
/* userspace only supports the generic version */
#if defined(__amd64) && defined(_KERNEL)

#include <sys/simd.h>

/*
 * Function-pointer signatures for the x86_64 SHA-2 assembly block
 * transforms: fold `num` input blocks from `in` into the hash `state`
 * (32-bit words for SHA-256, 64-bit words for SHA-512).
 * NOTE(review): `num` is presumably a count of 64-/128-byte message
 * blocks, not bytes — confirm against the assembly entry points.
 */
typedef void (*sha256_block_f)(uint32_t *state, const void *in, size_t num);
typedef void (*sha512_block_f)(uint64_t *state, const void *in, size_t num);

Expand Down Expand Up @@ -90,12 +92,57 @@ static alg_impl_ops_t sha256_x86_64 = {
sha256_x86_64_transform, alg_impl_will_always_work, 1, "x86_64"};
#endif

#if defined(__amd64) && defined(HAVE_AVX)
/*
 * Usability gate for the AVX SHA-256 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * AVX support (zfs_avx_available()).
 */
static boolean_t
sha256_avx_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_avx_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha256_avx.S). */
extern void sha256_avx_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha256_avx = {
	sha256_avx_transform, sha256_avx_will_work, 10, "sha-avx"};
#endif

#if defined(__amd64) && defined(HAVE_SSSE3)
/*
 * Usability gate for the SSSE3 SHA-256 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * SSSE3 support (zfs_ssse3_available()).
 */
static boolean_t
sha256_ssse3_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_ssse3_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha256_ssse3.S). */
extern void sha256_ssse3_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha256_ssse3 = {
	sha256_ssse3_transform, sha256_ssse3_will_work, 30, "sha-ssse3"};
#endif

#if defined(__amd64) && defined(HAVE_SHA)
/*
 * Usability gate for the SHA-NI SHA-256 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * the SHA extensions (zfs_sha_available()).
 */
static boolean_t
sha256_ni_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_sha_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha256_ni.S). */
extern void sha256_ni_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha256_ni = {
	sha256_ni_transform, sha256_ni_will_work, 40, "sha-ni"};
#endif

/* All compiled in implementations */
static const alg_impl_ops_t *sha256_all_impl[] = {
/* Portable C implementation; no build-time gate. */
&sha256_impl_generic,
#if defined(__amd64)
/* Plain x86_64 assembly. */
&sha256_x86_64,
#endif
#if defined(__amd64) && defined(HAVE_AVX)
/* AVX assembly; selectable only when the will_work check passes. */
&sha256_avx,
#endif
#if defined(__amd64) && defined(HAVE_SSSE3)
/* SSSE3 assembly. */
&sha256_ssse3,
#endif
#if defined(__amd64) && defined(HAVE_SHA)
/* SHA-NI (SHA extensions) assembly. */
&sha256_ni,
#endif
};

/*
 * Implementations supported on the running system — presumably a
 * filtered copy of sha256_all_impl built at init; confirm against the
 * (not shown) initialization code.
 */
static alg_impl_ops_t *sha256_supp_impl[ARRAY_SIZE(sha256_all_impl)];
Expand Down Expand Up @@ -139,12 +186,57 @@ static alg_impl_ops_t sha512_x86_64 = {
sha512_x86_64_transform, alg_impl_will_always_work, 1, "x86_64"};
#endif

#if defined(__amd64) && defined(HAVE_AVX)
/*
 * Usability gate for the AVX SHA-512 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * AVX support (zfs_avx_available()).
 */
static boolean_t
sha512_avx_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_avx_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha512_avx.S). */
extern void sha512_avx_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha512_avx = {
	sha512_avx_transform, sha512_avx_will_work, 10, "sha-avx"};
#endif

#if defined(__amd64) && defined(HAVE_AVX2)
/*
 * Usability gate for the AVX2 SHA-512 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * AVX2 support (zfs_avx2_available()).
 */
static boolean_t
sha512_avx2_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_avx2_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha512_avx2.S). */
extern void sha512_avx2_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha512_avx2 = {
	sha512_avx2_transform, sha512_avx2_will_work, 20, "sha-avx2"};
#endif

#if defined(__amd64) && defined(HAVE_SSSE3)
/*
 * Usability gate for the SSSE3 SHA-512 transform: FPU use must be
 * permitted in this context (kfpu_allowed()) and the CPU must report
 * SSSE3 support (zfs_ssse3_available()).
 */
static boolean_t
sha512_ssse3_will_work(void)
{
	boolean_t usable;

	usable = kfpu_allowed();
	if (usable)
		usable = zfs_ssse3_available();
	return (usable);
}

/* Assembly implementation (asm-x86_64/sha2/sha512_ssse3.S). */
extern void sha512_ssse3_transform(SHA2_CTX *ctx, const void *in, size_t num);

static alg_impl_ops_t sha512_ssse3 = {
	sha512_ssse3_transform, sha512_ssse3_will_work, 30, "sha-ssse3"};
#endif

/* All compiled in implementations */
static const alg_impl_ops_t *sha512_all_impl[] = {
/* Portable C implementation; no build-time gate. */
&sha512_impl_generic,
#if defined(__amd64)
/* Plain x86_64 assembly. */
&sha512_x86_64,
#endif
#if defined(__amd64) && defined(HAVE_AVX)
/* AVX assembly; selectable only when the will_work check passes. */
&sha512_avx,
#endif
#if defined(__amd64) && defined(HAVE_AVX2)
/* AVX2 assembly. */
&sha512_avx2,
#endif
#if defined(__amd64) && defined(HAVE_SSSE3)
/* SSSE3 assembly. */
&sha512_ssse3,
#endif
};

/*
 * Implementations supported on the running system — presumably a
 * filtered copy of sha512_all_impl built at init; confirm against the
 * (not shown) initialization code.
 */
static alg_impl_ops_t *sha512_supp_impl[ARRAY_SIZE(sha512_all_impl)];
Expand Down
Loading

0 comments on commit 89e103b

Please sign in to comment.