Fix kernel unaligned access on sparc64
Update the SA_COPY_DATA macro to check at compile time whether the
architecture supports efficient unaligned memory accesses.  Otherwise
fall back to using the sa_copy_data() function.
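
As a rough sketch of the issue (hypothetical copy8() helper, not part of
this change): a direct 64-bit load through a possibly misaligned pointer
traps on strict-alignment architectures such as sparc64, so the copy must
go through a byte-oriented routine unless the platform is known to handle
unaligned access efficiently.

#include <stdint.h>
#include <string.h>

/*
 * Hypothetical illustration of the compile-time dispatch; the real
 * macro is SA_COPY_DATA in module/zfs/sa.c (see the diff below).
 */
static void
copy8(void *t, const void *s)
{
#ifdef HAVE_EFFICIENT_UNALIGNED_ACCESS
	/* Hardware tolerates misaligned 64-bit loads/stores. */
	*(uint64_t *)t = *(const uint64_t *)s;
#else
	/* Strict-alignment CPUs (e.g. sparc64): copy byte-wise instead. */
	memcpy(t, s, sizeof (uint64_t));
#endif
}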

The kernel-provided CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is used to
determine availability in kernel space.  In user space the x86_64, x86,
and powerpc architectures define the HAVE_EFFICIENT_UNALIGNED_ACCESS
macro unconditionally, and arm defines it when the compiler sets
__ARM_FEATURE_UNALIGNED.
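
Condensed, the user-space test amounts to something like the following
sketch (not the literal source; the authoritative per-architecture
definitions are in lib/libspl/include/sys/isa_defs.h in the diff below):

/* Condensed sketch of the user-space detection. */
#if defined(__x86_64) || defined(__i386) || defined(__powerpc) || \
    ((defined(__arm__) || defined(__aarch64__)) && \
    defined(__ARM_FEATURE_UNALIGNED))
#define HAVE_EFFICIENT_UNALIGNED_ACCESS
#endif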

Signed-off-by: Brian Behlendorf <[email protected]>
Closes #7642 
Closes #7684
behlendorf authored Jul 11, 2018
1 parent 2dca37d commit 33a19e0
Showing 4 changed files with 36 additions and 16 deletions.
8 changes: 8 additions & 0 deletions include/spl/sys/isa_defs.h
@@ -210,6 +210,14 @@

#include <sys/byteorder.h>

+/*
+ * CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS will be defined by the Linux
+ * kernel for architectures which support efficient unaligned access.
+ */
+#if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)
+#define HAVE_EFFICIENT_UNALIGNED_ACCESS
+#endif
+
#if defined(__LITTLE_ENDIAN) && !defined(_LITTLE_ENDIAN)
#define _LITTLE_ENDIAN __LITTLE_ENDIAN
#endif
7 changes: 7 additions & 0 deletions lib/libspl/include/sys/isa_defs.h
@@ -55,6 +55,7 @@ extern "C" {
#endif

#define _SUNOS_VTOC_16
+#define HAVE_EFFICIENT_UNALIGNED_ACCESS

/* i386 arch specific defines */
#elif defined(__i386) || defined(__i386__)
@@ -76,6 +77,7 @@ extern "C" {
#endif

#define _SUNOS_VTOC_16
+#define HAVE_EFFICIENT_UNALIGNED_ACCESS

/* powerpc arch specific defines */
#elif defined(__powerpc) || defined(__powerpc__) || defined(__powerpc64__)
@@ -99,6 +101,7 @@ extern "C" {
#endif

#define _SUNOS_VTOC_16
+#define HAVE_EFFICIENT_UNALIGNED_ACCESS

/* arm arch specific defines */
#elif defined(__arm) || defined(__arm__) || defined(__aarch64__)
@@ -129,6 +132,10 @@ extern "C" {

#define _SUNOS_VTOC_16

+#if defined(__ARM_FEATURE_UNALIGNED)
+#define HAVE_EFFICIENT_UNALIGNED_ACCESS
+#endif
+
/* sparc arch specific defines */
#elif defined(__sparc) || defined(__sparc__)

2 changes: 1 addition & 1 deletion module/icp/algs/modes/ccm.c
@@ -28,7 +28,7 @@
#include <sys/crypto/common.h>
#include <sys/crypto/impl.h>

-#if defined(__i386) || defined(__amd64)
+#ifdef HAVE_EFFICIENT_UNALIGNED_ACCESS
#include <sys/byteorder.h>
#define UNALIGNED_POINTERS_PERMITTED
#endif
35 changes: 20 additions & 15 deletions module/zfs/sa.c
@@ -150,21 +150,26 @@ arc_byteswap_func_t sa_bswap_table[] = {
zfs_acl_byteswap,
};

-#define SA_COPY_DATA(f, s, t, l) \
-{ \
-	if (f == NULL) { \
-		if (l == 8) { \
-			*(uint64_t *)t = *(uint64_t *)s; \
-		} else if (l == 16) { \
-			*(uint64_t *)t = *(uint64_t *)s; \
-			*(uint64_t *)((uintptr_t)t + 8) = \
-			    *(uint64_t *)((uintptr_t)s + 8); \
-		} else { \
-			bcopy(s, t, l); \
-		} \
-	} else \
-		sa_copy_data(f, s, t, l); \
-}
+#ifdef HAVE_EFFICIENT_UNALIGNED_ACCESS
+#define SA_COPY_DATA(f, s, t, l) \
+do { \
+	if (f == NULL) { \
+		if (l == 8) { \
+			*(uint64_t *)t = *(uint64_t *)s; \
+		} else if (l == 16) { \
+			*(uint64_t *)t = *(uint64_t *)s; \
+			*(uint64_t *)((uintptr_t)t + 8) = \
+			    *(uint64_t *)((uintptr_t)s + 8); \
+		} else { \
+			bcopy(s, t, l); \
+		} \
+	} else { \
+		sa_copy_data(f, s, t, l); \
+	} \
+} while (0)
+#else
+#define SA_COPY_DATA(f, s, t, l) sa_copy_data(f, s, t, l)
+#endif

/*
* This table is fixed and cannot be changed. Its purpose is to
