/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * OpenRISC Linux
 *
 * Linux architectural port borrowing liberally from similar works of
 * others.  All original copyrights apply as per the original source
 * declaration.
 *
 * OpenRISC implementation:
 * Copyright (C) 2003 Matjaz Breskvar <phoenix@bsemi.com>
 * Copyright (C) 2010-2011 Jonas Bonn <jonas@southpole.se>
 * et al.
 */

| 15 | #ifndef __ASM_OPENRISC_UNALIGNED_H |
| 16 | #define __ASM_OPENRISC_UNALIGNED_H |
| 17 | |
| 18 | /* |
| 19 | * This is copied from the generic implementation and the C-struct |
| 20 | * variant replaced with the memmove variant. The GCC compiler |
| 21 | * for the OR32 arch optimizes too aggressively for the C-struct |
| 22 | * variant to work, so use the memmove variant instead. |
| 23 | * |
| 24 | * It may be worth considering implementing the unaligned access |
| 25 | * exception handler and allowing unaligned accesses (access_ok.h)... |
| 26 | * not sure if it would be much of a performance win without further |
| 27 | * investigation. |
| 28 | */ |
| 29 | #include <asm/byteorder.h> |
| 30 | |
| 31 | #if defined(__LITTLE_ENDIAN) |
| 32 | # include <linux/unaligned/le_memmove.h> |
| 33 | # include <linux/unaligned/be_byteshift.h> |
| 34 | # include <linux/unaligned/generic.h> |
| 35 | # define get_unaligned __get_unaligned_le |
| 36 | # define put_unaligned __put_unaligned_le |
| 37 | #elif defined(__BIG_ENDIAN) |
| 38 | # include <linux/unaligned/be_memmove.h> |
| 39 | # include <linux/unaligned/le_byteshift.h> |
| 40 | # include <linux/unaligned/generic.h> |
| 41 | # define get_unaligned __get_unaligned_be |
| 42 | # define put_unaligned __put_unaligned_be |
| 43 | #else |
| 44 | # error need to define endianess |
| 45 | #endif |
| 46 | |
| 47 | #endif /* __ASM_OPENRISC_UNALIGNED_H */ |