/* SPDX-License-Identifier: GPL-2.0 */
/*
 * S390 version
 * Copyright IBM Corp. 1999
 * Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 */

#ifndef _S390_STRING_H_
#define _S390_STRING_H_

#ifndef _LINUX_TYPES_H
#include <linux/types.h>
#endif

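/*
 * Each __HAVE_ARCH_<OP> definition tells the common string code
 * (<linux/string.h> and lib/string.c) that this architecture provides
 * its own version of <op>, so the generic fallback is not used.
 */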
#define __HAVE_ARCH_MEMCHR	/* inline & arch function */
#define __HAVE_ARCH_MEMCMP	/* arch function */
#define __HAVE_ARCH_MEMCPY	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMMOVE	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSCAN	/* inline & arch function */
#define __HAVE_ARCH_MEMSET	/* gcc builtin & arch function */
#define __HAVE_ARCH_MEMSET16	/* arch function */
#define __HAVE_ARCH_MEMSET32	/* arch function */
#define __HAVE_ARCH_MEMSET64	/* arch function */
#define __HAVE_ARCH_STRCAT	/* inline & arch function */
#define __HAVE_ARCH_STRCMP	/* arch function */
#define __HAVE_ARCH_STRCPY	/* inline & arch function */
#define __HAVE_ARCH_STRLCAT	/* arch function */
#define __HAVE_ARCH_STRLCPY	/* arch function */
#define __HAVE_ARCH_STRLEN	/* inline & arch function */
#define __HAVE_ARCH_STRNCAT	/* arch function */
#define __HAVE_ARCH_STRNCPY	/* arch function */
#define __HAVE_ARCH_STRNLEN	/* inline & arch function */
#define __HAVE_ARCH_STRRCHR	/* arch function */
#define __HAVE_ARCH_STRSTR	/* arch function */

/* Prototypes for non-inlined arch string functions. */
int memcmp(const void *s1, const void *s2, size_t n);
void *memcpy(void *dest, const void *src, size_t n);
void *memset(void *s, int c, size_t n);
void *memmove(void *dest, const void *src, size_t n);
int strcmp(const char *s1, const char *s2);
size_t strlcat(char *dest, const char *src, size_t n);
size_t strlcpy(char *dest, const char *src, size_t size);
char *strncat(char *dest, const char *src, size_t n);
char *strncpy(char *dest, const char *src, size_t n);
char *strrchr(const char *s, int c);
char *strstr(const char *s1, const char *s2);

#undef __HAVE_ARCH_STRCHR
#undef __HAVE_ARCH_STRNCHR
#undef __HAVE_ARCH_STRNCMP
#undef __HAVE_ARCH_STRPBRK
#undef __HAVE_ARCH_STRSEP
#undef __HAVE_ARCH_STRSPN

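/*
 * Out-of-line fill helpers; note that, unlike the wrappers below,
 * their third argument is a length in bytes rather than a number of
 * elements.
 */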
void *__memset16(uint16_t *s, uint16_t v, size_t count);
void *__memset32(uint32_t *s, uint32_t v, size_t count);
void *__memset64(uint64_t *s, uint64_t v, size_t count);

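/*
 * memset16/32/64 store @v into @count elements of the given width,
 * converting the element count into the byte count expected by the
 * __memset* helpers; e.g. memset32(buf, 0x1, 4) fills four 32-bit
 * words and ends up calling __memset32(buf, 0x1, 16).
 */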
static inline void *memset16(uint16_t *s, uint16_t v, size_t count)
{
	return __memset16(s, v, count * sizeof(v));
}

static inline void *memset32(uint32_t *s, uint32_t v, size_t count)
{
	return __memset32(s, v, count * sizeof(v));
}

static inline void *memset64(uint64_t *s, uint64_t v, size_t count)
{
	return __memset64(s, v, count * sizeof(v));
}

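/*
 * The inline assembler versions below are used only when neither
 * fortified source checking nor the out-of-line build of the arch
 * string code (IN_ARCH_STRING_C) is in effect; otherwise only the
 * prototypes at the bottom of this file are declared.
 */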
#if !defined(IN_ARCH_STRING_C) && (!defined(CONFIG_FORTIFY_SOURCE) || defined(__NO_FORTIFY))

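/*
 * memchr: SRST scans from @s for the byte held in register 0,
 * stopping at @s + @n.  Condition code 3 ("jo") means the CPU paused
 * and the instruction must be resumed; condition code 1 ("jl") means
 * the byte was found and its address is in the result register,
 * otherwise NULL is returned.
 */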
static inline void *memchr(const void *s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0: srst %0,%1\n"
		" jo 0b\n"
		" jl 1f\n"
		" la %0,0\n"
		"1:"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}

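/*
 * memscan: same SRST loop as memchr above, but if the byte is not
 * found the address just past the area (@s + @n) is returned instead
 * of NULL.
 */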
static inline void *memscan(void *s, int c, size_t n)
{
	register int r0 asm("0") = (char) c;
	const void *ret = s + n;

	asm volatile(
		"0: srst %0,%1\n"
		" jo 0b\n"
		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc", "memory");
	return (void *) ret;
}

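/*
 * strcat: the SRST loop locates the terminating NUL of @dst (with the
 * limit register starting at 0 the search is effectively unbounded
 * and stops at the NUL), then the MVST loop appends @src, including
 * its terminating NUL, at that position.
 */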
static inline char *strcat(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	unsigned long dummy;
	char *ret = dst;

	asm volatile(
		"0: srst %0,%1\n"
		" jo 0b\n"
		"1: mvst %0,%2\n"
		" jo 1b"
		: "=&a" (dummy), "+a" (dst), "+a" (src)
		: "d" (r0), "0" (0) : "cc", "memory");
	return ret;
}

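/*
 * strcpy: MVST copies bytes from @src to @dst up to and including the
 * ending byte held in register 0 (NUL here), looping on condition
 * code 3 until the copy is complete.
 */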
static inline char *strcpy(char *dst, const char *src)
{
	register int r0 asm("0") = 0;
	char *ret = dst;

	asm volatile(
		"0: mvst %0,%1\n"
		" jo 0b"
		: "+&a" (dst), "+&a" (src) : "d" (r0)
		: "cc", "memory");
	return ret;
}

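/*
 * strlen: register 0 doubles as the SRST search byte (NUL) and as the
 * register that receives the address of that NUL once it is found;
 * subtracting @s yields the string length.
 */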
static inline size_t strlen(const char *s)
{
	register unsigned long r0 asm("0") = 0;
	const char *tmp = s;

	asm volatile(
		"0: srst %0,%1\n"
		" jo 0b"
		: "+d" (r0), "+a" (tmp) : : "cc", "memory");
	return r0 - (unsigned long) s;
}

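/*
 * strnlen: SRST searches for a NUL between @s and @s + @n; @end is
 * updated to the address of the NUL if one is found, so end - s is
 * either the string length or @n.
 */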
static inline size_t strnlen(const char *s, size_t n)
{
	register int r0 asm("0") = 0;
	const char *tmp = s;
	const char *end = s + n;

	asm volatile(
		"0: srst %0,%1\n"
		" jo 0b"
		: "+a" (end), "+a" (tmp) : "d" (r0) : "cc", "memory");
	return end - s;
}
#else /* IN_ARCH_STRING_C */
void *memchr(const void *s, int c, size_t n);
void *memscan(void *s, int c, size_t n);
char *strcat(char *dst, const char *src);
char *strcpy(char *dst, const char *src);
size_t strlen(const char *s);
size_t strnlen(const char *s, size_t n);
#endif /* !IN_ARCH_STRING_C */

#endif /* _S390_STRING_H_ */