/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

#if !defined(__ASSEMBLY__)
#ifdef CONFIG_PPC64

/* Geometry of one cache level, filled in during early boot. */
struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;
};

struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;

static inline u32 l1_dcache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}

static inline u32 l1_dcache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}

static inline u32 l1_icache_shift(void)
{
	return ppc64_caches.l1i.log_block_size;
}

static inline u32 l1_icache_bytes(void)
{
	return ppc64_caches.l1i.block_size;
}
#else
/*
 * On 32-bit the L1 block size is a compile-time constant, so the same
 * accessors simply return the configured values.
 */
static inline u32 l1_dcache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_dcache_bytes(void)
{
	return L1_CACHE_BYTES;
}

static inline u32 l1_icache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_icache_bytes(void)
{
	return L1_CACHE_BYTES;
}

#endif

/* Group rarely-written data so it does not share cache lines with hot data. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while (0)
#define _set_L3CR(val)	do { } while (0)
#endif

/* Data Cache Block Zero: zero the cache block containing addr. */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Invalidate: discard the block without writing it back. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Flush: write the block back if dirty, then invalidate it. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Store: write the block back to memory, keeping it valid. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}

/* Instruction Cache Block Invalidate. */
static inline void icbi(void *addr)
{
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}

/* Instruction Cache Congruence Class Invalidate (4xx-class cores). */
static inline void iccci(void *addr)
{
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}

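/*
 * Illustrative sketch only, not part of this header's API: the block
 * operations above are normally applied to every cache block covering a
 * byte range.  A hypothetical range write-back built on l1_dcache_bytes()
 * and dcbf() might look like the following; the kernel's real range-flush
 * helpers live in the cache-flush code, not here.
 */
static inline void example_flush_dcache_range(unsigned long start,
					      unsigned long stop)
{
	unsigned long bytes = l1_dcache_bytes();	/* L1 d-cache block size */
	unsigned long addr = start & ~(bytes - 1);	/* round down to a block boundary */

	for (; addr < stop; addr += bytes)
		dcbf((void *)addr);	/* write back and invalidate this block */
	__asm__ __volatile__ ("sync" : : : "memory");	/* wait for the flushes to complete */
}
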
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */