David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2 | /* |
| 3 | * Copyright (C) 2012 ARM Ltd. |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 4 | */ |
| 5 | #ifndef __ASM_IRQFLAGS_H |
| 6 | #define __ASM_IRQFLAGS_H |
| 7 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 8 | #include <asm/alternative.h> |
Olivier Deprez | 157378f | 2022-04-04 15:47:50 +0200 | [diff] [blame^] | 9 | #include <asm/barrier.h> |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 10 | #include <asm/ptrace.h> |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 11 | #include <asm/sysreg.h> |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 12 | |
| 13 | /* |
 * AArch64 has flags for masking: Debug, Asynchronous (SError), Interrupts and
| 15 | * FIQ exceptions, in the 'daif' register. We mask and unmask them in 'dai' |
| 16 | * order: |
 * Masking debug exceptions causes all other exceptions to be masked too.
| 18 | * Masking SError masks irq, but not debug exceptions. Masking irqs has no |
| 19 | * side effects for other flags. Keeping to this order makes it easier for |
| 20 | * entry.S to know which exceptions should be unmasked. |
| 21 | * |
| 22 | * FIQ is never expected, but we mask it when we disable debug exceptions, and |
| 23 | * unmask it at all other times. |
| 24 | */ |
| 25 | |
| 26 | /* |
| 27 | * CPU interrupt mask handling. |
| 28 | */ |
/*
 * Unmask IRQs.
 *
 * Without GIC priority masking this clears PSTATE.I (daifclr, #2). When the
 * ARM64_HAS_IRQ_PRIO_MASKING alternative is applied, interrupts are instead
 * unmasked by writing GIC_PRIO_IRQON to ICC_PMR_EL1.
 */
static inline void arch_local_irq_enable(void)
{
	if (system_has_prio_mask_debugging()) {
		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);

		/*
		 * Sanity check: PMR is expected to hold only one of the two
		 * canonical masking values.
		 */
		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
	}

	asm volatile(ALTERNATIVE(
		"msr daifclr, #2 // arch_local_irq_enable",
		__msr_s(SYS_ICC_PMR_EL1, "%0"),
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQON)
		: "memory");

	/* Make the PMR write take effect before returning (see pmr_sync()). */
	pmr_sync();
}
| 47 | |
/*
 * Mask IRQs.
 *
 * Without GIC priority masking this sets PSTATE.I (daifset, #2). When the
 * ARM64_HAS_IRQ_PRIO_MASKING alternative is applied, interrupts are instead
 * masked by writing GIC_PRIO_IRQOFF to ICC_PMR_EL1. Note there is no
 * pmr_sync() here, unlike arch_local_irq_enable() — presumably masking does
 * not need the extra synchronisation; confirm against the GIC requirements.
 */
static inline void arch_local_irq_disable(void)
{
	if (system_has_prio_mask_debugging()) {
		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);

		/*
		 * Sanity check: PMR is expected to hold only one of the two
		 * canonical masking values.
		 */
		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
	}

	asm volatile(ALTERNATIVE(
		"msr daifset, #2 // arch_local_irq_disable",
		__msr_s(SYS_ICC_PMR_EL1, "%0"),
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
		: "memory");
}
| 64 | |
| 65 | /* |
| 66 | * Save the current interrupt enable state. |
| 67 | */ |
/*
 * Save the current interrupt enable state.
 *
 * Returns the raw DAIF value, or — when the ARM64_HAS_IRQ_PRIO_MASKING
 * alternative is applied — the current ICC_PMR_EL1 value. Callers must treat
 * the result as opaque and only interpret it via arch_irqs_disabled_flags()
 * or restore it with arch_local_irq_restore().
 */
static inline unsigned long arch_local_save_flags(void)
{
	unsigned long flags;

	asm volatile(ALTERNATIVE(
		"mrs %0, daif",
		__mrs_s("%0", SYS_ICC_PMR_EL1),
		ARM64_HAS_IRQ_PRIO_MASKING)
		: "=&r" (flags)
		:
		: "memory");

	return flags;
}
| 82 | |
/*
 * Test whether a flags value saved by arch_local_save_flags() has IRQs
 * masked. Returns non-zero when masked.
 *
 * DAIF variant: non-zero iff PSR_I_BIT is set in the flags.
 * PMR variant:  non-zero iff the flags differ from GIC_PRIO_IRQON
 *               (XOR yields 0 only on an exact match).
 */
static inline int arch_irqs_disabled_flags(unsigned long flags)
{
	int res;

	asm volatile(ALTERNATIVE(
		"and %w0, %w1, #" __stringify(PSR_I_BIT),
		"eor %w0, %w1, #" __stringify(GIC_PRIO_IRQON),
		ARM64_HAS_IRQ_PRIO_MASKING)
		: "=&r" (res)
		: "r" ((int) flags)
		: "memory");

	return res;
}
| 97 | |
/*
 * Test whether IRQs are currently masked on this CPU: sample the current
 * interrupt state and decode it.
 */
static inline int arch_irqs_disabled(void)
{
	unsigned long flags = arch_local_save_flags();

	return arch_irqs_disabled_flags(flags);
}
| 102 | |
/*
 * Save the current interrupt state and mask IRQs.
 *
 * Returns the previously saved state for a later arch_local_irq_restore().
 * There are too many states with IRQs disabled, so if interrupts are already
 * masked the current state is kept untouched.
 */
static inline unsigned long arch_local_irq_save(void)
{
	unsigned long flags = arch_local_save_flags();

	/* Already masked: nothing to do, hand back the sampled state. */
	if (arch_irqs_disabled_flags(flags))
		return flags;

	arch_local_irq_disable();

	return flags;
}
| 118 | |
| 119 | /* |
| 120 | * restore saved IRQ state |
| 121 | */ |
/*
 * Restore a saved IRQ state.
 *
 * Writes the flags back to DAIF, or — when the ARM64_HAS_IRQ_PRIO_MASKING
 * alternative is applied — to ICC_PMR_EL1. The flags must come from
 * arch_local_save_flags()/arch_local_irq_save() on the same configuration.
 */
static inline void arch_local_irq_restore(unsigned long flags)
{
	asm volatile(ALTERNATIVE(
		"msr daif, %0",
		__msr_s(SYS_ICC_PMR_EL1, "%0"),
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" (flags)
		: "memory");

	/* Make the PMR write take effect before returning (see pmr_sync()). */
	pmr_sync();
}
| 134 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 135 | #endif /* __ASM_IRQFLAGS_H */ |