Kyle Swenson | 8d8f654 | 2021-03-15 11:02:55 -0600 | [diff] [blame^] | 1 | /* |
| 2 | * Copyright (C) 2014-15 Synopsys, Inc. (www.synopsys.com) |
| 3 | * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com) |
| 4 | * |
| 5 | * This program is free software; you can redistribute it and/or modify |
| 6 | * it under the terms of the GNU General Public License version 2 as |
| 7 | * published by the Free Software Foundation. |
| 8 | */ |
| 9 | |
| 10 | #ifndef __ASM_IRQFLAGS_ARCOMPACT_H |
| 11 | #define __ASM_IRQFLAGS_ARCOMPACT_H |
| 12 | |
| 13 | /* vineetg: March 2010 : local_irq_save( ) optimisation |
| 14 | * -Remove explicit mov of current status32 into reg, that is not needed |
| 15 | * -Use BIC insn instead of INVERTED + AND |
| 16 | * -Conditionally disable interrupts (if they are not enabled, don't disable) |
| 17 | */ |
| 18 | |
| 19 | #include <asm/arcregs.h> |
| 20 | |
/* status32 Reg bits related to Interrupt Handling */
#define STATUS_E1_BIT		1	/* Int 1 enable */
#define STATUS_E2_BIT		2	/* Int 2 enable */
#define STATUS_A1_BIT		3	/* Int 1 active */
#define STATUS_A2_BIT		4	/* Int 2 active */
#define STATUS_AE_BIT		5	/* Exception active */

/* Single-bit masks derived from the bit positions above */
#define STATUS_E1_MASK		(1<<STATUS_E1_BIT)
#define STATUS_E2_MASK		(1<<STATUS_E2_BIT)
#define STATUS_A1_MASK		(1<<STATUS_A1_BIT)
#define STATUS_A2_MASK		(1<<STATUS_A2_BIT)
#define STATUS_AE_MASK		(1<<STATUS_AE_BIT)
/* Both interrupt-enable levels: "interrupts enabled" as tested below */
#define STATUS_IE_MASK		(STATUS_E1_MASK | STATUS_E2_MASK)

/* Other Interrupt Handling related Aux regs */
#define AUX_IRQ_LEV		0x200	/* IRQ Priority: L1 or L2 */
#define AUX_IRQ_HINT		0x201	/* For generating Soft Interrupts */
#define AUX_IRQ_LV12		0x43	/* interrupt level register */

/*
 * NOTE(review): the three aux regs below are not referenced in this file;
 * presumably per-IRQ enable/trigger/pulse-cancel controls - verify against
 * the ARCompact Programmer's Reference before relying on these comments.
 */
#define AUX_IENABLE		0x40c
#define AUX_ITRIGGER		0x40d
#define AUX_IPULSE		0x415

/* STATUS32 bits to set at init: start with both interrupt levels enabled */
#define ISA_INIT_STATUS_BITS	STATUS_IE_MASK

/* Operand for the SLEEP instruction - TODO confirm encoding semantics */
#define ISA_SLEEP_ARG		0x3
| 47 | |
| 48 | #ifndef __ASSEMBLY__ |
| 49 | |
| 50 | /****************************************************************** |
| 51 | * IRQ Control Macros |
| 52 | * |
| 53 | * All of them have "memory" clobber (compiler barrier) which is needed to |
 * ensure that LD/ST requiring irq safety (R-M-W when LLSC is not available)
| 55 | * are redone after IRQs are re-enabled (and gcc doesn't reuse stale register) |
| 56 | * |
| 57 | * Noted at the time of Abilis Timer List corruption |
| 58 | * Orig Bug + Rejected solution : https://lkml.org/lkml/2013/3/29/67 |
| 59 | * Reasoning : https://lkml.org/lkml/2013/4/8/15 |
| 60 | * |
| 61 | ******************************************************************/ |
| 62 | |
| 63 | /* |
| 64 | * Save IRQ state and disable IRQs |
| 65 | */ |
| 66 | static inline long arch_local_irq_save(void) |
| 67 | { |
| 68 | unsigned long temp, flags; |
| 69 | |
| 70 | __asm__ __volatile__( |
| 71 | " lr %1, [status32] \n" |
| 72 | " bic %0, %1, %2 \n" |
| 73 | " and.f 0, %1, %2 \n" |
| 74 | " flag.nz %0 \n" |
| 75 | : "=r"(temp), "=r"(flags) |
| 76 | : "n"((STATUS_E1_MASK | STATUS_E2_MASK)) |
| 77 | : "memory", "cc"); |
| 78 | |
| 79 | return flags; |
| 80 | } |
| 81 | |
| 82 | /* |
| 83 | * restore saved IRQ state |
| 84 | */ |
| 85 | static inline void arch_local_irq_restore(unsigned long flags) |
| 86 | { |
| 87 | |
| 88 | __asm__ __volatile__( |
| 89 | " flag %0 \n" |
| 90 | : |
| 91 | : "r"(flags) |
| 92 | : "memory"); |
| 93 | } |
| 94 | |
| 95 | /* |
| 96 | * Unconditionally Enable IRQs |
| 97 | */ |
| 98 | static inline void arch_local_irq_enable(void) |
| 99 | { |
| 100 | unsigned long temp; |
| 101 | |
| 102 | __asm__ __volatile__( |
| 103 | " lr %0, [status32] \n" |
| 104 | " or %0, %0, %1 \n" |
| 105 | " flag %0 \n" |
| 106 | : "=&r"(temp) |
| 107 | : "n"((STATUS_E1_MASK | STATUS_E2_MASK)) |
| 108 | : "cc", "memory"); |
| 109 | } |
| 110 | |
| 111 | |
| 112 | /* |
| 113 | * Unconditionally Disable IRQs |
| 114 | */ |
| 115 | static inline void arch_local_irq_disable(void) |
| 116 | { |
| 117 | unsigned long temp; |
| 118 | |
| 119 | __asm__ __volatile__( |
| 120 | " lr %0, [status32] \n" |
| 121 | " and %0, %0, %1 \n" |
| 122 | " flag %0 \n" |
| 123 | : "=&r"(temp) |
| 124 | : "n"(~(STATUS_E1_MASK | STATUS_E2_MASK)) |
| 125 | : "memory"); |
| 126 | } |
| 127 | |
| 128 | /* |
| 129 | * save IRQ state |
| 130 | */ |
| 131 | static inline long arch_local_save_flags(void) |
| 132 | { |
| 133 | unsigned long temp; |
| 134 | |
| 135 | __asm__ __volatile__( |
| 136 | " lr %0, [status32] \n" |
| 137 | : "=&r"(temp) |
| 138 | : |
| 139 | : "memory"); |
| 140 | |
| 141 | return temp; |
| 142 | } |
| 143 | |
| 144 | /* |
| 145 | * Query IRQ state |
| 146 | */ |
| 147 | static inline int arch_irqs_disabled_flags(unsigned long flags) |
| 148 | { |
| 149 | return !(flags & (STATUS_E1_MASK |
| 150 | #ifdef CONFIG_ARC_COMPACT_IRQ_LEVELS |
| 151 | | STATUS_E2_MASK |
| 152 | #endif |
| 153 | )); |
| 154 | } |
| 155 | |
/* Are IRQs disabled right now?  Snapshot STATUS32 and test it. */
static inline int arch_irqs_disabled(void)
{
	unsigned long flags = arch_local_save_flags();

	return arch_irqs_disabled_flags(flags);
}
| 160 | |
| 161 | #else |
| 162 | |
#ifdef CONFIG_TRACE_IRQFLAGS

/* IRQ tracing enabled: notify the irq-trace core on every state change */
.macro TRACE_ASM_IRQ_DISABLE
	bl	trace_hardirqs_off
.endm

.macro TRACE_ASM_IRQ_ENABLE
	bl	trace_hardirqs_on
.endm

#else

/* IRQ tracing disabled: hooks expand to nothing */
.macro TRACE_ASM_IRQ_DISABLE
.endm

.macro TRACE_ASM_IRQ_ENABLE
.endm

#endif

/*
 * Disable both interrupt levels: clear E1/E2 in STATUS32.
 * \scratch : caller-supplied scratch register, clobbered here.
 * The trace hook is invoked AFTER interrupts are actually off.
 */
.macro IRQ_DISABLE  scratch
	lr	\scratch, [status32]
	bic	\scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
	flag	\scratch
	TRACE_ASM_IRQ_DISABLE
.endm

/*
 * Enable both interrupt levels: set E1/E2 in STATUS32.
 * \scratch : caller-supplied scratch register, clobbered here.
 * The trace hook is invoked BEFORE interrupts are turned on.
 */
.macro IRQ_ENABLE  scratch
	TRACE_ASM_IRQ_ENABLE
	lr	\scratch, [status32]
	or	\scratch, \scratch, (STATUS_E1_MASK | STATUS_E2_MASK)
	flag	\scratch
.endm
| 196 | |
| 197 | #endif /* __ASSEMBLY__ */ |
| 198 | |
| 199 | #endif |