arch/arm/include/asm/arch_gicv3.h

DEFINITIONS

This source file includes the following definitions:
  1. gic_write_eoir
  2. gic_write_dir
  3. gic_read_iar
  4. gic_write_ctlr
  5. gic_read_ctlr
  6. gic_write_grpen1
  7. gic_write_sgi1r
  8. gic_read_sre
  9. gic_write_sre
  10. gic_write_bpr1
  11. gic_read_pmr
  12. gic_write_pmr
  13. gic_read_rpr
  14. __gic_writeq_nonatomic
  15. __gic_readq_nonatomic
  16. gits_write_vpendbaser
  17. gic_prio_masking_enabled
  18. gic_pmr_mask_irqs
  19. gic_arch_enable_irqs

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * arch/arm/include/asm/arch_gicv3.h
 *
 * Copyright (C) 2015 ARM Ltd.
 */
#ifndef __ASM_ARCH_GICV3_H
#define __ASM_ARCH_GICV3_H

#ifndef __ASSEMBLY__

#include <linux/io.h>
#include <asm/barrier.h>
#include <asm/cacheflush.h>
#include <asm/cp15.h>

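/*
 * AArch32 encodings of the GICv3 CPU interface and hypervisor control
 * registers.  Each name maps to a CP15 encoding (see asm/cp15.h): 32bit
 * registers are accessed with mrc/mcr through read_sysreg()/write_sysreg(),
 * while 64bit registers such as ICC_SGI1R use the mcrr/mrrc pair via
 * __ACCESS_CP15_64.
 */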
#define ICC_EOIR1                       __ACCESS_CP15(c12, 0, c12, 1)
#define ICC_DIR                         __ACCESS_CP15(c12, 0, c11, 1)
#define ICC_IAR1                        __ACCESS_CP15(c12, 0, c12, 0)
#define ICC_SGI1R                       __ACCESS_CP15_64(0, c12)
#define ICC_PMR                         __ACCESS_CP15(c4, 0, c6, 0)
#define ICC_CTLR                        __ACCESS_CP15(c12, 0, c12, 4)
#define ICC_SRE                         __ACCESS_CP15(c12, 0, c12, 5)
#define ICC_IGRPEN1                     __ACCESS_CP15(c12, 0, c12, 7)
#define ICC_BPR1                        __ACCESS_CP15(c12, 0, c12, 3)
#define ICC_RPR                         __ACCESS_CP15(c12, 0, c11, 3)

#define __ICC_AP0Rx(x)                  __ACCESS_CP15(c12, 0, c8, 4 | x)
#define ICC_AP0R0                       __ICC_AP0Rx(0)
#define ICC_AP0R1                       __ICC_AP0Rx(1)
#define ICC_AP0R2                       __ICC_AP0Rx(2)
#define ICC_AP0R3                       __ICC_AP0Rx(3)

#define __ICC_AP1Rx(x)                  __ACCESS_CP15(c12, 0, c9, x)
#define ICC_AP1R0                       __ICC_AP1Rx(0)
#define ICC_AP1R1                       __ICC_AP1Rx(1)
#define ICC_AP1R2                       __ICC_AP1Rx(2)
#define ICC_AP1R3                       __ICC_AP1Rx(3)

#define ICC_HSRE                        __ACCESS_CP15(c12, 4, c9, 5)

#define ICH_VSEIR                       __ACCESS_CP15(c12, 4, c9, 4)
#define ICH_HCR                         __ACCESS_CP15(c12, 4, c11, 0)
#define ICH_VTR                         __ACCESS_CP15(c12, 4, c11, 1)
#define ICH_MISR                        __ACCESS_CP15(c12, 4, c11, 2)
#define ICH_EISR                        __ACCESS_CP15(c12, 4, c11, 3)
#define ICH_ELRSR                       __ACCESS_CP15(c12, 4, c11, 5)
#define ICH_VMCR                        __ACCESS_CP15(c12, 4, c11, 7)

#define __LR0(x)                        __ACCESS_CP15(c12, 4, c12, x)
#define __LR8(x)                        __ACCESS_CP15(c12, 4, c13, x)

#define ICH_LR0                         __LR0(0)
#define ICH_LR1                         __LR0(1)
#define ICH_LR2                         __LR0(2)
#define ICH_LR3                         __LR0(3)
#define ICH_LR4                         __LR0(4)
#define ICH_LR5                         __LR0(5)
#define ICH_LR6                         __LR0(6)
#define ICH_LR7                         __LR0(7)
#define ICH_LR8                         __LR8(0)
#define ICH_LR9                         __LR8(1)
#define ICH_LR10                        __LR8(2)
#define ICH_LR11                        __LR8(3)
#define ICH_LR12                        __LR8(4)
#define ICH_LR13                        __LR8(5)
#define ICH_LR14                        __LR8(6)
#define ICH_LR15                        __LR8(7)

/* LR top half */
#define __LRC0(x)                       __ACCESS_CP15(c12, 4, c14, x)
#define __LRC8(x)                       __ACCESS_CP15(c12, 4, c15, x)

#define ICH_LRC0                        __LRC0(0)
#define ICH_LRC1                        __LRC0(1)
#define ICH_LRC2                        __LRC0(2)
#define ICH_LRC3                        __LRC0(3)
#define ICH_LRC4                        __LRC0(4)
#define ICH_LRC5                        __LRC0(5)
#define ICH_LRC6                        __LRC0(6)
#define ICH_LRC7                        __LRC0(7)
#define ICH_LRC8                        __LRC8(0)
#define ICH_LRC9                        __LRC8(1)
#define ICH_LRC10                       __LRC8(2)
#define ICH_LRC11                       __LRC8(3)
#define ICH_LRC12                       __LRC8(4)
#define ICH_LRC13                       __LRC8(5)
#define ICH_LRC14                       __LRC8(6)
#define ICH_LRC15                       __LRC8(7)

#define __ICH_AP0Rx(x)                  __ACCESS_CP15(c12, 4, c8, x)
#define ICH_AP0R0                       __ICH_AP0Rx(0)
#define ICH_AP0R1                       __ICH_AP0Rx(1)
#define ICH_AP0R2                       __ICH_AP0Rx(2)
#define ICH_AP0R3                       __ICH_AP0Rx(3)

#define __ICH_AP1Rx(x)                  __ACCESS_CP15(c12, 4, c9, x)
#define ICH_AP1R0                       __ICH_AP1Rx(0)
#define ICH_AP1R1                       __ICH_AP1Rx(1)
#define ICH_AP1R2                       __ICH_AP1Rx(2)
#define ICH_AP1R3                       __ICH_AP1Rx(3)

/* A32-to-A64 mappings used by VGIC save/restore */

#define CPUIF_MAP(a32, a64)                     \
static inline void write_ ## a64(u32 val)       \
{                                               \
        write_sysreg(val, a32);                 \
}                                               \
static inline u32 read_ ## a64(void)            \
{                                               \
        return read_sysreg(a32);                \
}                                               \

#define CPUIF_MAP_LO_HI(a32lo, a32hi, a64)      \
static inline void write_ ## a64(u64 val)       \
{                                               \
        write_sysreg(lower_32_bits(val), a32lo);\
        write_sysreg(upper_32_bits(val), a32hi);\
}                                               \
static inline u64 read_ ## a64(void)            \
{                                               \
        u64 val = read_sysreg(a32lo);           \
                                                \
        val |=  (u64)read_sysreg(a32hi) << 32;  \
                                                \
        return val;                             \
}

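/*
 * Instantiate the accessors.  CPUIF_MAP(ICC_PMR, ICC_PMR_EL1), for example,
 * generates read_ICC_PMR_EL1()/write_ICC_PMR_EL1() operating on the AArch32
 * ICC_PMR encoding, so common GICv3/VGIC code written against the AArch64
 * register names also builds on 32bit ARM.
 */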
CPUIF_MAP(ICC_PMR, ICC_PMR_EL1)
CPUIF_MAP(ICC_AP0R0, ICC_AP0R0_EL1)
CPUIF_MAP(ICC_AP0R1, ICC_AP0R1_EL1)
CPUIF_MAP(ICC_AP0R2, ICC_AP0R2_EL1)
CPUIF_MAP(ICC_AP0R3, ICC_AP0R3_EL1)
CPUIF_MAP(ICC_AP1R0, ICC_AP1R0_EL1)
CPUIF_MAP(ICC_AP1R1, ICC_AP1R1_EL1)
CPUIF_MAP(ICC_AP1R2, ICC_AP1R2_EL1)
CPUIF_MAP(ICC_AP1R3, ICC_AP1R3_EL1)

CPUIF_MAP(ICH_HCR, ICH_HCR_EL2)
CPUIF_MAP(ICH_VTR, ICH_VTR_EL2)
CPUIF_MAP(ICH_MISR, ICH_MISR_EL2)
CPUIF_MAP(ICH_EISR, ICH_EISR_EL2)
CPUIF_MAP(ICH_ELRSR, ICH_ELRSR_EL2)
CPUIF_MAP(ICH_VMCR, ICH_VMCR_EL2)
CPUIF_MAP(ICH_AP0R3, ICH_AP0R3_EL2)
CPUIF_MAP(ICH_AP0R2, ICH_AP0R2_EL2)
CPUIF_MAP(ICH_AP0R1, ICH_AP0R1_EL2)
CPUIF_MAP(ICH_AP0R0, ICH_AP0R0_EL2)
CPUIF_MAP(ICH_AP1R3, ICH_AP1R3_EL2)
CPUIF_MAP(ICH_AP1R2, ICH_AP1R2_EL2)
CPUIF_MAP(ICH_AP1R1, ICH_AP1R1_EL2)
CPUIF_MAP(ICH_AP1R0, ICH_AP1R0_EL2)
CPUIF_MAP(ICC_HSRE, ICC_SRE_EL2)
CPUIF_MAP(ICC_SRE, ICC_SRE_EL1)

CPUIF_MAP_LO_HI(ICH_LR15, ICH_LRC15, ICH_LR15_EL2)
CPUIF_MAP_LO_HI(ICH_LR14, ICH_LRC14, ICH_LR14_EL2)
CPUIF_MAP_LO_HI(ICH_LR13, ICH_LRC13, ICH_LR13_EL2)
CPUIF_MAP_LO_HI(ICH_LR12, ICH_LRC12, ICH_LR12_EL2)
CPUIF_MAP_LO_HI(ICH_LR11, ICH_LRC11, ICH_LR11_EL2)
CPUIF_MAP_LO_HI(ICH_LR10, ICH_LRC10, ICH_LR10_EL2)
CPUIF_MAP_LO_HI(ICH_LR9, ICH_LRC9, ICH_LR9_EL2)
CPUIF_MAP_LO_HI(ICH_LR8, ICH_LRC8, ICH_LR8_EL2)
CPUIF_MAP_LO_HI(ICH_LR7, ICH_LRC7, ICH_LR7_EL2)
CPUIF_MAP_LO_HI(ICH_LR6, ICH_LRC6, ICH_LR6_EL2)
CPUIF_MAP_LO_HI(ICH_LR5, ICH_LRC5, ICH_LR5_EL2)
CPUIF_MAP_LO_HI(ICH_LR4, ICH_LRC4, ICH_LR4_EL2)
CPUIF_MAP_LO_HI(ICH_LR3, ICH_LRC3, ICH_LR3_EL2)
CPUIF_MAP_LO_HI(ICH_LR2, ICH_LRC2, ICH_LR2_EL2)
CPUIF_MAP_LO_HI(ICH_LR1, ICH_LRC1, ICH_LR1_EL2)
CPUIF_MAP_LO_HI(ICH_LR0, ICH_LRC0, ICH_LR0_EL2)

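/*
 * Dispatch by architectural name: read_gicreg(ICH_VTR_EL2), for instance,
 * resolves to the read_ICH_VTR_EL2() accessor generated above.
 */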
#define read_gicreg(r)                 read_##r()
#define write_gicreg(v, r)             write_##r(v)

/* Low-level accessors */

static inline void gic_write_eoir(u32 irq)
{
        write_sysreg(irq, ICC_EOIR1);
        isb();
}

static inline void gic_write_dir(u32 val)
{
        write_sysreg(val, ICC_DIR);
        isb();
}

static inline u32 gic_read_iar(void)
{
        u32 irqstat = read_sysreg(ICC_IAR1);

        dsb(sy);

        return irqstat;
}

static inline void gic_write_ctlr(u32 val)
{
        write_sysreg(val, ICC_CTLR);
        isb();
}

static inline u32 gic_read_ctlr(void)
{
        return read_sysreg(ICC_CTLR);
}

static inline void gic_write_grpen1(u32 val)
{
        write_sysreg(val, ICC_IGRPEN1);
        isb();
}

static inline void gic_write_sgi1r(u64 val)
{
        write_sysreg(val, ICC_SGI1R);
}

static inline u32 gic_read_sre(void)
{
        return read_sysreg(ICC_SRE);
}

static inline void gic_write_sre(u32 val)
{
        write_sysreg(val, ICC_SRE);
        isb();
}

static inline void gic_write_bpr1(u32 val)
{
        write_sysreg(val, ICC_BPR1);
}

static inline u32 gic_read_pmr(void)
{
        return read_sysreg(ICC_PMR);
}

static inline void gic_write_pmr(u32 val)
{
        write_sysreg(val, ICC_PMR);
}

static inline u32 gic_read_rpr(void)
{
        return read_sysreg(ICC_RPR);
}

/*
 * Even in 32bit systems that use LPAE, there is no guarantee that the I/O
 * interface provides true 64bit atomic accesses, so using strd/ldrd doesn't
 * make much sense.
 * Moreover, 64bit I/O emulation is extremely difficult to implement on
 * AArch32, since the syndrome register doesn't provide any information for
 * such accesses.
 * Consequently, the following I/O helpers use 32bit accesses.
 */
static inline void __gic_writeq_nonatomic(u64 val, volatile void __iomem *addr)
{
        writel_relaxed((u32)val, addr);
        writel_relaxed((u32)(val >> 32), addr + 4);
}

static inline u64 __gic_readq_nonatomic(const volatile void __iomem *addr)
{
        u64 val;

        val = readl_relaxed(addr);
        val |= (u64)readl_relaxed(addr + 4) << 32;
        return val;
}
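
/*
 * Note that __gic_writeq_nonatomic() writes the low word before the high
 * word; gits_write_vpendbaser() below relies on this ordering.
 */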

#define gic_flush_dcache_to_poc(a,l)    __cpuc_flush_dcache_area((a), (l))

/*
 * GICD_IROUTERn contains the affinity values associated with each interrupt.
 * The upper word (aff3) will always be 0, so there is no need for a lock.
 */
#define gic_write_irouter(v, c)         __gic_writeq_nonatomic(v, c)

/*
 * GICR_TYPER is an ID register and doesn't need atomicity.
 */
#define gic_read_typer(c)               __gic_readq_nonatomic(c)

/*
 * GITS_BASER - hi and lo bits may be accessed independently.
 */
#define gits_read_baser(c)              __gic_readq_nonatomic(c)
#define gits_write_baser(v, c)          __gic_writeq_nonatomic(v, c)

/*
 * GICR_PENDBASER and GICR_PROPBASER are changed with LPIs disabled, so they
 * won't be in use during any updates and can be changed non-atomically.
 */
#define gicr_read_propbaser(c)          __gic_readq_nonatomic(c)
#define gicr_write_propbaser(v, c)      __gic_writeq_nonatomic(v, c)
#define gicr_read_pendbaser(c)          __gic_readq_nonatomic(c)
#define gicr_write_pendbaser(v, c)      __gic_writeq_nonatomic(v, c)

/*
 * GICR_xLPIR - only the lower bits are significant
 */
#define gic_read_lpir(c)                readl_relaxed(c)
#define gic_write_lpir(v, c)            writel_relaxed(lower_32_bits(v), c)

/*
 * GITS_TYPER is an ID register and doesn't need atomicity.
 */
#define gits_read_typer(c)              __gic_readq_nonatomic(c)

/*
 * GITS_CBASER - hi and lo bits may be accessed independently.
 */
#define gits_read_cbaser(c)             __gic_readq_nonatomic(c)
#define gits_write_cbaser(v, c)         __gic_writeq_nonatomic(v, c)

/*
 * GITS_CWRITER - hi and lo bits may be accessed independently.
 */
#define gits_write_cwriter(v, c)        __gic_writeq_nonatomic(v, c)

/*
 * GITS_VPROPBASER - hi and lo bits may be accessed independently.
 */
#define gits_write_vpropbaser(v, c)     __gic_writeq_nonatomic(v, c)

/*
 * GITS_VPENDBASER - the Valid bit must be cleared before changing
 * anything else.
 */
static inline void gits_write_vpendbaser(u64 val, void * __iomem addr)
{
        u32 tmp;

        tmp = readl_relaxed(addr + 4);
        if (tmp & (GICR_VPENDBASER_Valid >> 32)) {
                tmp &= ~(GICR_VPENDBASER_Valid >> 32);
                writel_relaxed(tmp, addr + 4);
        }

        /*
         * Use the fact that __gic_writeq_nonatomic writes the second
         * half of the 64bit quantity after the first.
         */
        __gic_writeq_nonatomic(val, addr);
}

#define gits_read_vpendbaser(c)         __gic_readq_nonatomic(c)

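/*
 * Interrupt priority masking (used for pseudo-NMIs on arm64) is not
 * supported here: gic_prio_masking_enabled() always reports it disabled,
 * so the two helpers below should never be reached.
 */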
static inline bool gic_prio_masking_enabled(void)
{
        return false;
}

static inline void gic_pmr_mask_irqs(void)
{
        /* Should not get called. */
        WARN_ON_ONCE(true);
}

static inline void gic_arch_enable_irqs(void)
{
        /* Should not get called. */
        WARN_ON_ONCE(true);
}

#endif /* !__ASSEMBLY__ */
#endif /* !__ASM_ARCH_GICV3_H */
