mem_types 245 arch/arm/mm/mmu.c static struct mem_type mem_types[] __ro_after_init = {
mem_types 356 arch/arm/mm/mmu.c return type < ARRAY_SIZE(mem_types) ? &mem_types[type] : NULL;
mem_types 473 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++)
mem_types 474 arch/arm/mm/mmu.c mem_types[i].prot_sect &= ~PMD_SECT_TEX(7);
mem_types 476 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++)
mem_types 477 arch/arm/mm/mmu.c mem_types[i].prot_sect &= ~PMD_SECT_S;
mem_types 485 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++) {
mem_types 486 arch/arm/mm/mmu.c mem_types[i].prot_sect &= ~PMD_BIT4;
mem_types 487 arch/arm/mm/mmu.c mem_types[i].prot_l1 &= ~PMD_BIT4;
mem_types 490 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++) {
mem_types 491 arch/arm/mm/mmu.c if (mem_types[i].prot_l1)
mem_types 492 arch/arm/mm/mmu.c mem_types[i].prot_l1 |= PMD_BIT4;
mem_types 493 arch/arm/mm/mmu.c if (mem_types[i].prot_sect)
mem_types 494 arch/arm/mm/mmu.c mem_types[i].prot_sect |= PMD_BIT4;
mem_types 507 arch/arm/mm/mmu.c mem_types[MT_DEVICE].prot_sect |= PMD_SECT_XN;
mem_types 508 arch/arm/mm/mmu.c mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_XN;
mem_types 509 arch/arm/mm/mmu.c mem_types[MT_DEVICE_CACHED].prot_sect |= PMD_SECT_XN;
mem_types 510 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_XN;
mem_types 513 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RW].prot_sect |= PMD_SECT_XN;
mem_types 523 arch/arm/mm/mmu.c mem_types[MT_DEVICE].prot_sect |= PMD_SECT_TEX(1);
mem_types 524 arch/arm/mm/mmu.c mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(1);
mem_types 525 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_BUFFERABLE;
mem_types 534 arch/arm/mm/mmu.c mem_types[MT_DEVICE].prot_sect |= PMD_SECT_TEX(1) | PMD_SECT_BUFFERED;
mem_types 535 arch/arm/mm/mmu.c mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(2);
mem_types 536 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_TEX(1);
mem_types 545 arch/arm/mm/mmu.c mem_types[MT_DEVICE].prot_sect |= PMD_SECT_BUFFERED;
mem_types 546 arch/arm/mm/mmu.c mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(2);
mem_types 547 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_TEX(1);
mem_types 553 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_BUFFERABLE;
mem_types 562 arch/arm/mm/mmu.c hyp_device_pgprot = mem_types[MT_DEVICE].prot_pte;
mem_types 563 arch/arm/mm/mmu.c s2_device_pgprot = mem_types[MT_DEVICE].prot_pte_s2;
mem_types 593 arch/arm/mm/mmu.c mem_types[MT_ROM].prot_sect |= PMD_SECT_APX|PMD_SECT_AP_WRITE;
mem_types 594 arch/arm/mm/mmu.c mem_types[MT_MINICLEAN].prot_sect |= PMD_SECT_APX|PMD_SECT_AP_WRITE;
mem_types 595 arch/arm/mm/mmu.c mem_types[MT_CACHECLEAN].prot_sect |= PMD_SECT_APX|PMD_SECT_AP_WRITE;
mem_types 608 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_S;
mem_types 609 arch/arm/mm/mmu.c mem_types[MT_DEVICE_WC].prot_pte |= L_PTE_SHARED;
mem_types 610 arch/arm/mm/mmu.c mem_types[MT_DEVICE_CACHED].prot_sect |= PMD_SECT_S;
mem_types 611 arch/arm/mm/mmu.c mem_types[MT_DEVICE_CACHED].prot_pte |= L_PTE_SHARED;
mem_types 612 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX].prot_sect |= PMD_SECT_S;
mem_types 613 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX].prot_pte |= L_PTE_SHARED;
mem_types 614 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RW].prot_sect |= PMD_SECT_S;
mem_types 615 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RW].prot_pte |= L_PTE_SHARED;
mem_types 616 arch/arm/mm/mmu.c mem_types[MT_MEMORY_DMA_READY].prot_pte |= L_PTE_SHARED;
mem_types 617 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_sect |= PMD_SECT_S;
mem_types 618 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_pte |= L_PTE_SHARED;
mem_types 629 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_sect |=
mem_types 633 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_sect |=
mem_types 637 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_sect |= PMD_SECT_BUFFERABLE;
mem_types 644 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++) {
mem_types 645 arch/arm/mm/mmu.c mem_types[i].prot_pte |= PTE_EXT_AF;
mem_types 646 arch/arm/mm/mmu.c if (mem_types[i].prot_sect)
mem_types 647 arch/arm/mm/mmu.c mem_types[i].prot_sect |= PMD_SECT_AF;
mem_types 663 arch/arm/mm/mmu.c mem_types[MT_LOW_VECTORS].prot_pte |= vecs_pgprot;
mem_types 664 arch/arm/mm/mmu.c mem_types[MT_HIGH_VECTORS].prot_pte |= vecs_pgprot;
mem_types 673 arch/arm/mm/mmu.c mem_types[MT_LOW_VECTORS].prot_l1 |= ecc_mask;
mem_types 674 arch/arm/mm/mmu.c mem_types[MT_HIGH_VECTORS].prot_l1 |= ecc_mask;
mem_types 675 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX].prot_sect |= ecc_mask | cp->pmd;
mem_types 676 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX].prot_pte |= kern_pgprot;
mem_types 677 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RW].prot_sect |= ecc_mask | cp->pmd;
mem_types 678 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RW].prot_pte |= kern_pgprot;
mem_types 679 arch/arm/mm/mmu.c mem_types[MT_MEMORY_DMA_READY].prot_pte |= kern_pgprot;
mem_types 680 arch/arm/mm/mmu.c mem_types[MT_MEMORY_RWX_NONCACHED].prot_sect |= ecc_mask;
mem_types 681 arch/arm/mm/mmu.c mem_types[MT_ROM].prot_sect |= cp->pmd;
mem_types 685 arch/arm/mm/mmu.c mem_types[MT_CACHECLEAN].prot_sect |= PMD_SECT_WT;
mem_types 689 arch/arm/mm/mmu.c mem_types[MT_CACHECLEAN].prot_sect |= PMD_SECT_WB;
mem_types 695 arch/arm/mm/mmu.c for (i = 0; i < ARRAY_SIZE(mem_types); i++) {
mem_types 696 arch/arm/mm/mmu.c struct mem_type *t = &mem_types[i];
mem_types 916 arch/arm/mm/mmu.c type = &mem_types[md->type];
mem_types 87 arch/unicore32/mm/mmu.c static struct mem_type mem_types[] = {
mem_types 124 arch/unicore32/mm/mmu.c return type < ARRAY_SIZE(mem_types) ? &mem_types[type] : NULL;
mem_types 223 arch/unicore32/mm/mmu.c type = &mem_types[md->type];
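For context, a minimal sketch of what these references operate on. The field layout below follows the struct mem_type definition in arch/arm/mm/mm.h for kernels of this generation (prot_pte_s2 is assumed present because line 563 reads it); treat it as an illustration of the data structure, not an exact copy of the source.

    /*
     * Each mem_type entry bundles the protection bits that
     * build_mem_type_table() tunes per CPU architecture above:
     * PTE bits, first-level descriptor bits, section-mapping
     * bits, and the ARM domain the mapping belongs to.
     */
    struct mem_type {
            pteval_t prot_pte;      /* 2nd-level (PTE) protection bits */
            pteval_t prot_pte_s2;   /* stage-2 PTE bits (KVM), per line 563 */
            pmdval_t prot_l1;       /* 1st-level descriptor bits */
            pmdval_t prot_sect;     /* section-mapping protection bits */
            unsigned int domain;    /* ARM protection domain */
    };

    /*
     * Bounds-checked lookup, matching the return statements seen at
     * arch/arm/mm/mmu.c line 356 and arch/unicore32/mm/mmu.c line 124.
     */
    const struct mem_type *get_mem_type(unsigned int type)
    {
            return type < ARRAY_SIZE(mem_types) ? &mem_types[type] : NULL;
    }

The bounds check is why callers such as create_mapping() can index the table by md->type (lines 916 and 223) after a NULL test rather than trusting the caller-supplied type value.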