Searched refs:PMD_SECT_TEX (Results 1 – 3 of 3) sorted by relevance
36   #define PMD_SECT_TEX(x)        (_AT(pmdval_t, (x)) << 12)   /* v5 */   macro
47   #define PMD_SECT_MINICACHE     (PMD_SECT_TEX(1) | PMD_SECT_CACHEABLE)
48   #define PMD_SECT_WBWA          (PMD_SECT_TEX(1) | PMD_SECT_CACHEABLE | PMD_SECT_BUFFERABLE)
49   #define PMD_SECT_NONSHARED_DEV (PMD_SECT_TEX(2))
55 #define PMD_SECT_TEX(x) (_AT(pmdval_t, 0)) macro
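The 2-level definition above places the TEX[2:0] field at bits 14:12 of a section descriptor and the derived macros combine it with the cacheable (C) and bufferable (B) bits, while the second definition compiles the field away to 0 for configurations without a TEX field. Below is a minimal standalone sketch of that encoding; the B (bit 2) and C (bit 3) positions, the uint32_t pmdval_t, and the main() harness are assumptions for illustration and are not taken from the hits.

/*
 * Minimal standalone sketch of the TEX/C/B section-descriptor encoding
 * behind PMD_SECT_TEX().  The TEX shift (12) and the derived macros come
 * from the hits above; the B (bit 2) and C (bit 3) positions follow the
 * usual ARM short-descriptor layout and are assumed, not shown here.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t pmdval_t;                         /* 2-level descriptors are 32-bit */

#define PMD_SECT_BUFFERABLE ((pmdval_t)1 << 2)     /* B bit, assumed position */
#define PMD_SECT_CACHEABLE  ((pmdval_t)1 << 3)     /* C bit, assumed position */
#define PMD_SECT_TEX(x)     ((pmdval_t)(x) << 12)  /* TEX[2:0], as at line 36 */

/* Derived memory-type encodings, as at lines 47-49 */
#define PMD_SECT_MINICACHE     (PMD_SECT_TEX(1) | PMD_SECT_CACHEABLE)
#define PMD_SECT_WBWA          (PMD_SECT_TEX(1) | PMD_SECT_CACHEABLE | PMD_SECT_BUFFERABLE)
#define PMD_SECT_NONSHARED_DEV (PMD_SECT_TEX(2))

int main(void)
{
    /* Write-back, write-allocate: TEX=001, C=1, B=1 */
    printf("PMD_SECT_WBWA          = 0x%08x\n", (unsigned)PMD_SECT_WBWA);
    /* Non-shared device: TEX=010, C=0, B=0 */
    printf("PMD_SECT_NONSHARED_DEV = 0x%08x\n", (unsigned)PMD_SECT_NONSHARED_DEV);
    return 0;
}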
139  pmd &= PMD_SECT_TEX(1) | PMD_SECT_BUFFERABLE | PMD_SECT_CACHEABLE;      in init_default_cache_policy()
428  mem_types[i].prot_sect &= ~PMD_SECT_TEX(7);                             in build_mem_type_table()
477  mem_types[MT_DEVICE].prot_sect |= PMD_SECT_TEX(1);                      in build_mem_type_table()
478  mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(1);            in build_mem_type_table()
488  mem_types[MT_DEVICE].prot_sect |= PMD_SECT_TEX(1) | PMD_SECT_BUFFERED;  in build_mem_type_table()
489  mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(2);            in build_mem_type_table()
490  mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_TEX(1);                   in build_mem_type_table()
500  mem_types[MT_DEVICE_NONSHARED].prot_sect |= PMD_SECT_TEX(2);            in build_mem_type_table()
501  mem_types[MT_DEVICE_WC].prot_sect |= PMD_SECT_TEX(1);                   in build_mem_type_table()
588  PMD_SECT_TEX(1);                                                        in build_mem_type_table()
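The build_mem_type_table() hits follow a mask-then-set pattern: PMD_SECT_TEX(7) spans the full 3-bit field, so the code clears it with &= ~PMD_SECT_TEX(7) before OR-ing in the PMD_SECT_TEX(n) encoding chosen for each device memory type. The sketch below shows that pattern only; struct mem_type, the bit positions, and the printed value are simplified assumptions, not the kernel definitions.

/*
 * Sketch of the mask-then-set pattern in the mmu.c hits above:
 * PMD_SECT_TEX(7) covers the whole TEX[2:0] field, so clearing it first
 * and OR-ing in a new PMD_SECT_TEX(n) remaps a memory type in place.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t pmdval_t;

#define PMD_SECT_BUFFERABLE ((pmdval_t)1 << 2)     /* B bit, assumed position */
#define PMD_SECT_TEX(x)     ((pmdval_t)(x) << 12)  /* TEX[2:0], as in the header */

struct mem_type { pmdval_t prot_sect; };           /* simplified stand-in */

int main(void)
{
    /* Pretend a previous pass left TEX=101 in this memory type. */
    struct mem_type dev = { .prot_sect = PMD_SECT_TEX(5) };

    /* As at line 428: wipe the existing TEX[2:0] setting. */
    dev.prot_sect &= ~PMD_SECT_TEX(7);

    /* As at lines 477-501: OR in the encoding chosen for this CPU,
     * here TEX=001 plus the bufferable bit. */
    dev.prot_sect |= PMD_SECT_TEX(1) | PMD_SECT_BUFFERABLE;

    printf("prot_sect = 0x%08x\n", (unsigned)dev.prot_sect); /* 0x00001004 here */
    return 0;
}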