3 #ifndef __ARCH_ARM64_MMU_H__
4 #define __ARCH_ARM64_MMU_H__
/*
 * Memory-attribute request flags (OR-able).  Presumably these form the "tag"
 * argument consumed by the mapping API below — TODO confirm against callers.
 * The zero-valued variants (MA_DEV, MA_RW) are the defaults and exist purely
 * for readability at call sites.
 */
#define MA_MEM (1 << 0)    /* normal memory */
#define MA_DEV (0 << 0)    /* device memory (default) */
#define MA_NS (1 << 1)     /* non-secure mapping */
#define MA_RO (1 << 2)     /* read-only */
#define MA_RW (0 << 2)     /* read-write (default) */
#define MA_MEM_NC (1 << 3) /* normal memory, non-cacheable */
/* Descriptor-type encodings held in bits [1:0] of a translation-table entry. */
#define INVALID_DESC 0x0
#define BLOCK_DESC 0x1
#define TABLE_DESC 0x3

/* Individual attribute bits of a block/page descriptor. */
#define BLOCK_NS (1 << 5)      /* non-secure */
#define BLOCK_AP_RW (0 << 7)   /* access permission: read-write */
#define BLOCK_AP_RO (1 << 7)   /* access permission: read-only */
#define BLOCK_ACCESS (1 << 10) /* access flag */
#define BLOCK_XN (1UL << 54)   /* execute-never (upper attribute; needs UL — bit 54) */

/* Shareability field, bits [9:8] of the descriptor. */
#define BLOCK_SH_SHIFT (8)
#define BLOCK_SH_NON_SHAREABLE (0 << BLOCK_SH_SHIFT)
#define BLOCK_SH_UNPREDICTABLE (1 << BLOCK_SH_SHIFT)
#define BLOCK_SH_OUTER_SHAREABLE (2 << BLOCK_SH_SHIFT)
#define BLOCK_SH_INNER_SHAREABLE (3 << BLOCK_SH_SHIFT)

/* Recognizable 64-bit fill pattern marking table entries that are not in use. */
#define UNUSED_DESC 0x6EbAAD0BBADbA6E0
/* Virtual-address space and 4 KiB-granule translation geometry. */
#define BITS_PER_VA 48

#define GRANULE_SIZE_SHIFT 12
#define GRANULE_SIZE (1 << GRANULE_SIZE_SHIFT)
/* Output-address mask for a descriptor: bits [47:12] of the 48-bit VA space. */
#define XLAT_ADDR_MASK ((1UL << BITS_PER_VA) - GRANULE_SIZE)
#define GRANULE_SIZE_MASK ((1 << GRANULE_SIZE_SHIFT) - 1)

/* Each level resolves granule-shift minus 3 bits (512 8-byte entries = 9 bits). */
#define BITS_RESOLVED_PER_LVL (GRANULE_SIZE_SHIFT - 3)
#define L0_ADDR_SHIFT (GRANULE_SIZE_SHIFT + BITS_RESOLVED_PER_LVL * 3)
#define L1_ADDR_SHIFT (GRANULE_SIZE_SHIFT + BITS_RESOLVED_PER_LVL * 2)
#define L2_ADDR_SHIFT (GRANULE_SIZE_SHIFT + BITS_RESOLVED_PER_LVL * 1)
#define L3_ADDR_SHIFT (GRANULE_SIZE_SHIFT + BITS_RESOLVED_PER_LVL * 0)

/* Index masks extracting each level's table index from a VA. */
#define L0_ADDR_MASK (((1UL << BITS_RESOLVED_PER_LVL) - 1) << L0_ADDR_SHIFT)
#define L1_ADDR_MASK (((1UL << BITS_RESOLVED_PER_LVL) - 1) << L1_ADDR_SHIFT)
#define L2_ADDR_MASK (((1UL << BITS_RESOLVED_PER_LVL) - 1) << L2_ADDR_SHIFT)
#define L3_ADDR_MASK (((1UL << BITS_RESOLVED_PER_LVL) - 1) << L3_ADDR_SHIFT)

/* Size of the region mapped by one entry at each level (4K/2M/1G/512G). */
#define L3_XLAT_SIZE (1UL << L3_ADDR_SHIFT)
#define L2_XLAT_SIZE (1UL << L2_ADDR_SHIFT)
#define L1_XLAT_SIZE (1UL << L1_ADDR_SHIFT)
#define L0_XLAT_SIZE (1UL << L0_ADDR_SHIFT)
/*
 * MAIR attribute-index assignments.  A descriptor's AttrIndx field selects one
 * of these byte slots in the MAIR register.
 */
#define BLOCK_INDEX_MEM_DEV_NGNRNE 0
#define BLOCK_INDEX_MEM_DEV_NGNRE 1
#define BLOCK_INDEX_MEM_DEV_GRE 2
#define BLOCK_INDEX_MEM_NORMAL_NC 3
#define BLOCK_INDEX_MEM_NORMAL 4

/* AttrIndx occupies 3 bits starting at descriptor bit 2. */
#define BLOCK_INDEX_MASK 0x7
#define BLOCK_INDEX_SHIFT 2

/*
 * MAIR value: one attribute byte per index above.
 * 0x00 = Device-nGnRnE, 0x04 = Device-nGnRE, 0x0c = Device-GRE,
 * 0x44 = Normal non-cacheable, 0xff = Normal write-back cacheable.
 * Note only the last term needs UL: index 4 shifts past bit 31.
 */
#define MAIR_ATTRIBUTES ((0x00 << (BLOCK_INDEX_MEM_DEV_NGNRNE*8)) | \
			 (0x04 << (BLOCK_INDEX_MEM_DEV_NGNRE*8)) | \
			 (0x0c << (BLOCK_INDEX_MEM_DEV_GRE*8)) | \
			 (0x44 << (BLOCK_INDEX_MEM_NORMAL_NC*8)) | \
			 (0xffUL << (BLOCK_INDEX_MEM_NORMAL*8)))
/* TCR (translation control register) field encodings. */

/* T0SZ: size offset of the region addressed by TTBR0 (64 - VA width). */
#define TCR_TOSZ (64 - BITS_PER_VA)

/* IRGN0: inner cacheability attribute for table-walk memory. */
#define TCR_IRGN0_SHIFT 8
#define TCR_IRGN0_NM_NC (0x00 << TCR_IRGN0_SHIFT)     /* non-cacheable */
#define TCR_IRGN0_NM_WBWAC (0x01 << TCR_IRGN0_SHIFT)  /* write-back, write-allocate */
#define TCR_IRGN0_NM_WTC (0x02 << TCR_IRGN0_SHIFT)    /* write-through */
#define TCR_IRGN0_NM_WBNWAC (0x03 << TCR_IRGN0_SHIFT) /* write-back, no write-allocate */

/* ORGN0: outer cacheability attribute for table-walk memory. */
#define TCR_ORGN0_SHIFT 10
#define TCR_ORGN0_NM_NC (0x00 << TCR_ORGN0_SHIFT)     /* non-cacheable */
#define TCR_ORGN0_NM_WBWAC (0x01 << TCR_ORGN0_SHIFT)  /* write-back, write-allocate */
#define TCR_ORGN0_NM_WTC (0x02 << TCR_ORGN0_SHIFT)    /* write-through */
#define TCR_ORGN0_NM_WBNWAC (0x03 << TCR_ORGN0_SHIFT) /* write-back, no write-allocate */

/* SH0: shareability attribute for table-walk memory. */
#define TCR_SH0_SHIFT 12
#define TCR_SH0_NC (0x0 << TCR_SH0_SHIFT) /* non-shareable */
#define TCR_SH0_OS (0x2 << TCR_SH0_SHIFT) /* outer shareable */
#define TCR_SH0_IS (0x3 << TCR_SH0_SHIFT) /* inner shareable */

/* TG0: TTBR0 granule size. */
#define TCR_TG0_SHIFT 14
#define TCR_TG0_4KB (0x0 << TCR_TG0_SHIFT)
#define TCR_TG0_64KB (0x1 << TCR_TG0_SHIFT)
#define TCR_TG0_16KB (0x2 << TCR_TG0_SHIFT)

/* PS: physical address size. */
#define TCR_PS_SHIFT 16
#define TCR_PS_4GB (0x0 << TCR_PS_SHIFT)
#define TCR_PS_64GB (0x1 << TCR_PS_SHIFT)
#define TCR_PS_1TB (0x2 << TCR_PS_SHIFT)
#define TCR_PS_4TB (0x3 << TCR_PS_SHIFT)
#define TCR_PS_16TB (0x4 << TCR_PS_SHIFT)
#define TCR_PS_256TB (0x5 << TCR_PS_SHIFT)

/* TBI: whether the top byte of a VA is used or ignored (tagged addressing). */
#define TCR_TBI_SHIFT 20
#define TCR_TBI_USED (0x0 << TCR_TBI_SHIFT)
#define TCR_TBI_IGNORED (0x1 << TCR_TBI_SHIFT)
/*
 * Map the [start, start + size) region; "tag" is an attribute bitmask —
 * presumably built from the MA_* flags above, TODO confirm against callers.
 */
void mmu_config_range(void *start, size_t size, uint64_t tag);

/* Restore previously saved MMU state from *mmu_context. */
void mmu_restore_context(const struct mmu_context *mmu_context);

/* Save the current MMU state into *mmu_context. */
void mmu_save_context(struct mmu_context *mmu_context);

/*
 * NOTE(review): a garbled fragment "unsigned long long uint64_t" followed the
 * prototypes here — possibly a mangled "typedef unsigned long long uint64_t;".
 * uint64_t should come from <stdint.h>; verify against the original file
 * before reintroducing any typedef.
 */