Diffstat (limited to 'src/core/arm/mmu/cache.h')
-rw-r--r--  src/core/arm/mmu/cache.h  168
1 file changed, 168 insertions, 0 deletions
diff --git a/src/core/arm/mmu/cache.h b/src/core/arm/mmu/cache.h
new file mode 100644
index 000000000..d308d9b87
--- /dev/null
+++ b/src/core/arm/mmu/cache.h
@@ -0,0 +1,168 @@
#ifndef _MMU_CACHE_H_
#define _MMU_CACHE_H_

typedef struct cache_line_t
{
  ARMword tag;     /* cache-line-aligned address, OR'ed with flag bits:
                      bit 2: last half dirty
                      bit 1: first half dirty
                      bit 0: line valid */
  ARMword pa;      /* physical address */
  ARMword *data;   /* array of cached data */
} cache_line_t;
#define TAG_VALID_FLAG 0x00000001
#define TAG_FIRST_HALF_DIRTY 0x00000002
#define TAG_LAST_HALF_DIRTY 0x00000004

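/* Illustrative helpers (editor's sketch, not part of the original header):
 * how the flag bits above combine with the aligned address stored in tag.
 * The helper names below are hypothetical. */
#define TAG_FLAG_MASK \
	(TAG_VALID_FLAG | TAG_FIRST_HALF_DIRTY | TAG_LAST_HALF_DIRTY)

/* a line is usable only when its valid bit is set */
#define cache_line_valid(line)      (((line)->tag & TAG_VALID_FLAG) != 0)
/* masking off the flag bits recovers the cache-line-aligned address */
#define cache_line_aligned_va(line) ((line)->tag & ~(ARMword) TAG_FLAG_MASK)
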
/* one associative set of cache lines */
typedef struct cache_set_s
{
  cache_line_t *lines;
  int cycle;       /* round-robin allocation cursor */
} cache_set_t;

enum
{
  CACHE_WRITE_BACK,
  CACHE_WRITE_THROUGH,
};

typedef struct cache_s
{
  int width;       /* bytes in a line */
  int way;         /* ways of set associativity */
  int set;         /* number of sets */
  int w_mode;      /* write-back or write-through */
  //int a_mode;    /* alloc mode: random or round-robin */
  cache_set_t *sets;
} cache_s;

typedef struct cache_desc_s
{
  int width;
  int way;
  int set;
  int w_mode;
//  int a_mode;
} cache_desc_t;


/* virtual address to cache set index */
#define va_cache_set(va, cache_t) \
	(((va) / (cache_t)->width) & ((cache_t)->set - 1))
/* virtual address to cache-line-aligned address */
#define va_cache_align(va, cache_t) \
	((va) & ~((cache_t)->width - 1))
/* virtual address to cache line word index */
#define va_cache_index(va, cache_t) \
	(((va) & ((cache_t)->width - 1)) >> WORD_SHT)

/* see page 558 in the ARM Architecture Reference Manual */
/* set/index format value to cache set value */
#define index_cache_set(index, cache_t) \
	(((index) / (cache_t)->width) & ((cache_t)->set - 1))

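/* Worked example (editor's addition): with width = 32 bytes, set = 64, and
 * assuming WORD_SHT is 2 (4-byte words), va = 0x00012345 decomposes as
 *   va_cache_set(va, c)   = (0x12345 / 32) & 63 = 0x1A    (set index)
 *   va_cache_align(va, c) = 0x12345 & ~31       = 0x12340 (line base)
 *   va_cache_index(va, c) = (0x12345 & 31) >> 2 = 1       (word in line)
 */
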
/************************* cache *************************/
/* mmu cache init
 *
 * @cache_t :cache_s to init
 * @width   :cache line width in bytes
 * @way     :ways per cache set
 * @set     :number of cache sets
 * @w_mode  :CACHE_WRITE_BACK or CACHE_WRITE_THROUGH
 *
 * $ -1: error
 *    0: success
 */
int
mmu_cache_init (cache_s * cache_t, int width, int way, int set, int w_mode);

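/* Usage sketch (editor's addition; the sizes are illustrative, not the
 * emulator's actual configuration): a 4-way, 64-set cache with 32-byte
 * lines in write-back mode. */
#if 0
	cache_s cache;
	if (mmu_cache_init (&cache, 32, 4, 64, CACHE_WRITE_BACK) != 0)
		return -1;	/* line/set array allocation failed */
	/* ... access memory through the cache ... */
	mmu_cache_exit (&cache);
#endif
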
/* free a cache_s's internal data; the pointer itself is not freed.
 * When that is needed, do:
 *     mmu_cache_exit (cache);
 *     free (cache);
 *
 * @cache_t : the cache_s to tear down
 */
void mmu_cache_exit (cache_s * cache_t);

/* mmu cache search
 *
 * @state   :ARMul_State
 * @cache_t :cache_s to search
 * @va      :virtual address
 *
 * $ NULL : no cache line matches
 *   else : the matching cache line
 */
cache_line_t *mmu_cache_search (ARMul_State * state, cache_s * cache_t,
				ARMword va);

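/* Minimal sketch (editor's addition) of what a lookup must do, given the
 * macros and tag layout above; the real implementation lives in cache.c.
 * Uses the hypothetical helper macros sketched near the top of this file. */
#if 0
	cache_set_t *set = &cache_t->sets[va_cache_set (va, cache_t)];
	int i;
	for (i = 0; i < cache_t->way; i++) {
		cache_line_t *line = &set->lines[i];
		if (cache_line_valid (line) &&
		    cache_line_aligned_va (line) == va_cache_align (va, cache_t))
			return line;	/* hit */
	}
	return NULL;	/* miss */
#endif
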
/* mmu cache search by set/index
 *
 * @state   :ARMul_State
 * @cache_t :cache_s to search
 * @index   :set/index format value
 *
 * $ NULL : no cache line matches
 *   else : the matching cache line
 */
cache_line_t *mmu_cache_search_by_index (ARMul_State * state,
					 cache_s * cache_t, ARMword index);

/* mmu cache alloc
 *
 * @state   :ARMul_State
 * @cache_t :cache_s to allocate from
 * @va      :virtual address that needs a cache line; need not be cache-aligned
 * @pa      :physical address of va
 *
 * $ the allocated cache line; allocation always succeeds
 */
cache_line_t *mmu_cache_alloc (ARMul_State * state, cache_s * cache_t,
			       ARMword va, ARMword pa);

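/* Typical read path (editor's sketch): search, allocate on a miss, then
 * pick the word out of the line with va_cache_index(). Filling the line
 * from physical memory is left to the emulator's memory accessors. */
#if 0
	cache_line_t *line = mmu_cache_search (state, cache_t, va);
	if (line == NULL) {
		line = mmu_cache_alloc (state, cache_t, va, pa);
		/* fill line->data from physical memory at pa here */
	}
	data = line->data[va_cache_index (va, cache_t)];
#endif
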
/* mmu_cache_write_back: write a cache line's data back to memory
 *
 * @state   :ARMul_State
 * @cache_t :cache_s the line belongs to
 * @cache   :the cache line to write back
 */
void
mmu_cache_write_back (ARMul_State * state, cache_s * cache_t,
		      cache_line_t * cache);

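/* Editor's note (inferred from the flag names): the two dirty bits let
 * write-back flush only the half of the line that actually changed, e.g.:
 *     if (cache->tag & TAG_FIRST_HALF_DIRTY)
 *         write words [0, width/2) of cache->data to cache->pa;
 *     if (cache->tag & TAG_LAST_HALF_DIRTY)
 *         write words [width/2, width) to cache->pa + width/2;
 */
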
/* mmu_cache_clean: clean (write back) the cache line holding va
 *
 * @state   :ARMul_State
 * @cache_t :cache_s to clean
 * @va      :virtual address
 */
void mmu_cache_clean (ARMul_State * state, cache_s * cache_t, ARMword va);
void
mmu_cache_clean_by_index (ARMul_State * state, cache_s * cache_t,
			  ARMword index);

/* mmu_cache_invalidate: invalidate the cache line holding va
 *
 * @state   :ARMul_State
 * @cache_t :cache_s to invalidate in
 * @va      :virtual address to invalidate
 */
void
mmu_cache_invalidate (ARMul_State * state, cache_s * cache_t, ARMword va);

void
mmu_cache_invalidate_by_index (ARMul_State * state, cache_s * cache_t,
			       ARMword index);

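/* Editor's note: invalidation discards a line without writing it back
 * (conceptually, line->tag &= ~TAG_VALID_FLAG), whereas clean writes dirty
 * data back to memory first via mmu_cache_write_back(). */
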
void mmu_cache_invalidate_all (ARMul_State * state, cache_s * cache_t);

void
mmu_cache_soft_flush (ARMul_State * state, cache_s * cache_t, ARMword pa);

cache_line_t *mmu_cache_dirty_cache (ARMul_State * state, cache_s * cache_t);

#endif /* _MMU_CACHE_H_ */