aboutsummaryrefslogtreecommitdiffhomepage
path: root/src/core/src/arm/mmu/cache.h
diff options
context:
space:
mode:
Diffstat (limited to 'src/core/src/arm/mmu/cache.h')
-rw-r--r--src/core/src/arm/mmu/cache.h116
1 files changed, 58 insertions, 58 deletions
diff --git a/src/core/src/arm/mmu/cache.h b/src/core/src/arm/mmu/cache.h
index b26d9269..d308d9b8 100644
--- a/src/core/src/arm/mmu/cache.h
+++ b/src/core/src/arm/mmu/cache.h
@@ -3,85 +3,85 @@
typedef struct cache_line_t
{
- ARMword tag; /* cache line align address |
- bit2: last half dirty
- bit1: first half dirty
- bit0: cache valid flag
- */
- ARMword pa; /*physical address */
- ARMword *data; /*array of cached data */
+ ARMword tag; /* cache line align address |
+ bit2: last half dirty
+ bit1: first half dirty
+ bit0: cache valid flag
+ */
+ ARMword pa; /*physical address */
+ ARMword *data; /*array of cached data */
} cache_line_t;
#define TAG_VALID_FLAG 0x00000001
#define TAG_FIRST_HALF_DIRTY 0x00000002
-#define TAG_LAST_HALF_DIRTY 0x00000004
+#define TAG_LAST_HALF_DIRTY 0x00000004
/*cache set association*/
typedef struct cache_set_s
{
- cache_line_t *lines;
- int cycle;
+ cache_line_t *lines;
+ int cycle;
} cache_set_t;
enum
{
- CACHE_WRITE_BACK,
- CACHE_WRITE_THROUGH,
+ CACHE_WRITE_BACK,
+ CACHE_WRITE_THROUGH,
};
typedef struct cache_s
{
- int width; /*bytes in a line */
- int way; /*way of set asscociate */
- int set; /*num of set */
- int w_mode; /*write back or write through */
- //int a_mode; /*alloc mode: random or round-bin*/
- cache_set_t *sets;
+ int width; /*bytes in a line */
+    int way;            /*way of set associate */
+ int set; /*num of set */
+ int w_mode; /*write back or write through */
+    //int a_mode;           /*alloc mode: random or round-robin*/
+ cache_set_t *sets;
/**/} cache_s;
typedef struct cache_desc_s
{
- int width;
- int way;
- int set;
- int w_mode;
+ int width;
+ int way;
+ int set;
+ int w_mode;
// int a_mode;
} cache_desc_t;
/*virtual address to cache set index*/
#define va_cache_set(va, cache_t) \
- (((va) / (cache_t)->width) & ((cache_t)->set - 1))
+ (((va) / (cache_t)->width) & ((cache_t)->set - 1))
/*virtual address to cache line aligned*/
#define va_cache_align(va, cache_t) \
- ((va) & ~((cache_t)->width - 1))
+ ((va) & ~((cache_t)->width - 1))
/*virtual address to cache line word index*/
#define va_cache_index(va, cache_t) \
- (((va) & ((cache_t)->width - 1)) >> WORD_SHT)
+ (((va) & ((cache_t)->width - 1)) >> WORD_SHT)
/*see Page 558 in arm manual*/
/*set/index format value to cache set value*/
#define index_cache_set(index, cache_t) \
- (((index) / (cache_t)->width) & ((cache_t)->set - 1))
+ (((index) / (cache_t)->width) & ((cache_t)->set - 1))
/*************************cache********************/
/* mmu cache init
*
* @cache_t :cache_t to init
- * @width :cache line width in byte
- * @way :way of each cache set
- * @set :cache set num
- * @w_mode :cache w_mode
+ * @width :cache line width in byte
+ * @way :way of each cache set
+ * @set :cache set num
+ * @w_mode :cache w_mode
*
* $ -1: error
- * 0: sucess
+ *   0: success
*/
int
mmu_cache_init (cache_s * cache_t, int width, int way, int set, int w_mode);
/* free a cache_t's inner data, the ptr self is not freed,
* when needed do like below:
- * mmu_cache_exit(cache);
- * free(cache_t);
+ * mmu_cache_exit(cache);
+ * free(cache_t);
*
* @cache_t : the cache_t to free
*/
@@ -89,40 +89,40 @@ void mmu_cache_exit (cache_s * cache_t);
/* mmu cache search
*
- * @state :ARMul_State
- * @cache_t :cache_t to search
- * @va :virtual address
+ * @state :ARMul_State
+ * @cache_t :cache_t to search
+ * @va :virtual address
*
- * $ NULL: no cache match
- * cache :cache matched
+ * $ NULL: no cache match
+ * cache :cache matched
* */
cache_line_t *mmu_cache_search (ARMul_State * state, cache_s * cache_t,
- ARMword va);
+ ARMword va);
/* mmu cache search by set/index
*
- * @state :ARMul_State
- * @cache_t :cache_t to search
+ * @state :ARMul_State
+ * @cache_t :cache_t to search
* @index :set/index value.
*
- * $ NULL: no cache match
- * cache :cache matched
+ * $ NULL: no cache match
+ * cache :cache matched
* */
cache_line_t *mmu_cache_search_by_index (ARMul_State * state,
- cache_s * cache_t, ARMword index);
+ cache_s * cache_t, ARMword index);
/* mmu cache alloc
*
* @state :ARMul_State
- * @cache_t :cache_t to alloc from
- * @va :virtual address that require cache alloc, need not cache aligned
- * @pa :physical address of va
+ * @cache_t :cache_t to alloc from
+ * @va :virtual address that require cache alloc, need not cache aligned
+ * @pa :physical address of va
*
- * $ cache_alloced, always alloc OK
+ * $ cache_alloced, always alloc OK
*/
cache_line_t *mmu_cache_alloc (ARMul_State * state, cache_s * cache_t,
- ARMword va, ARMword pa);
+ ARMword va, ARMword pa);
/* mmu_cache_write_back write cache data to memory
*
@@ -132,31 +132,31 @@ cache_line_t *mmu_cache_alloc (ARMul_State * state, cache_s * cache_t,
*/
void
mmu_cache_write_back (ARMul_State * state, cache_s * cache_t,
- cache_line_t * cache);
+ cache_line_t * cache);
/* mmu_cache_clean: clean a cache of va in cache_t
*
- * @state :ARMul_State
- * @cache_t :cache_t to clean
- * @va :virtaul address
+ * @state :ARMul_State
+ * @cache_t :cache_t to clean
+ * @va     :virtual address
*/
void mmu_cache_clean (ARMul_State * state, cache_s * cache_t, ARMword va);
void
mmu_cache_clean_by_index (ARMul_State * state, cache_s * cache_t,
- ARMword index);
+ ARMword index);
/* mmu_cache_invalidate : invalidate a cache of va
*
- * @state :ARMul_State
- * @cache_t :cache_t to invalid
- * @va :virt_addr to invalid
+ * @state :ARMul_State
+ * @cache_t :cache_t to invalid
+ * @va :virt_addr to invalid
*/
void
mmu_cache_invalidate (ARMul_State * state, cache_s * cache_t, ARMword va);
void
mmu_cache_invalidate_by_index (ARMul_State * state, cache_s * cache_t,
- ARMword index);
+ ARMword index);
void mmu_cache_invalidate_all (ARMul_State * state, cache_s * cache_t);