diff --git a/arch/x86/core/cache.c b/arch/x86/core/cache.c
index 28a028ecd74..cd0bbb8e16d 100644
--- a/arch/x86/core/cache.c
+++ b/arch/x86/core/cache.c
@@ -18,6 +18,58 @@
 #include
 #include
 
+static inline void z_x86_wbinvd(void)
+{
+	__asm__ volatile("wbinvd;\n\t" : : : "memory");
+}
+
+void arch_dcache_enable(void)
+{
+	uint32_t cr0;
+
+	/* Enable write-back caching by clearing the NW and CD bits */
+	__asm__ volatile("movl %%cr0, %0;\n\t"
+			 "andl $0x9fffffff, %0;\n\t"
+			 "movl %0, %%cr0;\n\t"
+			 : "=r" (cr0));
+}
+
+void arch_dcache_disable(void)
+{
+	uint32_t cr0;
+
+	/* Enter the no-fill mode by setting NW=0 and CD=1 */
+	__asm__ volatile("movl %%cr0, %0;\n\t"
+			 "andl $0xdfffffff, %0;\n\t"
+			 "orl $0x40000000, %0;\n\t"
+			 "movl %0, %%cr0;\n\t"
+			 : "=r" (cr0));
+
+	/* Flush all caches */
+	z_x86_wbinvd();
+}
+
+int arch_dcache_flush_all(void)
+{
+	z_x86_wbinvd();
+
+	return 0;
+}
+
+int arch_dcache_invd_all(void)
+{
+	z_x86_wbinvd();
+
+	return 0;
+}
+
+int arch_dcache_flush_and_invd_all(void)
+{
+	z_x86_wbinvd();
+
+	return 0;
+}
+
 /**
  * No alignment is required for either <start_addr> or <size>, but since
  * sys_cache_flush() iterates on the cache lines, a cache line alignment for
@@ -49,3 +101,13 @@ int arch_dcache_flush_range(void *start_addr, size_t size)
 #endif
 	return 0;
 }
+
+int arch_dcache_invd_range(void *start_addr, size_t size)
+{
+	return arch_dcache_flush_range(start_addr, size);
+}
+
+int arch_dcache_flush_and_invd_range(void *start_addr, size_t size)
+{
+	return arch_dcache_flush_range(start_addr, size);
+}
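
Note on usage: the sketch below (not part of the patch) shows how a driver might call the new range operations around a DMA transfer. The arch_dcache_*_range() signatures are taken from the patch; the buffer names, dma_example(), and the commented-out DMA helpers are hypothetical, invented only for illustration. Portable application code would normally go through the higher-level sys_cache_* wrappers rather than the arch_* functions directly.

	#include <stdint.h>
	#include <stddef.h>

	/* Declarations matching the functions added by this patch. */
	extern int arch_dcache_flush_range(void *start_addr, size_t size);
	extern int arch_dcache_invd_range(void *start_addr, size_t size);

	static uint8_t tx_buf[256];   /* hypothetical buffer the CPU fills */
	static uint8_t rx_buf[256];   /* hypothetical buffer a device fills */

	void dma_example(void)
	{
		/* CPU wrote tx_buf: write dirty lines back so the device
		 * reads up-to-date data from memory.
		 */
		arch_dcache_flush_range(tx_buf, sizeof(tx_buf));
		/* start_dma_tx(tx_buf, sizeof(tx_buf));  -- hypothetical */

		/* Device wrote rx_buf: get rid of stale cached lines before
		 * the CPU reads the buffer.
		 */
		arch_dcache_invd_range(rx_buf, sizeof(rx_buf));
		/* process_rx(rx_buf, sizeof(rx_buf));    -- hypothetical */
	}

Mapping arch_dcache_invd_range() and arch_dcache_flush_and_invd_range() onto arch_dcache_flush_range() is safe on x86 because both CLFLUSH and WBINVD write back dirty lines and then invalidate them, so "invalidate" here is simply the stronger flush-and-invalidate operation.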