// Copyright 2016 The Fuchsia Authors
// Copyright (c) 2014, Google Inc. All rights reserved
//
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT

#include <asm.h>
#include <arch/ops.h>
#include <arch/defines.h>

.text

// Perform the cache maintenance op `\cache \op` over the byte range
// [x0, x0 + x1), stepping by the line size stored in \size_var.
// Clobbers x2-x5 and x16.
.macro cache_range_op, cache op size_var
    adrp    x16, \size_var
    ldr     w4, [x16, :lo12:\size_var]  // cache line size in bytes

    add     x2, x0, x1                  // calculate the end address

    sub     x5, x4, #1                  // cache line size mask
    bic     x3, x0, x5                  // cache align the start address by applying the inverted mask

.Lcache_range_op_loop\@:
    \cache  \op, x3                     // issue the op on this line
    add     x3, x3, x4                  // advance to the next line
    cmp     x3, x2
    blo     .Lcache_range_op_loop\@     // loop until we pass the end address
    dsb     sy                          // ensure the ops complete
.endm
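
// For reference, a sketch (not an exact assembler listing; the loop label is
// really generated via \@) of what `cache_range_op dc cvac arm64_dcache_size`
// expands to:
//
//     adrp    x16, arm64_dcache_size
//     ldr     w4, [x16, :lo12:arm64_dcache_size]
//     add     x2, x0, x1
//     sub     x5, x4, #1
//     bic     x3, x0, x5
// 0:  dc      cvac, x3
//     add     x3, x3, x4
//     cmp     x3, x2
//     blo     0b
//     dsb     sy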

    /* void arch_clean_cache_range(addr_t start, size_t len); */
FUNCTION(arch_clean_cache_range)
    cache_range_op dc cvac arm64_dcache_size // clean dcache to PoC by MVA
    ret
END_FUNCTION(arch_clean_cache_range)
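
// Example (hypothetical caller): clean a freshly written buffer to the
// Point of Coherency before handing it to a non-coherent DMA engine.
// `dma_buffer` and the length are placeholders, not symbols from this file.
//
//     adr     x0, dma_buffer          // start address
//     mov     x1, #512                // length in bytes
//     bl      arch_clean_cache_range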

    /* void arch_clean_invalidate_cache_range(addr_t start, size_t len); */
FUNCTION(arch_clean_invalidate_cache_range)
    cache_range_op dc civac arm64_dcache_size // clean & invalidate dcache to PoC by MVA
    ret
END_FUNCTION(arch_clean_invalidate_cache_range)

    /* void arch_invalidate_cache_range(addr_t start, size_t len); */
FUNCTION(arch_invalidate_cache_range)
    cache_range_op dc ivac arm64_dcache_size // invalidate dcache to PoC by MVA
    ret
END_FUNCTION(arch_invalidate_cache_range)
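
// Note: `dc ivac` discards cached data without writing it back. Because the
// loop rounds the start address down to a line boundary (and the last line
// may extend past start + len), callers should pass cache-line-aligned
// ranges, or dirty data sharing the first/last line may be lost.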

    /* void arch_sync_cache_range(addr_t start, size_t len); */
FUNCTION(arch_sync_cache_range)
    cache_range_op dc cvau arm64_dcache_size // clean dcache to PoU by MVA
    cache_range_op ic ivau arm64_icache_size // invalidate icache to PoU by MVA
    isb
    ret
END_FUNCTION(arch_sync_cache_range)
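
// Example (hypothetical caller): after writing or patching instructions at
// x0, make them visible to the instruction stream before branching there.
// Assumes x19 is free in the enclosing code.
//
//     mov     x19, x0                 // x0/x1 are caller-saved per the AAPCS
//     bl      arch_sync_cache_range   // x0 = start, x1 = length in bytes
//     br      x19                     // now safe to execute the new code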

/* void arch_invalidate_cache_all()
 *      Invalidate all data/unified caches by set/way. Should only be used
 *      early in boot, prior to enabling the MMU and caches.
 */
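// The loop below walks CLIDR_EL1 level by level and issues `dc isw` for
// every set and way of each data or unified cache. Per the ARM ARM, the
// DC ISW operand is packed as:
//   bits [3:1]      cache level (the code ORs in w10 = 2 x level)
//   bits [L+S-1:L]  set number, where L = log2(line size in bytes)
//   bits [31:32-A]  way number, where A = log2(associativity)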
FUNCTION(arch_invalidate_cache_all)
    mrs     x0, clidr_el1
    and     w3, w0, #0x07000000         // extract 2 x level of coherence (LoC)
    lsr     w3, w3, #23
    cbz     w3, .Lfinished              // no cache levels to invalidate
    mov     w10, #0                     // w10 = 2 x current level (CSSELR value)
    mov     w8, #1                      // w8 = constant 1
.Lloop_level:
    add     w2, w10, w10, lsr #1        // w2 = 3 x cache level (CLIDR field shift)
    lsr     w1, w0, w2                  // extract the 3-bit cache type for this level
    and     w1, w1, #0x7
    cmp     w1, #2
    b.lt    .Lskip                      // no data or unified cache at this level
    msr     csselr_el1, x10             // select this cache level
    isb                                 // synchronize the csselr change
    mrs     x1, ccsidr_el1              // read the selected cache's geometry
    and     w2, w1, #7                  // w2 = log2(line size) - 4
    add     w2, w2, #4                  // w2 = log2(line size in bytes)
    ubfx    w4, w1, #3, #10             // w4 = max way number, right aligned
    clz     w5, w4                      // w5 = 32 - log2(ways), way field position
    lsl     w9, w4, w5                  // w9 = max way number, in operand position

    lsl     w16, w8, w5                 // w16 = decrement for the way number

.Lloop_way:
    ubfx    w7, w1, #13, #15            // w7 = max set number, right aligned
    lsl     w7, w7, w2                  // w7 = max set number, in operand position
    lsl     w17, w8, w2                 // w17 = decrement for the set number
.Lloop_set:
    orr     w11, w10, w9                // combine cache level and way number
    orr     w11, w11, w7                // combine with the set number
    dc      isw, x11                    // invalidate this set/way
    subs    w7, w7, w17                 // decrement the set number
    b.ge    .Lloop_set

    subs    x9, x9, x16                 // decrement the way number
    b.ge    .Lloop_way
.Lskip:
    add     w10, w10, #2                // advance to the next cache level
    cmp     w3, w10
    dsb     sy                          // complete this level's ops before moving on
    b.gt    .Lloop_level
.Lfinished:
    ic      iallu                       // also invalidate the entire icache
    ret
END_FUNCTION(arch_invalidate_cache_all)