/*
 *  linux/arch/arm/mm/proc-arm922.S: MMU functions for ARM922
 *
 *  Copyright (C) 1999,2000 ARM Limited
 *  Copyright (C) 2000 Deep Blue Solutions Ltd.
 *  Copyright (C) 2001 Altera Corporation
 *  hacked for non-paged-MM by Hyok S. Choi, 2003.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *
 *
 * These are the low level assembler for performing cache and TLB
 * functions on the arm922.
 *
 *  CONFIG_CPU_ARM922_CPU_IDLE -> nohlt
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/page.h>
#include <asm/ptrace.h>
#include "proc-macros.S"

/*
 * The size of one data cache line, in bytes.
 */
#define CACHE_DLINESIZE	32

/*
 * The number of data cache segments (the ARM922 D-cache has 4;
 * contrast with the ARM920's 8).
 */
#define CACHE_DSEGMENTS	4

/*
 * The number of lines in a cache segment.
 */
#define CACHE_DENTRIES	64

/*
 * This is the size at which it becomes more efficient to
 * clean the whole cache, rather than using the individual
 * cache line maintainence instructions.  (I think this should
 * be 32768).
 */
#define CACHE_DLIMIT	8192


	.text
/*
 * cpu_arm922_proc_init()
 *
 * Nothing to do at processor initialisation time.
 */
ENTRY(cpu_arm922_proc_init)
	mov	pc, lr

/*
 * cpu_arm922_proc_fin()
 *
 * Prepare the CPU for shutdown: disable the I-cache, D-cache
 * and write buffer via the CP15 control register.
 */
ENTRY(cpu_arm922_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............ (clear I-cache enable)
	bic	r0, r0, #0x000e			@ ............wca. (clear WB/cache/align)
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	mov	pc, lr

/*
 * cpu_arm922_reset(loc)
 *
 * Perform a soft reset of the system.  Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 *
 * loc: location to jump to for soft reset
 */
	.align	5
ENTRY(cpu_arm922_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
#endif
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x000f			@ ............wcam (MMU/caches/WB off)
	bic	ip, ip, #0x1100			@ ...i...s........
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	mov	pc, r0				@ jump to caller-supplied reset vector

/*
 * cpu_arm922_do_idle()
 *
 * Idle the CPU until the next interrupt arrives.
 */
	.align	5
ENTRY(cpu_arm922_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	mov	pc, lr


#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH

/*
 * flush_user_cache_all()
 *
 * Clean and invalidate all cache entries in a particular
 * address space.
 */
ENTRY(arm922_flush_user_cache_all)
	/* FALLTHROUGH */

/*
 * flush_kern_cache_all()
 *
 * Clean and invalidate the entire cache.
 */
ENTRY(arm922_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
	@ Walk every (segment, index) pair of the D-cache using
	@ clean+invalidate-by-index ops.  Segment in bits 5-6,
	@ line index in bits 26-31 of the c7 op argument.
	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean+invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 3 to 0
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * flush_user_cache_range(start, end, flags)
 *
 * Clean and invalidate a range of cache entries in the
 * specified address range.
 *
 * - start	- start address (inclusive)
 * - end	- end address (exclusive)
 * - flags	- vm_flags describing address space
 */
ENTRY(arm922_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache		@ big range: cheaper to flush it all

1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	tst	r2, #VM_EXEC
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * coherent_kern_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 */
ENTRY(arm922_coherent_kern_range)
	/* FALLTHROUGH */

/*
 * coherent_user_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 */
ENTRY(arm922_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a cache line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * flush_kern_dcache_area(void *addr, size_t size)
 *
 * Ensure no D cache aliasing occurs, either with itself or
 * the I cache
 *
 * - addr	- kernel address
 * - size	- region size
 */
ENTRY(arm922_flush_kern_dcache_area)
	add	r1, r0, r1			@ r1 = end = addr + size
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_inv_range(start, end)
 *
 * Invalidate (discard) the specified virtual address range.
 * May not write back any entries.  If 'start' or 'end'
 * are not cache line aligned, those lines must be written
 * back.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 *
 * (same as v4wb)
 */
arm922_dma_inv_range:
	@ Partially-covered lines at either boundary are cleaned first
	@ so that invalidation cannot discard adjacent valid data.
	tst	r0, #CACHE_DLINESIZE - 1
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_clean_range(start, end)
 *
 * Clean the specified virtual address range.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 *
 * (same as v4wb)
 */
arm922_dma_clean_range:
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a cache line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_flush_range(start, end)
 *
 * Clean and invalidate the specified virtual address range.
 *
 * - start	- virtual start address
 * - end	- virtual end address
 */
ENTRY(arm922_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a cache line
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	pc, lr

/*
 * dma_map_area(start, size, dir)
 * - start	- kernel virtual start address
 * - size	- size of region
 * - dir	- DMA direction
 */
ENTRY(arm922_dma_map_area)
	add	r1, r1, r0			@ r1 = end = start + size
	cmp	r2, #DMA_TO_DEVICE
	beq	arm922_dma_clean_range		@ to device: clean only
	bcs	arm922_dma_inv_range		@ from device: invalidate only
	b	arm922_dma_flush_range		@ bidirectional: clean+invalidate
ENDPROC(arm922_dma_map_area)

/*
 * dma_unmap_area(start, size, dir)
 * - start	- kernel virtual start address
 * - size	- size of region
 * - dir	- DMA direction
 */
ENTRY(arm922_dma_unmap_area)
	mov	pc, lr				@ nothing to do on this CPU
ENDPROC(arm922_dma_unmap_area)

/*
 * Cache operations vector; entry order must match struct cpu_cache_fns.
 */
ENTRY(arm922_cache_fns)
	.long	arm922_flush_kern_cache_all
	.long	arm922_flush_user_cache_all
	.long	arm922_flush_user_cache_range
	.long	arm922_coherent_kern_range
	.long	arm922_coherent_user_range
	.long	arm922_flush_kern_dcache_area
	.long	arm922_dma_map_area
	.long	arm922_dma_unmap_area
	.long	arm922_dma_flush_range

#endif


/*
 * cpu_arm922_dcache_clean_area(addr, size)
 *
 * Clean (write back) the D-cache lines covering the given kernel
 * address range.  Compiled out (no-op) for write-through D-caches,
 * which never hold dirty data.
 *
 * - r0 = addr, r1 = size
 */
ENTRY(cpu_arm922_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	mov	pc, lr
/* =============================== PageTable ============================== */

/*
 * cpu_arm922_switch_mm(pgd)
 *
 * Set the translation base pointer to be as described by pgd.
 * The whole D-cache is cleaned+invalidated first (dirty lines are
 * tagged with virtual addresses of the outgoing address space).
 *
 * pgd: new page tables
 */
	.align	5
ENTRY(cpu_arm922_switch_mm)
#ifdef CONFIG_MMU
	mov	ip, #0
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ invalidate D cache
#else
@ && 'Clean & Invalidate whole DCache'
@ && Re-written to use Index Ops.
@ && Uses registers r1, r3 and ip
	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean & invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 3 to 0
#endif
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mcr	p15, 0, r0, c2, c0, 0		@ load page table pointer
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
#endif
	mov	pc, lr

/*
 * cpu_arm922_set_pte_ext(ptep, pte, ext)
 *
 * Set a PTE and flush it out
 */
	.align	5
ENTRY(cpu_arm922_set_pte_ext)
#ifdef CONFIG_MMU
	armv3_set_pte_ext			@ macro from proc-macros.S
	mov	r0, r0				@ nop
	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
#endif /* CONFIG_MMU */
	mov	pc, lr

	__INIT

/*
 * __arm922_setup
 *
 * Initialise caches/TLBs and compute the CP15 control register
 * value for this CPU.  Returns the value to set in r0; the clear
 * and set masks come from arm922_crval below (loaded into r5/r6).
 */
	.type	__arm922_setup, #function
__arm922_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7		@ invalidate I,D caches on v4
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer on v4
#ifdef CONFIG_MMU
	mcr	p15, 0, r0, c8, c7		@ invalidate I,D TLBs on v4
#endif
	adr	r5, arm922_crval
	ldmia	r5, {r5, r6}			@ r5 = clear mask, r6 = set mask
	mrc	p15, 0, r0, c1, c0		@ get control register v4
	bic	r0, r0, r5
	orr	r0, r0, r6
	mov	pc, lr
	.size	__arm922_setup, . - __arm922_setup

	/*
	 *  R
	 * .RVI ZFRS BLDP WCAM
	 * ..11 0001 ..11 0101
	 *
	 */
	.type	arm922_crval, #object
arm922_crval:
	crval	clear=0x00003f3f, mmuset=0x00003135, ucset=0x00001130

	__INITDATA

/*
 * Purpose : Function pointers used to access above functions - all calls
 *	     come through these
 */
	.type	arm922_processor_functions, #object
arm922_processor_functions:
	.word	v4t_early_abort
	.word	legacy_pabort
	.word	cpu_arm922_proc_init
	.word	cpu_arm922_proc_fin
	.word	cpu_arm922_reset
	.word	cpu_arm922_do_idle
	.word	cpu_arm922_dcache_clean_area
	.word	cpu_arm922_switch_mm
	.word	cpu_arm922_set_pte_ext
	.size	arm922_processor_functions, . - arm922_processor_functions

	.section ".rodata"

	.type	cpu_arch_name, #object
cpu_arch_name:
	.asciz	"armv4t"
	.size	cpu_arch_name, . - cpu_arch_name

	.type	cpu_elf_name, #object
cpu_elf_name:
	.asciz	"v4"
	.size	cpu_elf_name, . - cpu_elf_name

	.type	cpu_arm922_name, #object
cpu_arm922_name:
	.asciz	"ARM922T"
	.size	cpu_arm922_name, . - cpu_arm922_name

	.align

	.section ".proc.info.init", #alloc, #execinstr

	.type	__arm922_proc_info,#object
__arm922_proc_info:
	.long	0x41009220			@ NOTE(review): CPU id value/mask pair — verify against struct proc_info_list
	.long	0xff00fff0
	.long   PMD_TYPE_SECT | \
		PMD_SECT_BUFFERABLE | \
		PMD_SECT_CACHEABLE | \
		PMD_BIT4 | \
		PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ
	.long   PMD_TYPE_SECT | \
		PMD_BIT4 | \
		PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ
	b	__arm922_setup
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB
	.long	cpu_arm922_name
	.long	arm922_processor_functions
	.long	v4wbi_tlb_fns
	.long	v4wb_user_fns
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
	.long	arm922_cache_fns
#else
	.long	v4wt_cache_fns
#endif
	.size	__arm922_proc_info, . - __arm922_proc_info