Changeset 5265eea4 in mainline
- Timestamp:
- 2015-10-28T18:17:27Z (9 years ago)
- Branches:
- lfn, master, serial, ticket/834-toolchain-update, topic/msim-upgrade, topic/simplify-dev-export
- Children:
- 77a194c, ff381a7
- Parents:
- 0328987 (diff), 5783d10 (diff)
Note: this is a merge changeset, the changes displayed below correspond to the merge itself.
Use the (diff)
links above to see all the changes relative to each parent. - Files:
-
- 12 edited
Legend:
- Unmodified
- Added
- Removed
-
boot/arch/arm32/src/asm.S
r0328987 r5265eea4 75 75 bic r4, r4, #(1 << CP15_C1_DC) 76 76 77 # Disable I-cache and Branch epredictors.77 # Disable I-cache and Branch predictors. 78 78 bic r4, r4, #(1 << CP15_C1_IC) 79 #ifdef PROCESSOR_ARCH_armv6 79 80 bic r4, r4, #(1 << CP15_C1_BP) 81 #endif 80 82 81 83 mcr p15, 0, r4, c1, c0, 0 82 84 #endif 83 84 85 85 86 # Wait for the operations to complete86 # Wait for the operations to complete 87 87 #ifdef PROCESSOR_ARCH_armv7_a 88 88 dsb 89 89 #else 90 # cp15 dsb, r4 is ignored (should be zero)90 # cp15 dsb, r4 is ignored (should be zero) 91 91 mov r4, #0 92 92 mcr p15, 0, r4, c7, c10, 4 … … 98 98 nop 99 99 100 # Wait for the operations to complete100 # Wait for the operations to complete 101 101 #ifdef PROCESSOR_ARCH_armv7_a 102 102 isb 103 103 nop 104 #el se104 #elif defined(PROCESSOR_ARCH_armv6) 105 105 # cp15 isb 106 106 mcr p15, 0, r4, c7, c5, 4 -
boot/arch/arm32/src/main.c
r0328987 r5265eea4 47 47 #include <errno.h> 48 48 #include <inflate.h> 49 #include <arch/cp15.h> 49 50 50 51 #define TOP2ADDR(top) (((void *) PA2KA(BOOT_OFFSET)) + (top)) … … 55 56 static inline void clean_dcache_poc(void *address, size_t size) 56 57 { 57 const uintptr_t addr = (uintptr_t)address; 58 for (uintptr_t a = addr; a < addr + size; a += 4) { 59 /* DCCMVAC - clean by address to the point of coherence */ 60 asm volatile ("mcr p15, 0, %[a], c7, c10, 1\n" :: [a]"r"(a) : ); 58 const uintptr_t addr = (uintptr_t) address; 59 60 #if !defined(PROCESSOR_ARCH_armv7_a) 61 bool sep; 62 if (MIDR_read() != CTR_read()) { 63 sep = (CTR_read() & CTR_SEP_FLAG) == CTR_SEP_FLAG; 64 } else { 65 printf("Unknown cache type.\n"); 66 halt(); 67 } 68 #endif 69 70 for (uintptr_t a = ALIGN_DOWN(addr, CP15_C7_MVA_ALIGN); a < addr + size; 71 a += CP15_C7_MVA_ALIGN) { 72 #if defined(PROCESSOR_ARCH_armv7_a) 73 DCCMVAC_write(a); 74 #else 75 if (sep) 76 DCCMVA_write(a); 77 else 78 CCMVA_write(a); 79 #endif 61 80 } 62 81 } -
boot/arch/arm32/src/mm.c
r0328987 r5265eea4 143 143 pte->should_be_zero_1 = 0; 144 144 pte->access_permission_0 = PTE_AP_USER_NO_KERNEL_RW; 145 #if def PROCESSOR_ARCH_armv7_a145 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 146 146 /* 147 147 * Keeps this setting in sync with memory type attributes in: … … 152 152 pte->tex = section_cacheable(frame) ? 5 : 0; 153 153 pte->cacheable = section_cacheable(frame) ? 0 : 0; 154 pte->bufferable = section_cacheable(frame) ? 1 : 0;154 pte->bufferable = section_cacheable(frame) ? 1 : 1; 155 155 #else 156 pte->bufferable = 1;156 pte->bufferable = section_cacheable(frame); 157 157 pte->cacheable = section_cacheable(frame); 158 158 pte->tex = 0; … … 189 189 */ 190 190 uint32_t val = (uint32_t)boot_pt & TTBR_ADDR_MASK; 191 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 192 // FIXME: TTBR_RGN_WBWA_CACHE is unpredictable on ARMv6 191 193 val |= TTBR_RGN_WBWA_CACHE | TTBR_C_FLAG; 194 #endif 192 195 TTBR0_write(val); 193 196 } -
kernel/arch/arm32/include/arch/atomic.h
r0328987 r5265eea4 78 78 * 79 79 */ 80 NO_TRACE static inline void atomic_dec(atomic_t *val) { 80 NO_TRACE static inline void atomic_dec(atomic_t *val) 81 { 81 82 atomic_add(val, -1); 82 83 } -
kernel/arch/arm32/include/arch/barrier.h
r0328987 r5265eea4 38 38 39 39 #ifdef KERNEL 40 #include <arch/cache.h> 40 41 #include <arch/cp15.h> 42 #include <align.h> 41 43 #else 42 44 #include <libarch/cp15.h> … … 71 73 * CP15 implementation is mandatory only for armv6+. 72 74 */ 75 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 73 76 #define memory_barrier() CP15DMB_write(0) 74 #define read_barrier() CP15DSB_write(0) 77 #else 78 #define memory_barrier() CP15DSB_write(0) 79 #endif 80 #define read_barrier() CP15DSB_write(0) 75 81 #define write_barrier() read_barrier() 82 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 76 83 #define inst_barrier() CP15ISB_write(0) 84 #else 85 #define inst_barrier() 86 #endif 77 87 #else 78 88 /* Older manuals mention syscalls as a way to implement cache coherency and … … 103 113 104 114 #if defined PROCESSOR_ARCH_armv7_a | defined PROCESSOR_ARCH_armv6 | defined KERNEL 105 /* Available on all supported arms,106 * invalidates entire ICache so the written value does not matter. */107 115 //TODO might be PL1 only on armv5- 108 116 #define smc_coherence(a) \ 109 117 do { \ 110 DCCMVAU_write((uint32_t)(a)); /* Flush changed memory */\118 dcache_clean_mva_pou(ALIGN_DOWN((uintptr_t) a, CP15_C7_MVA_ALIGN)); \ 111 119 write_barrier(); /* Wait for completion */\ 112 ICIALLU_write(0); /* Flush ICache */\120 icache_invalidate();\ 113 121 inst_barrier(); /* Wait for Inst refetch */\ 114 122 } while (0) … … 117 125 #define smc_coherence_block(a, l) \ 118 126 do { \ 119 for (uintptr_t addr = (uintptr_t)a; addr < (uintptr_t)a + l; addr += 4)\ 127 for (uintptr_t addr = (uintptr_t) a; addr < (uintptr_t) a + l; \ 128 addr += CP15_C7_MVA_ALIGN) \ 120 129 smc_coherence(addr); \ 121 130 } while (0) -
kernel/arch/arm32/include/arch/cache.h
r0328987 r5265eea4 37 37 #define KERN_arm32_CACHE_H_ 38 38 39 #include <typedefs.h> 40 39 41 unsigned dcache_levels(void); 40 42 … … 43 45 void cpu_dcache_flush(void); 44 46 void cpu_dcache_flush_invalidate(void); 45 void icache_invalidate(void); 47 extern void icache_invalidate(void); 48 extern void dcache_invalidate(void); 49 extern void dcache_clean_mva_pou(uintptr_t); 46 50 47 51 #endif -
kernel/arch/arm32/include/arch/cp15.h
r0328987 r5265eea4 118 118 }; 119 119 CONTROL_REG_GEN_READ(CTR, c0, 0, c0, 1); 120 121 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 120 122 CONTROL_REG_GEN_READ(TCMR, c0, 0, c0, 2); 123 124 enum { 125 TLBTR_SEP_FLAG = 1, 126 }; 127 121 128 CONTROL_REG_GEN_READ(TLBTR, c0, 0, c0, 3); 129 #endif 130 131 #if defined(PROCESSOR_ARCH_armv7_a) 122 132 CONTROL_REG_GEN_READ(MPIDR, c0, 0, c0, 5); 123 133 CONTROL_REG_GEN_READ(REVIDR, c0, 0, c0, 6); 134 #endif 124 135 125 136 enum { … … 309 320 enum { 310 321 TTBR_ADDR_MASK = 0xffffff80, 322 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 311 323 TTBR_NOS_FLAG = 1 << 5, 312 324 TTBR_RGN_MASK = 0x3 << 3, … … 317 329 TTBR_S_FLAG = 1 << 1, 318 330 TTBR_C_FLAG = 1 << 0, 331 #endif 319 332 }; 320 333 CONTROL_REG_GEN_READ(TTBR0, c2, 0, c0, 0); 321 334 CONTROL_REG_GEN_WRITE(TTBR0, c2, 0, c0, 0); 335 336 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 322 337 CONTROL_REG_GEN_READ(TTBR1, c2, 0, c0, 1); 323 338 CONTROL_REG_GEN_WRITE(TTBR1, c2, 0, c0, 1); 324 339 CONTROL_REG_GEN_READ(TTBCR, c2, 0, c0, 2); 325 340 CONTROL_REG_GEN_WRITE(TTBCR, c2, 0, c0, 2); 326 341 #endif 342 343 #if defined(PROCESSOR_ARCH_armv7) 327 344 CONTROL_REG_GEN_READ(HTCR, c2, 4, c0, 2); 328 345 CONTROL_REG_GEN_WRITE(HTCR, c2, 4, c0, 2); … … 339 356 CONTROL_REG_GEN_READ(VTTBRH, c2, 0, c2, 6); 340 357 CONTROL_REG_GEN_WRITE(VTTBRH, c2, 0, c2, 6); 358 #endif 341 359 342 360 CONTROL_REG_GEN_READ(DACR, c3, 0, c0, 0); … … 373 391 CONTROL_REG_GEN_WRITE(HPFAR, c6, 4, c0, 4); 374 392 375 /* Cache maintenance, address translation and other */ 376 CONTROL_REG_GEN_WRITE(WFI, c7, 0, c0, 4); /* armv6 only */ 377 CONTROL_REG_GEN_WRITE(ICIALLLUIS, c7, 0, c1, 0); 378 CONTROL_REG_GEN_WRITE(BPIALLIS, c7, 0, c1, 6); 379 CONTROL_REG_GEN_READ(PAR, c7, 0, c4, 0); 380 CONTROL_REG_GEN_WRITE(PAR, c7, 0, c4, 0); 381 CONTROL_REG_GEN_READ(PARH, c7, 0, c7, 0); /* PAE */ 382 CONTROL_REG_GEN_WRITE(PARH, c7, 0, c7, 0); /* PAE */ 383 
CONTROL_REG_GEN_WRITE(ICIALLU, c7, 0, c5, 0); 384 CONTROL_REG_GEN_WRITE(ICIMVAU, c7, 0, c5, 1); 393 /* 394 * Cache maintenance, address translation and other 395 */ 396 397 #if defined(PROCESSOR_cortex_a8) 398 #define CP15_C7_MVA_ALIGN 64 399 #elif defined(PROCESSOR_arm1176) 400 #define CP15_C7_MVA_ALIGN 32 401 #elif defined(PROCESSOR_arm926ej_s) 402 #define CP15_C7_MVA_ALIGN 32 403 #elif defined(PROCESSOR_arm920t) 404 #define CP15_C7_MVA_ALIGN 32 405 #else 406 #error Unknow MVA alignment 407 #endif 408 409 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 385 410 CONTROL_REG_GEN_WRITE(CP15ISB, c7, 0, c5, 4); 386 411 CONTROL_REG_GEN_WRITE(BPIALL, c7, 0, c5, 6); 387 412 CONTROL_REG_GEN_WRITE(BPIMVA, c7, 0, c5, 7); 388 413 #endif 414 415 #if !defined(PROCESSOR_arm920t) 416 CONTROL_REG_GEN_WRITE(DCISW, c7, 0, c6, 2); 417 #endif 418 419 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4) 420 CONTROL_REG_GEN_WRITE(DCCSW, c7, 0, c10, 2); 421 #endif 422 423 CONTROL_REG_GEN_WRITE(CP15DSB, c7, 0, c10, 4); 424 425 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 426 CONTROL_REG_GEN_WRITE(CP15DMB, c7, 0, c10, 5); 427 #endif 428 429 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4) 430 CONTROL_REG_GEN_WRITE(DCCISW, c7, 0, c14, 2); 431 #endif 432 433 #if defined(PROCESSOR_ARCH_armv7_a) 434 CONTROL_REG_GEN_WRITE(ICIALLLUIS, c7, 0, c1, 0); 435 CONTROL_REG_GEN_WRITE(BPIALLIS, c7, 0, c1, 6); 436 CONTROL_REG_GEN_READ(PAR, c7, 0, c4, 0); /* Security Extensions */ 437 CONTROL_REG_GEN_WRITE(PAR, c7, 0, c4, 0); /* Security Extensions */ 438 CONTROL_REG_GEN_WRITE(ICIALLU, c7, 0, c5, 0); 439 CONTROL_REG_GEN_WRITE(ICIMVAU, c7, 0, c5, 1); 389 440 CONTROL_REG_GEN_WRITE(DCIMVAC, c7, 0, c6, 1); 390 CONTROL_REG_GEN_ WRITE(DCISW, c7, 0, c6, 2);391 392 CONTROL_REG_GEN_WRITE(ATS1CPR, c7, 0, c8, 0); 393 CONTROL_REG_GEN_WRITE(ATS1CPW, c7, 0, c8, 1); 394 CONTROL_REG_GEN_WRITE(ATS1CUR, c7, 0, c8, 2); 395 CONTROL_REG_GEN_WRITE(ATS1CUW, 
c7, 0, c8, 3); 396 CONTROL_REG_GEN_WRITE(ATS12NSOPR, c7, 0, c8, 4); 397 CONTROL_REG_GEN_WRITE(ATS12NSOPW, c7, 0, c8, 5); 398 CONTROL_REG_GEN_WRITE(ATS12NSOUR, c7, 0, c8, 6); 399 CONTROL_REG_GEN_WRITE(ATS12NSOUW, c7, 0, c8, 7); 400 401 441 CONTROL_REG_GEN_READ(PARH, c7, 0, c7, 0); /* PAE */ 442 CONTROL_REG_GEN_WRITE(PARH, c7, 0, c7, 0); /* PAE */ 443 CONTROL_REG_GEN_WRITE(ATS1CPR, c7, 0, c8, 0); /* Security Extensions */ 444 CONTROL_REG_GEN_WRITE(ATS1CPW, c7, 0, c8, 1); /* Security Extensions */ 445 CONTROL_REG_GEN_WRITE(ATS1CUR, c7, 0, c8, 2); /* Security Extensions */ 446 CONTROL_REG_GEN_WRITE(ATS1CUW, c7, 0, c8, 3); /* Security Extensions */ 447 CONTROL_REG_GEN_WRITE(ATS12NSOPR, c7, 0, c8, 4); /* Security Extensions */ 448 CONTROL_REG_GEN_WRITE(ATS12NSOPW, c7, 0, c8, 5); /* Security Extensions */ 449 CONTROL_REG_GEN_WRITE(ATS12NSOUR, c7, 0, c8, 6); /* Security Extensions */ 450 CONTROL_REG_GEN_WRITE(ATS12NSOUW, c7, 0, c8, 7); /* Security Extensions */ 451 CONTROL_REG_GEN_WRITE(ATS1HR, c7, 4, c8, 0); /* Virtualization Extensions */ 452 CONTROL_REG_GEN_WRITE(ATS1HW, c7, 4, c8, 1); /* Virtualization Extensions */ 402 453 CONTROL_REG_GEN_WRITE(DCCMVAC, c7, 0, c10, 1); 403 CONTROL_REG_GEN_WRITE(DCCSW, c7, 0, c10, 2);404 CONTROL_REG_GEN_WRITE(CP15DSB, c7, 0, c10, 4);405 CONTROL_REG_GEN_WRITE(CP15DMB, c7, 0, c10, 5);406 454 CONTROL_REG_GEN_WRITE(DCCMVAU, c7, 0, c11, 1); 407 408 CONTROL_REG_GEN_WRITE(PFI, c7, 0, c11, 1); /* armv6 only */409 410 455 CONTROL_REG_GEN_WRITE(DCCIMVAC, c7, 0, c14, 1); 411 CONTROL_REG_GEN_WRITE(DCCISW, c7, 0, c14, 2); 412 413 CONTROL_REG_GEN_WRITE(ATS1HR, c7, 4, c8, 0); 414 CONTROL_REG_GEN_WRITE(ATS1HW, c7, 4, c8, 1); 456 #else 457 458 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4) 459 CONTROL_REG_GEN_WRITE(WFI, c7, 0, c0, 4); 460 #endif 461 462 CONTROL_REG_GEN_WRITE(ICIALL, c7, 0, c5, 0); 463 CONTROL_REG_GEN_WRITE(ICIMVA, c7, 0, c5, 1); 464 465 #if !defined(PROCESSOR_ARCH_armv4) 466 CONTROL_REG_GEN_WRITE(ICISW, c7, 0, c5, 2); 
467 #endif 468 469 CONTROL_REG_GEN_WRITE(DCIALL, c7, 0, c6, 0); 470 CONTROL_REG_GEN_WRITE(DCIMVA, c7, 0, c6, 1); 471 CONTROL_REG_GEN_WRITE(CIALL, c7, 0, c7, 0); 472 CONTROL_REG_GEN_WRITE(CIMVA, c7, 0, c7, 1); 473 474 #if !defined(PROCESSOR_ARCH_armv4) 475 CONTROL_REG_GEN_WRITE(CISW, c7, 0, c7, 2); 476 #endif 477 478 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6) 479 CONTROL_REG_GEN_WRITE(DCCALL, c7, 0, c10, 0); 480 #endif 481 482 CONTROL_REG_GEN_WRITE(DCCMVA, c7, 0, c10, 1); 483 484 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6) 485 CONTROL_REG_GEN_WRITE(CCALL, c7, 0, c11, 0); 486 #endif 487 488 CONTROL_REG_GEN_WRITE(CCMVA, c7, 0, c11, 1); 489 490 #if !defined(PROCESSOR_ARCH_armv4) 491 CONTROL_REG_GEN_WRITE(CCSW, c7, 0, c11, 2); 492 #endif 493 494 #if defined(PROCESSOR_arm920t) || !defined(PROCESSOR_ARCH_armv4) 495 CONTROL_REG_GEN_WRITE(PFIMVA, c7, 0, c13, 1); 496 #endif 497 498 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6) 499 CONTROL_REG_GEN_WRITE(DCCIALL, c7, 0, c14, 0); 500 #endif 501 502 CONTROL_REG_GEN_WRITE(DCCIMVA, c7, 0, c14, 1); 503 504 #if defined(PROCESSOR_ARCH_armv4) || defined(PROCESSOR_ARCH_armv6) 505 CONTROL_REG_GEN_WRITE(CCIALL, c7, 0, c15, 0); 506 #endif 507 508 CONTROL_REG_GEN_WRITE(CCIMVA, c7, 0, c15, 1); 509 510 #if defined(PROCESSOR_ARCH_armv5) || defined(PROCESSOR_ARCH_armv6) 511 CONTROL_REG_GEN_WRITE(CCISW, c7, 0, c15, 2); 512 #endif 513 514 #endif 415 515 416 516 /* TLB maintenance */ 517 #if defined(PROCESSOR_ARCH_armv7_a) 417 518 CONTROL_REG_GEN_WRITE(TLBIALLIS, c8, 0, c3, 0); /* Inner shareable */ 418 519 CONTROL_REG_GEN_WRITE(TLBIMVAIS, c8, 0, c3, 1); /* Inner shareable */ 419 520 CONTROL_REG_GEN_WRITE(TLBIASIDIS, c8, 0, c3, 2); /* Inner shareable */ 420 521 CONTROL_REG_GEN_WRITE(TLBIMVAAIS, c8, 0, c3, 3); /* Inner shareable */ 522 #endif 421 523 422 524 CONTROL_REG_GEN_WRITE(ITLBIALL, c8, 0, c5, 0); 423 525 CONTROL_REG_GEN_WRITE(ITLBIMVA, c8, 0, c5, 1); 526 #if 
defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 424 527 CONTROL_REG_GEN_WRITE(ITLBIASID, c8, 0, c5, 2); 528 #endif 425 529 426 530 CONTROL_REG_GEN_WRITE(DTLBIALL, c8, 0, c6, 0); 427 531 CONTROL_REG_GEN_WRITE(DTLBIMVA, c8, 0, c6, 1); 532 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 428 533 CONTROL_REG_GEN_WRITE(DTLBIASID, c8, 0, c6, 2); 534 #endif 429 535 430 536 CONTROL_REG_GEN_WRITE(TLBIALL, c8, 0, c7, 0); 431 537 CONTROL_REG_GEN_WRITE(TLBIMVA, c8, 0, c7, 1); 538 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 432 539 CONTROL_REG_GEN_WRITE(TLBIASID, c8, 0, c7, 2); 540 #endif 541 #if defined(PROCESSOR_ARCH_armv7_a) 433 542 CONTROL_REG_GEN_WRITE(TLBIMVAA, c8, 0, c7, 3); 434 543 #endif 544 545 #if defined(PROCESSOR_ARCH_armv7_a) 435 546 CONTROL_REG_GEN_WRITE(TLBIALLHIS, c8, 4, c3, 0); /* Inner shareable */ 436 547 CONTROL_REG_GEN_WRITE(TLBIMVAHIS, c8, 4, c3, 1); /* Inner shareable */ 437 548 CONTROL_REG_GEN_WRITE(TLBIALLNSNHIS, c8, 4, c3, 4); /* Inner shareable */ 438 549 #endif 550 551 #if defined(PROCESSOR_ARCH_armv7_a) 439 552 CONTROL_REG_GEN_WRITE(TLBIALLH, c8, 4, c7, 0); 440 553 CONTROL_REG_GEN_WRITE(TLBIMVAH, c8, 4, c7, 1); 441 554 CONTROL_REG_GEN_WRITE(TLBIALLNSNHS, c8, 4, c7, 4); 555 #endif 442 556 443 557 /* c9 are performance monitoring resgisters */ -
kernel/arch/arm32/include/arch/mm/page.h
r0328987 r5265eea4 154 154 { 155 155 uint32_t val = (uint32_t)pt & TTBR_ADDR_MASK; 156 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 157 // FIXME: TTBR_RGN_WBWA_CACHE is unpredictable on ARMv6 156 158 val |= TTBR_RGN_WBWA_CACHE | TTBR_C_FLAG; 159 #endif 157 160 TTBR0_write(val); 158 161 } -
kernel/arch/arm32/include/arch/mm/page_armv4.h
r0328987 r5265eea4 123 123 do { \ 124 124 for (unsigned i = 0; i < count; ++i) \ 125 DCCMVAU_write((uintptr_t)(pt + i)); \125 dcache_clean_mva_pou((uintptr_t)(pt + i)); \ 126 126 read_barrier(); \ 127 127 } while (0) -
kernel/arch/arm32/include/arch/mm/page_armv6.h
r0328987 r5265eea4 156 156 do { \ 157 157 for (unsigned i = 0; i < count; ++i) \ 158 DCCMVAU_write((uintptr_t)(pt + i)); \158 dcache_clean_mva_pou((uintptr_t)(pt + i)); \ 159 159 read_barrier(); \ 160 160 } while (0) 161 162 161 163 162 /** Returns level 0 page table entry flags. … … 258 257 if (flags & PAGE_CACHEABLE) { 259 258 /* 260 * Write-through, write-allocate memory, see ch. B3.8.2 261 * (p. B3-1358) of ARM Architecture reference manual. 259 * Outer and inner write-back, write-allocate memory, 260 * see ch. B3.8.2 (p. B3-1358) of ARM Architecture reference 261 * manual. 262 * 262 263 * Make sure the memory type is correct, and in sync with: 263 264 * init_boot_pt (boot/arch/arm32/src/mm.c) … … 278 279 } 279 280 280 #if defined(PROCESSOR_ARCH_armv6)281 /* FIXME: this disables caches */282 p->shareable = 1;283 #else284 281 /* Shareable is ignored for devices (non-cacheable), 285 282 * turn it off for normal memory. */ 286 283 p->shareable = 0; 287 #endif288 284 289 285 p->non_global = !(flags & PAGE_GLOBAL); -
kernel/arch/arm32/src/cpu/cpu.c
r0328987 r5265eea4 130 130 { 131 131 uint32_t control_reg = SCTLR_read(); 132 132 133 dcache_invalidate(); 134 read_barrier(); 135 133 136 /* Turn off tex remap, RAZ/WI prior to armv7 */ 134 137 control_reg &= ~SCTLR_TEX_REMAP_EN_FLAG; … … 322 325 void icache_invalidate(void) 323 326 { 327 #if defined(PROCESSOR_ARCH_armv7_a) 324 328 ICIALLU_write(0); 329 #else 330 ICIALL_write(0); 331 #endif 332 } 333 334 #if !defined(PROCESSOR_ARCH_armv7_a) 335 static bool cache_is_unified(void) 336 { 337 if (MIDR_read() != CTR_read()) { 338 /* We have the CTR register */ 339 return (CTR_read() & CTR_SEP_FLAG) != CTR_SEP_FLAG; 340 } else { 341 panic("Unknown cache type"); 342 } 343 } 344 #endif 345 346 void dcache_invalidate(void) 347 { 348 #if defined(PROCESSOR_ARCH_armv7_a) 349 dcache_flush_invalidate(); 350 #else 351 if (cache_is_unified()) 352 CIALL_write(0); 353 else 354 DCIALL_write(0); 355 #endif 356 } 357 358 void dcache_clean_mva_pou(uintptr_t mva) 359 { 360 #if defined(PROCESSOR_ARCH_armv7_a) 361 DCCMVAU_write(mva); 362 #else 363 if (cache_is_unified()) 364 CCMVA_write(mva); 365 else 366 DCCMVA_write(mva); 367 #endif 325 368 } 326 369 -
kernel/arch/arm32/src/mm/tlb.c
r0328987 r5265eea4 79 79 static inline void invalidate_page(uintptr_t page) 80 80 { 81 //TODO: What about TLBIMVAA? 81 #if defined(PROCESSOR_ARCH_armv6) || defined(PROCESSOR_ARCH_armv7_a) 82 if (TLBTR_read() & TLBTR_SEP_FLAG) { 83 ITLBIMVA_write(page); 84 DTLBIMVA_write(page); 85 } else { 86 TLBIMVA_write(page); 87 } 88 #elif defined(PROCESSOR_arm920t) 89 ITLBIMVA_write(page); 90 DTLBIMVA_write(page); 91 #elif defined(PROCESSOR_arm926ej_s) 82 92 TLBIMVA_write(page); 93 #else 94 #error Unknown TLB type 95 #endif 96 83 97 /* 84 98 * "A TLB maintenance operation is only guaranteed to be complete after
Note:
See TracChangeset
for help on using the changeset viewer.