rmi_mips_exts.h (212366) → rmi_mips_exts.h (212758)
1/*-
2 * Copyright (c) 2003-2009 RMI Corporation
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright

--- 13 unchanged lines hidden ---

22 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27 * SUCH DAMAGE.
28 *
29 * RMI_BSD
30 * $FreeBSD: head/sys/mips/rmi/rmi_mips_exts.h 212366 2010-09-09 17:45:48Z jchandra $
30 * $FreeBSD: head/sys/mips/rmi/rmi_mips_exts.h 212758 2010-09-16 19:13:55Z jchandra $
31 */
32#ifndef __MIPS_EXTS_H__
33#define __MIPS_EXTS_H__
34
35#define CPU_BLOCKID_IFU 0
36#define CPU_BLOCKID_ICU 1
37#define CPU_BLOCKID_IEU 2
38#define CPU_BLOCKID_LSU 3

--- 304 unchanged lines hidden (view full) ---

343
344static __inline void
345write_c0_eimr64(uint64_t val)
346{
347
348 write_c0_register64(9, 7, val);
349}
350
351static __inline__ int
351static __inline int
352xlr_test_and_set(int *lock)
353{
354 int oldval = 0;
355
356 __asm__ __volatile__(
357 ".set push\n"
358 ".set noreorder\n"
359 "move $9, %2\n"
360 "li $8, 1\n"
361 // "swapw $8, $9\n"
362 ".word 0x71280014\n"
363 "move %1, $8\n"
364 ".set pop\n"
365 : "+m"(*lock), "=r"(oldval)
366 : "r"((unsigned long)lock)
367 : "$8", "$9"
368 );
369
370 return (oldval == 0 ? 1 /* success */ : 0 /* failure */ );
370 return (oldval == 0 ? 1 /* success */ : 0 /* failure */);
371}
372
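For reference, xlr_test_and_set() swaps 1 into *lock with the XLR swapw opcode and returns 1 if the previous value was 0 (lock acquired) or 0 otherwise, so callers spin on it. A minimal sketch of that pattern; the lock word and wrapper names are placeholders, not part of this header:

static int xlr_demo_lock;		/* hypothetical lock word, 0 = free */

static __inline void
xlr_demo_lock_acquire(void)
{

	/* busy-wait until the swap reports that we took the lock */
	while (xlr_test_and_set(&xlr_demo_lock) == 0)
		;
}

static __inline void
xlr_demo_lock_release(void)
{

	xlr_demo_lock = 0;	/* a real unlock also needs a memory barrier */
}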
373static __inline__ uint32_t
373static __inline uint32_t
374xlr_mfcr(uint32_t reg)
375{
376 uint32_t val;
377
378 __asm__ __volatile__(
379 "move $8, %1\n"
380 ".word 0x71090018\n"
381 "move %0, $9\n"
382 : "=r"(val)
383 : "r"(reg):"$8", "$9");
384
385 return val;
386}
387
388static __inline__ void
388static __inline void
389xlr_mtcr(uint32_t reg, uint32_t val)
390{
391 __asm__ __volatile__(
392 "move $8, %1\n"
393 "move $9, %0\n"
394 ".word 0x71090019\n"
395 :: "r"(val), "r"(reg)
396 : "$8", "$9");
397}
398
399/*
400 * Atomic increment a unsigned int
401 */
402static __inline unsigned int
403xlr_ldaddwu(unsigned int value, unsigned int *addr)
404{
405 __asm__ __volatile__(
406 ".set push\n"
407 ".set noreorder\n"
408 "move $8, %2\n"
409 "move $9, %3\n"
410 ".word 0x71280011\n" /* ldaddwu $8, $9 */
411 "move %0, $8\n"
412 ".set pop\n"
413 : "=&r"(value), "+m"(*addr)
414 : "0"(value), "r" ((unsigned long)addr)
415 : "$8", "$9");
416
417 return (value);
418}
419
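The xlr_ldaddwu() routine added here wraps the XLR ldaddwu opcode (emitted as .word 0x71280011): it atomically adds value to *addr and returns the word's previous contents, i.e. a fetch-and-add. A minimal caller sketch; the counter is a placeholder, not part of this header:

static unsigned int xlr_demo_counter;	/* hypothetical shared counter */

static __inline unsigned int
xlr_demo_count_event(void)
{

	/* atomically bump the counter; the pre-increment value comes back */
	return (xlr_ldaddwu(1, &xlr_demo_counter));
}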
399#if defined(__mips_n64)
420#if defined(__mips_n64)
400static __inline__ uint32_t
401xlr_paddr_lw(uint64_t paddr)
421static __inline uint64_t
422xlr_paddr_ld(uint64_t paddr)
402{
403
404 paddr |= 0x9800000000000000ULL;
423{
424
425 paddr |= 0x9800000000000000ULL;
405 return (*(uint32_t *)(uintptr_t)paddr);
426 return (*(uint64_t *)(uintptr_t)paddr);
406}
407
408#elif defined(__mips_n32)
427}
428
429#elif defined(__mips_n32)
409static __inline__ uint32_t
410xlr_paddr_lw(uint64_t paddr)
430static __inline uint64_t
431xlr_paddr_ld(uint64_t paddr)
411{
432{
412 uint32_t val;
433 uint64_t val;
413
414 paddr |= 0x9800000000000000ULL;
415 __asm__ __volatile__(
416 ".set push \n\t"
417 ".set mips64 \n\t"
434
435 paddr |= 0x9800000000000000ULL;
436 __asm__ __volatile__(
437 ".set push \n\t"
438 ".set mips64 \n\t"
418 "lw %0, 0(%1) \n\t"
439 "ld %0, 0(%1) \n\t"
419 ".set pop \n"
420 : "=r"(val)
421 : "r"(paddr));
422
423 return (val);
424}
425#else
440 ".set pop \n"
441 : "=r"(val)
442 : "r"(paddr));
443
444 return (val);
445}
446#else
426static __inline__ uint32_t
427xlr_paddr_lw(uint64_t paddr)
447static __inline uint64_t
448xlr_paddr_ld(uint64_t paddr)
428{
449{
429 uint32_t high, low, tmp;
450 uint32_t addrh, addrl;
451 uint32_t valh, vall;
430
452
431 high = 0x98000000 | (paddr >> 32);
432 low = paddr & 0xffffffff;
453 addrh = 0x98000000 | (paddr >> 32);
454 addrl = paddr & 0xffffffff;
433
434 __asm__ __volatile__(
435 ".set push \n\t"
436 ".set mips64 \n\t"
455
456 __asm__ __volatile__(
457 ".set push \n\t"
458 ".set mips64 \n\t"
437 "dsll32 %1, %1, 0 \n\t"
438 "dsll32 %2, %2, 0 \n\t" /* get rid of the */
439 "dsrl32 %2, %2, 0 \n\t" /* sign extend */
440 "or %1, %1, %2 \n\t"
441 "lw %0, 0(%1) \n\t"
459 "dsll32 %2, %2, 0 \n\t"
460 "dsll32 %3, %3, 0 \n\t" /* get rid of the */
461 "dsrl32 %3, %3, 0 \n\t" /* sign extend */
462 "or %2, %2, %3 \n\t"
463 "lw %0, 0(%2) \n\t"
464 "lw %1, 4(%2) \n\t"
442 ".set pop \n"
465 ".set pop \n"
443 : "=r"(tmp)
444 : "r"(high), "r"(low));
466 : "=&r"(valh), "=r"(vall)
467 : "r"(addrh), "r"(addrl));
445
468
446 return tmp;
469 return (((uint64_t)valh << 32) | vall);
447}
448#endif
449
470}
471#endif
472
473/*
474 * XXX: Not really needed in n32 or n64, retain for now
475 */
476#if defined(__mips_n64) || defined(__mips_n32)
477static __inline uint32_t
478xlr_enable_kx(void)
479{
480
481 return (0);
482}
483
484static __inline void
485xlr_restore_kx(uint32_t sr)
486{
487}
488#else
489static __inline uint32_t
490xlr_enable_kx(void)
491{
492 uint32_t sr = mips_rd_status();
493
494 mips_wr_status((sr & ~MIPS_SR_INT_IE) | MIPS_SR_KX);
495 return (sr);
496}
497
498static __inline void
499xlr_restore_kx(uint32_t sr)
500{
501
502 mips_wr_status(sr);
503}
504#endif
505
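The new xlr_enable_kx()/xlr_restore_kx() pair compiles to no-ops under n32 and n64; under o32 it sets MIPS_SR_KX (and clears MIPS_SR_INT_IE) so that the 64-bit XKPHYS address formed by xlr_paddr_ld() is legal, and it returns the old status word for later restoration. A minimal sketch of the intended bracketing; the wrapper name is a placeholder, not part of this header:

static __inline uint64_t
xlr_demo_read_phys(uint64_t paddr)
{
	uint32_t sr;
	uint64_t val;

	sr = xlr_enable_kx();		/* enable 64-bit kernel addressing on o32 */
	val = xlr_paddr_ld(paddr);	/* 64-bit load through XKPHYS */
	xlr_restore_kx(sr);		/* put the status register back */
	return (val);
}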
506/* for cpuid to hardware thread id mapping */
507extern uint32_t xlr_hw_thread_mask;
508extern int xlr_cpuid_to_hwtid[];
509extern int xlr_hwtid_to_cpuid[];
510
511#endif