|
68 | 68 | //! ```
|
69 | 69 |
|
70 | 70 | #![no_std]
|
71 |
| -#![feature(llvm_asm)] |
| 71 | +#![feature(asm_experimental_arch)] |
72 | 72 | #![cfg_attr(not(target_arch = "msp430"), feature(core_intrinsics))]
|
73 | 73 |
|
| 74 | +use core::arch::asm; |
74 | 75 | use core::cell::UnsafeCell;
|
75 | 76 | use core::fmt;
|
76 | 77 |
|
@@ -630,52 +631,44 @@ macro_rules! atomic_int {
|
630 | 631 | impl AtomicOperations for $int_type {
|
/// Atomically stores `val` into `*dst`.
///
/// Emitted as a single MSP430 `mov{.b|.w}` instruction ($asm_suffix picks the
/// operand width); a plain `asm!` block without `options(...)` is implicitly
/// volatile and assumed to clobber memory, which preserves the old
/// `llvm_asm!` `"memory"`/`"volatile"` constraints.
/// NOTE(review): atomicity presumably relies on a single instruction being
/// uninterruptible on MSP430 — confirm against the crate's design notes.
#[inline(always)]
unsafe fn atomic_store(dst: *mut Self, val: Self) {
    // mov{.b|.w} val, 0(dst) — indexed addressing mode with offset 0,
    // replacing the old "*m" memory constraint of llvm_asm!.
    asm!(concat!("mov", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
636 | 636 |
|
/// Atomically loads and returns the value at `*dst`.
///
/// Emitted as a single MSP430 `mov{.b|.w}` instruction; the default (no
/// `options(...)`) `asm!` is treated as volatile with side effects, so the
/// load is never elided or reordered, matching the old `llvm_asm!` flags.
#[inline(always)]
unsafe fn atomic_load(dst: *const Self) -> Self {
    // Type of `out` is inferred as Self from the return position.
    let out;
    // mov{.b|.w} @dst, out — register-indirect source addressing.
    // `lateout` is safe here: the destination register is written only
    // after the source operand has been consumed.
    asm!(concat!("mov", $asm_suffix, " @{0}, {1}"), in(reg) dst, lateout(reg) out);
    out
}
|
644 | 643 |
|
/// Atomically performs `*dst += val`.
///
/// Single MSP430 `add{.b|.w}` read-modify-write instruction on memory;
/// default `asm!` is volatile and assumed to clobber memory, preserving
/// the old `llvm_asm!` `"memory"`/`"volatile"` behavior.
/// NOTE(review): this clobbers the status register (carry/overflow flags) —
/// presumably acceptable since flags are not live across `asm!`; confirm.
#[inline(always)]
unsafe fn atomic_add(dst: *mut Self, val: Self) {
    // add{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("add", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
650 | 648 |
|
/// Atomically performs `*dst -= val`.
///
/// Single MSP430 `sub{.b|.w}` read-modify-write instruction on memory;
/// default `asm!` is volatile and assumed to clobber memory, preserving
/// the old `llvm_asm!` `"memory"`/`"volatile"` behavior.
#[inline(always)]
unsafe fn atomic_sub(dst: *mut Self, val: Self) {
    // sub{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("sub", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
656 | 653 |
|
/// Atomically performs `*dst &= val`.
///
/// Single MSP430 `and{.b|.w}` read-modify-write instruction on memory;
/// default `asm!` is volatile and assumed to clobber memory, preserving
/// the old `llvm_asm!` `"memory"`/`"volatile"` behavior.
#[inline(always)]
unsafe fn atomic_and(dst: *mut Self, val: Self) {
    // and{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("and", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
662 | 658 |
|
/// Atomically clears in `*dst` every bit that is set in `val`
/// (i.e. `*dst &= !val`).
///
/// Uses the MSP430 `bic` (bit clear) instruction, which performs the
/// AND-NOT in a single read-modify-write on memory; default `asm!` is
/// volatile with an assumed memory clobber, matching the old `llvm_asm!`.
#[inline(always)]
unsafe fn atomic_clear(dst: *mut Self, val: Self) {
    // bic{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("bic", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
668 | 663 |
|
/// Atomically performs `*dst |= val`.
///
/// Uses the MSP430 `bis` (bit set) instruction, which ORs the operand
/// into memory in a single read-modify-write; default `asm!` is volatile
/// with an assumed memory clobber, matching the old `llvm_asm!`.
#[inline(always)]
unsafe fn atomic_or(dst: *mut Self, val: Self) {
    // bis{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("bis", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
674 | 668 |
|
/// Atomically performs `*dst ^= val`.
///
/// Single MSP430 `xor{.b|.w}` read-modify-write instruction on memory;
/// default `asm!` is volatile and assumed to clobber memory, preserving
/// the old `llvm_asm!` `"memory"`/`"volatile"` behavior.
#[inline(always)]
unsafe fn atomic_xor(dst: *mut Self, val: Self) {
    // xor{.b|.w} val, 0(dst) — indexed addressing with offset 0.
    asm!(concat!("xor", $asm_suffix, " {1}, 0({0})"), in(reg) dst, in(reg) val);
}
|
680 | 673 | }
|
681 | 674 |
|
|
0 commit comments