@@ -102,6 +102,13 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
102
102
let llret_ty = self . layout_of ( ret_ty) . llvm_type ( self ) ;
103
103
let result = PlaceRef :: new_sized ( llresult, fn_ty. ret . layout , fn_ty. ret . layout . align . abi ) ;
104
104
105
+ let invalid_integer_monomorphization = |ty| {
106
+ span_invalid_monomorphization_error ( tcx. sess , span,
107
+ & format ! ( "invalid monomorphization of `{}` intrinsic: \
108
+ expected basic integer type, found `{}`", name, ty) ) ;
109
+ } ;
110
+
111
+
105
112
let simple = get_simple_intrinsic ( self , name) ;
106
113
let llval = match name {
107
114
_ if simple. is_some ( ) => {
@@ -503,10 +510,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
503
510
_ => bug ! ( ) ,
504
511
} ,
505
512
None => {
506
- span_invalid_monomorphization_error (
507
- tcx. sess , span,
508
- & format ! ( "invalid monomorphization of `{}` intrinsic: \
509
- expected basic integer type, found `{}`", name, ty) ) ;
513
+ invalid_integer_monomorphization ( ty) ;
510
514
return ;
511
515
}
512
516
}
@@ -548,6 +552,17 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
548
552
Err ( ( ) ) => return
549
553
}
550
554
}
555
+ name if name. starts_with ( "atomic_element_" ) => {
556
+ let ty = substs. type_at ( 0 ) ;
557
+ if int_type_width_signed ( ty, self ) . is_some ( ) {
558
+ atomic_element_intrinsic ( self , name,
559
+ substs. type_at ( 0 ) ,
560
+ args) ;
561
+ return ;
562
+ } else {
563
+ return invalid_integer_monomorphization ( ty) ;
564
+ }
565
+ }
551
566
// This requires that atomic intrinsics follow a specific naming pattern:
552
567
// "atomic_<operation>[_<ordering>]", and no ordering means SeqCst
553
568
name if name. starts_with ( "atomic_" ) => {
@@ -582,12 +597,6 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
582
597
_ => self . sess ( ) . fatal ( "Atomic intrinsic not in correct format" ) ,
583
598
} ;
584
599
585
- let invalid_monomorphization = |ty| {
586
- span_invalid_monomorphization_error ( tcx. sess , span,
587
- & format ! ( "invalid monomorphization of `{}` intrinsic: \
588
- expected basic integer type, found `{}`", name, ty) ) ;
589
- } ;
590
-
591
600
match split[ 1 ] {
592
601
"cxchg" | "cxchgweak" => {
593
602
let ty = substs. type_at ( 0 ) ;
@@ -610,7 +619,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
610
619
self . store ( success, dest. llval , dest. align ) ;
611
620
return ;
612
621
} else {
613
- return invalid_monomorphization ( ty) ;
622
+ return invalid_integer_monomorphization ( ty) ;
614
623
}
615
624
}
616
625
@@ -620,7 +629,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
620
629
let size = self . size_of ( ty) ;
621
630
self . atomic_load ( args[ 0 ] . immediate ( ) , order, size)
622
631
} else {
623
- return invalid_monomorphization ( ty) ;
632
+ return invalid_integer_monomorphization ( ty) ;
624
633
}
625
634
}
626
635
@@ -636,7 +645,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
636
645
) ;
637
646
return ;
638
647
} else {
639
- return invalid_monomorphization ( ty) ;
648
+ return invalid_integer_monomorphization ( ty) ;
640
649
}
641
650
}
642
651
@@ -676,7 +685,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
676
685
order
677
686
)
678
687
} else {
679
- return invalid_monomorphization ( ty) ;
688
+ return invalid_integer_monomorphization ( ty) ;
680
689
}
681
690
}
682
691
}
@@ -754,6 +763,54 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
754
763
}
755
764
}
756
765
766
/// Lowers the `atomic_element_*_unordered` intrinsics to the builder's
/// element-wise unordered-atomic memory operations.
///
/// `ty` is the element type of the copy/set (the caller has already
/// verified, via `int_type_width_signed`, that it is a basic integer
/// type). `args` holds the raw operands; from the uses below:
/// `args[0]` is the destination pointer, `args[1]` the source pointer
/// (or the fill value for the `set` variant — presumably a byte value,
/// confirm against the intrinsic's declaration), and `args[2]` the
/// element *count*, not a byte count.
fn atomic_element_intrinsic(
    bx: &mut Builder<'a, 'll, 'tcx>,
    name: &str,
    ty: Ty<'tcx>,
    args: &[OperandRef<'tcx, &'ll Value>],
) {
    let (element_size, align) = bx.size_and_align_of(ty);
    let element_size = element_size.bytes();
    // The builder methods below take the element size as a `u32`; this
    // assert makes the `as u32` narrowing casts provably lossless.
    assert!(element_size <= u32::max_value() as u64);

    // Total size in bytes = element size * element count (args[2]).
    let size = bx.mul(bx.const_usize(element_size), args[2].immediate());

    match name {
        "atomic_element_copy_nonoverlapping_memory_unordered" => {
            // Non-overlapping ranges: memcpy form.
            bx.atomic_element_unordered_memcpy(
                args[0].immediate(),
                align,
                args[1].immediate(),
                align,
                size,
                element_size as u32
            );
        }
        "atomic_element_copy_memory_unordered" => {
            // Possibly-overlapping ranges: memmove form.
            bx.atomic_element_unordered_memmove(
                args[0].immediate(),
                align,
                args[1].immediate(),
                align,
                size,
                element_size as u32
            );
        }
        "atomic_element_set_memory_unordered" => {
            // args[1] here is the fill value rather than a source pointer.
            bx.atomic_element_unordered_memset(
                args[0].immediate(),
                args[1].immediate(),
                size,
                align,
                element_size as u32
            );
        }
        _ => {
            // The caller dispatches on the "atomic_element_" name prefix,
            // so any other name reaching this helper is a compiler bug.
            bug!("unknown intrinsic '{}'", name);
        }
    }
}
813
+
757
814
fn copy_intrinsic (
758
815
bx : & mut Builder < ' a , ' ll , ' tcx > ,
759
816
allow_overlap : bool ,
@@ -777,6 +834,7 @@ fn copy_intrinsic(
777
834
}
778
835
}
779
836
837
+
780
838
fn memset_intrinsic (
781
839
bx : & mut Builder < ' a , ' ll , ' tcx > ,
782
840
volatile : bool ,
0 commit comments