@@ -52,11 +52,11 @@ static void x_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node
 %}
 
 // Load Pointer
-instruct xLoadP(iRegPNoSp dst, memory mem)
+instruct xLoadP(iRegPNoSp dst, memory mem, iRegPNoSp tmp)
 %{
   match(Set dst (LoadP mem));
   predicate(UseZGC && !ZGenerational && (n->as_Load()->barrier_data() != 0));
-  effect(TEMP dst);
+  effect(TEMP dst, TEMP tmp);
 
   ins_cost(4 * DEFAULT_COST);
 
@@ -65,17 +65,17 @@ instruct xLoadP(iRegPNoSp dst, memory mem)
   ins_encode %{
     const Address ref_addr (as_Register($mem$$base), $mem$$disp);
     __ ld($dst$$Register, ref_addr);
-    x_load_barrier(_masm, this, ref_addr, $dst$$Register, t0 /* tmp */, barrier_data());
+    x_load_barrier(_masm, this, ref_addr, $dst$$Register, $tmp$$Register /* tmp */, barrier_data());
   %}
 
   ins_pipe(iload_reg_mem);
 %}
 
-instruct xCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
+instruct xCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, iRegPNoSp tmp) %{
   match(Set res (CompareAndSwapP mem (Binary oldval newval)));
   match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
   predicate(UseZGC && !ZGenerational && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == XLoadBarrierStrong);
-  effect(KILL cr, TEMP_DEF res);
+  effect(TEMP_DEF res, TEMP tmp);
 
   ins_cost(2 * VOLATILE_REF_COST);
 
@@ -86,17 +86,15 @@ instruct xCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newva
     Label failed;
     guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
     __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
-               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
-               true /* result_as_bool */);
-    __ beqz($res$$Register, failed);
-    __ mv(t0, $oldval$$Register);
-    __ bind(failed);
+               Assembler::relaxed /* acquire */, Assembler::rl /* release */, $tmp$$Register);
+    __ sub(t0, $tmp$$Register, $oldval$$Register);
+    __ seqz($res$$Register, t0);
     if (barrier_data() != XLoadBarrierElided) {
       Label good;
-      __ ld(t1, Address(xthread, XThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
-      __ andr(t1, t1, t0);
-      __ beqz(t1, good);
-      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
+      __ ld(t0, Address(xthread, XThreadLocalData::address_bad_mask_offset()));
+      __ andr(t0, t0, $tmp$$Register);
+      __ beqz(t0, good);
+      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $tmp$$Register /* ref */, $res$$Register /* tmp */);
       __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                  Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register,
                  true /* result_as_bool */);
@@ -107,11 +105,11 @@ instruct xCompareAndSwapP(iRegINoSp res, indirect mem, iRegP oldval, iRegP newva
   ins_pipe(pipe_slow);
 %}
 
-instruct xCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, rFlagsReg cr) %{
+instruct xCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP newval, iRegPNoSp tmp) %{
   match(Set res (CompareAndSwapP mem (Binary oldval newval)));
   match(Set res (WeakCompareAndSwapP mem (Binary oldval newval)));
   predicate(UseZGC && !ZGenerational && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() == XLoadBarrierStrong));
-  effect(KILL cr, TEMP_DEF res);
+  effect(TEMP_DEF res, TEMP tmp);
 
   ins_cost(2 * VOLATILE_REF_COST);
 
@@ -122,17 +120,15 @@ instruct xCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP ne
     Label failed;
     guarantee($mem$$index == -1 && $mem$$disp == 0, "impossible encoding");
     __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
-               Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
-               true /* result_as_bool */);
-    __ beqz($res$$Register, failed);
-    __ mv(t0, $oldval$$Register);
-    __ bind(failed);
+               Assembler::aq /* acquire */, Assembler::rl /* release */, $tmp$$Register);
+    __ sub(t0, $tmp$$Register, $oldval$$Register);
+    __ seqz($res$$Register, t0);
     if (barrier_data() != XLoadBarrierElided) {
       Label good;
-      __ ld(t1, Address(xthread, XThreadLocalData::address_bad_mask_offset()), t1 /* tmp */);
-      __ andr(t1, t1, t0);
-      __ beqz(t1, good);
-      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), t0 /* ref */, t1 /* tmp */);
+      __ ld(t0, Address(xthread, XThreadLocalData::address_bad_mask_offset()));
+      __ andr(t0, t0, $tmp$$Register);
+      __ beqz(t0, good);
+      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $tmp$$Register /* ref */, $res$$Register /* tmp */);
       __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                  Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register,
                  true /* result_as_bool */);
@@ -143,10 +139,10 @@ instruct xCompareAndSwapPAcq(iRegINoSp res, indirect mem, iRegP oldval, iRegP ne
   ins_pipe(pipe_slow);
 %}
 
-instruct xCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
+instruct xCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval, iRegPNoSp tmp) %{
   match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
   predicate(UseZGC && !ZGenerational && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == XLoadBarrierStrong);
-  effect(TEMP_DEF res);
+  effect(TEMP_DEF res, TEMP tmp);
 
   ins_cost(2 * VOLATILE_REF_COST);
 
@@ -161,7 +157,7 @@ instruct xCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP n
       __ ld(t0, Address(xthread, XThreadLocalData::address_bad_mask_offset()));
       __ andr(t0, t0, $res$$Register);
       __ beqz(t0, good);
-      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
+      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, $tmp$$Register /* tmp */);
       __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                  Assembler::relaxed /* acquire */, Assembler::rl /* release */, $res$$Register);
       __ bind(good);
@@ -171,10 +167,10 @@ instruct xCompareAndExchangeP(iRegPNoSp res, indirect mem, iRegP oldval, iRegP n
   ins_pipe(pipe_slow);
 %}
 
-instruct xCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval) %{
+instruct xCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iRegP newval, iRegPNoSp tmp) %{
   match(Set res (CompareAndExchangeP mem (Binary oldval newval)));
   predicate(UseZGC && !ZGenerational && needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() == XLoadBarrierStrong);
-  effect(TEMP_DEF res);
+  effect(TEMP_DEF res, TEMP tmp);
 
   ins_cost(2 * VOLATILE_REF_COST);
 
@@ -189,7 +185,7 @@ instruct xCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iReg
      __ ld(t0, Address(xthread, XThreadLocalData::address_bad_mask_offset()));
      __ andr(t0, t0, $res$$Register);
      __ beqz(t0, good);
-      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, t0 /* tmp */);
+      x_load_barrier_slow_path(_masm, this, Address($mem$$Register), $res$$Register /* ref */, $tmp$$Register /* tmp */);
      __ cmpxchg($mem$$Register, $oldval$$Register, $newval$$Register, Assembler::int64,
                 Assembler::aq /* acquire */, Assembler::rl /* release */, $res$$Register);
      __ bind(good);
@@ -199,35 +195,35 @@ instruct xCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iReg
   ins_pipe(pipe_slow);
 %}
 
-instruct xGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
+instruct xGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, iRegPNoSp tmp) %{
   match(Set prev (GetAndSetP mem newv));
   predicate(UseZGC && !ZGenerational && !needs_acquiring_load_reserved(n) && n->as_LoadStore()->barrier_data() != 0);
-  effect(TEMP_DEF prev, KILL cr);
+  effect(TEMP_DEF prev, TEMP tmp);
 
   ins_cost(2 * VOLATILE_REF_COST);
 
   format %{ "atomic_xchg $prev, $newv, [$mem], #@zGetAndSetP" %}
 
   ins_encode %{
     __ atomic_xchg($prev$$Register, $newv$$Register, as_Register($mem$$base));
-    x_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
+    x_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, $tmp$$Register /* tmp */, barrier_data());
   %}
 
   ins_pipe(pipe_serial);
 %}
 
-instruct xGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
+instruct xGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, iRegPNoSp tmp) %{
   match(Set prev (GetAndSetP mem newv));
   predicate(UseZGC && !ZGenerational && needs_acquiring_load_reserved(n) && (n->as_LoadStore()->barrier_data() != 0));
-  effect(TEMP_DEF prev, KILL cr);
+  effect(TEMP_DEF prev, TEMP tmp);
 
   ins_cost(VOLATILE_REF_COST);
 
   format %{ "atomic_xchg_acq $prev, $newv, [$mem], #@zGetAndSetPAcq" %}
 
   ins_encode %{
     __ atomic_xchgal($prev$$Register, $newv$$Register, as_Register($mem$$base));
-    x_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, t0 /* tmp */, barrier_data());
+    x_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, $tmp$$Register /* tmp */, barrier_data());
   %}
   ins_pipe(pipe_serial);
 %}