/*
 * Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2016 SAP SE. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef CPU_S390_VM_MACROASSEMBLER_S390_INLINE_HPP
#define CPU_S390_VM_MACROASSEMBLER_S390_INLINE_HPP

#include "asm/assembler.inline.hpp"
#include "asm/macroAssembler.hpp"
#include "asm/codeBuffer.hpp"
#include "code/codeCache.hpp"
#include "runtime/thread.hpp"

// Simplified shift operations for single register operands, constant shift amount.
inline void MacroAssembler::lshift(Register r, int places, bool is_DW) {
  if (is_DW) {
    z_sllg(r, r, places);
  } else {
    z_sll(r, places);
  }
}

inline void MacroAssembler::rshift(Register r, int places, bool is_DW) {
  if (is_DW) {
    z_srlg(r, r, places);
  } else {
    z_srl(r, places);
  }
}

// *((int8_t*)(dst)) |= imm8
inline void MacroAssembler::or2mem_8(Address& dst, int64_t imm8) {
  if (Displacement::is_shortDisp(dst.disp())) {
    z_oi(dst, imm8);
  } else {
    z_oiy(dst, imm8);
  }
}

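// Store the immediate imm to memory at dest. Presumably, lm and lc denote the
// widths (in bytes) of the memory operand and of the constant, respectively,
// as expected by the generic store_const emitter; both are 4 or 8 here.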
inline int MacroAssembler::store_const(const Address &dest, long imm, Register scratch, bool is_long) {
  unsigned int lm = is_long ? 8 : 4;
  unsigned int lc = is_long ? 8 : 4;
  return store_const(dest, imm, lm, lc, scratch);
}

// Do not rely on the add2reg* emitters here.
// Depending on command-line switches and actual parameter values,
// the code they generate may alter the condition code, which runs counter
// to the semantics of the "load address" (LA/LAY) instruction.
// Generic address loading: d <- base(a) + index(a) + disp(a)
inline void MacroAssembler::load_address(Register d, const Address &a) {
  if (Displacement::is_shortDisp(a.disp())) {
    z_la(d, a.disp(), a.indexOrR0(), a.baseOrR0());
  } else if (Displacement::is_validDisp(a.disp())) {
    z_lay(d, a.disp(), a.indexOrR0(), a.baseOrR0());
  } else {
    guarantee(false, "displacement = " SIZE_FORMAT_HEX ", out of range for LA/LAY", a.disp());
  }
}

inline void MacroAssembler::load_const(Register t, void* x) {
  load_const(t, (long)x);
}

// Load a 64-bit constant encoded by a `Label'.
// Works for bound as well as unbound labels. For unbound labels, the
// code is patched as soon as the label gets bound.
inline void MacroAssembler::load_const(Register t, Label& L) {
  load_const(t, target(L));
}

inline void MacroAssembler::load_const(Register t, const AddressLiteral& a) {
  assert(t != Z_R0, "R0 not allowed");
  // First relocate (we don't change the offset in the RelocationHolder,
  // just pass a.rspec()), then delegate to load_const(Register, long).
  relocate(a.rspec());
  load_const(t, (long)a.value());
}

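// The load_const_optimized variants emit the shortest instruction sequence for
// the given value, so the generated code has variable length. As the assert in
// the AddressLiteral variant below indicates, such loads cannot carry
// relocations; use load_const when a relocatable (patchable) constant load is
// required.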
inline void MacroAssembler::load_const_optimized(Register t, long x) {
  (void) load_const_optimized_rtn_len(t, x, true);
}

inline void MacroAssembler::load_const_optimized(Register t, void* a) {
  load_const_optimized(t, (long)a);
}

inline void MacroAssembler::load_const_optimized(Register t, Label& L) {
  load_const_optimized(t, target(L));
}

inline void MacroAssembler::load_const_optimized(Register t, const AddressLiteral& a) {
  assert(t != Z_R0, "R0 not allowed");
  assert((relocInfo::relocType)a.rspec().reloc()->type() == relocInfo::none,
          "cannot relocate optimized load_consts");
  load_const_optimized(t, a.value());
}

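// Load the address of an oop into d. Presumably, allocate_oop_address records
// obj under a fresh index in the oop recorder, while constant_oop_address
// (used by set_oop_constant below) looks up an already recorded index.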
inline void MacroAssembler::set_oop(jobject obj, Register d) {
  load_const(d, allocate_oop_address(obj));
}

inline void MacroAssembler::set_oop_constant(jobject obj, Register d) {
  load_const(d, constant_oop_address(obj));
}

// Adds the Metadata constant md to the TOC and loads it from there.
// md is added to the oop_recorder, but no relocation is added.
inline bool MacroAssembler::set_metadata_constant(Metadata* md, Register d) {
  AddressLiteral a = constant_metadata_address(md);
  return load_const_from_toc(d, a, d); // Discards the relocation.
}


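// Instruction pattern tests. Each test compares the fetched instruction word
// against an opcode template (*_ZOPC), optionally under a mask that blanks out
// the variable fields. The "off x, len y" annotations presumably give the bit
// offset and width of the pc-relative immediate field within the instruction.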
inline bool MacroAssembler::is_call_pcrelative_short(unsigned long inst) {
  return is_equal(inst, BRAS_ZOPC); // off 16, len 16
}

inline bool MacroAssembler::is_call_pcrelative_long(unsigned long inst) {
  return is_equal(inst, BRASL_ZOPC); // off 16, len 32
}

inline bool MacroAssembler::is_branch_pcrelative_short(unsigned long inst) {
  // Branch relative, 16-bit offset.
  return is_equal(inst, BRC_ZOPC); // off 16, len 16
}

inline bool MacroAssembler::is_branch_pcrelative_long(unsigned long inst) {
  // Branch relative, 32-bit offset.
  return is_equal(inst, BRCL_ZOPC); // off 16, len 32
}

inline bool MacroAssembler::is_compareandbranch_pcrelative_short(unsigned long inst) {
  // Compare and branch relative, 16-bit offset.
  return is_equal(inst, CRJ_ZOPC, CMPBRANCH_MASK)  || is_equal(inst, CGRJ_ZOPC, CMPBRANCH_MASK)  ||
         is_equal(inst, CIJ_ZOPC, CMPBRANCH_MASK)  || is_equal(inst, CGIJ_ZOPC, CMPBRANCH_MASK)  ||
         is_equal(inst, CLRJ_ZOPC, CMPBRANCH_MASK) || is_equal(inst, CLGRJ_ZOPC, CMPBRANCH_MASK) ||
         is_equal(inst, CLIJ_ZOPC, CMPBRANCH_MASK) || is_equal(inst, CLGIJ_ZOPC, CMPBRANCH_MASK);
}

inline bool MacroAssembler::is_branchoncount_pcrelative_short(unsigned long inst) {
  // Branch relative on count, 16-bit offset.
  return is_equal(inst, BRCT_ZOPC) || is_equal(inst, BRCTG_ZOPC); // off 16, len 16
}

inline bool MacroAssembler::is_branchonindex32_pcrelative_short(unsigned long inst) {
  // Branch relative on index (32-bit), 16-bit offset.
  return is_equal(inst, BRXH_ZOPC) || is_equal(inst, BRXLE_ZOPC); // off 16, len 16
}

inline bool MacroAssembler::is_branchonindex64_pcrelative_short(unsigned long inst) {
  // Branch relative on index (64-bit), 16-bit offset.
  return is_equal(inst, BRXHG_ZOPC) || is_equal(inst, BRXLG_ZOPC); // off 16, len 16
}

176
177inline bool MacroAssembler::is_branchonindex_pcrelative_short(unsigned long inst) {
178  return is_branchonindex32_pcrelative_short(inst) ||
179         is_branchonindex64_pcrelative_short(inst);
180}
181
182inline bool MacroAssembler::is_branch_pcrelative16(unsigned long inst) {
183  return is_branch_pcrelative_short(inst) ||
184         is_compareandbranch_pcrelative_short(inst) ||
185         is_branchoncount_pcrelative_short(inst) ||
186         is_branchonindex_pcrelative_short(inst);
187}
188
189inline bool MacroAssembler::is_branch_pcrelative32(unsigned long inst) {
190  return is_branch_pcrelative_long(inst);
191}
192
193inline bool MacroAssembler::is_branch_pcrelative(unsigned long inst) {
194  return is_branch_pcrelative16(inst) ||
195         is_branch_pcrelative32(inst);
196}
197
198inline bool MacroAssembler::is_load_pcrelative_long(unsigned long inst) {
199  // Load relative, 32-bit offset.
200  return is_equal(inst, LRL_ZOPC, REL_LONG_MASK) || is_equal(inst, LGRL_ZOPC, REL_LONG_MASK); // off 16, len 32
201}
202
203inline bool MacroAssembler::is_misc_pcrelative_long(unsigned long inst) {
204  // Load address, execute relative, 32-bit offset.
205  return is_equal(inst, LARL_ZOPC, REL_LONG_MASK) || is_equal(inst, EXRL_ZOPC, REL_LONG_MASK); // off 16, len 32
206}
207
208inline bool MacroAssembler::is_pcrelative_short(unsigned long inst) {
209  return is_branch_pcrelative16(inst) ||
210         is_call_pcrelative_short(inst);
211}
212
213inline bool MacroAssembler::is_pcrelative_long(unsigned long inst) {
214  return is_branch_pcrelative32(inst) ||
215         is_call_pcrelative_long(inst) ||
216         is_load_pcrelative_long(inst) ||
217         is_misc_pcrelative_long(inst);
218}
219
220inline bool MacroAssembler::is_load_pcrelative_long(address iLoc) {
221  unsigned long inst;
222  unsigned int  len = get_instruction(iLoc, &inst);
223  return (len == 6) && is_load_pcrelative_long(inst);
224}
225
226inline bool MacroAssembler::is_pcrelative_short(address iLoc) {
227  unsigned long inst;
228  unsigned int  len = get_instruction(iLoc, &inst);
229  return ((len == 4) || (len == 6)) && is_pcrelative_short(inst);
230}
231
232inline bool MacroAssembler::is_pcrelative_long(address iLoc) {
233  unsigned long inst;
234  unsigned int  len = get_instruction(iLoc, &inst);
235  return (len == 6) && is_pcrelative_long(inst);
236}
237
238// Dynamic TOC. Test for any pc-relative instruction.
239inline bool MacroAssembler::is_pcrelative_instruction(address iloc) {
240  unsigned long inst;
241  get_instruction(iloc, &inst);
242  return is_pcrelative_short(inst) ||
243         is_pcrelative_long(inst);
244}
245
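// Matches a pc-relative "load address" (LARL) instruction at address a.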
inline bool MacroAssembler::is_load_addr_pcrel(address a) {
  return is_equal(a, LARL_ZOPC, LARL_MASK);
}

// Save the return pc (held in the given register, Z_R14 by default) into the
// return_pc slot of the current frame.
inline void MacroAssembler::save_return_pc(Register pc) {
  z_stg(pc, _z_abi16(return_pc), Z_SP);
}

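// Reload the return pc from the current frame's return_pc slot into Z_R14.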
inline void MacroAssembler::restore_return_pc() {
  z_lg(Z_R14, _z_abi16(return_pc), Z_SP);
}

// Call a function with given entry.
inline address MacroAssembler::call(Register function_entry) {
  assert(function_entry != Z_R0, "function_entry cannot be Z_R0");

  Assembler::z_basr(Z_R14, function_entry);
  _last_calls_return_pc = pc();

  return _last_calls_return_pc;
}

// Call a C function via a function entry.
inline address MacroAssembler::call_c(Register function_entry) {
  return call(function_entry);
}

// Call a stub function via a function descriptor, but don't save the TOC before
// the call, don't set up TOC and ENV for the call, and don't restore the TOC
// after the call.
inline address MacroAssembler::call_stub(Register function_entry) {
  return call_c(function_entry);
}

inline address MacroAssembler::call_stub(address function_entry) {
  return call_c(function_entry);
}

// Get the pc where the last emitted call will return to.
inline address MacroAssembler::last_calls_return_pc() {
  return _last_calls_return_pc;
}

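// Convenience wrappers around the three-argument last_Java_frame helpers.
// The trailing bool presumably controls whether relocatable code may be
// emitted; the *_static variants (which pass false) are presumably meant for
// code that is generated only once and never relocated.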
inline void MacroAssembler::set_last_Java_frame(Register last_Java_sp, Register last_Java_pc) {
  set_last_Java_frame(last_Java_sp, last_Java_pc, true);
}

inline void MacroAssembler::set_last_Java_frame_static(Register last_Java_sp, Register last_Java_pc) {
  set_last_Java_frame(last_Java_sp, last_Java_pc, false);
}

inline void MacroAssembler::reset_last_Java_frame(void) {
  reset_last_Java_frame(true);
}

inline void MacroAssembler::reset_last_Java_frame_static(void) {
  reset_last_Java_frame(false);
}

inline void MacroAssembler::set_top_ijava_frame_at_SP_as_last_Java_frame(Register sp, Register tmp1) {
  set_top_ijava_frame_at_SP_as_last_Java_frame(sp, tmp1, true);
}

inline void MacroAssembler::set_top_ijava_frame_at_SP_as_last_Java_frame_static(Register sp, Register tmp1) {
  set_top_ijava_frame_at_SP_as_last_Java_frame(sp, tmp1, false);
}

#endif // CPU_S390_VM_MACROASSEMBLER_S390_INLINE_HPP