assembler.cpp revision 1410:f03d0a26bf83
/*
 * Copyright 1997-2009 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

#include "incls/_precompiled.incl"
#include "incls/_assembler.cpp.incl"


// Implementation of AbstractAssembler
//
// The AbstractAssembler generates code into a CodeBuffer. To make code
// generation faster, the assembler keeps a copy of the code buffer's
// boundaries and modifies them when emitting bytes, rather than going
// through the code buffer's accessor functions all the time. The code
// buffer is updated via set_code_end(...) after emitting a whole
// instruction.
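
// A minimal sketch of the cached-cursor scheme described above
// (illustrative only; the real emit_byte lives in the platform assembler
// headers and may differ in details):
//
//   void AbstractAssembler::emit_byte(int x) {
//     assert(isByte(x), "not a byte");
//     *(unsigned char*)_code_pos = (unsigned char)x;
//     _code_pos += sizeof(unsigned char);  // bump only the cached cursor
//     sync();                              // publish via CodeSection::set_end()
//   }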

AbstractAssembler::AbstractAssembler(CodeBuffer* code) {
  if (code == NULL)  return;
  CodeSection* cs = code->insts();
  cs->clear_mark();   // new assembler kills old mark
  _code_section = cs;
  _code_begin   = cs->start();
  _code_limit   = cs->limit();
  _code_pos     = cs->end();
  _oop_recorder = code->oop_recorder();
  if (_code_begin == NULL)  {
    vm_exit_out_of_memory(0, err_msg("CodeCache: no room for %s",
                                     code->name()));
  }
}

void AbstractAssembler::set_code_section(CodeSection* cs) {
  assert(cs->outer() == code_section()->outer(), "sanity");
  assert(cs->is_allocated(), "need to pre-allocate this section");
  cs->clear_mark();  // new assembly into this section kills old mark
  _code_section = cs;
  _code_begin   = cs->start();
  _code_limit   = cs->limit();
  _code_pos     = cs->end();
}

// Inform CodeBuffer that incoming code and relocation will be for stubs
address AbstractAssembler::start_a_stub(int required_space) {
  CodeBuffer*  cb = code();
  CodeSection* cs = cb->stubs();
  assert(_code_section == cb->insts(), "not in insts?");
  sync();
  if (cs->maybe_expand_to_ensure_remaining(required_space)
      && cb->blob() == NULL) {
    return NULL;
  }
  set_code_section(cs);
  return pc();
}

// Inform CodeBuffer that incoming code and relocation will be code
// Should not be called if start_a_stub() returned NULL
void AbstractAssembler::end_a_stub() {
  assert(_code_section == code()->stubs(), "not in stubs?");
  sync();
  set_code_section(code()->insts());
}

// Inform CodeBuffer that incoming code and relocation will be for constants
address AbstractAssembler::start_a_const(int required_space, int required_align) {
  CodeBuffer*  cb = code();
  CodeSection* cs = cb->consts();
  assert(_code_section == cb->insts(), "not in insts?");
  sync();
  address end = cs->end();
  int pad = -(intptr_t)end & (required_align-1);
  if (cs->maybe_expand_to_ensure_remaining(pad + required_space)) {
    if (cb->blob() == NULL)  return NULL;
    end = cs->end();  // refresh pointer
  }
  if (pad > 0) {
    while (--pad >= 0) { *end++ = 0; }
    cs->set_end(end);
  }
  set_code_section(cs);
  return end;
}

// Inform CodeBuffer that incoming code and relocation will be code
// Should not be called if start_a_const() returned NULL
void AbstractAssembler::end_a_const() {
  assert(_code_section == code()->consts(), "not in consts?");
  sync();
  set_code_section(code()->insts());
}
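
// A sketch of how callers are expected to use the section-switching
// protocol above (illustrative only; '__' is the usual HotSpot masm
// shorthand and 'stub_size' is a hypothetical caller-supplied estimate):
//
//   address base = __ start_a_stub(stub_size);
//   if (base == NULL)  return;   // CodeCache is full; caller bails out
//   ... emit the stub's instructions ...
//   __ end_a_stub();             // switch back to the insts section
//
// start_a_const()/end_a_const() follow the same pattern for the constants
// section, with the extra alignment padding handled as shown above.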

void AbstractAssembler::flush() {
  sync();
  ICache::invalidate_range(addr_at(0), offset());
}


void AbstractAssembler::a_byte(int x) {
  emit_byte(x);
}


void AbstractAssembler::a_long(jint x) {
  emit_long(x);
}

// Labels refer to positions in the (to be) generated code. There are bound
// and unbound labels.
//
// Bound labels refer to known positions in the already generated code;
// offset() is the position the label refers to.
//
// Unbound labels refer to unknown positions in the code to be generated; an
// unbound label may carry a list of unresolved displacements that refer to it.
#ifndef PRODUCT
void AbstractAssembler::print(Label& L) {
  if (L.is_bound()) {
    tty->print_cr("bound label to %d|%d", L.loc_pos(), L.loc_sect());
  } else if (L.is_unbound()) {
    L.print_instructions((MacroAssembler*)this);
  } else {
    tty->print_cr("label in inconsistent state (loc = %d)", L.loc());
  }
}
#endif // PRODUCT


void AbstractAssembler::bind(Label& L) {
  if (L.is_bound()) {
    // Assembler can bind a label more than once to the same place.
    guarantee(L.loc() == locator(), "attempt to redefine label");
    return;
  }
  L.bind_loc(locator());
  L.patch_instructions((MacroAssembler*)this);
}
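
// A sketch of the forward-branch life cycle implemented by bind() above
// and by the Label patching code further below (illustrative only; jmp()
// stands in for any platform branch that records itself on the label via
// Label::add_patch_at()):
//
//   Label done;
//   __ jmp(done);    // unbound: the branch location is queued on the label
//   ... more code ...
//   __ bind(done);   // bind_loc() + patch_instructions() resolve the queue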

void AbstractAssembler::generate_stack_overflow_check( int frame_size_in_bytes) {
  if (UseStackBanging) {
    // Each code entry causes one stack bang n pages down the stack where n
    // is configurable by StackShadowPages. The setting depends on the maximum
    // depth of VM calls or native code before going back into java code,
    // since only java code can raise a stack overflow exception using the
    // stack banging mechanism. The VM and native code do not detect stack
    // overflow.
    // The code in JavaCalls::call() checks that there is at least n pages
    // available, so all entry code needs to do is bang once for the end of
    // this shadow zone.
    // The entry code may need to bang additional pages if the framesize
    // is greater than a page.

    const int page_size = os::vm_page_size();
    int bang_end = StackShadowPages*page_size;

    // This is how far the previous frame's stack banging extended.
    const int bang_end_safe = bang_end;

    if (frame_size_in_bytes > page_size) {
      bang_end += frame_size_in_bytes;
    }

    int bang_offset = bang_end_safe;
    while (bang_offset <= bang_end) {
      // Need at least one stack bang at end of shadow zone.
      bang_stack_with_offset(bang_offset);
      bang_offset += page_size;
    }
  } // end (UseStackBanging)
}
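
// Worked example of the loop above (assuming 4K pages and
// StackShadowPages == 20, so bang_end_safe == 80K): a 10K frame raises
// bang_end to 90K, and the loop probes offsets 80K, 84K and 88K -- one
// bang for the end of the shadow zone plus coverage of the new frame.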

void Label::add_patch_at(CodeBuffer* cb, int branch_loc) {
  assert(_loc == -1, "Label is unbound");
  if (_patch_index < PatchCacheSize) {
    _patches[_patch_index] = branch_loc;
  } else {
    if (_patch_overflow == NULL) {
      _patch_overflow = cb->create_patch_overflow();
    }
    _patch_overflow->push(branch_loc);
  }
  ++_patch_index;
}

void Label::patch_instructions(MacroAssembler* masm) {
  assert(is_bound(), "Label is bound");
  CodeBuffer* cb = masm->code();
  int target_sect = CodeBuffer::locator_sect(loc());
  address target = cb->locator_address(loc());
  while (_patch_index > 0) {
    --_patch_index;
    int branch_loc;
    if (_patch_index >= PatchCacheSize) {
      branch_loc = _patch_overflow->pop();
    } else {
      branch_loc = _patches[_patch_index];
    }
    int branch_sect = CodeBuffer::locator_sect(branch_loc);
    address branch = cb->locator_address(branch_loc);
    if (branch_sect == CodeBuffer::SECT_CONSTS) {
      // The thing to patch is a constant word.
      *(address*)branch = target;
      continue;
    }

#ifdef ASSERT
    // Cross-section branches only work if the
    // intermediate section boundaries are frozen.
    if (target_sect != branch_sect) {
      for (int n = MIN2(target_sect, branch_sect),
               nlimit = (target_sect + branch_sect) - n;
           n < nlimit; n++) {
        CodeSection* cs = cb->code_section(n);
        assert(cs->is_frozen(), "cross-section branch needs stable offsets");
      }
    }
#endif //ASSERT

    // Push the target offset into the branch instruction.
    masm->pd_patch_instruction(branch, target);
  }
}

struct DelayedConstant {
  typedef void (*value_fn_t)();
  BasicType type;
  intptr_t value;
  value_fn_t value_fn;
  // This limit of 20 is generous for initial uses.
  // The limit needs to be large enough to store the field offsets
  // into classes which do not have statically fixed layouts.
  // (Initial use is for method handle object offsets.)
  // Look for uses of "delayed_value" in the source code
  // and make sure this number is generous enough to handle all of them.
  enum { DC_LIMIT = 20 };
  static DelayedConstant delayed_constants[DC_LIMIT];
  static DelayedConstant* add(BasicType type, value_fn_t value_fn);
  bool match(BasicType t, value_fn_t cfn) {
    return type == t && value_fn == cfn;
  }
  static void update_all();
};

DelayedConstant DelayedConstant::delayed_constants[DC_LIMIT];
// Default C structure initialization rules have the following effect here:
// = { { (BasicType)0, (intptr_t)NULL }, ... };

DelayedConstant* DelayedConstant::add(BasicType type,
                                      DelayedConstant::value_fn_t cfn) {
  for (int i = 0; i < DC_LIMIT; i++) {
    DelayedConstant* dcon = &delayed_constants[i];
    if (dcon->match(type, cfn))
      return dcon;
    if (dcon->value_fn == NULL) {
      // (cmpxchg not because this is multi-threaded but because I'm paranoid)
      if (Atomic::cmpxchg_ptr(CAST_FROM_FN_PTR(void*, cfn), &dcon->value_fn, NULL) == NULL) {
        dcon->type = type;
        return dcon;
      }
    }
  }
  // If this assert is hit (in pre-integration testing!) then re-evaluate
  // the comment on the definition of DC_LIMIT.
  guarantee(false, "too many delayed constants");
  return NULL;
}

void DelayedConstant::update_all() {
  for (int i = 0; i < DC_LIMIT; i++) {
    DelayedConstant* dcon = &delayed_constants[i];
    if (dcon->value_fn != NULL && dcon->value == 0) {
      typedef int (*int_fn_t)();
      typedef address (*address_fn_t)();
      switch (dcon->type) {
      case T_INT:     dcon->value = (intptr_t) ((int_fn_t)    dcon->value_fn)(); break;
      case T_ADDRESS: dcon->value = (intptr_t) ((address_fn_t)dcon->value_fn)(); break;
      }
    }
  }
}

intptr_t* AbstractAssembler::delayed_value_addr(int(*value_fn)()) {
  DelayedConstant* dcon = DelayedConstant::add(T_INT, (DelayedConstant::value_fn_t) value_fn);
  return &dcon->value;
}
intptr_t* AbstractAssembler::delayed_value_addr(address(*value_fn)()) {
  DelayedConstant* dcon = DelayedConstant::add(T_ADDRESS, (DelayedConstant::value_fn_t) value_fn);
  return &dcon->value;
}
void AbstractAssembler::update_delayed_values() {
  DelayedConstant::update_all();
}
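
// A sketch of the intended use of the delayed-value machinery above
// (illustrative only; compute_my_offset is a hypothetical int() provider
// whose answer is not yet available at assembly time):
//
//   intptr_t* slot = __ delayed_value_addr(compute_my_offset);
//   // ... emit code that loads through 'slot' at run time ...
//   // Later, once the value can be computed, the VM calls
//   // AbstractAssembler::update_delayed_values() to fill in *slot.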

void AbstractAssembler::block_comment(const char* comment) {
  if (sect() == CodeBuffer::SECT_INSTS) {
    code_section()->outer()->block_comment(offset(), comment);
  }
}

bool MacroAssembler::needs_explicit_null_check(intptr_t offset) {
  // Exception handler checks the nmethod's implicit null checks table
  // only when this method returns false.
#ifdef _LP64
  if (UseCompressedOops && Universe::narrow_oop_base() != NULL) {
    assert (Universe::heap() != NULL, "java heap should be initialized");
    // The first page after heap_base is unmapped and
    // the 'offset' is equal to [heap_base + offset] for
    // narrow oop implicit null checks.
    uintptr_t base = (uintptr_t)Universe::narrow_oop_base();
    if ((uintptr_t)offset >= base) {
      // Normalize offset for the next check.
      offset = (intptr_t)(pointer_delta((void*)offset, (void*)base, 1));
    }
  }
#endif
  return offset < 0 || os::vm_page_size() <= offset;
}

#ifndef PRODUCT
void Label::print_instructions(MacroAssembler* masm) const {
  CodeBuffer* cb = masm->code();
  for (int i = 0; i < _patch_index; ++i) {
    int branch_loc;
    if (i >= PatchCacheSize) {
      branch_loc = _patch_overflow->at(i - PatchCacheSize);
    } else {
      branch_loc = _patches[i];
    }
    int branch_pos  = CodeBuffer::locator_pos(branch_loc);
    int branch_sect = CodeBuffer::locator_sect(branch_loc);
    address branch = cb->locator_address(branch_loc);
    tty->print_cr("unbound label");
    tty->print("@ %d|%d ", branch_pos, branch_sect);
    if (branch_sect == CodeBuffer::SECT_CONSTS) {
      tty->print_cr(PTR_FORMAT, *(address*)branch);
      continue;
    }
    masm->pd_print_patched_instruction(branch);
    tty->cr();
  }
}
#endif // ndef PRODUCT
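
// End-to-end sketch of driving this assembler (illustrative only; assumes
// a pre-allocated blob and a platform MacroAssembler, and elides error
// handling):
//
//   CodeBuffer cb(blob);           // wrap an existing CodeBlob
//   MacroAssembler masm(&cb);      // caches the insts section boundaries
//   Label done;
//   ... emit instructions, branching forward to 'done' ...
//   masm.bind(done);               // resolve pending branches
//   masm.flush();                  // sync() the cursor, invalidate ICache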