// Test hook (non-PRODUCT builds): decides whether the next promotion should be
// forced to fail, delegating to the counted overload with the heap-global
// failure counter.
inline bool CollectedHeap::promotion_should_fail() {
  return promotion_should_fail(&_promotion_failure_alot_count);
}
// Re-arms the PromotionFailureALot test machinery after a collection: records
// the collection count at which the reset happened and clears the caller's
// failure counter so the next failure interval starts from zero.
inline void CollectedHeap::reset_promotion_should_fail(volatile size_t* count) {
  if (PromotionFailureALot) {
    _promotion_failure_alot_gc_number = total_collections();
    *count = 0;  // FIX: counter was never reset; the parameter was unused
  }
}
// Convenience overload: resets the heap-global promotion-failure counter.
inline void CollectedHeap::reset_promotion_should_fail() {
  reset_promotion_should_fail(&_promotion_failure_alot_count);
}
#endif // #ifndef PRODUCT
#endif // SHARE_VM_GC_INTERFACE_COLLECTEDHEAP_INLINE_HPP
C:\hotspot-69087d08d473\src\share\vm/gc_interface/gcCause.cpp
#include "precompiled.hpp"
#include "gc_interface/gcCause.hpp"
// Maps a GCCause::Cause value to a human-readable string for GC logging.
// Causes with no explicit case (e.g. _no_cause_specified) fall through to the
// default arm and print as "unknown GCCause".
const char* GCCause::to_string(GCCause::Cause cause) {
  switch (cause) {
    case _java_lang_system_gc:
      return "System.gc()";
    case _full_gc_alot:
      return "FullGCAlot";
    case _scavenge_alot:
      return "ScavengeAlot";
    case _allocation_profiler:
      return "Allocation Profiler";
    case _jvmti_force_gc:
      return "JvmtiEnv ForceGarbageCollection";
    case _gc_locker:
      return "GCLocker Initiated GC";
    case _heap_inspection:
      return "Heap Inspection Initiated GC";
    case _heap_dump:
      return "Heap Dump Initiated GC";
    case _wb_young_gc:
      return "WhiteBox Initiated Young GC";
    case _wb_conc_mark:
      return "WhiteBox Initiated Concurrent Mark";
    case _update_allocation_context_stats_inc:
    case _update_allocation_context_stats_full:
      return "Update Allocation Context Stats";
    case _no_gc:
      return "No GC";
    case _allocation_failure:
      return "Allocation Failure";
    case _tenured_generation_full:
      return "Tenured Generation Full";
    case _metadata_GC_threshold:
      return "Metadata GC Threshold";
    case _cms_generation_full:
      return "CMS Generation Full";
    case _cms_initial_mark:
      return "CMS Initial Mark";
    case _cms_final_remark:
      return "CMS Final Remark";
    case _cms_concurrent_mark:
      return "CMS Concurrent Mark";
    case _old_generation_expanded_on_last_scavenge:
      return "Old Generation Expanded On Last Scavenge";
    case _old_generation_too_full_to_scavenge:
      return "Old Generation Too Full To Scavenge";
    case _adaptive_size_policy:
      return "Ergonomics";
    case _g1_inc_collection_pause:
      return "G1 Evacuation Pause";
    case _g1_humongous_allocation:
      return "G1 Humongous Allocation";
    case _last_ditch_collection:
      return "Last ditch collection";
    case _last_gc_cause:
      return "ILLEGAL VALUE - last gc cause - ILLEGAL VALUE";
    default:
      return "unknown GCCause";
  }
  // Unreachable: every path through the switch returns (the default arm
  // catches all remaining values). Kept as defensive debug code.
  ShouldNotReachHere();
}
C:\hotspot-69087d08d473\src\share\vm/gc_interface/gcCause.hpp
#ifndef SHARE_VM_GC_INTERFACE_GCCAUSE_HPP
#define SHARE_VM_GC_INTERFACE_GCCAUSE_HPP
#include "memory/allocation.hpp"
// Static-only holder for the enumeration of reasons a GC can be triggered,
// plus classification helpers and a printable-name mapping.
// NOTE(review): the enumerator order looks externally observable (serviceability
// tooling may depend on the numeric values) — do not reorder.
class GCCause : public AllStatic {
 public:
  enum Cause {
    // Explicit / tool-initiated requests
    _java_lang_system_gc,
    _full_gc_alot,
    _scavenge_alot,
    _allocation_profiler,
    _jvmti_force_gc,
    _gc_locker,
    _heap_inspection,
    _heap_dump,
    _wb_young_gc,
    _wb_conc_mark,
    _update_allocation_context_stats_inc,
    _update_allocation_context_stats_full,
    // Implementation-specific / internal causes
    _no_gc,
    _no_cause_specified,
    _allocation_failure,
    _tenured_generation_full,
    _metadata_GC_threshold,
    _cms_generation_full,
    _cms_initial_mark,
    _cms_final_remark,
    _cms_concurrent_mark,
    _old_generation_expanded_on_last_scavenge,
    _old_generation_too_full_to_scavenge,
    _adaptive_size_policy,
    _g1_inc_collection_pause,
    _g1_humongous_allocation,
    _last_ditch_collection,
    _last_gc_cause  // sentinel, not a real cause
  };
  // True for GCs explicitly requested by user code or an agent.
  inline static bool is_user_requested_gc(GCCause::Cause cause) {
    return (cause == GCCause::_java_lang_system_gc ||
            cause == GCCause::_jvmti_force_gc);
  }
  // True for GCs initiated by serviceability features (JVMTI, heap
  // inspection/dump).
  inline static bool is_serviceability_requested_gc(GCCause::Cause
                                                             cause) {
    return (cause == GCCause::_jvmti_force_gc ||
            cause == GCCause::_heap_inspection ||
            cause == GCCause::_heap_dump);
  }
  // Human-readable name for logging; defined in gcCause.cpp.
  static const char* to_string(GCCause::Cause cause);
};
// Stack-allocated helper that formats "<prefix> (<cause>) " into a fixed-size
// buffer for GC log headers and converts implicitly to const char*.
class GCCauseString : StackObj {
 private:
  static const int _length = 128;  // total buffer capacity, including NUL
  char _buffer[_length];
  int _position;                   // characters written so far
 public:
  GCCauseString(const char* prefix, GCCause::Cause cause) {
    // The cause text is only included when -XX:+PrintGCCause is set.
    if (PrintGCCause) {
      _position = jio_snprintf(_buffer, _length, "%s (%s) ", prefix, GCCause::to_string(cause));
    } else {
      _position = jio_snprintf(_buffer, _length, "%s ", prefix);
    }
    assert(_position >= 0 && _position <= _length,
      err_msg("Need to increase the buffer size in GCCauseString? %d", _position));
  }
  // Appends str to the buffer. NOTE(review): on truncation jio_snprintf is
  // presumed to return a negative value, which only the debug-build assert
  // below catches — confirm against the jio_snprintf contract.
  GCCauseString& append(const char* str) {
    int res = jio_snprintf(_buffer + _position, _length - _position, "%s", str);
    _position += res;
    assert(res >= 0 && _position <= _length,
      err_msg("Need to increase the buffer size in GCCauseString? %d", res));
    return *this;
  }
  // Implicit conversion so a GCCauseString can be passed wherever a C string
  // is expected (e.g. gclog printing).
  operator const char*() {
    return _buffer;
  }
};
#endif // SHARE_VM_GC_INTERFACE_GCCAUSE_HPP
C:\hotspot-69087d08d473\src\share\vm/gc_interface/gcName.hpp
#ifndef SHARE_VM_GC_INTERFACE_GCNAME_HPP
#define SHARE_VM_GC_INTERFACE_GCNAME_HPP
#include "utilities/debug.hpp"
// Identifiers for the garbage collectors known to the VM (used by event
// tracing). Values are contiguous from 0; GCNameEndSentinel marks the end and
// is not a real collector.
enum GCName {
  ParallelOld,
  SerialOld,
  PSMarkSweep,
  ParallelScavenge,
  DefNew,
  ParNew,
  G1New,
  ConcurrentMarkSweep,
  G1Old,
  GCNameEndSentinel  // sentinel, must remain last
};
class GCNameHelper {
public:
static const char* to_string(GCName name) {
switch(name) {
case ParallelOld: return "ParallelOld";
case SerialOld: return "SerialOld";
case PSMarkSweep: return "PSMarkSweep";
case ParallelScavenge: return "ParallelScavenge";
case DefNew: return "DefNew";
case ParNew: return "ParNew";
case G1New: return "G1New";
case ConcurrentMarkSweep: return "ConcurrentMarkSweep";
case G1Old: return "G1Old";
default: ShouldNotReachHere(); return NULL;
}
}
};
#endif // SHARE_VM_GC_INTERFACE_GCNAME_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/abstractInterpreter.hpp
#ifndef SHARE_VM_INTERPRETER_ABSTRACTINTERPRETER_HPP
#define SHARE_VM_INTERPRETER_ABSTRACTINTERPRETER_HPP
#include "code/stubs.hpp"
#include "interpreter/bytecodes.hpp"
#include "runtime/thread.inline.hpp"
#include "runtime/vmThread.hpp"
#include "utilities/top.hpp"
#if defined INTERP_MASM_MD_HPP
# include INTERP_MASM_MD_HPP
#elif defined TARGET_ARCH_x86
# include "interp_masm_x86.hpp"
#elif defined TARGET_ARCH_MODEL_aarch64
# include "interp_masm_aarch64.hpp"
#elif defined TARGET_ARCH_MODEL_sparc
# include "interp_masm_sparc.hpp"
#elif defined TARGET_ARCH_MODEL_zero
# include "interp_masm_zero.hpp"
#elif defined TARGET_ARCH_MODEL_ppc_64
# include "interp_masm_ppc_64.hpp"
#endif
// Static interface to the interpreter: entry-point tables, method-kind
// classification, frame layout queries, and tagged-stack slot accessors.
class AbstractInterpreter: AllStatic {
  friend class VMStructs;
  friend class Interpreter;
  friend class CppInterpreterGenerator;
 public:
  // Classification of methods used to select a specialized entry point.
  enum MethodKind {
    zerolocals,                  // method needs locals initialization
    zerolocals_synchronized,     // method needs locals initialization & is synchronized
    native,                      // native method
    native_synchronized,         // native method & is synchronized
    empty,                       // empty method (code: _return)
    accessor,                    // accessor method (code: _aload_0, _getfield, _(a|i)return)
    abstract,                    // abstract method (throws an AbstractMethodException)
    method_handle_invoke_FIRST,  // java.lang.invoke.MethodHandles::invokeExact, etc.
    method_handle_invoke_LAST = (method_handle_invoke_FIRST
                                 + (vmIntrinsics::LAST_MH_SIG_POLY
                                    - vmIntrinsics::FIRST_MH_SIG_POLY)),
    java_lang_math_sin,          // implementation of java.lang.Math.sin (x)
    java_lang_math_cos,          // implementation of java.lang.Math.cos (x)
    java_lang_math_tan,          // implementation of java.lang.Math.tan (x)
    java_lang_math_abs,          // implementation of java.lang.Math.abs (x)
    java_lang_math_sqrt,         // implementation of java.lang.Math.sqrt (x)
    java_lang_math_log,          // implementation of java.lang.Math.log (x)
    java_lang_math_log10,        // implementation of java.lang.Math.log10 (x)
    java_lang_math_pow,          // implementation of java.lang.Math.pow (x,y)
    java_lang_math_exp,          // implementation of java.lang.Math.exp (x)
    java_lang_ref_reference_get, // implementation of java.lang.ref.Reference.get()
    java_util_zip_CRC32_update,  // implementation of java.util.zip.CRC32.update()
    java_util_zip_CRC32_updateBytes,      // implementation of java.util.zip.CRC32.updateBytes()
    java_util_zip_CRC32_updateByteBuffer, // implementation of java.util.zip.CRC32.updateByteBuffer()
    number_of_method_entries,
    invalid = -1
  };

  // Maps a method-handle MethodKind back to its vmIntrinsics ID; returns
  // _none for kinds outside the method-handle invoke range.
  static vmIntrinsics::ID method_handle_intrinsic(MethodKind kind) {
    if (kind >= method_handle_invoke_FIRST && kind <= method_handle_invoke_LAST)
      return (vmIntrinsics::ID)( vmIntrinsics::FIRST_MH_SIG_POLY + (kind - method_handle_invoke_FIRST) );
    else
      return vmIntrinsics::_none;
  }

  enum SomeConstants {
    number_of_result_handlers = 10 // number of result handlers for native calls
  };

 protected:
  static StubQueue* _code;                // the interpreter code (codelets)
  static bool _notice_safepoints;         // true if safepoints are activated
  static address _native_entry_begin;     // Region for native entry code
  static address _native_entry_end;
  static address _entry_table[number_of_method_entries];          // entry points for a given method
  static address _native_abi_to_tosca[number_of_result_handlers]; // for native method result handlers
  static address _slow_signature_handler; // the native method generic (slow) signature handler
  static address _rethrow_exception_entry; // rethrows an activation in previous frame

  friend class AbstractInterpreterGenerator;
  friend class InterpreterGenerator;
  friend class InterpreterMacroAssembler;

 public:
  static void initialize();
  static StubQueue* code() { return _code; }
  static MethodKind method_kind(methodHandle m);
  static address entry_for_kind(MethodKind k) { assert(0 <= k && k < number_of_method_entries, "illegal kind"); return _entry_table[k]; }
  static address entry_for_method(methodHandle m) { return entry_for_kind(method_kind(m)); }
  static void set_entry_for_kind(MethodKind k, address e);
  static void print_method_kind(MethodKind kind) PRODUCT_RETURN;
  static bool can_be_compiled(methodHandle m);
  // Not supported in this configuration; see the template interpreter.
  static address deopt_entry(TosState state, int length) { ShouldNotReachHere(); return NULL; }
  static address return_entry(TosState state, int length, Bytecodes::Code code) { ShouldNotReachHere(); return NULL; }
  static address rethrow_exception_entry() { return _rethrow_exception_entry; }

  // Activation size in words for the topmost interpreter frame of a method.
  static int size_top_interpreter_activation(Method* method);

  // Deoptimization support.
  static address deopt_continue_after_entry(Method* method,
                                            address bcp,
                                            int callee_parameters,
                                            bool is_top_frame);
  static address deopt_reexecute_entry(Method* method, address bcp);
  static bool bytecode_should_reexecute(Bytecodes::Code code);

  // Size of an interpreter activation with the given characteristics.
  static int size_activation(int max_stack,
                             int temps,
                             int extra_args,
                             int monitors,
                             int callee_params,
                             int callee_locals,
                             bool is_top_frame);

  // Initializes an interpreter frame in place during deoptimization.
  static void layout_activation(Method* method,
                                int temps,
                                int popframe_args,
                                int monitors,
                                int caller_actual_parameters,
                                int callee_params,
                                int callee_locals,
                                frame* caller,
                                frame* interpreter_frame,
                                bool is_top_frame,
                                bool is_bottom_frame);

  static bool is_not_reached( methodHandle method, int bci);
  // Safepoint support — not supported in this configuration.
  static void notice_safepoints() { ShouldNotReachHere(); } // stops the thread when reaching a safepoint
  static void ignore_safepoints() { ShouldNotReachHere(); } // ignores safepoints

  static address slow_signature_handler() { return _slow_signature_handler; }
  static address result_handler(BasicType type) { return _native_abi_to_tosca[BasicType_as_index(type)]; }
  static int BasicType_as_index(BasicType type); // computes index into result_handler_by_index table
  static bool in_native_entry(address pc) { return _native_entry_begin <= pc && pc < _native_entry_end; }
  static void print(); // prints the interpreter code

 public:
  // Interpreter helpers for slot (expression stack / locals) access.
  const static int stackElementWords = 1;
  const static int stackElementSize = stackElementWords * wordSize;
  const static int logStackElementSize = LogBytesPerWord;

  // Byte offset of local n, accounting for the platform's stack direction.
  static int local_offset_in_bytes(int n) {
    return ((frame::interpreter_frame_expression_stack_direction() * n) * stackElementSize);
  }

  // Access to oops, ints and longs stored in interpreter stack slots.
  static oop* oop_addr_in_slot(intptr_t* slot_addr) {
    return (oop*) slot_addr;
  }
  static jint* int_addr_in_slot(intptr_t* slot_addr) {
    if ((int) sizeof(jint) < wordSize && !Bytes::is_Java_byte_ordering_different())
      // big-endian LP64: the jint lives in the low half of the word
      return (jint*)(slot_addr + 1) - 1;
    else
      return (jint*) slot_addr;
  }
  static jlong long_in_slot(intptr_t* slot_addr) {
    if (sizeof(intptr_t) >= sizeof(jlong)) {
      return *(jlong*) slot_addr;
    } else {
      return Bytes::get_native_u8((address)slot_addr);
    }
  }
  static void set_long_in_slot(intptr_t* slot_addr, jlong value) {
    if (sizeof(intptr_t) >= sizeof(jlong)) {
      // FIX: this store was missing, making set_long_in_slot a no-op on
      // platforms where a slot can hold a jlong (mirrors long_in_slot above).
      *(jlong*) slot_addr = value;
    } else {
      Bytes::put_native_u8((address)slot_addr, value);
    }
  }
  // Reads the slot into a jvalue union according to the Java type.
  static void get_jvalue_in_slot(intptr_t* slot_addr, BasicType type, jvalue* value) {
    switch (type) {
      case T_BOOLEAN: value->z = *int_addr_in_slot(slot_addr); break;
      case T_CHAR: value->c = *int_addr_in_slot(slot_addr); break;
      case T_BYTE: value->b = *int_addr_in_slot(slot_addr); break;
      case T_SHORT: value->s = *int_addr_in_slot(slot_addr); break;
      case T_INT: value->i = *int_addr_in_slot(slot_addr); break;
      case T_LONG: value->j = long_in_slot(slot_addr); break;
      case T_FLOAT: value->f = *(jfloat*)int_addr_in_slot(slot_addr); break;
      case T_DOUBLE: value->d = jdouble_cast(long_in_slot(slot_addr)); break;
      case T_OBJECT: value->l = (jobject)*oop_addr_in_slot(slot_addr); break;
      default: ShouldNotReachHere();
    }
  }
  // Writes a jvalue union into the slot according to the Java type.
  static void set_jvalue_in_slot(intptr_t* slot_addr, BasicType type, jvalue* value) {
    switch (type) {
      case T_BOOLEAN: *int_addr_in_slot(slot_addr) = (value->z != 0); break;
      case T_CHAR: *int_addr_in_slot(slot_addr) = value->c; break;
      case T_BYTE: *int_addr_in_slot(slot_addr) = value->b; break;
      case T_SHORT: *int_addr_in_slot(slot_addr) = value->s; break;
      case T_INT: *int_addr_in_slot(slot_addr) = value->i; break;
      case T_LONG: set_long_in_slot(slot_addr, value->j); break;
      case T_FLOAT: *(jfloat*)int_addr_in_slot(slot_addr) = value->f; break;
      case T_DOUBLE: set_long_in_slot(slot_addr, jlong_cast(value->d)); break;
      case T_OBJECT: *oop_addr_in_slot(slot_addr) = (oop) value->l; break;
      default: ShouldNotReachHere();
    }
  }
};
class Template;

// Generates the interpreter's shared code pieces (result handlers, signature
// handlers, method entries) into the interpreter's StubQueue.
class AbstractInterpreterGenerator: public StackObj {
 protected:
  InterpreterMacroAssembler* _masm;  // assembler emitting into the stub queue
  // Shared code sequence generators.
  address generate_result_handler_for(BasicType type);
  address generate_slow_signature_handler();
  address generate_method_entry(AbstractInterpreter::MethodKind kind);
  void bang_stack_shadow_pages(bool native_call);
  void generate_all();
  void initialize_method_handle_entries();
 public:
  AbstractInterpreterGenerator(StubQueue* _code);
};
#endif // SHARE_VM_INTERPRETER_ABSTRACTINTERPRETER_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecode.cpp
#include "precompiled.hpp"
#include "interpreter/bytecode.hpp"
#include "interpreter/linkResolver.hpp"
#include "oops/constantPool.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/fieldType.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/safepoint.hpp"
#include "runtime/signature.hpp"
#ifdef ASSERT
// Debug check: verifies that the bytecode at _bcp has the same instruction
// format bits as testbc. Breakpointed bytecodes bail out silently.
// NOTE(review): the local names appear swapped (thisflags is computed from
// testbc and vice versa), but the equality comparison is symmetric so the
// check itself is unaffected.
void Bytecode::assert_same_format_as(Bytecodes::Code testbc, bool is_wide) const {
  Bytecodes::Code thisbc = Bytecodes::cast(byte_at(0));
  if (thisbc == Bytecodes::_breakpoint) return; // let the assertion fail silently
  if (is_wide) {
    assert(thisbc == Bytecodes::_wide, "expected a wide instruction");
    thisbc = Bytecodes::cast(byte_at(1)); // real opcode follows the wide prefix
    if (thisbc == Bytecodes::_breakpoint) return;
  }
  int thisflags = Bytecodes::flags(testbc, is_wide) & Bytecodes::_all_fmt_bits;
  int testflags = Bytecodes::flags(thisbc, is_wide) & Bytecodes::_all_fmt_bits;
  if (thisflags != testflags)
    tty->print_cr("assert_same_format_as(%d) failed on bc=%d%s; %d != %d",
                  (int)testbc, (int)thisbc, (is_wide?"/wide":""), testflags, thisflags);
  assert(thisflags == testflags, "expected format");
}
// Debug check: verifies that bytecode bc carries an index operand of exactly
// `size` bytes (1, 2 or 4), by comparing its format bits against the expected
// pattern for that size.
void Bytecode::assert_index_size(int size, Bytecodes::Code bc, bool is_wide) {
  int have_fmt = (Bytecodes::flags(bc, is_wide)
                  & (Bytecodes::_fmt_has_u2 | Bytecodes::_fmt_has_u4 |
                     Bytecodes::_fmt_not_simple |
                     Bytecodes::_fmt_has_o));
  int need_fmt = -1;  // stays -1 (never matches) for unsupported sizes
  switch (size) {
    case 1: need_fmt = 0; break;
    case 2: need_fmt = Bytecodes::_fmt_has_u2; break;
    case 4: need_fmt = Bytecodes::_fmt_has_u4; break;
  }
  if (is_wide) need_fmt |= Bytecodes::_fmt_not_simple;
  if (have_fmt != need_fmt) {
    tty->print_cr("assert_index_size %d: bc=%d%s %d != %d", size, bc, (is_wide?"/wide":""), have_fmt, need_fmt);
    assert(have_fmt == need_fmt, "assert_index_size");
  }
}
// Debug check: verifies that bytecode bc carries a branch offset operand of
// exactly `size` bytes (2 or 4).
void Bytecode::assert_offset_size(int size, Bytecodes::Code bc, bool is_wide) {
  int have_fmt = Bytecodes::flags(bc, is_wide) & Bytecodes::_all_fmt_bits;
  int need_fmt = -1;  // stays -1 (never matches) for unsupported sizes
  switch (size) {
    case 2: need_fmt = Bytecodes::_fmt_bo2; break;
    case 4: need_fmt = Bytecodes::_fmt_bo4; break;
  }
  if (is_wide) need_fmt |= Bytecodes::_fmt_not_simple;
  if (have_fmt != need_fmt) {
    tty->print_cr("assert_offset_size %d: bc=%d%s %d != %d", size, bc, (is_wide?"/wide":""), have_fmt, need_fmt);
    assert(have_fmt == need_fmt, "assert_offset_size");
  }
}
// Debug check: verifies that bytecode bc carries an inline constant operand of
// exactly `size` bytes starting at offset `where`, and that the constant ends
// exactly at the end of the instruction.
void Bytecode::assert_constant_size(int size, int where, Bytecodes::Code bc, bool is_wide) {
  int have_fmt = Bytecodes::flags(bc, is_wide) & (Bytecodes::_all_fmt_bits
                                                  & ~Bytecodes::_fmt_has_i); // ignore index bit
  int need_fmt = -1;  // stays -1 (never matches) for unsupported sizes
  switch (size) {
    case 1: need_fmt = Bytecodes::_fmt_bc; break;
    case 2: need_fmt = Bytecodes::_fmt_bc | Bytecodes::_fmt_has_u2; break;
  }
  if (is_wide) need_fmt |= Bytecodes::_fmt_not_simple;
  int length = is_wide ? Bytecodes::wide_length_for(bc) : Bytecodes::length_for(bc);
  if (have_fmt != need_fmt || where + size != length) {
    tty->print_cr("assert_constant_size %d @%d: bc=%d%s %d != %d", size, where, bc, (is_wide?"/wide":""), have_fmt, need_fmt);
  }
  assert(have_fmt == need_fmt, "assert_constant_size");
  assert(where + size == length, "assert_constant_size oob");
}
// Debug check: verifies that bytecode bc stores its index in native byte order
// (i.e. it is a rewritten/cache-indexed form).
void Bytecode::assert_native_index(Bytecodes::Code bc, bool is_wide) {
  assert((Bytecodes::flags(bc, is_wide) & Bytecodes::_fmt_has_nbo) != 0, "native index");
}
#endif //ASSERT
// Branch offset of the i-th tableswitch destination. The jump table starts
// after the padded default/low/high header (3 jints), 4-byte aligned past the
// opcode byte.
int Bytecode_tableswitch::dest_offset_at(int i) const {
  return get_Java_u4_at(aligned_offset(1 + (3 + i)*jintSize));
}
// Debug check: this really is an invoke bytecode and the constant pool has
// been rewritten (a cpcache exists).
void Bytecode_invoke::verify() const {
  assert(is_valid(), "check invoke");
  assert(cpcache() != NULL, "do not call this from verifier or rewriter");
}
// Klass name of the referenced member, read from the constant pool without
// triggering resolution.
Symbol* Bytecode_member_ref::klass() const {
  return constants()->klass_ref_at_noresolve(index());
}
// Name of the referenced method or field.
Symbol* Bytecode_member_ref::name() const {
  return constants()->name_ref_at(index());
}
// Signature of the referenced method or field.
Symbol* Bytecode_member_ref::signature() const {
  return constants()->signature_ref_at(index());
}
// Result type of the member reference, derived by parsing its signature.
BasicType Bytecode_member_ref::result_type() const {
  ResultTypeFinder rts(signature());
  rts.iterate();
  return rts.type();
}
// Resolves and returns the statically "specified" target method from the
// constant pool; propagates resolution exceptions via TRAPS/CHECK_.
methodHandle Bytecode_invoke::static_target(TRAPS) {
  methodHandle m;
  KlassHandle resolved_klass;
  constantPoolHandle constants(THREAD, this->constants());
  Bytecodes::Code bc = invoke_code();
  LinkResolver::resolve_method_statically(m, resolved_klass, bc, constants, index(), CHECK_(methodHandle()));
  return m;
}
// Returns the resolved appendix argument of this call site (method-handle /
// invokedynamic support), or an empty Handle when there is none.
Handle Bytecode_invoke::appendix(TRAPS) {
  ConstantPoolCacheEntry* cpce = cpcache_entry();
  if (cpce->has_appendix())
    return Handle(THREAD, cpce->appendix_if_resolved(constants()));
  return Handle(); // usual case
}
int Bytecode_member_ref::index() const {
Bytecodes::Code rawc = code();
if (has_index_u4(rawc))
return get_index_u4(rawc);
else
return get_index_u2_cpcache(rawc);
}
// Constant-pool index of the member, obtained through the cache entry.
int Bytecode_member_ref::pool_index() const {
  return cpcache_entry()->constant_pool_index();
}
// Cache entry for this member reference; decodes the raw operand (which may be
// an inverted invokedynamic index) into a cpcache position first.
ConstantPoolCacheEntry* Bytecode_member_ref::cpcache_entry() const {
  int index = this->index();
  return cpcache()->entry_at(ConstantPool::decode_cpcache_index(index, true));
}
// Debug check: this really is a field-access bytecode.
void Bytecode_field::verify() const {
  assert(is_valid(), "check field");
}
int Bytecode_loadconstant::raw_index() const {
Bytecodes::Code rawc = code();
assert(rawc != Bytecodes::_wide, "verifier prevents this");
if (Bytecodes::java_code(rawc) == Bytecodes::_ldc)
return get_index_u1(rawc);
else
return get_index_u2(rawc, false);
}
// Constant-pool index of the loaded constant; when the rewritten bytecode
// stores an object-cache index instead, translate it back through the pool.
int Bytecode_loadconstant::pool_index() const {
  const int raw = raw_index();
  return has_cache_index() ? _method->constants()->object_to_cp_index(raw)
                           : raw;
}
// Basic type of the constant, derived from its constant-pool tag.
BasicType Bytecode_loadconstant::result_type() const {
  int index = pool_index();
  constantTag tag = _method->constants()->tag_at(index);
  return tag.basic_type();
}
// Resolves and returns the constant as an oop; dispatches on whether the raw
// operand is an object-cache index or a direct constant-pool index.
// Propagates resolution exceptions via TRAPS/THREAD.
oop Bytecode_loadconstant::resolve_constant(TRAPS) const {
  assert(_method.not_null(), "must supply method to resolve constant");
  int index = raw_index();
  ConstantPool* constants = _method->constants();
  if (has_cache_index()) {
    return constants->resolve_cached_constant_at(index, THREAD);
  } else {
    return constants->resolve_constant_at(index, THREAD);
  }
}
#ifndef PRODUCT
// Debug check (non-PRODUCT): this really is a lookupswitch and its match
// values are strictly sorted, as the JVM spec requires.
void Bytecode_lookupswitch::verify() const {
  switch (Bytecodes::java_code(code())) {
    case Bytecodes::_lookupswitch:
      { int i = number_of_pairs() - 1;
        while (i-- > 0) {
          assert(pair_at(i).match() < pair_at(i+1).match(), "unsorted table entries");
        }
      }
      break;
    default:
      fatal("not a lookupswitch bytecode");
  }
}
// Debug check (non-PRODUCT): this really is a tableswitch and its key range
// is well-formed (high_key >= low_key).
void Bytecode_tableswitch::verify() const {
  switch (Bytecodes::java_code(code())) {
    case Bytecodes::_tableswitch:
      { int lo = low_key();
        int hi = high_key();
        assert (hi >= lo, "incorrect hi/lo values in tableswitch");
        // The per-entry destination offsets need no validation here; the
        // former empty counting loop over them was dead code and was removed.
      }
      break;
    default:
      fatal("not a tableswitch bytecode");
  }
}
#endif
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecode.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODE_HPP
#define SHARE_VM_INTERPRETER_BYTECODE_HPP
#include "interpreter/bytecodes.hpp"
#include "memory/allocation.hpp"
#include "oops/method.hpp"
#ifdef TARGET_ARCH_x86
# include "bytes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytes_ppc.hpp"
#endif
class ciBytecodeStream;
// Lightweight wrapper around a single Java bytecode at a given bcp, providing
// typed access to its operands (indices, offsets, inline constants) with
// debug-build format checking.
class Bytecode: public StackObj {
 protected:
  const address _bcp;            // address of the bytecode
  const Bytecodes::Code _code;   // (possibly rewritten) opcode at _bcp
  // Raw byte access relative to _bcp.
  address addr_at (int offset) const { return (address)_bcp + offset; }
  u_char byte_at(int offset) const { return *addr_at(offset); }
  // 4-byte-aligned access (tableswitch/lookupswitch operands are padded).
  address aligned_addr_at (int offset) const { return (address)round_to((intptr_t)addr_at(offset), jintSize); }
  int aligned_offset (int offset) const { return aligned_addr_at(offset) - addr_at(0); }
  // Java-order (big-endian) vs native-order operand readers.
  int get_Java_u2_at (int offset) const { return Bytes::get_Java_u2(addr_at(offset)); }
  int get_Java_u4_at (int offset) const { return Bytes::get_Java_u4(addr_at(offset)); }
  int get_native_u2_at (int offset) const { return Bytes::get_native_u2(addr_at(offset)); }
  int get_native_u4_at (int offset) const { return Bytes::get_native_u4(addr_at(offset)); }
 public:
  Bytecode(Method* method, address bcp): _bcp(bcp), _code(Bytecodes::code_at(method, addr_at(0))) {
    assert(method != NULL, "this form requires a valid Method*");
  }
  // Defined in ciStreams.hpp for compiler-interface use.
  inline Bytecode(const ciBytecodeStream* stream, address bcp = NULL);
  // Attributes.
  address bcp() const { return _bcp; }
  int instruction_size() const { return Bytecodes::length_for_code_at(_code, bcp()); }
  Bytecodes::Code code() const { return _code; }
  Bytecodes::Code java_code() const { return Bytecodes::java_code(code()); }
  // invokehandle is kept as-is (it has its own operand format); everything
  // else is mapped back to the original Java opcode.
  Bytecodes::Code invoke_code() const { return (code() == Bytecodes::_invokehandle) ? code() : java_code(); }
  // Static functions for parsing bytecodes accurately.
  int get_index_u1(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(1, bc);
    return *(jubyte*)addr_at(1);
  }
  int get_index_u2(Bytecodes::Code bc, bool is_wide = false) const {
    assert_same_format_as(bc, is_wide); assert_index_size(2, bc, is_wide);
    address p = addr_at(is_wide ? 2 : 1);  // operand follows the wide prefix
    if (can_use_native_byte_order(bc, is_wide))
      return Bytes::get_native_u2(p);
    else return Bytes::get_Java_u2(p);
  }
  // Rewritten bytecodes store constant-pool-cache indices (tagged, native order).
  int get_index_u1_cpcache(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(1, bc);
    return *(jubyte*)addr_at(1) + ConstantPool::CPCACHE_INDEX_TAG;
  }
  int get_index_u2_cpcache(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(2, bc); assert_native_index(bc);
    return Bytes::get_native_u2(addr_at(1)) + ConstantPool::CPCACHE_INDEX_TAG;
  }
  int get_index_u4(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_index_size(4, bc);
    assert(can_use_native_byte_order(bc), "");
    return Bytes::get_native_u4(addr_at(1));
  }
  // Only invokedynamic uses a 4-byte index.
  bool has_index_u4(Bytecodes::Code bc) const {
    return bc == Bytecodes::_invokedynamic;
  }
  // Signed branch offsets (Java byte order).
  int get_offset_s2(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_offset_size(2, bc);
    return (jshort) Bytes::get_Java_u2(addr_at(1));
  }
  int get_offset_s4(Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_offset_size(4, bc);
    return (jint) Bytes::get_Java_u4(addr_at(1));
  }
  // Inline constants (e.g. bipush/sipush operands), sign-extended.
  int get_constant_u1(int offset, Bytecodes::Code bc) const {
    assert_same_format_as(bc); assert_constant_size(1, offset, bc);
    return *(jbyte*)addr_at(offset);
  }
  int get_constant_u2(int offset, Bytecodes::Code bc, bool is_wide = false) const {
    assert_same_format_as(bc, is_wide); assert_constant_size(2, offset, bc, is_wide);
    return (jshort) Bytes::get_Java_u2(addr_at(offset));
  }
  // Debug-build format assertions; no-ops in product builds.
  void assert_same_format_as(Bytecodes::Code testbc, bool is_wide = false) const NOT_DEBUG_RETURN;
  static void assert_index_size(int required_size, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_offset_size(int required_size, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_constant_size(int required_size, int where, Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  static void assert_native_index(Bytecodes::Code bc, bool is_wide = false) NOT_DEBUG_RETURN;
  // True when the operand can be read in native byte order (either the
  // platform is big-endian like Java order, or the bytecode was rewritten).
  static bool can_use_native_byte_order(Bytecodes::Code bc, bool is_wide = false) {
    return (!Bytes::is_Java_byte_ordering_different() || Bytecodes::native_byte_order(bc /*, is_wide*/));
  }
};
// One (match, offset) pair inside a lookupswitch's jump table.
class LookupswitchPair VALUE_OBJ_CLASS_SPEC {
 private:
  const address _bcp;  // address of this pair within the bytecode stream
  address addr_at (int offset) const { return _bcp + offset; }
  int get_Java_u4_at (int offset) const { return Bytes::get_Java_u4(addr_at(offset)); }
 public:
  LookupswitchPair(address bcp): _bcp(bcp) {}
  int match() const { return get_Java_u4_at(0 * jintSize); }   // case value
  int offset() const { return get_Java_u4_at(1 * jintSize); }  // branch offset
};
// Abstraction for the lookupswitch bytecode: default offset, pair count, and
// access to individual (match, offset) pairs.
class Bytecode_lookupswitch: public Bytecode {
 public:
  Bytecode_lookupswitch(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  inline Bytecode_lookupswitch(const ciBytecodeStream* stream);
  void verify() const PRODUCT_RETURN;
  // Operands start 4-byte aligned past the opcode byte.
  int default_offset() const { return get_Java_u4_at(aligned_offset(1 + 0*jintSize)); }
  int number_of_pairs() const { return get_Java_u4_at(aligned_offset(1 + 1*jintSize)); }
  LookupswitchPair pair_at(int i) const {
    assert(0 <= i && i < number_of_pairs(), "pair index out of bounds");
    return LookupswitchPair(aligned_addr_at(1 + (1 + i)*2*jintSize));
  }
};
// Abstraction for the tableswitch bytecode: default offset, key range, and
// destination offsets.
class Bytecode_tableswitch: public Bytecode {
 public:
  Bytecode_tableswitch(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  inline Bytecode_tableswitch(const ciBytecodeStream* stream);
  void verify() const PRODUCT_RETURN;
  // Operands start 4-byte aligned past the opcode byte.
  int default_offset() const { return get_Java_u4_at(aligned_offset(1 + 0*jintSize)); }
  int low_key() const { return get_Java_u4_at(aligned_offset(1 + 1*jintSize)); }
  int high_key() const { return get_Java_u4_at(aligned_offset(1 + 2*jintSize)); }
  int dest_offset_at(int i) const;  // defined in bytecode.cpp
  int length() { return high_key()-low_key()+1; }  // number of table entries
};
// Common code for bytecodes that reference a constant-pool member
// (field access and method invocation).
class Bytecode_member_ref: public Bytecode {
 protected:
  const methodHandle _method; // method containing the bytecode
  Bytecode_member_ref(methodHandle method, int bci) : Bytecode(method(), method()->bcp_from(bci)), _method(method) {}
  methodHandle method() const { return _method; }
  ConstantPool* constants() const { return _method->constants(); }
  ConstantPoolCache* cpcache() const { return _method->constants()->cache(); }
  ConstantPoolCacheEntry* cpcache_entry() const;
 public:
  int index() const; // cache index (loaded from instruction)
  int pool_index() const; // constant pool index
  Symbol* klass() const; // returns the klass of the method or field
  Symbol* name() const; // returns the name of the method or field
  Symbol* signature() const; // returns the signature of the method or field
  BasicType result_type() const; // returns the result type of the getfield or invoke
};
// Abstraction for all invoke* bytecodes; classifies the invoke kind and gives
// access to the call's static target and appendix.
class Bytecode_invoke: public Bytecode_member_ref {
 protected:
  // Unchecked constructor used by Bytecode_invoke_check (no verify()).
  Bytecode_invoke(methodHandle method, int bci, bool unused) : Bytecode_member_ref(method, bci) {}
 public:
  Bytecode_invoke(methodHandle method, int bci) : Bytecode_member_ref(method, bci) { verify(); }
  void verify() const;
  // Attributes.
  methodHandle static_target(TRAPS); // "specified" method (from constant pool)
  Handle appendix(TRAPS); // if CPCE::has_appendix (from constant pool)
  // Testers (based on invoke_code(), so rewritten forms classify correctly).
  bool is_invokeinterface() const { return invoke_code() == Bytecodes::_invokeinterface; }
  bool is_invokevirtual() const { return invoke_code() == Bytecodes::_invokevirtual; }
  bool is_invokestatic() const { return invoke_code() == Bytecodes::_invokestatic; }
  bool is_invokespecial() const { return invoke_code() == Bytecodes::_invokespecial; }
  bool is_invokedynamic() const { return invoke_code() == Bytecodes::_invokedynamic; }
  bool is_invokehandle() const { return invoke_code() == Bytecodes::_invokehandle; }
  bool has_receiver() const { return !is_invokestatic() && !is_invokedynamic(); }
  bool is_valid() const { return is_invokeinterface() ||
                                 is_invokevirtual() ||
                                 is_invokestatic() ||
                                 is_invokespecial() ||
                                 is_invokedynamic() ||
                                 is_invokehandle(); }
  bool has_appendix() { return cpcache_entry()->has_appendix(); }
 private:
  // Only the factory below may use the unchecked constructor.
  inline friend Bytecode_invoke Bytecode_invoke_check(methodHandle method, int bci);
};
// Factory returning a Bytecode_invoke WITHOUT verifying it; callers must test
// is_valid() themselves.
inline Bytecode_invoke Bytecode_invoke_check(methodHandle method, int bci) {
  return Bytecode_invoke(method, bci, false);
}
// Abstraction for the get/put field and static bytecodes.
class Bytecode_field: public Bytecode_member_ref {
 public:
  Bytecode_field(methodHandle method, int bci) : Bytecode_member_ref(method, bci) { verify(); }
  // Testers based on the original (pre-rewrite) Java opcode.
  bool is_getfield() const { return java_code() == Bytecodes::_getfield; }
  bool is_putfield() const { return java_code() == Bytecodes::_putfield; }
  bool is_getstatic() const { return java_code() == Bytecodes::_getstatic; }
  bool is_putstatic() const { return java_code() == Bytecodes::_putstatic; }
  bool is_getter() const { return is_getfield() || is_getstatic(); }
  bool is_static() const { return is_getstatic() || is_putstatic(); }
  bool is_valid() const { return is_getfield() ||
                                 is_putfield() ||
                                 is_getstatic() ||
                                 is_putstatic(); }
  void verify() const;
};
// Abstraction for the checkcast bytecode.
class Bytecode_checkcast: public Bytecode {
 public:
  Bytecode_checkcast(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(Bytecodes::java_code(code()) == Bytecodes::_checkcast, "check checkcast"); }
  // Constant-pool index of the target class.
  long index() const { return get_index_u2(Bytecodes::_checkcast); };
};
// Abstraction for the instanceof bytecode.
class Bytecode_instanceof: public Bytecode {
 public:
  Bytecode_instanceof(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(code() == Bytecodes::_instanceof, "check instanceof"); }
  // Constant-pool index of the tested class.
  long index() const { return get_index_u2(Bytecodes::_instanceof); };
};
// Abstraction for the new bytecode.
class Bytecode_new: public Bytecode {
 public:
  Bytecode_new(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(java_code() == Bytecodes::_new, "check new"); }
  // Constant-pool index of the instantiated class.
  long index() const { return get_index_u2(Bytecodes::_new); };
};
// Abstraction for the multianewarray bytecode.
class Bytecode_multianewarray: public Bytecode {
 public:
  Bytecode_multianewarray(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  // Assert message fixed: it previously read "check new", a copy-paste from
  // Bytecode_new, which made debug failures misleading.
  void verify() const { assert(java_code() == Bytecodes::_multianewarray, "check multianewarray"); }
  // Constant-pool index of the array class.
  long index() const { return get_index_u2(Bytecodes::_multianewarray); };
};
// Abstraction for the anewarray bytecode.
class Bytecode_anewarray: public Bytecode {
 public:
  Bytecode_anewarray(Method* method, address bcp): Bytecode(method, bcp) { verify(); }
  void verify() const { assert(java_code() == Bytecodes::_anewarray, "check anewarray"); }
  // Constant-pool index of the element class.
  long index() const { return get_index_u2(Bytecodes::_anewarray); };
};
// Abstraction for the ldc / ldc_w / ldc2_w bytecodes, handling both direct
// constant-pool indices and rewritten object-cache indices.
class Bytecode_loadconstant: public Bytecode {
 private:
  const methodHandle _method;  // needed for constant-pool access
  int raw_index() const;       // raw operand as encoded in the instruction
 public:
  Bytecode_loadconstant(methodHandle method, int bci): Bytecode(method(), method->bcp_from(bci)), _method(method) { verify(); }
  void verify() const {
    assert(_method.not_null(), "must supply method");
    Bytecodes::Code stdc = Bytecodes::java_code(code());
    assert(stdc == Bytecodes::_ldc ||
           stdc == Bytecodes::_ldc_w ||
           stdc == Bytecodes::_ldc2_w, "load constant");
  }
  // Rewritten (non-Java) opcodes store a reference-cache index rather than a
  // direct constant-pool index.
  bool has_cache_index() const { return code() >= Bytecodes::number_of_java_codes; }
  int pool_index() const; // index into constant pool
  int cache_index() const { // index into reference cache (or -1 if none)
    return has_cache_index() ? raw_index() : -1;
  }
  BasicType result_type() const; // returns the result type of the ldc
  oop resolve_constant(TRAPS) const;
};
#endif // SHARE_VM_INTERPRETER_BYTECODE_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeHistogram.cpp
#include "precompiled.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/os.hpp"
#include "utilities/growableArray.hpp"
#ifndef PRODUCT
// Storage for BytecodeCounter's statistics (non-product builds only):
// number of bytecodes executed since reset(), and the os::elapsed_counter()
// value captured at the last reset().
int BytecodeCounter::_counter_value = 0;
jlong BytecodeCounter::_reset_time = 0;
// Restart counting: zero the bytecode count and take a fresh time baseline.
void BytecodeCounter::reset() {
  _reset_time    = os::elapsed_counter();
  _counter_value = 0;
}
double BytecodeCounter::elapsed_time() {
return (double)(os::elapsed_counter() - _reset_time) / (double)os::elapsed_frequency();
}
// Average execution rate, in bytecodes per second, since the last reset().
double BytecodeCounter::frequency() {
  double seconds = elapsed_time();
  return (double)counter_value() / seconds;
}
// Report the bytecode count, elapsed wall time and average rate (in MHz)
// on the tty.
void BytecodeCounter::print() {
  tty->print_cr("%d bytecodes executed in %.1fs (%.3fMHz)",
                counter_value(),
                elapsed_time(),
                frequency() / 1000000.0);
}
// Helper pairing a bytecode (or pair) index with its execution count;
// used to sort histogram counters before printing.
class HistoEntry: public ResourceObj {
 private:
  int _index;
  int _count;

 public:
  HistoEntry(int index, int count) : _index(index), _count(count) {}

  int index() const { return _index; }
  int count() const { return _count; }

  // Ascending comparison by count, suitable for GrowableArray::sort.
  static int compare(HistoEntry** x, HistoEntry** y) {
    return (*x)->count() - (*y)->count();
  }
};
// Wraps a raw counter array into HistoEntry objects and returns them
// sorted by ascending execution count.
static GrowableArray<HistoEntry*>* sorted_array(int* array, int length) {
  GrowableArray<HistoEntry*>* entries = new GrowableArray<HistoEntry*>(length);
  for (int idx = length - 1; idx >= 0; idx--) {
    entries->append(new HistoEntry(idx, array[idx]));
  }
  entries->sort(HistoEntry::compare);
  return entries;
}
// Sums the execution counts of all entries in a profile.
static int total_count(GrowableArray<HistoEntry*>* profile) {
  int sum = 0;
  for (int idx = profile->length() - 1; idx >= 0; idx--) {
    sum += profile->at(idx)->count();
  }
  return sum;
}
// Human-readable mnemonic for a bytecode value, or a placeholder string
// for values with no defined bytecode.
static const char* name_for(int i) {
  if (Bytecodes::is_defined(i)) {
    return Bytecodes::name(Bytecodes::cast(i));
  }
  return "xxxunusedxxx";
}
int BytecodeHistogram::_counters[Bytecodes::number_of_codes];
// Clear all per-bytecode execution counters.
void BytecodeHistogram::reset() {
  for (int idx = 0; idx < Bytecodes::number_of_codes; idx++) {
    _counters[idx] = 0;
  }
}
void BytecodeHistogram::print(float cutoff) {
ResourceMark rm;
GrowableArray<HistoEntry*>* profile = sorted_array(_counters, Bytecodes::number_of_codes);
int tot = total_count(profile);
int abs_sum = 0;
tty->cr(); //0123456789012345678901234567890123456789012345678901234567890123456789
tty->print_cr("Histogram of %d executed bytecodes:", tot);
tty->cr();
tty->print_cr(" absolute relative code name");
tty->print_cr("----------------------------------------------------------------------");
int i = profile->length();
while (i-- > 0) {
HistoEntry* e = profile->at(i);
int abs = e->count();
float rel = abs * 100.0F / tot;
if (cutoff <= rel) {
tty->print_cr("%10d %7.2f%% %02x %s", abs, rel, e->index(), name_for(e->index()));
abs_sum += abs;
}
}
tty->print_cr("----------------------------------------------------------------------");
float rel_sum = abs_sum * 100.0F / tot;
tty->print_cr("%10d %7.2f%% (cutoff = %.2f%%)", abs_sum, rel_sum, cutoff);
tty->cr();
}
// State for the bytecode-pair histogram (non-product builds only):
// _index holds the previous bytecode shifted into position so the next
// bytecode completes a pair index; _counters has one slot per pair.
int BytecodePairHistogram::_index;
int BytecodePairHistogram::_counters[BytecodePairHistogram::number_of_pairs];
// Reset all pair counters. The running index is seeded as if the previous
// bytecode had been a nop, so the first real bytecode forms a valid pair.
void BytecodePairHistogram::reset() {
  _index = Bytecodes::_nop << log2_number_of_codes;
  for (int idx = 0; idx < number_of_pairs; idx++) {
    _counters[idx] = 0;
  }
}
void BytecodePairHistogram::print(float cutoff) {
ResourceMark rm;
GrowableArray<HistoEntry*>* profile = sorted_array(_counters, number_of_pairs);
int tot = total_count(profile);
int abs_sum = 0;
tty->cr(); //0123456789012345678901234567890123456789012345678901234567890123456789
tty->print_cr("Histogram of %d executed bytecode pairs:", tot);
tty->cr();
tty->print_cr(" absolute relative codes 1st bytecode 2nd bytecode");
tty->print_cr("----------------------------------------------------------------------");
int i = profile->length();
while (i-- > 0) {
HistoEntry* e = profile->at(i);
int abs = e->count();
float rel = abs * 100.0F / tot;
if (cutoff <= rel) {
int c1 = e->index() % number_of_codes;
int c2 = e->index() / number_of_codes;
tty->print_cr("%10d %6.3f%% %02x %02x %-19s %s", abs, rel, c1, c2, name_for(c1), name_for(c2));
abs_sum += abs;
}
}
tty->print_cr("----------------------------------------------------------------------");
float rel_sum = abs_sum * 100.0F / tot;
tty->print_cr("%10d %6.3f%% (cutoff = %.3f%%)", abs_sum, rel_sum, cutoff);
tty->cr();
}
#endif
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeHistogram.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODEHISTOGRAM_HPP
#define SHARE_VM_INTERPRETER_BYTECODEHISTOGRAM_HPP
#include "interpreter/bytecodes.hpp"
#include "memory/allocation.hpp"
// Counts the number of bytecodes executed by the interpreter and the time
// over which they were executed. All members compile away in PRODUCT
// builds via the PRODUCT_RETURN/NOT_PRODUCT macros.
class BytecodeCounter: AllStatic {
private:
// Number of bytecodes executed since the last reset().
NOT_PRODUCT(static int _counter_value;)
// os::elapsed_counter() value captured at the last reset().
NOT_PRODUCT(static jlong _reset_time;)
// The interpreters increment _counter_value directly from generated/
// interpreted code, hence the friendships.
friend class TemplateInterpreterGenerator;
friend class BytecodeInterpreter;
public:
static void reset() PRODUCT_RETURN;
static int counter_value() PRODUCT_RETURN0 NOT_PRODUCT({ return _counter_value; });
static double elapsed_time() PRODUCT_RETURN0; // in seconds
static double frequency() PRODUCT_RETURN0; // bytecodes/seconds
static void print() PRODUCT_RETURN;
};
// Records how often each individual bytecode is executed. All members
// compile away in PRODUCT builds.
class BytecodeHistogram: AllStatic {
private:
NOT_PRODUCT(static int _counters[Bytecodes::number_of_codes];) // a counter for each bytecode
// Incremented directly by the interpreters, hence the friendships.
friend class TemplateInterpreterGenerator;
friend class InterpreterGenerator;
friend class BytecodeInterpreter;
public:
static void reset() PRODUCT_RETURN; // reset counters
static void print(float cutoff = 0.01F) PRODUCT_RETURN; // cutoff in percent
};
// Records how often each consecutive pair of bytecodes is executed.
// A pair is indexed as (second << log2_number_of_codes) | first.
// All members compile away in PRODUCT builds.
class BytecodePairHistogram: AllStatic {
public: // for SparcWorks
enum Constants {
log2_number_of_codes = 8, // use a power of 2 for faster addressing
number_of_codes = 1 << log2_number_of_codes, // must be no less than Bytecodes::number_of_codes
number_of_pairs = number_of_codes * number_of_codes
};
private:
NOT_PRODUCT(static int _index;) // new bytecode is shifted in - used to index into _counters
NOT_PRODUCT(static int _counters[number_of_pairs];) // a counter for each pair
// Updated directly by the interpreters, hence the friendships.
friend class TemplateInterpreterGenerator;
friend class InterpreterGenerator;
public:
static void reset() PRODUCT_RETURN; // reset counters
static void print(float cutoff = 0.01F) PRODUCT_RETURN; // cutoff in percent
};
#endif // SHARE_VM_INTERPRETER_BYTECODEHISTOGRAM_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeInterpreter.cpp
#include "classfile/vmSymbols.hpp"
#include "gc_interface/collectedHeap.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeInterpreter.hpp"
#include "interpreter/bytecodeInterpreter.inline.hpp"
#include "interpreter/bytecodeInterpreterProfiling.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "memory/resourceArea.hpp"
#include "oops/methodCounters.hpp"
#include "oops/objArrayKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/jvmtiThreadState.hpp"
#include "runtime/biasedLocking.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/interfaceSupport.hpp"
#include "runtime/orderAccess.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/threadCritical.hpp"
#include "utilities/exceptions.hpp"
#ifdef CC_INTERP
#undef USELABELS
#ifdef __GNUC__
// ASSERT signifies debugging. It is much easier to step through bytecodes if we
// don't use the computed goto approach.
#ifndef ASSERT
#define USELABELS
#endif
#endif
#undef CASE
// CASE/DEFAULT expand to computed-goto labels when USELABELS is defined,
// and to ordinary switch-case labels otherwise.
#ifdef USELABELS
#define CASE(opcode) opc ## opcode
#define DEFAULT opc_default
#else
#define CASE(opcode) case Bytecodes:: opcode
#define DEFAULT default
#endif
#undef PREFETCH_OPCCODE
// When defined, the next opcode is fetched before dispatching on the
// current one (see the CONTINUE/UPDATE_PC macro variants below).
#define PREFETCH_OPCCODE
// Interpreter safepoint: it is expected that the interpreter will have no
// handles of its own creation live at an interpreter safepoint. Therefore we
// run a HandleMarkCleaner and trash all handles allocated in the call chain
// since the JavaCalls::call_helper invocation that initiated the chain.
// There really shouldn't be any handles remaining to trash, but this is cheap
// in relation to a safepoint.
// Block in the VM if a safepoint is in progress. The inner scope runs a
// HandleMarkCleaner first so no stray handles survive across the safepoint.
#define SAFEPOINT \
if ( SafepointSynchronize::is_synchronizing()) { \
{ \
HandleMarkCleaner __hmc(THREAD); \
} \
CALL_VM(SafepointSynchronize::block(THREAD), handle_exception); \
}
// Raise a Java exception from interpreter state: note the trap for
// profiling, transition to VM, throw, and restore cached interpreter state.
// This variant does not jump to the exception handler afterwards.
#define VM_JAVA_ERROR_NO_JUMP(name, msg, note_a_trap) \
DECACHE_STATE(); \
SET_LAST_JAVA_FRAME(); \
{ \
InterpreterRuntime::note_a_trap(THREAD, istate->method(), BCI()); \
ThreadInVMfromJava trans(THREAD); \
Exceptions::_throw_msg(THREAD, __FILE__, __LINE__, name, msg); \
} \
RESET_LAST_JAVA_FRAME(); \
CACHE_STATE();
// As above, but also jump straight to the interpreter's exception handler.
#define VM_JAVA_ERROR(name, msg, note_a_trap) \
VM_JAVA_ERROR_NO_JUMP(name, msg, note_a_trap) \
goto handle_exception;
// Per-bytecode bookkeeping: update the global counter and histogram,
// honor -XX:StopInterpreterAt and -XX:+TraceBytecodes. No-op in PRODUCT.
#ifdef PRODUCT
#define DO_UPDATE_INSTRUCTION_COUNT(opcode)
#else
#define DO_UPDATE_INSTRUCTION_COUNT(opcode) \
{ \
BytecodeCounter::_counter_value++; \
BytecodeHistogram::_counters[(Bytecodes::Code)opcode]++; \
if (StopInterpreterAt && StopInterpreterAt == BytecodeCounter::_counter_value) os::breakpoint(); \
if (TraceBytecodes) { \
CALL_VM((void)SharedRuntime::trace_bytecode(THREAD, 0, \
topOfStack[Interpreter::expr_index_at(1)], \
topOfStack[Interpreter::expr_index_at(2)]), \
handle_exception); \
} \
}
#endif
#undef DEBUGGER_SINGLE_STEP_NOTIFY
#ifdef VM_JVMTI
// incremented. JvmtiExport::at_single_stepping_point() may cause a
// breakpoint opcode to get inserted at the current PC to allow the
// debugger to coalesce single-step events.
// As a result, if we call at_single_stepping_point() we refetch the opcode
// to get the current opcode. This will override any other prefetching
// that might have occurred.
// Notify JVMTI of a single-step point (when enabled), then handle any
// pop-frame or early-return request posted by the debugger, and refetch
// the (possibly replaced) opcode. No-op when VM_JVMTI is not defined.
#define DEBUGGER_SINGLE_STEP_NOTIFY() \
{ \
if (_jvmti_interp_events) { \
if (JvmtiExport::should_post_single_step()) { \
DECACHE_STATE(); \
SET_LAST_JAVA_FRAME(); \
ThreadInVMfromJava trans(THREAD); \
JvmtiExport::at_single_stepping_point(THREAD, \
istate->method(), \
pc); \
RESET_LAST_JAVA_FRAME(); \
CACHE_STATE(); \
if (THREAD->pop_frame_pending() && \
!THREAD->pop_frame_in_process()) { \
goto handle_Pop_Frame; \
} \
if (THREAD->jvmti_thread_state() && \
THREAD->jvmti_thread_state()->is_earlyret_pending()) { \
goto handle_Early_Return; \
} \
opcode = *pc; \
} \
} \
}
#else
#define DEBUGGER_SINGLE_STEP_NOTIFY()
#endif
#undef CONTINUE
// CONTINUE resumes the dispatch loop after the current bytecode: via a
// computed goto (USELABELS), or via the enclosing while-loop's 'continue',
// with or without opcode prefetch (PREFETCH_OPCCODE).
#ifdef USELABELS
#define DISPATCH(opcode) goto *(void*)dispatch_table[opcode]
#define CONTINUE { \
opcode = *pc; \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
DISPATCH(opcode); \
}
#else
#ifdef PREFETCH_OPCCODE
#define CONTINUE { \
opcode = *pc; \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
continue; \
}
#else
#define CONTINUE { \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
continue; \
}
#endif
#endif
// Advance the bytecode pointer by the size of the current instruction.
#define UPDATE_PC(opsize) {pc += opsize; }
#undef UPDATE_PC_AND_TOS
// Advance the bytecode pointer and adjust the top-of-stack by 'stack' slots.
#define UPDATE_PC_AND_TOS(opsize, stack) \
{pc += opsize; MORE_STACK(stack); }
#undef UPDATE_PC_AND_TOS_AND_CONTINUE
// The *_AND_CONTINUE variants additionally resume dispatch, in the form
// matching the dispatch strategy (computed goto vs. loop continue, with
// or without opcode prefetch).
#ifdef USELABELS
#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \
pc += opsize; opcode = *pc; MORE_STACK(stack); \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
DISPATCH(opcode); \
}
#define UPDATE_PC_AND_CONTINUE(opsize) { \
pc += opsize; opcode = *pc; \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
DISPATCH(opcode); \
}
#else
#ifdef PREFETCH_OPCCODE
#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \
pc += opsize; opcode = *pc; MORE_STACK(stack); \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
goto do_continue; \
}
#define UPDATE_PC_AND_CONTINUE(opsize) { \
pc += opsize; opcode = *pc; \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
goto do_continue; \
}
#else
#define UPDATE_PC_AND_TOS_AND_CONTINUE(opsize, stack) { \
pc += opsize; MORE_STACK(stack); \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
goto do_continue; \
}
#define UPDATE_PC_AND_CONTINUE(opsize) { \
pc += opsize; \
DO_UPDATE_INSTRUCTION_COUNT(opcode); \
DEBUGGER_SINGLE_STEP_NOTIFY(); \
goto do_continue; \
}
#endif /* PREFETCH_OPCCODE */
#endif /* USELABELS */
// Flush the cached top-of-stack and bcp back into istate, then return to
// the frame manager.
#define UPDATE_PC_AND_RETURN(opsize) \
DECACHE_TOS(); \
istate->set_bcp(pc+opsize); \
return;
#define METHOD istate->method()
// Fetch the method's MethodCounters, creating them via the VM if absent.
#define GET_METHOD_COUNTERS(res) \
res = METHOD->method_counters(); \
if (res == NULL) { \
CALL_VM(res = InterpreterRuntime::build_method_counters(THREAD, METHOD), handle_exception); \
}
// Ask the VM whether an OSR compilation exists/should start for this loop.
#define OSR_REQUEST(res, branch_pc) \
CALL_VM(res=InterpreterRuntime::frequency_counter_overflow(THREAD, branch_pc), handle_exception);
// On a backward branch (skip <= 0): bump the backedge counter, decide
// whether to request on-stack replacement, hand off to the OSR nmethod if
// one is ready, and finally poll for a safepoint.
#define DO_BACKEDGE_CHECKS(skip, branch_pc) \
if ((skip) <= 0) { \
MethodCounters* mcs; \
GET_METHOD_COUNTERS(mcs); \
if (UseLoopCounter) { \
bool do_OSR = UseOnStackReplacement; \
mcs->backedge_counter()->increment(); \
if (ProfileInterpreter) { \
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception); \
do_OSR = do_OSR \
&& (mdo_last_branch_taken_count >= (uint)InvocationCounter::InterpreterBackwardBranchLimit)\
&& (!(mdo_last_branch_taken_count & 1023)); \
} else { \
do_OSR = do_OSR \
&& mcs->invocation_counter()->reached_InvocationLimit(mcs->backedge_counter()); \
} \
if (do_OSR) { \
nmethod* osr_nmethod; \
OSR_REQUEST(osr_nmethod, branch_pc); \
if (osr_nmethod != NULL && osr_nmethod->osr_entry_bci() != InvalidOSREntryBci) { \
intptr_t* buf; \
CALL_VM_NAKED_LJF(buf=SharedRuntime::OSR_migration_begin(THREAD)); \
istate->set_msg(do_osr); \
istate->set_osr_buf((address)buf); \
istate->set_osr_entry(osr_nmethod->osr_entry()); \
return; \
} \
} \
} /* UseCompiler ... */ \
SAFEPOINT; \
}
// DECACHE_*/CACHE_* move interpreter state between local (register)
// variables and the istate structure, around calls into the VM.
#undef DECACHE_TOS
#undef CACHE_TOS
#undef CACHE_PREV_TOS
#define DECACHE_TOS() istate->set_stack(topOfStack);
#define CACHE_TOS() topOfStack = (intptr_t *)istate->stack();
#undef DECACHE_PC
#undef CACHE_PC
#define DECACHE_PC() istate->set_bcp(pc);
#define CACHE_PC() pc = istate->bcp();
#define CACHE_CP() cp = istate->constants();
#define CACHE_LOCALS() locals = istate->locals();
#undef CACHE_FRAME
#define CACHE_FRAME()
#undef BCI
// Current bytecode index: offset of pc from the method's code base.
#define BCI() ((int)(intptr_t)(pc - (intptr_t)istate->method()->code_base()))
#undef CHECK_NULL
// Throw NullPointerException (and jump to the handler) if obj_ is NULL;
// otherwise verify it is a well-formed oop in debug builds.
#define CHECK_NULL(obj_) \
if ((obj_) == NULL) { \
VM_JAVA_ERROR(vmSymbols::java_lang_NullPointerException(), NULL, note_nullCheck_trap); \
} \
VERIFY_OOP(obj_)
#define VMdoubleConstZero() 0.0
#define VMdoubleConstOne() 1.0
#define VMlongConstZero() (max_jlong-max_jlong)
#define VMlongConstOne() ((max_jlong-max_jlong)+1)
// Round val up to the next multiple of 4 (word alignment).
#define VMalignWordUp(val) (((uintptr_t)(val) + 3) & ~3)
#define DECACHE_STATE() DECACHE_PC(); DECACHE_TOS();
#define CACHE_STATE() \
CACHE_TOS(); \
CACHE_PC(); \
CACHE_CP(); \
CACHE_LOCALS();
// Call into the VM with the last-Java-frame set up, restoring cached
// state afterwards. "Naked" = no pop-frame/early-return/exception checks.
#define CALL_VM_NAKED_LJF(func) \
DECACHE_STATE(); \
SET_LAST_JAVA_FRAME(); \
func; \
RESET_LAST_JAVA_FRAME(); \
CACHE_STATE();
// As above, plus handling of debugger pop-frame and early-return requests
// (but no pending-exception check).
#define CALL_VM_NOCHECK(func) \
CALL_VM_NAKED_LJF(func) \
if (THREAD->pop_frame_pending() && \
!THREAD->pop_frame_in_process()) { \
goto handle_Pop_Frame; \
} \
if (THREAD->jvmti_thread_state() && \
THREAD->jvmti_thread_state()->is_earlyret_pending()) { \
goto handle_Early_Return; \
}
// Full VM call: also branch to 'label' if the call left a pending exception.
#define CALL_VM(func, label) { \
CALL_VM_NOCHECK(func); \
if (THREAD->has_pending_exception()) goto label; \
}
#if defined(VM_JVMTI)
void
BytecodeInterpreter::runWithChecks(interpreterState istate) {
#else
void
BytecodeInterpreter::run(interpreterState istate) {
#endif
static int initialized = 0;
static int checkit = 0;
static intptr_t* c_addr = NULL;
static intptr_t c_value;
if (checkit && *c_addr != c_value) {
os::breakpoint();
}
#ifdef VM_JVMTI
static bool _jvmti_interp_events = 0;
#endif
static int _compiling; // (UseCompiler || CountCompiledCalls)
#ifdef ASSERT
if (istate->_msg != initialize) {
if (EnableInvokeDynamic || FLAG_IS_CMDLINE(EnableInvokeDynamic)) {
assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + 1), "bad stack limit");
} else {
const int extra_stack_entries = Method::extra_stack_entries_for_jsr292;
assert(labs(istate->_stack_base - istate->_stack_limit) == (istate->_method->max_stack() + extra_stack_entries
+ 1), "bad stack limit");
}
#ifndef SHARK
IA32_ONLY(assert(istate->_stack_limit == istate->_thread->last_Java_sp() + 1, "wrong"));
#endif // !SHARK
}
interpreterState l = istate;
do {
assert(l == l->_self_link, "bad link");
l = l->_prev_link;
} while (l != NULL);
interpreterState orig = istate;
#endif
register intptr_t* topOfStack = (intptr_t *)istate->stack(); /* access with STACK macros */
register address pc = istate->bcp();
register jubyte opcode;
register intptr_t* locals = istate->locals();
register ConstantPoolCache* cp = istate->constants(); // method()->constants()->cache()
#ifdef LOTS_OF_REGS
register JavaThread* THREAD = istate->thread();
#else
#undef THREAD
#define THREAD istate->thread()
#endif
#ifdef USELABELS
const static void* const opclabels_data[256] = {
&&opc_invokehandle, &&opc_default, &&opc_default,
};
register uintptr_t *dispatch_table = (uintptr_t*)&opclabels_data[0];
#endif /* USELABELS */
#ifdef ASSERT
if (istate->msg() != initialize && ! METHOD->is_static()) {
oop rcvr = LOCALS_OBJECT(0);
VERIFY_OOP(rcvr);
}
#endif
#ifdef HACK
bool interesting = false;
#endif // HACK
guarantee(istate->msg() == initialize ||
topOfStack >= istate->stack_limit() &&
topOfStack < istate->stack_base(),
"Stack top out of range");
#ifdef CC_INTERP_PROFILE
uint mdo_last_branch_taken_count = 0;
#else
const uint mdo_last_branch_taken_count = 0;
#endif
switch (istate->msg()) {
case initialize: {
if (initialized++) ShouldNotReachHere(); // Only one initialize call.
_compiling = (UseCompiler || CountCompiledCalls);
#ifdef VM_JVMTI
_jvmti_interp_events = JvmtiExport::can_post_interpreter_events();
#endif
return;
}
break;
case method_entry: {
THREAD->set_do_not_unlock();
assert(initialized, "Interpreter not initialized");
if (_compiling) {
MethodCounters* mcs;
GET_METHOD_COUNTERS(mcs);
if (ProfileInterpreter) {
METHOD->increment_interpreter_invocation_count(THREAD);
}
mcs->invocation_counter()->increment();
if (mcs->invocation_counter()->reached_InvocationLimit(mcs->backedge_counter())) {
CALL_VM((void)InterpreterRuntime::frequency_counter_overflow(THREAD, NULL), handle_exception);
}
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);
SAFEPOINT;
}
if ((istate->_stack_base - istate->_stack_limit) != istate->method()->max_stack() + 1) {
os::breakpoint();
}
#ifdef HACK
{
ResourceMark rm;
char *method_name = istate->method()->name_and_sig_as_C_string();
if (strstr(method_name, "runThese$TestRunner.run()V") != NULL) {
tty->print_cr("entering: depth %d bci: %d",
(istate->_stack_base - istate->_stack),
istate->_bcp - istate->_method->code_base());
interesting = true;
}
}
#endif // HACK
if (METHOD->is_synchronized()) {
oop rcvr;
if (METHOD->is_static()) {
rcvr = METHOD->constants()->pool_holder()->java_mirror();
} else {
rcvr = LOCALS_OBJECT(0);
VERIFY_OOP(rcvr);
}
BasicObjectLock* mon = &istate->monitor_base()[-1];
mon->set_obj(rcvr);
bool success = false;
uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
markOop mark = rcvr->mark();
intptr_t hash = (intptr_t) markOopDesc::no_hash;
if (mark->has_bias_pattern()) {
uintptr_t thread_ident;
uintptr_t anticipated_bias_locking_value;
thread_ident = (uintptr_t)istate->thread();
anticipated_bias_locking_value =
(((uintptr_t)rcvr->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
~((uintptr_t) markOopDesc::age_mask_in_place);
if (anticipated_bias_locking_value == 0) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::biased_lock_entry_count_addr())++;
}
success = true;
} else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
markOop header = rcvr->klass()->prototype_header();
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics)
(*BiasedLocking::revoked_lock_entry_count_addr())++;
}
} else if ((anticipated_bias_locking_value & epoch_mask_in_place) != 0) {
markOop new_header = (markOop) ( (intptr_t) rcvr->klass()->prototype_header() | thread_ident);
if (hash != markOopDesc::no_hash) {
new_header = new_header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::rebiased_lock_entry_count_addr())++;
}
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
}
success = true;
} else {
markOop header = (markOop) ((uintptr_t) mark &
((uintptr_t)markOopDesc::biased_lock_mask_in_place |
(uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
DEBUG_ONLY(mon->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), header) == header) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
}
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
}
success = true;
}
}
if (!success) {
markOop displaced = rcvr->mark()->set_unlocked();
mon->lock()->set_displaced_header(displaced);
bool call_vm = UseHeavyMonitors;
if (call_vm || Atomic::cmpxchg_ptr(mon, rcvr->mark_addr(), displaced) != displaced) {
if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
mon->lock()->set_displaced_header(NULL);
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
}
}
}
}
THREAD->clr_do_not_unlock();
#ifdef VM_JVMTI
if (_jvmti_interp_events) {
if (THREAD->is_interp_only_mode()) {
CALL_VM(InterpreterRuntime::post_method_entry(THREAD),
handle_exception);
}
}
#endif /* VM_JVMTI */
goto run;
}
case popping_frame: {
assert(THREAD->pop_frame_in_process(), "wrong frame pop state");
istate->set_msg(no_request);
if (_compiling) {
SET_MDX(NULL);
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);
}
THREAD->clr_pop_frame_in_process();
goto run;
}
case method_resume: {
if ((istate->_stack_base - istate->_stack_limit) != istate->method()->max_stack() + 1) {
os::breakpoint();
}
#ifdef HACK
{
ResourceMark rm;
char *method_name = istate->method()->name_and_sig_as_C_string();
if (strstr(method_name, "runThese$TestRunner.run()V") != NULL) {
tty->print_cr("resume: depth %d bci: %d",
(istate->_stack_base - istate->_stack) ,
istate->_bcp - istate->_method->code_base());
interesting = true;
}
}
#endif // HACK
if (THREAD->pop_frame_pending() && !THREAD->pop_frame_in_process()) {
goto handle_Pop_Frame;
}
if (THREAD->jvmti_thread_state() &&
THREAD->jvmti_thread_state()->is_earlyret_pending()) {
goto handle_Early_Return;
}
if (THREAD->has_pending_exception()) goto handle_exception;
UPDATE_PC(istate->bcp_advance());
if (_compiling) {
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);
}
goto run;
}
case deopt_resume2: {
if (_compiling) {
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);
}
goto run;
}
case deopt_resume: {
if ( Bytecodes::code_at(METHOD, pc) == Bytecodes::_return_register_finalizer) {
goto handle_return;
}
UPDATE_PC(Bytecodes::length_at(METHOD, pc));
if (THREAD->has_pending_exception()) goto handle_exception;
if (_compiling) {
BI_PROFILE_GET_OR_CREATE_METHOD_DATA(handle_exception);
}
goto run;
}
case got_monitors: {
oop lockee = STACK_OBJECT(-1);
VERIFY_OOP(lockee);
BasicObjectLock* entry = (BasicObjectLock*) istate->stack_base();
assert(entry->obj() == NULL, "Frame manager didn't allocate the monitor");
entry->set_obj(lockee);
bool success = false;
uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
markOop mark = lockee->mark();
intptr_t hash = (intptr_t) markOopDesc::no_hash;
if (mark->has_bias_pattern()) {
uintptr_t thread_ident;
uintptr_t anticipated_bias_locking_value;
thread_ident = (uintptr_t)istate->thread();
anticipated_bias_locking_value =
(((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
~((uintptr_t) markOopDesc::age_mask_in_place);
if (anticipated_bias_locking_value == 0) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::biased_lock_entry_count_addr())++;
}
success = true;
} else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
markOop header = lockee->klass()->prototype_header();
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics) {
(*BiasedLocking::revoked_lock_entry_count_addr())++;
}
}
} else if ((anticipated_bias_locking_value & epoch_mask_in_place) !=0) {
markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);
if (hash != markOopDesc::no_hash) {
new_header = new_header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::rebiased_lock_entry_count_addr())++;
}
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
success = true;
} else {
markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |
(uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
}
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
success = true;
}
}
if (!success) {
markOop displaced = lockee->mark()->set_unlocked();
entry->lock()->set_displaced_header(displaced);
bool call_vm = UseHeavyMonitors;
if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
entry->lock()->set_displaced_header(NULL);
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
}
}
UPDATE_PC_AND_TOS(1, -1);
goto run;
}
default: {
fatal("Unexpected message from frame manager");
}
}
run:
DO_UPDATE_INSTRUCTION_COUNT(*pc)
DEBUGGER_SINGLE_STEP_NOTIFY();
#ifdef PREFETCH_OPCCODE
opcode = *pc; /* prefetch first opcode */
#endif
#ifndef USELABELS
while (1)
#endif
{
#ifndef PREFETCH_OPCCODE
opcode = *pc;
#endif
opcode_switch:
assert(istate == orig, "Corrupted istate");
assert(topOfStack >= istate->stack_limit(), "Stack overrun");
assert(topOfStack < istate->stack_base(), "Stack underrun");
#ifdef USELABELS
DISPATCH(opcode);
#else
switch (opcode)
#endif
{
CASE(_nop):
UPDATE_PC_AND_CONTINUE(1);
CASE(_aconst_null):
SET_STACK_OBJECT(NULL, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
#undef OPC_CONST_n
#define OPC_CONST_n(opcode, const_type, value) \
CASE(opcode): \
SET_STACK_ ## const_type(value, 0); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
OPC_CONST_n(_iconst_m1, INT, -1);
OPC_CONST_n(_iconst_0, INT, 0);
OPC_CONST_n(_iconst_1, INT, 1);
OPC_CONST_n(_iconst_2, INT, 2);
OPC_CONST_n(_iconst_3, INT, 3);
OPC_CONST_n(_iconst_4, INT, 4);
OPC_CONST_n(_iconst_5, INT, 5);
OPC_CONST_n(_fconst_0, FLOAT, 0.0);
OPC_CONST_n(_fconst_1, FLOAT, 1.0);
OPC_CONST_n(_fconst_2, FLOAT, 2.0);
#undef OPC_CONST2_n
#define OPC_CONST2_n(opcname, value, key, kind) \
CASE(_##opcname): \
{ \
SET_STACK_ ## kind(VM##key##Const##value(), 1); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2); \
}
OPC_CONST2_n(dconst_0, Zero, double, DOUBLE);
OPC_CONST2_n(dconst_1, One, double, DOUBLE);
OPC_CONST2_n(lconst_0, Zero, long, LONG);
OPC_CONST2_n(lconst_1, One, long, LONG);
CASE(_bipush):
SET_STACK_INT((jbyte)(pc[1]), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);
CASE(_sipush):
SET_STACK_INT((int16_t)Bytes::get_Java_u2(pc + 1), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
CASE(_aload):
VERIFY_OOP(LOCALS_OBJECT(pc[1]));
SET_STACK_OBJECT(LOCALS_OBJECT(pc[1]), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);
CASE(_iload):
CASE(_fload):
SET_STACK_SLOT(LOCALS_SLOT(pc[1]), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, 1);
CASE(_lload):
SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(pc[1]), 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, 2);
CASE(_dload):
SET_STACK_DOUBLE_FROM_ADDR(LOCALS_DOUBLE_AT(pc[1]), 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, 2);
#undef OPC_LOAD_n
#define OPC_LOAD_n(num) \
CASE(_aload_##num): \
VERIFY_OOP(LOCALS_OBJECT(num)); \
SET_STACK_OBJECT(LOCALS_OBJECT(num), 0); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1); \
\
CASE(_iload_##num): \
CASE(_fload_##num): \
SET_STACK_SLOT(LOCALS_SLOT(num), 0); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1); \
\
CASE(_lload_##num): \
SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(num), 1); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2); \
CASE(_dload_##num): \
SET_STACK_DOUBLE_FROM_ADDR(LOCALS_DOUBLE_AT(num), 1); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
OPC_LOAD_n(0);
OPC_LOAD_n(1);
OPC_LOAD_n(2);
OPC_LOAD_n(3);
CASE(_astore):
astore(topOfStack, -1, locals, pc[1]);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, -1);
CASE(_istore):
CASE(_fstore):
SET_LOCALS_SLOT(STACK_SLOT(-1), pc[1]);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, -1);
CASE(_lstore):
SET_LOCALS_LONG(STACK_LONG(-1), pc[1]);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, -2);
CASE(_dstore):
SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), pc[1]);
UPDATE_PC_AND_TOS_AND_CONTINUE(2, -2);
CASE(_wide): {
uint16_t reg = Bytes::get_Java_u2(pc + 2);
opcode = pc[1];
DO_UPDATE_INSTRUCTION_COUNT(opcode);
switch(opcode) {
case Bytecodes::_aload:
VERIFY_OOP(LOCALS_OBJECT(reg));
SET_STACK_OBJECT(LOCALS_OBJECT(reg), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, 1);
case Bytecodes::_iload:
case Bytecodes::_fload:
SET_STACK_SLOT(LOCALS_SLOT(reg), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, 1);
case Bytecodes::_lload:
SET_STACK_LONG_FROM_ADDR(LOCALS_LONG_AT(reg), 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, 2);
case Bytecodes::_dload:
SET_STACK_DOUBLE_FROM_ADDR(LOCALS_LONG_AT(reg), 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, 2);
case Bytecodes::_astore:
astore(topOfStack, -1, locals, reg);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, -1);
case Bytecodes::_istore:
case Bytecodes::_fstore:
SET_LOCALS_SLOT(STACK_SLOT(-1), reg);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, -1);
case Bytecodes::_lstore:
SET_LOCALS_LONG(STACK_LONG(-1), reg);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, -2);
case Bytecodes::_dstore:
SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), reg);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, -2);
case Bytecodes::_iinc: {
int16_t offset = (int16_t)Bytes::get_Java_u2(pc+4);
SET_LOCALS_INT(LOCALS_INT(reg) + offset, reg);
UPDATE_PC_AND_CONTINUE(6);
}
case Bytecodes::_ret:
BI_PROFILE_UPDATE_RET(/*bci=*/((int)(intptr_t)(LOCALS_ADDR(reg))));
pc = istate->method()->code_base() + (intptr_t)(LOCALS_ADDR(reg));
UPDATE_PC_AND_CONTINUE(0);
default:
VM_JAVA_ERROR(vmSymbols::java_lang_InternalError(), "undefined opcode", note_no_trap);
}
}
#undef OPC_STORE_n
#define OPC_STORE_n(num) \
CASE(_astore_##num): \
astore(topOfStack, -1, locals, num); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
CASE(_istore_##num): \
CASE(_fstore_##num): \
SET_LOCALS_SLOT(STACK_SLOT(-1), num); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
OPC_STORE_n(0);
OPC_STORE_n(1);
OPC_STORE_n(2);
OPC_STORE_n(3);
#undef OPC_DSTORE_n
#define OPC_DSTORE_n(num) \
CASE(_dstore_##num): \
SET_LOCALS_DOUBLE(STACK_DOUBLE(-1), num); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \
CASE(_lstore_##num): \
SET_LOCALS_LONG(STACK_LONG(-1), num); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2);
OPC_DSTORE_n(0);
OPC_DSTORE_n(1);
OPC_DSTORE_n(2);
OPC_DSTORE_n(3);
CASE(_pop): /* Discard the top item on the stack */
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
CASE(_pop2): /* Discard the top 2 items on the stack */
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2);
CASE(_dup): /* Duplicate the top item on the stack */
dup(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
CASE(_dup2): /* Duplicate the top 2 items on the stack */
dup2(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
CASE(_dup_x1): /* insert top word two down */
dup_x1(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
CASE(_dup_x2): /* insert top word three down */
dup_x2(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
CASE(_dup2_x1): /* insert top 2 slots three down */
dup2_x1(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
CASE(_dup2_x2): /* insert top 2 slots four down */
dup2_x2(topOfStack);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
CASE(_swap): { /* swap top two elements on the stack */
swap(topOfStack);
UPDATE_PC_AND_CONTINUE(1);
}
// Integer binary ops: expands to the i<op> and l<op> handlers (add/sub/mul/
// and/or/xor/div/rem below). `test` is nonzero for div/rem, enabling the
// divide-by-zero check that throws ArithmeticException.
// int form: operands at -2/-1, result replaces slot -2, pops one slot.
// long form: two-slot operands at -3 and -1, result stored at -3, pops two.
#undef OPC_INT_BINARY
#define OPC_INT_BINARY(opcname, opname, test) \
CASE(_i##opcname): \
if (test && (STACK_INT(-1) == 0)) { \
VM_JAVA_ERROR(vmSymbols::java_lang_ArithmeticException(), \
"/ by zero", note_div0Check_trap); \
} \
SET_STACK_INT(VMint##opname(STACK_INT(-2), \
STACK_INT(-1)), \
-2); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
CASE(_l##opcname): \
{ \
if (test) { \
jlong l1 = STACK_LONG(-1); \
if (VMlongEqz(l1)) { \
VM_JAVA_ERROR(vmSymbols::java_lang_ArithmeticException(), \
"/ by long zero", note_div0Check_trap); \
} \
} \
SET_STACK_LONG(VMlong##opname(STACK_LONG(-3), \
STACK_LONG(-1)), \
-3); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \
}
OPC_INT_BINARY(add, Add, 0);
OPC_INT_BINARY(sub, Sub, 0);
OPC_INT_BINARY(mul, Mul, 0);
OPC_INT_BINARY(and, And, 0);
OPC_INT_BINARY(or, Or, 0);
OPC_INT_BINARY(xor, Xor, 0);
OPC_INT_BINARY(div, Div, 1);
OPC_INT_BINARY(rem, Rem, 1);
// Floating-point binary ops: expands to the d<op> and f<op> handlers
// (add/sub/mul/div/rem below). No zero-divisor test parameter here —
// floating-point division does not throw.
// double form: two-slot operands at -3 and -1, result at -3, pops two slots.
// float form: operands at -2/-1, result at -2, pops one slot.
#undef OPC_FLOAT_BINARY
#define OPC_FLOAT_BINARY(opcname, opname) \
CASE(_d##opcname): { \
SET_STACK_DOUBLE(VMdouble##opname(STACK_DOUBLE(-3), \
STACK_DOUBLE(-1)), \
-3); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -2); \
} \
CASE(_f##opcname): \
SET_STACK_FLOAT(VMfloat##opname(STACK_FLOAT(-2), \
STACK_FLOAT(-1)), \
-2); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
OPC_FLOAT_BINARY(add, Add);
OPC_FLOAT_BINARY(sub, Sub);
OPC_FLOAT_BINARY(mul, Mul);
OPC_FLOAT_BINARY(div, Div);
OPC_FLOAT_BINARY(rem, Rem);
// Shift ops: expands to the i<op> and l<op> handlers (shl/shr/ushr below).
// In both forms the shift count is an int in the top slot (-1).
// int form: value at -2, result replaces -2, pops one slot.
// long form: two-slot value read via STACK_LONG(-2), result written back at
// -2 (same two slots), pops only the count slot.
#undef OPC_SHIFT_BINARY
#define OPC_SHIFT_BINARY(opcname, opname) \
CASE(_i##opcname): \
SET_STACK_INT(VMint##opname(STACK_INT(-2), \
STACK_INT(-1)), \
-2); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
CASE(_l##opcname): \
{ \
SET_STACK_LONG(VMlong##opname(STACK_LONG(-2), \
STACK_INT(-1)), \
-2); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
}
OPC_SHIFT_BINARY(shl, Shl);
OPC_SHIFT_BINARY(shr, Shr);
OPC_SHIFT_BINARY(ushr, Ushr);
CASE(_iinc):
{
SET_LOCALS_INT(LOCALS_INT(pc[1]) + (jbyte)(pc[2]), pc[1]);
UPDATE_PC_AND_CONTINUE(3);
}
CASE(_ineg):
SET_STACK_INT(VMintNeg(STACK_INT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_fneg):
SET_STACK_FLOAT(VMfloatNeg(STACK_FLOAT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_lneg):
{
SET_STACK_LONG(VMlongNeg(STACK_LONG(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
}
CASE(_dneg):
{
SET_STACK_DOUBLE(VMdoubleNeg(STACK_DOUBLE(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
}
CASE(_i2f): /* convert top of stack int to float */
SET_STACK_FLOAT(VMint2Float(STACK_INT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_i2l): /* convert top of stack int to long */
{
jlong r = VMint2Long(STACK_INT(-1));
MORE_STACK(-1); // Pop
SET_STACK_LONG(r, 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_i2d): /* convert top of stack int to double */
{
jdouble r = (jlong)STACK_INT(-1);
MORE_STACK(-1); // Pop
SET_STACK_DOUBLE(r, 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_l2i): /* convert top of stack long to int */
{
jint r = VMlong2Int(STACK_LONG(-1));
MORE_STACK(-2); // Pop
SET_STACK_INT(r, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_l2f): /* convert top of stack long to float */
{
jlong r = STACK_LONG(-1);
MORE_STACK(-2); // Pop
SET_STACK_FLOAT(VMlong2Float(r), 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_l2d): /* convert top of stack long to double */
{
jlong r = STACK_LONG(-1);
MORE_STACK(-2); // Pop
SET_STACK_DOUBLE(VMlong2Double(r), 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_f2i): /* Convert top of stack float to int */
SET_STACK_INT(SharedRuntime::f2i(STACK_FLOAT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_f2l): /* convert top of stack float to long */
{
jlong r = SharedRuntime::f2l(STACK_FLOAT(-1));
MORE_STACK(-1); // POP
SET_STACK_LONG(r, 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_f2d): /* convert top of stack float to double */
{
jfloat f;
jdouble r;
f = STACK_FLOAT(-1);
r = (jdouble) f;
MORE_STACK(-1); // POP
SET_STACK_DOUBLE(r, 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_d2i): /* convert top of stack double to int */
{
jint r1 = SharedRuntime::d2i(STACK_DOUBLE(-1));
MORE_STACK(-2);
SET_STACK_INT(r1, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_d2f): /* convert top of stack double to float */
{
jfloat r1 = VMdouble2Float(STACK_DOUBLE(-1));
MORE_STACK(-2);
SET_STACK_FLOAT(r1, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_d2l): /* convert top of stack double to long */
{
jlong r1 = SharedRuntime::d2l(STACK_DOUBLE(-1));
MORE_STACK(-2);
SET_STACK_LONG(r1, 1);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 2);
}
CASE(_i2b):
SET_STACK_INT(VMint2Byte(STACK_INT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_i2c):
SET_STACK_INT(VMint2Char(STACK_INT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
CASE(_i2s):
SET_STACK_INT(VMint2Short(STACK_INT(-1)), -1);
UPDATE_PC_AND_CONTINUE(1);
// Conditional-branch handlers: if_icmp<cond> (two ints popped) and if<cond>
// (one int compared against zero). A taken branch uses the signed 16-bit
// offset following the opcode; a not-taken branch falls through 3 bytes.
// branch_pc is saved before the pc update so DO_BACKEDGE_CHECKS can examine
// the branch origin; BI_PROFILE_UPDATE_BRANCH records taken/not-taken.
#define COMPARISON_OP(name, comparison) \
CASE(_if_icmp##name): { \
const bool cmp = (STACK_INT(-2) comparison STACK_INT(-1)); \
int skip = cmp \
? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \
address branch_pc = pc; \
BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \
UPDATE_PC_AND_TOS(skip, -2); \
DO_BACKEDGE_CHECKS(skip, branch_pc); \
CONTINUE; \
} \
CASE(_if##name): { \
const bool cmp = (STACK_INT(-1) comparison 0); \
int skip = cmp \
? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \
address branch_pc = pc; \
BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \
UPDATE_PC_AND_TOS(skip, -1); \
DO_BACKEDGE_CHECKS(skip, branch_pc); \
CONTINUE; \
}
// Extends COMPARISON_OP with the reference form if_acmp<cond> (used for
// eq/ne only, per the instantiations below): pops two object references and
// compares them with the raw comparison operator.
#define COMPARISON_OP2(name, comparison) \
COMPARISON_OP(name, comparison) \
CASE(_if_acmp##name): { \
const bool cmp = (STACK_OBJECT(-2) comparison STACK_OBJECT(-1)); \
int skip = cmp \
? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \
address branch_pc = pc; \
BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \
UPDATE_PC_AND_TOS(skip, -2); \
DO_BACKEDGE_CHECKS(skip, branch_pc); \
CONTINUE; \
}
// ifnonnull: branch when the popped reference is not NULL. Same branch /
// fall-through / backedge-check shape as COMPARISON_OP.
#define NULL_COMPARISON_NOT_OP(name) \
CASE(_if##name): { \
const bool cmp = (!(STACK_OBJECT(-1) == NULL)); \
int skip = cmp \
? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \
address branch_pc = pc; \
BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \
UPDATE_PC_AND_TOS(skip, -1); \
DO_BACKEDGE_CHECKS(skip, branch_pc); \
CONTINUE; \
}
// ifnull: branch when the popped reference is NULL. Same branch /
// fall-through / backedge-check shape as COMPARISON_OP.
#define NULL_COMPARISON_OP(name) \
CASE(_if##name): { \
const bool cmp = ((STACK_OBJECT(-1) == NULL)); \
int skip = cmp \
? (int16_t)Bytes::get_Java_u2(pc + 1) : 3; \
address branch_pc = pc; \
BI_PROFILE_UPDATE_BRANCH(/*is_taken=*/cmp); \
UPDATE_PC_AND_TOS(skip, -1); \
DO_BACKEDGE_CHECKS(skip, branch_pc); \
CONTINUE; \
}
COMPARISON_OP(lt, <);
COMPARISON_OP(gt, >);
COMPARISON_OP(le, <=);
COMPARISON_OP(ge, >=);
COMPARISON_OP2(eq, ==); /* include ref comparison */
COMPARISON_OP2(ne, !=); /* include ref comparison */
NULL_COMPARISON_OP(null);
NULL_COMPARISON_NOT_OP(nonnull);
CASE(_tableswitch): {
jint* lpc = (jint*)VMalignWordUp(pc+1);
int32_t key = STACK_INT(-1);
int32_t low = Bytes::get_Java_u4((address)&lpc[1]);
int32_t high = Bytes::get_Java_u4((address)&lpc[2]);
int32_t skip;
key -= low;
if (((uint32_t) key > (uint32_t)(high - low))) {
key = -1;
skip = Bytes::get_Java_u4((address)&lpc[0]);
} else {
skip = Bytes::get_Java_u4((address)&lpc[key + 3]);
}
BI_PROFILE_UPDATE_SWITCH(/*switch_index=*/key);
address branch_pc = pc;
UPDATE_PC_AND_TOS(skip, -1);
DO_BACKEDGE_CHECKS(skip, branch_pc);
CONTINUE;
}
CASE(_lookupswitch): {
jint* lpc = (jint*)VMalignWordUp(pc+1);
int32_t key = STACK_INT(-1);
int32_t skip = Bytes::get_Java_u4((address) lpc); /* default amount */
int index = -1;
int newindex = 0;
int32_t npairs = Bytes::get_Java_u4((address) &lpc[1]);
while (--npairs >= 0) {
lpc += 2;
if (key == (int32_t)Bytes::get_Java_u4((address)lpc)) {
skip = Bytes::get_Java_u4((address)&lpc[1]);
index = newindex;
break;
}
newindex += 1;
}
BI_PROFILE_UPDATE_SWITCH(/*switch_index=*/index);
address branch_pc = pc;
UPDATE_PC_AND_TOS(skip, -1);
DO_BACKEDGE_CHECKS(skip, branch_pc);
CONTINUE;
}
CASE(_fcmpl):
CASE(_fcmpg):
{
SET_STACK_INT(VMfloatCompare(STACK_FLOAT(-2),
STACK_FLOAT(-1),
(opcode == Bytecodes::_fcmpl ? -1 : 1)),
-2);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
}
CASE(_dcmpl):
CASE(_dcmpg):
{
int r = VMdoubleCompare(STACK_DOUBLE(-3),
STACK_DOUBLE(-1),
(opcode == Bytecodes::_dcmpl ? -1 : 1));
MORE_STACK(-4); // Pop
SET_STACK_INT(r, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_lcmp):
{
int r = VMlongCompare(STACK_LONG(-3), STACK_LONG(-1));
MORE_STACK(-4);
SET_STACK_INT(r, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, 1);
}
CASE(_areturn):
CASE(_ireturn):
CASE(_freturn):
{
SAFEPOINT;
goto handle_return;
}
CASE(_lreturn):
CASE(_dreturn):
{
SAFEPOINT;
goto handle_return;
}
CASE(_return_register_finalizer): {
oop rcvr = LOCALS_OBJECT(0);
VERIFY_OOP(rcvr);
if (rcvr->klass()->has_finalizer()) {
CALL_VM(InterpreterRuntime::register_finalizer(THREAD, rcvr), handle_exception);
}
goto handle_return;
}
CASE(_return): {
SAFEPOINT;
goto handle_return;
}
// Common prologue for all array load/store handlers. Declares `arrObj`
// (array ref at stack offset arrayOff) and `index` (int at arrayOff + 1),
// null-checks the array, and range-checks the index, throwing
// ArrayIndexOutOfBoundsException with the offending index as the message.
// The unsigned compare folds the index < 0 and index >= length checks into one.
#define ARRAY_INTRO(arrayOff) \
arrayOop arrObj = (arrayOop)STACK_OBJECT(arrayOff); \
jint index = STACK_INT(arrayOff + 1); \
char message[jintAsStringSize]; \
CHECK_NULL(arrObj); \
if ((uint32_t)index >= (uint32_t)arrObj->length()) { \
sprintf(message, "%d", index); \
VM_JAVA_ERROR(vmSymbols::java_lang_ArrayIndexOutOfBoundsException(), \
message, note_rangeCheck_trap); \
}
// Load a 32-bit-category array element onto the operand stack (iaload,
// faload, baload, caload, saload below). Stack: arrayref at -2, index at -1.
// The element is read via the array base plus scaled index and written into
// slot -2 (replacing the arrayref); one slot is popped. `format` and `extra`
// are unused here (kept for symmetry with the other array macros).
#define ARRAY_LOADTO32(T, T2, format, stackRes, extra) \
{ \
ARRAY_INTRO(-2); \
(void)extra; \
SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), \
-2); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1); \
}
// Load a 64-bit array element (laload, daload below). Stack: arrayref at -2,
// index at -1. The two input slots are replaced by the two-slot result
// (stored at -1), so the stack depth is unchanged and only pc advances.
#define ARRAY_LOADTO64(T,T2, stackRes, extra) \
{ \
ARRAY_INTRO(-2); \
SET_ ## stackRes(*(T2 *)(((address) arrObj->base(T)) + index * sizeof(T2)), -1); \
(void)extra; \
UPDATE_PC_AND_CONTINUE(1); \
}
CASE(_iaload):
ARRAY_LOADTO32(T_INT, jint, "%d", STACK_INT, 0);
CASE(_faload):
ARRAY_LOADTO32(T_FLOAT, jfloat, "%f", STACK_FLOAT, 0);
CASE(_aaload): {
ARRAY_INTRO(-2);
SET_STACK_OBJECT(((objArrayOop) arrObj)->obj_at(index), -2);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
}
CASE(_baload):
ARRAY_LOADTO32(T_BYTE, jbyte, "%d", STACK_INT, 0);
CASE(_caload):
ARRAY_LOADTO32(T_CHAR, jchar, "%d", STACK_INT, 0);
CASE(_saload):
ARRAY_LOADTO32(T_SHORT, jshort, "%d", STACK_INT, 0);
CASE(_laload):
ARRAY_LOADTO64(T_LONG, jlong, STACK_LONG, 0);
CASE(_daload):
ARRAY_LOADTO64(T_DOUBLE, jdouble, STACK_DOUBLE, 0);
// Store a 32-bit-category value into an array element (iastore, fastore,
// castore, sastore below; bastore has its own CASE). Stack layout:
// arrayref at -3, index at -2, value at -1. ARRAY_INTRO performs the null
// and bounds checks (throwing ArrayIndexOutOfBoundsException), then the
// element is written and all three slots are popped. `format` and `extra`
// are unused (kept for signature symmetry with the load macros).
// FIX: the previous macro body omitted the store itself — the checks ran
// and the operands were popped, but the array element was never written.
#define ARRAY_STOREFROM32(T, T2, format, stackSrc, extra) \
{ \
ARRAY_INTRO(-3); \
(void)extra; \
*(T2*)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc(-1); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3); \
}
// Store a 64-bit value into an array element (lastore, dastore below).
// Stack layout: arrayref at -4, index at -3, two-slot value at -2/-1.
// ARRAY_INTRO performs the null and bounds checks, then the element is
// written and all four slots are popped. `extra` is unused.
// FIX: the previous macro body omitted the store itself — the checks ran
// and the operands were popped, but the array element was never written.
#define ARRAY_STOREFROM64(T, T2, stackSrc, extra) \
{ \
ARRAY_INTRO(-4); \
(void)extra; \
*(T2*)(((address) arrObj->base(T)) + index * sizeof(T2)) = stackSrc(-1); \
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -4); \
}
CASE(_iastore):
ARRAY_STOREFROM32(T_INT, jint, "%d", STACK_INT, 0);
CASE(_fastore):
ARRAY_STOREFROM32(T_FLOAT, jfloat, "%f", STACK_FLOAT, 0);
CASE(_aastore): {
oop rhsObject = STACK_OBJECT(-1);
VERIFY_OOP(rhsObject);
ARRAY_INTRO( -3);
if (rhsObject != NULL) {
Klass* rhsKlass = rhsObject->klass(); // EBX (subclass)
Klass* elemKlass = ObjArrayKlass::cast(arrObj->klass())->element_klass(); // superklass EAX
if (rhsKlass != elemKlass && !rhsKlass->is_subtype_of(elemKlass)) { // ebx->is...
BI_PROFILE_SUBTYPECHECK_FAILED(rhsKlass);
VM_JAVA_ERROR(vmSymbols::java_lang_ArrayStoreException(), "", note_arrayCheck_trap);
}
BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/false, rhsKlass);
} else {
BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/true, NULL);
}
((objArrayOop) arrObj)->obj_at_put(index, rhsObject);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3);
}
CASE(_bastore): {
ARRAY_INTRO(-3);
int item = STACK_INT(-1);
if (arrObj->klass() == Universe::boolArrayKlassObj()) {
item &= 1;
} else {
assert(arrObj->klass() == Universe::byteArrayKlassObj(),
"should be byte array otherwise");
}
((typeArrayOop)arrObj)->byte_at_put(index, item);
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -3);
}
CASE(_castore):
ARRAY_STOREFROM32(T_CHAR, jchar, "%d", STACK_INT, 0);
CASE(_sastore):
ARRAY_STOREFROM32(T_SHORT, jshort, "%d", STACK_INT, 0);
CASE(_lastore):
ARRAY_STOREFROM64(T_LONG, jlong, STACK_LONG, 0);
CASE(_dastore):
ARRAY_STOREFROM64(T_DOUBLE, jdouble, STACK_DOUBLE, 0);
CASE(_arraylength):
{
arrayOop ary = (arrayOop) STACK_OBJECT(-1);
CHECK_NULL(ary);
SET_STACK_INT(ary->length(), -1);
UPDATE_PC_AND_CONTINUE(1);
}
CASE(_monitorenter): {
oop lockee = STACK_OBJECT(-1);
CHECK_NULL(lockee);
BasicObjectLock* limit = istate->monitor_base();
BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();
BasicObjectLock* entry = NULL;
while (most_recent != limit ) {
if (most_recent->obj() == NULL) entry = most_recent;
else if (most_recent->obj() == lockee) break;
most_recent++;
}
if (entry != NULL) {
entry->set_obj(lockee);
int success = false;
uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
markOop mark = lockee->mark();
intptr_t hash = (intptr_t) markOopDesc::no_hash;
if (mark->has_bias_pattern()) {
uintptr_t thread_ident;
uintptr_t anticipated_bias_locking_value;
thread_ident = (uintptr_t)istate->thread();
anticipated_bias_locking_value =
(((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
~((uintptr_t) markOopDesc::age_mask_in_place);
if (anticipated_bias_locking_value == 0) {
if (PrintBiasedLockingStatistics) {
(* BiasedLocking::biased_lock_entry_count_addr())++;
}
success = true;
}
else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
markOop header = lockee->klass()->prototype_header();
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics)
(*BiasedLocking::revoked_lock_entry_count_addr())++;
}
}
else if ((anticipated_bias_locking_value & epoch_mask_in_place) !=0) {
markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);
if (hash != markOopDesc::no_hash) {
new_header = new_header->copy_set_hash(hash);
}
if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {
if (PrintBiasedLockingStatistics)
(* BiasedLocking::rebiased_lock_entry_count_addr())++;
}
else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
success = true;
}
else {
markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |
(uintptr_t)markOopDesc::age_mask_in_place |
epoch_mask_in_place));
if (hash != markOopDesc::no_hash) {
header = header->copy_set_hash(hash);
}
markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {
if (PrintBiasedLockingStatistics)
(* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
}
else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
success = true;
}
}
if (!success) {
markOop displaced = lockee->mark()->set_unlocked();
entry->lock()->set_displaced_header(displaced);
bool call_vm = UseHeavyMonitors;
if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
entry->lock()->set_displaced_header(NULL);
} else {
CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
}
}
}
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
} else {
istate->set_msg(more_monitors);
UPDATE_PC_AND_RETURN(0); // Re-execute
}
}
CASE(_monitorexit): {
oop lockee = STACK_OBJECT(-1);
CHECK_NULL(lockee);
BasicObjectLock* limit = istate->monitor_base();
BasicObjectLock* most_recent = (BasicObjectLock*) istate->stack_base();
while (most_recent != limit ) {
if ((most_recent)->obj() == lockee) {
BasicLock* lock = most_recent->lock();
markOop header = lock->displaced_header();
most_recent->set_obj(NULL);
if (!lockee->mark()->has_bias_pattern()) {
bool call_vm = UseHeavyMonitors;
if (header != NULL || call_vm) {
if (call_vm || Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
most_recent->set_obj(lockee);
CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);
}
}
}
UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
}
most_recent++;
}
CALL_VM(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD), handle_exception);
ShouldNotReachHere();
}
CASE(_getfield):
CASE(_getstatic):
{
u2 index;
ConstantPoolCacheEntry* cache;
index = Bytes::get_native_u2(pc+1);
cache = cp->entry_at(index);
if (!cache->is_resolved((Bytecodes::Code)opcode)) {
CALL_VM(InterpreterRuntime::resolve_get_put(THREAD, (Bytecodes::Code)opcode),
handle_exception);
cache = cp->entry_at(index);
}
#ifdef VM_JVMTI
if (_jvmti_interp_events) {
int *count_addr;
oop obj;
count_addr = (int *)JvmtiExport::get_field_access_count_addr();
if ( *count_addr > 0 ) {
if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {
obj = (oop)NULL;
} else {
obj = (oop) STACK_OBJECT(-1);
VERIFY_OOP(obj);
}
CALL_VM(InterpreterRuntime::post_field_access(THREAD,
obj,
cache),
handle_exception);
}
}
#endif /* VM_JVMTI */
oop obj;
if ((Bytecodes::Code)opcode == Bytecodes::_getstatic) {
Klass* k = cache->f1_as_klass();
obj = k->java_mirror();
MORE_STACK(1); // Assume single slot push
} else {
obj = (oop) STACK_OBJECT(-1);
CHECK_NULL(obj);
}
TosState tos_type = cache->flag_state();
int field_offset = cache->f2_as_index();
if (cache->is_volatile()) {
if (support_IRIW_for_not_multiple_copy_atomic_cpu) {
OrderAccess::fence();
}
if (tos_type == atos) {
VERIFY_OOP(obj->obj_field_acquire(field_offset));
SET_STACK_OBJECT(obj->obj_field_acquire(field_offset), -1);
} else if (tos_type == itos) {
SET_STACK_INT(obj->int_field_acquire(field_offset), -1);
} else if (tos_type == ltos) {
SET_STACK_LONG(obj->long_field_acquire(field_offset), 0);
MORE_STACK(1);
} else if (tos_type == btos || tos_type == ztos) {
SET_STACK_INT(obj->byte_field_acquire(field_offset), -1);
} else if (tos_type == ctos) {
SET_STACK_INT(obj->char_field_acquire(field_offset), -1);
} else if (tos_type == stos) {
SET_STACK_INT(obj->short_field_acquire(field_offset), -1);
} else if (tos_type == ftos) {
SET_STACK_FLOAT(obj->float_field_acquire(field_offset), -1);
} else {
SET_STACK_DOUBLE(obj->double_field_acquire(field_offset), 0);
MORE_STACK(1);
}
} else {
if (tos_type == atos) {
VERIFY_OOP(obj->obj_field(field_offset));
SET_STACK_OBJECT(obj->obj_field(field_offset), -1);
} else if (tos_type == itos) {
SET_STACK_INT(obj->int_field(field_offset), -1);
} else if (tos_type == ltos) {
SET_STACK_LONG(obj->long_field(field_offset), 0);
MORE_STACK(1);
} else if (tos_type == btos || tos_type == ztos) {
SET_STACK_INT(obj->byte_field(field_offset), -1);
} else if (tos_type == ctos) {
SET_STACK_INT(obj->char_field(field_offset), -1);
} else if (tos_type == stos) {
SET_STACK_INT(obj->short_field(field_offset), -1);
} else if (tos_type == ftos) {
SET_STACK_FLOAT(obj->float_field(field_offset), -1);
} else {
SET_STACK_DOUBLE(obj->double_field(field_offset), 0);
MORE_STACK(1);
}
}
UPDATE_PC_AND_CONTINUE(3);
}
CASE(_putfield):
CASE(_putstatic):
{
u2 index = Bytes::get_native_u2(pc+1);
ConstantPoolCacheEntry* cache = cp->entry_at(index);
if (!cache->is_resolved((Bytecodes::Code)opcode)) {
CALL_VM(InterpreterRuntime::resolve_get_put(THREAD, (Bytecodes::Code)opcode),
handle_exception);
cache = cp->entry_at(index);
}
#ifdef VM_JVMTI
if (_jvmti_interp_events) {
int *count_addr;
oop obj;
count_addr = (int *)JvmtiExport::get_field_modification_count_addr();
if ( *count_addr > 0 ) {
if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {
obj = (oop)NULL;
}
else {
if (cache->is_long() || cache->is_double()) {
obj = (oop) STACK_OBJECT(-3);
} else {
obj = (oop) STACK_OBJECT(-2);
}
VERIFY_OOP(obj);
}
CALL_VM(InterpreterRuntime::post_field_modification(THREAD,
obj,
cache,
(jvalue *)STACK_SLOT(-1)),
handle_exception);
}
}
#endif /* VM_JVMTI */
oop obj;
int count;
TosState tos_type = cache->flag_state();
count = -1;
if (tos_type == ltos || tos_type == dtos) {
--count;
}
if ((Bytecodes::Code)opcode == Bytecodes::_putstatic) {
Klass* k = cache->f1_as_klass();
obj = k->java_mirror();
} else {
--count;
obj = (oop) STACK_OBJECT(count);
CHECK_NULL(obj);
}
int field_offset = cache->f2_as_index();
if (cache->is_volatile()) {
if (tos_type == itos) {
obj->release_int_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == atos) {
VERIFY_OOP(STACK_OBJECT(-1));
obj->release_obj_field_put(field_offset, STACK_OBJECT(-1));
} else if (tos_type == btos) {
obj->release_byte_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == ztos) {
int bool_field = STACK_INT(-1); // only store LSB
obj->release_byte_field_put(field_offset, (bool_field & 1));
} else if (tos_type == ltos) {
obj->release_long_field_put(field_offset, STACK_LONG(-1));
} else if (tos_type == ctos) {
obj->release_char_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == stos) {
obj->release_short_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == ftos) {
obj->release_float_field_put(field_offset, STACK_FLOAT(-1));
} else {
obj->release_double_field_put(field_offset, STACK_DOUBLE(-1));
}
OrderAccess::storeload();
} else {
if (tos_type == itos) {
obj->int_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == atos) {
VERIFY_OOP(STACK_OBJECT(-1));
obj->obj_field_put(field_offset, STACK_OBJECT(-1));
} else if (tos_type == btos) {
obj->byte_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == ztos) {
int bool_field = STACK_INT(-1); // only store LSB
obj->byte_field_put(field_offset, (bool_field & 1));
} else if (tos_type == ltos) {
obj->long_field_put(field_offset, STACK_LONG(-1));
} else if (tos_type == ctos) {
obj->char_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == stos) {
obj->short_field_put(field_offset, STACK_INT(-1));
} else if (tos_type == ftos) {
obj->float_field_put(field_offset, STACK_FLOAT(-1));
} else {
obj->double_field_put(field_offset, STACK_DOUBLE(-1));
}
}
UPDATE_PC_AND_TOS_AND_CONTINUE(3, count);
}
CASE(_new): {
u2 index = Bytes::get_Java_u2(pc+1);
ConstantPool* constants = istate->method()->constants();
if (!constants->tag_at(index).is_unresolved_klass()) {
Klass* entry = constants->slot_at(index).get_klass();
assert(entry->is_klass(), "Should be resolved klass");
Klass* k_entry = (Klass*) entry;
assert(k_entry->oop_is_instance(), "Should be InstanceKlass");
InstanceKlass* ik = (InstanceKlass*) k_entry;
if ( ik->is_initialized() && ik->can_be_fastpath_allocated() ) {
size_t obj_size = ik->size_helper();
oop result = NULL;
bool need_zero = !ZeroTLAB;
if (UseTLAB) {
result = (oop) THREAD->tlab().allocate(obj_size);
}
#ifndef CC_INTERP_PROFILE
if (result == NULL) {
need_zero = true;
retry:
HeapWord* compare_to = *Universe::heap()->top_addr();
HeapWord* new_top = compare_to + obj_size;
if (new_top <= *Universe::heap()->end_addr()) {
if (Atomic::cmpxchg_ptr(new_top, Universe::heap()->top_addr(), compare_to) != compare_to) {
goto retry;
}
result = (oop) compare_to;
}
}
#endif
if (result != NULL) {
if (need_zero ) {
HeapWord* to_zero = (HeapWord*) result + sizeof(oopDesc) / oopSize;
obj_size -= sizeof(oopDesc) / oopSize;
if (obj_size > 0 ) {
memset(to_zero, 0, obj_size * HeapWordSize);
}
}
if (UseBiasedLocking) {
result->set_mark(ik->prototype_header());
} else {
result->set_mark(markOopDesc::prototype());
}
result->set_klass_gap(0);
result->set_klass(k_entry);
OrderAccess::storestore();
SET_STACK_OBJECT(result, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
}
}
}
CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
handle_exception);
OrderAccess::storestore();
SET_STACK_OBJECT(THREAD->vm_result(), 0);
THREAD->set_vm_result(NULL);
UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
}
CASE(_anewarray): {
u2 index = Bytes::get_Java_u2(pc+1);
jint size = STACK_INT(-1);
CALL_VM(InterpreterRuntime::anewarray(THREAD, METHOD->constants(), index, size),
handle_exception);
OrderAccess::storestore();
SET_STACK_OBJECT(THREAD->vm_result(), -1);
THREAD->set_vm_result(NULL);
UPDATE_PC_AND_CONTINUE(3);
}
CASE(_multianewarray): {
jint dims = *(pc+3);
jint size = STACK_INT(-1);
jint *dimarray =
(jint*)&topOfStack[dims * Interpreter::stackElementWords+
Interpreter::stackElementWords-1];
CALL_VM(InterpreterRuntime::multianewarray(THREAD, dimarray),
handle_exception);
OrderAccess::storestore();
SET_STACK_OBJECT(THREAD->vm_result(), -dims);
THREAD->set_vm_result(NULL);
UPDATE_PC_AND_TOS_AND_CONTINUE(4, -(dims-1));
}
CASE(_checkcast):
if (STACK_OBJECT(-1) != NULL) {
VERIFY_OOP(STACK_OBJECT(-1));
u2 index = Bytes::get_Java_u2(pc+1);
if (METHOD->constants()->tag_at(index).is_unresolved_klass()) {
CALL_VM(InterpreterRuntime::quicken_io_cc(THREAD), handle_exception);
}
Klass* klassOf = (Klass*) METHOD->constants()->slot_at(index).get_klass();
Klass* objKlass = STACK_OBJECT(-1)->klass(); // ebx
if (objKlass != klassOf && !objKlass->is_subtype_of(klassOf)) {
BI_PROFILE_SUBTYPECHECK_FAILED(objKlass);
ResourceMark rm(THREAD);
const char* objName = objKlass->external_name();
const char* klassName = klassOf->external_name();
char* message = SharedRuntime::generate_class_cast_message(
objName, klassName);
VM_JAVA_ERROR(vmSymbols::java_lang_ClassCastException(), message, note_classCheck_trap);
}
BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/false, objKlass);
} else {
BI_PROFILE_UPDATE_CHECKCAST(/*null_seen=*/true, NULL);
}
UPDATE_PC_AND_CONTINUE(3);
CASE(_instanceof):
if (STACK_OBJECT(-1) == NULL) {
SET_STACK_INT(0, -1);
BI_PROFILE_UPDATE_INSTANCEOF(/*null_seen=*/true, NULL);
} else {
VERIFY_OOP(STACK_OBJECT(-1));
u2 index = Bytes::get_Java_u2(pc+1);
if (METHOD->constants()->tag_at(index).is_unresolved_klass()) {
CALL_VM(InterpreterRuntime::quicken_io_cc(THREAD), handle_exception);
}
Klass* klassOf = (Klass*) METHOD->constants()->slot_at(index).get_klass();
Klass* objKlass = STACK_OBJECT(-1)->klass();
if ( objKlass == klassOf || objKlass->is_subtype_of(klassOf)) {
SET_STACK_INT(1, -1);
} else {
SET_STACK_INT(0, -1);
BI_PROFILE_SUBTYPECHECK_FAILED(objKlass);
}
BI_PROFILE_UPDATE_INSTANCEOF(/*null_seen=*/false, objKlass);
}
UPDATE_PC_AND_CONTINUE(3);
CASE(_ldc_w):
CASE(_ldc):
{
u2 index;
bool wide = false;
int incr = 2; // frequent case
if (opcode == Bytecodes::_ldc) {
index = pc[1];
} else {
index = Bytes::get_Java_u2(pc+1);
incr = 3;
wide = true;
}
ConstantPool* constants = METHOD->constants();
switch (constants->tag_at(index).value()) {
case JVM_CONSTANT_Integer:
SET_STACK_INT(constants->int_at(index), 0);
break;
case JVM_CONSTANT_Float:
SET_STACK_FLOAT(constants->float_at(index), 0);
break;
case JVM_CONSTANT_String:
{
oop result = constants->resolved_references()->obj_at(index);
if (result == NULL) {
CALL_VM(InterpreterRuntime::resolve_ldc(THREAD, (Bytecodes::Code) opcode), handle_exception);
SET_STACK_OBJECT(THREAD->vm_result(), 0);
THREAD->set_vm_result(NULL);
} else {
VERIFY_OOP(result);
SET_STACK_OBJECT(result, 0);
}
break;
}
case JVM_CONSTANT_Class:
VERIFY_OOP(constants->resolved_klass_at(index)->java_mirror());
SET_STACK_OBJECT(constants->resolved_klass_at(index)->java_mirror(), 0);
break;
case JVM_CONSTANT_UnresolvedClass:
case JVM_CONSTANT_UnresolvedClassInError:
CALL_VM(InterpreterRuntime::ldc(THREAD, wide), handle_exception);
SET_STACK_OBJECT(THREAD->vm_result(), 0);
THREAD->set_vm_result(NULL);
break;
default: ShouldNotReachHere();
}
UPDATE_PC_AND_TOS_AND_CONTINUE(incr, 1);
}
CASE(_ldc2_w):
{
u2 index = Bytes::get_Java_u2(pc+1);
ConstantPool* constants = METHOD->constants();
switch (constants->tag_at(index).value()) {
case JVM_CONSTANT_Long:
SET_STACK_LONG(constants->long_at(index), 1);
break;
case JVM_CONSTANT_Double:
SET_STACK_DOUBLE(constants->double_at(index), 1);
break;
default: ShouldNotReachHere();
}
UPDATE_PC_AND_TOS_AND_CONTINUE(3, 2);
}
CASE(_fast_aldc_w):
CASE(_fast_aldc): {
u2 index;
int incr;
if (opcode == Bytecodes::_fast_aldc) {
index = pc[1];
incr = 2;
} else {
index = Bytes::get_native_u2(pc+1);
incr = 3;
}
ConstantPool* constants = METHOD->constants();
oop result = constants->resolved_references()->obj_at(index);
if (result == NULL) {
CALL_VM(InterpreterRuntime::resolve_ldc(THREAD, (Bytecodes::Code) opcode),
handle_exception);
result = THREAD->vm_result();
}
VERIFY_OOP(result);
SET_STACK_OBJECT(result, 0);
UPDATE_PC_AND_TOS_AND_CONTINUE(incr, 1);
}
CASE(_invokedynamic): {
if (!EnableInvokeDynamic) {
CALL_VM(InterpreterRuntime::throw_IncompatibleClassChangeError(THREAD),
handle_exception);
ShouldNotReachHere();
}
u4 index = Bytes::get_native_u4(pc+1);
ConstantPoolCacheEntry* cache = cp->constant_pool()->invokedynamic_cp_cache_entry_at(index);
if (! cache->is_resolved((Bytecodes::Code) opcode)) {
CALL_VM(InterpreterRuntime::resolve_invokedynamic(THREAD),
handle_exception);
cache = cp->constant_pool()->invokedynamic_cp_cache_entry_at(index);
}
Method* method = cache->f1_as_method();
if (VerifyOops) method->verify();
if (cache->has_appendix()) {
ConstantPool* constants = METHOD->constants();
SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);
MORE_STACK(1);
}
istate->set_msg(call_method);
istate->set_callee(method);
istate->set_callee_entry_point(method->from_interpreted_entry());
istate->set_bcp_advance(5);
BI_PROFILE_UPDATE_CALL();
UPDATE_PC_AND_RETURN(0); // I'll be back...
}
CASE(_invokehandle): {
if (!EnableInvokeDynamic) {
ShouldNotReachHere();
}
u2 index = Bytes::get_native_u2(pc+1);
ConstantPoolCacheEntry* cache = cp->entry_at(index);
if (! cache->is_resolved((Bytecodes::Code) opcode)) {
CALL_VM(InterpreterRuntime::resolve_invokehandle(THREAD),
handle_exception);
cache = cp->entry_at(index);
}
Method* method = cache->f1_as_method();
if (VerifyOops) method->verify();
if (cache->has_appendix()) {
ConstantPool* constants = METHOD->constants();
SET_STACK_OBJECT(cache->appendix_if_resolved(constants), 0);
MORE_STACK(1);
}
istate->set_msg(call_method);
istate->set_callee(method);
istate->set_callee_entry_point(method->from_interpreted_entry());
istate->set_bcp_advance(3);
BI_PROFILE_UPDATE_FINALCALL();
UPDATE_PC_AND_RETURN(0); // I'll be back...
}
CASE(_invokeinterface): {
u2 index = Bytes::get_native_u2(pc+1);
ConstantPoolCacheEntry* cache = cp->entry_at(index);
if (!cache->is_resolved((Bytecodes::Code)opcode)) {
CALL_VM(InterpreterRuntime::resolve_invoke(THREAD, (Bytecodes::Code)opcode),
handle_exception);
cache = cp->entry_at(index);
}
istate->set_msg(call_method);
if (cache->is_forced_virtual()) {
Method* callee;
CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));
if (cache->is_vfinal()) {
callee = cache->f2_as_vfinal_method();
BI_PROFILE_UPDATE_FINALCALL();
} else {
int parms = cache->parameter_size();
oop rcvr = STACK_OBJECT(-parms);
VERIFY_OOP(rcvr);
InstanceKlass* rcvrKlass = (InstanceKlass*)rcvr->klass();
callee = (Method*) rcvrKlass->start_of_vtable()[ cache->f2_as_index()];
BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());
}
istate->set_callee(callee);
istate->set_callee_entry_point(callee->from_interpreted_entry());
#ifdef VM_JVMTI
if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {
istate->set_callee_entry_point(callee->interpreter_entry());
}
#endif /* VM_JVMTI */
istate->set_bcp_advance(5);
UPDATE_PC_AND_RETURN(0); // I'll be back...
}
Method* callee;
Method *interface_method = cache->f2_as_interface_method();
InstanceKlass* iclass = interface_method->method_holder();
int parms = cache->parameter_size();
oop rcvr = STACK_OBJECT(-parms);
CHECK_NULL(rcvr);
InstanceKlass* int2 = (InstanceKlass*) rcvr->klass();
{
Klass* refc = cache->f1_as_klass();
itableOffsetEntry* scan;
for (scan = (itableOffsetEntry*) int2->start_of_itable();
scan->interface_klass() != NULL;
scan++) {
if (scan->interface_klass() == refc) {
break;
}
}
if (scan->interface_klass() == NULL) {
VM_JAVA_ERROR(vmSymbols::java_lang_IncompatibleClassChangeError(), "", note_no_trap);
}
}
itableOffsetEntry* ki = (itableOffsetEntry*) int2->start_of_itable();
int i;
for ( i = 0 ; i < int2->itable_length() ; i++, ki++ ) {
if (ki->interface_klass() == iclass) break;
}
if (i == int2->itable_length()) {
VM_JAVA_ERROR(vmSymbols::java_lang_IncompatibleClassChangeError(), "", note_no_trap);
}
int mindex = interface_method->itable_index();
itableMethodEntry* im = ki->first_method_entry(rcvr->klass());
callee = im[mindex].method();
if (callee == NULL) {
VM_JAVA_ERROR(vmSymbols::java_lang_AbstractMethodError(), "", note_no_trap);
}
BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());
istate->set_callee(callee);
istate->set_callee_entry_point(callee->from_interpreted_entry());
#ifdef VM_JVMTI
if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {
istate->set_callee_entry_point(callee->interpreter_entry());
}
#endif /* VM_JVMTI */
istate->set_bcp_advance(5);
UPDATE_PC_AND_RETURN(0); // I'll be back...
}
CASE(_invokevirtual):
CASE(_invokespecial):
CASE(_invokestatic): {
u2 index = Bytes::get_native_u2(pc+1);
ConstantPoolCacheEntry* cache = cp->entry_at(index);
if (!cache->is_resolved((Bytecodes::Code)opcode)) {
CALL_VM(InterpreterRuntime::resolve_invoke(THREAD, (Bytecodes::Code)opcode),
handle_exception);
cache = cp->entry_at(index);
}
istate->set_msg(call_method);
{
Method* callee;
if ((Bytecodes::Code)opcode == Bytecodes::_invokevirtual) {
CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));
if (cache->is_vfinal()) {
callee = cache->f2_as_vfinal_method();
BI_PROFILE_UPDATE_FINALCALL();
} else {
int parms = cache->parameter_size();
oop rcvr = STACK_OBJECT(-parms);
VERIFY_OOP(rcvr);
InstanceKlass* rcvrKlass = (InstanceKlass*)rcvr->klass();
/*
  Executing this code in java.lang.String:
    public String(char value[]) {
      this.count = value.length;
      this.value = (char[])value.clone();
    }

  a find on rcvr->klass() reports:
  {type array char}{type array class}
   - klass: {other class}

  but using InstanceKlass::cast(STACK_OBJECT(-parms)->klass()) causes in assertion failure
  because rcvr->klass()->oop_is_instance() == 0
  However it seems to have a vtable in the right location. Huh?
*/
callee = (Method*) rcvrKlass->start_of_vtable()[ cache->f2_as_index()];
BI_PROFILE_UPDATE_VIRTUALCALL(rcvr->klass());
}
} else {
if ((Bytecodes::Code)opcode == Bytecodes::_invokespecial) {
CHECK_NULL(STACK_OBJECT(-(cache->parameter_size())));
}
callee = cache->f1_as_method();
BI_PROFILE_UPDATE_CALL();
}
istate->set_callee(callee);
istate->set_callee_entry_point(callee->from_interpreted_entry());
#ifdef VM_JVMTI
if (JvmtiExport::can_post_interpreter_events() && THREAD->is_interp_only_mode()) {
istate->set_callee_entry_point(callee->interpreter_entry());
}
#endif /* VM_JVMTI */
istate->set_bcp_advance(3);
UPDATE_PC_AND_RETURN(0); // I'll be back...
}
}
CASE(_newarray): {
BasicType atype = (BasicType) *(pc+1);
jint size = STACK_INT(-1);
CALL_VM(InterpreterRuntime::newarray(THREAD, atype, size),
handle_exception);
OrderAccess::storestore();
SET_STACK_OBJECT(THREAD->vm_result(), -1);
THREAD->set_vm_result(NULL);
UPDATE_PC_AND_CONTINUE(2);
}
CASE(_athrow): {
oop except_oop = STACK_OBJECT(-1);
CHECK_NULL(except_oop);
THREAD->set_pending_exception(except_oop, NULL, 0);
goto handle_exception;
}
CASE(_jsr): {
SET_STACK_ADDR(((address)pc - (intptr_t)(istate->method()->code_base()) + 3), 0);
MORE_STACK(1);
}
CASE(_goto):
{
int16_t offset = (int16_t)Bytes::get_Java_u2(pc + 1);
BI_PROFILE_UPDATE_JUMP();
address branch_pc = pc;
UPDATE_PC(offset);
DO_BACKEDGE_CHECKS(offset, branch_pc);
CONTINUE;
}
CASE(_jsr_w): {
SET_STACK_ADDR(((address)pc - (intptr_t)(istate->method()->code_base()) + 5), 0);
MORE_STACK(1);
}
CASE(_goto_w):
{
int32_t offset = Bytes::get_Java_u4(pc + 1);
BI_PROFILE_UPDATE_JUMP();
address branch_pc = pc;
UPDATE_PC(offset);
DO_BACKEDGE_CHECKS(offset, branch_pc);
CONTINUE;
}
CASE(_ret): {
BI_PROFILE_UPDATE_RET(/*bci=*/((int)(intptr_t)(LOCALS_ADDR(pc[1]))));
pc = istate->method()->code_base() + (intptr_t)(LOCALS_ADDR(pc[1]));
UPDATE_PC_AND_CONTINUE(0);
}
CASE(_breakpoint): {
Bytecodes::Code original_bytecode;
DECACHE_STATE();
SET_LAST_JAVA_FRAME();
original_bytecode = InterpreterRuntime::get_original_bytecode_at(THREAD,
METHOD, pc);
RESET_LAST_JAVA_FRAME();
CACHE_STATE();
if (THREAD->has_pending_exception()) goto handle_exception;
CALL_VM(InterpreterRuntime::_breakpoint(THREAD, METHOD, pc),
handle_exception);
opcode = (jubyte)original_bytecode;
goto opcode_switch;
}
DEFAULT:
fatal(err_msg("Unimplemented opcode %d = %s", opcode,
Bytecodes::name((Bytecodes::Code)opcode)));
goto finish;
} /* switch(opc) */
#ifdef USELABELS
check_for_exception:
#endif
{
if (!THREAD->has_pending_exception()) {
CONTINUE;
}
DECACHE_PC();
goto handle_exception;
}
do_continue: ;
} /* while (1) interpreter loop */
handle_exception: {
HandleMarkCleaner __hmc(THREAD);
Handle except_oop(THREAD, THREAD->pending_exception());
HandleMark __hm(THREAD);
THREAD->clear_pending_exception();
assert(except_oop(), "No exception to process");
intptr_t continuation_bci;
topOfStack = istate->stack_base() - Interpreter::stackElementWords;
CALL_VM(continuation_bci = (intptr_t)InterpreterRuntime::exception_handler_for_exception(THREAD, except_oop()),
handle_exception);
except_oop = THREAD->vm_result();
THREAD->set_vm_result(NULL);
if (continuation_bci >= 0) {
SET_STACK_OBJECT(except_oop(), 0);
MORE_STACK(1);
pc = METHOD->code_base() + continuation_bci;
if (TraceExceptions) {
ttyLocker ttyl;
ResourceMark rm;
tty->print_cr("Exception <%s> (" INTPTR_FORMAT ")", except_oop->print_value_string(), p2i(except_oop()));
tty->print_cr(" thrown in interpreter method <%s>", METHOD->print_value_string());
tty->print_cr(" at bci %d, continuing at %d for thread " INTPTR_FORMAT,
(int)(istate->bcp() - METHOD->code_base()),
(int)continuation_bci, p2i(THREAD));
}
NOT_PRODUCT(Exceptions::debug_check_abort(except_oop));
BI_PROFILE_ALIGN_TO_CURRENT_BCI();
goto run;
}
if (TraceExceptions) {
ttyLocker ttyl;
ResourceMark rm;
tty->print_cr("Exception <%s> (" INTPTR_FORMAT ")", except_oop->print_value_string(), p2i(except_oop()));
tty->print_cr(" thrown in interpreter method <%s>", METHOD->print_value_string());
tty->print_cr(" at bci %d, unwinding for thread " INTPTR_FORMAT,
(int)(istate->bcp() - METHOD->code_base()),
p2i(THREAD));
}
NOT_PRODUCT(Exceptions::debug_check_abort(except_oop));
THREAD->set_pending_exception(except_oop(), NULL, 0);
goto handle_return;
} // handle_exception:
handle_Pop_Frame: {
istate->set_msg(popping_frame);
THREAD->clr_pop_frame_pending();
THREAD->set_pop_frame_in_process();
goto handle_return;
} // handle_Pop_Frame
handle_Early_Return: {
istate->set_msg(early_return);
topOfStack = istate->stack_base() - Interpreter::stackElementWords;
JvmtiThreadState *ts = THREAD->jvmti_thread_state();
switch (istate->method()->result_type()) {
case T_BOOLEAN:
case T_SHORT:
case T_BYTE:
case T_CHAR:
case T_INT:
SET_STACK_INT(ts->earlyret_value().i, 0);
MORE_STACK(1);
break;
case T_LONG:
SET_STACK_LONG(ts->earlyret_value().j, 1);
MORE_STACK(2);
break;
case T_FLOAT:
SET_STACK_FLOAT(ts->earlyret_value().f, 0);
MORE_STACK(1);
break;
case T_DOUBLE:
SET_STACK_DOUBLE(ts->earlyret_value().d, 1);
MORE_STACK(2);
break;
case T_ARRAY:
case T_OBJECT:
SET_STACK_OBJECT(ts->earlyret_oop(), 0);
MORE_STACK(1);
break;
}
ts->clr_earlyret_value();
ts->set_earlyret_oop(NULL);
ts->clr_earlyret_pending();
} // handle_Early_Return
handle_return: {
OrderAccess::storestore();
DECACHE_STATE();
bool suppress_error = istate->msg() == popping_frame || istate->msg() == early_return;
bool suppress_exit_event = THREAD->has_pending_exception() || istate->msg() == popping_frame;
Handle original_exception(THREAD, THREAD->pending_exception());
Handle illegal_state_oop(THREAD, NULL);
THREAD->clear_pending_exception();
if (THREAD->do_not_unlock()) {
THREAD->clr_do_not_unlock();
} else {
BasicObjectLock* base = istate->monitor_base();
BasicObjectLock* end = (BasicObjectLock*) istate->stack_base();
bool method_unlock_needed = METHOD->is_synchronized();
if (method_unlock_needed) base--;
while (end < base) {
oop lockee = end->obj();
if (lockee != NULL) {
BasicLock* lock = end->lock();
markOop header = lock->displaced_header();
end->set_obj(NULL);
if (!lockee->mark()->has_bias_pattern()) {
if (header != NULL) {
if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
end->set_obj(lockee);
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, end));
}
}
}
}
if (illegal_state_oop() == NULL && !suppress_error) {
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD));
}
assert(THREAD->has_pending_exception(), "Lost our exception!");
illegal_state_oop = THREAD->pending_exception();
THREAD->clear_pending_exception();
}
}
end++;
}
if (method_unlock_needed) {
if (base->obj() == NULL) {
if (illegal_state_oop() == NULL && !suppress_error) {
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::throw_illegal_monitor_state_exception(THREAD));
}
assert(THREAD->has_pending_exception(), "Lost our exception!");
illegal_state_oop = THREAD->pending_exception();
THREAD->clear_pending_exception();
}
} else {
oop rcvr = base->obj();
if (rcvr == NULL) {
if (!suppress_error) {
VM_JAVA_ERROR_NO_JUMP(vmSymbols::java_lang_NullPointerException(), "", note_nullCheck_trap);
illegal_state_oop = THREAD->pending_exception();
THREAD->clear_pending_exception();
}
} else if (UseHeavyMonitors) {
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
}
if (THREAD->has_pending_exception()) {
if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
THREAD->clear_pending_exception();
}
} else {
BasicLock* lock = base->lock();
markOop header = lock->displaced_header();
base->set_obj(NULL);
if (!rcvr->mark()->has_bias_pattern()) {
base->set_obj(NULL);
if (header != NULL) {
if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), lock) != lock) {
base->set_obj(rcvr);
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
}
if (THREAD->has_pending_exception()) {
if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
THREAD->clear_pending_exception();
}
}
}
}
}
}
}
}
THREAD->clr_do_not_unlock();
suppress_exit_event = suppress_exit_event || illegal_state_oop() != NULL;
#ifdef VM_JVMTI
if (_jvmti_interp_events) {
if ( !suppress_exit_event && THREAD->is_interp_only_mode() ) {
{
HandleMark __hm(THREAD);
CALL_VM_NOCHECK(InterpreterRuntime::post_method_exit(THREAD));
}
}
}
#endif /* VM_JVMTI */
assert(!suppress_error || (suppress_error && illegal_state_oop() == NULL), "Error was not suppressed");
if (illegal_state_oop() != NULL || original_exception() != NULL) {
istate->set_msg(throwing_exception);
if (illegal_state_oop() != NULL)
THREAD->set_pending_exception(illegal_state_oop(), NULL, 0);
else
THREAD->set_pending_exception(original_exception(), NULL, 0);
UPDATE_PC_AND_RETURN(0);
}
if (istate->msg() == popping_frame) {
if (istate->prev() == NULL) {
THREAD->popframe_preserve_args(in_ByteSize(METHOD->size_of_parameters() * wordSize),
LOCALS_SLOT(METHOD->size_of_parameters() - 1));
THREAD->set_popframe_condition_bit(JavaThread::popframe_force_deopt_reexecution_bit);
}
} else {
istate->set_msg(return_from_method);
}
UPDATE_PC_AND_RETURN(1);
} /* handle_return: */
finish:
DECACHE_TOS();
DECACHE_PC();
return;
}
#ifndef VM_JVMTI
// Construct the interpreter state object.  Only the one-time 'initialize'
// message is a legal construction reason; anything else is a programming
// error.  The self link lets sanity checks detect a relocated/corrupt state.
BytecodeInterpreter::BytecodeInterpreter(messages msg) {
  if (msg != initialize) {
    ShouldNotReachHere();
  }
  _prev_link = NULL;
  _self_link = this;
  _msg       = msg;
}
// Expression-stack readers used by frame walking / debugging code.
// 'offset' counts slots below 'tos'; Interpreter::expr_index_at maps it to a
// platform-specific array index.

// Read a raw machine word (e.g. a jsr return address) from a stack slot.
address BytecodeInterpreter::stack_slot(intptr_t *tos, int offset) {
return (address) tos[Interpreter::expr_index_at(-offset)];
}
// Read a jint by reinterpreting the slot's storage in place.
jint BytecodeInterpreter::stack_int(intptr_t *tos, int offset) {
return *((jint*) &tos[Interpreter::expr_index_at(-offset)]);
}
// Read a jfloat bit image from the slot (no numeric conversion).
jfloat BytecodeInterpreter::stack_float(intptr_t *tos, int offset) {
return *((jfloat *) &tos[Interpreter::expr_index_at(-offset)]);
}
// Read an object reference from the slot.
oop BytecodeInterpreter::stack_object(intptr_t *tos, int offset) {
return cast_to_oop(tos [Interpreter::expr_index_at(-offset)]);
}
// Read a jdouble spanning two stack slots via the VMJavaVal64 overlay.
jdouble BytecodeInterpreter::stack_double(intptr_t *tos, int offset) {
return ((VMJavaVal64*) &tos[Interpreter::expr_index_at(-offset)])->d;
}
// Read a jlong spanning two stack slots via the VMJavaVal64 overlay.
jlong BytecodeInterpreter::stack_long(intptr_t *tos, int offset) {
return ((VMJavaVal64 *) &tos[Interpreter::expr_index_at(-offset)])->l;
}
// Expression-stack writers.  The four single-slot setters below had empty
// bodies, silently dropping every store while their getters and the
// two-slot (double/long) setters do real work; each now performs the store
// that mirrors its corresponding getter.

// Store a raw machine word (e.g. a jsr return address) into the stack slot
// at 'offset' below 'tos'.
void BytecodeInterpreter::set_stack_slot(intptr_t *tos, address value,
                                         int offset) {
  *((address *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}
// Store a jint into the stack slot, reinterpreting the slot's storage
// (mirrors stack_int()).
void BytecodeInterpreter::set_stack_int(intptr_t *tos, int value,
                                        int offset) {
  *((jint *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}
// Store a jfloat bit image into the stack slot (mirrors stack_float()).
void BytecodeInterpreter::set_stack_float(intptr_t *tos, jfloat value,
                                          int offset) {
  *((jfloat *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}
// Store an object reference into the stack slot (mirrors stack_object()).
void BytecodeInterpreter::set_stack_object(intptr_t *tos, oop value,
                                           int offset) {
  *((oop *)&tos[Interpreter::expr_index_at(-offset)]) = value;
}
// Two-slot (category-2) expression-stack writers.

// Store a jdouble across two stack slots via the VMJavaVal64 overlay.
void BytecodeInterpreter::set_stack_double(intptr_t *tos, jdouble value,
int offset) {
((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->d = value;
}
// Copy a 64-bit double image from 'addr' into two stack slots.
void BytecodeInterpreter::set_stack_double_from_addr(intptr_t *tos,
address addr, int offset) {
(((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->d =
((VMJavaVal64*)addr)->d);
}
// Store a jlong across two stack slots.  The unused companion slot is first
// poisoned with the 0xdeedbeeb debug pattern so stale reads are recognizable.
void BytecodeInterpreter::set_stack_long(intptr_t *tos, jlong value,
int offset) {
((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset+1)])->l = 0xdeedbeeb;
((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->l = value;
}
// Copy a 64-bit long image from 'addr' into two stack slots, poisoning the
// companion slot as above.
void BytecodeInterpreter::set_stack_long_from_addr(intptr_t *tos,
address addr, int offset) {
((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset+1)])->l = 0xdeedbeeb;
((VMJavaVal64*)&tos[Interpreter::expr_index_at(-offset)])->l =
((VMJavaVal64*)addr)->l;
}
// Local-variable readers.  'offset' is the local's index; local_index_at
// maps it to the platform layout of the locals array.

// Read a raw machine word from a local slot.
address BytecodeInterpreter::locals_slot(intptr_t* locals, int offset) {
return (address)locals[Interpreter::local_index_at(-offset)];
}
// Read a jint local.  Note this is a value cast (truncation) of the slot.
jint BytecodeInterpreter::locals_int(intptr_t* locals, int offset) {
return (jint)locals[Interpreter::local_index_at(-offset)];
}
// NOTE(review): unlike stack_float(), this converts the raw slot value
// numerically to jfloat rather than reinterpreting its bit image — confirm
// callers rely on that (or use locals_slot for raw access).
jfloat BytecodeInterpreter::locals_float(intptr_t* locals, int offset) {
return (jfloat)locals[Interpreter::local_index_at(-offset)];
}
// Read an object reference from a local slot.
oop BytecodeInterpreter::locals_object(intptr_t* locals, int offset) {
return cast_to_oop(locals[Interpreter::local_index_at(-offset)]);
}
// Two-slot locals: a category-2 value at index n occupies slots n and n+1,
// hence the -(offset+1) addressing below.
jdouble BytecodeInterpreter::locals_double(intptr_t* locals, int offset) {
return ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d;
}
jlong BytecodeInterpreter::locals_long(intptr_t* locals, int offset) {
return ((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l;
}
// Address of the storage backing a two-slot long local.
address BytecodeInterpreter::locals_long_at(intptr_t* locals, int offset) {
return ((address)&locals[Interpreter::local_index_at(-(offset+1))]);
}
// Address of the storage backing a two-slot double local.
address BytecodeInterpreter::locals_double_at(intptr_t* locals, int offset) {
return ((address)&locals[Interpreter::local_index_at(-(offset+1))]);
}
// Local-variable writers.  These four setters had empty bodies, silently
// dropping every store while the double/long setters below do real work;
// each now performs the store mirroring its corresponding getter.

// Store a raw machine word into a local slot.
void BytecodeInterpreter::set_locals_slot(intptr_t *locals,
                                          address value, int offset) {
  *((address *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
// Store a jint into a local slot (mirrors locals_int()).
void BytecodeInterpreter::set_locals_int(intptr_t *locals,
                                         jint value, int offset) {
  *((jint *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
// Store a jfloat bit image into a local slot.
void BytecodeInterpreter::set_locals_float(intptr_t *locals,
                                           jfloat value, int offset) {
  *((jfloat *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
// Store an object reference into a local slot (mirrors locals_object()).
void BytecodeInterpreter::set_locals_object(intptr_t *locals,
                                            oop value, int offset) {
  *((oop *)&locals[Interpreter::local_index_at(-offset)]) = value;
}
// Two-slot local-variable writers; -(offset+1) addresses the pair of slots
// occupied by a category-2 value.

// Store a jdouble into a two-slot local.
void BytecodeInterpreter::set_locals_double(intptr_t *locals,
jdouble value, int offset) {
((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d = value;
}
// Store a jlong into a two-slot local.
void BytecodeInterpreter::set_locals_long(intptr_t *locals,
jlong value, int offset) {
((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l = value;
}
// Copy a 64-bit double image from 'addr' into a two-slot local.
void BytecodeInterpreter::set_locals_double_from_addr(intptr_t *locals,
address addr, int offset) {
((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->d = ((VMJavaVal64*)addr)->d;
}
// Copy a 64-bit long image from 'addr' into a two-slot local.
void BytecodeInterpreter::set_locals_long_from_addr(intptr_t *locals,
address addr, int offset) {
((VMJavaVal64*)&locals[Interpreter::local_index_at(-(offset+1))])->l = ((VMJavaVal64*)addr)->l;
}
// astore: copy one raw word from an expression-stack slot into a local slot
// (used for astore of both references and jsr return addresses).
void BytecodeInterpreter::astore(intptr_t* tos, int stack_offset,
intptr_t* locals, int locals_offset) {
intptr_t value = tos[Interpreter::expr_index_at(-stack_offset)];
locals[Interpreter::local_index_at(-locals_offset)] = value;
}
// Copy one raw word between two expression-stack slots; building block for
// the dup*/swap operations below.
void BytecodeInterpreter::copy_stack_slot(intptr_t *tos, int from_offset,
int to_offset) {
tos[Interpreter::expr_index_at(-to_offset)] =
(intptr_t)tos[Interpreter::expr_index_at(-from_offset)];
}
// Stack-manipulation bytecodes, expressed as sequences of copy_stack_slot
// calls.  Offsets are relative to the current top of stack: -1 is the top
// word, 0 is the first free slot.  The copy order in each sequence is
// significant — later copies read slots written earlier.

// dup: duplicate the top word into the free slot.
void BytecodeInterpreter::dup(intptr_t *tos) {
copy_stack_slot(tos, -1, 0);
}
// dup2: duplicate the top two words.
void BytecodeInterpreter::dup2(intptr_t *tos) {
copy_stack_slot(tos, -2, 0);
copy_stack_slot(tos, -1, 1);
}
// dup_x1: insert the top word two down.
void BytecodeInterpreter::dup_x1(intptr_t *tos) {
copy_stack_slot(tos, -1, 0);
copy_stack_slot(tos, -2, -1);
copy_stack_slot(tos, 0, -2);
}
// dup_x2: insert the top word three down.
void BytecodeInterpreter::dup_x2(intptr_t *tos) {
copy_stack_slot(tos, -1, 0);
copy_stack_slot(tos, -2, -1);
copy_stack_slot(tos, -3, -2);
copy_stack_slot(tos, 0, -3);
}
// dup2_x1: insert the top two words three down.
void BytecodeInterpreter::dup2_x1(intptr_t *tos) {
copy_stack_slot(tos, -1, 1);
copy_stack_slot(tos, -2, 0);
copy_stack_slot(tos, -3, -1);
copy_stack_slot(tos, 1, -2);
copy_stack_slot(tos, 0, -3);
}
// dup2_x2: insert the top two words four down.
void BytecodeInterpreter::dup2_x2(intptr_t *tos) {
copy_stack_slot(tos, -1, 1);
copy_stack_slot(tos, -2, 0);
copy_stack_slot(tos, -3, -1);
copy_stack_slot(tos, -4, -2);
copy_stack_slot(tos, 1, -3);
copy_stack_slot(tos, 0, -4);
}
// swap: exchange the top two words, using 'val' as scratch.
void BytecodeInterpreter::swap(intptr_t *tos) {
intptr_t val = tos[Interpreter::expr_index_at(1)];
copy_stack_slot(tos, -2, -1);
tos[Interpreter::expr_index_at(2)] = val;
}
#ifndef PRODUCT
// Map a frame-manager message code to a printable name for debugging
// output (non-product builds only).  Unrecognized codes yield "BAD MSG".
const char* BytecodeInterpreter::C_msg(BytecodeInterpreter::messages msg) {
  switch (msg) {
    case BytecodeInterpreter::no_request:         return "no_request";
    case BytecodeInterpreter::initialize:         return "initialize";
    case BytecodeInterpreter::method_entry:       return "method_entry";
    case BytecodeInterpreter::method_resume:      return "method_resume";
    case BytecodeInterpreter::got_monitors:       return "got_monitors";
    case BytecodeInterpreter::rethrow_exception:  return "rethrow_exception";
    case BytecodeInterpreter::call_method:        return "call_method";
    case BytecodeInterpreter::return_from_method: return "return_from_method";
    case BytecodeInterpreter::more_monitors:      return "more_monitors";
    case BytecodeInterpreter::throwing_exception: return "throwing_exception";
    case BytecodeInterpreter::popping_frame:      return "popping_frame";
    case BytecodeInterpreter::do_osr:             return "do_osr";
    case BytecodeInterpreter::deopt_resume:       return "deopt_resume";
    case BytecodeInterpreter::deopt_resume2:      return "deopt_resume2";
    default:                                      return "BAD MSG";
  }
}
// Dump every field of this interpreter state to the tty for debugging
// (non-product builds only).  Called from the PI() debugger hook below.
void
BytecodeInterpreter::print() {
tty->print_cr("thread: " INTPTR_FORMAT, (uintptr_t) this->_thread);
tty->print_cr("bcp: " INTPTR_FORMAT, (uintptr_t) this->_bcp);
tty->print_cr("locals: " INTPTR_FORMAT, (uintptr_t) this->_locals);
tty->print_cr("constants: " INTPTR_FORMAT, (uintptr_t) this->_constants);
{
// ResourceMark scopes the C-heap-free allocation of the method name string.
ResourceMark rm;
char *method_name = _method->name_and_sig_as_C_string();
tty->print_cr("method: " INTPTR_FORMAT "[ %s ]", (uintptr_t) this->_method, method_name);
}
tty->print_cr("mdx: " INTPTR_FORMAT, (uintptr_t) this->_mdx);
tty->print_cr("stack: " INTPTR_FORMAT, (uintptr_t) this->_stack);
tty->print_cr("msg: %s", C_msg(this->_msg));
tty->print_cr("result_to_call._callee: " INTPTR_FORMAT, (uintptr_t) this->_result._to_call._callee);
tty->print_cr("result_to_call._callee_entry_point: " INTPTR_FORMAT, (uintptr_t) this->_result._to_call._callee_entry_point);
tty->print_cr("result_to_call._bcp_advance: %d ", this->_result._to_call._bcp_advance);
tty->print_cr("osr._osr_buf: " INTPTR_FORMAT, (uintptr_t) this->_result._osr._osr_buf);
tty->print_cr("osr._osr_entry: " INTPTR_FORMAT, (uintptr_t) this->_result._osr._osr_entry);
tty->print_cr("prev_link: " INTPTR_FORMAT, (uintptr_t) this->_prev_link);
tty->print_cr("native_mirror: " INTPTR_FORMAT, (uintptr_t) p2i(this->_oop_temp));
tty->print_cr("stack_base: " INTPTR_FORMAT, (uintptr_t) this->_stack_base);
tty->print_cr("stack_limit: " INTPTR_FORMAT, (uintptr_t) this->_stack_limit);
tty->print_cr("monitor_base: " INTPTR_FORMAT, (uintptr_t) this->_monitor_base);
// The remaining fields only exist on some platforms.
#ifdef SPARC
tty->print_cr("last_Java_pc: " INTPTR_FORMAT, (uintptr_t) this->_last_Java_pc);
tty->print_cr("frame_bottom: " INTPTR_FORMAT, (uintptr_t) this->_frame_bottom);
tty->print_cr("&native_fresult: " INTPTR_FORMAT, (uintptr_t) &this->_native_fresult);
tty->print_cr("native_lresult: " INTPTR_FORMAT, (uintptr_t) this->_native_lresult);
#endif
#if !defined(ZERO)
tty->print_cr("last_Java_fp: " INTPTR_FORMAT, (uintptr_t) this->_last_Java_fp);
#endif // !ZERO
tty->print_cr("self_link: " INTPTR_FORMAT, (uintptr_t) this->_self_link);
}
extern "C" {
  // Debugger convenience hook: given the raw address of a
  // BytecodeInterpreter state, dump it to the tty.  C linkage keeps the
  // symbol name unmangled so it is easy to call from gdb/windbg.
  void PI(uintptr_t arg) {
    BytecodeInterpreter* istate = reinterpret_cast<BytecodeInterpreter*>(arg);
    istate->print();
  }
}
#endif // PRODUCT
#endif // JVMTI
#endif // CC_INTERP
// ==== file: C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeInterpreter.hpp ====
#ifndef SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_HPP
#define SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_HPP
#include "memory/allocation.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "runtime/basicLock.hpp"
#include "runtime/frame.hpp"
#include "runtime/globals.hpp"
#include "utilities/globalDefinitions.hpp"
#ifdef TARGET_ARCH_x86
# include "bytes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytes_ppc.hpp"
#endif
#ifdef CC_INTERP
// Grow the expression stack by 'count' Java slots (the stack grows toward
// lower addresses, hence the subtraction from topOfStack).
#define MORE_STACK(count) \
(topOfStack -= ((count) * Interpreter::stackElementWords))
// Overlay for a 64-bit Java value occupying two interpreter slots.
union VMJavaVal64 {
jlong l;
jdouble d;
uint32_t v[2];
};
typedef class BytecodeInterpreter* interpreterState;
// Payload of a 'call_method' message from the interpreter to the frame
// manager.
struct call_message {
class Method* _callee; // method to call during call_method request
address _callee_entry_point; // address to jump to for call_method request
int _bcp_advance; // size of the invoke bytecode operation
};
// Payload of a 'do_osr' request.
struct osr_message {
address _osr_buf; // the osr buffer
address _osr_entry; // the entry to the osr method
};
struct osr_result {
nmethod* nm; // osr nmethod
address return_addr; // osr blob return address
};
// One-of payload accompanying a frame-manager message; which member is
// active depends on the current 'messages' code.
union frame_manager_message {
call_message _to_call; // describes callee
osr_message _osr; // describes the osr
osr_result _osr_result; // result of OSR request
};
// Per-frame state of the C++ bytecode interpreter.  Instances live on the
// native stack (StackObj) and are linked from callee to caller via
// _prev_link; the frame manager and the interpreter loop communicate by
// writing a 'messages' code into _msg plus a payload into _result.
class BytecodeInterpreter : StackObj {
friend class SharedRuntime;
friend class AbstractInterpreterGenerator;
friend class CppInterpreterGenerator;
friend class InterpreterGenerator;
friend class InterpreterMacroAssembler;
friend class frame;
friend class VMStructs;
public:
// Protocol codes exchanged between the frame manager and the interpreter.
enum messages {
no_request = 0, // unused
initialize, // Perform one time interpreter initializations (assumes all switches set)
method_entry, // initial method entry to interpreter
method_resume, // frame manager response to return_from_method request (assuming a frame to resume)
deopt_resume, // returning from a native call into a deopted frame
deopt_resume2, // deopt resume as a result of a PopFrame
got_monitors, // frame manager response to more_monitors request
rethrow_exception, // unwinding and throwing exception
call_method, // request for new frame from interpreter, manager responds with method_entry
return_from_method, // request from interpreter to unwind, manager responds with method_continue
more_monitors, // need a new monitor
throwing_exception, // unwind stack and rethrow
popping_frame, // unwind call and retry call
do_osr, // request this invocation be OSR's
early_return // early return as commanded by jvmti
};
private:
JavaThread* _thread; // the vm's java thread pointer
address _bcp; // instruction pointer
intptr_t* _locals; // local variable pointer
ConstantPoolCache* _constants; // constant pool cache
Method* _method; // method being executed
DataLayout* _mdx; // compiler profiling data for current bytecode
intptr_t* _stack; // expression stack
messages _msg; // frame manager <-> interpreter message
frame_manager_message _result; // result to frame manager
interpreterState _prev_link; // previous interpreter state
oop _oop_temp; // mirror for interpreted native, null otherwise
intptr_t* _stack_base; // base of expression stack
intptr_t* _stack_limit; // limit of expression stack
BasicObjectLock* _monitor_base; // base of monitors on the native stack
public:
BytecodeInterpreter(messages msg);
// Fill in an interpreterState for a frame being materialized (e.g. during
// deoptimization); platform code computes the actual layout.
static void layout_interpreterState(interpreterState to_fill,
frame* caller,
frame* interpreter_frame,
Method* method,
intptr_t* locals,
intptr_t* stack,
intptr_t* stack_base,
intptr_t* monitor_base,
intptr_t* frame_bottom,
bool top_frame);
// Overlay for a 32-bit Java value in one interpreter slot.
union VMJavaVal32 {
jint i;
jfloat f;
class oopDesc* r;
uint32_t raw;
};
// Overlay for a 64-bit Java value in two interpreter slots.
union VMJavaVal64 {
jlong l;
jdouble d;
uint32_t v[2];
};
typedef union VMSlotVal32 {
VMJavaVal32 j; /* For "Java" values */
address a; /* a return created by jsr or jsr_w */
} VMSlotVal32;
union VMStackVal32 {
VMJavaVal32 j; /* For "Java" values */
VMSlotVal32 s; /* any value from a "slot" or locals[] */
};
// Simple field accessors.
inline JavaThread* thread() { return _thread; }
inline address bcp() { return _bcp; }
inline void set_bcp(address new_bcp) { _bcp = new_bcp; }
inline intptr_t* locals() { return _locals; }
inline ConstantPoolCache* constants() { return _constants; }
inline Method* method() { return _method; }
inline DataLayout* mdx() { return _mdx; }
inline void set_mdx(DataLayout *new_mdx) { _mdx = new_mdx; }
inline messages msg() { return _msg; }
inline void set_msg(messages new_msg) { _msg = new_msg; }
// Accessors for the message payload union (_result).
inline Method* callee() { return _result._to_call._callee; }
inline void set_callee(Method* new_callee) { _result._to_call._callee = new_callee; }
inline void set_callee_entry_point(address entry) { _result._to_call._callee_entry_point = entry; }
inline void set_osr_buf(address buf) { _result._osr._osr_buf = buf; }
inline void set_osr_entry(address entry) { _result._osr._osr_entry = entry; }
inline int bcp_advance() { return _result._to_call._bcp_advance; }
inline void set_bcp_advance(int count) { _result._to_call._bcp_advance = count; }
inline interpreterState prev() { return _prev_link; }
inline intptr_t* stack() { return _stack; }
inline void set_stack(intptr_t* new_stack) { _stack = new_stack; }
inline intptr_t* stack_base() { return _stack_base; }
inline intptr_t* stack_limit() { return _stack_limit; }
inline BasicObjectLock* monitor_base() { return _monitor_base; }
// Primitive operations used by the interpreter loop; platform inline files
// provide the definitions.
static jlong VMlongAdd(jlong op1, jlong op2);
static jlong VMlongAnd(jlong op1, jlong op2);
static jlong VMlongDiv(jlong op1, jlong op2);
static jlong VMlongMul(jlong op1, jlong op2);
static jlong VMlongOr (jlong op1, jlong op2);
static jlong VMlongSub(jlong op1, jlong op2);
static jlong VMlongXor(jlong op1, jlong op2);
static jlong VMlongRem(jlong op1, jlong op2);
static jlong VMlongUshr(jlong op1, jint op2);
static jlong VMlongShl (jlong op1, jint op2);
static jlong VMlongShr (jlong op1, jint op2);
static jlong VMlongNeg(jlong op);
static jlong VMlongNot(jlong op);
static int32_t VMlongLtz(jlong op); /* op <= 0 */
static int32_t VMlongGez(jlong op); /* op >= 0 */
static int32_t VMlongEqz(jlong op); /* op == 0 */
static int32_t VMlongEq(jlong op1, jlong op2); /* op1 == op2 */
static int32_t VMlongNe(jlong op1, jlong op2); /* op1 != op2 */
static int32_t VMlongGe(jlong op1, jlong op2); /* op1 >= op2 */
static int32_t VMlongLe(jlong op1, jlong op2); /* op1 <= op2 */
static int32_t VMlongLt(jlong op1, jlong op2); /* op1 < op2 */
static int32_t VMlongGt(jlong op1, jlong op2); /* op1 > op2 */
static int32_t VMlongCompare(jlong op1, jlong op2);
static jlong VMint2Long(jint val);
static jint VMlong2Int(jlong val);
static jfloat VMlong2Float(jlong val);
static jdouble VMlong2Double(jlong val);
static jfloat VMfloatAdd(jfloat op1, jfloat op2);
static jfloat VMfloatSub(jfloat op1, jfloat op2);
static jfloat VMfloatMul(jfloat op1, jfloat op2);
static jfloat VMfloatDiv(jfloat op1, jfloat op2);
static jfloat VMfloatRem(jfloat op1, jfloat op2);
static jfloat VMfloatNeg(jfloat op);
static int32_t VMfloatCompare(jfloat op1, jfloat op2,
int32_t direction);
static jdouble VMfloat2Double(jfloat op);
static jint VMdouble2Int(jdouble val);
static jfloat VMdouble2Float(jdouble val);
static jdouble VMint2Double(jint val);
static jdouble VMdoubleAdd(jdouble op1, jdouble op2);
static jdouble VMdoubleSub(jdouble op1, jdouble op2);
static jdouble VMdoubleDiv(jdouble op1, jdouble op2);
static jdouble VMdoubleMul(jdouble op1, jdouble op2);
static jdouble VMdoubleRem(jdouble op1, jdouble op2);
static jdouble VMdoubleNeg(jdouble op);
static int32_t VMdoubleCompare(jdouble op1, jdouble op2, int32_t direction);
static void VMmemCopy64(uint32_t to[2], const uint32_t from[2]);
static jint VMintAdd(jint op1, jint op2);
static jint VMintSub(jint op1, jint op2);
static jint VMintMul(jint op1, jint op2);
static jint VMintDiv(jint op1, jint op2);
static jint VMintRem(jint op1, jint op2);
static jint VMintAnd(jint op1, jint op2);
static jint VMintOr (jint op1, jint op2);
static jint VMintXor(jint op1, jint op2);
static juint VMintUshr(jint op, jint num);
static jint VMintShl (jint op, jint num);
static jint VMintShr (jint op, jint num);
static jint VMintNeg(jint op);
static jfloat VMint2Float(jint val);
static jbyte VMint2Byte(jint val);
static jchar VMint2Char(jint val);
static jshort VMint2Short(jint val);
// Expression-stack shuffling bytecodes.
static void dup(intptr_t *tos);
static void dup2(intptr_t *tos);
static void dup_x1(intptr_t *tos); /* insert top word two down */
static void dup_x2(intptr_t *tos); /* insert top word three down */
static void dup2_x1(intptr_t *tos); /* insert top 2 slots three down */
static void dup2_x2(intptr_t *tos); /* insert top 2 slots four down */
static void swap(intptr_t *tos); /* swap top two elements */
// Main interpreter loop entry points.
static void run(interpreterState istate);
static void runWithChecks(interpreterState istate);
static void End_Of_Interpreter(void);
// Expression-stack and local-variable slot accessors (see the .cpp file).
static address stack_slot(intptr_t *tos, int offset);
static jint stack_int(intptr_t *tos, int offset);
static jfloat stack_float(intptr_t *tos, int offset);
static oop stack_object(intptr_t *tos, int offset);
static jdouble stack_double(intptr_t *tos, int offset);
static jlong stack_long(intptr_t *tos, int offset);
static void set_stack_slot(intptr_t *tos, address value, int offset);
static void set_stack_int(intptr_t *tos, int value, int offset);
static void set_stack_float(intptr_t *tos, jfloat value, int offset);
static void set_stack_object(intptr_t *tos, oop value, int offset);
static void set_stack_double(intptr_t *tos, jdouble value, int offset);
static void set_stack_long(intptr_t *tos, jlong value, int offset);
static void set_stack_double_from_addr(intptr_t *tos, address addr, int offset);
static void set_stack_long_from_addr(intptr_t *tos, address addr, int offset);
static address locals_slot(intptr_t* locals, int offset);
static jint locals_int(intptr_t* locals, int offset);
static jfloat locals_float(intptr_t* locals, int offset);
static oop locals_object(intptr_t* locals, int offset);
static jdouble locals_double(intptr_t* locals, int offset);
static jlong locals_long(intptr_t* locals, int offset);
static address locals_long_at(intptr_t* locals, int offset);
static address locals_double_at(intptr_t* locals, int offset);
static void set_locals_slot(intptr_t *locals, address value, int offset);
static void set_locals_int(intptr_t *locals, jint value, int offset);
static void set_locals_float(intptr_t *locals, jfloat value, int offset);
static void set_locals_object(intptr_t *locals, oop value, int offset);
static void set_locals_double(intptr_t *locals, jdouble value, int offset);
static void set_locals_long(intptr_t *locals, jlong value, int offset);
static void set_locals_double_from_addr(intptr_t *locals,
address addr, int offset);
static void set_locals_long_from_addr(intptr_t *locals,
address addr, int offset);
static void astore(intptr_t* topOfStack, int stack_offset,
intptr_t* locals, int locals_offset);
static void copy_stack_slot(intptr_t *tos, int from_offset, int to_offset);
#ifndef PRODUCT
static const char* C_msg(BytecodeInterpreter::messages msg);
void print();
#endif // PRODUCT
// Platform-specific additions to the interpreter state.
#ifdef TARGET_ARCH_x86
# include "bytecodeInterpreter_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytecodeInterpreter_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytecodeInterpreter_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytecodeInterpreter_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytecodeInterpreter_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytecodeInterpreter_ppc.hpp"
#endif
}; // BytecodeInterpreter
#endif // CC_INTERP
#endif // SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_HPP
// ==== file: C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeInterpreter.inline.hpp ====
#ifndef SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_INLINE_HPP
#define SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_INLINE_HPP
#include "interpreter/bytecodeInterpreter.hpp"
#include "runtime/stubRoutines.hpp"
#ifdef CC_INTERP
// Debug-build sanity check: assert the operand is a valid oop (or null) and
// count the check.  Compiles away entirely in product builds.
#ifdef ASSERT
#define VERIFY_OOP(o_) \
if (VerifyOops) { \
assert((oop(o_))->is_oop_or_null(), "Not an oop!"); \
StubRoutines::_verify_oop_count++; \
}
#else
#define VERIFY_OOP(o)
#endif
#ifdef TARGET_ARCH_x86
# include "bytecodeInterpreter_x86.inline.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytecodeInterpreter_aarch64.inline.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytecodeInterpreter_sparc.inline.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytecodeInterpreter_zero.inline.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytecodeInterpreter_arm.inline.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytecodeInterpreter_ppc.inline.hpp"
#endif
#endif // CC_INTERP
#endif // SHARE_VM_INTERPRETER_BYTECODEINTERPRETER_INLINE_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeInterpreterProfiling.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODEINTERPRETERPROFILING_HPP
#define SHARE_VM_INTERPRETER_BYTECODEINTERPRETERPROFILING_HPP
#if defined(COMPILER2)
#define CC_INTERP_PROFILE
#endif
#ifdef CC_INTERP
#ifndef CC_INTERP_PROFILE
#define SET_MDX(mdx)
#define BI_PROFILE_GET_OR_CREATE_METHOD_DATA(exception_handler) \
if (ProfileInterpreter) { \
ShouldNotReachHere(); \
}
#define BI_PROFILE_ALIGN_TO_CURRENT_BCI()
#define BI_PROFILE_UPDATE_JUMP()
#define BI_PROFILE_UPDATE_BRANCH(is_taken)
#define BI_PROFILE_UPDATE_RET(bci)
#define BI_PROFILE_SUBTYPECHECK_FAILED(receiver)
#define BI_PROFILE_UPDATE_CHECKCAST(null_seen, receiver)
#define BI_PROFILE_UPDATE_INSTANCEOF(null_seen, receiver)
#define BI_PROFILE_UPDATE_CALL()
#define BI_PROFILE_UPDATE_FINALCALL()
#define BI_PROFILE_UPDATE_VIRTUALCALL(receiver)
#define BI_PROFILE_UPDATE_SWITCH(switch_index)
#else
#define MDX() (istate->mdx())
#define SET_MDX(mdx) \
if (TraceProfileInterpreter) { \
tty->print_cr("[%d] %4d " \
"mdx " PTR_FORMAT "(%d)" \
" " \
" \t-> " PTR_FORMAT "(%d)", \
(int) THREAD->osthread()->thread_id(), \
BCI(), \
p2i(MDX()), \
(MDX() == NULL \
? 0 \
: istate->method()->method_data()->dp_to_di((address)MDX())), \
p2i(mdx), \
istate->method()->method_data()->dp_to_di((address)mdx) \
); \
}; \
istate->set_mdx(mdx);
#ifdef PRODUCT
#define BI_PROFILE_PRINT_METHOD_DATA()
#else // PRODUCT
#define BI_PROFILE_PRINT_METHOD_DATA() \
{ \
ttyLocker ttyl; \
MethodData *md = istate->method()->method_data(); \
tty->cr(); \
tty->print("method data at mdx " PTR_FORMAT "(0) for", \
p2i(md->data_layout_at(md->bci_to_di(0)))); \
istate->method()->print_short_name(tty); \
tty->cr(); \
if (md != NULL) { \
md->print_data_on(tty); \
address mdx = (address) MDX(); \
if (mdx != NULL) { \
tty->print_cr("current mdx " PTR_FORMAT "(%d)", \
p2i(mdx), \
istate->method()->method_data()->dp_to_di(mdx)); \
} \
} else { \
tty->print_cr("no method data"); \
} \
}
#endif // PRODUCT
#define BI_PROFILE_GET_OR_CREATE_METHOD_DATA(exception_handler) \
if (ProfileInterpreter && MDX() == NULL) { \
MethodData *md = istate->method()->method_data(); \
if (md == NULL) { \
MethodCounters* mcs; \
GET_METHOD_COUNTERS(mcs); \
if (mcs->invocation_counter() \
->reached_ProfileLimit(mcs->backedge_counter())) { \
CALL_VM((InterpreterRuntime::profile_method(THREAD)), \
exception_handler); \
md = istate->method()->method_data(); \
if (md != NULL) { \
if (TraceProfileInterpreter) { \
BI_PROFILE_PRINT_METHOD_DATA(); \
} \
Method *m = istate->method(); \
int bci = m->bci_from(pc); \
jint di = md->bci_to_di(bci); \
SET_MDX(md->data_layout_at(di)); \
} \
} \
} else { \
if (TraceProfileInterpreter) { \
BI_PROFILE_PRINT_METHOD_DATA(); \
} \
SET_MDX(md->data_layout_at(md->bci_to_di(BCI()))); \
} \
}
#if defined(CC_INTERP_PROFILE_WITH_ASSERTIONS)
#define BI_PROFILE_CHECK_MDX() \
{ \
MethodData *md = istate->method()->method_data(); \
address mdx = (address) MDX(); \
address mdx2 = (address) md->data_layout_at(md->bci_to_di(BCI())); \
guarantee(md != NULL, "1"); \
guarantee(mdx != NULL, "2"); \
guarantee(mdx2 != NULL, "3"); \
if (mdx != mdx2) { \
BI_PROFILE_PRINT_METHOD_DATA(); \
fatal3("invalid mdx at bci %d:" \
" was " PTR_FORMAT \
" but expected " PTR_FORMAT, \
BCI(), \
mdx, \
mdx2); \
} \
}
#else
#define BI_PROFILE_CHECK_MDX()
#endif
#define BI_PROFILE_ALIGN_TO_CURRENT_BCI() \
if (ProfileInterpreter && MDX() != NULL) { \
MethodData *md = istate->method()->method_data(); \
SET_MDX(md->data_layout_at(md->bci_to_di(BCI()))); \
}
#define BI_PROFILE_UPDATE_JUMP() \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
JumpData::increment_taken_count_no_overflow(MDX()); \
mdo_last_branch_taken_count = JumpData::taken_count(MDX()); \
SET_MDX(JumpData::advance_taken(MDX())); \
}
#define BI_PROFILE_UPDATE_BRANCH(is_taken) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
if (is_taken) { \
BranchData::increment_taken_count_no_overflow(MDX()); \
mdo_last_branch_taken_count = BranchData::taken_count(MDX()); \
SET_MDX(BranchData::advance_taken(MDX())); \
} else { \
BranchData::increment_not_taken_count_no_overflow(MDX()); \
SET_MDX(BranchData::advance_not_taken(MDX())); \
} \
}
#define BI_PROFILE_UPDATE_RET(bci) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
MethodData *md = istate->method()->method_data(); \
CounterData::increment_count_no_overflow(MDX()); \
SET_MDX(RetData::advance(md, bci)); \
}
#define BI_PROFILE_SUBTYPECHECK_FAILED(receiver) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
ReceiverTypeData::increment_receiver_count_no_overflow(MDX(), receiver); \
ReceiverTypeData::decrement_count(MDX()); \
}
#define BI_PROFILE_UPDATE_CHECKCAST(null_seen, receiver) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
if (null_seen) { \
ReceiverTypeData::set_null_seen(MDX()); \
} else { \
ReceiverTypeData::increment_receiver_count_no_overflow(MDX(), receiver); \
} \
SET_MDX(ReceiverTypeData::advance(MDX())); \
}
#define BI_PROFILE_UPDATE_INSTANCEOF(null_seen, receiver) \
BI_PROFILE_UPDATE_CHECKCAST(null_seen, receiver)
#define BI_PROFILE_UPDATE_CALL() \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
CounterData::increment_count_no_overflow(MDX()); \
SET_MDX(CounterData::advance(MDX())); \
}
#define BI_PROFILE_UPDATE_FINALCALL() \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
VirtualCallData::increment_count_no_overflow(MDX()); \
SET_MDX(VirtualCallData::advance(MDX())); \
}
#define BI_PROFILE_UPDATE_VIRTUALCALL(receiver) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
VirtualCallData::increment_receiver_count_no_overflow(MDX(), receiver); \
SET_MDX(VirtualCallData::advance(MDX())); \
}
#define BI_PROFILE_UPDATE_SWITCH(switch_index) \
if (ProfileInterpreter && MDX() != NULL) { \
BI_PROFILE_CHECK_MDX(); \
MultiBranchData::increment_count_no_overflow(MDX(), switch_index); \
SET_MDX(MultiBranchData::advance(MDX(), switch_index)); \
}
#endif // CC_INTERP_PROFILE
#endif // CC_INTERP
#endif // SHARE_VM_INTERPRETER_BYTECODEINTERPRETERPROFILING_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodes.cpp
#include "precompiled.hpp"
#include "interpreter/bytecodes.hpp"
#include "memory/resourceArea.hpp"
#include "oops/method.hpp"
#ifdef TARGET_ARCH_x86
# include "bytes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytes_ppc.hpp"
#endif
#if defined(WIN32) && (defined(_MSC_VER) && (_MSC_VER < 1600))
#ifdef _M_AMD64
#pragma optimize ("", off)
#endif
#endif
// Lookup tables describing every bytecode; filled in by initialize().
bool            Bytecodes::_is_initialized = false;
const char*     Bytecodes::_name          [Bytecodes::number_of_codes];
BasicType       Bytecodes::_result_type   [Bytecodes::number_of_codes];
s_char          Bytecodes::_depth         [Bytecodes::number_of_codes];
u_char          Bytecodes::_lengths       [Bytecodes::number_of_codes];
Bytecodes::Code Bytecodes::_java_code     [Bytecodes::number_of_codes];
// Two pages of format flags: indices [0, 256) for the normal format,
// [256, 512) for the wide format (see def()).
// NOTE(review): defined here as u_short but declared as jchar in the header;
// both are 16-bit unsigned typedefs, but the two should agree — confirm
// against bytecodes.hpp.
u_short         Bytecodes::_flags         [(1<<BitsPerByte)*2];
#ifdef ASSERT
// Debug-only sanity check: bcp must point inside the method's bytecodes.
bool Bytecodes::check_method(const Method* method, address bcp) {
  return method->contains(bcp);
}
#endif
// Returns whether the interpreter is required to rewrite this bytecode when
// it encounters it. Post-check only: the caller has already established that
// the code is rewritable (can_rewrite(code)). A few rewritable codes are
// exempt because their rewrite is optional or performed elsewhere.
bool Bytecodes::check_must_rewrite(Bytecodes::Code code) {
  assert(can_rewrite(code), "post-check only");
  switch (code) {
  case Bytecodes::_aload_0:
    return false;
  case Bytecodes::_lookupswitch:
    return false; // the rewrite is not done by the interpreter
  case Bytecodes::_new:
    return false; // the rewrite is not always done
  default:
    // All other rewritable bytecodes must be rewritten; explicit default
    // documents the intent and silences -Wswitch on the enum.
    break;
  }
  return true;
}
// Convenience overload: map a bci within the method to its bytecode by
// delegating to the bcp-based code_at().
Bytecodes::Code Bytecodes::code_at(Method* method, int bci) {
  return code_at(method, method->bcp_from(bci));
}
// Returns the original bytecode at bcp, looking through a _breakpoint the
// debugger may have patched in. The owning method is required so the
// original bytecode can be recovered from its side table.
Bytecodes::Code Bytecodes::non_breakpoint_code_at(const Method* method, address bcp) {
  assert(method != NULL, "must have the method for breakpoint conversion");
  assert(method->contains(bcp), "must be valid bcp in method");
  return method->orig_bytecode_at(method->bci_from(bcp));
}
// Length in bytes of a variable-length bytecode starting at bcp.
// Returns -1 if computing the length would require reading at or past 'end'
// (end == NULL disables the bounds check), and 0 for codes that are not
// variable-length at all (use length_for() for those).
int Bytecodes::special_length_at(Bytecodes::Code code, address bcp, address end) {
  switch (code) {
  case _wide:
    if (end != NULL && bcp + 1 >= end) {
      return -1; // don't read past end of code buffer
    }
    // wide <op>: the total length is determined by the widened opcode.
    return wide_length_for(cast(*(bcp + 1)));
  case _tableswitch:
    { // Operands begin at the next 4-byte boundary after the opcode.
      address aligned_bcp = (address)round_to((intptr_t)bcp + 1, jintSize);
      if (end != NULL && aligned_bcp + 3*jintSize >= end) {
        return -1; // don't read past end of code buffer
      }
      // Layout: default, lo, hi words, then (hi - lo + 1) jump offsets.
      jlong lo = (jint)Bytes::get_Java_u4(aligned_bcp + 1*jintSize);
      jlong hi = (jint)Bytes::get_Java_u4(aligned_bcp + 2*jintSize);
      jlong len = (aligned_bcp - bcp) + (3 + hi - lo + 1)*jintSize;
      // Reject negative or int-overflowing lengths from malformed lo/hi.
      return (len > 0 && len == (int)len) ? len : -1;
    }
  case _lookupswitch:      // fall through
  case _fast_binaryswitch: // fall through
  case _fast_linearswitch:
    { address aligned_bcp = (address)round_to((intptr_t)bcp + 1, jintSize);
      if (end != NULL && aligned_bcp + 2*jintSize >= end) {
        return -1; // don't read past end of code buffer
      }
      // Layout: default word, npairs word, then npairs (match, offset) pairs.
      jlong npairs = (jint)Bytes::get_Java_u4(aligned_bcp + jintSize);
      jlong len = (aligned_bcp - bcp) + (2 + 2*npairs)*jintSize;
      return (len > 0 && len == (int)len) ? len : -1;
    }
  }
  return 0;
}
// Like special_length_at(), but tolerates a _breakpoint patched over the
// original bytecode: a breakpoint always occupies exactly one byte.
int Bytecodes::raw_special_length_at(address bcp, address end) {
  const Code raw_code = code_or_bp_at(bcp);
  return (raw_code == _breakpoint) ? 1
                                   : special_length_at(raw_code, bcp, end);
}
// Define a bytecode that is its own Java-level equivalent (i.e. it is not an
// internal rewritten form of some other bytecode).
void Bytecodes::def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap) {
  def(code, name, format, wide_format, result_type, depth, can_trap, code);
}
// Core definition routine: records all static attributes of a bytecode and
// derives its format flags. 'java_code' names the Java-level bytecode this
// code is a (possibly rewritten) form of; java_code != code marks the
// bytecode as rewritable. A NULL format means the code is undefined.
void Bytecodes::def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap, Code java_code) {
  assert(wide_format == NULL || format != NULL, "short form must exist if there's a wide form");
  int len  = (format      != NULL ? (int) strlen(format)      : 0);
  int wlen = (wide_format != NULL ? (int) strlen(wide_format) : 0);
  _name          [code] = name;
  _result_type   [code] = result_type;
  _depth         [code] = depth;
  // Pack both lengths into one byte: wide length in the high nibble,
  // normal length in the low nibble.
  _lengths       [code] = (wlen << 4) | (len & 0xF);
  _java_code     [code] = java_code;
  int bc_flags = 0;
  if (can_trap)          bc_flags |= _bc_can_trap;
  if (java_code != code) bc_flags |= _bc_can_rewrite;
  // First page of _flags holds the normal-format flags, second page the
  // wide-format flags.
  _flags[(u1)code+0*(1<<BitsPerByte)] = compute_flags(format,      bc_flags);
  _flags[(u1)code+1*(1<<BitsPerByte)] = compute_flags(wide_format, bc_flags);
  assert(is_defined(code)      == (format      != NULL), "");
  assert(wide_is_defined(code) == (wide_format != NULL), "");
  assert(length_for(code)      == len, "");
  assert(wide_length_for(code) == wlen, "");
}
// Derive the _fmt_* flag bits from a bytecode format string (see the Flags
// enum in bytecodes.hpp). Returns 0 for an undefined format (NULL);
// otherwise folds 'more_flags' into the result. Format grammar: optional
// leading 'b' (opcode byte, simple) or "wb" (wide prefix), then operand
// letters i/j/k/c/o — upper case meaning native byte order — where a run of
// the same letter gives the operand size in bytes; '_' is ignored padding;
// an empty format marks a variable-length bytecode.
int Bytecodes::compute_flags(const char* format, int more_flags) {
  if (format == NULL) return 0; // not even more_flags
  int flags = more_flags;
  const char* fp = format;
  switch (*fp) {
  case '\0':
    flags |= _fmt_not_simple; // but variable
    break;
  case 'b':
    flags |= _fmt_not_variable; // but simple
    ++fp; // skip 'b'
    break;
  case 'w':
    flags |= _fmt_not_variable | _fmt_not_simple;
    ++fp; // skip 'w'
    guarantee(*fp == 'b', "wide format must start with 'wb'");
    ++fp; // skip 'b'
    break;
  }
  int has_nbo = 0, has_jbo = 0, has_size = 0;
  for (;;) {
    int this_flag = 0;
    char fc = *fp++;
    switch (fc) {
    case '\0': // end of string
      assert(flags == (jchar)flags, "change _format_flags");
      return flags;
    case '_': continue; // ignore these
    case 'j': this_flag = _fmt_has_j; has_jbo = 1; break;
    case 'k': this_flag = _fmt_has_k; has_jbo = 1; break;
    case 'i': this_flag = _fmt_has_i; has_jbo = 1; break;
    case 'c': this_flag = _fmt_has_c; has_jbo = 1; break;
    case 'o': this_flag = _fmt_has_o; has_jbo = 1; break;
    case 'J': this_flag = _fmt_has_j; has_nbo = 1; break;
    case 'K': this_flag = _fmt_has_k; has_nbo = 1; break;
    case 'I': this_flag = _fmt_has_i; has_nbo = 1; break;
    case 'C': this_flag = _fmt_has_c; has_nbo = 1; break;
    case 'O': this_flag = _fmt_has_o; has_nbo = 1; break;
    default: guarantee(false, "bad char in format");
    }
    flags |= this_flag;
    // A format uses either Java byte order or native byte order, never both.
    guarantee(!(has_jbo && has_nbo), "mixed byte orders in format");
    if (has_nbo)
      flags |= _fmt_has_nbo;
    // Count the run of repeated letters to get the operand field size.
    int this_size = 1;
    if (*fp == fc) {
      this_size = 2;
      while (*++fp == fc) this_size++;
      switch (this_size) {
      case 2: flags |= _fmt_has_u2; break;
      case 4: flags |= _fmt_has_u4; break;
      default: guarantee(false, "bad rep count in format");
      }
    }
    guarantee(has_size == 0 ||                     // no field yet
              this_size == has_size ||             // same size
              this_size < has_size && *fp == '\0', // last field can be short
              "mixed field sizes in format");
    has_size = this_size;
  }
}
// One-time initialization of all bytecode attribute tables. Each def() call
// supplies: code, mnemonic, format, wide format, result type, net stack
// depth change, can_trap, and (optionally) the Java-level code it rewrites.
// Idempotent: subsequent calls return immediately.
void Bytecodes::initialize() {
  if (_is_initialized) return;
  assert(number_of_codes <= 256, "too many bytecodes");
  // Java bytecodes, in opcode order (see JVMS chapter 6).
  def(_nop                 , "nop"                 , "b"    , NULL    , T_VOID   ,  0, false);
  def(_aconst_null         , "aconst_null"         , "b"    , NULL    , T_OBJECT ,  1, false);
  def(_iconst_m1           , "iconst_m1"           , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_0            , "iconst_0"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_1            , "iconst_1"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_2            , "iconst_2"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_3            , "iconst_3"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_4            , "iconst_4"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_iconst_5            , "iconst_5"            , "b"    , NULL    , T_INT    ,  1, false);
  def(_lconst_0            , "lconst_0"            , "b"    , NULL    , T_LONG   ,  2, false);
  def(_lconst_1            , "lconst_1"            , "b"    , NULL    , T_LONG   ,  2, false);
  def(_fconst_0            , "fconst_0"            , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_fconst_1            , "fconst_1"            , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_fconst_2            , "fconst_2"            , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_dconst_0            , "dconst_0"            , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_dconst_1            , "dconst_1"            , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_bipush              , "bipush"              , "bc"   , NULL    , T_INT    ,  1, false);
  def(_sipush              , "sipush"              , "bcc"  , NULL    , T_INT    ,  1, false);
  def(_ldc                 , "ldc"                 , "bk"   , NULL    , T_ILLEGAL,  1, true );
  def(_ldc_w               , "ldc_w"               , "bkk"  , NULL    , T_ILLEGAL,  1, true );
  def(_ldc2_w              , "ldc2_w"              , "bkk"  , NULL    , T_ILLEGAL,  2, true );
  def(_iload               , "iload"               , "bi"   , "wbii"  , T_INT    ,  1, false);
  def(_lload               , "lload"               , "bi"   , "wbii"  , T_LONG   ,  2, false);
  def(_fload               , "fload"               , "bi"   , "wbii"  , T_FLOAT  ,  1, false);
  def(_dload               , "dload"               , "bi"   , "wbii"  , T_DOUBLE ,  2, false);
  def(_aload               , "aload"               , "bi"   , "wbii"  , T_OBJECT ,  1, false);
  def(_iload_0             , "iload_0"             , "b"    , NULL    , T_INT    ,  1, false);
  def(_iload_1             , "iload_1"             , "b"    , NULL    , T_INT    ,  1, false);
  def(_iload_2             , "iload_2"             , "b"    , NULL    , T_INT    ,  1, false);
  def(_iload_3             , "iload_3"             , "b"    , NULL    , T_INT    ,  1, false);
  def(_lload_0             , "lload_0"             , "b"    , NULL    , T_LONG   ,  2, false);
  def(_lload_1             , "lload_1"             , "b"    , NULL    , T_LONG   ,  2, false);
  def(_lload_2             , "lload_2"             , "b"    , NULL    , T_LONG   ,  2, false);
  def(_lload_3             , "lload_3"             , "b"    , NULL    , T_LONG   ,  2, false);
  def(_fload_0             , "fload_0"             , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_fload_1             , "fload_1"             , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_fload_2             , "fload_2"             , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_fload_3             , "fload_3"             , "b"    , NULL    , T_FLOAT  ,  1, false);
  def(_dload_0             , "dload_0"             , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_dload_1             , "dload_1"             , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_dload_2             , "dload_2"             , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_dload_3             , "dload_3"             , "b"    , NULL    , T_DOUBLE ,  2, false);
  def(_aload_0             , "aload_0"             , "b"    , NULL    , T_OBJECT ,  1, true ); // rewriting in interpreter
  def(_aload_1             , "aload_1"             , "b"    , NULL    , T_OBJECT ,  1, false);
  def(_aload_2             , "aload_2"             , "b"    , NULL    , T_OBJECT ,  1, false);
  def(_aload_3             , "aload_3"             , "b"    , NULL    , T_OBJECT ,  1, false);
  def(_iaload              , "iaload"              , "b"    , NULL    , T_INT    , -1, true );
  def(_laload              , "laload"              , "b"    , NULL    , T_LONG   ,  0, true );
  def(_faload              , "faload"              , "b"    , NULL    , T_FLOAT  , -1, true );
  def(_daload              , "daload"              , "b"    , NULL    , T_DOUBLE ,  0, true );
  def(_aaload              , "aaload"              , "b"    , NULL    , T_OBJECT , -1, true );
  def(_baload              , "baload"              , "b"    , NULL    , T_INT    , -1, true );
  def(_caload              , "caload"              , "b"    , NULL    , T_INT    , -1, true );
  def(_saload              , "saload"              , "b"    , NULL    , T_INT    , -1, true );
  def(_istore              , "istore"              , "bi"   , "wbii"  , T_VOID   , -1, false);
  def(_lstore              , "lstore"              , "bi"   , "wbii"  , T_VOID   , -2, false);
  def(_fstore              , "fstore"              , "bi"   , "wbii"  , T_VOID   , -1, false);
  def(_dstore              , "dstore"              , "bi"   , "wbii"  , T_VOID   , -2, false);
  def(_astore              , "astore"              , "bi"   , "wbii"  , T_VOID   , -1, false);
  def(_istore_0            , "istore_0"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_istore_1            , "istore_1"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_istore_2            , "istore_2"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_istore_3            , "istore_3"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_lstore_0            , "lstore_0"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_lstore_1            , "lstore_1"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_lstore_2            , "lstore_2"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_lstore_3            , "lstore_3"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_fstore_0            , "fstore_0"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_fstore_1            , "fstore_1"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_fstore_2            , "fstore_2"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_fstore_3            , "fstore_3"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_dstore_0            , "dstore_0"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_dstore_1            , "dstore_1"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_dstore_2            , "dstore_2"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_dstore_3            , "dstore_3"            , "b"    , NULL    , T_VOID   , -2, false);
  def(_astore_0            , "astore_0"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_astore_1            , "astore_1"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_astore_2            , "astore_2"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_astore_3            , "astore_3"            , "b"    , NULL    , T_VOID   , -1, false);
  def(_iastore             , "iastore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_lastore             , "lastore"             , "b"    , NULL    , T_VOID   , -4, true );
  def(_fastore             , "fastore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_dastore             , "dastore"             , "b"    , NULL    , T_VOID   , -4, true );
  def(_aastore             , "aastore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_bastore             , "bastore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_castore             , "castore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_sastore             , "sastore"             , "b"    , NULL    , T_VOID   , -3, true );
  def(_pop                 , "pop"                 , "b"    , NULL    , T_VOID   , -1, false);
  def(_pop2                , "pop2"                , "b"    , NULL    , T_VOID   , -2, false);
  def(_dup                 , "dup"                 , "b"    , NULL    , T_VOID   ,  1, false);
  def(_dup_x1              , "dup_x1"              , "b"    , NULL    , T_VOID   ,  1, false);
  def(_dup_x2              , "dup_x2"              , "b"    , NULL    , T_VOID   ,  1, false);
  def(_dup2                , "dup2"                , "b"    , NULL    , T_VOID   ,  2, false);
  def(_dup2_x1             , "dup2_x1"             , "b"    , NULL    , T_VOID   ,  2, false);
  def(_dup2_x2             , "dup2_x2"             , "b"    , NULL    , T_VOID   ,  2, false);
  def(_swap                , "swap"                , "b"    , NULL    , T_VOID   ,  0, false);
  def(_iadd                , "iadd"                , "b"    , NULL    , T_INT    , -1, false);
  def(_ladd                , "ladd"                , "b"    , NULL    , T_LONG   , -2, false);
  def(_fadd                , "fadd"                , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_dadd                , "dadd"                , "b"    , NULL    , T_DOUBLE , -2, false);
  def(_isub                , "isub"                , "b"    , NULL    , T_INT    , -1, false);
  def(_lsub                , "lsub"                , "b"    , NULL    , T_LONG   , -2, false);
  def(_fsub                , "fsub"                , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_dsub                , "dsub"                , "b"    , NULL    , T_DOUBLE , -2, false);
  def(_imul                , "imul"                , "b"    , NULL    , T_INT    , -1, false);
  def(_lmul                , "lmul"                , "b"    , NULL    , T_LONG   , -2, false);
  def(_fmul                , "fmul"                , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_dmul                , "dmul"                , "b"    , NULL    , T_DOUBLE , -2, false);
  def(_idiv                , "idiv"                , "b"    , NULL    , T_INT    , -1, true );
  def(_ldiv                , "ldiv"                , "b"    , NULL    , T_LONG   , -2, true );
  def(_fdiv                , "fdiv"                , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_ddiv                , "ddiv"                , "b"    , NULL    , T_DOUBLE , -2, false);
  def(_irem                , "irem"                , "b"    , NULL    , T_INT    , -1, true );
  def(_lrem                , "lrem"                , "b"    , NULL    , T_LONG   , -2, true );
  def(_frem                , "frem"                , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_drem                , "drem"                , "b"    , NULL    , T_DOUBLE , -2, false);
  def(_ineg                , "ineg"                , "b"    , NULL    , T_INT    ,  0, false);
  def(_lneg                , "lneg"                , "b"    , NULL    , T_LONG   ,  0, false);
  def(_fneg                , "fneg"                , "b"    , NULL    , T_FLOAT  ,  0, false);
  def(_dneg                , "dneg"                , "b"    , NULL    , T_DOUBLE ,  0, false);
  def(_ishl                , "ishl"                , "b"    , NULL    , T_INT    , -1, false);
  def(_lshl                , "lshl"                , "b"    , NULL    , T_LONG   , -1, false);
  def(_ishr                , "ishr"                , "b"    , NULL    , T_INT    , -1, false);
  def(_lshr                , "lshr"                , "b"    , NULL    , T_LONG   , -1, false);
  def(_iushr               , "iushr"               , "b"    , NULL    , T_INT    , -1, false);
  def(_lushr               , "lushr"               , "b"    , NULL    , T_LONG   , -1, false);
  def(_iand                , "iand"                , "b"    , NULL    , T_INT    , -1, false);
  def(_land                , "land"                , "b"    , NULL    , T_LONG   , -2, false);
  def(_ior                 , "ior"                 , "b"    , NULL    , T_INT    , -1, false);
  def(_lor                 , "lor"                 , "b"    , NULL    , T_LONG   , -2, false);
  def(_ixor                , "ixor"                , "b"    , NULL    , T_INT    , -1, false);
  def(_lxor                , "lxor"                , "b"    , NULL    , T_LONG   , -2, false);
  def(_iinc                , "iinc"                , "bic"  , "wbiicc", T_VOID   ,  0, false);
  def(_i2l                 , "i2l"                 , "b"    , NULL    , T_LONG   ,  1, false);
  def(_i2f                 , "i2f"                 , "b"    , NULL    , T_FLOAT  ,  0, false);
  def(_i2d                 , "i2d"                 , "b"    , NULL    , T_DOUBLE ,  1, false);
  def(_l2i                 , "l2i"                 , "b"    , NULL    , T_INT    , -1, false);
  def(_l2f                 , "l2f"                 , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_l2d                 , "l2d"                 , "b"    , NULL    , T_DOUBLE ,  0, false);
  def(_f2i                 , "f2i"                 , "b"    , NULL    , T_INT    ,  0, false);
  def(_f2l                 , "f2l"                 , "b"    , NULL    , T_LONG   ,  1, false);
  def(_f2d                 , "f2d"                 , "b"    , NULL    , T_DOUBLE ,  1, false);
  def(_d2i                 , "d2i"                 , "b"    , NULL    , T_INT    , -1, false);
  def(_d2l                 , "d2l"                 , "b"    , NULL    , T_LONG   ,  0, false);
  def(_d2f                 , "d2f"                 , "b"    , NULL    , T_FLOAT  , -1, false);
  def(_i2b                 , "i2b"                 , "b"    , NULL    , T_BYTE   ,  0, false);
  def(_i2c                 , "i2c"                 , "b"    , NULL    , T_CHAR   ,  0, false);
  def(_i2s                 , "i2s"                 , "b"    , NULL    , T_SHORT  ,  0, false);
  def(_lcmp                , "lcmp"                , "b"    , NULL    , T_VOID   , -3, false);
  def(_fcmpl               , "fcmpl"               , "b"    , NULL    , T_VOID   , -1, false);
  def(_fcmpg               , "fcmpg"               , "b"    , NULL    , T_VOID   , -1, false);
  def(_dcmpl               , "dcmpl"               , "b"    , NULL    , T_VOID   , -3, false);
  def(_dcmpg               , "dcmpg"               , "b"    , NULL    , T_VOID   , -3, false);
  def(_ifeq                , "ifeq"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_ifne                , "ifne"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_iflt                , "iflt"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_ifge                , "ifge"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_ifgt                , "ifgt"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_ifle                , "ifle"                , "boo"  , NULL    , T_VOID   , -1, false);
  def(_if_icmpeq           , "if_icmpeq"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_icmpne           , "if_icmpne"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_icmplt           , "if_icmplt"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_icmpge           , "if_icmpge"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_icmpgt           , "if_icmpgt"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_icmple           , "if_icmple"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_acmpeq           , "if_acmpeq"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_if_acmpne           , "if_acmpne"           , "boo"  , NULL    , T_VOID   , -2, false);
  def(_goto                , "goto"                , "boo"  , NULL    , T_VOID   ,  0, false);
  def(_jsr                 , "jsr"                 , "boo"  , NULL    , T_INT    ,  0, false);
  def(_ret                 , "ret"                 , "bi"   , "wbii"  , T_VOID   ,  0, false);
  def(_tableswitch         , "tableswitch"         , ""     , NULL    , T_VOID   , -1, false); // may have backward branches
  def(_lookupswitch        , "lookupswitch"        , ""     , NULL    , T_VOID   , -1, false); // rewriting in interpreter
  def(_ireturn             , "ireturn"             , "b"    , NULL    , T_INT    , -1, true);
  def(_lreturn             , "lreturn"             , "b"    , NULL    , T_LONG   , -2, true);
  def(_freturn             , "freturn"             , "b"    , NULL    , T_FLOAT  , -1, true);
  def(_dreturn             , "dreturn"             , "b"    , NULL    , T_DOUBLE , -2, true);
  def(_areturn             , "areturn"             , "b"    , NULL    , T_OBJECT , -1, true);
  def(_return              , "return"              , "b"    , NULL    , T_VOID   ,  0, true);
  def(_getstatic           , "getstatic"           , "bJJ"  , NULL    , T_ILLEGAL,  1, true );
  def(_putstatic           , "putstatic"           , "bJJ"  , NULL    , T_ILLEGAL, -1, true );
  def(_getfield            , "getfield"            , "bJJ"  , NULL    , T_ILLEGAL,  0, true );
  def(_putfield            , "putfield"            , "bJJ"  , NULL    , T_ILLEGAL, -2, true );
  def(_invokevirtual       , "invokevirtual"       , "bJJ"  , NULL    , T_ILLEGAL, -1, true);
  def(_invokespecial       , "invokespecial"       , "bJJ"  , NULL    , T_ILLEGAL, -1, true);
  def(_invokestatic        , "invokestatic"        , "bJJ"  , NULL    , T_ILLEGAL,  0, true);
  def(_invokeinterface     , "invokeinterface"     , "bJJ__", NULL    , T_ILLEGAL, -1, true);
  def(_invokedynamic       , "invokedynamic"       , "bJJJJ", NULL    , T_ILLEGAL,  0, true );
  def(_new                 , "new"                 , "bkk"  , NULL    , T_OBJECT ,  1, true );
  def(_newarray            , "newarray"            , "bc"   , NULL    , T_OBJECT ,  0, true );
  def(_anewarray           , "anewarray"           , "bkk"  , NULL    , T_OBJECT ,  0, true );
  def(_arraylength         , "arraylength"         , "b"    , NULL    , T_VOID   ,  0, true );
  def(_athrow              , "athrow"              , "b"    , NULL    , T_VOID   , -1, true );
  def(_checkcast           , "checkcast"           , "bkk"  , NULL    , T_OBJECT ,  0, true );
  def(_instanceof          , "instanceof"          , "bkk"  , NULL    , T_INT    ,  0, true );
  def(_monitorenter        , "monitorenter"        , "b"    , NULL    , T_VOID   , -1, true );
  def(_monitorexit         , "monitorexit"         , "b"    , NULL    , T_VOID   , -1, true );
  def(_wide                , "wide"                , ""     , NULL    , T_VOID   ,  0, false);
  def(_multianewarray      , "multianewarray"      , "bkkc" , NULL    , T_OBJECT ,  1, true );
  def(_ifnull              , "ifnull"              , "boo"  , NULL    , T_VOID   , -1, false);
  def(_ifnonnull           , "ifnonnull"           , "boo"  , NULL    , T_VOID   , -1, false);
  def(_goto_w              , "goto_w"              , "boooo", NULL    , T_VOID   ,  0, false);
  def(_jsr_w               , "jsr_w"               , "boooo", NULL    , T_INT    ,  0, false);
  def(_breakpoint          , "breakpoint"          , ""     , NULL    , T_VOID   ,  0, true);
  // JVM-internal bytecodes: rewritten fast forms; the last argument names
  // the Java-level bytecode each one stands for.
  def(_fast_agetfield      , "fast_agetfield"      , "bJJ"  , NULL    , T_OBJECT ,  0, true , _getfield );
  def(_fast_bgetfield      , "fast_bgetfield"      , "bJJ"  , NULL    , T_INT    ,  0, true , _getfield );
  def(_fast_cgetfield      , "fast_cgetfield"      , "bJJ"  , NULL    , T_CHAR   ,  0, true , _getfield );
  def(_fast_dgetfield      , "fast_dgetfield"      , "bJJ"  , NULL    , T_DOUBLE ,  0, true , _getfield );
  def(_fast_fgetfield      , "fast_fgetfield"      , "bJJ"  , NULL    , T_FLOAT  ,  0, true , _getfield );
  def(_fast_igetfield      , "fast_igetfield"      , "bJJ"  , NULL    , T_INT    ,  0, true , _getfield );
  def(_fast_lgetfield      , "fast_lgetfield"      , "bJJ"  , NULL    , T_LONG   ,  0, true , _getfield );
  def(_fast_sgetfield      , "fast_sgetfield"      , "bJJ"  , NULL    , T_SHORT  ,  0, true , _getfield );
  def(_fast_aputfield      , "fast_aputfield"      , "bJJ"  , NULL    , T_OBJECT ,  0, true , _putfield );
  def(_fast_bputfield      , "fast_bputfield"      , "bJJ"  , NULL    , T_INT    ,  0, true , _putfield );
  def(_fast_zputfield      , "fast_zputfield"      , "bJJ"  , NULL    , T_INT    ,  0, true , _putfield );
  def(_fast_cputfield      , "fast_cputfield"      , "bJJ"  , NULL    , T_CHAR   ,  0, true , _putfield );
  def(_fast_dputfield      , "fast_dputfield"      , "bJJ"  , NULL    , T_DOUBLE ,  0, true , _putfield );
  def(_fast_fputfield      , "fast_fputfield"      , "bJJ"  , NULL    , T_FLOAT  ,  0, true , _putfield );
  def(_fast_iputfield      , "fast_iputfield"      , "bJJ"  , NULL    , T_INT    ,  0, true , _putfield );
  def(_fast_lputfield      , "fast_lputfield"      , "bJJ"  , NULL    , T_LONG   ,  0, true , _putfield );
  def(_fast_sputfield      , "fast_sputfield"      , "bJJ"  , NULL    , T_SHORT  ,  0, true , _putfield );
  def(_fast_aload_0        , "fast_aload_0"        , "b"    , NULL    , T_OBJECT ,  1, true , _aload_0 );
  def(_fast_iaccess_0      , "fast_iaccess_0"      , "b_JJ" , NULL    , T_INT    ,  1, true , _aload_0 );
  def(_fast_aaccess_0      , "fast_aaccess_0"      , "b_JJ" , NULL    , T_OBJECT ,  1, true , _aload_0 );
  def(_fast_faccess_0      , "fast_faccess_0"      , "b_JJ" , NULL    , T_OBJECT ,  1, true , _aload_0 );
  def(_fast_iload          , "fast_iload"          , "bi"   , NULL    , T_INT    ,  1, false, _iload);
  def(_fast_iload2         , "fast_iload2"         , "bi_i" , NULL    , T_INT    ,  2, false, _iload);
  def(_fast_icaload        , "fast_icaload"        , "bi_"  , NULL    , T_INT    ,  0, false, _iload);
  def(_fast_invokevfinal   , "fast_invokevfinal"   , "bJJ"  , NULL    , T_ILLEGAL, -1, true, _invokevirtual );
  def(_fast_linearswitch   , "fast_linearswitch"   , ""     , NULL    , T_VOID   , -1, false, _lookupswitch );
  def(_fast_binaryswitch   , "fast_binaryswitch"   , ""     , NULL    , T_VOID   , -1, false, _lookupswitch );
  def(_return_register_finalizer , "return_register_finalizer" , "b"  , NULL    , T_VOID   ,  0, true, _return);
  def(_invokehandle        , "invokehandle"        , "bJJ"  , NULL    , T_ILLEGAL, -1, true, _invokevirtual );
  def(_fast_aldc           , "fast_aldc"           , "bj"   , NULL    , T_OBJECT,   1, true, _ldc );
  def(_fast_aldc_w         , "fast_aldc_w"         , "bJJ"  , NULL    , T_OBJECT,   1, true, _ldc_w );
  def(_shouldnotreachhere  , "_shouldnotreachhere" , "b"    , NULL    , T_VOID   ,  0, false);
  // Platform-specific bytecode definitions, if any.
  pd_initialize();
#ifdef ASSERT
  // Sanity: a rewritten form must be able to trap whenever its Java-level
  // equivalent can.
  { for (int i = 0; i < number_of_codes; i++) {
      if (is_defined(i)) {
        Code code = cast(i);
        Code java = java_code(code);
        if (can_trap(code) && !can_trap(java))
          fatal(err_msg("%s can trap => %s can trap, too", name(code),
                        name(java)));
      }
    }
  }
#endif
  _is_initialized = true;
}
// VM startup hook: build the bytecode attribute tables.
void bytecodes_init() {
  Bytecodes::initialize();
}
#ifdef _M_AMD64
#pragma optimize ("", on)
#endif
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodes.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODES_HPP
#define SHARE_VM_INTERPRETER_BYTECODES_HPP
#include "memory/allocation.hpp"
#include "utilities/top.hpp"
class Bytecodes: AllStatic {
public:
enum Code {
_illegal = -1,
_nop = 0, // 0x00
_aconst_null = 1, // 0x01
_iconst_m1 = 2, // 0x02
_iconst_0 = 3, // 0x03
_iconst_1 = 4, // 0x04
_iconst_2 = 5, // 0x05
_iconst_3 = 6, // 0x06
_iconst_4 = 7, // 0x07
_iconst_5 = 8, // 0x08
_lconst_0 = 9, // 0x09
_lconst_1 = 10, // 0x0a
_fconst_0 = 11, // 0x0b
_fconst_1 = 12, // 0x0c
_fconst_2 = 13, // 0x0d
_dconst_0 = 14, // 0x0e
_dconst_1 = 15, // 0x0f
_bipush = 16, // 0x10
_sipush = 17, // 0x11
_ldc = 18, // 0x12
_ldc_w = 19, // 0x13
_ldc2_w = 20, // 0x14
_iload = 21, // 0x15
_lload = 22, // 0x16
_fload = 23, // 0x17
_dload = 24, // 0x18
_aload = 25, // 0x19
_iload_0 = 26, // 0x1a
_iload_1 = 27, // 0x1b
_iload_2 = 28, // 0x1c
_iload_3 = 29, // 0x1d
_lload_0 = 30, // 0x1e
_lload_1 = 31, // 0x1f
_lload_2 = 32, // 0x20
_lload_3 = 33, // 0x21
_fload_0 = 34, // 0x22
_fload_1 = 35, // 0x23
_fload_2 = 36, // 0x24
_fload_3 = 37, // 0x25
_dload_0 = 38, // 0x26
_dload_1 = 39, // 0x27
_dload_2 = 40, // 0x28
_dload_3 = 41, // 0x29
_aload_0 = 42, // 0x2a
_aload_1 = 43, // 0x2b
_aload_2 = 44, // 0x2c
_aload_3 = 45, // 0x2d
_iaload = 46, // 0x2e
_laload = 47, // 0x2f
_faload = 48, // 0x30
_daload = 49, // 0x31
_aaload = 50, // 0x32
_baload = 51, // 0x33
_caload = 52, // 0x34
_saload = 53, // 0x35
_istore = 54, // 0x36
_lstore = 55, // 0x37
_fstore = 56, // 0x38
_dstore = 57, // 0x39
_astore = 58, // 0x3a
_istore_0 = 59, // 0x3b
_istore_1 = 60, // 0x3c
_istore_2 = 61, // 0x3d
_istore_3 = 62, // 0x3e
_lstore_0 = 63, // 0x3f
_lstore_1 = 64, // 0x40
_lstore_2 = 65, // 0x41
_lstore_3 = 66, // 0x42
_fstore_0 = 67, // 0x43
_fstore_1 = 68, // 0x44
_fstore_2 = 69, // 0x45
_fstore_3 = 70, // 0x46
_dstore_0 = 71, // 0x47
_dstore_1 = 72, // 0x48
_dstore_2 = 73, // 0x49
_dstore_3 = 74, // 0x4a
_astore_0 = 75, // 0x4b
_astore_1 = 76, // 0x4c
_astore_2 = 77, // 0x4d
_astore_3 = 78, // 0x4e
_iastore = 79, // 0x4f
_lastore = 80, // 0x50
_fastore = 81, // 0x51
_dastore = 82, // 0x52
_aastore = 83, // 0x53
_bastore = 84, // 0x54
_castore = 85, // 0x55
_sastore = 86, // 0x56
_pop = 87, // 0x57
_pop2 = 88, // 0x58
_dup = 89, // 0x59
_dup_x1 = 90, // 0x5a
_dup_x2 = 91, // 0x5b
_dup2 = 92, // 0x5c
_dup2_x1 = 93, // 0x5d
_dup2_x2 = 94, // 0x5e
_swap = 95, // 0x5f
_iadd = 96, // 0x60
_ladd = 97, // 0x61
_fadd = 98, // 0x62
_dadd = 99, // 0x63
_isub = 100, // 0x64
_lsub = 101, // 0x65
_fsub = 102, // 0x66
_dsub = 103, // 0x67
_imul = 104, // 0x68
_lmul = 105, // 0x69
_fmul = 106, // 0x6a
_dmul = 107, // 0x6b
_idiv = 108, // 0x6c
_ldiv = 109, // 0x6d
_fdiv = 110, // 0x6e
_ddiv = 111, // 0x6f
_irem = 112, // 0x70
_lrem = 113, // 0x71
_frem = 114, // 0x72
_drem = 115, // 0x73
_ineg = 116, // 0x74
_lneg = 117, // 0x75
_fneg = 118, // 0x76
_dneg = 119, // 0x77
_ishl = 120, // 0x78
_lshl = 121, // 0x79
_ishr = 122, // 0x7a
_lshr = 123, // 0x7b
_iushr = 124, // 0x7c
_lushr = 125, // 0x7d
_iand = 126, // 0x7e
_land = 127, // 0x7f
_ior = 128, // 0x80
_lor = 129, // 0x81
_ixor = 130, // 0x82
_lxor = 131, // 0x83
_iinc = 132, // 0x84
_i2l = 133, // 0x85
_i2f = 134, // 0x86
_i2d = 135, // 0x87
_l2i = 136, // 0x88
_l2f = 137, // 0x89
_l2d = 138, // 0x8a
_f2i = 139, // 0x8b
_f2l = 140, // 0x8c
_f2d = 141, // 0x8d
_d2i = 142, // 0x8e
_d2l = 143, // 0x8f
_d2f = 144, // 0x90
_i2b = 145, // 0x91
_i2c = 146, // 0x92
_i2s = 147, // 0x93
_lcmp = 148, // 0x94
_fcmpl = 149, // 0x95
_fcmpg = 150, // 0x96
_dcmpl = 151, // 0x97
_dcmpg = 152, // 0x98
_ifeq = 153, // 0x99
_ifne = 154, // 0x9a
_iflt = 155, // 0x9b
_ifge = 156, // 0x9c
_ifgt = 157, // 0x9d
_ifle = 158, // 0x9e
_if_icmpeq = 159, // 0x9f
_if_icmpne = 160, // 0xa0
_if_icmplt = 161, // 0xa1
_if_icmpge = 162, // 0xa2
_if_icmpgt = 163, // 0xa3
_if_icmple = 164, // 0xa4
_if_acmpeq = 165, // 0xa5
_if_acmpne = 166, // 0xa6
_goto = 167, // 0xa7
_jsr = 168, // 0xa8
_ret = 169, // 0xa9
_tableswitch = 170, // 0xaa
_lookupswitch = 171, // 0xab
_ireturn = 172, // 0xac
_lreturn = 173, // 0xad
_freturn = 174, // 0xae
_dreturn = 175, // 0xaf
_areturn = 176, // 0xb0
_return = 177, // 0xb1
_getstatic = 178, // 0xb2
_putstatic = 179, // 0xb3
_getfield = 180, // 0xb4
_putfield = 181, // 0xb5
_invokevirtual = 182, // 0xb6
_invokespecial = 183, // 0xb7
_invokestatic = 184, // 0xb8
_invokeinterface = 185, // 0xb9
_invokedynamic = 186, // 0xba // if EnableInvokeDynamic
_new = 187, // 0xbb
_newarray = 188, // 0xbc
_anewarray = 189, // 0xbd
_arraylength = 190, // 0xbe
_athrow = 191, // 0xbf
_checkcast = 192, // 0xc0
_instanceof = 193, // 0xc1
_monitorenter = 194, // 0xc2
_monitorexit = 195, // 0xc3
_wide = 196, // 0xc4
_multianewarray = 197, // 0xc5
_ifnull = 198, // 0xc6
_ifnonnull = 199, // 0xc7
_goto_w = 200, // 0xc8
_jsr_w = 201, // 0xc9
_breakpoint = 202, // 0xca
number_of_java_codes,
_fast_agetfield = number_of_java_codes,
_fast_bgetfield ,
_fast_cgetfield ,
_fast_dgetfield ,
_fast_fgetfield ,
_fast_igetfield ,
_fast_lgetfield ,
_fast_sgetfield ,
_fast_aputfield ,
_fast_bputfield ,
_fast_zputfield ,
_fast_cputfield ,
_fast_dputfield ,
_fast_fputfield ,
_fast_iputfield ,
_fast_lputfield ,
_fast_sputfield ,
_fast_aload_0 ,
_fast_iaccess_0 ,
_fast_aaccess_0 ,
_fast_faccess_0 ,
_fast_iload ,
_fast_iload2 ,
_fast_icaload ,
_fast_invokevfinal ,
_fast_linearswitch ,
_fast_binaryswitch ,
_fast_aldc ,
_fast_aldc_w ,
_return_register_finalizer ,
_invokehandle ,
_shouldnotreachhere, // For debugging
#ifdef TARGET_ARCH_x86
# include "bytecodes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytecodes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytecodes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytecodes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytecodes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytecodes_ppc.hpp"
#endif
number_of_codes
};
// Per-bytecode attribute bits stored in the _flags table.  The two _bc_*
// bits describe execution properties; the _fmt_* bits describe the operand
// format (which kinds of operand bytes follow the opcode byte).
enum Flags {
_bc_can_trap = 1<<0, // bytecode execution can trap or block
_bc_can_rewrite = 1<<1, // bytecode execution has an alternate form
_fmt_has_c = 1<<2, // constant, such as sipush "bcc"
_fmt_has_j = 1<<3, // constant pool cache index, such as getfield "bjj"
_fmt_has_k = 1<<4, // constant pool index, such as ldc "bk"
_fmt_has_i = 1<<5, // local index, such as iload
_fmt_has_o = 1<<6, // offset, such as ifeq
_fmt_has_nbo = 1<<7, // contains native-order field(s)
_fmt_has_u2 = 1<<8, // contains double-byte field(s)
_fmt_has_u4 = 1<<9, // contains quad-byte field
_fmt_not_variable = 1<<10, // not of variable length (simple or wide)
_fmt_not_simple = 1<<11, // either wide or variable length
// mask covering the contiguous run of _fmt_* bits above
_all_fmt_bits = (_fmt_not_simple*2 - _fmt_has_c),
// common format combinations; "b" stands for the opcode byte itself
_fmt_b = _fmt_not_variable,
_fmt_bc = _fmt_b | _fmt_has_c,
_fmt_bi = _fmt_b | _fmt_has_i,
_fmt_bkk = _fmt_b | _fmt_has_k | _fmt_has_u2,
_fmt_bJJ = _fmt_b | _fmt_has_j | _fmt_has_u2 | _fmt_has_nbo,
_fmt_bo2 = _fmt_b | _fmt_has_o | _fmt_has_u2,
_fmt_bo4 = _fmt_b | _fmt_has_o | _fmt_has_u4
};
private:
// Guards one-time population of the static tables below.
static bool _is_initialized;
// Per-bytecode attribute tables, indexed by Code and filled in by def().
static const char* _name [number_of_codes];
static BasicType _result_type [number_of_codes];
// Net stack effect per bytecode — presumably pushes minus pops; TODO confirm.
static s_char _depth [number_of_codes];
// Low nibble: normal instruction length; high nibble: wide length
// (see length_for()/wide_length_for() below).
static u_char _lengths [number_of_codes];
static Code _java_code [number_of_codes];
static jchar _flags [(1<<BitsPerByte)*2]; // all second page for wide formats
// Table initializers: record one bytecode's attributes (the second overload
// additionally records the Java-visible equivalent of a rewritten bytecode).
static void def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap);
static void def(Code code, const char* name, const char* format, const char* wide_format, BasicType result_type, int depth, bool can_trap, Code java_code);
static void pd_initialize(); // platform specific initialization
static Code pd_base_code_for(Code code); // platform specific base_code_for implementation
#ifdef ASSERT
static bool check_method(const Method* method, address bcp);
#endif
static bool check_must_rewrite(Bytecodes::Code bc);
public:
// Debug-only validity checks (no-ops in product builds via assert).
static void check (Code code) { assert(is_defined(code), err_msg("illegal code: %d", (int)code)); }
static void wide_check (Code code) { assert(wide_is_defined(code), err_msg("illegal code: %d", (int)code)); }
static Code cast (int code) { return (Code)code; }
// Fetches the code at bcp, decoding an active breakpoint back to the
// original bytecode (which requires a non-NULL method).
static Code code_at(const Method* method, address bcp) {
assert(method == NULL || check_method(method, bcp), "bcp must point into method");
Code code = cast(*bcp);
assert(code != _breakpoint || method != NULL, "need Method* to decode breakpoint");
return (code != _breakpoint) ? code : non_breakpoint_code_at(method, bcp);
}
static Code java_code_at(const Method* method, address bcp) {
return java_code(code_at(method, bcp));
}
// Raw fetch: may return _breakpoint instead of the original bytecode.
static Code code_or_bp_at(address bcp) { return (Code)cast(*bcp); }
static Code code_at(Method* method, int bci);
static bool is_active_breakpoint_at(address bcp) { return (Code)*bcp == _breakpoint; }
static Code non_breakpoint_code_at(const Method* method, address bcp);
// A code is defined iff it is in range and has a nonzero flags entry.
static bool is_defined (int code) { return 0 <= code && code < number_of_codes && flags(code, false) != 0; }
static bool wide_is_defined(int code) { return is_defined(code) && flags(code, true) != 0; }
// Table accessors (checked in debug builds unless noted).
static const char* name (Code code) { check(code); return _name [code]; }
static BasicType result_type (Code code) { check(code); return _result_type [code]; }
static int depth (Code code) { check(code); return _depth [code]; }
static int length_for (Code code) { /*no check*/ return _lengths [code] & 0xF; }
static int wide_length_for(Code code) { /*no check*/ return _lengths [code] >> 4; }
static bool can_trap (Code code) { check(code); return has_all_flags(code, _bc_can_trap, false); }
static Code java_code (Code code) { check(code); return _java_code [code]; }
static bool can_rewrite (Code code) { check(code); return has_all_flags(code, _bc_can_rewrite, false); }
static bool must_rewrite(Bytecodes::Code code) { return can_rewrite(code) && check_must_rewrite(code); }
static bool native_byte_order(Code code) { check(code); return has_all_flags(code, _fmt_has_nbo, false); }
static bool uses_cp_cache (Code code) { check(code); return has_all_flags(code, _fmt_has_j, false); }
// Length computation for variable-length bytecodes (tableswitch etc.).
static int special_length_at(Bytecodes::Code code, address bcp, address end = NULL);
static int special_length_at(Method* method, address bcp, address end = NULL) { return special_length_at(code_at(method, bcp), bcp, end); }
static int raw_special_length_at(address bcp, address end = NULL);
static int length_for_code_at(Bytecodes::Code code, address bcp) { int l = length_for(code); return l > 0 ? l : special_length_at(code, bcp); }
static int length_at (Method* method, address bcp) { return length_for_code_at(code_at(method, bcp), bcp); }
static int java_length_at (Method* method, address bcp) { return length_for_code_at(java_code_at(method, bcp), bcp); }
// Classification predicates over code ranges / individual codes.
static bool is_java_code (Code code) { return 0 <= code && code < number_of_java_codes; }
static bool is_aload (Code code) { return (code == _aload || code == _aload_0 || code == _aload_1
|| code == _aload_2 || code == _aload_3); }
static bool is_astore (Code code) { return (code == _astore || code == _astore_0 || code == _astore_1
|| code == _astore_2 || code == _astore_3); }
static bool is_store_into_local(Code code){ return (_istore <= code && code <= _astore_3); }
static bool is_const (Code code) { return (_aconst_null <= code && code <= _ldc2_w); }
static bool is_zero_const (Code code) { return (code == _aconst_null || code == _iconst_0
|| code == _fconst_0 || code == _dconst_0); }
static bool is_return (Code code) { return (_ireturn <= code && code <= _return); }
static bool is_invoke (Code code) { return (_invokevirtual <= code && code <= _invokedynamic); }
static bool has_receiver (Code code) { assert(is_invoke(code), ""); return code == _invokevirtual ||
code == _invokespecial ||
code == _invokeinterface; }
static bool has_optional_appendix(Code code) { return code == _invokedynamic || code == _invokehandle; }
static int compute_flags (const char* format, int more_flags = 0); // compute the flags
// Flags lookup: the second page of _flags holds the wide-format entries.
static int flags (int code, bool is_wide) {
assert(code == (u_char)code, "must be a byte");
return _flags[code + (is_wide ? (1<<BitsPerByte) : 0)];
}
static int format_bits (Code code, bool is_wide) { return flags(code, is_wide) & _all_fmt_bits; }
static bool has_all_flags (Code code, int test_flags, bool is_wide) {
return (flags(code, is_wide) & test_flags) == test_flags;
}
static void initialize ();
};
#endif // SHARE_VM_INTERPRETER_BYTECODES_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeStream.cpp
#include "precompiled.hpp"
#include "interpreter/bytecodeStream.hpp"
#include "interpreter/bytecodes.hpp"
// Slow path of raw_next(): handles wide-prefixed, variable-length and
// truncated bytecodes.  Records and returns the resulting raw code
// (_illegal when the instruction does not fit the iteration interval).
Bytecodes::Code RawBytecodeStream::raw_next_special(Bytecodes::Code code) {
  assert(!is_last_bytecode(), "should have been checked");
  address bcp = RawBytecodeStream::bcp();
  address end = method()->code_base() + end_bci();
  const int len = Bytecodes::raw_special_length_at(bcp, end);
  // Reject an instruction whose computed length is nonpositive, runs past
  // the end of the interval, or would move _next_bci backwards.
  if (len <= 0 || (_bci > _end_bci - len) || (_bci - len >= _next_bci)) {
    _raw_code = Bytecodes::_illegal;
    return _raw_code;
  }
  _next_bci += len;
  _is_wide = false;
  if (code == Bytecodes::_wide) {
    // The real opcode follows the wide prefix; make sure it is in bounds.
    if (bcp + 1 < end) {
      code = (Bytecodes::Code)bcp[1];
      _is_wide = true;
    } else {
      code = Bytecodes::_illegal;
    }
  }
  _raw_code = code;
  return code;
}
#ifdef ASSERT
// Debug check that the operand index being read has the expected byte size.
// A raw stream sees invokedynamic's index before rewriting, when it is
// still a 2-byte quantity; everything else is delegated to Bytecode.
void BaseBytecodeStream::assert_raw_index_size(int size) const {
  const bool raw_indy = is_raw() && raw_code() == Bytecodes::_invokedynamic;
  if (raw_indy) {
    assert(size == 2, "raw invokedynamic instruction has 2-byte index only");
  } else {
    bytecode().assert_index_size(size, raw_code(), is_wide());
  }
}
// Debug check that an accessor is used on the right kind of stream
// (raw vs. cooked).
void BaseBytecodeStream::assert_raw_stream(bool want_raw) const {
  if (!want_raw) {
    assert(!is_raw(), "this function only works on non-raw streams");
  } else {
    assert( is_raw(), "this function only works on raw streams");
  }
}
#endif //ASSERT
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeStream.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODESTREAM_HPP
#define SHARE_VM_INTERPRETER_BYTECODESTREAM_HPP
#include "interpreter/bytecode.hpp"
#include "memory/allocation.hpp"
#include "oops/method.hpp"
#include "runtime/handles.inline.hpp"
#ifdef TARGET_ARCH_x86
# include "bytes_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "bytes_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "bytes_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "bytes_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "bytes_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "bytes_ppc.hpp"
#endif
// Common state and accessors for iterating over a method's bytecodes within
// a [beg_bci, end_bci) interval.  Subclasses supply the advancing logic
// (RawBytecodeStream / BytecodeStream).
class BaseBytecodeStream: StackObj {
protected:
methodHandle _method; // read from method directly
int _bci; // bci if current bytecode
int _next_bci; // bci of next bytecode
int _end_bci; // bci after the current iteration interval
Bytecodes::Code _raw_code; // code as read from the stream (may be a fast/rewritten code)
bool _is_wide; // current bytecode was preceded by a wide prefix
bool _is_raw; // false in 'cooked' BytecodeStream
// Iteration starts over the whole method; subclass constructors set _is_raw.
BaseBytecodeStream(methodHandle method) : _method(method) {
set_interval(0, _method->code_size());
_is_raw = false;
}
public:
// Restrict iteration to [beg_bci, end_bci); both bounds must lie inside
// the method's code.
void set_interval(int beg_bci, int end_bci) {
assert(0 <= beg_bci && beg_bci <= method()->code_size(), "illegal beg_bci");
assert(0 <= end_bci && end_bci <= method()->code_size(), "illegal end_bci");
_bci = beg_bci;
_next_bci = beg_bci;
_end_bci = end_bci;
}
void set_start (int beg_bci) {
set_interval(beg_bci, _method->code_size());
}
bool is_raw() const { return _is_raw; }
methodHandle method() const { return _method; }
int bci() const { return _bci; }
int next_bci() const { return _next_bci; }
int end_bci() const { return _end_bci; }
Bytecodes::Code raw_code() const { return _raw_code; }
bool is_wide() const { return _is_wide; }
int instruction_size() const { return (_next_bci - _bci); }
bool is_last_bytecode() const { return _next_bci >= _end_bci; }
// Address of the current bytecode within the method's code array.
address bcp() const { return method()->code_base() + _bci; }
Bytecode bytecode() const { return Bytecode(_method(), bcp()); }
void set_next_bci(int bci) { assert(0 <= bci && bci <= method()->code_size(), "illegal bci"); _next_bci = bci; }
// Branch destinations: bci plus the signed 2- or 4-byte offset operand.
int dest() const { return bci() + bytecode().get_offset_s2(raw_code()); }
int dest_w() const { return bci() + bytecode().get_offset_s4(raw_code()); }
int get_index_u1() const { assert_raw_index_size(1); return *(jubyte*)(bcp()+1); }
protected:
void assert_raw_index_size(int size) const NOT_DEBUG_RETURN;
void assert_raw_stream(bool want_raw) const NOT_DEBUG_RETURN;
};
// Iterates over bytecodes exactly as they appear in the method, without
// decoding breakpoints back to the original bytecode.
class RawBytecodeStream: public BaseBytecodeStream {
public:
RawBytecodeStream(methodHandle method) : BaseBytecodeStream(method) {
_is_raw = true;
}
public:
// Advances to the next bytecode and returns its raw code.  Caller must
// check is_last_bytecode() first.
Bytecodes::Code raw_next() {
Bytecodes::Code code;
_bci = _next_bci;
assert(!is_last_bytecode(), "caller should check is_last_bytecode()");
address bcp = this->bcp();
code = Bytecodes::code_or_bp_at(bcp);
// Fast path: fixed-length bytecode that fits inside the interval.
int len = Bytecodes::length_for(code);
if (len > 0 && (_bci <= _end_bci - len)) {
assert(code != Bytecodes::_wide && code != Bytecodes::_tableswitch
&& code != Bytecodes::_lookupswitch, "can't be special bytecode");
_is_wide = false;
_next_bci += len;
if (_next_bci <= _bci) { // Check for integer overflow
code = Bytecodes::_illegal;
}
_raw_code = code;
return code;
} else {
// Slow path: wide, variable-length or truncated bytecode.
return raw_next_special(code);
}
}
Bytecodes::Code raw_next_special(Bytecodes::Code code);
// Operand index accessors (raw streams only).
int get_index() const { return (is_wide()) ? get_index_u2_raw(bcp() + 2) : get_index_u1(); }
int get_index_u2() const { assert(!is_wide(), ""); return get_index_u2_raw(bcp() + 1); }
private:
int get_index_u2_raw(address p) const {
assert_raw_index_size(2); assert_raw_stream(true);
return Bytes::get_Java_u2(p);
}
};
// 'Cooked' bytecode iterator: breakpoints and rewritten (fast) bytecodes are
// mapped back to their Java-visible equivalents.
class BytecodeStream: public BaseBytecodeStream {
Bytecodes::Code _code; // Java-normalized code of the current bytecode
public:
BytecodeStream(methodHandle method) : BaseBytecodeStream(method) { }
// Advances to the next bytecode; returns _illegal at the end of the
// interval or for a malformed instruction.
Bytecodes::Code next() {
Bytecodes::Code raw_code, code;
_bci = _next_bci;
if (is_last_bytecode()) {
raw_code = code = Bytecodes::_illegal;
} else {
address bcp = this->bcp();
raw_code = Bytecodes::code_at(_method(), bcp);
code = Bytecodes::java_code(raw_code);
// length_for() is 0 for variable-length bytecodes; fall back to
// computing the length from the operands.
int len = Bytecodes::length_for(code);
if (len == 0) len = Bytecodes::length_at(_method(), bcp);
// Reject nonpositive lengths, lengths past the interval end, and
// lengths that would move _next_bci backwards.
if (len <= 0 || (_bci > _end_bci - len) || (_bci - len >= _next_bci)) {
raw_code = code = Bytecodes::_illegal;
} else {
_next_bci += len;
assert(_bci < _next_bci, "length must be > 0");
_is_wide = false;
if (code == Bytecodes::_wide) {
raw_code = (Bytecodes::Code)bcp[1];
code = raw_code; // wide BCs are always Java-normal
_is_wide = true;
}
assert(Bytecodes::is_java_code(code), "sanity check");
}
}
_raw_code = raw_code;
_code = code;
return _code;
}
bool is_active_breakpoint() const { return Bytecodes::is_active_breakpoint_at(bcp()); }
Bytecodes::Code code() const { return _code; }
// Operand index accessors (non-raw streams only).
int get_index() const { return is_wide() ? bytecode().get_index_u2(raw_code(), true) : get_index_u1(); }
int get_index_u2() const { assert_raw_stream(false);
return bytecode().get_index_u2(raw_code(), false); }
int get_index_u2_cpcache() const { assert_raw_stream(false);
return bytecode().get_index_u2_cpcache(raw_code()); }
int get_index_u4() const { assert_raw_stream(false);
return bytecode().get_index_u4(raw_code()); }
bool has_index_u4() const { return bytecode().has_index_u4(raw_code()); }
};
#endif // SHARE_VM_INTERPRETER_BYTECODESTREAM_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeTracer.cpp
#include "precompiled.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeTracer.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "memory/resourceArea.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/timer.hpp"
#ifndef PRODUCT
// Standard closure for BytecodeTracer: prints one bytecode per line together
// with its decoded operands.  Carries state between calls (_current_method,
// _is_wide), so callers serialize invocations (BytecodeTracer takes the tty
// lock).
class BytecodePrinter: public BytecodeClosure {
 private:
  Method* _current_method;  // last method traced; used to emit a header once per method
  bool _is_wide;            // previous traced bytecode was a wide prefix
  Bytecodes::Code _code;    // bytecode currently being printed
  address _next_pc;         // current decoding position

  // Operand readers: each consumes bytes at _next_pc and advances it.
  void align() { _next_pc = (address)round_to((intptr_t)_next_pc, sizeof(jint)); }
  int get_byte() { return *(jbyte*) _next_pc++; } // signed
  short get_short() { short i=Bytes::get_Java_u2(_next_pc); _next_pc+=2; return i; }
  int get_int() { int i=Bytes::get_Java_u4(_next_pc); _next_pc+=4; return i; }
  int get_index_u1() { return *(address)_next_pc++; }
  int get_index_u2() { int i=Bytes::get_Java_u2(_next_pc); _next_pc+=2; return i; }
  int get_index_u1_cpcache() { return get_index_u1() + ConstantPool::CPCACHE_INDEX_TAG; }
  int get_index_u2_cpcache() { int i=Bytes::get_native_u2(_next_pc); _next_pc+=2; return i + ConstantPool::CPCACHE_INDEX_TAG; }
  int get_index_u4() { int i=Bytes::get_native_u4(_next_pc); _next_pc+=4; return i; }
  int get_index_special() { return (is_wide()) ? get_index_u2() : get_index_u1(); }
  Method* method() { return _current_method; }
  bool is_wide() { return _is_wide; }
  Bytecodes::Code raw_code() { return Bytecodes::Code(_code); }

  // Index validation and operand pretty-printers (defined out of line).
  bool check_index(int i, int& cp_index, outputStream* st = tty);
  bool check_cp_cache_index(int i, int& cp_index, outputStream* st = tty);
  bool check_obj_index(int i, int& cp_index, outputStream* st = tty);
  bool check_invokedynamic_index(int i, int& cp_index, outputStream* st = tty);
  void print_constant(int i, outputStream* st = tty);
  void print_field_or_method(int i, outputStream* st = tty);
  void print_field_or_method(int orig_i, int i, outputStream* st = tty);
  void print_attributes(int bci, outputStream* st = tty);
  void bytecode_epilog(int bci, outputStream* st = tty);

 public:
  BytecodePrinter() {
    // Fix: _current_method was left uninitialized, yet trace() compares it
    // against the incoming method before the first assignment.
    _current_method = NULL;
    _is_wide = false;
    _code = Bytecodes::_illegal;
  }

  // Traces one executed bytecode, including top-of-stack values (tos/tos2)
  // when Verbose is set.  Used by the interpreter's TraceBytecodes support.
  void trace(methodHandle method, address bcp, uintptr_t tos, uintptr_t tos2, outputStream* st) {
    ResourceMark rm;
    if (_current_method != method()) {
      // Method changed: print a header line identifying the new method.
      st->cr();
      st->print("[%ld] ", (long) Thread::current()->osthread()->thread_id());
      method->print_name(st);
      st->cr();
      _current_method = method();
    }
    Bytecodes::Code code;
    if (is_wide()) {
      // The previous bytecode was a wide prefix; the real opcode follows it.
      code = Bytecodes::code_at(method(), bcp+1);
    } else {
      code = Bytecodes::code_at(method(), bcp);
    }
    _code = code;
    int bci = bcp - method->code_base();
    st->print("[%ld] ", (long) Thread::current()->osthread()->thread_id());
    if (Verbose) {
      st->print("%8d %4d " INTPTR_FORMAT " " INTPTR_FORMAT " %s",
                BytecodeCounter::counter_value(), bci, tos, tos2, Bytecodes::name(code));
    } else {
      st->print("%8d %4d %s",
                BytecodeCounter::counter_value(), bci, Bytecodes::name(code));
    }
    _next_pc = is_wide() ? bcp+2 : bcp+1;
    print_attributes(bci);
    // Remember whether the next traced bytecode is modified by a wide prefix.
    _is_wide = (code == Bytecodes::_wide);
    _code = Bytecodes::_illegal;
  }

  // Traces one bytecode when printing a method statically (no stack values).
  void trace(methodHandle method, address bcp, outputStream* st) {
    _current_method = method();
    ResourceMark rm;
    Bytecodes::Code code = Bytecodes::code_at(method(), bcp);
    // Here the wide prefix and its target are printed as a single "xxx_w" line.
    _is_wide = (code == Bytecodes::_wide);
    if (is_wide()) {
      code = Bytecodes::code_at(method(), bcp+1);
    }
    _code = code;
    int bci = bcp - method->code_base();
    if (is_wide()) {
      st->print("%d %s_w", bci, Bytecodes::name(code));
    } else {
      st->print("%d %s", bci, Bytecodes::name(code));
    }
    _next_pc = is_wide() ? bcp+2 : bcp+1;
    print_attributes(bci, st);
    bytecode_epilog(bci, st);
  }
};
// The closure actually used for tracing; settable via set_closure().
BytecodeClosure* BytecodeTracer::_closure = NULL;
// Single shared printing closure instance.
static BytecodePrinter std_closure;
BytecodeClosure* BytecodeTracer::std_closure() {
return &::std_closure;
}
// Traces one executed bytecode once the executed-bytecode count reaches
// TraceBytecodesAt; the tty lock keeps multi-line output coherent.
void BytecodeTracer::trace(methodHandle method, address bcp, uintptr_t tos, uintptr_t tos2, outputStream* st) {
if (TraceBytecodes && BytecodeCounter::counter_value() >= TraceBytecodesAt) {
ttyLocker ttyl; // 5065316: keep the following output coherent
_closure->trace(method, bcp, tos, tos2, st);
}
}
// Unconditionally traces the bytecode at bcp (static method printing).
void BytecodeTracer::trace(methodHandle method, address bcp, outputStream* st) {
ttyLocker ttyl; // 5065316: keep the following output coherent
_closure->trace(method, bcp, st);
}
// Prints sym on st with a leading space; symbols that do not fit the local
// buffer are truncated and suffixed with "...[total_length]".
void print_symbol(Symbol* sym, outputStream* st) {
  char buf[40];
  const int len = sym->utf8_length();
  if (len < (int)sizeof(buf)) {
    st->print(" ");
    sym->print_on(st);
    st->cr();
  } else {
    st->print_cr(" %s...[%d]", sym->as_C_string(buf, sizeof(buf)), len);
  }
}
// Prints an oop: NULL, a (possibly truncated) String's contents, or the
// raw pointer for other object kinds.
void print_oop(oop value, outputStream* st) {
if (value == NULL) {
st->print_cr(" NULL");
} else if (java_lang_String::is_instance(value)) {
char buf[40];
int len = java_lang_String::utf8_length(value);
java_lang_String::as_utf8_string(value, buf, sizeof(buf));
// Truncated strings get a "...[total_length]" suffix.
if (len >= (int)sizeof(buf)) {
st->print_cr(" %s...[%d]", buf, len);
} else {
st->print_cr(" %s", buf);
}
} else {
st->print_cr(" " PTR_FORMAT, p2i((void *)value));
}
}
// Validates operand index i for the current bytecode, translating cache- or
// object-biased indices into a plain constant-pool index returned through
// cp_index.  Returns false (after printing a diagnostic) on a bad index.
// Fix: removed the unused local 'ConstantPoolCache* cache'.
bool BytecodePrinter::check_index(int i, int& cp_index, outputStream* st) {
  ConstantPool* constants = method()->constants();
  int ilimit = constants->length();
  Bytecodes::Code code = raw_code();
  if (Bytecodes::uses_cp_cache(code)) {
    bool okay = true;
    switch (code) {
    case Bytecodes::_fast_aldc:
    case Bytecodes::_fast_aldc_w:
      okay = check_obj_index(i, cp_index, st);
      break;
    case Bytecodes::_invokedynamic:
      okay = check_invokedynamic_index(i, cp_index, st);
      break;
    default:
      okay = check_cp_cache_index(i, cp_index, st);
      break;
    }
    if (!okay) return false;
  }
  // For non-cached bytecodes cp_index still holds the caller-supplied value
  // (callers initialize it to i before calling).
  if (cp_index >= 0 && cp_index < ilimit) {
    if (WizardMode) st->print(" cp[%d]", cp_index);
    return true;
  }
  st->print_cr(" CP[%d] not in CP", cp_index);
  return false;
}
// Translates a CPCACHE_INDEX_TAG-biased constant-pool-cache index i into the
// underlying constant-pool index (via cp_index), printing a diagnostic and
// returning false when the index is out of range.  If no cache has been
// built yet, i is already a plain cp index and is passed through.
// Fix: removed the unused locals 'ilimit' and 'code'.
bool BytecodePrinter::check_cp_cache_index(int i, int& cp_index, outputStream* st) {
  ConstantPool* constants = method()->constants();
  ConstantPoolCache* cache = constants->cache();
  if (cache == NULL) {
    cp_index = i;
    return true;
  }
  // Derive the number of cache entries from the cache's total size.
  size_t size = cache->size() * HeapWordSize;
  size -= sizeof(ConstantPoolCache);
  size /= sizeof(ConstantPoolCacheEntry);
  int climit = (int) size;
#ifdef ASSERT
  {
    // In debug builds cache indices carry a bias that must be stripped.
    const int CPCACHE_INDEX_TAG = ConstantPool::CPCACHE_INDEX_TAG;
    if (i >= CPCACHE_INDEX_TAG && i < climit + CPCACHE_INDEX_TAG) {
      i -= CPCACHE_INDEX_TAG;
    } else {
      st->print_cr(" CP[%d] missing bias?", i);
      return false;
    }
  }
#endif //ASSERT
  if (i >= 0 && i < climit) {
    cp_index = cache->entry_at(i)->constant_pool_index();
  } else {
    st->print_cr("%d not in CP[*]?", i);
    return false;
  }
  return true;
}
// Translates a CPCACHE_INDEX_TAG-biased resolved-references index into the
// originating constant-pool index (via cp_index); returns false and prints
// a diagnostic when the index is out of range.
bool BytecodePrinter::check_obj_index(int i, int& cp_index, outputStream* st) {
  ConstantPool* constants = method()->constants();
  const int obj_index = i - ConstantPool::CPCACHE_INDEX_TAG;
  if (obj_index < 0 || obj_index >= constants->resolved_references()->length()) {
    st->print_cr("%d not in OBJ[*]?", obj_index);
    return false;
  }
  cp_index = constants->object_to_cp_index(obj_index);
  return true;
}
bool BytecodePrinter::check_invokedynamic_index(int i, int& cp_index, outputStream* st) {
ConstantPool* constants = method()->constants();
assert(ConstantPool::is_invokedynamic_index(i), "not secondary index?");
i = ConstantPool::decode_invokedynamic_index(i) + ConstantPool::CPCACHE_INDEX_TAG;
return check_cp_cache_index(i, cp_index, st);
}
// Prints the constant-pool entry referenced by operand index i, after
// validating/translating the index.  Output format depends on the tag.
void BytecodePrinter::print_constant(int i, outputStream* st) {
int orig_i = i;
if (!check_index(orig_i, i, st)) return;
ConstantPool* constants = method()->constants();
constantTag tag = constants->tag_at(i);
if (tag.is_int()) {
st->print_cr(" " INT32_FORMAT, constants->int_at(i));
} else if (tag.is_long()) {
st->print_cr(" " INT64_FORMAT, (int64_t)(constants->long_at(i)));
} else if (tag.is_float()) {
st->print_cr(" %f", constants->float_at(i));
} else if (tag.is_double()) {
st->print_cr(" %f", constants->double_at(i));
} else if (tag.is_string()) {
const char* string = constants->string_at_noresolve(i);
st->print_cr(" %s", string);
} else if (tag.is_klass()) {
st->print_cr(" %s", constants->resolved_klass_at(i)->external_name());
} else if (tag.is_unresolved_klass()) {
st->print_cr(" <unresolved klass at %d>", i);
} else if (tag.is_method_type()) {
int i2 = constants->method_type_index_at(i);
st->print(" <MethodType> %d", i2);
print_symbol(constants->symbol_at(i2), st);
} else if (tag.is_method_handle()) {
int kind = constants->method_handle_ref_kind_at(i);
int i2 = constants->method_handle_index_at(i);
st->print(" <MethodHandle of kind %d index at %d>", kind, i2);
// Negative orig_i signals the nested member reference of a MethodHandle.
print_field_or_method(-i, i2, st);
} else {
st->print_cr(" bad tag=%d at %d", tag.value(), i);
}
}
// Validates operand index i, then prints the referenced field or method.
void BytecodePrinter::print_field_or_method(int i, outputStream* st) {
  const int orig_i = i;
  if (check_index(orig_i, i, st)) {
    print_field_or_method(orig_i, i, st);
  }
}
// Prints the member reference at (translated) constant-pool index i as
// "<klass.name/signature>"; NameAndType/InvokeDynamic entries have no klass
// component, and invokedynamic entries additionally print their bootstrap
// method index.
void BytecodePrinter::print_field_or_method(int orig_i, int i, outputStream* st) {
ConstantPool* constants = method()->constants();
constantTag tag = constants->tag_at(i);
bool has_klass = true;
switch (tag.value()) {
case JVM_CONSTANT_InterfaceMethodref:
case JVM_CONSTANT_Methodref:
case JVM_CONSTANT_Fieldref:
break;
case JVM_CONSTANT_NameAndType:
case JVM_CONSTANT_InvokeDynamic:
has_klass = false;
break;
default:
st->print_cr(" bad tag=%d at %d", tag.value(), i);
return;
}
Symbol* name = constants->uncached_name_ref_at(i);
Symbol* signature = constants->uncached_signature_ref_at(i);
// Fields print name/signature separated by '/'; methods concatenate them.
const char* sep = (tag.is_field() ? "/" : "");
if (has_klass) {
Symbol* klass = constants->klass_name_at(constants->uncached_klass_ref_index_at(i));
st->print_cr(" %d <%s.%s%s%s> ", i, klass->as_C_string(), name->as_C_string(), sep, signature->as_C_string());
} else {
if (tag.is_invoke_dynamic()) {
int bsm = constants->invoke_dynamic_bootstrap_method_ref_index_at(i);
st->print(" bsm=%d", bsm);
}
st->print_cr(" %d <%s%s%s>", i, name->as_C_string(), sep, signature->as_C_string());
}
}
PRAGMA_FORMAT_NONLITERAL_IGNORED_EXTERNAL
// Prints the operands of the current bytecode (the bytes following the
// opcode).  _next_pc has been positioned just past the opcode (and past the
// wide prefix, if any) by the caller.
// Fixes: 'first' in the tableswitch case was declared int but used as a
// bool (the lookupswitch case already used bool); the _invokeinterface case
// stored the count byte in an unused local 'n'.
void BytecodePrinter::print_attributes(int bci, outputStream* st) {
  Bytecodes::Code code = Bytecodes::java_code(raw_code());
  // A length-1 bytecode has no operands.
  if (Bytecodes::length_for(code) == 1) {
    st->cr();
    return;
  }
  switch(code) {
  case Bytecodes::_bipush:
    st->print_cr(" " INT32_FORMAT, get_byte());
    break;
  case Bytecodes::_sipush:
    st->print_cr(" " INT32_FORMAT, get_short());
    break;
  case Bytecodes::_ldc:
    // Rewritten ldc variants carry a cp-cache-biased index.
    if (Bytecodes::uses_cp_cache(raw_code())) {
      print_constant(get_index_u1_cpcache(), st);
    } else {
      print_constant(get_index_u1(), st);
    }
    break;
  case Bytecodes::_ldc_w:
  case Bytecodes::_ldc2_w:
    if (Bytecodes::uses_cp_cache(raw_code())) {
      print_constant(get_index_u2_cpcache(), st);
    } else {
      print_constant(get_index_u2(), st);
    }
    break;
  case Bytecodes::_iload:
  case Bytecodes::_lload:
  case Bytecodes::_fload:
  case Bytecodes::_dload:
  case Bytecodes::_aload:
  case Bytecodes::_istore:
  case Bytecodes::_lstore:
  case Bytecodes::_fstore:
  case Bytecodes::_dstore:
  case Bytecodes::_astore:
    st->print_cr(" #%d", get_index_special());
    break;
  case Bytecodes::_iinc:
    { int index = get_index_special();
      jint offset = is_wide() ? get_short(): get_byte();
      st->print_cr(" #%d " INT32_FORMAT, index, offset);
    }
    break;
  case Bytecodes::_newarray: {
      BasicType atype = (BasicType)get_index_u1();
      const char* str = type2name(atype);
      if (str == NULL || atype == T_OBJECT || atype == T_ARRAY) {
        assert(false, "Unidentified basic type");
      }
      st->print_cr(" %s", str);
    }
    break;
  case Bytecodes::_anewarray: {
      int klass_index = get_index_u2();
      ConstantPool* constants = method()->constants();
      Symbol* name = constants->klass_name_at(klass_index);
      st->print_cr(" %s ", name->as_C_string());
    }
    break;
  case Bytecodes::_multianewarray: {
      int klass_index = get_index_u2();
      int nof_dims = get_index_u1();
      ConstantPool* constants = method()->constants();
      Symbol* name = constants->klass_name_at(klass_index);
      st->print_cr(" %s %d", name->as_C_string(), nof_dims);
    }
    break;
  case Bytecodes::_ifeq:
  case Bytecodes::_ifnull:
  case Bytecodes::_iflt:
  case Bytecodes::_ifle:
  case Bytecodes::_ifne:
  case Bytecodes::_ifnonnull:
  case Bytecodes::_ifgt:
  case Bytecodes::_ifge:
  case Bytecodes::_if_icmpeq:
  case Bytecodes::_if_icmpne:
  case Bytecodes::_if_icmplt:
  case Bytecodes::_if_icmpgt:
  case Bytecodes::_if_icmple:
  case Bytecodes::_if_icmpge:
  case Bytecodes::_if_acmpeq:
  case Bytecodes::_if_acmpne:
  case Bytecodes::_goto:
  case Bytecodes::_jsr:
    // Branch targets are printed as absolute bcis.
    st->print_cr(" %d", bci + get_short());
    break;
  case Bytecodes::_goto_w:
  case Bytecodes::_jsr_w:
    st->print_cr(" %d", bci + get_int());
    break;
  case Bytecodes::_ret: st->print_cr(" %d", get_index_special()); break;
  case Bytecodes::_tableswitch:
    { align();  // operands start at the next 4-byte boundary
      int default_dest = bci + get_int();
      int lo = get_int();
      int hi = get_int();
      int len = hi - lo + 1;
      jint* dest = NEW_RESOURCE_ARRAY(jint, len);
      for (int i = 0; i < len; i++) {
        dest[i] = bci + get_int();
      }
      st->print(" %d " INT32_FORMAT " " INT32_FORMAT " ",
                default_dest, lo, hi);
      bool first = true;
      for (int ll = lo; ll <= hi; ll++, first = false) {
        int idx = ll - lo;
        const char *format = first ? " %d:" INT32_FORMAT " (delta: %d)" :
                                     ", %d:" INT32_FORMAT " (delta: %d)";
        PRAGMA_DIAG_PUSH
        PRAGMA_FORMAT_NONLITERAL_IGNORED_INTERNAL
        st->print(format, ll, dest[idx], dest[idx]-bci);
        PRAGMA_DIAG_POP
      }
      st->cr();
    }
    break;
  case Bytecodes::_lookupswitch:
    { align();  // operands start at the next 4-byte boundary
      int default_dest = bci + get_int();
      int len = get_int();
      jint* key = NEW_RESOURCE_ARRAY(jint, len);
      jint* dest = NEW_RESOURCE_ARRAY(jint, len);
      for (int i = 0; i < len; i++) {
        key [i] = get_int();
        dest[i] = bci + get_int();
      };
      st->print(" %d %d ", default_dest, len);
      bool first = true;
      for (int ll = 0; ll < len; ll++, first = false) {
        const char *format = first ? " " INT32_FORMAT ":" INT32_FORMAT :
                                     ", " INT32_FORMAT ":" INT32_FORMAT ;
        PRAGMA_DIAG_PUSH
        PRAGMA_FORMAT_NONLITERAL_IGNORED_INTERNAL
        st->print(format, key[ll], dest[ll]);
        PRAGMA_DIAG_POP
      }
      st->cr();
    }
    break;
  case Bytecodes::_putstatic:
  case Bytecodes::_getstatic:
  case Bytecodes::_putfield:
  case Bytecodes::_getfield:
    print_field_or_method(get_index_u2_cpcache(), st);
    break;
  case Bytecodes::_invokevirtual:
  case Bytecodes::_invokespecial:
  case Bytecodes::_invokestatic:
    print_field_or_method(get_index_u2_cpcache(), st);
    break;
  case Bytecodes::_invokeinterface:
    { int i = get_index_u2_cpcache();
      get_index_u1();  // consume (and ignore) the count byte
      get_byte();      // ignore zero byte
      print_field_or_method(i, st);
    }
    break;
  case Bytecodes::_invokedynamic:
    print_field_or_method(get_index_u4(), st);
    break;
  case Bytecodes::_new:
  case Bytecodes::_checkcast:
  case Bytecodes::_instanceof:
    { int i = get_index_u2();
      ConstantPool* constants = method()->constants();
      Symbol* name = constants->klass_name_at(i);
      st->print_cr(" %d <%s>", i, name->as_C_string());
    }
    break;
  case Bytecodes::_wide:
    // The wide prefix itself has no printable operands of its own.
    break;
  default:
    ShouldNotReachHere();
    break;
  }
}
// After printing a bytecode, appends any profiling data recorded for this
// bci in the method's MethodData (if present).
void BytecodePrinter::bytecode_epilog(int bci, outputStream* st) {
  MethodData* mdo = method()->method_data();
  if (mdo == NULL) return;
  ProfileData* data = mdo->bci_to_data(bci);
  if (data == NULL) return;
  st->print(" %d", mdo->dp_to_di(data->dp()));
  st->fill_to(6);
  data->print_data_on(st, mdo);
}
#endif // PRODUCT
C:\hotspot-69087d08d473\src\share\vm/interpreter/bytecodeTracer.hpp
#ifndef SHARE_VM_INTERPRETER_BYTECODETRACER_HPP
#define SHARE_VM_INTERPRETER_BYTECODETRACER_HPP
#include "memory/allocation.hpp"
#ifndef PRODUCT
class BytecodeClosure;
// Non-product facility for printing bytecodes, either as a method is
// executed (TraceBytecodes) or statically; delegates the actual printing
// to a pluggable BytecodeClosure.
class BytecodeTracer: AllStatic {
private:
static BytecodeClosure* _closure; // closure invoked by trace()
public:
static BytecodeClosure* std_closure(); // a printing closure
static BytecodeClosure* closure() { return _closure; }
static void set_closure(BytecodeClosure* closure) { _closure = closure; }
// Trace one executed bytecode (with top-of-stack values) or one statically
// printed bytecode, respectively.
static void trace(methodHandle method, address bcp, uintptr_t tos, uintptr_t tos2, outputStream* st = tty);
static void trace(methodHandle method, address bcp, outputStream* st = tty);
};
// Interface implemented by bytecode printers/consumers used by
// BytecodeTracer; one entry point per trace() variant above.
class BytecodeClosure {
public:
virtual void trace(methodHandle method, address bcp, uintptr_t tos, uintptr_t tos2, outputStream* st) = 0;
virtual void trace(methodHandle method, address bcp, outputStream* st) = 0;
};
#endif // !PRODUCT
#endif // SHARE_VM_INTERPRETER_BYTECODETRACER_HPP
C:\hotspot-69087d08d473\src\share\vm/interpreter/cppInterpreter.cpp
#include "precompiled.hpp"
#include "interpreter/bytecodeInterpreter.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterGenerator.hpp"
#include "interpreter/interpreterRuntime.hpp"
#ifdef CC_INTERP
# define __ _masm->
// One-time setup of the C++ interpreter: generates the interpreter stubs
// and then runs the bytecode interpreter's own initialization message.
// Idempotent: returns immediately once _code has been created.
void CppInterpreter::initialize() {
if (_code != NULL) return;
AbstractInterpreter::initialize();
{ ResourceMark rm;
TraceTime timer("Interpreter generation", TraceStartupTime);
int code_size = InterpreterCodeSize;
NOT_PRODUCT(code_size *= 4;) // debug uses extra interpreter code space
_code = new StubQueue(new InterpreterCodeletInterface, code_size, NULL,
"Interpreter");
// Generator emits all interpreter code into _code from its constructor.
InterpreterGenerator g(_code);
if (PrintInterpreter) print();
}
// Tell the interpreter loop to perform its internal initialization; the
// JVMTI-aware variant is used when interpreter events may be posted.
BytecodeInterpreter start_msg(BytecodeInterpreter::initialize);
if (JvmtiExport::can_post_interpreter_events())
BytecodeInterpreter::runWithChecks(&start_msg);
else
BytecodeInterpreter::run(&start_msg);
}
// Storage for the per-result-type converter entry points declared in
// cppInterpreter.hpp; filled in by CppInterpreterGenerator::generate_all().
address CppInterpreter::_tosca_to_stack [AbstractInterpreter::number_of_result_handlers];
address CppInterpreter::_stack_to_stack [AbstractInterpreter::number_of_result_handlers];
address CppInterpreter::_stack_to_native_abi [AbstractInterpreter::number_of_result_handlers];
CppInterpreterGenerator::CppInterpreterGenerator(StubQueue* _code): AbstractInterpreterGenerator(_code) {
}
// The result types for which converters/handlers are generated, one entry
// per result-handler slot.
static const BasicType types[Interpreter::number_of_result_handlers] = {
T_BOOLEAN,
T_CHAR ,
T_BYTE ,
T_SHORT ,
T_INT ,
T_LONG ,
T_VOID ,
T_FLOAT ,
T_DOUBLE ,
T_OBJECT
};
// Generates all C++ interpreter code: result handlers / stack converters
// for every result type, then the method entry points.
void CppInterpreterGenerator::generate_all() {
AbstractInterpreterGenerator::generate_all();
{ CodeletMark cm(_masm, "result handlers for native calls");
// One "already generated" guard array per converter table, so each
// BasicType slot is generated at most once even if types[] repeated one.
int is_generated[Interpreter::number_of_result_handlers];
memset(is_generated, 0, sizeof(is_generated));
int _tosca_to_stack_is_generated[Interpreter::number_of_result_handlers];
int _stack_to_stack_is_generated[Interpreter::number_of_result_handlers];
int _stack_to_native_abi_is_generated[Interpreter::number_of_result_handlers];
memset(_tosca_to_stack_is_generated, 0, sizeof(_tosca_to_stack_is_generated));
memset(_stack_to_stack_is_generated, 0, sizeof(_stack_to_stack_is_generated));
memset(_stack_to_native_abi_is_generated, 0, sizeof(_stack_to_native_abi_is_generated));
for (int i = 0; i < Interpreter::number_of_result_handlers; i++) {
BasicType type = types[i];
if (!is_generated[Interpreter::BasicType_as_index(type)]++) {
Interpreter::_native_abi_to_tosca[Interpreter::BasicType_as_index(type)] = generate_result_handler_for(type);
}
if (!_tosca_to_stack_is_generated[Interpreter::BasicType_as_index(type)]++) {
Interpreter::_tosca_to_stack[Interpreter::BasicType_as_index(type)] = generate_tosca_to_stack_converter(type);
}
if (!_stack_to_stack_is_generated[Interpreter::BasicType_as_index(type)]++) {
Interpreter::_stack_to_stack[Interpreter::BasicType_as_index(type)] = generate_stack_to_stack_converter(type);
}
if (!_stack_to_native_abi_is_generated[Interpreter::BasicType_as_index(type)]++) {
Interpreter::_stack_to_native_abi[Interpreter::BasicType_as_index(type)] = generate_stack_to_native_abi_converter(type);
}
}
}
// Generates one entry point per method kind and records it in the table.
#define method_entry(kind) Interpreter::_entry_table[Interpreter::kind] = generate_method_entry(Interpreter::kind)
{ CodeletMark cm(_masm, "(kind = frame_manager)");
method_entry(zerolocals);
method_entry(zerolocals_synchronized);
method_entry(empty);
method_entry(accessor);
method_entry(abstract);
method_entry(java_lang_math_sin );
method_entry(java_lang_math_cos );
method_entry(java_lang_math_tan );
method_entry(java_lang_math_abs );
method_entry(java_lang_math_sqrt );
method_entry(java_lang_math_log );
method_entry(java_lang_math_log10 );
method_entry(java_lang_math_pow );
method_entry(java_lang_math_exp );
method_entry(java_lang_ref_reference_get);
initialize_method_handle_entries();
// Record the address range occupied by the native method entries.
Interpreter::_native_entry_begin = Interpreter::code()->code_end();
method_entry(native);
method_entry(native_synchronized);
Interpreter::_native_entry_end = Interpreter::code()->code_end();
}
#undef method_entry
}
#endif // CC_INTERP
// ===== file: C:\hotspot-69087d08d473\src\share\vm/interpreter/cppInterpreter.hpp =====
#ifndef SHARE_VM_INTERPRETER_CPPINTERPRETER_HPP
#define SHARE_VM_INTERPRETER_CPPINTERPRETER_HPP
#include "interpreter/abstractInterpreter.hpp"
#ifdef CC_INTERP
// The C++ (bytecode-loop) interpreter: the AbstractInterpreter specialization
// used when the VM is built with CC_INTERP instead of the template interpreter.
class CppInterpreter: public AbstractInterpreter {
friend class VMStructs;
friend class Interpreter; // contains()
friend class InterpreterGenerator; // result handlers
friend class CppInterpreterGenerator; // result handlers
public:
protected:
// Result-conversion stub tables, indexed by BasicType_as_index(); populated
// by CppInterpreterGenerator::generate_all().
static address _tosca_to_stack[number_of_result_handlers]; // converts tosca to C++ interpreter stack result
static address _stack_to_stack[number_of_result_handlers]; // pass result between C++ interpreter calls
static address _stack_to_native_abi[number_of_result_handlers]; // converts C++ interpreter results to native abi
friend class frame;
public:
static void initialize();
static bool contains(address pc);
// Safepoint support: the C++ interpreter polls for safepoints itself, so
// these notifications are no-ops here.
static void notice_safepoints() {}
static void ignore_safepoints() {}
// Whole-table accessors (address of the table itself) ...
static address native_result_to_tosca() { return (address)_native_abi_to_tosca; } // aka result handler
static address tosca_result_to_stack() { return (address)_tosca_to_stack; }
static address stack_result_to_stack() { return (address)_stack_to_stack; }
static address stack_result_to_native() { return (address)_stack_to_native_abi; }
// ... and per-BasicType-index accessors returning the individual stub.
static address native_result_to_tosca(int index) { return _native_abi_to_tosca[index]; } // aka result handler
static address tosca_result_to_stack(int index) { return _tosca_to_stack[index]; }
static address stack_result_to_stack(int index) { return _stack_to_stack[index]; }
static address stack_result_to_native(int index) { return _stack_to_native_abi[index]; }
static address return_entry (TosState state, int length, Bytecodes::Code code);
static address deopt_entry (TosState state, int length);
// Platform-dependent part of the class body.
#ifdef TARGET_ARCH_x86
# include "cppInterpreter_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "cppInterpreter_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "cppInterpreter_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "cppInterpreter_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "cppInterpreter_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "cppInterpreter_ppc.hpp"
#endif
};
#endif // CC_INTERP
#endif // SHARE_VM_INTERPRETER_CPPINTERPRETER_HPP
// ===== file: C:\hotspot-69087d08d473\src\share\vm/interpreter/cppInterpreterGenerator.hpp =====
#ifndef SHARE_VM_INTERPRETER_CPPINTERPRETERGENERATOR_HPP
#define SHARE_VM_INTERPRETER_CPPINTERPRETERGENERATOR_HPP
#ifdef CC_INTERP
#ifdef TARGET_ARCH_zero
# include "entry_zero.hpp"
#endif
// Generator that emits the CppInterpreter's stubs (result handlers, stack
// converters, method entries); see cppInterpreter.cpp's generate_all().
class CppInterpreterGenerator: public AbstractInterpreterGenerator {
protected:
// One generated stub per conversion direction, keyed by BasicType.
address generate_result_handler_for(BasicType type);
address generate_tosca_to_stack_converter(BasicType type);
address generate_stack_to_stack_converter(BasicType type);
address generate_stack_to_native_abi_converter(BasicType type);
void generate_all();
public:
CppInterpreterGenerator(StubQueue* _code);
// Platform-dependent part of the class body.
#ifdef TARGET_ARCH_x86
# include "cppInterpreterGenerator_x86.hpp"
#endif
#ifdef TARGET_ARCH_aarch64
# include "cppInterpreterGenerator_aarch64.hpp"
#endif
#ifdef TARGET_ARCH_sparc
# include "cppInterpreterGenerator_sparc.hpp"
#endif
#ifdef TARGET_ARCH_zero
# include "cppInterpreterGenerator_zero.hpp"
#endif
#ifdef TARGET_ARCH_arm
# include "cppInterpreterGenerator_arm.hpp"
#endif
#ifdef TARGET_ARCH_ppc
# include "cppInterpreterGenerator_ppc.hpp"
#endif
};
#endif // CC_INTERP
#endif // SHARE_VM_INTERPRETER_CPPINTERPRETERGENERATOR_HPP
// ===== file: C:\hotspot-69087d08d473\src\share\vm/interpreter/interpreter.cpp =====
#include "precompiled.hpp"
#include "asm/macroAssembler.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "compiler/disassembler.hpp"
#include "interpreter/bytecodeHistogram.hpp"
#include "interpreter/bytecodeInterpreter.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/interpreterRuntime.hpp"
#include "interpreter/templateTable.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/resourceArea.hpp"
#include "oops/arrayOop.hpp"
#include "oops/methodData.hpp"
#include "oops/method.hpp"
#include "oops/oop.inline.hpp"
#include "prims/forte.hpp"
#include "prims/jvmtiExport.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/sharedRuntime.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/timer.hpp"
# define __ _masm->
// Record the codelet's debug metadata: a human-readable description and the
// bytecode it implements (callers may pass an out-of-range value for
// non-bytecode codelets; print_on() checks `bytecode() >= 0` before using it).
void InterpreterCodelet::initialize(const char* description, Bytecodes::Code bytecode) {
  _bytecode    = bytecode;
  _description = description;
}
// Intentionally empty: there is nothing to verify for an interpreter codelet.
// NOTE(review): presumably kept so codelets satisfy a uniform verify()
// protocol — confirm against the StubQueue/Stub interface.
void InterpreterCodelet::verify() {
}
// Print this codelet's description, associated bytecode (if any), and code
// range; with -XX:+PrintInterpreter also disassemble the code.
void InterpreterCodelet::print_on(outputStream* st) const {
// Keep the multi-line output of one codelet contiguous on tty.
ttyLocker ttyl;
if (PrintInterpreter) {
st->cr();
st->print_cr("----------------------------------------------------------------------");
}
if (description() != NULL) st->print("%s ", description());
// A negative bytecode means "no specific bytecode" — skip it.
if (bytecode() >= 0 ) st->print("%d %s ", bytecode(), Bytecodes::name(bytecode()));
st->print_cr("[" INTPTR_FORMAT ", " INTPTR_FORMAT "] %d bytes",
p2i(code_begin()), p2i(code_end()), code_size());
if (PrintInterpreter) {
st->cr();
// Debug builds carry the codelet's code strings; product builds pass none.
Disassembler::decode(code_begin(), code_end(), st, DEBUG_ONLY(_strings) NOT_DEBUG(CodeStrings()));
}
}
// One-time setup of interpreter-wide counters and histograms. Idempotent:
// returns immediately once the interpreter code queue exists.
void AbstractInterpreter::initialize() {
if (_code != NULL) return;
// Reset debugging/statistics state for whichever diagnostic flags are on.
if (CountBytecodes || TraceBytecodes || StopInterpreterAt) BytecodeCounter::reset();
if (PrintBytecodeHistogram) BytecodeHistogram::reset();
if (PrintBytecodePairHistogram) BytecodePairHistogram::reset();
InvocationCounter::reinitialize(DelayCompilationDuringStartup);
}
// Print interpreter code-cache statistics to tty (sizes, codelet count,
// average codelet size), then the stub queue's own listing. Read-only.
// The average-size division is guarded so an empty stub queue (zero
// codelets) cannot trigger a divide-by-zero.
void AbstractInterpreter::print() {
  tty->cr();
  tty->print_cr("----------------------------------------------------------------------");
  tty->print_cr("Interpreter");
  tty->cr();
  tty->print_cr("code size = %6dK bytes", (int)_code->used_space()/1024);
  tty->print_cr("total space = %6dK bytes", (int)_code->total_space()/1024);
  tty->print_cr("wasted space = %6dK bytes", (int)_code->available_space()/1024);
  tty->cr();
  // Hoist the count: used both for printing and as the divisor below.
  int number_of_codelets = _code->number_of_stubs();
  tty->print_cr("# of codelets = %6d" , number_of_codelets);
  if (number_of_codelets > 0) {
    tty->print_cr("avg codelet size = %6d bytes", _code->used_space() / number_of_codelets);
  }
  tty->cr();
  _code->print();
  tty->print_cr("----------------------------------------------------------------------");
  tty->cr();
}
// VM startup hook: generate the interpreter, then announce its code range to
// external observers (Forte profiling, JVMTI agents).
void interpreter_init() {
Interpreter::initialize();
#ifndef PRODUCT
// Install the bytecode tracing closure (non-product builds only).
if (TraceBytecodes) BytecodeTracer::set_closure(BytecodeTracer::std_closure());
#endif // PRODUCT
// Register the interpreter code range with Forte ...
Forte::register_stub(
"Interpreter",
AbstractInterpreter::code()->code_start(),
AbstractInterpreter::code()->code_end()
);
// ... and post a JVMTI dynamic-code-generated event if an agent asked for it.
if (JvmtiExport::should_post_dynamic_code_generated()) {
JvmtiExport::post_dynamic_code_generated("Interpreter",
AbstractInterpreter::code()->code_start(),
AbstractInterpreter::code()->code_end());
}
}
// Shared static state of the abstract interpreter; pointers are filled in
// during interpreter generation.
StubQueue* AbstractInterpreter::_code = NULL;
bool AbstractInterpreter::_notice_safepoints = false;
address AbstractInterpreter::_rethrow_exception_entry = NULL;
address AbstractInterpreter::_native_entry_begin = NULL;
address AbstractInterpreter::_native_entry_end = NULL;
// Explicit NULL for consistency with the sibling pointers above (static
// storage would zero-initialize it anyway, so behavior is unchanged).
address AbstractInterpreter::_slow_signature_handler = NULL;
// Entry/result tables; populated by the generator.
address AbstractInterpreter::_entry_table [AbstractInterpreter::number_of_method_entries];
address AbstractInterpreter::_native_abi_to_tosca [AbstractInterpreter::number_of_result_handlers];
// NOTE(review): the parameter shadows the static AbstractInterpreter::_code
// and is not used in this body — the static queue is presumably installed
// elsewhere (not visible in this chunk; confirm). Only the assembler pointer
// is reset here so generation starts without a live MacroAssembler.
AbstractInterpreterGenerator::AbstractInterpreterGenerator(StubQueue* _code) {
_masm = NULL;
}
// File-static list of BasicTypes covered by the result handlers. Lookup in
// the generated tables goes through BasicType_as_index(), so order here is
// not significant.
static const BasicType types[Interpreter::number_of_result_handlers] = {
T_BOOLEAN,
T_CHAR ,
T_BYTE ,
T_SHORT ,
T_INT ,
T_LONG ,
T_VOID ,
T_FLOAT ,
T_DOUBLE ,
T_OBJECT
};
// Generate the interpreter-independent stubs; currently just the slow
// signature handler. Subclasses extend this with their own codelets.
void AbstractInterpreterGenerator::generate_all() {
{ CodeletMark cm(_masm, "slow signature handler");
Interpreter::_slow_signature_handler = generate_slow_signature_handler();
}
}
// Map a method to the MethodKind that selects its interpreter entry point.
// Check order matters: abstract and method-handle intrinsics first, then
// native, synchronized, special-cased intrinsics, accessors, and finally the
// generic zerolocals entry.
AbstractInterpreter::MethodKind AbstractInterpreter::method_kind(methodHandle m) {
  // Abstract method?
  if (m->is_abstract()) return abstract;

  // Method-handle intrinsic? Each signature-polymorphic intrinsic maps onto a
  // parallel range of MethodKind values.
  if (m->is_method_handle_intrinsic()) {
    vmIntrinsics::ID id = m->intrinsic_id();
    assert(MethodHandles::is_signature_polymorphic(id), "must match an intrinsic");
    MethodKind kind = (MethodKind)( method_handle_invoke_FIRST +
                                    ((int)id - vmIntrinsics::FIRST_MH_SIG_POLY) );
    assert(kind <= method_handle_invoke_LAST, "parallel enum ranges");
    return kind;
  }

#ifndef CC_INTERP
  // Intrinsified CRC32 native methods (template interpreter only).
  if (UseCRC32Intrinsics && m->is_native()) {
    switch (m->intrinsic_id()) {
    case vmIntrinsics::_updateCRC32           : return java_util_zip_CRC32_update;
    case vmIntrinsics::_updateBytesCRC32      : return java_util_zip_CRC32_updateBytes;
    case vmIntrinsics::_updateByteBufferCRC32 : return java_util_zip_CRC32_updateByteBuffer;
    default                                   : break; // not a CRC32 intrinsic; fall through to plain native
    }
  }
#endif

  // Native method?
  if (m->is_native()) {
    assert(!m->is_method_handle_intrinsic(), "overlapping bits here, watch out");
    return m->is_synchronized() ? native_synchronized : native;
  }

  // Synchronized Java method?
  if (m->is_synchronized()) {
    return zerolocals_synchronized;
  }

  // Object.<init> must run through the full entry when finalizers are
  // registered at initialization time.
  if (RegisterFinalizersAtInit && m->code_size() == 1 &&
      m->intrinsic_id() == vmIntrinsics::_Object_init) {
    return zerolocals;
  }

  // Empty method?
  if (m->is_empty_method()) {
    return empty;
  }

  // Special-cased math/Reference intrinsics with dedicated entries.
  switch (m->intrinsic_id()) {
  case vmIntrinsics::_dsin  : return java_lang_math_sin  ;
  case vmIntrinsics::_dcos  : return java_lang_math_cos  ;
  case vmIntrinsics::_dtan  : return java_lang_math_tan  ;
  case vmIntrinsics::_dabs  : return java_lang_math_abs  ;
  case vmIntrinsics::_dsqrt : return java_lang_math_sqrt ;
  case vmIntrinsics::_dlog  : return java_lang_math_log  ;
  case vmIntrinsics::_dlog10: return java_lang_math_log10;
  case vmIntrinsics::_dpow  : return java_lang_math_pow  ;
  case vmIntrinsics::_dexp  : return java_lang_math_exp  ;
  case vmIntrinsics::_Reference_get:
                              return java_lang_ref_reference_get;
  default                   : break; // no dedicated entry; keep classifying
  }

  // Getter-style accessor method?
  if (m->is_accessor()) {
    assert(m->size_of_parameters() == 1, "fast code for accessors assumes parameter size = 1");
    return accessor;
  }

  // Anything else runs through the generic entry.
  return zerolocals;
}
// Late installation of a method-handle invoke entry point. Only MH kinds may
// be patched, and only while the slot still holds the abstract-entry (AME)
// placeholder put there by initialize_method_handle_entries().
void AbstractInterpreter::set_entry_for_kind(AbstractInterpreter::MethodKind kind, address entry) {
assert(kind >= method_handle_invoke_FIRST &&
kind <= method_handle_invoke_LAST, "late initialization only for MH entry points");
assert(_entry_table[kind] == _entry_table[abstract], "previous value must be AME entry");
_entry_table[kind] = entry;
}
// True if the bytecode at bci has provably not executed yet: it still needs
// rewriting, and the method's interpreter entry is one of the zerolocals
// entries (i.e. the interpreter is what would run it).
bool AbstractInterpreter::is_not_reached(methodHandle method, int bci) {
  Bytecodes::Code code = method()->code_at(bci);
  if (!Bytecodes::must_rewrite(code)) {
    // This bytecode is never rewritten, so its form proves nothing.
    return false;
  }
  // Only methods entered via the zerolocals entries run in the interpreter;
  // any other entry means the interpreter does not run this method.
  address ientry = method->interpreter_entry();
  bool interpreter_runs_it =
      ientry == entry_for_kind(AbstractInterpreter::zerolocals) ||
      ientry == entry_for_kind(AbstractInterpreter::zerolocals_synchronized);
  return interpreter_runs_it;
}
#ifndef PRODUCT
// Debug helper (non-product): print the symbolic name of a MethodKind to tty.
// Fixed: method_kind() can return java_lang_math_pow, java_lang_math_exp and
// java_lang_ref_reference_get, but those kinds had no case here and fell into
// ShouldNotReachHere(); cases added.
void AbstractInterpreter::print_method_kind(MethodKind kind) {
  switch (kind) {
  case zerolocals             : tty->print("zerolocals"             ); break;
  case zerolocals_synchronized: tty->print("zerolocals_synchronized"); break;
  case native                 : tty->print("native"                 ); break;
  case native_synchronized    : tty->print("native_synchronized"    ); break;
  case empty                  : tty->print("empty"                  ); break;
  case accessor               : tty->print("accessor"               ); break;
  case abstract               : tty->print("abstract"               ); break;
  case java_lang_math_sin     : tty->print("java_lang_math_sin"     ); break;
  case java_lang_math_cos     : tty->print("java_lang_math_cos"     ); break;
  case java_lang_math_tan     : tty->print("java_lang_math_tan"     ); break;
  case java_lang_math_abs     : tty->print("java_lang_math_abs"     ); break;
  case java_lang_math_sqrt    : tty->print("java_lang_math_sqrt"    ); break;
  case java_lang_math_log     : tty->print("java_lang_math_log"     ); break;
  case java_lang_math_log10   : tty->print("java_lang_math_log10"   ); break;
  case java_lang_math_pow     : tty->print("java_lang_math_pow"     ); break;
  case java_lang_math_exp     : tty->print("java_lang_math_exp"     ); break;
  case java_lang_ref_reference_get : tty->print("java_lang_ref_reference_get"); break;
  case java_util_zip_CRC32_update : tty->print("java_util_zip_CRC32_update"); break;
  case java_util_zip_CRC32_updateBytes : tty->print("java_util_zip_CRC32_updateBytes"); break;
  case java_util_zip_CRC32_updateByteBuffer : tty->print("java_util_zip_CRC32_updateByteBuffer"); break;
  default:
    // Method-handle invoke kinds are named after their intrinsic.
    if (kind >= method_handle_invoke_FIRST &&
        kind <= method_handle_invoke_LAST) {
      const char* kind_name = vmIntrinsics::name_at(method_handle_intrinsic(kind));
      if (kind_name[0] == '_') kind_name = &kind_name[1]; // '_invokeExact' => 'invokeExact'
      tty->print("method_handle_%s", kind_name);
      break;
    }
    ShouldNotReachHere();
    break;
  }
}
#endif // PRODUCT
// Compute the interpreter address at which execution continues AFTER the
// bytecode at bcp has completed, for a deoptimization that does not need to
// reexecute it. Determines the result TosState from the bytecode, restoring
// call-site parameter sizes for non-top frames along the way.
address AbstractInterpreter::deopt_continue_after_entry(Method* method, address bcp, int callee_parameters, bool is_top_frame) {
assert(method->contains(bcp), "just checkin'");
Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
assert(!Interpreter::bytecode_should_reexecute(code), "should not reexecute");
const int bci = method->bci_from(bcp);
const int length = Bytecodes::length_at(method, bcp);
// The result type determines the TosState of the continuation entry.
BasicType type = T_ILLEGAL;
switch (code) {
case Bytecodes::_invokevirtual :
case Bytecodes::_invokespecial :
case Bytecodes::_invokestatic :
case Bytecodes::_invokeinterface: {
Thread *thread = Thread::current();
ResourceMark rm(thread);
methodHandle mh(thread, method);
type = Bytecode_invoke(mh, bci).result_type();
// For interior frames the deoptimized caller must know the callee's
// parameter size; restore it into the cp-cache entry (u2 index).
if (!is_top_frame) {
int index = Bytes::get_native_u2(bcp+1);
method->constants()->cache()->entry_at(index)->set_parameter_size(callee_parameters);
}
break;
}
case Bytecodes::_invokedynamic: {
Thread *thread = Thread::current();
ResourceMark rm(thread);
methodHandle mh(thread, method);
type = Bytecode_invoke(mh, bci).result_type();
// invokedynamic uses a 4-byte index into the dedicated indy cache.
if (!is_top_frame) {
int index = Bytes::get_native_u4(bcp+1);
method->constants()->invokedynamic_cp_cache_entry_at(index)->set_parameter_size(callee_parameters);
}
break;
}
case Bytecodes::_ldc :
case Bytecodes::_ldc_w : // fall through
case Bytecodes::_ldc2_w:
{
// Constant loads: the result type comes from the loaded constant.
Thread *thread = Thread::current();
ResourceMark rm(thread);
methodHandle mh(thread, method);
type = Bytecode_loadconstant(mh, bci).result_type();
break;
}
default:
// All other bytecodes have a statically-known result type.
type = Bytecodes::result_type(code);
break;
}
// Top frame continues at the deopt entry; interior frames continue at the
// normal post-call return entry for this bytecode.
return
is_top_frame
? Interpreter::deopt_entry (as_TosState(type), length)
: Interpreter::return_entry(as_TosState(type), length, code);
}
// Interpreter continuation address for a deoptimization that must REEXECUTE
// the bytecode at bcp. With C1, an _athrow resumes at the dedicated rethrow
// entry; everything else restarts at the vtos deopt entry with length 0.
address AbstractInterpreter::deopt_reexecute_entry(Method* method, address bcp) {
assert(method->contains(bcp), "just checkin'");
Bytecodes::Code code = Bytecodes::java_code_at(method, bcp);
#ifdef COMPILER1
if(code == Bytecodes::_athrow ) {
return Interpreter::rethrow_exception_entry();
}
#endif /* COMPILER1 */
return Interpreter::deopt_entry(vtos, 0);
}
// True for bytecodes that must be REEXECUTED after deoptimization rather than
// continued past: switches, comparisons, conditional/unconditional branches,
// field accesses, aastore, and (with C1) athrow. Consulted (via assert) by
// deopt_continue_after_entry().
bool AbstractInterpreter::bytecode_should_reexecute(Bytecodes::Code code) {
switch (code) {
case Bytecodes::_lookupswitch:
case Bytecodes::_tableswitch:
case Bytecodes::_fast_binaryswitch:
case Bytecodes::_fast_linearswitch:
// recompute condtional expression folded into _if<cond>
case Bytecodes::_lcmp :
case Bytecodes::_fcmpl :
case Bytecodes::_fcmpg :
case Bytecodes::_dcmpl :
case Bytecodes::_dcmpg :
case Bytecodes::_ifnull :
case Bytecodes::_ifnonnull :
case Bytecodes::_goto :
case Bytecodes::_goto_w :
case Bytecodes::_ifeq :
case Bytecodes::_ifne :
case Bytecodes::_iflt :
case Bytecodes::_ifge :
case Bytecodes::_ifgt :
case Bytecodes::_ifle :
case Bytecodes::_if_icmpeq :
case Bytecodes::_if_icmpne :
case Bytecodes::_if_icmplt :
case Bytecodes::_if_icmpge :
case Bytecodes::_if_icmpgt :
case Bytecodes::_if_icmple :
case Bytecodes::_if_acmpeq :
case Bytecodes::_if_acmpne :
// special cases
case Bytecodes::_getfield :
case Bytecodes::_putfield :
case Bytecodes::_getstatic :
case Bytecodes::_putstatic :
case Bytecodes::_aastore :
#ifdef COMPILER1
// special case of reexecution
case Bytecodes::_athrow :
#endif
return true;
default:
return false;
}
}
// Emit code that touches ("bangs") the stack shadow pages so a stack overflow
// is detected eagerly at method entry. For native calls only the last shadow
// page is banged (start_page == StackShadowPages gives a single iteration);
// Java calls bang pages 1..StackShadowPages.
void AbstractInterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
if (UseStackBanging) {
const int start_page = native_call ? StackShadowPages : 1;
const int page_size = os::vm_page_size();
for (int pages = start_page; pages <= StackShadowPages ; pages++) {
__ bang_stack_with_offset(pages*page_size);
}
}
}
void AbstractInterpreterGenerator::initialize_method_handle_entries() {
for (int i = Interpreter::method_handle_invoke_FIRST; i <= Interpreter::method_handle_invoke_LAST; i++) {
Interpreter::MethodKind kind = (Interpreter::MethodKind) i;
Interpreter::_entry_table[kind] = Interpreter::_entry_table[Interpreter::abstract];
}
}
// (extraction artifact removed: stray blog-scrape text, not part of the source)