139 };
140
141 // What action must be taken by the runtime?
142 // Note: Keep this enum in sync with Deoptimization::_trap_action_name.
143 enum DeoptAction {
144 Action_none, // just interpret, do not invalidate nmethod
145 Action_maybe_recompile, // recompile the nmethod; need not invalidate
146 Action_reinterpret, // invalidate the nmethod, reset IC, maybe recompile
147 Action_make_not_entrant, // invalidate the nmethod, recompile (probably)
148 Action_make_not_compilable, // invalidate the nmethod and do not compile
149 Action_LIMIT // sentinel: count of real actions, not an action itself
150 };
151
152 enum {
// Bit layout of the trap_request word decoded by trap_request_reason()
// and trap_request_action() below, low bits first:
//   [action: 3][reason: 5][debug_id: 23]
// 3+5+23 = 31 bits; the remaining (sign) bit is what trap_request_reason()
// tests to distinguish an encoded request (negative, ones-complement) from
// a plain unloaded-constant-pool index (non-negative).
153 _action_bits = 3,
154 _reason_bits = 5,
155 _debug_id_bits = 23,
156 _action_shift = 0,
157 _reason_shift = _action_shift+_action_bits,
158 _debug_id_shift = _reason_shift+_reason_bits,
// Bytecode-case bucket count for the histogram: 1 in product builds,
// 4 in debug builds (extra per-bytecode detail).
159 BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist
160 };
161
162 enum UnpackType {
// NOTE(review): these values appear to be the exec_mode ints passed to
// uncommon_trap()/fetch_unroll_info_helper() below — confirm in the .cpp.
163 Unpack_deopt = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
164 Unpack_exception = 1, // exception is pending
165 Unpack_uncommon_trap = 2, // redo last byte code (C2 only)
166 Unpack_reexecute = 3, // reexecute bytecode (C1 only)
167 Unpack_none = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
168 Unpack_LIMIT = 5 // sentinel: count of unpack modes
169 };
170
171 #if INCLUDE_JVMCI
172 // Can reconstruct virtualized unsafe large accesses to byte arrays.
// NOTE(review): presumably a capability flag reported to JVMCI (1 = supported) — confirm.
173 static const int _support_large_access_byte_array_virtualization = 1;
174 #endif
175
176 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
177 // activations using those nmethods. Scan of the code cache is done to
178 // find all marked nmethods and they are made not_entrant.
179 static void deoptimize_all_marked();
// Transfers callee-saved values between the physical frame f and the
// vframeArray during unwinding. NOTE(review): direction of the copy is not
// visible from this header — see the .cpp.
308 static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);
309
310 // Performs an uncommon trap for compiled code.
311 // The top most compiler frame is converted into interpreter frames
312 static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
313 // Helper routine that enters the VM and may block
314 static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);
315
316 // Deoptimizes the frame identified by id.
317 // Only called from VMDeoptimizeFrame
318 // @argument thread. Thread where stub_frame resides.
319 // @argument id. id of frame that should be deoptimized.
320 static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);
321
322 // If thread is not the current thread then execute
323 // VM_DeoptimizeFrame otherwise deoptimize directly.
324 static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
// Convenience overload. NOTE(review): presumably supplies a default
// DeoptReason — confirm which one in the .cpp.
325 static void deoptimize_frame(JavaThread* thread, intptr_t* id);
326
327 // Statistics
// Records one deoptimization event; bc defaults to _illegal when no
// specific bytecode applies.
328 static void gather_statistics(DeoptReason reason, DeoptAction action,
329 Bytecodes::Code bc = Bytecodes::_illegal);
330 static void print_statistics();
331
332 // How much room to adjust the last frame's SP by, to make space for
333 // the callee's interpreter frame (which expects locals to be next to
334 // incoming arguments)
335 static int last_frame_adjust(int callee_parameters, int callee_locals);
336
337 // trap_request codes
// Extracts the DeoptReason packed into a trap_request word. Negative
// values carry an encoded payload stored in ones-complement (hence the ~)
// so the sign bit marks the encoding; non-negative values are unloaded
// constant-pool indices and uniformly map to Reason_unloaded.
338 static DeoptReason trap_request_reason(int trap_request) {
339 if (trap_request < 0)
340 return (DeoptReason)
341 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
342 else
343 // standard reason for unloaded CP entry
344 return Reason_unloaded;
345 }
346 static DeoptAction trap_request_action(int trap_request) {
347 if (trap_request < 0)
348 return (DeoptAction)
349 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
350 else
466 int trap_bci,
467 DeoptReason reason,
468 bool update_total_trap_count,
469 #if INCLUDE_JVMCI
470 bool is_osr,
471 #endif
472 Method* compiled_method,
473 //outputs:
474 uint& ret_this_trap_count,
475 bool& ret_maybe_prior_trap,
476 bool& ret_maybe_prior_recompile);
477 // class loading support for uncommon trap
478 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
479
480 static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);
481
482 static DeoptAction _unloaded_action; // == Action_reinterpret;
// Human-readable names indexed by DeoptReason/DeoptAction; must stay in
// sync with the corresponding enums (see the "Keep this enum in sync"
// notes above).
483 static const char* _trap_reason_name[];
484 static const char* _trap_action_name[];
485
// Per-(reason, action, bytecode-case) event counts. NOTE(review): the
// 1+Action_LIMIT extent suggests slot 0 is reserved (e.g. for "no
// action") — confirm the indexing in gather_statistics().
486 static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
487 // Note: Histogram array size is 1-2 Kb.
488
489 public:
// Records a trap observed by the interpreter into the method's MethodData.
// NOTE(review): reason is passed as a raw int rather than DeoptReason —
// presumably to keep the interpreter entry point decoupled; confirm.
490 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
491 };
492
493 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|
139 };
140
141 // What action must be taken by the runtime?
142 // Note: Keep this enum in sync with Deoptimization::_trap_action_name.
143 enum DeoptAction {
144 Action_none, // just interpret, do not invalidate nmethod
145 Action_maybe_recompile, // recompile the nmethod; need not invalidate
146 Action_reinterpret, // invalidate the nmethod, reset IC, maybe recompile
147 Action_make_not_entrant, // invalidate the nmethod, recompile (probably)
148 Action_make_not_compilable, // invalidate the nmethod and do not compile
149 Action_LIMIT // sentinel: count of real actions, not an action itself
150 };
151
152 enum {
// Bit layout of the trap_request word decoded by trap_request_reason()
// and trap_request_action() below, low bits first:
//   [action: 3][reason: 5][debug_id: 23]
// 3+5+23 = 31 bits; the remaining (sign) bit is what trap_request_reason()
// tests to distinguish an encoded request (negative, ones-complement) from
// a plain unloaded-constant-pool index (non-negative).
153 _action_bits = 3,
154 _reason_bits = 5,
155 _debug_id_bits = 23,
156 _action_shift = 0,
157 _reason_shift = _action_shift+_action_bits,
158 _debug_id_shift = _reason_shift+_reason_bits,
// Bytecode-case bucket count for the histogram (now a fixed 4 in all
// build flavors, unlike the earlier PRODUCT_ONLY/NOT_PRODUCT split).
159 BC_CASE_LIMIT = 4 // for _deoptimization_hist
160 };
161
162 enum UnpackType {
// NOTE(review): these values appear to be the exec_mode ints passed to
// uncommon_trap()/fetch_unroll_info_helper() below — confirm in the .cpp.
163 Unpack_deopt = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
164 Unpack_exception = 1, // exception is pending
165 Unpack_uncommon_trap = 2, // redo last byte code (C2 only)
166 Unpack_reexecute = 3, // reexecute bytecode (C1 only)
167 Unpack_none = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
168 Unpack_LIMIT = 5 // sentinel: count of unpack modes
169 };
170
171 #if INCLUDE_JVMCI
172 // Can reconstruct virtualized unsafe large accesses to byte arrays.
// NOTE(review): presumably a capability flag reported to JVMCI (1 = supported) — confirm.
173 static const int _support_large_access_byte_array_virtualization = 1;
174 #endif
175
176 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
177 // activations using those nmethods. Scan of the code cache is done to
178 // find all marked nmethods and they are made not_entrant.
179 static void deoptimize_all_marked();
// Transfers callee-saved values between the physical frame f and the
// vframeArray during unwinding. NOTE(review): direction of the copy is not
// visible from this header — see the .cpp.
308 static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);
309
310 // Performs an uncommon trap for compiled code.
311 // The top most compiler frame is converted into interpreter frames
312 static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
313 // Helper routine that enters the VM and may block
314 static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);
315
316 // Deoptimizes the frame identified by id.
317 // Only called from VMDeoptimizeFrame
318 // @argument thread. Thread where stub_frame resides.
319 // @argument id. id of frame that should be deoptimized.
320 static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);
321
322 // If thread is not the current thread then execute
323 // VM_DeoptimizeFrame otherwise deoptimize directly.
324 static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
// Convenience overload. NOTE(review): presumably supplies a default
// DeoptReason — confirm which one in the .cpp.
325 static void deoptimize_frame(JavaThread* thread, intptr_t* id);
326
327 // Statistics
// Records one deoptimization event; bc defaults to _illegal when no
// specific bytecode applies. NOTE(review): unlike the older signature,
// this takes the nmethod — presumably for per-nmethod/per-tier
// attribution; confirm in the .cpp.
328 static void gather_statistics(nmethod* nm, DeoptReason reason, DeoptAction action,
329 Bytecodes::Code bc = Bytecodes::_illegal);
330 static void print_statistics();
331 static void print_statistics_on(outputStream* st);
332
// Overload with a section title and level; relationship to the overload
// above is not visible from this header.
333 static void print_statistics_on(const char* title, int lvl, outputStream* st);
334
335 // How much room to adjust the last frame's SP by, to make space for
336 // the callee's interpreter frame (which expects locals to be next to
337 // incoming arguments)
338 static int last_frame_adjust(int callee_parameters, int callee_locals);
339
340 // trap_request codes
// Extracts the DeoptReason packed into a trap_request word. Negative
// values carry an encoded payload stored in ones-complement (hence the ~)
// so the sign bit marks the encoding; non-negative values are unloaded
// constant-pool indices and uniformly map to Reason_unloaded.
341 static DeoptReason trap_request_reason(int trap_request) {
342 if (trap_request < 0)
343 return (DeoptReason)
344 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
345 else
346 // standard reason for unloaded CP entry
347 return Reason_unloaded;
348 }
349 static DeoptAction trap_request_action(int trap_request) {
350 if (trap_request < 0)
351 return (DeoptAction)
352 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
353 else
469 int trap_bci,
470 DeoptReason reason,
471 bool update_total_trap_count,
472 #if INCLUDE_JVMCI
473 bool is_osr,
474 #endif
475 Method* compiled_method,
476 //outputs:
477 uint& ret_this_trap_count,
478 bool& ret_maybe_prior_trap,
479 bool& ret_maybe_prior_recompile);
480 // class loading support for uncommon trap
481 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
482
483 static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);
484
485 static DeoptAction _unloaded_action; // == Action_reinterpret;
// Human-readable names indexed by DeoptReason/DeoptAction; must stay in
// sync with the corresponding enums (see the "Keep this enum in sync"
// notes above).
486 static const char* _trap_reason_name[];
487 static const char* _trap_action_name[];
488
// Per-event counts. NOTE(review): the new leading [1 + 4 + 5] dimension is
// a magic number — presumably 1 aggregate bucket plus per-compilation-tier
// buckets; it deserves a named constant, and the meaning should be
// confirmed against gather_statistics() in the .cpp. The 1+Action_LIMIT
// extent suggests action slot 0 is reserved — confirm.
489 static juint _deoptimization_hist[1 + 4 + 5][Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
// NOTE(review): the size note below predates the added 10-wide leading
// dimension and looks stale — recompute (roughly 10x the old footprint).
490 // Note: Histogram array size is 1-2 Kb.
491
492 public:
// Records a trap observed by the interpreter into the method's MethodData.
// NOTE(review): reason is passed as a raw int rather than DeoptReason —
// presumably to keep the interpreter entry point decoupled; confirm.
493 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
494
// Lifecycle hooks for the statistics counters: init_counters() at startup,
// print_counters_on() for reporting to the given stream.
495 static void init_counters();
496 static void print_counters_on(outputStream* st);
497 };
498
499 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|