140 };
141
  // What action must be taken by the runtime?
  // Note: Keep this enum in sync. with Deoptimization::_trap_action_name.
  enum DeoptAction {
    Action_none,                  // just interpret, do not invalidate nmethod
    Action_maybe_recompile,       // recompile the nmethod; need not invalidate
    Action_reinterpret,           // invalidate the nmethod, reset IC, maybe recompile
    Action_make_not_entrant,      // invalidate the nmethod, recompile (probably)
    Action_make_not_compilable,   // invalidate the nmethod and do not compile
    Action_LIMIT                  // sentinel: number of actions, used to size tables (e.g. _deoptimization_hist)
  };
152
  // Bit layout of a packed trap_request word, decoded by
  // trap_request_reason()/trap_request_action() below: action in the lowest
  // 3 bits, reason in the next 5, and a debug id in the next 23.
  // Packed requests are stored complemented (negative) — the decoders test
  // trap_request < 0 and apply ~ before shifting/masking.
  enum {
    _action_bits  = 3,
    _reason_bits  = 5,
    _debug_id_bits = 23,
    _action_shift = 0,
    _reason_shift = _action_shift+_action_bits,     // == 3
    _debug_id_shift = _reason_shift+_reason_bits,   // == 8
    // Number of bytecode "cases" tracked per histogram cell; only 1 in
    // product builds to keep the table small.
    BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist
  };
162
  // Why/how the topmost compiled frame is being unpacked into interpreter
  // frames.  NOTE(review): presumably this is the exec_mode value threaded
  // through uncommon_trap()/fetch_unroll_info_helper() — confirm in the .cpp.
  enum UnpackType {
    Unpack_deopt         = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
    Unpack_exception     = 1, // exception is pending
    Unpack_uncommon_trap = 2, // redo last byte code (C2 only)
    Unpack_reexecute     = 3, // reexecute bytecode (C1 only)
    Unpack_none          = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
    Unpack_LIMIT         = 5
  };
171
#if INCLUDE_JVMCI
  // Can reconstruct virtualized unsafe large accesses to byte arrays.
  static const int _support_large_access_byte_array_virtualization = 1;
#endif

  // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
  // activations using those nmethods. Scan of the code cache is done to
  // find all marked nmethods and they are made not_entrant.
  static void deoptimize_all_marked();

  // NOTE(review): presumably transfers callee-saved register values between
  // the physical frame f and vframe_array during unpacking — confirm the
  // direction against the .cpp before relying on it.
  static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);

  // Performs an uncommon trap for compiled code.
  // The top most compiler frame is converted into interpreter frames.
  static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
  // Helper routine that enters the VM and may block.
  static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);

  // Deoptimizes the frame identified by id.
  // Only called from VMDeoptimizeFrame.
  // @argument thread.  Thread where stub_frame resides.
  // @argument id.      id of frame that should be deoptimized.
  static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);

  // If thread is not the current thread then execute
  // VM_DeoptimizeFrame, otherwise deoptimize directly.
  static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
  static void deoptimize_frame(JavaThread* thread, intptr_t* id);

  // Statistics.  NOTE(review): presumably records one event in
  // _deoptimization_hist, whose dimensions match (reason, action, bc) —
  // confirm in the .cpp.
  static void gather_statistics(DeoptReason reason, DeoptAction action,
                                Bytecodes::Code bc = Bytecodes::_illegal);
  static void print_statistics();

  // How much room to adjust the last frame's SP by, to make space for
  // the callee's interpreter frame (which expects locals to be next to
  // incoming arguments)
  static int last_frame_adjust(int callee_parameters, int callee_locals);
337
338 // trap_request codes
339 static DeoptReason trap_request_reason(int trap_request) {
340 if (trap_request < 0)
341 return (DeoptReason)
342 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
343 else
344 // standard reason for unloaded CP entry
345 return Reason_unloaded;
346 }
347 static DeoptAction trap_request_action(int trap_request) {
348 if (trap_request < 0)
349 return (DeoptAction)
350 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
351 else
467 int trap_bci,
468 DeoptReason reason,
469 bool update_total_trap_count,
470 #if INCLUDE_JVMCI
471 bool is_osr,
472 #endif
473 Method* compiled_method,
474 //outputs:
475 uint& ret_this_trap_count,
476 bool& ret_maybe_prior_trap,
477 bool& ret_maybe_prior_recompile);
  // Class loading support for uncommon trap: resolve the constant pool
  // entry at index (may throw, hence TRAPS).
  static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);

  // Does the real work for fetch_unroll_info.  NOTE(review): presumably its
  // in-VM helper — confirm in the .cpp.
  static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);

  // Action used for the standard unloaded-CP-entry trap reason.
  static DeoptAction _unloaded_action; // == Action_reinterpret;
  // Printable names, indexed by DeoptReason / DeoptAction; must stay in
  // sync with those enums (see the notes on the enums).
  static const char* _trap_reason_name[];
  static const char* _trap_action_name[];

  // Per-(reason, action, bytecode-case) trap counters; see BC_CASE_LIMIT.
  static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
  // Note: Histogram array size is 1-2 Kb.

 public:
  static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
492 };
493
494 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|
140 };
141
  // What action must be taken by the runtime?
  // Note: Keep this enum in sync. with Deoptimization::_trap_action_name.
  enum DeoptAction {
    Action_none,                  // just interpret, do not invalidate nmethod
    Action_maybe_recompile,       // recompile the nmethod; need not invalidate
    Action_reinterpret,           // invalidate the nmethod, reset IC, maybe recompile
    Action_make_not_entrant,      // invalidate the nmethod, recompile (probably)
    Action_make_not_compilable,   // invalidate the nmethod and do not compile
    Action_LIMIT                  // sentinel: number of actions, used to size tables (e.g. _deoptimization_hist)
  };
152
  // Bit layout of a packed trap_request word, decoded by
  // trap_request_reason()/trap_request_action() below: action in the lowest
  // 3 bits, reason in the next 5, and a debug id in the next 23.
  // Packed requests are stored complemented (negative) — the decoders test
  // trap_request < 0 and apply ~ before shifting/masking.
  enum {
    _action_bits  = 3,
    _reason_bits  = 5,
    _debug_id_bits = 23,
    _action_shift = 0,
    _reason_shift = _action_shift+_action_bits,     // == 3
    _debug_id_shift = _reason_shift+_reason_bits,   // == 8
    // Number of bytecode "cases" tracked per histogram cell (the same in
    // all build configurations here).
    BC_CASE_LIMIT = 4 // for _deoptimization_hist
  };
162
  // Why/how the topmost compiled frame is being unpacked into interpreter
  // frames.  NOTE(review): presumably this is the exec_mode value threaded
  // through uncommon_trap()/fetch_unroll_info_helper() — confirm in the .cpp.
  enum UnpackType {
    Unpack_deopt         = 0, // normal deoptimization, use pc computed in unpack_vframe_on_stack
    Unpack_exception     = 1, // exception is pending
    Unpack_uncommon_trap = 2, // redo last byte code (C2 only)
    Unpack_reexecute     = 3, // reexecute bytecode (C1 only)
    Unpack_none          = 4, // not deoptimizing the frame, just reallocating/relocking for JVMTI
    Unpack_LIMIT         = 5
  };
171
#if INCLUDE_JVMCI
  // Can reconstruct virtualized unsafe large accesses to byte arrays.
  static const int _support_large_access_byte_array_virtualization = 1;
#endif

  // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
  // activations using those nmethods. Scan of the code cache is done to
  // find all marked nmethods and they are made not_entrant.
  static void deoptimize_all_marked();

  // NOTE(review): presumably transfers callee-saved register values between
  // the physical frame f and vframe_array during unpacking — confirm the
  // direction against the .cpp before relying on it.
  static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);

  // Performs an uncommon trap for compiled code.
  // The top most compiler frame is converted into interpreter frames.
  static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
  // Helper routine that enters the VM and may block.
  static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);

  // Deoptimizes the frame identified by id.
  // Only called from VMDeoptimizeFrame.
  // @argument thread.  Thread where stub_frame resides.
  // @argument id.      id of frame that should be deoptimized.
  static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);

  // If thread is not the current thread then execute
  // VM_DeoptimizeFrame, otherwise deoptimize directly.
  static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
  static void deoptimize_frame(JavaThread* thread, intptr_t* id);

  // Statistics.  NOTE(review): presumably records one event for nm in
  // _deoptimization_hist, whose trailing dimensions match (reason, action,
  // bc) — confirm what the leading dimension indexes in the .cpp.
  static void gather_statistics(nmethod* nm, DeoptReason reason, DeoptAction action,
                                Bytecodes::Code bc = Bytecodes::_illegal);
  static void print_statistics();
  static void print_statistics_on(outputStream* st);

  // NOTE(review): prints one titled statistics section; confirm the meaning
  // of lvl (verbosity? compilation level?) against the .cpp.
  static void print_statistics_on(const char* title, int lvl, outputStream* st);

  // How much room to adjust the last frame's SP by, to make space for
  // the callee's interpreter frame (which expects locals to be next to
  // incoming arguments)
  static int last_frame_adjust(int callee_parameters, int callee_locals);
340
341 // trap_request codes
342 static DeoptReason trap_request_reason(int trap_request) {
343 if (trap_request < 0)
344 return (DeoptReason)
345 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
346 else
347 // standard reason for unloaded CP entry
348 return Reason_unloaded;
349 }
350 static DeoptAction trap_request_action(int trap_request) {
351 if (trap_request < 0)
352 return (DeoptAction)
353 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
354 else
470 int trap_bci,
471 DeoptReason reason,
472 bool update_total_trap_count,
473 #if INCLUDE_JVMCI
474 bool is_osr,
475 #endif
476 Method* compiled_method,
477 //outputs:
478 uint& ret_this_trap_count,
479 bool& ret_maybe_prior_trap,
480 bool& ret_maybe_prior_recompile);
  // Class loading support for uncommon trap: resolve the constant pool
  // entry at index (may throw, hence TRAPS).
  static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);

  // Does the real work for fetch_unroll_info.  NOTE(review): presumably its
  // in-VM helper — confirm in the .cpp.
  static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);

  // Action used for the standard unloaded-CP-entry trap reason.
  static DeoptAction _unloaded_action; // == Action_reinterpret;
  // Printable names, indexed by DeoptReason / DeoptAction; must stay in
  // sync with those enums (see the notes on the enums).
  static const char* _trap_reason_name[];
  static const char* _trap_action_name[];

  // Trap counters; trailing dimensions are (reason, action, bytecode case).
  // NOTE(review): the magic leading dimension 1 + 4 + 5 deserves named
  // constants — confirm what it indexes (it looks like a per-compiler/tier
  // split) against the .cpp.
  static juint _deoptimization_hist[1 + 4 + 5][Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
  // Note: Histogram array size is 1-2 Kb.
  // NOTE(review): the size note above looks stale — the new leading
  // dimension makes this array roughly 10x larger than the previous
  // [Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT] layout; recompute or drop.

 public:
  static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);

  static void init_counters();
  static void print_counters_on(outputStream* st);
498 };
499
500 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|