141 };
142
143 // What action must be taken by the runtime?
144 // Note: Keep this enum in sync. with Deoptimization::_trap_action_name.
enum DeoptAction {
  Action_none                = 0, // continue in the interpreter; nmethod stays valid
  Action_maybe_recompile     = 1, // request a recompile; the nmethod need not be invalidated
  Action_reinterpret         = 2, // invalidate the nmethod, reset inline caches, maybe recompile
  Action_make_not_entrant    = 3, // invalidate the nmethod; a recompile will probably follow
  Action_make_not_compilable = 4, // invalidate the nmethod and never compile this method again
  Action_LIMIT               = 5  // number of distinct actions, for table sizing
};
153
// Bit-field layout of the packed trap_request word: the DeoptAction sits in
// the low bits, then the DeoptReason, then a debug id (3 + 5 + 23 = 31 value
// bits; the sign bit marks a packed request -- see trap_request_reason()).
154 enum {
155 _action_bits = 3,  // wide enough for Action_LIMIT (== 5)
156 _reason_bits = 5,  // field for DeoptReason -- NOTE(review): assumes Reason_LIMIT <= 32; confirm
157 _debug_id_bits = 23,  // remaining bits carry a debug id
158 _action_shift = 0,  // action occupies the least-significant bits
159 _reason_shift = _action_shift+_action_bits,  // == 3
160 _debug_id_shift = _reason_shift+_reason_bits,  // == 8
161 BC_CASE_LIMIT = PRODUCT_ONLY(1) NOT_PRODUCT(4) // for _deoptimization_hist: 1 bytecode bucket in product builds, 4 otherwise
162 };
163
// Why the frame is being unpacked back into interpreter frames.
enum UnpackType {
  Unpack_deopt         = 0, // ordinary deoptimization; pc computed in unpack_vframe_on_stack
  Unpack_exception     = 1, // an exception is pending at the deopt point
  Unpack_uncommon_trap = 2, // redo the last bytecode (C2 only)
  Unpack_reexecute     = 3, // reexecute the bytecode (C1 only)
  Unpack_none          = 4, // no deopt: just reallocating/relocking for JVMTI
  Unpack_LIMIT         = 5  // number of unpack kinds
};
172
173 #if INCLUDE_JVMCI
174 // Can reconstruct virtualized unsafe large accesses to byte arrays.
// NOTE(review): presumably a capability flag advertised to the JVMCI
// compiler; confirm against the JVMCI callers before changing the value.
175 static const int _support_large_access_byte_array_virtualization = 1;
176 #endif
177
178 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
179 // activations using those nmethods. Scan of the code cache is done to
180 // find all marked nmethods and they are made not_entrant.
181 static void deoptimize_all_marked();
310 static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);
311
312 // Performs an uncommon trap for compiled code.
313 // The top-most compiled frame is converted into interpreter frames.
314 static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
315 // Helper routine that enters the VM and may block.
316 static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);
317
318 // Deoptimizes the frame identified by id.
319 // Only called from VMDeoptimizeFrame.
320 // @argument thread. Thread where stub_frame resides.
321 // @argument id. id of frame that should be deoptimized.
322 static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);
323
324 // If thread is not the current thread then execute
325 // VM_DeoptimizeFrame, otherwise deoptimize directly.
326 static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
327 static void deoptimize_frame(JavaThread* thread, intptr_t* id); // NOTE(review): presumably supplies a default reason -- confirm in the .cpp
328
329 // Statistics bookkeeping for deoptimization events (see _deoptimization_hist).
330 static void gather_statistics(DeoptReason reason, DeoptAction action,
331 Bytecodes::Code bc = Bytecodes::_illegal);
332 static void print_statistics();
333
334 // How much room to adjust the last frame's SP by, to make space for
335 // the callee's interpreter frame (which expects locals to be next to
336 // incoming arguments)
337 static int last_frame_adjust(int callee_parameters, int callee_locals);
338
339 // trap_request codes: decoders for the packed bit-field word described above
340 static DeoptReason trap_request_reason(int trap_request) {
341 if (trap_request < 0)
342 return (DeoptReason)
343 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
344 else
345 // standard reason for unloaded CP entry
346 return Reason_unloaded;
347 }
348 static DeoptAction trap_request_action(int trap_request) {
349 if (trap_request < 0)
350 return (DeoptAction)
351 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
352 else
468 int trap_bci,
469 DeoptReason reason,
470 bool update_total_trap_count,
471 #if INCLUDE_JVMCI
472 bool is_osr,
473 #endif
474 Method* compiled_method,
475 //outputs:
476 uint& ret_this_trap_count,
477 bool& ret_maybe_prior_trap,
478 bool& ret_maybe_prior_recompile);
479 // class loading support for uncommon trap
480 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
481
482 static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);
483
484 static DeoptAction _unloaded_action; // == Action_reinterpret;
// Human-readable names for the DeoptReason/DeoptAction enums; the action
// table must stay in sync with DeoptAction (see the note on that enum).
485 static const char* _trap_reason_name[];
486 static const char* _trap_action_name[];
487
// Per-(reason, action, bytecode-case) counters updated by gather_statistics().
// NOTE(review): the "1+" in the action dimension adds an extra bucket --
// presumably for events without a specific action; confirm in deoptimization.cpp.
488 static juint _deoptimization_hist[Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
489 // Note: Histogram array size is 1-2 Kb.
490
491 public:
// Called from the interpreter to bump trap counters in trap_mdo.
// NOTE(review): reason is presumably an int-encoded DeoptReason -- confirm.
492 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
493 };
494
495 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|
141 };
142
143 // What action must be taken by the runtime?
144 // Note: Keep this enum in sync. with Deoptimization::_trap_action_name.
enum DeoptAction {
  Action_none                = 0, // continue in the interpreter; nmethod stays valid
  Action_maybe_recompile     = 1, // request a recompile; the nmethod need not be invalidated
  Action_reinterpret         = 2, // invalidate the nmethod, reset inline caches, maybe recompile
  Action_make_not_entrant    = 3, // invalidate the nmethod; a recompile will probably follow
  Action_make_not_compilable = 4, // invalidate the nmethod and never compile this method again
  Action_LIMIT               = 5  // number of distinct actions, for table sizing
};
153
// Bit-field layout of the packed trap_request word: action in the low bits,
// then reason, then a debug id (3 + 5 + 23 = 31 value bits; the sign bit
// marks a packed request -- see trap_request_reason()).
enum {
  _action_bits    = 3,                             // wide enough for Action_LIMIT (== 5)
  _reason_bits    = 5,                             // field for the DeoptReason
  _debug_id_bits  = 23,                            // remaining bits carry a debug id
  _action_shift   = 0,                             // action occupies the least-significant bits
  _reason_shift   = _action_shift + _action_bits,  // == 3
  _debug_id_shift = _reason_shift + _reason_bits,  // == 8
  BC_CASE_LIMIT   = 4                              // bytecode buckets in _deoptimization_hist
};
163
// Why the frame is being unpacked back into interpreter frames.
enum UnpackType {
  Unpack_deopt         = 0, // ordinary deoptimization; pc computed in unpack_vframe_on_stack
  Unpack_exception     = 1, // an exception is pending at the deopt point
  Unpack_uncommon_trap = 2, // redo the last bytecode (C2 only)
  Unpack_reexecute     = 3, // reexecute the bytecode (C1 only)
  Unpack_none          = 4, // no deopt: just reallocating/relocking for JVMTI
  Unpack_LIMIT         = 5  // number of unpack kinds
};
172
173 #if INCLUDE_JVMCI
174 // Can reconstruct virtualized unsafe large accesses to byte arrays.
// NOTE(review): presumably a capability flag advertised to the JVMCI
// compiler; confirm against the JVMCI callers before changing the value.
175 static const int _support_large_access_byte_array_virtualization = 1;
176 #endif
177
178 // Make all nmethods that are marked_for_deoptimization not_entrant and deoptimize any live
179 // activations using those nmethods. Scan of the code cache is done to
180 // find all marked nmethods and they are made not_entrant.
181 static void deoptimize_all_marked();
310 static void unwind_callee_save_values(frame* f, vframeArray* vframe_array);
311
312 // Performs an uncommon trap for compiled code.
313 // The top-most compiled frame is converted into interpreter frames.
314 static UnrollBlock* uncommon_trap(JavaThread* current, jint unloaded_class_index, jint exec_mode);
315 // Helper routine that enters the VM and may block.
316 static void uncommon_trap_inner(JavaThread* current, jint unloaded_class_index);
317
318 // Deoptimizes the frame identified by id.
319 // Only called from VMDeoptimizeFrame.
320 // @argument thread. Thread where stub_frame resides.
321 // @argument id. id of frame that should be deoptimized.
322 static void deoptimize_frame_internal(JavaThread* thread, intptr_t* id, DeoptReason reason);
323
324 // If thread is not the current thread then execute
325 // VM_DeoptimizeFrame, otherwise deoptimize directly.
326 static void deoptimize_frame(JavaThread* thread, intptr_t* id, DeoptReason reason);
327 static void deoptimize_frame(JavaThread* thread, intptr_t* id); // NOTE(review): presumably supplies a default reason -- confirm in the .cpp
328
329 // Statistics bookkeeping for deoptimization events (see _deoptimization_hist);
329 // the nmethod parameter lets the event be attributed to a specific compile.
330 static void gather_statistics(nmethod* nm, DeoptReason reason, DeoptAction action,
331 Bytecodes::Code bc = Bytecodes::_illegal);
332 static void print_statistics();
333 static void print_statistics_on(outputStream* st); // prints to the given stream instead of the default
334
// NOTE(review): presumably prints one titled/leveled section of the
// statistics -- confirm the meaning of title/lvl in deoptimization.cpp.
335 static void print_statistics_on(const char* title, int lvl, outputStream* st);
336
337 // How much room to adjust the last frame's SP by, to make space for
338 // the callee's interpreter frame (which expects locals to be next to
339 // incoming arguments)
340 static int last_frame_adjust(int callee_parameters, int callee_locals);
341
342 // trap_request codes
343 static DeoptReason trap_request_reason(int trap_request) {
344 if (trap_request < 0)
345 return (DeoptReason)
346 ((~(trap_request) >> _reason_shift) & right_n_bits(_reason_bits));
347 else
348 // standard reason for unloaded CP entry
349 return Reason_unloaded;
350 }
351 static DeoptAction trap_request_action(int trap_request) {
352 if (trap_request < 0)
353 return (DeoptAction)
354 ((~(trap_request) >> _action_shift) & right_n_bits(_action_bits));
355 else
471 int trap_bci,
472 DeoptReason reason,
473 bool update_total_trap_count,
474 #if INCLUDE_JVMCI
475 bool is_osr,
476 #endif
477 Method* compiled_method,
478 //outputs:
479 uint& ret_this_trap_count,
480 bool& ret_maybe_prior_trap,
481 bool& ret_maybe_prior_recompile);
482 // class loading support for uncommon trap
483 static void load_class_by_index(const constantPoolHandle& constant_pool, int index, TRAPS);
484
485 static UnrollBlock* fetch_unroll_info_helper(JavaThread* current, int exec_mode);
486
487 static DeoptAction _unloaded_action; // == Action_reinterpret;
// Human-readable names for the DeoptReason/DeoptAction enums; the action
// table must stay in sync with DeoptAction (see the note on that enum).
488 static const char* _trap_reason_name[];
489 static const char* _trap_action_name[];
490
// Counters updated by gather_statistics() and printed by print_statistics*().
// NOTE(review): the leading [1 + 4 + 5] dimension looks like a per-compiler/
// per-tier index (1 + 4 + 5 buckets) -- confirm the indexing scheme in
// deoptimization.cpp before relying on it.
491 static juint _deoptimization_hist[1 + 4 + 5][Reason_LIMIT][1+Action_LIMIT][BC_CASE_LIMIT];
// NOTE(review): the size note below predates the extra leading dimension,
// which multiplies the footprint by 10 -- the "1-2 Kb" estimate is stale.
492 // Note: Histogram array size is 1-2 Kb.
493
494 public:
// Called from the interpreter to bump trap counters in trap_mdo.
// NOTE(review): reason is presumably an int-encoded DeoptReason -- confirm.
495 static void update_method_data_from_interpreter(MethodData* trap_mdo, int trap_bci, int reason);
496
// Lifecycle hooks for the statistics counters declared above.
497 static void init_counters();
498 static void print_counters_on(outputStream* st);
499 };
500
501 #endif // SHARE_RUNTIME_DEOPTIMIZATION_HPP
|