/*
 * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "code/codeCache.hpp"
#include "code/debugInfoRec.hpp"
#include "code/nmethod.hpp"
#include "code/pcDesc.hpp"
#include "code/scopeDesc.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiDeferredUpdates.hpp"
#include "runtime/basicLock.hpp"
#include "runtime/continuation.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/monitorChunk.hpp"
#include "runtime/registerMap.hpp"
#include "runtime/signature.hpp"
#include "runtime/stackValue.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vframeArray.hpp"
#include "runtime/vframe_hp.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif


// ------------- compiledVFrame --------------

StackValueCollection* compiledVFrame::locals() const {
  // Native methods have no scope
  if (scope() == nullptr) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>*  scv_list = scope()->locals();
  if (scv_list == nullptr) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the state of the locals.
  // There is one scv_list entry for every local variable slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(scv_list->at(i)));
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_locals.
  if (!register_map()->in_cont()) { // LOOM TODO
    GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
    if (list != nullptr) {
      // In practice this list is empty or the search hits a single element
      for (int i = 0; i < list->length(); i++) {
        if (list->at(i)->matches(this)) {
          list->at(i)->update_locals(result);
          break;
        }
      }
    }
  }

  return result;
}


void compiledVFrame::set_locals(StackValueCollection* values) const {

  fatal("Should use update_local for each local update");
}

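// Deferred writes are keyed by a single flat index: locals occupy
// [0, max_locals), expression stack slots occupy
// [max_locals, max_locals + max_stack), and monitors follow at
// max_locals + max_stack + monitor_index. For example, with
// max_locals == 3 and max_stack == 2, index 4 denotes expression stack
// slot 1 and index 5 denotes monitor 0. The same encoding is decoded
// again in jvmtiDeferredLocalVariableSet::update_locals(),
// update_stack() and update_monitors().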
void compiledVFrame::update_local(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_locals(), "out of bounds");
  update_deferred_value(type, index, value);
}

void compiledVFrame::update_stack(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_stack(), "out of bounds");
  update_deferred_value(type, index + method()->max_locals(), value);
}

void compiledVFrame::update_monitor(int index, MonitorInfo* val) {
  assert(index >= 0, "out of bounds");
  jvalue value;
  value.l = cast_from_oop<jobject>(val->owner());
  update_deferred_value(T_OBJECT, index + method()->max_locals() + method()->max_stack(), value);
}

void compiledVFrame::update_deferred_value(BasicType type, int index, jvalue value) {
  assert(fr().is_deoptimized_frame(), "frame must be scheduled for deoptimization");
  assert(!Continuation::is_frame_in_continuation(thread(), fr()), "No support for deferred values in continuations");
  GrowableArray<jvmtiDeferredLocalVariableSet*>* deferred = JvmtiDeferredUpdates::deferred_locals(thread());
  jvmtiDeferredLocalVariableSet* locals = nullptr;
  if (deferred != nullptr) {
    // See if this vframe has already had locals with deferred writes
    for (int f = 0; f < deferred->length(); f++) {
      if (deferred->at(f)->matches(this)) {
        locals = deferred->at(f);
        break;
      }
    }
    // No matching set found; a new one is pushed below
  } else {
    // No deferred updates pending for this thread.
    // Allocate the update list on the C heap.
    JvmtiDeferredUpdates::create_for(thread());
    deferred = JvmtiDeferredUpdates::deferred_locals(thread());
  }
  if (locals == nullptr) {
    locals = new jvmtiDeferredLocalVariableSet(method(), bci(), fr().id(), vframe_id());
    deferred->push(locals);
    assert(locals->id() == fr().id(), "Huh? Must match");
  }
  locals->set_value_at(index, type, value);
}

// After object deoptimization, that is object reallocation and relocking, we
// create deferred updates for all objects in scope. No new update will be
// created if a deferred update already exists. It is not easy to see how this
// is achieved: the deoptimized objects are in the arrays returned by locals(),
// expressions(), and monitors(). For each object in these arrays we create a
// deferred update. If an update already exists, it has already overridden the
// corresponding deoptimized object returned in one of the arrays, so the
// original update is kept.
void compiledVFrame::create_deferred_updates_after_object_deoptimization() {
  // locals
  GrowableArray<ScopeValue*>* scopedValues = scope()->locals();
  StackValueCollection* lcls = locals();
  if (lcls != nullptr) {
    for (int i2 = 0; i2 < lcls->size(); i2++) {
      StackValue* var = lcls->at(i2);
      if (var->type() == T_OBJECT && scopedValues->at(i2)->is_object()) {
        jvalue val;
        val.l = cast_from_oop<jobject>(lcls->at(i2)->get_obj()());
        update_local(T_OBJECT, i2, val);
      }
    }
  }

  // expressions
  GrowableArray<ScopeValue*>* scopeExpressions = scope()->expressions();
  StackValueCollection* exprs = expressions();
  if (exprs != nullptr) {
    for (int i2 = 0; i2 < exprs->size(); i2++) {
      StackValue* var = exprs->at(i2);
      if (var->type() == T_OBJECT && scopeExpressions->at(i2)->is_object()) {
        jvalue val;
        val.l = cast_from_oop<jobject>(exprs->at(i2)->get_obj()());
        update_stack(T_OBJECT, i2, val);
      }
    }
  }

  // monitors
  GrowableArray<MonitorInfo*>* mtrs = monitors();
  if (mtrs != nullptr) {
    for (int i2 = 0; i2 < mtrs->length(); i2++) {
      if (mtrs->at(i2)->eliminated()) {
        assert(!mtrs->at(i2)->owner_is_scalar_replaced(),
               "reallocation failure, should not update");
        update_monitor(i2, mtrs->at(i2));
      }
    }
  }
}

StackValueCollection* compiledVFrame::expressions() const {
  // Native methods have no scope
  if (scope() == nullptr) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>*  scv_list = scope()->expressions();
  if (scv_list == nullptr) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the expression stack.
  // There is one scv_list entry for every expression stack slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(scv_list->at(i)));
  }

  if (!register_map()->in_cont()) { // LOOM TODO
    // Replace the original values with any stores that have been
    // performed through compiledVFrame::update_stack.
    GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
    if (list != nullptr) {
      // In practice this list is empty or the search hits a single element
      for (int i = 0; i < list->length(); i++) {
        if (list->at(i)->matches(this)) {
          list->at(i)->update_stack(result);
          break;
        }
      }
    }
  }

  return result;
}


// The implementation of the following two methods was factored out into the
// class StackValue because it is also used from within deoptimization.cpp for
// rematerialization and relocking of non-escaping objects.

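// When this vframe belongs to a continuation, the scope values live in a
// heap-allocated stack chunk rather than on the thread stack. Temporarily
// install this vframe's chunk in the register map so that
// StackValue::create_stack_value() reads relative to the right chunk, then
// restore the map's previous chunk and index.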
StackValue *compiledVFrame::create_stack_value(ScopeValue *sv) const {
  stackChunkOop c = _reg_map.stack_chunk()();
  int index = _reg_map.stack_chunk_index();
  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk(_chunk());

  StackValue* res = StackValue::create_stack_value(&_fr, register_map(), sv);

  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk(c);
  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk_index(index);
  return res;
}

BasicLock* compiledVFrame::resolve_monitor_lock(Location location) const {
  return StackValue::resolve_monitor_lock(&_fr, location);
}


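// Returns the monitors owned by this scope. For a synchronized native method
// the receiver/monitor pair is reconstructed from the frame itself; for a
// compiled Java frame the MonitorValues from the debug info are decoded,
// substituting the owner's klass mirror when the owner was scalar replaced.
// Pending deferred monitor updates for this frame are applied last.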
GrowableArray<MonitorInfo*>* compiledVFrame::monitors() const {
  // Native methods have no scope
  if (scope() == nullptr) {
    CompiledMethod* nm = code();
    Method* method = nm->method();
    assert(method->is_native(), "Expect a native method");
    if (!method->is_synchronized()) {
      return new GrowableArray<MonitorInfo*>(0);
    }
    // This monitor is not strictly needed, but return it for now as it might
    // be useful for stack traces and tools
    GrowableArray<MonitorInfo*> *monitors = new GrowableArray<MonitorInfo*>(1);
    // Casting away const
    frame& fr = (frame&) _fr;
    MonitorInfo* info = new MonitorInfo(
        fr.get_native_receiver(), fr.get_native_monitor(), false, false);
    monitors->push(info);
    return monitors;
  }
  GrowableArray<MonitorValue*>* monitors = scope()->monitors();
  if (monitors == nullptr) {
    return new GrowableArray<MonitorInfo*>(0);
  }
  GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(monitors->length());
  for (int index = 0; index < monitors->length(); index++) {
    MonitorValue* mv = monitors->at(index);
    ScopeValue*   ov = mv->owner();
    StackValue *owner_sv = create_stack_value(ov); // it is an oop
    if (ov->is_object() && owner_sv->obj_is_scalar_replaced()) { // The owner object was scalar replaced
      assert(mv->eliminated(), "monitor should be eliminated for scalar replaced object");
      // Put the klass for the scalar replaced object.
      ScopeValue* kv = ((ObjectValue *)ov)->klass();
      assert(kv->is_constant_oop(), "klass should be oop constant for scalar replaced object");
      Handle k(Thread::current(), ((ConstantOopReadValue*)kv)->value()());
      assert(java_lang_Class::is_instance(k()), "must be");
      result->push(new MonitorInfo(k(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), true));
    } else {
      result->push(new MonitorInfo(owner_sv->get_obj()(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), false));
    }
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_monitors.
  GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
  if (list != nullptr) {
    // In practice this list is empty or the search hits a single element
    for (int i = 0; i < list->length(); i++) {
      if (list->at(i)->matches(this)) {
        list->at(i)->update_monitors(result);
        break;
      }
    }
  }

  return result;
}


compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, CompiledMethod* nm)
: javaVFrame(fr, reg_map, thread) {
  _scope  = nullptr;
  _vframe_id = 0;
  // Compiled method (native stub or Java code)
  // Native wrappers have no scope data; it is implied.
  if (!nm->is_compiled() || !nm->as_compiled_method()->is_native_method()) {
    _scope  = nm->scope_desc_at(_fr.pc());
  }
}

compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, ScopeDesc* scope, int vframe_id)
: javaVFrame(fr, reg_map, thread) {
  _scope  = scope;
  _vframe_id = vframe_id;
  guarantee(_scope != nullptr, "scope must be present");
}

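// Returns the vframe within this physical frame that corresponds to the given
// scope decode offset, creating a new compiledVFrame if this one does not
// already match. Each inlined scope in a compiled frame has its own decode
// offset and vframe id.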
compiledVFrame* compiledVFrame::at_scope(int decode_offset, int vframe_id) {
  if (scope()->decode_offset() != decode_offset) {
    ScopeDesc* scope = this->scope()->at_offset(decode_offset);
    return new compiledVFrame(frame_pointer(), register_map(), thread(), scope, vframe_id);
  }
  assert(_vframe_id == vframe_id, "wrong frame id");
  return this;
}

bool compiledVFrame::is_top() const {
  // FIX IT: Remove this when new native stubs are in place
  if (scope() == nullptr) return true;
  return scope()->is_top();
}


CompiledMethod* compiledVFrame::code() const {
  return CodeCache::find_compiled(_fr.pc());
}


Method* compiledVFrame::method() const {
  if (scope() == nullptr) {
    // native nmethods have no scope; the method is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return nm->method();
  }
  return scope()->method();
}


int compiledVFrame::bci() const {
  int raw = raw_bci();
  return raw == SynchronizationEntryBCI ? 0 : raw;
}


int compiledVFrame::raw_bci() const {
  if (scope() == nullptr) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return 0;
  }
  return scope()->bci();
}

bool compiledVFrame::should_reexecute() const {
  if (scope() == nullptr) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return false;
  }
  return scope()->should_reexecute();
}

bool compiledVFrame::has_ea_local_in_scope() const {
  if (scope() == nullptr) {
    // native nmethod, all objects escape
    assert(code()->as_nmethod()->is_native_method(), "must be native");
    return false;
  }
  return (scope()->objects() != nullptr) || scope()->has_ea_local_in_scope();
}

bool compiledVFrame::arg_escape() const {
  if (scope() == nullptr) {
    // native nmethod, all objects escape
    assert(code()->as_nmethod()->is_native_method(), "must be native");
    return false;
  }
  return scope()->arg_escape();
}

vframe* compiledVFrame::sender() const {
  const frame f = fr();
  if (scope() == nullptr) {
    // native nmethods have no scope; the method/bci is implied
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
    return vframe::sender();
  } else {
    return scope()->is_top()
      ? vframe::sender()
      : new compiledVFrame(&f, register_map(), thread(), scope()->sender(), vframe_id() + 1);
  }
}

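// ------------- jvmtiDeferredLocalVariableSet --------------

// A set collects all deferred JVMTI writes for one (physical frame, inlined
// scope) pair, identified by the frame id and the vframe id. Sets and their
// entries live on the C heap so that they survive beyond the current stack
// walk (see the constructor below).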
jvmtiDeferredLocalVariableSet::jvmtiDeferredLocalVariableSet(Method* method, int bci, intptr_t* id, int vframe_id) {
  _method = method;
  _bci = bci;
  _id = id;
  _vframe_id = vframe_id;
  // We always need at least one entry; the array must live on the C heap.
  _locals = new(mtCompiler) GrowableArray<jvmtiDeferredLocalVariable*> (1, mtCompiler);
  _objects_are_deoptimized = false;
}

jvmtiDeferredLocalVariableSet::~jvmtiDeferredLocalVariableSet() {
  for (int i = 0; i < _locals->length(); i++) {
    delete _locals->at(i);
  }
  // Free the GrowableArray and its C-heap elements
  delete _locals;
}

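// A set matches a compiledVFrame when both the frame id and the vframe id
// agree; the method and bci must then be consistent as well.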
bool jvmtiDeferredLocalVariableSet::matches(const vframe* vf) {
  if (!vf->is_compiled_frame()) return false;
  compiledVFrame* cvf = (compiledVFrame*)vf;
  if (cvf->fr().id() == id() && cvf->vframe_id() == vframe_id()) {
    assert(cvf->method() == method() && cvf->bci() == bci(), "must agree");
    return true;
  }
  return false;
}

void jvmtiDeferredLocalVariableSet::set_value_at(int idx, BasicType type, jvalue val) {
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->index() == idx) {
      assert(_locals->at(i)->type() == type, "Wrong type");
      _locals->at(i)->set_value(val);
      return;
    }
  }
  _locals->push(new jvmtiDeferredLocalVariable(idx, type, val));
}

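// Writes a single jvalue into the given StackValueCollection slot. Sub-int
// types (boolean, byte, char, short) are widened to int, matching how the
// interpreter represents them in locals and on the expression stack.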
void jvmtiDeferredLocalVariableSet::update_value(StackValueCollection* locals, BasicType type, int index, jvalue value) {
  switch (type) {
    case T_BOOLEAN:
      locals->set_int_at(index, value.z);
      break;
    case T_CHAR:
      locals->set_int_at(index, value.c);
      break;
    case T_FLOAT:
      locals->set_float_at(index, value.f);
      break;
    case T_DOUBLE:
      locals->set_double_at(index, value.d);
      break;
    case T_BYTE:
      locals->set_int_at(index, value.b);
      break;
    case T_SHORT:
      locals->set_int_at(index, value.s);
      break;
    case T_INT:
      locals->set_int_at(index, value.i);
      break;
    case T_LONG:
      locals->set_long_at(index, value.j);
      break;
    case T_OBJECT:
      {
        Handle obj(Thread::current(), cast_to_oop(value.l));
        locals->set_obj_at(index, obj);
      }
      break;
    default:
      ShouldNotReachHere();
  }
}

void jvmtiDeferredLocalVariableSet::update_locals(StackValueCollection* locals) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= 0 && val->index() < method()->max_locals()) {
      update_value(locals, val->type(), val->index(), val->value());
    }
  }
}


void jvmtiDeferredLocalVariableSet::update_stack(StackValueCollection* expressions) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() && val->index() < method()->max_locals() + method()->max_stack()) {
      update_value(expressions, val->type(), val->index() - method()->max_locals(), val->value());
    }
  }
}

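// Applies deferred monitor-owner updates. Indices at or beyond
// max_locals + max_stack map back to monitor slots, mirroring the encoding
// used by compiledVFrame::update_monitor().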
void jvmtiDeferredLocalVariableSet::update_monitors(GrowableArray<MonitorInfo*>* monitors) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() + method()->max_stack()) {
      int lock_index = val->index() - (method()->max_locals() + method()->max_stack());
      MonitorInfo* info = monitors->at(lock_index);
      // Originally the owner may have been scalar replaced but as an update
      // exists it must have been deoptimized, i.e. reallocated to the heap, and
      // now it is considered not to be scalar replaced.
      MonitorInfo* new_info = new MonitorInfo((oopDesc*)val->value().l, info->lock(),
                                              info->eliminated(), false);
      monitors->at_put(lock_index, new_info);
    }
  }
}


void jvmtiDeferredLocalVariableSet::oops_do(OopClosure* f) {
  // The Method* is on the stack so a live activation keeps it alive
  // either by mirror in interpreter or code in compiled code.
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->type() == T_OBJECT) {
      f->do_oop(_locals->at(i)->oop_addr());
    }
  }
}

jvmtiDeferredLocalVariable::jvmtiDeferredLocalVariable(int index, BasicType type, jvalue value) {
  _index = index;
  _type = type;
  _value = value;
}