/*
 * Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "classfile/javaClasses.inline.hpp"
#include "code/codeCache.hpp"
#include "code/debugInfoRec.hpp"
#include "code/nmethod.hpp"
#include "code/pcDesc.hpp"
#include "code/scopeDesc.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/oopMapCache.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/oop.inline.hpp"
#include "prims/jvmtiDeferredUpdates.hpp"
#include "runtime/basicLock.hpp"
#include "runtime/continuation.hpp"
#include "runtime/frame.inline.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/monitorChunk.hpp"
#include "runtime/registerMap.hpp"
#include "runtime/signature.hpp"
#include "runtime/stackValue.hpp"
#include "runtime/stubRoutines.hpp"
#include "runtime/vframeArray.hpp"
#include "runtime/vframe_hp.hpp"
#ifdef COMPILER2
#include "opto/matcher.hpp"
#endif


// ------------- compiledVFrame --------------

StackValueCollection* compiledVFrame::locals() const {
  // Native methods have no scope
  if (scope() == nullptr) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>*  scv_list = scope()->locals();
  if (scv_list == nullptr) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the state of the locals.
  // There is one scv_list entry for every local slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(scv_list->at(i)));
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_locals.
  if (!register_map()->in_cont()) { // LOOM TODO
    GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
    if (list != nullptr) {
      // In practice this list is empty or the search matches a single element
      for (int i = 0; i < list->length(); i++) {
        if (list->at(i)->matches(this)) {
          list->at(i)->update_locals(result);
          break;
        }
      }
    }
  }

  return result;
}


void compiledVFrame::set_locals(StackValueCollection* values) const {

  fatal("Should use update_local for each local update");
}

void compiledVFrame::update_local(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_locals(), "out of bounds");
  update_deferred_value(type, index, value);
}

void compiledVFrame::update_stack(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_stack(), "out of bounds");
  update_deferred_value(type, index + method()->max_locals(), value);
}

void compiledVFrame::update_monitor(int index, MonitorInfo* val) {
  assert(index >= 0, "out of bounds");
  jvalue value;
  value.l = cast_from_oop<jobject>(val->owner());
  update_deferred_value(T_OBJECT, index + method()->max_locals() + method()->max_stack(), value);
}

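// Deferred JVMTI writes for a vframe share one flat index space:
//   [0, max_locals)                        local variables
//   [max_locals, max_locals + max_stack)   expression stack slots
//   [max_locals + max_stack, ...)          monitor owners
// as encoded by update_local(), update_stack() and update_monitor() above.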
void compiledVFrame::update_deferred_value(BasicType type, int index, jvalue value) {
  assert(fr().is_deoptimized_frame(), "frame must be scheduled for deoptimization");
  assert(!Continuation::is_frame_in_continuation(thread(), fr()), "No support for deferred values in continuations");
  GrowableArray<jvmtiDeferredLocalVariableSet*>* deferred = JvmtiDeferredUpdates::deferred_locals(thread());
  jvmtiDeferredLocalVariableSet* locals = nullptr;
  if (deferred != nullptr) {
    // See if this vframe already has deferred writes for its locals
    for (int f = 0; f < deferred->length(); f++) {
      if (deferred->at(f)->matches(this)) {
        locals = deferred->at(f);
        break;
      }
    }
    // If no set matches this vframe, a new one is pushed below
  } else {
    // No deferred updates pending for this thread.
    // Allocate the holder in the C heap.
    JvmtiDeferredUpdates::create_for(thread());
    deferred = JvmtiDeferredUpdates::deferred_locals(thread());
  }
  if (locals == nullptr) {
    locals = new jvmtiDeferredLocalVariableSet(method(), bci(), fr().id(), vframe_id());
    deferred->push(locals);
    assert(locals->id() == fr().id(), "Huh? Must match");
  }
  locals->set_value_at(index, type, value);
}

// After object deoptimization, i.e. object reallocation and relocking, we
// create deferred updates for all objects in scope. No new update is created
// if a deferred update already exists. It is not easy to see how this is
// achieved: the deoptimized objects are in the arrays returned by locals(),
// expressions(), and monitors(). For each object in these arrays we create a
// deferred update. If an update already exists, its value overrides the
// corresponding deoptimized object returned in one of the arrays, so the
// original update is kept.
void compiledVFrame::create_deferred_updates_after_object_deoptimization() {
  // locals
  GrowableArray<ScopeValue*>* scopedValues = scope()->locals();
  StackValueCollection* lcls = locals();
  if (lcls != nullptr) {
    for (int i2 = 0; i2 < lcls->size(); i2++) {
      StackValue* var = lcls->at(i2);
      if (var->type() == T_OBJECT && scopedValues->at(i2)->is_object()) {
        jvalue val;
        val.l = cast_from_oop<jobject>(lcls->at(i2)->get_obj()());
        update_local(T_OBJECT, i2, val);
      }
    }
  }

  // expressions
  GrowableArray<ScopeValue*>* scopeExpressions = scope()->expressions();
  StackValueCollection* exprs = expressions();
  if (exprs != nullptr) {
    for (int i2 = 0; i2 < exprs->size(); i2++) {
      StackValue* var = exprs->at(i2);
      if (var->type() == T_OBJECT && scopeExpressions->at(i2)->is_object()) {
        jvalue val;
        val.l = cast_from_oop<jobject>(exprs->at(i2)->get_obj()());
        update_stack(T_OBJECT, i2, val);
      }
    }
  }

  // monitors
  GrowableArray<MonitorInfo*>* mtrs = monitors();
  if (mtrs != nullptr) {
    for (int i2 = 0; i2 < mtrs->length(); i2++) {
      if (mtrs->at(i2)->eliminated()) {
        assert(!mtrs->at(i2)->owner_is_scalar_replaced(),
               "reallocation failure, should not update");
        update_monitor(i2, mtrs->at(i2));
      }
    }
  }
}

StackValueCollection* compiledVFrame::expressions() const {
  // Native methods have no scope
  if (scope() == nullptr) return new StackValueCollection(0);
  GrowableArray<ScopeValue*>*  scv_list = scope()->expressions();
  if (scv_list == nullptr) return new StackValueCollection(0);

  // scv_list is the list of ScopeValues describing the JVM expression stack state.
  // There is one scv_list entry for every expression stack slot in use.
  int length = scv_list->length();
  StackValueCollection* result = new StackValueCollection(length);
  for (int i = 0; i < length; i++) {
    result->add(create_stack_value(scv_list->at(i)));
  }

  if (!register_map()->in_cont()) { // LOOM TODO
    // Replace the original values with any stores that have been
    // performed through compiledVFrame::update_stack.
    GrowableArray<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
    if (list != nullptr) {
      // In practice this list is empty or the search matches a single element
      for (int i = 0; i < list->length(); i++) {
        if (list->at(i)->matches(this)) {
          list->at(i)->update_stack(result);
          break;
        }
      }
    }
  }

  return result;
}


// The implementation of the following two methods was factored out into the
// class StackValue because it is also used from within deoptimization.cpp for
// rematerialization and relocking of non-escaping objects.

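// create_stack_value temporarily installs this vframe's stack chunk in the
// register map (restoring the previous chunk and index afterwards) so that
// values are read from the correct frame, which may live in a continuation's
// stack chunk rather than on the platform stack.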
StackValue *compiledVFrame::create_stack_value(ScopeValue *sv) const {
  stackChunkOop c = _reg_map.stack_chunk()();
  int index = _reg_map.stack_chunk_index();
  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk(_chunk());

  StackValue* res = StackValue::create_stack_value(&_fr, register_map(), sv);

  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk(c);
  const_cast<RegisterMap*>(&_reg_map)->set_stack_chunk_index(index);
  return res;
}

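// If this frame lives in a stack chunk its sp is chunk-relative, so
// derelativize it to an absolute frame before resolving the BasicLock location.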
BasicLock* compiledVFrame::resolve_monitor_lock(Location location) const {
  return StackValue::resolve_monitor_lock(stack_chunk() == nullptr ? _fr : stack_chunk()->derelativize(_fr), location);
}


GrowableArray<MonitorInfo*>* compiledVFrame::monitors() const {
  // Native methods have no scope
  if (scope() == nullptr) {
    nmethod* nm = code();
    Method* method = nm->method();
    assert(method->is_native(), "Expect a native method");
    if (!method->is_synchronized()) {
      return new GrowableArray<MonitorInfo*>(0);
    }
    // This monitor is not really needed but return it for now as it might be
    // useful for stack traces and tools.
    GrowableArray<MonitorInfo*> *monitors = new GrowableArray<MonitorInfo*>(1);
    // Casting away const
    frame& fr = (frame&) _fr;
    MonitorInfo* info = new MonitorInfo(
        fr.get_native_receiver(), fr.get_native_monitor(), false, false);
    monitors->push(info);
    return monitors;
  }
  GrowableArray<MonitorValue*>* monitors = scope()->monitors();
  if (monitors == nullptr) {
    return new GrowableArray<MonitorInfo*>(0);
  }
  GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(monitors->length());
  for (int index = 0; index < monitors->length(); index++) {
    MonitorValue* mv = monitors->at(index);
    ScopeValue*   ov = mv->owner();
    StackValue *owner_sv = create_stack_value(ov); // it is an oop
    if (ov->is_object() && owner_sv->obj_is_scalar_replaced()) { // The owner object was scalar replaced
      assert(mv->eliminated(), "monitor should be eliminated for scalar replaced object");
      // Use the klass mirror as the owner for the scalar replaced object.
      ScopeValue* kv = ((ObjectValue *)ov)->klass();
      assert(kv->is_constant_oop(), "klass should be oop constant for scalar replaced object");
      Handle k(Thread::current(), ((ConstantOopReadValue*)kv)->value()());
      assert(java_lang_Class::is_instance(k()), "must be");
      result->push(new MonitorInfo(k(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), true));
    } else {
      result->push(new MonitorInfo(owner_sv->get_obj()(), resolve_monitor_lock(mv->basic_lock()),
                                   mv->eliminated(), false));
    }
  }

  // Replace the original values with any stores that have been
  // performed through compiledVFrame::update_monitors.
  if (thread() == nullptr) return result; // Unmounted continuations have no thread so nothing to do.
  GrowableArrayView<jvmtiDeferredLocalVariableSet*>* list = JvmtiDeferredUpdates::deferred_locals(thread());
  if (list != nullptr) {
    // In practice this list is empty or the search matches a single element
    for (int i = 0; i < list->length(); i++) {
      if (list->at(i)->matches(this)) {
        list->at(i)->update_monitors(result);
        break;
      }
    }
  }

  return result;
}


compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, nmethod* nm)
: javaVFrame(fr, reg_map, thread) {
  _scope  = nullptr;
  _vframe_id = 0;
  // Compiled method (native stub or Java code)
  // Native wrappers have no scope data; it is implied.
  if (!nm->is_native_method()) {
    _scope  = nm->scope_desc_at(_fr.pc());
  }
}

compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, ScopeDesc* scope, int vframe_id)
: javaVFrame(fr, reg_map, thread) {
  _scope  = scope;
  _vframe_id = vframe_id;
  guarantee(_scope != nullptr, "scope must be present");
}

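// Return the compiledVFrame for the scope at decode_offset within the same
// physical frame; this vframe is reused if its scope already matches.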
compiledVFrame* compiledVFrame::at_scope(int decode_offset, int vframe_id) {
  if (scope()->decode_offset() != decode_offset) {
    ScopeDesc* scope = this->scope()->at_offset(decode_offset);
    return new compiledVFrame(frame_pointer(), register_map(), thread(), scope, vframe_id);
  }
  assert(_vframe_id == vframe_id, "wrong frame id");
  return this;
}

bool compiledVFrame::is_top() const {
  // FIX IT: Remove this when new native stubs are in place
  if (scope() == nullptr) return true;
  return scope()->is_top();
}


nmethod* compiledVFrame::code() const {
  return CodeCache::find_nmethod(_fr.pc());
}


Method* compiledVFrame::method() const {
  if (scope() == nullptr) {
    // Native nmethods have no scope; the method is implied
    nmethod* nm = code();
    assert(nm->is_native_method(), "must be native");
    return nm->method();
  }
  return scope()->method();
}


int compiledVFrame::bci() const {
  int raw = raw_bci();
  return raw == SynchronizationEntryBCI ? 0 : raw;
}


int compiledVFrame::raw_bci() const {
  if (scope() == nullptr) {
    // Native nmethods have no scope; the method/bci is implied
    nmethod* nm = code();
    assert(nm->is_native_method(), "must be native");
    return 0;
  }
  return scope()->bci();
}

bool compiledVFrame::should_reexecute() const {
  if (scope() == nullptr) {
    // Native nmethods have no scope; the method/bci is implied
    nmethod* nm = code();
    assert(nm->is_native_method(), "must be native");
    return false;
  }
  return scope()->should_reexecute();
}

bool compiledVFrame::has_ea_local_in_scope() const {
  if (scope() == nullptr) {
    // Native nmethod; all objects escape
    assert(code()->is_native_method(), "must be native");
    return false;
  }
  return (scope()->objects() != nullptr) || scope()->has_ea_local_in_scope();
}

bool compiledVFrame::arg_escape() const {
  if (scope() == nullptr) {
    // Native nmethod; all objects escape
    assert(code()->is_native_method(), "must be native");
    return false;
  }
  return scope()->arg_escape();
}

vframe* compiledVFrame::sender() const {
  const frame f = fr();
  if (scope() == nullptr) {
    // Native nmethods have no scope; the method/bci is implied
    nmethod* nm = code();
    assert(nm->is_native_method(), "must be native");
    return vframe::sender();
  } else {
    return scope()->is_top()
      ? vframe::sender()
      : new compiledVFrame(&f, register_map(), thread(), scope()->sender(), vframe_id() + 1);
  }
}

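// A jvmtiDeferredLocalVariableSet collects pending JVMTI writes (locals,
// expression stack slots and monitor owners) for a single compiled vframe,
// identified by its frame id and vframe id. The writes are applied on top of
// the scope-described values whenever locals(), expressions() or monitors()
// is queried for that vframe.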
jvmtiDeferredLocalVariableSet::jvmtiDeferredLocalVariableSet(Method* method, int bci, intptr_t* id, int vframe_id) {
  _method = method;
  _bci = bci;
  _id = id;
  _vframe_id = vframe_id;
  // We will always need at least one entry; it must live on the C heap.
  _locals = new(mtCompiler) GrowableArray<jvmtiDeferredLocalVariable*> (1, mtCompiler);
  _objects_are_deoptimized = false;
}

jvmtiDeferredLocalVariableSet::~jvmtiDeferredLocalVariableSet() {
  for (int i = 0; i < _locals->length(); i++) {
    delete _locals->at(i);
  }
  // Free the C-heap-allocated GrowableArray; its elements were deleted above.
  delete _locals;
}

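// A set matches a compiled vframe when both the physical frame id and the
// vframe id (the position in the inlining chain) agree.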
bool jvmtiDeferredLocalVariableSet::matches(const vframe* vf) {
  if (!vf->is_compiled_frame()) return false;
  compiledVFrame* cvf = (compiledVFrame*)vf;
  if (cvf->fr().id() == id() && cvf->vframe_id() == vframe_id()) {
    assert(cvf->method() == method() && cvf->bci() == bci(), "must agree");
    return true;
  }
  return false;
}

void jvmtiDeferredLocalVariableSet::set_value_at(int idx, BasicType type, jvalue val) {
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->index() == idx) {
      assert(_locals->at(i)->type() == type, "Wrong type");
      _locals->at(i)->set_value(val);
      return;
    }
  }
  _locals->push(new jvmtiDeferredLocalVariable(idx, type, val));
}

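// Write a single jvalue into the collection at the given index. Sub-word
// types (boolean, byte, char, short) are widened and stored as ints, matching
// how the interpreter represents them in locals and on the expression stack.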
void jvmtiDeferredLocalVariableSet::update_value(StackValueCollection* locals, BasicType type, int index, jvalue value) {
  switch (type) {
    case T_BOOLEAN:
      locals->set_int_at(index, value.z);
      break;
    case T_CHAR:
      locals->set_int_at(index, value.c);
      break;
    case T_FLOAT:
      locals->set_float_at(index, value.f);
      break;
    case T_DOUBLE:
      locals->set_double_at(index, value.d);
      break;
    case T_BYTE:
      locals->set_int_at(index, value.b);
      break;
    case T_SHORT:
      locals->set_int_at(index, value.s);
      break;
    case T_INT:
      locals->set_int_at(index, value.i);
      break;
    case T_LONG:
      locals->set_long_at(index, value.j);
      break;
    case T_OBJECT:
      {
        Handle obj(Thread::current(), cast_to_oop(value.l));
        locals->set_obj_at(index, obj);
      }
      break;
    default:
      ShouldNotReachHere();
  }
}

void jvmtiDeferredLocalVariableSet::update_locals(StackValueCollection* locals) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= 0 && val->index() < method()->max_locals()) {
      update_value(locals, val->type(), val->index(), val->value());
    }
  }
}


void jvmtiDeferredLocalVariableSet::update_stack(StackValueCollection* expressions) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() && val->index() < method()->max_locals() + method()->max_stack()) {
      update_value(expressions, val->type(), val->index() - method()->max_locals(), val->value());
    }
  }
}

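// Apply deferred monitor-owner updates. The lock index is the deferred index
// minus the start of the monitor section of the flat index space.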
void jvmtiDeferredLocalVariableSet::update_monitors(GrowableArray<MonitorInfo*>* monitors) {
  for (int l = 0; l < _locals->length(); l++) {
    jvmtiDeferredLocalVariable* val = _locals->at(l);
    if (val->index() >= method()->max_locals() + method()->max_stack()) {
      int lock_index = val->index() - (method()->max_locals() + method()->max_stack());
      MonitorInfo* info = monitors->at(lock_index);
      // The owner may originally have been scalar replaced, but since an update
      // exists the object must have been deoptimized, i.e. reallocated to the
      // heap, so it is no longer considered scalar replaced.
      MonitorInfo* new_info = new MonitorInfo((oopDesc*)val->value().l, info->lock(),
                                              info->eliminated(), false);
      monitors->at_put(lock_index, new_info);
    }
  }
}


void jvmtiDeferredLocalVariableSet::oops_do(OopClosure* f) {
  // The Method* is on the stack, so a live activation keeps it alive,
  // either via the mirror in interpreted frames or via the nmethod in
  // compiled frames.
  for (int i = 0; i < _locals->length(); i++) {
    if (_locals->at(i)->type() == T_OBJECT) {
      f->do_oop(_locals->at(i)->oop_addr());
    }
  }
}

jvmtiDeferredLocalVariable::jvmtiDeferredLocalVariable(int index, BasicType type, jvalue value) {
  _index = index;
  _type = type;
  _value = value;
}