1982 // - klass can be fastpath allocated (e.g. does not have finalizer)
1983 // - TLAB accepts the allocation
// Fast-path allocation for the 'new' bytecode: bump-pointer allocate from the
// current thread's TLAB when the constant-pool klass is already resolved.
1984 ConstantPool* constants = istate->method()->constants();
1985 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1986 Klass* entry = constants->resolved_klass_at(index);
1987 InstanceKlass* ik = InstanceKlass::cast(entry);
// Fast path also requires a fully initialized klass with no slow-path
// obligations (e.g. no finalizer — checked by can_be_fastpath_allocated()).
1988 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1989 size_t obj_size = ik->size_helper();
// TLAB allocation; nullptr means the TLAB could not satisfy the request,
// in which case we fall through to the slow path below.
1990 HeapWord* result = THREAD->tlab().allocate(obj_size);
1991 if (result != nullptr) {
1992 // Initialize object field block:
1993 // - if TLAB is pre-zeroed, we can skip this path
1994 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1995 // this area, and we still need to initialize it
1996 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1997 size_t hdr_size = oopDesc::header_size();
// Zero everything past the header; the header itself is written below.
1998 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
1999 }
2000
2001 // Initialize header, mirrors MemAllocator.
2002 oopDesc::set_mark(result, markWord::prototype());
// Zero the klass gap word next to the klass field (header padding).
2003 oopDesc::set_klass_gap(result, 0);
// Releasing store of the klass: ordered after the field zeroing above,
// so the object is never observed with a klass but stale fields.
2004 oopDesc::release_set_klass(result, ik);
2005
2006 oop obj = cast_to_oop(result);
2007
2008 // Must prevent reordering of stores for object initialization
2009 // with stores that publish the new object.
2010 OrderAccess::storestore();
2011 SET_STACK_OBJECT(obj, 0);
// Advance past the 3-byte 'new' instruction and push one stack slot.
2012 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2013 }
2014 }
2015 }
2016 // Slow case allocation
// Runtime call resolves/initializes the klass if needed and allocates;
// may safepoint/GC or throw — hence the handle_exception continuation.
2017 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2018 handle_exception);
2019 // Must prevent reordering of stores for object initialization
2020 // with stores that publish the new object.
2021 OrderAccess::storestore();
// The runtime returns the new object through the thread-local vm_result,
// which must be cleared once consumed.
2022 SET_STACK_OBJECT(THREAD->vm_result(), 0);
2023 THREAD->set_vm_result(nullptr);
2024 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2025 }
|
1982 // - klass can be fastpath allocated (e.g. does not have finalizer)
1983 // - TLAB accepts the allocation
// Fast-path allocation for the 'new' bytecode: bump-pointer allocate from the
// current thread's TLAB when the constant-pool klass is already resolved.
1984 ConstantPool* constants = istate->method()->constants();
1985 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1986 Klass* entry = constants->resolved_klass_at(index);
1987 InstanceKlass* ik = InstanceKlass::cast(entry);
// Fast path also requires a fully initialized klass with no slow-path
// obligations (e.g. no finalizer — checked by can_be_fastpath_allocated()).
1988 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1989 size_t obj_size = ik->size_helper();
// TLAB allocation; nullptr means the TLAB could not satisfy the request,
// in which case we fall through to the slow path below.
1990 HeapWord* result = THREAD->tlab().allocate(obj_size);
1991 if (result != nullptr) {
1992 // Initialize object field block:
1993 // - if TLAB is pre-zeroed, we can skip this path
1994 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1995 // this area, and we still need to initialize it
1996 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1997 size_t hdr_size = oopDesc::header_size();
// Zero everything past the header; the header itself is written below.
1998 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
1999 }
2000
2001 // Initialize header, mirrors MemAllocator.
2002 #ifdef _LP64
// 64-bit: a single releasing store of the klass-provided prototype mark
// word initializes and publishes the whole header, ordered after the
// field zeroing above.
2003 oopDesc::release_set_mark(result, ik->prototype_header());
2004 #else
// 32-bit: set the mark first, then publish the klass with a releasing
// store so the object is never observed with a klass but stale fields.
2005 oopDesc::set_mark(result, markWord::prototype());
2006 oopDesc::release_set_klass(result, ik);
2007 #endif
2008 oop obj = cast_to_oop(result);
2009
2010 // Must prevent reordering of stores for object initialization
2011 // with stores that publish the new object.
2012 OrderAccess::storestore();
2013 SET_STACK_OBJECT(obj, 0);
// Advance past the 3-byte 'new' instruction and push one stack slot.
2014 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2015 }
2016 }
2017 }
2018 // Slow case allocation
// Runtime call resolves/initializes the klass if needed and allocates;
// may safepoint/GC or throw — hence the handle_exception continuation.
2019 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2020 handle_exception);
2021 // Must prevent reordering of stores for object initialization
2022 // with stores that publish the new object.
2023 OrderAccess::storestore();
// The runtime returns the new object through the thread-local vm_result,
// which must be cleared once consumed.
2024 SET_STACK_OBJECT(THREAD->vm_result(), 0);
2025 THREAD->set_vm_result(nullptr);
2026 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2027 }
|