// Fast path for the _new bytecode: try a raw bump-pointer allocation out of
// the current thread's TLAB before falling back into the runtime.  The two
// bullet points below continue a precondition list begun above this chunk:
1983 // - klass can be fastpath allocated (e.g. does not have finalizer)
1984 // - TLAB accepts the allocation
// The unresolved-tag check guarantees the klass is already resolved, so
// resolved_klass_at() below cannot trigger class loading or initialization.
1985 ConstantPool* constants = istate->method()->constants();
1986 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1987 Klass* entry = constants->resolved_klass_at(index);
1988 InstanceKlass* ik = InstanceKlass::cast(entry);
// Only fully-initialized klasses with no slow-path requirements (e.g. no
// finalizer) may be allocated without calling into the VM.
1989 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1990 size_t obj_size = ik->size_helper();
// TLAB bump-pointer allocation; returns nullptr when the TLAB cannot
// satisfy the request, which sends us to the slow path below.
1991 HeapWord* result = THREAD->tlab().allocate(obj_size);
1992 if (result != nullptr) {
1993 // Initialize object field block:
1994 // - if TLAB is pre-zeroed, we can skip this path
1995 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1996 // this area, and we still need to initialize it
1997 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1998 size_t hdr_size = oopDesc::header_size();
// Zero everything past the header; the header words are set explicitly below.
1999 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
2000 }
2001
2002 // Initialize header, mirrors MemAllocator.
2003 oopDesc::set_mark(result, markWord::prototype());
2004 oopDesc::set_klass_gap(result, 0);
// Release store of the klass last, so a racing reader that observes the
// klass pointer also observes the initialized mark word and klass gap.
2005 oopDesc::release_set_klass(result, ik);
2006
2007 oop obj = cast_to_oop(result);
2008
2009 // Must prevent reordering of stores for object initialization
2010 // with stores that publish the new object.
2011 OrderAccess::storestore();
// Push the new object and advance past the _new bytecode (3 bytes:
// opcode + 2-byte constant-pool index), growing the stack by one slot.
2012 SET_STACK_OBJECT(obj, 0);
2013 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2014 }
2015 }
2016 }
2017 // Slow case allocation
// Fall back to the runtime, which resolves/initializes the klass as needed
// and allocates (may GC or throw); the new oop comes back in vm_result().
2018 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2019 handle_exception);
2020 // Must prevent reordering of stores for object initialization
2021 // with stores that publish the new object.
2022 OrderAccess::storestore();
2023 SET_STACK_OBJECT(THREAD->vm_result(), 0);
// Clear vm_result once consumed — presumably so the thread does not keep a
// stale oop reachable; TODO confirm against JavaThread::vm_result usage.
2024 THREAD->set_vm_result(nullptr);
2025 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2026 }
|
// Fast path for the _new bytecode (variant with an _LP64-specific header
// initialization): allocate straight out of the thread's TLAB when possible.
// The two bullet points below continue a precondition list begun above:
1983 // - klass can be fastpath allocated (e.g. does not have finalizer)
1984 // - TLAB accepts the allocation
// The unresolved-tag check guarantees the klass is already resolved, so
// resolved_klass_at() below cannot trigger class loading or initialization.
1985 ConstantPool* constants = istate->method()->constants();
1986 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1987 Klass* entry = constants->resolved_klass_at(index);
1988 InstanceKlass* ik = InstanceKlass::cast(entry);
// Only fully-initialized klasses with no slow-path requirements (e.g. no
// finalizer) may be allocated without calling into the VM.
1989 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1990 size_t obj_size = ik->size_helper();
// TLAB bump-pointer allocation; returns nullptr when the TLAB cannot
// satisfy the request, which sends us to the slow path below.
1991 HeapWord* result = THREAD->tlab().allocate(obj_size);
1992 if (result != nullptr) {
1993 // Initialize object field block:
1994 // - if TLAB is pre-zeroed, we can skip this path
1995 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1996 // this area, and we still need to initialize it
1997 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1998 size_t hdr_size = oopDesc::header_size();
// Zero everything past the header; the header words are set explicitly below.
1999 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
2000 }
2001
2002 // Initialize header, mirrors MemAllocator.
// On 64-bit, a single release store of the klass's mark-word prototype
// publishes the whole header — presumably the compact-object-headers
// layout where the mark word carries the klass; TODO confirm against
// the matching MemAllocator change.
2003 #ifdef _LP64
2004 oopDesc::release_set_mark(result, ik->prototype_header());
2005 #else
// 32-bit keeps the classic two-word header: plain mark store, then a
// release store of the klass so racing readers see an initialized mark.
2006 oopDesc::set_mark(result, markWord::prototype());
2007 oopDesc::release_set_klass(result, ik);
2008 #endif
2009 oop obj = cast_to_oop(result);
2010
2011 // Must prevent reordering of stores for object initialization
2012 // with stores that publish the new object.
2013 OrderAccess::storestore();
// Push the new object and advance past the _new bytecode (3 bytes:
// opcode + 2-byte constant-pool index), growing the stack by one slot.
2014 SET_STACK_OBJECT(obj, 0);
2015 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2016 }
2017 }
2018 }
2019 // Slow case allocation
// Fall back to the runtime, which resolves/initializes the klass as needed
// and allocates (may GC or throw); the new oop comes back in vm_result().
2020 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2021 handle_exception);
2022 // Must prevent reordering of stores for object initialization
2023 // with stores that publish the new object.
2024 OrderAccess::storestore();
2025 SET_STACK_OBJECT(THREAD->vm_result(), 0);
// Clear vm_result once consumed — presumably so the thread does not keep a
// stale oop reachable; TODO confirm against JavaThread::vm_result usage.
2026 THREAD->set_vm_result(nullptr);
2027 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2028 }
|