1984 // - klass can be fastpath allocated (e.g. does not have finalizer)
1985 // - TLAB accepts the allocation
// Fast-path inline allocation for the `new` bytecode: if the class is already
// resolved and initialized, carve the object directly out of the current
// thread's TLAB instead of calling into the runtime.
1986 ConstantPool* constants = istate->method()->constants();
1987 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1988 Klass* entry = constants->resolved_klass_at(index);
1989 InstanceKlass* ik = InstanceKlass::cast(entry);
// Class must be fully initialized (no <clinit> barrier needed here) and
// eligible for fast-path allocation (e.g. no finalizer, per the comment above).
1990 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1991 size_t obj_size = ik->size_helper();
1992 HeapWord* result = THREAD->tlab().allocate(obj_size);
// nullptr means the TLAB could not satisfy the request; fall through to the
// slow path below rather than attempting a TLAB refill here.
1993 if (result != nullptr) {
1994 // Initialize object field block:
1995 // - if TLAB is pre-zeroed, we can skip this path
1996 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1997 // this area, and we still need to initialize it
1998 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1999 size_t hdr_size = oopDesc::header_size();
2000 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
2001 }
2002
2003 // Initialize header, mirrors MemAllocator.
2004 oopDesc::set_mark(result, markWord::prototype());
2005 oopDesc::set_klass_gap(result, 0);
// NOTE(review): release store presumably orders the mark/gap stores above
// before the klass becomes visible, matching MemAllocator — confirm.
2006 oopDesc::release_set_klass(result, ik);
2007
2008 oop obj = cast_to_oop(result);
2009
2010 // Must prevent reordering of stores for object initialization
2011 // with stores that publish the new object.
2012 OrderAccess::storestore();
2013 SET_STACK_OBJECT(obj, 0);
// Advance past the 3-byte `new` instruction (opcode + 2-byte CP index) and
// account for the one reference pushed on the operand stack.
2014 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2015 }
2016 }
2017 }
2018 // Slow case allocation
// Runtime call presumably resolves/initializes the class as needed and
// allocates outside the TLAB; the new oop comes back in THREAD->vm_result().
2019 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2020 handle_exception);
2021 // Must prevent reordering of stores for object initialization
2022 // with stores that publish the new object.
2023 OrderAccess::storestore();
2024 SET_STACK_OBJECT(THREAD->vm_result(), 0);
// Clear vm_result after consuming it so a stale oop does not linger there.
2025 THREAD->set_vm_result(nullptr);
2026 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2027 }
|
1984 // - klass can be fastpath allocated (e.g. does not have finalizer)
1985 // - TLAB accepts the allocation
// Fast-path inline allocation for the `new` bytecode: if the class is already
// resolved and initialized, carve the object directly out of the current
// thread's TLAB instead of calling into the runtime. This variant also
// supports compact object headers (UseCompactObjectHeaders).
1986 ConstantPool* constants = istate->method()->constants();
1987 if (UseTLAB && !constants->tag_at(index).is_unresolved_klass()) {
1988 Klass* entry = constants->resolved_klass_at(index);
1989 InstanceKlass* ik = InstanceKlass::cast(entry);
// Class must be fully initialized (no <clinit> barrier needed here) and
// eligible for fast-path allocation (e.g. no finalizer, per the comment above).
1990 if (ik->is_initialized() && ik->can_be_fastpath_allocated()) {
1991 size_t obj_size = ik->size_helper();
1992 HeapWord* result = THREAD->tlab().allocate(obj_size);
// nullptr means the TLAB could not satisfy the request; fall through to the
// slow path below rather than attempting a TLAB refill here.
1993 if (result != nullptr) {
1994 // Initialize object field block:
1995 // - if TLAB is pre-zeroed, we can skip this path
1996 // - in debug mode, ThreadLocalAllocBuffer::allocate mangles
1997 // this area, and we still need to initialize it
1998 if (DEBUG_ONLY(true ||) !ZeroTLAB) {
1999 size_t hdr_size = oopDesc::header_size();
2000 Copy::fill_to_words(result + hdr_size, obj_size - hdr_size, 0);
2001 }
2002
2003 // Initialize header, mirrors MemAllocator.
// With compact headers, the klass's prototype mark word presumably carries
// the klass information as well (JEP 450 layout — confirm), so one release
// store initializes the whole header; otherwise set mark, gap, and klass
// separately, publishing the klass last with release semantics.
2004 if (UseCompactObjectHeaders) {
2005 oopDesc::release_set_mark(result, ik->prototype_header());
2006 } else {
2007 oopDesc::set_mark(result, markWord::prototype());
2008 oopDesc::set_klass_gap(result, 0);
2009 oopDesc::release_set_klass(result, ik);
2010 }
2011 oop obj = cast_to_oop(result);
2012
2013 // Must prevent reordering of stores for object initialization
2014 // with stores that publish the new object.
2015 OrderAccess::storestore();
2016 SET_STACK_OBJECT(obj, 0);
// Advance past the 3-byte `new` instruction (opcode + 2-byte CP index) and
// account for the one reference pushed on the operand stack.
2017 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2018 }
2019 }
2020 }
2021 // Slow case allocation
// Runtime call presumably resolves/initializes the class as needed and
// allocates outside the TLAB; the new oop comes back in THREAD->vm_result().
2022 CALL_VM(InterpreterRuntime::_new(THREAD, METHOD->constants(), index),
2023 handle_exception);
2024 // Must prevent reordering of stores for object initialization
2025 // with stores that publish the new object.
2026 OrderAccess::storestore();
2027 SET_STACK_OBJECT(THREAD->vm_result(), 0);
// Clear vm_result after consuming it so a stale oop does not linger there.
2028 THREAD->set_vm_result(nullptr);
2029 UPDATE_PC_AND_TOS_AND_CONTINUE(3, 1);
2030 }
|