diff --git a/src/hotspot/share/ci/ciField.cpp b/src/hotspot/share/ci/ciField.cpp
index b25bbc67a26..3a2bd2f4e05 100644
--- a/src/hotspot/share/ci/ciField.cpp
+++ b/src/hotspot/share/ci/ciField.cpp
@@ -106,6 +106,7 @@ ciField::ciField(ciInstanceKlass* klass, int index, Bytecodes::Code bc) :
   _name = (ciSymbol*)ciEnv::current(THREAD)->get_symbol(name);
 
   _is_null_free = false;
+  _null_marker_offset = -1;
 
   // Get the field's declared holder.
   //
@@ -221,7 +222,7 @@ ciField::ciField(fieldDescriptor *fd) :
 // Special copy constructor used to flatten inline type fields by
 // copying the fields of the inline type to a new holder klass.
 ciField::ciField(ciField* field, ciInstanceKlass* holder, int offset, bool is_final) {
-  assert(field->holder()->is_inlinetype(), "should only be used for inline type field flattening");
+  assert(field->holder()->is_inlinetype() || field->holder()->is_abstract(), "should only be used for inline type field flattening");
   // Set the is_final flag
   jint final = is_final ? JVM_ACC_FINAL : ~JVM_ACC_FINAL;
   AccessFlags flags(field->flags().as_int() & final);
@@ -240,6 +241,7 @@ ciField::ciField(ciField* field, ciInstanceKlass* holder, int offset, bool is_fi
   assert(!field->is_flat(), "field must not be flat");
   _is_flat = false;
   _is_null_free = field->_is_null_free;
+  _null_marker_offset = field->_null_marker_offset;
   _original_holder = (field->_original_holder != nullptr) ? field->_original_holder : field->_holder;
 }
 
@@ -287,11 +289,17 @@ void ciField::initialize_from(fieldDescriptor* fd) {
   // Get the flags, offset, and canonical holder of the field.
   _flags = ciFlags(fd->access_flags(), fd->field_flags().is_stable(), fd->field_status().is_initialized_final_update());
   _offset = fd->offset();
-  Klass* field_holder = fd->field_holder();
+  InstanceKlass* field_holder = fd->field_holder();
   assert(field_holder != nullptr, "null field_holder");
   _holder = CURRENT_ENV->get_instance_klass(field_holder);
   _is_flat = fd->is_flat();
   _is_null_free = fd->is_null_free_inline_type();
+  if (fd->has_null_marker()) {
+    InlineLayoutInfo* li = field_holder->inline_layout_info_adr(fd->index());
+    _null_marker_offset = li->null_marker_offset();
+  } else {
+    _null_marker_offset = -1;
+  }
   _original_holder = nullptr;
 
   // Check to see if the field is constant.
@@ -492,6 +500,7 @@ void ciField::print() {
   }
   tty->print(" is_flat=%s", bool_to_str(_is_flat));
   tty->print(" is_null_free=%s", bool_to_str(_is_null_free));
+  tty->print(" null_marker_offset=%d", _null_marker_offset);
   tty->print(">");
 }
 
diff --git a/src/hotspot/share/ci/ciField.hpp b/src/hotspot/share/ci/ciField.hpp
index 7928e03ca55..718f859187a 100644
--- a/src/hotspot/share/ci/ciField.hpp
+++ b/src/hotspot/share/ci/ciField.hpp
@@ -52,6 +52,7 @@ class ciField : public ArenaObj {
   bool _is_constant;
   bool _is_flat;
   bool _is_null_free;
+  int _null_marker_offset;
   ciMethod* _known_to_link_with_put;
   ciInstanceKlass* _known_to_link_with_get;
   ciConstant _constant_value;
@@ -175,6 +176,7 @@ class ciField : public ArenaObj {
   bool is_transient () const { return flags().is_transient(); }
   bool is_flat () const { return _is_flat; }
   bool is_null_free () const { return _is_null_free; }
+  int null_marker_offset () const { return _null_marker_offset; }
 
   // The field is modified outside of instance initializer methods
   // (or class/initializer methods if the field is static).
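Aside (illustrative sketch, not part of the patch): the new accessor pairs with the invariant asserted later in inlinetypenode.cpp, namely that a flat field is null-free exactly when it carries no null marker. The helper name example_use below is hypothetical and only shows the intended meaning of the accessor.

    // Hedged sketch of a consumer of the new ciField API.
    void example_use(ciField* field) {
      if (field->is_flat()) {
        int nm = field->null_marker_offset();  // -1 for null-free flat fields
        assert(field->is_null_free() == (nm == -1), "null-free flat fields carry no marker");
        if (nm != -1) {
          // nm is the byte offset, within the field's holder, of a T_BOOLEAN cell that
          // records whether the nullable flat value is currently set or null.
        }
      }
    }
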
diff --git a/src/hotspot/share/ci/ciInstanceKlass.cpp b/src/hotspot/share/ci/ciInstanceKlass.cpp
index 3190a9487d7..16d1f8ca96f 100644
--- a/src/hotspot/share/ci/ciInstanceKlass.cpp
+++ b/src/hotspot/share/ci/ciInstanceKlass.cpp
@@ -536,7 +536,8 @@ GrowableArray* ciInstanceKlass::compute_nonstatic_fields_impl(Growable
   }
 
   // allocate the array:
-  if (flen == 0) {
+  // TODO why do we need this?
+  if (flen == 0 && !is_inlinetype()) {
     return nullptr; // return nothing if none are locally declared
   }
   if (super_fields != nullptr) {
diff --git a/src/hotspot/share/classfile/classFileParser.hpp b/src/hotspot/share/classfile/classFileParser.hpp
index fc64ea39e6b..9c09e21977b 100644
--- a/src/hotspot/share/classfile/classFileParser.hpp
+++ b/src/hotspot/share/classfile/classFileParser.hpp
@@ -212,7 +212,6 @@ class ClassFileParser {
   bool _has_contended_fields;
 
   bool _has_inline_type_fields;
-  bool _has_null_marker_offsets;
   bool _is_naturally_atomic;
   bool _must_be_atomic;
   bool _is_implicitly_constructible;
diff --git a/src/hotspot/share/code/nmethod.cpp b/src/hotspot/share/code/nmethod.cpp
index be3f952fd21..0383f20eea2 100644
--- a/src/hotspot/share/code/nmethod.cpp
+++ b/src/hotspot/share/code/nmethod.cpp
@@ -3812,6 +3812,12 @@ void nmethod::print_nmethod_labels(outputStream* stream, address block_begin, bo
       }
       if (!did_name)
         stream->print("%s", type2name(t));
+      if ((*sig)._offset == -1) {
+        // TODO it's also -1 if it's not from a flat field
+        //stream->print(" IS INIT");
+      } else if ((*sig)._sort_offset != (*sig)._offset) {
+        stream->print(" NULL MARKER");
+      }
     }
     if (at_old_sp) {
       stream->print(" (%s of caller)", spname);
diff --git a/src/hotspot/share/oops/fieldStreams.hpp b/src/hotspot/share/oops/fieldStreams.hpp
index 4ca87802f3b..f240757a1a4 100644
--- a/src/hotspot/share/oops/fieldStreams.hpp
+++ b/src/hotspot/share/oops/fieldStreams.hpp
@@ -297,6 +297,10 @@ class HierarchicalFieldStream : public StackObj {
   bool is_flat() const {
     return _current_stream.is_flat();
   }
+
+  bool is_null_free_inline_type() {
+    return _current_stream.is_null_free_inline_type();
+  }
 };
 
 #endif // SHARE_OOPS_FIELDSTREAMS_HPP
diff --git a/src/hotspot/share/oops/inlineKlass.cpp b/src/hotspot/share/oops/inlineKlass.cpp
index dd27cc3c924..011203d77c2 100644
--- a/src/hotspot/share/oops/inlineKlass.cpp
+++ b/src/hotspot/share/oops/inlineKlass.cpp
@@ -294,28 +294,49 @@ Klass* InlineKlass::value_array_klass_or_null() {
 //
 // Value classes could also have fields in abstract super value classes.
 // Use a HierarchicalFieldStream to get them as well.
-int InlineKlass::collect_fields(GrowableArray<SigEntry>* sig, int base_off) {
+int InlineKlass::collect_fields(GrowableArray<SigEntry>* sig, int base_off, int null_marker_offset) {
   int count = 0;
   SigEntry::add_entry(sig, T_METADATA, name(), base_off);
+  int max_offset = 0;
   for (HierarchicalFieldStream fs(this); !fs.done(); fs.next()) {
     if (fs.access_flags().is_static()) continue;
     int offset = base_off + fs.offset() - (base_off > 0 ? first_field_offset() : 0);
     // TODO 8284443 Use different heuristic to decide what should be scalarized in the calling convention
     if (fs.is_flat()) {
       // Resolve klass of flat field and recursively collect fields
+      int null_marker_offset = -1;
+      if (!fs.is_null_free_inline_type()) {
+        // TODO can we use null_marker_offset() instead?
+        InlineLayoutInfo* li = inline_layout_info_adr(fs.index());
+        null_marker_offset = li->null_marker_offset();
+      }
       Klass* vk = get_inline_type_field_klass(fs.index());
-      count += InlineKlass::cast(vk)->collect_fields(sig, offset);
+      count += InlineKlass::cast(vk)->collect_fields(sig, offset, null_marker_offset);
     } else {
       BasicType bt = Signature::basic_type(fs.signature());
       SigEntry::add_entry(sig, bt, fs.signature(), offset);
       count += type2size[bt];
     }
+    max_offset = MAX2(max_offset, offset);
   }
   int offset = base_off + size_helper()*HeapWordSize - (base_off > 0 ? first_field_offset() : 0);
+  // Null markers are not real fields, add them manually at the end of the flat fields (C2 relies on this)
+  if (null_marker_offset != -1) {
+    tty->print_cr("MARKER %d %d %d", null_marker_offset, offset, max_offset + 1);
+    SigEntry::add_entry(sig, T_BOOLEAN, name(), null_marker_offset, max_offset + 1);
+    count++;
+  }
   SigEntry::add_entry(sig, T_VOID, name(), offset);
   if (base_off == 0) {
     sig->sort(SigEntry::compare);
   }
+  /*
+  tty->print_cr("##\n");
+  print();
+  for (int i = 0; i < sig->length(); ++i) {
+    tty->print_cr("%s %d %d", type2name(sig->at(i)._bt), sig->at(i)._offset, sig->at(i)._sort_offset);
+  }
+  */
   assert(sig->at(0)._bt == T_METADATA && sig->at(sig->length()-1)._bt == T_VOID, "broken structure");
   return count;
 }
 
diff --git a/src/hotspot/share/oops/inlineKlass.hpp b/src/hotspot/share/oops/inlineKlass.hpp
index 82d675bb6a6..615e60cf683 100644
--- a/src/hotspot/share/oops/inlineKlass.hpp
+++ b/src/hotspot/share/oops/inlineKlass.hpp
@@ -178,7 +178,7 @@ class InlineKlass: public InstanceKlass {
   virtual void metaspace_pointers_do(MetaspaceClosure* it);
 
  private:
-  int collect_fields(GrowableArray<SigEntry>* sig, int base_off = 0);
+  int collect_fields(GrowableArray<SigEntry>* sig, int base_off = 0, int null_marker_offset = -1);
 
   void cleanup_blobs();
 
diff --git a/src/hotspot/share/oops/methodData.cpp b/src/hotspot/share/oops/methodData.cpp
index 18895fbdcbb..8932f461e66 100644
--- a/src/hotspot/share/oops/methodData.cpp
+++ b/src/hotspot/share/oops/methodData.cpp
@@ -524,7 +524,7 @@ void BranchData::post_initialize(BytecodeStream* stream, MethodData* mdo) {
 void BranchData::print_data_on(outputStream* st, const char* extra) const {
   print_shared(st, "BranchData", extra);
   if (data()->flags()) {
-    tty->cr();
+    st->cr();
     tab(st);
   }
   st->print_cr("taken(%u) displacement(%d)",
diff --git a/src/hotspot/share/opto/callnode.cpp b/src/hotspot/share/opto/callnode.cpp
index 3a63e002bec..9bdca7cb7c4 100644
--- a/src/hotspot/share/opto/callnode.cpp
+++ b/src/hotspot/share/opto/callnode.cpp
@@ -524,8 +524,13 @@ void JVMState::format(PhaseRegAlloc *regalloc, const Node *n, outputStream* st)
           fld_node = mcall->in(first_ind+j);
           if (iklass != nullptr) {
             st->print(", [");
-            cifield = iklass->nonstatic_field_at(j);
-            cifield->print_name_on(st);
+            if (j < (uint)iklass->nof_nonstatic_fields()) {
+              cifield = iklass->nonstatic_field_at(j);
+              cifield->print_name_on(st);
+            } else {
+              // Must be a null marker
+              st->print("null marker");
+            }
             format_helper(regalloc, st, fld_node, ":", j, &scobjs);
           } else {
             format_helper(regalloc, st, fld_node, ", [", j, &scobjs);
diff --git a/src/hotspot/share/opto/inlinetypenode.cpp b/src/hotspot/share/opto/inlinetypenode.cpp
index 85bc2e99a89..b1602ffd047 100644
--- a/src/hotspot/share/opto/inlinetypenode.cpp
+++ b/src/hotspot/share/opto/inlinetypenode.cpp
@@ -257,6 +257,37 @@ bool InlineTypeNode::field_is_null_free(uint index) const {
   return field->is_null_free();
 }
 
+int InlineTypeNode::field_null_marker_offset(uint index) const {
+  assert(index < field_count(), "index out of bounds");
+  ciField* field = inline_klass()->declared_nonstatic_field_at(index);
+  assert(field->is_flat(), "must be an inline type");
+  return field->null_marker_offset();
+}
+
+// TODO implement with a worklist
+static uint helper(InlineTypeNode* vt, Unique_Node_List& worklist, Node_List& null_markers, SafePointNode* sfpt) {
+  uint cnt = 0;
+  for (uint i = 0; i < vt->field_count(); ++i) {
+    Node* value = vt->field_value(i);
+    if (vt->field_is_flat(i)) {
+      cnt += helper(value->as_InlineType(), worklist, null_markers, sfpt);
+      if (!vt->field_is_null_free(i)) {
+        null_markers.push(value->as_InlineType()->get_is_init());
+      }
+    } else {
+      if (value->is_InlineType()) {
+        // Add inline type field to the worklist to process later
+        worklist.push(value);
+      }
+      sfpt->add_req(value);
+      cnt++;
+    }
+  }
+  return cnt;
+}
+
 void InlineTypeNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_List& worklist, SafePointNode* sfpt) {
   // We should not scalarize larvals in debug info of their constructor calls because their fields could still be
   // updated. If we scalarize and update the fields in the constructor, the updates won't be visible in the caller after
@@ -271,17 +302,12 @@ void InlineTypeNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_Li
   }
   ciInlineKlass* vk = inline_klass();
-  uint nfields = vk->nof_nonstatic_fields();
+  vk->nof_nonstatic_fields();
   JVMState* jvms = sfpt->jvms();
   // Replace safepoint edge by SafePointScalarObjectNode and add field values
   assert(jvms != nullptr, "missing JVMS");
   uint first_ind = (sfpt->req() - jvms->scloff());
-  SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(type()->isa_instptr(),
-                                                                  nullptr,
-                                                                  first_ind,
-                                                                  sfpt->jvms()->depth(),
-                                                                  nfields);
-  sobj->init_req(0, igvn->C->root());
+
   // Nullable inline types have an IsInit field that needs
   // to be checked before using the field values.
   const TypeInt* tinit = igvn->type(get_is_init())->isa_int();
@@ -292,16 +318,21 @@ void InlineTypeNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_Li
   }
   // Iterate over the inline type fields in order of increasing
   // offset and add the field values to the safepoint.
-  for (uint j = 0; j < nfields; ++j) {
-    int offset = vk->nonstatic_field_at(j)->offset_in_bytes();
-    Node* value = field_value_by_offset(offset, true /* include flat inline type fields */);
-    if (value->is_InlineType()) {
-      // Add inline type field to the worklist to process later
-      worklist.push(value);
-    }
-    sfpt->add_req(value);
+  Node_List null_markers;
+  uint nfields = helper(this, worklist, null_markers, sfpt);
+
+  for (uint i = 0; i < null_markers.size(); ++i) {
+    Node* is_init = null_markers.at(i);
+    sfpt->add_req(is_init);
+    nfields++;
   }
   jvms->set_endoff(sfpt->req());
+  SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(type()->isa_instptr(),
+                                                                  nullptr,
+                                                                  first_ind,
+                                                                  sfpt->jvms()->depth(),
+                                                                  nfields);
+  sobj->init_req(0, igvn->C->root());
   sobj = igvn->transform(sobj)->as_SafePointScalarObject();
   igvn->rehash_node_delayed(sfpt);
   for (uint i = jvms->debug_start(); i < jvms->debug_end(); i++) {
@@ -455,7 +486,8 @@ void InlineTypeNode::load(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass*
       value = make_default_impl(kit->gvn(), ft->as_inline_klass(), visited);
     } else if (field_is_flat(i)) {
       // Recursively load the flat inline type field
-      value = make_from_flat_impl(kit, ft->as_inline_klass(), base, ptr, holder, offset, decorators, visited);
+      assert(field_is_null_free(i) == (field_null_marker_offset(i) == -1), "inconsistency");
+      value = make_from_flat_impl(kit, ft->as_inline_klass(), base, ptr, holder, offset, field_null_marker_offset(i), decorators, visited);
     } else {
       const TypeOopPtr* oop_ptr = kit->gvn().type(base)->isa_oopptr();
       bool is_array = (oop_ptr->isa_aryptr() != nullptr);
@@ -498,7 +530,7 @@ void InlineTypeNode::load(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass*
   }
 }
 
-void InlineTypeNode::store_flat(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) const {
+void InlineTypeNode::store_flat(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, int null_marker_offset, DecoratorSet decorators) const {
   if (kit->gvn().type(base)->isa_aryptr()) {
     kit->C->set_flat_accesses();
   }
@@ -507,6 +539,13 @@ void InlineTypeNode::store_flat(GraphKit* kit, Node* base, Node* ptr, ciInstance
   if (holder == nullptr) {
     holder = inline_klass();
   }
+  if (null_marker_offset != -1) {
+    // Nullable flat field, write the null marker
+    Node* adr = kit->basic_plus_adr(base, null_marker_offset);
+    const TypePtr* adr_type = kit->gvn().type(adr)->isa_ptr();
+    int alias_idx = kit->C->get_alias_index(adr_type);
+    kit->store_to_memory(kit->control(), adr, get_is_init(), T_BOOLEAN, alias_idx, MemNode::unordered);
+  }
   holder_offset -= inline_klass()->first_field_offset();
   store(kit, base, ptr, holder, holder_offset, -1, decorators);
 }
@@ -520,7 +559,8 @@ void InlineTypeNode::store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass
     ciType* ft = field_type(i);
     if (field_is_flat(i)) {
       // Recursively store the flat inline type field
-      value->as_InlineType()->store_flat(kit, base, ptr, holder, offset, decorators);
+      assert(field_is_null_free(i) == (field_null_marker_offset(i) == -1), "inconsistency");
+      value->as_InlineType()->store_flat(kit, base, ptr, holder, offset, field_null_marker_offset(i), decorators);
     } else {
       // Store field value to memory
       const TypePtr* adr_type = field_adr_type(base, offset, holder, decorators, kit->gvn());
@@ -946,23 +986,34 @@ InlineTypeNode* InlineTypeNode::make_from_oop_impl(GraphKit* kit, Node* oop, ciI
   return gvn.transform(vt)->as_InlineType();
 }
 
-InlineTypeNode* InlineTypeNode::make_from_flat(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) {
+InlineTypeNode* InlineTypeNode::make_from_flat(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, int null_marker_offset, DecoratorSet decorators) {
   GrowableArray<ciType*> visited;
   visited.push(vk);
-  return make_from_flat_impl(kit, vk, obj, ptr, holder, holder_offset, decorators, visited);
+  return make_from_flat_impl(kit, vk, obj, ptr, holder, holder_offset, null_marker_offset, decorators, visited);
 }
 
 // GraphKit wrapper for the 'make_from_flat' method
-InlineTypeNode* InlineTypeNode::make_from_flat_impl(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators, GrowableArray<ciType*>& visited) {
+InlineTypeNode* InlineTypeNode::make_from_flat_impl(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, int null_marker_offset, DecoratorSet decorators, GrowableArray<ciType*>& visited) {
   if (kit->gvn().type(obj)->isa_aryptr()) {
     kit->C->set_flat_accesses();
   }
+
+  bool null_free = (null_marker_offset == -1);
+
   // Create and initialize an InlineTypeNode by loading all field values from
   // a flat inline type field at 'holder_offset' or from an inline type array.
-  InlineTypeNode* vt = make_uninitialized(kit->gvn(), vk);
+  InlineTypeNode* vt = make_uninitialized(kit->gvn(), vk, null_free);
   // The inline type is flattened into the object without an oop header. Subtract the
   // offset of the first field to account for the missing header when loading the values.
   holder_offset -= vk->first_field_offset();
+
+  if (null_marker_offset != -1) {
+    // Nullable flat field, read the null marker
+    Node* adr = kit->basic_plus_adr(obj, null_marker_offset);
+    Node* is_init = kit->make_load(nullptr, adr, TypeInt::BOOL, T_BOOLEAN, MemNode::unordered);
+    vt->set_req(IsInit, is_init);
+  }
+
   vt->load(kit, obj, ptr, holder, visited, holder_offset, decorators);
   assert(vt->is_loaded(&kit->gvn()) != obj, "holder oop should not be used as flattened inline type oop");
   return kit->gvn().transform(vt)->as_InlineType();
@@ -1121,6 +1172,10 @@ void InlineTypeNode::pass_fields(GraphKit* kit, Node* n, uint& base_input, bool
     if (field_is_flat(i)) {
       // Flat inline type field
       arg->as_InlineType()->pass_fields(kit, n, base_input, in);
+      if (!field_is_null_free(i)) {
+        assert(field_null_marker_offset(i) != -1, "inconsistency");
+        n->init_req(base_input++, arg->as_InlineType()->get_is_init());
+      }
     } else {
       if (arg->is_InlineType()) {
         // Non-flat inline type field
@@ -1179,8 +1234,21 @@ void InlineTypeNode::initialize_fields(GraphKit* kit, MultiNode* multi, uint& ba
     Node* parm = nullptr;
     if (field_is_flat(i)) {
       // Flat inline type field
-      InlineTypeNode* vt = make_uninitialized(gvn, type->as_inline_klass());
+      InlineTypeNode* vt = make_uninitialized(gvn, type->as_inline_klass(), field_is_null_free(i));
       vt->initialize_fields(kit, multi, base_input, in, true, null_check_region, visited);
+      if (!field_is_null_free(i)) {
+        assert(in, "FIXME");
+        assert(field_null_marker_offset(i) != -1, "inconsistency");
+        Node* is_init = nullptr;
+        if (multi->is_Start()) {
+          is_init = gvn.transform(new ParmNode(multi->as_Start(), base_input));
+        } else {
+          // TODO add a test for this
+          is_init = multi->as_Call()->in(base_input);
+        }
+        vt->set_req(IsInit, is_init);
+        base_input++;
+      }
       parm = gvn.transform(vt);
     } else {
       if (multi->is_Start()) {
diff --git a/src/hotspot/share/opto/inlinetypenode.hpp b/src/hotspot/share/opto/inlinetypenode.hpp
index a3427bb29e1..9644634b863 100644
--- a/src/hotspot/share/opto/inlinetypenode.hpp
+++ b/src/hotspot/share/opto/inlinetypenode.hpp
@@ -83,7 +83,7 @@ class InlineTypeNode : public TypeNode {
   static InlineTypeNode* make_default_impl(PhaseGVN& gvn, ciInlineKlass* vk, GrowableArray<ciType*>& visited, bool is_larval = false);
   static InlineTypeNode* make_from_oop_impl(GraphKit* kit, Node* oop, ciInlineKlass* vk, bool null_free, GrowableArray<ciType*>& visited, bool is_larval = false);
   static InlineTypeNode* make_null_impl(PhaseGVN& gvn, ciInlineKlass* vk, GrowableArray<ciType*>& visited, bool transform = true);
-  static InlineTypeNode* make_from_flat_impl(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators, GrowableArray<ciType*>& visited);
+  static InlineTypeNode* make_from_flat_impl(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, int null_marker_offset, DecoratorSet decorators, GrowableArray<ciType*>& visited);
 
  public:
   // Create with default field values
@@ -93,7 +93,7 @@ class InlineTypeNode : public TypeNode {
   // Create and initialize by loading the field values from an oop
   static InlineTypeNode* make_from_oop(GraphKit* kit, Node* oop, ciInlineKlass* vk, bool null_free = true, bool is_larval = false);
   // Create and initialize by loading the field values from a flat field or array
-  static InlineTypeNode* make_from_flat(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder = nullptr, int holder_offset = 0, DecoratorSet decorators = IN_HEAP | MO_UNORDERED);
+  static InlineTypeNode* make_from_flat(GraphKit* kit, ciInlineKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder = nullptr, int holder_offset = 0, int null_marker_offset = -1, DecoratorSet decorators = IN_HEAP | MO_UNORDERED);
   // Create and initialize with the inputs or outputs of a MultiNode (method entry or call)
   static InlineTypeNode* make_from_multi(GraphKit* kit, MultiNode* multi, ciInlineKlass* vk, uint& base_input, bool in, bool null_free = true);
   // Create with null field values
@@ -130,12 +130,13 @@ class InlineTypeNode : public TypeNode {
   ciType* field_type(uint index) const;
   bool field_is_flat(uint index) const;
   bool field_is_null_free(uint index) const;
+  int field_null_marker_offset(uint index) const;
 
   // Replace InlineTypeNodes in debug info at safepoints with SafePointScalarObjectNodes
   void make_scalar_in_safepoints(PhaseIterGVN* igvn, bool allow_oop = true);
 
   // Store the inline type as a flat (headerless) representation
-  void store_flat(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) const;
+  void store_flat(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, int null_marker_offset, DecoratorSet decorators) const;
   // Store the field values to memory
   void store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset = 0, int offset = -1, DecoratorSet decorators = C2_TIGHTLY_COUPLED_ALLOC | IN_HEAP | MO_UNORDERED) const;
   // Initialize the inline type by loading its field values from memory
diff --git a/src/hotspot/share/opto/library_call.cpp b/src/hotspot/share/opto/library_call.cpp
index 6cd5f5f108e..8cd1ebe2b4d 100644
--- a/src/hotspot/share/opto/library_call.cpp
+++ b/src/hotspot/share/opto/library_call.cpp
@@ -2592,9 +2592,9 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
       if (adr_type->isa_instptr() && !mismatched) {
         ciInstanceKlass* holder = adr_type->is_instptr()->instance_klass();
         int offset = adr_type->is_instptr()->offset();
-        p = InlineTypeNode::make_from_flat(this, inline_klass, base, base, holder, offset, decorators);
+        p = InlineTypeNode::make_from_flat(this, inline_klass, base, base, holder, offset, -1, decorators);
       } else {
-        p = InlineTypeNode::make_from_flat(this, inline_klass, base, adr, nullptr, 0, decorators);
+        p = InlineTypeNode::make_from_flat(this, inline_klass, base, adr, nullptr, 0, -1, decorators);
       }
     } else {
       p = access_load_at(heap_base_oop, adr, adr_type, value_type, type, decorators);
@@ -2645,9 +2645,9 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
       if (adr_type->isa_instptr() && !mismatched) {
         ciInstanceKlass* holder = adr_type->is_instptr()->instance_klass();
         int offset = adr_type->is_instptr()->offset();
-        val->as_InlineType()->store_flat(this, base, base, holder, offset, decorators);
+        val->as_InlineType()->store_flat(this, base, base, holder, offset, -1, decorators);
       } else {
-        val->as_InlineType()->store_flat(this, base, adr, nullptr, 0, decorators);
+        val->as_InlineType()->store_flat(this, base, adr, nullptr, 0, -1, decorators);
       }
     } else {
       access_store_at(heap_base_oop, adr, adr_type, val, value_type, type, decorators);
diff --git a/src/hotspot/share/opto/macro.cpp b/src/hotspot/share/opto/macro.cpp
index ee52efd7d96..f2619f71b5c 100644
--- a/src/hotspot/share/opto/macro.cpp
+++ b/src/hotspot/share/opto/macro.cpp
@@ -579,6 +579,8 @@ Node* PhaseMacroExpand::inline_type_from_mem(Node* mem, Node* ctl, ciInlineKlass
     int field_offset = offset + vt->field_offset(i);
     Node* value = nullptr;
     if (vt->field_is_flat(i)) {
+      // TODO could be null
+      assert(vt->field_is_null_free(i), "FIXME");
       value = inline_type_from_mem(mem, ctl, field_type->as_inline_klass(), adr_type, field_offset, alloc);
     } else {
       const Type* ft = Type::get_const_type(field_type);
diff --git a/src/hotspot/share/opto/parse2.cpp b/src/hotspot/share/opto/parse2.cpp
index 35ca063cb76..cf679d6c041 100644
--- a/src/hotspot/share/opto/parse2.cpp
+++ b/src/hotspot/share/opto/parse2.cpp
@@ -239,7 +239,7 @@ void Parse::array_store(BasicType bt) {
       PreserveReexecuteState preexecs(this);
       inc_sp(3);
       jvms()->set_should_reexecute(true);
-      cast_val->as_InlineType()->store_flat(this, ary, adr, nullptr, 0, MO_UNORDERED | IN_HEAP | IS_ARRAY);
+      cast_val->as_InlineType()->store_flat(this, ary, adr, nullptr, 0, -1, MO_UNORDERED | IN_HEAP | IS_ARRAY);
       return;
     } else if (ary_t->is_null_free()) {
       // Store to non-flat inline type array (elements can never be null)
@@ -297,7 +297,7 @@ void Parse::array_store(BasicType bt) {
         PreserveReexecuteState preexecs(this);
         inc_sp(3);
         jvms()->set_should_reexecute(true);
-        val->as_InlineType()->store_flat(this, casted_ary, casted_adr, nullptr, 0, MO_UNORDERED | IN_HEAP | IS_ARRAY);
+        val->as_InlineType()->store_flat(this, casted_ary, casted_adr, nullptr, 0, -1, MO_UNORDERED | IN_HEAP | IS_ARRAY);
       } else if (!stopped()) {
         // Element type is unknown, emit runtime call
diff --git a/src/hotspot/share/opto/parse3.cpp b/src/hotspot/share/opto/parse3.cpp
index c098b6abbd6..b8b0f613dfa 100644
--- a/src/hotspot/share/opto/parse3.cpp
+++ b/src/hotspot/share/opto/parse3.cpp
@@ -151,7 +151,7 @@ void Parse::do_get_xxx(Node* obj, ciField* field) {
     ld = InlineTypeNode::make_default(_gvn, field_klass->as_inline_klass());
   } else if (field->is_flat()) {
     // Loading from a flat inline type field.
-    ld = InlineTypeNode::make_from_flat(this, field_klass->as_inline_klass(), obj, obj, field->holder(), offset);
+    ld = InlineTypeNode::make_from_flat(this, field_klass->as_inline_klass(), obj, obj, field->holder(), offset, field->null_marker_offset());
   } else {
     // Build the resultant type of the load
     const Type* type;
@@ -268,10 +268,10 @@ void Parse::do_put_xxx(Node* obj, ciField* field, bool is_field) {
   } else if (field->is_flat()) {
     // Storing to a flat inline type field.
     if (!val->is_InlineType()) {
-      val = InlineTypeNode::make_from_oop(this, val, field->type()->as_inline_klass());
+      val = InlineTypeNode::make_from_oop(this, val, field->type()->as_inline_klass(), field->is_null_free());
     }
     inc_sp(1);
-    val->as_InlineType()->store_flat(this, obj, obj, field->holder(), offset, IN_HEAP | MO_UNORDERED);
+    val->as_InlineType()->store_flat(this, obj, obj, field->holder(), offset, field->null_marker_offset(), IN_HEAP | MO_UNORDERED);
     dec_sp(1);
   } else {
     // Store the value.
diff --git a/src/hotspot/share/opto/type.cpp b/src/hotspot/share/opto/type.cpp
index e527e44e8c7..f15d6a8a660 100644
--- a/src/hotspot/share/opto/type.cpp
+++ b/src/hotspot/share/opto/type.cpp
@@ -2194,13 +2194,21 @@ const TypeTuple *TypeTuple::INT_CC_PAIR;
 const TypeTuple *TypeTuple::LONG_CC_PAIR;
 
 static void collect_inline_fields(ciInlineKlass* vk, const Type** field_array, uint& pos) {
-  for (int j = 0; j < vk->nof_nonstatic_fields(); j++) {
-    ciField* field = vk->nonstatic_field_at(j);
-    BasicType bt = field->type()->basic_type();
-    const Type* ft = Type::get_const_type(field->type());
-    field_array[pos++] = ft;
-    if (type2size[bt] == 2) {
-      field_array[pos++] = Type::HALF;
+  for (int i = 0; i < vk->nof_declared_nonstatic_fields(); i++) {
+    ciField* field = vk->declared_nonstatic_field_at(i);
+    if (field->is_flat()) {
+      collect_inline_fields(field->type()->as_inline_klass(), field_array, pos);
+      if (!field->is_null_free()) {
+        // TODO add is_init field at the end
+        field_array[pos++] = Type::get_const_basic_type(T_BOOLEAN);
+      }
+    } else {
+      BasicType bt = field->type()->basic_type();
+      const Type* ft = Type::get_const_type(field->type());
+      field_array[pos++] = ft;
+      if (type2size[bt] == 2) {
+        field_array[pos++] = Type::HALF;
+      }
     }
   }
 }
@@ -2230,6 +2238,7 @@ const TypeTuple *TypeTuple::make_range(ciSignature* sig, InterfaceHandling inter
       uint pos = TypeFunc::Parms;
       field_array[pos++] = get_const_type(return_type); // Oop might be null when returning as fields
       collect_inline_fields(return_type->as_inline_klass(), field_array, pos);
+      // TODO move this in
       // InlineTypeNode::IsInit field used for null checking
       field_array[pos++] = get_const_basic_type(T_BOOLEAN);
       break;
diff --git a/src/hotspot/share/runtime/deoptimization.cpp b/src/hotspot/share/runtime/deoptimization.cpp
index 59cadcde989..7cb38a500d1 100644
--- a/src/hotspot/share/runtime/deoptimization.cpp
+++ b/src/hotspot/share/runtime/deoptimization.cpp
@@ -1501,8 +1501,9 @@ class ReassignedField {
   BasicType _type;
   InstanceKlass* _klass;
   bool _is_flat;
+  bool _is_null_free;
 public:
-  ReassignedField() : _offset(0), _type(T_ILLEGAL), _klass(nullptr), _is_flat(false) { }
+  ReassignedField() : _offset(0), _type(T_ILLEGAL), _klass(nullptr), _is_flat(false), _is_null_free(false) { }
 };
 
 static int compare(ReassignedField* left, ReassignedField* right) {
@@ -1511,21 +1512,28 @@ static int compare(ReassignedField* left, ReassignedField* right) {
 
 // Restore fields of an eliminated instance object using the same field order
 // returned by HotSpotResolvedObjectTypeImpl.getInstanceFields(true)
-static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal, int base_offset, TRAPS) {
+static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap* reg_map, ObjectValue* sv, int svIndex, oop obj, bool skip_internal, int base_offset, GrowableArray<ReassignedField>* null_markers, TRAPS) {
   GrowableArray<ReassignedField>* fields = new GrowableArray<ReassignedField>();
+  if (null_markers == nullptr) {
+    // Null markers are not real fields
+    null_markers = new GrowableArray<ReassignedField>();
+  }
   InstanceKlass* ik = klass;
+  // TODO can we use a hierarchical field stream here?
   while (ik != nullptr) {
     for (AllFieldStream fs(ik); !fs.done(); fs.next()) {
       if (!fs.access_flags().is_static() && (!skip_internal || !fs.field_flags().is_injected())) {
         ReassignedField field;
         field._offset = fs.offset();
         field._type = Signature::basic_type(fs.signature());
-        if (fs.is_null_free_inline_type()) {
-          if (fs.is_flat()) {
-            field._is_flat = true;
-            // Resolve klass of flat inline type field
-            field._klass = InlineKlass::cast(klass->get_inline_type_field_klass(fs.index()));
-          } else {
+        if (fs.is_flat()) {
+          field._is_flat = true;
+          field._is_null_free = fs.is_null_free_inline_type();
+          // Resolve klass of flat inline type field
+          field._klass = InlineKlass::cast(klass->get_inline_type_field_klass(fs.index()));
+        } else {
+          // TODO?
+          if (fs.is_null_free_inline_type()) {
            field._type = T_OBJECT; // Can be removed once Q-descriptors have been removed.
          }
        }
@@ -1544,7 +1552,15 @@ static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap
       InstanceKlass* vk = fields->at(i)._klass;
       assert(vk != nullptr, "must be resolved");
       offset -= InlineKlass::cast(vk)->first_field_offset(); // Adjust offset to omit oop header
-      svIndex = reassign_fields_by_klass(vk, fr, reg_map, sv, svIndex, obj, skip_internal, offset, CHECK_0);
+      svIndex = reassign_fields_by_klass(vk, fr, reg_map, sv, svIndex, obj, skip_internal, offset, null_markers, CHECK_0);
+
+      if (!fields->at(i)._is_null_free) {
+        int nm_offset = offset + InlineKlass::cast(vk)->null_marker_offset();
+        ReassignedField field;
+        field._offset = nm_offset;
+        null_markers->append(field);
+      }
+
       continue; // Continue because we don't need to increment svIndex
     }
     ScopeValue* scope_field = sv->field_at(svIndex);
@@ -1622,6 +1638,14 @@ static int reassign_fields_by_klass(InstanceKlass* klass, frame* fr, RegisterMap
     }
     svIndex++;
   }
+  if (base_offset == 0) {
+    for (int i = 0; i < null_markers->length(); ++i) {
+      int offset = null_markers->at(i)._offset;
+      jbyte is_init = (jbyte)StackValue::create_stack_value(fr, reg_map, sv->field_at(svIndex++))->get_jint();
+      tty->print_cr("NULL MARKER %d %d", offset, is_init);
+      obj->byte_field_put(offset, is_init);
+    }
+  }
   return svIndex;
 }
 
@@ -1635,7 +1659,7 @@ void Deoptimization::reassign_flat_array_elements(frame* fr, RegisterMap* reg_ma
   for (int i = 0; i < sv->field_size(); i++) {
     ScopeValue* val = sv->field_at(i);
     int offset = base_offset + (i << Klass::layout_helper_log2_element_size(vak->layout_helper()));
-    reassign_fields_by_klass(vk, fr, reg_map, val->as_ObjectValue(), 0, (oop)obj, skip_internal, offset, CHECK);
+    reassign_fields_by_klass(vk, fr, reg_map, val->as_ObjectValue(), 0, (oop)obj, skip_internal, offset, nullptr, CHECK);
   }
 }
 
@@ -1684,7 +1708,7 @@ void Deoptimization::reassign_fields(frame* fr, RegisterMap* reg_map, GrowableAr
     }
     if (k->is_instance_klass()) {
      InstanceKlass* ik = InstanceKlass::cast(k);
-      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal, 0, CHECK);
+      reassign_fields_by_klass(ik, fr, reg_map, sv, 0, obj(), skip_internal, 0, nullptr, CHECK);
     } else if (k->is_flatArray_klass()) {
       FlatArrayKlass* vak = FlatArrayKlass::cast(k);
       reassign_flat_array_elements(fr, reg_map, sv, (flatArrayOop) obj(), vak, skip_internal, CHECK);
diff --git a/src/hotspot/share/runtime/sharedRuntime.cpp b/src/hotspot/share/runtime/sharedRuntime.cpp
index fefb8fb9131..8edfb557cd6 100644
--- a/src/hotspot/share/runtime/sharedRuntime.cpp
+++ b/src/hotspot/share/runtime/sharedRuntime.cpp
@@ -2588,24 +2588,24 @@ void AdapterHandlerLibrary::initialize() {
     _no_arg_handler = create_adapter(no_arg_blob, no_args, true);
 
     CompiledEntrySignature obj_args;
-    SigEntry::add_entry(obj_args.sig(), T_OBJECT, nullptr);
+    SigEntry::add_entry(obj_args.sig(), T_OBJECT);
     obj_args.compute_calling_conventions();
     _obj_arg_handler = create_adapter(obj_arg_blob, obj_args, true);
 
     CompiledEntrySignature int_args;
-    SigEntry::add_entry(int_args.sig(), T_INT, nullptr);
+    SigEntry::add_entry(int_args.sig(), T_INT);
     int_args.compute_calling_conventions();
     _int_arg_handler = create_adapter(int_arg_blob, int_args, true);
 
     CompiledEntrySignature obj_int_args;
-    SigEntry::add_entry(obj_int_args.sig(), T_OBJECT, nullptr);
-    SigEntry::add_entry(obj_int_args.sig(), T_INT, nullptr);
+    SigEntry::add_entry(obj_int_args.sig(), T_OBJECT);
+    SigEntry::add_entry(obj_int_args.sig(), T_INT);
     obj_int_args.compute_calling_conventions();
     _obj_int_arg_handler = create_adapter(obj_int_arg_blob, obj_int_args, true);
 
     CompiledEntrySignature obj_obj_args;
-    SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT, nullptr);
-    SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT, nullptr);
+    SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
+    SigEntry::add_entry(obj_obj_args.sig(), T_OBJECT);
     obj_obj_args.compute_calling_conventions();
     _obj_obj_arg_handler = create_adapter(obj_obj_arg_blob, obj_obj_args, true);
 
@@ -2868,8 +2868,8 @@ void CompiledEntrySignature::compute_calling_conventions(bool init) {
           _sig_cc_ro->appendAll(vk->extended_sig());
           if (bt == T_OBJECT) {
             // Nullable inline type argument, insert InlineTypeNode::IsInit field right after T_METADATA delimiter
-            _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN, -1, nullptr));
-            _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN, -1, nullptr));
+            _sig_cc->insert_before(last+1, SigEntry(T_BOOLEAN));
+            _sig_cc_ro->insert_before(last_ro+1, SigEntry(T_BOOLEAN));
           }
         }
       } else {
diff --git a/src/hotspot/share/runtime/signature.cpp b/src/hotspot/share/runtime/signature.cpp
index 3e71dcc301a..43640faae66 100644
--- a/src/hotspot/share/runtime/signature.cpp
+++ b/src/hotspot/share/runtime/signature.cpp
@@ -687,10 +687,13 @@ ssize_t SignatureVerifier::is_valid_type(const char* type, ssize_t limit) {
 #endif // ASSERT
 
 // Adds an argument to the signature
-void SigEntry::add_entry(GrowableArray<SigEntry>* sig, BasicType bt, Symbol* symbol, int offset) {
-  sig->append(SigEntry(bt, offset, symbol));
+void SigEntry::add_entry(GrowableArray<SigEntry>* sig, BasicType bt, Symbol* symbol, int offset, int sort_offset) {
+  if (sort_offset == -1) {
+    sort_offset = offset;
+  }
+  sig->append(SigEntry(bt, offset, sort_offset, symbol));
   if (bt == T_LONG || bt == T_DOUBLE) {
-    sig->append(SigEntry(T_VOID, offset, symbol)); // Longs and doubles take two stack slots
+    sig->append(SigEntry(T_VOID, offset, sort_offset, symbol)); // Longs and doubles take two stack slots
   }
 }
 
diff --git a/src/hotspot/share/runtime/signature.hpp b/src/hotspot/share/runtime/signature.hpp
index 461073562f5..28133bf6da9 100644
--- a/src/hotspot/share/runtime/signature.hpp
+++ b/src/hotspot/share/runtime/signature.hpp
@@ -577,17 +577,22 @@ typedef GrowableArrayFilterIterator ExtendedSignature;
 // specially. See comment for InlineKlass::collect_fields().
 class SigEntry {
  public:
+  // TODO improve these comments
   BasicType _bt;
-  int _offset;
-  Symbol* _symbol;
+  int _offset;      // Offset of the corresponding field in its value class holder for scalarized arguments (-1 otherwise). Used for packing and unpacking
+  int _sort_offset; // Offset used for sorting
+  Symbol* _symbol;  // For printing
 
   SigEntry()
    : _bt(T_ILLEGAL), _offset(-1), _sort_offset(-1), _symbol(NULL) {}
 
-  SigEntry(BasicType bt, int offset, Symbol* symbol)
-    : _bt(bt), _offset(offset), _symbol(symbol) {}
+  SigEntry(BasicType bt, int offset = -1, int sort_offset = -1, Symbol* symbol = nullptr)
+    : _bt(bt), _offset(offset), _sort_offset(sort_offset), _symbol(symbol) {}
 
   static int compare(SigEntry* e1, SigEntry* e2) {
+    if (e1->_sort_offset != e2->_sort_offset) {
+      return e1->_sort_offset - e2->_sort_offset;
+    }
     if (e1->_offset != e2->_offset) {
       return e1->_offset - e2->_offset;
     }
@@ -609,7 +614,7 @@ class SigEntry {
     ShouldNotReachHere();
     return 0;
   }
-  static void add_entry(GrowableArray<SigEntry>* sig, BasicType bt, Symbol* symbol, int offset = -1);
+  static void add_entry(GrowableArray<SigEntry>* sig, BasicType bt, Symbol* symbol = nullptr, int offset = -1, int sort_offset = -1);
   static bool skip_value_delimiters(const GrowableArray<SigEntry>* sig, int i);
   static int fill_sig_bt(const GrowableArray<SigEntry>* sig, BasicType* sig_bt);
   static TempNewSymbol create_symbol(const GrowableArray<SigEntry>* sig);
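Aside (rough sketch, not part of the patch): how _offset and _sort_offset are meant to interact after the SigEntry changes above. The concrete offsets (16, 20, 24, 40) are invented for illustration, and the snippet relies on HotSpot's resource-allocated GrowableArray, so it is not a standalone program.

    // A nullable flat field with two int fields at offsets 16 and 20 in its holder and a
    // null marker laid out at offset 40. collect_fields() registers the marker with its real
    // offset (used for packing/unpacking) but gives it sort offset max_offset + 1 so that it
    // stays grouped with the value's own fields, ahead of the T_VOID end delimiter.
    GrowableArray<SigEntry> sig;
    SigEntry::add_entry(&sig, T_METADATA, nullptr, 16);      // start delimiter of the flat field
    SigEntry::add_entry(&sig, T_INT,      nullptr, 16);
    SigEntry::add_entry(&sig, T_INT,      nullptr, 20);
    SigEntry::add_entry(&sig, T_BOOLEAN,  nullptr, 40, 21);  // null marker: _offset = 40, _sort_offset = 21
    SigEntry::add_entry(&sig, T_VOID,     nullptr, 24);      // end delimiter
    sig.sort(SigEntry::compare);
    // After sorting by _sort_offset, the T_BOOLEAN marker sits between the last real field
    // (sort offset 20) and the end delimiter (sort offset 24), which is what C2 relies on.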