diff --git a/src/ccall.cpp b/src/ccall.cpp
index 15f51cf9d68972..c7853ccc2a8d86 100644
--- a/src/ccall.cpp
+++ b/src/ccall.cpp
@@ -493,7 +493,7 @@ static Value *julia_to_native(
         assert(!byRef); // don't expect any ABI to pass pointers by pointer
         return boxed(ctx, jvinfo);
     }
-    assert(jl_is_datatype(jlto) && julia_struct_has_layout((jl_datatype_t*)jlto));
+    assert(jl_is_datatype(jlto) && jl_struct_try_layout((jl_datatype_t*)jlto));
 
     typeassert_input(ctx, jvinfo, jlto, jlto_env, argn);
     if (!byRef)
diff --git a/src/cgutils.cpp b/src/cgutils.cpp
index 2303ddb2869712..f715abb3b490ba 100644
--- a/src/cgutils.cpp
+++ b/src/cgutils.cpp
@@ -321,7 +321,7 @@ static size_t dereferenceable_size(jl_value_t *jt)
         // Array has at least this much data
         return sizeof(jl_array_t);
     }
-    else if (jl_is_datatype(jt) && ((jl_datatype_t*)jt)->layout) {
+    else if (jl_is_datatype(jt) && jl_struct_try_layout((jl_datatype_t*)jt)) {
         return jl_datatype_size(jt);
     }
     return 0;
@@ -339,7 +339,7 @@ static unsigned julia_alignment(jl_value_t *jt)
         // and this is the guarantee we have for the GC bits
        return 16;
    }
-    assert(jl_is_datatype(jt) && ((jl_datatype_t*)jt)->layout);
+    assert(jl_is_datatype(jt) && jl_struct_try_layout((jl_datatype_t*)jt));
     unsigned alignment = jl_datatype_align(jt);
     if (alignment > JL_HEAP_ALIGNMENT)
         return JL_HEAP_ALIGNMENT;
@@ -555,16 +555,10 @@ static Type *bitstype_to_llvm(jl_value_t *bt, bool llvmcall = false)
 }
 
 static bool jl_type_hasptr(jl_value_t* typ)
-{ // assumes that jl_stored_inline(typ) is true
+{ // assumes that jl_stored_inline(typ) is true (and therefore that layout is defined)
     return jl_is_datatype(typ) && ((jl_datatype_t*)typ)->layout->npointers > 0;
 }
 
-// return whether all concrete subtypes of this type have the same layout
-static bool julia_struct_has_layout(jl_datatype_t *dt)
-{
-    return dt->layout || jl_has_fixed_layout(dt);
-}
-
 static unsigned jl_field_align(jl_datatype_t *dt, size_t i)
 {
     unsigned al = jl_field_offset(dt, i);
@@ -588,11 +582,8 @@ static Type *_julia_struct_to_llvm(jl_codegen_params_t *ctx, jl_value_t *jt, boo
     bool isTuple = jl_is_tuple_type(jt);
     jl_svec_t *ftypes = jl_get_fieldtypes(jst);
     size_t i, ntypes = jl_svec_len(ftypes);
-    if (!julia_struct_has_layout(jst))
+    if (!jl_struct_try_layout(jst))
         return NULL; // caller should have checked jl_type_mappable_to_c already, but we'll be nice
-    if (jst->layout == NULL)
-        jl_compute_field_offsets(jst);
-    assert(jst->layout);
     if (ntypes == 0 || jl_datatype_nbits(jst) == 0)
         return T_void;
     Type *_struct_decl = NULL;
@@ -1904,6 +1895,9 @@ static bool emit_getfield_unknownidx(jl_codectx_t &ctx,
         return true;
     }
     if (nfields == 1) {
+        if (jl_has_free_typevars(jl_field_type(stt, 0))) {
+            return false;
+        }
         (void)idx0();
         *ret = emit_getfield_knownidx(ctx, strct, 0, stt, order);
         return true;
diff --git a/src/codegen.cpp b/src/codegen.cpp
index dc03d453d4578f..3d8ad335160809 100644
--- a/src/codegen.cpp
+++ b/src/codegen.cpp
@@ -933,12 +933,12 @@ static MDNode *best_tbaa(jl_value_t *jt) {
 // note that this includes jl_isbits, although codegen should work regardless
 static bool jl_is_concrete_immutable(jl_value_t* t)
 {
-    return jl_is_immutable_datatype(t) && ((jl_datatype_t*)t)->layout;
+    return jl_is_immutable_datatype(t) && ((jl_datatype_t*)t)->isconcretetype;
 }
 
 static bool jl_is_pointerfree(jl_value_t* t)
 {
-    if (!jl_is_immutable_datatype(t))
+    if (!jl_is_concrete_immutable(t))
         return 0;
     const jl_datatype_layout_t *layout = ((jl_datatype_t*)t)->layout;
     return layout && layout->npointers == 0;
@@ -3033,9 +3033,9 @@ static bool emit_builtin_call(jl_codectx_t &ctx, jl_cgval_t *ret, jl_value_t *f,
                 return true;
             }
 
-            if (jl_is_datatype(utt) && utt->layout) {
+            if (jl_is_datatype(utt) && jl_struct_try_layout(utt)) {
                 ssize_t idx = jl_field_index(utt, name, 0);
-                if (idx != -1) {
+                if (idx != -1 && !jl_has_free_typevars(jl_field_type(utt, idx))) {
                     *ret = emit_getfield_knownidx(ctx, obj, idx, utt, order);
                     return true;
                 }
@@ -3063,14 +3063,16 @@ static bool emit_builtin_call(jl_codectx_t &ctx, jl_cgval_t *ret, jl_value_t *f,
 
         }
         if (jl_is_datatype(utt)) {
-            if (jl_is_structtype(utt) && utt->layout) {
+            if (jl_struct_try_layout(utt)) {
                 size_t nfields = jl_datatype_nfields(utt);
                 // integer index
                 size_t idx;
                 if (fld.constant && (idx = jl_unbox_long(fld.constant) - 1) < nfields) {
-                    // known index
-                    *ret = emit_getfield_knownidx(ctx, obj, idx, utt, order);
-                    return true;
+                    if (!jl_has_free_typevars(jl_field_type(utt, idx))) {
+                        // known index
+                        *ret = emit_getfield_knownidx(ctx, obj, idx, utt, order);
+                        return true;
+                    }
                 }
                 else {
                     // unknown index
@@ -3115,8 +3117,6 @@ static bool emit_builtin_call(jl_codectx_t &ctx, jl_cgval_t *ret, jl_value_t *f,
                 }
             }
         }
-        // TODO: attempt better codegen for approximate types, if the types
-        // and offsets of some fields are independent of parameters.
         // TODO: generic getfield func with more efficient calling convention
         return false;
     }
@@ -3155,7 +3155,7 @@ static bool emit_builtin_call(jl_codectx_t &ctx, jl_cgval_t *ret, jl_value_t *f,
         }
 
         jl_datatype_t *uty = (jl_datatype_t*)jl_unwrap_unionall(obj.typ);
-        if (jl_is_structtype(uty) && uty->layout) {
+        if (jl_is_datatype(uty) && jl_struct_try_layout(uty)) {
             ssize_t idx = -1;
             if (fld.constant && fld.typ == (jl_value_t*)jl_symbol_type) {
                 idx = jl_field_index(uty, (jl_sym_t*)fld.constant, 0);
diff --git a/src/datatype.c b/src/datatype.c
index 9bad2bc3d3bd81..7749387deee1e9 100644
--- a/src/datatype.c
+++ b/src/datatype.c
@@ -238,9 +238,21 @@ STATIC_INLINE void jl_maybe_allocate_singleton_instance(jl_datatype_t *st)
     }
 }
 
+// return whether all concrete subtypes of this type have the same layout
+int jl_struct_try_layout(jl_datatype_t *dt)
+{
+    if (dt->layout)
+        return 1;
+    else if (!jl_has_fixed_layout(dt))
+        return 0;
+    jl_compute_field_offsets(dt);
+    assert(dt->layout);
+    return 1;
+}
+
 int jl_datatype_isinlinealloc(jl_datatype_t *ty, int pointerfree) JL_NOTSAFEPOINT
 {
-    if (ty->name->mayinlinealloc && ty->layout) {
+    if (ty->name->mayinlinealloc && jl_struct_try_layout(ty)) {
         if (ty->layout->npointers > 0) {
             if (pointerfree)
                 return 0;
diff --git a/src/jltypes.c b/src/jltypes.c
index f9f60f1227b9a7..0fe280b97369a3 100644
--- a/src/jltypes.c
+++ b/src/jltypes.c
@@ -223,7 +223,9 @@ int jl_has_fixed_layout(jl_datatype_t *dt)
 {
     if (dt->layout || dt->isconcretetype)
         return 1;
-    if (jl_is_tuple_type(dt))
+    if (dt->name->abstract)
+        return 0;
+    if (jl_is_tuple_type(dt) || jl_is_namedtuple_type(dt))
         return 0; // TODO: relax more?
     jl_svec_t *types = jl_get_fieldtypes(dt);
     size_t i, l = jl_svec_len(types);
@@ -1437,7 +1439,7 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
     jl_gc_wb(ndt, ndt->parameters);
     ndt->types = NULL; // to be filled in below
     if (istuple) {
-        ndt->types = p;
+        ndt->types = p; // TODO: this may need to filter out certain types
     }
     else if (isnamedtuple) {
         jl_value_t *names_tup = jl_svecref(p, 0);
@@ -1463,7 +1465,7 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             jl_gc_wb(ndt, ndt->types);
         }
         else {
-            ndt->types = jl_emptysvec;
+            ndt->types = jl_emptysvec; // XXX: this is essentially always false
         }
     }
     ndt->size = 0;
diff --git a/src/julia_internal.h b/src/julia_internal.h
index c22f8846de52ac..5c73150a1eac44 100644
--- a/src/julia_internal.h
+++ b/src/julia_internal.h
@@ -539,6 +539,7 @@ jl_datatype_t *jl_mk_builtin_func(jl_datatype_t *dt, const char *name, jl_fptr_a
 int jl_obviously_unequal(jl_value_t *a, jl_value_t *b);
 JL_DLLEXPORT jl_array_t *jl_find_free_typevars(jl_value_t *v);
 int jl_has_fixed_layout(jl_datatype_t *t);
+int jl_struct_try_layout(jl_datatype_t *dt);
 int jl_type_mappable_to_c(jl_value_t *ty);
 jl_svec_t *jl_outer_unionall_vars(jl_value_t *u);
 jl_value_t *jl_type_intersection_env_s(jl_value_t *a, jl_value_t *b, jl_svec_t **penv, int *issubty);
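
Note (reviewer sketch, not part of the patch): the pattern the changes converge on is to ask jl_struct_try_layout() before dereferencing ->layout, and then to reject any field whose declared type still contains free typevars before emitting a known-index load. Below is a minimal hypothetical caller illustrating that pattern; it assumes only the helper added in datatype.c above plus existing APIs (jl_is_datatype, jl_datatype_nfields, jl_field_type, jl_has_free_typevars), and the function name example_can_emit_known_field is invented for illustration.

/* Hypothetical sketch: when may codegen-style code treat a field access as a
 * fixed-offset load?  jl_struct_try_layout() may call jl_compute_field_offsets(),
 * so dt->layout is only safe to use after it returns 1; a field whose type still
 * has free typevars is rejected, mirroring the guards added in cgutils.cpp and
 * codegen.cpp above. */
static int example_can_emit_known_field(jl_datatype_t *dt, size_t idx)
{
    if (!jl_is_datatype((jl_value_t*)dt) || !jl_struct_try_layout(dt))
        return 0;   /* layout unknown, or not shared by all concrete subtypes */
    if (idx >= jl_datatype_nfields(dt))
        return 0;   /* out-of-range field index */
    if (jl_has_free_typevars(jl_field_type(dt, idx)))
        return 0;   /* field type depends on unbound parameters */
    return 1;       /* offset and element type are stable; a known-index load is fine */
}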