From c24517918dd7ea33df4eb0c965dd7d45d530d7b0 Mon Sep 17 00:00:00 2001
From: Jameson Nash
Date: Tue, 16 May 2023 16:14:58 -0400
Subject: [PATCH] fix missing gc root on store to iparams (#49820)

Try to optimize the order of this code a bit more, given that these
checks are somewhat infrequently needed.

Fix #49762
---
 src/jltypes.c | 75 ++++++++++++++++++++++++++++++++++++++++-----------
 1 file changed, 59 insertions(+), 16 deletions(-)

diff --git a/src/jltypes.c b/src/jltypes.c
index ff40b7e93092d..5fc98194775b5 100644
--- a/src/jltypes.c
+++ b/src/jltypes.c
@@ -1844,13 +1844,8 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
     jl_typename_t *tn = dt->name;
     int istuple = (tn == jl_tuple_typename);
     int isnamedtuple = (tn == jl_namedtuple_typename);
-    if (check && tn != jl_type_typename) {
-        size_t i;
-        for (i = 0; i < ntp; i++)
-            iparams[i] = normalize_unionalls(iparams[i]);
-    }

-    // check type cache, if applicable
+    // check if type cache will be applicable
     int cacheable = 1;
     if (istuple) {
         size_t i;
@@ -1886,7 +1881,15 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             if (jl_has_free_typevars(iparams[i]))
                 cacheable = 0;
     }
+    // if applicable, check the cache first for a match
     if (cacheable) {
+        jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
+        if (lkup != NULL)
+            return lkup;
+    }
+    // if some normalization might be needed, do that now
+    // it is probably okay to mutate iparams, and we only store globally rooted objects here
+    if (check && cacheable) {
         size_t i;
         for (i = 0; i < ntp; i++) {
             jl_value_t *pi = iparams[i];
@@ -1894,18 +1897,16 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
                 continue;
             if (jl_is_datatype(pi))
                 continue;
-            if (jl_is_vararg(pi)) {
-                pi = jl_unwrap_vararg(pi);
-                if (jl_has_free_typevars(pi))
-                    continue;
-            }
-            // normalize types equal to wrappers (prepare for wrapper_id)
+            if (jl_is_vararg(pi))
+                // This would require some special handling, but is not needed
+                // at the moment (and might be better handled in jl_wrap_vararg instead).
+                continue;
+            if (!cacheable && jl_has_free_typevars(pi))
+                continue;
+            // normalize types equal to wrappers (prepare for Typeofwrapper)
             jl_value_t *tw = extract_wrapper(pi);
             if (tw && tw != pi && (tn != jl_type_typename || jl_typeof(pi) == jl_typeof(tw)) &&
                 jl_types_equal(pi, tw)) {
-                // This would require some special handling, but is never used at
-                // the moment.
-                assert(!jl_is_vararg(iparams[i]));
                 iparams[i] = tw;
                 if (p) jl_gc_wb(p, tw);
             }
@@ -1915,6 +1916,9 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             // normalize Type{Type{Union{}}} to Type{TypeofBottom}
             iparams[0] = (jl_value_t*)jl_typeofbottom_type;
         }
+    }
+    // then check the cache again, if applicable
+    if (cacheable) {
         jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
         if (lkup != NULL)
             return lkup;
@@ -1923,12 +1927,15 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
     if (stack_lkup)
         return stack_lkup;

+    // check parameters against bounds in type definition
+    // for whether this is even valid
     if (check && !istuple) {
-        // check parameters against bounds in type definition
+        assert(ntp > 0);
         check_datatype_parameters(tn, iparams, ntp);
     }
     else if (ntp == 0 && jl_emptytuple_type != NULL) {
         // empty tuple type case
+        assert(istuple);
         return (jl_value_t*)jl_emptytuple_type;
     }

@@ -1974,6 +1981,42 @@ static jl_value_t *inst_datatype_inner(jl_datatype_t *dt, jl_svec_t *p, jl_value
             jl_svecset(p, i, iparams[i]);
     }

+    // try to simplify some type parameters
+    if (check && tn != jl_type_typename) {
+        size_t i;
+        int changed = 0;
+        if (istuple) // normalization might change Tuple's, but not other types', cacheable status
+            cacheable = 1;
+        for (i = 0; i < ntp; i++) {
+            jl_value_t *newp = normalize_unionalls(iparams[i]);
+            if (newp != iparams[i]) {
+                iparams[i] = newp;
+                jl_svecset(p, i, newp);
+                changed = 1;
+            }
+            if (istuple && cacheable && !jl_is_concrete_type(newp))
+                cacheable = 0;
+        }
+        if (changed) {
+            // If this changed something, we need to check the cache again, in
+            // case we missed the match earlier before the normalizations
+            //
+            // e.g. return inst_datatype_inner(dt, p, iparams, ntp, stack, env, 0);
+            if (cacheable) {
+                jl_value_t *lkup = (jl_value_t*)lookup_type(tn, iparams, ntp);
+                if (lkup != NULL) {
+                    JL_GC_POP();
+                    return lkup;
+                }
+            }
+            jl_value_t *stack_lkup = lookup_type_stack(stack, dt, ntp, iparams);
+            if (stack_lkup) {
+                JL_GC_POP();
+                return stack_lkup;
+            }
+        }
+    }
+
     // acquire the write lock now that we know we need a new object
     // since we're going to immediately leak it globally via the instantiation stack
     if (cacheable) {
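
Note (a sketch for context, not part of the patch): the hazard class behind
the "missing gc root" in the subject. iparams is a plain C array, so a
jl_value_t* held only there is invisible to the GC; if normalize_unionalls
allocated a fresh type and the result sat in iparams before the rooted svec
p existed, any later allocation could collect it. The standard rooting idiom
from the embedding API, for illustration (the function name rooting_sketch
is made up; the API calls are real):

    #include <julia.h>

    // assumes jl_init() has already been called
    void rooting_sketch(void)
    {
        jl_value_t *tmp = NULL;
        JL_GC_PUSH1(&tmp);        // register &tmp as a GC root for this frame
        tmp = jl_box_int64(42);   // freshly allocated; reachable only via tmp
        // the call below may allocate and thus trigger GC; without the root
        // above, tmp could be collected before it is used
        jl_call1(jl_get_function(jl_base_module, "println"), tmp);
        JL_GC_POP();              // matching pop; tmp is unrooted afterwards
    }

The patch gets the same safety a different way: normalize_unionalls now runs
only after the rooted svec p exists, and each changed parameter is mirrored
into it with jl_svecset(p, i, newp), which keeps the fresh value reachable
(and applies the GC write barrier), so nothing newly allocated is ever held
only by the raw iparams buffer.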
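
A second note on the reordering itself: since these normalization checks are
infrequently needed, the patch probes the type cache first, normalizes only
on a miss, and re-probes afterwards in case normalization produced a key
that was already cached. A hedged sketch of that lookup discipline, with
hypothetical helpers (cache_get, canonicalize, and build_and_publish are
stand-ins for lookup_type, normalize_unionalls, and the instantiation path
in src/jltypes.c):

    /* Illustrative interning routine, not the actual runtime code. */
    typedef struct key key_t;
    typedef struct value value_t;

    value_t *cache_get(const key_t *k);         /* assumed: NULL on miss */
    int canonicalize(key_t *k);                 /* assumed: 1 if k changed */
    value_t *build_and_publish(const key_t *k);

    value_t *intern(key_t *k)
    {
        value_t *hit = cache_get(k);   /* fast path: most keys arrive canonical */
        if (hit)
            return hit;
        if (canonicalize(k)) {         /* the infrequently needed normalization */
            hit = cache_get(k);        /* re-probe: canonical key may be cached */
            if (hit)
                return hit;
        }
        return build_and_publish(k);   /* slow path: construct and insert */
    }

Probing before normalizing keeps the common cache-hit path free of
allocation, which is also what shrinks the window in which an unrooted
temporary can exist at all.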