diff --git a/src/coreclr/jit/importercalls.cpp b/src/coreclr/jit/importercalls.cpp index b2a2d46892aaf..c57ecc3c71952 100644 --- a/src/coreclr/jit/importercalls.cpp +++ b/src/coreclr/jit/importercalls.cpp @@ -2597,6 +2597,45 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis, break; } + // Allow some lightweight intrinsics in Tier0 which can improve throughput + // We introduced betterToExpand here because, unlike mustExpand, we're fine if the intrinsic + // decides not to expand itself in this case. + bool betterToExpand = false; + + // NOTE: MinOpts() is always true for Tier0 so we have to check explicit flags instead. + // To be fixed in https://github.com/dotnet/runtime/pull/77465 + const bool tier0opts = !opts.compDbgCode && !opts.jitFlags->IsSet(JitFlags::JIT_FLAG_MIN_OPT); + + if (!mustExpand && tier0opts) + { + switch (ni) + { + // This one is just `return true/false` + case NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant: + + // We need these to be able to fold "typeof(...) == typeof(...)" + case NI_System_RuntimeTypeHandle_GetValueInternal: + case NI_System_Type_GetTypeFromHandle: + case NI_System_Type_op_Equality: + case NI_System_Type_op_Inequality: + + // Simple cases + case NI_System_String_get_Chars: + case NI_System_String_get_Length: + case NI_System_Span_get_Item: + case NI_System_Span_get_Length: + case NI_System_ReadOnlySpan_get_Item: + case NI_System_ReadOnlySpan_get_Length: + betterToExpand = true; + break; + + default: + // Unsafe.* are all small enough to prefer expansions. + betterToExpand = ni >= NI_SRCS_UNSAFE_START && ni <= NI_SRCS_UNSAFE_END; + break; + } + } + GenTree* retNode = nullptr; // Under debug and minopts, only expand what is required. @@ -2604,7 +2643,7 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis, // If that call is an intrinsic and is expanded, codegen for NextCallReturnAddress will fail. 
// To avoid that we conservatively expand only required intrinsics in methods that call // the NextCallReturnAddress intrinsic. - if (!mustExpand && (opts.OptimizationDisabled() || info.compHasNextCallRetAddr)) + if (!mustExpand && ((opts.OptimizationDisabled() && !betterToExpand) || info.compHasNextCallRetAddr)) { *pIntrinsicName = NI_Illegal; return retNode; @@ -2715,6 +2754,12 @@ GenTree* Compiler::impIntrinsic(GenTree* newobjThis, JITDUMP("\nExpanding RuntimeHelpers.IsKnownConstant to true early\n"); // We can also consider FTN_ADDR here } + else if (opts.OptimizationDisabled()) + { + // It doesn't make sense to carry it as GT_INTRINSIC till Morph in Tier0 + retNode = gtNewIconNode(0); + JITDUMP("\nExpanding RuntimeHelpers.IsKnownConstant to false early\n"); + } else { // op1 is not a known constant, we'll do the expansion in morph diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index a34552aa1f197..15521b03761be 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -9014,11 +9014,13 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optA case GT_EQ: case GT_NE: { - GenTree* optimizedTree = gtFoldTypeCompare(tree); - - if (optimizedTree != tree) + if (opts.OptimizationEnabled()) { - return fgMorphTree(optimizedTree); + GenTree* optimizedTree = gtFoldTypeCompare(tree); + if (optimizedTree != tree) + { + return fgMorphTree(optimizedTree); + } } // Pattern-matching optimization: