diff --git a/src/nlp_expr.jl b/src/nlp_expr.jl
index 6393ea297e0..066610bb8ba 100644
--- a/src/nlp_expr.jl
+++ b/src/nlp_expr.jl
@@ -408,10 +408,15 @@ function _MA.operate!!(
     _throw_if_not_real(x)
     if any(isequal(_MA.Zero()), args)
         return x
-    elseif x.head == :+
-        push!(x.args, *(args...))
-        return x
     end
+    # It may seem like we should do this performance optimization, but it is
+    # NOT safe. See JuMP#3825. The issue is that, even though we are calling
+    # `operate!!`, `x` must not be mutated, because it may, amongst other
+    # things, be aliased in one of the args.
+    # elseif x.head == :+
+    #     push!(x.args, *(args...))
+    #     return x
+    # end
     return +(x, *(args...))
 end
 
diff --git a/test/test_nlp_expr.jl b/test/test_nlp_expr.jl
index a92d624462e..afc555f7da4 100644
--- a/test/test_nlp_expr.jl
+++ b/test/test_nlp_expr.jl
@@ -873,6 +873,16 @@ function test_ma_zero_in_operate!!()
     return
 end
 
+function test_ma_operate!!_nested_sum()
+    model = Model()
+    @variable(model, x)
+    y = NonlinearExpr(:+, Any[x])
+    z = MA.operate!!(MA.add_mul, y, y)
+    @test isequal_canonical(y, @force_nonlinear(+x))
+    @test isequal_canonical(z, @force_nonlinear(+(+x, +x)))
+    return
+end
+
 function test_nonlinear_operator_inferred()
     model = Model()
     @variable(model, x)
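
For reviewers, here is a minimal sketch of the aliasing hazard that the removed branch runs into, and that `test_ma_operate!!_nested_sum` exercises. `ToyExpr` and `unsafe_add_mul!` are hypothetical stand-ins, not JuMP's `GenericNonlinearExpr` or `_MA.operate!!`; the sketch only illustrates why `push!`-ing into `x.args` is unsafe when `x` is aliased in the arguments:

```julia
# Hypothetical toy expression type, standing in for GenericNonlinearExpr.
struct ToyExpr
    head::Symbol
    args::Vector{Any}
end

# The removed "optimization": mutate `x` in place instead of building a
# new `+` node.
unsafe_add_mul!(x::ToyExpr, arg) = (push!(x.args, arg); x)

y = ToyExpr(:+, Any[:x])     # plays the role of `+x`
z = unsafe_add_mul!(y, y)    # `y` is aliased as its own argument
# Now `y.args` is Any[:x, y]: the expression contains itself, so `y` no
# longer represents `+x`, and a naive recursive walk over the tree
# (evaluation, flattening) would not terminate. Building a fresh node
# with `+(x, *(args...))`, as the patch does, avoids this.
```

Returning a new node leaves `y` untouched, which is exactly what the new test asserts: `y` is still `+x` and `z` is `+(+x, +x)`.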