diff --git a/base/sparse/higherorderfns.jl b/base/sparse/higherorderfns.jl
index 5ccd57e02a2f2..32a255e133281 100644
--- a/base/sparse/higherorderfns.jl
+++ b/base/sparse/higherorderfns.jl
@@ -107,18 +107,6 @@ broadcast(f::Tf, A::SparseMatrixCSC) where {Tf} = _noshapecheck_map(f, A)
     end
     return C
 end
-@inline function broadcast!(f::Tf, dest::SparseVecOrMat, ::Void, As::Vararg{Any,N}) where {Tf,N}
-    if f isa typeof(identity) && N == 1
-        A = As[1]
-        if A isa Number
-            return fill!(dest, A)
-        elseif A isa AbstractArray && Base.axes(dest) == Base.axes(A)
-            return copyto!(dest, A)
-        end
-    end
-    spbroadcast_args!(f, dest, Broadcast.combine_styles(As...), As...)
-    return dest
-end
 
 # the following three similar defs are necessary for type stability in the mixed vector/matrix case
 broadcast(f::Tf, A::SparseVector, Bs::Vararg{SparseVector,N}) where {Tf,N} =
@@ -1015,25 +1003,27 @@ broadcast(f, ::PromoteToSparse, ::Void, ::Void, As::Vararg{Any,N}) where {N} =
 # For broadcast! with ::Any inputs, we need a layer of indirection to determine whether
 # the inputs can be promoted to SparseVecOrMat. If it's just SparseVecOrMat and scalars,
 # we can handle it here, otherwise see below for the promotion machinery.
-function spbroadcast_args!(f::Tf, C, ::SPVM, A::SparseVecOrMat, Bs::Vararg{SparseVecOrMat,N}) where {Tf,N}
-    _aresameshape(C, A, Bs...) && return _noshapecheck_map!(f, C, A, Bs...)
-    Base.Broadcast.check_broadcast_indices(axes(C), A, Bs...)
+function broadcast!(f::Tf, dest::SparseVecOrMat, ::SPVM, A::SparseVecOrMat, Bs::Vararg{SparseVecOrMat,N}) where {Tf,N}
+    if f isa typeof(identity) && N == 0 && Base.axes(dest) == Base.axes(A)
+        return copyto!(dest, A)
+    end
+    _aresameshape(dest, A, Bs...) && return _noshapecheck_map!(f, dest, A, Bs...)
+    Base.Broadcast.check_broadcast_indices(axes(dest), A, Bs...)
     fofzeros = f(_zeros_eltypes(A, Bs...)...)
     fpreszeros = _iszero(fofzeros)
-    return fpreszeros ? _broadcast_zeropres!(f, C, A, Bs...) :
-                        _broadcast_notzeropres!(f, fofzeros, C, A, Bs...)
+    fpreszeros ? _broadcast_zeropres!(f, dest, A, Bs...) :
+                 _broadcast_notzeropres!(f, fofzeros, dest, A, Bs...)
+    return dest
 end
-function spbroadcast_args!(f::Tf, dest, ::SPVM, mixedsrcargs::Vararg{Any,N}) where {Tf,N}
+function broadcast!(f::Tf, dest::SparseVecOrMat, ::SPVM, mixedsrcargs::Vararg{Any,N}) where {Tf,N}
     # mixedsrcargs contains nothing but SparseVecOrMat and scalars
     parevalf, passedsrcargstup = capturescalars(f, mixedsrcargs)
-    return broadcast!(parevalf, dest, passedsrcargstup...)
-end
-function spbroadcast_args!(f::Tf, dest, ::PromoteToSparse, mixedsrcargs::Vararg{Any,N}) where {Tf,N}
-    broadcast!(f, dest, map(_sparsifystructured, mixedsrcargs)...)
+    broadcast!(parevalf, dest, nothing, passedsrcargstup...)
+    return dest
 end
-function spbroadcast_args!(f::Tf, dest, ::Any, mixedsrcargs::Vararg{Any,N}) where {Tf,N}
-    # Fallback. From a performance perspective would it be best to densify?
-    Broadcast._broadcast!(f, dest, mixedsrcargs...)
+function broadcast!(f::Tf, dest::SparseVecOrMat, ::PromoteToSparse, mixedsrcargs::Vararg{Any,N}) where {Tf,N}
+    broadcast!(f, dest, nothing, map(_sparsifystructured, mixedsrcargs)...)
+    return dest
 end
 
 _sparsifystructured(M::AbstractMatrix) = SparseMatrixCSC(M)
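
Usage sketch (not part of the patch): a hedged illustration of how the rewritten methods above are expected to be reached from the public broadcast! entry point. It assumes this tree's Julia 0.7-era setup, where sparse arrays still live in Base (no using statement needed) and the style argument (::SPVM or ::PromoteToSparse) is supplied by the generic broadcast machinery rather than by the caller.

A = sprand(4, 4, 0.3)            # sparse source
dest = similar(A)                # sparse destination with matching axes

broadcast!(identity, dest, A)    # single sparse source, identity, matching axes:
                                 # the new fast path reduces this to copyto!(dest, A)

broadcast!(+, dest, A, 1.5)      # sparse plus scalar: capturescalars folds the scalar into
                                 # parevalf, then re-dispatches to the sparse-only method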