diff --git a/base/abstractarray.jl b/base/abstractarray.jl
index 9b0883b5a9e9e..2def7116b0a2d 100644
--- a/base/abstractarray.jl
+++ b/base/abstractarray.jl
@@ -1768,10 +1768,10 @@ function mapslices(f, A::AbstractArray, dims::AbstractVector)
 end
 
 # These are needed because map(eltype, As) is not inferrable
-promote_eltype_op(::Any) = (@_pure_meta; Any)
-promote_eltype_op(op, A) = (@_pure_meta; promote_op(op, eltype(A)))
-promote_eltype_op(op, A, B) = (@_pure_meta; promote_op(op, eltype(A), eltype(B)))
-promote_eltype_op(op, A, B, C, D...) = (@_pure_meta; promote_eltype_op(op, eltype(A), promote_eltype_op(op, B, C, D...)))
+promote_eltype_op(::Any) = Any
+promote_eltype_op(op, A) = (@_inline_meta; promote_op(op, eltype(A)))
+promote_eltype_op(op, A, B) = (@_inline_meta; promote_op(op, eltype(A), eltype(B)))
+promote_eltype_op(op, A, B, C, D...) = (@_inline_meta; promote_eltype_op(op, eltype(A), promote_eltype_op(op, B, C, D...)))
 
 ## 1 argument
 
diff --git a/base/broadcast.jl b/base/broadcast.jl
index fa09ac08c46e8..fbeb15b1a3542 100644
--- a/base/broadcast.jl
+++ b/base/broadcast.jl
@@ -3,7 +3,7 @@
 module Broadcast
 
 using Base.Cartesian
-using Base: promote_eltype_op, _default_eltype, linearindices, tail, OneTo, to_shape,
+using Base: @pure, promote_eltype_op, _promote_op, linearindices, tail, OneTo, to_shape,
             _msk_end, unsafe_bitgetindex, bitcache_chunks, bitcache_size, dumpbitcache
 import Base: .+, .-, .*, ./, .\, .//, .==, .<, .!=, .<=, .÷, .%, .<<, .>>, .^
 import Base: broadcast
@@ -257,26 +257,22 @@ end
 @inline broadcast_elwise_op(f, As...) =
     broadcast!(f, similar(Array{promote_eltype_op(f, As...)}, broadcast_indices(As...)), As...)
 
-ftype(f, A) = typeof(f)
-ftype(f, A...) = typeof(a -> f(a...))
-ftype(T::DataType, A) = Type{T}
-ftype(T::DataType, A...) = Type{T}
-ziptype(A) = Tuple{eltype(A)}
-ziptype(A, B) = Iterators.Zip2{Tuple{eltype(A)}, Tuple{eltype(B)}}
-@inline ziptype(A, B, C, D...) = Iterators.Zip{Tuple{eltype(A)}, ziptype(B, C, D...)}
+@pure typestuple(a) = Tuple{eltype(a)}
+@pure typestuple(T::Type) = Tuple{Type{T}}
+@pure typestuple(a, b...) = Tuple{typestuple(a).types..., typestuple(b...).types...}
 
 # broadcast methods that dispatch on the type of the final container
-@inline function broadcast_c(f, ::Type{Array}, As...)
-    T = _default_eltype(Base.Generator{ziptype(As...), ftype(f, As...)})
-    shape = broadcast_indices(As...)
+@inline function broadcast_c(f, ::Type{Array}, A, Bs...)
+    T = _promote_op(f, typestuple(A, Bs...))
+    shape = broadcast_indices(A, Bs...)
     iter = CartesianRange(shape)
     if isleaftype(T)
-        return broadcast_t(f, T, shape, iter, As...)
+        return broadcast_t(f, T, shape, iter, A, Bs...)
     end
     if isempty(iter)
         return similar(Array{T}, shape)
     end
-    return broadcast_t(f, Any, shape, iter, As...)
+    return broadcast_t(f, Any, shape, iter, A, Bs...)
 end
 function broadcast_c(f, ::Type{Tuple}, As...)
     shape = broadcast_indices(As...)
@@ -351,7 +347,7 @@ julia> string.(("one","two","three","four"), ": ", 1:4)
 "four: 4"
 ```
 """
-@inline broadcast(f, As...) = broadcast_c(f, containertype(As...), As...)
+@inline broadcast(f, A, Bs...) = broadcast_c(f, containertype(A, Bs...), A, Bs...)
 
 """
     bitbroadcast(f, As...)
diff --git a/base/promotion.jl b/base/promotion.jl
index 2273a3882b50c..9b639d84c16d3 100644
--- a/base/promotion.jl
+++ b/base/promotion.jl
@@ -222,18 +222,22 @@ minmax(x::Real, y::Real) = minmax(promote(x, y)...)
 # operations, so it is advised against overriding them
 _default_type(T::Type) = (@_pure_meta; T)
 
+if isdefined(Core, :Inference)
+    _promote_op(f::ANY, t::ANY) = Core.Inference.return_type(f, t)
+else
+    _promote_op(f::ANY, t::ANY) = Any
+end
+
 promote_op(::Any...) = (@_pure_meta; Any)
 function promote_op{S}(f, ::Type{S})
     @_inline_meta
-    Z = Tuple{_default_type(S)}
-    T = _default_eltype(Generator{Z, typeof(f)})
+    T = _promote_op(f, Tuple{_default_type(S)})
     isleaftype(S) && return isleaftype(T) ? T : Any
     return typejoin(S, T)
 end
 function promote_op{R,S}(f, ::Type{R}, ::Type{S})
     @_inline_meta
-    Z = Iterators.Zip2{Tuple{_default_type(R)}, Tuple{_default_type(S)}}
-    T = _default_eltype(Generator{Z, typeof(a -> f(a...))})
+    T = _promote_op(f, Tuple{_default_type(R), _default_type(S)})
     isleaftype(R) && isleaftype(S) && return isleaftype(T) ? T : Any
     return typejoin(R, S, T)
 end
diff --git a/test/broadcast.jl b/test/broadcast.jl
index 21cee3fed274a..bd20d69c5a20c 100644
--- a/test/broadcast.jl
+++ b/test/broadcast.jl
@@ -341,14 +341,17 @@ end
 immutable StrangeType18623 end
 StrangeType18623(x) = x
 StrangeType18623(x,y) = (x,y)
-@test @inferred broadcast(StrangeType18623, 1:3) == [1,2,3]
-@test @inferred broadcast(StrangeType18623, 1:3, 4:6) == [(1,4),(2,5),(3,6)]
+@test @inferred(broadcast(StrangeType18623, 1:3)) == [1,2,3]
+@test @inferred(broadcast(StrangeType18623, 1:3, 4:6)) == [(1,4),(2,5),(3,6)]
 
 @test typeof(Int.(Number[1, 2, 3])) === typeof((x->Int(x)).(Number[1, 2, 3]))
 
-@test @inferred broadcast(CartesianIndex, 1:2) == [CartesianIndex(1), CartesianIndex(2)]
-@test @inferred broadcast(CartesianIndex, 1:2, 3:4) == [CartesianIndex(1,3), CartesianIndex(2,4)]
+@test @inferred(broadcast(CartesianIndex, 1:2)) == [CartesianIndex(1), CartesianIndex(2)]
+@test @inferred(broadcast(CartesianIndex, 1:2, 3:4)) == [CartesianIndex(1,3), CartesianIndex(2,4)]
 
 # Issue 18622
-@test @inferred muladd.([1.0], [2.0], [3.0])::Vector{Float64} == [5.0]
-@test @inferred tuple.(1:3, 4:6, 7:9)::Vector{Tuple{Int,Int,Int}} == [(1,4,7), (2,5,8), (3,6,9)]
+@test @inferred(broadcast(muladd, [1.0], [2.0], [3.0])) == [5.0]
+@test @inferred(broadcast(tuple, 1:3, 4:6, 7:9)) == [(1,4,7), (2,5,8), (3,6,9)]
+
+# 19419
+@test @inferred(broadcast(round, Int, [1])) == [1]