diff --git a/base/abstractarray.jl b/base/abstractarray.jl
index e937e9ca4208a..f4ec61de51665 100644
--- a/base/abstractarray.jl
+++ b/base/abstractarray.jl
@@ -151,7 +151,6 @@ immutable IndicesList <: IndicesBehavior end # indices like (:cat, :dog,
 
 indicesbehavior(A::AbstractArray) = indicesbehavior(typeof(A))
 indicesbehavior{T<:AbstractArray}(::Type{T}) = IndicesStartAt1()
-indicesbehavior(::Number) = IndicesStartAt1()
 
 abstract IndicesPerformance
 immutable IndicesFast1D <: IndicesPerformance end # indices(A, d) is fast
@@ -412,8 +411,9 @@ end
 promote_indices(a::AbstractArray, b::AbstractArray) = _promote_indices(indicesbehavior(a), indicesbehavior(b), a, b)
 _promote_indices(::IndicesStartAt1, ::IndicesStartAt1, a, b) = a
 _promote_indices(::IndicesBehavior, ::IndicesBehavior, a, b) = throw(ArgumentError("types $(typeof(a)) and $(typeof(b)) do not have promote_indices defined"))
-promote_indices(a::Number, b::AbstractArray) = b
-promote_indices(a::AbstractArray, b::Number) = a
+promote_indices(a, b::AbstractArray) = b
+promote_indices(a::AbstractArray, b) = a
+promote_indices(a, b) = a
 
 # Strip off the index-changing container---this assumes that `parent`
 # performs such an operation. TODO: since few things in Base need this, it
@@ -1459,9 +1459,14 @@ end
 promote_eltype_op(::Any) = (@_pure_meta; Bottom)
 promote_eltype_op{T}(op, ::AbstractArray{T}) = (@_pure_meta; promote_op(op, T))
 promote_eltype_op{T}(op, ::T ) = (@_pure_meta; promote_op(op, T))
+promote_eltype_op{T}(op, Ts::AbstractArray{DataType}, ::AbstractArray{T}) = typejoin((promote_op(op, S, T) for S in Ts)...)
+promote_eltype_op{T}(op, Ts::AbstractArray{DataType}, ::T) = typejoin((promote_op(op, S, T) for S in Ts)...)
 promote_eltype_op{R,S}(op, ::AbstractArray{R}, ::AbstractArray{S}) = (@_pure_meta; promote_op(op, R, S))
 promote_eltype_op{R,S}(op, ::AbstractArray{R}, ::S) = (@_pure_meta; promote_op(op, R, S))
 promote_eltype_op{R,S}(op, ::R, ::AbstractArray{S}) = (@_pure_meta; promote_op(op, R, S))
+promote_eltype_op{R,S}(op, ::Type{R}, ::AbstractArray{S}) = (@_pure_meta; promote_op(op, R, S))
+promote_eltype_op{R,S}(op, ::Type{R}, ::S) = (@_pure_meta; promote_op(op, R, S))
+promote_eltype_op{R,S}(op, ::R, ::S) = (@_pure_meta; promote_op(op, R, S))
 promote_eltype_op(op, A, B, C, D...) = (@_pure_meta; promote_op(op, eltype(A), promote_eltype_op(op, B, C, D...)))
 
 ## 1 argument
diff --git a/base/broadcast.jl b/base/broadcast.jl
index 0c0165fac300e..ff7f3b0e716d4 100644
--- a/base/broadcast.jl
+++ b/base/broadcast.jl
@@ -13,8 +13,9 @@ export broadcast_getindex, broadcast_setindex!
 ## Calculate the broadcast shape of the arguments, or error if incompatible
 # array inputs
 broadcast_shape() = ()
-broadcast_shape(A) = shape(A)
-@inline broadcast_shape(A, B...) = broadcast_shape((), shape(A), map(shape, B)...)
+broadcast_shape(A) = ()
+broadcast_shape(A::AbstractArray) = shape(A)
+@inline broadcast_shape(A, B...) = broadcast_shape((), broadcast_shape(A), map(broadcast_shape, B)...)
 # shape inputs
 broadcast_shape(shape::Tuple) = shape
 @inline broadcast_shape(shape::Tuple, shape1::Tuple, shapes::Tuple...) = broadcast_shape(_bcs((), shape, shape1), shapes...)
@@ -40,7 +41,7 @@ _bcsm(a::Number, b::Number) = a == b || b == 1
 ## Check that all arguments are broadcast compatible with shape
 # comparing one input against a shape
 check_broadcast_shape(shp) = nothing
-check_broadcast_shape(shp, A) = check_broadcast_shape(shp, shape(A))
+check_broadcast_shape(shp, A) = check_broadcast_shape(shp, broadcast_shape(A))
 check_broadcast_shape(::Tuple{}, ::Tuple{}) = nothing
 check_broadcast_shape(shp, ::Tuple{}) = nothing
 check_broadcast_shape(::Tuple{}, Ashp::Tuple) = throw(DimensionMismatch("cannot broadcast array to have fewer dimensions"))
@@ -63,8 +64,8 @@ end
 @inline _newindex(out, I) = out # can truncate if indexmap is shorter than I
 @inline _newindex(out, I, keep::Bool, indexmap...) = _newindex((out..., ifelse(keep, I[1], 1)), tail(I), indexmap...)
 
-newindexer(sz, x::Number) = ()
-@inline newindexer(sz, A) = _newindexer(sz, size(A))
+newindexer(sz, x) = ()
+@inline newindexer(sz, A::AbstractArray) = _newindexer(sz, size(A))
 @inline _newindexer(sz, szA::Tuple{}) = ()
 @inline _newindexer(sz, szA) = (sz[1] == szA[1], _newindexer(tail(sz), tail(szA))...)
 
@@ -79,6 +80,9 @@ const bitcache_size = 64 * bitcache_chunks # do not change this
 dumpbitcache(Bc::Vector{UInt64}, bind::Int, C::Vector{Bool}) =
     Base.copy_to_bitarray_chunks!(Bc, ((bind - 1) << 6) + 1, C, 1, min(bitcache_size, (length(Bc)-bind+1) << 6))
 
+@inline _broadcast_getvals(A, I) = A # default for non-indexable types
+@inline _broadcast_getvals(A::AbstractArray, I) = A[I]
+
 ## Broadcasting core
 # nargs encodes the number of As arguments (which matches the number
 # of indexmaps). The first two type parameters are to ensure specialization.
@@ -92,7 +96,7 @@ dumpbitcache(Bc::Vector{UInt64}, bind::Int, C::Vector{Bool}) =
             # reverse-broadcast the indices
             @nexprs $nargs i->(I_i = newindex(I, imap_i))
             # extract array values
-            @nexprs $nargs i->(@inbounds val_i = A_i[I_i])
+            @nexprs $nargs i->(@inbounds val_i = _broadcast_getvals(A_i, I_i))
             # call the function and store the result
             @inbounds B[I] = @ncall $nargs f val
         end
diff --git a/base/float.jl b/base/float.jl
index 6d507f2d6b34b..232e3fff2cf41 100644
--- a/base/float.jl
+++ b/base/float.jl
@@ -199,6 +199,13 @@ promote_rule(::Type{Float64}, ::Type{Float32}) = Float64
 widen(::Type{Float16}) = Float32
 widen(::Type{Float32}) = Float64
 
+promote_op{Op<:typeof(trunc),T<:Union{Float32,Float64}}(::Op, ::Type{Signed}, ::Type{T}) = Int
+promote_op{Op<:typeof(trunc),T<:Union{Float32,Float64}}(::Op, ::Type{Unsigned}, ::Type{T}) = UInt
+promote_op{Op<:typeof(trunc),R,S}(::Op, ::Type{R}, ::Type{S}) = R
+for f in (ceil, floor, round)
+    @eval promote_op{Op<:$(typeof(f)),R,S}(::Op, ::Type{R}, ::Type{S}) = promote_op($trunc, R, S)
+end
+
 ## floating point arithmetic ##
 -(x::Float32) = box(Float32,neg_float(unbox(Float32,x)))
 -(x::Float64) = box(Float64,neg_float(unbox(Float64,x)))
diff --git a/base/parse.jl b/base/parse.jl
index 1da9b742457e3..5387ee55d26e7 100644
--- a/base/parse.jl
+++ b/base/parse.jl
@@ -194,3 +194,5 @@ function parse(str::AbstractString; raise::Bool=true)
     end
     return ex
 end
+
+promote_op{Op<:typeof(parse),R,S}(::Op, ::Type{R}, ::Type{S}) = R
diff --git a/base/promotion.jl b/base/promotion.jl
index 7b6ebf0f406cb..2ba07567c81c2 100644
--- a/base/promotion.jl
+++ b/base/promotion.jl
@@ -221,10 +221,11 @@ minmax(x::Real, y::Real) = minmax(promote(x, y)...)
 # as needed. For example, if you need to provide a custom result type
 # for the multiplication of two types,
 # promote_op{R<:MyType,S<:MyType}(::typeof(*), ::Type{R}, ::Type{S}) = MyType{multype(R,S)}
-promote_op(::Any) = (@_pure_meta; Bottom)
-promote_op(::Any, T) = (@_pure_meta; T)
+promote_op(::Any) = (@_pure_meta; Any)
+promote_op(::Any, T) = (@_pure_meta; Any)
 promote_op{T}(::Type{T}, ::Any) = (@_pure_meta; T)
 promote_op{R,S}(::Any, ::Type{R}, ::Type{S}) = (@_pure_meta; promote_type(R, S))
+promote_op{Op<:typeof(convert),R,S}(::Op, ::Type{R}, ::Type{S}) = (@_pure_meta; R)
 promote_op(op, T, S, U, V...) = (@_pure_meta; promote_op(op, T, promote_op(op, S, U, V...)))
 
 ## catch-alls to prevent infinite recursion when definitions are missing ##