Skip to content
This repository has been archived by the owner on Mar 12, 2021. It is now read-only.

Commit

Permalink
Initialization micro optimizations.
Browse files Browse the repository at this point in the history
  • Loading branch information
maleadt committed Mar 10, 2020
1 parent 95a35a7 commit 8259597
Show file tree
Hide file tree
Showing 9 changed files with 11 additions and 10 deletions.
2 changes: 1 addition & 1 deletion src/CuArrays.jl
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ function functional(show_reason::Bool=false)
if configured[] === nothing
_functional(show_reason)
end
configured[]
configured[]::Bool
end

const configure_lock = ReentrantLock()
Expand Down
5 changes: 3 additions & 2 deletions src/blas/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,9 @@ end
end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
# make sure the calling thread has an active context
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/dnn/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/fft/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/memory.jl
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ synchronized right before and after executing `ex` to exclude any external effec
macro time(ex)
quote
# @time might surround an application, so be sure to initialize CUDA before that
CUDAnative.context()
CUDAnative.initialize_context()

# coarse synchronization to exclude effects from previously-executed code
CUDAdrv.synchronize()
Expand Down
2 changes: 1 addition & 1 deletion src/rand/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/solver/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/sparse/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down
2 changes: 1 addition & 1 deletion src/tensor/error.jl
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ end

function initialize_api()
# make sure the calling thread has an active context
CUDAnative.context()
CUDAnative.initialize_context()
end

macro check(ex)
Expand Down

0 comments on commit 8259597

Please sign in to comment.