From b09697f1665b414aa66671f88910b5c30e963913 Mon Sep 17 00:00:00 2001 From: Johannes Terblanche Date: Thu, 2 Jul 2020 12:36:54 +0200 Subject: [PATCH] WIP #403 - rename Elements->Blob --- src/BigData/BigData.jl | 5 +- .../entities/AbstractBigDataEntries.jl | 23 ++-- src/BigData/entities/InMemoryDataStore.jl | 2 +- .../services/AbstractBigDataEntries.jl | 103 ++++++++------ src/BigData/services/AbstractDataStore.jl | 24 ++-- src/BigData/services/FileDataStore.jl | 47 +++++-- src/BigData/services/InMemoryDataStore.jl | 15 +- src/Deprecated.jl | 11 ++ src/DistributedFactorGraphs.jl | 6 +- test/DataStoreTests.jl | 107 +++++++++------ test/consol_BigDataTests.jl | 129 ++++++++++++++++++ test/iifInterfaceTests.jl | 8 +- test/runtests.jl | 1 + test/testBlocks.jl | 42 +++--- 14 files changed, 367 insertions(+), 156 deletions(-) create mode 100644 test/consol_BigDataTests.jl diff --git a/src/BigData/BigData.jl b/src/BigData/BigData.jl index 22b5a5de..9e35d4dd 100644 --- a/src/BigData/BigData.jl +++ b/src/BigData/BigData.jl @@ -10,8 +10,9 @@ include("services/FileDataStore.jl") export AbstractDataStore -export AbstractBigDataEntry, GeneralBigDataEntry, MongodbBigDataEntry, FileBigDataEntry +export AbstractBigDataEntry, GeneralDataEntry, MongodbDataEntry, FileDataEntry export InMemoryDataStore, FileDataStore -export getBigData, addBigData!, updateBigData!, deleteBigData!, listStoreEntries +export getData, addData!, updateData!, deleteData!, listStoreEntries +export getDataBlob, addDataBlob!, updateDataBlob!, deleteDataBlob!, listDataBlobs export copyStore diff --git a/src/BigData/entities/AbstractBigDataEntries.jl b/src/BigData/entities/AbstractBigDataEntries.jl index 708baab3..944e2dda 100644 --- a/src/BigData/entities/AbstractBigDataEntries.jl +++ b/src/BigData/entities/AbstractBigDataEntries.jl @@ -3,7 +3,7 @@ GeneralBigDataEntry is a generic multipurpose data entry that creates a unique reproducible key using userId_robotId_sessionId_variableId_key. """ -mutable struct GeneralBigDataEntry <: AbstractBigDataEntry +mutable struct GeneralDataEntry <: AbstractBigDataEntry key::Symbol storeKey::Symbol # Could swap this to string, but using it as an index later, so better as a symbol I believe. createdTimestamp::DateTime @@ -21,32 +21,39 @@ function _uniqueKey(dfg::G, v::V, key::Symbol)::Symbol where {G <: AbstractDFG, return Symbol(key) end -GeneralBigDataEntry(key::Symbol, storeKey::Symbol; + +GeneralDataEntry(key::Symbol, storeKey::Symbol; mimeType::String="application/octet-stream") = - GeneralBigDataEntry(key, storeKey, now(), now(), mimeType) + GeneralDataEntry(key, storeKey, now(), now(), mimeType) -function GeneralBigDataEntry(dfg::G, var::V, key::Symbol; +function GeneralDataEntry(dfg::G, var::V, key::Symbol; mimeType::String="application/octet-stream") where {G <: AbstractDFG, V <: AbstractDFGVariable} - return GeneralBigDataEntry(key, _uniqueKey(dfg, var, key), mimeType=mimeType) + return GeneralDataEntry(key, _uniqueKey(dfg, var, key), mimeType=mimeType) end +@deprecate GeneralBigDataEntry(args...; kwargs...) GeneralDataEntry(args...; kwargs...) + """ $(TYPEDEF) BigDataEntry in MongoDB. """ -struct MongodbBigDataEntry <: AbstractBigDataEntry +struct MongodbDataEntry <: AbstractBigDataEntry key::Symbol oid::NTuple{12, UInt8} #mongodb object id #maybe other fields such as: #flags::Bool ready, valid, locked, permissions - #MIMEType::Symbol + #MIMEType::String end +@deprecate MongodbBigDataEntry(args...) MongodbDataEntry(args...) + """ $(TYPEDEF) BigDataEntry in a file. 
""" -struct FileBigDataEntry <: AbstractBigDataEntry +struct FileDataEntry <: AbstractBigDataEntry key::Symbol filename::String end + +@deprecate FileBigDataEntry(args...) FileDataEntry(args...) diff --git a/src/BigData/entities/InMemoryDataStore.jl b/src/BigData/entities/InMemoryDataStore.jl index c1a01623..9dfbe72a 100644 --- a/src/BigData/entities/InMemoryDataStore.jl +++ b/src/BigData/entities/InMemoryDataStore.jl @@ -20,5 +20,5 @@ end Create an in-memory store using binary data (UInt8) as a type. """ function InMemoryDataStore() - return InMemoryDataStore{Vector{UInt8}, GeneralBigDataEntry}() + return InMemoryDataStore{Vector{UInt8}, GeneralDataEntry}() end diff --git a/src/BigData/services/AbstractBigDataEntries.jl b/src/BigData/services/AbstractBigDataEntries.jl index 48e9fbb2..5c4c33f1 100644 --- a/src/BigData/services/AbstractBigDataEntries.jl +++ b/src/BigData/services/AbstractBigDataEntries.jl @@ -1,6 +1,6 @@ import Base: == -function ==(a::GeneralBigDataEntry, b::GeneralBigDataEntry) +function ==(a::GeneralDataEntry, b::GeneralDataEntry) return a.key == b.key && a.storeKey == b.storeKey && a.mimeType == b.mimeType && @@ -8,11 +8,11 @@ function ==(a::GeneralBigDataEntry, b::GeneralBigDataEntry) Dates.value(a.lastUpdatedTimestamp - b.lastUpdatedTimestamp) < 1000 #1 second end -function ==(a::MongodbBigDataEntry, b::MongodbBigDataEntry) +function ==(a::MongodbDataEntry, b::MongodbDataEntry) return a.key == b.key && a.oid == b.oid end -function ==(a::FileBigDataEntry, b::FileBigDataEntry) +function ==(a::FileDataEntry, b::FileDataEntry) return a.key == b.key && a.filename == b.filename end @@ -20,22 +20,23 @@ end $(SIGNATURES) Add Big Data Entry to a DFG variable """ -function addBigDataEntry!(var::AbstractDFGVariable, bde::AbstractBigDataEntry) +function addDataEntry!(var::AbstractDFGVariable, bde::AbstractBigDataEntry) haskey(var.bigData,bde.key) && error("BigData entry $(bde.key) already exists in variable") var.bigData[bde.key] = bde return bde end + """ $(SIGNATURES) Add Big Data Entry to distributed factor graph. Should be extended if DFG variable is not returned by reference. """ -function addBigDataEntry!(dfg::AbstractDFG, label::Symbol, bde::AbstractBigDataEntry) +function addDataEntry!(dfg::AbstractDFG, label::Symbol, bde::AbstractBigDataEntry) return addBigDataEntry!(getVariable(dfg, label), bde) end -addDataEntry!(x...) = addBigDataEntry!(x...) +@deprecate addBigDataEntry!(args...) addDataEntry!(args...) """ $(SIGNATURES) @@ -50,22 +51,22 @@ Related addData!, getDataEntryElement, fetchData """ -function addDataEntry!(dfg::AbstractDFG, - lbl::Symbol, - datastore::Union{FileDataStore, InMemoryDataStore}, - descr::Symbol, - mimeType::AbstractString, - data::Vector{UInt8} ) +function addData!(dfg::AbstractDFG, + lbl::Symbol, + datastore::Union{FileDataStore, InMemoryDataStore}, + descr::Symbol, + mimeType::AbstractString, + data::Vector{UInt8} ) # node = isVariable(dfg, lbl) ? getVariable(dfg, lbl) : getFactor(dfg, lbl) # Make a big data entry in the graph - use JSON2 to just write this - entry = GeneralBigDataEntry(dfg, node, descr, mimeType=mimeType) + entry = GeneralDataEntry(dfg, node, descr, mimeType=mimeType) # Set it in the store - addBigData!(datastore, entry, data) + addDataBlob!(datastore, entry, data) # Add the entry to the graph - addBigDataEntry!(node, entry) + addDataEntry!(node, entry) end -const addData! = addDataEntry! +# const addDataEntry! = addData! 
""" $SIGNATURES @@ -79,15 +80,17 @@ const hasBigDataEntry = hasDataEntry $(SIGNATURES) Get big data entry """ -function getBigDataEntry(var::AbstractDFGVariable, key::Symbol) +function getDataEntry(var::AbstractDFGVariable, key::Symbol) !hasDataEntry(var, key) && (error("BigData entry $(key) does not exist in variable"); return nothing) return var.bigData[key] end -function getBigDataEntry(dfg::AbstractDFG, label::Symbol, key::Symbol) +function getDataEntry(dfg::AbstractDFG, label::Symbol, key::Symbol) return getBigDataEntry(getVariable(dfg, label), key) end +@deprecate getBigDataEntry(args...) getDataEntry(args...) + """ $SIGNATURES Get both the entry and raw data element from datastore returning as a tuple. @@ -125,24 +128,26 @@ Related addDataEntry!, addData!, fetchData, fetchDataEntryElement """ -function getDataEntryElement(dfg::AbstractDFG, - dfglabel::Symbol, - datastore::Union{FileDataStore, InMemoryDataStore}, - datalabel::Symbol) +function getDataEntryBlob(dfg::AbstractDFG, + dfglabel::Symbol, + datastore::Union{FileDataStore, InMemoryDataStore}, + datalabel::Symbol) # vari = getVariable(dfg, dfglabel) if !hasDataEntry(vari, datalabel) - @error "missing data entry $datalabel in $dfglabel" - return nothing, nothing + # current standards is to fail hard + error("missing data entry $datalabel in $dfglabel") + # return nothing, nothing end - entry = getBigDataEntry(vari, datalabel) - element = getBigData(datastore, entry) + entry = getDataEntry(vari, datalabel) + element = getDataBlob(datastore, entry) return entry, element end -const fetchDataEntryElement = getDataEntryElement -const fetchData = getDataEntryElement - +const fetchDataEntryElement = getDataEntryBlob +const fetchData = getDataEntryBlob +# +@deprecate getDataEntryElement(args...) getDataEntryBlob(args...) """ $(SIGNATURES) @@ -151,16 +156,18 @@ Update big data entry DevNote - DF, unclear if `update` verb is applicable in this case, see #404 """ -function updateBigDataEntry!(var::AbstractDFGVariable, bde::AbstractBigDataEntry) +function updateDataEntry!(var::AbstractDFGVariable, bde::AbstractBigDataEntry) !haskey(var.bigData,bde.key) && (@warn "$(bde.key) does not exist in variable, adding") var.bigData[bde.key] = bde return bde end -function updateBigDataEntry!(dfg::AbstractDFG, label::Symbol, bde::AbstractBigDataEntry) +function updateDataEntry!(dfg::AbstractDFG, label::Symbol, bde::AbstractBigDataEntry) # !isVariable(dfg, label) && return nothing - return updateBigDataEntry!(getVariable(dfg, label), bde) + return updateDataEntry!(getVariable(dfg, label), bde) end +@deprecate updateBigDataEntry!(args...) updateDataEntry!(args...) + """ $(SIGNATURES) Delete big data entry from the factor graph. @@ -169,46 +176,52 @@ Note this doesn't remove it from any data stores. 
Notes: - users responsibility to delete big data in db before deleting entry """ -function deleteBigDataEntry!(var::AbstractDFGVariable, key::Symbol) - bde = getBigDataEntry(var, key) +function deleteDataEntry!(var::AbstractDFGVariable, key::Symbol) + bde = getDataEntry(var, key) bde == nothing && return nothing delete!(var.bigData, key) return var end -function deleteBigDataEntry!(dfg::AbstractDFG, label::Symbol, key::Symbol) +function deleteDataEntry!(dfg::AbstractDFG, label::Symbol, key::Symbol) #users responsibility to delete big data in db before deleting entry !isVariable(dfg, label) && return nothing - return deleteBigDataEntry!(getVariable(dfg, label), key) + return deleteDataEntry!(getVariable(dfg, label), key) end -function deleteBigDataEntry!(var::AbstractDFGVariable, entry::AbstractBigDataEntry) +function deleteDataEntry!(var::AbstractDFGVariable, entry::AbstractBigDataEntry) #users responsibility to delete big data in db before deleting entry - return deleteBigDataEntry!(var, entry.key) + return deleteDataEntry!(var, entry.key) end +@deprecate deleteBigDataEntry!(args...) deleteDataEntry!(args...) + """ $(SIGNATURES) Get big data entries, Vector{AbstractBigDataEntry} """ -function getBigDataEntries(var::AbstractDFGVariable) +function getDataEntries(var::AbstractDFGVariable) #or should we return the iterator, Base.ValueIterator{Dict{Symbol,AbstractBigDataEntry}}? collect(values(var.bigData)) end -function getBigDataEntries(dfg::AbstractDFG, label::Symbol) +function getDataEntries(dfg::AbstractDFG, label::Symbol) !isVariable(dfg, label) && return nothing #or should we return the iterator, Base.ValueIterator{Dict{Symbol,AbstractBigDataEntry}}? - getBigDataEntries(getVariable(dfg, label)) + getDataEntries(getVariable(dfg, label)) end +@deprecate getBigDataEntries(args...) getDataEntries(args...) + """ $(SIGNATURES) -getBigDataKeys +listDataEntries """ -function getBigDataKeys(var::AbstractDFGVariable) +function listDataEntries(var::AbstractDFGVariable) collect(keys(var.bigData)) end -function getBigDataKeys(dfg::AbstractDFG, label::Symbol) +function listDataEntries(dfg::AbstractDFG, label::Symbol) !isVariable(dfg, label) && return nothing - getBigDataKeys(getVariable(dfg, label)) + listDataEntries(getVariable(dfg, label)) end + +@deprecate getBigDataKeys(args...) listDataEntries(args...) diff --git a/src/BigData/services/AbstractDataStore.jl b/src/BigData/services/AbstractDataStore.jl index 7640bfa0..639281a1 100644 --- a/src/BigData/services/AbstractDataStore.jl +++ b/src/BigData/services/AbstractDataStore.jl @@ -2,8 +2,8 @@ $(SIGNATURES) Get the data for the specified entry, returns the data or Nothing. """ -function getBigData(store::D, entry::E)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} - error("$(typeof(store)) doesn't override 'getData'.") +function getDataBlob(store::D, entry::E)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} + error("$(typeof(store)) doesn't override 'getDataBlob'.") end """ @@ -11,8 +11,8 @@ end Adds the data to the store with the given entry. The function will warn if the entry already exists and will overwrite it. 
""" -function addBigData!(store::D, entry::E, data::T)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} - error("$(typeof(store)) doesn't override 'addData!'.") +function addDataBlob!(store::D, entry::E, data::T)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} + error("$(typeof(store)) doesn't override 'addDataBlob!'.") end """ @@ -20,8 +20,8 @@ end Update the data in the store. The function will error and return nothing if the entry does not exist. """ -function updateBigData!(store::D, entry::E, data::T)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} - error("$(typeof(store)) doesn't override 'updateData!'.") +function updateDataBlob!(store::D, entry::E, data::T)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} + error("$(typeof(store)) doesn't override 'updateDataBlob!'.") end """ @@ -29,16 +29,16 @@ end Delete the data in the store for the given entry. The function will error and return nothing if the entry does not exist. """ -function deleteBigData!(store::D, entry::E)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} - error("$(typeof(store)) doesn't override 'deleteData!'.") +function deleteDataBlob!(store::D, entry::E)::Union{Nothing, T} where {T, D <: AbstractDataStore{T}, E <: AbstractBigDataEntry} + error("$(typeof(store)) doesn't override 'deleteDataBlob!'.") end """ $(SIGNATURES) List all entries in the data store. """ -function listStoreEntries(store::D)::Vector{E} where {D <: AbstractDataStore, E <: AbstractBigDataEntry} - error("$(typeof(store)) doesn't override 'listEntries'.") +function listDataBlobs(store::D) where D <: AbstractDataStore + error("$(typeof(store)) doesn't override 'listDataBlobs'.") end """ @@ -49,13 +49,13 @@ Returns the list of copied entries. 
""" function copyStore(sourceStore::D1, destStore::D2; sourceEntries=listEntries(sourceStore))::Vector{E} where {T, D1 <: AbstractDataStore{T}, D2 <: AbstractDataStore{T}, E <: AbstractBigDataEntry} # Quick check - destEntries = listStoreEntries(destStore) + destEntries = listDataBlobs(destStore) typeof(sourceEntries) != typeof(destEntries) && error("Can't copy stores, source has entries of type $(typeof(sourceEntries)), destination has entries of type $(typeof(destEntries)).") # Same source/destination check sourceStore == destStore && error("Can't specify same store for source and destination.") # Otherwise, continue for sourceEntry in sourceEntries - addBigData!(destStore, deepcopy(sourceEntry), getBigData(sourceStore, sourceEntry)) + addDataBlob!(destStore, deepcopy(sourceEntry), getDataBlob(sourceStore, sourceEntry)) end return sourceEntries end diff --git a/src/BigData/services/FileDataStore.jl b/src/BigData/services/FileDataStore.jl index 781b4f06..64710962 100644 --- a/src/BigData/services/FileDataStore.jl +++ b/src/BigData/services/FileDataStore.jl @@ -1,39 +1,56 @@ +##============================================================================== +## FileDataStore Common +##============================================================================== searchdir(path,key) = filter(x->occursin(key,x), readdir(path)) filename(store::FileDataStore, entry::E) where E <: AbstractBigDataEntry = "$(store.folder)/$(entry.storeKey).dat" -function readentry(store::FileDataStore, entry::E) where E <: AbstractBigDataEntry + +function readentry(store::FileDataStore, entry::AbstractBigDataEntry) open(filename(store, entry)) do f return read(f) end end -function writeentry(store::FileDataStore, entry::E, data::Vector{UInt8}) where E <: AbstractBigDataEntry + +function writeentry(store::FileDataStore, entry::AbstractBigDataEntry, data::Vector{UInt8}) open(filename(store, entry), "w") do f write(f, data) end end -function getBigData(store::FileDataStore, entry::E)::Union{Vector{UInt8}, Nothing} where {E <: AbstractBigDataEntry} +##============================================================================== +## FileDataStore CRUD +##============================================================================== + +function getDataBlob(store::FileDataStore, entry::AbstractBigDataEntry)::Union{Vector{UInt8}, Nothing} length(searchdir(store.folder, String(entry.storeKey)*".dat")) !=1 && (@warn "Could not find unique file for key '$(entry.storeKey)'."; return nothing) return readentry(store, entry) end -function addBigData!(store::FileDataStore, entry::E, data::Vector{UInt8})::Vector{UInt8} where {E <: AbstractBigDataEntry} - length(searchdir(store.folder, String(entry.storeKey)*".dat")) !=0 && @warn "Key '$(entry.storeKey)' already exists, overwriting!." 
+function addDataBlob!(store::FileDataStore, entry::AbstractBigDataEntry, data::Vector{UInt8})::Vector{UInt8} + length(searchdir(store.folder, String(entry.storeKey)*".dat")) !=0 && error("Key '$(entry.storeKey)' already exists.") writeentry(store, entry, data) # Update timestamp entry.lastUpdatedTimestamp = now() - return getBigData(store, entry) + return getDataBlob(store, entry) end -function updateBigData!(store::FileDataStore, entry::E, data::Vector{UInt8})::Union{Vector{UInt8}, Nothing} where {E <: AbstractBigDataEntry} - length(searchdir(store.folder, String(entry.storeKey)*".dat")) !=1 && (@warn "Could not find unique file for key '$(entry.storeKey)'."; return nothing) - writeentry(store, entry, data) - # Update timestamp - entry.lastUpdatedTimestamp = now() - return getBigData(store, entry) +function updateDataBlob!(store::FileDataStore, entry::AbstractBigDataEntry, data::Vector{UInt8})::Union{Vector{UInt8}, Nothing} + n_entries = length(searchdir(store.folder, String(entry.storeKey)*".dat")) + if n_entries > 1 + error("Could not find unique file for key '$(entry.storeKey)'.") + # return nothing + elseif n_entries == 0 + @warn "Entry '$(entry.storeKey)' does not exist, adding." + return addDataBlob!(store, entry, data) + else + writeentry(store, entry, data) + # Update timestamp + entry.lastUpdatedTimestamp = now() + return getDataBlob(store, entry) + end end -function deleteBigData!(store::FileDataStore, entry::E)::Vector{UInt8} where {E <: AbstractBigDataEntry} - data = getBigData(store, entry) +function deleteDataBlob!(store::FileDataStore, entry::AbstractBigDataEntry)::Vector{UInt8} + data = getDataBlob(store, entry) data == nothing && return nothing rm(filename(store, entry)) return data @@ -41,6 +58,6 @@ end # TODO: Manifest file # -# function listStoreEntries(store::FileDataStore)::Vector{E} where {E <: AbstractBigDataEntry} +# function listDataBlobs(store::FileDataStore)::Vector{E} where {E <: AbstractBigDataEntry} # return collect(values(store.entries)) # end diff --git a/src/BigData/services/InMemoryDataStore.jl b/src/BigData/services/InMemoryDataStore.jl index 5ab0131c..70f66e20 100644 --- a/src/BigData/services/InMemoryDataStore.jl +++ b/src/BigData/services/InMemoryDataStore.jl @@ -1,9 +1,12 @@ -function getBigData(store::InMemoryDataStore{T, E}, entry::E)::Union{T, Nothing} where {T, E <: AbstractBigDataEntry} +##============================================================================== +## InMemoryDataStore CRUD +##============================================================================== +function getDataBlob(store::InMemoryDataStore{T, E}, entry::E)::Union{T, Nothing} where {T, E <: AbstractBigDataEntry} !haskey(store.data, entry.storeKey) && return nothing return store.data[entry.storeKey] end -function addBigData!(store::InMemoryDataStore{T, E}, entry::E, data::T)::T where {T, E <: AbstractBigDataEntry} +function addDataBlob!(store::InMemoryDataStore{T, E}, entry::E, data::T)::T where {T, E <: AbstractBigDataEntry} haskey(store.entries, entry.storeKey) && @warn "Key '$(entry.storeKey)' already exists in the data store, overwriting!" 
store.entries[entry.storeKey] = entry # Update timestamp @@ -11,7 +14,7 @@ function addBigData!(store::InMemoryDataStore{T, E}, entry::E, data::T)::T where return store.data[entry.storeKey] = data end -function updateBigData!(store::InMemoryDataStore{T, E}, entry::E, data::T)::Union{T, Nothing} where {T, E <: AbstractBigDataEntry} +function updateDataBlob!(store::InMemoryDataStore{T, E}, entry::E, data::T)::Union{T, Nothing} where {T, E <: AbstractBigDataEntry} !haskey(store.entries, entry.storeKey) && (@error "Key '$(entry.storeKey)' doesn't exist in the data store!"; return nothing) store.entries[entry.storeKey] = entry # Update timestamp @@ -19,14 +22,14 @@ function updateBigData!(store::InMemoryDataStore{T, E}, entry::E, data::T)::Unio return store.data[entry.storeKey] = data end -function deleteBigData!(store::InMemoryDataStore{T, E}, entry::E)::T where {T, E <: AbstractBigDataEntry} - data = getBigData(store, entry) +function deleteDataBlob!(store::InMemoryDataStore{T, E}, entry::E)::T where {T, E <: AbstractBigDataEntry} + data = getDataBlob(store, entry) data == nothing && return nothing delete!(store.data, entry.storeKey) delete!(store.entries, entry.storeKey) return data end -function listStoreEntries(store::InMemoryDataStore{T, E})::Vector{E} where {T, E <: AbstractBigDataEntry} +function listDataBlobs(store::InMemoryDataStore{T, E})::Vector{E} where {T, E <: AbstractBigDataEntry} return collect(values(store.entries)) end diff --git a/src/Deprecated.jl b/src/Deprecated.jl index c9b1ca06..93cd3bc9 100644 --- a/src/Deprecated.jl +++ b/src/Deprecated.jl @@ -44,3 +44,14 @@ function buildSubgraphFromLabels!(dfg::AbstractDFG, """) end + + +##============================================================================== +## Remove in 0.10 +##============================================================================== + +@deprecate getBigData(args...) getDataBlob(args...) +@deprecate addBigData!(args...) addDataBlob!(args...) +@deprecate deleteBigData!(args...) deleteDataBlob!(args...) +@deprecate listStoreEntries(args...) listDataBlobs(args...) +@deprecate updateBigData!(args...) updateDataBlob!(args...) diff --git a/src/DistributedFactorGraphs.jl b/src/DistributedFactorGraphs.jl index 1aefb79b..bc1b1251 100644 --- a/src/DistributedFactorGraphs.jl +++ b/src/DistributedFactorGraphs.jl @@ -169,11 +169,11 @@ export findFactorsBetweenNaive export copyGraph!, deepcopyGraph, deepcopyGraph!, buildSubgraph, mergeGraph! 
# Big Data ##------------------------------------------------------------------------------ -export addBigDataEntry!, getBigDataEntry, updateBigDataEntry!, deleteBigDataEntry!, getBigDataEntries, getBigDataKeys, hasDataEntry, hasBigDataEntry +export addDataEntry!, getDataEntry, updateDataEntry!, deleteDataEntry!, getDataEntries, listDataEntries, hasDataEntry, hasDataEntry # convenience wrappers -export addDataEntry!, getDataEntryElement +export addDataEntry!, getDataEntryBlob # aliases -export addData!, fetchData, fetchDataEntryElement +export addData!, fetchData, fetchDataEntryBlob ##------------------------------------------------------------------------------ diff --git a/test/DataStoreTests.jl b/test/DataStoreTests.jl index 15626f3e..b2c9ba4a 100644 --- a/test/DataStoreTests.jl +++ b/test/DataStoreTests.jl @@ -1,52 +1,81 @@ -using DistributedFactorGraphs +if false using Test +using GraphPlot +using Neo4j +using DistributedFactorGraphs +using Pkg +using Dates +using UUIDs + +include("testBlocks.jl") +testDFGAPI = CloudGraphsDFG +testDFGAPI = LightDFG +end # Build a basic graph. -dfg = LightDFG{NoSolverParams}() +dfg = testDFGAPI{NoSolverParams}() #add types for softtypes -struct TestInferenceVariable1 <: InferenceVariable end -v1 = DFGVariable(:a, TestInferenceVariable1()) -v2 = DFGVariable(:b, TestInferenceVariable1()) -f1 = DFGFactor{TestFunctorInferenceType1}(:f1, [:a,:b]) -#add tags for filters -union!(v1.tags, [:VARIABLE, :POSE]) -union!(v2.tags, [:VARIABLE, :LANDMARK]) -union!(f1.tags, [:FACTOR]) -# @testset "Creating Graphs" begin -addVariable!(dfg, v1) -addVariable!(dfg, v2) -addFactor!(dfg, f1) +# struct TestInferenceVariable1 <: InferenceVariable end +# v1 = DFGVariable(:a, TestInferenceVariable1()) +# v2 = DFGVariable(:b, TestInferenceVariable1()) +# f1 = DFGFactor{TestFunctorInferenceType1}(:f1, [:a,:b]); +# #add tags for filters +# union!(v1.tags, [:VARIABLE, :POSE]) +# union!(v2.tags, [:VARIABLE, :LANDMARK]) +# union!(f1.tags, [:FACTOR]) +# # @testset "Creating Graphs" begin + +var1, var2, var3, vorphan, v1_tags = DFGVariableSCA() +fac0, fac1, fac2 = DFGFactorSCA() + +addVariable!(dfg, var1) +addVariable!(dfg, var2) +addFactor!(dfg, fac1) # Stores to test testStores = [InMemoryDataStore(), FileDataStore("/tmp/dfgFilestore")] +if false +testStore = testStores[1] +testStore = testStores[2] +end + for testStore in testStores # Create a data store and a dataset - ds = testStore - # ds = FileDataStore("/tmp/filestore") - dataset = rand(UInt8, 1000) - dataset2 = rand(UInt8, 1000) - - entry1 = GeneralBigDataEntry(dfg, v1, :test1) - # Set it in the store - @test addBigData!(ds, entry1, dataset) == dataset - @test getBigData(ds, entry1) == dataset - # Now add it to the variable - @test addBigDataEntry!(v1, entry1) == entry1 - @test entry1 in getBigDataEntries(v1) - # Update test - copyEntry = deepcopy(entry1) - sleep(0.1) - @test updateBigData!(ds, entry1, dataset2) == dataset2 - # Data updated? - @test getBigData(ds, entry1) == dataset2 - # Timestamp updated? 
- @test entry1.lastUpdatedTimestamp > copyEntry.lastUpdatedTimestamp - # Delete data - @test deleteBigData!(ds, entry1) == dataset2 - # Delete entry - @test deleteBigDataEntry!(v1, entry1) == v1 - #TODO +ds = testStore +# ds = FileDataStore("/tmp/filestore") +dataset = rand(UInt8, 1000) +dataset2 = rand(UInt8, 1000) + +entry1 = GeneralDataEntry(dfg, var1, :test1) +# Set it in the store +@test addDataBlob!(ds, entry1, dataset) == dataset +@test getDataBlob(ds, entry1) == dataset +# Now add it to the variable +@test addDataEntry!(var1, entry1) == entry1 +@test entry1 in getDataEntries(var1) +# Update test +copyEntry = deepcopy(entry1) +sleep(0.1) +@test updateDataBlob!(ds, entry1, dataset2) == dataset2 +# Data updated? +@test getDataBlob(ds, entry1) == dataset2 +# Timestamp updated? +@test entry1.lastUpdatedTimestamp > copyEntry.lastUpdatedTimestamp +# Delete data +@test deleteDataBlob!(ds, entry1) == dataset2 +# Delete entry +@test deleteDataEntry!(var1, entry1) == var1 + +# @test addData!() +# gde_td1 = DFG.addData!(dfg, :a, ds, :td1, "text/plain", rand(UInt8,10)) +# DFG.getDataEntryBlob(dfg, :a, ds, :td1) +# +# +# +# @test getDataBlob(ds, entry1) == dataset2 + +#TODO #listStoreEntries(ds) end diff --git a/test/consol_BigDataTests.jl b/test/consol_BigDataTests.jl new file mode 100644 index 00000000..506cf513 --- /dev/null +++ b/test/consol_BigDataTests.jl @@ -0,0 +1,129 @@ +if false +using Test +using GraphPlot +using Neo4j +using DistributedFactorGraphs +using Pkg +using Dates +using UUIDs + +include("testBlocks.jl") + +testDFGAPI = CloudGraphsDFG +testDFGAPI = LightDFG +end +# Build a basic graph. + + + +# Stores to test +testStores = [InMemoryDataStore(), FileDataStore("/tmp/dfgFilestore")] + +if false +testStore = testStores[1] +testStore = testStores[2] +end + +for testStore in testStores + +dfg = testDFGAPI{NoSolverParams}() +var1, var2, var3, vorphan, v1_tags = DFGVariableSCA() +fac0, fac1, fac2 = DFGFactorSCA() + +addVariable!(dfg, var1) +addVariable!(dfg, var2) +addFactor!(dfg, fac1) + # Create a data store and a dataset +ds = testStore +# ds = FileDataStore("/tmp/filestore") +dataset = rand(UInt8, 1000) +dataset2 = rand(UInt8, 1000) + +entry1 = GeneralDataEntry(dfg, var1, :test1) +# Set it in the store +@test addDataBlob!(ds, entry1, dataset) == dataset +@test getDataBlob(ds, entry1) == dataset +# Now add it to the variable +@test addDataEntry!(var1, entry1) == entry1 +@test entry1 in getDataEntries(var1) + +@test getDataEntryBlob(dfg, :a, ds, :test1) == (entry1, dataset) + +# Update test +copyEntry = deepcopy(entry1) +sleep(0.1) +@test updateDataBlob!(ds, entry1, dataset2) == dataset2 +# Data updated? +@test getDataBlob(ds, entry1) == dataset2 +# Timestamp updated? 
+@test entry1.lastUpdatedTimestamp > copyEntry.lastUpdatedTimestamp +# Delete data +@test deleteDataBlob!(ds, entry1) == dataset2 +# Delete entry +@test deleteDataEntry!(var1, entry1) == var1 + +# @test addData!() +gde_td1 = addData!(dfg, :a, ds, :td1, "text/plain", rand(UInt8,10)) + +td1_entry = getDataEntry(dfg, :a, :td1) +td1_blob = getDataBlob(ds, td1_entry) +@test getDataEntryBlob(dfg, :a, ds, :td1) == (td1_entry, td1_blob) + +if testStore == testStores[1] + liststore = listDataBlobs(ds) + @test length(liststore) == 1 + @test liststore[1] == td1_entry +end +end + + +fg = testDFGAPI{NoSolverParams}() +var1, var2, var3, vorphan, v1_tags = DFGVariableSCA() +fac0, fac1, fac2 = DFGFactorSCA() + +addVariable!(fg, var1) +addVariable!(fg, var2) +addFactor!(fg, fac1) + +oid = zeros(UInt8,12); oid[12] = 0x01 +de1 = MongodbDataEntry(:key1, NTuple{12,UInt8}(oid)) + +oid = zeros(UInt8,12); oid[12] = 0x02 +de2 = MongodbDataEntry(:key2, NTuple{12,UInt8}(oid)) + +oid = zeros(UInt8,12); oid[12] = 0x03 +de2_update = MongodbDataEntry(:key2, NTuple{12,UInt8}(oid)) + +#add +v1 = getVariable(fg, :a) +@test addDataEntry!(v1, de1) == de1 +@test addDataEntry!(fg, :a, de2) == de2 +@test_throws ErrorException addDataEntry!(v1, de1) +@test de2 in getDataEntries(v1) + +#get +@test deepcopy(de1) == getDataEntry(v1, :key1) +@test deepcopy(de2) == getDataEntry(fg, :a, :key2) +@test_throws ErrorException getDataEntry(v2, :key1) +@test_throws ErrorException getDataEntry(fg, :b, :key1) + +#update +@test updateDataEntry!(fg, :a, de2_update) == de2_update +@test deepcopy(de2_update) == getBigDataEntry(fg, :a, :key2) +@test @test_logs (:warn, r"does not exist") updateDataEntry!(fg, :b, de2_update) == de2_update + +#list +entries = getDataEntries(fg, :a) +@test length(entries) == 2 +@test issetequal(map(e->e.key, entries), [:key1, :key2]) +@test length(getDataEntries(fg, :b)) == 1 + +@test issetequal(listDataEntries(fg, :a), [:key1, :key2]) +@test listDataEntries(fg, :b) == Symbol[:key2] + +#delete +@test deleteDataEntry!(v1, :key1) == v1 +@test listDataEntries(v1) == Symbol[:key2] +#delete from dfg +@test deleteDataEntry!(fg, :a, :key2) == v1 +@test listDataEntries(v1) == Symbol[] diff --git a/test/iifInterfaceTests.jl b/test/iifInterfaceTests.jl index 5e8a2961..608b85be 100644 --- a/test/iifInterfaceTests.jl +++ b/test/iifInterfaceTests.jl @@ -261,15 +261,15 @@ end @test issetequal(map(e->e.key, entries), [:key1, :key2]) @test length(getBigDataEntries(dfg, :b)) == 1 - @test issetequal(getBigDataKeys(dfg, :a), [:key1, :key2]) - @test getBigDataKeys(dfg, :b) == Symbol[:key2] + @test issetequal(listDataEntries(dfg, :a), [:key1, :key2]) + @test listDataEntries(dfg, :b) == Symbol[:key2] #delete @test deleteBigDataEntry!(v1, :key1) == v1 - @test getBigDataKeys(v1) == Symbol[:key2] + @test listDataEntries(v1) == Symbol[:key2] #delete from ddfg @test deleteBigDataEntry!(dfg, :a, :key2) == v1 - @test getBigDataKeys(v1) == Symbol[] + @test listDataEntries(v1) == Symbol[] end end diff --git a/test/runtests.jl b/test/runtests.jl index b5913272..f2427fb1 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -58,6 +58,7 @@ end @testset "Data Store Tests" begin include("DataStoreTests.jl") + include("consol_BigDataTests.jl") end @testset "LightDFG subtype tests" begin diff --git a/test/testBlocks.jl b/test/testBlocks.jl index d8af8800..c68b3429 100644 --- a/test/testBlocks.jl +++ b/test/testBlocks.jl @@ -773,47 +773,47 @@ function BigDataEntriesTestBlock!(fg, v2) # mergeBigDataEntries oid = zeros(UInt8,12); oid[12] = 0x01 - de1 = 
MongodbBigDataEntry(:key1, NTuple{12,UInt8}(oid)) + de1 = MongodbDataEntry(:key1, NTuple{12,UInt8}(oid)) oid = zeros(UInt8,12); oid[12] = 0x02 - de2 = MongodbBigDataEntry(:key2, NTuple{12,UInt8}(oid)) + de2 = MongodbDataEntry(:key2, NTuple{12,UInt8}(oid)) oid = zeros(UInt8,12); oid[12] = 0x03 - de2_update = MongodbBigDataEntry(:key2, NTuple{12,UInt8}(oid)) + de2_update = MongodbDataEntry(:key2, NTuple{12,UInt8}(oid)) #add v1 = getVariable(fg, :a) - @test addBigDataEntry!(v1, de1) == de1 - @test addBigDataEntry!(fg, :a, de2) == de2 - @test_throws ErrorException addBigDataEntry!(v1, de1) - @test de2 in getBigDataEntries(v1) + @test addDataEntry!(v1, de1) == de1 + @test addDataEntry!(fg, :a, de2) == de2 + @test_throws ErrorException addDataEntry!(v1, de1) + @test de2 in getDataEntries(v1) #get - @test deepcopy(de1) == getBigDataEntry(v1, :key1) - @test deepcopy(de2) == getBigDataEntry(fg, :a, :key2) - @test_throws ErrorException getBigDataEntry(v2, :key1) - @test_throws ErrorException getBigDataEntry(fg, :b, :key1) + @test deepcopy(de1) == getDataEntry(v1, :key1) + @test deepcopy(de2) == getDataEntry(fg, :a, :key2) + @test_throws ErrorException getDataEntry(v2, :key1) + @test_throws ErrorException getDataEntry(fg, :b, :key1) #update - @test updateBigDataEntry!(fg, :a, de2_update) == de2_update + @test updateDataEntry!(fg, :a, de2_update) == de2_update @test deepcopy(de2_update) == getBigDataEntry(fg, :a, :key2) - @test @test_logs (:warn, r"does not exist") updateBigDataEntry!(fg, :b, de2_update) == de2_update + @test @test_logs (:warn, r"does not exist") updateDataEntry!(fg, :b, de2_update) == de2_update #list - entries = getBigDataEntries(fg, :a) + entries = getDataEntries(fg, :a) @test length(entries) == 2 @test issetequal(map(e->e.key, entries), [:key1, :key2]) - @test length(getBigDataEntries(fg, :b)) == 1 + @test length(getDataEntries(fg, :b)) == 1 - @test issetequal(getBigDataKeys(fg, :a), [:key1, :key2]) - @test getBigDataKeys(fg, :b) == Symbol[:key2] + @test issetequal(listDataEntries(fg, :a), [:key1, :key2]) + @test listDataEntries(fg, :b) == Symbol[:key2] #delete - @test deleteBigDataEntry!(v1, :key1) == v1 - @test getBigDataKeys(v1) == Symbol[:key2] + @test deleteDataEntry!(v1, :key1) == v1 + @test listDataEntries(v1) == Symbol[:key2] #delete from dfg - @test deleteBigDataEntry!(fg, :a, :key2) == v1 - @test getBigDataKeys(v1) == Symbol[] + @test deleteDataEntry!(fg, :a, :key2) == v1 + @test listDataEntries(v1) == Symbol[] end
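
For reference, a minimal usage sketch of the renamed blob API exercised by this patch, assuming the FileDataStore at /tmp/dfgFilestore used in the tests above and a LightDFG graph in which a variable labelled :x0 has already been added (variable construction is elided here; the tests build theirs via the testBlocks helpers). The data key :camImage and the label :x0 are illustrative only.

using Dates
using DistributedFactorGraphs

# Assumes: dfg = LightDFG{NoSolverParams}() with a variable :x0 already added.

# Create a file-backed data store (same location as the tests above).
ds = FileDataStore("/tmp/dfgFilestore")

# addData! builds a GeneralDataEntry for (:x0, :camImage), writes the blob to the
# store via addDataBlob!, and attaches the entry to the variable via addDataEntry!.
raw = rand(UInt8, 100)
addData!(dfg, :x0, ds, :camImage, "application/octet-stream", raw)

# Fetch both the entry and the raw bytes back as a tuple.
entry, blob = getDataEntryBlob(dfg, :x0, ds, :camImage)

# Replace the stored blob; the entry's lastUpdatedTimestamp is refreshed.
updateDataBlob!(ds, entry, rand(UInt8, 100))

# Remove the blob from the store, then the entry from the graph.
deleteDataBlob!(ds, entry)
deleteDataEntry!(dfg, :x0, :camImage)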