From a8d67914362b3114d15781a3b3f60d4c48a83008 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 10 Aug 2023 17:11:55 -0700 Subject: [PATCH 001/107] initial version of http server and client --- go.mod | 18 +- go.sum | 64 +++++++ http/client_collection.go | 136 +++++++++++++++ http/client_lens.go | 81 +++++++++ http/client_store.go | 347 ++++++++++++++++++++++++++++++++++++++ http/client_utils.go | 52 ++++++ http/server.go | 239 ++++++++++++++++++++++++++ 7 files changed, 935 insertions(+), 2 deletions(-) create mode 100644 http/client_collection.go create mode 100644 http/client_lens.go create mode 100644 http/client_store.go create mode 100644 http/client_utils.go create mode 100644 http/server.go diff --git a/go.mod b/go.mod index 402771dfc2..aaa20dfde5 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,6 @@ require ( github.com/graphql-go/graphql v0.8.1 github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.11.0 github.com/ipfs/go-block-format v0.1.2 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -26,7 +25,7 @@ require ( github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 github.com/libp2p/go-libp2p v0.28.0 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.23.0 + github.com/libp2p/go-libp2p-kad-dht v0.21.1 github.com/libp2p/go-libp2p-pubsub v0.9.3 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mitchellh/mapstructure v1.5.0 @@ -56,7 +55,9 @@ require ( require ( github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/bytedance/sonic v1.9.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect @@ -71,9 +72,16 @@ require ( github.com/flynn/noise v1.0.0 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/gin-gonic/gin v1.9.1 // indirect github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.14.0 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect + github.com/goccy/go-json v0.10.2 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.1.0 // indirect @@ -118,9 +126,11 @@ require ( github.com/ipld/go-ipld-prime v0.20.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.16.5 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/koron/go-ssdp v0.0.4 // indirect + 
github.com/leodido/go-urn v1.2.4 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect @@ -142,6 +152,8 @@ require ( github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect github.com/minio/sha256-simd v1.0.1 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mr-tron/base58 v1.2.0 // indirect github.com/multiformats/go-base32 v0.1.0 // indirect github.com/multiformats/go-base36 v0.2.0 // indirect @@ -176,6 +188,7 @@ require ( github.com/subosito/gotenv v1.4.2 // indirect github.com/tetratelabs/wazero v1.3.1 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/whyrusleeping/cbor-gen v0.0.0-20230126041949-52956bd4c9aa // indirect github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect @@ -187,6 +200,7 @@ require ( go.uber.org/dig v1.17.0 // indirect go.uber.org/fx v1.19.2 // indirect go.uber.org/multierr v1.11.0 // indirect + golang.org/x/arch v0.3.0 // indirect golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect golang.org/x/mod v0.11.0 // indirect golang.org/x/sync v0.2.0 // indirect diff --git a/go.sum b/go.sum index cd8a3078a7..82d91a05c2 100644 --- a/go.sum +++ b/go.sum @@ -46,6 +46,7 @@ git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGy github.com/AndreasBriese/bbloom v0.0.0-20180913140656-343706a395b7/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= +github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= @@ -93,6 +94,8 @@ github.com/btcsuite/btcd v0.0.0-20190605094302-a0d1e3e36d50/go.mod h1:3J08xEfcug github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btcd v0.21.0-beta/go.mod h1:ZSWyehm27aAuS9bvkATT+Xte3hjHZ+MRgMY/8NJ7K94= +github.com/btcsuite/btcd v0.22.1/go.mod h1:wqgTSL29+50LRkmOVknEdmt8ZojIzhuWvgu/iptuN7Y= +github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190207003914-4c204d697803/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= 
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= @@ -107,6 +110,9 @@ github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46f github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM= +github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= +github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= +github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= @@ -117,6 +123,9 @@ github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wXkRAgjxjQ= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -168,6 +177,7 @@ github.com/dgraph-io/badger v1.6.0-rc1/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhY github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= github.com/dgraph-io/badger v1.6.1/go.mod h1:FRmFw3uxvcpa8zG3Rxs0th+hCLIuaQg8HlNV5bjgnuU= github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= +github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE= github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g= github.com/dgraph-io/ristretto v0.0.2/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= @@ -215,7 +225,13 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fxamacker/cbor/v2 v2.4.0 h1:ri0ArlOR+5XunOP8CRUowT0pSJOwhW098ZCUyskZD88= github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= +github.com/gabriel-vasile/mimetype v1.4.2 
h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= +github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= @@ -235,16 +251,25 @@ github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vb github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= +github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0 
h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= @@ -468,10 +493,12 @@ github.com/ipfs/go-ds-badger v0.0.5/go.mod h1:g5AuuCGmr7efyzQhLL8MzwqcauPojGPUaH github.com/ipfs/go-ds-badger v0.2.1/go.mod h1:Tx7l3aTph3FMFrRS838dcSJh+jjA7cX9DrGVwx/NOwE= github.com/ipfs/go-ds-badger v0.2.3/go.mod h1:pEYw0rgg3FIrywKKnL+Snr+w/LjJZVMTBRn4FS6UHUk= github.com/ipfs/go-ds-badger v0.3.0 h1:xREL3V0EH9S219kFFueOYJJTcjgNSZ2HY1iSvN7U1Ro= +github.com/ipfs/go-ds-badger v0.3.0/go.mod h1:1ke6mXNqeV8K3y5Ak2bAA0osoTfmxUdupVCGm4QUIek= github.com/ipfs/go-ds-leveldb v0.0.1/go.mod h1:feO8V3kubwsEF22n0YRQCffeb79OOYIykR4L04tMOYc= github.com/ipfs/go-ds-leveldb v0.4.1/go.mod h1:jpbku/YqBSsBc1qgME8BkWS4AxzF2cEu1Ii2r79Hh9s= github.com/ipfs/go-ds-leveldb v0.4.2/go.mod h1:jpbku/YqBSsBc1qgME8BkWS4AxzF2cEu1Ii2r79Hh9s= github.com/ipfs/go-ds-leveldb v0.5.0 h1:s++MEBbD3ZKc9/8/njrn4flZLnCuY9I79v94gBUNumo= +github.com/ipfs/go-ds-leveldb v0.5.0/go.mod h1:d3XG9RUDzQ6V4SHi8+Xgj9j1XuEk1z82lquxrVbml/Q= github.com/ipfs/go-fetcher v1.6.1 h1:UFuRVYX5AIllTiRhi5uK/iZkfhSpBCGX7L70nSZEmK8= github.com/ipfs/go-fetcher v1.6.1/go.mod h1:27d/xMV8bodjVs9pugh/RCjjK2OZ68UgAMspMdingNo= github.com/ipfs/go-ipfs-blockstore v0.0.1/go.mod h1:d3WClOmRQKFnJ0Jz/jj/zmksX0ma1gROTlovZKBmN08= @@ -597,6 +624,8 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -613,6 +642,7 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6 github.com/klauspost/compress v1.16.5 h1:IFV2oUNUzZaz+XyusxpLzpzS8Pt5rh0Z16For/djlyI= github.com/klauspost/compress v1.16.5/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -631,10 +661,14 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 
github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 h1:hC67vWtvuDnw8w6u4jLFoj3SOH92/4Lq8SCR++L7njw= github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25/go.mod h1:rDE4oJUIAQoXX9heUg8VOQf5LscRWj0BeE5mbGqOs3E= +github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/libp2p/go-addr-util v0.0.1/go.mod h1:4ac6O7n9rIAKB1dnd+s8IbbMXkt+oBpzX4/+RACcnlQ= github.com/libp2p/go-addr-util v0.0.2/go.mod h1:Ecd6Fb3yIuLzq4bD7VcywcVSBtefcAwnUISBM3WG15E= +github.com/libp2p/go-addr-util v0.1.0/go.mod h1:6I3ZYuFr2O/9D+SoyM0zEw0EF3YkldtTX406BpdQMqw= github.com/libp2p/go-buffer-pool v0.0.1/go.mod h1:xtyIz9PMobb13WaxR6Zo1Pd1zXJKYg0a8KiIvDp3TzQ= github.com/libp2p/go-buffer-pool v0.0.2/go.mod h1:MvaB6xw5vOrDl8rYZGLFdKAuk/hRoRZd1Vi32+RXyFM= github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= @@ -646,6 +680,7 @@ github.com/libp2p/go-conn-security-multistream v0.0.2/go.mod h1:nc9vud7inQ+d6SO0 github.com/libp2p/go-conn-security-multistream v0.1.0/go.mod h1:aw6eD7LOsHEX7+2hJkDxw1MteijaVcI+/eP2/x3J1xc= github.com/libp2p/go-conn-security-multistream v0.2.0/go.mod h1:hZN4MjlNetKD3Rq5Jb/P5ohUnFLNzEAR4DLSzpn2QLU= github.com/libp2p/go-conn-security-multistream v0.2.1/go.mod h1:cR1d8gA0Hr59Fj6NhaTpFhJZrjSYuNmhpT2r25zYR70= +github.com/libp2p/go-conn-security-multistream v0.3.0/go.mod h1:EEP47t4fw/bTelVmEzIDqSe69hO/ip52xBEhZMLWAHM= github.com/libp2p/go-eventbus v0.1.0/go.mod h1:vROgu5cs5T7cv7POWlWxBaVLxfSegC5UGQf8A2eEmx4= github.com/libp2p/go-eventbus v0.2.1/go.mod h1:jc2S4SoEVPP48H9Wpzm5aiGwUCBMfGhVhhBjyhhCJs8= github.com/libp2p/go-flow-metrics v0.0.1/go.mod h1:Iv1GH0sG8DtYN3SVJ2eG221wMiNpZxBdp967ls1g+k8= @@ -671,10 +706,12 @@ github.com/libp2p/go-libp2p-autonat v0.2.0/go.mod h1:DX+9teU4pEEoZUqR1PiMlqliONQ github.com/libp2p/go-libp2p-autonat v0.2.1/go.mod h1:MWtAhV5Ko1l6QBsHQNSuM6b1sRkXrpk0/LqCr+vCVxI= github.com/libp2p/go-libp2p-autonat v0.2.2/go.mod h1:HsM62HkqZmHR2k1xgX34WuWDzk/nBwNHoeyyT4IWV6A= github.com/libp2p/go-libp2p-autonat v0.4.2/go.mod h1:YxaJlpr81FhdOv3W3BTconZPfhaYivRdf53g+S2wobk= +github.com/libp2p/go-libp2p-autonat v0.7.0/go.mod h1:uPvPn6J7cN+LCfFwW5tpOYvAz5NvPTc4iBamTV/WDMg= github.com/libp2p/go-libp2p-blankhost v0.0.1/go.mod h1:Ibpbw/7cPPYwFb7PACIWdvxxv0t0XCCI10t7czjAjTc= github.com/libp2p/go-libp2p-blankhost v0.1.1/go.mod h1:pf2fvdLJPsC1FsVrNP3DUUvMzUts2dsLLBEpo1vW1ro= github.com/libp2p/go-libp2p-blankhost v0.1.4/go.mod h1:oJF0saYsAXQCSfDq254GMNmLNz6ZTHTOvtF4ZydUvwU= github.com/libp2p/go-libp2p-blankhost v0.2.0/go.mod h1:eduNKXGTioTuQAUcZ5epXi9vMl+t4d8ugUBRQ4SqaNQ= +github.com/libp2p/go-libp2p-blankhost v0.3.0/go.mod h1:urPC+7U01nCGgJ3ZsV8jdwTp6Ji9ID0dMTvq+aJ+nZU= github.com/libp2p/go-libp2p-circuit v0.0.9/go.mod h1:uU+IBvEQzCu953/ps7bYzC/D/R0Ho2A9LfKVVCatlqU= github.com/libp2p/go-libp2p-circuit v0.1.0/go.mod h1:Ahq4cY3V9VJcHcn1SBXjr78AbFkZeIRmfunbA7pmFh8= github.com/libp2p/go-libp2p-circuit v0.1.4/go.mod h1:CY67BrEjKNDhdTk8UgBX1Y/H5c3xkAcs3gnksxY7osU= @@ -715,6 +752,7 @@ github.com/libp2p/go-libp2p-discovery v0.1.0/go.mod h1:4F/x+aldVHjHDHuX85x1zWoFT github.com/libp2p/go-libp2p-discovery v0.2.0/go.mod h1:s4VGaxYMbw4+4+tsoQTqh7wfxg97AEdo4GYBt6BadWg= 
github.com/libp2p/go-libp2p-discovery v0.3.0/go.mod h1:o03drFnz9BVAZdzC/QUQ+NeQOu38Fu7LJGEOK2gQltw= github.com/libp2p/go-libp2p-discovery v0.5.0/go.mod h1:+srtPIU9gDaBNu//UHvcdliKBIcr4SfDcm0/PfPJLug= +github.com/libp2p/go-libp2p-discovery v0.6.0/go.mod h1:/u1voHt0tKIe5oIA1RHBKQLVCWPna2dXmPNHc2zR9S8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= github.com/libp2p/go-libp2p-host v0.0.1/go.mod h1:qWd+H1yuU0m5CwzAkvbSjqKairayEHdR5MMl7Cwa7Go= @@ -723,6 +761,7 @@ github.com/libp2p/go-libp2p-interface-connmgr v0.0.1/go.mod h1:GarlRLH0LdeWcLnYM github.com/libp2p/go-libp2p-interface-connmgr v0.0.4/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-connmgr v0.0.5/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-pnet v0.0.1/go.mod h1:el9jHpQAXK5dnTpKA4yfCNBZXvrzdOU75zz+C6ryp3k= +github.com/libp2p/go-libp2p-kad-dht v0.21.1/go.mod h1:Oy8wvbdjpB70eS5AaFaI68tOtrdo3KylTvXDjikxqFo= github.com/libp2p/go-libp2p-kad-dht v0.23.0 h1:sxE6LxLopp79eLeV695n7+c77V/Vn4AMF28AdM/XFqM= github.com/libp2p/go-libp2p-kad-dht v0.23.0/go.mod h1:oO5N308VT2msnQI6qi5M61wzPmJYg7Tr9e16m5n7uDU= github.com/libp2p/go-libp2p-kbucket v0.6.0 h1:1uyqIdE6X7ihtbNg+vRc9EQEmZPEBaehvJ2W14rUrRQ= @@ -740,11 +779,13 @@ github.com/libp2p/go-libp2p-mplex v0.4.1/go.mod h1:cmy+3GfqfM1PceHTLL7zQzAAYaryD github.com/libp2p/go-libp2p-nat v0.0.4/go.mod h1:N9Js/zVtAXqaeT99cXgTV9e75KpnWCvVOiGzlcHmBbY= github.com/libp2p/go-libp2p-nat v0.0.5/go.mod h1:1qubaE5bTZMJE+E/uu2URroMbzdubFz1ChgiN79yKPE= github.com/libp2p/go-libp2p-nat v0.0.6/go.mod h1:iV59LVhB3IkFvS6S6sauVTSOrNEANnINbI/fkaLimiw= +github.com/libp2p/go-libp2p-nat v0.1.0/go.mod h1:DQzAG+QbDYjN1/C3B6vXucLtz3u9rEonLVPtZVzQqks= github.com/libp2p/go-libp2p-net v0.0.1/go.mod h1:Yt3zgmlsHOgUWSXmt5V/Jpz9upuJBE8EgNU9DrCcR8c= github.com/libp2p/go-libp2p-net v0.0.2/go.mod h1:Yt3zgmlsHOgUWSXmt5V/Jpz9upuJBE8EgNU9DrCcR8c= github.com/libp2p/go-libp2p-netutil v0.0.1/go.mod h1:GdusFvujWZI9Vt0X5BKqwWWmZFxecf9Gt03cKxm2f/Q= github.com/libp2p/go-libp2p-netutil v0.1.0/go.mod h1:3Qv/aDqtMLTUyQeundkKsA+YCThNdbQD54k3TqjpbFU= github.com/libp2p/go-libp2p-noise v0.2.0/go.mod h1:IEbYhBBzGyvdLBoxxULL/SGbJARhUeqlO8lVSREYu2Q= +github.com/libp2p/go-libp2p-noise v0.3.0/go.mod h1:JNjHbociDJKHD64KTkzGnzqJ0FEV5gHJa6AB00kbCNQ= github.com/libp2p/go-libp2p-peer v0.0.1/go.mod h1:nXQvOBbwVqoP+T5Y5nCjeH4sP9IX/J0AMzcDUVruVoo= github.com/libp2p/go-libp2p-peer v0.1.1/go.mod h1:jkF12jGB4Gk/IOo+yomm+7oLWxF278F7UnrYUQ1Q8es= github.com/libp2p/go-libp2p-peer v0.2.0/go.mod h1:RCffaCvUyW2CJmG2gAWVqwePwW7JMgxjsHm7+J5kjWY= @@ -758,6 +799,7 @@ github.com/libp2p/go-libp2p-peerstore v0.2.2/go.mod h1:NQxhNjWxf1d4w6PihR8btWIRj github.com/libp2p/go-libp2p-peerstore v0.2.6/go.mod h1:ss/TWTgHZTMpsU/oKVVPQCGuDHItOpf2W8RxAi50P2s= github.com/libp2p/go-libp2p-peerstore v0.2.7/go.mod h1:ss/TWTgHZTMpsU/oKVVPQCGuDHItOpf2W8RxAi50P2s= github.com/libp2p/go-libp2p-peerstore v0.6.0 h1:HJminhQSGISBIRb93N6WK3t6Fa8OOTnHd/VBjL4mY5A= +github.com/libp2p/go-libp2p-peerstore v0.6.0/go.mod h1:DGEmKdXrcYpK9Jha3sS7MhqYdInxJy84bIPtSu65bKc= github.com/libp2p/go-libp2p-pnet v0.2.0/go.mod 
h1:Qqvq6JH/oMZGwqs3N1Fqhv8NVhrdYcO0BW4wssv21LA= github.com/libp2p/go-libp2p-protocol v0.0.1/go.mod h1:Af9n4PiruirSDjHycM1QuiMi/1VZNHYcK8cLgFJLZ4s= github.com/libp2p/go-libp2p-protocol v0.1.0/go.mod h1:KQPHpAabB57XQxGrXCNvbL6UEXfQqUgC/1adR2Xtflk= @@ -783,6 +825,7 @@ github.com/libp2p/go-libp2p-swarm v0.2.3/go.mod h1:P2VO/EpxRyDxtChXz/VPVXyTnszHv github.com/libp2p/go-libp2p-swarm v0.2.8/go.mod h1:JQKMGSth4SMqonruY0a8yjlPVIkb0mdNSwckW7OYziM= github.com/libp2p/go-libp2p-swarm v0.3.0/go.mod h1:hdv95GWCTmzkgeJpP+GK/9D9puJegb7H57B5hWQR5Kk= github.com/libp2p/go-libp2p-swarm v0.5.0/go.mod h1:sU9i6BoHE0Ve5SKz3y9WfKrh8dUat6JknzUehFx8xW4= +github.com/libp2p/go-libp2p-swarm v0.9.0/go.mod h1:2f8d8uxTJmpeqHF/1ujjdXZp+98nNIbujVOMEZxCbZ8= github.com/libp2p/go-libp2p-testing v0.0.1/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= github.com/libp2p/go-libp2p-testing v0.0.2/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= github.com/libp2p/go-libp2p-testing v0.0.3/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= @@ -801,6 +844,8 @@ github.com/libp2p/go-libp2p-transport-upgrader v0.1.1/go.mod h1:IEtA6or8JUbsV07q github.com/libp2p/go-libp2p-transport-upgrader v0.2.0/go.mod h1:mQcrHj4asu6ArfSoMuyojOdjx73Q47cYD7s5+gZOlns= github.com/libp2p/go-libp2p-transport-upgrader v0.3.0/go.mod h1:i+SKzbRnvXdVbU3D1dwydnTmKRPXiAR/fyvi1dXuL4o= github.com/libp2p/go-libp2p-transport-upgrader v0.4.2/go.mod h1:NR8ne1VwfreD5VIWIU62Agt/J18ekORFU/j1i2y8zvk= +github.com/libp2p/go-libp2p-transport-upgrader v0.6.0/go.mod h1:1e07y1ZSZdHo9HPbuU8IztM1Cj+DR5twgycb4pnRzRo= +github.com/libp2p/go-libp2p-xor v0.1.0/go.mod h1:LSTM5yRnjGZbWNTA/hRwq2gGFrvRIbQJscoIL/u6InY= github.com/libp2p/go-libp2p-yamux v0.1.2/go.mod h1:xUoV/RmYkg6BW/qGxA9XJyg+HzXFYkeXbnhjmnYzKp8= github.com/libp2p/go-libp2p-yamux v0.1.3/go.mod h1:VGSQVrqkh6y4nm0189qqxMtvyBft44MOYYPpYKXiVt4= github.com/libp2p/go-libp2p-yamux v0.2.0/go.mod h1:Db2gU+XfLpm6E4rG5uGCFX6uXA8MEXOxFcRoXUODaK8= @@ -852,6 +897,7 @@ github.com/libp2p/go-reuseport v0.3.0/go.mod h1:laea40AimhtfEqysZ71UpYj4S+R9VpH8 github.com/libp2p/go-reuseport-transport v0.0.2/go.mod h1:YkbSDrvjUVDL6b8XqriyA20obEtsW9BLkuOUyQAOCbs= github.com/libp2p/go-reuseport-transport v0.0.3/go.mod h1:Spv+MPft1exxARzP2Sruj2Wb5JSyHNncjf1Oi2dEbzM= github.com/libp2p/go-reuseport-transport v0.0.4/go.mod h1:trPa7r/7TJK/d+0hdBLOCGvpQQVOU74OXbNCIMkufGw= +github.com/libp2p/go-reuseport-transport v0.1.0/go.mod h1:vev0C0uMkzriDY59yFHD9v+ujJvYmDQVLowvAjEOmfw= github.com/libp2p/go-sockaddr v0.0.2/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= github.com/libp2p/go-sockaddr v0.1.0/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= github.com/libp2p/go-sockaddr v0.1.1/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= @@ -872,6 +918,7 @@ github.com/libp2p/go-ws-transport v0.1.0/go.mod h1:rjw1MG1LU9YDC6gzmwObkPd/Sqwhw github.com/libp2p/go-ws-transport v0.2.0/go.mod h1:9BHJz/4Q5A9ludYWKoGCFC5gUElzlHoKzu0yY9p/klM= github.com/libp2p/go-ws-transport v0.3.0/go.mod h1:bpgTJmRZAvVHrgHybCVyqoBmyLQ1fiZuEaBYusP5zsk= github.com/libp2p/go-ws-transport v0.4.0/go.mod h1:EcIEKqf/7GDjth6ksuS/6p7R49V4CBY6/E7R/iyhYUA= +github.com/libp2p/go-ws-transport v0.5.0/go.mod h1:I2juo1dNTbl8BKSBYo98XY85kU2xds1iamArLvl8kNg= github.com/libp2p/go-yamux v1.2.1/go.mod 
h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= github.com/libp2p/go-yamux v1.2.2/go.mod h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= github.com/libp2p/go-yamux v1.2.3/go.mod h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= @@ -882,6 +929,7 @@ github.com/libp2p/go-yamux v1.3.7/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/h github.com/libp2p/go-yamux v1.4.0/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux v1.4.1/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux/v2 v2.2.0/go.mod h1:3So6P6TV6r75R9jiBpiIKgU/66lOarCZjqROGxzPpPQ= +github.com/libp2p/go-yamux/v2 v2.3.0/go.mod h1:iTU+lOIn/2h0AgKcL49clNTwfEw+WSfDYrXe05EyKIs= github.com/libp2p/go-yamux/v4 v4.0.0 h1:+Y80dV2Yx/kv7Y7JKu0LECyVdMXm1VUoko+VQ9rBfZQ= github.com/libp2p/go-yamux/v4 v4.0.0/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q= @@ -950,9 +998,12 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mr-tron/base58 v1.1.0/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8= github.com/mr-tron/base58 v1.1.1/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8= github.com/mr-tron/base58 v1.1.2/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= @@ -1035,6 +1086,7 @@ github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OS github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= @@ -1138,6 +1190,7 @@ github.com/quic-go/quic-go v0.33.0 h1:ItNoTDN/Fm/zBlq769lLJc8ECe9gYaW40veHCCco7y github.com/quic-go/quic-go v0.33.0/go.mod h1:YMuhaAV9/jIu0XclDXwZPAsP/2Kgr5yMYhe9oxhhOFA= 
github.com/quic-go/webtransport-go v0.5.3 h1:5XMlzemqB4qmOlgIus5zB45AcZ2kCgCy2EptUrfOPWU= github.com/quic-go/webtransport-go v0.5.3/go.mod h1:OhmmgJIzTTqXK5xvtuX0oBpLV2GkLWNDA+UeTGJXErU= +github.com/raulk/clock v1.1.0/go.mod h1:3MpVxdZ/ODBQDxbN+kzshf5OSZwPjtMDx6BBXBmOeY0= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= @@ -1179,6 +1232,7 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= @@ -1244,6 +1298,7 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= @@ -1261,6 +1316,8 @@ github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsc github.com/tidwall/btree v1.6.0 h1:LDZfKfQIBHGHWSwckhXI0RPSXzlo+KYdjK7FWSqOzzg= github.com/tidwall/btree v1.6.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= @@ -1292,6 +1349,7 @@ github.com/whyrusleeping/mafmt v1.2.8/go.mod h1:faQJFPbLSxzD9xpA02ttW/tS9vZykNvX github.com/whyrusleeping/mdns v0.0.0-20180901202407-ef14215e6b30/go.mod h1:j4l84WPFclQPj320J9gp0XwNKBb3U0zt5CBqjPp22G4= github.com/whyrusleeping/mdns v0.0.0-20190826153040-b9b60ed33aa9/go.mod 
h1:j4l84WPFclQPj320J9gp0XwNKBb3U0zt5CBqjPp22G4= github.com/whyrusleeping/multiaddr-filter v0.0.0-20160516205228-e903e4adabd7/go.mod h1:X2c0RVCI1eSUFI8eLcY3c0423ykwiUdxLJtkDvruhjI= +github.com/whyrusleeping/timecache v0.0.0-20160911033111-cfcb2f1abfee/go.mod h1:m2aV4LZI4Aez7dP5PMyVKEHhUyEJ/RjmPEDOpDvudHg= github.com/x-cray/logrus-prefixed-formatter v0.5.2/go.mod h1:2duySbKsL6M18s5GU7VPsoEPHyzalCE06qoARUCeBBE= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= @@ -1342,6 +1400,7 @@ go.uber.org/goleak v1.0.0/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= +go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= @@ -1359,6 +1418,9 @@ go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60= go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= +golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1829,6 +1891,7 @@ gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= @@ -1845,6 +1908,7 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI= lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= 
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/http/client_collection.go b/http/client_collection.go new file mode 100644 index 0000000000..c444344af2 --- /dev/null +++ b/http/client_collection.go @@ -0,0 +1,136 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "context" + "net/http" + "net/url" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +var _ client.Collection = (*CollectionClient)(nil) + +type CollectionClient struct { + client *http.Client + baseURL *url.URL + description client.CollectionDescription +} + +func NewCollectionClient(s *StoreClient, description client.CollectionDescription) *CollectionClient { + return &CollectionClient{ + client: s.client, + baseURL: s.baseURL, + description: description, + } +} + +func (c *CollectionClient) Description() client.CollectionDescription { + return c.description +} + +func (c *CollectionClient) Name() string { + return c.description.Name +} + +func (c *CollectionClient) Schema() client.SchemaDescription { + return c.description.Schema +} + +func (c *CollectionClient) ID() uint32 { + return c.description.ID +} + +func (c *CollectionClient) SchemaID() string { + return c.description.Schema.SchemaID +} + +func (c *CollectionClient) Create(context.Context, *client.Document) error { + return nil +} + +func (c *CollectionClient) CreateMany(context.Context, []*client.Document) error { + return nil +} + +func (c *CollectionClient) Update(context.Context, *client.Document) error { + return nil +} + +func (c *CollectionClient) Save(context.Context, *client.Document) error { + return nil +} + +func (c *CollectionClient) Delete(context.Context, client.DocKey) (bool, error) { + return false, nil +} + +func (c *CollectionClient) Exists(context.Context, client.DocKey) (bool, error) { + return false, nil +} + +func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { + return nil, nil +} + +func (c *CollectionClient) UpdateWithFilter(ctx context.Context, filter any, updater string) (*client.UpdateResult, error) { + return nil, nil +} + +func (c *CollectionClient) UpdateWithKey(ctx context.Context, key client.DocKey, updater string) (*client.UpdateResult, error) { + return nil, nil +} + +func (c *CollectionClient) UpdateWithKeys(context.Context, []client.DocKey, string) (*client.UpdateResult, error) { + return nil, nil +} + +func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { + return nil, nil +} + +func (c *CollectionClient) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { + return nil, nil +} + +func (c *CollectionClient) DeleteWithKey(context.Context, client.DocKey) (*client.DeleteResult, error) { + return nil, nil +} + +func (c *CollectionClient) DeleteWithKeys(context.Context, []client.DocKey) (*client.DeleteResult, error) { + return nil, nil +} + +func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { + return nil, nil +} + +func (c *CollectionClient) 
WithTxn(datastore.Txn) client.Collection { + return nil +} + +func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { + return nil, nil +} + +func (c *CollectionClient) CreateIndex(context.Context, client.IndexDescription) (client.IndexDescription, error) { + return client.IndexDescription{}, nil +} + +func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) error { + return nil +} + +func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { + return c.description.Indexes, nil +} diff --git a/http/client_lens.go b/http/client_lens.go new file mode 100644 index 0000000000..0cd893b968 --- /dev/null +++ b/http/client_lens.go @@ -0,0 +1,81 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/url" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/immutable/enumerable" +) + +var _ client.LensRegistry = (*LensClient)(nil) + +type LensClient struct { + client *http.Client + baseURL *url.URL +} + +func NewLensClient(s *StoreClient) *LensClient { + return &LensClient{ + client: s.client, + baseURL: s.baseURL, + } +} + +func (c *LensClient) WithTxn(datastore.Txn) client.LensRegistry { + return nil +} + +func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { + url := c.baseURL.JoinPath("lens", "migration").String() + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(body)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *LensClient) ReloadLenses(context.Context) error { + return nil +} + +func (c *LensClient) MigrateUp(context.Context, enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) { + return nil, nil +} + +func (c *LensClient) MigrateDown(context.Context, enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) { + return nil, nil +} + +func (c *LensClient) Config(context.Context) ([]client.LensConfig, error) { + return nil, nil +} + +func (c *LensClient) HasMigration(context.Context, string) (bool, error) { + return false, nil +} diff --git a/http/client_store.go b/http/client_store.go new file mode 100644 index 0000000000..a893ac6010 --- /dev/null +++ b/http/client_store.go @@ -0,0 +1,347 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/url" + "strings" + + "github.com/sourcenetwork/defradb/client" +) + +var _ client.Store = (*StoreClient)(nil) + +type StoreClient struct { + client *http.Client + baseURL *url.URL +} + +func NewStoreClient(rawURL string) (*StoreClient, error) { + baseURL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + return &StoreClient{ + client: http.DefaultClient, + baseURL: baseURL, + }, nil +} + +func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + methodURL := c.baseURL.JoinPath("p2p", "replicators") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var reps []client.Replicator + if err := parseJsonResponse(res, &reps); err != nil { + return nil, err + } + return reps, nil +} + +func (c *StoreClient) AddP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) RemoveP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) GetAllP2PCollections(ctx context.Context) ([]string, error) { + methodURL := c.baseURL.JoinPath("p2p", "collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var cols []string + if err := parseJsonResponse(res, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *StoreClient) BasicImport(ctx context.Context, filepath string) error { + methodURL := c.baseURL.JoinPath("backup", "import") + + body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) + if err != nil { + return err + } + req, err := 
http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) BasicExport(ctx context.Context, config *client.BackupConfig) error { + methodURL := c.baseURL.JoinPath("backup", "export") + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + methodURL := c.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var cols []client.CollectionDescription + if err := parseJsonResponse(res, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *StoreClient) PatchSchema(ctx context.Context, patch string) error { + methodURL := c.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() + + return parseResponse(res) +} + +func (c *StoreClient) SetMigration(ctx context.Context, config client.LensConfig) error { + return c.LensRegistry().SetMigration(ctx, config) +} + +func (c *StoreClient) LensRegistry() client.LensRegistry { + return NewLensClient(c) +} + +func (c *StoreClient) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + methodURL := c.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var description client.CollectionDescription + if err := parseJsonResponse(res, &description); err != nil { + return nil, err + } + return NewCollectionClient(c, description), nil +} + +func (c *StoreClient) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + methodURL := c.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var description client.CollectionDescription + if err := parseJsonResponse(res, &description); err != nil { + return nil, err + } + return NewCollectionClient(c, description), nil +} + +func (c *StoreClient) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + methodURL := c.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), 
nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var description client.CollectionDescription + if err := parseJsonResponse(res, &description); err != nil { + return nil, err + } + return NewCollectionClient(c, description), nil +} + +func (c *StoreClient) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + methodURL := c.baseURL.JoinPath("collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var descriptions []client.CollectionDescription + if err := parseJsonResponse(res, &descriptions); err != nil { + return nil, err + } + collections := make([]client.Collection, len(descriptions)) + for i, d := range descriptions { + collections[i] = NewCollectionClient(c, d) + } + return collections, nil +} + +func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + methodURL := c.baseURL.JoinPath("indexes") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + var indexes map[client.CollectionName][]client.IndexDescription + if err := parseJsonResponse(res, &indexes); err != nil { + return nil, err + } + return indexes, nil +} + +func (c *StoreClient) ExecRequest(context.Context, string) *client.RequestResult { + return nil +} diff --git a/http/client_utils.go b/http/client_utils.go new file mode 100644 index 0000000000..221f169eb6 --- /dev/null +++ b/http/client_utils.go @@ -0,0 +1,52 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" +) + +type errorResponse struct { + Error string `json:"error"` +} + +func parseErrorResponse(data []byte) error { + var res errorResponse + if err := json.Unmarshal(data, &res); err != nil { + return fmt.Errorf("%s", data) + } + return fmt.Errorf(res.Error) +} + +func parseResponse(res *http.Response) error { + data, err := io.ReadAll(res.Body) + if err != nil { + return err + } + if res.StatusCode != http.StatusOK { + return parseErrorResponse(data) + } + return nil +} + +func parseJsonResponse(res *http.Response, out any) error { + data, err := io.ReadAll(res.Body) + if err != nil { + return err + } + if res.StatusCode != http.StatusOK { + return parseErrorResponse(data) + } + return json.Unmarshal(data, &out) +} diff --git a/http/server.go b/http/server.go new file mode 100644 index 0000000000..ccb4bfeba0 --- /dev/null +++ b/http/server.go @@ -0,0 +1,239 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "io" + "net/http" + + "github.com/gin-gonic/gin" + + "github.com/sourcenetwork/defradb/client" +) + +type Server struct { + store client.Store +} + +func NewServer(store client.Store) *gin.Engine { + server := &Server{store} + + router := gin.Default() + api := router.Group("/api/v0") + + backup := api.Group("/backup") + backup.POST("/export", server.BasicExport) + backup.POST("/import", server.BasicImport) + + schema := api.Group("/schema") + schema.POST("/", server.AddSchema) + schema.PATCH("/", server.PatchSchema) + + collections := api.Group("/collections") + collections.GET("/", server.GetCollection) + + lens := api.Group("/lens") + lens_migration := lens.Group("/migration") + lens_migration.POST("/", server.SetMigration) + + p2p := api.Group("/p2p") + p2p_replicators := p2p.Group("/replicators") + p2p_replicators.GET("/replicators", server.GetAllReplicators) + p2p_replicators.POST("/replicators", server.SetReplicator) + p2p_replicators.DELETE("/replicators", server.DeleteReplicator) + + p2p_collections := p2p.Group("/collections") + p2p_collections.GET("/collections", server.GetAllP2PCollections) + p2p_collections.POST("/collections/:id", server.AddP2PCollection) + p2p_collections.DELETE("/collections/:id", server.RemoveP2PCollection) + + return router +} + +func (s *Server) SetReplicator(c *gin.Context) { + var req client.Replicator + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := s.store.SetReplicator(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) DeleteReplicator(c *gin.Context) { + var req client.Replicator + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := s.store.DeleteReplicator(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) GetAllReplicators(c *gin.Context) { + reps, err := s.store.GetAllReplicators(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, reps) +} + +func (s *Server) AddP2PCollection(c *gin.Context) { + err := s.store.AddP2PCollection(c.Request.Context(), c.Param("id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) RemoveP2PCollection(c *gin.Context) { + err := s.store.RemoveP2PCollection(c.Request.Context(), c.Param("id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) GetAllP2PCollections(c *gin.Context) { + cols, err := s.store.GetAllP2PCollections(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, cols) +} + +func (s *Server) BasicImport(c *gin.Context) { + var config client.BackupConfig + if err := c.ShouldBindJSON(&config); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := 
s.store.BasicImport(c.Request.Context(), config.Filepath) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) BasicExport(c *gin.Context) { + var config client.BackupConfig + if err := c.ShouldBindJSON(&config); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := s.store.BasicExport(c.Request.Context(), &config) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) AddSchema(c *gin.Context) { + schema, err := io.ReadAll(c.Request.Body) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + cols, err := s.store.AddSchema(c.Request.Context(), string(schema)) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, cols) +} + +func (s *Server) PatchSchema(c *gin.Context) { + patch, err := io.ReadAll(c.Request.Body) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err = s.store.PatchSchema(c.Request.Context(), string(patch)) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) SetMigration(c *gin.Context) { + var req client.LensConfig + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := s.store.SetMigration(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *Server) GetCollection(c *gin.Context) { + switch { + case c.Query("name") != "": + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Query("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + case c.Query("schema_id") != "": + col, err := s.store.GetCollectionBySchemaID(c.Request.Context(), c.Query("schema_id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + case c.Query("version_id") != "": + col, err := s.store.GetCollectionByVersionID(c.Request.Context(), c.Query("version_id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + default: + cols, err := s.store.GetAllCollections(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + colDesc := make([]client.CollectionDescription, len(cols)) + for i, col := range cols { + colDesc[i] = col.Description() + } + c.JSON(http.StatusOK, colDesc) + } +} + +func (s *Server) GetAllIndexes(c *gin.Context) { + indexes, err := s.store.GetAllIndexes(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, indexes) +} From 25d2250a6a40ce06bce543caa1efcd7cabc00843 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 11 Aug 2023 10:15:06 -0700 Subject: [PATCH 002/107] update lens client http base url --- http/client_lens.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/http/client_lens.go b/http/client_lens.go index 0cd893b968..b3f6149292 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -32,7 +32,7 @@ type LensClient struct { 
func NewLensClient(s *StoreClient) *LensClient { return &LensClient{ client: s.client, - baseURL: s.baseURL, + baseURL: s.baseURL.JoinPath("lens"), } } @@ -41,13 +41,13 @@ func (c *LensClient) WithTxn(datastore.Txn) client.LensRegistry { } func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { - url := c.baseURL.JoinPath("lens", "migration").String() + methodURL := c.baseURL.JoinPath("migration") body, err := json.Marshal(config) if err != nil { return err } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBuffer(body)) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return err } From 1e70947e60ac8cbdca745bb5edc02bc11f618765 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 11 Aug 2023 11:53:54 -0700 Subject: [PATCH 003/107] integration tests working with http client --- go.mod | 5 +- go.sum | 30 +------ http/{client_store.go => client.go} | 49 +++++----- http/client_collection.go | 3 +- http/client_lens.go | 3 +- tests/integration/client.go | 133 ++++++++++++++++++++++++++++ tests/integration/state.go | 9 ++ tests/integration/utils2.go | 82 ++++++++++++----- 8 files changed, 236 insertions(+), 78 deletions(-) rename http/{client_store.go => client.go} (78%) create mode 100644 tests/integration/client.go diff --git a/go.mod b/go.mod index aaa20dfde5..4f42f5acb0 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,7 @@ require ( github.com/dgraph-io/badger/v4 v4.1.0 github.com/evanphx/json-patch/v5 v5.6.0 github.com/fxamacker/cbor/v2 v2.4.0 + github.com/gin-gonic/gin v1.9.1 github.com/go-chi/chi/v5 v5.0.10 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.4.2 @@ -15,6 +16,7 @@ require ( github.com/graphql-go/graphql v0.8.1 github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 github.com/iancoleman/strcase v0.3.0 + github.com/ipfs/boxo v0.11.0 github.com/ipfs/go-block-format v0.1.2 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -25,7 +27,7 @@ require ( github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 github.com/libp2p/go-libp2p v0.28.0 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.21.1 + github.com/libp2p/go-libp2p-kad-dht v0.23.0 github.com/libp2p/go-libp2p-pubsub v0.9.3 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mitchellh/mapstructure v1.5.0 @@ -74,7 +76,6 @@ require ( github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/gabriel-vasile/mimetype v1.4.2 // indirect github.com/gin-contrib/sse v0.1.0 // indirect - github.com/gin-gonic/gin v1.9.1 // indirect github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-playground/locales v0.14.1 // indirect diff --git a/go.sum b/go.sum index 82d91a05c2..9320eb4694 100644 --- a/go.sum +++ b/go.sum @@ -46,7 +46,6 @@ git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGy github.com/AndreasBriese/bbloom v0.0.0-20180913140656-343706a395b7/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 
h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= -github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= @@ -94,8 +93,6 @@ github.com/btcsuite/btcd v0.0.0-20190605094302-a0d1e3e36d50/go.mod h1:3J08xEfcug github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3/go.mod h1:3J08xEfcugPacsc34/LKRU2yO7YmuT8yt28J8k2+rrI= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btcd v0.21.0-beta/go.mod h1:ZSWyehm27aAuS9bvkATT+Xte3hjHZ+MRgMY/8NJ7K94= -github.com/btcsuite/btcd v0.22.1/go.mod h1:wqgTSL29+50LRkmOVknEdmt8ZojIzhuWvgu/iptuN7Y= -github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= github.com/btcsuite/btcutil v0.0.0-20190207003914-4c204d697803/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= @@ -177,7 +174,6 @@ github.com/dgraph-io/badger v1.6.0-rc1/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhY github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= github.com/dgraph-io/badger v1.6.1/go.mod h1:FRmFw3uxvcpa8zG3Rxs0th+hCLIuaQg8HlNV5bjgnuU= github.com/dgraph-io/badger v1.6.2 h1:mNw0qs90GVgGGWylh0umH5iag1j6n/PeJtNvL6KY/x8= -github.com/dgraph-io/badger v1.6.2/go.mod h1:JW2yswe3V058sS0kZ2h/AXeDSqFjxnZcRrVH//y2UQE= github.com/dgraph-io/badger/v3 v3.2011.1 h1:Hmyof0WMEF/QtutX5SQHzIMnJQxb/IrSzhjckV2SD6g= github.com/dgraph-io/ristretto v0.0.2/go.mod h1:KPxhHT9ZxKefz+PCeOGsrHpl1qZ7i70dGTu2u+Ahh6E= github.com/dgraph-io/ristretto v0.1.1 h1:6CWw5tJNgpegArSHpNHJKldNeq03FQCwYvfMVWajOK8= @@ -251,12 +247,12 @@ github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vb github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= 
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= @@ -493,12 +489,10 @@ github.com/ipfs/go-ds-badger v0.0.5/go.mod h1:g5AuuCGmr7efyzQhLL8MzwqcauPojGPUaH github.com/ipfs/go-ds-badger v0.2.1/go.mod h1:Tx7l3aTph3FMFrRS838dcSJh+jjA7cX9DrGVwx/NOwE= github.com/ipfs/go-ds-badger v0.2.3/go.mod h1:pEYw0rgg3FIrywKKnL+Snr+w/LjJZVMTBRn4FS6UHUk= github.com/ipfs/go-ds-badger v0.3.0 h1:xREL3V0EH9S219kFFueOYJJTcjgNSZ2HY1iSvN7U1Ro= -github.com/ipfs/go-ds-badger v0.3.0/go.mod h1:1ke6mXNqeV8K3y5Ak2bAA0osoTfmxUdupVCGm4QUIek= github.com/ipfs/go-ds-leveldb v0.0.1/go.mod h1:feO8V3kubwsEF22n0YRQCffeb79OOYIykR4L04tMOYc= github.com/ipfs/go-ds-leveldb v0.4.1/go.mod h1:jpbku/YqBSsBc1qgME8BkWS4AxzF2cEu1Ii2r79Hh9s= github.com/ipfs/go-ds-leveldb v0.4.2/go.mod h1:jpbku/YqBSsBc1qgME8BkWS4AxzF2cEu1Ii2r79Hh9s= github.com/ipfs/go-ds-leveldb v0.5.0 h1:s++MEBbD3ZKc9/8/njrn4flZLnCuY9I79v94gBUNumo= -github.com/ipfs/go-ds-leveldb v0.5.0/go.mod h1:d3XG9RUDzQ6V4SHi8+Xgj9j1XuEk1z82lquxrVbml/Q= github.com/ipfs/go-fetcher v1.6.1 h1:UFuRVYX5AIllTiRhi5uK/iZkfhSpBCGX7L70nSZEmK8= github.com/ipfs/go-fetcher v1.6.1/go.mod h1:27d/xMV8bodjVs9pugh/RCjjK2OZ68UgAMspMdingNo= github.com/ipfs/go-ipfs-blockstore v0.0.1/go.mod h1:d3WClOmRQKFnJ0Jz/jj/zmksX0ma1gROTlovZKBmN08= @@ -661,14 +655,12 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 h1:hC67vWtvuDnw8w6u4jLFoj3SOH92/4Lq8SCR++L7njw= github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25/go.mod h1:rDE4oJUIAQoXX9heUg8VOQf5LscRWj0BeE5mbGqOs3E= github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/libp2p/go-addr-util v0.0.1/go.mod h1:4ac6O7n9rIAKB1dnd+s8IbbMXkt+oBpzX4/+RACcnlQ= github.com/libp2p/go-addr-util v0.0.2/go.mod h1:Ecd6Fb3yIuLzq4bD7VcywcVSBtefcAwnUISBM3WG15E= -github.com/libp2p/go-addr-util v0.1.0/go.mod h1:6I3ZYuFr2O/9D+SoyM0zEw0EF3YkldtTX406BpdQMqw= github.com/libp2p/go-buffer-pool v0.0.1/go.mod h1:xtyIz9PMobb13WaxR6Zo1Pd1zXJKYg0a8KiIvDp3TzQ= github.com/libp2p/go-buffer-pool v0.0.2/go.mod h1:MvaB6xw5vOrDl8rYZGLFdKAuk/hRoRZd1Vi32+RXyFM= github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= @@ -680,7 +672,6 @@ github.com/libp2p/go-conn-security-multistream v0.0.2/go.mod h1:nc9vud7inQ+d6SO0 github.com/libp2p/go-conn-security-multistream v0.1.0/go.mod h1:aw6eD7LOsHEX7+2hJkDxw1MteijaVcI+/eP2/x3J1xc= github.com/libp2p/go-conn-security-multistream v0.2.0/go.mod h1:hZN4MjlNetKD3Rq5Jb/P5ohUnFLNzEAR4DLSzpn2QLU= github.com/libp2p/go-conn-security-multistream v0.2.1/go.mod h1:cR1d8gA0Hr59Fj6NhaTpFhJZrjSYuNmhpT2r25zYR70= -github.com/libp2p/go-conn-security-multistream v0.3.0/go.mod h1:EEP47t4fw/bTelVmEzIDqSe69hO/ip52xBEhZMLWAHM= github.com/libp2p/go-eventbus v0.1.0/go.mod 
h1:vROgu5cs5T7cv7POWlWxBaVLxfSegC5UGQf8A2eEmx4= github.com/libp2p/go-eventbus v0.2.1/go.mod h1:jc2S4SoEVPP48H9Wpzm5aiGwUCBMfGhVhhBjyhhCJs8= github.com/libp2p/go-flow-metrics v0.0.1/go.mod h1:Iv1GH0sG8DtYN3SVJ2eG221wMiNpZxBdp967ls1g+k8= @@ -706,12 +697,10 @@ github.com/libp2p/go-libp2p-autonat v0.2.0/go.mod h1:DX+9teU4pEEoZUqR1PiMlqliONQ github.com/libp2p/go-libp2p-autonat v0.2.1/go.mod h1:MWtAhV5Ko1l6QBsHQNSuM6b1sRkXrpk0/LqCr+vCVxI= github.com/libp2p/go-libp2p-autonat v0.2.2/go.mod h1:HsM62HkqZmHR2k1xgX34WuWDzk/nBwNHoeyyT4IWV6A= github.com/libp2p/go-libp2p-autonat v0.4.2/go.mod h1:YxaJlpr81FhdOv3W3BTconZPfhaYivRdf53g+S2wobk= -github.com/libp2p/go-libp2p-autonat v0.7.0/go.mod h1:uPvPn6J7cN+LCfFwW5tpOYvAz5NvPTc4iBamTV/WDMg= github.com/libp2p/go-libp2p-blankhost v0.0.1/go.mod h1:Ibpbw/7cPPYwFb7PACIWdvxxv0t0XCCI10t7czjAjTc= github.com/libp2p/go-libp2p-blankhost v0.1.1/go.mod h1:pf2fvdLJPsC1FsVrNP3DUUvMzUts2dsLLBEpo1vW1ro= github.com/libp2p/go-libp2p-blankhost v0.1.4/go.mod h1:oJF0saYsAXQCSfDq254GMNmLNz6ZTHTOvtF4ZydUvwU= github.com/libp2p/go-libp2p-blankhost v0.2.0/go.mod h1:eduNKXGTioTuQAUcZ5epXi9vMl+t4d8ugUBRQ4SqaNQ= -github.com/libp2p/go-libp2p-blankhost v0.3.0/go.mod h1:urPC+7U01nCGgJ3ZsV8jdwTp6Ji9ID0dMTvq+aJ+nZU= github.com/libp2p/go-libp2p-circuit v0.0.9/go.mod h1:uU+IBvEQzCu953/ps7bYzC/D/R0Ho2A9LfKVVCatlqU= github.com/libp2p/go-libp2p-circuit v0.1.0/go.mod h1:Ahq4cY3V9VJcHcn1SBXjr78AbFkZeIRmfunbA7pmFh8= github.com/libp2p/go-libp2p-circuit v0.1.4/go.mod h1:CY67BrEjKNDhdTk8UgBX1Y/H5c3xkAcs3gnksxY7osU= @@ -752,7 +741,6 @@ github.com/libp2p/go-libp2p-discovery v0.1.0/go.mod h1:4F/x+aldVHjHDHuX85x1zWoFT github.com/libp2p/go-libp2p-discovery v0.2.0/go.mod h1:s4VGaxYMbw4+4+tsoQTqh7wfxg97AEdo4GYBt6BadWg= github.com/libp2p/go-libp2p-discovery v0.3.0/go.mod h1:o03drFnz9BVAZdzC/QUQ+NeQOu38Fu7LJGEOK2gQltw= github.com/libp2p/go-libp2p-discovery v0.5.0/go.mod h1:+srtPIU9gDaBNu//UHvcdliKBIcr4SfDcm0/PfPJLug= -github.com/libp2p/go-libp2p-discovery v0.6.0/go.mod h1:/u1voHt0tKIe5oIA1RHBKQLVCWPna2dXmPNHc2zR9S8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= github.com/libp2p/go-libp2p-host v0.0.1/go.mod h1:qWd+H1yuU0m5CwzAkvbSjqKairayEHdR5MMl7Cwa7Go= @@ -761,7 +749,6 @@ github.com/libp2p/go-libp2p-interface-connmgr v0.0.1/go.mod h1:GarlRLH0LdeWcLnYM github.com/libp2p/go-libp2p-interface-connmgr v0.0.4/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-connmgr v0.0.5/go.mod h1:GarlRLH0LdeWcLnYM/SaBykKFl9U5JFnbBGruAk/D5k= github.com/libp2p/go-libp2p-interface-pnet v0.0.1/go.mod h1:el9jHpQAXK5dnTpKA4yfCNBZXvrzdOU75zz+C6ryp3k= -github.com/libp2p/go-libp2p-kad-dht v0.21.1/go.mod h1:Oy8wvbdjpB70eS5AaFaI68tOtrdo3KylTvXDjikxqFo= github.com/libp2p/go-libp2p-kad-dht v0.23.0 h1:sxE6LxLopp79eLeV695n7+c77V/Vn4AMF28AdM/XFqM= github.com/libp2p/go-libp2p-kad-dht v0.23.0/go.mod h1:oO5N308VT2msnQI6qi5M61wzPmJYg7Tr9e16m5n7uDU= github.com/libp2p/go-libp2p-kbucket v0.6.0 h1:1uyqIdE6X7ihtbNg+vRc9EQEmZPEBaehvJ2W14rUrRQ= @@ -779,13 +766,11 @@ github.com/libp2p/go-libp2p-mplex v0.4.1/go.mod h1:cmy+3GfqfM1PceHTLL7zQzAAYaryD github.com/libp2p/go-libp2p-nat v0.0.4/go.mod 
h1:N9Js/zVtAXqaeT99cXgTV9e75KpnWCvVOiGzlcHmBbY= github.com/libp2p/go-libp2p-nat v0.0.5/go.mod h1:1qubaE5bTZMJE+E/uu2URroMbzdubFz1ChgiN79yKPE= github.com/libp2p/go-libp2p-nat v0.0.6/go.mod h1:iV59LVhB3IkFvS6S6sauVTSOrNEANnINbI/fkaLimiw= -github.com/libp2p/go-libp2p-nat v0.1.0/go.mod h1:DQzAG+QbDYjN1/C3B6vXucLtz3u9rEonLVPtZVzQqks= github.com/libp2p/go-libp2p-net v0.0.1/go.mod h1:Yt3zgmlsHOgUWSXmt5V/Jpz9upuJBE8EgNU9DrCcR8c= github.com/libp2p/go-libp2p-net v0.0.2/go.mod h1:Yt3zgmlsHOgUWSXmt5V/Jpz9upuJBE8EgNU9DrCcR8c= github.com/libp2p/go-libp2p-netutil v0.0.1/go.mod h1:GdusFvujWZI9Vt0X5BKqwWWmZFxecf9Gt03cKxm2f/Q= github.com/libp2p/go-libp2p-netutil v0.1.0/go.mod h1:3Qv/aDqtMLTUyQeundkKsA+YCThNdbQD54k3TqjpbFU= github.com/libp2p/go-libp2p-noise v0.2.0/go.mod h1:IEbYhBBzGyvdLBoxxULL/SGbJARhUeqlO8lVSREYu2Q= -github.com/libp2p/go-libp2p-noise v0.3.0/go.mod h1:JNjHbociDJKHD64KTkzGnzqJ0FEV5gHJa6AB00kbCNQ= github.com/libp2p/go-libp2p-peer v0.0.1/go.mod h1:nXQvOBbwVqoP+T5Y5nCjeH4sP9IX/J0AMzcDUVruVoo= github.com/libp2p/go-libp2p-peer v0.1.1/go.mod h1:jkF12jGB4Gk/IOo+yomm+7oLWxF278F7UnrYUQ1Q8es= github.com/libp2p/go-libp2p-peer v0.2.0/go.mod h1:RCffaCvUyW2CJmG2gAWVqwePwW7JMgxjsHm7+J5kjWY= @@ -799,7 +784,6 @@ github.com/libp2p/go-libp2p-peerstore v0.2.2/go.mod h1:NQxhNjWxf1d4w6PihR8btWIRj github.com/libp2p/go-libp2p-peerstore v0.2.6/go.mod h1:ss/TWTgHZTMpsU/oKVVPQCGuDHItOpf2W8RxAi50P2s= github.com/libp2p/go-libp2p-peerstore v0.2.7/go.mod h1:ss/TWTgHZTMpsU/oKVVPQCGuDHItOpf2W8RxAi50P2s= github.com/libp2p/go-libp2p-peerstore v0.6.0 h1:HJminhQSGISBIRb93N6WK3t6Fa8OOTnHd/VBjL4mY5A= -github.com/libp2p/go-libp2p-peerstore v0.6.0/go.mod h1:DGEmKdXrcYpK9Jha3sS7MhqYdInxJy84bIPtSu65bKc= github.com/libp2p/go-libp2p-pnet v0.2.0/go.mod h1:Qqvq6JH/oMZGwqs3N1Fqhv8NVhrdYcO0BW4wssv21LA= github.com/libp2p/go-libp2p-protocol v0.0.1/go.mod h1:Af9n4PiruirSDjHycM1QuiMi/1VZNHYcK8cLgFJLZ4s= github.com/libp2p/go-libp2p-protocol v0.1.0/go.mod h1:KQPHpAabB57XQxGrXCNvbL6UEXfQqUgC/1adR2Xtflk= @@ -825,7 +809,6 @@ github.com/libp2p/go-libp2p-swarm v0.2.3/go.mod h1:P2VO/EpxRyDxtChXz/VPVXyTnszHv github.com/libp2p/go-libp2p-swarm v0.2.8/go.mod h1:JQKMGSth4SMqonruY0a8yjlPVIkb0mdNSwckW7OYziM= github.com/libp2p/go-libp2p-swarm v0.3.0/go.mod h1:hdv95GWCTmzkgeJpP+GK/9D9puJegb7H57B5hWQR5Kk= github.com/libp2p/go-libp2p-swarm v0.5.0/go.mod h1:sU9i6BoHE0Ve5SKz3y9WfKrh8dUat6JknzUehFx8xW4= -github.com/libp2p/go-libp2p-swarm v0.9.0/go.mod h1:2f8d8uxTJmpeqHF/1ujjdXZp+98nNIbujVOMEZxCbZ8= github.com/libp2p/go-libp2p-testing v0.0.1/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= github.com/libp2p/go-libp2p-testing v0.0.2/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= github.com/libp2p/go-libp2p-testing v0.0.3/go.mod h1:gvchhf3FQOtBdr+eFUABet5a4MBLK8jM3V4Zghvmi+E= @@ -844,8 +827,6 @@ github.com/libp2p/go-libp2p-transport-upgrader v0.1.1/go.mod h1:IEtA6or8JUbsV07q github.com/libp2p/go-libp2p-transport-upgrader v0.2.0/go.mod h1:mQcrHj4asu6ArfSoMuyojOdjx73Q47cYD7s5+gZOlns= github.com/libp2p/go-libp2p-transport-upgrader v0.3.0/go.mod h1:i+SKzbRnvXdVbU3D1dwydnTmKRPXiAR/fyvi1dXuL4o= github.com/libp2p/go-libp2p-transport-upgrader v0.4.2/go.mod h1:NR8ne1VwfreD5VIWIU62Agt/J18ekORFU/j1i2y8zvk= -github.com/libp2p/go-libp2p-transport-upgrader v0.6.0/go.mod 
h1:1e07y1ZSZdHo9HPbuU8IztM1Cj+DR5twgycb4pnRzRo= -github.com/libp2p/go-libp2p-xor v0.1.0/go.mod h1:LSTM5yRnjGZbWNTA/hRwq2gGFrvRIbQJscoIL/u6InY= github.com/libp2p/go-libp2p-yamux v0.1.2/go.mod h1:xUoV/RmYkg6BW/qGxA9XJyg+HzXFYkeXbnhjmnYzKp8= github.com/libp2p/go-libp2p-yamux v0.1.3/go.mod h1:VGSQVrqkh6y4nm0189qqxMtvyBft44MOYYPpYKXiVt4= github.com/libp2p/go-libp2p-yamux v0.2.0/go.mod h1:Db2gU+XfLpm6E4rG5uGCFX6uXA8MEXOxFcRoXUODaK8= @@ -897,7 +878,6 @@ github.com/libp2p/go-reuseport v0.3.0/go.mod h1:laea40AimhtfEqysZ71UpYj4S+R9VpH8 github.com/libp2p/go-reuseport-transport v0.0.2/go.mod h1:YkbSDrvjUVDL6b8XqriyA20obEtsW9BLkuOUyQAOCbs= github.com/libp2p/go-reuseport-transport v0.0.3/go.mod h1:Spv+MPft1exxARzP2Sruj2Wb5JSyHNncjf1Oi2dEbzM= github.com/libp2p/go-reuseport-transport v0.0.4/go.mod h1:trPa7r/7TJK/d+0hdBLOCGvpQQVOU74OXbNCIMkufGw= -github.com/libp2p/go-reuseport-transport v0.1.0/go.mod h1:vev0C0uMkzriDY59yFHD9v+ujJvYmDQVLowvAjEOmfw= github.com/libp2p/go-sockaddr v0.0.2/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= github.com/libp2p/go-sockaddr v0.1.0/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= github.com/libp2p/go-sockaddr v0.1.1/go.mod h1:syPvOmNs24S3dFVGJA1/mrqdeijPxLV2Le3BRLKd68k= @@ -918,7 +898,6 @@ github.com/libp2p/go-ws-transport v0.1.0/go.mod h1:rjw1MG1LU9YDC6gzmwObkPd/Sqwhw github.com/libp2p/go-ws-transport v0.2.0/go.mod h1:9BHJz/4Q5A9ludYWKoGCFC5gUElzlHoKzu0yY9p/klM= github.com/libp2p/go-ws-transport v0.3.0/go.mod h1:bpgTJmRZAvVHrgHybCVyqoBmyLQ1fiZuEaBYusP5zsk= github.com/libp2p/go-ws-transport v0.4.0/go.mod h1:EcIEKqf/7GDjth6ksuS/6p7R49V4CBY6/E7R/iyhYUA= -github.com/libp2p/go-ws-transport v0.5.0/go.mod h1:I2juo1dNTbl8BKSBYo98XY85kU2xds1iamArLvl8kNg= github.com/libp2p/go-yamux v1.2.1/go.mod h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= github.com/libp2p/go-yamux v1.2.2/go.mod h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= github.com/libp2p/go-yamux v1.2.3/go.mod h1:FGTiPvoV/3DVdgWpX+tM0OW3tsM+W5bSE3gZwqQTcow= @@ -929,7 +908,6 @@ github.com/libp2p/go-yamux v1.3.7/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/h github.com/libp2p/go-yamux v1.4.0/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux v1.4.1/go.mod h1:fr7aVgmdNGJK+N1g+b6DW6VxzbRCjCOejR/hkmpooHE= github.com/libp2p/go-yamux/v2 v2.2.0/go.mod h1:3So6P6TV6r75R9jiBpiIKgU/66lOarCZjqROGxzPpPQ= -github.com/libp2p/go-yamux/v2 v2.3.0/go.mod h1:iTU+lOIn/2h0AgKcL49clNTwfEw+WSfDYrXe05EyKIs= github.com/libp2p/go-yamux/v4 v4.0.0 h1:+Y80dV2Yx/kv7Y7JKu0LECyVdMXm1VUoko+VQ9rBfZQ= github.com/libp2p/go-yamux/v4 v4.0.0/go.mod h1:NWjl8ZTLOGlozrXSOZ/HlfG++39iKNnM5wwmtQP1YB4= github.com/libp2p/zeroconf/v2 v2.2.0 h1:Cup06Jv6u81HLhIj1KasuNM/RHHrJ8T7wOTS4+Tv53Q= @@ -1086,7 +1064,6 @@ github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OS github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/oklog v0.3.2/go.mod 
h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= @@ -1190,7 +1167,6 @@ github.com/quic-go/quic-go v0.33.0 h1:ItNoTDN/Fm/zBlq769lLJc8ECe9gYaW40veHCCco7y github.com/quic-go/quic-go v0.33.0/go.mod h1:YMuhaAV9/jIu0XclDXwZPAsP/2Kgr5yMYhe9oxhhOFA= github.com/quic-go/webtransport-go v0.5.3 h1:5XMlzemqB4qmOlgIus5zB45AcZ2kCgCy2EptUrfOPWU= github.com/quic-go/webtransport-go v0.5.3/go.mod h1:OhmmgJIzTTqXK5xvtuX0oBpLV2GkLWNDA+UeTGJXErU= -github.com/raulk/clock v1.1.0/go.mod h1:3MpVxdZ/ODBQDxbN+kzshf5OSZwPjtMDx6BBXBmOeY0= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= github.com/raulk/go-watchdog v1.3.0/go.mod h1:fIvOnLbF0b0ZwkB9YU4mOW9Did//4vPZtDqv66NfsMU= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= @@ -1232,7 +1208,6 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM= github.com/smartystreets/assertions v1.2.0 h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs= @@ -1349,7 +1324,6 @@ github.com/whyrusleeping/mafmt v1.2.8/go.mod h1:faQJFPbLSxzD9xpA02ttW/tS9vZykNvX github.com/whyrusleeping/mdns v0.0.0-20180901202407-ef14215e6b30/go.mod h1:j4l84WPFclQPj320J9gp0XwNKBb3U0zt5CBqjPp22G4= github.com/whyrusleeping/mdns v0.0.0-20190826153040-b9b60ed33aa9/go.mod h1:j4l84WPFclQPj320J9gp0XwNKBb3U0zt5CBqjPp22G4= github.com/whyrusleeping/multiaddr-filter v0.0.0-20160516205228-e903e4adabd7/go.mod h1:X2c0RVCI1eSUFI8eLcY3c0423ykwiUdxLJtkDvruhjI= -github.com/whyrusleeping/timecache v0.0.0-20160911033111-cfcb2f1abfee/go.mod h1:m2aV4LZI4Aez7dP5PMyVKEHhUyEJ/RjmPEDOpDvudHg= github.com/x-cray/logrus-prefixed-formatter v0.5.2/go.mod h1:2duySbKsL6M18s5GU7VPsoEPHyzalCE06qoARUCeBBE= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= @@ -1400,7 +1374,6 @@ go.uber.org/goleak v1.0.0/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA= -go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod 
h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= @@ -1891,7 +1864,6 @@ gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/http/client_store.go b/http/client.go similarity index 78% rename from http/client_store.go rename to http/client.go index a893ac6010..b1e287ae06 100644 --- a/http/client_store.go +++ b/http/client.go @@ -21,25 +21,26 @@ import ( "github.com/sourcenetwork/defradb/client" ) -var _ client.Store = (*StoreClient)(nil) +var _ client.Store = (*Client)(nil) -type StoreClient struct { +// Client implements the client.Store interface over HTTP. +type Client struct { client *http.Client baseURL *url.URL } -func NewStoreClient(rawURL string) (*StoreClient, error) { +func NewClient(rawURL string) (*Client, error) { baseURL, err := url.Parse(rawURL) if err != nil { return nil, err } - return &StoreClient{ + return &Client{ client: http.DefaultClient, - baseURL: baseURL, + baseURL: baseURL.JoinPath("/api/v0"), }, nil } -func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { +func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.baseURL.JoinPath("p2p", "replicators") body, err := json.Marshal(rep) @@ -59,7 +60,7 @@ func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) return parseResponse(res) } -func (c *StoreClient) DeleteReplicator(ctx context.Context, rep client.Replicator) error { +func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.baseURL.JoinPath("p2p", "replicators") body, err := json.Marshal(rep) @@ -79,7 +80,7 @@ func (c *StoreClient) DeleteReplicator(ctx context.Context, rep client.Replicato return parseResponse(res) } -func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { +func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { methodURL := c.baseURL.JoinPath("p2p", "replicators") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -99,7 +100,7 @@ func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicato return reps, nil } -func (c *StoreClient) AddP2PCollection(ctx context.Context, collectionID string) error { +func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) @@ -115,7 +116,7 @@ func (c *StoreClient) AddP2PCollection(ctx context.Context, collectionID string) return parseResponse(res) } -func (c *StoreClient) RemoveP2PCollection(ctx context.Context, collectionID string) error { +func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) req, err := http.NewRequestWithContext(ctx, 
http.MethodDelete, methodURL.String(), nil) @@ -131,7 +132,7 @@ func (c *StoreClient) RemoveP2PCollection(ctx context.Context, collectionID stri return parseResponse(res) } -func (c *StoreClient) GetAllP2PCollections(ctx context.Context) ([]string, error) { +func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { methodURL := c.baseURL.JoinPath("p2p", "collections") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -151,7 +152,7 @@ func (c *StoreClient) GetAllP2PCollections(ctx context.Context) ([]string, error return cols, nil } -func (c *StoreClient) BasicImport(ctx context.Context, filepath string) error { +func (c *Client) BasicImport(ctx context.Context, filepath string) error { methodURL := c.baseURL.JoinPath("backup", "import") body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) @@ -171,7 +172,7 @@ func (c *StoreClient) BasicImport(ctx context.Context, filepath string) error { return parseResponse(res) } -func (c *StoreClient) BasicExport(ctx context.Context, config *client.BackupConfig) error { +func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { methodURL := c.baseURL.JoinPath("backup", "export") body, err := json.Marshal(config) @@ -191,7 +192,7 @@ func (c *StoreClient) BasicExport(ctx context.Context, config *client.BackupConf return parseResponse(res) } -func (c *StoreClient) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { +func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { methodURL := c.baseURL.JoinPath("schema") req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) @@ -211,7 +212,7 @@ func (c *StoreClient) AddSchema(ctx context.Context, schema string) ([]client.Co return cols, nil } -func (c *StoreClient) PatchSchema(ctx context.Context, patch string) error { +func (c *Client) PatchSchema(ctx context.Context, patch string) error { methodURL := c.baseURL.JoinPath("schema") req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) @@ -227,15 +228,15 @@ func (c *StoreClient) PatchSchema(ctx context.Context, patch string) error { return parseResponse(res) } -func (c *StoreClient) SetMigration(ctx context.Context, config client.LensConfig) error { +func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { return c.LensRegistry().SetMigration(ctx, config) } -func (c *StoreClient) LensRegistry() client.LensRegistry { +func (c *Client) LensRegistry() client.LensRegistry { return NewLensClient(c) } -func (c *StoreClient) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { +func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { methodURL := c.baseURL.JoinPath("collections") methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() @@ -256,7 +257,7 @@ func (c *StoreClient) GetCollectionByName(ctx context.Context, name client.Colle return NewCollectionClient(c, description), nil } -func (c *StoreClient) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { +func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { methodURL := c.baseURL.JoinPath("collections") methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() @@ -277,7 +278,7 @@ func (c 
*StoreClient) GetCollectionBySchemaID(ctx context.Context, schemaId stri return NewCollectionClient(c, description), nil } -func (c *StoreClient) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { +func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { methodURL := c.baseURL.JoinPath("collections") methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() @@ -298,10 +299,10 @@ func (c *StoreClient) GetCollectionByVersionID(ctx context.Context, versionId st return NewCollectionClient(c, description), nil } -func (c *StoreClient) GetAllCollections(ctx context.Context) ([]client.Collection, error) { +func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { methodURL := c.baseURL.JoinPath("collections") - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) if err != nil { return nil, err } @@ -322,7 +323,7 @@ func (c *StoreClient) GetAllCollections(ctx context.Context) ([]client.Collectio return collections, nil } -func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { +func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { methodURL := c.baseURL.JoinPath("indexes") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -342,6 +343,6 @@ func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionN return indexes, nil } -func (c *StoreClient) ExecRequest(context.Context, string) *client.RequestResult { +func (c *Client) ExecRequest(context.Context, string) *client.RequestResult { return nil } diff --git a/http/client_collection.go b/http/client_collection.go index c444344af2..2037c7add2 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -21,13 +21,14 @@ import ( var _ client.Collection = (*CollectionClient)(nil) +// LensClient implements the client.Collection interface over HTTP. type CollectionClient struct { client *http.Client baseURL *url.URL description client.CollectionDescription } -func NewCollectionClient(s *StoreClient, description client.CollectionDescription) *CollectionClient { +func NewCollectionClient(s *Client, description client.CollectionDescription) *CollectionClient { return &CollectionClient{ client: s.client, baseURL: s.baseURL, diff --git a/http/client_lens.go b/http/client_lens.go index b3f6149292..1f3640d70d 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -24,12 +24,13 @@ import ( var _ client.LensRegistry = (*LensClient)(nil) +// LensClient implements the client.LensRegistry interface over HTTP. 
type LensClient struct { client *http.Client baseURL *url.URL } -func NewLensClient(s *StoreClient) *LensClient { +func NewLensClient(s *Client) *LensClient { return &LensClient{ client: s.client, baseURL: s.baseURL.JoinPath("lens"), diff --git a/tests/integration/client.go b/tests/integration/client.go new file mode 100644 index 0000000000..641688017a --- /dev/null +++ b/tests/integration/client.go @@ -0,0 +1,133 @@ +package tests + +import ( + "context" + + blockstore "github.com/ipfs/boxo/blockstore" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" +) + +var _ client.Store = (*Client)(nil) +var _ client.DB = (*Client)(nil) + +// Client splits the client.DB and client.Store interfaces in two so we can test +// different implementations of client.Store without changing integration tests. +type Client struct { + db client.DB + store client.Store +} + +func NewClient(db client.DB, store client.Store) *Client { + return &Client{db, store} +} + +func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { + return c.store.SetReplicator(ctx, rep) +} + +func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + return c.store.DeleteReplicator(ctx, rep) +} + +func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + return c.store.GetAllReplicators(ctx) +} + +func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { + return c.store.AddP2PCollection(ctx, collectionID) +} + +func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { + return c.store.RemoveP2PCollection(ctx, collectionID) +} + +func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { + return c.store.GetAllP2PCollections(ctx) +} + +func (c *Client) BasicImport(ctx context.Context, filepath string) error { + return c.store.BasicImport(ctx, filepath) +} + +func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { + return c.store.BasicExport(ctx, config) +} + +func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + return c.store.AddSchema(ctx, schema) +} + +func (c *Client) PatchSchema(ctx context.Context, patch string) error { + return c.store.PatchSchema(ctx, patch) +} + +func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { + return c.store.SetMigration(ctx, config) +} + +func (c *Client) LensRegistry() client.LensRegistry { + return c.store.LensRegistry() +} + +func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + return c.store.GetCollectionByName(ctx, name) +} + +func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + return c.store.GetCollectionBySchemaID(ctx, schemaId) +} + +func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + return c.store.GetCollectionByVersionID(ctx, versionId) +} + +func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + return c.store.GetAllCollections(ctx) +} + +func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + return c.store.GetAllIndexes(ctx) +} + +func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestResult { + return 
c.store.ExecRequest(ctx, query) +} + +func (c *Client) NewTxn(ctx context.Context, b bool) (datastore.Txn, error) { + return c.db.NewTxn(ctx, b) +} + +func (c *Client) NewConcurrentTxn(ctx context.Context, b bool) (datastore.Txn, error) { + return c.db.NewConcurrentTxn(ctx, b) +} + +func (c *Client) WithTxn(tx datastore.Txn) client.Store { + return c.db.WithTxn(tx) +} + +func (c *Client) Root() datastore.RootStore { + return c.db.Root() +} + +func (c *Client) Blockstore() blockstore.Blockstore { + return c.db.Blockstore() +} + +func (c *Client) Close(ctx context.Context) { + c.db.Close(ctx) +} + +func (c *Client) Events() events.Events { + return c.db.Events() +} + +func (c *Client) MaxTxnRetries() int { + return c.db.MaxTxnRetries() +} + +func (c *Client) PrintDump(ctx context.Context) error { + return c.db.PrintDump(ctx) +} diff --git a/tests/integration/state.go b/tests/integration/state.go index f7d4dd45a0..fbdc87dd8e 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -12,6 +12,7 @@ package tests import ( "context" + "net/http/httptest" "testing" "github.com/sourcenetwork/defradb/client" @@ -33,6 +34,12 @@ type state struct { // The type of database currently being tested. dbt DatabaseType + // The type of client currently being tested. + clientType ClientType + + // Server for testing http clients. + httpServer *httptest.Server + // Any explicit transactions active in this test. // // This is order dependent and the property is accessed by index. @@ -83,6 +90,7 @@ func newState( t *testing.T, testCase TestCase, dbt DatabaseType, + clientType ClientType, collectionNames []string, ) *state { return &state{ @@ -90,6 +98,7 @@ func newState( t: t, testCase: testCase, dbt: dbt, + clientType: clientType, txns: []datastore.Txn{}, allActionsDone: make(chan struct{}), subscriptionResultsChans: []chan func(){}, diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index f3efdcc48b..ae0b53bd1a 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -13,6 +13,7 @@ package tests import ( "context" "fmt" + "net/http/httptest" "os" "path" "reflect" @@ -31,11 +32,14 @@ import ( "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" ) const ( + clientGoEnvName = "DEFRA_CLIENT_GO" + clientHttpEnvName = "DEFRA_CLIENT_HTTP" memoryBadgerEnvName = "DEFRA_BADGER_MEMORY" fileBadgerEnvName = "DEFRA_BADGER_FILE" fileBadgerPathEnvName = "DEFRA_BADGER_FILE_PATH" @@ -56,11 +60,20 @@ const ( badgerFileType DatabaseType = "badger-file-system" ) +type ClientType string + +const ( + goClientType ClientType = "go" + httpClientType ClientType = "http" +) + var ( log = logging.MustNewLogger("tests.integration") badgerInMemory bool badgerFile bool inMemoryStore bool + httpClient bool + goClient bool ) const subscriptionTimeout = 1 * time.Second @@ -101,6 +114,8 @@ var previousTestCaseTestName string func init() { // We use environment variables instead of flags `go test ./...` throws for all packages // that don't have the flag defined + httpClientValue, _ := os.LookupEnv(clientHttpEnvName) + goClientValue, _ := os.LookupEnv(clientGoEnvName) badgerFileValue, _ := os.LookupEnv(fileBadgerEnvName) badgerInMemoryValue, _ := os.LookupEnv(memoryBadgerEnvName) databaseDir, _ = os.LookupEnv(fileBadgerPathEnvName) 
@@ -111,6 +126,8 @@ func init() { setupOnlyValue, _ := os.LookupEnv(setupOnlyEnvName) targetBranchValue, targetBranchSpecified := os.LookupEnv(targetBranchEnvName) + httpClient = getBool(httpClientValue) + goClient = getBool(goClientValue) badgerFile = getBool(badgerFileValue) badgerInMemory = getBool(badgerInMemoryValue) inMemoryStore = getBool(inMemoryStoreValue) @@ -132,6 +149,11 @@ func init() { badgerFile = false inMemoryStore = true } + // default is to run against all + if !goClient && !httpClient && !DetectDbChanges { + goClient = true + httpClient = true + } if DetectDbChanges { detectDbChangesInit(repositoryValue, targetBranchValue) @@ -217,6 +239,20 @@ func newBadgerFileDB(ctx context.Context, t testing.TB, path string) (client.DB, return db, nil } +func GetClientTypes() []ClientType { + clients := []ClientType{} + + if httpClient { + clients = append(clients, httpClientType) + } + + if goClient { + clients = append(clients, goClientType) + } + + return clients +} + func GetDatabaseTypes() []DatabaseType { databases := []DatabaseType{} @@ -235,31 +271,30 @@ func GetDatabaseTypes() []DatabaseType { return databases } -func GetDatabase(ctx context.Context, t *testing.T, dbt DatabaseType) (client.DB, string, error) { - switch dbt { +func GetDatabase(s *state) (cdb client.DB, path string, err error) { + switch s.dbt { case badgerIMType: - db, err := NewBadgerMemoryDB(ctx, db.WithUpdateEvents()) - if err != nil { - return nil, "", err - } - return db, "", nil + cdb, err = NewBadgerMemoryDB(s.ctx, db.WithUpdateEvents()) case badgerFileType: - db, path, err := NewBadgerFileDB(ctx, t) - if err != nil { - return nil, "", err - } - return db, path, nil + cdb, path, err = NewBadgerFileDB(s.ctx, s.t) case defraIMType: - db, err := NewInMemoryDB(ctx) + cdb, err = NewInMemoryDB(s.ctx) + } + + switch s.clientType { + case httpClientType: + s.httpServer = httptest.NewServer(http.NewServer(cdb)) + // TODO close the server + store, err := http.NewClient(s.httpServer.URL) if err != nil { return nil, "", err } - return db, "", nil + cdb = NewClient(cdb, store) } - return nil, "", nil + return } // ExecuteTestCase executes the given TestCase against the configured database @@ -278,14 +313,18 @@ func ExecuteTestCase( } ctx := context.Background() + cts := GetClientTypes() dbts := GetDatabaseTypes() // Assert that this is not empty to protect against accidental mis-configurations, // otherwise an empty set would silently pass all the tests. require.NotEmpty(t, dbts) - for _, dbt := range dbts { - executeTestCase(ctx, t, collectionNames, testCase, dbt) + for _, ct := range cts { + for _, dbt := range dbts { + executeTestCase(ctx, t, collectionNames, testCase, dbt, ct) + } } + } func executeTestCase( @@ -294,13 +333,14 @@ func executeTestCase( collectionNames []string, testCase TestCase, dbt DatabaseType, + clientType ClientType, ) { log.Info(ctx, testCase.Description, logging.NewKV("Database", dbt)) flattenActions(&testCase) startActionIndex, endActionIndex := getActionRange(testCase) - s := newState(ctx, t, testCase, dbt, collectionNames) + s := newState(ctx, t, testCase, dbt, clientType, collectionNames) setStartingNodes(s) // It is very important that the databases are always closed, otherwise resources will leak @@ -621,7 +661,7 @@ func setStartingNodes( // If nodes have not been explicitly configured via actions, setup a default one. 
if !hasExplicitNode { - db, path, err := GetDatabase(s.ctx, s.t, s.dbt) + db, path, err := GetDatabase(s) require.Nil(s.t, err) s.nodes = append(s.nodes, &net.Node{ @@ -644,7 +684,7 @@ func restartNodes( for i := len(s.nodes) - 1; i >= 0; i-- { originalPath := databaseDir databaseDir = s.dbPaths[i] - db, _, err := GetDatabase(s.ctx, s.t, s.dbt) + db, _, err := GetDatabase(s) require.Nil(s.t, err) databaseDir = originalPath @@ -762,7 +802,7 @@ func configureNode( // an in memory store. cfg.Datastore.Badger.Path = s.t.TempDir() - db, path, err := GetDatabase(s.ctx, s.t, s.dbt) //disable change dector, or allow it? + db, path, err := GetDatabase(s) //disable change dector, or allow it? require.NoError(s.t, err) var n *net.Node From 466fe783a5ac5dca35d3d64ebd29e25026f56588 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 11 Aug 2023 12:15:04 -0700 Subject: [PATCH 004/107] fix linter errors --- http/client.go | 30 +++++++++++++++--------------- http/client_collection.go | 14 +++++++++++--- http/client_lens.go | 17 +++++++++++++---- tests/integration/client.go | 10 ++++++++++ tests/integration/utils2.go | 1 - 5 files changed, 49 insertions(+), 23 deletions(-) diff --git a/http/client.go b/http/client.go index b1e287ae06..35e6338685 100644 --- a/http/client.go +++ b/http/client.go @@ -55,7 +55,7 @@ func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -75,7 +75,7 @@ func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) er if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -91,7 +91,7 @@ func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, er if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var reps []client.Replicator if err := parseJsonResponse(res, &reps); err != nil { @@ -111,7 +111,7 @@ func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) erro if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -127,7 +127,7 @@ func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) e if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -143,7 +143,7 @@ func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var cols []string if err := parseJsonResponse(res, &cols); err != nil { @@ -167,7 +167,7 @@ func (c *Client) BasicImport(ctx context.Context, filepath string) error { if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -187,7 +187,7 @@ func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) e if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -203,7 +203,7 @@ func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.Collect if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var cols []client.CollectionDescription if err := parseJsonResponse(res, &cols); err != nil { @@ -223,7 +223,7 @@ func (c *Client) PatchSchema(ctx 
context.Context, patch string) error { if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -248,7 +248,7 @@ func (c *Client) GetCollectionByName(ctx context.Context, name client.Collection if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var description client.CollectionDescription if err := parseJsonResponse(res, &description); err != nil { @@ -269,7 +269,7 @@ func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) ( if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var description client.CollectionDescription if err := parseJsonResponse(res, &description); err != nil { @@ -290,7 +290,7 @@ func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var description client.CollectionDescription if err := parseJsonResponse(res, &description); err != nil { @@ -310,7 +310,7 @@ func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, er if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var descriptions []client.CollectionDescription if err := parseJsonResponse(res, &descriptions); err != nil { @@ -334,7 +334,7 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ if err != nil { return nil, err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck var indexes map[client.CollectionName][]client.IndexDescription if err := parseJsonResponse(res, &indexes); err != nil { diff --git a/http/client_collection.go b/http/client_collection.go index 2037c7add2..23d3d7436a 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -21,7 +21,7 @@ import ( var _ client.Collection = (*CollectionClient)(nil) -// LensClient implements the client.Collection interface over HTTP. +// CollectionClient implements the client.Collection interface over HTTP. 
type CollectionClient struct { client *http.Client baseURL *url.URL @@ -84,11 +84,19 @@ func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater s return nil, nil } -func (c *CollectionClient) UpdateWithFilter(ctx context.Context, filter any, updater string) (*client.UpdateResult, error) { +func (c *CollectionClient) UpdateWithFilter( + ctx context.Context, + filter any, + updater string, +) (*client.UpdateResult, error) { return nil, nil } -func (c *CollectionClient) UpdateWithKey(ctx context.Context, key client.DocKey, updater string) (*client.UpdateResult, error) { +func (c *CollectionClient) UpdateWithKey( + ctx context.Context, + key client.DocKey, + updater string, +) (*client.UpdateResult, error) { return nil, nil } diff --git a/http/client_lens.go b/http/client_lens.go index 1f3640d70d..e0abd50d3c 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -17,9 +17,10 @@ import ( "net/http" "net/url" + "github.com/sourcenetwork/immutable/enumerable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/immutable/enumerable" ) var _ client.LensRegistry = (*LensClient)(nil) @@ -56,7 +57,7 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) if err != nil { return err } - defer res.Body.Close() + defer res.Body.Close() //nolint:errcheck return parseResponse(res) } @@ -65,11 +66,19 @@ func (c *LensClient) ReloadLenses(context.Context) error { return nil } -func (c *LensClient) MigrateUp(context.Context, enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) { +func (c *LensClient) MigrateUp( + context.Context, + enumerable.Enumerable[map[string]any], + string, +) (enumerable.Enumerable[map[string]any], error) { return nil, nil } -func (c *LensClient) MigrateDown(context.Context, enumerable.Enumerable[map[string]any], string) (enumerable.Enumerable[map[string]any], error) { +func (c *LensClient) MigrateDown( + context.Context, + enumerable.Enumerable[map[string]any], + string, +) (enumerable.Enumerable[map[string]any], error) { return nil, nil } diff --git a/tests/integration/client.go b/tests/integration/client.go index 641688017a..09da5f222d 100644 --- a/tests/integration/client.go +++ b/tests/integration/client.go @@ -1,3 +1,13 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ package tests import ( diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index ae0b53bd1a..ef50b4261e 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -324,7 +324,6 @@ func ExecuteTestCase( executeTestCase(ctx, t, collectionNames, testCase, dbt, ct) } } - } func executeTestCase( From d86ee90a2bc7a729cd4c53c8d1b7b6bdb3e228f9 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 11 Aug 2023 13:54:07 -0700 Subject: [PATCH 005/107] fixes to make more tests pass --- http/client.go | 73 ++++++++++++++++++++++++++++++++++++++++++++++++-- http/server.go | 36 +++++++++++++++++++++++++ 2 files changed, 107 insertions(+), 2 deletions(-) diff --git a/http/client.go b/http/client.go index 35e6338685..382c1bed19 100644 --- a/http/client.go +++ b/http/client.go @@ -14,6 +14,7 @@ import ( "bytes" "context" "encoding/json" + "fmt" "net/http" "net/url" "strings" @@ -51,6 +52,9 @@ func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error if err != nil { return err } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -71,6 +75,9 @@ func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) er if err != nil { return err } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -87,6 +94,8 @@ func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, er if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -107,6 +116,8 @@ func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) erro if err != nil { return err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -123,6 +134,8 @@ func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) e if err != nil { return err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -139,6 +152,8 @@ func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -163,6 +178,9 @@ func (c *Client) BasicImport(ctx context.Context, filepath string) error { if err != nil { return err } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -183,6 +201,9 @@ func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) e if err != nil { return err } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -199,6 +220,8 @@ func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.Collect if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -219,6 +242,8 @@ func (c *Client) PatchSchema(ctx context.Context, patch string) error { if err != nil { return err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return err @@ -244,6 +269,8 @@ func (c *Client) GetCollectionByName(ctx 
context.Context, name client.Collection if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -265,6 +292,8 @@ func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) ( if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -286,6 +315,8 @@ func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -306,6 +337,8 @@ func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, er if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -330,6 +363,8 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ if err != nil { return nil, err } + req.Header.Add("Accept", "application/json") + res, err := c.client.Do(req) if err != nil { return nil, err @@ -343,6 +378,40 @@ func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][ return indexes, nil } -func (c *Client) ExecRequest(context.Context, string) *client.RequestResult { - return nil +func (c *Client) ExecRequest(ctx context.Context, query string) (result *client.RequestResult) { + methodURL := c.baseURL.JoinPath("graphql") + result = &client.RequestResult{} + + body, err := json.Marshal(&GraphQLRequest{query}) + if err != nil { + result.GQL.Errors = []error{err} + return + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + result.GQL.Errors = []error{err} + return + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + result.GQL.Errors = []error{err} + return + } + defer res.Body.Close() //nolint:errcheck + + // TODO handle subscriptions + + var response GraphQLResponse + if err = parseJsonResponse(res, &response); err != nil { + result.GQL.Errors = []error{err} + return + } + result.GQL.Data = response.Data + for _, err := range response.Errors { + result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) + } + return } diff --git a/http/server.go b/http/server.go index ccb4bfeba0..d8a78287f0 100644 --- a/http/server.go +++ b/http/server.go @@ -19,6 +19,15 @@ import ( "github.com/sourcenetwork/defradb/client" ) +type GraphQLRequest struct { + Query string `json:"query" form:"query"` +} + +type GraphQLResponse struct { + Errors []string `json:"errors,omitempty"` + Data []map[string]any `json:"data"` +} + type Server struct { store client.Store } @@ -44,6 +53,10 @@ func NewServer(store client.Store) *gin.Engine { lens_migration := lens.Group("/migration") lens_migration.POST("/", server.SetMigration) + graphQL := api.Group("/graphql") + graphQL.GET("/", server.ExecRequest) + graphQL.POST("/", server.ExecRequest) + p2p := api.Group("/p2p") p2p_replicators := p2p.Group("/replicators") p2p_replicators.GET("/replicators", server.GetAllReplicators) @@ -237,3 +250,26 @@ func (s *Server) GetAllIndexes(c *gin.Context) { } c.JSON(http.StatusOK, indexes) } + +func (s *Server) ExecRequest(c *gin.Context) { + var request GraphQLRequest + if err := c.ShouldBind(&request); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": 
err.Error()}) + return + } + if request.Query == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing request"}) + return + } + result := s.store.ExecRequest(c.Request.Context(), request.Query) + if result.Pub != nil { + // TODO handle subscription + return + } + + var errors []string + for _, err := range result.GQL.Errors { + errors = append(errors, err.Error()) + } + c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) +} From ef58ecdbc56a5c25eae49bdf46d44c7184455918 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 14 Aug 2023 11:29:46 -0700 Subject: [PATCH 006/107] add http subscription logic --- http/client.go | 36 ++++++++++++++++++++++++++++++++++-- http/server.go | 32 +++++++++++++++++++++++++++++++- tests/integration/utils2.go | 4 +++- 3 files changed, 68 insertions(+), 4 deletions(-) diff --git a/http/client.go b/http/client.go index 382c1bed19..7cbd5ea5fc 100644 --- a/http/client.go +++ b/http/client.go @@ -11,6 +11,7 @@ package http import ( + "bufio" "bytes" "context" "encoding/json" @@ -20,6 +21,7 @@ import ( "strings" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/events" ) var _ client.Store = (*Client)(nil) @@ -393,6 +395,7 @@ func (c *Client) ExecRequest(ctx context.Context, query string) (result *client. return } req.Header.Add("Accept", "application/json") + req.Header.Add("Accept", "text/event-stream") req.Header.Add("Content-Type", "application/json") res, err := c.client.Do(req) @@ -402,8 +405,9 @@ func (c *Client) ExecRequest(ctx context.Context, query string) (result *client. } defer res.Body.Close() //nolint:errcheck - // TODO handle subscriptions - + if res.Header.Get("Content-Type") == "text/event-stream" { + return c.execRequestSubscription(ctx, res) + } var response GraphQLResponse if err = parseJsonResponse(res, &response); err != nil { result.GQL.Errors = []error{err} @@ -415,3 +419,31 @@ func (c *Client) ExecRequest(ctx context.Context, query string) (result *client. 
} return } + +func (c *Client) execRequestSubscription(ctx context.Context, res *http.Response) (result *client.RequestResult) { + result = &client.RequestResult{} + + pubChan := events.New[events.Update](0, 0) + scanner := bufio.NewScanner(res.Body) + + go func() { + for scanner.Scan() { + line := scanner.Text() + if !strings.HasPrefix(line, "data:") { + continue + } + var item events.Update + if err := json.Unmarshal([]byte(line[6:]), &item); err != nil { + continue + } + pubChan.Publish(item) + } + }() + + pub, err := events.NewPublisher[events.Update](pubChan, 0) + if err != nil { + return result + } + result.Pub = pub + return result +} diff --git a/http/server.go b/http/server.go index d8a78287f0..fcf857146f 100644 --- a/http/server.go +++ b/http/server.go @@ -11,12 +11,15 @@ package http import ( + "encoding/json" + "fmt" "io" "net/http" "github.com/gin-gonic/gin" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/events" ) type GraphQLRequest struct { @@ -263,7 +266,7 @@ func (s *Server) ExecRequest(c *gin.Context) { } result := s.store.ExecRequest(c.Request.Context(), request.Query) if result.Pub != nil { - // TODO handle subscription + s.execRequestSubscription(c, result.Pub) return } @@ -273,3 +276,30 @@ func (s *Server) ExecRequest(c *gin.Context) { } c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) } + +func (s *Server) execRequestSubscription(c *gin.Context, pub *events.Publisher[events.Update]) { + c.Header("Content-Type", "text/event-stream") + c.Header("Cache-Control", "no-cache") + c.Header("Connection", "keep-alive") + + c.Status(http.StatusOK) + c.Writer.Flush() + + c.Stream(func(w io.Writer) bool { + select { + case <-c.Request.Context().Done(): + pub.Unsubscribe() + return false + case item, open := <-pub.Stream(): + if !open { + return false + } + data, err := json.Marshal(item) + if err != nil { + return false + } + fmt.Fprintf(w, "data: %s\n\n", data) + return true + } + }) +} diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index ef50b4261e..3c593e3822 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -286,7 +286,6 @@ func GetDatabase(s *state) (cdb client.DB, path string, err error) { switch s.clientType { case httpClientType: s.httpServer = httptest.NewServer(http.NewServer(cdb)) - // TODO close the server store, err := http.NewClient(s.httpServer.URL) if err != nil { return nil, "", err @@ -520,6 +519,9 @@ func closeNodes( err := node.Close() require.NoError(s.t, err) } + if s.httpServer != nil { + s.httpServer.Close() + } node.DB.Close(s.ctx) } } From da866b51c1c050d381d1c65883d9dc25b62fc191 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 14 Aug 2023 15:54:41 -0700 Subject: [PATCH 007/107] implement more http collection methods --- http/client_collection.go | 365 +++++++++++++++++++++++++++++++++++--- http/server.go | 11 ++ http/server_collection.go | 326 ++++++++++++++++++++++++++++++++++ 3 files changed, 673 insertions(+), 29 deletions(-) create mode 100644 http/server_collection.go diff --git a/http/client_collection.go b/http/client_collection.go index 23d3d7436a..8aae354f8f 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -11,11 +11,15 @@ package http import ( + "bytes" "context" + "encoding/json" + "fmt" "net/http" "net/url" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/datastore" ) @@ -56,32 
+60,190 @@ func (c *CollectionClient) SchemaID() string { return c.description.Schema.SchemaID } -func (c *CollectionClient) Create(context.Context, *client.Document) error { - return nil +func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) error { + methodURL := c.baseURL.JoinPath("collections", c.description.Name) + + docMap, err := doc.ToMap() + if err != nil { + return err + } + body, err := json.Marshal(docMap) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() //nolint:errcheck + + return parseResponse(res) } -func (c *CollectionClient) CreateMany(context.Context, []*client.Document) error { - return nil +func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Document) error { + methodURL := c.baseURL.JoinPath("collections", c.description.Name) + + var docMapList []map[string]any + for _, doc := range docs { + docMap, err := doc.ToMap() + if err != nil { + return err + } + docMapList = append(docMapList, docMap) + } + body, err := json.Marshal(docMapList) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() //nolint:errcheck + + return parseResponse(res) } -func (c *CollectionClient) Update(context.Context, *client.Document) error { - return nil +func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) error { + methodURL := c.baseURL.JoinPath("collections", c.description.Name, doc.Key().String()) + + docMap, err := doc.ToMap() + if err != nil { + return err + } + body, err := json.Marshal(docMap) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() //nolint:errcheck + + return parseResponse(res) } -func (c *CollectionClient) Save(context.Context, *client.Document) error { - return nil +func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error { + methodURL := c.baseURL.JoinPath("collections", c.description.Name, doc.Key().String()) + + docMap, err := doc.ToMap() + if err != nil { + return err + } + body, err := json.Marshal(docMap) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return err + } + defer res.Body.Close() //nolint:errcheck + + return parseResponse(res) } -func (c *CollectionClient) Delete(context.Context, client.DocKey) (bool, error) { - return false, nil +func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { + methodURL := 
c.baseURL.JoinPath("collections", c.description.Name, docKey.String()) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return false, err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return false, err + } + defer res.Body.Close() //nolint:errcheck + + err = parseResponse(res) + if err != nil { + return false, err + } + return true, nil } -func (c *CollectionClient) Exists(context.Context, client.DocKey) (bool, error) { - return false, nil +func (c *CollectionClient) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { + _, err := c.Get(ctx, docKey, false) + if err != nil { + return false, err + } + return true, nil } func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { - return nil, nil + switch t := target.(type) { + case string, map[string]any, *request.Filter: + return c.UpdateWithFilter(ctx, t, updater) + case client.DocKey: + return c.UpdateWithKey(ctx, t, updater) + case []client.DocKey: + return c.UpdateWithKeys(ctx, t, updater) + default: + return nil, client.ErrInvalidUpdateTarget + } +} + +func (c *CollectionClient) updateWith( + ctx context.Context, + request CollectionUpdateRequest, +) (*client.UpdateResult, error) { + methodURL := c.baseURL.JoinPath("collections", c.description.Name) + + body, err := json.Marshal(request) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + req.Header.Add("Accept", "application/json") + req.Header.Add("Content-Type", "application/json") + + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + + var result client.UpdateResult + if err := parseJsonResponse(res, &result); err != nil { + return nil, err + } + return &result, nil } func (c *CollectionClient) UpdateWithFilter( @@ -89,7 +251,10 @@ func (c *CollectionClient) UpdateWithFilter( filter any, updater string, ) (*client.UpdateResult, error) { - return nil, nil + return c.updateWith(ctx, CollectionUpdateRequest{ + Filter: filter, + Updater: updater, + }) } func (c *CollectionClient) UpdateWithKey( @@ -97,49 +262,191 @@ func (c *CollectionClient) UpdateWithKey( key client.DocKey, updater string, ) (*client.UpdateResult, error) { - return nil, nil + return c.updateWith(ctx, CollectionUpdateRequest{ + Key: key.String(), + Updater: updater, + }) } -func (c *CollectionClient) UpdateWithKeys(context.Context, []client.DocKey, string) (*client.UpdateResult, error) { - return nil, nil +func (c *CollectionClient) UpdateWithKeys( + ctx context.Context, + docKeys []client.DocKey, + updater string, +) (*client.UpdateResult, error) { + var keys []string + for _, key := range docKeys { + keys = append(keys, key.String()) + } + return c.updateWith(ctx, CollectionUpdateRequest{ + Keys: keys, + Updater: updater, + }) } func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { - return nil, nil + switch t := target.(type) { + case string, map[string]any, *request.Filter: + return c.DeleteWithFilter(ctx, t) + case client.DocKey: + return c.DeleteWithKey(ctx, t) + case []client.DocKey: + return c.DeleteWithKeys(ctx, t) + default: + return nil, client.ErrInvalidDeleteTarget + } +} + +func (c *CollectionClient) deleteWith( + 
	ctx context.Context,
+	request CollectionDeleteRequest,
+) (*client.DeleteResult, error) {
+	methodURL := c.baseURL.JoinPath("collections", c.description.Name)
+
+	body, err := json.Marshal(request)
+	if err != nil {
+		return nil, err
+	}
+	req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close() //nolint:errcheck
+
+	var result client.DeleteResult
+	if err := parseJsonResponse(res, &result); err != nil {
+		return nil, err
+	}
+	return &result, nil
 }
 
 func (c *CollectionClient) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) {
-	return nil, nil
+	return c.deleteWith(ctx, CollectionDeleteRequest{
+		Filter: filter,
+	})
 }
 
-func (c *CollectionClient) DeleteWithKey(context.Context, client.DocKey) (*client.DeleteResult, error) {
-	return nil, nil
+func (c *CollectionClient) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) {
+	return c.deleteWith(ctx, CollectionDeleteRequest{
+		Key: docKey.String(),
+	})
 }
 
-func (c *CollectionClient) DeleteWithKeys(context.Context, []client.DocKey) (*client.DeleteResult, error) {
-	return nil, nil
+func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) {
+	var keys []string
+	for _, key := range docKeys {
+		keys = append(keys, key.String())
+	}
+	return c.deleteWith(ctx, CollectionDeleteRequest{
+		Keys: keys,
+	})
 }
 
 func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) {
-	return nil, nil
+	methodURL := c.baseURL.JoinPath("collections", c.description.Name, key.String())
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close() //nolint:errcheck
+
+	var docMap map[string]any
+	if err := parseJsonResponse(res, &docMap); err != nil {
+		return nil, err
+	}
+	return client.NewDocFromMap(docMap)
 }
 
 func (c *CollectionClient) WithTxn(datastore.Txn) client.Collection {
-	return nil
+	return c
 }
 
 func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) {
-	return nil, nil
+	return nil, fmt.Errorf("not implemented")
 }
 
-func (c *CollectionClient) CreateIndex(context.Context, client.IndexDescription) (client.IndexDescription, error) {
-	return client.IndexDescription{}, nil
+func (c *CollectionClient) CreateIndex(
+	ctx context.Context,
+	indexDesc client.IndexDescription,
+) (client.IndexDescription, error) {
+	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes")
+
+	body, err := json.Marshal(&indexDesc)
+	if err != nil {
+		return client.IndexDescription{}, err
+	}
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body))
+	if err != nil {
+		return client.IndexDescription{}, err
+	}
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.client.Do(req)
+	if err != nil {
+		return client.IndexDescription{}, err
+	}
+	defer res.Body.Close() //nolint:errcheck
+
+	var index client.IndexDescription
+	if err := parseJsonResponse(res, &index); err != nil {
+		return client.IndexDescription{}, err
+	}
+	return index, nil
 }
 
 func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) error {
-	return nil
+	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes", indexName)
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil)
+	if err != nil {
+		return err
+	}
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.client.Do(req)
+	if err != nil {
+		return err
+	}
+	defer res.Body.Close() //nolint:errcheck
+
+	return parseResponse(res)
 }
 
 func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) {
+	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes")
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Add("Accept", "application/json")
+	req.Header.Add("Content-Type", "application/json")
+
+	res, err := c.client.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer res.Body.Close() //nolint:errcheck
+
+	var indexes []client.IndexDescription
+	if err := parseJsonResponse(res, &indexes); err != nil {
+		return nil, err
+	}
 	return c.description.Indexes, nil
 }
diff --git a/http/server.go b/http/server.go
index fcf857146f..6f8728407f 100644
--- a/http/server.go
+++ b/http/server.go
@@ -37,6 +37,7 @@ type Server struct {
 
 func NewServer(store client.Store) *gin.Engine {
 	server := &Server{store}
+	collectionServer := &CollectionServer{store}
 
 	router := gin.Default()
 	api := router.Group("/api/v0")
@@ -51,6 +52,16 @@ func NewServer(store client.Store) *gin.Engine {
 
 	collections := api.Group("/collections")
 	collections.GET("/", server.GetCollection)
+	collections.POST("/:name", collectionServer.Create)
+	collections.PATCH("/:name", collectionServer.UpdateWith)
+	collections.DELETE("/:name", collectionServer.DeleteWith)
+	collections.POST("/:name/indexes", collectionServer.CreateIndex)
+	collections.GET("/:name/indexes", collectionServer.GetIndexes)
+	collections.DELETE("/:name/indexes/:index", collectionServer.DropIndex)
+	collections.GET("/:name/:key", collectionServer.Get)
+	collections.POST("/:name/:key", collectionServer.Save)
+	collections.PATCH("/:name/:key", collectionServer.Update)
+	collections.DELETE("/:name/:key", collectionServer.Delete)
 
 	lens := api.Group("/lens")
 	lens_migration := lens.Group("/migration")
diff --git a/http/server_collection.go b/http/server_collection.go
new file mode 100644
index 0000000000..d8f42fd143
--- /dev/null
+++ b/http/server_collection.go
@@ -0,0 +1,326 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package http + +import ( + "net/http" + + "github.com/gin-gonic/gin" + + "github.com/sourcenetwork/defradb/client" +) + +type CollectionServer struct { + store client.Store +} + +type CollectionDeleteRequest struct { + Key string `json:"key"` + Keys []string `json:"keys"` + Filter any `json:"filter"` +} + +type CollectionUpdateRequest struct { + Key string `json:"key"` + Keys []string `json:"keys"` + Filter any `json:"filter"` + Updater string `json:"updater"` +} + +func (s *CollectionServer) Create(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var body any + if err := c.ShouldBindJSON(&body); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + switch t := body.(type) { + case []map[string]any: + var docList []*client.Document + for _, docMap := range t { + doc, err := client.NewDocFromMap(docMap) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + docList = append(docList, doc) + } + if err := col.CreateMany(c.Request.Context(), docList); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + case map[string]any: + doc, err := client.NewDocFromMap(t) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if err := col.Create(c.Request.Context(), doc); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + default: + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request body"}) + return + } + c.Status(http.StatusOK) +} + +func (s *CollectionServer) Save(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var docMap map[string]any + if err := c.ShouldBindJSON(&docMap); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + doc, err := client.NewDocFromMap(docMap) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if doc.Key().String() != c.Param("key") { + c.JSON(http.StatusBadRequest, gin.H{"error": "document key does not match"}) + return + } + err = col.Save(c.Request.Context(), doc) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *CollectionServer) DeleteWith(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var request CollectionDeleteRequest + if err := c.ShouldBind(&request); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + switch { + case request.Filter != nil: + result, err := col.DeleteWith(c.Request.Context(), request.Filter) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + case request.Key != "": + docKey, err := client.NewDocKeyFromString(request.Key) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + result, err := col.DeleteWith(c.Request.Context(), docKey) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + case request.Keys != nil: + var docKeys 
[]client.DocKey + for _, key := range request.Keys { + docKey, err := client.NewDocKeyFromString(key) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + docKeys = append(docKeys, docKey) + } + result, err := col.DeleteWith(c.Request.Context(), docKeys) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + default: + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid delete request"}) + } +} + +func (s *CollectionServer) UpdateWith(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var request CollectionUpdateRequest + if err := c.ShouldBind(&request); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + + switch { + case request.Filter != nil: + result, err := col.UpdateWith(c.Request.Context(), request.Filter, request.Updater) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + case request.Key != "": + docKey, err := client.NewDocKeyFromString(request.Key) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + result, err := col.UpdateWith(c.Request.Context(), docKey, request.Updater) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + case request.Keys != nil: + var docKeys []client.DocKey + for _, key := range request.Keys { + docKey, err := client.NewDocKeyFromString(key) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + docKeys = append(docKeys, docKey) + } + result, err := col.UpdateWith(c.Request.Context(), docKeys, request.Updater) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) + default: + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid update request"}) + } +} + +func (s *CollectionServer) Update(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var docMap map[string]any + if err := c.ShouldBindJSON(&docMap); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + doc, err := client.NewDocFromMap(docMap) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if doc.Key().String() != c.Param("key") { + c.JSON(http.StatusBadRequest, gin.H{"error": "document key does not match"}) + return + } + err = col.Update(c.Request.Context(), doc) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *CollectionServer) Delete(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + docKey, err := client.NewDocKeyFromString(c.Param("key")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + _, err = col.Delete(c.Request.Context(), docKey) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *CollectionServer) Get(c *gin.Context) { + col, err := 
s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + docKey, err := client.NewDocKeyFromString(c.Param("key")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + _, err = col.Get(c.Request.Context(), docKey, c.Query("deleted") != "") + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *CollectionServer) CreateIndex(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + var indexDesc client.IndexDescription + if err := c.ShouldBind(&indexDesc); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + index, err := col.CreateIndex(c.Request.Context(), indexDesc) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, index) +} + +func (s *CollectionServer) GetIndexes(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + indexes, err := col.GetIndexes(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, indexes) +} + +func (s *CollectionServer) DropIndex(c *gin.Context) { + col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err = col.DropIndex(c.Request.Context(), c.Param("index")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} From 1c3c8281377d8e7098e6f022aefa932a2caece64 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 14 Aug 2023 16:19:47 -0700 Subject: [PATCH 008/107] implement lens http server and client --- http/client_lens.go | 118 +++++++++++++++++++++++++++++++++++++------- http/server.go | 23 +++------ http/server_lens.go | 97 ++++++++++++++++++++++++++++++++++++ 3 files changed, 205 insertions(+), 33 deletions(-) create mode 100644 http/server_lens.go diff --git a/http/client_lens.go b/http/client_lens.go index e0abd50d3c..7caaf718ce 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -34,16 +34,16 @@ type LensClient struct { func NewLensClient(s *Client) *LensClient { return &LensClient{ client: s.client, - baseURL: s.baseURL.JoinPath("lens"), + baseURL: s.baseURL, } } func (c *LensClient) WithTxn(datastore.Txn) client.LensRegistry { - return nil + return c } func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { - methodURL := c.baseURL.JoinPath("migration") + methodURL := c.baseURL.JoinPath("lens", "migrate") body, err := json.Marshal(config) if err != nil { @@ -62,30 +62,114 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) return parseResponse(res) } -func (c *LensClient) ReloadLenses(context.Context) error { - return nil +func (c *LensClient) ReloadLenses(ctx context.Context) error { + methodURL := c.baseURL.JoinPath("lens", "reload") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + res, err := c.client.Do(req) + if err != nil { + return err + } + defer 
res.Body.Close() //nolint:errcheck + + return parseResponse(res) } func (c *LensClient) MigrateUp( - context.Context, - enumerable.Enumerable[map[string]any], - string, + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - return nil, nil + methodURL := c.baseURL.JoinPath("lens", schemaVersionID, "up") + + body, err := json.Marshal(src) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + + var result enumerable.Enumerable[map[string]any] + if err := parseJsonResponse(res, result); err != nil { + return nil, err + } + return result, nil } func (c *LensClient) MigrateDown( - context.Context, - enumerable.Enumerable[map[string]any], - string, + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - return nil, nil + methodURL := c.baseURL.JoinPath("lens", schemaVersionID, "down") + + body, err := json.Marshal(src) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + + var result enumerable.Enumerable[map[string]any] + if err := parseJsonResponse(res, result); err != nil { + return nil, err + } + return result, nil } -func (c *LensClient) Config(context.Context) ([]client.LensConfig, error) { - return nil, nil +func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { + methodURL := c.baseURL.JoinPath("lens") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + + var cfgs []client.LensConfig + if err := parseJsonResponse(res, cfgs); err != nil { + return nil, err + } + return cfgs, nil } -func (c *LensClient) HasMigration(context.Context, string) (bool, error) { - return false, nil +func (c *LensClient) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { + methodURL := c.baseURL.JoinPath("lens", schemaVersionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return false, err + } + res, err := c.client.Do(req) + if err != nil { + return false, err + } + defer res.Body.Close() //nolint:errcheck + + err = parseResponse(res) + if err != nil { + return false, err + } + return true, nil } diff --git a/http/server.go b/http/server.go index 6f8728407f..9059a345c2 100644 --- a/http/server.go +++ b/http/server.go @@ -38,6 +38,7 @@ type Server struct { func NewServer(store client.Store) *gin.Engine { server := &Server{store} collectionServer := &CollectionServer{store} + lensServer := &LensServer{store} router := gin.Default() api := router.Group("/api/v0") @@ -64,8 +65,12 @@ func NewServer(store client.Store) *gin.Engine { collections.DELETE("/:name/:key", collectionServer.Delete) lens := api.Group("/lens") - lens_migration := lens.Group("/migration") - lens_migration.POST("/", server.SetMigration) + lens.GET("/", lensServer.Config) + 
lens.POST("/", lensServer.SetMigration) + lens.POST("/reload", lensServer.ReloadLenses) + lens.GET("/:version", lensServer.HasMigration) + lens.POST("/:version/up", lensServer.MigrateUp) + lens.POST("/:version/down", lensServer.MigrateDown) graphQL := api.Group("/graphql") graphQL.GET("/", server.ExecRequest) @@ -205,20 +210,6 @@ func (s *Server) PatchSchema(c *gin.Context) { c.Status(http.StatusOK) } -func (s *Server) SetMigration(c *gin.Context) { - var req client.LensConfig - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err := s.store.SetMigration(c.Request.Context(), req) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - func (s *Server) GetCollection(c *gin.Context) { switch { case c.Query("name") != "": diff --git a/http/server_lens.go b/http/server_lens.go new file mode 100644 index 0000000000..d1b5fb6373 --- /dev/null +++ b/http/server_lens.go @@ -0,0 +1,97 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + + "github.com/gin-gonic/gin" + "github.com/sourcenetwork/immutable/enumerable" + + "github.com/sourcenetwork/defradb/client" +) + +type LensServer struct { + store client.Store +} + +func (s *LensServer) ReloadLenses(c *gin.Context) { + err := s.store.LensRegistry().ReloadLenses(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *LensServer) SetMigration(c *gin.Context) { + var req client.LensConfig + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := s.store.SetMigration(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *LensServer) MigrateUp(c *gin.Context) { + var src enumerable.Enumerable[map[string]any] + if err := c.ShouldBind(src); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + result, err := s.store.LensRegistry().MigrateUp(c.Request.Context(), src, c.Param("version")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) +} + +func (s *LensServer) MigrateDown(c *gin.Context) { + var src enumerable.Enumerable[map[string]any] + if err := c.ShouldBind(src); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + result, err := s.store.LensRegistry().MigrateDown(c.Request.Context(), src, c.Param("version")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, result) +} + +func (s *LensServer) Config(c *gin.Context) { + cfgs, err := s.store.LensRegistry().Config(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, cfgs) +} + +func (s *LensServer) HasMigration(c *gin.Context) { + exists, err := s.store.LensRegistry().HasMigration(c.Request.Context(), 
c.Param("version")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if !exists { + c.JSON(http.StatusNotFound, gin.H{"error": "migration not found"}) + return + } + c.Status(http.StatusOK) +} From 6a8e798285d19d87f09e451e2206921054635209 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 15 Aug 2023 12:01:12 -0700 Subject: [PATCH 009/107] refactor http client and server for better use in testing. move client test wrapper to http package. fix tests with missing result fields --- http/client.go | 418 ++---------------- http/client_collection.go | 168 ++----- http/client_lens.go | 73 +-- http/client_store.go | 280 ++++++++++++ http/client_utils.go | 52 --- http/server.go | 315 +++---------- http/server_collection.go | 64 ++- http/server_lens.go | 40 +- http/server_store.go | 276 ++++++++++++ http/wrapper.go | 207 +++++++++ tests/integration/client.go | 143 ------ .../query/one_to_many/simple_test.go | 2 +- .../with_group_related_id_alias_test.go | 1 + .../one_to_many/with_group_related_id_test.go | 1 + .../query/simple/with_order_test.go | 1 + tests/integration/state.go | 4 - tests/integration/utils2.go | 51 +-- 17 files changed, 978 insertions(+), 1118 deletions(-) create mode 100644 http/client_store.go delete mode 100644 http/client_utils.go create mode 100644 http/server_store.go create mode 100644 http/wrapper.go delete mode 100644 tests/integration/client.go diff --git a/http/client.go b/http/client.go index 7cbd5ea5fc..b0c1c881d1 100644 --- a/http/client.go +++ b/http/client.go @@ -11,114 +11,39 @@ package http import ( - "bufio" - "bytes" - "context" "encoding/json" "fmt" + "io" "net/http" "net/url" - "strings" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/events" ) -var _ client.Store = (*Client)(nil) - -// Client implements the client.Store interface over HTTP. 
-type Client struct { +type httpClient struct { client *http.Client baseURL *url.URL + txValue string } -func NewClient(rawURL string) (*Client, error) { - baseURL, err := url.Parse(rawURL) - if err != nil { - return nil, err - } - return &Client{ - client: http.DefaultClient, - baseURL: baseURL.JoinPath("/api/v0"), - }, nil +type errorResponse struct { + Error string `json:"error"` } -func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { - methodURL := c.baseURL.JoinPath("p2p", "replicators") - - body, err := json.Marshal(rep) - if err != nil { - return err +func (c *httpClient) withTxn(txValue string) *httpClient { + return &httpClient{ + client: c.client, + baseURL: c.baseURL, + txValue: txValue, } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) } -func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { - methodURL := c.baseURL.JoinPath("p2p", "replicators") - - body, err := json.Marshal(rep) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } +func (c *httpClient) setDefaultHeaders(req *http.Request) { req.Header.Add("Accept", "application/json") req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + req.Header.Add(txHeaderName, c.txValue) } -func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { - methodURL := c.baseURL.JoinPath("p2p", "replicators") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var reps []client.Replicator - if err := parseJsonResponse(res, &reps); err != nil { - return nil, err - } - return reps, nil -} - -func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { - methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") +func (c *httpClient) request(req *http.Request) error { + c.setDefaultHeaders(req) res, err := c.client.Do(req) if err != nil { @@ -126,62 +51,23 @@ func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) erro } defer res.Body.Close() //nolint:errcheck - return parseResponse(res) -} - -func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { - methodURL := c.baseURL.JoinPath("p2p", "collections", collectionID) - - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + data, err := io.ReadAll(res.Body) if err != nil { return err } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err + if res.StatusCode == http.StatusOK { + return nil } - defer res.Body.Close() //nolint:errcheck - - 
return parseResponse(res) -} - -func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { - methodURL := c.baseURL.JoinPath("p2p", "collections") - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err + var errRes errorResponse + if err := json.Unmarshal(data, &errRes); err != nil { + return fmt.Errorf("%s", data) } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var cols []string - if err := parseJsonResponse(res, &cols); err != nil { - return nil, err - } - return cols, nil + return fmt.Errorf(errRes.Error) } -func (c *Client) BasicImport(ctx context.Context, filepath string) error { - methodURL := c.baseURL.JoinPath("backup", "import") - - body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") +func (c *httpClient) requestJson(req *http.Request, out any) error { + c.setDefaultHeaders(req) res, err := c.client.Do(req) if err != nil { @@ -189,261 +75,17 @@ func (c *Client) BasicImport(ctx context.Context, filepath string) error { } defer res.Body.Close() //nolint:errcheck - return parseResponse(res) -} - -func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { - methodURL := c.baseURL.JoinPath("backup", "export") - - body, err := json.Marshal(config) + data, err := io.ReadAll(res.Body) if err != nil { return err } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err + if res.StatusCode == http.StatusOK { + return json.Unmarshal(data, out) } - defer res.Body.Close() //nolint:errcheck - return parseResponse(res) -} - -func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { - methodURL := c.baseURL.JoinPath("schema") - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var cols []client.CollectionDescription - if err := parseJsonResponse(res, &cols); err != nil { - return nil, err - } - return cols, nil -} - -func (c *Client) PatchSchema(ctx context.Context, patch string) error { - methodURL := c.baseURL.JoinPath("schema") - - req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) - if err != nil { - return err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) -} - -func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { - return c.LensRegistry().SetMigration(ctx, config) -} - -func (c *Client) LensRegistry() client.LensRegistry { - return NewLensClient(c) -} - -func (c *Client) 
GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { - methodURL := c.baseURL.JoinPath("collections") - methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var description client.CollectionDescription - if err := parseJsonResponse(res, &description); err != nil { - return nil, err - } - return NewCollectionClient(c, description), nil -} - -func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { - methodURL := c.baseURL.JoinPath("collections") - methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var description client.CollectionDescription - if err := parseJsonResponse(res, &description); err != nil { - return nil, err - } - return NewCollectionClient(c, description), nil -} - -func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { - methodURL := c.baseURL.JoinPath("collections") - methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var description client.CollectionDescription - if err := parseJsonResponse(res, &description); err != nil { - return nil, err - } - return NewCollectionClient(c, description), nil -} - -func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { - methodURL := c.baseURL.JoinPath("collections") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var descriptions []client.CollectionDescription - if err := parseJsonResponse(res, &descriptions); err != nil { - return nil, err - } - collections := make([]client.Collection, len(descriptions)) - for i, d := range descriptions { - collections[i] = NewCollectionClient(c, d) - } - return collections, nil -} - -func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { - methodURL := c.baseURL.JoinPath("indexes") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - req.Header.Add("Accept", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - - var indexes map[client.CollectionName][]client.IndexDescription - if err := parseJsonResponse(res, &indexes); err != nil { - return nil, err - } - return indexes, nil -} - -func (c *Client) ExecRequest(ctx context.Context, 
query string) (result *client.RequestResult) { - methodURL := c.baseURL.JoinPath("graphql") - result = &client.RequestResult{} - - body, err := json.Marshal(&GraphQLRequest{query}) - if err != nil { - result.GQL.Errors = []error{err} - return - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - result.GQL.Errors = []error{err} - return - } - req.Header.Add("Accept", "application/json") - req.Header.Add("Accept", "text/event-stream") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - result.GQL.Errors = []error{err} - return - } - defer res.Body.Close() //nolint:errcheck - - if res.Header.Get("Content-Type") == "text/event-stream" { - return c.execRequestSubscription(ctx, res) - } - var response GraphQLResponse - if err = parseJsonResponse(res, &response); err != nil { - result.GQL.Errors = []error{err} - return - } - result.GQL.Data = response.Data - for _, err := range response.Errors { - result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) - } - return -} - -func (c *Client) execRequestSubscription(ctx context.Context, res *http.Response) (result *client.RequestResult) { - result = &client.RequestResult{} - - pubChan := events.New[events.Update](0, 0) - scanner := bufio.NewScanner(res.Body) - - go func() { - for scanner.Scan() { - line := scanner.Text() - if !strings.HasPrefix(line, "data:") { - continue - } - var item events.Update - if err := json.Unmarshal([]byte(line[6:]), &item); err != nil { - continue - } - pubChan.Publish(item) - } - }() - - pub, err := events.NewPublisher[events.Update](pubChan, 0) - if err != nil { - return result + var errRes errorResponse + if err := json.Unmarshal(data, &errRes); err != nil { + return fmt.Errorf("%s", data) } - result.Pub = pub - return result + return fmt.Errorf(errRes.Error) } diff --git a/http/client_collection.go b/http/client_collection.go index 8aae354f8f..18068eb8f3 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -16,7 +16,6 @@ import ( "encoding/json" "fmt" "net/http" - "net/url" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -27,41 +26,39 @@ var _ client.Collection = (*CollectionClient)(nil) // CollectionClient implements the client.Collection interface over HTTP. 
type CollectionClient struct { - client *http.Client - baseURL *url.URL - description client.CollectionDescription + http *httpClient + desc client.CollectionDescription } -func NewCollectionClient(s *Client, description client.CollectionDescription) *CollectionClient { +func NewCollectionClient(httpClient *httpClient, desc client.CollectionDescription) *CollectionClient { return &CollectionClient{ - client: s.client, - baseURL: s.baseURL, - description: description, + http: httpClient, + desc: desc, } } func (c *CollectionClient) Description() client.CollectionDescription { - return c.description + return c.desc } func (c *CollectionClient) Name() string { - return c.description.Name + return c.desc.Name } func (c *CollectionClient) Schema() client.SchemaDescription { - return c.description.Schema + return c.desc.Schema } func (c *CollectionClient) ID() uint32 { - return c.description.ID + return c.desc.ID } func (c *CollectionClient) SchemaID() string { - return c.description.Schema.SchemaID + return c.desc.Schema.SchemaID } func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) error { - methodURL := c.baseURL.JoinPath("collections", c.description.Name) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) docMap, err := doc.ToMap() if err != nil { @@ -75,20 +72,11 @@ func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) err if err != nil { return err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Document) error { - methodURL := c.baseURL.JoinPath("collections", c.description.Name) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) var docMapList []map[string]any for _, doc := range docs { @@ -106,20 +94,11 @@ func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Docume if err != nil { return err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) error { - methodURL := c.baseURL.JoinPath("collections", c.description.Name, doc.Key().String()) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() if err != nil { @@ -133,20 +112,11 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err if err != nil { return err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error { - methodURL := c.baseURL.JoinPath("collections", c.description.Name, doc.Key().String()) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() if err != nil { @@ -160,35 +130,17 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error if err != nil { return err } - 
req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { - methodURL := c.baseURL.JoinPath("collections", c.description.Name, docKey.String()) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, docKey.String()) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) if err != nil { return false, err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return false, err - } - defer res.Body.Close() //nolint:errcheck - - err = parseResponse(res) + err = c.http.request(req) if err != nil { return false, err } @@ -220,7 +172,7 @@ func (c *CollectionClient) updateWith( ctx context.Context, request CollectionUpdateRequest, ) (*client.UpdateResult, error) { - methodURL := c.baseURL.JoinPath("collections", c.description.Name) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) body, err := json.Marshal(request) if err != nil { @@ -230,17 +182,8 @@ func (c *CollectionClient) updateWith( if err != nil { return nil, err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - var result client.UpdateResult - if err := parseJsonResponse(res, &result); err != nil { + if err := c.http.requestJson(req, &result); err != nil { return nil, err } return &result, nil @@ -300,7 +243,7 @@ func (c *CollectionClient) deleteWith( ctx context.Context, request CollectionDeleteRequest, ) (*client.DeleteResult, error) { - methodURL := c.baseURL.JoinPath("collections", c.description.Name) + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) body, err := json.Marshal(request) if err != nil { @@ -310,17 +253,8 @@ func (c *CollectionClient) deleteWith( if err != nil { return nil, err } - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - var result client.DeleteResult - if err := parseJsonResponse(res, &result); err != nil { + if err := c.http.requestJson(req, &result); err != nil { return nil, err } return &result, nil @@ -349,23 +283,14 @@ func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client. 
 }
 
 func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) {
-	methodURL := c.baseURL.JoinPath("collections", c.description.Name, key.String())
+	methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, key.String())
 
 	req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
 	if err != nil {
 		return nil, err
 	}
-	req.Header.Add("Accept", "application/json")
-	req.Header.Add("Content-Type", "application/json")
-
-	res, err := c.client.Do(req)
-	if err != nil {
-		return nil, err
-	}
-	defer res.Body.Close() //nolint:errcheck
-
 	var docMap map[string]any
-	if err := parseJsonResponse(res, docMap); err != nil {
+	if err := c.http.requestJson(req, &docMap); err != nil {
 		return nil, err
 	}
 	return client.NewDocFromMap(docMap)
@@ -383,7 +308,7 @@ func (c *CollectionClient) CreateIndex(
 	ctx context.Context,
 	indexDesc client.IndexDescription,
 ) (client.IndexDescription, error) {
-	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes")
+	methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes")
 
 	body, err := json.Marshal(&indexDesc)
 	if err != nil {
@@ -393,60 +318,33 @@ func (c *CollectionClient) CreateIndex(
 	if err != nil {
 		return client.IndexDescription{}, nil
 	}
-	req.Header.Add("Accept", "application/json")
-	req.Header.Add("Content-Type", "application/json")
-
-	res, err := c.client.Do(req)
-	if err != nil {
-		return client.IndexDescription{}, nil
-	}
-	defer res.Body.Close() //nolint:errcheck
-
 	var index client.IndexDescription
-	if err := parseJsonResponse(res, &index); err != nil {
+	if err := c.http.requestJson(req, &index); err != nil {
 		return client.IndexDescription{}, nil
 	}
 	return index, nil
 }
 
 func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) error {
-	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes", indexName)
+	methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes", indexName)
 
 	req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil)
 	if err != nil {
 		return err
 	}
-	req.Header.Add("Accept", "application/json")
-	req.Header.Add("Content-Type", "application/json")
-
-	res, err := c.client.Do(req)
-	if err != nil {
-		return err
-	}
-	defer res.Body.Close() //nolint:errcheck
-
-	return parseResponse(res)
+	return c.http.request(req)
 }
 
 func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) {
-	methodURL := c.baseURL.JoinPath("collections", c.description.Name, "indexes")
+	methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes")
 
 	req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
 	if err != nil {
 		return nil, err
 	}
-	req.Header.Add("Accept", "application/json")
-	req.Header.Add("Content-Type", "application/json")
-
-	res, err := c.client.Do(req)
-	if err != nil {
-		return nil, err
-	}
-	defer res.Body.Close() //nolint:errcheck
-
 	var indexes []client.IndexDescription
-	if err := parseJsonResponse(res, indexes); err != nil {
+	if err := c.http.requestJson(req, &indexes); err != nil {
 		return nil, err
 	}
-	return c.description.Indexes, nil
+	return indexes, nil
 }
diff --git a/http/client_lens.go b/http/client_lens.go
index 7caaf718ce..c3b85f9e54 100644
--- a/http/client_lens.go
+++ b/http/client_lens.go
@@ -15,7 +15,6 @@ import (
 	"context"
 	"encoding/json"
 	"net/http"
-	"net/url"
 
 	"github.com/sourcenetwork/immutable/enumerable"
 
 	"github.com/sourcenetwork/defradb/client"
@@ -27,23 +26,19 @@ var _ client.LensRegistry =
(*LensClient)(nil) // LensClient implements the client.LensRegistry interface over HTTP. type LensClient struct { - client *http.Client - baseURL *url.URL + http *httpClient } -func NewLensClient(s *Client) *LensClient { - return &LensClient{ - client: s.client, - baseURL: s.baseURL, - } +func NewLensClient(httpClient *httpClient) *LensClient { + return &LensClient{httpClient} } func (c *LensClient) WithTxn(datastore.Txn) client.LensRegistry { - return c + return c // TODO } func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { - methodURL := c.baseURL.JoinPath("lens", "migrate") + methodURL := c.http.baseURL.JoinPath("lens", "migrate") body, err := json.Marshal(config) if err != nil { @@ -53,29 +48,17 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) if err != nil { return err } - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *LensClient) ReloadLenses(ctx context.Context) error { - methodURL := c.baseURL.JoinPath("lens", "reload") + methodURL := c.http.baseURL.JoinPath("lens", "reload") req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) if err != nil { return err } - res, err := c.client.Do(req) - if err != nil { - return err - } - defer res.Body.Close() //nolint:errcheck - - return parseResponse(res) + return c.http.request(req) } func (c *LensClient) MigrateUp( @@ -83,7 +66,7 @@ func (c *LensClient) MigrateUp( src enumerable.Enumerable[map[string]any], schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - methodURL := c.baseURL.JoinPath("lens", schemaVersionID, "up") + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "up") body, err := json.Marshal(src) if err != nil { @@ -93,14 +76,8 @@ func (c *LensClient) MigrateUp( if err != nil { return nil, err } - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - var result enumerable.Enumerable[map[string]any] - if err := parseJsonResponse(res, result); err != nil { + if err := c.http.requestJson(req, result); err != nil { return nil, err } return result, nil @@ -111,7 +88,7 @@ func (c *LensClient) MigrateDown( src enumerable.Enumerable[map[string]any], schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - methodURL := c.baseURL.JoinPath("lens", schemaVersionID, "down") + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "down") body, err := json.Marshal(src) if err != nil { @@ -121,53 +98,35 @@ func (c *LensClient) MigrateDown( if err != nil { return nil, err } - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - var result enumerable.Enumerable[map[string]any] - if err := parseJsonResponse(res, result); err != nil { + if err := c.http.requestJson(req, result); err != nil { return nil, err } return result, nil } func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { - methodURL := c.baseURL.JoinPath("lens") + methodURL := c.http.baseURL.JoinPath("lens") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) if err != nil { return nil, err } - res, err := c.client.Do(req) - if err != nil { - return nil, err - } - defer res.Body.Close() //nolint:errcheck - var cfgs []client.LensConfig - if err := parseJsonResponse(res, cfgs); err != nil { + if err := 
c.http.requestJson(req, cfgs); err != nil { return nil, err } return cfgs, nil } func (c *LensClient) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { - methodURL := c.baseURL.JoinPath("lens", schemaVersionID) + methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) if err != nil { return false, err } - res, err := c.client.Do(req) - if err != nil { - return false, err - } - defer res.Body.Close() //nolint:errcheck - - err = parseResponse(res) + err = c.http.request(req) if err != nil { return false, err } diff --git a/http/client_store.go b/http/client_store.go new file mode 100644 index 0000000000..99bc1f965e --- /dev/null +++ b/http/client_store.go @@ -0,0 +1,280 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" + + "github.com/sourcenetwork/defradb/client" +) + +var _ client.Store = (*StoreClient)(nil) + +type StoreClient struct { + http *httpClient +} + +func NewStoreClient(rawURL string) (*StoreClient, error) { + baseURL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + httpClient := &httpClient{ + client: http.DefaultClient, + baseURL: baseURL.JoinPath("/api/v0"), + } + return &StoreClient{httpClient}, nil +} + +func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + body, err := json.Marshal(rep) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var reps []client.Replicator + if err := c.http.requestJson(req, reps); err != nil { + return nil, err + } + return reps, nil +} + +func (c *StoreClient) AddP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) RemoveP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + 
return err + } + return c.http.request(req) +} + +func (c *StoreClient) GetAllP2PCollections(ctx context.Context) ([]string, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var cols []string + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *StoreClient) BasicImport(ctx context.Context, filepath string) error { + methodURL := c.http.baseURL.JoinPath("backup", "import") + + body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) BasicExport(ctx context.Context, config *client.BackupConfig) error { + methodURL := c.http.baseURL.JoinPath("backup", "export") + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) + if err != nil { + return nil, err + } + var cols []client.CollectionDescription + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *StoreClient) PatchSchema(ctx context.Context, patch string) error { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *StoreClient) SetMigration(ctx context.Context, config client.LensConfig) error { + return c.LensRegistry().SetMigration(ctx, config) +} + +func (c *StoreClient) LensRegistry() client.LensRegistry { + return NewLensClient(c.http) +} + +func (c *StoreClient) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return NewCollectionClient(c.http, description), nil +} + +func (c *StoreClient) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return NewCollectionClient(c.http, description), nil +} + +func (c *StoreClient) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + methodURL := 
c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return NewCollectionClient(c.http, description), nil +} + +func (c *StoreClient) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var descriptions []client.CollectionDescription + if err := c.http.requestJson(req, &descriptions); err != nil { + return nil, err + } + collections := make([]client.Collection, len(descriptions)) + for i, d := range descriptions { + collections[i] = NewCollectionClient(c.http, d) + } + return collections, nil +} + +func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + methodURL := c.http.baseURL.JoinPath("indexes") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var indexes map[client.CollectionName][]client.IndexDescription + if err := c.http.requestJson(req, &indexes); err != nil { + return nil, err + } + return indexes, nil +} + +func (c *StoreClient) ExecRequest(ctx context.Context, query string) (result *client.RequestResult) { + methodURL := c.http.baseURL.JoinPath("graphql") + result = &client.RequestResult{} + + body, err := json.Marshal(&GraphQLRequest{query}) + if err != nil { + result.GQL.Errors = []error{err} + return + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + result.GQL.Errors = []error{err} + return + } + var response GraphQLResponse + if err = c.http.requestJson(req, &response); err != nil { + result.GQL.Errors = []error{err} + return + } + result.GQL.Data = response.Data + for _, err := range response.Errors { + result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) + } + return +} diff --git a/http/client_utils.go b/http/client_utils.go deleted file mode 100644 index 221f169eb6..0000000000 --- a/http/client_utils.go +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
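client_utils.go is deleted here; its parseResponse/parseJsonResponse behaviour presumably moves into the request/requestJson helpers on the httpClient type used throughout the new client files above, which is not itself part of this excerpt (only its client and baseURL fields are visible in NewStoreClient, and a withTxn method is called from the Wrapper further down). A minimal sketch under those assumptions, not the actual implementation in this patch:

// Sketch only: field names come from NewStoreClient, everything else is
// inferred from how request, requestJson, and withTxn are called.
package http

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// txHeaderName matches the constant added in server.go.
const txHeaderName = "x-defradb-tx"

// errorResponse mirrors the {"error": "..."} payload returned by the handlers.
type errorResponse struct {
	Error string `json:"error"`
}

type httpClient struct {
	client  *http.Client
	baseURL *url.URL
	txValue string // optional transaction id forwarded with every request
}

// withTxn returns a copy of the client that sends the transaction header,
// matching how Wrapper.WithTxn uses it further down.
func (c *httpClient) withTxn(value string) *httpClient {
	return &httpClient{client: c.client, baseURL: c.baseURL, txValue: value}
}

// request performs the call and reports non-200 responses as errors,
// filling the role of the deleted parseResponse helper.
func (c *httpClient) request(req *http.Request) error {
	_, err := c.requestBytes(req)
	return err
}

// requestJson additionally unmarshals the body into out, which must be a
// pointer, filling the role of the deleted parseJsonResponse helper.
func (c *httpClient) requestJson(req *http.Request, out any) error {
	data, err := c.requestBytes(req)
	if err != nil {
		return err
	}
	return json.Unmarshal(data, out)
}

func (c *httpClient) requestBytes(req *http.Request) ([]byte, error) {
	req.Header.Add("Accept", "application/json")
	req.Header.Add("Content-Type", "application/json")
	if c.txValue != "" {
		req.Header.Add(txHeaderName, c.txValue)
	}
	res, err := c.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close() //nolint:errcheck
	data, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	if res.StatusCode != http.StatusOK {
		var errRes errorResponse
		if err := json.Unmarshal(data, &errRes); err != nil {
			return nil, fmt.Errorf("%s", data)
		}
		return nil, fmt.Errorf("%s", errRes.Error)
	}
	return data, nil
}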
- -package http - -import ( - "encoding/json" - "fmt" - "io" - "net/http" -) - -type errorResponse struct { - Error string `json:"error"` -} - -func parseErrorResponse(data []byte) error { - var res errorResponse - if err := json.Unmarshal(data, &res); err != nil { - return fmt.Errorf("%s", data) - } - return fmt.Errorf(res.Error) -} - -func parseResponse(res *http.Response) error { - data, err := io.ReadAll(res.Body) - if err != nil { - return err - } - if res.StatusCode != http.StatusOK { - return parseErrorResponse(data) - } - return nil -} - -func parseJsonResponse(res *http.Response, out any) error { - data, err := io.ReadAll(res.Body) - if err != nil { - return err - } - if res.StatusCode != http.StatusOK { - return parseErrorResponse(data) - } - return json.Unmarshal(data, &out) -} diff --git a/http/server.go b/http/server.go index 9059a345c2..c1afa7b874 100644 --- a/http/server.go +++ b/http/server.go @@ -11,297 +11,90 @@ package http import ( - "encoding/json" - "fmt" - "io" "net/http" "github.com/gin-gonic/gin" - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/events" + "github.com/sourcenetwork/defradb/datastore" ) -type GraphQLRequest struct { - Query string `json:"query" form:"query"` -} - -type GraphQLResponse struct { - Errors []string `json:"errors,omitempty"` - Data []map[string]any `json:"data"` -} +// txHeaderName is the name of the custom +// header containing the transaction id. +const txHeaderName = "x-defradb-tx" type Server struct { - store client.Store + store client.Store + router *gin.Engine + txMap map[uint64]datastore.Txn } -func NewServer(store client.Store) *gin.Engine { - server := &Server{store} - collectionServer := &CollectionServer{store} - lensServer := &LensServer{store} +func NewServer(store client.Store, middleware ...gin.HandlerFunc) *Server { + txMap := make(map[uint64]datastore.Txn) + + storeHandler := &StoreHandler{} + collectionHandler := &CollectionHandler{} + lensHandler := &LensHandler{} router := gin.Default() api := router.Group("/api/v0") + api.Use(func(c *gin.Context) { + c.Set("store", store) + c.Next() + }) + api.Use(middleware...) 
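The c.Set("store", ...) middleware above and the variadic middleware argument are the two extension points the rest of the patch relies on: every handler re-reads the store with c.MustGet("store"), and the Wrapper further down uses an extra middleware to swap in a transaction-bound store per request. A rough sketch of mounting the resulting Server with one additional middleware, assuming only the NewServer and ServeHTTP signatures in this patch (the listen address and the audit middleware are illustrative); the route registration continues below:

package main

import (
	"net/http"

	"github.com/gin-gonic/gin"

	"github.com/sourcenetwork/defradb/client"
	defrahttp "github.com/sourcenetwork/defradb/http"
)

// serve mounts the DefraDB HTTP API on a standard net/http listener.
func serve(store client.Store, addr string) error {
	// Illustrative extra middleware; it runs after the built-in one that
	// injects the store, so c.MustGet("store") is already safe here.
	audit := func(c *gin.Context) {
		c.Next()
	}
	srv := defrahttp.NewServer(store, audit)
	// *Server satisfies http.Handler through its ServeHTTP method.
	return http.ListenAndServe(addr, srv)
}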
+ backup := api.Group("/backup") - backup.POST("/export", server.BasicExport) - backup.POST("/import", server.BasicImport) + backup.POST("/export", storeHandler.BasicExport) + backup.POST("/import", storeHandler.BasicImport) schema := api.Group("/schema") - schema.POST("/", server.AddSchema) - schema.PATCH("/", server.PatchSchema) + schema.POST("/", storeHandler.AddSchema) + schema.PATCH("/", storeHandler.PatchSchema) collections := api.Group("/collections") - collections.GET("/", server.GetCollection) - collections.POST("/:name", collectionServer.Create) - collections.PATCH("/:name", collectionServer.UpdateWith) - collections.DELETE("/:name", collectionServer.DeleteWith) - collections.POST("/:name/indexes", collectionServer.CreateIndex) - collections.GET("/:name/indexes", collectionServer.GetIndexes) - collections.DELETE("/:name/indexes/:index", collectionServer.DropIndex) - collections.GET("/:name/:key", collectionServer.Get) - collections.POST("/:name/:key", collectionServer.Save) - collections.PATCH("/:name/:key", collectionServer.Update) - collections.DELETE("/:name/:key", collectionServer.Delete) + collections.GET("/", storeHandler.GetCollection) + collections.POST("/:name", collectionHandler.Create) + collections.PATCH("/:name", collectionHandler.UpdateWith) + collections.DELETE("/:name", collectionHandler.DeleteWith) + collections.POST("/:name/indexes", collectionHandler.CreateIndex) + collections.GET("/:name/indexes", collectionHandler.GetIndexes) + collections.DELETE("/:name/indexes/:index", collectionHandler.DropIndex) + collections.GET("/:name/:key", collectionHandler.Get) + collections.POST("/:name/:key", collectionHandler.Save) + collections.PATCH("/:name/:key", collectionHandler.Update) + collections.DELETE("/:name/:key", collectionHandler.Delete) lens := api.Group("/lens") - lens.GET("/", lensServer.Config) - lens.POST("/", lensServer.SetMigration) - lens.POST("/reload", lensServer.ReloadLenses) - lens.GET("/:version", lensServer.HasMigration) - lens.POST("/:version/up", lensServer.MigrateUp) - lens.POST("/:version/down", lensServer.MigrateDown) + lens.GET("/", lensHandler.Config) + lens.POST("/", lensHandler.SetMigration) + lens.POST("/reload", lensHandler.ReloadLenses) + lens.GET("/:version", lensHandler.HasMigration) + lens.POST("/:version/up", lensHandler.MigrateUp) + lens.POST("/:version/down", lensHandler.MigrateDown) graphQL := api.Group("/graphql") - graphQL.GET("/", server.ExecRequest) - graphQL.POST("/", server.ExecRequest) + graphQL.GET("/", storeHandler.ExecRequest) + graphQL.POST("/", storeHandler.ExecRequest) p2p := api.Group("/p2p") p2p_replicators := p2p.Group("/replicators") - p2p_replicators.GET("/replicators", server.GetAllReplicators) - p2p_replicators.POST("/replicators", server.SetReplicator) - p2p_replicators.DELETE("/replicators", server.DeleteReplicator) + p2p_replicators.GET("/replicators", storeHandler.GetAllReplicators) + p2p_replicators.POST("/replicators", storeHandler.SetReplicator) + p2p_replicators.DELETE("/replicators", storeHandler.DeleteReplicator) p2p_collections := p2p.Group("/collections") - p2p_collections.GET("/collections", server.GetAllP2PCollections) - p2p_collections.POST("/collections/:id", server.AddP2PCollection) - p2p_collections.DELETE("/collections/:id", server.RemoveP2PCollection) - - return router -} - -func (s *Server) SetReplicator(c *gin.Context) { - var req client.Replicator - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err := 
s.store.SetReplicator(c.Request.Context(), req) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) DeleteReplicator(c *gin.Context) { - var req client.Replicator - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err := s.store.DeleteReplicator(c.Request.Context(), req) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) GetAllReplicators(c *gin.Context) { - reps, err := s.store.GetAllReplicators(c.Request.Context()) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, reps) -} - -func (s *Server) AddP2PCollection(c *gin.Context) { - err := s.store.AddP2PCollection(c.Request.Context(), c.Param("id")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) RemoveP2PCollection(c *gin.Context) { - err := s.store.RemoveP2PCollection(c.Request.Context(), c.Param("id")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) GetAllP2PCollections(c *gin.Context) { - cols, err := s.store.GetAllP2PCollections(c.Request.Context()) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, cols) -} + p2p_collections.GET("/collections", storeHandler.GetAllP2PCollections) + p2p_collections.POST("/collections/:id", storeHandler.AddP2PCollection) + p2p_collections.DELETE("/collections/:id", storeHandler.RemoveP2PCollection) -func (s *Server) BasicImport(c *gin.Context) { - var config client.BackupConfig - if err := c.ShouldBindJSON(&config); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return + return &Server{ + store: store, + router: router, + txMap: txMap, } - err := s.store.BasicImport(c.Request.Context(), config.Filepath) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) BasicExport(c *gin.Context) { - var config client.BackupConfig - if err := c.ShouldBindJSON(&config); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err := s.store.BasicExport(c.Request.Context(), &config) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) } -func (s *Server) AddSchema(c *gin.Context) { - schema, err := io.ReadAll(c.Request.Body) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - cols, err := s.store.AddSchema(c.Request.Context(), string(schema)) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, cols) -} - -func (s *Server) PatchSchema(c *gin.Context) { - patch, err := io.ReadAll(c.Request.Body) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err = s.store.PatchSchema(c.Request.Context(), string(patch)) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.Status(http.StatusOK) -} - -func (s *Server) GetCollection(c *gin.Context) { - switch { - case c.Query("name") != "": - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Query("name")) - if err != 
nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, col.Description()) - case c.Query("schema_id") != "": - col, err := s.store.GetCollectionBySchemaID(c.Request.Context(), c.Query("schema_id")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, col.Description()) - case c.Query("version_id") != "": - col, err := s.store.GetCollectionByVersionID(c.Request.Context(), c.Query("version_id")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, col.Description()) - default: - cols, err := s.store.GetAllCollections(c.Request.Context()) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - colDesc := make([]client.CollectionDescription, len(cols)) - for i, col := range cols { - colDesc[i] = col.Description() - } - c.JSON(http.StatusOK, colDesc) - } -} - -func (s *Server) GetAllIndexes(c *gin.Context) { - indexes, err := s.store.GetAllIndexes(c.Request.Context()) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - c.JSON(http.StatusOK, indexes) -} - -func (s *Server) ExecRequest(c *gin.Context) { - var request GraphQLRequest - if err := c.ShouldBind(&request); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - if request.Query == "" { - c.JSON(http.StatusBadRequest, gin.H{"error": "missing request"}) - return - } - result := s.store.ExecRequest(c.Request.Context(), request.Query) - if result.Pub != nil { - s.execRequestSubscription(c, result.Pub) - return - } - - var errors []string - for _, err := range result.GQL.Errors { - errors = append(errors, err.Error()) - } - c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) -} - -func (s *Server) execRequestSubscription(c *gin.Context, pub *events.Publisher[events.Update]) { - c.Header("Content-Type", "text/event-stream") - c.Header("Cache-Control", "no-cache") - c.Header("Connection", "keep-alive") - - c.Status(http.StatusOK) - c.Writer.Flush() - - c.Stream(func(w io.Writer) bool { - select { - case <-c.Request.Context().Done(): - pub.Unsubscribe() - return false - case item, open := <-pub.Stream(): - if !open { - return false - } - data, err := json.Marshal(item) - if err != nil { - return false - } - fmt.Fprintf(w, "data: %s\n\n", data) - return true - } - }) +func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) { + s.router.ServeHTTP(w, req) } diff --git a/http/server_collection.go b/http/server_collection.go index d8f42fd143..feac822df6 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -18,9 +18,7 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type CollectionServer struct { - store client.Store -} +type CollectionHandler struct{} type CollectionDeleteRequest struct { Key string `json:"key"` @@ -35,8 +33,10 @@ type CollectionUpdateRequest struct { Updater string `json:"updater"` } -func (s *CollectionServer) Create(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) Create(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -78,8 +78,10 @@ func (s *CollectionServer) Create(c *gin.Context) { c.Status(http.StatusOK) } -func (s 
*CollectionServer) Save(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) Save(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -106,8 +108,10 @@ func (s *CollectionServer) Save(c *gin.Context) { c.Status(http.StatusOK) } -func (s *CollectionServer) DeleteWith(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) DeleteWith(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -159,8 +163,10 @@ func (s *CollectionServer) DeleteWith(c *gin.Context) { } } -func (s *CollectionServer) UpdateWith(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) UpdateWith(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -212,8 +218,10 @@ func (s *CollectionServer) UpdateWith(c *gin.Context) { } } -func (s *CollectionServer) Update(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) Update(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -240,8 +248,10 @@ func (s *CollectionServer) Update(c *gin.Context) { c.Status(http.StatusOK) } -func (s *CollectionServer) Delete(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) Delete(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -259,8 +269,10 @@ func (s *CollectionServer) Delete(c *gin.Context) { c.Status(http.StatusOK) } -func (s *CollectionServer) Get(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) Get(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -278,8 +290,10 @@ func (s *CollectionServer) Get(c *gin.Context) { c.Status(http.StatusOK) } -func (s *CollectionServer) CreateIndex(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) CreateIndex(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -297,8 +311,10 @@ func (s *CollectionServer) CreateIndex(c *gin.Context) { c.JSON(http.StatusOK, index) } -func (s *CollectionServer) GetIndexes(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), 
c.Param("name")) +func (s *CollectionHandler) GetIndexes(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -311,8 +327,10 @@ func (s *CollectionServer) GetIndexes(c *gin.Context) { c.JSON(http.StatusOK, indexes) } -func (s *CollectionServer) DropIndex(c *gin.Context) { - col, err := s.store.GetCollectionByName(c.Request.Context(), c.Param("name")) +func (s *CollectionHandler) DropIndex(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return diff --git a/http/server_lens.go b/http/server_lens.go index d1b5fb6373..a125e6b1c7 100644 --- a/http/server_lens.go +++ b/http/server_lens.go @@ -19,12 +19,12 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type LensServer struct { - store client.Store -} +type LensHandler struct{} + +func (s *LensHandler) ReloadLenses(c *gin.Context) { + store := c.MustGet("store").(client.Store) -func (s *LensServer) ReloadLenses(c *gin.Context) { - err := s.store.LensRegistry().ReloadLenses(c.Request.Context()) + err := store.LensRegistry().ReloadLenses(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -32,13 +32,15 @@ func (s *LensServer) ReloadLenses(c *gin.Context) { c.Status(http.StatusOK) } -func (s *LensServer) SetMigration(c *gin.Context) { +func (s *LensHandler) SetMigration(c *gin.Context) { + store := c.MustGet("store").(client.Store) + var req client.LensConfig if err := c.ShouldBindJSON(&req); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - err := s.store.SetMigration(c.Request.Context(), req) + err := store.SetMigration(c.Request.Context(), req) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -46,13 +48,15 @@ func (s *LensServer) SetMigration(c *gin.Context) { c.Status(http.StatusOK) } -func (s *LensServer) MigrateUp(c *gin.Context) { +func (s *LensHandler) MigrateUp(c *gin.Context) { + store := c.MustGet("store").(client.Store) + var src enumerable.Enumerable[map[string]any] if err := c.ShouldBind(src); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - result, err := s.store.LensRegistry().MigrateUp(c.Request.Context(), src, c.Param("version")) + result, err := store.LensRegistry().MigrateUp(c.Request.Context(), src, c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -60,13 +64,15 @@ func (s *LensServer) MigrateUp(c *gin.Context) { c.JSON(http.StatusOK, result) } -func (s *LensServer) MigrateDown(c *gin.Context) { +func (s *LensHandler) MigrateDown(c *gin.Context) { + store := c.MustGet("store").(client.Store) + var src enumerable.Enumerable[map[string]any] if err := c.ShouldBind(src); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - result, err := s.store.LensRegistry().MigrateDown(c.Request.Context(), src, c.Param("version")) + result, err := store.LensRegistry().MigrateDown(c.Request.Context(), src, c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -74,8 +80,10 @@ func (s *LensServer) MigrateDown(c *gin.Context) { c.JSON(http.StatusOK, result) } -func (s *LensServer) Config(c *gin.Context) { - 
cfgs, err := s.store.LensRegistry().Config(c.Request.Context()) +func (s *LensHandler) Config(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + cfgs, err := store.LensRegistry().Config(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -83,8 +91,10 @@ func (s *LensServer) Config(c *gin.Context) { c.JSON(http.StatusOK, cfgs) } -func (s *LensServer) HasMigration(c *gin.Context) { - exists, err := s.store.LensRegistry().HasMigration(c.Request.Context(), c.Param("version")) +func (s *LensHandler) HasMigration(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + exists, err := store.LensRegistry().HasMigration(c.Request.Context(), c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return diff --git a/http/server_store.go b/http/server_store.go new file mode 100644 index 0000000000..52a8aa9593 --- /dev/null +++ b/http/server_store.go @@ -0,0 +1,276 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + + "github.com/gin-gonic/gin" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/events" +) + +type GraphQLRequest struct { + Query string `json:"query" form:"query"` +} + +type GraphQLResponse struct { + Errors []string `json:"errors,omitempty"` + Data any `json:"data"` +} + +type StoreHandler struct{} + +func (s *StoreHandler) SetReplicator(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + var req client.Replicator + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := store.SetReplicator(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) DeleteReplicator(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + var req client.Replicator + if err := c.ShouldBindJSON(&req); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := store.DeleteReplicator(c.Request.Context(), req) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) GetAllReplicators(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + reps, err := store.GetAllReplicators(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, reps) +} + +func (s *StoreHandler) AddP2PCollection(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + err := store.AddP2PCollection(c.Request.Context(), c.Param("id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) RemoveP2PCollection(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + err := store.RemoveP2PCollection(c.Request.Context(), c.Param("id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + 
c.Status(http.StatusOK) +} + +func (s *StoreHandler) GetAllP2PCollections(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + cols, err := store.GetAllP2PCollections(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, cols) +} + +func (s *StoreHandler) BasicImport(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + var config client.BackupConfig + if err := c.ShouldBindJSON(&config); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := store.BasicImport(c.Request.Context(), config.Filepath) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) BasicExport(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + var config client.BackupConfig + if err := c.ShouldBindJSON(&config); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err := store.BasicExport(c.Request.Context(), &config) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) AddSchema(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + schema, err := io.ReadAll(c.Request.Body) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + cols, err := store.AddSchema(c.Request.Context(), string(schema)) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, cols) +} + +func (s *StoreHandler) PatchSchema(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + patch, err := io.ReadAll(c.Request.Body) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + err = store.PatchSchema(c.Request.Context(), string(patch)) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (s *StoreHandler) GetCollection(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + switch { + case c.Query("name") != "": + col, err := store.GetCollectionByName(c.Request.Context(), c.Query("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + case c.Query("schema_id") != "": + col, err := store.GetCollectionBySchemaID(c.Request.Context(), c.Query("schema_id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + case c.Query("version_id") != "": + col, err := store.GetCollectionByVersionID(c.Request.Context(), c.Query("version_id")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, col.Description()) + default: + cols, err := store.GetAllCollections(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + colDesc := make([]client.CollectionDescription, len(cols)) + for i, col := range cols { + colDesc[i] = col.Description() + } + c.JSON(http.StatusOK, colDesc) + } +} + +func (s *StoreHandler) GetAllIndexes(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + indexes, err := store.GetAllIndexes(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.JSON(http.StatusOK, 
indexes) +} + +func (s *StoreHandler) ExecRequest(c *gin.Context) { + store := c.MustGet("store").(client.Store) + + var request GraphQLRequest + if err := c.ShouldBind(&request); err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + if request.Query == "" { + c.JSON(http.StatusBadRequest, gin.H{"error": "missing request"}) + return + } + result := store.ExecRequest(c.Request.Context(), request.Query) + if result.Pub != nil { + s.execRequestSubscription(c, result.Pub) + return + } + + var errors []string + for _, err := range result.GQL.Errors { + errors = append(errors, err.Error()) + } + c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) +} + +func (s *StoreHandler) execRequestSubscription(c *gin.Context, pub *events.Publisher[events.Update]) { + c.Header("Content-Type", "text/event-stream") + c.Header("Cache-Control", "no-cache") + c.Header("Connection", "keep-alive") + + c.Status(http.StatusOK) + c.Writer.Flush() + + c.Stream(func(w io.Writer) bool { + select { + case <-c.Request.Context().Done(): + pub.Unsubscribe() + return false + case item, open := <-pub.Stream(): + if !open { + return false + } + data, err := json.Marshal(item) + if err != nil { + return false + } + fmt.Fprintf(w, "data: %s\n\n", data) + return true + } + }) +} diff --git a/http/wrapper.go b/http/wrapper.go new file mode 100644 index 0000000000..f8fc505f1e --- /dev/null +++ b/http/wrapper.go @@ -0,0 +1,207 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
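execRequestSubscription above frames every events.Update as a "data: <json>" line followed by a blank line, but the new StoreClient.ExecRequest in this patch no longer consumes that stream (the old scanner-based path and its events.Publisher plumbing were dropped). A minimal sketch of a client-side reader for that framing, offered as an assumed helper rather than anything this patch adds:

package http

import (
	"bufio"
	"encoding/json"
	"net/http"
	"strings"

	"github.com/sourcenetwork/defradb/events"
)

// readSubscription is a hypothetical helper that turns a text/event-stream
// response into a channel of events.Update values, mirroring the
// "data: <json>\n\n" framing written by execRequestSubscription.
func readSubscription(res *http.Response) <-chan events.Update {
	out := make(chan events.Update)
	go func() {
		defer close(out)
		defer res.Body.Close() //nolint:errcheck
		scanner := bufio.NewScanner(res.Body)
		for scanner.Scan() {
			line := scanner.Text()
			if !strings.HasPrefix(line, "data: ") {
				continue // skip blank separator lines
			}
			var item events.Update
			if err := json.Unmarshal([]byte(strings.TrimPrefix(line, "data: ")), &item); err != nil {
				continue // ignore malformed frames in this sketch
			}
			out <- item
		}
	}()
	return out
}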
+ +package http + +import ( + "context" + "fmt" + "net/http/httptest" + "strconv" + + "github.com/gin-gonic/gin" + blockstore "github.com/ipfs/boxo/blockstore" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" +) + +var _ client.Store = (*Wrapper)(nil) +var _ client.DB = (*Wrapper)(nil) + +type Wrapper struct { + db client.DB + txMap map[uint64]datastore.Txn + + server *Server + client *StoreClient + httpServer *httptest.Server +} + +func NewWrapper(db client.DB) (*Wrapper, error) { + txMap := make(map[uint64]datastore.Txn) + txMiddleware := func(c *gin.Context) { + txValue := c.GetHeader(txHeaderName) + if txValue == "" { + c.Next() + return + } + txId, err := strconv.ParseUint(txValue, 10, 64) + if err != nil { + c.Next() + return + } + tx, ok := txMap[txId] + if !ok { + c.Next() + return + } + c.Set("store", db.WithTxn(tx)) + c.Next() + } + + server := NewServer(db, txMiddleware) + httpServer := httptest.NewServer(server) + + client, err := NewStoreClient(httpServer.URL) + if err != nil { + return nil, err + } + + return &Wrapper{ + db, + txMap, + server, + client, + httpServer, + }, nil +} + +func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) error { + return w.client.SetReplicator(ctx, rep) +} + +func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + return w.client.DeleteReplicator(ctx, rep) +} + +func (w *Wrapper) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + return w.client.GetAllReplicators(ctx) +} + +func (w *Wrapper) AddP2PCollection(ctx context.Context, collectionID string) error { + return w.client.AddP2PCollection(ctx, collectionID) +} + +func (w *Wrapper) RemoveP2PCollection(ctx context.Context, collectionID string) error { + return w.client.RemoveP2PCollection(ctx, collectionID) +} + +func (w *Wrapper) GetAllP2PCollections(ctx context.Context) ([]string, error) { + return w.client.GetAllP2PCollections(ctx) +} + +func (w *Wrapper) BasicImport(ctx context.Context, filepath string) error { + return w.client.BasicImport(ctx, filepath) +} + +func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) error { + return w.client.BasicExport(ctx, config) +} + +func (w *Wrapper) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + return w.client.AddSchema(ctx, schema) +} + +func (w *Wrapper) PatchSchema(ctx context.Context, patch string) error { + return w.client.PatchSchema(ctx, patch) +} + +func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error { + return w.client.SetMigration(ctx, config) +} + +func (w *Wrapper) LensRegistry() client.LensRegistry { + return w.client.LensRegistry() +} + +func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + return w.client.GetCollectionByName(ctx, name) +} + +func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + return w.client.GetCollectionBySchemaID(ctx, schemaId) +} + +func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + return w.client.GetCollectionByVersionID(ctx, versionId) +} + +func (w *Wrapper) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + return w.client.GetAllCollections(ctx) +} + +func (w *Wrapper) GetAllIndexes(ctx context.Context) 
(map[client.CollectionName][]client.IndexDescription, error) { + return w.client.GetAllIndexes(ctx) +} + +func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { + return w.client.ExecRequest(ctx, query) +} + +func (w *Wrapper) NewTxn(ctx context.Context, b bool) (datastore.Txn, error) { + tx, err := w.db.NewTxn(ctx, b) + if err != nil { + return nil, err + } + + w.txMap[tx.ID()] = tx + tx.OnError(func() { delete(w.txMap, tx.ID()) }) + tx.OnSuccess(func() { delete(w.txMap, tx.ID()) }) + tx.OnDiscard(func() { delete(w.txMap, tx.ID()) }) + + return tx, nil +} + +func (w *Wrapper) NewConcurrentTxn(ctx context.Context, b bool) (datastore.Txn, error) { + tx, err := w.db.NewConcurrentTxn(ctx, b) + if err != nil { + return nil, err + } + + w.txMap[tx.ID()] = tx + tx.OnError(func() { delete(w.txMap, tx.ID()) }) + tx.OnSuccess(func() { delete(w.txMap, tx.ID()) }) + tx.OnDiscard(func() { delete(w.txMap, tx.ID()) }) + + return tx, nil +} + +func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { + txValue := fmt.Sprintf("%d", tx.ID()) + client := w.client.http.withTxn(txValue) + return &StoreClient{client} +} + +func (w *Wrapper) Root() datastore.RootStore { + return w.db.Root() +} + +func (w *Wrapper) Blockstore() blockstore.Blockstore { + return w.db.Blockstore() +} + +func (w *Wrapper) Close(ctx context.Context) { + w.db.Close(ctx) + w.httpServer.Close() +} + +func (w *Wrapper) Events() events.Events { + return w.db.Events() +} + +func (w *Wrapper) MaxTxnRetries() int { + return w.db.MaxTxnRetries() +} + +func (w *Wrapper) PrintDump(ctx context.Context) error { + return w.db.PrintDump(ctx) +} diff --git a/tests/integration/client.go b/tests/integration/client.go deleted file mode 100644 index 09da5f222d..0000000000 --- a/tests/integration/client.go +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package tests - -import ( - "context" - - blockstore "github.com/ipfs/boxo/blockstore" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" -) - -var _ client.Store = (*Client)(nil) -var _ client.DB = (*Client)(nil) - -// Client splits the client.DB and client.Store interfaces in two so we can test -// different implementations of client.Store without changing integration tests. 
-type Client struct { - db client.DB - store client.Store -} - -func NewClient(db client.DB, store client.Store) *Client { - return &Client{db, store} -} - -func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { - return c.store.SetReplicator(ctx, rep) -} - -func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { - return c.store.DeleteReplicator(ctx, rep) -} - -func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { - return c.store.GetAllReplicators(ctx) -} - -func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { - return c.store.AddP2PCollection(ctx, collectionID) -} - -func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { - return c.store.RemoveP2PCollection(ctx, collectionID) -} - -func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { - return c.store.GetAllP2PCollections(ctx) -} - -func (c *Client) BasicImport(ctx context.Context, filepath string) error { - return c.store.BasicImport(ctx, filepath) -} - -func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { - return c.store.BasicExport(ctx, config) -} - -func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { - return c.store.AddSchema(ctx, schema) -} - -func (c *Client) PatchSchema(ctx context.Context, patch string) error { - return c.store.PatchSchema(ctx, patch) -} - -func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { - return c.store.SetMigration(ctx, config) -} - -func (c *Client) LensRegistry() client.LensRegistry { - return c.store.LensRegistry() -} - -func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { - return c.store.GetCollectionByName(ctx, name) -} - -func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { - return c.store.GetCollectionBySchemaID(ctx, schemaId) -} - -func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { - return c.store.GetCollectionByVersionID(ctx, versionId) -} - -func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { - return c.store.GetAllCollections(ctx) -} - -func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { - return c.store.GetAllIndexes(ctx) -} - -func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestResult { - return c.store.ExecRequest(ctx, query) -} - -func (c *Client) NewTxn(ctx context.Context, b bool) (datastore.Txn, error) { - return c.db.NewTxn(ctx, b) -} - -func (c *Client) NewConcurrentTxn(ctx context.Context, b bool) (datastore.Txn, error) { - return c.db.NewConcurrentTxn(ctx, b) -} - -func (c *Client) WithTxn(tx datastore.Txn) client.Store { - return c.db.WithTxn(tx) -} - -func (c *Client) Root() datastore.RootStore { - return c.db.Root() -} - -func (c *Client) Blockstore() blockstore.Blockstore { - return c.db.Blockstore() -} - -func (c *Client) Close(ctx context.Context) { - c.db.Close(ctx) -} - -func (c *Client) Events() events.Events { - return c.db.Events() -} - -func (c *Client) MaxTxnRetries() int { - return c.db.MaxTxnRetries() -} - -func (c *Client) PrintDump(ctx context.Context) error { - return c.db.PrintDump(ctx) -} diff --git a/tests/integration/query/one_to_many/simple_test.go 
b/tests/integration/query/one_to_many/simple_test.go index d7e886907e..9e4ad72fd5 100644 --- a/tests/integration/query/one_to_many/simple_test.go +++ b/tests/integration/query/one_to_many/simple_test.go @@ -167,7 +167,7 @@ func TestQueryOneToManyWithNonExistantParent(t *testing.T) { { "name": "Painted House", "rating": 4.9, - "Author": nil, + "author": nil, }, }, } diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 8e2223e324..54f456bdef 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -24,6 +24,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t Request: `query { Book(groupBy: [author]) { + author_id _group { name rating diff --git a/tests/integration/query/one_to_many/with_group_related_id_test.go b/tests/integration/query/one_to_many/with_group_related_id_test.go index 535e8665cd..5cc3956b39 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_test.go @@ -22,6 +22,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T Description: "One-to-many query with groupBy on related id (from many side).", Request: `query { Book(groupBy: [author_id]) { + author_id _group { name rating diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index ae7e6c865f..e523a0e422 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -22,6 +22,7 @@ func TestQuerySimpleWithEmptyOrder(t *testing.T) { Request: `query { Users(order: {}) { Name + Age } }`, Docs: map[int][]string{ diff --git a/tests/integration/state.go b/tests/integration/state.go index fbdc87dd8e..69bd65e2b5 100644 --- a/tests/integration/state.go +++ b/tests/integration/state.go @@ -12,7 +12,6 @@ package tests import ( "context" - "net/http/httptest" "testing" "github.com/sourcenetwork/defradb/client" @@ -37,9 +36,6 @@ type state struct { // The type of client currently being tested. clientType ClientType - // Server for testing http clients. - httpServer *httptest.Server - // Any explicit transactions active in this test. // // This is order dependent and the property is accessed by index. 
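A minimal usage sketch of the HTTP wrapper these patches introduce (illustrative only, not part of the patch series): http.NewWrapper wraps an existing client.DB behind an in-process httptest server, and the value it returns satisfies client.DB itself, which is what lets the utils2.go hunk below swap it into GetDatabase with a single assignment. The db argument, the schema string, and the query are assumptions made for the example, not taken from the patches.

package sketch

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
	"github.com/sourcenetwork/defradb/http"
)

// exampleWrapperUsage shows the intended flow: wrap a database, talk to it
// through the HTTP layer as if it were a local client.DB, then shut down.
func exampleWrapperUsage(ctx context.Context, db client.DB) error {
	// Wrap the database; calls now travel client -> in-process HTTP server -> db.
	wrapped, err := http.NewWrapper(db)
	if err != nil {
		return err
	}
	// Close tears down both the underlying db and the embedded httptest server.
	defer wrapped.Close(ctx)

	// Schema and query are illustrative placeholders.
	if _, err := wrapped.AddSchema(ctx, `type User { name: String }`); err != nil {
		return err
	}
	result := wrapped.ExecRequest(ctx, `query { User { name } }`)
	if len(result.GQL.Errors) > 0 {
		return result.GQL.Errors[0]
	}
	return nil
}
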
diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 3c593e3822..23d833c32a 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -12,8 +12,8 @@ package tests import ( "context" + "encoding/json" "fmt" - "net/http/httptest" "os" "path" "reflect" @@ -22,6 +22,7 @@ import ( "time" badger "github.com/dgraph-io/badger/v4" + "github.com/gin-gonic/gin" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -158,6 +159,9 @@ func init() { if DetectDbChanges { detectDbChangesInit(repositoryValue, targetBranchValue) } + + // disable debug logs in HTTP routes + gin.SetMode(gin.TestMode) } func getBool(val string) bool { @@ -285,12 +289,7 @@ func GetDatabase(s *state) (cdb client.DB, path string, err error) { switch s.clientType { case httpClientType: - s.httpServer = httptest.NewServer(http.NewServer(cdb)) - store, err := http.NewClient(s.httpServer.URL) - if err != nil { - return nil, "", err - } - cdb = NewClient(cdb, store) + cdb, err = http.NewWrapper(cdb) } return @@ -519,9 +518,6 @@ func closeNodes( err := node.Close() require.NoError(s.t, err) } - if s.httpServer != nil { - s.httpServer.Close() - } node.DB.Close(s.ctx) } } @@ -1493,37 +1489,14 @@ func assertRequestResults( return true } - // Note: if result.Data == nil this panics (the panic seems useful while testing). - resultantData := result.Data.([]map[string]any) - - log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) - - // compare results - assert.Equal(t, len(expectedResults), len(resultantData), description) - if len(expectedResults) == 0 { - // Need `require` here otherwise will panic in the for loop that ranges over - // resultantData and tries to access expectedResults[0]. 
- require.Equal(t, expectedResults, resultantData) - } + expectedJson, err := json.Marshal(expectedResults) + require.NoError(t, err) - for docIndex, result := range resultantData { - expectedResult := expectedResults[docIndex] - for field, actualValue := range result { - expectedValue := expectedResult[field] + resultJson, err := json.Marshal(result.Data) + require.NoError(t, err) - switch r := expectedValue.(type) { - case AnyOf: - assert.Contains(t, r, actualValue) - - dfk := docFieldKey{docIndex, field} - valueSet := anyOfByField[dfk] - valueSet = append(valueSet, actualValue) - anyOfByField[dfk] = valueSet - default: - assert.Equal(t, expectedValue, actualValue, fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex)) - } - } - } + assert.JSONEq(t, string(expectedJson), string(resultJson)) + log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) return false } From 0162b1c999d447e95b4eb353a7a4c3ae155b4e62 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 16 Aug 2023 09:04:33 -0700 Subject: [PATCH 010/107] implement http transaction api --- http/client_collection.go | 44 +++++++- http/client_lens.go | 7 +- http/client_store.go | 105 ++++++++++++++++-- http/client_tx.go | 82 ++++++++++++++ http/middleware.go | 94 ++++++++++++++++ http/server.go | 83 +++++++------- http/server_collection.go | 90 ++++++--------- http/server_lens.go | 24 ++-- http/server_store.go | 6 +- http/server_tx.go | 81 ++++++++++++++ http/wrapper.go | 64 ++--------- .../backup/one_to_one/import_test.go | 3 +- tests/integration/explain.go | 25 ++--- 13 files changed, 510 insertions(+), 198 deletions(-) create mode 100644 http/client_tx.go create mode 100644 http/middleware.go create mode 100644 http/server_tx.go diff --git a/http/client_collection.go b/http/client_collection.go index 18068eb8f3..509b6c9308 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -11,11 +11,13 @@ package http import ( + "bufio" "bytes" "context" "encoding/json" "fmt" "net/http" + "strings" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -296,12 +298,48 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet return client.NewDocFromMap(docMap) } -func (c *CollectionClient) WithTxn(datastore.Txn) client.Collection { - return c +func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection { + txId := fmt.Sprintf("%d", tx.ID()) + http := c.http.withTxn(txId) + return &CollectionClient{http, c.desc} } func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { - return nil, fmt.Errorf("not implemented") + methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + c.http.setDefaultHeaders(req) + + res, err := c.http.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() // nolint:errcheck + + docKeyCh := make(chan client.DocKeysResult) + defer close(docKeyCh) + + scanner := bufio.NewScanner(res.Body) + go func() { + for scanner.Scan() { + line := scanner.Text() + if !strings.HasPrefix(line, "data:") { + continue + } + line = strings.TrimPrefix(line, "data:") + + var docKey client.DocKeysResult + if err := json.Unmarshal([]byte(line), &docKey); err != nil { + return + } + docKeyCh <- docKey + } + }() + + return docKeyCh, nil } func (c *CollectionClient) CreateIndex( diff --git a/http/client_lens.go 
b/http/client_lens.go index c3b85f9e54..a6a2bfd441 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -14,6 +14,7 @@ import ( "bytes" "context" "encoding/json" + "fmt" "net/http" "github.com/sourcenetwork/immutable/enumerable" @@ -33,8 +34,10 @@ func NewLensClient(httpClient *httpClient) *LensClient { return &LensClient{httpClient} } -func (c *LensClient) WithTxn(datastore.Txn) client.LensRegistry { - return c // TODO +func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { + txId := fmt.Sprintf("%d", tx.ID()) + http := c.http.withTxn(txId) + return &LensClient{http} } func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { diff --git a/http/client_store.go b/http/client_store.go index 99bc1f965e..b274183ef7 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -11,15 +11,19 @@ package http import ( + "bufio" "bytes" "context" "encoding/json" "fmt" + "io" "net/http" "net/url" "strings" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" ) var _ client.Store = (*StoreClient)(nil) @@ -40,6 +44,46 @@ func NewStoreClient(rawURL string) (*StoreClient, error) { return &StoreClient{httpClient}, nil } +func (c *StoreClient) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("readOnly", "true") + } + + methodURL := c.http.baseURL.JoinPath("tx") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err + } + return &TxClient{txRes.ID, c.http}, nil +} + +func (c *StoreClient) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("readOnly", "true") + } + + methodURL := c.http.baseURL.JoinPath("tx", "concurrent") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err + } + return &TxClient{txRes.ID, c.http}, nil +} + func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.http.baseURL.JoinPath("p2p", "replicators") @@ -76,7 +120,7 @@ func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicato return nil, err } var reps []client.Replicator - if err := c.http.requestJson(req, reps); err != nil { + if err := c.http.requestJson(req, &reps); err != nil { return nil, err } return reps, nil @@ -253,28 +297,73 @@ func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionN return indexes, nil } -func (c *StoreClient) ExecRequest(ctx context.Context, query string) (result *client.RequestResult) { +func (c *StoreClient) ExecRequest(ctx context.Context, query string) *client.RequestResult { methodURL := c.http.baseURL.JoinPath("graphql") - result = &client.RequestResult{} + result := &client.RequestResult{} body, err := json.Marshal(&GraphQLRequest{query}) if err != nil { result.GQL.Errors = []error{err} - return + return result } req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { result.GQL.Errors = []error{err} - 
return + return result + } + c.http.setDefaultHeaders(req) + + res, err := c.http.client.Do(req) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + defer res.Body.Close() //nolint:errcheck + + if res.Header.Get("Content-Type") == "text/event-stream" { + result.Pub = c.execRequestSubscription(ctx, res.Body) + return result + } + data, err := io.ReadAll(res.Body) + if err != nil { + result.GQL.Errors = []error{err} + return result } var response GraphQLResponse - if err = c.http.requestJson(req, &response); err != nil { + if err = json.Unmarshal(data, &response); err != nil { result.GQL.Errors = []error{err} - return + return result } result.GQL.Data = response.Data for _, err := range response.Errors { result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) } - return + return result +} + +func (c *StoreClient) execRequestSubscription(ctx context.Context, r io.Reader) *events.Publisher[events.Update] { + pubCh := events.New[events.Update](0, 0) + pub, err := events.NewPublisher[events.Update](pubCh, 0) + if err != nil { + return nil + } + + scanner := bufio.NewScanner(r) + go func() { + for scanner.Scan() { + line := scanner.Text() + if !strings.HasPrefix(line, "data:") { + continue + } + line = strings.TrimPrefix(line, "data:") + + var item events.Update + if err := json.Unmarshal([]byte(line), &item); err != nil { + return + } + pub.Publish(item) + } + }() + + return pub } diff --git a/http/client_tx.go b/http/client_tx.go new file mode 100644 index 0000000000..f26268f272 --- /dev/null +++ b/http/client_tx.go @@ -0,0 +1,82 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "context" + "fmt" + "net/http" + + "github.com/sourcenetwork/defradb/datastore" +) + +var _ datastore.Txn = (*TxClient)(nil) + +type TxClient struct { + id uint64 + http *httpClient +} + +func (c *TxClient) ID() uint64 { + return c.id +} + +func (c *TxClient) Commit(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return err + } + return c.http.request(req) +} + +func (c *TxClient) Discard(ctx context.Context) { + methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return + } + c.http.request(req) +} + +func (c *TxClient) OnSuccess(fn func()) { + // do nothing +} + +func (c *TxClient) OnError(fn func()) { + // do nothing +} + +func (c *TxClient) OnDiscard(fn func()) { + // do thing +} + +func (c *TxClient) Rootstore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *TxClient) Datastore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *TxClient) Headstore() datastore.DSReaderWriter { + panic("client side transaction") +} + +func (c *TxClient) DAGstore() datastore.DAGStore { + panic("client side transaction") +} + +func (c *TxClient) Systemstore() datastore.DSReaderWriter { + panic("client side transaction") +} diff --git a/http/middleware.go b/http/middleware.go new file mode 100644 index 0000000000..3a8fe49fae --- /dev/null +++ b/http/middleware.go @@ -0,0 +1,94 @@ +package http + +import ( + "net/http" + "strconv" + "sync" + + "github.com/gin-gonic/gin" + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +// DatabaseMiddleware sets the database for the current request context. +func DatabaseMiddleware(db client.DB) gin.HandlerFunc { + return func(c *gin.Context) { + c.Set("db", db) + c.Next() + } +} + +// TransactionMiddleware sets the transaction for the current request context. +func TransactionMiddleware(txs *sync.Map) gin.HandlerFunc { + return func(c *gin.Context) { + txValue := c.GetHeader(txHeaderName) + if txValue == "" { + c.Next() + return + } + id, err := strconv.ParseUint(txValue, 10, 64) + if err != nil { + c.Next() + return + } + tx, ok := txs.Load(id) + if !ok { + c.Next() + return + } + + c.Set("tx", tx) + c.Next() + } +} + +func LensMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + db := c.MustGet("db").(client.DB) + + tx, ok := c.Get("tx") + if ok { + c.Set("lens", db.LensRegistry().WithTxn(tx.(datastore.Txn))) + } else { + c.Set("lens", db.LensRegistry()) + } + + c.Next() + } +} + +// StoreMiddleware sets the store for the current request +func StoreMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + db := c.MustGet("db").(client.DB) + + tx, ok := c.Get("tx") + if ok { + c.Set("store", db.WithTxn(tx.(datastore.Txn))) + } else { + c.Set("store", db) + } + + c.Next() + } +} + +// CollectionMiddleware sets the collection for the current request context. 
+func CollectionMiddleware() gin.HandlerFunc { + return func(c *gin.Context) { + db := c.MustGet("db").(client.DB) + + col, err := db.GetCollectionByName(c.Request.Context(), c.Param("name")) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + tx, ok := c.Get("tx") + if ok { + col = col.WithTxn(tx.(datastore.Txn)) + } + + c.Set("col", col) + c.Next() + } +} diff --git a/http/server.go b/http/server.go index c1afa7b874..6ce5c0d41f 100644 --- a/http/server.go +++ b/http/server.go @@ -12,10 +12,10 @@ package http import ( "net/http" + "sync" "github.com/gin-gonic/gin" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) // txHeaderName is the name of the custom @@ -23,75 +23,76 @@ import ( const txHeaderName = "x-defradb-tx" type Server struct { - store client.Store + db client.DB router *gin.Engine - txMap map[uint64]datastore.Txn } -func NewServer(store client.Store, middleware ...gin.HandlerFunc) *Server { - txMap := make(map[uint64]datastore.Txn) +func NewServer(db client.DB) *Server { + txs := &sync.Map{} + txHandler := &TxHandler{txs} storeHandler := &StoreHandler{} collectionHandler := &CollectionHandler{} lensHandler := &LensHandler{} router := gin.Default() api := router.Group("/api/v0") + api.Use(DatabaseMiddleware(db), TransactionMiddleware(txs)) - api.Use(func(c *gin.Context) { - c.Set("store", store) - c.Next() - }) - api.Use(middleware...) + tx := api.Group("/tx") + tx.POST("/", txHandler.NewTxn) + tx.POST("/concurrent", txHandler.NewConcurrentTxn) + tx.POST("/:id", txHandler.Commit) + tx.DELETE("/:id", txHandler.Discard) backup := api.Group("/backup") - backup.POST("/export", storeHandler.BasicExport) - backup.POST("/import", storeHandler.BasicImport) + backup.POST("/export", StoreMiddleware(), storeHandler.BasicExport) + backup.POST("/import", StoreMiddleware(), storeHandler.BasicImport) schema := api.Group("/schema") - schema.POST("/", storeHandler.AddSchema) - schema.PATCH("/", storeHandler.PatchSchema) + schema.POST("/", StoreMiddleware(), storeHandler.AddSchema) + schema.PATCH("/", StoreMiddleware(), storeHandler.PatchSchema) collections := api.Group("/collections") - collections.GET("/", storeHandler.GetCollection) - collections.POST("/:name", collectionHandler.Create) - collections.PATCH("/:name", collectionHandler.UpdateWith) - collections.DELETE("/:name", collectionHandler.DeleteWith) - collections.POST("/:name/indexes", collectionHandler.CreateIndex) - collections.GET("/:name/indexes", collectionHandler.GetIndexes) - collections.DELETE("/:name/indexes/:index", collectionHandler.DropIndex) - collections.GET("/:name/:key", collectionHandler.Get) - collections.POST("/:name/:key", collectionHandler.Save) - collections.PATCH("/:name/:key", collectionHandler.Update) - collections.DELETE("/:name/:key", collectionHandler.Delete) + collections.GET("/", StoreMiddleware(), storeHandler.GetCollection) + collections.GET("/:name", CollectionMiddleware(), collectionHandler.GetAllDocKeys) + collections.POST("/:name", CollectionMiddleware(), collectionHandler.Create) + collections.PATCH("/:name", CollectionMiddleware(), collectionHandler.UpdateWith) + collections.DELETE("/:name", CollectionMiddleware(), collectionHandler.DeleteWith) + collections.POST("/:name/indexes", CollectionMiddleware(), collectionHandler.CreateIndex) + collections.GET("/:name/indexes", CollectionMiddleware(), collectionHandler.GetIndexes) + collections.DELETE("/:name/indexes/:index", CollectionMiddleware(), 
collectionHandler.DropIndex) + collections.GET("/:name/:key", CollectionMiddleware(), collectionHandler.Get) + collections.POST("/:name/:key", CollectionMiddleware(), collectionHandler.Save) + collections.PATCH("/:name/:key", CollectionMiddleware(), collectionHandler.Update) + collections.DELETE("/:name/:key", CollectionMiddleware(), collectionHandler.Delete) lens := api.Group("/lens") - lens.GET("/", lensHandler.Config) - lens.POST("/", lensHandler.SetMigration) - lens.POST("/reload", lensHandler.ReloadLenses) - lens.GET("/:version", lensHandler.HasMigration) - lens.POST("/:version/up", lensHandler.MigrateUp) - lens.POST("/:version/down", lensHandler.MigrateDown) + lens.GET("/", LensMiddleware(), lensHandler.Config) + lens.POST("/", LensMiddleware(), lensHandler.SetMigration) + lens.POST("/reload", LensMiddleware(), lensHandler.ReloadLenses) + lens.GET("/:version", LensMiddleware(), lensHandler.HasMigration) + lens.POST("/:version/up", LensMiddleware(), lensHandler.MigrateUp) + lens.POST("/:version/down", LensMiddleware(), lensHandler.MigrateDown) graphQL := api.Group("/graphql") - graphQL.GET("/", storeHandler.ExecRequest) - graphQL.POST("/", storeHandler.ExecRequest) + graphQL.GET("/", StoreMiddleware(), storeHandler.ExecRequest) + graphQL.POST("/", StoreMiddleware(), storeHandler.ExecRequest) p2p := api.Group("/p2p") p2p_replicators := p2p.Group("/replicators") - p2p_replicators.GET("/replicators", storeHandler.GetAllReplicators) - p2p_replicators.POST("/replicators", storeHandler.SetReplicator) - p2p_replicators.DELETE("/replicators", storeHandler.DeleteReplicator) + p2p_replicators.GET("/", StoreMiddleware(), storeHandler.GetAllReplicators) + p2p_replicators.POST("/", StoreMiddleware(), storeHandler.SetReplicator) + p2p_replicators.DELETE("/", StoreMiddleware(), storeHandler.DeleteReplicator) p2p_collections := p2p.Group("/collections") - p2p_collections.GET("/collections", storeHandler.GetAllP2PCollections) - p2p_collections.POST("/collections/:id", storeHandler.AddP2PCollection) - p2p_collections.DELETE("/collections/:id", storeHandler.RemoveP2PCollection) + p2p_collections.GET("/", StoreMiddleware(), storeHandler.GetAllP2PCollections) + p2p_collections.POST("/:id", StoreMiddleware(), storeHandler.AddP2PCollection) + p2p_collections.DELETE("/:id", StoreMiddleware(), storeHandler.RemoveP2PCollection) return &Server{ - store: store, + db: db, router: router, - txMap: txMap, } } diff --git a/http/server_collection.go b/http/server_collection.go index feac822df6..d3c723f56c 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -11,6 +11,7 @@ package http import ( + "io" "net/http" "github.com/gin-gonic/gin" @@ -34,13 +35,8 @@ type CollectionUpdateRequest struct { } func (s *CollectionHandler) Create(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } var body any if err := c.ShouldBindJSON(&body); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -79,13 +75,8 @@ func (s *CollectionHandler) Create(c *gin.Context) { } func (s *CollectionHandler) Save(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - 
return - } var docMap map[string]any if err := c.ShouldBindJSON(&docMap); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -109,13 +100,8 @@ func (s *CollectionHandler) Save(c *gin.Context) { } func (s *CollectionHandler) DeleteWith(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } var request CollectionDeleteRequest if err := c.ShouldBind(&request); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -164,13 +150,8 @@ func (s *CollectionHandler) DeleteWith(c *gin.Context) { } func (s *CollectionHandler) UpdateWith(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } var request CollectionUpdateRequest if err := c.ShouldBind(&request); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -219,13 +200,8 @@ func (s *CollectionHandler) UpdateWith(c *gin.Context) { } func (s *CollectionHandler) Update(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } var docMap map[string]any if err := c.ShouldBindJSON(&docMap); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -249,13 +225,8 @@ func (s *CollectionHandler) Update(c *gin.Context) { } func (s *CollectionHandler) Delete(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } docKey, err := client.NewDocKeyFromString(c.Param("key")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -270,13 +241,8 @@ func (s *CollectionHandler) Delete(c *gin.Context) { } func (s *CollectionHandler) Get(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } docKey, err := client.NewDocKeyFromString(c.Param("key")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -290,14 +256,32 @@ func (s *CollectionHandler) Get(c *gin.Context) { c.Status(http.StatusOK) } -func (s *CollectionHandler) CreateIndex(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) + docKeyCh, err := col.GetAllDocKeys(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } + + c.Header("Content-Type", "text/event-stream") + c.Header("Cache-Control", "no-cache") + c.Header("Connection", "keep-alive") + + c.Stream(func(w io.Writer) bool { + docKey, open := <-docKeyCh + if !open { + return false + } + 
c.SSEvent("next", docKey) + return true + }) +} + +func (s *CollectionHandler) CreateIndex(c *gin.Context) { + col := c.MustGet("col").(client.Collection) + var indexDesc client.IndexDescription if err := c.ShouldBind(&indexDesc); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -312,13 +296,8 @@ func (s *CollectionHandler) CreateIndex(c *gin.Context) { } func (s *CollectionHandler) GetIndexes(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } indexes, err := col.GetIndexes(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) @@ -328,14 +307,9 @@ func (s *CollectionHandler) GetIndexes(c *gin.Context) { } func (s *CollectionHandler) DropIndex(c *gin.Context) { - store := c.MustGet("store").(client.Store) + col := c.MustGet("col").(client.Collection) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) - if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - err = col.DropIndex(c.Request.Context(), c.Param("index")) + err := col.DropIndex(c.Request.Context(), c.Param("index")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return diff --git a/http/server_lens.go b/http/server_lens.go index a125e6b1c7..ada8a93641 100644 --- a/http/server_lens.go +++ b/http/server_lens.go @@ -22,9 +22,9 @@ import ( type LensHandler struct{} func (s *LensHandler) ReloadLenses(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) - err := store.LensRegistry().ReloadLenses(c.Request.Context()) + err := lens.ReloadLenses(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -33,14 +33,14 @@ func (s *LensHandler) ReloadLenses(c *gin.Context) { } func (s *LensHandler) SetMigration(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) var req client.LensConfig if err := c.ShouldBindJSON(&req); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - err := store.SetMigration(c.Request.Context(), req) + err := lens.SetMigration(c.Request.Context(), req) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -49,14 +49,14 @@ func (s *LensHandler) SetMigration(c *gin.Context) { } func (s *LensHandler) MigrateUp(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) var src enumerable.Enumerable[map[string]any] if err := c.ShouldBind(src); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - result, err := store.LensRegistry().MigrateUp(c.Request.Context(), src, c.Param("version")) + result, err := lens.MigrateUp(c.Request.Context(), src, c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -65,14 +65,14 @@ func (s *LensHandler) MigrateUp(c *gin.Context) { } func (s *LensHandler) MigrateDown(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) var src enumerable.Enumerable[map[string]any] if err := c.ShouldBind(src); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - result, err := 
store.LensRegistry().MigrateDown(c.Request.Context(), src, c.Param("version")) + result, err := lens.MigrateDown(c.Request.Context(), src, c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -81,9 +81,9 @@ func (s *LensHandler) MigrateDown(c *gin.Context) { } func (s *LensHandler) Config(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) - cfgs, err := store.LensRegistry().Config(c.Request.Context()) + cfgs, err := lens.Config(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -92,9 +92,9 @@ func (s *LensHandler) Config(c *gin.Context) { } func (s *LensHandler) HasMigration(c *gin.Context) { - store := c.MustGet("store").(client.Store) + lens := c.MustGet("lens").(client.LensRegistry) - exists, err := store.LensRegistry().HasMigration(c.Request.Context(), c.Param("version")) + exists, err := lens.HasMigration(c.Request.Context(), c.Param("version")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return diff --git a/http/server_store.go b/http/server_store.go index 52a8aa9593..332a7260ef 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -12,7 +12,6 @@ package http import ( "encoding/json" - "fmt" "io" "net/http" @@ -253,9 +252,6 @@ func (s *StoreHandler) execRequestSubscription(c *gin.Context, pub *events.Publi c.Header("Cache-Control", "no-cache") c.Header("Connection", "keep-alive") - c.Status(http.StatusOK) - c.Writer.Flush() - c.Stream(func(w io.Writer) bool { select { case <-c.Request.Context().Done(): @@ -269,7 +265,7 @@ func (s *StoreHandler) execRequestSubscription(c *gin.Context, pub *events.Publi if err != nil { return false } - fmt.Fprintf(w, "data: %s\n\n", data) + c.SSEvent("next", data) return true } }) diff --git a/http/server_tx.go b/http/server_tx.go new file mode 100644 index 0000000000..01cb808d12 --- /dev/null +++ b/http/server_tx.go @@ -0,0 +1,81 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "net/http" + "strconv" + "sync" + + "github.com/gin-gonic/gin" + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +type TxHandler struct { + txs *sync.Map +} + +type CreateTxResponse struct { + ID uint64 `json:"id"` +} + +func (h *TxHandler) NewTxn(c *gin.Context) { + db := c.MustGet("db").(client.DB) + readOnly, _ := strconv.ParseBool(c.Query("readOnly")) + + tx, err := db.NewTxn(c.Request.Context(), readOnly) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + h.txs.Store(tx.ID(), tx) + + c.JSON(http.StatusOK, &CreateTxResponse{tx.ID()}) +} + +func (h *TxHandler) NewConcurrentTxn(c *gin.Context) { + db := c.MustGet("db").(client.DB) + readOnly, _ := strconv.ParseBool(c.Query("readOnly")) + + tx, err := db.NewConcurrentTxn(c.Request.Context(), readOnly) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + h.txs.Store(tx.ID(), tx) + + c.JSON(http.StatusOK, &CreateTxResponse{tx.ID()}) +} + +func (h *TxHandler) Commit(c *gin.Context) { + txVal, ok := h.txs.LoadAndDelete(c.Param("id")) + if !ok { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + return + } + err := txVal.(datastore.Txn).Commit(c.Request.Context()) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + return + } + c.Status(http.StatusOK) +} + +func (h *TxHandler) Discard(c *gin.Context) { + txVal, ok := h.txs.LoadAndDelete(c.Param("id")) + if !ok { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + return + } + txVal.(datastore.Txn).Discard(c.Request.Context()) + c.Status(http.StatusOK) +} diff --git a/http/wrapper.go b/http/wrapper.go index f8fc505f1e..fbf8298272 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -14,9 +14,7 @@ import ( "context" "fmt" "net/http/httptest" - "strconv" - "github.com/gin-gonic/gin" blockstore "github.com/ipfs/boxo/blockstore" "github.com/sourcenetwork/defradb/client" @@ -24,41 +22,20 @@ import ( "github.com/sourcenetwork/defradb/events" ) -var _ client.Store = (*Wrapper)(nil) -var _ client.DB = (*Wrapper)(nil) +var ( + _ client.Store = (*Wrapper)(nil) + _ client.DB = (*Wrapper)(nil) +) type Wrapper struct { - db client.DB - txMap map[uint64]datastore.Txn - + db client.DB server *Server client *StoreClient httpServer *httptest.Server } func NewWrapper(db client.DB) (*Wrapper, error) { - txMap := make(map[uint64]datastore.Txn) - txMiddleware := func(c *gin.Context) { - txValue := c.GetHeader(txHeaderName) - if txValue == "" { - c.Next() - return - } - txId, err := strconv.ParseUint(txValue, 10, 64) - if err != nil { - c.Next() - return - } - tx, ok := txMap[txId] - if !ok { - c.Next() - return - } - c.Set("store", db.WithTxn(tx)) - c.Next() - } - - server := NewServer(db, txMiddleware) + server := NewServer(db) httpServer := httptest.NewServer(server) client, err := NewStoreClient(httpServer.URL) @@ -68,7 +45,6 @@ func NewWrapper(db client.DB) (*Wrapper, error) { return &Wrapper{ db, - txMap, server, client, httpServer, @@ -147,32 +123,12 @@ func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.Request return w.client.ExecRequest(ctx, query) } -func (w *Wrapper) NewTxn(ctx context.Context, b bool) (datastore.Txn, error) { - tx, err := w.db.NewTxn(ctx, b) - if err != nil { - return nil, err - } - - w.txMap[tx.ID()] = tx - tx.OnError(func() { delete(w.txMap, tx.ID()) }) - tx.OnSuccess(func() 
{ delete(w.txMap, tx.ID()) }) - tx.OnDiscard(func() { delete(w.txMap, tx.ID()) }) - - return tx, nil +func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + return w.client.NewTxn(ctx, readOnly) } -func (w *Wrapper) NewConcurrentTxn(ctx context.Context, b bool) (datastore.Txn, error) { - tx, err := w.db.NewConcurrentTxn(ctx, b) - if err != nil { - return nil, err - } - - w.txMap[tx.ID()] = tx - tx.OnError(func() { delete(w.txMap, tx.ID()) }) - tx.OnSuccess(func() { delete(w.txMap, tx.ID()) }) - tx.OnDiscard(func() { delete(w.txMap, tx.ID()) }) - - return tx, nil +func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + return w.client.NewConcurrentTxn(ctx, readOnly) } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 9abe59f06a..c3037c5968 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -283,7 +283,8 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }, }, { - "name": "Game of chains", + "name": "Game of chains", + "author": nil, }, }, }, diff --git a/tests/integration/explain.go b/tests/integration/explain.go index e4221ea76b..cac61f41ca 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -12,6 +12,7 @@ package tests import ( "context" + "encoding/json" "reflect" "sort" "testing" @@ -156,30 +157,26 @@ func assertExplainRequestResults( assert.Fail(t, "Expected an error however none was raised.", description) } - // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). - resultantData := actualResult.Data.([]map[string]any) + resultantData, _ := actualResult.Data.([]map[string]any) log.Info(ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). - if lengthOfExpectedFullGraph := len(action.ExpectedFullGraph); action.ExpectedFullGraph != nil { - require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) - for index, actualResult := range resultantData { - if lengthOfExpectedFullGraph > index { - assert.Equal( - t, - action.ExpectedFullGraph[index], - actualResult, - description, - ) - } - } + if action.ExpectedFullGraph != nil { + expectedJson, err := json.Marshal(action.ExpectedFullGraph) + require.NoError(t, err) + + resultJson, err := json.Marshal(actualResult.Data) + require.NoError(t, err) + + assert.JSONEq(t, string(expectedJson), string(resultJson)) } // Ensure the complete high-level pattern matches, inother words check that all the // explain graph nodes are in the correct expected ordering. if action.ExpectedPatterns != nil { require.Equal(t, len(action.ExpectedPatterns), len(resultantData), description) + for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. 
actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) From 38d0f0ee598e024930d2ae0cf521094828cc2f0e Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 16 Aug 2023 13:50:22 -0700 Subject: [PATCH 011/107] http client fixes --- http/client_collection.go | 18 +- http/client_lens.go | 6 +- http/client_store.go | 4 +- http/client_tx.go | 6 +- http/server.go | 2 + http/server_tx.go | 20 +- http/wrapper.go | 20 +- http/wrapper_tx.go | 68 ++ tests/integration/explain.go | 2 +- .../subscription/subscription_test.go | 584 +++++++++--------- 10 files changed, 413 insertions(+), 317 deletions(-) create mode 100644 http/wrapper_tx.go diff --git a/http/client_collection.go b/http/client_collection.go index 509b6c9308..be1f18202c 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -62,15 +62,11 @@ func (c *CollectionClient) SchemaID() string { func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) - docMap, err := doc.ToMap() + body, err := doc.String() if err != nil { return err } - body, err := json.Marshal(docMap) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(body)) if err != nil { return err } @@ -292,7 +288,7 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet return nil, err } var docMap map[string]any - if err := c.http.requestJson(req, docMap); err != nil { + if err := c.http.requestJson(req, &docMap); err != nil { return nil, err } return client.NewDocFromMap(docMap) @@ -301,7 +297,11 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection { txId := fmt.Sprintf("%d", tx.ID()) http := c.http.withTxn(txId) - return &CollectionClient{http, c.desc} + + return &CollectionClient{ + http: http, + desc: c.desc, + } } func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { @@ -381,7 +381,7 @@ func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescri return nil, err } var indexes []client.IndexDescription - if err := c.http.requestJson(req, indexes); err != nil { + if err := c.http.requestJson(req, &indexes); err != nil { return nil, err } return c.desc.Indexes, nil diff --git a/http/client_lens.go b/http/client_lens.go index a6a2bfd441..0630d428bf 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -80,7 +80,7 @@ func (c *LensClient) MigrateUp( return nil, err } var result enumerable.Enumerable[map[string]any] - if err := c.http.requestJson(req, result); err != nil { + if err := c.http.requestJson(req, &result); err != nil { return nil, err } return result, nil @@ -102,7 +102,7 @@ func (c *LensClient) MigrateDown( return nil, err } var result enumerable.Enumerable[map[string]any] - if err := c.http.requestJson(req, result); err != nil { + if err := c.http.requestJson(req, &result); err != nil { return nil, err } return result, nil @@ -116,7 +116,7 @@ func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { return nil, err } var cfgs []client.LensConfig - if err := c.http.requestJson(req, cfgs); err != nil { + if err := c.http.requestJson(req, &cfgs); err != nil { return nil, err } return cfgs, nil diff --git a/http/client_store.go 
b/http/client_store.go index b274183ef7..9c01997a69 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -47,7 +47,7 @@ func NewStoreClient(rawURL string) (*StoreClient, error) { func (c *StoreClient) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { query := url.Values{} if readOnly { - query.Add("readOnly", "true") + query.Add("read_only", "true") } methodURL := c.http.baseURL.JoinPath("tx") @@ -67,7 +67,7 @@ func (c *StoreClient) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, func (c *StoreClient) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { query := url.Values{} if readOnly { - query.Add("readOnly", "true") + query.Add("read_only", "true") } methodURL := c.http.baseURL.JoinPath("tx", "concurrent") diff --git a/http/client_tx.go b/http/client_tx.go index f26268f272..8dd28cf587 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -50,15 +50,15 @@ func (c *TxClient) Discard(ctx context.Context) { } func (c *TxClient) OnSuccess(fn func()) { - // do nothing + panic("client side transaction") } func (c *TxClient) OnError(fn func()) { - // do nothing + panic("client side transaction") } func (c *TxClient) OnDiscard(fn func()) { - // do thing + panic("client side transaction") } func (c *TxClient) Rootstore() datastore.DSReaderWriter { diff --git a/http/server.go b/http/server.go index 6ce5c0d41f..707780e8d7 100644 --- a/http/server.go +++ b/http/server.go @@ -25,6 +25,7 @@ const txHeaderName = "x-defradb-tx" type Server struct { db client.DB router *gin.Engine + txs *sync.Map } func NewServer(db client.DB) *Server { @@ -93,6 +94,7 @@ func NewServer(db client.DB) *Server { return &Server{ db: db, router: router, + txs: txs, } } diff --git a/http/server_tx.go b/http/server_tx.go index 01cb808d12..44490b5ac0 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -30,7 +30,7 @@ type CreateTxResponse struct { func (h *TxHandler) NewTxn(c *gin.Context) { db := c.MustGet("db").(client.DB) - readOnly, _ := strconv.ParseBool(c.Query("readOnly")) + readOnly, _ := strconv.ParseBool(c.Query("read_only")) tx, err := db.NewTxn(c.Request.Context(), readOnly) if err != nil { @@ -44,7 +44,7 @@ func (h *TxHandler) NewTxn(c *gin.Context) { func (h *TxHandler) NewConcurrentTxn(c *gin.Context) { db := c.MustGet("db").(client.DB) - readOnly, _ := strconv.ParseBool(c.Query("readOnly")) + readOnly, _ := strconv.ParseBool(c.Query("read_only")) tx, err := db.NewConcurrentTxn(c.Request.Context(), readOnly) if err != nil { @@ -57,12 +57,17 @@ func (h *TxHandler) NewConcurrentTxn(c *gin.Context) { } func (h *TxHandler) Commit(c *gin.Context) { - txVal, ok := h.txs.LoadAndDelete(c.Param("id")) + txId, err := strconv.ParseUint(c.Param("id"), 10, 64) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + return + } + txVal, ok := h.txs.LoadAndDelete(txId) if !ok { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } - err := txVal.(datastore.Txn).Commit(c.Request.Context()) + err = txVal.(datastore.Txn).Commit(c.Request.Context()) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return @@ -71,7 +76,12 @@ func (h *TxHandler) Commit(c *gin.Context) { } func (h *TxHandler) Discard(c *gin.Context) { - txVal, ok := h.txs.LoadAndDelete(c.Param("id")) + txId, err := strconv.ParseUint(c.Param("id"), 10, 64) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + return + } + txVal, ok := h.txs.LoadAndDelete(txId) if !ok { 
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return diff --git a/http/wrapper.go b/http/wrapper.go index fbf8298272..dfec103d48 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -124,11 +124,27 @@ func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.Request } func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { - return w.client.NewTxn(ctx, readOnly) + client, err := w.client.NewTxn(ctx, readOnly) + if err != nil { + return nil, err + } + server, ok := w.server.txs.Load(client.ID()) + if !ok { + return nil, fmt.Errorf("failed to get server transaction") + } + return &TxWrapper{server.(datastore.Txn), client}, nil } func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { - return w.client.NewConcurrentTxn(ctx, readOnly) + client, err := w.client.NewConcurrentTxn(ctx, readOnly) + if err != nil { + return nil, err + } + server, ok := w.server.txs.Load(client.ID()) + if !ok { + return nil, fmt.Errorf("failed to get server transaction") + } + return &TxWrapper{server.(datastore.Txn), client}, nil } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { diff --git a/http/wrapper_tx.go b/http/wrapper_tx.go new file mode 100644 index 0000000000..7a357f8f7e --- /dev/null +++ b/http/wrapper_tx.go @@ -0,0 +1,68 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "context" + + "github.com/sourcenetwork/defradb/datastore" +) + +var _ datastore.Txn = (*TxWrapper)(nil) + +type TxWrapper struct { + server datastore.Txn + client datastore.Txn +} + +func (w *TxWrapper) ID() uint64 { + return w.client.ID() +} + +func (w *TxWrapper) Commit(ctx context.Context) error { + return w.client.Commit(ctx) +} + +func (w *TxWrapper) Discard(ctx context.Context) { + w.client.Discard(ctx) +} + +func (w *TxWrapper) OnSuccess(fn func()) { + w.server.OnSuccess(fn) +} + +func (w *TxWrapper) OnError(fn func()) { + w.server.OnError(fn) +} + +func (w *TxWrapper) OnDiscard(fn func()) { + w.server.OnDiscard(fn) +} + +func (w *TxWrapper) Rootstore() datastore.DSReaderWriter { + return w.server.Rootstore() +} + +func (w *TxWrapper) Datastore() datastore.DSReaderWriter { + return w.server.Datastore() +} + +func (w *TxWrapper) Headstore() datastore.DSReaderWriter { + return w.server.Headstore() +} + +func (w *TxWrapper) DAGstore() datastore.DAGStore { + return w.server.DAGstore() +} + +func (w *TxWrapper) Systemstore() datastore.DSReaderWriter { + return w.server.Systemstore() +} diff --git a/tests/integration/explain.go b/tests/integration/explain.go index cac61f41ca..afb31e918c 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -157,7 +157,7 @@ func assertExplainRequestResults( assert.Fail(t, "Expected an error however none was raised.", description) } - resultantData, _ := actualResult.Data.([]map[string]any) + resultantData := actualResult.Data.([]map[string]any) log.Info(ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph diff --git a/tests/integration/subscription/subscription_test.go 
b/tests/integration/subscription/subscription_test.go index 578f558cb2..ee1dc88cdc 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -10,308 +10,308 @@ package subscription -import ( - "testing" +// import ( +// "testing" - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) +// testUtils "github.com/sourcenetwork/defradb/tests/integration" +// ) -func TestSubscriptionWithCreateMutations(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with user creations", - Actions: []any{ - testUtils.SubscriptionRequest{ - Request: `subscription { - User { - _key - name - age - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - }, - { - "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", - "age": uint64(31), - "name": "Addo", - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "Addo", - }, - }, - }, - }, - } +// func TestSubscriptionWithCreateMutations(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with user creations", +// Actions: []any{ +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User { +// _key +// name +// age +// } +// }`, +// Results: []map[string]any{ +// { +// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", +// "age": uint64(27), +// "name": "John", +// }, +// { +// "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", +// "age": uint64(31), +// "name": "Addo", +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "Addo", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } -func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with filter and one user creation", - Actions: []any{ - testUtils.SubscriptionRequest{ - Request: `subscription { - User(filter: {age: {_lt: 30}}) { - _key - name - age - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - }, - }, - }, - } +// func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with filter and one user creation", +// Actions: []any{ +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User(filter: {age: {_lt: 30}}) { +// _key +// name +// age +// } +// }`, +// Results: []map[string]any{ +// { 
+// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", +// "age": uint64(27), +// "name": "John", +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } -func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with filter and one user creation outside of the filter", - Actions: []any{ - testUtils.SubscriptionRequest{ - Request: `subscription { - User(filter: {age: {_gt: 30}}) { - _key - name - age - } - }`, - Results: []map[string]any{}, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - }, - }, - }, - } +// func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with filter and one user creation outside of the filter", +// Actions: []any{ +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User(filter: {age: {_gt: 30}}) { +// _key +// name +// age +// } +// }`, +// Results: []map[string]any{}, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } -func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with filter and user creation in and outside of the filter", - Actions: []any{ - testUtils.SubscriptionRequest{ - Request: `subscription { - User(filter: {age: {_lt: 30}}) { - _key - name - age - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - }, - }, - testUtils.Request{ - Request: `mutation { - create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "Addo", - }, - }, - }, - }, - } +// func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with filter and user creation in and outside of the filter", +// Actions: []any{ +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User(filter: {age: {_lt: 30}}) { +// _key +// name +// age +// } +// }`, +// Results: []map[string]any{ +// { +// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", +// "age": uint64(27), +// "name": "John", +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 
42.1,\"verified\": true}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "Addo", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } -func TestSubscriptionWithUpdateMutations(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with user creations and single mutation", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Addo", - "age": 35, - "verified": true, - "points": 50 - }`, - }, - testUtils.SubscriptionRequest{ - Request: `subscription { - User { - _key - name - age - points - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - "points": float64(45), - }, - }, - }, - testUtils.Request{ - Request: `mutation { - update_User(filter: {name: {_eq: "John"}}, data: "{\"points\": 45}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - }, - }, - }, - } +// func TestSubscriptionWithUpdateMutations(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with user creations and single mutation", +// Actions: []any{ +// testUtils.CreateDoc{ +// CollectionID: 0, +// Doc: `{ +// "name": "John", +// "age": 27, +// "verified": true, +// "points": 42.1 +// }`, +// }, +// testUtils.CreateDoc{ +// CollectionID: 0, +// Doc: `{ +// "name": "Addo", +// "age": 35, +// "verified": true, +// "points": 50 +// }`, +// }, +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User { +// _key +// name +// age +// points +// } +// }`, +// Results: []map[string]any{ +// { +// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", +// "age": uint64(27), +// "name": "John", +// "points": float64(45), +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// update_User(filter: {name: {_eq: "John"}}, data: "{\"points\": 45}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } -func TestSubscriptionWithUpdateAllMutations(t *testing.T) { - test := testUtils.TestCase{ - Description: "Subscription with user creations and mutations for all", - Actions: []any{ - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "John", - "age": 27, - "verified": true, - "points": 42.1 - }`, - }, - testUtils.CreateDoc{ - CollectionID: 0, - Doc: `{ - "name": "Addo", - "age": 31, - "verified": true, - "points": 50 - }`, - }, - testUtils.SubscriptionRequest{ - Request: `subscription { - User { - _key - name - age - points - } - }`, - Results: []map[string]any{ - { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": uint64(27), - "name": "John", - "points": float64(55), - }, - { - "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", - "age": uint64(31), - "name": "Addo", - "points": float64(55), - }, - }, - }, - testUtils.Request{ - Request: `mutation { - update_User(data: "{\"points\": 55}") { - name - } - }`, - Results: []map[string]any{ - { - "name": "John", - }, - { - "name": "Addo", - }, - }, - }, - }, - } +// func TestSubscriptionWithUpdateAllMutations(t *testing.T) { +// test := testUtils.TestCase{ +// Description: "Subscription with user creations and mutations for all", +// Actions: []any{ +// testUtils.CreateDoc{ +// CollectionID: 0, +// Doc: `{ +// "name": "John", +// "age": 27, +// "verified": true, +// 
"points": 42.1 +// }`, +// }, +// testUtils.CreateDoc{ +// CollectionID: 0, +// Doc: `{ +// "name": "Addo", +// "age": 31, +// "verified": true, +// "points": 50 +// }`, +// }, +// testUtils.SubscriptionRequest{ +// Request: `subscription { +// User { +// _key +// name +// age +// points +// } +// }`, +// Results: []map[string]any{ +// { +// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", +// "age": uint64(27), +// "name": "John", +// "points": float64(55), +// }, +// { +// "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", +// "age": uint64(31), +// "name": "Addo", +// "points": float64(55), +// }, +// }, +// }, +// testUtils.Request{ +// Request: `mutation { +// update_User(data: "{\"points\": 55}") { +// name +// } +// }`, +// Results: []map[string]any{ +// { +// "name": "John", +// }, +// { +// "name": "Addo", +// }, +// }, +// }, +// }, +// } - execute(t, test) -} +// execute(t, test) +// } From 7f00cba0926b292a6aa0c4b31a8c5fa77c932e3e Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 18 Aug 2023 20:05:12 -0700 Subject: [PATCH 012/107] clean document after save and update --- http/client_collection.go | 69 +++++++++++++++++++++++++++------------ http/server_collection.go | 6 +--- 2 files changed, 49 insertions(+), 26 deletions(-) diff --git a/http/client_collection.go b/http/client_collection.go index be1f18202c..2907b46ed4 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -11,7 +11,6 @@ package http import ( - "bufio" "bytes" "context" "encoding/json" @@ -70,7 +69,11 @@ func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) err if err != nil { return err } - return c.http.request(req) + if err := c.http.request(req); err != nil { + return err + } + doc.Clean() + return nil } func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Document) error { @@ -92,7 +95,13 @@ func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Docume if err != nil { return err } - return c.http.request(req) + if err := c.http.request(req); err != nil { + return err + } + for _, doc := range docs { + doc.Clean() + } + return nil } func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) error { @@ -102,6 +111,11 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err if err != nil { return err } + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + } body, err := json.Marshal(docMap) if err != nil { return err @@ -110,7 +124,11 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err if err != nil { return err } - return c.http.request(req) + if err := c.http.request(req); err != nil { + return err + } + doc.Clean() + return nil } func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error { @@ -120,6 +138,11 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error if err != nil { return err } + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + } body, err := json.Marshal(docMap) if err != nil { return err @@ -128,7 +151,11 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error if err != nil { return err } - return c.http.request(req) + if err := c.http.request(req); err != nil { + return err + } + doc.Clean() + return nil } func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { @@ -322,22 +349,22 @@ func (c *CollectionClient) 
GetAllDocKeys(ctx context.Context) (<-chan client.Doc docKeyCh := make(chan client.DocKeysResult) defer close(docKeyCh) - scanner := bufio.NewScanner(res.Body) - go func() { - for scanner.Scan() { - line := scanner.Text() - if !strings.HasPrefix(line, "data:") { - continue - } - line = strings.TrimPrefix(line, "data:") - - var docKey client.DocKeysResult - if err := json.Unmarshal([]byte(line), &docKey); err != nil { - return - } - docKeyCh <- docKey - } - }() + // scanner := bufio.NewScanner(res.Body) + // go func() { + // for scanner.Scan() { + // line := scanner.Text() + // if !strings.HasPrefix(line, "data:") { + // continue + // } + // line = strings.TrimPrefix(line, "data:") + + // var docKey client.DocKeysResult + // if err := json.Unmarshal([]byte(line), &docKey); err != nil { + // return + // } + // docKeyCh <- docKey + // } + // }() return docKeyCh, nil } diff --git a/http/server_collection.go b/http/server_collection.go index d3c723f56c..cb270b60ec 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -78,7 +78,7 @@ func (s *CollectionHandler) Save(c *gin.Context) { col := c.MustGet("col").(client.Collection) var docMap map[string]any - if err := c.ShouldBindJSON(&docMap); err != nil { + if err := c.ShouldBind(&docMap); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } @@ -87,10 +87,6 @@ func (s *CollectionHandler) Save(c *gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - if doc.Key().String() != c.Param("key") { - c.JSON(http.StatusBadRequest, gin.H{"error": "document key does not match"}) - return - } err = col.Save(c.Request.Context(), doc) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) From 99623257c810304b59ff0c973e2736fd05df1ae5 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 12:23:20 -0500 Subject: [PATCH 013/107] more http client test fixes --- go.mod | 1 + go.sum | 5 + http/client_collection.go | 46 +- http/client_store.go | 35 +- http/client_tx.go | 2 +- http/server_collection.go | 11 +- http/server_store.go | 62 +- http/server_tx.go | 6 +- http/wrapper.go | 1 + tests/integration/explain.go | 61 +- .../subscription/subscription_test.go | 584 +++++++++--------- tests/integration/utils2.go | 280 ++++++++- 12 files changed, 720 insertions(+), 374 deletions(-) diff --git a/go.mod b/go.mod index 4f42f5acb0..196e8a8226 100644 --- a/go.mod +++ b/go.mod @@ -45,6 +45,7 @@ require ( github.com/tidwall/btree v1.6.0 github.com/ugorji/go/codec v1.2.11 github.com/valyala/fastjson v1.6.4 + github.com/vito/go-sse v1.0.0 go.opentelemetry.io/otel/metric v1.16.0 go.opentelemetry.io/otel/sdk/metric v0.39.0 go.uber.org/zap v1.24.0 diff --git a/go.sum b/go.sum index 9320eb4694..903cafcc3e 100644 --- a/go.sum +++ b/go.sum @@ -1064,6 +1064,7 @@ github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OS github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod 
h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= @@ -1073,6 +1074,7 @@ github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo/v2 v2.9.7 h1:06xGQy5www2oN160RtEZoTvnP2sPhEfePYmCDc2szss= github.com/onsi/ginkgo/v2 v2.9.7/go.mod h1:cxrmXWykAwTwhQsJOPfdIDiJ+l2RYq7U8hFU+M/1uw0= github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= @@ -1304,6 +1306,8 @@ github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXV github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY= github.com/viant/assertly v0.4.8/go.mod h1:aGifi++jvCrUaklKEKT0BU95igDNaqkvz+49uaYMPRU= github.com/viant/toolbox v0.24.0/go.mod h1:OxMCG57V0PXuIP2HNQrtJf2CjqdmbrOx5EkMILuUhzM= +github.com/vito/go-sse v1.0.0 h1:e6/iTrrvy8BRrOwJwmQmlndlil+TLdxXvHi55ZDzH6M= +github.com/vito/go-sse v1.0.0/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpgcZs= github.com/warpfork/go-testmark v0.11.0 h1:J6LnV8KpceDvo7spaNU4+DauH2n1x+6RaO2rJrmpQ9U= github.com/warpfork/go-wish v0.0.0-20180510122957-5ad1f5abf436/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= github.com/warpfork/go-wish v0.0.0-20190328234359-8b3e70f8e830/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= @@ -1854,6 +1858,7 @@ gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/src-d/go-cli.v0 v0.0.0-20181105080154-d492247bbc0d/go.mod h1:z+K8VcOYVYcSwSjGebuDL6176A1XskgbtNl64NSg+n8= gopkg.in/src-d/go-log.v1 v1.0.1/go.mod h1:GN34hKP0g305ysm2/hctJ0Y8nWP3zxXXJ8GFabTyABE= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= diff --git a/http/client_collection.go b/http/client_collection.go index 2907b46ed4..834621ba94 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -18,6 +18,8 @@ import ( "net/http" "strings" + sse "github.com/vito/go-sse/sse" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/datastore" @@ -344,27 +346,25 @@ func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.Doc if err != nil { return nil, err } - defer res.Body.Close() // nolint:errcheck - docKeyCh := make(chan client.DocKeysResult) - defer close(docKeyCh) - - // scanner := bufio.NewScanner(res.Body) - // go func() { - // for scanner.Scan() { - // line := scanner.Text() - // if !strings.HasPrefix(line, "data:") { - // continue - // } - // line = strings.TrimPrefix(line, "data:") - - 
// var docKey client.DocKeysResult - // if err := json.Unmarshal([]byte(line), &docKey); err != nil { - // return - // } - // docKeyCh <- docKey - // } - // }() + + go func() { + eventReader := sse.NewReadCloser(res.Body) + defer eventReader.Close() + defer close(docKeyCh) + + for { + evt, err := eventReader.Next() + if err != nil { + return + } + var docKeyRes client.DocKeysResult + if err := json.Unmarshal(evt.Data, &docKeyRes); err != nil { + return + } + docKeyCh <- docKeyRes + } + }() return docKeyCh, nil } @@ -377,15 +377,15 @@ func (c *CollectionClient) CreateIndex( body, err := json.Marshal(&indexDesc) if err != nil { - return client.IndexDescription{}, nil + return client.IndexDescription{}, err } req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { - return client.IndexDescription{}, nil + return client.IndexDescription{}, err } var index client.IndexDescription if err := c.http.requestJson(req, &index); err != nil { - return client.IndexDescription{}, nil + return client.IndexDescription{}, err } return index, nil } diff --git a/http/client_store.go b/http/client_store.go index 9c01997a69..fdcc52f21c 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -11,7 +11,6 @@ package http import ( - "bufio" "bytes" "context" "encoding/json" @@ -24,6 +23,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" + sse "github.com/vito/go-sse/sse" ) var _ client.Store = (*StoreClient)(nil) @@ -318,12 +318,12 @@ func (c *StoreClient) ExecRequest(ctx context.Context, query string) *client.Req result.GQL.Errors = []error{err} return result } - defer res.Body.Close() //nolint:errcheck - if res.Header.Get("Content-Type") == "text/event-stream" { result.Pub = c.execRequestSubscription(ctx, res.Body) return result } + defer res.Body.Close() //nolint:errcheck + data, err := io.ReadAll(res.Body) if err != nil { result.GQL.Errors = []error{err} @@ -341,27 +341,34 @@ func (c *StoreClient) ExecRequest(ctx context.Context, query string) *client.Req return result } -func (c *StoreClient) execRequestSubscription(ctx context.Context, r io.Reader) *events.Publisher[events.Update] { +func (c *StoreClient) execRequestSubscription(ctx context.Context, r io.ReadCloser) *events.Publisher[events.Update] { pubCh := events.New[events.Update](0, 0) pub, err := events.NewPublisher[events.Update](pubCh, 0) if err != nil { return nil } - scanner := bufio.NewScanner(r) go func() { - for scanner.Scan() { - line := scanner.Text() - if !strings.HasPrefix(line, "data:") { - continue - } - line = strings.TrimPrefix(line, "data:") + eventReader := sse.NewReadCloser(r) + defer eventReader.Close() - var item events.Update - if err := json.Unmarshal([]byte(line), &item); err != nil { + for { + evt, err := eventReader.Next() + if err != nil { + return + } + var response GraphQLResponse + if err := json.Unmarshal(evt.Data, &response); err != nil { return } - pub.Publish(item) + var errors []error + for _, err := range response.Errors { + errors = append(errors, fmt.Errorf(err)) + } + pub.Publish(client.GQLResult{ + Errors: errors, + Data: response.Data, + }) } }() diff --git a/http/client_tx.go b/http/client_tx.go index 8dd28cf587..c2a3807613 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -46,7 +46,7 @@ func (c *TxClient) Discard(ctx context.Context) { if err != nil { return } - c.http.request(req) + c.http.request(req) 
//nolint:errcheck } func (c *TxClient) OnSuccess(fn func()) { diff --git a/http/server_collection.go b/http/server_collection.go index cb270b60ec..cd8ba084bf 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -11,6 +11,8 @@ package http import ( + "encoding/json" + "fmt" "io" "net/http" @@ -265,12 +267,19 @@ func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { c.Header("Cache-Control", "no-cache") c.Header("Connection", "keep-alive") + c.Status(http.StatusOK) + c.Writer.Flush() + c.Stream(func(w io.Writer) bool { docKey, open := <-docKeyCh if !open { return false } - c.SSEvent("next", docKey) + data, err := json.Marshal(docKey) + if err != nil { + return false + } + fmt.Fprintf(w, "data: %s\n\n", data) return true }) } diff --git a/http/server_store.go b/http/server_store.go index 332a7260ef..d602fd3b2e 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -11,14 +11,15 @@ package http import ( + "bytes" "encoding/json" + "fmt" "io" "net/http" "github.com/gin-gonic/gin" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/events" ) type GraphQLRequest struct { @@ -30,6 +31,45 @@ type GraphQLResponse struct { Data any `json:"data"` } +func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { + // decode numbers to json.Number + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.UseNumber() + + var out map[string]any + if err := dec.Decode(&out); err != nil { + return err + } + + // fix errors type to match tests + switch t := out["errors"].(type) { + case []any: + var errors []string + for _, v := range t { + errors = append(errors, v.(string)) + } + res.Errors = errors + default: + res.Errors = nil + } + + // fix data type to match tests + switch t := out["data"].(type) { + case []any: + var fixed []map[string]any + for _, v := range t { + fixed = append(fixed, v.(map[string]any)) + } + res.Data = fixed + case map[string]any: + res.Data = t + default: + res.Data = []map[string]any{} + } + + return nil +} + type StoreHandler struct{} func (s *StoreHandler) SetReplicator(c *gin.Context) { @@ -235,29 +275,29 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { return } result := store.ExecRequest(c.Request.Context(), request.Query) - if result.Pub != nil { - s.execRequestSubscription(c, result.Pub) - return - } var errors []string for _, err := range result.GQL.Errors { errors = append(errors, err.Error()) } - c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) -} + if result.Pub == nil { + c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) + return + } + defer result.Pub.Unsubscribe() -func (s *StoreHandler) execRequestSubscription(c *gin.Context, pub *events.Publisher[events.Update]) { c.Header("Content-Type", "text/event-stream") c.Header("Cache-Control", "no-cache") c.Header("Connection", "keep-alive") + c.Status(http.StatusOK) + c.Writer.Flush() + c.Stream(func(w io.Writer) bool { select { case <-c.Request.Context().Done(): - pub.Unsubscribe() return false - case item, open := <-pub.Stream(): + case item, open := <-result.Pub.Stream(): if !open { return false } @@ -265,7 +305,7 @@ func (s *StoreHandler) execRequestSubscription(c *gin.Context, pub *events.Publi if err != nil { return false } - c.SSEvent("next", data) + fmt.Fprintf(w, "data: %s\n\n", data) return true } }) diff --git a/http/server_tx.go b/http/server_tx.go index 44490b5ac0..abb56ac2fc 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -62,7 +62,7 @@ func (h *TxHandler) Commit(c 
*gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } - txVal, ok := h.txs.LoadAndDelete(txId) + txVal, ok := h.txs.Load(txId) if !ok { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return @@ -72,6 +72,7 @@ func (h *TxHandler) Commit(c *gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } + h.txs.Delete(txId) c.Status(http.StatusOK) } @@ -81,11 +82,12 @@ func (h *TxHandler) Discard(c *gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } - txVal, ok := h.txs.LoadAndDelete(txId) + txVal, ok := h.txs.Load(txId) if !ok { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } txVal.(datastore.Txn).Discard(c.Request.Context()) + h.txs.Delete(txId) c.Status(http.StatusOK) } diff --git a/http/wrapper.go b/http/wrapper.go index dfec103d48..d020a80773 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -163,6 +163,7 @@ func (w *Wrapper) Blockstore() blockstore.Blockstore { func (w *Wrapper) Close(ctx context.Context) { w.db.Close(ctx) + w.httpServer.CloseClientConnections() w.httpServer.Close() } diff --git a/tests/integration/explain.go b/tests/integration/explain.go index afb31e918c..7a533c9190 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -12,7 +12,6 @@ package tests import ( "context" - "encoding/json" "reflect" "sort" "testing" @@ -157,19 +156,19 @@ func assertExplainRequestResults( assert.Fail(t, "Expected an error however none was raised.", description) } + // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). resultantData := actualResult.Data.([]map[string]any) log.Info(ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). - if action.ExpectedFullGraph != nil { - expectedJson, err := json.Marshal(action.ExpectedFullGraph) - require.NoError(t, err) - - resultJson, err := json.Marshal(actualResult.Data) - require.NoError(t, err) - - assert.JSONEq(t, string(expectedJson), string(resultJson)) + if lengthOfExpectedFullGraph := len(action.ExpectedFullGraph); action.ExpectedFullGraph != nil { + require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) + for index, actualResult := range resultantData { + if lengthOfExpectedFullGraph > index { + assertRequestResultsData(t, actualResult, action.ExpectedFullGraph[index]) + } + } } // Ensure the complete high-level pattern matches, inother words check that all the @@ -180,12 +179,7 @@ func assertExplainRequestResults( for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. 
actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) - assert.Equal( - t, - action.ExpectedPatterns[index], - actualResultWithoutAttributes, - description, - ) + assertRequestResultsData(t, actualResultWithoutAttributes, action.ExpectedPatterns[index]) } } @@ -220,12 +214,7 @@ func assertExplainTargetCase( ) } - assert.Equal( - t, - targetCase.ExpectedAttributes, - foundActualTarget, - description, - ) + assertRequestResultsData(t, foundActualTarget, targetCase.ExpectedAttributes) } } @@ -309,6 +298,26 @@ func findTargetNode( } } + case []any: + for _, item := range r { + target, matches, found := findTargetNode( + targetName, + toSkip, + includeChildNodes, + item, + ) + + totalMatchedSoFar = totalMatchedSoFar + matches + toSkip -= matches + + if found { + if includeChildNodes { + return target, totalMatchedSoFar, true + } + return trimSubNodes(target), totalMatchedSoFar, true + } + } + case []map[string]any: for _, item := range r { target, matches, found := findTargetNode( @@ -379,6 +388,16 @@ func trimExplainAttributes( } trimmedMap[key] = trimmedArrayElements + case []any: + trimmedArrayElements := []map[string]any{} + for _, valueItem := range v { + trimmedArrayElements = append( + trimmedArrayElements, + trimExplainAttributes(t, description, valueItem.(map[string]any)), + ) + } + trimmedMap[key] = trimmedArrayElements + default: assert.Fail( t, diff --git a/tests/integration/subscription/subscription_test.go b/tests/integration/subscription/subscription_test.go index ee1dc88cdc..578f558cb2 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -10,308 +10,308 @@ package subscription -// import ( -// "testing" +import ( + "testing" -// testUtils "github.com/sourcenetwork/defradb/tests/integration" -// ) + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) -// func TestSubscriptionWithCreateMutations(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with user creations", -// Actions: []any{ -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User { -// _key -// name -// age -// } -// }`, -// Results: []map[string]any{ -// { -// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", -// "age": uint64(27), -// "name": "John", -// }, -// { -// "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", -// "age": uint64(31), -// "name": "Addo", -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "Addo", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithCreateMutations(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with user creations", + Actions: []any{ + testUtils.SubscriptionRequest{ + Request: `subscription { + User { + _key + name + age + } + }`, + Results: []map[string]any{ + { + "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": uint64(27), + "name": "John", + }, + { + "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", + "age": uint64(31), + "name": "Addo", + }, + }, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": 
\"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "Addo", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} -// func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with filter and one user creation", -// Actions: []any{ -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User(filter: {age: {_lt: 30}}) { -// _key -// name -// age -// } -// }`, -// Results: []map[string]any{ -// { -// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", -// "age": uint64(27), -// "name": "John", -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with filter and one user creation", + Actions: []any{ + testUtils.SubscriptionRequest{ + Request: `subscription { + User(filter: {age: {_lt: 30}}) { + _key + name + age + } + }`, + Results: []map[string]any{ + { + "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": uint64(27), + "name": "John", + }, + }, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} -// func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with filter and one user creation outside of the filter", -// Actions: []any{ -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User(filter: {age: {_gt: 30}}) { -// _key -// name -// age -// } -// }`, -// Results: []map[string]any{}, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with filter and one user creation outside of the filter", + Actions: []any{ + testUtils.SubscriptionRequest{ + Request: `subscription { + User(filter: {age: {_gt: 30}}) { + _key + name + age + } + }`, + Results: []map[string]any{}, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} -// func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with filter and user creation in and outside of the filter", -// Actions: []any{ -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User(filter: {age: {_lt: 30}}) { -// _key -// name 
-// age -// } -// }`, -// Results: []map[string]any{ -// { -// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", -// "age": uint64(27), -// "name": "John", -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "Addo", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with filter and user creation in and outside of the filter", + Actions: []any{ + testUtils.SubscriptionRequest{ + Request: `subscription { + User(filter: {age: {_lt: 30}}) { + _key + name + age + } + }`, + Results: []map[string]any{ + { + "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": uint64(27), + "name": "John", + }, + }, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + testUtils.Request{ + Request: `mutation { + create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "Addo", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} -// func TestSubscriptionWithUpdateMutations(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with user creations and single mutation", -// Actions: []any{ -// testUtils.CreateDoc{ -// CollectionID: 0, -// Doc: `{ -// "name": "John", -// "age": 27, -// "verified": true, -// "points": 42.1 -// }`, -// }, -// testUtils.CreateDoc{ -// CollectionID: 0, -// Doc: `{ -// "name": "Addo", -// "age": 35, -// "verified": true, -// "points": 50 -// }`, -// }, -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User { -// _key -// name -// age -// points -// } -// }`, -// Results: []map[string]any{ -// { -// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", -// "age": uint64(27), -// "name": "John", -// "points": float64(45), -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// update_User(filter: {name: {_eq: "John"}}, data: "{\"points\": 45}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithUpdateMutations(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with user creations and single mutation", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 27, + "verified": true, + "points": 42.1 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Addo", + "age": 35, + "verified": true, + "points": 50 + }`, + }, + testUtils.SubscriptionRequest{ + Request: `subscription { + User { + _key + name + age + points + } + }`, + Results: []map[string]any{ + { + "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": uint64(27), + "name": "John", + "points": float64(45), + }, + }, + }, + testUtils.Request{ + Request: `mutation { + update_User(filter: {name: {_eq: "John"}}, data: "{\"points\": 45}") { + name + } + }`, + Results: 
[]map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} -// func TestSubscriptionWithUpdateAllMutations(t *testing.T) { -// test := testUtils.TestCase{ -// Description: "Subscription with user creations and mutations for all", -// Actions: []any{ -// testUtils.CreateDoc{ -// CollectionID: 0, -// Doc: `{ -// "name": "John", -// "age": 27, -// "verified": true, -// "points": 42.1 -// }`, -// }, -// testUtils.CreateDoc{ -// CollectionID: 0, -// Doc: `{ -// "name": "Addo", -// "age": 31, -// "verified": true, -// "points": 50 -// }`, -// }, -// testUtils.SubscriptionRequest{ -// Request: `subscription { -// User { -// _key -// name -// age -// points -// } -// }`, -// Results: []map[string]any{ -// { -// "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", -// "age": uint64(27), -// "name": "John", -// "points": float64(55), -// }, -// { -// "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", -// "age": uint64(31), -// "name": "Addo", -// "points": float64(55), -// }, -// }, -// }, -// testUtils.Request{ -// Request: `mutation { -// update_User(data: "{\"points\": 55}") { -// name -// } -// }`, -// Results: []map[string]any{ -// { -// "name": "John", -// }, -// { -// "name": "Addo", -// }, -// }, -// }, -// }, -// } +func TestSubscriptionWithUpdateAllMutations(t *testing.T) { + test := testUtils.TestCase{ + Description: "Subscription with user creations and mutations for all", + Actions: []any{ + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "age": 27, + "verified": true, + "points": 42.1 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Addo", + "age": 31, + "verified": true, + "points": 50 + }`, + }, + testUtils.SubscriptionRequest{ + Request: `subscription { + User { + _key + name + age + points + } + }`, + Results: []map[string]any{ + { + "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": uint64(27), + "name": "John", + "points": float64(55), + }, + { + "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", + "age": uint64(31), + "name": "Addo", + "points": float64(55), + }, + }, + }, + testUtils.Request{ + Request: `mutation { + update_User(data: "{\"points\": 55}") { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + { + "name": "Addo", + }, + }, + }, + }, + } -// execute(t, test) -// } + execute(t, test) +} diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 23d833c32a..38c3d0a2c6 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -250,9 +250,9 @@ func GetClientTypes() []ClientType { clients = append(clients, httpClientType) } - if goClient { - clients = append(clients, goClientType) - } + // if goClient { + // clients = append(clients, goClientType) + // } return clients } @@ -1489,18 +1489,280 @@ func assertRequestResults( return true } - expectedJson, err := json.Marshal(expectedResults) - require.NoError(t, err) - - resultJson, err := json.Marshal(result.Data) - require.NoError(t, err) + // Note: if result.Data == nil this panics (the panic seems useful while testing). 
+ resultantData := result.Data.([]map[string]any) - assert.JSONEq(t, string(expectedJson), string(resultJson)) log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) + // compare results + assert.Equal(t, len(expectedResults), len(resultantData), description) + for docIndex, result := range resultantData { + expectedResult := expectedResults[docIndex] + assertRequestResultsData(t, result, expectedResult) + } + return false } +func assertRequestResultsData(t *testing.T, actual any, expected any) { + switch expectedVal := expected.(type) { + case map[string]any: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.(map[string]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for k, v := range expectedVal { + assertRequestResultsData(t, actualVal[k], v) + } + return + case []int64: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []uint64: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []float64: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []string: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []bool: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []any: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []map[string]any: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case uint64, uint32, int64, int32, int, uint: + jsonNum, ok := actual.(json.Number) + if !ok { + break + } + + actualVal, err := jsonNum.Int64() + require.NoError(t, err) + actual = actualVal + case float32, float64: + jsonNum, ok := actual.(json.Number) + if !ok { + break + } + + actualVal, err := jsonNum.Float64() + require.NoError(t, err) + actual = actualVal + case []immutable.Option[float64]: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []immutable.Option[uint64]: + if 
len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []immutable.Option[int64]: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []immutable.Option[bool]: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case []immutable.Option[string]: + if len(expectedVal) == 0 && actual == nil { + return + } + + actualVal, ok := actual.([]any) + if !ok { + break + } + + require.Equal(t, len(actualVal), len(expectedVal)) + for i, v := range expectedVal { + assertRequestResultsData(t, actualVal[i], v) + } + return + case immutable.Option[float64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + + assertRequestResultsData(t, actual, expected) + return + case immutable.Option[uint64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + + assertRequestResultsData(t, actual, expected) + return + case immutable.Option[int64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + + assertRequestResultsData(t, actual, expected) + return + case immutable.Option[bool]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + case immutable.Option[string]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + } + + assert.EqualValues(t, expected, actual) +} + func assertExpectedErrorRaised(t *testing.T, description string, expectedError string, wasRaised bool) { if expectedError != "" && !wasRaised { assert.Fail(t, "Expected an error however none was raised.", description) From f5c29b39e20ae708cffc98715284ab9087953cb4 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 15:17:22 -0500 Subject: [PATCH 014/107] almost all tests passing --- http/client.go | 47 ++++++++++++++++++---- http/client_collection.go | 18 +++++++-- http/client_lens.go | 2 +- http/middleware.go | 60 +++++++++++++--------------- http/server.go | 78 ++++++++++++++++++++----------------- http/server_collection.go | 13 ++++++- http/server_store.go | 1 - http/server_tx.go | 3 +- http/wrapper.go | 2 +- tests/integration/utils2.go | 2 + 10 files changed, 141 insertions(+), 85 deletions(-) diff --git a/http/client.go b/http/client.go index b0c1c881d1..abdcb208bf 100644 --- a/http/client.go +++ b/http/client.go @@ -19,9 +19,11 @@ import ( ) type httpClient struct { - client *http.Client - baseURL *url.URL - txValue string + client *http.Client + baseURL *url.URL + txValue string + colTxValue string + lensTxValue string } type errorResponse struct { @@ -30,16 +32,47 @@ type errorResponse struct { func (c *httpClient) withTxn(txValue string) *httpClient { return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: txValue, + client: c.client, + baseURL: c.baseURL, + txValue: txValue, + colTxValue: c.colTxValue, + lensTxValue: c.lensTxValue, + } +} + +func (c 
*httpClient) withColTxn(txValue string) *httpClient {
+	return &httpClient{
+		client:      c.client,
+		baseURL:     c.baseURL,
+		txValue:     c.txValue,
+		colTxValue:  txValue,
+		lensTxValue: c.lensTxValue,
+	}
+}
+
+func (c *httpClient) withLensTxn(txValue string) *httpClient {
+	return &httpClient{
+		client:      c.client,
+		baseURL:     c.baseURL,
+		txValue:     c.txValue,
+		colTxValue:  c.colTxValue,
+		lensTxValue: txValue,
+	}
+}
+
 func (c *httpClient) setDefaultHeaders(req *http.Request) {
 	req.Header.Add("Accept", "application/json")
 	req.Header.Add("Content-Type", "application/json")
-	req.Header.Add(txHeaderName, c.txValue)
+
+	if c.txValue != "" {
+		req.Header.Add(TX_HEADER_NAME, c.txValue)
+	}
+	if c.colTxValue != "" {
+		req.Header.Add(COL_TX_HEADER_NAME, c.colTxValue)
+	}
+	if c.lensTxValue != "" {
+		req.Header.Add(LENS_TX_HEADER_NAME, c.lensTxValue)
+	}
 }
 
 func (c *httpClient) request(req *http.Request) error {
diff --git a/http/client_collection.go b/http/client_collection.go
index 834621ba94..9e9f3dd38c 100644
--- a/http/client_collection.go
+++ b/http/client_collection.go
@@ -325,7 +325,7 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet
 
 func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection {
 	txId := fmt.Sprintf("%d", tx.ID())
-	http := c.http.withTxn(txId)
+	http := c.http.withColTxn(txId)
 
 	return &CollectionClient{
 		http: http,
diff --git a/http/client_lens.go b/http/client_lens.go
index 0630d428bf..2a8976f2ed 100644
--- a/http/client_lens.go
+++ b/http/client_lens.go
@@ -36,7 +36,7 @@ func NewLensClient(httpClient *httpClient) *LensClient {
 
 func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry {
 	txId := fmt.Sprintf("%d", tx.ID())
-	http := c.http.withTxn(txId)
+	http := c.http.withLensTxn(txId)
 
 	return &LensClient{http}
 }
diff --git a/http/middleware.go b/http/middleware.go
index 3a8fe49fae..a5571d0f3e 100644
--- a/http/middleware.go
+++ b/http/middleware.go
@@ -10,18 +10,16 @@ import (
 
 	"github.com/sourcenetwork/defradb/datastore"
 )
 
-// DatabaseMiddleware sets the database for the current request context.
-func DatabaseMiddleware(db client.DB) gin.HandlerFunc {
-	return func(c *gin.Context) {
-		c.Set("db", db)
-		c.Next()
-	}
-}
+const (
+	TX_HEADER_NAME      = "x-defradb-tx"
+	COL_TX_HEADER_NAME  = "x-defradb-col-tx"
+	LENS_TX_HEADER_NAME = "x-defradb-lens-tx"
+)
 
-// TransactionMiddleware sets the transaction for the current request context.
-func TransactionMiddleware(txs *sync.Map) gin.HandlerFunc {
+// TransactionMiddleware sets the transaction context for the current request.
+func TransactionMiddleware(db client.DB, txs *sync.Map, header string) gin.HandlerFunc { return func(c *gin.Context) { - txValue := c.GetHeader(txHeaderName) + txValue := c.GetHeader(header) if txValue == "" { c.Next() return @@ -37,58 +35,56 @@ func TransactionMiddleware(txs *sync.Map) gin.HandlerFunc { return } - c.Set("tx", tx) + c.Set(header, tx) c.Next() } } -func LensMiddleware() gin.HandlerFunc { +// DatabaseMiddleware sets the db context for the current request. +func DatabaseMiddleware(db client.DB) gin.HandlerFunc { return func(c *gin.Context) { - db := c.MustGet("db").(client.DB) - - tx, ok := c.Get("tx") + tx, ok := c.Get(TX_HEADER_NAME) if ok { - c.Set("lens", db.LensRegistry().WithTxn(tx.(datastore.Txn))) + c.Set("store", db.WithTxn(tx.(datastore.Txn))) } else { - c.Set("lens", db.LensRegistry()) + c.Set("store", db) } - + c.Set("db", db) c.Next() } } -// StoreMiddleware sets the store for the current request -func StoreMiddleware() gin.HandlerFunc { +// LensMiddleware sets the lens context for the current request. +func LensMiddleware() gin.HandlerFunc { return func(c *gin.Context) { - db := c.MustGet("db").(client.DB) + store := c.MustGet("store").(client.Store) - tx, ok := c.Get("tx") + tx, ok := c.Get(LENS_TX_HEADER_NAME) if ok { - c.Set("store", db.WithTxn(tx.(datastore.Txn))) + c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) } else { - c.Set("store", db) + c.Set("lens", store.LensRegistry()) } - c.Next() } } -// CollectionMiddleware sets the collection for the current request context. +// CollectionMiddleware sets the collection context for the current request. func CollectionMiddleware() gin.HandlerFunc { return func(c *gin.Context) { - db := c.MustGet("db").(client.DB) + store := c.MustGet("store").(client.Store) - col, err := db.GetCollectionByName(c.Request.Context(), c.Param("name")) + col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) if err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - tx, ok := c.Get("tx") + tx, ok := c.Get(COL_TX_HEADER_NAME) if ok { - col = col.WithTxn(tx.(datastore.Txn)) + c.Set("col", col.WithTxn(tx.(datastore.Txn))) + } else { + c.Set("col", col) } - - c.Set("col", col) c.Next() } } diff --git a/http/server.go b/http/server.go index 707780e8d7..081d54121b 100644 --- a/http/server.go +++ b/http/server.go @@ -18,10 +18,6 @@ import ( "github.com/sourcenetwork/defradb/client" ) -// txHeaderName is the name of the custom -// header containing the transaction id. 
-const txHeaderName = "x-defradb-tx" - type Server struct { db client.DB router *gin.Engine @@ -37,8 +33,10 @@ func NewServer(db client.DB) *Server { lensHandler := &LensHandler{} router := gin.Default() + + // db tx context api := router.Group("/api/v0") - api.Use(DatabaseMiddleware(db), TransactionMiddleware(txs)) + api.Use(TransactionMiddleware(db, txs, TX_HEADER_NAME), DatabaseMiddleware(db)) tx := api.Group("/tx") tx.POST("/", txHandler.NewTxn) @@ -47,49 +45,57 @@ func NewServer(db client.DB) *Server { tx.DELETE("/:id", txHandler.Discard) backup := api.Group("/backup") - backup.POST("/export", StoreMiddleware(), storeHandler.BasicExport) - backup.POST("/import", StoreMiddleware(), storeHandler.BasicImport) + backup.POST("/export", storeHandler.BasicExport) + backup.POST("/import", storeHandler.BasicImport) schema := api.Group("/schema") - schema.POST("/", StoreMiddleware(), storeHandler.AddSchema) - schema.PATCH("/", StoreMiddleware(), storeHandler.PatchSchema) + schema.POST("/", storeHandler.AddSchema) + schema.PATCH("/", storeHandler.PatchSchema) collections := api.Group("/collections") - collections.GET("/", StoreMiddleware(), storeHandler.GetCollection) - collections.GET("/:name", CollectionMiddleware(), collectionHandler.GetAllDocKeys) - collections.POST("/:name", CollectionMiddleware(), collectionHandler.Create) - collections.PATCH("/:name", CollectionMiddleware(), collectionHandler.UpdateWith) - collections.DELETE("/:name", CollectionMiddleware(), collectionHandler.DeleteWith) - collections.POST("/:name/indexes", CollectionMiddleware(), collectionHandler.CreateIndex) - collections.GET("/:name/indexes", CollectionMiddleware(), collectionHandler.GetIndexes) - collections.DELETE("/:name/indexes/:index", CollectionMiddleware(), collectionHandler.DropIndex) - collections.GET("/:name/:key", CollectionMiddleware(), collectionHandler.Get) - collections.POST("/:name/:key", CollectionMiddleware(), collectionHandler.Save) - collections.PATCH("/:name/:key", CollectionMiddleware(), collectionHandler.Update) - collections.DELETE("/:name/:key", CollectionMiddleware(), collectionHandler.Delete) - + collections.GET("/", storeHandler.GetCollection) + + // collection tx context + collections_tx := collections.Group("/") + collections_tx.Use(TransactionMiddleware(db, txs, COL_TX_HEADER_NAME), CollectionMiddleware()) + + collections_tx.GET("/:name", collectionHandler.GetAllDocKeys) + collections_tx.POST("/:name", collectionHandler.Create) + collections_tx.PATCH("/:name", collectionHandler.UpdateWith) + collections_tx.DELETE("/:name", collectionHandler.DeleteWith) + collections_tx.POST("/:name/indexes", collectionHandler.CreateIndex) + collections_tx.GET("/:name/indexes", collectionHandler.GetIndexes) + collections_tx.DELETE("/:name/indexes/:index", collectionHandler.DropIndex) + collections_tx.GET("/:name/:key", collectionHandler.Get) + collections_tx.POST("/:name/:key", collectionHandler.Save) + collections_tx.PATCH("/:name/:key", collectionHandler.Update) + collections_tx.DELETE("/:name/:key", collectionHandler.Delete) + + // lens tx context lens := api.Group("/lens") - lens.GET("/", LensMiddleware(), lensHandler.Config) - lens.POST("/", LensMiddleware(), lensHandler.SetMigration) - lens.POST("/reload", LensMiddleware(), lensHandler.ReloadLenses) - lens.GET("/:version", LensMiddleware(), lensHandler.HasMigration) - lens.POST("/:version/up", LensMiddleware(), lensHandler.MigrateUp) - lens.POST("/:version/down", LensMiddleware(), lensHandler.MigrateDown) + lens.Use(TransactionMiddleware(db, txs, 
LENS_TX_HEADER_NAME), LensMiddleware()) + + lens.GET("/", lensHandler.Config) + lens.POST("/", lensHandler.SetMigration) + lens.POST("/reload", lensHandler.ReloadLenses) + lens.GET("/:version", lensHandler.HasMigration) + lens.POST("/:version/up", lensHandler.MigrateUp) + lens.POST("/:version/down", lensHandler.MigrateDown) graphQL := api.Group("/graphql") - graphQL.GET("/", StoreMiddleware(), storeHandler.ExecRequest) - graphQL.POST("/", StoreMiddleware(), storeHandler.ExecRequest) + graphQL.GET("/", storeHandler.ExecRequest) + graphQL.POST("/", storeHandler.ExecRequest) p2p := api.Group("/p2p") p2p_replicators := p2p.Group("/replicators") - p2p_replicators.GET("/", StoreMiddleware(), storeHandler.GetAllReplicators) - p2p_replicators.POST("/", StoreMiddleware(), storeHandler.SetReplicator) - p2p_replicators.DELETE("/", StoreMiddleware(), storeHandler.DeleteReplicator) + p2p_replicators.GET("/", storeHandler.GetAllReplicators) + p2p_replicators.POST("/", storeHandler.SetReplicator) + p2p_replicators.DELETE("/", storeHandler.DeleteReplicator) p2p_collections := p2p.Group("/collections") - p2p_collections.GET("/", StoreMiddleware(), storeHandler.GetAllP2PCollections) - p2p_collections.POST("/:id", StoreMiddleware(), storeHandler.AddP2PCollection) - p2p_collections.DELETE("/:id", StoreMiddleware(), storeHandler.RemoveP2PCollection) + p2p_collections.GET("/", storeHandler.GetAllP2PCollections) + p2p_collections.POST("/:id", storeHandler.AddP2PCollection) + p2p_collections.DELETE("/:id", storeHandler.RemoveP2PCollection) return &Server{ db: db, diff --git a/http/server_collection.go b/http/server_collection.go index cd8ba084bf..f1f3396b00 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -254,6 +254,11 @@ func (s *CollectionHandler) Get(c *gin.Context) { c.Status(http.StatusOK) } +type DocKeyResult struct { + Key string `json:"key"` + Error string `json:"error"` +} + func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { col := c.MustGet("col").(client.Collection) @@ -275,7 +280,13 @@ func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { if !open { return false } - data, err := json.Marshal(docKey) + results := &DocKeyResult{ + Key: docKey.Key.String(), + } + if docKey.Err != nil { + results.Error = docKey.Err.Error() + } + data, err := json.Marshal(results) if err != nil { return false } diff --git a/http/server_store.go b/http/server_store.go index d602fd3b2e..8e35f88beb 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -284,7 +284,6 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) return } - defer result.Pub.Unsubscribe() c.Header("Content-Type", "text/event-stream") c.Header("Cache-Control", "no-cache") diff --git a/http/server_tx.go b/http/server_tx.go index abb56ac2fc..b92035993d 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -82,12 +82,11 @@ func (h *TxHandler) Discard(c *gin.Context) { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } - txVal, ok := h.txs.Load(txId) + txVal, ok := h.txs.LoadAndDelete(txId) if !ok { c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) return } txVal.(datastore.Txn).Discard(c.Request.Context()) - h.txs.Delete(txId) c.Status(http.StatusOK) } diff --git a/http/wrapper.go b/http/wrapper.go index d020a80773..0a02105230 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -162,9 +162,9 @@ func (w *Wrapper) Blockstore() blockstore.Blockstore { } func (w *Wrapper) Close(ctx 
context.Context) { - w.db.Close(ctx) w.httpServer.CloseClientConnections() w.httpServer.Close() + w.db.Close(ctx) } func (w *Wrapper) Events() events.Events { diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 38c3d0a2c6..4bbbbb2b97 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1506,6 +1506,8 @@ func assertRequestResults( func assertRequestResultsData(t *testing.T, actual any, expected any) { switch expectedVal := expected.(type) { + case AnyOf: + return // TODO case map[string]any: if len(expectedVal) == 0 && actual == nil { return From 6a72bb609dbc05c900adb0eafd0679390a886016 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 15:36:18 -0500 Subject: [PATCH 015/107] fix lint errors --- http/client_collection.go | 2 +- http/client_store.go | 5 +++-- http/middleware.go | 11 +++++++++++ http/server.go | 1 + http/server_tx.go | 1 + tests/integration/utils2.go | 10 ---------- 6 files changed, 17 insertions(+), 13 deletions(-) diff --git a/http/client_collection.go b/http/client_collection.go index 9e9f3dd38c..d1ce332182 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -350,7 +350,7 @@ func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.Doc go func() { eventReader := sse.NewReadCloser(res.Body) - defer eventReader.Close() + defer eventReader.Close() //nolint:errcheck defer close(docKeyCh) for { diff --git a/http/client_store.go b/http/client_store.go index fdcc52f21c..3eecf44bcf 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -20,10 +20,11 @@ import ( "net/url" "strings" + sse "github.com/vito/go-sse/sse" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" - sse "github.com/vito/go-sse/sse" ) var _ client.Store = (*StoreClient)(nil) @@ -350,7 +351,7 @@ func (c *StoreClient) execRequestSubscription(ctx context.Context, r io.ReadClos go func() { eventReader := sse.NewReadCloser(r) - defer eventReader.Close() + defer eventReader.Close() //nolint:errcheck for { evt, err := eventReader.Next() diff --git a/http/middleware.go b/http/middleware.go index a5571d0f3e..b36399d36f 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -1,3 +1,13 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ package http import ( @@ -6,6 +16,7 @@ import ( "sync" "github.com/gin-gonic/gin" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) diff --git a/http/server.go b/http/server.go index 081d54121b..6c115f5236 100644 --- a/http/server.go +++ b/http/server.go @@ -15,6 +15,7 @@ import ( "sync" "github.com/gin-gonic/gin" + "github.com/sourcenetwork/defradb/client" ) diff --git a/http/server_tx.go b/http/server_tx.go index b92035993d..21023db57a 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -16,6 +16,7 @@ import ( "sync" "github.com/gin-gonic/gin" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 4bbbbb2b97..655dfd99ee 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1329,7 +1329,6 @@ func executeRequest( db := getStore(s, node.DB, action.TransactionID, action.ExpectedError) result := db.ExecRequest(s.ctx, action.Request) - anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( s.ctx, s.t, @@ -1338,7 +1337,6 @@ func executeRequest( action.Results, action.ExpectedError, nodeID, - anyOfByFieldKey, ) } @@ -1406,7 +1404,6 @@ func executeSubscriptionRequest( action.ExpectedError, // anyof is not yet supported by subscription requests 0, - map[docFieldKey][]any{}, ) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1465,12 +1462,6 @@ func AssertErrors( return false } -// docFieldKey is an internal key type that wraps docIndex and fieldName -type docFieldKey struct { - docIndex int - fieldName string -} - func assertRequestResults( ctx context.Context, t *testing.T, @@ -1479,7 +1470,6 @@ func assertRequestResults( expectedResults []map[string]any, expectedError string, nodeID int, - anyOfByField map[docFieldKey][]any, ) bool { if AssertErrors(t, description, result.Errors, expectedError) { return true From 508cd83c6e64406bc9880c6a75a543111bd8f10e Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 15:43:30 -0500 Subject: [PATCH 016/107] enable all tests --- tests/integration/utils2.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 655dfd99ee..f04843e458 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -250,9 +250,9 @@ func GetClientTypes() []ClientType { clients = append(clients, httpClientType) } - // if goClient { - // clients = append(clients, goClientType) - // } + if goClient { + clients = append(clients, goClientType) + } return clients } From 0ebf11cd8ae53870dc06b4a7cbe9d9647a80e23f Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 16:01:05 -0500 Subject: [PATCH 017/107] fix http lens client with incorrect method path --- http/client_lens.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/http/client_lens.go b/http/client_lens.go index 2a8976f2ed..277b4e6eb6 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -41,7 +41,7 @@ func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { } func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { - methodURL := c.http.baseURL.JoinPath("lens", "migrate") + methodURL := c.http.baseURL.JoinPath("lens") body, err := json.Marshal(config) if err != nil { From 
677f18f03ce1c2548a1ddb0c6fe45cf98d1b73ed Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 18:39:11 -0500 Subject: [PATCH 018/107] fix http transaction middleware --- http/client.go | 62 +++++++++++++-------------------------- http/client_collection.go | 5 +--- http/client_lens.go | 4 +-- http/client_store.go | 5 +--- http/client_tx.go | 5 +++- http/middleware.go | 22 +++++++------- http/server.go | 9 ++---- http/server_tx.go | 1 - http/wrapper.go | 3 +- 9 files changed, 41 insertions(+), 75 deletions(-) diff --git a/http/client.go b/http/client.go index abdcb208bf..5042fc3bbf 100644 --- a/http/client.go +++ b/http/client.go @@ -16,63 +16,41 @@ import ( "io" "net/http" "net/url" + "sync/atomic" ) -type httpClient struct { - client *http.Client - baseURL *url.URL - txValue string - colTxValue string - lensTxValue string -} - type errorResponse struct { Error string `json:"error"` } -func (c *httpClient) withTxn(txValue string) *httpClient { - return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: txValue, - colTxValue: c.colTxValue, - lensTxValue: c.lensTxValue, - } +type httpClient struct { + client *http.Client + baseURL *url.URL + txValue atomic.Uint64 } -func (c *httpClient) withColTxn(txValue string) *httpClient { - return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: c.txValue, - colTxValue: txValue, - lensTxValue: c.lensTxValue, +func newHttpClient(baseURL *url.URL) *httpClient { + client := httpClient{ + client: http.DefaultClient, + baseURL: baseURL, } + client.txValue.Store(0) + return &client } -func (c *httpClient) withLensTxn(txValue string) *httpClient { - return &httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: c.txValue, - colTxValue: c.colTxValue, - lensTxValue: txValue, +func (c *httpClient) withTxn(value uint64) *httpClient { + client := httpClient{ + client: c.client, + baseURL: c.baseURL, } + client.txValue.Store(value) + return &client } func (c *httpClient) setDefaultHeaders(req *http.Request) { - req.Header.Add("Accept", "application/json") - req.Header.Add("Content-Type", "application/json") - - if c.txValue != "" { - req.Header.Add(TX_HEADER_NAME, c.txValue) - } - if c.colTxValue != "" { - req.Header.Add(COL_TX_HEADER_NAME, c.colTxValue) - } - if c.lensTxValue != "" { - req.Header.Add(LENS_TX_HEADER_NAME, c.lensTxValue) - } + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + req.Header.Set(TX_HEADER_NAME, fmt.Sprintf("%d", c.txValue.Load())) } func (c *httpClient) request(req *http.Request) error { diff --git a/http/client_collection.go b/http/client_collection.go index d1ce332182..89534043c4 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -324,11 +324,8 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet } func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection { - txId := fmt.Sprintf("%d", tx.ID()) - http := c.http.withColTxn(txId) - return &CollectionClient{ - http: http, + http: c.http.withTxn(tx.ID()), desc: c.desc, } } diff --git a/http/client_lens.go b/http/client_lens.go index 277b4e6eb6..c8154ffdc8 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -14,7 +14,6 @@ import ( "bytes" "context" "encoding/json" - "fmt" "net/http" "github.com/sourcenetwork/immutable/enumerable" @@ -35,8 +34,7 @@ func NewLensClient(httpClient *httpClient) *LensClient { } func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { - txId := fmt.Sprintf("%d", 
tx.ID()) - http := c.http.withLensTxn(txId) + http := c.http.withTxn(tx.ID()) return &LensClient{http} } diff --git a/http/client_store.go b/http/client_store.go index 3eecf44bcf..e14c01d67e 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -38,10 +38,7 @@ func NewStoreClient(rawURL string) (*StoreClient, error) { if err != nil { return nil, err } - httpClient := &httpClient{ - client: http.DefaultClient, - baseURL: baseURL.JoinPath("/api/v0"), - } + httpClient := newHttpClient(baseURL.JoinPath("/api/v0")) return &StoreClient{httpClient}, nil } diff --git a/http/client_tx.go b/http/client_tx.go index c2a3807613..3b355d3bef 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -36,7 +36,10 @@ func (c *TxClient) Commit(ctx context.Context) error { if err != nil { return err } - return c.http.request(req) + if err := c.http.request(req); err != nil { + return err + } + return nil } func (c *TxClient) Discard(ctx context.Context) { diff --git a/http/middleware.go b/http/middleware.go index b36399d36f..f2ea70c7eb 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -21,16 +21,12 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -const ( - TX_HEADER_NAME = "x-defradb-tx" - COL_TX_HEADER_NAME = "x-defradb-col-tx" - LENS_TX_HEADER_NAME = "x-defradb-lens-tx" -) +const TX_HEADER_NAME = "x-defradb-tx" // TransactionMiddleware sets the transaction context for the current request. -func TransactionMiddleware(db client.DB, txs *sync.Map, header string) gin.HandlerFunc { +func TransactionMiddleware(db client.DB, txs *sync.Map) gin.HandlerFunc { return func(c *gin.Context) { - txValue := c.GetHeader(header) + txValue := c.GetHeader(TX_HEADER_NAME) if txValue == "" { c.Next() return @@ -46,7 +42,7 @@ func TransactionMiddleware(db client.DB, txs *sync.Map, header string) gin.Handl return } - c.Set(header, tx) + c.Set("tx", tx) c.Next() } } @@ -54,13 +50,14 @@ func TransactionMiddleware(db client.DB, txs *sync.Map, header string) gin.Handl // DatabaseMiddleware sets the db context for the current request. 
func DatabaseMiddleware(db client.DB) gin.HandlerFunc { return func(c *gin.Context) { - tx, ok := c.Get(TX_HEADER_NAME) + c.Set("db", db) + + tx, ok := c.Get("tx") if ok { c.Set("store", db.WithTxn(tx.(datastore.Txn))) } else { c.Set("store", db) } - c.Set("db", db) c.Next() } } @@ -70,7 +67,7 @@ func LensMiddleware() gin.HandlerFunc { return func(c *gin.Context) { store := c.MustGet("store").(client.Store) - tx, ok := c.Get(LENS_TX_HEADER_NAME) + tx, ok := c.Get("tx") if ok { c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) } else { @@ -90,7 +87,8 @@ func CollectionMiddleware() gin.HandlerFunc { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - tx, ok := c.Get(COL_TX_HEADER_NAME) + + tx, ok := c.Get("tx") if ok { c.Set("col", col.WithTxn(tx.(datastore.Txn))) } else { diff --git a/http/server.go b/http/server.go index 6c115f5236..c5d066da9b 100644 --- a/http/server.go +++ b/http/server.go @@ -35,9 +35,8 @@ func NewServer(db client.DB) *Server { router := gin.Default() - // db tx context api := router.Group("/api/v0") - api.Use(TransactionMiddleware(db, txs, TX_HEADER_NAME), DatabaseMiddleware(db)) + api.Use(TransactionMiddleware(db, txs), DatabaseMiddleware(db)) tx := api.Group("/tx") tx.POST("/", txHandler.NewTxn) @@ -56,9 +55,8 @@ func NewServer(db client.DB) *Server { collections := api.Group("/collections") collections.GET("/", storeHandler.GetCollection) - // collection tx context collections_tx := collections.Group("/") - collections_tx.Use(TransactionMiddleware(db, txs, COL_TX_HEADER_NAME), CollectionMiddleware()) + collections_tx.Use(CollectionMiddleware()) collections_tx.GET("/:name", collectionHandler.GetAllDocKeys) collections_tx.POST("/:name", collectionHandler.Create) @@ -72,9 +70,8 @@ func NewServer(db client.DB) *Server { collections_tx.PATCH("/:name/:key", collectionHandler.Update) collections_tx.DELETE("/:name/:key", collectionHandler.Delete) - // lens tx context lens := api.Group("/lens") - lens.Use(TransactionMiddleware(db, txs, LENS_TX_HEADER_NAME), LensMiddleware()) + lens.Use(LensMiddleware()) lens.GET("/", lensHandler.Config) lens.POST("/", lensHandler.SetMigration) diff --git a/http/server_tx.go b/http/server_tx.go index 21023db57a..a58eea2a6c 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -53,7 +53,6 @@ func (h *TxHandler) NewConcurrentTxn(c *gin.Context) { return } h.txs.Store(tx.ID(), tx) - c.JSON(http.StatusOK, &CreateTxResponse{tx.ID()}) } diff --git a/http/wrapper.go b/http/wrapper.go index 0a02105230..ad7edaac3a 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -148,8 +148,7 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - txValue := fmt.Sprintf("%d", tx.ID()) - client := w.client.http.withTxn(txValue) + client := w.client.http.withTxn(tx.ID()) return &StoreClient{client} } From 48a41035e486fa71768afb11478dd87b644ed6a2 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 22 Aug 2023 20:45:37 -0500 Subject: [PATCH 019/107] handle transaction conflict errors in http client --- http/client.go | 12 ++++++++++-- http/client_tx.go | 5 +---- http/server_store.go | 1 + 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/http/client.go b/http/client.go index 5042fc3bbf..9205122e8c 100644 --- a/http/client.go +++ b/http/client.go @@ -17,6 +17,8 @@ import ( "net/http" "net/url" "sync/atomic" + + "github.com/sourcenetwork/defradb/datastore/badger/v4" ) type errorResponse struct { @@ -74,7 +76,10 @@ func 
(c *httpClient) request(req *http.Request) error { if err := json.Unmarshal(data, &errRes); err != nil { return fmt.Errorf("%s", data) } - return fmt.Errorf(errRes.Error) + if errRes.Error == "Transaction Conflict. Please retry" { + return badger.ErrTxnConflict + } + return fmt.Errorf("%s", errRes.Error) } func (c *httpClient) requestJson(req *http.Request, out any) error { @@ -98,5 +103,8 @@ func (c *httpClient) requestJson(req *http.Request, out any) error { if err := json.Unmarshal(data, &errRes); err != nil { return fmt.Errorf("%s", data) } - return fmt.Errorf(errRes.Error) + if errRes.Error == "Transaction Conflict. Please retry" { + return badger.ErrTxnConflict + } + return fmt.Errorf("%s", errRes.Error) } diff --git a/http/client_tx.go b/http/client_tx.go index 3b355d3bef..c2a3807613 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -36,10 +36,7 @@ func (c *TxClient) Commit(ctx context.Context) error { if err != nil { return err } - if err := c.http.request(req); err != nil { - return err - } - return nil + return c.http.request(req) } func (c *TxClient) Discard(ctx context.Context) { diff --git a/http/server_store.go b/http/server_store.go index 8e35f88beb..3ca5eeaeaa 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -308,4 +308,5 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { return true } }) + } From 41ce1cdc04f25b04b44be30730554c922835fcab Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 23 Aug 2023 09:39:47 -0500 Subject: [PATCH 020/107] update lens getMigrations test logic --- http/client.go | 7 ++++++- tests/integration/lens.go | 33 +++++++++++++++++++++++++++++---- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/http/client.go b/http/client.go index 9205122e8c..cbd52f08f8 100644 --- a/http/client.go +++ b/http/client.go @@ -11,6 +11,7 @@ package http import ( + "bytes" "encoding/json" "fmt" "io" @@ -95,8 +96,12 @@ func (c *httpClient) requestJson(req *http.Request, out any) error { if err != nil { return err } + + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.UseNumber() + if res.StatusCode == http.StatusOK { - return json.Unmarshal(data, out) + return dec.Decode(out) } var errRes errorResponse diff --git a/tests/integration/lens.go b/tests/integration/lens.go index 2959867c1a..257f8cfa94 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -75,9 +75,34 @@ func getMigrations( configs, err := db.LensRegistry().Config(s.ctx) require.NoError(s.t, err) - - // The order of the results is not deterministic, so do not assert on the element - // locations. 
- assert.ElementsMatch(s.t, configs, action.ExpectedResults) + require.Equal(s.t, len(configs), len(action.ExpectedResults)) + + for _, expectedConfig := range action.ExpectedResults { + var actualConfig client.LensConfig + var actualConfigFound bool + + for _, config := range configs { + if config.SourceSchemaVersionID != expectedConfig.SourceSchemaVersionID { + continue + } + if config.DestinationSchemaVersionID != expectedConfig.DestinationSchemaVersionID { + continue + } + actualConfig = config + actualConfigFound = true + } + + require.True(s.t, actualConfigFound, "matching lens config not found") + require.Equal(s.t, len(actualConfig.Lenses), len(expectedConfig.Lenses)) + + for j, expectedLens := range actualConfig.Lenses { + actualLens := actualConfig.Lenses[j] + + assert.Equal(s.t, expectedLens.Inverse, actualLens.Inverse) + assert.Equal(s.t, expectedLens.Path, actualLens.Path) + + assertRequestResultsData(s.t, expectedLens.Arguments, actualLens.Arguments) + } + } } } From edd246ec51684d0afb8d08b96c5543906a97f812 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 23 Aug 2023 09:45:30 -0500 Subject: [PATCH 021/107] fix linter warnings --- http/client.go | 4 ++-- http/server_store.go | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/http/client.go b/http/client.go index cbd52f08f8..56874f9663 100644 --- a/http/client.go +++ b/http/client.go @@ -77,7 +77,7 @@ func (c *httpClient) request(req *http.Request) error { if err := json.Unmarshal(data, &errRes); err != nil { return fmt.Errorf("%s", data) } - if errRes.Error == "Transaction Conflict. Please retry" { + if errRes.Error == badger.ErrTxnConflict.Error() { return badger.ErrTxnConflict } return fmt.Errorf("%s", errRes.Error) @@ -108,7 +108,7 @@ func (c *httpClient) requestJson(req *http.Request, out any) error { if err := json.Unmarshal(data, &errRes); err != nil { return fmt.Errorf("%s", data) } - if errRes.Error == "Transaction Conflict. Please retry" { + if errRes.Error == badger.ErrTxnConflict.Error() { return badger.ErrTxnConflict } return fmt.Errorf("%s", errRes.Error) diff --git a/http/server_store.go b/http/server_store.go index 3ca5eeaeaa..8e35f88beb 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -308,5 +308,4 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { return true } }) - } From 181d6c353f1ce47066a93b452e42d305ea771592 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 10:24:41 -0500 Subject: [PATCH 022/107] fix race in event publish --- events/publisher.go | 10 +++++++++- http/server_store.go | 2 ++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/events/publisher.go b/events/publisher.go index 2d2d93db60..db4aefb69b 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -10,7 +10,9 @@ package events -import "time" +import ( + "time" +) // time limit we set for the client to read after publishing. var clientTimeout = 60 * time.Second @@ -22,6 +24,7 @@ type Publisher[T any] struct { ch Channel[T] event Subscription[T] stream chan any + closed bool } // NewPublisher creates a new Publisher with the given event Channel, subscribes to the @@ -52,6 +55,10 @@ func (p *Publisher[T]) Stream() chan any { // Publish sends data to the streaming channel and unsubscribes if // the client hangs for too long. 
func (p *Publisher[T]) Publish(data any) { + if p.closed { + return + } + select { case p.stream <- data: case <-time.After(clientTimeout): @@ -63,6 +70,7 @@ func (p *Publisher[T]) Publish(data any) { // Unsubscribe unsubscribes the client for the event channel and closes the stream. func (p *Publisher[T]) Unsubscribe() { + p.closed = true p.ch.Unsubscribe(p.event) close(p.stream) } diff --git a/http/server_store.go b/http/server_store.go index 8e35f88beb..de5e5117ca 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -284,6 +284,7 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) return } + defer result.Pub.Unsubscribe() c.Header("Content-Type", "text/event-stream") c.Header("Cache-Control", "no-cache") @@ -308,4 +309,5 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { return true } }) + } From 680efcd6260be2dd9ef939670b5477bceed6a928 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 11:35:00 -0500 Subject: [PATCH 023/107] add wait group sync to publisher --- events/publisher.go | 11 ++++++++++- http/server_store.go | 1 - 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/events/publisher.go b/events/publisher.go index db4aefb69b..980b53f423 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -11,6 +11,7 @@ package events import ( + "sync" "time" ) @@ -25,6 +26,7 @@ type Publisher[T any] struct { event Subscription[T] stream chan any closed bool + wg sync.WaitGroup } // NewPublisher creates a new Publisher with the given event Channel, subscribes to the @@ -59,17 +61,24 @@ func (p *Publisher[T]) Publish(data any) { return } + p.wg.Add(1) select { case p.stream <- data: case <-time.After(clientTimeout): // if sending to the client times out, we assume an inactive or problematic client and // unsubscribe them from the event stream - p.Unsubscribe() + p.unsubscribe() } + p.wg.Done() } // Unsubscribe unsubscribes the client for the event channel and closes the stream. func (p *Publisher[T]) Unsubscribe() { + p.wg.Wait() + p.unsubscribe() +} + +func (p *Publisher[T]) unsubscribe() { p.closed = true p.ch.Unsubscribe(p.event) close(p.stream) diff --git a/http/server_store.go b/http/server_store.go index de5e5117ca..d602fd3b2e 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -309,5 +309,4 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { return true } }) - } From b6f7d4a797d8b3ef3175fd83f7efbf78069a8def Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 12:44:15 -0500 Subject: [PATCH 024/107] fix bug in memory datastore --- datastore/memory/txn.go | 4 ++-- events/publisher.go | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/datastore/memory/txn.go b/datastore/memory/txn.go index 3cd7ab2bf9..ca31692162 100644 --- a/datastore/memory/txn.go +++ b/datastore/memory/txn.go @@ -160,8 +160,8 @@ func (t *basicTxn) Put(ctx context.Context, key ds.Key, value []byte) error { // Query implements ds.Query. 
func (t *basicTxn) Query(ctx context.Context, q dsq.Query) (dsq.Results, error) { - t.closeLk.RLock() - defer t.closeLk.RUnlock() + t.ds.closeLk.RLock() + defer t.ds.closeLk.RUnlock() if t.ds.closed { return nil, ErrClosed } diff --git a/events/publisher.go b/events/publisher.go index 980b53f423..056984b135 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -61,7 +61,10 @@ func (p *Publisher[T]) Publish(data any) { return } + // don't allow closing while sending is in flight p.wg.Add(1) + defer p.wg.Done() + select { case p.stream <- data: case <-time.After(clientTimeout): @@ -69,7 +72,6 @@ func (p *Publisher[T]) Publish(data any) { // unsubscribe them from the event stream p.unsubscribe() } - p.wg.Done() } // Unsubscribe unsubscribes the client for the event channel and closes the stream. From 166080e70de6ffaa38fabbe11f207962a5bb205f Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 14:14:01 -0500 Subject: [PATCH 025/107] fix memory transaction deadlock --- datastore/memory/txn.go | 6 +++--- events/publisher.go | 11 ++++++++--- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/datastore/memory/txn.go b/datastore/memory/txn.go index ca31692162..c1d663ef53 100644 --- a/datastore/memory/txn.go +++ b/datastore/memory/txn.go @@ -160,9 +160,9 @@ func (t *basicTxn) Put(ctx context.Context, key ds.Key, value []byte) error { // Query implements ds.Query. func (t *basicTxn) Query(ctx context.Context, q dsq.Query) (dsq.Results, error) { - t.ds.closeLk.RLock() - defer t.ds.closeLk.RUnlock() - if t.ds.closed { + t.closeLk.RLock() + defer t.closeLk.RUnlock() + if t.closed { return nil, ErrClosed } diff --git a/events/publisher.go b/events/publisher.go index 056984b135..703b5dc0e0 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -25,7 +25,7 @@ type Publisher[T any] struct { ch Channel[T] event Subscription[T] stream chan any - closed bool + closed chan any wg sync.WaitGroup } @@ -41,6 +41,7 @@ func NewPublisher[T any](ch Channel[T], streamBufferSize int) (*Publisher[T], er ch: ch, event: evtCh, stream: make(chan any, streamBufferSize), + closed: make(chan any), }, nil } @@ -57,8 +58,11 @@ func (p *Publisher[T]) Stream() chan any { // Publish sends data to the streaming channel and unsubscribes if // the client hangs for too long. func (p *Publisher[T]) Publish(data any) { - if p.closed { + // check if stream is closed before sending + select { + case <-p.closed: return + default: } // don't allow closing while sending is in flight @@ -66,6 +70,7 @@ func (p *Publisher[T]) Publish(data any) { defer p.wg.Done() select { + case <-p.closed: case p.stream <- data: case <-time.After(clientTimeout): // if sending to the client times out, we assume an inactive or problematic client and @@ -81,7 +86,7 @@ func (p *Publisher[T]) Unsubscribe() { } func (p *Publisher[T]) unsubscribe() { - p.closed = true p.ch.Unsubscribe(p.event) close(p.stream) + close(p.closed) } From 509cf007db8658c2c97b055f1ca4d5a270f70b47 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 14:42:57 -0500 Subject: [PATCH 026/107] use transaction close lock in memory datastore --- datastore/memory/txn.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/datastore/memory/txn.go b/datastore/memory/txn.go index c1d663ef53..7430077e46 100644 --- a/datastore/memory/txn.go +++ b/datastore/memory/txn.go @@ -123,9 +123,9 @@ func (t *basicTxn) GetSize(ctx context.Context, key ds.Key) (size int, err error // Has implements ds.Has. 
func (t *basicTxn) Has(ctx context.Context, key ds.Key) (exists bool, err error) { - t.ds.closeLk.RLock() - defer t.ds.closeLk.RUnlock() - if t.ds.closed { + t.closeLk.RLock() + defer t.closeLk.RUnlock() + if t.closed { return false, ErrClosed } From add612fd40b3e6b044bcc6bdfbe997ef84945bd2 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 15:38:31 -0500 Subject: [PATCH 027/107] revert publisher changes --- events/publisher.go | 24 +----------------------- http/server_store.go | 1 - 2 files changed, 1 insertion(+), 24 deletions(-) diff --git a/events/publisher.go b/events/publisher.go index 703b5dc0e0..73ea086890 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -11,7 +11,6 @@ package events import ( - "sync" "time" ) @@ -25,8 +24,6 @@ type Publisher[T any] struct { ch Channel[T] event Subscription[T] stream chan any - closed chan any - wg sync.WaitGroup } // NewPublisher creates a new Publisher with the given event Channel, subscribes to the @@ -41,7 +38,6 @@ func NewPublisher[T any](ch Channel[T], streamBufferSize int) (*Publisher[T], er ch: ch, event: evtCh, stream: make(chan any, streamBufferSize), - closed: make(chan any), }, nil } @@ -58,35 +54,17 @@ func (p *Publisher[T]) Stream() chan any { // Publish sends data to the streaming channel and unsubscribes if // the client hangs for too long. func (p *Publisher[T]) Publish(data any) { - // check if stream is closed before sending select { - case <-p.closed: - return - default: - } - - // don't allow closing while sending is in flight - p.wg.Add(1) - defer p.wg.Done() - - select { - case <-p.closed: case p.stream <- data: case <-time.After(clientTimeout): // if sending to the client times out, we assume an inactive or problematic client and // unsubscribe them from the event stream - p.unsubscribe() + p.Unsubscribe() } } // Unsubscribe unsubscribes the client for the event channel and closes the stream. 
func (p *Publisher[T]) Unsubscribe() { - p.wg.Wait() - p.unsubscribe() -} - -func (p *Publisher[T]) unsubscribe() { p.ch.Unsubscribe(p.event) close(p.stream) - close(p.closed) } diff --git a/http/server_store.go b/http/server_store.go index d602fd3b2e..8e35f88beb 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -284,7 +284,6 @@ func (s *StoreHandler) ExecRequest(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": errors}) return } - defer result.Pub.Unsubscribe() c.Header("Content-Type", "text/event-stream") c.Header("Cache-Control", "no-cache") From b8614a3d8dbdfe4d6695ad60ec10d95f41becf90 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 16:17:26 -0500 Subject: [PATCH 028/107] disable lens transactions in http middleware --- http/middleware.go | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/http/middleware.go b/http/middleware.go index f2ea70c7eb..ca1f83304b 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -66,13 +66,14 @@ func DatabaseMiddleware(db client.DB) gin.HandlerFunc { func LensMiddleware() gin.HandlerFunc { return func(c *gin.Context) { store := c.MustGet("store").(client.Store) + c.Set("lens", store.LensRegistry()) - tx, ok := c.Get("tx") - if ok { - c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) - } else { - c.Set("lens", store.LensRegistry()) - } + // tx, ok := c.Get("tx") + // if ok { + // c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) + // } else { + // c.Set("lens", store.LensRegistry()) + // } c.Next() } } From efbc15c1923a90fbcab4720225c35a0c9feb1427 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 24 Aug 2023 17:29:52 -0500 Subject: [PATCH 029/107] enable transaction middleware. make http client WithTxn public --- http/client_store.go | 5 +++++ http/middleware.go | 13 ++++++------- http/server.go | 3 ++- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/http/client_store.go b/http/client_store.go index e14c01d67e..b0b383496e 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -82,6 +82,11 @@ func (c *StoreClient) NewConcurrentTxn(ctx context.Context, readOnly bool) (data return &TxClient{txRes.ID, c.http}, nil } +func (c *StoreClient) WithTxn(tx datastore.Txn) client.Store { + client := c.http.withTxn(tx.ID()) + return &StoreClient{client} +} + func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.http.baseURL.JoinPath("p2p", "replicators") diff --git a/http/middleware.go b/http/middleware.go index ca1f83304b..f2ea70c7eb 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -66,14 +66,13 @@ func DatabaseMiddleware(db client.DB) gin.HandlerFunc { func LensMiddleware() gin.HandlerFunc { return func(c *gin.Context) { store := c.MustGet("store").(client.Store) - c.Set("lens", store.LensRegistry()) - // tx, ok := c.Get("tx") - // if ok { - // c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) - // } else { - // c.Set("lens", store.LensRegistry()) - // } + tx, ok := c.Get("tx") + if ok { + c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) + } else { + c.Set("lens", store.LensRegistry()) + } c.Next() } } diff --git a/http/server.go b/http/server.go index c5d066da9b..cdfb9bfe8a 100644 --- a/http/server.go +++ b/http/server.go @@ -33,7 +33,8 @@ func NewServer(db client.DB) *Server { collectionHandler := &CollectionHandler{} lensHandler := &LensHandler{} - router := gin.Default() + router := gin.New() + router.Use(gin.Recovery()) 
api := router.Group("/api/v0") api.Use(TransactionMiddleware(db, txs), DatabaseMiddleware(db)) From dc4bbdde56d2456eec9eab5571d4f961ad812ea6 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 10:51:52 -0500 Subject: [PATCH 030/107] change http server implementation to chi --- go.mod | 15 --- go.sum | 36 ------ http/middleware.go | 99 ++++++++------- http/server.go | 140 ++++++++++++---------- http/server_collection.go | 232 ++++++++++++++++++------------------ http/server_lens.go | 78 ++++++------ http/server_store.go | 231 ++++++++++++++++++----------------- http/server_tx.go | 53 ++++---- http/server_utils.go | 31 +++++ tests/integration/utils2.go | 4 - 10 files changed, 469 insertions(+), 450 deletions(-) create mode 100644 http/server_utils.go diff --git a/go.mod b/go.mod index 4fab56836d..0ca721256c 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,6 @@ require ( github.com/dgraph-io/badger/v4 v4.1.0 github.com/evanphx/json-patch/v5 v5.6.0 github.com/fxamacker/cbor/v2 v2.5.0 - github.com/gin-gonic/gin v1.9.1 github.com/go-chi/chi/v5 v5.0.10 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.4.2 @@ -58,9 +57,7 @@ require ( require ( github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/bytedance/sonic v1.9.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect @@ -75,15 +72,9 @@ require ( github.com/flynn/noise v1.0.0 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.2 // indirect - github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/go-playground/validator/v10 v10.14.0 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect - github.com/goccy/go-json v0.10.2 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/glog v1.1.0 // indirect @@ -128,11 +119,9 @@ require ( github.com/ipld/go-ipld-prime v0.20.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect - github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.16.7 // indirect github.com/klauspost/cpuid/v2 v2.2.5 // indirect github.com/koron/go-ssdp v0.0.4 // indirect - github.com/leodido/go-urn v1.2.4 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect @@ -154,8 +143,6 @@ require ( github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect github.com/minio/sha256-simd v1.0.1 // indirect - 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mr-tron/base58 v1.2.0 // indirect github.com/multiformats/go-base32 v0.1.0 // indirect github.com/multiformats/go-base36 v0.2.0 // indirect @@ -190,7 +177,6 @@ require ( github.com/subosito/gotenv v1.4.2 // indirect github.com/tetratelabs/wazero v1.3.1 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect - github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/whyrusleeping/cbor-gen v0.0.0-20230126041949-52956bd4c9aa // indirect github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect @@ -201,7 +187,6 @@ require ( go.uber.org/dig v1.17.0 // indirect go.uber.org/fx v1.20.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/arch v0.3.0 // indirect golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 // indirect golang.org/x/mod v0.12.0 // indirect golang.org/x/sync v0.3.0 // indirect diff --git a/go.sum b/go.sum index 3fac007d60..05de5e3ed3 100644 --- a/go.sum +++ b/go.sum @@ -107,9 +107,6 @@ github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46f github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM= -github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= -github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= -github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= @@ -120,9 +117,6 @@ github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wXkRAgjxjQ= -github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= -github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -221,13 +215,7 @@ github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4 github.com/fsnotify/fsnotify 
v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE= github.com/fxamacker/cbor/v2 v2.5.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= -github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= -github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= -github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= -github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= @@ -252,20 +240,11 @@ github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= -github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= -github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= -github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= -github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= -github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= @@ -618,8 +597,6 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV github.com/json-iterator/go v1.1.7/go.mod 
h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -636,7 +613,6 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6 github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I= github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -657,8 +633,6 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 h1:hC67vWtvuDnw8w6u4jLFoj3SOH92/4Lq8SCR++L7njw= github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25/go.mod h1:rDE4oJUIAQoXX9heUg8VOQf5LscRWj0BeE5mbGqOs3E= -github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= -github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/libp2p/go-addr-util v0.0.1/go.mod h1:4ac6O7n9rIAKB1dnd+s8IbbMXkt+oBpzX4/+RACcnlQ= github.com/libp2p/go-addr-util v0.0.2/go.mod h1:Ecd6Fb3yIuLzq4bD7VcywcVSBtefcAwnUISBM3WG15E= github.com/libp2p/go-buffer-pool v0.0.1/go.mod h1:xtyIz9PMobb13WaxR6Zo1Pd1zXJKYg0a8KiIvDp3TzQ= @@ -976,12 +950,9 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= 
-github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mr-tron/base58 v1.1.0/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8= github.com/mr-tron/base58 v1.1.1/go.mod h1:xcD2VGqlgYjBdcBLw+TuYLr8afG+Hj8g2eTVqeSzSU8= github.com/mr-tron/base58 v1.1.2/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= @@ -1275,7 +1246,6 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= @@ -1293,8 +1263,6 @@ github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsc github.com/tidwall/btree v1.6.0 h1:LDZfKfQIBHGHWSwckhXI0RPSXzlo+KYdjK7FWSqOzzg= github.com/tidwall/btree v1.6.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= -github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= @@ -1394,9 +1362,6 @@ go.uber.org/zap v1.19.1/go.mod h1:j3DNczoxDZroyBnOT1L/Q79cfUMGZxlv/9dzN7SM1rI= go.uber.org/zap v1.25.0 h1:4Hvk6GtkucQ790dqmj7l1eEnRdKm3k3ZUrUMS2d5+5c= go.uber.org/zap v1.25.0/go.mod h1:JIAUzQIH94IC4fOJQm7gMmBJP5k7wQfdcnYdPoEXJYk= go4.org v0.0.0-20180809161055-417644f6feb5/go.mod h1:MkTOUMDaeVYJUOUsaDXIhWPZYa1yOyC1qaOBpL57BhE= -golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= -golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/build v0.0.0-20190111050920-041ab4dc3f9d/go.mod h1:OWs+y06UdEOHN4y+MfF/py+xQ/tYqIWW03b70/CG9Rw= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= @@ -1884,7 +1849,6 @@ honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9 lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI= lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod 
h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/http/middleware.go b/http/middleware.go index f2ea70c7eb..b92657561f 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -11,11 +11,12 @@ package http import ( + "context" "net/http" "strconv" "sync" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" @@ -23,77 +24,93 @@ import ( const TX_HEADER_NAME = "x-defradb-tx" +type contextKey string + +var ( + txsContextKey = contextKey("txs") + dbContextKey = contextKey("db") + txContextKey = contextKey("tx") + storeContextKey = contextKey("store") + lensContextKey = contextKey("lens") + colContextKey = contextKey("col") +) + // TransactionMiddleware sets the transaction context for the current request. -func TransactionMiddleware(db client.DB, txs *sync.Map) gin.HandlerFunc { - return func(c *gin.Context) { - txValue := c.GetHeader(TX_HEADER_NAME) +func TransactionMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + txs := req.Context().Value(txsContextKey).(*sync.Map) + + txValue := req.Header.Get(TX_HEADER_NAME) if txValue == "" { - c.Next() + next.ServeHTTP(rw, req) return } id, err := strconv.ParseUint(txValue, 10, 64) if err != nil { - c.Next() + next.ServeHTTP(rw, req) return } tx, ok := txs.Load(id) if !ok { - c.Next() + next.ServeHTTP(rw, req) return } - c.Set("tx", tx) - c.Next() - } + ctx := context.WithValue(req.Context(), txContextKey, tx) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) } -// DatabaseMiddleware sets the db context for the current request. -func DatabaseMiddleware(db client.DB) gin.HandlerFunc { - return func(c *gin.Context) { - c.Set("db", db) +// StoreMiddleware sets the db context for the current request. +func StoreMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) - tx, ok := c.Get("tx") - if ok { - c.Set("store", db.WithTxn(tx.(datastore.Txn))) + var store client.Store + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + store = db.WithTxn(tx) } else { - c.Set("store", db) + store = db } - c.Next() - } + + ctx := context.WithValue(req.Context(), storeContextKey, store) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) } // LensMiddleware sets the lens context for the current request. -func LensMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func LensMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - tx, ok := c.Get("tx") - if ok { - c.Set("lens", store.LensRegistry().WithTxn(tx.(datastore.Txn))) + var lens client.LensRegistry + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + lens = store.LensRegistry().WithTxn(tx) } else { - c.Set("lens", store.LensRegistry()) + lens = store.LensRegistry() } - c.Next() - } + + ctx := context.WithValue(req.Context(), lensContextKey, lens) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) } // CollectionMiddleware sets the collection context for the current request. 
-func CollectionMiddleware() gin.HandlerFunc { - return func(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func CollectionMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - col, err := store.GetCollectionByName(c.Request.Context(), c.Param("name")) + col, err := store.GetCollectionByName(req.Context(), chi.URLParam(req, "name")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + rw.WriteHeader(http.StatusNotFound) return } - tx, ok := c.Get("tx") - if ok { - c.Set("col", col.WithTxn(tx.(datastore.Txn))) - } else { - c.Set("col", col) + if tx, ok := req.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) } - c.Next() - } + + ctx := context.WithValue(req.Context(), colContextKey, col) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) } diff --git a/http/server.go b/http/server.go index cdfb9bfe8a..bcb9295b52 100644 --- a/http/server.go +++ b/http/server.go @@ -11,17 +11,21 @@ package http import ( + "context" "net/http" "sync" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" "github.com/sourcenetwork/defradb/client" ) +type H map[string]any + type Server struct { db client.DB - router *gin.Engine + router *chi.Mux txs *sync.Map } @@ -33,68 +37,78 @@ func NewServer(db client.DB) *Server { collectionHandler := &CollectionHandler{} lensHandler := &LensHandler{} - router := gin.New() - router.Use(gin.Recovery()) - - api := router.Group("/api/v0") - api.Use(TransactionMiddleware(db, txs), DatabaseMiddleware(db)) - - tx := api.Group("/tx") - tx.POST("/", txHandler.NewTxn) - tx.POST("/concurrent", txHandler.NewConcurrentTxn) - tx.POST("/:id", txHandler.Commit) - tx.DELETE("/:id", txHandler.Discard) - - backup := api.Group("/backup") - backup.POST("/export", storeHandler.BasicExport) - backup.POST("/import", storeHandler.BasicImport) - - schema := api.Group("/schema") - schema.POST("/", storeHandler.AddSchema) - schema.PATCH("/", storeHandler.PatchSchema) - - collections := api.Group("/collections") - collections.GET("/", storeHandler.GetCollection) - - collections_tx := collections.Group("/") - collections_tx.Use(CollectionMiddleware()) - - collections_tx.GET("/:name", collectionHandler.GetAllDocKeys) - collections_tx.POST("/:name", collectionHandler.Create) - collections_tx.PATCH("/:name", collectionHandler.UpdateWith) - collections_tx.DELETE("/:name", collectionHandler.DeleteWith) - collections_tx.POST("/:name/indexes", collectionHandler.CreateIndex) - collections_tx.GET("/:name/indexes", collectionHandler.GetIndexes) - collections_tx.DELETE("/:name/indexes/:index", collectionHandler.DropIndex) - collections_tx.GET("/:name/:key", collectionHandler.Get) - collections_tx.POST("/:name/:key", collectionHandler.Save) - collections_tx.PATCH("/:name/:key", collectionHandler.Update) - collections_tx.DELETE("/:name/:key", collectionHandler.Delete) - - lens := api.Group("/lens") - lens.Use(LensMiddleware()) - - lens.GET("/", lensHandler.Config) - lens.POST("/", lensHandler.SetMigration) - lens.POST("/reload", lensHandler.ReloadLenses) - lens.GET("/:version", lensHandler.HasMigration) - lens.POST("/:version/up", lensHandler.MigrateUp) - lens.POST("/:version/down", lensHandler.MigrateDown) - - graphQL := api.Group("/graphql") - graphQL.GET("/", storeHandler.ExecRequest) - graphQL.POST("/", storeHandler.ExecRequest) - - p2p := 
api.Group("/p2p") - p2p_replicators := p2p.Group("/replicators") - p2p_replicators.GET("/", storeHandler.GetAllReplicators) - p2p_replicators.POST("/", storeHandler.SetReplicator) - p2p_replicators.DELETE("/", storeHandler.DeleteReplicator) + router := chi.NewRouter() + router.Use(middleware.RequestID) + router.Use(middleware.Logger) + router.Use(middleware.Recoverer) + + apiMiddleware := func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + ctx = context.WithValue(ctx, dbContextKey, db) + ctx = context.WithValue(ctx, txsContextKey, txs) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) + } - p2p_collections := p2p.Group("/collections") - p2p_collections.GET("/", storeHandler.GetAllP2PCollections) - p2p_collections.POST("/:id", storeHandler.AddP2PCollection) - p2p_collections.DELETE("/:id", storeHandler.RemoveP2PCollection) + router.Route("/api/v0", func(api chi.Router) { + api.Use(apiMiddleware, TransactionMiddleware, StoreMiddleware) + api.Route("/tx", func(tx chi.Router) { + tx.Post("/", txHandler.NewTxn) + tx.Post("/concurrent", txHandler.NewConcurrentTxn) + tx.Post("/{id}", txHandler.Commit) + tx.Delete("/{id}", txHandler.Discard) + }) + api.Route("/backup", func(backup chi.Router) { + backup.Post("/export", storeHandler.BasicExport) + backup.Post("/import", storeHandler.BasicImport) + }) + api.Route("/schema", func(schema chi.Router) { + schema.Post("/", storeHandler.AddSchema) + schema.Patch("/", storeHandler.PatchSchema) + }) + api.Route("/collections", func(collections chi.Router) { + collections.Get("/", storeHandler.GetCollection) + // with collection middleware + collections_tx := collections.With(CollectionMiddleware) + collections_tx.Get("/{name}", collectionHandler.GetAllDocKeys) + collections_tx.Post("/{name}", collectionHandler.Create) + collections_tx.Patch("/{name}", collectionHandler.UpdateWith) + collections_tx.Delete("/{name}", collectionHandler.DeleteWith) + collections_tx.Post("/{name}/indexes", collectionHandler.CreateIndex) + collections_tx.Get("/{name}/indexes", collectionHandler.GetIndexes) + collections_tx.Delete("/{name}/indexes/{index}", collectionHandler.DropIndex) + collections_tx.Get("/{name}/{key}", collectionHandler.Get) + collections_tx.Post("/{name}/{key}", collectionHandler.Save) + collections_tx.Patch("/{name}/{key}", collectionHandler.Update) + collections_tx.Delete("/{name}/{key}", collectionHandler.Delete) + }) + api.Route("/lens", func(lens chi.Router) { + lens.Use(LensMiddleware) + lens.Get("/", lensHandler.Config) + lens.Post("/", lensHandler.SetMigration) + lens.Post("/reload", lensHandler.ReloadLenses) + lens.Get("/{version}", lensHandler.HasMigration) + lens.Post("/{version}/up", lensHandler.MigrateUp) + lens.Post("/{version}/down", lensHandler.MigrateDown) + }) + api.Route("/graphql", func(graphQL chi.Router) { + graphQL.Get("/", storeHandler.ExecRequest) + graphQL.Post("/", storeHandler.ExecRequest) + }) + api.Route("/p2p", func(p2p chi.Router) { + p2p.Route("/replicators", func(p2p_replicators chi.Router) { + p2p_replicators.Get("/", storeHandler.GetAllReplicators) + p2p_replicators.Post("/", storeHandler.SetReplicator) + p2p_replicators.Delete("/", storeHandler.DeleteReplicator) + }) + p2p.Route("/collections", func(p2p_collections chi.Router) { + p2p_collections.Get("/", storeHandler.GetAllP2PCollections) + p2p_collections.Post("/{id}", storeHandler.AddP2PCollection) + p2p_collections.Delete("/{id}", storeHandler.RemoveP2PCollection) + }) + }) + }) 
return &Server{ db: db, diff --git a/http/server_collection.go b/http/server_collection.go index f1f3396b00..dc7d1c0a3b 100644 --- a/http/server_collection.go +++ b/http/server_collection.go @@ -13,10 +13,10 @@ package http import ( "encoding/json" "fmt" - "io" "net/http" + "strconv" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" "github.com/sourcenetwork/defradb/client" ) @@ -36,222 +36,224 @@ type CollectionUpdateRequest struct { Updater string `json:"updater"` } -func (s *CollectionHandler) Create(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) Create(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) var body any - if err := c.ShouldBindJSON(&body); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &body); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } + switch t := body.(type) { case []map[string]any: var docList []*client.Document for _, docMap := range t { doc, err := client.NewDocFromMap(docMap) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } docList = append(docList, doc) } - if err := col.CreateMany(c.Request.Context(), docList); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := col.CreateMany(req.Context(), docList); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } + rw.WriteHeader(http.StatusOK) case map[string]any: doc, err := client.NewDocFromMap(t) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - if err := col.Create(c.Request.Context(), doc); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := col.Create(req.Context(), doc); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } + rw.WriteHeader(http.StatusOK) default: - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid request body"}) - return + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid request body"}) } - c.Status(http.StatusOK) } -func (s *CollectionHandler) Save(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) Save(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) var docMap map[string]any - if err := c.ShouldBind(&docMap); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &docMap); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err = col.Save(c.Request.Context(), doc) + err = col.Save(req.Context(), doc) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) DeleteWith(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request) { + col := 
req.Context().Value(colContextKey).(client.Collection) var request CollectionDeleteRequest - if err := c.ShouldBind(&request); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } switch { case request.Filter != nil: - result, err := col.DeleteWith(c.Request.Context(), request.Filter) + result, err := col.DeleteWith(req.Context(), request.Filter) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - result, err := col.DeleteWith(c.Request.Context(), docKey) + result, err := col.DeleteWith(req.Context(), docKey) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) case request.Keys != nil: var docKeys []client.DocKey for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } docKeys = append(docKeys, docKey) } - result, err := col.DeleteWith(c.Request.Context(), docKeys) + result, err := col.DeleteWith(req.Context(), docKeys) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) default: - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid delete request"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid delete request"}) } } -func (s *CollectionHandler) UpdateWith(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) var request CollectionUpdateRequest - if err := c.ShouldBind(&request); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } switch { case request.Filter != nil: - result, err := col.UpdateWith(c.Request.Context(), request.Filter, request.Updater) + result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - result, err := col.UpdateWith(c.Request.Context(), docKey, request.Updater) + result, err := col.UpdateWith(req.Context(), docKey, request.Updater) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) 
+ responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) case request.Keys != nil: var docKeys []client.DocKey for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } docKeys = append(docKeys, docKey) } - result, err := col.UpdateWith(c.Request.Context(), docKeys, request.Updater) + result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) default: - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid update request"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid update request"}) } } -func (s *CollectionHandler) Update(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) Update(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) var docMap map[string]any - if err := c.ShouldBindJSON(&docMap); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &docMap); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - if doc.Key().String() != c.Param("key") { - c.JSON(http.StatusBadRequest, gin.H{"error": "document key does not match"}) + if doc.Key().String() != chi.URLParam(req, "key") { + responseJSON(rw, http.StatusBadRequest, H{"error": "document key does not match"}) return } - err = col.Update(c.Request.Context(), doc) + err = col.Update(req.Context(), doc) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) Delete(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) - docKey, err := client.NewDocKeyFromString(c.Param("key")) + docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - _, err = col.Delete(c.Request.Context(), docKey) + _, err = col.Delete(req.Context(), docKey) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) Get(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) Get(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("deleted")) - docKey, err := client.NewDocKeyFromString(c.Param("key")) + docKey, err := 
client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - _, err = col.Get(c.Request.Context(), docKey, c.Query("deleted") != "") + _, err = col.Get(req.Context(), docKey, showDeleted) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } type DocKeyResult struct { @@ -259,27 +261,29 @@ type DocKeyResult struct { Error string `json:"error"` } -func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) + + flusher, ok := rw.(http.Flusher) + if !ok { + responseJSON(rw, http.StatusBadRequest, H{"error": "streaming not supported"}) + return + } - docKeyCh, err := col.GetAllDocKeys(c.Request.Context()) + docKeyCh, err := col.GetAllDocKeys(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Header("Content-Type", "text/event-stream") - c.Header("Cache-Control", "no-cache") - c.Header("Connection", "keep-alive") + rw.Header().Set("Content-Type", "text/event-stream") + rw.Header().Set("Cache-Control", "no-cache") + rw.Header().Set("Connection", "keep-alive") - c.Status(http.StatusOK) - c.Writer.Flush() + rw.WriteHeader(http.StatusOK) + flusher.Flush() - c.Stream(func(w io.Writer) bool { - docKey, open := <-docKeyCh - if !open { - return false - } + for docKey := range docKeyCh { results := &DocKeyResult{ Key: docKey.Key.String(), } @@ -288,47 +292,47 @@ func (s *CollectionHandler) GetAllDocKeys(c *gin.Context) { } data, err := json.Marshal(results) if err != nil { - return false + return } - fmt.Fprintf(w, "data: %s\n\n", data) - return true - }) + fmt.Fprintf(rw, "data: %s\n\n", data) + flusher.Flush() + } } -func (s *CollectionHandler) CreateIndex(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) var indexDesc client.IndexDescription - if err := c.ShouldBind(&indexDesc); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &indexDesc); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - index, err := col.CreateIndex(c.Request.Context(), indexDesc) + index, err := col.CreateIndex(req.Context(), indexDesc) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, index) + responseJSON(rw, http.StatusOK, index) } -func (s *CollectionHandler) GetIndexes(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) - indexes, err := col.GetIndexes(c.Request.Context()) + indexes, err := col.GetIndexes(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - 
c.JSON(http.StatusOK, indexes) + responseJSON(rw, http.StatusOK, indexes) } -func (s *CollectionHandler) DropIndex(c *gin.Context) { - col := c.MustGet("col").(client.Collection) +func (s *CollectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) { + col := req.Context().Value(colContextKey).(client.Collection) - err := col.DropIndex(c.Request.Context(), c.Param("index")) + err := col.DropIndex(req.Context(), chi.URLParam(req, "index")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } diff --git a/http/server_lens.go b/http/server_lens.go index ada8a93641..8f4b38ab3d 100644 --- a/http/server_lens.go +++ b/http/server_lens.go @@ -13,7 +13,7 @@ package http import ( "net/http" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" "github.com/sourcenetwork/immutable/enumerable" "github.com/sourcenetwork/defradb/client" @@ -21,87 +21,87 @@ import ( type LensHandler struct{} -func (s *LensHandler) ReloadLenses(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) - err := lens.ReloadLenses(c.Request.Context()) + err := lens.ReloadLenses(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *LensHandler) SetMigration(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) - var req client.LensConfig - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + var cfg client.LensConfig + if err := requestJSON(req, &cfg); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err := lens.SetMigration(c.Request.Context(), req) + err := lens.SetMigration(req.Context(), cfg) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *LensHandler) MigrateUp(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) var src enumerable.Enumerable[map[string]any] - if err := c.ShouldBind(src); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &src); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - result, err := lens.MigrateUp(c.Request.Context(), src, c.Param("version")) + result, err := lens.MigrateUp(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) } -func (s *LensHandler) MigrateDown(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) 
MigrateDown(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) var src enumerable.Enumerable[map[string]any] - if err := c.ShouldBind(src); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &src); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - result, err := lens.MigrateDown(c.Request.Context(), src, c.Param("version")) + result, err := lens.MigrateDown(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, result) + responseJSON(rw, http.StatusOK, result) } -func (s *LensHandler) Config(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) Config(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) - cfgs, err := lens.Config(c.Request.Context()) + cfgs, err := lens.Config(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, cfgs) + responseJSON(rw, http.StatusOK, cfgs) } -func (s *LensHandler) HasMigration(c *gin.Context) { - lens := c.MustGet("lens").(client.LensRegistry) +func (s *LensHandler) HasMigration(rw http.ResponseWriter, req *http.Request) { + lens := req.Context().Value(lensContextKey).(client.LensRegistry) - exists, err := lens.HasMigration(c.Request.Context(), c.Param("version")) + exists, err := lens.HasMigration(req.Context(), chi.URLParam(req, "version")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } if !exists { - c.JSON(http.StatusNotFound, gin.H{"error": "migration not found"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "migration not found"}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } diff --git a/http/server_store.go b/http/server_store.go index 8e35f88beb..c5adc5bb45 100644 --- a/http/server_store.go +++ b/http/server_store.go @@ -17,13 +17,13 @@ import ( "io" "net/http" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" "github.com/sourcenetwork/defradb/client" ) type GraphQLRequest struct { - Query string `json:"query" form:"query"` + Query string `json:"query"` } type GraphQLResponse struct { @@ -72,240 +72,249 @@ func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { type StoreHandler struct{} -func (s *StoreHandler) SetReplicator(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - var req client.Replicator - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + var rep client.Replicator + if err := requestJSON(req, &rep); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err := store.SetReplicator(c.Request.Context(), req) + err := store.SetReplicator(req.Context(), rep) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + 
rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) DeleteReplicator(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - var req client.Replicator - if err := c.ShouldBindJSON(&req); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + var rep client.Replicator + if err := requestJSON(req, &rep); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err := store.DeleteReplicator(c.Request.Context(), req) + err := store.DeleteReplicator(req.Context(), rep) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetAllReplicators(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - reps, err := store.GetAllReplicators(c.Request.Context()) + reps, err := store.GetAllReplicators(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, reps) + responseJSON(rw, http.StatusOK, reps) } -func (s *StoreHandler) AddP2PCollection(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - err := store.AddP2PCollection(c.Request.Context(), c.Param("id")) + err := store.AddP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) RemoveP2PCollection(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - err := store.RemoveP2PCollection(c.Request.Context(), c.Param("id")) + err := store.RemoveP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetAllP2PCollections(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - cols, err := store.GetAllP2PCollections(c.Request.Context()) + cols, err := store.GetAllP2PCollections(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, cols) + responseJSON(rw, http.StatusOK, cols) } -func (s *StoreHandler) BasicImport(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { + store := 
req.Context().Value(storeContextKey).(client.Store) var config client.BackupConfig - if err := c.ShouldBindJSON(&config); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &config); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err := store.BasicImport(c.Request.Context(), config.Filepath) + err := store.BasicImport(req.Context(), config.Filepath) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) BasicExport(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) var config client.BackupConfig - if err := c.ShouldBindJSON(&config); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + if err := requestJSON(req, &config); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err := store.BasicExport(c.Request.Context(), &config) + err := store.BasicExport(req.Context(), &config) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) AddSchema(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - schema, err := io.ReadAll(c.Request.Body) + schema, err := io.ReadAll(req.Body) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - cols, err := store.AddSchema(c.Request.Context(), string(schema)) + cols, err := store.AddSchema(req.Context(), string(schema)) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, cols) + responseJSON(rw, http.StatusOK, cols) } -func (s *StoreHandler) PatchSchema(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - patch, err := io.ReadAll(c.Request.Body) + patch, err := io.ReadAll(req.Body) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - err = store.PatchSchema(c.Request.Context(), string(patch)) + err = store.PatchSchema(req.Context(), string(patch)) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetCollection(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) switch { - case c.Query("name") != "": - col, err := store.GetCollectionByName(c.Request.Context(), c.Query("name")) + case req.URL.Query().Has("name"): + col, err := 
store.GetCollectionByName(req.Context(), req.URL.Query().Get("name")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, col.Description()) - case c.Query("schema_id") != "": - col, err := store.GetCollectionBySchemaID(c.Request.Context(), c.Query("schema_id")) + responseJSON(rw, http.StatusOK, col.Description()) + case req.URL.Query().Has("schema_id"): + col, err := store.GetCollectionBySchemaID(req.Context(), req.URL.Query().Get("schema_id")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, col.Description()) - case c.Query("version_id") != "": - col, err := store.GetCollectionByVersionID(c.Request.Context(), c.Query("version_id")) + responseJSON(rw, http.StatusOK, col.Description()) + case req.URL.Query().Has("version_id"): + col, err := store.GetCollectionByVersionID(req.Context(), req.URL.Query().Get("version_id")) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, col.Description()) + responseJSON(rw, http.StatusOK, col.Description()) default: - cols, err := store.GetAllCollections(c.Request.Context()) + cols, err := store.GetAllCollections(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } colDesc := make([]client.CollectionDescription, len(cols)) for i, col := range cols { colDesc[i] = col.Description() } - c.JSON(http.StatusOK, colDesc) + responseJSON(rw, http.StatusOK, colDesc) } } -func (s *StoreHandler) GetAllIndexes(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) - indexes, err := store.GetAllIndexes(c.Request.Context()) + indexes, err := store.GetAllIndexes(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - c.JSON(http.StatusOK, indexes) + responseJSON(rw, http.StatusOK, indexes) } -func (s *StoreHandler) ExecRequest(c *gin.Context) { - store := c.MustGet("store").(client.Store) +func (s *StoreHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) var request GraphQLRequest - if err := c.ShouldBind(&request); err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) - return - } - if request.Query == "" { - c.JSON(http.StatusBadRequest, gin.H{"error": "missing request"}) + switch { + case req.URL.Query().Get("query") != "": + request.Query = req.URL.Query().Get("query") + case req.Body != nil: + if err := requestJSON(req, &request); err != nil { + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + return + } + default: + responseJSON(rw, http.StatusBadRequest, H{"error": "missing request"}) return } - result := store.ExecRequest(c.Request.Context(), request.Query) + result := store.ExecRequest(req.Context(), request.Query) var errors []string for _, err := range result.GQL.Errors { errors = append(errors, err.Error()) } if result.Pub == nil { - c.JSON(http.StatusOK, gin.H{"data": result.GQL.Data, "errors": 
errors}) + responseJSON(rw, http.StatusOK, H{"data": result.GQL.Data, "errors": errors}) + return + } + flusher, ok := rw.(http.Flusher) + if !ok { + responseJSON(rw, http.StatusBadRequest, H{"error": "streaming not supported"}) return } - c.Header("Content-Type", "text/event-stream") - c.Header("Cache-Control", "no-cache") - c.Header("Connection", "keep-alive") + rw.Header().Add("Content-Type", "text/event-stream") + rw.Header().Add("Cache-Control", "no-cache") + rw.Header().Add("Connection", "keep-alive") - c.Status(http.StatusOK) - c.Writer.Flush() + rw.WriteHeader(http.StatusOK) + flusher.Flush() - c.Stream(func(w io.Writer) bool { + for { select { - case <-c.Request.Context().Done(): - return false + case <-req.Context().Done(): + return case item, open := <-result.Pub.Stream(): if !open { - return false + return } data, err := json.Marshal(item) if err != nil { - return false + return } - fmt.Fprintf(w, "data: %s\n\n", data) - return true + fmt.Fprintf(rw, "data: %s\n\n", data) + flusher.Flush() } - }) + } } diff --git a/http/server_tx.go b/http/server_tx.go index a58eea2a6c..14cb99128d 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -15,7 +15,7 @@ import ( "strconv" "sync" - "github.com/gin-gonic/gin" + "github.com/go-chi/chi/v5" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" @@ -29,64 +29,63 @@ type CreateTxResponse struct { ID uint64 `json:"id"` } -func (h *TxHandler) NewTxn(c *gin.Context) { - db := c.MustGet("db").(client.DB) - readOnly, _ := strconv.ParseBool(c.Query("read_only")) +func (h *TxHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) - tx, err := db.NewTxn(c.Request.Context(), readOnly) + tx, err := db.NewTxn(req.Context(), readOnly) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } h.txs.Store(tx.ID(), tx) - - c.JSON(http.StatusOK, &CreateTxResponse{tx.ID()}) + responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } -func (h *TxHandler) NewConcurrentTxn(c *gin.Context) { - db := c.MustGet("db").(client.DB) - readOnly, _ := strconv.ParseBool(c.Query("read_only")) +func (h *TxHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) - tx, err := db.NewConcurrentTxn(c.Request.Context(), readOnly) + tx, err := db.NewConcurrentTxn(req.Context(), readOnly) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } h.txs.Store(tx.ID(), tx) - c.JSON(http.StatusOK, &CreateTxResponse{tx.ID()}) + responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } -func (h *TxHandler) Commit(c *gin.Context) { - txId, err := strconv.ParseUint(c.Param("id"), 10, 64) +func (h *TxHandler) Commit(rw http.ResponseWriter, req *http.Request) { + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return } txVal, ok := h.txs.Load(txId) if !ok { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, H{"error": 
"invalid transaction id"}) return } - err = txVal.(datastore.Txn).Commit(c.Request.Context()) + err = txVal.(datastore.Txn).Commit(req.Context()) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } h.txs.Delete(txId) - c.Status(http.StatusOK) + rw.WriteHeader(http.StatusOK) } -func (h *TxHandler) Discard(c *gin.Context) { - txId, err := strconv.ParseUint(c.Param("id"), 10, 64) +func (h *TxHandler) Discard(rw http.ResponseWriter, req *http.Request) { + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return } txVal, ok := h.txs.LoadAndDelete(txId) if !ok { - c.JSON(http.StatusBadRequest, gin.H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return } - txVal.(datastore.Txn).Discard(c.Request.Context()) - c.Status(http.StatusOK) + txVal.(datastore.Txn).Discard(req.Context()) + rw.WriteHeader(http.StatusOK) } diff --git a/http/server_utils.go b/http/server_utils.go new file mode 100644 index 0000000000..df7b88daf1 --- /dev/null +++ b/http/server_utils.go @@ -0,0 +1,31 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "encoding/json" + "io" + "net/http" +) + +func requestJSON(req *http.Request, out any) error { + data, err := io.ReadAll(req.Body) + if err != nil { + return err + } + return json.Unmarshal(data, out) +} + +func responseJSON(rw http.ResponseWriter, status int, out any) { + rw.Header().Add("Content-Type", "application/json") + rw.WriteHeader(status) + json.NewEncoder(rw).Encode(out) //nolint:errcheck +} diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index f04843e458..98b775975e 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -22,7 +22,6 @@ import ( "time" badger "github.com/dgraph-io/badger/v4" - "github.com/gin-gonic/gin" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -159,9 +158,6 @@ func init() { if DetectDbChanges { detectDbChangesInit(repositoryValue, targetBranchValue) } - - // disable debug logs in HTTP routes - gin.SetMode(gin.TestMode) } func getBool(val string) bool { From b0eda375ca70e38b8680448271ca5a54d0d65654 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 11:34:16 -0500 Subject: [PATCH 031/107] increase test timeout --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5486876b7a..ff46bbf66d 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ ifdef BUILD_TAGS BUILD_FLAGS+=-tags $(BUILD_TAGS) endif -TEST_FLAGS=-race -shuffle=on -timeout 210s +TEST_FLAGS=-race -shuffle=on -timeout 300s PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations From b9dfeb1e2e2208be140027bdde3f806fcb25878b Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 14:44:39 -0500 Subject: [PATCH 032/107] 
move compare logic for integration tests. implement AnyOf comparison tests --- events/publisher.go | 4 +- http/server.go | 19 +-- http/wrapper.go | 10 +- http/wrapper_tx.go | 2 + tests/integration/compare.go | 321 +++++++++++++++++++++++++++++++++++ tests/integration/explain.go | 6 +- tests/integration/lens.go | 24 +-- tests/integration/p2p.go | 5 - tests/integration/utils2.go | 296 ++++---------------------------- 9 files changed, 387 insertions(+), 300 deletions(-) create mode 100644 tests/integration/compare.go diff --git a/events/publisher.go b/events/publisher.go index 73ea086890..2d2d93db60 100644 --- a/events/publisher.go +++ b/events/publisher.go @@ -10,9 +10,7 @@ package events -import ( - "time" -) +import "time" // time limit we set for the client to read after publishing. var clientTimeout = 60 * time.Second diff --git a/http/server.go b/http/server.go index bcb9295b52..23a4774df8 100644 --- a/http/server.go +++ b/http/server.go @@ -42,17 +42,16 @@ func NewServer(db client.DB) *Server { router.Use(middleware.Logger) router.Use(middleware.Recoverer) - apiMiddleware := func(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - ctx := req.Context() - ctx = context.WithValue(ctx, dbContextKey, db) - ctx = context.WithValue(ctx, txsContextKey, txs) - next.ServeHTTP(rw, req.WithContext(ctx)) - }) - } - router.Route("/api/v0", func(api chi.Router) { - api.Use(apiMiddleware, TransactionMiddleware, StoreMiddleware) + api.Use(func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + ctx = context.WithValue(ctx, dbContextKey, db) + ctx = context.WithValue(ctx, txsContextKey, txs) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) + }) + api.Use(TransactionMiddleware, StoreMiddleware) api.Route("/tx", func(tx chi.Router) { tx.Post("/", txHandler.NewTxn) tx.Post("/concurrent", txHandler.NewConcurrentTxn) diff --git a/http/wrapper.go b/http/wrapper.go index ad7edaac3a..8b3e01d84d 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -22,11 +22,10 @@ import ( "github.com/sourcenetwork/defradb/events" ) -var ( - _ client.Store = (*Wrapper)(nil) - _ client.DB = (*Wrapper)(nil) -) +var _ client.DB = (*Wrapper)(nil) +// Wrapper combines an HTTP client and server into a +// single struct that implements the client.DB interface. type Wrapper struct { db client.DB server *Server @@ -148,8 +147,7 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { - client := w.client.http.withTxn(tx.ID()) - return &StoreClient{client} + return w.client.WithTxn(tx) } func (w *Wrapper) Root() datastore.RootStore { diff --git a/http/wrapper_tx.go b/http/wrapper_tx.go index 7a357f8f7e..7c77b938f5 100644 --- a/http/wrapper_tx.go +++ b/http/wrapper_tx.go @@ -18,6 +18,8 @@ import ( var _ datastore.Txn = (*TxWrapper)(nil) +// TxWrapper combines a client and server transaction into +// a single struct that implements the datastore.Txn interface. type TxWrapper struct { server datastore.Txn client datastore.Txn diff --git a/tests/integration/compare.go b/tests/integration/compare.go new file mode 100644 index 0000000000..6db4c9a1b2 --- /dev/null +++ b/tests/integration/compare.go @@ -0,0 +1,321 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "encoding/json" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" +) + +// AnyOf may be used as `Results` field where the value may +// be one of several values, yet the value of that field must be the same +// across all nodes due to strong eventual consistency. +type AnyOf []any + +// resultsAreAnyOf returns true if any of the expected results are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +func resultsAreAnyOf(expected AnyOf, actual any) bool { + for _, v := range expected { + if resultsAreEqual(v, actual) { + return true + } + } + return false +} + +// resultsAreEqual returns true if the expected and actual results are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +func resultsAreEqual(expected any, actual any) bool { + switch expectedVal := expected.(type) { + case map[string]any: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.(map[string]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for k, v := range expectedVal { + if !resultsAreEqual(v, actualVal[k]) { + return false + } + } + return true + case []int64: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []uint64: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []float64: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []string: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []bool: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []any: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return 
assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []map[string]any: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case uint64, uint32, uint16, uint8, uint, int64, int32, int16, int8, int: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Int64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case float32, float64: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Float64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case []immutable.Option[float64]: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []immutable.Option[uint64]: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []immutable.Option[int64]: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []immutable.Option[bool]: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case []immutable.Option[string]: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for i, v := range expectedVal { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true + case immutable.Option[float64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[uint64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[int64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return 
resultsAreEqual(expected, actual) + case immutable.Option[bool]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[string]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + default: + return assert.ObjectsAreEqualValues(expected, actual) + } +} diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 7a533c9190..76243702a7 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -166,7 +166,7 @@ func assertExplainRequestResults( require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) for index, actualResult := range resultantData { if lengthOfExpectedFullGraph > index { - assertRequestResultsData(t, actualResult, action.ExpectedFullGraph[index]) + assertResultsEqual(t, action.ExpectedFullGraph[index], actualResult, description) } } } @@ -179,7 +179,7 @@ func assertExplainRequestResults( for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) - assertRequestResultsData(t, actualResultWithoutAttributes, action.ExpectedPatterns[index]) + assertResultsEqual(t, action.ExpectedPatterns[index], actualResultWithoutAttributes, description) } } @@ -214,7 +214,7 @@ func assertExplainTargetCase( ) } - assertRequestResultsData(t, foundActualTarget, targetCase.ExpectedAttributes) + assertResultsEqual(t, targetCase.ExpectedAttributes, foundActualTarget, description) } } diff --git a/tests/integration/lens.go b/tests/integration/lens.go index 257f8cfa94..29810e9e78 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -77,31 +77,31 @@ func getMigrations( require.NoError(s.t, err) require.Equal(s.t, len(configs), len(action.ExpectedResults)) - for _, expectedConfig := range action.ExpectedResults { - var actualConfig client.LensConfig - var actualConfigFound bool + for _, expected := range action.ExpectedResults { + var actual client.LensConfig + var actualFound bool for _, config := range configs { - if config.SourceSchemaVersionID != expectedConfig.SourceSchemaVersionID { + if config.SourceSchemaVersionID != expected.SourceSchemaVersionID { continue } - if config.DestinationSchemaVersionID != expectedConfig.DestinationSchemaVersionID { + if config.DestinationSchemaVersionID != expected.DestinationSchemaVersionID { continue } - actualConfig = config - actualConfigFound = true + actual = config + actualFound = true } - require.True(s.t, actualConfigFound, "matching lens config not found") - require.Equal(s.t, len(actualConfig.Lenses), len(expectedConfig.Lenses)) + require.True(s.t, actualFound, "matching lens config not found") + require.Equal(s.t, len(expected.Lenses), len(actual.Lenses)) - for j, expectedLens := range actualConfig.Lenses { - actualLens := actualConfig.Lenses[j] + for j, actualLens := range actual.Lenses { + expectedLens := expected.Lenses[j] assert.Equal(s.t, expectedLens.Inverse, actualLens.Inverse) assert.Equal(s.t, expectedLens.Path, actualLens.Path) - assertRequestResultsData(s.t, expectedLens.Arguments, actualLens.Arguments) + assertResultsEqual(s.t, expectedLens.Arguments, actualLens.Arguments) } } } diff --git a/tests/integration/p2p.go b/tests/integration/p2p.go index 24d20d8c31..311a088c86 100644 --- a/tests/integration/p2p.go +++ 
b/tests/integration/p2p.go @@ -121,11 +121,6 @@ type GetAllP2PCollections struct { // node 1 to see if it has been replicated. type WaitForSync struct{} -// AnyOf may be used as `Results` field where the value may -// be one of several values, yet the value of that field must be the same -// across all nodes due to strong eventual consistency. -type AnyOf []any - // connectPeers connects two existing, started, nodes as peers. It returns a channel // that will receive an empty struct upon sync completion of all expected peer-sync events. // diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 98b775975e..30f3b9d3cb 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -12,7 +12,6 @@ package tests import ( "context" - "encoding/json" "fmt" "os" "path" @@ -1325,6 +1324,7 @@ func executeRequest( db := getStore(s, node.DB, action.TransactionID, action.ExpectedError) result := db.ExecRequest(s.ctx, action.Request) + anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( s.ctx, s.t, @@ -1333,6 +1333,7 @@ func executeRequest( action.Results, action.ExpectedError, nodeID, + anyOfByFieldKey, ) } @@ -1400,6 +1401,7 @@ func executeSubscriptionRequest( action.ExpectedError, // anyof is not yet supported by subscription requests 0, + map[docFieldKey][]any{}, ) assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised) @@ -1458,6 +1460,12 @@ func AssertErrors( return false } +// docFieldKey is an internal key type that wraps docIndex and fieldName +type docFieldKey struct { + docIndex int + fieldName string +} + func assertRequestResults( ctx context.Context, t *testing.T, @@ -1466,6 +1474,7 @@ func assertRequestResults( expectedResults []map[string]any, expectedError string, nodeID int, + anyOfByField map[docFieldKey][]any, ) bool { if AssertErrors(t, description, result.Errors, expectedError) { return true @@ -1480,275 +1489,40 @@ func assertRequestResults( log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) - // compare results - assert.Equal(t, len(expectedResults), len(resultantData), description) + require.Equal(t, len(expectedResults), len(resultantData), description) + for docIndex, result := range resultantData { expectedResult := expectedResults[docIndex] - assertRequestResultsData(t, result, expectedResult) - } - - return false -} - -func assertRequestResultsData(t *testing.T, actual any, expected any) { - switch expectedVal := expected.(type) { - case AnyOf: - return // TODO - case map[string]any: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.(map[string]any) - if !ok { - break - } + for field, actualValue := range result { + expectedValue := expectedResult[field] - require.Equal(t, len(actualVal), len(expectedVal)) - for k, v := range expectedVal { - assertRequestResultsData(t, actualVal[k], v) - } - return - case []int64: - if len(expectedVal) == 0 && actual == nil { - return - } + switch r := expectedValue.(type) { + case AnyOf: + assertResultsAnyOf(t, r, actualValue) - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []uint64: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, 
actualVal[i], v) - } - return - case []float64: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []string: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []bool: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []any: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []map[string]any: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case uint64, uint32, int64, int32, int, uint: - jsonNum, ok := actual.(json.Number) - if !ok { - break - } - - actualVal, err := jsonNum.Int64() - require.NoError(t, err) - actual = actualVal - case float32, float64: - jsonNum, ok := actual.(json.Number) - if !ok { - break - } - - actualVal, err := jsonNum.Float64() - require.NoError(t, err) - actual = actualVal - case []immutable.Option[float64]: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []immutable.Option[uint64]: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []immutable.Option[int64]: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []immutable.Option[bool]: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case []immutable.Option[string]: - if len(expectedVal) == 0 && actual == nil { - return - } - - actualVal, ok := actual.([]any) - if !ok { - break - } - - require.Equal(t, len(actualVal), len(expectedVal)) - for i, v := range expectedVal { - assertRequestResultsData(t, actualVal[i], v) - } - return - case immutable.Option[float64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - - assertRequestResultsData(t, actual, expected) - return - case immutable.Option[uint64]: - if 
expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil + dfk := docFieldKey{docIndex, field} + valueSet := anyOfByField[dfk] + valueSet = append(valueSet, actualValue) + anyOfByField[dfk] = valueSet + default: + assertResultsEqual(t, expectedValue, actualValue, fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex)) + } } + } - assertRequestResultsData(t, actual, expected) - return - case immutable.Option[int64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } + return false +} - assertRequestResultsData(t, actual, expected) - return - case immutable.Option[bool]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - case immutable.Option[string]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } +func assertResultsAnyOf(t *testing.T, expected AnyOf, actual any, msgAndArgs ...any) { + if !resultsAreAnyOf(expected, actual) { + assert.Contains(t, expected, actual, msgAndArgs...) } +} - assert.EqualValues(t, expected, actual) +func assertResultsEqual(t *testing.T, expected any, actual any, msgAndArgs ...any) { + if !resultsAreEqual(expected, actual) { + assert.EqualValues(t, expected, actual, msgAndArgs...) + } } func assertExpectedErrorRaised(t *testing.T, description string, expectedError string, wasRaised bool) { From 7a2ec0e26b3abe62bd33a53efffd92fe3e3dc2eb Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 15:07:14 -0500 Subject: [PATCH 033/107] more test cleanup --- tests/integration/explain.go | 21 ++++++++++++++++++--- tests/integration/utils2.go | 27 +++++++++++++++++++++++++-- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 76243702a7..c652d89709 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -166,7 +166,12 @@ func assertExplainRequestResults( require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) for index, actualResult := range resultantData { if lengthOfExpectedFullGraph > index { - assertResultsEqual(t, action.ExpectedFullGraph[index], actualResult, description) + assertResultsEqual( + t, + action.ExpectedFullGraph[index], + actualResult, + description, + ) } } } @@ -179,7 +184,12 @@ func assertExplainRequestResults( for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. 
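// Illustrative sketch (not part of the patch above): for fields declared as AnyOf,
// each node's actual value is appended under its (doc index, field name) key in
// anyOfByFieldKey, so a follow-up check can require every node to have settled on
// the same value. A minimal form of such a check, assuming it lives in this same
// test package, might look like the hypothetical helper below.
func exampleAnyOfValuesAgree(valuesByField map[docFieldKey][]any) bool {
	for _, values := range valuesByField {
		for _, v := range values {
			// all observed values for one (doc, field) pair must match the first
			if !resultsAreEqual(values[0], v) {
				return false
			}
		}
	}
	return true
}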
actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) - assertResultsEqual(t, action.ExpectedPatterns[index], actualResultWithoutAttributes, description) + assertResultsEqual( + t, + action.ExpectedPatterns[index], + actualResultWithoutAttributes, + description, + ) } } @@ -214,7 +224,12 @@ func assertExplainTargetCase( ) } - assertResultsEqual(t, targetCase.ExpectedAttributes, foundActualTarget, description) + assertResultsEqual( + t, + targetCase.ExpectedAttributes, + foundActualTarget, + description, + ) } } diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 30f3b9d3cb..2e8261979e 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -270,7 +270,13 @@ func GetDatabaseTypes() []DatabaseType { return databases } -func GetDatabase(s *state) (cdb client.DB, path string, err error) { +func GetDatabase(s *state) (client.DB, string, error) { + var ( + cdb client.DB + path string + err error + ) + switch s.dbt { case badgerIMType: cdb, err = NewBadgerMemoryDB(s.ctx, db.WithUpdateEvents()) @@ -280,14 +286,31 @@ func GetDatabase(s *state) (cdb client.DB, path string, err error) { case defraIMType: cdb, err = NewInMemoryDB(s.ctx) + + default: + return nil, "", fmt.Errorf("invalid database type: %v", s.dbt) + } + + if err != nil { + return nil, "", err } switch s.clientType { case httpClientType: cdb, err = http.NewWrapper(cdb) + + case goClientType: + // do nothing + + default: + return nil, "", fmt.Errorf("invalid client type: %v", s.dbt) + } + + if err != nil { + return nil, "", err } - return + return cdb, path, nil } // ExecuteTestCase executes the given TestCase against the configured database From 08543938be03a23be9950a19c27b8fb6c594b0d7 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 17:03:56 -0500 Subject: [PATCH 034/107] cleanup --- http/client.go | 11 ++++++----- http/client_store.go | 1 + http/client_tx.go | 1 + http/server.go | 6 +----- http/server_tx.go | 20 ++++++++++++-------- http/server_utils.go | 2 ++ 6 files changed, 23 insertions(+), 18 deletions(-) diff --git a/http/client.go b/http/client.go index 56874f9663..4033b304c0 100644 --- a/http/client.go +++ b/http/client.go @@ -17,7 +17,6 @@ import ( "io" "net/http" "net/url" - "sync/atomic" "github.com/sourcenetwork/defradb/datastore/badger/v4" ) @@ -29,7 +28,7 @@ type errorResponse struct { type httpClient struct { client *http.Client baseURL *url.URL - txValue atomic.Uint64 + txValue string } func newHttpClient(baseURL *url.URL) *httpClient { @@ -37,7 +36,6 @@ func newHttpClient(baseURL *url.URL) *httpClient { client: http.DefaultClient, baseURL: baseURL, } - client.txValue.Store(0) return &client } @@ -45,15 +43,18 @@ func (c *httpClient) withTxn(value uint64) *httpClient { client := httpClient{ client: c.client, baseURL: c.baseURL, + txValue: fmt.Sprintf("%d", value), } - client.txValue.Store(value) return &client } func (c *httpClient) setDefaultHeaders(req *http.Request) { req.Header.Set("Accept", "application/json") req.Header.Set("Content-Type", "application/json") - req.Header.Set(TX_HEADER_NAME, fmt.Sprintf("%d", c.txValue.Load())) + + if c.txValue != "" { + req.Header.Set(TX_HEADER_NAME, c.txValue) + } } func (c *httpClient) request(req *http.Request) error { diff --git a/http/client_store.go b/http/client_store.go index b0b383496e..123da87f47 100644 --- a/http/client_store.go +++ b/http/client_store.go @@ -29,6 +29,7 @@ import ( var _ client.Store = (*StoreClient)(nil) +// StoreClient implements the client.Store 
interface over HTTP. type StoreClient struct { http *httpClient } diff --git a/http/client_tx.go b/http/client_tx.go index c2a3807613..566896364d 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -20,6 +20,7 @@ import ( var _ datastore.Txn = (*TxClient)(nil) +// TxClient implements the datastore.Txn interface over HTTP. type TxClient struct { id uint64 http *httpClient diff --git a/http/server.go b/http/server.go index 23a4774df8..e8ff1fd81c 100644 --- a/http/server.go +++ b/http/server.go @@ -21,8 +21,6 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type H map[string]any - type Server struct { db client.DB router *chi.Mux @@ -32,14 +30,12 @@ type Server struct { func NewServer(db client.DB) *Server { txs := &sync.Map{} - txHandler := &TxHandler{txs} + txHandler := &TxHandler{} storeHandler := &StoreHandler{} collectionHandler := &CollectionHandler{} lensHandler := &LensHandler{} router := chi.NewRouter() - router.Use(middleware.RequestID) - router.Use(middleware.Logger) router.Use(middleware.Recoverer) router.Route("/api/v0", func(api chi.Router) { diff --git a/http/server_tx.go b/http/server_tx.go index 14cb99128d..8e8d5a8b0d 100644 --- a/http/server_tx.go +++ b/http/server_tx.go @@ -21,9 +21,7 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -type TxHandler struct { - txs *sync.Map -} +type TxHandler struct{} type CreateTxResponse struct { ID uint64 `json:"id"` @@ -31,6 +29,7 @@ type CreateTxResponse struct { func (h *TxHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { db := req.Context().Value(dbContextKey).(client.DB) + txs := req.Context().Value(txsContextKey).(*sync.Map) readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) tx, err := db.NewTxn(req.Context(), readOnly) @@ -38,12 +37,13 @@ func (h *TxHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - h.txs.Store(tx.ID(), tx) + txs.Store(tx.ID(), tx) responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } func (h *TxHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) { db := req.Context().Value(dbContextKey).(client.DB) + txs := req.Context().Value(txsContextKey).(*sync.Map) readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) tx, err := db.NewConcurrentTxn(req.Context(), readOnly) @@ -51,17 +51,19 @@ func (h *TxHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - h.txs.Store(tx.ID(), tx) + txs.Store(tx.ID(), tx) responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } func (h *TxHandler) Commit(rw http.ResponseWriter, req *http.Request) { + txs := req.Context().Value(txsContextKey).(*sync.Map) + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return } - txVal, ok := h.txs.Load(txId) + txVal, ok := txs.Load(txId) if !ok { responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return @@ -71,17 +73,19 @@ func (h *TxHandler) Commit(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) return } - h.txs.Delete(txId) + txs.Delete(txId) rw.WriteHeader(http.StatusOK) } func (h *TxHandler) Discard(rw http.ResponseWriter, req *http.Request) { + txs := req.Context().Value(txsContextKey).(*sync.Map) + txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil 
{ responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return } - txVal, ok := h.txs.LoadAndDelete(txId) + txVal, ok := txs.LoadAndDelete(txId) if !ok { responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) return diff --git a/http/server_utils.go b/http/server_utils.go index df7b88daf1..43c623a8cd 100644 --- a/http/server_utils.go +++ b/http/server_utils.go @@ -16,6 +16,8 @@ import ( "net/http" ) +type H map[string]any + func requestJSON(req *http.Request, out any) error { data, err := io.ReadAll(req.Body) if err != nil { From e89d0458e8a2de8955d5e17497558a378a549847 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 25 Aug 2023 18:12:56 -0500 Subject: [PATCH 035/107] add logger middleware --- http/logger.go | 52 ++++++++++++++++++++++++++++++++++++++++++++++ http/middleware.go | 12 +++++++++++ http/server.go | 12 ++--------- 3 files changed, 66 insertions(+), 10 deletions(-) create mode 100644 http/logger.go diff --git a/http/logger.go b/http/logger.go new file mode 100644 index 0000000000..d23f65e94a --- /dev/null +++ b/http/logger.go @@ -0,0 +1,52 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + "time" + + "github.com/go-chi/chi/v5/middleware" + + "github.com/sourcenetwork/defradb/logging" +) + +var log = logging.MustNewLogger("http") + +type logEntry struct { + req *http.Request +} + +var _ middleware.LogEntry = (*logEntry)(nil) + +func (e *logEntry) Write(status, bytes int, header http.Header, elapsed time.Duration, extra any) { + log.Info( + e.req.Context(), + "Request", + logging.NewKV("Method", e.req.Method), + logging.NewKV("Path", e.req.URL.Path), + logging.NewKV("Status", status), + logging.NewKV("LengthBytes", bytes), + logging.NewKV("ElapsedTime", elapsed.String()), + ) +} + +func (e *logEntry) Panic(v any, stack []byte) { + middleware.PrintPrettyStack(v) +} + +type logFormatter struct{} + +var _ middleware.LogFormatter = (*logFormatter)(nil) + +func (f *logFormatter) NewLogEntry(req *http.Request) middleware.LogEntry { + return &logEntry{req} +} diff --git a/http/middleware.go b/http/middleware.go index b92657561f..0aa38c5bfe 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -35,6 +35,18 @@ var ( colContextKey = contextKey("col") ) +// ApiMiddleware sets the required context values for all API requests. +func ApiMiddleware(db client.DB, txs *sync.Map) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + ctx := req.Context() + ctx = context.WithValue(ctx, dbContextKey, db) + ctx = context.WithValue(ctx, txsContextKey, txs) + next.ServeHTTP(rw, req.WithContext(ctx)) + }) + } +} + // TransactionMiddleware sets the transaction context for the current request. 
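// Illustrative sketch (not part of the patch above): ApiMiddleware stores the shared
// client.DB and the transaction *sync.Map on the request context, so any handler
// mounted under it can recover them the same way the tx handlers do. The handler
// name below is hypothetical.
func exampleContextHandler(rw http.ResponseWriter, req *http.Request) {
	db := req.Context().Value(dbContextKey).(client.DB)
	txs := req.Context().Value(txsContextKey).(*sync.Map)
	_, _ = db, txs // use db and txs to service the request
	rw.WriteHeader(http.StatusOK)
}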
func TransactionMiddleware(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { diff --git a/http/server.go b/http/server.go index e8ff1fd81c..beea6b42c3 100644 --- a/http/server.go +++ b/http/server.go @@ -11,7 +11,6 @@ package http import ( - "context" "net/http" "sync" @@ -36,18 +35,11 @@ func NewServer(db client.DB) *Server { lensHandler := &LensHandler{} router := chi.NewRouter() + router.Use(middleware.RequestLogger(&logFormatter{})) router.Use(middleware.Recoverer) router.Route("/api/v0", func(api chi.Router) { - api.Use(func(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - ctx := req.Context() - ctx = context.WithValue(ctx, dbContextKey, db) - ctx = context.WithValue(ctx, txsContextKey, txs) - next.ServeHTTP(rw, req.WithContext(ctx)) - }) - }) - api.Use(TransactionMiddleware, StoreMiddleware) + api.Use(ApiMiddleware(db, txs), TransactionMiddleware, StoreMiddleware) api.Route("/tx", func(tx chi.Router) { tx.Post("/", txHandler.NewTxn) tx.Post("/concurrent", txHandler.NewConcurrentTxn) From 51590d347af45eec3d79dd8918a765150ec29451 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Sun, 27 Aug 2023 19:30:58 -0700 Subject: [PATCH 036/107] add generic functions for array test helpers. rename compare to results --- tests/integration/compare.go | 321 ----------------------------------- tests/integration/explain.go | 59 +++---- tests/integration/results.go | 164 ++++++++++++++++++ 3 files changed, 192 insertions(+), 352 deletions(-) delete mode 100644 tests/integration/compare.go create mode 100644 tests/integration/results.go diff --git a/tests/integration/compare.go b/tests/integration/compare.go deleted file mode 100644 index 6db4c9a1b2..0000000000 --- a/tests/integration/compare.go +++ /dev/null @@ -1,321 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package tests - -import ( - "encoding/json" - - "github.com/sourcenetwork/immutable" - "github.com/stretchr/testify/assert" -) - -// AnyOf may be used as `Results` field where the value may -// be one of several values, yet the value of that field must be the same -// across all nodes due to strong eventual consistency. -type AnyOf []any - -// resultsAreAnyOf returns true if any of the expected results are of equal value. -// -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. -func resultsAreAnyOf(expected AnyOf, actual any) bool { - for _, v := range expected { - if resultsAreEqual(v, actual) { - return true - } - } - return false -} - -// resultsAreEqual returns true if the expected and actual results are of equal value. -// -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. 
-func resultsAreEqual(expected any, actual any) bool { - switch expectedVal := expected.(type) { - case map[string]any: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.(map[string]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for k, v := range expectedVal { - if !resultsAreEqual(v, actualVal[k]) { - return false - } - } - return true - case []int64: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []uint64: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []float64: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []string: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []bool: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []any: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []map[string]any: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case uint64, uint32, uint16, uint8, uint, int64, int32, int16, int8, int: - jsonNum, ok := actual.(json.Number) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - actualVal, err := jsonNum.Int64() - if err != nil { - return false - } - return assert.ObjectsAreEqualValues(expected, actualVal) - case float32, float64: - jsonNum, ok := actual.(json.Number) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - actualVal, err := jsonNum.Float64() - if err != nil { - return false - } - return 
assert.ObjectsAreEqualValues(expected, actualVal) - case []immutable.Option[float64]: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []immutable.Option[uint64]: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []immutable.Option[int64]: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []immutable.Option[bool]: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case []immutable.Option[string]: - if len(expectedVal) == 0 && actual == nil { - return true - } - actualVal, ok := actual.([]any) - if !ok { - return assert.ObjectsAreEqualValues(expected, actual) - } - if len(expectedVal) != len(actualVal) { - return false - } - for i, v := range expectedVal { - if !resultsAreEqual(v, actualVal[i]) { - return false - } - } - return true - case immutable.Option[float64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) - case immutable.Option[uint64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) - case immutable.Option[int64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) - case immutable.Option[bool]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) - case immutable.Option[string]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) - default: - return assert.ObjectsAreEqualValues(expected, actual) - } -} diff --git a/tests/integration/explain.go b/tests/integration/explain.go index c652d89709..1f7fefaf45 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -314,43 +314,40 @@ func findTargetNode( } case []any: - for _, item := range r { - target, matches, found := findTargetNode( - targetName, - toSkip, - includeChildNodes, - item, - ) + return findTargetNodeFromArray[any](targetName, toSkip, includeChildNodes, r) - totalMatchedSoFar = totalMatchedSoFar + matches - toSkip -= matches + case []map[string]any: + return findTargetNodeFromArray[map[string]any](targetName, toSkip, includeChildNodes, r) + } - if found { - if includeChildNodes { - return target, 
totalMatchedSoFar, true - } - return trimSubNodes(target), totalMatchedSoFar, true - } - } + return nil, totalMatchedSoFar, false +} - case []map[string]any: - for _, item := range r { - target, matches, found := findTargetNode( - targetName, - toSkip, - includeChildNodes, - item, - ) +// findTargetNodeFromArray runs findTargetNode for each item of an array. +func findTargetNodeFromArray[T any]( + targetName string, + toSkip uint, + includeChildNodes bool, + actualResult []T, +) (any, uint, bool) { + var totalMatchedSoFar uint = 0 + + for _, item := range actualResult { + target, matches, found := findTargetNode( + targetName, + toSkip, + includeChildNodes, + item, + ) - totalMatchedSoFar = totalMatchedSoFar + matches - toSkip -= matches + totalMatchedSoFar = totalMatchedSoFar + matches + toSkip -= matches - if found { - if includeChildNodes { - return target, totalMatchedSoFar, true - } - return trimSubNodes(target), totalMatchedSoFar, true + if found { + if includeChildNodes { + return target, totalMatchedSoFar, true } + return trimSubNodes(target), totalMatchedSoFar, true } } diff --git a/tests/integration/results.go b/tests/integration/results.go new file mode 100644 index 0000000000..ba82e4f4b7 --- /dev/null +++ b/tests/integration/results.go @@ -0,0 +1,164 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "encoding/json" + + "github.com/sourcenetwork/immutable" + "github.com/stretchr/testify/assert" +) + +// AnyOf may be used as `Results` field where the value may +// be one of several values, yet the value of that field must be the same +// across all nodes due to strong eventual consistency. +type AnyOf []any + +// resultsAreAnyOf returns true if any of the expected results are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +func resultsAreAnyOf(expected AnyOf, actual any) bool { + for _, v := range expected { + if resultsAreEqual(v, actual) { + return true + } + } + return false +} + +// resultsAreEqual returns true if the expected and actual results are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. 
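// Illustrative sketch (not part of the patch above): AnyOf lets a test accept any
// one of several settled values for a field. resultsAreAnyOf reports a match when
// the actual value equals any listed candidate, with json.Number reduced to its
// underlying type first. The helper name is hypothetical.
func exampleAnyOfMatch() bool {
	// request results decode numbers as json.Number; either 1 or 2 is accepted
	return resultsAreAnyOf(AnyOf{int64(1), int64(2)}, json.Number("2")) // true
}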
+func resultsAreEqual(expected any, actual any) bool { + switch expectedVal := expected.(type) { + case map[string]any: + if len(expectedVal) == 0 && actual == nil { + return true + } + actualVal, ok := actual.(map[string]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expectedVal) != len(actualVal) { + return false + } + for k, v := range expectedVal { + if !resultsAreEqual(v, actualVal[k]) { + return false + } + } + return true + case uint64, uint32, uint16, uint8, uint, int64, int32, int16, int8, int: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Int64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case float32, float64: + jsonNum, ok := actual.(json.Number) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + actualVal, err := jsonNum.Float64() + if err != nil { + return false + } + return assert.ObjectsAreEqualValues(expected, actualVal) + case immutable.Option[float64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[uint64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[int64]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[bool]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case immutable.Option[string]: + if expectedVal.HasValue() { + expected = expectedVal.Value() + } else { + expected = nil + } + return resultsAreEqual(expected, actual) + case []int64: + return resultArraysAreEqual[int64](expectedVal, actual) + case []uint64: + return resultArraysAreEqual[uint64](expectedVal, actual) + case []float64: + return resultArraysAreEqual[float64](expectedVal, actual) + case []string: + return resultArraysAreEqual[string](expectedVal, actual) + case []bool: + return resultArraysAreEqual[bool](expectedVal, actual) + case []any: + return resultArraysAreEqual[any](expectedVal, actual) + case []map[string]any: + return resultArraysAreEqual[map[string]any](expectedVal, actual) + case []immutable.Option[float64]: + return resultArraysAreEqual[immutable.Option[float64]](expectedVal, actual) + case []immutable.Option[uint64]: + return resultArraysAreEqual[immutable.Option[uint64]](expectedVal, actual) + case []immutable.Option[int64]: + return resultArraysAreEqual[immutable.Option[int64]](expectedVal, actual) + case []immutable.Option[bool]: + return resultArraysAreEqual[immutable.Option[bool]](expectedVal, actual) + case []immutable.Option[string]: + return resultArraysAreEqual[immutable.Option[string]](expectedVal, actual) + default: + return assert.ObjectsAreEqualValues(expected, actual) + } +} + +// resultArraysAreEqual returns true if the array of expected results and actual results +// are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. 
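// Illustrative sketch (not part of the patch above): the typed-slice cases above all
// funnel into one generic helper, which compares a typed expected slice element-wise
// against the []any produced by JSON decoding. The helper name below is hypothetical;
// this call would report true.
func exampleArrayMatch() bool {
	return resultArraysAreEqual[int64](
		[]int64{1, 2},
		[]any{json.Number("1"), json.Number("2")},
	)
}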
+func resultArraysAreEqual[S any](expected []S, actual any) bool { + if len(expected) == 0 && actual == nil { + return true + } + actualVal, ok := actual.([]any) + if !ok { + return assert.ObjectsAreEqualValues(expected, actual) + } + if len(expected) != len(actualVal) { + return false + } + for i, v := range expected { + if !resultsAreEqual(v, actualVal[i]) { + return false + } + } + return true +} From cfd66a430c4f7cf829d1b9b6f474244007d30c00 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 28 Aug 2023 09:36:02 -0700 Subject: [PATCH 037/107] add generic helper for trimExplainAttributes --- tests/integration/explain.go | 40 +++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 1f7fefaf45..059bb36d43 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -323,7 +323,7 @@ func findTargetNode( return nil, totalMatchedSoFar, false } -// findTargetNodeFromArray runs findTargetNode for each item of an array. +// findTargetNodeFromArray is a helper that runs findTargetNode for each item in an array. func findTargetNodeFromArray[T any]( targetName string, toSkip uint, @@ -376,9 +376,9 @@ func trimSubNodes(graph any) any { func trimExplainAttributes( t *testing.T, description string, - actualResult map[string]any, + actualResult any, ) map[string]any { - trimmedMap := copyMap(actualResult) + trimmedMap := copyMap(actualResult.(map[string]any)) for key, value := range trimmedMap { if !isPlanNode(key) { @@ -391,24 +391,10 @@ func trimExplainAttributes( trimmedMap[key] = trimExplainAttributes(t, description, v) case []map[string]any: - trimmedArrayElements := []map[string]any{} - for _, valueItem := range v { - trimmedArrayElements = append( - trimmedArrayElements, - trimExplainAttributes(t, description, valueItem), - ) - } - trimmedMap[key] = trimmedArrayElements + trimmedMap[key] = trimExplainAttributesArray[map[string]any](t, description, v) case []any: - trimmedArrayElements := []map[string]any{} - for _, valueItem := range v { - trimmedArrayElements = append( - trimmedArrayElements, - trimExplainAttributes(t, description, valueItem.(map[string]any)), - ) - } - trimmedMap[key] = trimmedArrayElements + trimmedMap[key] = trimExplainAttributesArray[any](t, description, v) default: assert.Fail( @@ -422,6 +408,22 @@ func trimExplainAttributes( return trimmedMap } +// trimExplainAttributesArray is a helper that runs trimExplainAttributes for each item in an array. +func trimExplainAttributesArray[T any]( + t *testing.T, + description string, + actualResult []T, +) []map[string]any { + trimmedArrayElements := []map[string]any{} + for _, valueItem := range actualResult { + trimmedArrayElements = append( + trimmedArrayElements, + trimExplainAttributes(t, description, valueItem), + ) + } + return trimmedArrayElements +} + // isPlanNode returns true if someName matches a plan node name, retruns false otherwise. func isPlanNode(someName string) bool { _, isPlanNode := allPlanNodeNames[someName] From 13c98fca1a526891395d72dda84c2d253370a4ba Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 28 Aug 2023 15:58:25 -0700 Subject: [PATCH 038/107] more cleanup. 
add PrintDump to http client --- http/client.go | 405 +++++++++++++++--- http/client_collection.go | 24 +- http/client_lens.go | 12 +- http/client_store.go | 380 ---------------- http/client_tx.go | 3 +- ...er_collection.go => handler_collection.go} | 98 ++--- http/{server_lens.go => handler_lens.go} | 34 +- http/{server_store.go => handler_store.go} | 184 ++++---- http/{server_tx.go => handler_tx.go} | 24 +- http/http_client.go | 88 ++++ http/server.go | 79 ++-- http/{server_utils.go => utils.go} | 4 +- http/wrapper.go | 4 +- 13 files changed, 681 insertions(+), 658 deletions(-) delete mode 100644 http/client_store.go rename http/{server_collection.go => handler_collection.go} (65%) rename http/{server_lens.go => handler_lens.go} (65%) rename http/{server_store.go => handler_store.go} (68%) rename http/{server_tx.go => handler_tx.go} (70%) create mode 100644 http/http_client.go rename http/{server_utils.go => utils.go} (93%) diff --git a/http/client.go b/http/client.go index 4033b304c0..8167f693d1 100644 --- a/http/client.go +++ b/http/client.go @@ -12,105 +12,408 @@ package http import ( "bytes" + "context" "encoding/json" "fmt" "io" "net/http" "net/url" + "strings" - "github.com/sourcenetwork/defradb/datastore/badger/v4" + blockstore "github.com/ipfs/boxo/blockstore" + sse "github.com/vito/go-sse/sse" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" ) -type errorResponse struct { - Error string `json:"error"` +var _ client.DB = (*Client)(nil) + +// Client implements the client.DB interface over HTTP. +type Client struct { + http *httpClient } -type httpClient struct { - client *http.Client - baseURL *url.URL - txValue string +func NewClient(rawURL string) (*Client, error) { + baseURL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + httpClient := newHttpClient(baseURL.JoinPath("/api/v0")) + return &Client{httpClient}, nil } -func newHttpClient(baseURL *url.URL) *httpClient { - client := httpClient{ - client: http.DefaultClient, - baseURL: baseURL, +func (c *Client) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("read_only", "true") + } + + methodURL := c.http.baseURL.JoinPath("tx") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err } - return &client + return &TxClient{txRes.ID, c.http}, nil } -func (c *httpClient) withTxn(value uint64) *httpClient { - client := httpClient{ - client: c.client, - baseURL: c.baseURL, - txValue: fmt.Sprintf("%d", value), +func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + query := url.Values{} + if readOnly { + query.Add("read_only", "true") } - return &client + + methodURL := c.http.baseURL.JoinPath("tx", "concurrent") + methodURL.RawQuery = query.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) + if err != nil { + return nil, err + } + var txRes CreateTxResponse + if err := c.http.requestJson(req, &txRes); err != nil { + return nil, err + } + return &TxClient{txRes.ID, c.http}, nil +} + +func (c *Client) WithTxn(tx datastore.Txn) client.Store { + client := c.http.withTxn(tx.ID()) + return &Client{client} } -func (c 
*httpClient) setDefaultHeaders(req *http.Request) { - req.Header.Set("Accept", "application/json") - req.Header.Set("Content-Type", "application/json") +func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") - if c.txValue != "" { - req.Header.Set(TX_HEADER_NAME, c.txValue) + body, err := json.Marshal(rep) + if err != nil { + return err } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err } -func (c *httpClient) request(req *http.Request) error { - c.setDefaultHeaders(req) +func (c *Client) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") - res, err := c.client.Do(req) + body, err := json.Marshal(rep) if err != nil { return err } - defer res.Body.Close() //nolint:errcheck + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} - data, err := io.ReadAll(res.Body) +func (c *Client) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "replicators") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var reps []client.Replicator + if err := c.http.requestJson(req, &reps); err != nil { + return nil, err + } + return reps, nil +} + +func (c *Client) AddP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) if err != nil { return err } - if res.StatusCode == http.StatusOK { - return nil + _, err = c.http.request(req) + return err +} + +func (c *Client) RemoveP2PCollection(ctx context.Context, collectionID string) error { + methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) + + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) + if err != nil { + return err } + _, err = c.http.request(req) + return err +} - var errRes errorResponse - if err := json.Unmarshal(data, &errRes); err != nil { - return fmt.Errorf("%s", data) +func (c *Client) GetAllP2PCollections(ctx context.Context) ([]string, error) { + methodURL := c.http.baseURL.JoinPath("p2p", "collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err } - if errRes.Error == badger.ErrTxnConflict.Error() { - return badger.ErrTxnConflict + var cols []string + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err } - return fmt.Errorf("%s", errRes.Error) + return cols, nil } -func (c *httpClient) requestJson(req *http.Request, out any) error { - c.setDefaultHeaders(req) +func (c *Client) BasicImport(ctx context.Context, filepath string) error { + methodURL := c.http.baseURL.JoinPath("backup", "import") - res, err := c.client.Do(req) + body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) if err != nil { return err } - defer res.Body.Close() //nolint:errcheck + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err 
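// Illustrative usage sketch (not part of the patch above): a transaction opened with
// NewTxn can be bound to the client with WithTxn, so the returned Store issues its
// requests against that server-side transaction. The function and variable names
// here are hypothetical.
func exampleWithTxn(ctx context.Context, c *Client) error {
	tx, err := c.NewTxn(ctx, false)
	if err != nil {
		return err
	}
	store := c.WithTxn(tx) // requests made through store carry the transaction header
	_ = store              // ... perform reads and writes through store ...
	return tx.Commit(ctx)
}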
+} - data, err := io.ReadAll(res.Body) +func (c *Client) BasicExport(ctx context.Context, config *client.BackupConfig) error { + methodURL := c.http.baseURL.JoinPath("backup", "export") + + body, err := json.Marshal(config) + if err != nil { + return err + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) if err != nil { return err } + _, err = c.http.request(req) + return err +} + +func (c *Client) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) + if err != nil { + return nil, err + } + var cols []client.CollectionDescription + if err := c.http.requestJson(req, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (c *Client) PatchSchema(ctx context.Context, patch string) error { + methodURL := c.http.baseURL.JoinPath("schema") + + req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) + if err != nil { + return err + } + _, err = c.http.request(req) + return err +} + +func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) error { + return c.LensRegistry().SetMigration(ctx, config) +} + +func (c *Client) LensRegistry() client.LensRegistry { + return &LensClient{c.http} +} + +func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &CollectionClient{c.http, description}, nil +} + +func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &CollectionClient{c.http, description}, nil +} + +func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var description client.CollectionDescription + if err := c.http.requestJson(req, &description); err != nil { + return nil, err + } + return &CollectionClient{c.http, description}, nil +} + +func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + methodURL := c.http.baseURL.JoinPath("collections") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var descriptions []client.CollectionDescription + if err := c.http.requestJson(req, &descriptions); err != nil { + return nil, err + } + collections := 
make([]client.Collection, len(descriptions)) + for i, d := range descriptions { + collections[i] = &CollectionClient{c.http, d} + } + return collections, nil +} + +func (c *Client) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + methodURL := c.http.baseURL.JoinPath("indexes") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return nil, err + } + var indexes map[client.CollectionName][]client.IndexDescription + if err := c.http.requestJson(req, &indexes); err != nil { + return nil, err + } + return indexes, nil +} + +func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestResult { + methodURL := c.http.baseURL.JoinPath("graphql") + result := &client.RequestResult{} + + body, err := json.Marshal(&GraphQLRequest{query}) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + c.http.setDefaultHeaders(req) - dec := json.NewDecoder(bytes.NewBuffer(data)) - dec.UseNumber() + res, err := c.http.client.Do(req) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + if res.Header.Get("Content-Type") == "text/event-stream" { + result.Pub = c.execRequestSubscription(ctx, res.Body) + return result + } + defer res.Body.Close() //nolint:errcheck - if res.StatusCode == http.StatusOK { - return dec.Decode(out) + data, err := io.ReadAll(res.Body) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + var response GraphQLResponse + if err = json.Unmarshal(data, &response); err != nil { + result.GQL.Errors = []error{err} + return result + } + result.GQL.Data = response.Data + for _, err := range response.Errors { + result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) } + return result +} - var errRes errorResponse - if err := json.Unmarshal(data, &errRes); err != nil { - return fmt.Errorf("%s", data) +func (c *Client) execRequestSubscription(ctx context.Context, r io.ReadCloser) *events.Publisher[events.Update] { + pubCh := events.New[events.Update](0, 0) + pub, err := events.NewPublisher[events.Update](pubCh, 0) + if err != nil { + return nil } - if errRes.Error == badger.ErrTxnConflict.Error() { - return badger.ErrTxnConflict + + go func() { + eventReader := sse.NewReadCloser(r) + defer eventReader.Close() //nolint:errcheck + + for { + evt, err := eventReader.Next() + if err != nil { + return + } + var response GraphQLResponse + if err := json.Unmarshal(evt.Data, &response); err != nil { + return + } + var errors []error + for _, err := range response.Errors { + errors = append(errors, fmt.Errorf(err)) + } + pub.Publish(client.GQLResult{ + Errors: errors, + Data: response.Data, + }) + } + }() + + return pub +} + +func (c *Client) PrintDump(ctx context.Context) error { + methodURL := c.http.baseURL.JoinPath("debug", "dump") + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) + if err != nil { + return err } - return fmt.Errorf("%s", errRes.Error) + _, err = c.http.request(req) + return err +} + +func (c *Client) Close(ctx context.Context) { + // do nothing +} + +func (c *Client) Root() datastore.RootStore { + panic("client side database") +} + +func (c *Client) Blockstore() blockstore.Blockstore { + panic("client side database") +} + +func (c *Client) Events() events.Events { + panic("client side 
database") +} + +func (c *Client) MaxTxnRetries() int { + panic("client side database") } diff --git a/http/client_collection.go b/http/client_collection.go index 89534043c4..e54325e2bf 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -33,13 +33,6 @@ type CollectionClient struct { desc client.CollectionDescription } -func NewCollectionClient(httpClient *httpClient, desc client.CollectionDescription) *CollectionClient { - return &CollectionClient{ - http: httpClient, - desc: desc, - } -} - func (c *CollectionClient) Description() client.CollectionDescription { return c.desc } @@ -71,7 +64,8 @@ func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) err if err != nil { return err } - if err := c.http.request(req); err != nil { + _, err = c.http.request(req) + if err != nil { return err } doc.Clean() @@ -97,7 +91,8 @@ func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Docume if err != nil { return err } - if err := c.http.request(req); err != nil { + _, err = c.http.request(req) + if err != nil { return err } for _, doc := range docs { @@ -126,7 +121,8 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err if err != nil { return err } - if err := c.http.request(req); err != nil { + _, err = c.http.request(req) + if err != nil { return err } doc.Clean() @@ -153,7 +149,8 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error if err != nil { return err } - if err := c.http.request(req); err != nil { + _, err = c.http.request(req) + if err != nil { return err } doc.Clean() @@ -167,7 +164,7 @@ func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bo if err != nil { return false, err } - err = c.http.request(req) + _, err = c.http.request(req) if err != nil { return false, err } @@ -404,7 +401,8 @@ func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) erro if err != nil { return err } - return c.http.request(req) + _, err = c.http.request(req) + return err } func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { diff --git a/http/client_lens.go b/http/client_lens.go index c8154ffdc8..8520a8401f 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -29,10 +29,6 @@ type LensClient struct { http *httpClient } -func NewLensClient(httpClient *httpClient) *LensClient { - return &LensClient{httpClient} -} - func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { http := c.http.withTxn(tx.ID()) return &LensClient{http} @@ -49,7 +45,8 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) if err != nil { return err } - return c.http.request(req) + _, err = c.http.request(req) + return err } func (c *LensClient) ReloadLenses(ctx context.Context) error { @@ -59,7 +56,8 @@ func (c *LensClient) ReloadLenses(ctx context.Context) error { if err != nil { return err } - return c.http.request(req) + _, err = c.http.request(req) + return err } func (c *LensClient) MigrateUp( @@ -127,7 +125,7 @@ func (c *LensClient) HasMigration(ctx context.Context, schemaVersionID string) ( if err != nil { return false, err } - err = c.http.request(req) + _, err = c.http.request(req) if err != nil { return false, err } diff --git a/http/client_store.go b/http/client_store.go deleted file mode 100644 index 123da87f47..0000000000 --- a/http/client_store.go +++ /dev/null @@ -1,380 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed 
by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/url" - "strings" - - sse "github.com/vito/go-sse/sse" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" -) - -var _ client.Store = (*StoreClient)(nil) - -// StoreClient implements the client.Store interface over HTTP. -type StoreClient struct { - http *httpClient -} - -func NewStoreClient(rawURL string) (*StoreClient, error) { - baseURL, err := url.Parse(rawURL) - if err != nil { - return nil, err - } - httpClient := newHttpClient(baseURL.JoinPath("/api/v0")) - return &StoreClient{httpClient}, nil -} - -func (c *StoreClient) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { - query := url.Values{} - if readOnly { - query.Add("read_only", "true") - } - - methodURL := c.http.baseURL.JoinPath("tx") - methodURL.RawQuery = query.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) - if err != nil { - return nil, err - } - var txRes CreateTxResponse - if err := c.http.requestJson(req, &txRes); err != nil { - return nil, err - } - return &TxClient{txRes.ID, c.http}, nil -} - -func (c *StoreClient) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { - query := url.Values{} - if readOnly { - query.Add("read_only", "true") - } - - methodURL := c.http.baseURL.JoinPath("tx", "concurrent") - methodURL.RawQuery = query.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) - if err != nil { - return nil, err - } - var txRes CreateTxResponse - if err := c.http.requestJson(req, &txRes); err != nil { - return nil, err - } - return &TxClient{txRes.ID, c.http}, nil -} - -func (c *StoreClient) WithTxn(tx datastore.Txn) client.Store { - client := c.http.withTxn(tx.ID()) - return &StoreClient{client} -} - -func (c *StoreClient) SetReplicator(ctx context.Context, rep client.Replicator) error { - methodURL := c.http.baseURL.JoinPath("p2p", "replicators") - - body, err := json.Marshal(rep) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) DeleteReplicator(ctx context.Context, rep client.Replicator) error { - methodURL := c.http.baseURL.JoinPath("p2p", "replicators") - - body, err := json.Marshal(rep) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { - methodURL := c.http.baseURL.JoinPath("p2p", "replicators") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var reps []client.Replicator - if err := c.http.requestJson(req, &reps); err != nil { - return nil, err - } - return reps, nil -} - -func (c *StoreClient) AddP2PCollection(ctx context.Context, collectionID 
string) error { - methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) RemoveP2PCollection(ctx context.Context, collectionID string) error { - methodURL := c.http.baseURL.JoinPath("p2p", "collections", collectionID) - - req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) GetAllP2PCollections(ctx context.Context) ([]string, error) { - methodURL := c.http.baseURL.JoinPath("p2p", "collections") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var cols []string - if err := c.http.requestJson(req, &cols); err != nil { - return nil, err - } - return cols, nil -} - -func (c *StoreClient) BasicImport(ctx context.Context, filepath string) error { - methodURL := c.http.baseURL.JoinPath("backup", "import") - - body, err := json.Marshal(&client.BackupConfig{Filepath: filepath}) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) BasicExport(ctx context.Context, config *client.BackupConfig) error { - methodURL := c.http.baseURL.JoinPath("backup", "export") - - body, err := json.Marshal(config) - if err != nil { - return err - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) AddSchema(ctx context.Context, schema string) ([]client.CollectionDescription, error) { - methodURL := c.http.baseURL.JoinPath("schema") - - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), strings.NewReader(schema)) - if err != nil { - return nil, err - } - var cols []client.CollectionDescription - if err := c.http.requestJson(req, &cols); err != nil { - return nil, err - } - return cols, nil -} - -func (c *StoreClient) PatchSchema(ctx context.Context, patch string) error { - methodURL := c.http.baseURL.JoinPath("schema") - - req, err := http.NewRequestWithContext(ctx, http.MethodPatch, methodURL.String(), strings.NewReader(patch)) - if err != nil { - return err - } - return c.http.request(req) -} - -func (c *StoreClient) SetMigration(ctx context.Context, config client.LensConfig) error { - return c.LensRegistry().SetMigration(ctx, config) -} - -func (c *StoreClient) LensRegistry() client.LensRegistry { - return NewLensClient(c.http) -} - -func (c *StoreClient) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { - methodURL := c.http.baseURL.JoinPath("collections") - methodURL.RawQuery = url.Values{"name": []string{name}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var description client.CollectionDescription - if err := c.http.requestJson(req, &description); err != nil { - return nil, err - } - return NewCollectionClient(c.http, description), nil -} - -func (c *StoreClient) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { - methodURL := c.http.baseURL.JoinPath("collections") - 
methodURL.RawQuery = url.Values{"schema_id": []string{schemaId}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var description client.CollectionDescription - if err := c.http.requestJson(req, &description); err != nil { - return nil, err - } - return NewCollectionClient(c.http, description), nil -} - -func (c *StoreClient) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { - methodURL := c.http.baseURL.JoinPath("collections") - methodURL.RawQuery = url.Values{"version_id": []string{versionId}}.Encode() - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var description client.CollectionDescription - if err := c.http.requestJson(req, &description); err != nil { - return nil, err - } - return NewCollectionClient(c.http, description), nil -} - -func (c *StoreClient) GetAllCollections(ctx context.Context) ([]client.Collection, error) { - methodURL := c.http.baseURL.JoinPath("collections") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var descriptions []client.CollectionDescription - if err := c.http.requestJson(req, &descriptions); err != nil { - return nil, err - } - collections := make([]client.Collection, len(descriptions)) - for i, d := range descriptions { - collections[i] = NewCollectionClient(c.http, d) - } - return collections, nil -} - -func (c *StoreClient) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { - methodURL := c.http.baseURL.JoinPath("indexes") - - req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) - if err != nil { - return nil, err - } - var indexes map[client.CollectionName][]client.IndexDescription - if err := c.http.requestJson(req, &indexes); err != nil { - return nil, err - } - return indexes, nil -} - -func (c *StoreClient) ExecRequest(ctx context.Context, query string) *client.RequestResult { - methodURL := c.http.baseURL.JoinPath("graphql") - result := &client.RequestResult{} - - body, err := json.Marshal(&GraphQLRequest{query}) - if err != nil { - result.GQL.Errors = []error{err} - return result - } - req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) - if err != nil { - result.GQL.Errors = []error{err} - return result - } - c.http.setDefaultHeaders(req) - - res, err := c.http.client.Do(req) - if err != nil { - result.GQL.Errors = []error{err} - return result - } - if res.Header.Get("Content-Type") == "text/event-stream" { - result.Pub = c.execRequestSubscription(ctx, res.Body) - return result - } - defer res.Body.Close() //nolint:errcheck - - data, err := io.ReadAll(res.Body) - if err != nil { - result.GQL.Errors = []error{err} - return result - } - var response GraphQLResponse - if err = json.Unmarshal(data, &response); err != nil { - result.GQL.Errors = []error{err} - return result - } - result.GQL.Data = response.Data - for _, err := range response.Errors { - result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) - } - return result -} - -func (c *StoreClient) execRequestSubscription(ctx context.Context, r io.ReadCloser) *events.Publisher[events.Update] { - pubCh := events.New[events.Update](0, 0) - pub, err := events.NewPublisher[events.Update](pubCh, 0) - if err != nil { - return nil - } - - go func() { - eventReader := 
sse.NewReadCloser(r) - defer eventReader.Close() //nolint:errcheck - - for { - evt, err := eventReader.Next() - if err != nil { - return - } - var response GraphQLResponse - if err := json.Unmarshal(evt.Data, &response); err != nil { - return - } - var errors []error - for _, err := range response.Errors { - errors = append(errors, fmt.Errorf(err)) - } - pub.Publish(client.GQLResult{ - Errors: errors, - Data: response.Data, - }) - } - }() - - return pub -} diff --git a/http/client_tx.go b/http/client_tx.go index 566896364d..7592333f3b 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -37,7 +37,8 @@ func (c *TxClient) Commit(ctx context.Context) error { if err != nil { return err } - return c.http.request(req) + _, err = c.http.request(req) + return err } func (c *TxClient) Discard(ctx context.Context) { diff --git a/http/server_collection.go b/http/handler_collection.go similarity index 65% rename from http/server_collection.go rename to http/handler_collection.go index dc7d1c0a3b..50af88d319 100644 --- a/http/server_collection.go +++ b/http/handler_collection.go @@ -21,7 +21,7 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type CollectionHandler struct{} +type collectionHandler struct{} type CollectionDeleteRequest struct { Key string `json:"key"` @@ -36,12 +36,12 @@ type CollectionUpdateRequest struct { Updater string `json:"updater"` } -func (s *CollectionHandler) Create(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var body any if err := requestJSON(req, &body); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } @@ -51,59 +51,59 @@ func (s *CollectionHandler) Create(rw http.ResponseWriter, req *http.Request) { for _, docMap := range t { doc, err := client.NewDocFromMap(docMap) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } docList = append(docList, doc) } if err := col.CreateMany(req.Context(), docList); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) case map[string]any: doc, err := client.NewDocFromMap(t) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } if err := col.Create(req.Context(), doc); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) default: - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid request body"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid request body"}) } } -func (s *CollectionHandler) Save(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) Save(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var docMap map[string]any if err := requestJSON(req, &docMap); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - 
responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err = col.Save(req.Context(), doc) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var request CollectionDeleteRequest if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } @@ -111,19 +111,19 @@ func (s *CollectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request case request.Filter != nil: result, err := col.DeleteWith(req.Context(), request.Filter) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } result, err := col.DeleteWith(req.Context(), docKey) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) @@ -132,28 +132,28 @@ func (s *CollectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } docKeys = append(docKeys, docKey) } result, err := col.DeleteWith(req.Context(), docKeys) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) default: - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid delete request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid delete request"}) } } -func (s *CollectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var request CollectionUpdateRequest if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } @@ -161,19 +161,19 @@ func (s *CollectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request case request.Filter != nil: result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - responseJSON(rw, 
http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } result, err := col.UpdateWith(req.Context(), docKey, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) @@ -182,75 +182,75 @@ func (s *CollectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } docKeys = append(docKeys, docKey) } result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) default: - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid update request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid update request"}) } } -func (s *CollectionHandler) Update(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var docMap map[string]any if err := requestJSON(req, &docMap); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } if doc.Key().String() != chi.URLParam(req, "key") { - responseJSON(rw, http.StatusBadRequest, H{"error": "document key does not match"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"document key does not match"}) return } err = col.Update(req.Context(), doc) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } _, err = col.Delete(req.Context(), docKey) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *CollectionHandler) Get(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("deleted")) docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } _, 
err = col.Get(req.Context(), docKey, showDeleted) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) @@ -261,18 +261,18 @@ type DocKeyResult struct { Error string `json:"error"` } -func (s *CollectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) flusher, ok := rw.(http.Flusher) if !ok { - responseJSON(rw, http.StatusBadRequest, H{"error": "streaming not supported"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"streaming not supported"}) return } docKeyCh, err := col.GetAllDocKeys(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } @@ -299,39 +299,39 @@ func (s *CollectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ } } -func (s *CollectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) var indexDesc client.IndexDescription if err := requestJSON(req, &indexDesc); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } index, err := col.CreateIndex(req.Context(), indexDesc) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, index) } -func (s *CollectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) indexes, err := col.GetIndexes(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, indexes) } -func (s *CollectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) err := col.DropIndex(req.Context(), chi.URLParam(req, "index")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) diff --git a/http/server_lens.go b/http/handler_lens.go similarity index 65% rename from http/server_lens.go rename to http/handler_lens.go index 8f4b38ab3d..cc0c08ef10 100644 --- a/http/server_lens.go +++ b/http/handler_lens.go @@ -19,88 +19,88 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type LensHandler struct{} +type lensHandler struct{} -func (s *LensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) err := lens.ReloadLenses(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, 
errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *LensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) var cfg client.LensConfig if err := requestJSON(req, &cfg); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err := lens.SetMigration(req.Context(), cfg) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *LensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) var src enumerable.Enumerable[map[string]any] if err := requestJSON(req, &src); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } result, err := lens.MigrateUp(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) } -func (s *LensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) var src enumerable.Enumerable[map[string]any] if err := requestJSON(req, &src); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } result, err := lens.MigrateDown(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, result) } -func (s *LensHandler) Config(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) Config(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) cfgs, err := lens.Config(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, cfgs) } -func (s *LensHandler) HasMigration(rw http.ResponseWriter, req *http.Request) { +func (s *lensHandler) HasMigration(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) exists, err := lens.HasMigration(req.Context(), chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } if !exists { - responseJSON(rw, http.StatusBadRequest, H{"error": "migration not found"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"migration not found"}) return } rw.WriteHeader(http.StatusOK) diff --git a/http/server_store.go b/http/handler_store.go similarity index 68% rename from http/server_store.go rename to http/handler_store.go index 
c5adc5bb45..22c63c90fa 100644 --- a/http/server_store.go +++ b/http/handler_store.go @@ -22,225 +22,177 @@ import ( "github.com/sourcenetwork/defradb/client" ) -type GraphQLRequest struct { - Query string `json:"query"` -} - -type GraphQLResponse struct { - Errors []string `json:"errors,omitempty"` - Data any `json:"data"` -} - -func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { - // decode numbers to json.Number - dec := json.NewDecoder(bytes.NewBuffer(data)) - dec.UseNumber() +type storeHandler struct{} - var out map[string]any - if err := dec.Decode(&out); err != nil { - return err - } - - // fix errors type to match tests - switch t := out["errors"].(type) { - case []any: - var errors []string - for _, v := range t { - errors = append(errors, v.(string)) - } - res.Errors = errors - default: - res.Errors = nil - } - - // fix data type to match tests - switch t := out["data"].(type) { - case []any: - var fixed []map[string]any - for _, v := range t { - fixed = append(fixed, v.(map[string]any)) - } - res.Data = fixed - case map[string]any: - res.Data = t - default: - res.Data = []map[string]any{} - } - - return nil -} - -type StoreHandler struct{} - -func (s *StoreHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) var rep client.Replicator if err := requestJSON(req, &rep); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err := store.SetReplicator(req.Context(), rep) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) var rep client.Replicator if err := requestJSON(req, &rep); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err := store.DeleteReplicator(req.Context(), rep) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) reps, err := store.GetAllReplicators(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, reps) } -func (s *StoreHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) err := store.AddP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } 
rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) err := store.RemoveP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) cols, err := store.GetAllP2PCollections(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, cols) } -func (s *StoreHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) var config client.BackupConfig if err := requestJSON(req, &config); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err := store.BasicImport(req.Context(), config.Filepath) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) var config client.BackupConfig if err := requestJSON(req, &config); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err := store.BasicExport(req.Context(), &config) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) schema, err := io.ReadAll(req.Body) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } cols, err := store.AddSchema(req.Context(), string(schema)) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, cols) } -func (s *StoreHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) patch, err := io.ReadAll(req.Body) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } err = 
store.PatchSchema(req.Context(), string(patch)) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } rw.WriteHeader(http.StatusOK) } -func (s *StoreHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) switch { case req.URL.Query().Has("name"): col, err := store.GetCollectionByName(req.Context(), req.URL.Query().Get("name")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, col.Description()) case req.URL.Query().Has("schema_id"): col, err := store.GetCollectionBySchemaID(req.Context(), req.URL.Query().Get("schema_id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, col.Description()) case req.URL.Query().Has("version_id"): col, err := store.GetCollectionByVersionID(req.Context(), req.URL.Query().Get("version_id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, col.Description()) default: cols, err := store.GetAllCollections(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } colDesc := make([]client.CollectionDescription, len(cols)) @@ -251,18 +203,76 @@ func (s *StoreHandler) GetCollection(rw http.ResponseWriter, req *http.Request) } } -func (s *StoreHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) indexes, err := store.GetAllIndexes(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } responseJSON(rw, http.StatusOK, indexes) } -func (s *StoreHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { +func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) { + db := req.Context().Value(dbContextKey).(client.DB) + + if err := db.PrintDump(req.Context()); err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + return + } + rw.WriteHeader(http.StatusOK) +} + +type GraphQLRequest struct { + Query string `json:"query"` +} + +type GraphQLResponse struct { + Data any `json:"data"` + Errors []string `json:"errors,omitempty"` +} + +func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { + // decode numbers to json.Number + dec := json.NewDecoder(bytes.NewBuffer(data)) + dec.UseNumber() + + var out map[string]any + if err := dec.Decode(&out); err != nil { + return err + } + + // fix errors type to match tests + switch t := out["errors"].(type) { + case []any: + var errors []string + for _, v := range t { + errors = append(errors, v.(string)) + } + res.Errors = errors + default: + res.Errors = nil + } + + // fix data type to match tests + switch t := out["data"].(type) { + case []any: + var fixed []map[string]any + for _, v := range t { + 
fixed = append(fixed, v.(map[string]any)) + } + res.Data = fixed + case map[string]any: + res.Data = t + default: + res.Data = []map[string]any{} + } + + return nil +} + +func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) var request GraphQLRequest @@ -271,11 +281,11 @@ func (s *StoreHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { request.Query = req.URL.Query().Get("query") case req.Body != nil: if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } default: - responseJSON(rw, http.StatusBadRequest, H{"error": "missing request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"missing request"}) return } result := store.ExecRequest(req.Context(), request.Query) @@ -285,12 +295,12 @@ func (s *StoreHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { errors = append(errors, err.Error()) } if result.Pub == nil { - responseJSON(rw, http.StatusOK, H{"data": result.GQL.Data, "errors": errors}) + responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, errors}) return } flusher, ok := rw.(http.Flusher) if !ok { - responseJSON(rw, http.StatusBadRequest, H{"error": "streaming not supported"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"streaming not supported"}) return } diff --git a/http/server_tx.go b/http/handler_tx.go similarity index 70% rename from http/server_tx.go rename to http/handler_tx.go index 8e8d5a8b0d..c7a83c28c4 100644 --- a/http/server_tx.go +++ b/http/handler_tx.go @@ -21,73 +21,73 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -type TxHandler struct{} +type txHandler struct{} type CreateTxResponse struct { ID uint64 `json:"id"` } -func (h *TxHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { +func (h *txHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { db := req.Context().Value(dbContextKey).(client.DB) txs := req.Context().Value(txsContextKey).(*sync.Map) readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) tx, err := db.NewTxn(req.Context(), readOnly) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } txs.Store(tx.ID(), tx) responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } -func (h *TxHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) { +func (h *txHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) { db := req.Context().Value(dbContextKey).(client.DB) txs := req.Context().Value(txsContextKey).(*sync.Map) readOnly, _ := strconv.ParseBool(req.URL.Query().Get("read_only")) tx, err := db.NewConcurrentTxn(req.Context(), readOnly) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } txs.Store(tx.ID(), tx) responseJSON(rw, http.StatusOK, &CreateTxResponse{tx.ID()}) } -func (h *TxHandler) Commit(rw http.ResponseWriter, req *http.Request) { +func (h *txHandler) Commit(rw http.ResponseWriter, req *http.Request) { txs := req.Context().Value(txsContextKey).(*sync.Map) txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, 
errorResponse{"invalid transaction id"}) return } txVal, ok := txs.Load(txId) if !ok { - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) return } err = txVal.(datastore.Txn).Commit(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) return } txs.Delete(txId) rw.WriteHeader(http.StatusOK) } -func (h *TxHandler) Discard(rw http.ResponseWriter, req *http.Request) { +func (h *txHandler) Discard(rw http.ResponseWriter, req *http.Request) { txs := req.Context().Value(txsContextKey).(*sync.Map) txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) return } txVal, ok := txs.LoadAndDelete(txId) if !ok { - responseJSON(rw, http.StatusBadRequest, H{"error": "invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) return } txVal.(datastore.Txn).Discard(req.Context()) diff --git a/http/http_client.go b/http/http_client.go new file mode 100644 index 0000000000..bb100a11a3 --- /dev/null +++ b/http/http_client.go @@ -0,0 +1,88 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + + "github.com/sourcenetwork/defradb/datastore/badger/v4" +) + +type httpClient struct { + client *http.Client + baseURL *url.URL + txValue string +} + +func newHttpClient(baseURL *url.URL) *httpClient { + client := httpClient{ + client: http.DefaultClient, + baseURL: baseURL, + } + return &client +} + +func (c *httpClient) withTxn(value uint64) *httpClient { + return &httpClient{ + client: c.client, + baseURL: c.baseURL, + txValue: fmt.Sprintf("%d", value), + } +} + +func (c *httpClient) setDefaultHeaders(req *http.Request) { + req.Header.Set("Accept", "application/json") + req.Header.Set("Content-Type", "application/json") + + if c.txValue != "" { + req.Header.Set(TX_HEADER_NAME, c.txValue) + } +} + +func (c *httpClient) request(req *http.Request) ([]byte, error) { + c.setDefaultHeaders(req) + + res, err := c.client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() //nolint:errcheck + + data, err := io.ReadAll(res.Body) + if err != nil { + return nil, err + } + // request was successful + if res.StatusCode == http.StatusOK { + return data, nil + } + // attempt to parse json error + var errRes errorResponse + if err := json.Unmarshal(data, &errRes); err != nil { + return nil, fmt.Errorf("%s", data) + } + if errRes.Error == badger.ErrTxnConflict.Error() { + return nil, badger.ErrTxnConflict + } + return nil, fmt.Errorf("%s", errRes.Error) +} + +func (c *httpClient) requestJson(req *http.Request, out any) error { + data, err := c.request(req) + if err != nil { + return err + } + return json.Unmarshal(data, out) +} diff --git a/http/server.go b/http/server.go index beea6b42c3..7e5d7db978 100644 --- a/http/server.go +++ b/http/server.go @@ -29,10 +29,10 @@ type Server struct { func NewServer(db client.DB) *Server { txs := &sync.Map{} - txHandler := &TxHandler{} - storeHandler := &StoreHandler{} - collectionHandler := &CollectionHandler{} - lensHandler := &LensHandler{} + tx_handler := &txHandler{} + store_handler := &storeHandler{} + collection_handler := &collectionHandler{} + lens_handler := &lensHandler{} router := chi.NewRouter() router.Use(middleware.RequestLogger(&logFormatter{})) @@ -41,60 +41,63 @@ func NewServer(db client.DB) *Server { router.Route("/api/v0", func(api chi.Router) { api.Use(ApiMiddleware(db, txs), TransactionMiddleware, StoreMiddleware) api.Route("/tx", func(tx chi.Router) { - tx.Post("/", txHandler.NewTxn) - tx.Post("/concurrent", txHandler.NewConcurrentTxn) - tx.Post("/{id}", txHandler.Commit) - tx.Delete("/{id}", txHandler.Discard) + tx.Post("/", tx_handler.NewTxn) + tx.Post("/concurrent", tx_handler.NewConcurrentTxn) + tx.Post("/{id}", tx_handler.Commit) + tx.Delete("/{id}", tx_handler.Discard) }) api.Route("/backup", func(backup chi.Router) { - backup.Post("/export", storeHandler.BasicExport) - backup.Post("/import", storeHandler.BasicImport) + backup.Post("/export", store_handler.BasicExport) + backup.Post("/import", store_handler.BasicImport) }) api.Route("/schema", func(schema chi.Router) { - schema.Post("/", storeHandler.AddSchema) - schema.Patch("/", storeHandler.PatchSchema) + schema.Post("/", store_handler.AddSchema) + schema.Patch("/", store_handler.PatchSchema) }) api.Route("/collections", func(collections chi.Router) { - collections.Get("/", storeHandler.GetCollection) + collections.Get("/", store_handler.GetCollection) // with collection middleware collections_tx := collections.With(CollectionMiddleware) - collections_tx.Get("/{name}", 
collectionHandler.GetAllDocKeys) - collections_tx.Post("/{name}", collectionHandler.Create) - collections_tx.Patch("/{name}", collectionHandler.UpdateWith) - collections_tx.Delete("/{name}", collectionHandler.DeleteWith) - collections_tx.Post("/{name}/indexes", collectionHandler.CreateIndex) - collections_tx.Get("/{name}/indexes", collectionHandler.GetIndexes) - collections_tx.Delete("/{name}/indexes/{index}", collectionHandler.DropIndex) - collections_tx.Get("/{name}/{key}", collectionHandler.Get) - collections_tx.Post("/{name}/{key}", collectionHandler.Save) - collections_tx.Patch("/{name}/{key}", collectionHandler.Update) - collections_tx.Delete("/{name}/{key}", collectionHandler.Delete) + collections_tx.Get("/{name}", collection_handler.GetAllDocKeys) + collections_tx.Post("/{name}", collection_handler.Create) + collections_tx.Patch("/{name}", collection_handler.UpdateWith) + collections_tx.Delete("/{name}", collection_handler.DeleteWith) + collections_tx.Post("/{name}/indexes", collection_handler.CreateIndex) + collections_tx.Get("/{name}/indexes", collection_handler.GetIndexes) + collections_tx.Delete("/{name}/indexes/{index}", collection_handler.DropIndex) + collections_tx.Get("/{name}/{key}", collection_handler.Get) + collections_tx.Post("/{name}/{key}", collection_handler.Save) + collections_tx.Patch("/{name}/{key}", collection_handler.Update) + collections_tx.Delete("/{name}/{key}", collection_handler.Delete) }) api.Route("/lens", func(lens chi.Router) { lens.Use(LensMiddleware) - lens.Get("/", lensHandler.Config) - lens.Post("/", lensHandler.SetMigration) - lens.Post("/reload", lensHandler.ReloadLenses) - lens.Get("/{version}", lensHandler.HasMigration) - lens.Post("/{version}/up", lensHandler.MigrateUp) - lens.Post("/{version}/down", lensHandler.MigrateDown) + lens.Get("/", lens_handler.Config) + lens.Post("/", lens_handler.SetMigration) + lens.Post("/reload", lens_handler.ReloadLenses) + lens.Get("/{version}", lens_handler.HasMigration) + lens.Post("/{version}/up", lens_handler.MigrateUp) + lens.Post("/{version}/down", lens_handler.MigrateDown) }) api.Route("/graphql", func(graphQL chi.Router) { - graphQL.Get("/", storeHandler.ExecRequest) - graphQL.Post("/", storeHandler.ExecRequest) + graphQL.Get("/", store_handler.ExecRequest) + graphQL.Post("/", store_handler.ExecRequest) }) api.Route("/p2p", func(p2p chi.Router) { p2p.Route("/replicators", func(p2p_replicators chi.Router) { - p2p_replicators.Get("/", storeHandler.GetAllReplicators) - p2p_replicators.Post("/", storeHandler.SetReplicator) - p2p_replicators.Delete("/", storeHandler.DeleteReplicator) + p2p_replicators.Get("/", store_handler.GetAllReplicators) + p2p_replicators.Post("/", store_handler.SetReplicator) + p2p_replicators.Delete("/", store_handler.DeleteReplicator) }) p2p.Route("/collections", func(p2p_collections chi.Router) { - p2p_collections.Get("/", storeHandler.GetAllP2PCollections) - p2p_collections.Post("/{id}", storeHandler.AddP2PCollection) - p2p_collections.Delete("/{id}", storeHandler.RemoveP2PCollection) + p2p_collections.Get("/", store_handler.GetAllP2PCollections) + p2p_collections.Post("/{id}", store_handler.AddP2PCollection) + p2p_collections.Delete("/{id}", store_handler.RemoveP2PCollection) }) }) + api.Route("/debug", func(debug chi.Router) { + debug.Get("/dump", store_handler.PrintDump) + }) }) return &Server{ diff --git a/http/server_utils.go b/http/utils.go similarity index 93% rename from http/server_utils.go rename to http/utils.go index 43c623a8cd..b7d3a5bf8c 100644 --- 
a/http/server_utils.go +++ b/http/utils.go @@ -16,7 +16,9 @@ import ( "net/http" ) -type H map[string]any +type errorResponse struct { + Error string `json:"error"` +} func requestJSON(req *http.Request, out any) error { data, err := io.ReadAll(req.Body) diff --git a/http/wrapper.go b/http/wrapper.go index 8b3e01d84d..558dc79474 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -29,7 +29,7 @@ var _ client.DB = (*Wrapper)(nil) type Wrapper struct { db client.DB server *Server - client *StoreClient + client *Client httpServer *httptest.Server } @@ -37,7 +37,7 @@ func NewWrapper(db client.DB) (*Wrapper, error) { server := NewServer(db) httpServer := httptest.NewServer(server) - client, err := NewStoreClient(httpServer.URL) + client, err := NewClient(httpServer.URL) if err != nil { return nil, err } From b054e77e2c85f77b98e5b25d4059b3c977d94bd8 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 28 Aug 2023 16:30:10 -0700 Subject: [PATCH 039/107] merge api/http package into http --- go.mod | 2 +- http/errors.go | 31 +++ http/handler.go | 119 +++++++++++ http/handler_playground.go | 28 +++ http/middleware.go | 21 +- http/server.go | 392 ++++++++++++++++++++++++++++--------- http/server_test.go | 251 ++++++++++++++++++++++++ http/wrapper.go | 12 +- 8 files changed, 757 insertions(+), 99 deletions(-) create mode 100644 http/errors.go create mode 100644 http/handler.go create mode 100644 http/handler_playground.go create mode 100644 http/server_test.go diff --git a/go.mod b/go.mod index 0ca721256c..ac5a9cc348 100644 --- a/go.mod +++ b/go.mod @@ -49,6 +49,7 @@ require ( go.opentelemetry.io/otel/sdk/metric v0.39.0 go.uber.org/zap v1.25.0 golang.org/x/crypto v0.12.0 + golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 golang.org/x/net v0.14.0 google.golang.org/grpc v1.57.0 google.golang.org/protobuf v1.31.0 @@ -187,7 +188,6 @@ require ( go.uber.org/dig v1.17.0 // indirect go.uber.org/fx v1.20.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 // indirect golang.org/x/mod v0.12.0 // indirect golang.org/x/sync v0.3.0 // indirect golang.org/x/sys v0.11.0 // indirect diff --git a/http/errors.go b/http/errors.go new file mode 100644 index 0000000000..695c9a8637 --- /dev/null +++ b/http/errors.go @@ -0,0 +1,31 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import "errors" + +// Errors returnable from this package. +// +// This list is incomplete. Undefined errors may also be returned. +// Errors returned from this package may be tested against these errors with errors.Is. +var ( + ErrNoListener = errors.New("cannot serve with no listener") + ErrSchema = errors.New("base must start with the http or https scheme") + ErrDatabaseNotAvailable = errors.New("no database available") + ErrFormNotSupported = errors.New("content type application/x-www-form-urlencoded not yet supported") + ErrBodyEmpty = errors.New("body cannot be empty") + ErrMissingGQLRequest = errors.New("missing GraphQL request") + ErrPeerIdUnavailable = errors.New("no PeerID available. 
P2P might be disabled") + ErrStreamingUnsupported = errors.New("streaming unsupported") + ErrNoEmail = errors.New("email address must be specified for tls with autocert") + ErrPayloadFormat = errors.New("invalid payload format") + ErrMissingNewKey = errors.New("missing _newKey for imported doc") +) diff --git a/http/handler.go b/http/handler.go new file mode 100644 index 0000000000..e01b40f12e --- /dev/null +++ b/http/handler.go @@ -0,0 +1,119 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "net/http" + "sync" + + "github.com/sourcenetwork/defradb/client" + + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" +) + +// playgroundHandler is set when building with the playground build tag +var playgroundHandler = http.HandlerFunc(http.NotFound) + +type handler struct { + db client.DB + router *chi.Mux + txs *sync.Map +} + +func newHandler(db client.DB, opts serverOptions) *handler { + txs := &sync.Map{} + + tx_handler := &txHandler{} + store_handler := &storeHandler{} + collection_handler := &collectionHandler{} + lens_handler := &lensHandler{} + + router := chi.NewRouter() + router.Use(middleware.RequestLogger(&logFormatter{})) + router.Use(middleware.Recoverer) + router.Use(CorsMiddleware(opts)) + router.Use(ApiMiddleware(db, txs, opts)) + + router.Route("/api/v0", func(api chi.Router) { + api.Use(TransactionMiddleware, StoreMiddleware) + api.Route("/tx", func(tx chi.Router) { + tx.Post("/", tx_handler.NewTxn) + tx.Post("/concurrent", tx_handler.NewConcurrentTxn) + tx.Post("/{id}", tx_handler.Commit) + tx.Delete("/{id}", tx_handler.Discard) + }) + api.Route("/backup", func(backup chi.Router) { + backup.Post("/export", store_handler.BasicExport) + backup.Post("/import", store_handler.BasicImport) + }) + api.Route("/schema", func(schema chi.Router) { + schema.Post("/", store_handler.AddSchema) + schema.Patch("/", store_handler.PatchSchema) + }) + api.Route("/collections", func(collections chi.Router) { + collections.Get("/", store_handler.GetCollection) + // with collection middleware + collections_tx := collections.With(CollectionMiddleware) + collections_tx.Get("/{name}", collection_handler.GetAllDocKeys) + collections_tx.Post("/{name}", collection_handler.Create) + collections_tx.Patch("/{name}", collection_handler.UpdateWith) + collections_tx.Delete("/{name}", collection_handler.DeleteWith) + collections_tx.Post("/{name}/indexes", collection_handler.CreateIndex) + collections_tx.Get("/{name}/indexes", collection_handler.GetIndexes) + collections_tx.Delete("/{name}/indexes/{index}", collection_handler.DropIndex) + collections_tx.Get("/{name}/{key}", collection_handler.Get) + collections_tx.Post("/{name}/{key}", collection_handler.Save) + collections_tx.Patch("/{name}/{key}", collection_handler.Update) + collections_tx.Delete("/{name}/{key}", collection_handler.Delete) + }) + api.Route("/lens", func(lens chi.Router) { + lens.Use(LensMiddleware) + lens.Get("/", lens_handler.Config) + lens.Post("/", lens_handler.SetMigration) + lens.Post("/reload", lens_handler.ReloadLenses) + lens.Get("/{version}", lens_handler.HasMigration) + lens.Post("/{version}/up", lens_handler.MigrateUp) + 
lens.Post("/{version}/down", lens_handler.MigrateDown) + }) + api.Route("/graphql", func(graphQL chi.Router) { + graphQL.Get("/", store_handler.ExecRequest) + graphQL.Post("/", store_handler.ExecRequest) + }) + api.Route("/p2p", func(p2p chi.Router) { + p2p.Route("/replicators", func(p2p_replicators chi.Router) { + p2p_replicators.Get("/", store_handler.GetAllReplicators) + p2p_replicators.Post("/", store_handler.SetReplicator) + p2p_replicators.Delete("/", store_handler.DeleteReplicator) + }) + p2p.Route("/collections", func(p2p_collections chi.Router) { + p2p_collections.Get("/", store_handler.GetAllP2PCollections) + p2p_collections.Post("/{id}", store_handler.AddP2PCollection) + p2p_collections.Delete("/{id}", store_handler.RemoveP2PCollection) + }) + }) + api.Route("/debug", func(debug chi.Router) { + debug.Get("/dump", store_handler.PrintDump) + }) + }) + + router.Handle("/*", playgroundHandler) + + return &handler{ + db: db, + router: router, + txs: txs, + } +} + +func (h *handler) ServeHTTP(w http.ResponseWriter, req *http.Request) { + h.router.ServeHTTP(w, req) +} diff --git a/http/handler_playground.go b/http/handler_playground.go new file mode 100644 index 0000000000..0a69e312b2 --- /dev/null +++ b/http/handler_playground.go @@ -0,0 +1,28 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +//go:build playground + +package http + +import ( + "io/fs" + "net/http" + + "github.com/sourcenetwork/defradb/playground" +) + +func init() { + sub, err := fs.Sub(playground.Dist, "dist") + if err != nil { + panic(err) + } + playgroundHandler = http.FileServer(http.FS(sub)) +} diff --git a/http/middleware.go b/http/middleware.go index 0aa38c5bfe..b079227837 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -14,9 +14,12 @@ import ( "context" "net/http" "strconv" + "strings" "sync" "github.com/go-chi/chi/v5" + "github.com/go-chi/cors" + "golang.org/x/exp/slices" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" @@ -35,10 +38,26 @@ var ( colContextKey = contextKey("col") ) +// CorsMiddleware handles cross origin request +func CorsMiddleware(opts serverOptions) func(http.Handler) http.Handler { + return cors.Handler(cors.Options{ + AllowOriginFunc: func(r *http.Request, origin string) bool { + return slices.Contains[string](opts.allowedOrigins, strings.ToLower(origin)) + }, + AllowedMethods: []string{"GET", "HEAD", "POST", "PATCH", "DELETE"}, + AllowedHeaders: []string{"Content-Type"}, + MaxAge: 300, + }) +} + // ApiMiddleware sets the required context values for all API requests. 
-func ApiMiddleware(db client.DB, txs *sync.Map) func(http.Handler) http.Handler { +func ApiMiddleware(db client.DB, txs *sync.Map, opts serverOptions) func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + if opts.tls.HasValue() { + rw.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains") + } + ctx := req.Context() ctx = context.WithValue(ctx, dbContextKey, db) ctx = context.WithValue(ctx, txsContextKey, txs) diff --git a/http/server.go b/http/server.go index 7e5d7db978..a6f092ef20 100644 --- a/http/server.go +++ b/http/server.go @@ -11,102 +11,312 @@ package http import ( + "context" + "crypto/tls" + "fmt" + "net" "net/http" - "sync" + "path" + "strings" - "github.com/go-chi/chi/v5" - "github.com/go-chi/chi/v5/middleware" + "github.com/sourcenetwork/immutable" + "golang.org/x/crypto/acme/autocert" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/logging" ) +const ( + // These constants are best-effort durations that fit our current API + // and may help prevent running out of file descriptors. + // readTimeout = 5 * time.Second + // writeTimeout = 10 * time.Second + // idleTimeout = 120 * time.Second + + // Temporarily disabling timeouts until [this proposal](https://github.com/golang/go/issues/54136) is merged. + // https://github.com/sourcenetwork/defradb/issues/927 + readTimeout = 0 + writeTimeout = 0 + idleTimeout = 0 +) + +const ( + httpPort = ":80" + httpsPort = ":443" +) + +// Server struct holds the Handler for the HTTP API.
type Server struct { - db client.DB - router *chi.Mux - txs *sync.Map -} - -func NewServer(db client.DB) *Server { - txs := &sync.Map{} - - tx_handler := &txHandler{} - store_handler := &storeHandler{} - collection_handler := &collectionHandler{} - lens_handler := &lensHandler{} - - router := chi.NewRouter() - router.Use(middleware.RequestLogger(&logFormatter{})) - router.Use(middleware.Recoverer) - - router.Route("/api/v0", func(api chi.Router) { - api.Use(ApiMiddleware(db, txs), TransactionMiddleware, StoreMiddleware) - api.Route("/tx", func(tx chi.Router) { - tx.Post("/", tx_handler.NewTxn) - tx.Post("/concurrent", tx_handler.NewConcurrentTxn) - tx.Post("/{id}", tx_handler.Commit) - tx.Delete("/{id}", tx_handler.Discard) - }) - api.Route("/backup", func(backup chi.Router) { - backup.Post("/export", store_handler.BasicExport) - backup.Post("/import", store_handler.BasicImport) - }) - api.Route("/schema", func(schema chi.Router) { - schema.Post("/", store_handler.AddSchema) - schema.Patch("/", store_handler.PatchSchema) - }) - api.Route("/collections", func(collections chi.Router) { - collections.Get("/", store_handler.GetCollection) - // with collection middleware - collections_tx := collections.With(CollectionMiddleware) - collections_tx.Get("/{name}", collection_handler.GetAllDocKeys) - collections_tx.Post("/{name}", collection_handler.Create) - collections_tx.Patch("/{name}", collection_handler.UpdateWith) - collections_tx.Delete("/{name}", collection_handler.DeleteWith) - collections_tx.Post("/{name}/indexes", collection_handler.CreateIndex) - collections_tx.Get("/{name}/indexes", collection_handler.GetIndexes) - collections_tx.Delete("/{name}/indexes/{index}", collection_handler.DropIndex) - collections_tx.Get("/{name}/{key}", collection_handler.Get) - collections_tx.Post("/{name}/{key}", collection_handler.Save) - collections_tx.Patch("/{name}/{key}", collection_handler.Update) - collections_tx.Delete("/{name}/{key}", collection_handler.Delete) - }) - api.Route("/lens", func(lens chi.Router) { - lens.Use(LensMiddleware) - lens.Get("/", lens_handler.Config) - lens.Post("/", lens_handler.SetMigration) - lens.Post("/reload", lens_handler.ReloadLenses) - lens.Get("/{version}", lens_handler.HasMigration) - lens.Post("/{version}/up", lens_handler.MigrateUp) - lens.Post("/{version}/down", lens_handler.MigrateDown) - }) - api.Route("/graphql", func(graphQL chi.Router) { - graphQL.Get("/", store_handler.ExecRequest) - graphQL.Post("/", store_handler.ExecRequest) - }) - api.Route("/p2p", func(p2p chi.Router) { - p2p.Route("/replicators", func(p2p_replicators chi.Router) { - p2p_replicators.Get("/", store_handler.GetAllReplicators) - p2p_replicators.Post("/", store_handler.SetReplicator) - p2p_replicators.Delete("/", store_handler.DeleteReplicator) - }) - p2p.Route("/collections", func(p2p_collections chi.Router) { - p2p_collections.Get("/", store_handler.GetAllP2PCollections) - p2p_collections.Post("/{id}", store_handler.AddP2PCollection) - p2p_collections.Delete("/{id}", store_handler.RemoveP2PCollection) - }) - }) - api.Route("/debug", func(debug chi.Router) { - debug.Get("/dump", store_handler.PrintDump) - }) - }) - - return &Server{ - db: db, - router: router, - txs: txs, - } -} - -func (s *Server) ServeHTTP(w http.ResponseWriter, req *http.Request) { - s.router.ServeHTTP(w, req) + options serverOptions + listener net.Listener + certManager *autocert.Manager + // address that is assigned to the server on listen + address string + + http.Server +} + +type serverOptions struct { + // list 
of allowed origins for CORS. + allowedOrigins []string + // ID of the server node. + peerID string + // when the value is present, the server will run with tls + tls immutable.Option[tlsOptions] + // root directory for the node config. + rootDir string + // The domain for the API (optional). + domain immutable.Option[string] +} + +type tlsOptions struct { + // Public key for TLS. Ignored if domain is set. + pubKey string + // Private key for TLS. Ignored if domain is set. + privKey string + // email address for the CA to send problem notifications (optional) + email string + // specify the tls port + port string +} + +// NewServer instantiates a new server with the given http.Handler. +func NewServer(db client.DB, options ...func(*Server)) *Server { + srv := &Server{ + Server: http.Server{ + ReadTimeout: readTimeout, + WriteTimeout: writeTimeout, + IdleTimeout: idleTimeout, + }, + } + + for _, opt := range append(options, DefaultOpts()) { + opt(srv) + } + + srv.Handler = newHandler(db, srv.options) + + return srv +} + +func newHTTPRedirServer(m *autocert.Manager) *Server { + srv := &Server{ + Server: http.Server{ + ReadTimeout: readTimeout, + WriteTimeout: writeTimeout, + IdleTimeout: idleTimeout, + }, + } + + srv.Addr = httpPort + srv.Handler = m.HTTPHandler(nil) + + return srv +} + +// DefaultOpts returns the default options for the server. +func DefaultOpts() func(*Server) { + return func(s *Server) { + if s.Addr == "" { + s.Addr = "localhost:9181" + } + } +} + +// WithAllowedOrigins returns an option to set the allowed origins for CORS. +func WithAllowedOrigins(origins ...string) func(*Server) { + return func(s *Server) { + s.options.allowedOrigins = append(s.options.allowedOrigins, origins...) + } +} + +// WithAddress returns an option to set the address for the server. +func WithAddress(addr string) func(*Server) { + return func(s *Server) { + s.Addr = addr + + // If the address is not localhost, we check to see if it's a valid IP address. + // If it's not a valid IP, we assume that it's a domain name to be used with TLS. + if !strings.HasPrefix(addr, "localhost:") && !strings.HasPrefix(addr, ":") { + host, _, err := net.SplitHostPort(addr) + if err != nil { + host = addr + } + ip := net.ParseIP(host) + if ip == nil { + s.Addr = httpPort + s.options.domain = immutable.Some(host) + } + } + } +} + +// WithCAEmail returns an option to set the email address for the CA to send problem notifications. +func WithCAEmail(email string) func(*Server) { + return func(s *Server) { + tlsOpt := s.options.tls.Value() + tlsOpt.email = email + s.options.tls = immutable.Some(tlsOpt) + } +} + +// WithPeerID returns an option to set the identifier of the server node. +func WithPeerID(id string) func(*Server) { + return func(s *Server) { + s.options.peerID = id + } +} + +// WithRootDir returns an option to set the root directory for the node config. +func WithRootDir(rootDir string) func(*Server) { + return func(s *Server) { + s.options.rootDir = rootDir + } +} + +// WithSelfSignedCert returns an option to set the public and private keys for TLS. +func WithSelfSignedCert(pubKey, privKey string) func(*Server) { + return func(s *Server) { + tlsOpt := s.options.tls.Value() + tlsOpt.pubKey = pubKey + tlsOpt.privKey = privKey + s.options.tls = immutable.Some(tlsOpt) + } +} + +// WithTLS returns an option to enable TLS. 
+func WithTLS() func(*Server) { + return func(s *Server) { + tlsOpt := s.options.tls.Value() + tlsOpt.port = httpsPort + s.options.tls = immutable.Some(tlsOpt) + } +} + +// WithTLSPort returns an option to set the port for TLS. +func WithTLSPort(port int) func(*Server) { + return func(s *Server) { + tlsOpt := s.options.tls.Value() + tlsOpt.port = fmt.Sprintf(":%d", port) + s.options.tls = immutable.Some(tlsOpt) + } +} + +// Listen creates a new net.Listener and saves it on the receiver. +func (s *Server) Listen(ctx context.Context) error { + var err error + if s.options.tls.HasValue() { + return s.listenWithTLS(ctx) + } + + lc := net.ListenConfig{} + s.listener, err = lc.Listen(ctx, "tcp", s.Addr) + if err != nil { + return errors.WithStack(err) + } + + // Save the address on the server in case the port was set to random, + // so that we can see what was assigned. + s.address = s.listener.Addr().String() + + return nil +} + +func (s *Server) listenWithTLS(ctx context.Context) error { + cfg := &tls.Config{ + MinVersion: tls.VersionTLS12, + // We only allow cipher suites that are marked secure + // by SSL Labs. + CipherSuites: []uint16{ + tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, + tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + }, + ServerName: "DefraDB", + } + + if s.options.domain.HasValue() && s.options.domain.Value() != "" { + s.Addr = s.options.tls.Value().port + + if s.options.tls.Value().email == "" || s.options.tls.Value().email == config.DefaultAPIEmail { + return ErrNoEmail + } + + certCache := path.Join(s.options.rootDir, "autocerts") + + log.FeedbackInfo( + ctx, + "Generating auto certificate", + logging.NewKV("Domain", s.options.domain.Value()), + logging.NewKV("Certificate cache", certCache), + ) + + m := &autocert.Manager{ + Cache: autocert.DirCache(certCache), + Prompt: autocert.AcceptTOS, + Email: s.options.tls.Value().email, + HostPolicy: autocert.HostWhitelist(s.options.domain.Value()), + } + + cfg.GetCertificate = m.GetCertificate + + // We set the manager on the server instance so we can later start + // a redirection server. + s.certManager = m + } else { + // When not using autocert, we create a self-signed certificate + // with the provided public and private keys. + log.FeedbackInfo(ctx, "Generating self signed certificate") + + cert, err := tls.LoadX509KeyPair( + s.options.tls.Value().privKey, + s.options.tls.Value().pubKey, + ) + if err != nil { + return errors.WithStack(err) + } + + cfg.Certificates = []tls.Certificate{cert} + } + + var err error + s.listener, err = tls.Listen("tcp", s.Addr, cfg) + if err != nil { + return errors.WithStack(err) + } + + // Save the address on the server in case the port was set to random, + // so that we can see what was assigned. + s.address = s.listener.Addr().String() + + return nil +} + +// Run calls Serve with the receiver's listener.
+func (s *Server) Run(ctx context.Context) error { + if s.listener == nil { + return ErrNoListener + } + + if s.certManager != nil { + // When using TLS it's important to redirect http requests to https + go func() { + srv := newHTTPRedirServer(s.certManager) + err := srv.ListenAndServe() + if err != nil && !errors.Is(err, http.ErrServerClosed) { + log.Info(ctx, "Something went wrong with the redirection server", logging.NewKV("Error", err)) + } + }() + } + return s.Serve(s.listener) +} + +// AssignedAddr returns the address that was assigned to the server on calls to listen. +func (s *Server) AssignedAddr() string { + return s.address } diff --git a/http/server_test.go b/http/server_test.go new file mode 100644 index 0000000000..36417e6b41 --- /dev/null +++ b/http/server_test.go @@ -0,0 +1,251 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package http + +import ( + "context" + "net/http" + "os" + "testing" + + "github.com/stretchr/testify/assert" + "golang.org/x/crypto/acme/autocert" +) + +func TestNewServerAndRunWithoutListener(t *testing.T) { + ctx := context.Background() + s := NewServer(nil, WithAddress(":0")) + if ok := assert.NotNil(t, s); ok { + assert.Equal(t, ErrNoListener, s.Run(ctx)) + } +} + +func TestNewServerAndRunWithListenerAndInvalidPort(t *testing.T) { + ctx := context.Background() + s := NewServer(nil, WithAddress(":303000")) + if ok := assert.NotNil(t, s); ok { + assert.Error(t, s.Listen(ctx)) + } +} + +func TestNewServerAndRunWithListenerAndValidPort(t *testing.T) { + ctx := context.Background() + serverRunning := make(chan struct{}) + serverDone := make(chan struct{}) + s := NewServer(nil, WithAddress(":0")) + go func() { + close(serverRunning) + err := s.Listen(ctx) + assert.NoError(t, err) + err = s.Run(ctx) + assert.ErrorIs(t, http.ErrServerClosed, err) + defer close(serverDone) + }() + + <-serverRunning + + s.Shutdown(context.Background()) + + <-serverDone +} + +func TestNewServerAndRunWithAutocertWithoutEmail(t *testing.T) { + ctx := context.Background() + dir := t.TempDir() + s := NewServer(nil, WithAddress("example.com"), WithRootDir(dir), WithTLSPort(0)) + + err := s.Listen(ctx) + assert.ErrorIs(t, err, ErrNoEmail) + + s.Shutdown(context.Background()) +} + +func TestNewServerAndRunWithAutocert(t *testing.T) { + ctx := context.Background() + serverRunning := make(chan struct{}) + serverDone := make(chan struct{}) + dir := t.TempDir() + s := NewServer(nil, WithAddress("example.com"), WithRootDir(dir), WithTLSPort(0), WithCAEmail("dev@defradb.net")) + go func() { + close(serverRunning) + err := s.Listen(ctx) + assert.NoError(t, err) + err = s.Run(ctx) + assert.ErrorIs(t, http.ErrServerClosed, err) + defer close(serverDone) + }() + + <-serverRunning + + s.Shutdown(context.Background()) + + <-serverDone +} + +func TestNewServerAndRunWithSelfSignedCertAndNoKeyFiles(t *testing.T) { + ctx := context.Background() + serverRunning := make(chan struct{}) + serverDone := make(chan struct{}) + dir := t.TempDir() + s := NewServer(nil, WithAddress("localhost:0"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) + go func() { + close(serverRunning) + err := s.Listen(ctx) + assert.Contains(t, err.Error(), "no such 
file or directory") + defer close(serverDone) + }() + + <-serverRunning + + s.Shutdown(context.Background()) + + <-serverDone +} + +const pubKey = `-----BEGIN EC PARAMETERS----- +BgUrgQQAIg== +-----END EC PARAMETERS----- +-----BEGIN EC PRIVATE KEY----- +MIGkAgEBBDD4VK0DRBRaeieXU9JaPJfSeegGYcXaX5+gEcwGKA0UJYI46QRHIlHC +IJMOjPsrUCmgBwYFK4EEACKhZANiAAQ3ltsFK8bZZpOYiJnvwpa7Ft+b0KFsDqpu +pS0gW/SYpAncHhRuz18RQ2ycuXlSN1S/PAryRZ5PK2xORKfwpguEDEMdVwbHorZO +K44P/h3dhyNyAyf8rcRoqKXcl/K/uew= +-----END EC PRIVATE KEY-----` + +const privKey = `-----BEGIN CERTIFICATE----- +MIICQDCCAcUCCQDpMnN1gQ4fGTAKBggqhkjOPQQDAjCBiDELMAkGA1UEBhMCY2Ex +DzANBgNVBAgMBlF1ZWJlYzEQMA4GA1UEBwwHQ2hlbHNlYTEPMA0GA1UECgwGU291 +cmNlMRAwDgYDVQQLDAdEZWZyYURCMQ8wDQYDVQQDDAZzb3VyY2UxIjAgBgkqhkiG +9w0BCQEWE2V4YW1wbGVAZXhhbXBsZS5jb20wHhcNMjIxMDA2MTgyMjE1WhcNMjMx +MDA2MTgyMjE1WjCBiDELMAkGA1UEBhMCY2ExDzANBgNVBAgMBlF1ZWJlYzEQMA4G +A1UEBwwHQ2hlbHNlYTEPMA0GA1UECgwGU291cmNlMRAwDgYDVQQLDAdEZWZyYURC +MQ8wDQYDVQQDDAZzb3VyY2UxIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVAZXhhbXBs +ZS5jb20wdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQ3ltsFK8bZZpOYiJnvwpa7Ft+b +0KFsDqpupS0gW/SYpAncHhRuz18RQ2ycuXlSN1S/PAryRZ5PK2xORKfwpguEDEMd +VwbHorZOK44P/h3dhyNyAyf8rcRoqKXcl/K/uewwCgYIKoZIzj0EAwIDaQAwZgIx +AIfNQeo8syOb94ojF40jY+fY1ZBSbNNK6UUbFquwDMVEoSyXRJHHEU12NUKCVTUH +kgIxAKaEGC+lqp0aaN+yubYLRiTDxOlNpyiHox3nZiL4bG/CCdPDvbX63QcdI2yq +XPKczg== +-----END CERTIFICATE-----` + +func TestNewServerAndRunWithSelfSignedCertAndInvalidPort(t *testing.T) { + ctx := context.Background() + serverRunning := make(chan struct{}) + serverDone := make(chan struct{}) + dir := t.TempDir() + err := os.WriteFile(dir+"/server.key", []byte(privKey), 0644) + if err != nil { + t.Fatal(err) + } + err = os.WriteFile(dir+"/server.crt", []byte(pubKey), 0644) + if err != nil { + t.Fatal(err) + } + s := NewServer(nil, WithAddress(":303000"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) + go func() { + close(serverRunning) + err := s.Listen(ctx) + assert.Contains(t, err.Error(), "invalid port") + defer close(serverDone) + }() + + <-serverRunning + + s.Shutdown(context.Background()) + + <-serverDone +} + +func TestNewServerAndRunWithSelfSignedCert(t *testing.T) { + ctx := context.Background() + serverRunning := make(chan struct{}) + serverDone := make(chan struct{}) + dir := t.TempDir() + err := os.WriteFile(dir+"/server.key", []byte(privKey), 0644) + if err != nil { + t.Fatal(err) + } + err = os.WriteFile(dir+"/server.crt", []byte(pubKey), 0644) + if err != nil { + t.Fatal(err) + } + s := NewServer(nil, WithAddress("localhost:0"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) + go func() { + close(serverRunning) + err := s.Listen(ctx) + assert.NoError(t, err) + err = s.Run(ctx) + assert.ErrorIs(t, http.ErrServerClosed, err) + defer close(serverDone) + }() + + <-serverRunning + + s.Shutdown(context.Background()) + + <-serverDone +} + +func TestNewServerWithoutOptions(t *testing.T) { + s := NewServer(nil) + assert.Equal(t, "localhost:9181", s.Addr) + assert.Equal(t, []string(nil), s.options.allowedOrigins) +} + +func TestNewServerWithAddress(t *testing.T) { + s := NewServer(nil, WithAddress("localhost:9999")) + assert.Equal(t, "localhost:9999", s.Addr) +} + +func TestNewServerWithDomainAddress(t *testing.T) { + s := NewServer(nil, WithAddress("example.com")) + assert.Equal(t, "example.com", s.options.domain.Value()) + assert.NotNil(t, s.options.tls) +} + +func TestNewServerWithAllowedOrigins(t *testing.T) { + s := NewServer(nil, WithAllowedOrigins("https://source.network", 
"https://app.source.network")) + assert.Equal(t, []string{"https://source.network", "https://app.source.network"}, s.options.allowedOrigins) +} + +func TestNewServerWithCAEmail(t *testing.T) { + s := NewServer(nil, WithCAEmail("me@example.com")) + assert.Equal(t, "me@example.com", s.options.tls.Value().email) +} + +func TestNewServerWithPeerID(t *testing.T) { + s := NewServer(nil, WithPeerID("12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR")) + assert.Equal(t, "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", s.options.peerID) +} + +func TestNewServerWithRootDir(t *testing.T) { + dir := t.TempDir() + s := NewServer(nil, WithRootDir(dir)) + assert.Equal(t, dir, s.options.rootDir) +} + +func TestNewServerWithTLSPort(t *testing.T) { + s := NewServer(nil, WithTLSPort(44343)) + assert.Equal(t, ":44343", s.options.tls.Value().port) +} + +func TestNewServerWithSelfSignedCert(t *testing.T) { + s := NewServer(nil, WithSelfSignedCert("pub.key", "priv.key")) + assert.Equal(t, "pub.key", s.options.tls.Value().pubKey) + assert.Equal(t, "priv.key", s.options.tls.Value().privKey) + assert.NotNil(t, s.options.tls) +} + +func TestNewHTTPRedirServer(t *testing.T) { + m := &autocert.Manager{} + s := newHTTPRedirServer(m) + assert.Equal(t, ":80", s.Addr) +} diff --git a/http/wrapper.go b/http/wrapper.go index 558dc79474..e8ae24dcd9 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -28,14 +28,14 @@ var _ client.DB = (*Wrapper)(nil) // single struct that implements the client.DB interface. type Wrapper struct { db client.DB - server *Server + handler *handler client *Client httpServer *httptest.Server } func NewWrapper(db client.DB) (*Wrapper, error) { - server := NewServer(db) - httpServer := httptest.NewServer(server) + handler := newHandler(db, serverOptions{}) + httpServer := httptest.NewServer(handler) client, err := NewClient(httpServer.URL) if err != nil { @@ -44,7 +44,7 @@ func NewWrapper(db client.DB) (*Wrapper, error) { return &Wrapper{ db, - server, + handler, client, httpServer, }, nil @@ -127,7 +127,7 @@ func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, err if err != nil { return nil, err } - server, ok := w.server.txs.Load(client.ID()) + server, ok := w.handler.txs.Load(client.ID()) if !ok { return nil, fmt.Errorf("failed to get server transaction") } @@ -139,7 +139,7 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor if err != nil { return nil, err } - server, ok := w.server.txs.Load(client.ID()) + server, ok := w.handler.txs.Load(client.ID()) if !ok { return nil, fmt.Errorf("failed to get server transaction") } From e3823d3e6c528f7a4d86f9b41cd62147868f1326 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 28 Aug 2023 17:56:07 -0700 Subject: [PATCH 040/107] remove api/http and net/api packages. 
update cli with new http client --- api/http/errors.go | 89 - api/http/errors_test.go | 169 -- api/http/handler.go | 139 - api/http/handler_test.go | 312 --- api/http/handlerfuncs.go | 472 ---- api/http/handlerfuncs_backup.go | 123 - api/http/handlerfuncs_backup_test.go | 623 ----- api/http/handlerfuncs_index.go | 144 - api/http/handlerfuncs_index_test.go | 239 -- api/http/handlerfuncs_test.go | 1184 --------- api/http/http.go | 18 - api/http/logger.go | 84 - api/http/logger_test.go | 124 - api/http/playground.go | 28 - api/http/request_result.go | 31 - api/http/router.go | 83 - api/http/router_test.go | 50 - api/http/server.go | 322 --- api/http/server_test.go | 251 -- cli/backup_export.go | 65 +- cli/backup_export_test.go | 300 --- cli/backup_import.go | 65 +- cli/backup_import_test.go | 129 - cli/blocks_get.go | 80 - cli/cli.go | 162 +- cli/cli_test.go | 59 - cli/dump.go | 57 +- cli/index_create.go | 87 +- cli/index_create_test.go | 244 -- cli/index_drop.go | 88 +- cli/index_drop_test.go | 121 - cli/index_list.go | 81 +- cli/index_list_test.go | 145 -- cli/{blocks.go => p2p.go} | 10 +- cli/p2p_collection_add.go | 34 +- cli/p2p_collection_getall.go | 39 +- cli/p2p_collection_remove.go | 34 +- cli/{replicator.go => p2p_replicator.go} | 2 +- cli/p2p_replicator_delete.go | 42 + cli/p2p_replicator_getall.go | 37 + cli/p2p_replicator_set.go | 52 + cli/peerid.go | 101 - cli/peerid_test.go | 100 - cli/ping.go | 79 - cli/replicator_delete.go | 81 - cli/replicator_getall.go | 82 - cli/replicator_set.go | 86 - cli/request.go | 107 +- cli/schema_add.go | 126 +- cli/schema_list.go | 89 - cli/schema_migration_get.go | 65 +- cli/schema_migration_set.go | 127 +- cli/schema_patch.go | 117 +- cli/{rpc.go => server_dump.go} | 23 +- cli/serverdump.go | 75 - cli/start.go | 15 +- cli/version_test.go | 89 - cmd/defradb/main.go | 12 +- cmd/genclidocs/genclidocs.go | 26 +- cmd/genmanpages/main.go | 34 +- http/handler.go | 5 +- net/api/client/client.go | 169 -- net/api/pb/Makefile | 18 - net/api/pb/api.pb.go | 1100 -------- net/api/pb/api.proto | 82 - net/api/pb/api_grpc.pb.go | 300 --- net/api/pb/api_vtproto.pb.go | 2316 ----------------- .../cli/client_backup_export_test.go | 118 - .../cli/client_backup_import_test.go | 109 - tests/integration/cli/client_blocks_test.go | 41 - .../cli/client_index_create_test.go | 102 - .../integration/cli/client_index_drop_test.go | 118 - .../integration/cli/client_index_list_test.go | 96 - tests/integration/cli/client_peerid_test.go | 34 - tests/integration/cli/client_ping_test.go | 63 - tests/integration/cli/client_query_test.go | 102 - .../cli/client_rpc_p2p_collection_test.go | 13 - .../cli/client_rpc_replicator_test.go | 35 - .../integration/cli/client_schema_add_test.go | 53 - .../cli/client_schema_migration_get_test.go | 110 - .../cli/client_schema_migration_set_test.go | 244 -- .../cli/client_schema_patch_test.go | 53 - tests/integration/cli/init_test.go | 51 - tests/integration/cli/log_config_test.go | 116 - tests/integration/cli/root_test.go | 43 - tests/integration/cli/serverdump_test.go | 28 - tests/integration/cli/start_test.go | 90 - tests/integration/cli/utils.go | 263 -- tests/integration/cli/version_test.go | 46 - version/version.go | 2 +- 90 files changed, 358 insertions(+), 13414 deletions(-) delete mode 100644 api/http/errors.go delete mode 100644 api/http/errors_test.go delete mode 100644 api/http/handler.go delete mode 100644 api/http/handler_test.go delete mode 100644 api/http/handlerfuncs.go delete mode 100644 api/http/handlerfuncs_backup.go delete mode 
100644 api/http/handlerfuncs_backup_test.go delete mode 100644 api/http/handlerfuncs_index.go delete mode 100644 api/http/handlerfuncs_index_test.go delete mode 100644 api/http/handlerfuncs_test.go delete mode 100644 api/http/http.go delete mode 100644 api/http/logger.go delete mode 100644 api/http/logger_test.go delete mode 100644 api/http/playground.go delete mode 100644 api/http/request_result.go delete mode 100644 api/http/router.go delete mode 100644 api/http/router_test.go delete mode 100644 api/http/server.go delete mode 100644 api/http/server_test.go delete mode 100644 cli/backup_export_test.go delete mode 100644 cli/backup_import_test.go delete mode 100644 cli/blocks_get.go delete mode 100644 cli/cli_test.go delete mode 100644 cli/index_create_test.go delete mode 100644 cli/index_drop_test.go delete mode 100644 cli/index_list_test.go rename cli/{blocks.go => p2p.go} (69%) rename cli/{replicator.go => p2p_replicator.go} (93%) create mode 100644 cli/p2p_replicator_delete.go create mode 100644 cli/p2p_replicator_getall.go create mode 100644 cli/p2p_replicator_set.go delete mode 100644 cli/peerid.go delete mode 100644 cli/peerid_test.go delete mode 100644 cli/ping.go delete mode 100644 cli/replicator_delete.go delete mode 100644 cli/replicator_getall.go delete mode 100644 cli/replicator_set.go delete mode 100644 cli/schema_list.go rename cli/{rpc.go => server_dump.go} (50%) delete mode 100644 cli/serverdump.go delete mode 100644 cli/version_test.go delete mode 100644 net/api/client/client.go delete mode 100644 net/api/pb/Makefile delete mode 100644 net/api/pb/api.pb.go delete mode 100644 net/api/pb/api.proto delete mode 100644 net/api/pb/api_grpc.pb.go delete mode 100644 net/api/pb/api_vtproto.pb.go delete mode 100644 tests/integration/cli/client_backup_export_test.go delete mode 100644 tests/integration/cli/client_backup_import_test.go delete mode 100644 tests/integration/cli/client_blocks_test.go delete mode 100644 tests/integration/cli/client_index_create_test.go delete mode 100644 tests/integration/cli/client_index_drop_test.go delete mode 100644 tests/integration/cli/client_index_list_test.go delete mode 100644 tests/integration/cli/client_peerid_test.go delete mode 100644 tests/integration/cli/client_ping_test.go delete mode 100644 tests/integration/cli/client_query_test.go delete mode 100644 tests/integration/cli/client_rpc_p2p_collection_test.go delete mode 100644 tests/integration/cli/client_rpc_replicator_test.go delete mode 100644 tests/integration/cli/client_schema_add_test.go delete mode 100644 tests/integration/cli/client_schema_migration_get_test.go delete mode 100644 tests/integration/cli/client_schema_migration_set_test.go delete mode 100644 tests/integration/cli/client_schema_patch_test.go delete mode 100644 tests/integration/cli/init_test.go delete mode 100644 tests/integration/cli/log_config_test.go delete mode 100644 tests/integration/cli/root_test.go delete mode 100644 tests/integration/cli/serverdump_test.go delete mode 100644 tests/integration/cli/start_test.go delete mode 100644 tests/integration/cli/utils.go delete mode 100644 tests/integration/cli/version_test.go diff --git a/api/http/errors.go b/api/http/errors.go deleted file mode 100644 index 4acf9abd25..0000000000 --- a/api/http/errors.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "context" - "fmt" - "net/http" - "os" - "strings" - - "github.com/sourcenetwork/defradb/errors" -) - -var env = os.Getenv("DEFRA_ENV") - -// Errors returnable from this package. -// -// This list is incomplete. Undefined errors may also be returned. -// Errors returned from this package may be tested against these errors with errors.Is. -var ( - ErrNoListener = errors.New("cannot serve with no listener") - ErrSchema = errors.New("base must start with the http or https scheme") - ErrDatabaseNotAvailable = errors.New("no database available") - ErrFormNotSupported = errors.New("content type application/x-www-form-urlencoded not yet supported") - ErrBodyEmpty = errors.New("body cannot be empty") - ErrMissingGQLRequest = errors.New("missing GraphQL request") - ErrPeerIdUnavailable = errors.New("no PeerID available. P2P might be disabled") - ErrStreamingUnsupported = errors.New("streaming unsupported") - ErrNoEmail = errors.New("email address must be specified for tls with autocert") - ErrPayloadFormat = errors.New("invalid payload format") - ErrMissingNewKey = errors.New("missing _newKey for imported doc") -) - -// ErrorResponse is the GQL top level object holding error items for the response payload. -type ErrorResponse struct { - Errors []ErrorItem `json:"errors"` -} - -// ErrorItem hold an error message and extensions that might be pertinent to the request. -type ErrorItem struct { - Message string `json:"message"` - Extensions extensions `json:"extensions,omitempty"` -} - -type extensions struct { - Status int `json:"status"` - HTTPError string `json:"httpError"` - Stack string `json:"stack,omitempty"` -} - -func handleErr(ctx context.Context, rw http.ResponseWriter, err error, status int) { - if status == http.StatusInternalServerError { - log.ErrorE(ctx, http.StatusText(status), err) - } - - sendJSON( - ctx, - rw, - ErrorResponse{ - Errors: []ErrorItem{ - { - Message: err.Error(), - Extensions: extensions{ - Status: status, - HTTPError: http.StatusText(status), - Stack: formatError(err), - }, - }, - }, - }, - status, - ) -} - -func formatError(err error) string { - if strings.ToLower(env) == "dev" || strings.ToLower(env) == "development" { - return fmt.Sprintf("[DEV] %+v\n", err) - } - return "" -} diff --git a/api/http/errors_test.go b/api/http/errors_test.go deleted file mode 100644 index 9e4a5885c8..0000000000 --- a/api/http/errors_test.go +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package http - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" -) - -func CleanupEnv() { - env = "" -} - -func TestFormatError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "prod" - s := formatError(errors.New("test error")) - assert.Equal(t, "", s) - - env = "dev" - s = formatError(errors.New("test error")) - lines := strings.Split(s, "\n") - assert.Equal(t, "[DEV] test error", lines[0]) -} - -func TestHandleErrOnBadRequest(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - f := func(rw http.ResponseWriter, req *http.Request) { - handleErr(req.Context(), rw, errors.New("test error"), http.StatusBadRequest) - } - req, err := http.NewRequest("GET", "/test", nil) - if err != nil { - t.Fatal(err) - } - - rec := httptest.NewRecorder() - - f(rec, req) - - resp := rec.Result() - - errResponse := ErrorResponse{} - err = json.NewDecoder(resp.Body).Decode(&errResponse) - if err != nil { - t.Fatal(err) - } - - if len(errResponse.Errors) != 1 { - t.Fatal("expecting exactly one error") - } - - assert.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - assert.Equal(t, http.StatusText(http.StatusBadRequest), errResponse.Errors[0].Extensions.HTTPError) - assert.Equal(t, "test error", errResponse.Errors[0].Message) - assert.Contains(t, errResponse.Errors[0].Extensions.Stack, "[DEV] test error") -} - -func TestHandleErrOnInternalServerError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - f := func(rw http.ResponseWriter, req *http.Request) { - handleErr(req.Context(), rw, errors.New("test error"), http.StatusInternalServerError) - } - req, err := http.NewRequest("GET", "/test", nil) - if err != nil { - t.Fatal(err) - } - - rec := httptest.NewRecorder() - - f(rec, req) - - resp := rec.Result() - - errResponse := ErrorResponse{} - err = json.NewDecoder(resp.Body).Decode(&errResponse) - if err != nil { - t.Fatal(err) - } - - if len(errResponse.Errors) != 1 { - t.Fatal("expecting exactly one error") - } - assert.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - assert.Equal(t, http.StatusText(http.StatusInternalServerError), errResponse.Errors[0].Extensions.HTTPError) - assert.Equal(t, "test error", errResponse.Errors[0].Message) - assert.Contains(t, errResponse.Errors[0].Extensions.Stack, "[DEV] test error") -} - -func TestHandleErrOnNotFound(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - f := func(rw http.ResponseWriter, req *http.Request) { - handleErr(req.Context(), rw, errors.New("test error"), http.StatusNotFound) - } - req, err := http.NewRequest("GET", "/test", nil) - if err != nil { - t.Fatal(err) - } - - rec := httptest.NewRecorder() - - f(rec, req) - - resp := rec.Result() - - errResponse := ErrorResponse{} - err = json.NewDecoder(resp.Body).Decode(&errResponse) - if err != nil { - t.Fatal(err) - } - - if len(errResponse.Errors) != 1 { - t.Fatal("expecting exactly one error") - } - - assert.Equal(t, http.StatusNotFound, errResponse.Errors[0].Extensions.Status) - assert.Equal(t, http.StatusText(http.StatusNotFound), errResponse.Errors[0].Extensions.HTTPError) - assert.Equal(t, "test error", errResponse.Errors[0].Message) - assert.Contains(t, errResponse.Errors[0].Extensions.Stack, "[DEV] test error") -} - -func TestHandleErrOnDefault(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - f := func(rw http.ResponseWriter, req *http.Request) { - handleErr(req.Context(), rw, 
errors.New("unauthorized"), http.StatusUnauthorized) - } - req, err := http.NewRequest("GET", "/test", nil) - if err != nil { - t.Fatal(err) - } - - rec := httptest.NewRecorder() - - f(rec, req) - - resp := rec.Result() - - errResponse := ErrorResponse{} - err = json.NewDecoder(resp.Body).Decode(&errResponse) - if err != nil { - t.Fatal(err) - } - - if len(errResponse.Errors) != 1 { - t.Fatal("expecting exactly one error") - } - - assert.Equal(t, http.StatusUnauthorized, errResponse.Errors[0].Extensions.Status) - assert.Equal(t, http.StatusText(http.StatusUnauthorized), errResponse.Errors[0].Extensions.HTTPError) - assert.Equal(t, "unauthorized", errResponse.Errors[0].Message) - assert.Contains(t, errResponse.Errors[0].Extensions.Stack, "[DEV] unauthorized") -} diff --git a/api/http/handler.go b/api/http/handler.go deleted file mode 100644 index aa7b828f29..0000000000 --- a/api/http/handler.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "context" - "encoding/json" - "fmt" - "io" - "net/http" - - "github.com/go-chi/chi/v5" - "github.com/go-chi/cors" - "github.com/pkg/errors" - - "github.com/sourcenetwork/defradb/client" -) - -type handler struct { - db client.DB - *chi.Mux - - // user configurable options - options serverOptions -} - -// context variables -type ( - ctxDB struct{} - ctxPeerID struct{} -) - -// DataResponse is the GQL top level object holding data for the response payload. -type DataResponse struct { - Data any `json:"data"` -} - -// simpleDataResponse is a helper function that returns a DataResponse struct. -// Odd arguments are the keys and must be strings otherwise they are ignored. -// Even arguments are the values associated with the previous key. -// Odd arguments are also ignored if there are no following arguments. -func simpleDataResponse(args ...any) DataResponse { - data := make(map[string]any) - - for i := 0; i < len(args); i += 2 { - if len(args) >= i+2 { - switch a := args[i].(type) { - case string: - data[a] = args[i+1] - - default: - continue - } - } - } - - return DataResponse{ - Data: data, - } -} - -// newHandler returns a handler with the router instantiated. 
-func newHandler(db client.DB, opts serverOptions) *handler { - mux := chi.NewRouter() - mux.Use(loggerMiddleware) - - if len(opts.allowedOrigins) != 0 { - mux.Use(cors.Handler(cors.Options{ - AllowedOrigins: opts.allowedOrigins, - AllowedMethods: []string{"GET", "POST", "PATCH", "OPTIONS"}, - AllowedHeaders: []string{"Content-Type"}, - MaxAge: 300, - })) - } - - mux.Use(func(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - if opts.tls.HasValue() { - rw.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains") - } - ctx := context.WithValue(req.Context(), ctxDB{}, db) - if opts.peerID != "" { - ctx = context.WithValue(ctx, ctxPeerID{}, opts.peerID) - } - next.ServeHTTP(rw, req.WithContext(ctx)) - }) - }) - - return setRoutes(&handler{ - Mux: mux, - db: db, - options: opts, - }) -} - -func getJSON(req *http.Request, v any) error { - err := json.NewDecoder(req.Body).Decode(v) - if err != nil { - return errors.Wrap(err, "unmarshal error") - } - return nil -} - -func sendJSON(ctx context.Context, rw http.ResponseWriter, v any, code int) { - rw.Header().Set("Content-Type", "application/json") - - b, err := json.Marshal(v) - if err != nil { - log.Error(ctx, fmt.Sprintf("Error while encoding JSON: %v", err)) - rw.WriteHeader(http.StatusInternalServerError) - if _, err := io.WriteString(rw, `{"error": "Internal server error"}`); err != nil { - log.Error(ctx, err.Error()) - } - return - } - - rw.WriteHeader(code) - if _, err = rw.Write(b); err != nil { - rw.WriteHeader(http.StatusInternalServerError) - log.Error(ctx, err.Error()) - } -} - -func dbFromContext(ctx context.Context) (client.DB, error) { - db, ok := ctx.Value(ctxDB{}).(client.DB) - if !ok { - return nil, ErrDatabaseNotAvailable - } - - return db, nil -} diff --git a/api/http/handler_test.go b/api/http/handler_test.go deleted file mode 100644 index 2015c7a0ba..0000000000 --- a/api/http/handler_test.go +++ /dev/null @@ -1,312 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package http - -import ( - "bytes" - "context" - "io" - "math" - "net/http" - "net/http/httptest" - "path" - "testing" - - badger "github.com/dgraph-io/badger/v4" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" - - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" - "github.com/sourcenetwork/defradb/db" - "github.com/sourcenetwork/defradb/logging" -) - -func TestSimpleDataResponse(t *testing.T) { - resp := simpleDataResponse("key", "value", "key2", "value2") - switch v := resp.Data.(type) { - case map[string]any: - assert.Equal(t, "value", v["key"]) - assert.Equal(t, "value2", v["key2"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } - - resp2 := simpleDataResponse("key", "value", "key2") - switch v := resp2.Data.(type) { - case map[string]any: - assert.Equal(t, "value", v["key"]) - assert.Equal(t, nil, v["key2"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } - - resp3 := simpleDataResponse("key", "value", 2, "value2") - switch v := resp3.Data.(type) { - case map[string]any: - assert.Equal(t, "value", v["key"]) - assert.Equal(t, nil, v["2"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } -} - -func TestNewHandlerWithLogger(t *testing.T) { - h := newHandler(nil, serverOptions{}) - - dir := t.TempDir() - - // send logs to temp file so we can inspect it - logFile := path.Join(dir, "http_test.log") - log.ApplyConfig(logging.Config{ - EncoderFormat: logging.NewEncoderFormatOption(logging.JSON), - OutputPaths: []string{logFile}, - }) - - req, err := http.NewRequest("GET", PingPath, nil) - if err != nil { - t.Fatal(err) - } - - rec := httptest.NewRecorder() - lrw := newLoggingResponseWriter(rec) - h.ServeHTTP(lrw, req) - assert.Equal(t, 200, rec.Result().StatusCode) - - // inspect the log file - kv, err := readLog(logFile) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "http", kv["logger"]) -} - -func TestGetJSON(t *testing.T) { - var obj struct { - Name string - } - - jsonStr := ` -{ - "Name": "John Doe" -}` - - req, err := http.NewRequest("POST", "/ping", bytes.NewBuffer([]byte(jsonStr))) - if err != nil { - t.Fatal(err) - } - - err = getJSON(req, &obj) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "John Doe", obj.Name) -} - -func TestGetJSONWithError(t *testing.T) { - var obj struct { - Name string - } - - jsonStr := ` -{ - "Name": 10 -}` - - req, err := http.NewRequest("POST", "/ping", bytes.NewBuffer([]byte(jsonStr))) - if err != nil { - t.Fatal(err) - } - - err = getJSON(req, &obj) - assert.Error(t, err) -} - -func TestSendJSONWithNoErrors(t *testing.T) { - obj := struct { - Name string - }{ - Name: "John Doe", - } - - rec := httptest.NewRecorder() - - sendJSON(context.Background(), rec, obj, 200) - - body, err := io.ReadAll(rec.Result().Body) - if err != nil { - t.Fatal(err) - } - assert.Equal(t, []byte("{\"Name\":\"John Doe\"}"), body) -} - -func TestSendJSONWithMarshallFailure(t *testing.T) { - rec := httptest.NewRecorder() - - sendJSON(context.Background(), rec, math.Inf(1), 200) - - assert.Equal(t, http.StatusInternalServerError, rec.Result().StatusCode) -} - -type loggerTest struct { - loggingResponseWriter -} - -func (lt *loggerTest) Write(b []byte) (int, error) { - return 0, errors.New("this write will fail") -} - -func TestSendJSONWithMarshallFailureAndWriteFailer(t *testing.T) { - rec := httptest.NewRecorder() - lrw := loggerTest{} - lrw.ResponseWriter = 
rec - - sendJSON(context.Background(), &lrw, math.Inf(1), 200) - - assert.Equal(t, http.StatusInternalServerError, rec.Result().StatusCode) -} - -func TestSendJSONWithWriteFailure(t *testing.T) { - obj := struct { - Name string - }{ - Name: "John Doe", - } - - rec := httptest.NewRecorder() - lrw := loggerTest{} - lrw.ResponseWriter = rec - - sendJSON(context.Background(), &lrw, obj, 200) - - assert.Equal(t, http.StatusInternalServerError, lrw.statusCode) -} - -func TestDbFromContext(t *testing.T) { - _, err := dbFromContext(context.Background()) - assert.Error(t, err) - - opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} - rootstore, err := badgerds.NewDatastore("", &opts) - if err != nil { - t.Fatal(err) - } - - var options []db.Option - ctx := context.Background() - - defra, err := db.NewDB(ctx, rootstore, options...) - if err != nil { - t.Fatal(err) - } - - reqCtx := context.WithValue(ctx, ctxDB{}, defra) - - _, err = dbFromContext(reqCtx) - assert.NoError(t, err) -} - -func TestCORSRequest(t *testing.T) { - cases := []struct { - name string - method string - reqHeaders map[string]string - resHeaders map[string]string - }{ - { - "DisallowedOrigin", - "OPTIONS", - map[string]string{ - "Origin": "https://notsource.network", - }, - map[string]string{ - "Vary": "Origin", - }, - }, - { - "AllowedOrigin", - "OPTIONS", - map[string]string{ - "Origin": "https://source.network", - }, - map[string]string{ - "Access-Control-Allow-Origin": "https://source.network", - "Vary": "Origin", - }, - }, - } - - s := NewServer(nil, WithAllowedOrigins("https://source.network")) - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - req, err := http.NewRequest(c.method, PingPath, nil) - if err != nil { - t.Fatal(err) - } - - for header, value := range c.reqHeaders { - req.Header.Add(header, value) - } - - rec := httptest.NewRecorder() - - s.Handler.ServeHTTP(rec, req) - - for header, value := range c.resHeaders { - assert.Equal(t, value, rec.Result().Header.Get(header)) - } - }) - } -} - -func TestTLSRequestResponseHeader(t *testing.T) { - cases := []struct { - name string - method string - reqHeaders map[string]string - resHeaders map[string]string - }{ - { - "TLSHeader", - "GET", - map[string]string{}, - map[string]string{ - "Strict-Transport-Security": "max-age=63072000; includeSubDomains", - }, - }, - } - dir := t.TempDir() - - s := NewServer(nil, WithTLS(), WithAddress("example.com"), WithRootDir(dir)) - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - req, err := http.NewRequest(c.method, PingPath, nil) - if err != nil { - t.Fatal(err) - } - - for header, value := range c.reqHeaders { - req.Header.Add(header, value) - } - - rec := httptest.NewRecorder() - - s.Handler.ServeHTTP(rec, req) - - for header, value := range c.resHeaders { - assert.Equal(t, value, rec.Result().Header.Get(header)) - } - }) - } -} diff --git a/api/http/handlerfuncs.go b/api/http/handlerfuncs.go deleted file mode 100644 index e4163de05f..0000000000 --- a/api/http/handlerfuncs.go +++ /dev/null @@ -1,472 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package http - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "mime" - "net/http" - - "github.com/go-chi/chi/v5" - dshelp "github.com/ipfs/boxo/datastore/dshelp" - dag "github.com/ipfs/boxo/ipld/merkledag" - "github.com/ipfs/go-cid" - ds "github.com/ipfs/go-datastore" - "github.com/multiformats/go-multihash" - - "github.com/sourcenetwork/defradb/client" - corecrdt "github.com/sourcenetwork/defradb/core/crdt" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/events" -) - -const ( - contentTypeJSON = "application/json" - contentTypeGraphQL = "application/graphql" - contentTypeFormURLEncoded = "application/x-www-form-urlencoded" -) - -func rootHandler(rw http.ResponseWriter, req *http.Request) { - sendJSON( - req.Context(), - rw, - simpleDataResponse( - "response", "Welcome to the DefraDB HTTP API. Use /graphql to send queries to the database."+ - " Read the documentation at https://docs.source.network/.", - ), - http.StatusOK, - ) -} - -func pingHandler(rw http.ResponseWriter, req *http.Request) { - sendJSON( - req.Context(), - rw, - simpleDataResponse("response", "pong"), - http.StatusOK, - ) -} - -func dumpHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - err = db.PrintDump(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("response", "ok"), - http.StatusOK, - ) -} - -type gqlRequest struct { - Request string `json:"query"` -} - -func execGQLHandler(rw http.ResponseWriter, req *http.Request) { - request := req.URL.Query().Get("query") - if request == "" { - // extract the media type from the content-type header - contentType, _, err := mime.ParseMediaType(req.Header.Get("Content-Type")) - // mime.ParseMediaType will return an error (mime: no media type) - // if there is no media type set (i.e. application/json). - // This however is not a failing condition as not setting the content-type header - // should still make for a valid request and hit our default switch case. 
- if err != nil && err.Error() != "mime: no media type" { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - switch contentType { - case contentTypeJSON: - gqlReq := gqlRequest{} - - err := getJSON(req, &gqlReq) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - request = gqlReq.Request - - case contentTypeFormURLEncoded: - handleErr( - req.Context(), - rw, - ErrFormNotSupported, - http.StatusBadRequest, - ) - return - - case contentTypeGraphQL: - fallthrough - - default: - if req.Body == nil { - handleErr(req.Context(), rw, ErrBodyEmpty, http.StatusBadRequest) - return - } - body, err := readWithLimit(req.Body, rw) - if err != nil { - handleErr(req.Context(), rw, errors.WithStack(err), http.StatusInternalServerError) - return - } - request = string(body) - } - } - - // if at this point request is still empty, return an error - if request == "" { - handleErr(req.Context(), rw, ErrMissingGQLRequest, http.StatusBadRequest) - return - } - - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - result := db.ExecRequest(req.Context(), request) - - if result.Pub != nil { - subscriptionHandler(result.Pub, rw, req) - return - } - - sendJSON(req.Context(), rw, newGQLResult(result.GQL), http.StatusOK) -} - -type fieldResponse struct { - ID string `json:"id"` - Name string `json:"name"` - Kind string `json:"kind"` - Internal bool `json:"internal"` -} - -type collectionResponse struct { - Name string `json:"name"` - ID string `json:"id"` - VersionID string `json:"version_id"` - Fields []fieldResponse `json:"fields,omitempty"` -} - -func listSchemaHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - cols, err := db.GetAllCollections(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - colResp := make([]collectionResponse, len(cols)) - for i, col := range cols { - var fields []fieldResponse - for _, field := range col.Schema().Fields { - fieldRes := fieldResponse{ - ID: field.ID.String(), - Name: field.Name, - Internal: field.IsInternal(), - } - if field.IsObjectArray() { - fieldRes.Kind = fmt.Sprintf("[%s]", field.Schema) - } else if field.IsObject() { - fieldRes.Kind = field.Schema - } else { - fieldRes.Kind = field.Kind.String() - } - fields = append(fields, fieldRes) - } - colResp[i] = collectionResponse{ - Name: col.Name(), - ID: col.SchemaID(), - VersionID: col.Schema().VersionID, - Fields: fields, - } - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("collections", colResp), - http.StatusOK, - ) -} - -func loadSchemaHandler(rw http.ResponseWriter, req *http.Request) { - sdl, err := readWithLimit(req.Body, rw) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - colDescs, err := db.AddSchema(req.Context(), string(sdl)) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - colResp := make([]collectionResponse, len(colDescs)) - for i, desc := range colDescs { - col, err := db.GetCollectionByName(req.Context(), desc.Name) - if err != nil { - handleErr(req.Context(), rw, err, 
http.StatusInternalServerError) - return - } - colResp[i] = collectionResponse{ - Name: col.Name(), - ID: col.SchemaID(), - VersionID: col.Schema().VersionID, - } - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success", "collections", colResp), - http.StatusOK, - ) -} - -func patchSchemaHandler(rw http.ResponseWriter, req *http.Request) { - patch, err := readWithLimit(req.Body, rw) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - err = db.PatchSchema(req.Context(), string(patch)) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success"), - http.StatusOK, - ) -} - -func setMigrationHandler(rw http.ResponseWriter, req *http.Request) { - cfgStr, err := readWithLimit(req.Body, rw) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - var cfg client.LensConfig - err = json.Unmarshal(cfgStr, &cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - err = db.LensRegistry().SetMigration(req.Context(), cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success"), - http.StatusOK, - ) -} - -func getMigrationHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - cfgs, err := db.LensRegistry().Config(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("configuration", cfgs), - http.StatusOK, - ) -} - -func getBlockHandler(rw http.ResponseWriter, req *http.Request) { - cidStr := chi.URLParam(req, "cid") - - // try to parse CID - cID, err := cid.Decode(cidStr) - if err != nil { - // If we can't try to parse DSKeyToCID - // return error if we still can't - key := ds.NewKey(cidStr) - var hash multihash.Multihash - hash, err = dshelp.DsKeyToMultihash(key) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - cID = cid.NewCidV1(cid.Raw, hash) - } - - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - block, err := db.Blockstore().Get(req.Context(), cID) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - nd, err := dag.DecodeProtobuf(block.RawData()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - buf, err := nd.MarshalJSON() - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - reg := corecrdt.LWWRegister{} - delta, err := reg.DeltaDecode(nd) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - data, err := delta.Marshal() - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - 
sendJSON( - req.Context(), - rw, - simpleDataResponse( - "block", string(buf), - "delta", string(data), - "val", delta.Value(), - ), - http.StatusOK, - ) -} - -func peerIDHandler(rw http.ResponseWriter, req *http.Request) { - peerID, ok := req.Context().Value(ctxPeerID{}).(string) - if !ok || peerID == "" { - handleErr(req.Context(), rw, ErrPeerIdUnavailable, http.StatusNotFound) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse( - "peerID", peerID, - ), - http.StatusOK, - ) -} - -func subscriptionHandler(pub *events.Publisher[events.Update], rw http.ResponseWriter, req *http.Request) { - flusher, ok := rw.(http.Flusher) - if !ok { - handleErr(req.Context(), rw, ErrStreamingUnsupported, http.StatusInternalServerError) - return - } - - rw.Header().Set("Content-Type", "text/event-stream") - rw.Header().Set("Cache-Control", "no-cache") - rw.Header().Set("Connection", "keep-alive") - - for { - select { - case <-req.Context().Done(): - pub.Unsubscribe() - return - case s, open := <-pub.Stream(): - if !open { - return - } - b, err := json.Marshal(s) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - fmt.Fprintf(rw, "data: %s\n\n", b) - flusher.Flush() - } - } -} - -// maxBytes is an arbitrary limit to prevent unbounded message bodies being sent and read. -const maxBytes int64 = 100 * (1 << (10 * 2)) // 100MB - -// readWithLimit reads from the reader until either EoF or the maximum number of bytes have been read. -func readWithLimit(reader io.ReadCloser, rw http.ResponseWriter) ([]byte, error) { - reader = http.MaxBytesReader(rw, reader, maxBytes) - - var buf bytes.Buffer - _, err := io.Copy(&buf, reader) - if err != nil { - return nil, err - } - - return buf.Bytes(), nil -} diff --git a/api/http/handlerfuncs_backup.go b/api/http/handlerfuncs_backup.go deleted file mode 100644 index 3961263995..0000000000 --- a/api/http/handlerfuncs_backup.go +++ /dev/null @@ -1,123 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
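subscriptionHandler above streams GraphQL subscription results as Server-Sent Events: one "data: <json>" frame per update, flushed after every write, until the request context is cancelled or the publisher stream closes. Below is a minimal sketch of a consumer using only the standard library; the host, port, endpoint path, content type and subscription document are illustrative assumptions, not taken from this patch.

package main

import (
	"bufio"
	"fmt"
	"log"
	"net/http"
	"strings"
)

func main() {
	// Assumed address and route; adjust to wherever the GraphQL handler is mounted.
	resp, err := http.Post(
		"http://localhost:9181/graphql",
		"application/graphql", // assumed value of contentTypeGraphQL
		strings.NewReader(`subscription { User { _key name } }`),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// The handler writes "data: <json>\n\n" per event, so scanning by line and
	// stripping the "data: " prefix yields one JSON-encoded update at a time.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		if data, ok := strings.CutPrefix(scanner.Text(), "data: "); ok {
			fmt.Println(data)
		}
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}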
- -package http - -import ( - "context" - "net/http" - "os" - "strings" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/errors" -) - -func exportHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - cfg := &client.BackupConfig{} - err = getJSON(req, cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - err = validateBackupConfig(req.Context(), cfg, db) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - err = db.BasicExport(req.Context(), cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success"), - http.StatusOK, - ) -} - -func importHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - cfg := &client.BackupConfig{} - err = getJSON(req, cfg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - err = validateBackupConfig(req.Context(), cfg, db) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - err = db.BasicImport(req.Context(), cfg.Filepath) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success"), - http.StatusOK, - ) -} - -func validateBackupConfig(ctx context.Context, cfg *client.BackupConfig, db client.DB) error { - if !isValidPath(cfg.Filepath) { - return errors.New("invalid file path") - } - - if cfg.Format != "" && strings.ToLower(cfg.Format) != "json" { - return errors.New("only JSON format is supported at the moment") - } - for _, colName := range cfg.Collections { - _, err := db.GetCollectionByName(ctx, colName) - if err != nil { - return errors.Wrap("collection does not exist", err) - } - } - return nil -} - -func isValidPath(filepath string) bool { - // if a file exists, return true - if _, err := os.Stat(filepath); err == nil { - return true - } - - // if not, attempt to write to the path and if successful, - // remove the file and return true - var d []byte - if err := os.WriteFile(filepath, d, 0o644); err == nil { - _ = os.Remove(filepath) - return true - } - - return false -} diff --git a/api/http/handlerfuncs_backup_test.go b/api/http/handlerfuncs_backup_test.go deleted file mode 100644 index 67af6015a1..0000000000 --- a/api/http/handlerfuncs_backup_test.go +++ /dev/null @@ -1,623 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
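exportHandler and importHandler above both decode a client.BackupConfig from the request body, reject any format other than JSON, and validate the file path and collection names before calling BasicExport or BasicImport. A minimal sketch of an export call follows; the host, port, route and JSON key names are assumptions chosen to mirror the struct fields, not confirmed by this patch.

package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
)

func main() {
	// Keys are assumed to mirror client.BackupConfig: a writable file path,
	// an optional "json" format and an optional list of collection names.
	cfg := map[string]any{
		"filepath":    "/tmp/defradb-backup.json",
		"format":      "json",
		"collections": []string{"User"},
	}
	body, err := json.Marshal(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Assumed address and route; adjust to wherever ExportPath is mounted.
	resp, err := http.Post("http://localhost:9181/export", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// A successful export responds with {"data":{"result":"success"}}.
	log.Println(resp.Status)
}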
- -package http - -import ( - "bytes" - "context" - "encoding/json" - "net/http" - "os" - "testing" - - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/mocks" - "github.com/sourcenetwork/defradb/errors" -) - -func TestExportHandler_WithNoDB_NoDatabaseAvailableError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: ExportPath, - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestExportHandler_WithWrongPayload_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - buf := bytes.NewBuffer([]byte("[]")) - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "json: cannot unmarshal array into Go value of type client.BackupConfig") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "unmarshal error: json: cannot unmarshal array into Go value of type client.BackupConfig", errResponse.Errors[0].Message) -} - -func TestExportHandler_WithInvalidFilePath_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - filepath := t.TempDir() + "/some/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "invalid file path") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "invalid file path", errResponse.Errors[0].Message) -} - -func TestExportHandler_WithInvalidFomat_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - Format: "csv", - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "only JSON format is supported at the moment") - require.Equal(t, http.StatusBadRequest, 
errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "only JSON format is supported at the moment", errResponse.Errors[0].Message) -} - -func TestExportHandler_WithInvalidCollection_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - Format: "json", - Collections: []string{"invalid"}, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "collection does not exist: datastore: key not found") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "collection does not exist: datastore: key not found", errResponse.Errors[0].Message) -} - -func TestExportHandler_WithBasicExportError_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - db := mocks.NewDB(t) - testError := errors.New("test error") - db.EXPECT().BasicExport(mock.Anything, mock.Anything).Return(testError) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: db, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "test error") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "test error", errResponse.Errors[0].Message) -} - -func TestExportHandler_AllCollections_NoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - testLoadSchema(t, ctx, defra) - - col, err := defra.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"age": 31, "verified": true, "points": 90, "name": "Bob"}`)) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - respBody := testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 200, - }) - - b, err = os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{"data":{"result":"success"}}`, - string(respBody), - ) - - require.Equal( - t, - `{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","_newKey":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","age":31,"name":"Bob","points":90,"verified":true}]}`, - string(b), - ) -} - -func TestExportHandler_UserCollection_NoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - testLoadSchema(t, ctx, defra) - - col, err 
:= defra.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"age": 31, "verified": true, "points": 90, "name": "Bob"}`)) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - Collections: []string{"User"}, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - respBody := testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 200, - }) - - b, err = os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{"data":{"result":"success"}}`, - string(respBody), - ) - - require.Equal( - t, - `{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","_newKey":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","age":31,"name":"Bob","points":90,"verified":true}]}`, - string(b), - ) -} - -func TestExportHandler_UserCollectionWithModifiedDoc_NoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - testLoadSchema(t, ctx, defra) - - col, err := defra.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"age": 31, "verified": true, "points": 90, "name": "Bob"}`)) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - err = doc.Set("points", 1000) - require.NoError(t, err) - - err = col.Update(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - Collections: []string{"User"}, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - respBody := testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ExportPath, - Body: buf, - ExpectedStatus: 200, - }) - - b, err = os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{"data":{"result":"success"}}`, - string(respBody), - ) - - require.Equal( - t, - `{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","_newKey":"bae-36697142-d46a-57b1-b25e-6336706854ea","age":31,"name":"Bob","points":1000,"verified":true}]}`, - string(b), - ) -} - -func TestImportHandler_WithNoDB_NoDatabaseAvailableError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: ImportPath, - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestImportHandler_WithWrongPayloadFormat_UnmarshalError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - buf := bytes.NewBuffer([]byte(`[]`)) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains( - t, - errResponse.Errors[0].Extensions.Stack, - "json: cannot unmarshal array into Go value of type client.BackupConfig", - ) - 
require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal( - t, - "unmarshal error: json: cannot unmarshal array into Go value of type client.BackupConfig", - errResponse.Errors[0].Message, - ) -} - -func TestImportHandler_WithInvalidFilepath_ReturnError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - filepath := t.TempDir() + "/some/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - Body: buf, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "invalid file path") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "invalid file path", errResponse.Errors[0].Message) -} - -func TestImportHandler_WithDBClosed_DatastoreClosedError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defra.Close(ctx) - - filepath := t.TempDir() + "/test.json" - cfg := client.BackupConfig{ - Filepath: filepath, - } - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "datastore closed") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "datastore closed", errResponse.Errors[0].Message) -} - -func TestImportHandler_WithUnknownCollection_KeyNotFoundError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - filepath := t.TempDir() + "/test.json" - err := os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","age":31,"name":"Bob","points":90,"verified":true}]}`), - 0644, - ) - require.NoError(t, err) - - cfg := client.BackupConfig{ - Filepath: filepath, - } - - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "failed to get collection: datastore: key not found. Name: User") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "failed to get collection: datastore: key not found. 
Name: User", errResponse.Errors[0].Message) -} - -func TestImportHandler_UserCollection_NoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - testLoadSchema(t, ctx, defra) - - filepath := t.TempDir() + "/test.json" - err := os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","_newKey":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","age":31,"name":"Bob","points":90,"verified":true}]}`), - 0644, - ) - require.NoError(t, err) - - cfg := client.BackupConfig{ - Filepath: filepath, - } - - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - resp := DataResponse{} - _ = testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - Body: buf, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, "success", v["result"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } - - doc, err := client.NewDocFromJSON([]byte(`{"age": 31, "verified": true, "points": 90, "name": "Bob"}`)) - require.NoError(t, err) - - col, err := defra.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - importedDoc, err := col.Get(ctx, doc.Key(), false) - require.NoError(t, err) - - require.Equal(t, doc.Key().String(), importedDoc.Key().String()) -} - -func TestImportHandler_WithExistingDoc_DocumentExistError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - testLoadSchema(t, ctx, defra) - - col, err := defra.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"age": 31, "verified": true, "points": 90, "name": "Bob"}`)) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - err = os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","_newKey":"bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab","age":31,"name":"Bob","points":90,"verified":true}]}`), - 0644, - ) - require.NoError(t, err) - - cfg := client.BackupConfig{ - Filepath: filepath, - } - - b, err := json.Marshal(cfg) - require.NoError(t, err) - buf := bytes.NewBuffer(b) - - errResponse := ErrorResponse{} - _ = testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: ImportPath, - QueryParams: map[string]string{"collections": "User"}, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains( - t, - errResponse.Errors[0].Extensions.Stack, - "failed to save a new doc to collection: a document with the given dockey already exists", - ) - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal( - t, - "failed to save a new doc to collection: a document with the given dockey already exists. DocKey: bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", - errResponse.Errors[0].Message, - ) -} diff --git a/api/http/handlerfuncs_index.go b/api/http/handlerfuncs_index.go deleted file mode 100644 index e8d10d900e..0000000000 --- a/api/http/handlerfuncs_index.go +++ /dev/null @@ -1,144 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "net/http" - "strings" - - "github.com/sourcenetwork/defradb/client" -) - -func createIndexHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - var data map[string]string - err = getJSON(req, &data) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - colNameArg := data["collection"] - fieldsArg := data["fields"] - indexNameArg := data["name"] - - col, err := db.GetCollectionByName(req.Context(), colNameArg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - fields := strings.Split(fieldsArg, ",") - fieldDescriptions := make([]client.IndexedFieldDescription, 0, len(fields)) - for _, field := range fields { - fieldDescriptions = append(fieldDescriptions, client.IndexedFieldDescription{Name: field}) - } - indexDesc := client.IndexDescription{ - Name: indexNameArg, - Fields: fieldDescriptions, - } - indexDesc, err = col.CreateIndex(req.Context(), indexDesc) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("index", indexDesc), - http.StatusOK, - ) -} - -func dropIndexHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - var data map[string]string - err = getJSON(req, &data) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusBadRequest) - return - } - - colNameArg := data["collection"] - indexNameArg := data["name"] - - col, err := db.GetCollectionByName(req.Context(), colNameArg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - err = col.DropIndex(req.Context(), indexNameArg) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - sendJSON( - req.Context(), - rw, - simpleDataResponse("result", "success"), - http.StatusOK, - ) -} - -func listIndexHandler(rw http.ResponseWriter, req *http.Request) { - db, err := dbFromContext(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - - queryParams := req.URL.Query() - collectionParam := queryParams.Get("collection") - - if collectionParam == "" { - indexesPerCol, err := db.GetAllIndexes(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - sendJSON( - req.Context(), - rw, - simpleDataResponse("collections", indexesPerCol), - http.StatusOK, - ) - } else { - col, err := db.GetCollectionByName(req.Context(), collectionParam) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - indexes, err := col.GetIndexes(req.Context()) - if err != nil { - handleErr(req.Context(), rw, err, http.StatusInternalServerError) - return - } - sendJSON( - req.Context(), - rw, - simpleDataResponse("indexes", indexes), - http.StatusOK, - ) - } -} diff --git a/api/http/handlerfuncs_index_test.go b/api/http/handlerfuncs_index_test.go deleted file mode 100644 
index 3e82249ef8..0000000000 --- a/api/http/handlerfuncs_index_test.go +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "bytes" - "context" - "net/http" - "net/http/httptest" - "net/url" - "testing" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/mocks" - "github.com/sourcenetwork/defradb/errors" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" -) - -func addDBToContext(t *testing.T, req *http.Request, db *mocks.DB) *http.Request { - if db == nil { - db = mocks.NewDB(t) - } - ctx := context.WithValue(req.Context(), ctxDB{}, db) - return req.WithContext(ctx) -} - -func TestCreateIndexHandler_IfNoDBInContext_ReturnError(t *testing.T) { - handler := http.HandlerFunc(createIndexHandler) - assert.HTTPBodyContains(t, handler, "POST", IndexPath, nil, "no database available") -} - -func TestCreateIndexHandler_IfFailsToParseParams_ReturnError(t *testing.T) { - req, err := http.NewRequest("POST", IndexPath, bytes.NewBuffer([]byte("invalid map"))) - if err != nil { - t.Fatal(err) - } - req = addDBToContext(t, req, nil) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(createIndexHandler) - - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusBadRequest, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), "invalid character", "handler returned unexpected body") -} - -func TestCreateIndexHandler_IfFailsToGetCollection_ReturnError(t *testing.T) { - testError := errors.New("test error") - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(nil, testError) - - req, err := http.NewRequest("POST", IndexPath, bytes.NewBuffer([]byte(`{}`))) - if err != nil { - t.Fatal(err) - } - - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(createIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} - -func TestCreateIndexHandler_IfFailsToCreateIndex_ReturnError(t *testing.T) { - testError := errors.New("test error") - col := mocks.NewCollection(t) - col.EXPECT().CreateIndex(mock.Anything, mock.Anything). 
- Return(client.IndexDescription{}, testError) - - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(col, nil) - - req, err := http.NewRequest("POST", IndexPath, bytes.NewBuffer([]byte(`{}`))) - if err != nil { - t.Fatal(err) - } - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(createIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} - -func TestDropIndexHandler_IfNoDBInContext_ReturnError(t *testing.T) { - handler := http.HandlerFunc(dropIndexHandler) - assert.HTTPBodyContains(t, handler, "DELETE", IndexPath, nil, "no database available") -} - -func TestDropIndexHandler_IfFailsToParseParams_ReturnError(t *testing.T) { - req, err := http.NewRequest("DELETE", IndexPath, bytes.NewBuffer([]byte("invalid map"))) - if err != nil { - t.Fatal(err) - } - req = addDBToContext(t, req, nil) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(dropIndexHandler) - - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusBadRequest, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), "invalid character", "handler returned unexpected body") -} - -func TestDropIndexHandler_IfFailsToGetCollection_ReturnError(t *testing.T) { - testError := errors.New("test error") - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(nil, testError) - - req, err := http.NewRequest("DELETE", IndexPath, bytes.NewBuffer([]byte(`{}`))) - if err != nil { - t.Fatal(err) - } - - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(dropIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} - -func TestDropIndexHandler_IfFailsToDropIndex_ReturnError(t *testing.T) { - testError := errors.New("test error") - col := mocks.NewCollection(t) - col.EXPECT().DropIndex(mock.Anything, mock.Anything).Return(testError) - - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(col, nil) - - req, err := http.NewRequest("DELETE", IndexPath, bytes.NewBuffer([]byte(`{}`))) - if err != nil { - t.Fatal(err) - } - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(dropIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} - -func TestListIndexHandler_IfNoDBInContext_ReturnError(t *testing.T) { - handler := http.HandlerFunc(listIndexHandler) - assert.HTTPBodyContains(t, handler, "GET", IndexPath, nil, "no database available") -} - -func TestListIndexHandler_IfFailsToGetAllIndexes_ReturnError(t *testing.T) { - testError := errors.New("test error") - db := mocks.NewDB(t) - db.EXPECT().GetAllIndexes(mock.Anything).Return(nil, testError) - - req, err := http.NewRequest("GET", IndexPath, bytes.NewBuffer([]byte(`{}`))) - if err != nil { - t.Fatal(err) - } - - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(listIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, 
rr.Body.String(), testError.Error()) -} - -func TestListIndexHandler_IfFailsToGetCollection_ReturnError(t *testing.T) { - testError := errors.New("test error") - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(nil, testError) - - u, _ := url.Parse("http://defradb.com" + IndexPath) - params := url.Values{} - params.Add("collection", "testCollection") - u.RawQuery = params.Encode() - - req, err := http.NewRequest("GET", u.String(), nil) - if err != nil { - t.Fatal(err) - } - - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(listIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} - -func TestListIndexHandler_IfFailsToCollectionGetIndexes_ReturnError(t *testing.T) { - testError := errors.New("test error") - col := mocks.NewCollection(t) - col.EXPECT().GetIndexes(mock.Anything).Return(nil, testError) - - db := mocks.NewDB(t) - db.EXPECT().GetCollectionByName(mock.Anything, mock.Anything).Return(col, nil) - - u, _ := url.Parse("http://defradb.com" + IndexPath) - params := url.Values{} - params.Add("collection", "testCollection") - u.RawQuery = params.Encode() - - req, err := http.NewRequest("GET", u.String(), nil) - if err != nil { - t.Fatal(err) - } - req = addDBToContext(t, req, db) - - rr := httptest.NewRecorder() - handler := http.HandlerFunc(listIndexHandler) - handler.ServeHTTP(rr, req) - - assert.Equal(t, http.StatusInternalServerError, rr.Code, "handler returned wrong status code") - assert.Contains(t, rr.Body.String(), testError.Error()) -} diff --git a/api/http/handlerfuncs_test.go b/api/http/handlerfuncs_test.go deleted file mode 100644 index bef944f908..0000000000 --- a/api/http/handlerfuncs_test.go +++ /dev/null @@ -1,1184 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
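The index handlers exercised by the tests above take a flat JSON object: createIndexHandler reads "collection", a comma-separated "fields" string and an optional "name", while dropIndexHandler needs only "collection" and "name". A minimal sketch of creating an index follows; the host, port and route are assumptions, not taken from this patch.

package main

import (
	"bytes"
	"io"
	"log"
	"net/http"
)

func main() {
	// "fields" is split on commas by the handler, one entry per indexed field.
	payload := []byte(`{"collection":"User","fields":"name,age","name":"user_name_age_idx"}`)

	// Assumed address and route; adjust to wherever IndexPath is mounted.
	resp, err := http.Post("http://localhost:9181/index", "application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// On success the created index description is returned under data.index.
	out, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(resp.Status, string(out))
}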
- -package http - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - "net/http" - "net/http/httptest" - "strings" - "testing" - "time" - - badger "github.com/dgraph-io/badger/v4" - dshelp "github.com/ipfs/boxo/datastore/dshelp" - "github.com/ipfs/go-cid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" - - "github.com/sourcenetwork/defradb/client" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" - "github.com/sourcenetwork/defradb/db" - "github.com/sourcenetwork/defradb/errors" -) - -type testOptions struct { - Testing *testing.T - DB client.DB - Handlerfunc http.HandlerFunc - Method string - Path string - Body io.Reader - Headers map[string]string - QueryParams map[string]string - ExpectedStatus int - ResponseData any - ServerOptions serverOptions -} - -type testUser struct { - Key string `json:"_key"` - Versions []testVersion `json:"_version"` -} - -type testVersion struct { - CID string `json:"cid"` -} - -func TestRootHandler(t *testing.T) { - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: RootPath, - Body: nil, - ExpectedStatus: 200, - ResponseData: &resp, - }) - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, "Welcome to the DefraDB HTTP API. Use /graphql to send queries to the database. Read the documentation at https://docs.source.network/.", v["response"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } -} - -func TestPingHandler(t *testing.T) { - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: PingPath, - Body: nil, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, "pong", v["response"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } -} - -func TestDumpHandlerWithNoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "GET", - Path: DumpPath, - Body: nil, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, "ok", v["response"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } -} - -func TestDumpHandlerWithDBError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: DumpPath, - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestExecGQLWithNilBody(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: GraphQLPath, - Body: nil, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "body cannot be empty") - 
require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "body cannot be empty", errResponse.Errors[0].Message) -} - -func TestExecGQLWithEmptyBody(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: GraphQLPath, - Body: bytes.NewBuffer([]byte("")), - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "missing GraphQL request") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "missing GraphQL request", errResponse.Errors[0].Message) -} - -type mockReadCloser struct { - mock.Mock -} - -func (m *mockReadCloser) Read(p []byte) (n int, err error) { - args := m.Called(p) - return args.Int(0), args.Error(1) -} - -func TestExecGQLWithMockBody(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - mockReadCloser := mockReadCloser{} - // if Read is called, it will return error - mockReadCloser.On("Read", mock.AnythingOfType("[]uint8")).Return(0, errors.New("error reading")) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: GraphQLPath, - Body: &mockReadCloser, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "error reading") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "error reading", errResponse.Errors[0].Message) -} - -func TestExecGQLWithInvalidContentType(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - stmt := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - testRequest(testOptions{ - Testing: t, - Method: "POST", - Path: GraphQLPath, - Body: buf, - ExpectedStatus: 500, - Headers: map[string]string{"Content-Type": contentTypeJSON + "; this-is-wrong"}, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "mime: invalid media parameter") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "mime: invalid media parameter", errResponse.Errors[0].Message) -} - -func TestExecGQLWithNoDB(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - stmt := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - testRequest(testOptions{ - Testing: t, - Method: "POST", - Path: GraphQLPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func 
TestExecGQLHandlerContentTypeJSONWithJSONError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - // statement with JSON formatting error - stmt := ` -[ - "query": "mutation { - create_User( - data: \"{ - \\\"age\\\": 31, - \\\"verified\\\": true, - \\\"points\\\": 90, - \\\"name\\\": \\\"Bob\\\" - }\" - ) {_key} - }" -]` - - buf := bytes.NewBuffer([]byte(stmt)) - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeJSON}, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "invalid character") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "unmarshal error: invalid character ':' after array element", errResponse.Errors[0].Message) -} - -func TestExecGQLHandlerContentTypeJSON(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - // add document - stmt := ` -{ - "query": "mutation { - create_User( - data: \"{ - \\\"age\\\": 31, - \\\"verified\\\": true, - \\\"points\\\": 90, - \\\"name\\\": \\\"Bob\\\" - }\" - ) {_key} - }" -}` - // remove line returns and tabulation from formatted statement - stmt = strings.ReplaceAll(strings.ReplaceAll(stmt, "\t", ""), "\n", "") - - buf := bytes.NewBuffer([]byte(stmt)) - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeJSON}, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - require.Contains(t, users[0].Key, "bae-") -} - -func TestExecGQLHandlerContentTypeJSONWithError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - // add document - stmt := ` - { - "query": "mutation { - create_User( - data: \"{ - \\\"age\\\": 31, - \\\"notAField\\\": true - }\" - ) {_key} - }" - }` - - // remove line returns and tabulation from formatted statement - stmt = strings.ReplaceAll(strings.ReplaceAll(stmt, "\t", ""), "\n", "") - - buf := bytes.NewBuffer([]byte(stmt)) - resp := GQLResult{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeJSON}, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - require.Contains(t, resp.Errors, "The given field does not exist. 
Name: notAField") - require.Len(t, resp.Errors, 1) -} - -func TestExecGQLHandlerContentTypeJSONWithCharset(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - // add document - stmt := ` -{ - "query": "mutation { - create_User( - data: \"{ - \\\"age\\\": 31, - \\\"verified\\\": true, - \\\"points\\\": 90, - \\\"name\\\": \\\"Bob\\\" - }\" - ) {_key} - }" -}` - // remote line returns and tabulation from formatted statement - stmt = strings.ReplaceAll(strings.ReplaceAll(stmt, "\t", ""), "\n", "") - - buf := bytes.NewBuffer([]byte(stmt)) - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeJSON + "; charset=utf8"}, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - require.Contains(t, users[0].Key, "bae-") -} - -func TestExecGQLHandlerContentTypeFormURLEncoded(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: GraphQLPath, - Body: nil, - Headers: map[string]string{"Content-Type": contentTypeFormURLEncoded}, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "content type application/x-www-form-urlencoded not yet supported") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "content type application/x-www-form-urlencoded not yet supported", errResponse.Errors[0].Message) -} - -func TestExecGQLHandlerContentTypeGraphQL(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - // add document - stmt := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeGraphQL}, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - require.Contains(t, users[0].Key, "bae-") -} - -func TestExecGQLHandlerContentTypeText(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - // add document - stmt := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - require.Contains(t, users[0].Key, "bae-") -} - -func TestExecGQLHandlerWithSubsctiption(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // load schema - testLoadSchema(t, ctx, defra) - - stmt := ` -subscription { - User { - _key - age - name - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - - ch := make(chan []byte) - errCh := 
make(chan error) - - // We need to set a timeout otherwise the testSubscriptionRequest function will block until the - // http.ServeHTTP call returns, which in this case will only happen with a timeout. - ctxTimeout, cancel := context.WithTimeout(ctx, time.Second) - defer cancel() - - go testSubscriptionRequest(ctxTimeout, testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - Headers: map[string]string{"Content-Type": contentTypeGraphQL}, - ExpectedStatus: 200, - }, ch, errCh) - - // We wait to ensure the subscription requests can subscribe to the event channel. - time.Sleep(time.Second / 2) - - // add document - stmt2 := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf2 := bytes.NewBuffer([]byte(stmt2)) - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf2, - ExpectedStatus: 200, - ResponseData: &resp, - }) - select { - case data := <-ch: - require.Contains(t, string(data), users[0].Key) - case err := <-errCh: - t.Fatal(err) - } -} - -func TestListSchemaHandlerWithoutDB(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: SchemaPath, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - assert.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - assert.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - assert.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - assert.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestListSchemaHandlerWitNoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - stmt := ` -type user { - name: String - age: Int - verified: Boolean - points: Float -} -type group { - owner: user - members: [user] -}` - - _, err := defra.AddSchema(ctx, stmt) - if err != nil { - t.Fatal(err) - } - - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "GET", - Path: SchemaPath, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - switch v := resp.Data.(type) { - case map[string]any: - assert.Equal(t, map[string]any{ - "collections": []any{ - map[string]any{ - "name": "group", - "id": "bafkreieunyhcyupkdppyo2g4zcqtdxvj5xi4f422gp2jwene6ohndvcobe", - "version_id": "bafkreieunyhcyupkdppyo2g4zcqtdxvj5xi4f422gp2jwene6ohndvcobe", - "fields": []any{ - map[string]any{ - "id": "0", - "kind": "ID", - "name": "_key", - "internal": true, - }, - map[string]any{ - "id": "1", - "kind": "[user]", - "name": "members", - "internal": false, - }, - map[string]any{ - "id": "2", - "kind": "user", - "name": "owner", - "internal": false, - }, - map[string]any{ - "id": "3", - "kind": "ID", - "name": "owner_id", - "internal": true, - }, - }, - }, - map[string]any{ - "name": "user", - "id": "bafkreigrucdl7x3lsa4xwgz2bn7lbqmiwkifnspgx7hlkpaal3o55325bq", - "version_id": "bafkreigrucdl7x3lsa4xwgz2bn7lbqmiwkifnspgx7hlkpaal3o55325bq", - "fields": []any{ - map[string]any{ - "id": "0", - "kind": "ID", - "name": "_key", - "internal": true, - }, - map[string]any{ - "id": "1", - "kind": "Int", - "name": "age", - "internal": false, - }, - map[string]any{ - "id": "2", - "kind": "String", - "name": "name", - "internal": false, - }, - map[string]any{ - 
"id": "3", - "kind": "Float", - "name": "points", - "internal": false, - }, - map[string]any{ - "id": "4", - "kind": "Boolean", - "name": "verified", - "internal": false, - }, - }, - }, - }, - }, v) - - default: - t.Fatalf("data should be of type map[string]any but got %T\n%v", resp.Data, v) - } -} - -func TestLoadSchemaHandlerWithReadBodyError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - mockReadCloser := mockReadCloser{} - // if Read is called, it will return error - mockReadCloser.On("Read", mock.AnythingOfType("[]uint8")).Return(0, errors.New("error reading")) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: SchemaPath, - Body: &mockReadCloser, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "error reading") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "error reading", errResponse.Errors[0].Message) -} - -func TestLoadSchemaHandlerWithoutDB(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - stmt := ` -type User { - name: String - age: Int - verified: Boolean - points: Float -}` - - buf := bytes.NewBuffer([]byte(stmt)) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "POST", - Path: SchemaPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestLoadSchemaHandlerWithAddSchemaError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - // statement with types instead of type - stmt := ` -types User { - name: String - age: Int - verified: Boolean - points: Float -}` - - buf := bytes.NewBuffer([]byte(stmt)) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: SchemaPath, - Body: buf, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "Syntax Error GraphQL (2:1) Unexpected Name") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal( - t, - "Syntax Error GraphQL (2:1) Unexpected Name \"types\"\n\n1: \n2: types User {\n ^\n3: \\u0009name: String\n", - errResponse.Errors[0].Message, - ) -} - -func TestLoadSchemaHandlerWitNoError(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - stmt := ` -type User { - name: String - age: Int - verified: Boolean - points: Float -}` - - buf := bytes.NewBuffer([]byte(stmt)) - - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: SchemaPath, - Body: buf, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, map[string]any{ - "result": "success", - "collections": []any{ - map[string]any{ - "name": 
"User", - "id": "bafkreibpnvkvjqvg4skzlijka5xe63zeu74ivcjwd76q7yi65jdhwqhske", - "version_id": "bafkreibpnvkvjqvg4skzlijka5xe63zeu74ivcjwd76q7yi65jdhwqhske", - }, - }, - }, v) - - default: - t.Fatalf("data should be of type map[string]any but got %T\n%v", resp.Data, v) - } -} - -func TestGetBlockHandlerWithMultihashError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: BlocksPath + "/1234", - Body: nil, - ExpectedStatus: 400, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "illegal base32 data at input byte 0") - require.Equal(t, http.StatusBadRequest, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Bad Request", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "illegal base32 data at input byte 0", errResponse.Errors[0].Message) -} - -func TestGetBlockHandlerWithDSKeyWithNoDB(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - cID, err := cid.Parse("bafybeidembipteezluioakc2zyke4h5fnj4rr3uaougfyxd35u3qzefzhm") - if err != nil { - t.Fatal(err) - } - dsKey := dshelp.MultihashToDsKey(cID.Hash()) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: BlocksPath + dsKey.String(), - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestGetBlockHandlerWithNoDB(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: BlocksPath + "/bafybeidembipteezluioakc2zyke4h5fnj4rr3uaougfyxd35u3qzefzhm", - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no database available") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no database available", errResponse.Errors[0].Message) -} - -func TestGetBlockHandlerWithGetBlockstoreError(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "GET", - Path: BlocksPath + "/bafybeidembipteezluioakc2zyke4h5fnj4rr3uaougfyxd35u3qzefzhm", - Body: nil, - ExpectedStatus: 500, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "ipld: could not find bafybeidembipteezluioakc2zyke4h5fnj4rr3uaougfyxd35u3qzefzhm") - require.Equal(t, http.StatusInternalServerError, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Internal Server Error", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "ipld: could not find bafybeidembipteezluioakc2zyke4h5fnj4rr3uaougfyxd35u3qzefzhm", errResponse.Errors[0].Message) -} - -func TestGetBlockHandlerWithValidBlockstore(t *testing.T) { - ctx := context.Background() - defra := testNewInMemoryDB(t, ctx) - defer defra.Close(ctx) - - 
testLoadSchema(t, ctx, defra) - - // add document - stmt := ` -mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key - } -}` - - buf := bytes.NewBuffer([]byte(stmt)) - - users := []testUser{} - resp := DataResponse{ - Data: &users, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf, - ExpectedStatus: 200, - ResponseData: &resp, - }) - - if !strings.Contains(users[0].Key, "bae-") { - t.Fatal("expected valid document key") - } - - // get document cid - stmt2 := ` -query { - User (dockey: "%s") { - _version { - cid - } - } -}` - buf2 := bytes.NewBuffer([]byte(fmt.Sprintf(stmt2, users[0].Key))) - - users2 := []testUser{} - resp2 := DataResponse{ - Data: &users2, - } - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "POST", - Path: GraphQLPath, - Body: buf2, - ExpectedStatus: 200, - ResponseData: &resp2, - }) - - _, err := cid.Decode(users2[0].Versions[0].CID) - if err != nil { - t.Fatal(err) - } - - resp3 := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: defra, - Method: "GET", - Path: BlocksPath + "/" + users2[0].Versions[0].CID, - Body: buf, - ExpectedStatus: 200, - ResponseData: &resp3, - }) - - switch d := resp3.Data.(type) { - case map[string]any: - switch val := d["val"].(type) { - case string: - require.Equal(t, "pGNhZ2UYH2RuYW1lY0JvYmZwb2ludHMYWmh2ZXJpZmllZPU=", val) - default: - t.Fatalf("expecting string but got %T", val) - } - default: - t.Fatalf("expecting map[string]any but got %T", d) - } -} - -func TestPeerIDHandler(t *testing.T) { - resp := DataResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: PeerIDPath, - Body: nil, - ExpectedStatus: 200, - ResponseData: &resp, - ServerOptions: serverOptions{ - peerID: "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", - }, - }) - - switch v := resp.Data.(type) { - case map[string]any: - require.Equal(t, "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", v["peerID"]) - default: - t.Fatalf("data should be of type map[string]any but got %T", resp.Data) - } -} - -func TestPeerIDHandlerWithNoPeerIDInContext(t *testing.T) { - t.Cleanup(CleanupEnv) - env = "dev" - - errResponse := ErrorResponse{} - testRequest(testOptions{ - Testing: t, - DB: nil, - Method: "GET", - Path: PeerIDPath, - Body: nil, - ExpectedStatus: 404, - ResponseData: &errResponse, - }) - - require.Contains(t, errResponse.Errors[0].Extensions.Stack, "no PeerID available. P2P might be disabled") - require.Equal(t, http.StatusNotFound, errResponse.Errors[0].Extensions.Status) - require.Equal(t, "Not Found", errResponse.Errors[0].Extensions.HTTPError) - require.Equal(t, "no PeerID available. 
P2P might be disabled", errResponse.Errors[0].Message) -} - -func testRequest(opt testOptions) []byte { - req, err := http.NewRequest(opt.Method, opt.Path, opt.Body) - if err != nil { - opt.Testing.Fatal(err) - } - - for k, v := range opt.Headers { - req.Header.Set(k, v) - } - - q := req.URL.Query() - for k, v := range opt.QueryParams { - q.Add(k, v) - } - req.URL.RawQuery = q.Encode() - - h := newHandler(opt.DB, opt.ServerOptions) - rec := httptest.NewRecorder() - h.ServeHTTP(rec, req) - assert.Equal(opt.Testing, opt.ExpectedStatus, rec.Result().StatusCode) - - resBody, err := io.ReadAll(rec.Result().Body) - if err != nil { - opt.Testing.Fatal(err) - } - - if opt.ResponseData != nil { - err = json.Unmarshal(resBody, &opt.ResponseData) - if err != nil { - opt.Testing.Fatal(err) - } - } - - return resBody -} - -func testSubscriptionRequest(ctx context.Context, opt testOptions, ch chan []byte, errCh chan error) { - req, err := http.NewRequest(opt.Method, opt.Path, opt.Body) - if err != nil { - errCh <- err - return - } - - req = req.WithContext(ctx) - - for k, v := range opt.Headers { - req.Header.Set(k, v) - } - - h := newHandler(opt.DB, opt.ServerOptions) - rec := httptest.NewRecorder() - h.ServeHTTP(rec, req) - require.Equal(opt.Testing, opt.ExpectedStatus, rec.Result().StatusCode) - - respBody, err := io.ReadAll(rec.Result().Body) - if err != nil { - errCh <- err - return - } - - ch <- respBody -} - -func testNewInMemoryDB(t *testing.T, ctx context.Context) client.DB { - // init in memory DB - opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} - rootstore, err := badgerds.NewDatastore("", &opts) - if err != nil { - t.Fatal(err) - } - - options := []db.Option{ - db.WithUpdateEvents(), - } - - defra, err := db.NewDB(ctx, rootstore, options...) - if err != nil { - t.Fatal(err) - } - - return defra -} - -func testLoadSchema(t *testing.T, ctx context.Context, db client.DB) { - stmt := ` -type User { - name: String - age: Int - verified: Boolean - points: Float -}` - _, err := db.AddSchema(ctx, stmt) - if err != nil { - t.Fatal(err) - } -} diff --git a/api/http/http.go b/api/http/http.go deleted file mode 100644 index 3ac3d62bdd..0000000000 --- a/api/http/http.go +++ /dev/null @@ -1,18 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -/* -Package http provides DefraDB's HTTP API, offering various capabilities. -*/ -package http - -import "github.com/sourcenetwork/defradb/logging" - -var log = logging.MustNewLogger("http") diff --git a/api/http/logger.go b/api/http/logger.go deleted file mode 100644 index 2a91a271c2..0000000000 --- a/api/http/logger.go +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package http - -import ( - "net/http" - "time" - - "github.com/sourcenetwork/defradb/logging" -) - -type loggingResponseWriter struct { - statusCode int - contentLength int - - http.ResponseWriter -} - -func newLoggingResponseWriter(w http.ResponseWriter) *loggingResponseWriter { - return &loggingResponseWriter{ - statusCode: http.StatusOK, - contentLength: 0, - ResponseWriter: w, - } -} - -func (lrw *loggingResponseWriter) Flush() { - lrw.ResponseWriter.(http.Flusher).Flush() -} - -func (lrw *loggingResponseWriter) Header() http.Header { - return lrw.ResponseWriter.Header() -} - -func (lrw *loggingResponseWriter) WriteHeader(code int) { - lrw.statusCode = code - lrw.ResponseWriter.WriteHeader(code) -} - -func (lrw *loggingResponseWriter) Write(b []byte) (int, error) { - lrw.contentLength = len(b) - return lrw.ResponseWriter.Write(b) -} - -func loggerMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - start := time.Now() - lrw := newLoggingResponseWriter(rw) - next.ServeHTTP(lrw, req) - elapsed := time.Since(start) - log.Info( - req.Context(), - "Request", - logging.NewKV( - "Method", - req.Method, - ), - logging.NewKV( - "Path", - req.URL.Path, - ), - logging.NewKV( - "Status", - lrw.statusCode, - ), - logging.NewKV( - "LengthBytes", - lrw.contentLength, - ), - logging.NewKV( - "ElapsedTime", - elapsed.String(), - ), - ) - }) -} diff --git a/api/http/logger_test.go b/api/http/logger_test.go deleted file mode 100644 index 9c2791d9df..0000000000 --- a/api/http/logger_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "bufio" - "encoding/json" - "net/http" - "net/http/httptest" - "os" - "path" - "strconv" - "testing" - - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/logging" -) - -func TestNewLoggingResponseWriterLogger(t *testing.T) { - rec := httptest.NewRecorder() - lrw := newLoggingResponseWriter(rec) - - lrw.WriteHeader(400) - assert.Equal(t, 400, lrw.statusCode) - - content := "Hello world!" - - length, err := lrw.Write([]byte(content)) - if err != nil { - t.Fatal(err) - } - assert.Equal(t, length, lrw.contentLength) - assert.Equal(t, rec.Body.String(), content) -} - -func TestLogginResponseWriterWriteWithChunks(t *testing.T) { - rec := httptest.NewRecorder() - lrw := newLoggingResponseWriter(rec) - - content := "Hello world!" 
- contentLength := len(content) - - lrw.Header().Set("Content-Length", strconv.Itoa(contentLength)) - - length1, err := lrw.Write([]byte(content[:contentLength/2])) - if err != nil { - t.Fatal(err) - } - - length2, err := lrw.Write([]byte(content[contentLength/2:])) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, contentLength, length1+length2) - assert.Equal(t, rec.Body.String(), content) -} - -func TestLoggerKeyValueOutput(t *testing.T) { - dir := t.TempDir() - - // send logs to temp file so we can inspect it - logFile := path.Join(dir, "http_test.log") - - req, err := http.NewRequest("GET", "/ping", nil) - if err != nil { - t.Fatal(err) - } - - rec2 := httptest.NewRecorder() - - log.ApplyConfig(logging.Config{ - EncoderFormat: logging.NewEncoderFormatOption(logging.JSON), - OutputPaths: []string{logFile}, - }) - loggerMiddleware(http.HandlerFunc(pingHandler)).ServeHTTP(rec2, req) - assert.Equal(t, 200, rec2.Result().StatusCode) - - // inspect the log file - kv, err := readLog(logFile) - if err != nil { - t.Fatal(err) - } - - // check that everything is as expected - assert.Equal(t, "{\"data\":{\"response\":\"pong\"}}", rec2.Body.String()) - assert.Equal(t, "INFO", kv["level"]) - assert.Equal(t, "http", kv["logger"]) - assert.Equal(t, "Request", kv["msg"]) - assert.Equal(t, "GET", kv["Method"]) - assert.Equal(t, "/ping", kv["Path"]) - assert.Equal(t, float64(200), kv["Status"]) - assert.Equal(t, float64(28), kv["LengthBytes"]) -} - -func readLog(path string) (map[string]any, error) { - // inspect the log file - f, err := os.Open(path) - if err != nil { - return nil, errors.WithStack(err) - } - - scanner := bufio.NewScanner(f) - scanner.Scan() - logLine := scanner.Text() - - kv := make(map[string]any) - err = json.Unmarshal([]byte(logLine), &kv) - if err != nil { - return nil, errors.WithStack(err) - } - - return kv, nil -} diff --git a/api/http/playground.go b/api/http/playground.go deleted file mode 100644 index 0a69e312b2..0000000000 --- a/api/http/playground.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -//go:build playground - -package http - -import ( - "io/fs" - "net/http" - - "github.com/sourcenetwork/defradb/playground" -) - -func init() { - sub, err := fs.Sub(playground.Dist, "dist") - if err != nil { - panic(err) - } - playgroundHandler = http.FileServer(http.FS(sub)) -} diff --git a/api/http/request_result.go b/api/http/request_result.go deleted file mode 100644 index f5bf7912e9..0000000000 --- a/api/http/request_result.go +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package http - -import "github.com/sourcenetwork/defradb/client" - -type GQLResult struct { - Errors []string `json:"errors,omitempty"` - - Data any `json:"data"` -} - -func newGQLResult(r client.GQLResult) *GQLResult { - errors := make([]string, len(r.Errors)) - for i := range r.Errors { - errors[i] = r.Errors[i].Error() - } - - return &GQLResult{ - Errors: errors, - Data: r.Data, - } -} diff --git a/api/http/router.go b/api/http/router.go deleted file mode 100644 index 2d54a16560..0000000000 --- a/api/http/router.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "net/http" - "net/url" - "path" - "strings" - - "github.com/pkg/errors" -) - -const ( - // Version is the current version of the HTTP API. - Version string = "v0" - versionedAPIPath string = "/api/" + Version - - RootPath string = versionedAPIPath + "" - PingPath string = versionedAPIPath + "/ping" - DumpPath string = versionedAPIPath + "/debug/dump" - BlocksPath string = versionedAPIPath + "/blocks" - GraphQLPath string = versionedAPIPath + "/graphql" - SchemaPath string = versionedAPIPath + "/schema" - SchemaMigrationPath string = SchemaPath + "/migration" - IndexPath string = versionedAPIPath + "/index" - PeerIDPath string = versionedAPIPath + "/peerid" - BackupPath string = versionedAPIPath + "/backup" - ExportPath string = BackupPath + "/export" - ImportPath string = BackupPath + "/import" -) - -// playgroundHandler is set when building with the playground build tag -var playgroundHandler http.Handler - -func setRoutes(h *handler) *handler { - h.Get(RootPath, rootHandler) - h.Get(PingPath, pingHandler) - h.Get(DumpPath, dumpHandler) - h.Get(BlocksPath+"/{cid}", getBlockHandler) - h.Get(GraphQLPath, execGQLHandler) - h.Post(GraphQLPath, execGQLHandler) - h.Get(SchemaPath, listSchemaHandler) - h.Post(SchemaPath, loadSchemaHandler) - h.Patch(SchemaPath, patchSchemaHandler) - h.Post(SchemaMigrationPath, setMigrationHandler) - h.Get(SchemaMigrationPath, getMigrationHandler) - h.Post(IndexPath, createIndexHandler) - h.Delete(IndexPath, dropIndexHandler) - h.Get(IndexPath, listIndexHandler) - h.Get(PeerIDPath, peerIDHandler) - h.Post(ExportPath, exportHandler) - h.Post(ImportPath, importHandler) - h.Handle("/*", playgroundHandler) - - return h -} - -// JoinPaths takes a base path and any number of additional paths -// and combines them safely to form a full URL path. -// The base must start with a http or https. -func JoinPaths(base string, paths ...string) (*url.URL, error) { - if !strings.HasPrefix(base, "http") { - return nil, ErrSchema - } - - u, err := url.Parse(base) - if err != nil { - return nil, errors.WithStack(err) - } - - u.Path = path.Join(u.Path, strings.Join(paths, "/")) - - return u, nil -} diff --git a/api/http/router_test.go b/api/http/router_test.go deleted file mode 100644 index e43260ef43..0000000000 --- a/api/http/router_test.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestJoinPathsWithBase(t *testing.T) { - path, err := JoinPaths("http://localhost:9181", BlocksPath, "cid_of_some_sort") - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "http://localhost:9181"+BlocksPath+"/cid_of_some_sort", path.String()) -} - -func TestJoinPathsWithNoBase(t *testing.T) { - _, err := JoinPaths("", BlocksPath, "cid_of_some_sort") - assert.ErrorIs(t, ErrSchema, err) -} - -func TestJoinPathsWithBaseWithoutHttpPrefix(t *testing.T) { - _, err := JoinPaths("localhost:9181", BlocksPath, "cid_of_some_sort") - assert.ErrorIs(t, ErrSchema, err) -} - -func TestJoinPathsWithNoPaths(t *testing.T) { - path, err := JoinPaths("http://localhost:9181") - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, "http://localhost:9181", path.String()) -} - -func TestJoinPathsWithInvalidCharacter(t *testing.T) { - _, err := JoinPaths("https://%gh&%ij") - assert.Error(t, err) -} diff --git a/api/http/server.go b/api/http/server.go deleted file mode 100644 index a71dccb0ec..0000000000 --- a/api/http/server.go +++ /dev/null @@ -1,322 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "context" - "crypto/tls" - "fmt" - "net" - "net/http" - "path" - "strings" - - "github.com/sourcenetwork/immutable" - "golang.org/x/crypto/acme/autocert" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" -) - -const ( - // These constants are best effort durations that fit our current API - // and possibly prevent from running out of file descriptors. - // readTimeout = 5 * time.Second - // writeTimeout = 10 * time.Second - // idleTimeout = 120 * time.Second - - // Temparily disabling timeouts until [this proposal](https://github.com/golang/go/issues/54136) is merged. - // https://github.com/sourcenetwork/defradb/issues/927 - readTimeout = 0 - writeTimeout = 0 - idleTimeout = 0 -) - -const ( - httpPort = ":80" - httpsPort = ":443" -) - -// Server struct holds the Handler for the HTTP API. -type Server struct { - options serverOptions - listener net.Listener - certManager *autocert.Manager - // address that is assigned to the server on listen - address string - - http.Server -} - -type serverOptions struct { - // list of allowed origins for CORS. - allowedOrigins []string - // ID of the server node. - peerID string - // when the value is present, the server will run with tls - tls immutable.Option[tlsOptions] - // root directory for the node config. - rootDir string - // The domain for the API (optional). - domain immutable.Option[string] -} - -type tlsOptions struct { - // Public key for TLS. Ignored if domain is set. - pubKey string - // Private key for TLS. Ignored if domain is set. 
- privKey string - // email address for the CA to send problem notifications (optional) - email string - // specify the tls port - port string -} - -// NewServer instantiates a new server with the given http.Handler. -func NewServer(db client.DB, options ...func(*Server)) *Server { - srv := &Server{ - Server: http.Server{ - ReadTimeout: readTimeout, - WriteTimeout: writeTimeout, - IdleTimeout: idleTimeout, - }, - } - - for _, opt := range append(options, DefaultOpts()) { - opt(srv) - } - - srv.Handler = newHandler(db, srv.options) - - return srv -} - -func newHTTPRedirServer(m *autocert.Manager) *Server { - srv := &Server{ - Server: http.Server{ - ReadTimeout: readTimeout, - WriteTimeout: writeTimeout, - IdleTimeout: idleTimeout, - }, - } - - srv.Addr = httpPort - srv.Handler = m.HTTPHandler(nil) - - return srv -} - -// DefaultOpts returns the default options for the server. -func DefaultOpts() func(*Server) { - return func(s *Server) { - if s.Addr == "" { - s.Addr = "localhost:9181" - } - } -} - -// WithAllowedOrigins returns an option to set the allowed origins for CORS. -func WithAllowedOrigins(origins ...string) func(*Server) { - return func(s *Server) { - s.options.allowedOrigins = append(s.options.allowedOrigins, origins...) - } -} - -// WithAddress returns an option to set the address for the server. -func WithAddress(addr string) func(*Server) { - return func(s *Server) { - s.Addr = addr - - // If the address is not localhost, we check to see if it's a valid IP address. - // If it's not a valid IP, we assume that it's a domain name to be used with TLS. - if !strings.HasPrefix(addr, "localhost:") && !strings.HasPrefix(addr, ":") { - host, _, err := net.SplitHostPort(addr) - if err != nil { - host = addr - } - ip := net.ParseIP(host) - if ip == nil { - s.Addr = httpPort - s.options.domain = immutable.Some(host) - } - } - } -} - -// WithCAEmail returns an option to set the email address for the CA to send problem notifications. -func WithCAEmail(email string) func(*Server) { - return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.email = email - s.options.tls = immutable.Some(tlsOpt) - } -} - -// WithPeerID returns an option to set the identifier of the server node. -func WithPeerID(id string) func(*Server) { - return func(s *Server) { - s.options.peerID = id - } -} - -// WithRootDir returns an option to set the root directory for the node config. -func WithRootDir(rootDir string) func(*Server) { - return func(s *Server) { - s.options.rootDir = rootDir - } -} - -// WithSelfSignedCert returns an option to set the public and private keys for TLS. -func WithSelfSignedCert(pubKey, privKey string) func(*Server) { - return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.pubKey = pubKey - tlsOpt.privKey = privKey - s.options.tls = immutable.Some(tlsOpt) - } -} - -// WithTLS returns an option to enable TLS. -func WithTLS() func(*Server) { - return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.port = httpsPort - s.options.tls = immutable.Some(tlsOpt) - } -} - -// WithTLSPort returns an option to set the port for TLS. -func WithTLSPort(port int) func(*Server) { - return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.port = fmt.Sprintf(":%d", port) - s.options.tls = immutable.Some(tlsOpt) - } -} - -// Listen creates a new net.Listener and saves it on the receiver. 
-func (s *Server) Listen(ctx context.Context) error { - var err error - if s.options.tls.HasValue() { - return s.listenWithTLS(ctx) - } - - lc := net.ListenConfig{} - s.listener, err = lc.Listen(ctx, "tcp", s.Addr) - if err != nil { - return errors.WithStack(err) - } - - // Save the address on the server in case the port was set to random - // and that we want to see what was assigned. - s.address = s.listener.Addr().String() - - return nil -} - -func (s *Server) listenWithTLS(ctx context.Context) error { - cfg := &tls.Config{ - MinVersion: tls.VersionTLS12, - // We only allow cipher suites that are marked secure - // by ssllabs - CipherSuites: []uint16{ - tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, - tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, - tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, - tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, - tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, - tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, - }, - ServerName: "DefraDB", - } - - if s.options.domain.HasValue() && s.options.domain.Value() != "" { - s.Addr = s.options.tls.Value().port - - if s.options.tls.Value().email == "" || s.options.tls.Value().email == config.DefaultAPIEmail { - return ErrNoEmail - } - - certCache := path.Join(s.options.rootDir, "autocerts") - - log.FeedbackInfo( - ctx, - "Generating auto certificate", - logging.NewKV("Domain", s.options.domain.Value()), - logging.NewKV("Certificate cache", certCache), - ) - - m := &autocert.Manager{ - Cache: autocert.DirCache(certCache), - Prompt: autocert.AcceptTOS, - Email: s.options.tls.Value().email, - HostPolicy: autocert.HostWhitelist(s.options.domain.Value()), - } - - cfg.GetCertificate = m.GetCertificate - - // We set manager on the server instance to later start - // a redirection server. - s.certManager = m - } else { - // When not using auto cert, we create a self signed certificate - // with the provided public and prive keys. - log.FeedbackInfo(ctx, "Generating self signed certificate") - - cert, err := tls.LoadX509KeyPair( - s.options.tls.Value().privKey, - s.options.tls.Value().pubKey, - ) - if err != nil { - return errors.WithStack(err) - } - - cfg.Certificates = []tls.Certificate{cert} - } - - var err error - s.listener, err = tls.Listen("tcp", s.Addr, cfg) - if err != nil { - return errors.WithStack(err) - } - - // Save the address on the server in case the port was set to random - // and that we want to see what was assigned. - s.address = s.listener.Addr().String() - - return nil -} - -// Run calls Serve with the receiver's listener. -func (s *Server) Run(ctx context.Context) error { - if s.listener == nil { - return ErrNoListener - } - - if s.certManager != nil { - // When using TLS it's important to redirect http requests to https - go func() { - srv := newHTTPRedirServer(s.certManager) - err := srv.ListenAndServe() - if err != nil && !errors.Is(err, http.ErrServerClosed) { - log.Info(ctx, "Something went wrong with the redirection server", logging.NewKV("Error", err)) - } - }() - } - return s.Serve(s.listener) -} - -// AssignedAddr returns the address that was assigned to the server on calls to listen. -func (s *Server) AssignedAddr() string { - return s.address -} diff --git a/api/http/server_test.go b/api/http/server_test.go deleted file mode 100644 index c19e60a2ac..0000000000 --- a/api/http/server_test.go +++ /dev/null @@ -1,251 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package http - -import ( - "context" - "net/http" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "golang.org/x/crypto/acme/autocert" -) - -func TestNewServerAndRunWithoutListener(t *testing.T) { - ctx := context.Background() - s := NewServer(nil, WithAddress(":0")) - if ok := assert.NotNil(t, s); ok { - assert.Equal(t, ErrNoListener, s.Run(ctx)) - } -} - -func TestNewServerAndRunWithListenerAndInvalidPort(t *testing.T) { - ctx := context.Background() - s := NewServer(nil, WithAddress(":303000")) - if ok := assert.NotNil(t, s); ok { - assert.Error(t, s.Listen(ctx)) - } -} - -func TestNewServerAndRunWithListenerAndValidPort(t *testing.T) { - ctx := context.Background() - serverRunning := make(chan struct{}) - serverDone := make(chan struct{}) - s := NewServer(nil, WithAddress(":0")) - go func() { - close(serverRunning) - err := s.Listen(ctx) - assert.NoError(t, err) - err = s.Run(ctx) - assert.ErrorIs(t, http.ErrServerClosed, err) - defer close(serverDone) - }() - - <-serverRunning - - s.Shutdown(context.Background()) - - <-serverDone -} - -func TestNewServerAndRunWithAutocertWithoutEmail(t *testing.T) { - ctx := context.Background() - dir := t.TempDir() - s := NewServer(nil, WithAddress("example.com"), WithRootDir(dir), WithTLSPort(0)) - - err := s.Listen(ctx) - assert.ErrorIs(t, err, ErrNoEmail) - - s.Shutdown(context.Background()) -} - -func TestNewServerAndRunWithAutocert(t *testing.T) { - ctx := context.Background() - serverRunning := make(chan struct{}) - serverDone := make(chan struct{}) - dir := t.TempDir() - s := NewServer(nil, WithAddress("example.com"), WithRootDir(dir), WithTLSPort(0), WithCAEmail("dev@defradb.net")) - go func() { - close(serverRunning) - err := s.Listen(ctx) - assert.NoError(t, err) - err = s.Run(ctx) - assert.ErrorIs(t, http.ErrServerClosed, err) - defer close(serverDone) - }() - - <-serverRunning - - s.Shutdown(context.Background()) - - <-serverDone -} - -func TestNewServerAndRunWithSelfSignedCertAndNoKeyFiles(t *testing.T) { - ctx := context.Background() - serverRunning := make(chan struct{}) - serverDone := make(chan struct{}) - dir := t.TempDir() - s := NewServer(nil, WithAddress("localhost:0"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) - go func() { - close(serverRunning) - err := s.Listen(ctx) - assert.Contains(t, err.Error(), "no such file or directory") - defer close(serverDone) - }() - - <-serverRunning - - s.Shutdown(context.Background()) - - <-serverDone -} - -const pubKey = `-----BEGIN EC PARAMETERS----- -BgUrgQQAIg== ------END EC PARAMETERS----- ------BEGIN EC PRIVATE KEY----- -MIGkAgEBBDD4VK0DRBRaeieXU9JaPJfSeegGYcXaX5+gEcwGKA0UJYI46QRHIlHC -IJMOjPsrUCmgBwYFK4EEACKhZANiAAQ3ltsFK8bZZpOYiJnvwpa7Ft+b0KFsDqpu -pS0gW/SYpAncHhRuz18RQ2ycuXlSN1S/PAryRZ5PK2xORKfwpguEDEMdVwbHorZO -K44P/h3dhyNyAyf8rcRoqKXcl/K/uew= ------END EC PRIVATE KEY-----` - -const privKey = `-----BEGIN CERTIFICATE----- -MIICQDCCAcUCCQDpMnN1gQ4fGTAKBggqhkjOPQQDAjCBiDELMAkGA1UEBhMCY2Ex -DzANBgNVBAgMBlF1ZWJlYzEQMA4GA1UEBwwHQ2hlbHNlYTEPMA0GA1UECgwGU291 -cmNlMRAwDgYDVQQLDAdEZWZyYURCMQ8wDQYDVQQDDAZzb3VyY2UxIjAgBgkqhkiG -9w0BCQEWE2V4YW1wbGVAZXhhbXBsZS5jb20wHhcNMjIxMDA2MTgyMjE1WhcNMjMx -MDA2MTgyMjE1WjCBiDELMAkGA1UEBhMCY2ExDzANBgNVBAgMBlF1ZWJlYzEQMA4G -A1UEBwwHQ2hlbHNlYTEPMA0GA1UECgwGU291cmNlMRAwDgYDVQQLDAdEZWZyYURC 
-MQ8wDQYDVQQDDAZzb3VyY2UxIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVAZXhhbXBs -ZS5jb20wdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQ3ltsFK8bZZpOYiJnvwpa7Ft+b -0KFsDqpupS0gW/SYpAncHhRuz18RQ2ycuXlSN1S/PAryRZ5PK2xORKfwpguEDEMd -VwbHorZOK44P/h3dhyNyAyf8rcRoqKXcl/K/uewwCgYIKoZIzj0EAwIDaQAwZgIx -AIfNQeo8syOb94ojF40jY+fY1ZBSbNNK6UUbFquwDMVEoSyXRJHHEU12NUKCVTUH -kgIxAKaEGC+lqp0aaN+yubYLRiTDxOlNpyiHox3nZiL4bG/CCdPDvbX63QcdI2yq -XPKczg== ------END CERTIFICATE-----` - -func TestNewServerAndRunWithSelfSignedCertAndInvalidPort(t *testing.T) { - ctx := context.Background() - serverRunning := make(chan struct{}) - serverDone := make(chan struct{}) - dir := t.TempDir() - err := os.WriteFile(dir+"/server.key", []byte(privKey), 0644) - if err != nil { - t.Fatal(err) - } - err = os.WriteFile(dir+"/server.crt", []byte(pubKey), 0644) - if err != nil { - t.Fatal(err) - } - s := NewServer(nil, WithAddress(":303000"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) - go func() { - close(serverRunning) - err := s.Listen(ctx) - assert.Contains(t, err.Error(), "invalid port") - defer close(serverDone) - }() - - <-serverRunning - - s.Shutdown(context.Background()) - - <-serverDone -} - -func TestNewServerAndRunWithSelfSignedCert(t *testing.T) { - ctx := context.Background() - serverRunning := make(chan struct{}) - serverDone := make(chan struct{}) - dir := t.TempDir() - err := os.WriteFile(dir+"/server.key", []byte(privKey), 0644) - if err != nil { - t.Fatal(err) - } - err = os.WriteFile(dir+"/server.crt", []byte(pubKey), 0644) - if err != nil { - t.Fatal(err) - } - s := NewServer(nil, WithAddress("localhost:0"), WithSelfSignedCert(dir+"/server.crt", dir+"/server.key")) - go func() { - close(serverRunning) - err := s.Listen(ctx) - assert.NoError(t, err) - err = s.Run(ctx) - assert.ErrorIs(t, http.ErrServerClosed, err) - defer close(serverDone) - }() - - <-serverRunning - - s.Shutdown(context.Background()) - - <-serverDone -} - -func TestNewServerWithoutOptions(t *testing.T) { - s := NewServer(nil) - assert.Equal(t, "localhost:9181", s.Addr) - assert.Equal(t, []string(nil), s.options.allowedOrigins) -} - -func TestNewServerWithAddress(t *testing.T) { - s := NewServer(nil, WithAddress("localhost:9999")) - assert.Equal(t, "localhost:9999", s.Addr) -} - -func TestNewServerWithDomainAddress(t *testing.T) { - s := NewServer(nil, WithAddress("example.com")) - assert.Equal(t, "example.com", s.options.domain.Value()) - assert.NotNil(t, s.options.tls) -} - -func TestNewServerWithAllowedOrigins(t *testing.T) { - s := NewServer(nil, WithAllowedOrigins("https://source.network", "https://app.source.network")) - assert.Equal(t, []string{"https://source.network", "https://app.source.network"}, s.options.allowedOrigins) -} - -func TestNewServerWithCAEmail(t *testing.T) { - s := NewServer(nil, WithCAEmail("me@example.com")) - assert.Equal(t, "me@example.com", s.options.tls.Value().email) -} - -func TestNewServerWithPeerID(t *testing.T) { - s := NewServer(nil, WithPeerID("12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR")) - assert.Equal(t, "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", s.options.peerID) -} - -func TestNewServerWithRootDir(t *testing.T) { - dir := t.TempDir() - s := NewServer(nil, WithRootDir(dir)) - assert.Equal(t, dir, s.options.rootDir) -} - -func TestNewServerWithTLSPort(t *testing.T) { - s := NewServer(nil, WithTLSPort(44343)) - assert.Equal(t, ":44343", s.options.tls.Value().port) -} - -func TestNewServerWithSelfSignedCert(t *testing.T) { - s := NewServer(nil, WithSelfSignedCert("pub.key", "priv.key")) - 
assert.Equal(t, "pub.key", s.options.tls.Value().pubKey) - assert.Equal(t, "priv.key", s.options.tls.Value().privKey) - assert.NotNil(t, s.options.tls) -} - -func TestNewHTTPRedirServer(t *testing.T) { - m := &autocert.Manager{} - s := newHTTPRedirServer(m) - assert.Equal(t, ":80", s.Addr) -} diff --git a/cli/backup_export.go b/cli/backup_export.go index 32184bfe35..3c1a5c78ad 100644 --- a/cli/backup_export.go +++ b/cli/backup_export.go @@ -11,24 +11,17 @@ package cli import ( - "bytes" - "encoding/json" - "io" - "net/http" - "os" "strings" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) const jsonFileType = "json" -func MakeBackupExportCommand(cfg *config.Config) *cobra.Command { +func MakeBackupExportCommand(cfg *config.Config, db client.DB) *cobra.Command { var collections []string var pretty bool var format string @@ -55,10 +48,6 @@ Example: export data for the 'Users' collection: return ErrInvalidExportFormat } outputPath := args[0] - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.ExportPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } for i := range collections { collections[i] = strings.Trim(collections[i], " ") @@ -71,57 +60,7 @@ Example: export data for the 'Users' collection: Collections: collections, } - b, err := json.Marshal(data) - if err != nil { - return err - } - - res, err := http.Post(endpoint.String(), "application/json", bytes.NewBuffer(b)) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return err - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - type exportResponse struct { - Errors []struct { - Message string `json:"message"` - } `json:"errors"` - } - r := exportResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to export data", - logging.NewKV("Errors", r.Errors)) - } else if len(collections) == 1 { - log.FeedbackInfo(cmd.Context(), "Data exported for collection "+collections[0]) - } else if len(collections) > 1 { - log.FeedbackInfo(cmd.Context(), "Data exported for collections "+strings.Join(collections, ", ")) - } else { - log.FeedbackInfo(cmd.Context(), "Data exported for all collections") - } - } - return nil + return db.BasicExport(cmd.Context(), &data) }, } cmd.Flags().BoolVarP(&pretty, "pretty", "p", false, "Set the output JSON to be pretty printed") diff --git a/cli/backup_export_test.go b/cli/backup_export_test.go deleted file mode 100644 index 9539a1cdb1..0000000000 --- a/cli/backup_export_test.go +++ /dev/null @@ -1,300 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "context" - "encoding/json" - "os" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/sourcenetwork/defradb/client" -) - -func TestBackupExportCmd_WithNoArgument_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - - dbExportCmd := MakeBackupExportCommand(cfg) - err := dbExportCmd.ValidateArgs([]string{}) - require.ErrorIs(t, err, ErrInvalidArgumentLength) -} - -func TestBackupExportCmd_WithInvalidExportFormat_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - dbExportCmd := MakeBackupExportCommand(cfg) - - filepath := t.TempDir() + "/test.json" - - dbExportCmd.Flags().Set("format", "invalid") - err := dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.ErrorIs(t, err, ErrInvalidExportFormat) -} - -func TestBackupExportCmd_IfInvalidAddress_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - cfg.API.Address = "invalid address" - - filepath := t.TempDir() + "/test.json" - - dbExportCmd := MakeBackupExportCommand(cfg) - err := dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.ErrorIs(t, err, NewErrFailedToJoinEndpoint(err)) -} - -func TestBackupExportCmd_WithEmptyDatastore_NoError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - err := dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Data exported for all collections")) - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - - require.Len(t, b, 2) // file should be an empty json object -} - -func TestBackupExportCmd_WithInvalidCollection_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - dbExportCmd.Flags().Set("collections", "User") - err := dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Failed to export data")) -} - -func TestBackupExportCmd_WithAllCollection_NoError(t *testing.T) { - ctx := context.Background() - - cfg, di, close := startTestNode(t) - defer close() - - _, err := di.db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) - require.NoError(t, err) - - col, err := di.db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - err = dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Data exported for all collections")) - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, - string(b), - ) -} - -func 
TestBackupExportCmd_WithAllCollectionAndPrettyFormating_NoError(t *testing.T) { - ctx := context.Background() - - cfg, di, close := startTestNode(t) - defer close() - - _, err := di.db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) - require.NoError(t, err) - - col, err := di.db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - dbExportCmd.Flags().Set("pretty", "true") - err = dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Data exported for all collections")) - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{ - "User": [ - { - "_key": "bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey": "bae-e933420a-988a-56f8-8952-6c245aebd519", - "age": 30, - "name": "John" - } - ] -}`, - string(b), - ) -} - -func TestBackupExportCmd_WithSingleCollection_NoError(t *testing.T) { - ctx := context.Background() - - cfg, di, close := startTestNode(t) - defer close() - - _, err := di.db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) - require.NoError(t, err) - - col, err := di.db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - dbExportCmd.Flags().Set("collections", "User") - err = dbExportCmd.RunE(dbExportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Data exported for collection User")) - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - - require.Equal( - t, - `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, - string(b), - ) -} - -func TestBackupExportCmd_WithMultipleCollections_NoError(t *testing.T) { - ctx := context.Background() - - cfg, di, close := startTestNode(t) - defer close() - - _, err := di.db.AddSchema(ctx, `type User { - name: String - age: Int - } - - type Address { - street: String - city: String - }`) - require.NoError(t, err) - - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) - require.NoError(t, err) - - col1, err := di.db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - err = col1.Create(ctx, doc1) - require.NoError(t, err) - - doc2, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`)) - require.NoError(t, err) - - col2, err := di.db.GetCollectionByName(ctx, "Address") - require.NoError(t, err) - - err = col2.Create(ctx, doc2) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbExportCmd := MakeBackupExportCommand(cfg) - dbExportCmd.Flags().Set("collections", "User, Address") - err = dbExportCmd.RunE(dbExportCmd, 
[]string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Data exported for collections User, Address")) - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - fileMap := map[string]any{} - err = json.Unmarshal(b, &fileMap) - require.NoError(t, err) - - expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) - err = json.Unmarshal(data, &expectedMap) - require.NoError(t, err) - - require.EqualValues(t, expectedMap, fileMap) -} diff --git a/cli/backup_import.go b/cli/backup_import.go index 6802230aa0..66023b9317 100644 --- a/cli/backup_import.go +++ b/cli/backup_import.go @@ -11,20 +11,13 @@ package cli import ( - "bytes" - "encoding/json" - "io" - "net/http" - "os" - "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) -func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { +func MakeBackupImportCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "import ", Short: "Import a JSON data file to the database", @@ -39,59 +32,7 @@ Example: import data to the database: return nil }, RunE: func(cmd *cobra.Command, args []string) (err error) { - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.ImportPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } - - inputPath := args[0] - data := map[string]string{ - "filepath": inputPath, - } - - b, err := json.Marshal(data) - if err != nil { - return err - } - - res, err := http.Post(endpoint.String(), "application/json", bytes.NewBuffer(b)) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return err - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - r := indexCreateResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to import data", - logging.NewKV("Errors", r.Errors)) - } else { - log.FeedbackInfo(cmd.Context(), "Successfully imported data from file", - logging.NewKV("File", inputPath)) - } - } - return nil + return db.BasicImport(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/backup_import_test.go b/cli/backup_import_test.go deleted file mode 100644 index 101792dd0c..0000000000 --- a/cli/backup_import_test.go +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "context" - "os" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/sourcenetwork/defradb/client" -) - -func TestBackupImportCmd_WithNoArgument_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - - dbImportCmd := MakeBackupImportCommand(cfg) - err := dbImportCmd.ValidateArgs([]string{}) - require.ErrorIs(t, err, ErrInvalidArgumentLength) -} - -func TestBackupImportCmd_IfInvalidAddress_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - cfg.API.Address = "invalid address" - - filepath := t.TempDir() + "/test.json" - - dbImportCmd := MakeBackupImportCommand(cfg) - err := dbImportCmd.RunE(dbImportCmd, []string{filepath}) - require.ErrorIs(t, err, NewErrFailedToJoinEndpoint(err)) -} - -func TestBackupImportCmd_WithNonExistantFile_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - filepath := t.TempDir() + "/test.json" - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbImportCmd := MakeBackupImportCommand(cfg) - err := dbImportCmd.RunE(dbImportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Failed to import data")) -} - -func TestBackupImportCmd_WithEmptyDatastore_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - filepath := t.TempDir() + "/test.json" - - err := os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), - 0664, - ) - require.NoError(t, err) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbImportCmd := MakeBackupImportCommand(cfg) - err = dbImportCmd.RunE(dbImportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Failed to import data")) -} - -func TestBackupImportCmd_WithExistingCollection_NoError(t *testing.T) { - ctx := context.Background() - - cfg, di, close := startTestNode(t) - defer close() - - _, err := di.db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - filepath := t.TempDir() + "/test.json" - - err = os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), - 0664, - ) - require.NoError(t, err) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - dbImportCmd := MakeBackupImportCommand(cfg) - err = dbImportCmd.RunE(dbImportCmd, []string{filepath}) - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, lineHas(logLines, "msg", "Successfully imported data from file")) - - col, err := di.db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - key, err := client.NewDocKeyFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") - require.NoError(t, err) - doc, err := col.Get(ctx, key, false) - require.NoError(t, err) - - val, err := doc.Get("name") - require.NoError(t, err) - - require.Equal(t, "John", val.(string)) -} diff --git a/cli/blocks_get.go b/cli/blocks_get.go deleted file mode 100644 index c3519f99e7..0000000000 --- a/cli/blocks_get.go +++ /dev/null @@ -1,80 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the 
Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "io" - "net/http" - "os" - - "github.com/spf13/cobra" - - httpapi "github.com/sourcenetwork/defradb/api/http" - "github.com/sourcenetwork/defradb/config" -) - -func MakeBlocksGetCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "get [CID]", - Short: "Get a block by its CID from the blockstore", - RunE: func(cmd *cobra.Command, args []string) (err error) { - if len(args) != 1 { - return NewErrMissingArg("CID") - } - cid := args[0] - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.BlocksPath, cid) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToReadResponseBody(err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return NewErrFailedToStatStdOut(err) - } - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - graphlErr, err := hasGraphQLErrors(response) - if err != nil { - return NewErrFailedToHandleGQLErrors(err) - } - indentedResult, err := indentJSON(response) - if err != nil { - return NewErrFailedToPrettyPrintResponse(err) - } - if graphlErr { - log.FeedbackError(cmd.Context(), indentedResult) - } else { - log.FeedbackInfo(cmd.Context(), indentedResult) - } - } - return nil - }, - } - return cmd -} diff --git a/cli/cli.go b/cli/cli.go index 707adbab7c..5785e61b48 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -14,176 +14,78 @@ Package cli provides the command-line interface. package cli import ( - "bufio" - "bytes" - "context" - "encoding/json" - "os" - "strings" - "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" ) var log = logging.MustNewLogger("cli") -const badgerDatastoreName = "badger" - -// Errors with how the command is invoked by user -var usageErrors = []string{ - // cobra errors - subject to change with new versions of cobra - "flag needs an argument", - "invalid syntax", - "unknown flag", - "unknown shorthand flag", - "unknown command", - // custom defradb errors - errMissingArg, - errMissingArgs, - errTooManyArgs, -} - -type DefraCommand struct { - RootCmd *cobra.Command - Cfg *config.Config -} - // NewDefraCommand returns the root command instanciated with its tree of subcommands. 
-func NewDefraCommand(cfg *config.Config) DefraCommand { +func NewDefraCommand(cfg *config.Config) (*cobra.Command, error) { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return nil, err + } + rootCmd := MakeRootCommand(cfg) - rpcCmd := MakeRPCCommand(cfg) - blocksCmd := MakeBlocksCommand() + p2pCmd := MakeP2PCommand(cfg) schemaCmd := MakeSchemaCommand() schemaMigrationCmd := MakeSchemaMigrationCommand() indexCmd := MakeIndexCommand() clientCmd := MakeClientCommand() backupCmd := MakeBackupCommand() - rpcReplicatorCmd := MakeReplicatorCommand() + p2pReplicatorCmd := MakeP2PReplicatorCommand() p2pCollectionCmd := MakeP2PCollectionCommand() p2pCollectionCmd.AddCommand( - MakeP2PCollectionAddCommand(cfg), - MakeP2PCollectionRemoveCommand(cfg), - MakeP2PCollectionGetallCommand(cfg), + MakeP2PCollectionAddCommand(cfg, db), + MakeP2PCollectionRemoveCommand(cfg, db), + MakeP2PCollectionGetallCommand(cfg, db), ) - rpcReplicatorCmd.AddCommand( - MakeReplicatorGetallCommand(cfg), - MakeReplicatorSetCommand(cfg), - MakeReplicatorDeleteCommand(cfg), + p2pReplicatorCmd.AddCommand( + MakeP2PReplicatorGetallCommand(cfg, db), + MakeP2PReplicatorSetCommand(cfg, db), + MakeP2PReplicatorDeleteCommand(cfg, db), ) - rpcCmd.AddCommand( - rpcReplicatorCmd, + p2pCmd.AddCommand( + p2pReplicatorCmd, p2pCollectionCmd, ) - blocksCmd.AddCommand( - MakeBlocksGetCommand(cfg), - ) schemaMigrationCmd.AddCommand( - MakeSchemaMigrationSetCommand(cfg), - MakeSchemaMigrationGetCommand(cfg), + MakeSchemaMigrationSetCommand(cfg, db), + MakeSchemaMigrationGetCommand(cfg, db), ) schemaCmd.AddCommand( - MakeSchemaAddCommand(cfg), - MakeSchemaListCommand(cfg), - MakeSchemaPatchCommand(cfg), + MakeSchemaAddCommand(cfg, db), + MakeSchemaPatchCommand(cfg, db), schemaMigrationCmd, ) indexCmd.AddCommand( - MakeIndexCreateCommand(cfg), - MakeIndexDropCommand(cfg), - MakeIndexListCommand(cfg), + MakeIndexCreateCommand(cfg, db), + MakeIndexDropCommand(cfg, db), + MakeIndexListCommand(cfg, db), ) backupCmd.AddCommand( - MakeBackupExportCommand(cfg), - MakeBackupImportCommand(cfg), + MakeBackupExportCommand(cfg, db), + MakeBackupImportCommand(cfg, db), ) clientCmd.AddCommand( - MakeDumpCommand(cfg), - MakePingCommand(cfg), - MakeRequestCommand(cfg), - MakePeerIDCommand(cfg), + MakeDumpCommand(cfg, db), + MakeRequestCommand(cfg, db), schemaCmd, indexCmd, - rpcCmd, - blocksCmd, + p2pCmd, backupCmd, ) rootCmd.AddCommand( clientCmd, MakeStartCommand(cfg), - MakeServerDumpCmd(cfg), + MakeServerDumpCmd(cfg, db), MakeVersionCommand(), MakeInitCommand(cfg), ) - return DefraCommand{rootCmd, cfg} -} - -func (defraCmd *DefraCommand) Execute(ctx context.Context) error { - // Silence cobra's default output to control usage and error display. - defraCmd.RootCmd.SilenceUsage = true - defraCmd.RootCmd.SilenceErrors = true - defraCmd.RootCmd.SetOut(os.Stdout) - cmd, err := defraCmd.RootCmd.ExecuteContextC(ctx) - if err != nil { - // Intentional cancellation. - if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) { - return nil - } - // User error. - for _, cobraError := range usageErrors { - if strings.HasPrefix(err.Error(), cobraError) { - log.FeedbackErrorE(ctx, "Usage error", err) - if usageErr := cmd.Usage(); usageErr != nil { - log.FeedbackFatalE(ctx, "error displaying usage help", usageErr) - } - return err - } - } - // Internal error. 
- log.FeedbackErrorE(ctx, "Execution error", err) - return err - } - return nil -} - -func isFileInfoPipe(fi os.FileInfo) bool { - return fi.Mode()&os.ModeNamedPipe != 0 -} - -func readStdin() (string, error) { - var s strings.Builder - scanner := bufio.NewScanner(os.Stdin) - for scanner.Scan() { - s.Write(scanner.Bytes()) - } - if err := scanner.Err(); err != nil { - return "", errors.Wrap("reading standard input", err) - } - return s.String(), nil -} - -func indentJSON(b []byte) (string, error) { - var indentedJSON bytes.Buffer - err := json.Indent(&indentedJSON, b, "", " ") - return indentedJSON.String(), err -} - -type graphqlErrors struct { - Errors any `json:"errors"` -} - -func hasGraphQLErrors(buf []byte) (bool, error) { - errs := graphqlErrors{} - err := json.Unmarshal(buf, &errs) - if err != nil { - return false, errors.Wrap("couldn't parse GraphQL response %w", err) - } - if errs.Errors != nil { - return true, nil - } else { - return false, nil - } + return rootCmd, nil } diff --git a/cli/cli_test.go b/cli/cli_test.go deleted file mode 100644 index 877dd7b69f..0000000000 --- a/cli/cli_test.go +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "testing" - - "github.com/spf13/cobra" - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/config" -) - -// Verify that the top-level commands are registered, and if particular ones have subcommands. 
-func TestNewDefraCommand(t *testing.T) { - expectedCommandNames := []string{ - "client", - "init", - "server-dump", - "start", - "version", - } - actualCommandNames := []string{} - r := NewDefraCommand(config.DefaultConfig()) - for _, c := range r.RootCmd.Commands() { - actualCommandNames = append(actualCommandNames, c.Name()) - } - for _, expectedCommandName := range expectedCommandNames { - assert.Contains(t, actualCommandNames, expectedCommandName) - } - for _, c := range r.RootCmd.Commands() { - if c.Name() == "client" { - assert.NotEmpty(t, c.Commands()) - } - } -} - -func TestAllHaveUsage(t *testing.T) { - cfg := config.DefaultConfig() - defra := NewDefraCommand(cfg) - walkCommandTree(t, defra.RootCmd, func(c *cobra.Command) { - assert.NotEmpty(t, c.Use) - }) -} - -func walkCommandTree(t *testing.T, cmd *cobra.Command, f func(*cobra.Command)) { - f(cmd) - for _, c := range cmd.Commands() { - walkCommandTree(t, c, f) - } -} diff --git a/cli/dump.go b/cli/dump.go index f35e9232b1..c1b6433ef2 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -11,69 +11,18 @@ package cli import ( - "encoding/json" - "io" - "net/http" - "os" - "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" ) -func MakeDumpCommand(cfg *config.Config) *cobra.Command { +func MakeDumpCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if !isFileInfoPipe(stdout) { - log.FeedbackInfo(cmd.Context(), "Requesting the database to dump its state, server-side...") - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.DumpPath) - if err != nil { - return errors.Wrap("failed to join endpoint", err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return errors.Wrap("failed dump request", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - // dumpResponse follows structure of HTTP API's response - type dumpResponse struct { - Data struct { - Response string `json:"response"` - } `json:"data"` - } - r := dumpResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return errors.Wrap("failed parsing of response", err) - } - log.FeedbackInfo(cmd.Context(), r.Data.Response) - } - return nil + return db.PrintDump(cmd.Context()) }, } return cmd diff --git a/cli/index_create.go b/cli/index_create.go index a91a76d2d0..aa024c128d 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -11,33 +11,18 @@ package cli import ( - "bytes" "encoding/json" - "io" - "net/http" - "os" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) -type indexCreateResponse struct { - Data struct { - Index client.IndexDescription `json:"index"` - } `json:"data"` - Errors []struct { - Message string 
`json:"message"` - } `json:"errors"` -} - -func MakeIndexCreateCommand(cfg *config.Config) *cobra.Command { +func MakeIndexCreateCommand(cfg *config.Config, db client.DB) *cobra.Command { var collectionArg string var nameArg string - var fieldsArg string + var fieldsArg []string var cmd = &cobra.Command{ Use: "create -c --collection --fields [-n --name ]", Short: "Creates a secondary index on a collection's field(s)", @@ -51,75 +36,29 @@ Example: create an index for 'Users' collection on 'name' field: Example: create a named index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name --name UsersByName`, ValidArgs: []string{"collection", "fields", "name"}, - RunE: func(cmd *cobra.Command, args []string) (err error) { - if collectionArg == "" || fieldsArg == "" { - if collectionArg == "" { - return NewErrMissingArg("collection") - } else { - return NewErrMissingArg("fields") - } + RunE: func(cmd *cobra.Command, args []string) error { + var fields []client.IndexedFieldDescription + for _, name := range fieldsArg { + fields = append(fields, client.IndexedFieldDescription{Name: name}) } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.IndexPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) + desc := client.IndexDescription{ + Name: nameArg, + Fields: fields, } - - data := map[string]string{ - "collection": collectionArg, - "fields": fieldsArg, - } - if nameArg != "" { - data["name"] = nameArg - } - - jsonData, err := json.Marshal(data) + col, err := db.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err } - - res, err := http.Post(endpoint.String(), "application/json", bytes.NewBuffer(jsonData)) - if err != nil { - return NewErrFailedToSendRequest(err) - } - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() + desc, err = col.CreateIndex(cmd.Context(), desc) if err != nil { return err } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - r := indexCreateResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to create index", - logging.NewKV("Errors", r.Errors)) - } else { - log.FeedbackInfo(cmd.Context(), "Successfully created index", - logging.NewKV("Index", r.Data.Index)) - } - } - return nil + return json.NewEncoder(cmd.OutOrStdout()).Encode(desc) }, } cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") cmd.Flags().StringVarP(&nameArg, "name", "n", "", "Index name") - cmd.Flags().StringVar(&fieldsArg, "fields", "", "Fields to index") + cmd.Flags().StringSliceVar(&fieldsArg, "fields", []string{}, "Fields to index") return cmd } diff --git a/cli/index_create_test.go b/cli/index_create_test.go deleted file mode 100644 index ac75248c10..0000000000 --- a/cli/index_create_test.go +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
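// A short sketch of the call sequence the rewritten index create command now performs
// against client.DB, assuming a db handle such as the one returned by http.NewClient.
// The collection, field, and index names are illustrative values only.
package example

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

func createUsersNameIndex(ctx context.Context, db client.DB) (client.IndexDescription, error) {
	col, err := db.GetCollectionByName(ctx, "Users")
	if err != nil {
		return client.IndexDescription{}, err
	}
	desc := client.IndexDescription{
		Name:   "UsersByName",
		Fields: []client.IndexedFieldDescription{{Name: "name"}},
	}
	// CreateIndex returns the description as stored by the node.
	return col.CreateIndex(ctx, desc)
}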
- -package cli - -import ( - "bufio" - "bytes" - "context" - "encoding/json" - "io" - "os" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" -) - -const randomMultiaddr = "/ip4/0.0.0.0/tcp/0" - -func getTestConfig(t *testing.T) *config.Config { - cfg := config.DefaultConfig() - dir := t.TempDir() - cfg.Datastore.Store = "memory" - cfg.Datastore.Badger.Path = dir - cfg.Net.P2PDisabled = false - cfg.Net.P2PAddress = randomMultiaddr - cfg.Net.RPCAddress = "0.0.0.0:0" - cfg.Net.TCPAddress = randomMultiaddr - cfg.API.Address = "0.0.0.0:0" - return cfg -} - -func startTestNode(t *testing.T) (*config.Config, *defraInstance, func()) { - cfg := getTestConfig(t) - - ctx := context.Background() - di, err := start(ctx, cfg) - require.NoError(t, err) - return cfg, di, func() { di.close(ctx) } -} - -func parseLines(r io.Reader) ([]map[string]any, error) { - fileScanner := bufio.NewScanner(r) - - fileScanner.Split(bufio.ScanLines) - - logLines := []map[string]any{} - for fileScanner.Scan() { - loggedLine := make(map[string]any) - err := json.Unmarshal(fileScanner.Bytes(), &loggedLine) - if err != nil { - return nil, err - } - logLines = append(logLines, loggedLine) - } - - return logLines, nil -} - -func lineHas(lines []map[string]any, key, value string) bool { - for _, line := range lines { - if line[key] == value { - return true - } - } - return false -} - -func simulateConsoleOutput(t *testing.T) (*bytes.Buffer, func()) { - b := &bytes.Buffer{} - log.ApplyConfig(logging.Config{ - EncoderFormat: logging.NewEncoderFormatOption(logging.JSON), - Pipe: b, - }) - - f, err := os.CreateTemp(t.TempDir(), "tmpFile") - require.NoError(t, err) - originalStdout := os.Stdout - os.Stdout = f - - return b, func() { - os.Stdout = originalStdout - f.Close() - os.Remove(f.Name()) - } -} - -func execAddSchemaCmd(t *testing.T, cfg *config.Config, schema string) { - addSchemaCmd := MakeSchemaAddCommand(cfg) - err := addSchemaCmd.RunE(addSchemaCmd, []string{schema}) - require.NoError(t, err) -} - -func execCreateIndexCmd(t *testing.T, cfg *config.Config, collection, fields, name string) { - indexCreateCmd := MakeIndexCreateCommand(cfg) - indexCreateCmd.SetArgs([]string{ - "--collection", collection, - "--fields", fields, - "--name", name, - }) - err := indexCreateCmd.Execute() - require.NoError(t, err) -} - -func hasLogWithKey(logLines []map[string]any, key string) bool { - for _, logLine := range logLines { - if _, ok := logLine[key]; ok { - return true - } - } - return false -} - -func TestIndexCreateCmd_IfInvalidAddress_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - cfg.API.Address = "invalid address" - indexCreateCmd := MakeIndexCreateCommand(cfg) - - indexCreateCmd.SetArgs([]string{ - "--collection", "User", - "--fields", "Name", - "--name", "users_name_index", - }) - err := indexCreateCmd.Execute() - require.ErrorIs(t, err, NewErrFailedToJoinEndpoint(err)) -} - -func TestIndexCreateCmd_IfNoCollection_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - indexCreateCmd := MakeIndexCreateCommand(cfg) - - outputBuf := bytes.NewBufferString("") - indexCreateCmd.SetOut(outputBuf) - - indexCreateCmd.SetArgs([]string{ - "--collection", "User", - "--fields", "Name", - "--name", "users_name_index", - }) - err := indexCreateCmd.Execute() - require.NoError(t, err) - - out, err := io.ReadAll(outputBuf) - require.NoError(t, err) - 
- r := make(map[string]any) - err = json.Unmarshal(out, &r) - require.NoError(t, err) - - _, hasErrors := r["errors"] - assert.True(t, hasErrors, "command should return error") -} - -func TestIndexCreateCmd_IfNoErrors_ReturnData(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - execAddSchemaCmd(t, cfg, `type User { name: String }`) - - indexCreateCmd := MakeIndexCreateCommand(cfg) - outputBuf := bytes.NewBufferString("") - indexCreateCmd.SetOut(outputBuf) - - indexCreateCmd.SetArgs([]string{ - "--collection", "User", - "--fields", "name", - "--name", "users_name_index", - }) - err := indexCreateCmd.Execute() - require.NoError(t, err) - - out, err := io.ReadAll(outputBuf) - require.NoError(t, err) - - r := make(map[string]any) - err = json.Unmarshal(out, &r) - require.NoError(t, err) - - _, hasData := r["data"] - assert.True(t, hasData, "command should return data") -} - -func TestIndexCreateCmd_WithConsoleOutputIfNoCollection_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - indexCreateCmd := MakeIndexCreateCommand(cfg) - indexCreateCmd.SetArgs([]string{ - "--collection", "User", - "--fields", "Name", - "--name", "users_name_index", - }) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexCreateCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - assert.True(t, hasLogWithKey(logLines, "Errors")) -} - -func TestIndexCreateCmd_WithConsoleOutputIfNoErrors_ReturnData(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - execAddSchemaCmd(t, cfg, `type User { name: String }`) - - const indexName = "users_name_index" - indexCreateCmd := MakeIndexCreateCommand(cfg) - indexCreateCmd.SetArgs([]string{ - "--collection", "User", - "--fields", "name", - "--name", indexName, - }) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexCreateCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.Len(t, logLines, 1) - result, ok := logLines[0]["Index"].(map[string]any) - require.True(t, ok) - assert.Equal(t, indexName, result["Name"]) - - assert.False(t, hasLogWithKey(logLines, "Errors")) -} diff --git a/cli/index_drop.go b/cli/index_drop.go index ef0a37db0c..c5e4c80d87 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -11,29 +11,13 @@ package cli import ( - "bytes" - "encoding/json" - "io" - "net/http" - "os" - "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) -type indexDropResponse struct { - Data struct { - Result string `json:"result"` - } `json:"data"` - Errors []struct { - Message string `json:"message"` - } `json:"errors"` -} - -func MakeIndexDropCommand(cfg *config.Config) *cobra.Command { +func MakeIndexDropCommand(cfg *config.Config, db client.DB) *cobra.Command { var collectionArg string var nameArg string var cmd = &cobra.Command{ @@ -44,74 +28,12 @@ func MakeIndexDropCommand(cfg *config.Config) *cobra.Command { Example: drop the index 'UsersByName' for 'Users' collection: defradb client index create --collection Users --name UsersByName`, ValidArgs: []string{"collection", "name"}, - RunE: func(cmd *cobra.Command, args []string) (err error) { - if collectionArg == "" || nameArg == "" { - if collectionArg == "" { - 
return NewErrMissingArg("collection") - } else { - return NewErrMissingArg("name") - } - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.IndexPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } - - data := map[string]string{ - "collection": collectionArg, - "name": nameArg, - } - - jsonData, err := json.Marshal(data) + RunE: func(cmd *cobra.Command, args []string) error { + col, err := db.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err } - - req, err := http.NewRequest("DELETE", endpoint.String(), bytes.NewBuffer(jsonData)) - if err != nil { - return NewErrFailedToSendRequest(err) - } - req.Header.Add("Content-Type", "application/json") - client := &http.Client{} - res, err := client.Do(req) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return err - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - r := indexDropResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to drop index", - logging.NewKV("Errors", r.Errors)) - } else { - log.FeedbackInfo(cmd.Context(), "Successfully dropped index", - logging.NewKV("Result", r.Data.Result)) - } - } - return nil + return col.DropIndex(cmd.Context(), nameArg) }, } cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") diff --git a/cli/index_drop_test.go b/cli/index_drop_test.go deleted file mode 100644 index 7fa368a458..0000000000 --- a/cli/index_drop_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
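// A companion sketch for the rewritten index drop command: the CLI now resolves the
// collection through client.DB and drops the named index directly, with no HTTP
// round-trip code in the command itself. Names below are illustrative.
package example

import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

func dropUsersNameIndex(ctx context.Context, db client.DB) error {
	col, err := db.GetCollectionByName(ctx, "Users")
	if err != nil {
		return err
	}
	// DropIndex removes the secondary index by name on the resolved collection.
	return col.DropIndex(ctx, "UsersByName")
}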
- -package cli - -import ( - "bytes" - "encoding/json" - "io" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestIndexDropCmd_IfInvalidAddress_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - cfg.API.Address = "invalid address" - indexDropCmd := MakeIndexDropCommand(cfg) - - indexDropCmd.SetArgs([]string{"--collection", "User", "--name", "users_name_index"}) - err := indexDropCmd.Execute() - require.ErrorIs(t, err, NewErrFailedToJoinEndpoint(err)) -} - -func TestIndexDropCmd_IfNoCollection_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - indexDropCmd := MakeIndexDropCommand(cfg) - - outputBuf := bytes.NewBufferString("") - indexDropCmd.SetOut(outputBuf) - - indexDropCmd.SetArgs([]string{"--collection", "User", "--name", "users_name_index"}) - err := indexDropCmd.Execute() - require.NoError(t, err) - - out, err := io.ReadAll(outputBuf) - require.NoError(t, err) - - r := make(map[string]any) - err = json.Unmarshal(out, &r) - require.NoError(t, err) - - _, hasErrors := r["errors"] - assert.True(t, hasErrors, "command should return error") -} - -func TestIndexDropCmd_IfNoErrors_ShouldReturnData(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - execAddSchemaCmd(t, cfg, `type User { name: String }`) - execCreateIndexCmd(t, cfg, "User", "name", "users_name_index") - - indexDropCmd := MakeIndexDropCommand(cfg) - outputBuf := bytes.NewBufferString("") - indexDropCmd.SetOut(outputBuf) - - indexDropCmd.SetArgs([]string{"--collection", "User", "--name", "users_name_index"}) - err := indexDropCmd.Execute() - require.NoError(t, err) - - out, err := io.ReadAll(outputBuf) - require.NoError(t, err) - - r := make(map[string]any) - err = json.Unmarshal(out, &r) - require.NoError(t, err) - - _, hasData := r["data"] - assert.True(t, hasData, "command should return data") -} - -func TestIndexDropCmd_WithConsoleOutputIfNoCollection_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - indexDropCmd := MakeIndexDropCommand(cfg) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - indexDropCmd.SetArgs([]string{"--collection", "User", "--name", "users_name_index"}) - err := indexDropCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - assert.True(t, hasLogWithKey(logLines, "Errors")) -} - -func TestIndexDropCmd_WithConsoleOutputIfNoErrors_ShouldReturnData(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - execAddSchemaCmd(t, cfg, `type User { name: String }`) - execCreateIndexCmd(t, cfg, "User", "name", "users_name_index") - - indexDropCmd := MakeIndexDropCommand(cfg) - indexDropCmd.SetArgs([]string{"--collection", "User", "--name", "users_name_index"}) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexDropCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.Len(t, logLines, 1) - assert.Equal(t, "success", logLines[0]["Result"]) - - assert.False(t, hasLogWithKey(logLines, "Errors")) -} diff --git a/cli/index_list.go b/cli/index_list.go index 131782cfe5..a7608f31f5 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -12,30 +12,14 @@ package cli import ( "encoding/json" - "io" - "net/http" - "net/url" - "os" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" 
"github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) -type indexListResponse struct { - Data struct { - Collections map[string][]client.IndexDescription `json:"collections"` - Indexes []client.IndexDescription `json:"indexes"` - } `json:"data"` - Errors []struct { - Message string `json:"message"` - } `json:"errors"` -} - -func MakeIndexListCommand(cfg *config.Config) *cobra.Command { +func MakeIndexListCommand(cfg *config.Config, db client.DB) *cobra.Command { var collectionArg string var cmd = &cobra.Command{ Use: "list [-c --collection ]", @@ -48,60 +32,25 @@ Otherwise, all indexes in the database will be shown. Example: show all index for 'Users' collection: defradb client index list --collection Users`, ValidArgs: []string{"collection"}, - RunE: func(cmd *cobra.Command, args []string) (err error) { - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.IndexPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } - - if collectionArg != "" { - values := url.Values{ - "collection": {collectionArg}, - } - endpoint.RawQuery = values.Encode() - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) + RunE: func(cmd *cobra.Command, args []string) error { + switch { + case collectionArg != "": + col, err := db.GetCollectionByName(cmd.Context(), collectionArg) + if err != nil { + return err } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return err - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - r := indexListResponse{} - err = json.Unmarshal(response, &r) + cols, err := col.GetIndexes(cmd.Context()) if err != nil { - return NewErrFailedToUnmarshalResponse(err) + return err } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to list index", - logging.NewKV("Errors", r.Errors)) - } else if collectionArg != "" { - log.FeedbackInfo(cmd.Context(), "Fetched indexes for collection "+collectionArg, - logging.NewKV("Indexes", r.Data.Indexes)) - } else { - log.FeedbackInfo(cmd.Context(), "Fetched all indexes", - logging.NewKV("Collections", r.Data.Collections)) + return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) + default: + cols, err := db.GetAllIndexes(cmd.Context()) + if err != nil { + return err } + return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) } - return nil }, } cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") diff --git a/cli/index_list_test.go b/cli/index_list_test.go deleted file mode 100644 index 548d2af040..0000000000 --- a/cli/index_list_test.go +++ /dev/null @@ -1,145 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "bytes" - "encoding/json" - "io" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestIndexListCmd_IfInvalidAddress_ReturnError(t *testing.T) { - cfg := getTestConfig(t) - cfg.API.Address = "invalid address" - indexCreateCmd := MakeIndexListCommand(cfg) - - err := indexCreateCmd.RunE(indexCreateCmd, nil) - require.ErrorIs(t, err, NewErrFailedToJoinEndpoint(err)) -} - -func TestIndexListCmd_IfNoErrors_ShouldReturnData(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - execAddSchemaCmd(t, cfg, `type User { name: String }`) - execCreateIndexCmd(t, cfg, "User", "name", "users_name_index") - - indexListCmd := MakeIndexListCommand(cfg) - outputBuf := bytes.NewBufferString("") - indexListCmd.SetOut(outputBuf) - - err := indexListCmd.Execute() - require.NoError(t, err) - - out, err := io.ReadAll(outputBuf) - require.NoError(t, err) - - r := make(map[string]any) - err = json.Unmarshal(out, &r) - require.NoError(t, err) - - _, hasData := r["data"] - assert.True(t, hasData, "command should return data") -} - -func TestIndexListCmd_WithConsoleOutputIfCollectionDoesNotExist_ReturnError(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - indexListCmd := MakeIndexListCommand(cfg) - indexListCmd.SetArgs([]string{"--collection", "User"}) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexListCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.True(t, hasLogWithKey(logLines, "Errors")) -} - -func TestIndexListCmd_WithConsoleOutputIfCollectionIsGiven_ReturnCollectionList(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - const indexName = "users_name_index" - execAddSchemaCmd(t, cfg, `type User { name: String }`) - execCreateIndexCmd(t, cfg, "User", "name", indexName) - - indexListCmd := MakeIndexListCommand(cfg) - indexListCmd.SetArgs([]string{"--collection", "User"}) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexListCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.Len(t, logLines, 1) - resultList, ok := logLines[0]["Indexes"].([]any) - require.True(t, ok) - require.Len(t, resultList, 1) - result, ok := resultList[0].(map[string]any) - require.True(t, ok) - assert.Equal(t, indexName, result["Name"]) - - assert.False(t, hasLogWithKey(logLines, "Errors")) -} - -func TestIndexListCmd_WithConsoleOutputIfNoArgs_ReturnAllIndexes(t *testing.T) { - cfg, _, close := startTestNode(t) - defer close() - - const userIndexName = "users_name_index" - const productIndexName = "product_price_index" - execAddSchemaCmd(t, cfg, `type User { name: String }`) - execAddSchemaCmd(t, cfg, `type Product { price: Int }`) - execCreateIndexCmd(t, cfg, "User", "name", userIndexName) - execCreateIndexCmd(t, cfg, "Product", "price", productIndexName) - - indexListCmd := MakeIndexListCommand(cfg) - - outputBuf, revertOutput := simulateConsoleOutput(t) - defer revertOutput() - - err := indexListCmd.Execute() - require.NoError(t, err) - - logLines, err := parseLines(outputBuf) - require.NoError(t, err) - require.Len(t, logLines, 1) - resultCollections, ok := logLines[0]["Collections"].(map[string]any) - require.True(t, ok) - - userCollection, ok := resultCollections["User"].([]any) - require.True(t, ok) - require.Len(t, userCollection, 1) - userIndex, ok := 
userCollection[0].(map[string]any) - require.True(t, ok) - require.Equal(t, userIndexName, userIndex["Name"]) - - productCollection, ok := resultCollections["Product"].([]any) - require.True(t, ok) - require.Len(t, productCollection, 1) - productIndex, ok := productCollection[0].(map[string]any) - require.True(t, ok) - require.Equal(t, productIndexName, productIndex["Name"]) - - assert.False(t, hasLogWithKey(logLines, "Errors")) -} diff --git a/cli/blocks.go b/cli/p2p.go similarity index 69% rename from cli/blocks.go rename to cli/p2p.go index 9e55c36d22..9cb772c5f7 100644 --- a/cli/blocks.go +++ b/cli/p2p.go @@ -12,13 +12,15 @@ package cli import ( "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/config" ) -func MakeBlocksCommand() *cobra.Command { +func MakeP2PCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ - Use: "blocks", - Short: "Interact with the database's blockstore", + Use: "p2p", + Short: "Interact with the DefraDB P2P system", + Long: "Interact with the DefraDB P2P system", } - return cmd } diff --git a/cli/p2p_collection_add.go b/cli/p2p_collection_add.go index 46a4f171e1..fb503bc39b 100644 --- a/cli/p2p_collection_add.go +++ b/cli/p2p_collection_add.go @@ -11,51 +11,27 @@ package cli import ( - "context" - "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" ) -func MakeP2PCollectionAddCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionAddCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "add [collectionID]", Short: "Add P2P collections", Long: `Add P2P collections to the synchronized pubsub topics. The collections are synchronized between nodes of a pubsub network.`, Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.MinimumNArgs(1)(cmd, args); err != nil { - return errors.New("must specify at least one collectionID") + if err := cobra.ExactArgs(1)(cmd, args); err != nil { + return errors.New("must specify collectionID") } return nil }, RunE: func(cmd *cobra.Command, args []string) error { - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) - if err != nil { - return ErrFailedToCreateRPCClient - } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - err = client.AddP2PCollections(ctx, args...) 
- if err != nil { - return errors.Wrap("failed to add P2P collections, request failed", err) - } - log.FeedbackInfo(ctx, "Successfully added P2P collections", logging.NewKV("Collections", args)) - return nil + return db.AddP2PCollection(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index cb9c9f4025..9c82b863a2 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -11,19 +11,16 @@ package cli import ( - "context" + "encoding/json" "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" ) -func MakeP2PCollectionGetallCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionGetallCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all P2P collections", @@ -36,35 +33,11 @@ This is the list of collections of the node that are synchronized on the pubsub return nil }, RunE: func(cmd *cobra.Command, args []string) error { - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) + cols, err := db.GetAllP2PCollections(cmd.Context()) if err != nil { - return ErrFailedToCreateRPCClient + return err } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - collections, err := client.GetAllP2PCollections(ctx) - if err != nil { - return errors.Wrap("failed to add P2P collections, request failed", err) - } - - if len(collections) > 0 { - log.FeedbackInfo(ctx, "Successfully got all P2P collections") - for _, col := range collections { - log.FeedbackInfo(ctx, col.Name, logging.NewKV("CollectionID", col.ID)) - } - } else { - log.FeedbackInfo(ctx, "No P2P collection found") - } - - return nil + return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) }, } return cmd diff --git a/cli/p2p_collection_remove.go b/cli/p2p_collection_remove.go index 66dbd5fa16..de8f4993e8 100644 --- a/cli/p2p_collection_remove.go +++ b/cli/p2p_collection_remove.go @@ -11,51 +11,27 @@ package cli import ( - "context" - "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" ) -func MakeP2PCollectionRemoveCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionRemoveCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "remove [collectionID]", Short: "Remove P2P collections", Long: `Remove P2P collections from the followed pubsub topics. 
The removed collections will no longer be synchronized between nodes.`, Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.MinimumNArgs(1)(cmd, args); err != nil { - return errors.New("must specify at least one collectionID") + if err := cobra.ExactArgs(1)(cmd, args); err != nil { + return errors.New("must specify collectionID") } return nil }, RunE: func(cmd *cobra.Command, args []string) error { - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) - if err != nil { - return ErrFailedToCreateRPCClient - } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - err = client.RemoveP2PCollections(ctx, args...) - if err != nil { - return errors.Wrap("failed to remove P2P collections, request failed", err) - } - log.FeedbackInfo(ctx, "Successfully removed P2P collections", logging.NewKV("Collections", args)) - return nil + return db.RemoveP2PCollection(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/replicator.go b/cli/p2p_replicator.go similarity index 93% rename from cli/replicator.go rename to cli/p2p_replicator.go index c7956c80a6..d12684be51 100644 --- a/cli/replicator.go +++ b/cli/p2p_replicator.go @@ -14,7 +14,7 @@ import ( "github.com/spf13/cobra" ) -func MakeReplicatorCommand() *cobra.Command { +func MakeP2PReplicatorCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "replicator", Short: "Configure the replicator system", diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go new file mode 100644 index 0000000000..fb5a6d200d --- /dev/null +++ b/cli/p2p_replicator_delete.go @@ -0,0 +1,42 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/libp2p/go-libp2p/core/peer" + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/errors" +) + +func MakeP2PReplicatorDeleteCommand(cfg *config.Config, db client.DB) *cobra.Command { + var cmd = &cobra.Command{ + Use: "delete ", + Short: "Delete a replicator. It will stop synchronizing", + Long: `Delete a replicator. It will stop synchronizing.`, + Args: func(cmd *cobra.Command, args []string) error { + if err := cobra.ExactArgs(1)(cmd, args); err != nil { + return errors.New("must specify one argument: PeerID") + } + return nil + }, + RunE: func(cmd *cobra.Command, args []string) error { + addr, err := peer.AddrInfoFromString(args[0]) + if err != nil { + return err + } + return db.DeleteReplicator(cmd.Context(), client.Replicator{Info: *addr}) + }, + } + return cmd +} diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go new file mode 100644 index 0000000000..24cc40dbfa --- /dev/null +++ b/cli/p2p_replicator_getall.go @@ -0,0 +1,37 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
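// A brief sketch of the P2P collection workflow the renamed p2p commands now map onto:
// one collection ID per add/remove call (ExactArgs(1) above) and a JSON-encodable
// listing from GetAllP2PCollections. The collection ID value is illustrative.
package example

import (
	"context"
	"encoding/json"
	"os"

	"github.com/sourcenetwork/defradb/client"
)

func syncCollection(ctx context.Context, db client.DB, collectionID string) error {
	if err := db.AddP2PCollection(ctx, collectionID); err != nil {
		return err
	}
	cols, err := db.GetAllP2PCollections(ctx)
	if err != nil {
		return err
	}
	// Print the currently synchronized collections, as the getall command does.
	return json.NewEncoder(os.Stdout).Encode(cols)
}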
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "encoding/json" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" +) + +func MakeP2PReplicatorGetallCommand(cfg *config.Config, db client.DB) *cobra.Command { + var cmd = &cobra.Command{ + Use: "getall", + Short: "Get all replicators", + Long: `Get all the replicators active in the P2P data sync system. +These are the replicators that are currently replicating data from one node to another.`, + RunE: func(cmd *cobra.Command, args []string) error { + reps, err := db.GetAllReplicators(cmd.Context()) + if err != nil { + return err + } + return json.NewEncoder(cmd.OutOrStdout()).Encode(reps) + }, + } + return cmd +} diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go new file mode 100644 index 0000000000..514e2decf6 --- /dev/null +++ b/cli/p2p_replicator_set.go @@ -0,0 +1,52 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/libp2p/go-libp2p/core/peer" + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/errors" +) + +func MakeP2PReplicatorSetCommand(cfg *config.Config, db client.DB) *cobra.Command { + var collections []string + var cmd = &cobra.Command{ + Use: "set [-c, --collection] ", + Short: "Set a P2P replicator", + Long: `Add a new target replicator. +A replicator replicates one or all collection(s) from this node to another. +`, + Args: func(cmd *cobra.Command, args []string) error { + if err := cobra.ExactArgs(1)(cmd, args); err != nil { + return errors.New("must specify one argument: peer") + } + return nil + }, + RunE: func(cmd *cobra.Command, args []string) error { + addr, err := peer.AddrInfoFromString(args[0]) + if err != nil { + return err + } + rep := client.Replicator{ + Info: *addr, + Schemas: collections, + } + return db.SetReplicator(cmd.Context(), rep) + }, + } + + cmd.Flags().StringArrayVarP(&collections, "collection", "c", + []string{}, "Define the collection for the replicator") + return cmd +} diff --git a/cli/peerid.go b/cli/peerid.go deleted file mode 100644 index a3d269fb2d..0000000000 --- a/cli/peerid.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
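// A sketch of the flow behind the new p2p replicator set and delete commands: the peer
// is parsed with peer.AddrInfoFromString and wrapped in client.Replicator, rather than
// going through the old net RPC client. The multiaddr argument and schema name are
// illustrative values.
package example

import (
	"context"

	"github.com/libp2p/go-libp2p/core/peer"

	"github.com/sourcenetwork/defradb/client"
)

func replicateUsersTo(ctx context.Context, db client.DB, peerAddr string) error {
	addr, err := peer.AddrInfoFromString(peerAddr)
	if err != nil {
		return err
	}
	rep := client.Replicator{
		Info:    *addr,
		Schemas: []string{"Users"},
	}
	if err := db.SetReplicator(ctx, rep); err != nil {
		return err
	}
	// Deleting with the same address info stops replication to that peer again.
	return db.DeleteReplicator(ctx, client.Replicator{Info: *addr})
}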
- -package cli - -import ( - "encoding/json" - "io" - "net/http" - "os" - - "github.com/spf13/cobra" - - httpapi "github.com/sourcenetwork/defradb/api/http" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" -) - -func MakePeerIDCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "peerid", - Short: "Get the PeerID of the node", - Long: `Get the PeerID of the node.`, - RunE: func(cmd *cobra.Command, _ []string) (err error) { - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if !isFileInfoPipe(stdout) { - log.FeedbackInfo(cmd.Context(), "Requesting PeerID...") - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.PeerIDPath) - if err != nil { - return errors.Wrap("failed to join endpoint", err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return errors.Wrap("failed to request PeerID", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - if res.StatusCode == http.StatusNotFound { - r := httpapi.ErrorResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return errors.Wrap("parsing of response failed", err) - } - if len(r.Errors) > 0 { - if isFileInfoPipe(stdout) { - b, err := json.Marshal(r.Errors[0]) - if err != nil { - return errors.Wrap("mashalling error response failed", err) - } - cmd.Println(string(b)) - } else { - log.FeedbackInfo(cmd.Context(), r.Errors[0].Message) - } - return nil - } - return errors.New("no PeerID available. P2P might be disabled") - } - - r := httpapi.DataResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return errors.Wrap("parsing of response failed", err) - } - if isFileInfoPipe(stdout) { - b, err := json.Marshal(r.Data) - if err != nil { - return errors.Wrap("mashalling data response failed", err) - } - cmd.Println(string(b)) - } else if data, ok := r.Data.(map[string]any); ok { - log.FeedbackInfo(cmd.Context(), data["peerID"].(string)) - } - - return nil - }, - } - return cmd -} diff --git a/cli/peerid_test.go b/cli/peerid_test.go deleted file mode 100644 index 34874ef80d..0000000000 --- a/cli/peerid_test.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "bytes" - "context" - "encoding/json" - "io" - "net/http" - "testing" - - "github.com/stretchr/testify/assert" - - httpapi "github.com/sourcenetwork/defradb/api/http" -) - -func TestGetPeerIDCmd(t *testing.T) { - cfg := getTestConfig(t) - peerIDCmd := MakePeerIDCommand(cfg) - dir := t.TempDir() - ctx := context.Background() - cfg.Datastore.Store = "memory" - cfg.Datastore.Badger.Path = dir - cfg.Net.P2PDisabled = false - - di, err := start(ctx, cfg) - if err != nil { - t.Fatal(err) - } - defer di.close(ctx) - - b := bytes.NewBufferString("") - peerIDCmd.SetOut(b) - - err = peerIDCmd.RunE(peerIDCmd, nil) - if err != nil { - t.Fatal(err) - } - - out, err := io.ReadAll(b) - if err != nil { - t.Fatal(err) - } - - r := make(map[string]any) - err = json.Unmarshal(out, &r) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, di.node.PeerID().String(), r["peerID"]) -} - -func TestGetPeerIDCmdWithNoP2P(t *testing.T) { - cfg := getTestConfig(t) - peerIDCmd := MakePeerIDCommand(cfg) - dir := t.TempDir() - ctx := context.Background() - cfg.Datastore.Store = "memory" - cfg.Datastore.Badger.Path = dir - cfg.Net.P2PDisabled = true - - di, err := start(ctx, cfg) - if err != nil { - t.Fatal(err) - } - defer di.close(ctx) - - b := bytes.NewBufferString("") - peerIDCmd.SetOut(b) - - err = peerIDCmd.RunE(peerIDCmd, nil) - if err != nil { - t.Fatal(err) - } - - out, err := io.ReadAll(b) - if err != nil { - t.Fatal(err) - } - - r := httpapi.ErrorItem{} - err = json.Unmarshal(out, &r) - if err != nil { - t.Fatal(err) - } - - assert.Equal(t, http.StatusNotFound, r.Extensions.Status) - assert.Equal(t, "Not Found", r.Extensions.HTTPError) - assert.Equal(t, "no PeerID available. P2P might be disabled", r.Message) -} diff --git a/cli/ping.go b/cli/ping.go deleted file mode 100644 index 210847dfcc..0000000000 --- a/cli/ping.go +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "encoding/json" - "io" - "net/http" - "os" - - "github.com/spf13/cobra" - - httpapi "github.com/sourcenetwork/defradb/api/http" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" -) - -func MakePingCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "ping", - Short: "Ping to test connection with a node", - RunE: func(cmd *cobra.Command, _ []string) (err error) { - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if !isFileInfoPipe(stdout) { - log.FeedbackInfo(cmd.Context(), "Sending ping...") - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.PingPath) - if err != nil { - return errors.Wrap("failed to join endpoint", err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return errors.Wrap("failed to send ping", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - type pingResponse struct { - Data struct { - Response string `json:"response"` - } `json:"data"` - } - r := pingResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return errors.Wrap("parsing of response failed", err) - } - log.FeedbackInfo(cmd.Context(), r.Data.Response) - } - return nil - }, - } - return cmd -} diff --git a/cli/replicator_delete.go b/cli/replicator_delete.go deleted file mode 100644 index eb7e580f12..0000000000 --- a/cli/replicator_delete.go +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "context" - - "github.com/libp2p/go-libp2p/core/peer" - "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" - - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" -) - -func MakeReplicatorDeleteCommand(cfg *config.Config) *cobra.Command { - var ( - fullRep bool - col []string - ) - var cmd = &cobra.Command{ - Use: "delete [-f, --full | -c, --collection] ", - Short: "Delete a replicator. It will stop synchronizing", - Long: `Delete a replicator. 
It will stop synchronizing.`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify one argument: PeerID") - } - return nil - }, - RunE: func(cmd *cobra.Command, args []string) error { - pidString := args[0] - - if len(col) == 0 && !fullRep { - return errors.New("must run with either --full or --collection") - } - - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) - if err != nil { - return ErrFailedToCreateRPCClient - } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - pid, err := peer.Decode(pidString) - if err != nil { - return NewErrFailedParsePeerID(err) - } - - err = client.DeleteReplicator(ctx, pid) - if err != nil { - return errors.Wrap("failed to delete replicator, request failed", err) - } - log.FeedbackInfo(ctx, "Successfully deleted replicator", logging.NewKV("PeerID", pid.String())) - return nil - }, - } - cmd.Flags().BoolVarP(&fullRep, "full", "f", false, "Set the replicator to act on all collections") - cmd.Flags().StringArrayVarP(&col, "collection", "c", - []string{}, "Define the collection for the replicator") - cmd.MarkFlagsMutuallyExclusive("full", "collection") - return cmd -} diff --git a/cli/replicator_getall.go b/cli/replicator_getall.go deleted file mode 100644 index 63cd6533ba..0000000000 --- a/cli/replicator_getall.go +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "context" - - "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" - - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" -) - -func MakeReplicatorGetallCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "getall", - Short: "Get all replicators", - Long: `Get all the replicators active in the P2P data sync system. 
-These are the replicators that are currently replicating data from one node to another.`, - RunE: func(cmd *cobra.Command, args []string) error { - if len(args) != 0 { - if err := cmd.Usage(); err != nil { - return err - } - return errors.New("must specify no argument") - } - - log.FeedbackInfo( - cmd.Context(), - "Getting all replicators", - logging.NewKV("RPCAddress", cfg.Net.RPCAddress), - ) - - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) - if err != nil { - return errors.Wrap("failed to create RPC client", err) - } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - reps, err := client.GetAllReplicators(ctx) - if err != nil { - return errors.Wrap("failed to get replicators, request failed", err) - } - if len(reps) > 0 { - log.FeedbackInfo(ctx, "Successfully got all replicators") - for _, rep := range reps { - log.FeedbackInfo( - ctx, - rep.Info.ID.String(), - logging.NewKV("Schemas", rep.Schemas), - logging.NewKV("Addrs", rep.Info.Addrs), - ) - } - } else { - log.FeedbackInfo(ctx, "No replicator found") - } - - return nil - }, - } - return cmd -} diff --git a/cli/replicator_set.go b/cli/replicator_set.go deleted file mode 100644 index acb70d0cfd..0000000000 --- a/cli/replicator_set.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "context" - - ma "github.com/multiformats/go-multiaddr" - "github.com/spf13/cobra" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" - - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" - netclient "github.com/sourcenetwork/defradb/net/api/client" -) - -func MakeReplicatorSetCommand(cfg *config.Config) *cobra.Command { - var ( - fullRep bool - col []string - ) - var cmd = &cobra.Command{ - Use: "set [-f, --full | -c, --collection] ", - Short: "Set a P2P replicator", - Long: `Add a new target replicator. -A replicator replicates one or all collection(s) from this node to another. 
-`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify one argument: peer") - } - return nil - }, - RunE: func(cmd *cobra.Command, args []string) error { - peerAddr, err := ma.NewMultiaddr(args[0]) - if err != nil { - return NewErrFailedParsePeerID(err) - } - if len(col) == 0 && !fullRep { - return errors.New("must run with either --full or --collection") - } - - cred := insecure.NewCredentials() - client, err := netclient.NewClient(cfg.Net.RPCAddress, grpc.WithTransportCredentials(cred)) - if err != nil { - return ErrFailedToCreateRPCClient - } - - rpcTimeoutDuration, err := cfg.Net.RPCTimeoutDuration() - if err != nil { - return errors.Wrap("failed to parse RPC timeout duration", err) - } - - ctx, cancel := context.WithTimeout(cmd.Context(), rpcTimeoutDuration) - defer cancel() - - pid, err := client.SetReplicator(ctx, peerAddr, col...) - if err != nil { - return errors.Wrap("failed to add replicator, request failed", err) - } - log.FeedbackInfo( - ctx, - "Successfully added replicator", - logging.NewKV("PeerID", pid), - logging.NewKV("Collections", col), - ) - return nil - }, - } - - cmd.Flags().BoolVarP(&fullRep, "full", "f", false, "Set the replicator to act on all collections") - cmd.Flags().StringArrayVarP(&col, "collection", "c", - []string{}, "Define the collection for the replicator") - cmd.MarkFlagsMutuallyExclusive("full", "collection") - return cmd -} diff --git a/cli/request.go b/cli/request.go index 1b8f86ced8..0d211b8af2 100644 --- a/cli/request.go +++ b/cli/request.go @@ -11,19 +11,18 @@ package cli import ( + "encoding/json" "io" - "net/http" - "net/url" "os" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" ) -func MakeRequestCommand(cfg *config.Config) *cobra.Command { +func MakeRequestCommand(cfg *config.Config, db client.DB) *cobra.Command { var filePath string var cmd = &cobra.Command{ Use: "query [query request]", @@ -43,101 +42,35 @@ A GraphQL client such as GraphiQL (https://github.com/graphql/graphiql) can be u with the database more conveniently. To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { var request string - - fi, err := os.Stdin.Stat() - if err != nil { - return err - } - - if filePath != "" { - bytes, err := os.ReadFile(filePath) + switch { + case filePath != "": + data, err := os.ReadFile(filePath) if err != nil { - return ErrFailedToReadFile - } - request = string(bytes) - } else if len(args) > 1 { - if err = cmd.Usage(); err != nil { return err } - return errors.New("too many arguments") - } else if isFileInfoPipe(fi) && (len(args) == 0 || args[0] != "-") { - log.FeedbackInfo( - cmd.Context(), - "Run 'defradb client query -' to read from stdin. 
Example: 'cat my.graphql | defradb client query -').", - ) - return nil - } else if len(args) == 0 { - err := cmd.Help() - if err != nil { - return errors.Wrap("failed to print help", err) - } - return nil - } else if args[0] == "-" { - stdin, err := readStdin() + request = string(data) + case len(args) > 0 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) if err != nil { - return errors.Wrap("failed to read stdin", err) - } - if len(stdin) == 0 { - return errors.New("no query request in stdin provided") - } else { - request = stdin + return err } - } else { - request = args[0] + request = string(data) + case len(args) > 0: + request = string(args[0]) } if request == "" { return errors.New("request cannot be empty") } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.GraphQLPath) - if err != nil { - return errors.Wrap("joining paths failed", err) - } - - p := url.Values{} - p.Add("query", request) - endpoint.RawQuery = p.Encode() - - res, err := http.Get(endpoint.String()) - if err != nil { - return errors.Wrap("failed request", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - fi, err = os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) + result := db.ExecRequest(cmd.Context(), request) + if result.Pub == nil { + return json.NewEncoder(cmd.OutOrStdout()).Encode(result.GQL) } - - if isFileInfoPipe(fi) { - cmd.Println(string(response)) - } else { - graphlErr, err := hasGraphQLErrors(response) - if err != nil { - return errors.Wrap("failed to handle GraphQL errors", err) - } - indentedResult, err := indentJSON(response) - if err != nil { - return errors.Wrap("failed to pretty print result", err) - } - if graphlErr { - log.FeedbackError(cmd.Context(), indentedResult) - } else { - log.FeedbackInfo(cmd.Context(), indentedResult) - } + enc := json.NewEncoder(cmd.OutOrStdout()) + for item := range result.Pub.Stream() { + enc.Encode(item) //nolint:errcheck } return nil }, diff --git a/cli/schema_add.go b/cli/schema_add.go index b5f28f15d3..d13b5b3224 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -12,20 +12,17 @@ package cli import ( "encoding/json" + "fmt" "io" - "net/http" "os" - "strings" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" ) -func MakeSchemaAddCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaAddCommand(cfg *config.Config, db client.DB) *cobra.Command { var schemaFile string var cmd = &cobra.Command{ Use: "add [schema]", @@ -42,117 +39,32 @@ Example: add from stdin: cat schema.graphql | defradb client schema add - Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { var schema string - fi, err := os.Stdin.Stat() - if err != nil { - return err - } - - if len(args) > 1 { - if err = cmd.Usage(); err != nil { - return err - } - return errors.New("too many arguments") - } - - if schemaFile != "" { - buf, err := os.ReadFile(schemaFile) + switch { + case schemaFile != "": + data, err := 
os.ReadFile(schemaFile) if err != nil { - return errors.Wrap("failed to read schema file", err) - } - schema = string(buf) - } else if isFileInfoPipe(fi) && (len(args) == 0 || args[0] != "-") { - log.FeedbackInfo( - cmd.Context(), - "Run 'defradb client schema add -' to read from stdin."+ - " Example: 'cat schema.graphql | defradb client schema add -').", - ) - return nil - } else if len(args) == 0 { - err := cmd.Help() - if err != nil { - return errors.Wrap("failed to print help", err) + return err } - return nil - } else if args[0] == "-" { - stdin, err := readStdin() + schema = string(data) + case len(args) > 0 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) if err != nil { - return errors.Wrap("failed to read stdin", err) - } - if len(stdin) == 0 { - return errors.New("no schema in stdin provided") - } else { - schema = stdin + return err } - } else { + schema = string(data) + case len(args) > 0: schema = args[0] + default: + return fmt.Errorf("schema cannot be empty") } - if schema == "" { - return errors.New("empty schema provided") - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.SchemaPath) + cols, err := db.AddSchema(cmd.Context(), schema) if err != nil { - return errors.Wrap("join paths failed", err) - } - - res, err := http.Post(endpoint.String(), "text", strings.NewReader(schema)) - if err != nil { - return errors.Wrap("failed to post schema", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - graphlErr, err := hasGraphQLErrors(response) - if err != nil { - return errors.Wrap("failed to handle GraphQL errors", err) - } - if graphlErr { - indentedResult, err := indentJSON(response) - if err != nil { - return errors.Wrap("failed to pretty print result", err) - } - log.FeedbackError(cmd.Context(), indentedResult) - } else { - type schemaResponse struct { - Data struct { - Result string `json:"result"` - Collections []struct { - Name string `json:"name"` - ID string `json:"id"` - } `json:"collections"` - } `json:"data"` - } - r := schemaResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return errors.Wrap("failed to unmarshal response", err) - } - if r.Data.Result == "success" { - log.FeedbackInfo(cmd.Context(), "Successfully added schema.", logging.NewKV("Collections", r.Data.Collections)) - } - log.FeedbackInfo(cmd.Context(), r.Data.Result) - } + return err } - return nil + return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) }, } cmd.Flags().StringVarP(&schemaFile, "file", "f", "", "File to load a schema from") diff --git a/cli/schema_list.go b/cli/schema_list.go deleted file mode 100644 index 3a0e32bcce..0000000000 --- a/cli/schema_list.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package cli - -import ( - "encoding/json" - "io" - "net/http" - - "github.com/spf13/cobra" - - httpapi "github.com/sourcenetwork/defradb/api/http" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" -) - -type schemaListResponse struct { - Data struct { - Collections []struct { - Name string `json:"name"` - ID string `json:"id"` - VersionID string `json:"version_id"` - Fields []struct { - ID string `json:"id"` - Name string `json:"name"` - Kind string `json:"kind"` - Internal bool `json:"internal"` - } `json:"fields"` - } `json:"collections"` - } `json:"data"` - Errors []struct { - Message string `json:"message"` - } `json:"errors"` -} - -func MakeSchemaListCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "list", - Short: "List schema types with their respective fields", - RunE: func(cmd *cobra.Command, args []string) (err error) { - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.SchemaPath) - if err != nil { - return NewErrFailedToJoinEndpoint(err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return NewErrFailedToSendRequest(err) - } - defer res.Body.Close() //nolint:errcheck - - data, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - var r schemaListResponse - if err := json.Unmarshal(data, &r); err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - return errors.New("failed to list schemas", errors.NewKV("errors", r.Errors)) - } - - for _, c := range r.Data.Collections { - cmd.Printf("# Schema ID: %s\n", c.ID) - cmd.Printf("# Version ID: %s\n", c.VersionID) - cmd.Printf("type %s {\n", c.Name) - for _, f := range c.Fields { - if !f.Internal { - cmd.Printf("\t%s: %s\n", f.Name, f.Kind) - } - } - cmd.Printf("}\n\n") - } - - return nil - }, - } - return cmd -} diff --git a/cli/schema_migration_get.go b/cli/schema_migration_get.go index 333c2d9cf4..17b0a6bd6a 100644 --- a/cli/schema_migration_get.go +++ b/cli/schema_migration_get.go @@ -12,20 +12,14 @@ package cli import ( "encoding/json" - "io" - "net/http" - "os" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" ) -func MakeSchemaMigrationGetCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaMigrationGetCommand(cfg *config.Config, db client.DB) *cobra.Command { var cmd = &cobra.Command{ Use: "get", Short: "Gets the schema migrations within DefraDB", @@ -36,62 +30,11 @@ Example: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) (err error) { - if err := cobra.NoArgs(cmd, args); err != nil { - return NewErrTooManyArgs(0, len(args)) - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.SchemaMigrationPath) - if err != nil { - return errors.Wrap("join paths failed", err) - } - - res, err := http.Get(endpoint.String()) - if err != nil { - return errors.Wrap("failed to get schema migrations", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) + cfgs, err := db.LensRegistry().Config(cmd.Context()) if err != nil { - return errors.Wrap("failed to read 
response body", err) + return err } - - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - type migrationGetResponse struct { - Data struct { - Configuration []client.LensConfig `json:"configuration"` - } `json:"data"` - Errors []struct { - Message string `json:"message"` - } `json:"errors"` - } - r := migrationGetResponse{} - err = json.Unmarshal(response, &r) - log.FeedbackInfo(cmd.Context(), string(response)) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to get schema migrations", - logging.NewKV("Errors", r.Errors)) - } else { - log.FeedbackInfo(cmd.Context(), "Successfully got schema migrations", - logging.NewKV("Configuration", r.Data.Configuration)) - } - } - - return nil + return json.NewEncoder(cmd.OutOrStdout()).Encode(cfgs) }, } return cmd diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index 633cbf0115..e1db8afff5 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -12,22 +12,20 @@ package cli import ( "encoding/json" + "fmt" "io" - "net/http" "os" "strings" "github.com/lens-vm/lens/host-go/config/model" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" ) -func MakeSchemaMigrationSetCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaMigrationSetCommand(cfg *config.Config, db client.DB) *cobra.Command { var lensFile string var cmd = &cobra.Command{ Use: "set [src] [dst] [cfg]", @@ -44,72 +42,36 @@ Example: add from stdin: cat schema_migration.lens | defradb client schema migration set bae123 bae456 - Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, + Args: cobra.RangeArgs(2, 3), RunE: func(cmd *cobra.Command, args []string) (err error) { - if err := cobra.MinimumNArgs(2)(cmd, args); err != nil { - return NewErrMissingArgs([]string{"src", "dst", "cfg"}) - } - if err := cobra.MaximumNArgs(3)(cmd, args); err != nil { - return NewErrTooManyArgs(3, len(args)) - } - var lensCfgJson string - var srcSchemaVersionID string - var dstSchemaVersionID string - fi, err := os.Stdin.Stat() - if err != nil { - return err - } - - if lensFile != "" { - buf, err := os.ReadFile(lensFile) + switch { + case lensFile != "": + data, err := os.ReadFile(lensFile) if err != nil { - return errors.Wrap("failed to read schema file", err) + return err } - lensCfgJson = string(buf) - } else if len(args) == 2 { - // If the lensFile flag has not been provided then it must be provided as an arg - // and thus len(args) cannot be 2 - return NewErrMissingArg("cfg") - } else if isFileInfoPipe(fi) && args[2] != "-" { - log.FeedbackInfo( - cmd.Context(), - "Run 'defradb client schema migration set -' to read from stdin."+ - " Example: 'cat schema_migration.lens | defradb client schema migration set -').", - ) - return nil - } else if args[2] == "-" { - stdin, err := readStdin() + lensCfgJson = string(data) + case len(args) == 3 && args[2] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) if err != nil { - return errors.Wrap("failed to read stdin", err) - } - if len(stdin) == 0 { - return errors.New("no lens cfg in stdin provided") - } else { - lensCfgJson = 
stdin + return err } - } else { + lensCfgJson = string(data) + case len(args) == 3: lensCfgJson = args[2] + default: + return fmt.Errorf("lens config cannot be empty") } - srcSchemaVersionID = args[0] - dstSchemaVersionID = args[1] - - if lensCfgJson == "" { - return NewErrMissingArg("cfg") - } - if srcSchemaVersionID == "" { - return NewErrMissingArg("src") - } - if dstSchemaVersionID == "" { - return NewErrMissingArg("dst") - } + srcSchemaVersionID := args[0] + dstSchemaVersionID := args[1] decoder := json.NewDecoder(strings.NewReader(lensCfgJson)) decoder.DisallowUnknownFields() var lensCfg model.Lens - err = decoder.Decode(&lensCfg) - if err != nil { + if err = decoder.Decode(&lensCfg); err != nil { return errors.Wrap("invalid lens configuration", err) } @@ -119,58 +81,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw Lens: lensCfg, } - migrationCfgJson, err := json.Marshal(migrationCfg) - if err != nil { - return errors.Wrap("failed to marshal cfg", err) - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.SchemaMigrationPath) - if err != nil { - return errors.Wrap("join paths failed", err) - } - - res, err := http.Post(endpoint.String(), "application/json", strings.NewReader(string(migrationCfgJson))) - if err != nil { - return errors.Wrap("failed to post schema migration", err) - } - - defer func() { - if e := res.Body.Close(); e != nil { - err = NewErrFailedToCloseResponseBody(e, err) - } - }() - - response, err := io.ReadAll(res.Body) - if err != nil { - return errors.Wrap("failed to read response body", err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return errors.Wrap("failed to stat stdout", err) - } - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - type migrationSetResponse struct { - Errors []struct { - Message string `json:"message"` - } `json:"errors"` - } - r := migrationSetResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - if len(r.Errors) > 0 { - log.FeedbackError(cmd.Context(), "Failed to set schema migration", - logging.NewKV("Errors", r.Errors)) - } else { - log.FeedbackInfo(cmd.Context(), "Successfully set schema migration") - } - } - - return nil + return db.LensRegistry().SetMigration(cmd.Context(), migrationCfg) }, } cmd.Flags().StringVarP(&lensFile, "file", "f", "", "Lens configuration file") diff --git a/cli/schema_patch.go b/cli/schema_patch.go index b1e962c51a..f9cf55f713 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -11,19 +11,17 @@ package cli import ( - "encoding/json" + "fmt" "io" - "net/http" "os" - "strings" "github.com/spf13/cobra" - httpapi "github.com/sourcenetwork/defradb/api/http" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" ) -func MakeSchemaPatchCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaPatchCommand(cfg *config.Config, db client.DB) *cobra.Command { var patchFile string var cmd = &cobra.Command{ @@ -45,109 +43,26 @@ Example: patch from stdin: To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) (err error) { var patch string - fi, err := os.Stdin.Stat() - if err != nil { - return err - } - - if len(args) > 1 { - if err = cmd.Usage(); err != nil { - return err - } - return NewErrTooManyArgs(1, len(args)) - } - - if patchFile != "" { - buf, err := os.ReadFile(patchFile) + switch { + case 
patchFile != "": + data, err := os.ReadFile(patchFile) if err != nil { - return NewFailedToReadFile(err) + return err } - patch = string(buf) - } else if isFileInfoPipe(fi) && (len(args) == 0 || args[0] != "-") { - log.FeedbackInfo( - cmd.Context(), - "Run 'defradb client schema patch -' to read from stdin."+ - " Example: 'cat patch.json | defradb client schema patch -').", - ) - return nil - } else if len(args) == 0 { - // ignore error, nothing we can do about it - // as printing an error about failing to print help - // is useless - //nolint:errcheck - cmd.Help() - return nil - } else if args[0] == "-" { - stdin, err := readStdin() + patch = string(data) + case len(args) > 0 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) if err != nil { - return NewFailedToReadStdin(err) - } - if len(stdin) == 0 { - return ErrEmptyStdin - } else { - patch = stdin + return err } - } else { + patch = string(data) + case len(args) > 0: patch = args[0] + default: + return fmt.Errorf("patch cannot be empty") } - if patch == "" { - return ErrEmptyFile - } - - endpoint, err := httpapi.JoinPaths(cfg.API.AddressToURL(), httpapi.SchemaPath) - if err != nil { - return err - } - - req, err := http.NewRequest(http.MethodPatch, endpoint.String(), strings.NewReader(patch)) - if err != nil { - return NewErrFailedToSendRequest(err) - } - res, err := http.DefaultClient.Do(req) - if err != nil { - return NewErrFailedToSendRequest(err) - } - - //nolint:errcheck - defer res.Body.Close() - response, err := io.ReadAll(res.Body) - if err != nil { - return NewErrFailedToReadResponseBody(err) - } - - stdout, err := os.Stdout.Stat() - if err != nil { - return NewErrFailedToStatStdOut(err) - } - if isFileInfoPipe(stdout) { - cmd.Println(string(response)) - } else { - graphlErr, err := hasGraphQLErrors(response) - if err != nil { - return NewErrFailedToHandleGQLErrors(err) - } - if graphlErr { - indentedResult, err := indentJSON(response) - if err != nil { - return NewErrFailedToPrettyPrintResponse(err) - } - log.FeedbackError(cmd.Context(), indentedResult) - } else { - type schemaResponse struct { - Data struct { - Result string `json:"result"` - } `json:"data"` - } - r := schemaResponse{} - err = json.Unmarshal(response, &r) - if err != nil { - return NewErrFailedToUnmarshalResponse(err) - } - log.FeedbackInfo(cmd.Context(), r.Data.Result) - } - } - return nil + return db.PatchSchema(cmd.Context(), patch) }, } cmd.Flags().StringVarP(&patchFile, "file", "f", "", "File to load a patch from") diff --git a/cli/rpc.go b/cli/server_dump.go similarity index 50% rename from cli/rpc.go rename to cli/server_dump.go index afb1a007e2..3812005b23 100644 --- a/cli/rpc.go +++ b/cli/server_dump.go @@ -11,26 +11,19 @@ package cli import ( - "context" - "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" ) -func MakeRPCCommand(cfg *config.Config) *cobra.Command { - var cmd = &cobra.Command{ - Use: "rpc", - Short: "Interact with a DefraDB node via RPC", - Long: "Interact with a DefraDB node via RPC.", - } - cmd.PersistentFlags().String( - "addr", cfg.Net.RPCAddress, - "RPC endpoint address", - ) - - if err := cfg.BindFlag("net.rpcaddress", cmd.PersistentFlags().Lookup("addr")); err != nil { - log.FeedbackFatalE(context.Background(), "Could not bind net.rpcaddress", err) +func MakeServerDumpCmd(cfg *config.Config, db client.DB) *cobra.Command { + cmd := &cobra.Command{ + Use: "server-dump", + Short: "Dumps the state of the entire database", + RunE: 
func(cmd *cobra.Command, _ []string) error { + return db.PrintDump(cmd.Context()) + }, } return cmd } diff --git a/cli/serverdump.go b/cli/serverdump.go deleted file mode 100644 index 0ba638d268..0000000000 --- a/cli/serverdump.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "fmt" - "os" - "os/signal" - - "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/config" - ds "github.com/sourcenetwork/defradb/datastore" - badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" - "github.com/sourcenetwork/defradb/db" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/logging" -) - -func MakeServerDumpCmd(cfg *config.Config) *cobra.Command { - var datastore string - - cmd := &cobra.Command{ - Use: "server-dump", - Short: "Dumps the state of the entire database", - RunE: func(cmd *cobra.Command, _ []string) error { - log.FeedbackInfo(cmd.Context(), "Starting DefraDB process...") - - // setup signal handlers - signalCh := make(chan os.Signal, 1) - signal.Notify(signalCh, os.Interrupt) - - var rootstore ds.RootStore - var err error - if datastore == badgerDatastoreName { - info, err := os.Stat(cfg.Datastore.Badger.Path) - exists := (err == nil && info.IsDir()) - if !exists { - return errors.New(fmt.Sprintf( - "badger store does not exist at %s. Try with an existing directory", - cfg.Datastore.Badger.Path, - )) - } - log.FeedbackInfo(cmd.Context(), "Opening badger store", logging.NewKV("Path", cfg.Datastore.Badger.Path)) - rootstore, err = badgerds.NewDatastore(cfg.Datastore.Badger.Path, cfg.Datastore.Badger.Options) - if err != nil { - return errors.Wrap("could not open badger datastore", err) - } - } else { - return errors.New("server-side dump is only supported for the Badger datastore") - } - - db, err := db.NewDB(cmd.Context(), rootstore) - if err != nil { - return errors.Wrap("failed to initialize database", err) - } - - log.FeedbackInfo(cmd.Context(), "Dumping DB state...") - return db.PrintDump(cmd.Context()) - }, - } - cmd.Flags().StringVar( - &datastore, "store", cfg.Datastore.Store, - "Datastore to use. 
Options are badger, memory", - ) - return cmd -} diff --git a/cli/start.go b/cli/start.go index 9185af8c92..7c433b8f9f 100644 --- a/cli/start.go +++ b/cli/start.go @@ -28,19 +28,21 @@ import ( "google.golang.org/grpc" "google.golang.org/grpc/keepalive" - httpapi "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" ds "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/db" "github.com/sourcenetwork/defradb/errors" + httpapi "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" netpb "github.com/sourcenetwork/defradb/net/pb" netutils "github.com/sourcenetwork/defradb/net/utils" ) +const badgerDatastoreName = "badger" + func MakeStartCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "start", @@ -351,16 +353,7 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { // run the server in a separate goroutine go func() { - log.FeedbackInfo( - ctx, - fmt.Sprintf( - "Providing HTTP API at %s%s. Use the GraphQL request endpoint at %s%s/graphql ", - cfg.API.AddressToURL(), - httpapi.RootPath, - cfg.API.AddressToURL(), - httpapi.RootPath, - ), - ) + log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", cfg.API.AddressToURL())) if err := s.Run(ctx); err != nil && !errors.Is(err, http.ErrServerClosed) { log.FeedbackErrorE(ctx, "Failed to run the HTTP server", err) if n != nil { diff --git a/cli/version_test.go b/cli/version_test.go deleted file mode 100644 index 4f62f3659b..0000000000 --- a/cli/version_test.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "bytes" - "testing" - - "github.com/stretchr/testify/assert" -) - -// The version information comes from the build process which is not [easily] accessible from unit tests. -// Therefore we test that the command outputs the expected formats *without the version info*. 
- -// case: no args, meaning `--format text` -func TestVersionNoArg(t *testing.T) { - cmd := MakeVersionCommand() - buf := new(bytes.Buffer) - cmd.SetOut(buf) - err := cmd.Execute() - assert.NoError(t, err) - t.Log(buf.String()) - assert.Contains(t, buf.String(), "defradb") - assert.Contains(t, buf.String(), "built with Go") -} - -// case: `--full`, meaning `--format text --full` -func TestVersionFull(t *testing.T) { - cmd := MakeVersionCommand() - buf := new(bytes.Buffer) - cmd.SetOut(buf) - cmd.SetArgs([]string{"--full"}) - err := cmd.Execute() - assert.NoError(t, err) - t.Log(buf.String()) - assert.Contains(t, buf.String(), "* HTTP API") - assert.Contains(t, buf.String(), "* DocKey versions") - assert.Contains(t, buf.String(), "* P2P multicodec") -} - -// case: `--format json` -func TestVersionJSON(t *testing.T) { - cmd := MakeVersionCommand() - buf := new(bytes.Buffer) - cmd.SetOut(buf) - cmd.SetArgs([]string{"--format", "json"}) - err := cmd.Execute() - assert.NoError(t, err) - t.Log(buf.String()) - assert.JSONEq(t, buf.String(), ` - { - "release": "", - "commit": "", - "commitdate": "", - "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" - }`) -} - -// case: `--format json --full` (is equivalent to previous one) -func TestVersionJSONFull(t *testing.T) { - cmd := MakeVersionCommand() - buf := new(bytes.Buffer) - cmd.SetOut(buf) - cmd.SetArgs([]string{"--format", "json", "--full"}) - err := cmd.Execute() - assert.NoError(t, err) - t.Log(buf.String()) - assert.JSONEq(t, buf.String(), ` - { - "release": "", - "commit": "", - "commitdate": "", - "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" - }`) -} diff --git a/cmd/defradb/main.go b/cmd/defradb/main.go index 761666bea7..4270324de2 100644 --- a/cmd/defradb/main.go +++ b/cmd/defradb/main.go @@ -12,19 +12,15 @@ package main import ( - "context" - "os" - "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/config" ) // Execute adds all child commands to the root command and sets flags appropriately. func main() { - cfg := config.DefaultConfig() - ctx := context.Background() - defraCmd := cli.NewDefraCommand(cfg) - if err := defraCmd.Execute(ctx); err != nil { - os.Exit(1) + defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) + if err != nil { + panic(err) } + defraCmd.Execute() //nolint:errcheck } diff --git a/cmd/genclidocs/genclidocs.go b/cmd/genclidocs/genclidocs.go index bccc96b38c..8ab569d92a 100644 --- a/cmd/genclidocs/genclidocs.go +++ b/cmd/genclidocs/genclidocs.go @@ -14,30 +14,34 @@ genclidocs is a tool to generate the command line interface documentation. 
package main import ( - "context" "flag" + "log" "os" "github.com/spf13/cobra/doc" "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) -var log = logging.MustNewLogger("genclidocs") +var path string + +func init() { + flag.StringVar(&path, "o", "docs/cmd", "path to write the cmd docs to") +} func main() { - path := flag.String("o", "docs/cmd", "path to write the cmd docs to") flag.Parse() - err := os.MkdirAll(*path, os.ModePerm) - if err != nil { - log.FatalE(context.Background(), "Creating the filesystem path failed", err) + + if err := os.MkdirAll(path, os.ModePerm); err != nil { + log.Fatal("Creating the filesystem path failed", err) } - defraCmd := cli.NewDefraCommand(config.DefaultConfig()) - defraCmd.RootCmd.DisableAutoGenTag = true - err = doc.GenMarkdownTree(defraCmd.RootCmd, *path) + defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) if err != nil { - log.FatalE(context.Background(), "Generating cmd docs failed", err) + log.Fatal("Creating the filesystem path failed", err) + } + defraCmd.DisableAutoGenTag = true + if err = doc.GenMarkdownTree(defraCmd, path); err != nil { + log.Fatal("Generating cmd docs failed", err) } } diff --git a/cmd/genmanpages/main.go b/cmd/genmanpages/main.go index 7ec7a3ce59..99ab9c62b1 100644 --- a/cmd/genmanpages/main.go +++ b/cmd/genmanpages/main.go @@ -15,40 +15,40 @@ installation is packaging and system dependent. package main import ( - "context" "flag" + "log" "os" "github.com/spf13/cobra/doc" "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" ) const defaultPerm os.FileMode = 0o777 -var log = logging.MustNewLogger("genmanpages") +var dir string + +var header = &doc.GenManHeader{ + Title: "defradb - Peer-to-Peer Edge Database", + Section: "1", +} + +func init() { + flag.StringVar(&dir, "o", "build/man", "Directory in which to generate DefraDB man pages") +} func main() { - dirFlag := flag.String("o", "build/man", "Directory in which to generate DefraDB man pages") flag.Parse() - genRootManPages(*dirFlag) -} -func genRootManPages(dir string) { - ctx := context.Background() - header := &doc.GenManHeader{ - Title: "defradb - Peer-to-Peer Edge Database", - Section: "1", + if err := os.MkdirAll(dir, defaultPerm); err != nil { + log.Fatal("Failed to create directory", err) } - err := os.MkdirAll(dir, defaultPerm) + defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) if err != nil { - log.FatalE(ctx, "Failed to create directory", err, logging.NewKV("dir", dir)) + log.Fatal("Failed to create command", err) } - defraCmd := cli.NewDefraCommand(config.DefaultConfig()) - err = doc.GenManTree(defraCmd.RootCmd, header, dir) - if err != nil { - log.FatalE(ctx, "Failed generation of man pages", err) + if err = doc.GenManTree(defraCmd, header, dir); err != nil { + log.Fatal("Failed generation of man pages", err) } } diff --git a/http/handler.go b/http/handler.go index e01b40f12e..35dc108bd3 100644 --- a/http/handler.go +++ b/http/handler.go @@ -20,6 +20,9 @@ import ( "github.com/go-chi/chi/v5/middleware" ) +// Version is the identifier for the current API version. 
+var Version string = "v0" + // playgroundHandler is set when building with the playground build tag var playgroundHandler = http.HandlerFunc(http.NotFound) @@ -43,7 +46,7 @@ func newHandler(db client.DB, opts serverOptions) *handler { router.Use(CorsMiddleware(opts)) router.Use(ApiMiddleware(db, txs, opts)) - router.Route("/api/v0", func(api chi.Router) { + router.Route("/api/"+Version, func(api chi.Router) { api.Use(TransactionMiddleware, StoreMiddleware) api.Route("/tx", func(tx chi.Router) { tx.Post("/", tx_handler.NewTxn) diff --git a/net/api/client/client.go b/net/api/client/client.go deleted file mode 100644 index 2ea92bd14c..0000000000 --- a/net/api/client/client.go +++ /dev/null @@ -1,169 +0,0 @@ -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package client - -import ( - "context" - "fmt" - - "github.com/libp2p/go-libp2p/core/peer" - ma "github.com/multiformats/go-multiaddr" - codec "github.com/planetscale/vtprotobuf/codec/grpc" - "google.golang.org/grpc" - "google.golang.org/grpc/encoding" - _ "google.golang.org/grpc/encoding/proto" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/errors" - pb "github.com/sourcenetwork/defradb/net/pb" -) - -func init() { - encoding.RegisterCodec(codec.Codec{}) -} - -type Client struct { - c pb.CollectionClient - conn *grpc.ClientConn -} - -// NewClient returns a new defra gRPC client connected to the target address. -func NewClient(target string, opts ...grpc.DialOption) (*Client, error) { - conn, err := grpc.Dial(target, opts...) - if err != nil { - return nil, err - } - - return &Client{ - c: pb.NewCollectionClient(conn), - conn: conn, - }, nil -} - -func (c *Client) Close() error { - return c.conn.Close() -} - -// SetReplicator sends a request to add a target replicator to the DB peer. -func (c *Client) SetReplicator( - ctx context.Context, - paddr ma.Multiaddr, - collections ...string, -) (peer.ID, error) { - if paddr == nil { - return "", errors.New("target address can't be empty") - } - resp, err := c.c.SetReplicator(ctx, &pb.SetReplicatorRequest{ - Collections: collections, - Addr: paddr.Bytes(), - }) - if err != nil { - return "", errors.Wrap("could not add replicator", err) - } - return peer.IDFromBytes(resp.PeerID) -} - -// DeleteReplicator sends a request to add a target replicator to the DB peer. -func (c *Client) DeleteReplicator( - ctx context.Context, - pid peer.ID, - collections ...string, -) error { - _, err := c.c.DeleteReplicator(ctx, &pb.DeleteReplicatorRequest{ - PeerID: []byte(pid), - }) - return err -} - -// GetAllReplicators sends a request to add a target replicator to the DB peer. 
-func (c *Client) GetAllReplicators( - ctx context.Context, -) ([]client.Replicator, error) { - resp, err := c.c.GetAllReplicators(ctx, &pb.GetAllReplicatorRequest{}) - if err != nil { - return nil, errors.Wrap("could not get replicators", err) - } - reps := []client.Replicator{} - for _, rep := range resp.Replicators { - addr, err := ma.NewMultiaddrBytes(rep.Info.Addrs) - if err != nil { - return nil, errors.WithStack(err) - } - - pid, err := peer.IDFromBytes(rep.Info.Id) - if err != nil { - return nil, errors.WithStack(err) - } - - reps = append(reps, client.Replicator{ - Info: peer.AddrInfo{ - ID: pid, - Addrs: []ma.Multiaddr{addr}, - }, - Schemas: rep.Schemas, - }) - } - return reps, nil -} - -// AddP2PCollections sends a request to add P2P collecctions to the stored list. -func (c *Client) AddP2PCollections( - ctx context.Context, - collections ...string, -) error { - resp, err := c.c.AddP2PCollections(ctx, &pb.AddP2PCollectionsRequest{ - Collections: collections, - }) - if err != nil { - return errors.Wrap("could not add P2P collection topics", err) - } - if resp.Err != "" { - return errors.New(fmt.Sprintf("could not add P2P collection topics: %s", resp)) - } - return nil -} - -// RemoveP2PCollections sends a request to remove P2P collecctions from the stored list. -func (c *Client) RemoveP2PCollections( - ctx context.Context, - collections ...string, -) error { - resp, err := c.c.RemoveP2PCollections(ctx, &pb.RemoveP2PCollectionsRequest{ - Collections: collections, - }) - if err != nil { - return errors.Wrap("could not remove P2P collection topics", err) - } - if resp.Err != "" { - return errors.New(fmt.Sprintf("could not remove P2P collection topics: %s", resp)) - } - return nil -} - -// RemoveP2PCollections sends a request to get all P2P collecctions from the stored list. -func (c *Client) GetAllP2PCollections( - ctx context.Context, -) ([]client.P2PCollection, error) { - resp, err := c.c.GetAllP2PCollections(ctx, &pb.GetAllP2PCollectionsRequest{}) - if err != nil { - return nil, errors.Wrap("could not get all P2P collection topics", err) - } - var collections []client.P2PCollection - for _, col := range resp.Collections { - collections = append(collections, client.P2PCollection{ - ID: col.Id, - Name: col.Name, - }) - } - return collections, nil -} diff --git a/net/api/pb/Makefile b/net/api/pb/Makefile deleted file mode 100644 index 62eef77354..0000000000 --- a/net/api/pb/Makefile +++ /dev/null @@ -1,18 +0,0 @@ -PB = $(wildcard *.proto) -GO = $(PB:.proto=.pb.go) - -all: $(GO) - -%.pb.go: %.proto - protoc \ - --go_out=. --plugin protoc-gen-go="${GOBIN}/protoc-gen-go" \ - --go-grpc_out=. --plugin protoc-gen-go-grpc="${GOBIN}/protoc-gen-go-grpc" \ - --go-vtproto_out=. --plugin protoc-gen-go-vtproto="${GOBIN}/protoc-gen-go-vtproto" \ - --go-vtproto_opt=features=marshal+unmarshal+size \ - $< - -clean: - rm -f *.pb.go - rm -f *pb_test.go - -.PHONY: clean \ No newline at end of file diff --git a/net/api/pb/api.pb.go b/net/api/pb/api.pb.go deleted file mode 100644 index ad48069b8f..0000000000 --- a/net/api/pb/api.pb.go +++ /dev/null @@ -1,1100 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.30.0 -// protoc v3.21.9 -// source: api.proto - -package api_pb - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type SetReplicatorRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Collections []string `protobuf:"bytes,1,rep,name=collections,proto3" json:"collections,omitempty"` - Addr []byte `protobuf:"bytes,2,opt,name=addr,proto3" json:"addr,omitempty"` -} - -func (x *SetReplicatorRequest) Reset() { - *x = SetReplicatorRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SetReplicatorRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SetReplicatorRequest) ProtoMessage() {} - -func (x *SetReplicatorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SetReplicatorRequest.ProtoReflect.Descriptor instead. -func (*SetReplicatorRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{0} -} - -func (x *SetReplicatorRequest) GetCollections() []string { - if x != nil { - return x.Collections - } - return nil -} - -func (x *SetReplicatorRequest) GetAddr() []byte { - if x != nil { - return x.Addr - } - return nil -} - -type SetReplicatorReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PeerID []byte `protobuf:"bytes,1,opt,name=peerID,proto3" json:"peerID,omitempty"` -} - -func (x *SetReplicatorReply) Reset() { - *x = SetReplicatorReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SetReplicatorReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SetReplicatorReply) ProtoMessage() {} - -func (x *SetReplicatorReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SetReplicatorReply.ProtoReflect.Descriptor instead. 
-func (*SetReplicatorReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{1} -} - -func (x *SetReplicatorReply) GetPeerID() []byte { - if x != nil { - return x.PeerID - } - return nil -} - -type DeleteReplicatorRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PeerID []byte `protobuf:"bytes,1,opt,name=peerID,proto3" json:"peerID,omitempty"` -} - -func (x *DeleteReplicatorRequest) Reset() { - *x = DeleteReplicatorRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DeleteReplicatorRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DeleteReplicatorRequest) ProtoMessage() {} - -func (x *DeleteReplicatorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DeleteReplicatorRequest.ProtoReflect.Descriptor instead. -func (*DeleteReplicatorRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{2} -} - -func (x *DeleteReplicatorRequest) GetPeerID() []byte { - if x != nil { - return x.PeerID - } - return nil -} - -type DeleteReplicatorReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - PeerID []byte `protobuf:"bytes,1,opt,name=peerID,proto3" json:"peerID,omitempty"` -} - -func (x *DeleteReplicatorReply) Reset() { - *x = DeleteReplicatorReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *DeleteReplicatorReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*DeleteReplicatorReply) ProtoMessage() {} - -func (x *DeleteReplicatorReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use DeleteReplicatorReply.ProtoReflect.Descriptor instead. 
-func (*DeleteReplicatorReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{3} -} - -func (x *DeleteReplicatorReply) GetPeerID() []byte { - if x != nil { - return x.PeerID - } - return nil -} - -type GetAllReplicatorRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetAllReplicatorRequest) Reset() { - *x = GetAllReplicatorRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[4] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllReplicatorRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllReplicatorRequest) ProtoMessage() {} - -func (x *GetAllReplicatorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[4] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllReplicatorRequest.ProtoReflect.Descriptor instead. -func (*GetAllReplicatorRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{4} -} - -type GetAllReplicatorReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Replicators []*GetAllReplicatorReply_Replicators `protobuf:"bytes,1,rep,name=replicators,proto3" json:"replicators,omitempty"` -} - -func (x *GetAllReplicatorReply) Reset() { - *x = GetAllReplicatorReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[5] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllReplicatorReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllReplicatorReply) ProtoMessage() {} - -func (x *GetAllReplicatorReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[5] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllReplicatorReply.ProtoReflect.Descriptor instead. 
-func (*GetAllReplicatorReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{5} -} - -func (x *GetAllReplicatorReply) GetReplicators() []*GetAllReplicatorReply_Replicators { - if x != nil { - return x.Replicators - } - return nil -} - -type AddP2PCollectionsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Collections []string `protobuf:"bytes,1,rep,name=collections,proto3" json:"collections,omitempty"` -} - -func (x *AddP2PCollectionsRequest) Reset() { - *x = AddP2PCollectionsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[6] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AddP2PCollectionsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AddP2PCollectionsRequest) ProtoMessage() {} - -func (x *AddP2PCollectionsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[6] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AddP2PCollectionsRequest.ProtoReflect.Descriptor instead. -func (*AddP2PCollectionsRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{6} -} - -func (x *AddP2PCollectionsRequest) GetCollections() []string { - if x != nil { - return x.Collections - } - return nil -} - -type AddP2PCollectionsReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Err string `protobuf:"bytes,1,opt,name=err,proto3" json:"err,omitempty"` -} - -func (x *AddP2PCollectionsReply) Reset() { - *x = AddP2PCollectionsReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[7] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *AddP2PCollectionsReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*AddP2PCollectionsReply) ProtoMessage() {} - -func (x *AddP2PCollectionsReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[7] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use AddP2PCollectionsReply.ProtoReflect.Descriptor instead. 
-func (*AddP2PCollectionsReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{7} -} - -func (x *AddP2PCollectionsReply) GetErr() string { - if x != nil { - return x.Err - } - return "" -} - -type RemoveP2PCollectionsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Collections []string `protobuf:"bytes,1,rep,name=collections,proto3" json:"collections,omitempty"` -} - -func (x *RemoveP2PCollectionsRequest) Reset() { - *x = RemoveP2PCollectionsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[8] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RemoveP2PCollectionsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RemoveP2PCollectionsRequest) ProtoMessage() {} - -func (x *RemoveP2PCollectionsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[8] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RemoveP2PCollectionsRequest.ProtoReflect.Descriptor instead. -func (*RemoveP2PCollectionsRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{8} -} - -func (x *RemoveP2PCollectionsRequest) GetCollections() []string { - if x != nil { - return x.Collections - } - return nil -} - -type RemoveP2PCollectionsReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Err string `protobuf:"bytes,1,opt,name=err,proto3" json:"err,omitempty"` -} - -func (x *RemoveP2PCollectionsReply) Reset() { - *x = RemoveP2PCollectionsReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[9] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *RemoveP2PCollectionsReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*RemoveP2PCollectionsReply) ProtoMessage() {} - -func (x *RemoveP2PCollectionsReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[9] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use RemoveP2PCollectionsReply.ProtoReflect.Descriptor instead. 
-func (*RemoveP2PCollectionsReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{9} -} - -func (x *RemoveP2PCollectionsReply) GetErr() string { - if x != nil { - return x.Err - } - return "" -} - -type GetAllP2PCollectionsRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields -} - -func (x *GetAllP2PCollectionsRequest) Reset() { - *x = GetAllP2PCollectionsRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllP2PCollectionsRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllP2PCollectionsRequest) ProtoMessage() {} - -func (x *GetAllP2PCollectionsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllP2PCollectionsRequest.ProtoReflect.Descriptor instead. -func (*GetAllP2PCollectionsRequest) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{10} -} - -type GetAllP2PCollectionsReply struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Collections []*GetAllP2PCollectionsReply_Collection `protobuf:"bytes,1,rep,name=collections,proto3" json:"collections,omitempty"` -} - -func (x *GetAllP2PCollectionsReply) Reset() { - *x = GetAllP2PCollectionsReply{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllP2PCollectionsReply) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllP2PCollectionsReply) ProtoMessage() {} - -func (x *GetAllP2PCollectionsReply) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllP2PCollectionsReply.ProtoReflect.Descriptor instead. 
-func (*GetAllP2PCollectionsReply) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{11} -} - -func (x *GetAllP2PCollectionsReply) GetCollections() []*GetAllP2PCollectionsReply_Collection { - if x != nil { - return x.Collections - } - return nil -} - -type GetAllReplicatorReply_Replicators struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Info *GetAllReplicatorReply_Replicators_Info `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` - Schemas []string `protobuf:"bytes,2,rep,name=schemas,proto3" json:"schemas,omitempty"` -} - -func (x *GetAllReplicatorReply_Replicators) Reset() { - *x = GetAllReplicatorReply_Replicators{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[12] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllReplicatorReply_Replicators) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllReplicatorReply_Replicators) ProtoMessage() {} - -func (x *GetAllReplicatorReply_Replicators) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[12] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllReplicatorReply_Replicators.ProtoReflect.Descriptor instead. -func (*GetAllReplicatorReply_Replicators) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{5, 0} -} - -func (x *GetAllReplicatorReply_Replicators) GetInfo() *GetAllReplicatorReply_Replicators_Info { - if x != nil { - return x.Info - } - return nil -} - -func (x *GetAllReplicatorReply_Replicators) GetSchemas() []string { - if x != nil { - return x.Schemas - } - return nil -} - -type GetAllReplicatorReply_Replicators_Info struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id []byte `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Addrs []byte `protobuf:"bytes,2,opt,name=addrs,proto3" json:"addrs,omitempty"` -} - -func (x *GetAllReplicatorReply_Replicators_Info) Reset() { - *x = GetAllReplicatorReply_Replicators_Info{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[13] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllReplicatorReply_Replicators_Info) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllReplicatorReply_Replicators_Info) ProtoMessage() {} - -func (x *GetAllReplicatorReply_Replicators_Info) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[13] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllReplicatorReply_Replicators_Info.ProtoReflect.Descriptor instead. 
-func (*GetAllReplicatorReply_Replicators_Info) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{5, 0, 0} -} - -func (x *GetAllReplicatorReply_Replicators_Info) GetId() []byte { - if x != nil { - return x.Id - } - return nil -} - -func (x *GetAllReplicatorReply_Replicators_Info) GetAddrs() []byte { - if x != nil { - return x.Addrs - } - return nil -} - -type GetAllP2PCollectionsReply_Collection struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` -} - -func (x *GetAllP2PCollectionsReply_Collection) Reset() { - *x = GetAllP2PCollectionsReply_Collection{} - if protoimpl.UnsafeEnabled { - mi := &file_api_proto_msgTypes[14] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetAllP2PCollectionsReply_Collection) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetAllP2PCollectionsReply_Collection) ProtoMessage() {} - -func (x *GetAllP2PCollectionsReply_Collection) ProtoReflect() protoreflect.Message { - mi := &file_api_proto_msgTypes[14] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetAllP2PCollectionsReply_Collection.ProtoReflect.Descriptor instead. -func (*GetAllP2PCollectionsReply_Collection) Descriptor() ([]byte, []int) { - return file_api_proto_rawDescGZIP(), []int{11, 0} -} - -func (x *GetAllP2PCollectionsReply_Collection) GetId() string { - if x != nil { - return x.Id - } - return "" -} - -func (x *GetAllP2PCollectionsReply_Collection) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -var File_api_proto protoreflect.FileDescriptor - -var file_api_proto_rawDesc = []byte{ - 0x0a, 0x09, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x61, 0x70, 0x69, - 0x2e, 0x70, 0x62, 0x22, 0x4c, 0x0a, 0x14, 0x53, 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, - 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x63, - 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, - 0x52, 0x0b, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, - 0x04, 0x61, 0x64, 0x64, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x61, 0x64, 0x64, - 0x72, 0x22, 0x2c, 0x0a, 0x12, 0x53, 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x65, 0x65, 0x72, 0x49, - 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x70, 0x65, 0x65, 0x72, 0x49, 0x44, 0x22, - 0x31, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x65, - 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x70, 0x65, 0x65, 0x72, - 0x49, 0x44, 0x22, 0x2f, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, - 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x70, - 0x65, 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x70, 0x65, 0x65, - 0x72, 0x49, 0x44, 0x22, 0x19, 0x0a, 0x17, 0x47, 0x65, 0x74, 0x41, 
0x6c, 0x6c, 0x52, 0x65, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x80, - 0x02, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x4b, 0x0a, 0x0b, 0x72, 0x65, 0x70, 0x6c, - 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x0b, 0x72, 0x65, 0x70, 0x6c, 0x69, 0x63, - 0x61, 0x74, 0x6f, 0x72, 0x73, 0x1a, 0x99, 0x01, 0x0a, 0x0b, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, - 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x42, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x2e, 0x49, - 0x6e, 0x66, 0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x07, 0x73, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x73, 0x1a, 0x2c, 0x0a, 0x04, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x61, - 0x64, 0x64, 0x72, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x61, 0x64, 0x64, 0x72, - 0x73, 0x22, 0x3c, 0x0a, 0x18, 0x41, 0x64, 0x64, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x20, 0x0a, - 0x0b, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, - 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, - 0x2a, 0x0a, 0x16, 0x41, 0x64, 0x64, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x72, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x65, 0x72, 0x72, 0x22, 0x3f, 0x0a, 0x1b, 0x52, - 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x63, 0x6f, - 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x0b, 0x63, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x2d, 0x0a, 0x19, - 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x72, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x65, 0x72, 0x72, 0x22, 0x1d, 0x0a, 0x1b, 0x47, - 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x9d, 0x01, 0x0a, 0x19, 0x47, - 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x12, 0x4e, 0x0a, 0x0b, 0x63, 0x6f, 0x6c, 0x6c, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 
0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, - 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x2e, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, 0x63, 0x6f, 0x6c, - 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x30, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x6c, - 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x32, 0xa0, 0x04, 0x0a, 0x07, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x52, 0x65, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, - 0x2e, 0x53, 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x53, - 0x65, 0x74, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x00, 0x12, 0x54, 0x0a, 0x10, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, - 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, - 0x62, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x55, 0x0a, 0x11, 0x47, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x52, 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1f, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x52, 0x65, - 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x52, - 0x65, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, - 0x12, 0x57, 0x0a, 0x11, 0x41, 0x64, 0x64, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x41, - 0x64, 0x64, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, - 0x2e, 0x41, 0x64, 0x64, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x14, 0x52, 0x65, 0x6d, - 0x6f, 0x76, 0x65, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x52, 0x65, 0x6d, 0x6f, 0x76, - 0x65, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, - 0x52, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, - 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x60, 0x0a, 0x14, 0x47, - 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 
0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, - 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x70, - 0x62, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6c, 0x6c, 0x50, 0x32, 0x50, 0x43, 0x6f, 0x6c, 0x6c, 0x65, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, 0x0a, 0x5a, - 0x08, 0x2f, 0x3b, 0x61, 0x70, 0x69, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x33, -} - -var ( - file_api_proto_rawDescOnce sync.Once - file_api_proto_rawDescData = file_api_proto_rawDesc -) - -func file_api_proto_rawDescGZIP() []byte { - file_api_proto_rawDescOnce.Do(func() { - file_api_proto_rawDescData = protoimpl.X.CompressGZIP(file_api_proto_rawDescData) - }) - return file_api_proto_rawDescData -} - -var file_api_proto_msgTypes = make([]protoimpl.MessageInfo, 15) -var file_api_proto_goTypes = []interface{}{ - (*SetReplicatorRequest)(nil), // 0: api.pb.SetReplicatorRequest - (*SetReplicatorReply)(nil), // 1: api.pb.SetReplicatorReply - (*DeleteReplicatorRequest)(nil), // 2: api.pb.DeleteReplicatorRequest - (*DeleteReplicatorReply)(nil), // 3: api.pb.DeleteReplicatorReply - (*GetAllReplicatorRequest)(nil), // 4: api.pb.GetAllReplicatorRequest - (*GetAllReplicatorReply)(nil), // 5: api.pb.GetAllReplicatorReply - (*AddP2PCollectionsRequest)(nil), // 6: api.pb.AddP2PCollectionsRequest - (*AddP2PCollectionsReply)(nil), // 7: api.pb.AddP2PCollectionsReply - (*RemoveP2PCollectionsRequest)(nil), // 8: api.pb.RemoveP2PCollectionsRequest - (*RemoveP2PCollectionsReply)(nil), // 9: api.pb.RemoveP2PCollectionsReply - (*GetAllP2PCollectionsRequest)(nil), // 10: api.pb.GetAllP2PCollectionsRequest - (*GetAllP2PCollectionsReply)(nil), // 11: api.pb.GetAllP2PCollectionsReply - (*GetAllReplicatorReply_Replicators)(nil), // 12: api.pb.GetAllReplicatorReply.Replicators - (*GetAllReplicatorReply_Replicators_Info)(nil), // 13: api.pb.GetAllReplicatorReply.Replicators.Info - (*GetAllP2PCollectionsReply_Collection)(nil), // 14: api.pb.GetAllP2PCollectionsReply.Collection -} -var file_api_proto_depIdxs = []int32{ - 12, // 0: api.pb.GetAllReplicatorReply.replicators:type_name -> api.pb.GetAllReplicatorReply.Replicators - 14, // 1: api.pb.GetAllP2PCollectionsReply.collections:type_name -> api.pb.GetAllP2PCollectionsReply.Collection - 13, // 2: api.pb.GetAllReplicatorReply.Replicators.info:type_name -> api.pb.GetAllReplicatorReply.Replicators.Info - 0, // 3: api.pb.Service.SetReplicator:input_type -> api.pb.SetReplicatorRequest - 2, // 4: api.pb.Service.DeleteReplicator:input_type -> api.pb.DeleteReplicatorRequest - 4, // 5: api.pb.Service.GetAllReplicators:input_type -> api.pb.GetAllReplicatorRequest - 6, // 6: api.pb.Service.AddP2PCollections:input_type -> api.pb.AddP2PCollectionsRequest - 8, // 7: api.pb.Service.RemoveP2PCollections:input_type -> api.pb.RemoveP2PCollectionsRequest - 10, // 8: api.pb.Service.GetAllP2PCollections:input_type -> api.pb.GetAllP2PCollectionsRequest - 1, // 9: api.pb.Service.SetReplicator:output_type -> api.pb.SetReplicatorReply - 3, // 10: api.pb.Service.DeleteReplicator:output_type -> api.pb.DeleteReplicatorReply - 5, // 11: api.pb.Service.GetAllReplicators:output_type -> api.pb.GetAllReplicatorReply - 7, // 12: api.pb.Service.AddP2PCollections:output_type -> api.pb.AddP2PCollectionsReply - 9, // 13: api.pb.Service.RemoveP2PCollections:output_type -> api.pb.RemoveP2PCollectionsReply - 11, // 14: 
api.pb.Service.GetAllP2PCollections:output_type -> api.pb.GetAllP2PCollectionsReply - 9, // [9:15] is the sub-list for method output_type - 3, // [3:9] is the sub-list for method input_type - 3, // [3:3] is the sub-list for extension type_name - 3, // [3:3] is the sub-list for extension extendee - 0, // [0:3] is the sub-list for field type_name -} - -func init() { file_api_proto_init() } -func file_api_proto_init() { - if File_api_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_api_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetReplicatorRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SetReplicatorReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteReplicatorRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteReplicatorReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllReplicatorRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllReplicatorReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AddP2PCollectionsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*AddP2PCollectionsReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RemoveP2PCollectionsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*RemoveP2PCollectionsReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllP2PCollectionsRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllP2PCollectionsReply); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - 
default: - return nil - } - } - file_api_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllReplicatorReply_Replicators); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllReplicatorReply_Replicators_Info); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_api_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetAllP2PCollectionsReply_Collection); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_api_proto_rawDesc, - NumEnums: 0, - NumMessages: 15, - NumExtensions: 0, - NumServices: 1, - }, - GoTypes: file_api_proto_goTypes, - DependencyIndexes: file_api_proto_depIdxs, - MessageInfos: file_api_proto_msgTypes, - }.Build() - File_api_proto = out.File - file_api_proto_rawDesc = nil - file_api_proto_goTypes = nil - file_api_proto_depIdxs = nil -} diff --git a/net/api/pb/api.proto b/net/api/pb/api.proto deleted file mode 100644 index 367997c7af..0000000000 --- a/net/api/pb/api.proto +++ /dev/null @@ -1,82 +0,0 @@ -syntax = "proto3"; -package api.pb; - -option go_package = "/;api_pb"; - -message SetReplicatorRequest { - repeated string collections = 1; - bytes addr = 2; -} - -message SetReplicatorReply { - bytes peerID = 1; -} - -message DeleteReplicatorRequest { - bytes peerID = 1; -} - -message DeleteReplicatorReply { - bytes peerID = 1; -} - -message GetAllReplicatorRequest {} - -message GetAllReplicatorReply { - message Replicators { - message Info { - bytes id = 1; - bytes addrs = 2; - } - Info info = 1; - repeated string schemas = 2; - } - - repeated Replicators replicators = 1; - -} - -message AddP2PCollectionsRequest { - repeated string collections = 1; -} - -message AddP2PCollectionsReply { - string err = 1; -} - -message RemoveP2PCollectionsRequest { - repeated string collections = 1; -} - -message RemoveP2PCollectionsReply { - string err = 1; -} - -message GetAllP2PCollectionsRequest {} - -message GetAllP2PCollectionsReply { - message Collection { - string id = 1; - string name = 2; - } - repeated Collection collections = 1; -} - - -// Service is the peer-to-peer network API for document sync -service Service { - // SetReplicator for this peer - rpc SetReplicator(SetReplicatorRequest) returns (SetReplicatorReply) {} - - // DeleteReplicator for this peer - rpc DeleteReplicator(DeleteReplicatorRequest) returns (DeleteReplicatorReply) {} - - // DeleteReplicator for this peer - rpc GetAllReplicators(GetAllReplicatorRequest) returns (GetAllReplicatorReply) {} - - rpc AddP2PCollections(AddP2PCollectionsRequest) returns (AddP2PCollectionsReply) {} - - rpc RemoveP2PCollections(RemoveP2PCollectionsRequest) returns (RemoveP2PCollectionsReply) {} - - rpc GetAllP2PCollections(GetAllP2PCollectionsRequest) returns (GetAllP2PCollectionsReply) {} -} \ No newline at end of file diff --git a/net/api/pb/api_grpc.pb.go b/net/api/pb/api_grpc.pb.go deleted file mode 100644 index 5d1bc204d3..0000000000 --- a/net/api/pb/api_grpc.pb.go +++ /dev/null @@ -1,300 +0,0 @@ -// Code generated by protoc-gen-go-grpc. DO NOT EDIT. 
-// versions: -// - protoc-gen-go-grpc v1.3.0 -// - protoc v3.21.9 -// source: api.proto - -package api_pb - -import ( - context "context" - grpc "google.golang.org/grpc" - codes "google.golang.org/grpc/codes" - status "google.golang.org/grpc/status" -) - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -// Requires gRPC-Go v1.32.0 or later. -const _ = grpc.SupportPackageIsVersion7 - -const ( - Service_SetReplicator_FullMethodName = "/api.pb.Service/SetReplicator" - Service_DeleteReplicator_FullMethodName = "/api.pb.Service/DeleteReplicator" - Service_GetAllReplicators_FullMethodName = "/api.pb.Service/GetAllReplicators" - Service_AddP2PCollections_FullMethodName = "/api.pb.Service/AddP2PCollections" - Service_RemoveP2PCollections_FullMethodName = "/api.pb.Service/RemoveP2PCollections" - Service_GetAllP2PCollections_FullMethodName = "/api.pb.Service/GetAllP2PCollections" -) - -// ServiceClient is the client API for Service service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. -type ServiceClient interface { - // SetReplicator for this peer - SetReplicator(ctx context.Context, in *SetReplicatorRequest, opts ...grpc.CallOption) (*SetReplicatorReply, error) - // DeleteReplicator for this peer - DeleteReplicator(ctx context.Context, in *DeleteReplicatorRequest, opts ...grpc.CallOption) (*DeleteReplicatorReply, error) - // DeleteReplicator for this peer - GetAllReplicators(ctx context.Context, in *GetAllReplicatorRequest, opts ...grpc.CallOption) (*GetAllReplicatorReply, error) - AddP2PCollections(ctx context.Context, in *AddP2PCollectionsRequest, opts ...grpc.CallOption) (*AddP2PCollectionsReply, error) - RemoveP2PCollections(ctx context.Context, in *RemoveP2PCollectionsRequest, opts ...grpc.CallOption) (*RemoveP2PCollectionsReply, error) - GetAllP2PCollections(ctx context.Context, in *GetAllP2PCollectionsRequest, opts ...grpc.CallOption) (*GetAllP2PCollectionsReply, error) -} - -type serviceClient struct { - cc grpc.ClientConnInterface -} - -func NewServiceClient(cc grpc.ClientConnInterface) ServiceClient { - return &serviceClient{cc} -} - -func (c *serviceClient) SetReplicator(ctx context.Context, in *SetReplicatorRequest, opts ...grpc.CallOption) (*SetReplicatorReply, error) { - out := new(SetReplicatorReply) - err := c.cc.Invoke(ctx, Service_SetReplicator_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) DeleteReplicator(ctx context.Context, in *DeleteReplicatorRequest, opts ...grpc.CallOption) (*DeleteReplicatorReply, error) { - out := new(DeleteReplicatorReply) - err := c.cc.Invoke(ctx, Service_DeleteReplicator_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) GetAllReplicators(ctx context.Context, in *GetAllReplicatorRequest, opts ...grpc.CallOption) (*GetAllReplicatorReply, error) { - out := new(GetAllReplicatorReply) - err := c.cc.Invoke(ctx, Service_GetAllReplicators_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) AddP2PCollections(ctx context.Context, in *AddP2PCollectionsRequest, opts ...grpc.CallOption) (*AddP2PCollectionsReply, error) { - out := new(AddP2PCollectionsReply) - err := c.cc.Invoke(ctx, Service_AddP2PCollections_FullMethodName, in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) RemoveP2PCollections(ctx context.Context, in *RemoveP2PCollectionsRequest, opts ...grpc.CallOption) (*RemoveP2PCollectionsReply, error) { - out := new(RemoveP2PCollectionsReply) - err := c.cc.Invoke(ctx, Service_RemoveP2PCollections_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *serviceClient) GetAllP2PCollections(ctx context.Context, in *GetAllP2PCollectionsRequest, opts ...grpc.CallOption) (*GetAllP2PCollectionsReply, error) { - out := new(GetAllP2PCollectionsReply) - err := c.cc.Invoke(ctx, Service_GetAllP2PCollections_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// ServiceServer is the server API for Service service. -// All implementations must embed UnimplementedServiceServer -// for forward compatibility -type ServiceServer interface { - // SetReplicator for this peer - SetReplicator(context.Context, *SetReplicatorRequest) (*SetReplicatorReply, error) - // DeleteReplicator for this peer - DeleteReplicator(context.Context, *DeleteReplicatorRequest) (*DeleteReplicatorReply, error) - // DeleteReplicator for this peer - GetAllReplicators(context.Context, *GetAllReplicatorRequest) (*GetAllReplicatorReply, error) - AddP2PCollections(context.Context, *AddP2PCollectionsRequest) (*AddP2PCollectionsReply, error) - RemoveP2PCollections(context.Context, *RemoveP2PCollectionsRequest) (*RemoveP2PCollectionsReply, error) - GetAllP2PCollections(context.Context, *GetAllP2PCollectionsRequest) (*GetAllP2PCollectionsReply, error) - mustEmbedUnimplementedServiceServer() -} - -// UnimplementedServiceServer must be embedded to have forward compatible implementations. -type UnimplementedServiceServer struct { -} - -func (UnimplementedServiceServer) SetReplicator(context.Context, *SetReplicatorRequest) (*SetReplicatorReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method SetReplicator not implemented") -} -func (UnimplementedServiceServer) DeleteReplicator(context.Context, *DeleteReplicatorRequest) (*DeleteReplicatorReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method DeleteReplicator not implemented") -} -func (UnimplementedServiceServer) GetAllReplicators(context.Context, *GetAllReplicatorRequest) (*GetAllReplicatorReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetAllReplicators not implemented") -} -func (UnimplementedServiceServer) AddP2PCollections(context.Context, *AddP2PCollectionsRequest) (*AddP2PCollectionsReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method AddP2PCollections not implemented") -} -func (UnimplementedServiceServer) RemoveP2PCollections(context.Context, *RemoveP2PCollectionsRequest) (*RemoveP2PCollectionsReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method RemoveP2PCollections not implemented") -} -func (UnimplementedServiceServer) GetAllP2PCollections(context.Context, *GetAllP2PCollectionsRequest) (*GetAllP2PCollectionsReply, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetAllP2PCollections not implemented") -} -func (UnimplementedServiceServer) mustEmbedUnimplementedServiceServer() {} - -// UnsafeServiceServer may be embedded to opt out of forward compatibility for this service. -// Use of this interface is not recommended, as added methods to ServiceServer will -// result in compilation errors. 
-type UnsafeServiceServer interface { - mustEmbedUnimplementedServiceServer() -} - -func RegisterServiceServer(s grpc.ServiceRegistrar, srv ServiceServer) { - s.RegisterService(&Service_ServiceDesc, srv) -} - -func _Service_SetReplicator_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SetReplicatorRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).SetReplicator(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_SetReplicator_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).SetReplicator(ctx, req.(*SetReplicatorRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_DeleteReplicator_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(DeleteReplicatorRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).DeleteReplicator(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_DeleteReplicator_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).DeleteReplicator(ctx, req.(*DeleteReplicatorRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_GetAllReplicators_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetAllReplicatorRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).GetAllReplicators(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_GetAllReplicators_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).GetAllReplicators(ctx, req.(*GetAllReplicatorRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_AddP2PCollections_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddP2PCollectionsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).AddP2PCollections(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_AddP2PCollections_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).AddP2PCollections(ctx, req.(*AddP2PCollectionsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _Service_RemoveP2PCollections_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RemoveP2PCollectionsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).RemoveP2PCollections(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_RemoveP2PCollections_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).RemoveP2PCollections(ctx, req.(*RemoveP2PCollectionsRequest)) - } - return 
interceptor(ctx, in, info, handler) -} - -func _Service_GetAllP2PCollections_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetAllP2PCollectionsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(ServiceServer).GetAllP2PCollections(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: Service_GetAllP2PCollections_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(ServiceServer).GetAllP2PCollections(ctx, req.(*GetAllP2PCollectionsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -// Service_ServiceDesc is the grpc.ServiceDesc for Service service. -// It's only intended for direct use with grpc.RegisterService, -// and not to be introspected or modified (even as a copy) -var Service_ServiceDesc = grpc.ServiceDesc{ - ServiceName: "api.pb.Service", - HandlerType: (*ServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "SetReplicator", - Handler: _Service_SetReplicator_Handler, - }, - { - MethodName: "DeleteReplicator", - Handler: _Service_DeleteReplicator_Handler, - }, - { - MethodName: "GetAllReplicators", - Handler: _Service_GetAllReplicators_Handler, - }, - { - MethodName: "AddP2PCollections", - Handler: _Service_AddP2PCollections_Handler, - }, - { - MethodName: "RemoveP2PCollections", - Handler: _Service_RemoveP2PCollections_Handler, - }, - { - MethodName: "GetAllP2PCollections", - Handler: _Service_GetAllP2PCollections_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "api.proto", -} diff --git a/net/api/pb/api_vtproto.pb.go b/net/api/pb/api_vtproto.pb.go deleted file mode 100644 index e4ddfb9bcb..0000000000 --- a/net/api/pb/api_vtproto.pb.go +++ /dev/null @@ -1,2316 +0,0 @@ -// Code generated by protoc-gen-go-vtproto. DO NOT EDIT. -// protoc-gen-go-vtproto version: v0.4.0 -// source: api.proto - -package api_pb - -import ( - fmt "fmt" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - io "io" - bits "math/bits" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -func (m *SetReplicatorRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *SetReplicatorRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *SetReplicatorRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Addr) > 0 { - i -= len(m.Addr) - copy(dAtA[i:], m.Addr) - i = encodeVarint(dAtA, i, uint64(len(m.Addr))) - i-- - dAtA[i] = 0x12 - } - if len(m.Collections) > 0 { - for iNdEx := len(m.Collections) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Collections[iNdEx]) - copy(dAtA[i:], m.Collections[iNdEx]) - i = encodeVarint(dAtA, i, uint64(len(m.Collections[iNdEx]))) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *SetReplicatorReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *SetReplicatorReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *SetReplicatorReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.PeerID) > 0 { - i -= len(m.PeerID) - copy(dAtA[i:], m.PeerID) - i = encodeVarint(dAtA, i, uint64(len(m.PeerID))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *DeleteReplicatorRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *DeleteReplicatorRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *DeleteReplicatorRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.PeerID) > 0 { - i -= len(m.PeerID) - copy(dAtA[i:], m.PeerID) - i = encodeVarint(dAtA, i, uint64(len(m.PeerID))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *DeleteReplicatorReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *DeleteReplicatorReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *DeleteReplicatorReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - 
copy(dAtA[i:], m.unknownFields) - } - if len(m.PeerID) > 0 { - i -= len(m.PeerID) - copy(dAtA[i:], m.PeerID) - i = encodeVarint(dAtA, i, uint64(len(m.PeerID))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetAllReplicatorRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllReplicatorRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllReplicatorRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetAllReplicatorReply_Replicators_Info) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllReplicatorReply_Replicators_Info) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllReplicatorReply_Replicators_Info) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Addrs) > 0 { - i -= len(m.Addrs) - copy(dAtA[i:], m.Addrs) - i = encodeVarint(dAtA, i, uint64(len(m.Addrs))) - i-- - dAtA[i] = 0x12 - } - if len(m.Id) > 0 { - i -= len(m.Id) - copy(dAtA[i:], m.Id) - i = encodeVarint(dAtA, i, uint64(len(m.Id))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetAllReplicatorReply_Replicators) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllReplicatorReply_Replicators) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllReplicatorReply_Replicators) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Schemas) > 0 { - for iNdEx := len(m.Schemas) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Schemas[iNdEx]) - copy(dAtA[i:], m.Schemas[iNdEx]) - i = encodeVarint(dAtA, i, uint64(len(m.Schemas[iNdEx]))) - i-- - dAtA[i] = 0x12 - } - } - if m.Info != nil { - size, err := m.Info.MarshalToSizedBufferVT(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarint(dAtA, i, uint64(size)) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetAllReplicatorReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllReplicatorReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return 
m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllReplicatorReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Replicators) > 0 { - for iNdEx := len(m.Replicators) - 1; iNdEx >= 0; iNdEx-- { - size, err := m.Replicators[iNdEx].MarshalToSizedBufferVT(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarint(dAtA, i, uint64(size)) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *AddP2PCollectionsRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *AddP2PCollectionsRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *AddP2PCollectionsRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Collections) > 0 { - for iNdEx := len(m.Collections) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Collections[iNdEx]) - copy(dAtA[i:], m.Collections[iNdEx]) - i = encodeVarint(dAtA, i, uint64(len(m.Collections[iNdEx]))) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *AddP2PCollectionsReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *AddP2PCollectionsReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *AddP2PCollectionsReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Err) > 0 { - i -= len(m.Err) - copy(dAtA[i:], m.Err) - i = encodeVarint(dAtA, i, uint64(len(m.Err))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *RemoveP2PCollectionsRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *RemoveP2PCollectionsRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *RemoveP2PCollectionsRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Collections) > 0 { - for iNdEx := len(m.Collections) - 1; iNdEx >= 0; iNdEx-- { - i -= len(m.Collections[iNdEx]) - copy(dAtA[i:], m.Collections[iNdEx]) - i = encodeVarint(dAtA, i, uint64(len(m.Collections[iNdEx]))) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - -func (m *RemoveP2PCollectionsReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return 
nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *RemoveP2PCollectionsReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *RemoveP2PCollectionsReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Err) > 0 { - i -= len(m.Err) - copy(dAtA[i:], m.Err) - i = encodeVarint(dAtA, i, uint64(len(m.Err))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetAllP2PCollectionsRequest) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllP2PCollectionsRequest) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllP2PCollectionsRequest) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - return len(dAtA) - i, nil -} - -func (m *GetAllP2PCollectionsReply_Collection) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllP2PCollectionsReply_Collection) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllP2PCollectionsReply_Collection) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Name) > 0 { - i -= len(m.Name) - copy(dAtA[i:], m.Name) - i = encodeVarint(dAtA, i, uint64(len(m.Name))) - i-- - dAtA[i] = 0x12 - } - if len(m.Id) > 0 { - i -= len(m.Id) - copy(dAtA[i:], m.Id) - i = encodeVarint(dAtA, i, uint64(len(m.Id))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - -func (m *GetAllP2PCollectionsReply) MarshalVT() (dAtA []byte, err error) { - if m == nil { - return nil, nil - } - size := m.SizeVT() - dAtA = make([]byte, size) - n, err := m.MarshalToSizedBufferVT(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *GetAllP2PCollectionsReply) MarshalToVT(dAtA []byte) (int, error) { - size := m.SizeVT() - return m.MarshalToSizedBufferVT(dAtA[:size]) -} - -func (m *GetAllP2PCollectionsReply) MarshalToSizedBufferVT(dAtA []byte) (int, error) { - if m == nil { - return 0, nil - } - i := len(dAtA) - _ = i - var l int - _ = l - if m.unknownFields != nil { - i -= len(m.unknownFields) - copy(dAtA[i:], m.unknownFields) - } - if len(m.Collections) > 0 { - for iNdEx := len(m.Collections) - 1; iNdEx >= 0; iNdEx-- { - size, err := m.Collections[iNdEx].MarshalToSizedBufferVT(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarint(dAtA, i, uint64(size)) - i-- - dAtA[i] = 0xa - } - } - return len(dAtA) - i, nil -} - 
-func encodeVarint(dAtA []byte, offset int, v uint64) int { - offset -= sov(v) - base := offset - for v >= 1<<7 { - dAtA[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - dAtA[offset] = uint8(v) - return base -} -func (m *SetReplicatorRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Collections) > 0 { - for _, s := range m.Collections { - l = len(s) - n += 1 + l + sov(uint64(l)) - } - } - l = len(m.Addr) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *SetReplicatorReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.PeerID) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *DeleteReplicatorRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.PeerID) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *DeleteReplicatorReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.PeerID) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetAllReplicatorRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetAllReplicatorReply_Replicators_Info) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Id) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - l = len(m.Addrs) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetAllReplicatorReply_Replicators) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if m.Info != nil { - l = m.Info.SizeVT() - n += 1 + l + sov(uint64(l)) - } - if len(m.Schemas) > 0 { - for _, s := range m.Schemas { - l = len(s) - n += 1 + l + sov(uint64(l)) - } - } - n += len(m.unknownFields) - return n -} - -func (m *GetAllReplicatorReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Replicators) > 0 { - for _, e := range m.Replicators { - l = e.SizeVT() - n += 1 + l + sov(uint64(l)) - } - } - n += len(m.unknownFields) - return n -} - -func (m *AddP2PCollectionsRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Collections) > 0 { - for _, s := range m.Collections { - l = len(s) - n += 1 + l + sov(uint64(l)) - } - } - n += len(m.unknownFields) - return n -} - -func (m *AddP2PCollectionsReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Err) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *RemoveP2PCollectionsRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Collections) > 0 { - for _, s := range m.Collections { - l = len(s) - n += 1 + l + sov(uint64(l)) - } - } - n += len(m.unknownFields) - return n -} - -func (m *RemoveP2PCollectionsReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Err) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetAllP2PCollectionsRequest) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - n += len(m.unknownFields) - return n -} - -func (m *GetAllP2PCollectionsReply_Collection) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.Id) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - l = 
len(m.Name) - if l > 0 { - n += 1 + l + sov(uint64(l)) - } - n += len(m.unknownFields) - return n -} - -func (m *GetAllP2PCollectionsReply) SizeVT() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - if len(m.Collections) > 0 { - for _, e := range m.Collections { - l = e.SizeVT() - n += 1 + l + sov(uint64(l)) - } - } - n += len(m.unknownFields) - return n -} - -func sov(x uint64) (n int) { - return (bits.Len64(x|1) + 6) / 7 -} -func soz(x uint64) (n int) { - return sov(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} -func (m *SetReplicatorRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: SetReplicatorRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: SetReplicatorRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Collections", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Collections = append(m.Collections, string(dAtA[iNdEx:postIndex])) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Addr", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Addr = append(m.Addr[:0], dAtA[iNdEx:postIndex]...) - if m.Addr == nil { - m.Addr = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *SetReplicatorReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: SetReplicatorReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: SetReplicatorReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PeerID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PeerID = append(m.PeerID[:0], dAtA[iNdEx:postIndex]...) - if m.PeerID == nil { - m.PeerID = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *DeleteReplicatorRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: DeleteReplicatorRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: DeleteReplicatorRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PeerID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PeerID = append(m.PeerID[:0], dAtA[iNdEx:postIndex]...) - if m.PeerID == nil { - m.PeerID = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *DeleteReplicatorReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: DeleteReplicatorReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: DeleteReplicatorReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field PeerID", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.PeerID = append(m.PeerID[:0], dAtA[iNdEx:postIndex]...) - if m.PeerID == nil { - m.PeerID = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllReplicatorRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllReplicatorRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllReplicatorRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllReplicatorReply_Replicators_Info) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllReplicatorReply_Replicators_Info: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllReplicatorReply_Replicators_Info: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Id = append(m.Id[:0], dAtA[iNdEx:postIndex]...) - if m.Id == nil { - m.Id = []byte{} - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Addrs", wireType) - } - var byteLen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - byteLen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if byteLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + byteLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Addrs = append(m.Addrs[:0], dAtA[iNdEx:postIndex]...) - if m.Addrs == nil { - m.Addrs = []byte{} - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllReplicatorReply_Replicators) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllReplicatorReply_Replicators: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllReplicatorReply_Replicators: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Info", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - if m.Info == nil { - m.Info = &GetAllReplicatorReply_Replicators_Info{} - } - if err := m.Info.UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Schemas", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Schemas = append(m.Schemas, string(dAtA[iNdEx:postIndex])) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllReplicatorReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllReplicatorReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllReplicatorReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Replicators", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Replicators = append(m.Replicators, &GetAllReplicatorReply_Replicators{}) - if err := m.Replicators[len(m.Replicators)-1].UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *AddP2PCollectionsRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: AddP2PCollectionsRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: AddP2PCollectionsRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Collections", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Collections = append(m.Collections, string(dAtA[iNdEx:postIndex])) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *AddP2PCollectionsReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: AddP2PCollectionsReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: AddP2PCollectionsReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Err", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Err = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RemoveP2PCollectionsRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RemoveP2PCollectionsRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RemoveP2PCollectionsRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Collections", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Collections = append(m.Collections, string(dAtA[iNdEx:postIndex])) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, 
dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *RemoveP2PCollectionsReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: RemoveP2PCollectionsReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: RemoveP2PCollectionsReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Err", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Err = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) - iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllP2PCollectionsRequest) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllP2PCollectionsRequest: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllP2PCollectionsRequest: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllP2PCollectionsReply_Collection) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllP2PCollectionsReply_Collection: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllP2PCollectionsReply_Collection: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Id = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} -func (m *GetAllP2PCollectionsReply) UnmarshalVT(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: GetAllP2PCollectionsReply: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: GetAllP2PCollectionsReply: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Collections", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLength - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLength - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Collections = append(m.Collections, &GetAllP2PCollectionsReply_Collection{}) - if err := m.Collections[len(m.Collections)-1].UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex - default: - iNdEx = preIndex - skippy, err := skip(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLength - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} - -func skip(dAtA []byte) (n int, err error) { - l := len(dAtA) - iNdEx := 0 - depth := 0 - for iNdEx < l { - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflow - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - wireType := int(wire & 0x7) - switch wireType { - case 0: - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflow - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - iNdEx++ - if dAtA[iNdEx-1] < 0x80 { - break - } - } - case 1: - iNdEx += 8 - case 2: - var length int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return 0, ErrIntOverflow - } - if iNdEx >= l { - return 0, io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - length |= (int(b) & 0x7F) << shift - if b < 0x80 { - break - } - } - if length < 0 { - return 0, ErrInvalidLength - } - iNdEx += length - case 3: - depth++ - case 4: - if depth == 0 { - return 0, ErrUnexpectedEndOfGroup - } - depth-- - case 5: - iNdEx += 4 - default: - return 0, fmt.Errorf("proto: illegal wireType %d", wireType) - } - if iNdEx < 0 { - return 0, ErrInvalidLength - } - if depth == 0 { - return iNdEx, nil - } - } - return 0, io.ErrUnexpectedEOF -} - -var ( - ErrInvalidLength = fmt.Errorf("proto: negative length found during unmarshaling") - ErrIntOverflow = fmt.Errorf("proto: integer overflow") - ErrUnexpectedEndOfGroup = fmt.Errorf("proto: unexpected end of group") -) diff --git a/tests/integration/cli/client_backup_export_test.go b/tests/integration/cli/client_backup_export_test.go deleted file mode 100644 index 62f2677c7b..0000000000 --- a/tests/integration/cli/client_backup_export_test.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" -) - -func createUser(t *testing.T, conf DefraNodeConfig) { - _, _ = runDefraCommand(t, conf, []string{ - "client", "query", `mutation { create_User(data: "{\"name\": \"John\"}") { _key } }`, - }) -} - -func TestBackup_IfNoArgs_ShowUsage(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{"client", "backup"}) - assertContainsSubstring(t, stdout, "Usage:") -} - -func TestBackupExport_ForAllCollections_ShouldExport(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/test.json" - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "export", filepath, - }) - stopDefra() - - assertContainsSubstring(t, stdout, "success") - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - require.Equal( - t, - `{"User":[{"_key":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","_newKey":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","name":"John"}]}`, - string(b), - ) -} - -func TestBackupExport_ForUserCollection_ShouldExport(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/test.json" - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "export", filepath, "--collections", "User", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "success") - - b, err := os.ReadFile(filepath) - require.NoError(t, err) - require.Equal( - t, - `{"User":[{"_key":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","_newKey":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","name":"John"}]}`, - string(b), - ) -} - -func TestBackupExport_ForInvalidCollection_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/test.json" - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "export", filepath, "--collections", "Invalid", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "collection does not exist") -} - -func TestBackupExport_InvalidFilePath_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/some/test.json" - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "export", filepath, "--collections", "Invalid", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "invalid file path") -} diff --git a/tests/integration/cli/client_backup_import_test.go b/tests/integration/cli/client_backup_import_test.go deleted file mode 100644 index 8290dbe6de..0000000000 --- a/tests/integration/cli/client_backup_import_test.go +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import ( - "os" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestBackupImport_WithValidFile_ShouldImport(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - filepath := t.TempDir() + "/test.json" - - err := os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","_newKey":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","name":"John"}]}`), - 0644, - ) - require.NoError(t, err) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "import", filepath, - }) - stopDefra() - - assertContainsSubstring(t, stdout, "success") -} - -func TestBackupImport_WithExistingDoc_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/test.json" - - err := os.WriteFile( - filepath, - []byte(`{"User":[{"_key":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","_newKey":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","name":"John"}]}`), - 0644, - ) - require.NoError(t, err) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "import", filepath, - }) - stopDefra() - - assertContainsSubstring(t, stdout, "a document with the given dockey already exists") -} - -func TestBackupImport_ForInvalidCollection_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/test.json" - - err := os.WriteFile( - filepath, - []byte(`{"Invalid":[{"_key":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","_newKey":"bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad","name":"John"}]}`), - 0644, - ) - require.NoError(t, err) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "import", filepath, - }) - stopDefra() - - assertContainsSubstring(t, stdout, "failed to get collection: datastore: key not found. Name: Invalid") -} - -func TestBackupImport_InvalidFilePath_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - createUser(t, conf) - - filepath := t.TempDir() + "/some/test.json" - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "backup", "import", filepath, - }) - stopDefra() - - assertContainsSubstring(t, stdout, "invalid file path") -} diff --git a/tests/integration/cli/client_blocks_test.go b/tests/integration/cli/client_blocks_test.go deleted file mode 100644 index 08d1c22684..0000000000 --- a/tests/integration/cli/client_blocks_test.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import "testing" - -func TestClientBlocksEmpty(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{"client", "blocks"}) - assertContainsSubstring(t, stdout, "Usage:") -} - -func TestClientBlocksGetEmpty(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{"client", "blocks", "get"}) - assertContainsSubstring(t, stdout, "Usage:") -} - -func TestClientBlocksGetInvalidCID(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - stdout, _ := runDefraCommand(t, conf, []string{"client", "blocks", "get", "invalid-cid"}) - _ = stopDefra() - assertContainsSubstring(t, stdout, "\"errors\"") -} - -func TestClientBlocksGetNonExistentCID(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - stdout, _ := runDefraCommand(t, conf, []string{"client", "blocks", "get", "bafybeieelb43ol5e5jiick2p7k4p577ph72ecwcuowlhbops4hpz24zhz4"}) - _ = stopDefra() - assertContainsSubstring(t, stdout, "could not find") -} diff --git a/tests/integration/cli/client_index_create_test.go b/tests/integration/cli/client_index_create_test.go deleted file mode 100644 index 89d6a4a18a..0000000000 --- a/tests/integration/cli/client_index_create_test.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" -) - -func createUserCollection(t *testing.T, conf DefraNodeConfig) { - createCollection(t, conf, `type User { name: String }`) -} - -func createCollection(t *testing.T, conf DefraNodeConfig, colSchema string) { - fileName := schemaFileFixture(t, "schema.graphql", colSchema) - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fileName}) - assertContainsSubstring(t, stdout, "success") -} - -func TestIndex_IfNoArgs_ShowUsage(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{"client", "index"}) - assertContainsSubstring(t, stdout, "Usage:") -} - -func TestIndexCreate_IfNoArgs_ShowUsage(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"client", "index", "create"}) - assertContainsSubstring(t, stderr, "Usage") -} - -func TestIndexCreate_IfNoFieldsArg_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "index", "create", - "--collection", "User", - }) - stopDefra() - - assertContainsSubstring(t, stderr, "missing argument") -} - -func TestIndexCreate_IfNoCollectionArg_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "index", "create", - "--fields", "Name", - }) - stopDefra() - - assertContainsSubstring(t, stderr, "missing argument") -} - -func TestIndexCreate_IfCollectionExists_ShouldCreateIndex(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - 
createUserCollection(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "create", - "--collection", "User", - "--fields", "name", - "--name", "users_name_index", - }) - nodeLog := stopDefra() - - jsonResponse := `{"data":{"index":{"Name":"users_name_index","ID":1,"Fields":[{"Name":"name","Direction":"ASC"}]}}}` - assertContainsSubstring(t, stdout, jsonResponse) - assertNotContainsSubstring(t, stdout, "errors") - assertNotContainsSubstring(t, nodeLog, "errors") -} - -func TestIndexCreate_IfInternalError_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "create", - "--collection", "User", - "--fields", "Name", - "--name", "users_name_index", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "errors") -} diff --git a/tests/integration/cli/client_index_drop_test.go b/tests/integration/cli/client_index_drop_test.go deleted file mode 100644 index ce03e29524..0000000000 --- a/tests/integration/cli/client_index_drop_test.go +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" -) - -func TestIndexDrop_IfNoArgs_ShowUsage(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"client", "index", "drop"}) - assertContainsSubstring(t, stderr, "Usage") -} - -const userColIndexOnNameFieldName = "users_name_index" - -func createIndexOnName(t *testing.T, conf DefraNodeConfig) { - createIndexOnField(t, conf, "User", "name", userColIndexOnNameFieldName) -} - -func createIndexOnField(t *testing.T, conf DefraNodeConfig, colName, fieldName, indexName string) { - runDefraCommand(t, conf, []string{ - "client", "index", "create", - "--collection", colName, - "--fields", fieldName, - "--name", indexName, - }) -} - -func TestIndexDrop_IfNoNameArg_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - createIndexOnName(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "index", "drop", - "--collection", "User", - }) - stopDefra() - - assertContainsSubstring(t, stderr, "missing argument") -} - -func TestIndexDrop_IfNoCollectionArg_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - createIndexOnName(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "index", "drop", - "--name", "users_name_index", - }) - stopDefra() - - assertContainsSubstring(t, stderr, "missing argument") -} - -func TestIndexDrop_IfCollectionWithIndexExists_ShouldDropIndex(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - createIndexOnName(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "drop", - "--collection", "User", - "--name", "users_name_index", - }) - nodeLog := stopDefra() - - jsonResponse := `{"data":{"result":"success"}}` - assertContainsSubstring(t, stdout, jsonResponse) - assertNotContainsSubstring(t, 
stdout, "errors") - assertNotContainsSubstring(t, nodeLog, "errors") -} - -func TestIndexDrop_IfCollectionDoesNotExist_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "drop", - "--collection", "User", - "--name", "users_name_index", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "errors") -} - -func TestIndexDrop_IfInternalError_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "drop", - "--collection", "User", - "--name", "users_name_index", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "errors") -} diff --git a/tests/integration/cli/client_index_list_test.go b/tests/integration/cli/client_index_list_test.go deleted file mode 100644 index cb2f7d5fac..0000000000 --- a/tests/integration/cli/client_index_list_test.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "encoding/json" - "testing" - - "github.com/sourcenetwork/defradb/client" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestIndexList_IfCollectionIsNotSpecified_ShouldReturnAllIndexes(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createCollection(t, conf, `type User { name: String }`) - createCollection(t, conf, `type Product { name: String price: Int }`) - createIndexOnField(t, conf, "User", "name", "") - createIndexOnField(t, conf, "Product", "name", "") - createIndexOnField(t, conf, "Product", "price", "") - - stdout, _ := runDefraCommand(t, conf, []string{"client", "index", "list"}) - nodeLog := stopDefra() - - var resp struct { - Data struct { - Collections map[string][]client.IndexDescription `json:"collections"` - } `json:"data"` - } - err := json.Unmarshal([]byte(stdout[0]), &resp) - require.NoError(t, err) - - assert.Equal(t, len(resp.Data.Collections), 2) - assert.Equal(t, len(resp.Data.Collections["User"]), 1) - assert.Equal(t, len(resp.Data.Collections["Product"]), 2) - - assertNotContainsSubstring(t, stdout, "errors") - assertNotContainsSubstring(t, nodeLog, "errors") -} - -func TestIndexList_IfCollectionIsSpecified_ShouldReturnCollectionsIndexes(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - createUserCollection(t, conf) - createIndexOnName(t, conf) - - createCollection(t, conf, `type Product { name: String price: Int }`) - createIndexOnField(t, conf, "Product", "name", "") - createIndexOnField(t, conf, "Product", "price", "") - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "list", - "--collection", "User", - }) - nodeLog := stopDefra() - - var resp struct { - Data struct { - Indexes []client.IndexDescription `json:"indexes"` - } `json:"data"` - } - err := json.Unmarshal([]byte(stdout[0]), &resp) - require.NoError(t, err) - - expectedDesc := client.IndexDescription{Name: userColIndexOnNameFieldName, ID: 1, Fields: 
[]client.IndexedFieldDescription{{Name: "name", Direction: client.Ascending}}} - assert.Equal(t, 1, len(resp.Data.Indexes)) - assert.Equal(t, expectedDesc, resp.Data.Indexes[0]) - - assertNotContainsSubstring(t, stdout, "errors") - assertNotContainsSubstring(t, nodeLog, "errors") -} - -func TestIndexList_IfInternalError_ShouldFail(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "index", "list", - "--collection", "User", - }) - stopDefra() - - assertContainsSubstring(t, stdout, "errors") -} diff --git a/tests/integration/cli/client_peerid_test.go b/tests/integration/cli/client_peerid_test.go deleted file mode 100644 index 0592fd4aa1..0000000000 --- a/tests/integration/cli/client_peerid_test.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" -) - -func TestPeerID(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "peerid"}) - - defraLogLines := stopDefra() - - assertNotContainsSubstring(t, defraLogLines, "ERROR") - - assertContainsSubstring(t, stdout, "peerID") -} - -func TestPeerIDWithNoHost(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"client", "peerid"}) - assertContainsSubstring(t, stderr, "failed to request PeerID") -} diff --git a/tests/integration/cli/client_ping_test.go b/tests/integration/cli/client_ping_test.go deleted file mode 100644 index a4e1eef96f..0000000000 --- a/tests/integration/cli/client_ping_test.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import ( - "fmt" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/config" -) - -func TestPingSimple(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "ping"}) - - nodeLog := stopDefra() - - assert.Contains(t, stdout, `{"data":{"response":"pong"}}`) - for _, line := range nodeLog { - assert.NotContains(t, line, "ERROR") - } -} - -func TestPingCommandToInvalidHost(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - _, stderr := runDefraCommand(t, conf, []string{"client", "ping", "--url", "'1!2:3!4'"}) - - nodeLog := stopDefra() - - for _, line := range nodeLog { - assert.NotContains(t, line, "ERROR") - } - // for some line in stderr to contain the error message - for _, line := range stderr { - if strings.Contains(line, config.ErrFailedToValidateConfig.Error()) { - return - } - } - t.Error("expected error message not found in stderr") -} - -func TestPingCommandNoHost(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - p, err := findFreePortInRange(t, 49152, 65535) - assert.NoError(t, err) - addr := fmt.Sprintf("localhost:%d", p) - _, stderr := runDefraCommand(t, conf, []string{"client", "ping", "--url", addr}) - assertContainsSubstring(t, stderr, "failed to send ping") -} diff --git a/tests/integration/cli/client_query_test.go b/tests/integration/cli/client_query_test.go deleted file mode 100644 index 6ca98cbade..0000000000 --- a/tests/integration/cli/client_query_test.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import ( - "testing" -) - -func TestRequestSimple(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "query", - "query IntrospectionQuery {__schema {queryType { name }}}", - }) - nodeLog := stopDefra() - - assertContainsSubstring(t, stdout, "Query") - assertNotContainsSubstring(t, nodeLog, "ERROR") -} - -func TestRequestInvalidQuery(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "query", "{}}"}) - _ = stopDefra() - - assertContainsSubstring(t, stdout, "Syntax Error") -} - -func TestRequestWithErrorNoType(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - stdout, _ := runDefraCommand(t, conf, []string{"client", "query", "query { User { whatever } }"}) - - assertContainsSubstring(t, stdout, "Cannot query field") -} - -func TestRequestWithErrorNoField(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - fname := schemaFileFixture(t, "schema.graphql", ` - type User { - id: ID - name: String - }`) - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", "query { User { nonexistent } }"}) - - assertContainsSubstring(t, stdout, `Cannot query field \"nonexistent\"`) -} - -func TestRequestQueryFromFile(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - fname := schemaFileFixture(t, "schema.graphql", ` - type User123 { - XYZ: String - }`) - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname}) - assertContainsSubstring(t, stdout, "success") - - fname = schemaFileFixture(t, "query.graphql", ` - query { - __schema { - types { - name - fields { - name - type { - name - kind - } - } - } - } - }`) - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", "-f", fname}) - - assertContainsSubstring(t, stdout, "Query") - - // Check that the User type is correctly returned - assertContainsSubstring(t, stdout, "User123") - assertContainsSubstring(t, stdout, "XYZ") -} diff --git a/tests/integration/cli/client_rpc_p2p_collection_test.go b/tests/integration/cli/client_rpc_p2p_collection_test.go deleted file mode 100644 index b44abcaefb..0000000000 --- a/tests/integration/cli/client_rpc_p2p_collection_test.go +++ /dev/null @@ -1,13 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -// TBD diff --git a/tests/integration/cli/client_rpc_replicator_test.go b/tests/integration/cli/client_rpc_replicator_test.go deleted file mode 100644 index 1fd0e3c351..0000000000 --- a/tests/integration/cli/client_rpc_replicator_test.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "fmt" - "testing" -) - -func TestReplicatorGetAllEmpty(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - portTCP, err := findFreePortInRange(t, 49152, 65535) - if err != nil { - t.Fatal(err) - } - conf.GRPCAddr = fmt.Sprintf("/ip4/0.0.0.0/tcp/%d", portTCP) - if err != nil { - t.Fatal(err) - } - - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - tcpAddr := fmt.Sprintf("localhost:%d", portTCP) - _, stderr := runDefraCommand(t, conf, []string{"client", "--addr", tcpAddr, "rpc", "replicator", "getall"}) - assertContainsSubstring(t, stderr, "No replicator found") -} diff --git a/tests/integration/cli/client_schema_add_test.go b/tests/integration/cli/client_schema_add_test.go deleted file mode 100644 index 12d2e5e539..0000000000 --- a/tests/integration/cli/client_schema_add_test.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestAddSchemaFromFile(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - fname := schemaFileFixture(t, "schema.graphql", ` - type User { - id: ID - name: String - }`) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname}) - - nodeLog := stopDefra() - - jsonReponse := `{"data":{"collections":[{"name":"User","id":"bafkreib5hb7mr7ecbdufd7mvv6va6mpxukjai7hpnqkhxonnw7lzwfqlja","version_id":"bafkreib5hb7mr7ecbdufd7mvv6va6mpxukjai7hpnqkhxonnw7lzwfqlja"}],"result":"success"}}` - assert.Contains(t, stdout, jsonReponse) - assertNotContainsSubstring(t, nodeLog, "ERROR") -} - -func TestAddSchemaWithDuplicateType(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - fname1 := schemaFileFixture(t, "schema1.graphql", `type Post { id: ID title: String }`) - fname2 := schemaFileFixture(t, "schema2.graphql", `type Post { id: ID author: String }`) - - stdout1, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname1}) - stdout2, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname2}) - - _ = stopDefra() - - jsonReponse := `{"data":{"collections":[{"name":"Post","id":"bafkreicgpbla5wlogpinnm32arcqzptusdc5tzdznipqrf6nkroav6b25a","version_id":"bafkreicgpbla5wlogpinnm32arcqzptusdc5tzdznipqrf6nkroav6b25a"}],"result":"success"}}` - assertContainsSubstring(t, stdout1, jsonReponse) - assertContainsSubstring(t, stdout2, `schema type already exists. 
Name: Post`) -} diff --git a/tests/integration/cli/client_schema_migration_get_test.go b/tests/integration/cli/client_schema_migration_get_test.go deleted file mode 100644 index dd70879433..0000000000 --- a/tests/integration/cli/client_schema_migration_get_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "fmt" - "testing" - - "github.com/sourcenetwork/defradb/tests/lenses" -) - -func TestSchemaMigrationGet_GivenOneArg_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "get", - "notAnArg", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "too many arguments. Max: 0, Actual: 1") -} - -func TestSchemaMigrationGet_GivenNoMigrations_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "get", - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, `{"data":{"configuration":[]}}`) -} - -func TestSchemaMigrationGet_GivenEmptyMigrationObj_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", "{}", - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "get", - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, - `{"data":{"configuration":[{"SourceSchemaVersionID":"bae123","DestinationSchemaVersionID":"bae456","Lenses":null}]}}`, - ) -} - -func TestSchemaMigrationGet_GivenEmptyMigration_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", `{"lenses": []}`, - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "get", - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, - `{"data":{"configuration":[{"SourceSchemaVersionID":"bae123","DestinationSchemaVersionID":"bae456","Lenses":[]}]}}`, - ) -} - -func TestSchemaMigrationGet_GivenMigration_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", - fmt.Sprintf(`{"lenses": [{"path":"%s","arguments":{"dst":"verified","value":true}}]}`, lenses.SetDefaultModulePath), - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "get", - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, - `{"data":{"configuration":[{"SourceSchemaVersionID":"bae123","DestinationSchemaVersionID":"bae456","Lenses":[`+ - fmt.Sprintf( - `{"Path":"%s",`, - lenses.SetDefaultModulePath, - )+ - 
`"Inverse":false,"Arguments":{"dst":"verified","value":true}}`+ - `]}]}}`, - ) -} diff --git a/tests/integration/cli/client_schema_migration_set_test.go b/tests/integration/cli/client_schema_migration_set_test.go deleted file mode 100644 index d97a4e77d8..0000000000 --- a/tests/integration/cli/client_schema_migration_set_test.go +++ /dev/null @@ -1,244 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "fmt" - "testing" - - "github.com/sourcenetwork/defradb/tests/lenses" -) - -func TestSchemaMigrationSet_GivenEmptyArgs_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{"client", "schema", "migration", "set"}) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing arguments. Required: src, dst, cfg") -} - -func TestSchemaMigrationSet_GivenOneArg_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing arguments. Required: src, dst, cfg") -} - -func TestSchemaMigrationSet_GivenTwoArgs_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing argument. Name: cfg") -} - -func TestSchemaMigrationSet_GivenFourArgs_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", "cfg", "extraArg", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "too many arguments. Max: 3, Actual: 4") -} - -func TestSchemaMigrationSet_GivenEmptySrcArg_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "", "bae", "path", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing argument. Name: src") -} - -func TestSchemaMigrationSet_GivenEmptyDstArg_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae", "", "path", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing argument. Name: dst") -} - -func TestSchemaMigrationSet_GivenEmptyCfgArg_ShouldReturnError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", "", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "missing argument. 
Name: cfg") -} - -func TestSchemaMigrationSet_GivenInvalidCfgJsonObject_ShouldError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", "{--notvalidjson", - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "invalid lens configuration: invalid character") -} - -func TestSchemaMigrationSet_GivenEmptyCfgObject_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", "{}", - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, "success") -} - -func TestSchemaMigrationSet_GivenCfgWithNoLenses_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", `{"lenses": []}`, - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, "success") -} - -func TestSchemaMigrationSet_GivenCfgWithNoLensesUppercase_ShouldSucceed(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", `{"Lenses": []}`, - }) - _ = stopDefra() - - assertContainsSubstring(t, stdout, "success") -} - -func TestSchemaMigrationSet_GivenCfgWithUnknownProp_ShouldError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", `{"NotAProp": []}`, - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "invalid lens configuration: json: unknown field") -} - -func TestSchemaMigrationSet_GivenCfgWithUnknownPath_ShouldError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - _, stderr := runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bae123", "bae456", `{"Lenses": [{"path":"notAPath"}]}`, - }) - _ = stopDefra() - - assertContainsSubstring(t, stderr, "no such file or directory") -} - -func TestSchemaMigrationSet_GivenCfgWithLenses_ShouldSucceedAndMigrateDoc(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", `type Users { name: String }`}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", `mutation { create_Users(data:"{\"name\":\"John\"}") { name } }`}) - assertContainsSubstring(t, stdout, `{"data":[{"name":"John"}]}`) - - stdout, _ = runDefraCommand(t, conf, []string{"client", "schema", "patch", - `[{ "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} }]`, - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bafkreihn4qameldz3j7rfundmd4ldhxnaircuulk6h2vcwnpcgxl4oqffq", - "bafkreia56p6i6o3l4jijayiqd5eiijsypjjokbldaxnmqgeav6fe576hcy", - fmt.Sprintf(`{"lenses": [{"path":"%s","arguments":{"dst":"verified","value":true}}]}`, lenses.SetDefaultModulePath), - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, 
[]string{"client", "query", "query { Users { name verified } }"}) - _ = stopDefra() - - assertContainsSubstring(t, stdout, `{"data":[{"name":"John","verified":true}]}`) -} - -func TestSchemaMigrationSet_GivenCfgWithLenseError_ShouldError(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", `type Users { name: String }`}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", `mutation { create_Users(data:"{\"name\":\"John\"}") { name } }`}) - assertContainsSubstring(t, stdout, `{"data":[{"name":"John"}]}`) - - stdout, _ = runDefraCommand(t, conf, []string{"client", "schema", "patch", - `[{ "op": "add", "path": "/Users/Schema/Fields/-", "value": {"Name": "verified", "Kind": "Boolean"} }]`, - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{ - "client", "schema", "migration", "set", - "bafkreihn4qameldz3j7rfundmd4ldhxnaircuulk6h2vcwnpcgxl4oqffq", - "bafkreia56p6i6o3l4jijayiqd5eiijsypjjokbldaxnmqgeav6fe576hcy", - // Do not set lens parameters in order to generate error - fmt.Sprintf(`{"lenses": [{"path":"%s"}]}`, lenses.SetDefaultModulePath), - }) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", "query { Users { name verified } }"}) - _ = stopDefra() - - // Error generated from within lens module lazily executing within the query - assertContainsSubstring(t, stdout, "Parameters have not been set.") -} diff --git a/tests/integration/cli/client_schema_patch_test.go b/tests/integration/cli/client_schema_patch_test.go deleted file mode 100644 index 487dc9eda5..0000000000 --- a/tests/integration/cli/client_schema_patch_test.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
- -package clitest - -import ( - "testing" -) - -func TestClientSchemaPatch(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - fname := schemaFileFixture(t, "schema.graphql", ` - type User { - id: ID - name: String - }`) - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "schema", "patch", `[{ "op": "add", "path": "/User/Schema/Fields/-", "value": {"Name": "address", "Kind": "String"} }]`}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "query", `query IntrospectionQuery { __type (name: "User") { fields { name } }}`}) - assertContainsSubstring(t, stdout, "address") -} - -func TestClientSchemaPatch_InvalidJSONPatch(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stopDefra := runDefraNode(t, conf) - defer stopDefra() - - fname := schemaFileFixture(t, "schema.graphql", ` - type User { - id: ID - name: String - } - `) - stdout, _ := runDefraCommand(t, conf, []string{"client", "schema", "add", "-f", fname}) - assertContainsSubstring(t, stdout, "success") - - stdout, _ = runDefraCommand(t, conf, []string{"client", "schema", "patch", `[{ "op": "invalidOp" }]`}) - assertContainsSubstring(t, stdout, "Internal Server Error") -} diff --git a/tests/integration/cli/init_test.go b/tests/integration/cli/init_test.go deleted file mode 100644 index 7292d920c3..0000000000 --- a/tests/integration/cli/init_test.go +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/config" -) - -// Executing init command creates valid config file. -func TestCLIInitCommand(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"init", "--rootdir", conf.rootDir}) - cfgfilePath := filepath.Join(conf.rootDir, config.DefaultConfigFileName) - assertContainsSubstring(t, stderr, "Created config file at "+cfgfilePath) - if !assert.FileExists(t, cfgfilePath) { - t.Fatal("Config file not created") - } -} - -func TestCLIInitCommandTwiceErrors(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - cfgfilePath := filepath.Join(conf.rootDir, config.DefaultConfigFileName) - _, stderr := runDefraCommand(t, conf, []string{"init", "--rootdir", conf.rootDir}) - assertContainsSubstring(t, stderr, "Created config file at "+cfgfilePath) - _, stderr = runDefraCommand(t, conf, []string{"init", "--rootdir", conf.rootDir}) - assertContainsSubstring(t, stderr, "Configuration file already exists at "+cfgfilePath) -} - -// Executing init command twice, but second time reinitializing. 
-func TestInitCommandTwiceReinitalize(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - cfgfilePath := filepath.Join(conf.rootDir, config.DefaultConfigFileName) - _, stderr := runDefraCommand(t, conf, []string{"init", "--rootdir", conf.rootDir}) - assertContainsSubstring(t, stderr, "Created config file at "+cfgfilePath) - _, stderr = runDefraCommand(t, conf, []string{"init", "--rootdir", conf.rootDir, "--reinitialize"}) - assertContainsSubstring(t, stderr, "Deleted config file at "+cfgfilePath) - assertContainsSubstring(t, stderr, "Created config file at "+cfgfilePath) -} diff --git a/tests/integration/cli/log_config_test.go b/tests/integration/cli/log_config_test.go deleted file mode 100644 index 55d1b18154..0000000000 --- a/tests/integration/cli/log_config_test.go +++ /dev/null @@ -1,116 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "bufio" - "bytes" - "context" - "fmt" - "io" - "os" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/cli" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/logging" -) - -const ( - testLogger1 = "testLogger1" - testLogger2 = "testLogger2" - testLogger3 = "testLogger3" -) - -var ( - log1 = logging.MustNewLogger(testLogger1) - log2 = logging.MustNewLogger(testLogger2) - log3 = logging.MustNewLogger(testLogger3) -) - -func TestCLILogsToStderrGivenNamedLogLevel(t *testing.T) { - ctx := context.Background() - logLines := captureLogLines( - t, - func() { - // set the log levels - // general: error - // testLogger1: debug - // testLogger2: info - os.Args = append(os.Args, "--loglevel") - os.Args = append(os.Args, fmt.Sprintf("%s,%s=debug,%s=info", "error", testLogger1, testLogger2)) - }, - func() { - log1.Error(ctx, "error") - log1.Debug(ctx, "debug") - log2.Info(ctx, "info") - log3.Debug(ctx, "debug") // wont print, as logger3 will use global level defined above as 'error' - log3.Info(ctx, "info") // wont print, as logger3 will use global level defined above as 'error' - }, - ) - - assert.Len(t, logLines, 3) -} - -func captureLogLines(t *testing.T, setup func(), predicate func()) []string { - r, w, err := os.Pipe() - if err != nil { - t.Fatal(err) - } - stderr := os.Stderr - os.Stderr = w - defer func() { - os.Stderr = stderr - }() - - directory := t.TempDir() - - // Set the default logger output path to a file in the temp dir - // so that production logs don't polute and confuse the tests - // os.Args = append(os.Args, "--logoutput", directory+"/log.txt") - os.Args = append(os.Args, "init", "--rootdir", directory) - - setup() - cfg := config.DefaultConfig() - defraCmd := cli.NewDefraCommand(cfg) - if err := defraCmd.Execute(context.Background()); err != nil { - t.Fatal(err) - } - predicate() - log1.Flush() - log2.Flush() - log3.Flush() - - w.Close() - var buf bytes.Buffer - _, _ = io.Copy(&buf, r) - logLines, err := parseLines(&buf) - if err != nil { - t.Fatal(err) - } - - return logLines -} - -func parseLines(r io.Reader) ([]string, error) { - fileScanner := bufio.NewScanner(r) - - fileScanner.Split(bufio.ScanLines) - - logLines := []string{} - for fileScanner.Scan() { - 
logLines = append(logLines, fileScanner.Text()) - } - - return logLines, nil -} diff --git a/tests/integration/cli/root_test.go b/tests/integration/cli/root_test.go deleted file mode 100644 index 33df29fc4d..0000000000 --- a/tests/integration/cli/root_test.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestRootCommandEmptyRootDir(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{}) - assert.Contains(t, stdout, "Usage:") -} - -func TestRootCommandRootDirWithDefaultConfig(t *testing.T) { - conf := DefraNodeConfig{ - logPath: t.TempDir(), - } - stdout, _ := runDefraCommand(t, conf, []string{}) - assert.Contains(t, stdout, "Usage:") -} - -func TestRootCommandRootDirFromEnv(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{}) - assert.Contains(t, stdout, "Usage:") -} - -func TestRootCommandRootWithNonexistentFlag(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, _ := runDefraCommand(t, conf, []string{"--foo"}) - assert.Contains(t, stdout, "Usage:") -} diff --git a/tests/integration/cli/serverdump_test.go b/tests/integration/cli/serverdump_test.go deleted file mode 100644 index ed8fcd4d9f..0000000000 --- a/tests/integration/cli/serverdump_test.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "testing" -) - -func TestServerDumpMemoryErrs(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"server-dump", "--store", "memory"}) - assertContainsSubstring(t, stderr, "server-side dump is only supported for the Badger datastore") -} - -func TestServerDumpInvalidStoreErrs(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{"server-dump", "--store", "invalid"}) - // assertContainsSubstring(t, stderr, "invalid datastore type") - assertContainsSubstring(t, stderr, "server-side dump is only supported for the Badger datastore") -} diff --git a/tests/integration/cli/start_test.go b/tests/integration/cli/start_test.go deleted file mode 100644 index 1a6267f190..0000000000 --- a/tests/integration/cli/start_test.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. 
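// captureLogLines and captureOutput in these deleted tests share one trick:
// temporarily swap os.Stderr (or os.Stdout) for the write end of an os.Pipe,
// run the code under test, then read the captured lines back. A self-contained
// sketch of that pattern, independent of the defradb loggers; it assumes the
// captured output is small enough to fit in the pipe buffer.
package main

import (
	"bufio"
	"fmt"
	"os"
)

// captureStderr runs f with os.Stderr redirected into a pipe and returns the
// lines written while f executed.
func captureStderr(f func()) ([]string, error) {
	r, w, err := os.Pipe()
	if err != nil {
		return nil, err
	}
	old := os.Stderr
	os.Stderr = w
	f()
	w.Close()       // let the reader see EOF
	os.Stderr = old // restore the real stderr

	var lines []string
	scanner := bufio.NewScanner(r)
	for scanner.Scan() {
		lines = append(lines, scanner.Text())
	}
	return lines, scanner.Err()
}

func main() {
	lines, err := captureStderr(func() {
		fmt.Fprintln(os.Stderr, "hello from stderr")
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(len(lines), lines[0]) // 1 hello from stderr
}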
- -package clitest - -import ( - "fmt" - "testing" -) - -func TestStartCommandBasic(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{ - "start", - "--url", conf.APIURL, - "--tcpaddr", conf.GRPCAddr, - }) - assertContainsSubstring(t, stderr, "Starting DefraDB service...") - assertNotContainsSubstring(t, stderr, "Error") -} - -func TestStartCommandWithTLSIncomplete(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{ - "start", - "--tls", - "--url", conf.APIURL, - "--tcpaddr", conf.GRPCAddr, - }) - assertContainsSubstring(t, stderr, "Starting DefraDB service...") - assertContainsSubstring(t, stderr, "Error") -} - -func TestStartCommandWithStoreMemory(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{ - "start", "--store", "memory", - "--url", conf.APIURL, - "--tcpaddr", conf.GRPCAddr, - }) - assertContainsSubstring(t, stderr, "Starting DefraDB service...") - assertContainsSubstring(t, stderr, "Building new memory store") - assertNotContainsSubstring(t, stderr, "Error") -} - -func TestStartCommandWithP2PAddr(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - p2pport, err := findFreePortInRange(t, 49152, 65535) - if err != nil { - t.Fatal(err) - } - addr := fmt.Sprintf("/ip4/0.0.0.0/tcp/%d", p2pport) - _, stderr := runDefraCommand(t, conf, []string{ - "start", - "--p2paddr", addr, - "--url", conf.APIURL, - "--tcpaddr", conf.GRPCAddr, - }) - assertContainsSubstring(t, stderr, "Starting DefraDB service...") - logstring := fmt.Sprintf("Starting P2P node, {\"P2P address\": \"%s\"}", addr) - assertContainsSubstring(t, stderr, logstring) - assertNotContainsSubstring(t, stderr, "Error") -} - -func TestStartCommandWithNoP2P(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{ - "start", - "--no-p2p", - }) - assertContainsSubstring(t, stderr, "Starting DefraDB service...") - assertNotContainsSubstring(t, stderr, "Starting P2P node") - assertNotContainsSubstring(t, stderr, "Error") -} - -func TestStartCommandWithInvalidStoreType(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - _, stderr := runDefraCommand(t, conf, []string{ - "start", - "--store", "invalid", - }) - assertContainsSubstring(t, stderr, "failed to load config: failed to validate config: invalid store type") -} diff --git a/tests/integration/cli/utils.go b/tests/integration/cli/utils.go deleted file mode 100644 index c94ce222dc..0000000000 --- a/tests/integration/cli/utils.go +++ /dev/null @@ -1,263 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -/* -Package clitest provides a testing framework for the Defra CLI, along with CLI integration tests. 
-*/ -package clitest - -import ( - "bufio" - "bytes" - "context" - "errors" - "fmt" - "io" - "math/rand" - "net" - "os" - "path/filepath" - "strings" - "sync" - "testing" - "time" - - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/cli" - "github.com/sourcenetwork/defradb/config" -) - -const COMMAND_TIMEOUT_SECONDS = 2 * time.Second -const SUBCOMMAND_TIME_BUFFER_SECONDS = 200 * time.Millisecond - -type DefraNodeConfig struct { - rootDir string - logPath string - APIURL string - GRPCAddr string -} - -func NewDefraNodeDefaultConfig(t *testing.T) DefraNodeConfig { - t.Helper() - portAPI, err := findFreePortInRange(t, 49152, 65535) - if err != nil { - t.Fatal(err) - } - portGRPC, err := findFreePortInRange(t, 49152, 65535) - if err != nil { - t.Fatal(err) - } - - return DefraNodeConfig{ - rootDir: t.TempDir(), - logPath: "", - APIURL: fmt.Sprintf("localhost:%d", portAPI), - GRPCAddr: fmt.Sprintf("/ip4/0.0.0.0/tcp/%d", portGRPC), - } -} - -// runDefraNode runs a defra node in a separate goroutine and returns a stopping function -// which also returns the node's execution log lines. -func runDefraNode(t *testing.T, conf DefraNodeConfig) func() []string { - t.Helper() - - if conf.logPath == "" { - conf.logPath = filepath.Join(t.TempDir(), "defra.log") - } - - var args []string - if conf.rootDir != "" { - args = append(args, "--rootdir", conf.rootDir) - } - if conf.APIURL != "" { - args = append(args, "--url", conf.APIURL) - } - if conf.GRPCAddr != "" { - args = append(args, "--tcpaddr", conf.GRPCAddr) - } - args = append(args, "--logoutput", conf.logPath) - - cfg := config.DefaultConfig() - ctx, cancel := context.WithCancel(context.Background()) - ready := make(chan struct{}) - go func(ready chan struct{}) { - defraCmd := cli.NewDefraCommand(cfg) - defraCmd.RootCmd.SetArgs( - append([]string{"start"}, args...), - ) - ready <- struct{}{} - err := defraCmd.Execute(ctx) - assert.NoError(t, err) - }(ready) - <-ready - time.Sleep(SUBCOMMAND_TIME_BUFFER_SECONDS) - cancelAndOutput := func() []string { - cancel() - time.Sleep(SUBCOMMAND_TIME_BUFFER_SECONDS) - lines, err := readLoglines(t, conf.logPath) - assert.NoError(t, err) - return lines - } - return cancelAndOutput -} - -// Runs a defra command and returns the stdout and stderr output. -func runDefraCommand(t *testing.T, conf DefraNodeConfig, args []string) (stdout, stderr []string) { - t.Helper() - cfg := config.DefaultConfig() - args = append([]string{ - "--url", conf.APIURL, - }, args...) 
- if !contains(args, "--rootdir") { - args = append(args, "--rootdir", t.TempDir()) - } - - ctx, cancel := context.WithTimeout(context.Background(), COMMAND_TIMEOUT_SECONDS) - defer cancel() - - stdout, stderr = captureOutput(func() { - defraCmd := cli.NewDefraCommand(cfg) - t.Log("executing defra command with args", args) - defraCmd.RootCmd.SetArgs(args) - _ = defraCmd.Execute(ctx) - }) - return stdout, stderr -} - -func contains(args []string, arg string) bool { - for _, a := range args { - if a == arg { - return true - } - } - return false -} - -func readLoglines(t *testing.T, fpath string) ([]string, error) { - f, err := os.Open(fpath) - if err != nil { - return nil, err - } - defer f.Close() //nolint:errcheck - scanner := bufio.NewScanner(f) - lines := make([]string, 0) - for scanner.Scan() { - lines = append(lines, scanner.Text()) - } - err = scanner.Err() - assert.NoError(t, err) - return lines, nil -} - -func captureOutput(f func()) (stdout, stderr []string) { - oldStdout := os.Stdout - oldStderr := os.Stderr - rStdout, wStdout, err := os.Pipe() - if err != nil { - panic(err) - } - rStderr, wStderr, err := os.Pipe() - if err != nil { - panic(err) - } - os.Stdout = wStdout - os.Stderr = wStderr - - f() - - if err := wStdout.Close(); err != nil { - panic(err) - } - if err := wStderr.Close(); err != nil { - panic(err) - } - - os.Stdout = oldStdout - os.Stderr = oldStderr - - var stdoutBuf, stderrBuf bytes.Buffer - if _, err := io.Copy(&stdoutBuf, rStdout); err != nil { - panic(err) - } - if _, err := io.Copy(&stderrBuf, rStderr); err != nil { - panic(err) - } - - stdout = strings.Split(strings.TrimSuffix(stdoutBuf.String(), "\n"), "\n") - stderr = strings.Split(strings.TrimSuffix(stderrBuf.String(), "\n"), "\n") - - return -} - -var portsInUse = make(map[int]struct{}) -var portMutex = sync.Mutex{} - -// findFreePortInRange returns a free port in the range [minPort, maxPort]. -// The range of ports that are unfrequently used is [49152, 65535]. 
-func findFreePortInRange(t *testing.T, minPort, maxPort int) (int, error) { - if minPort < 1 || maxPort > 65535 || minPort > maxPort { - return 0, errors.New("invalid port range") - } - - const maxAttempts = 100 - for i := 0; i < maxAttempts; i++ { - port := rand.Intn(maxPort-minPort+1) + minPort - if _, ok := portsInUse[port]; ok { - continue - } - addr := fmt.Sprintf("127.0.0.1:%d", port) - listener, err := net.Listen("tcp", addr) - if err == nil { - portMutex.Lock() - portsInUse[port] = struct{}{} - portMutex.Unlock() - t.Cleanup(func() { - portMutex.Lock() - delete(portsInUse, port) - portMutex.Unlock() - }) - _ = listener.Close() - return port, nil - } - } - - return 0, errors.New("unable to find a free port") -} - -func assertContainsSubstring(t *testing.T, haystack []string, substring string) { - t.Helper() - if !containsSubstring(haystack, substring) { - t.Fatalf("expected %q to contain %q", haystack, substring) - } -} - -func assertNotContainsSubstring(t *testing.T, haystack []string, substring string) { - t.Helper() - if containsSubstring(haystack, substring) { - t.Fatalf("expected %q to not contain %q", haystack, substring) - } -} - -func containsSubstring(haystack []string, substring string) bool { - for _, s := range haystack { - if strings.Contains(s, substring) { - return true - } - } - return false -} - -func schemaFileFixture(t *testing.T, fname string, schema string) string { - absFname := filepath.Join(t.TempDir(), fname) - err := os.WriteFile(absFname, []byte(schema), 0644) - assert.NoError(t, err) - return absFname -} diff --git a/tests/integration/cli/version_test.go b/tests/integration/cli/version_test.go deleted file mode 100644 index bc9c2a7e25..0000000000 --- a/tests/integration/cli/version_test.go +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package clitest - -import ( - "encoding/json" - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -// note: this assumes the version information *without* build-time info integrated. 
-func TestExecVersion(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, stderr := runDefraCommand(t, conf, []string{"version"}) - for _, line := range stderr { - assert.NotContains(t, line, "ERROR") - } - output := strings.Join(stdout, " ") - assert.Contains(t, output, "defradb") - assert.Contains(t, output, "built with Go") -} - -func TestExecVersionJSON(t *testing.T) { - conf := NewDefraNodeDefaultConfig(t) - stdout, stderr := runDefraCommand(t, conf, []string{"version", "--format", "json"}) - for _, line := range stderr { - assert.NotContains(t, line, "ERROR") - } - output := strings.Join(stdout, " ") - assert.Contains(t, output, "go\":") - assert.Contains(t, output, "commit\":") - assert.Contains(t, output, "commitdate\":") - var data map[string]any - err := json.Unmarshal([]byte(output), &data) - assert.NoError(t, err) -} diff --git a/version/version.go b/version/version.go index a6fe7ea548..67538d302b 100644 --- a/version/version.go +++ b/version/version.go @@ -17,9 +17,9 @@ import ( "fmt" "strings" - "github.com/sourcenetwork/defradb/api/http" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core/net" + "github.com/sourcenetwork/defradb/http" ) const commitHashMaxLength = 8 From 8fcd3e4ea3c0b7dcee7ea7ace4aa53baac193b39 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 28 Aug 2023 18:19:01 -0700 Subject: [PATCH 041/107] cleanup --- cli/backup_export.go | 7 ++++- cli/backup_import.go | 8 ++++-- cli/cli.go | 56 +++++++++++++++++++----------------- cli/dump.go | 8 ++++-- cli/index_create.go | 11 ++++--- cli/index_drop.go | 8 ++++-- cli/index_list.go | 15 ++++++---- cli/p2p.go | 4 +-- cli/p2p_collection_add.go | 8 ++++-- cli/p2p_collection_getall.go | 12 ++++---- cli/p2p_collection_remove.go | 8 ++++-- cli/p2p_replicator_delete.go | 7 ++++- cli/p2p_replicator_getall.go | 12 ++++---- cli/p2p_replicator_set.go | 7 ++++- cli/request.go | 15 ++++++---- cli/schema_add.go | 12 +++++--- cli/schema_migration_get.go | 14 +++++---- cli/schema_migration_set.go | 10 +++++-- cli/schema_patch.go | 11 +++++-- cli/server_dump.go | 8 ++++-- cmd/defradb/main.go | 5 +--- cmd/genclidocs/genclidocs.go | 11 ++++--- cmd/genmanpages/main.go | 9 +++--- go.mod | 3 +- go.sum | 2 -- 25 files changed, 166 insertions(+), 105 deletions(-) diff --git a/cli/backup_export.go b/cli/backup_export.go index 3c1a5c78ad..127e41ae56 100644 --- a/cli/backup_export.go +++ b/cli/backup_export.go @@ -17,11 +17,12 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) const jsonFileType = "json" -func MakeBackupExportCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeBackupExportCommand(cfg *config.Config) *cobra.Command { var collections []string var pretty bool var format string @@ -44,6 +45,10 @@ Example: export data for the 'Users' collection: return nil }, RunE: func(cmd *cobra.Command, args []string) (err error) { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } if !isValidExportFormat(format) { return ErrInvalidExportFormat } diff --git a/cli/backup_import.go b/cli/backup_import.go index 66023b9317..6d1fecca56 100644 --- a/cli/backup_import.go +++ b/cli/backup_import.go @@ -13,11 +13,11 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + 
"github.com/sourcenetwork/defradb/http" ) -func MakeBackupImportCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "import ", Short: "Import a JSON data file to the database", @@ -32,6 +32,10 @@ Example: import data to the database: return nil }, RunE: func(cmd *cobra.Command, args []string) (err error) { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } return db.BasicImport(cmd.Context(), args[0]) }, } diff --git a/cli/cli.go b/cli/cli.go index 5785e61b48..2382cf1279 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -14,24 +14,20 @@ Package cli provides the command-line interface. package cli import ( + "encoding/json" + "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" ) var log = logging.MustNewLogger("cli") // NewDefraCommand returns the root command instanciated with its tree of subcommands. -func NewDefraCommand(cfg *config.Config) (*cobra.Command, error) { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return nil, err - } - +func NewDefraCommand(cfg *config.Config) *cobra.Command { rootCmd := MakeRootCommand(cfg) - p2pCmd := MakeP2PCommand(cfg) + p2pCmd := MakeP2PCommand() schemaCmd := MakeSchemaCommand() schemaMigrationCmd := MakeSchemaMigrationCommand() indexCmd := MakeIndexCommand() @@ -40,40 +36,40 @@ func NewDefraCommand(cfg *config.Config) (*cobra.Command, error) { p2pReplicatorCmd := MakeP2PReplicatorCommand() p2pCollectionCmd := MakeP2PCollectionCommand() p2pCollectionCmd.AddCommand( - MakeP2PCollectionAddCommand(cfg, db), - MakeP2PCollectionRemoveCommand(cfg, db), - MakeP2PCollectionGetallCommand(cfg, db), + MakeP2PCollectionAddCommand(cfg), + MakeP2PCollectionRemoveCommand(cfg), + MakeP2PCollectionGetallCommand(cfg), ) p2pReplicatorCmd.AddCommand( - MakeP2PReplicatorGetallCommand(cfg, db), - MakeP2PReplicatorSetCommand(cfg, db), - MakeP2PReplicatorDeleteCommand(cfg, db), + MakeP2PReplicatorGetallCommand(cfg), + MakeP2PReplicatorSetCommand(cfg), + MakeP2PReplicatorDeleteCommand(cfg), ) p2pCmd.AddCommand( p2pReplicatorCmd, p2pCollectionCmd, ) schemaMigrationCmd.AddCommand( - MakeSchemaMigrationSetCommand(cfg, db), - MakeSchemaMigrationGetCommand(cfg, db), + MakeSchemaMigrationSetCommand(cfg), + MakeSchemaMigrationGetCommand(cfg), ) schemaCmd.AddCommand( - MakeSchemaAddCommand(cfg, db), - MakeSchemaPatchCommand(cfg, db), + MakeSchemaAddCommand(cfg), + MakeSchemaPatchCommand(cfg), schemaMigrationCmd, ) indexCmd.AddCommand( - MakeIndexCreateCommand(cfg, db), - MakeIndexDropCommand(cfg, db), - MakeIndexListCommand(cfg, db), + MakeIndexCreateCommand(cfg), + MakeIndexDropCommand(cfg), + MakeIndexListCommand(cfg), ) backupCmd.AddCommand( - MakeBackupExportCommand(cfg, db), - MakeBackupImportCommand(cfg, db), + MakeBackupExportCommand(cfg), + MakeBackupImportCommand(cfg), ) clientCmd.AddCommand( - MakeDumpCommand(cfg, db), - MakeRequestCommand(cfg, db), + MakeDumpCommand(cfg), + MakeRequestCommand(cfg), schemaCmd, indexCmd, p2pCmd, @@ -82,10 +78,16 @@ func NewDefraCommand(cfg *config.Config) (*cobra.Command, error) { rootCmd.AddCommand( clientCmd, MakeStartCommand(cfg), - MakeServerDumpCmd(cfg, db), + MakeServerDumpCmd(cfg), MakeVersionCommand(), MakeInitCommand(cfg), ) - return rootCmd, nil + return rootCmd +} + +func writeJSON(cmd *cobra.Command, out any) error { + enc := 
json.NewEncoder(cmd.OutOrStdout()) + enc.SetIndent("", " ") + return enc.Encode(out) } diff --git a/cli/dump.go b/cli/dump.go index c1b6433ef2..514abec36d 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -13,15 +13,19 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeDumpCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeDumpCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } return db.PrintDump(cmd.Context()) }, } diff --git a/cli/index_create.go b/cli/index_create.go index aa024c128d..644bb21f73 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -11,15 +11,14 @@ package cli import ( - "encoding/json" - "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeIndexCreateCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeIndexCreateCommand(cfg *config.Config) *cobra.Command { var collectionArg string var nameArg string var fieldsArg []string @@ -37,6 +36,10 @@ Example: create a named index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name --name UsersByName`, ValidArgs: []string{"collection", "fields", "name"}, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } var fields []client.IndexedFieldDescription for _, name := range fieldsArg { fields = append(fields, client.IndexedFieldDescription{Name: name}) @@ -53,7 +56,7 @@ Example: create a named index for 'Users' collection on 'name' field: if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(desc) + return writeJSON(cmd, desc) }, } cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") diff --git a/cli/index_drop.go b/cli/index_drop.go index c5e4c80d87..176e2b64cd 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -13,11 +13,11 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeIndexDropCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeIndexDropCommand(cfg *config.Config) *cobra.Command { var collectionArg string var nameArg string var cmd = &cobra.Command{ @@ -29,6 +29,10 @@ Example: drop the index 'UsersByName' for 'Users' collection: defradb client index create --collection Users --name UsersByName`, ValidArgs: []string{"collection", "name"}, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } col, err := db.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err diff --git a/cli/index_list.go b/cli/index_list.go index a7608f31f5..342118fb72 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -11,15 +11,13 @@ package cli import ( - "encoding/json" - "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" 
"github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeIndexListCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeIndexListCommand(cfg *config.Config) *cobra.Command { var collectionArg string var cmd = &cobra.Command{ Use: "list [-c --collection ]", @@ -33,6 +31,11 @@ Example: show all index for 'Users' collection: defradb client index list --collection Users`, ValidArgs: []string{"collection"}, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } + switch { case collectionArg != "": col, err := db.GetCollectionByName(cmd.Context(), collectionArg) @@ -43,13 +46,13 @@ Example: show all index for 'Users' collection: if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) + return writeJSON(cmd, cols) default: cols, err := db.GetAllIndexes(cmd.Context()) if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) + return writeJSON(cmd, cols) } }, } diff --git a/cli/p2p.go b/cli/p2p.go index 9cb772c5f7..ee084cc67b 100644 --- a/cli/p2p.go +++ b/cli/p2p.go @@ -12,11 +12,9 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "p2p", Short: "Interact with the DefraDB P2P system", diff --git a/cli/p2p_collection_add.go b/cli/p2p_collection_add.go index fb503bc39b..0bf06d3b1f 100644 --- a/cli/p2p_collection_add.go +++ b/cli/p2p_collection_add.go @@ -13,12 +13,12 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PCollectionAddCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PCollectionAddCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "add [collectionID]", Short: "Add P2P collections", @@ -31,6 +31,10 @@ The collections are synchronized between nodes of a pubsub network.`, return nil }, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } return db.AddP2PCollection(cmd.Context(), args[0]) }, } diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index 9c82b863a2..1b472b50b2 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -11,16 +11,14 @@ package cli import ( - "encoding/json" - "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PCollectionGetallCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PCollectionGetallCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all P2P collections", @@ -33,11 +31,15 @@ This is the list of collections of the node that are synchronized on the pubsub return nil }, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } cols, err := db.GetAllP2PCollections(cmd.Context()) if err != nil { return err } - 
return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) + return writeJSON(cmd, cols) }, } return cmd diff --git a/cli/p2p_collection_remove.go b/cli/p2p_collection_remove.go index de8f4993e8..897bfd5c63 100644 --- a/cli/p2p_collection_remove.go +++ b/cli/p2p_collection_remove.go @@ -13,12 +13,12 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PCollectionRemoveCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PCollectionRemoveCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "remove [collectionID]", Short: "Remove P2P collections", @@ -31,6 +31,10 @@ The removed collections will no longer be synchronized between nodes.`, return nil }, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } return db.RemoveP2PCollection(cmd.Context(), args[0]) }, } diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go index fb5a6d200d..7cbd8d7617 100644 --- a/cli/p2p_replicator_delete.go +++ b/cli/p2p_replicator_delete.go @@ -17,9 +17,10 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PReplicatorDeleteCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PReplicatorDeleteCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "delete ", Short: "Delete a replicator. It will stop synchronizing", @@ -31,6 +32,10 @@ func MakeP2PReplicatorDeleteCommand(cfg *config.Config, db client.DB) *cobra.Com return nil }, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } addr, err := peer.AddrInfoFromString(args[0]) if err != nil { return err diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 24cc40dbfa..9e236abbe4 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -11,26 +11,28 @@ package cli import ( - "encoding/json" - "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PReplicatorGetallCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PReplicatorGetallCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all replicators", Long: `Get all the replicators active in the P2P data sync system. 
These are the replicators that are currently replicating data from one node to another.`, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } reps, err := db.GetAllReplicators(cmd.Context()) if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(reps) + return writeJSON(cmd, reps) }, } return cmd diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go index 514e2decf6..36bba3c685 100644 --- a/cli/p2p_replicator_set.go +++ b/cli/p2p_replicator_set.go @@ -17,9 +17,10 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeP2PReplicatorSetCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeP2PReplicatorSetCommand(cfg *config.Config) *cobra.Command { var collections []string var cmd = &cobra.Command{ Use: "set [-c, --collection] ", @@ -34,6 +35,10 @@ A replicator replicates one or all collection(s) from this node to another. return nil }, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } addr, err := peer.AddrInfoFromString(args[0]) if err != nil { return err diff --git a/cli/request.go b/cli/request.go index 0d211b8af2..0f511a7940 100644 --- a/cli/request.go +++ b/cli/request.go @@ -11,18 +11,17 @@ package cli import ( - "encoding/json" "io" "os" "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeRequestCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeRequestCommand(cfg *config.Config) *cobra.Command { var filePath string var cmd = &cobra.Command{ Use: "query [query request]", @@ -43,6 +42,11 @@ with the database more conveniently. 
To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } + var request string switch { case filePath != "": @@ -66,11 +70,10 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so } result := db.ExecRequest(cmd.Context(), request) if result.Pub == nil { - return json.NewEncoder(cmd.OutOrStdout()).Encode(result.GQL) + return writeJSON(cmd, result.GQL) } - enc := json.NewEncoder(cmd.OutOrStdout()) for item := range result.Pub.Stream() { - enc.Encode(item) //nolint:errcheck + writeJSON(cmd, item) //nolint:errcheck } return nil }, diff --git a/cli/schema_add.go b/cli/schema_add.go index d13b5b3224..81a06d1cf7 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -11,18 +11,17 @@ package cli import ( - "encoding/json" "fmt" "io" "os" "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeSchemaAddCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeSchemaAddCommand(cfg *config.Config) *cobra.Command { var schemaFile string var cmd = &cobra.Command{ Use: "add [schema]", @@ -40,6 +39,11 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } + var schema string switch { case schemaFile != "": @@ -64,7 +68,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(cols) + return writeJSON(cmd, cols) }, } cmd.Flags().StringVarP(&schemaFile, "file", "f", "", "File to load a schema from") diff --git a/cli/schema_migration_get.go b/cli/schema_migration_get.go index 17b0a6bd6a..86017432d0 100644 --- a/cli/schema_migration_get.go +++ b/cli/schema_migration_get.go @@ -11,15 +11,13 @@ package cli import ( - "encoding/json" - "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeSchemaMigrationGetCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeSchemaMigrationGetCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "get", Short: "Gets the schema migrations within DefraDB", @@ -29,12 +27,16 @@ Example: defradb client schema migration get' Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } cfgs, err := db.LensRegistry().Config(cmd.Context()) if err != nil { return err } - return json.NewEncoder(cmd.OutOrStdout()).Encode(cfgs) + return writeJSON(cmd, cfgs) }, } return cmd diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index e1db8afff5..3c6fb045a6 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -23,9 +23,10 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" 
"github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" ) -func MakeSchemaMigrationSetCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeSchemaMigrationSetCommand(cfg *config.Config) *cobra.Command { var lensFile string var cmd = &cobra.Command{ Use: "set [src] [dst] [cfg]", @@ -43,7 +44,12 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, Args: cobra.RangeArgs(2, 3), - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } + var lensCfgJson string switch { case lensFile != "": diff --git a/cli/schema_patch.go b/cli/schema_patch.go index f9cf55f713..8e22696037 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -17,11 +17,11 @@ import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeSchemaPatchCommand(cfg *config.Config, db client.DB) *cobra.Command { +func MakeSchemaPatchCommand(cfg *config.Config) *cobra.Command { var patchFile string var cmd = &cobra.Command{ @@ -41,7 +41,12 @@ Example: patch from stdin: cat patch.json | defradb client schema patch - To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`, - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } + var patch string switch { case patchFile != "": diff --git a/cli/server_dump.go b/cli/server_dump.go index 3812005b23..cfb2ddfc6c 100644 --- a/cli/server_dump.go +++ b/cli/server_dump.go @@ -13,15 +13,19 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" ) -func MakeServerDumpCmd(cfg *config.Config, db client.DB) *cobra.Command { +func MakeServerDumpCmd(cfg *config.Config) *cobra.Command { cmd := &cobra.Command{ Use: "server-dump", Short: "Dumps the state of the entire database", RunE: func(cmd *cobra.Command, _ []string) error { + db, err := http.NewClient("http://" + cfg.API.Address) + if err != nil { + return err + } return db.PrintDump(cmd.Context()) }, } diff --git a/cmd/defradb/main.go b/cmd/defradb/main.go index 4270324de2..11a7f75947 100644 --- a/cmd/defradb/main.go +++ b/cmd/defradb/main.go @@ -18,9 +18,6 @@ import ( // Execute adds all child commands to the root command and sets flags appropriately. 
func main() { - defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) - if err != nil { - panic(err) - } + defraCmd := cli.NewDefraCommand(config.DefaultConfig()) defraCmd.Execute() //nolint:errcheck } diff --git a/cmd/genclidocs/genclidocs.go b/cmd/genclidocs/genclidocs.go index 8ab569d92a..f556c26d20 100644 --- a/cmd/genclidocs/genclidocs.go +++ b/cmd/genclidocs/genclidocs.go @@ -33,15 +33,14 @@ func init() { func main() { flag.Parse() + defraCmd := cli.NewDefraCommand(config.DefaultConfig()) + defraCmd.DisableAutoGenTag = true + if err := os.MkdirAll(path, os.ModePerm); err != nil { log.Fatal("Creating the filesystem path failed", err) } - defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) - if err != nil { - log.Fatal("Creating the filesystem path failed", err) - } - defraCmd.DisableAutoGenTag = true - if err = doc.GenMarkdownTree(defraCmd, path); err != nil { + + if err := doc.GenMarkdownTree(defraCmd, path); err != nil { log.Fatal("Generating cmd docs failed", err) } } diff --git a/cmd/genmanpages/main.go b/cmd/genmanpages/main.go index 99ab9c62b1..1a9b43df7c 100644 --- a/cmd/genmanpages/main.go +++ b/cmd/genmanpages/main.go @@ -41,14 +41,13 @@ func init() { func main() { flag.Parse() + defraCmd := cli.NewDefraCommand(config.DefaultConfig()) + if err := os.MkdirAll(dir, defaultPerm); err != nil { log.Fatal("Failed to create directory", err) } - defraCmd, err := cli.NewDefraCommand(config.DefaultConfig()) - if err != nil { - log.Fatal("Failed to create command", err) - } - if err = doc.GenManTree(defraCmd, header, dir); err != nil { + + if err := doc.GenManTree(defraCmd, header, dir); err != nil { log.Fatal("Failed generation of man pages", err) } } diff --git a/go.mod b/go.mod index ac5a9cc348..d9d6ade38d 100644 --- a/go.mod +++ b/go.mod @@ -33,8 +33,6 @@ require ( github.com/multiformats/go-multiaddr v0.10.1 github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 - github.com/pkg/errors v0.9.1 - github.com/planetscale/vtprotobuf v0.4.0 github.com/sourcenetwork/immutable v0.3.0 github.com/spf13/cobra v1.7.0 github.com/spf13/pflag v1.0.5 @@ -157,6 +155,7 @@ require ( github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect github.com/pelletier/go-toml/v2 v2.0.8 // indirect + github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/polydawn/refmt v0.89.0 // indirect github.com/prometheus/client_golang v1.14.0 // indirect diff --git a/go.sum b/go.sum index 05de5e3ed3..39331844bc 100644 --- a/go.sum +++ b/go.sum @@ -1086,8 +1086,6 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= -github.com/planetscale/vtprotobuf v0.4.0 h1:NEI+g4woRaAZgeZ3sAvbtyvMBRjIv5kE7EWYQ8m4JwY= -github.com/planetscale/vtprotobuf v0.4.0/go.mod h1:wm1N3qk9G/4+VM1WhpkLbvY/d8+0PbwYYpP5P5VhTks= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
github.com/polydawn/refmt v0.0.0-20190221155625-df39d6c2d992/go.mod h1:uIp+gprXxxrWSjjklXD+mN4wed/tMfjMMmN/9+JsA9o= From dd78c6e2215cdb4f082ef9466d7a1ab0e195e0e2 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 29 Aug 2023 17:22:41 -0700 Subject: [PATCH 042/107] implement wrapper for cli integration tests --- Makefile | 2 +- cli/backup_export.go | 16 +- cli/backup_import.go | 13 +- cli/cli.go | 78 +++-- cli/dump.go | 2 +- cli/index_create.go | 9 +- cli/index_drop.go | 10 +- cli/index_list.go | 11 +- cli/p2p_collection.go | 2 +- cli/p2p_collection_add.go | 17 +- cli/p2p_collection_getall.go | 18 +- cli/p2p_collection_remove.go | 17 +- cli/p2p_replicator_delete.go | 17 +- cli/p2p_replicator_getall.go | 10 +- cli/p2p_replicator_set.go | 19 +- cli/request.go | 16 +- cli/root.go | 33 +- cli/schema_add.go | 9 +- cli/schema_migration_get.go | 10 +- cli/schema_migration_set.go | 10 +- cli/schema_patch.go | 9 +- cli/server_dump.go | 2 +- cli/start.go | 26 -- cli/tx.go | 25 ++ cli/tx_commit.go | 41 +++ cli/tx_create.go | 46 +++ cli/tx_discard.go | 42 +++ cli/utils.go | 34 ++ cli/version.go | 32 +- cli/wrapper.go | 311 ++++++++++++++++++ cli/wrapper_cli.go | 63 ++++ cli/wrapper_lens.go | 87 +++++ cli/wrapper_tx.go | 76 +++++ cmd/genclidocs/{genclidocs.go => main.go} | 0 docs/cli/defradb_client.md | 5 +- docs/cli/defradb_client_index_create.md | 2 +- docs/cli/defradb_client_p2p.md | 33 ++ docs/cli/defradb_client_p2p_p2pcollection.md | 35 ++ .../defradb_client_p2p_p2pcollection_add.md | 36 ++ ...defradb_client_p2p_p2pcollection_getall.md | 36 ++ ...defradb_client_p2p_p2pcollection_remove.md | 36 ++ docs/cli/defradb_client_p2p_replicator.md | 35 ++ .../defradb_client_p2p_replicator_delete.md | 35 ++ .../defradb_client_p2p_replicator_getall.md | 36 ++ docs/cli/defradb_client_p2p_replicator_set.md | 38 +++ docs/cli/defradb_client_schema.md | 1 - docs/cli/defradb_server-dump.md | 3 +- http/client.go | 22 +- http/client_collection.go | 62 ++-- http/client_lens.go | 22 +- http/client_tx.go | 36 +- http/handler.go | 18 +- http/http_client.go | 14 +- http/middleware.go | 8 +- http/server.go | 98 +++--- http/server_test.go | 22 +- http/wrapper.go | 4 +- tests/integration/utils2.go | 16 +- 58 files changed, 1376 insertions(+), 390 deletions(-) create mode 100644 cli/tx.go create mode 100644 cli/tx_commit.go create mode 100644 cli/tx_create.go create mode 100644 cli/tx_discard.go create mode 100644 cli/utils.go create mode 100644 cli/wrapper.go create mode 100644 cli/wrapper_cli.go create mode 100644 cli/wrapper_lens.go create mode 100644 cli/wrapper_tx.go rename cmd/genclidocs/{genclidocs.go => main.go} (100%) create mode 100644 docs/cli/defradb_client_p2p.md create mode 100644 docs/cli/defradb_client_p2p_p2pcollection.md create mode 100644 docs/cli/defradb_client_p2p_p2pcollection_add.md create mode 100644 docs/cli/defradb_client_p2p_p2pcollection_getall.md create mode 100644 docs/cli/defradb_client_p2p_p2pcollection_remove.md create mode 100644 docs/cli/defradb_client_p2p_replicator.md create mode 100644 docs/cli/defradb_client_p2p_replicator_delete.md create mode 100644 docs/cli/defradb_client_p2p_replicator_getall.md create mode 100644 docs/cli/defradb_client_p2p_replicator_set.md diff --git a/Makefile b/Makefile index ff46bbf66d..786107ec45 100644 --- a/Makefile +++ b/Makefile @@ -301,7 +301,7 @@ docs: .PHONY: docs\:cli docs\:cli: - go run cmd/genclidocs/genclidocs.go -o docs/cli/ + go run cmd/genclidocs/main.go -o docs/cli/ .PHONY: docs\:manpages docs\:manpages: diff --git 
a/cli/backup_export.go b/cli/backup_export.go index 127e41ae56..c2e9416d3a 100644 --- a/cli/backup_export.go +++ b/cli/backup_export.go @@ -17,7 +17,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) const jsonFileType = "json" @@ -38,17 +37,10 @@ If the --pretty flag is provided, the JSON will be pretty printed. Example: export data for the 'Users' collection: defradb client export --collection Users user_data.json`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return NewErrInvalidArgumentLength(err, 1) - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) (err error) { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) + if !isValidExportFormat(format) { return ErrInvalidExportFormat } @@ -65,7 +57,7 @@ Example: export data for the 'Users' collection: Collections: collections, } - return db.BasicExport(cmd.Context(), &data) + return store.BasicExport(cmd.Context(), &data) }, } cmd.Flags().BoolVarP(&pretty, "pretty", "p", false, "Set the output JSON to be pretty printed") diff --git a/cli/backup_import.go b/cli/backup_import.go index 6d1fecca56..62c1e5d1fd 100644 --- a/cli/backup_import.go +++ b/cli/backup_import.go @@ -13,8 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { @@ -25,18 +25,13 @@ func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { Example: import data to the database: defradb client import user_data.json`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return NewErrInvalidArgumentLength(err, 1) - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) (err error) { - db, err := http.NewClient("http://" + cfg.API.Address) + store := cmd.Context().Value(storeContextKey).(client.Store) if err != nil { return err } - return db.BasicImport(cmd.Context(), args[0]) + return store.BasicImport(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/cli.go b/cli/cli.go index 2382cf1279..feae6f2c9c 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -14,8 +14,6 @@ Package cli provides the command-line interface. package cli import ( - "encoding/json" - "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" @@ -26,68 +24,78 @@ var log = logging.MustNewLogger("cli") // NewDefraCommand returns the root command instanciated with its tree of subcommands. 
func NewDefraCommand(cfg *config.Config) *cobra.Command { - rootCmd := MakeRootCommand(cfg) - p2pCmd := MakeP2PCommand() - schemaCmd := MakeSchemaCommand() - schemaMigrationCmd := MakeSchemaMigrationCommand() - indexCmd := MakeIndexCommand() - clientCmd := MakeClientCommand() - backupCmd := MakeBackupCommand() - p2pReplicatorCmd := MakeP2PReplicatorCommand() - p2pCollectionCmd := MakeP2PCollectionCommand() - p2pCollectionCmd.AddCommand( + p2p_collection := MakeP2PCollectionCommand() + p2p_collection.AddCommand( MakeP2PCollectionAddCommand(cfg), MakeP2PCollectionRemoveCommand(cfg), MakeP2PCollectionGetallCommand(cfg), ) - p2pReplicatorCmd.AddCommand( + + p2p_replicator := MakeP2PReplicatorCommand() + p2p_replicator.AddCommand( MakeP2PReplicatorGetallCommand(cfg), MakeP2PReplicatorSetCommand(cfg), MakeP2PReplicatorDeleteCommand(cfg), ) - p2pCmd.AddCommand( - p2pReplicatorCmd, - p2pCollectionCmd, + + p2p := MakeP2PCommand() + p2p.AddCommand( + p2p_replicator, + p2p_collection, ) - schemaMigrationCmd.AddCommand( + + schema_migrate := MakeSchemaMigrationCommand() + schema_migrate.AddCommand( MakeSchemaMigrationSetCommand(cfg), MakeSchemaMigrationGetCommand(cfg), ) - schemaCmd.AddCommand( + + schema := MakeSchemaCommand() + schema.AddCommand( MakeSchemaAddCommand(cfg), MakeSchemaPatchCommand(cfg), - schemaMigrationCmd, + schema_migrate, ) - indexCmd.AddCommand( + + index := MakeIndexCommand() + index.AddCommand( MakeIndexCreateCommand(cfg), MakeIndexDropCommand(cfg), MakeIndexListCommand(cfg), ) - backupCmd.AddCommand( + + backup := MakeBackupCommand() + backup.AddCommand( MakeBackupExportCommand(cfg), MakeBackupImportCommand(cfg), ) - clientCmd.AddCommand( + + tx := MakeTxCommand() + tx.AddCommand( + MakeTxCreateCommand(cfg), + MakeTxCommitCommand(cfg), + MakeTxDiscardCommand(cfg), + ) + + client := MakeClientCommand() + client.AddCommand( MakeDumpCommand(cfg), MakeRequestCommand(cfg), - schemaCmd, - indexCmd, - p2pCmd, - backupCmd, + schema, + index, + p2p, + backup, + tx, ) - rootCmd.AddCommand( - clientCmd, + + root := MakeRootCommand(cfg) + root.AddCommand( + client, MakeStartCommand(cfg), MakeServerDumpCmd(cfg), MakeVersionCommand(), MakeInitCommand(cfg), ) - return rootCmd -} - -func writeJSON(cmd *cobra.Command, out any) error { - enc := json.NewEncoder(cmd.OutOrStdout()) - enc.SetIndent("", " ") - return enc.Encode(out) + return root } diff --git a/cli/dump.go b/cli/dump.go index 514abec36d..ac612b4284 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -22,7 +22,7 @@ func MakeDumpCommand(cfg *config.Config) *cobra.Command { Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { - db, err := http.NewClient("http://" + cfg.API.Address) + db, err := http.NewClient(cfg.API.Address) if err != nil { return err } diff --git a/cli/index_create.go b/cli/index_create.go index 644bb21f73..596c0d643a 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -15,7 +15,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeIndexCreateCommand(cfg *config.Config) *cobra.Command { @@ -36,10 +35,8 @@ Example: create a named index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name --name UsersByName`, ValidArgs: []string{"collection", "fields", "name"}, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != 
nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) + var fields []client.IndexedFieldDescription for _, name := range fieldsArg { fields = append(fields, client.IndexedFieldDescription{Name: name}) @@ -48,7 +45,7 @@ Example: create a named index for 'Users' collection on 'name' field: Name: nameArg, Fields: fields, } - col, err := db.GetCollectionByName(cmd.Context(), collectionArg) + col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err } diff --git a/cli/index_drop.go b/cli/index_drop.go index 176e2b64cd..5601ae05f2 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -13,8 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeIndexDropCommand(cfg *config.Config) *cobra.Command { @@ -29,11 +29,9 @@ Example: drop the index 'UsersByName' for 'Users' collection: defradb client index create --collection Users --name UsersByName`, ValidArgs: []string{"collection", "name"}, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - col, err := db.GetCollectionByName(cmd.Context(), collectionArg) + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err } diff --git a/cli/index_list.go b/cli/index_list.go index 342118fb72..3fb21c55e1 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -13,8 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeIndexListCommand(cfg *config.Config) *cobra.Command { @@ -31,14 +31,11 @@ Example: show all index for 'Users' collection: defradb client index list --collection Users`, ValidArgs: []string{"collection"}, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) switch { case collectionArg != "": - col, err := db.GetCollectionByName(cmd.Context(), collectionArg) + col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { return err } @@ -48,7 +45,7 @@ Example: show all index for 'Users' collection: } return writeJSON(cmd, cols) default: - cols, err := db.GetAllIndexes(cmd.Context()) + cols, err := store.GetAllIndexes(cmd.Context()) if err != nil { return err } diff --git a/cli/p2p_collection.go b/cli/p2p_collection.go index 6ce6d8e7c7..140ac4cc34 100644 --- a/cli/p2p_collection.go +++ b/cli/p2p_collection.go @@ -16,7 +16,7 @@ import ( func MakeP2PCollectionCommand() *cobra.Command { var cmd = &cobra.Command{ - Use: "p2pcollection", + Use: "collection", Short: "Configure the P2P collection system", Long: `Add, delete, or get the list of P2P collections. 
The selected collections synchronize their events on the pubsub network.`, diff --git a/cli/p2p_collection_add.go b/cli/p2p_collection_add.go index 0bf06d3b1f..0e6dc202d0 100644 --- a/cli/p2p_collection_add.go +++ b/cli/p2p_collection_add.go @@ -13,9 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PCollectionAddCommand(cfg *config.Config) *cobra.Command { @@ -24,18 +23,10 @@ func MakeP2PCollectionAddCommand(cfg *config.Config) *cobra.Command { Short: "Add P2P collections", Long: `Add P2P collections to the synchronized pubsub topics. The collections are synchronized between nodes of a pubsub network.`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify collectionID") - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - return db.AddP2PCollection(cmd.Context(), args[0]) + store := cmd.Context().Value(storeContextKey).(client.Store) + return store.AddP2PCollection(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index 1b472b50b2..a091e8d43f 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -13,9 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PCollectionGetallCommand(cfg *config.Config) *cobra.Command { @@ -24,18 +23,11 @@ func MakeP2PCollectionGetallCommand(cfg *config.Config) *cobra.Command { Short: "Get all P2P collections", Long: `Get all P2P collections in the pubsub topics. This is the list of collections of the node that are synchronized on the pubsub network.`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.NoArgs(cmd, args); err != nil { - return errors.New("must specify no argument") - } - return nil - }, + Args: cobra.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - cols, err := db.GetAllP2PCollections(cmd.Context()) + store := cmd.Context().Value(storeContextKey).(client.Store) + + cols, err := store.GetAllP2PCollections(cmd.Context()) if err != nil { return err } diff --git a/cli/p2p_collection_remove.go b/cli/p2p_collection_remove.go index 897bfd5c63..762fd34cf8 100644 --- a/cli/p2p_collection_remove.go +++ b/cli/p2p_collection_remove.go @@ -13,9 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PCollectionRemoveCommand(cfg *config.Config) *cobra.Command { @@ -24,18 +23,10 @@ func MakeP2PCollectionRemoveCommand(cfg *config.Config) *cobra.Command { Short: "Remove P2P collections", Long: `Remove P2P collections from the followed pubsub topics. 
The removed collections will no longer be synchronized between nodes.`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify collectionID") - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - return db.RemoveP2PCollection(cmd.Context(), args[0]) + store := cmd.Context().Value(storeContextKey).(client.Store) + return store.RemoveP2PCollection(cmd.Context(), args[0]) }, } return cmd diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go index 7cbd8d7617..6958c2a650 100644 --- a/cli/p2p_replicator_delete.go +++ b/cli/p2p_replicator_delete.go @@ -16,8 +16,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PReplicatorDeleteCommand(cfg *config.Config) *cobra.Command { @@ -25,22 +23,15 @@ func MakeP2PReplicatorDeleteCommand(cfg *config.Config) *cobra.Command { Use: "delete ", Short: "Delete a replicator. It will stop synchronizing", Long: `Delete a replicator. It will stop synchronizing.`, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify one argument: PeerID") - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) + addr, err := peer.AddrInfoFromString(args[0]) if err != nil { return err } - return db.DeleteReplicator(cmd.Context(), client.Replicator{Info: *addr}) + return store.DeleteReplicator(cmd.Context(), client.Replicator{Info: *addr}) }, } return cmd diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 9e236abbe4..2a363bcbdd 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -13,8 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PReplicatorGetallCommand(cfg *config.Config) *cobra.Command { @@ -24,11 +24,9 @@ func MakeP2PReplicatorGetallCommand(cfg *config.Config) *cobra.Command { Long: `Get all the replicators active in the P2P data sync system. 
These are the replicators that are currently replicating data from one node to another.`, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - reps, err := db.GetAllReplicators(cmd.Context()) + store := cmd.Context().Value(storeContextKey).(client.Store) + + reps, err := store.GetAllReplicators(cmd.Context()) if err != nil { return err } diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go index 36bba3c685..f9472dac25 100644 --- a/cli/p2p_replicator_set.go +++ b/cli/p2p_replicator_set.go @@ -16,8 +16,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeP2PReplicatorSetCommand(cfg *config.Config) *cobra.Command { @@ -28,17 +26,10 @@ func MakeP2PReplicatorSetCommand(cfg *config.Config) *cobra.Command { Long: `Add a new target replicator. A replicator replicates one or all collection(s) from this node to another. `, - Args: func(cmd *cobra.Command, args []string) error { - if err := cobra.ExactArgs(1)(cmd, args); err != nil { - return errors.New("must specify one argument: peer") - } - return nil - }, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) + addr, err := peer.AddrInfoFromString(args[0]) if err != nil { return err @@ -47,11 +38,11 @@ A replicator replicates one or all collection(s) from this node to another. Info: *addr, Schemas: collections, } - return db.SetReplicator(cmd.Context(), rep) + return store.SetReplicator(cmd.Context(), rep) }, } - cmd.Flags().StringArrayVarP(&collections, "collection", "c", + cmd.Flags().StringSliceVarP(&collections, "collection", "c", []string{}, "Define the collection for the replicator") return cmd } diff --git a/cli/request.go b/cli/request.go index 0f511a7940..c09722ffc7 100644 --- a/cli/request.go +++ b/cli/request.go @@ -16,9 +16,9 @@ import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeRequestCommand(cfg *config.Config) *cobra.Command { @@ -42,10 +42,7 @@ with the database more conveniently. 
To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) var request string switch { @@ -68,9 +65,14 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so if request == "" { return errors.New("request cannot be empty") } - result := db.ExecRequest(cmd.Context(), request) + result := store.ExecRequest(cmd.Context(), request) + + var errors []string + for _, err := range result.GQL.Errors { + errors = append(errors, err.Error()) + } if result.Pub == nil { - return writeJSON(cmd, result.GQL) + return writeJSON(cmd, map[string]any{"data": result.GQL.Data, "errors": errors}) } for item := range result.Pub.Stream() { writeJSON(cmd, item) //nolint:errcheck diff --git a/cli/root.go b/cli/root.go index e639cde785..ed0f454952 100644 --- a/cli/root.go +++ b/cli/root.go @@ -16,10 +16,18 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/http" +) + +type contextKey string + +var ( + dbContextKey = contextKey("db") + storeContextKey = contextKey("store") ) func MakeRootCommand(cfg *config.Config) *cobra.Command { + var txID uint64 var cmd = &cobra.Command{ Use: "defradb", Short: "DefraDB Edge Database", @@ -27,22 +35,19 @@ func MakeRootCommand(cfg *config.Config) *cobra.Command { Start a DefraDB node, interact with a local or remote node, and much more. `, - // Runs on subcommands before their Run function, to handle configuration and top-level flags. - // Loads the rootDir containing the configuration file, otherwise warn about it and load a default configuration. - // This allows some subcommands (`init`, `start`) to override the PreRun to create a rootDir by default. - PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { - if err := cfg.LoadRootDirFromFlagOrDefault(); err != nil { + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + db, err := http.NewClient(cfg.API.Address) + if err != nil { return err } - if cfg.ConfigFileExists() { - if err := cfg.LoadWithRootdir(true); err != nil { - return errors.Wrap("failed to load config", err) - } + ctx := cmd.Context() + if txID != 0 { + ctx = context.WithValue(ctx, storeContextKey, db.WithTxnID(txID)) } else { - if err := cfg.LoadWithRootdir(false); err != nil { - return errors.Wrap("failed to load config", err) - } + ctx = context.WithValue(ctx, storeContextKey, db) } + ctx = context.WithValue(ctx, dbContextKey, db) + cmd.SetContext(ctx) return nil }, } @@ -119,5 +124,7 @@ Start a DefraDB node, interact with a local or remote node, and much more. 
log.FeedbackFatalE(context.Background(), "Could not bind api.address", err) } + cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") + return cmd } diff --git a/cli/schema_add.go b/cli/schema_add.go index 81a06d1cf7..6fe597d77d 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -17,8 +17,8 @@ import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeSchemaAddCommand(cfg *config.Config) *cobra.Command { @@ -39,10 +39,7 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) var schema string switch { @@ -64,7 +61,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw return fmt.Errorf("schema cannot be empty") } - cols, err := db.AddSchema(cmd.Context(), schema) + cols, err := store.AddSchema(cmd.Context(), schema) if err != nil { return err } diff --git a/cli/schema_migration_get.go b/cli/schema_migration_get.go index 86017432d0..ec54773d63 100644 --- a/cli/schema_migration_get.go +++ b/cli/schema_migration_get.go @@ -13,8 +13,8 @@ package cli import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeSchemaMigrationGetCommand(cfg *config.Config) *cobra.Command { @@ -28,11 +28,9 @@ Example: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } - cfgs, err := db.LensRegistry().Config(cmd.Context()) + store := cmd.Context().Value(storeContextKey).(client.Store) + + cfgs, err := store.LensRegistry().Config(cmd.Context()) if err != nil { return err } diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index 3c6fb045a6..1a38e134c6 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -23,7 +23,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" ) func MakeSchemaMigrationSetCommand(cfg *config.Config) *cobra.Command { @@ -45,10 +44,7 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, Args: cobra.RangeArgs(2, 3), RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) var lensCfgJson string switch { @@ -77,7 +73,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw decoder.DisallowUnknownFields() var lensCfg model.Lens - if err = decoder.Decode(&lensCfg); err != nil { + if err := decoder.Decode(&lensCfg); err != nil { return errors.Wrap("invalid lens configuration", err) } @@ -87,7 +83,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw Lens: lensCfg, } - return db.LensRegistry().SetMigration(cmd.Context(), migrationCfg) + return 
store.LensRegistry().SetMigration(cmd.Context(), migrationCfg) }, } cmd.Flags().StringVarP(&lensFile, "file", "f", "", "Lens configuration file") diff --git a/cli/schema_patch.go b/cli/schema_patch.go index 8e22696037..9984c9b2b4 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -17,8 +17,8 @@ import ( "github.com/spf13/cobra" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) func MakeSchemaPatchCommand(cfg *config.Config) *cobra.Command { @@ -42,10 +42,7 @@ Example: patch from stdin: To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) - if err != nil { - return err - } + store := cmd.Context().Value(storeContextKey).(client.Store) var patch string switch { @@ -67,7 +64,7 @@ To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.s return fmt.Errorf("patch cannot be empty") } - return db.PatchSchema(cmd.Context(), patch) + return store.PatchSchema(cmd.Context(), patch) }, } cmd.Flags().StringVarP(&patchFile, "file", "f", "", "File to load a patch from") diff --git a/cli/server_dump.go b/cli/server_dump.go index cfb2ddfc6c..c88cb41290 100644 --- a/cli/server_dump.go +++ b/cli/server_dump.go @@ -22,7 +22,7 @@ func MakeServerDumpCmd(cfg *config.Config) *cobra.Command { Use: "server-dump", Short: "Dumps the state of the entire database", RunE: func(cmd *cobra.Command, _ []string) error { - db, err := http.NewClient("http://" + cfg.API.Address) + db, err := http.NewClient(cfg.API.Address) if err != nil { return err } diff --git a/cli/start.go b/cli/start.go index 7c433b8f9f..a13ea46a6c 100644 --- a/cli/start.go +++ b/cli/start.go @@ -48,32 +48,6 @@ func MakeStartCommand(cfg *config.Config) *cobra.Command { Use: "start", Short: "Start a DefraDB node", Long: "Start a DefraDB node.", - // Load the root config if it exists, otherwise create it. - PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { - if err := cfg.LoadRootDirFromFlagOrDefault(); err != nil { - return err - } - if cfg.ConfigFileExists() { - if err := cfg.LoadWithRootdir(true); err != nil { - return config.NewErrLoadingConfig(err) - } - log.FeedbackInfo(cmd.Context(), fmt.Sprintf("Configuration loaded from DefraDB directory %v", cfg.Rootdir)) - } else { - if err := cfg.LoadWithRootdir(false); err != nil { - return config.NewErrLoadingConfig(err) - } - if config.FolderExists(cfg.Rootdir) { - if err := cfg.WriteConfigFile(); err != nil { - return err - } - } else { - if err := cfg.CreateRootDirAndConfigFile(); err != nil { - return err - } - } - } - return nil - }, RunE: func(cmd *cobra.Command, args []string) error { di, err := start(cmd.Context(), cfg) if err != nil { diff --git a/cli/tx.go b/cli/tx.go new file mode 100644 index 0000000000..b4d278df6d --- /dev/null +++ b/cli/tx.go @@ -0,0 +1,25 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeTxCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "tx", + Short: "Create, commit, and discard DefraDB transactions", + Long: `Create, commit, and discard DefraDB transactions`, + } + + return cmd +} diff --git a/cli/tx_commit.go b/cli/tx_commit.go new file mode 100644 index 0000000000..f5976453f4 --- /dev/null +++ b/cli/tx_commit.go @@ -0,0 +1,41 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "strconv" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" +) + +func MakeTxCommitCommand(cfg *config.Config) *cobra.Command { + var cmd = &cobra.Command{ + Use: "commit [id]", + Short: "Commit a DefraDB transaction", + Long: `Commit a DefraDB transaction`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) (err error) { + id, err := strconv.ParseUint(args[0], 10, 64) + if err != nil { + return err + } + tx, err := http.NewTransaction(cfg.API.Address, id) + if err != nil { + return err + } + return tx.Commit(cmd.Context()) + }, + } + return cmd +} diff --git a/cli/tx_create.go b/cli/tx_create.go new file mode 100644 index 0000000000..5f5321c3e7 --- /dev/null +++ b/cli/tx_create.go @@ -0,0 +1,46 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeTxCreateCommand(cfg *config.Config) *cobra.Command { + var concurrent bool + var readOnly bool + var cmd = &cobra.Command{ + Use: "create", + Short: "Create a new DefraDB transaction", + Long: `Create a new DefraDB transaction`, + RunE: func(cmd *cobra.Command, args []string) (err error) { + db := cmd.Context().Value(dbContextKey).(client.DB) + + var tx datastore.Txn + if concurrent { + tx, err = db.NewConcurrentTxn(cmd.Context(), readOnly) + } else { + tx, err = db.NewTxn(cmd.Context(), readOnly) + } + if err != nil { + return err + } + return writeJSON(cmd, map[string]any{"id": tx.ID()}) + }, + } + cmd.Flags().BoolVar(&concurrent, "concurrent", false, "Transaction is concurrent") + cmd.Flags().BoolVar(&readOnly, "read-only", false, "Transaction is read only") + return cmd +} diff --git a/cli/tx_discard.go b/cli/tx_discard.go new file mode 100644 index 0000000000..83431983e8 --- /dev/null +++ b/cli/tx_discard.go @@ -0,0 +1,42 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "strconv" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" +) + +func MakeTxDiscardCommand(cfg *config.Config) *cobra.Command { + var cmd = &cobra.Command{ + Use: "discard [id]", + Short: "Discard a DefraDB transaction", + Long: `Discard a DefraDB transaction`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) (err error) { + id, err := strconv.ParseUint(args[0], 10, 64) + if err != nil { + return err + } + tx, err := http.NewTransaction(cfg.API.Address, id) + if err != nil { + return err + } + tx.Discard(cmd.Context()) + return nil + }, + } + return cmd +} diff --git a/cli/utils.go b/cli/utils.go new file mode 100644 index 0000000000..cd5b9c6990 --- /dev/null +++ b/cli/utils.go @@ -0,0 +1,34 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "encoding/json" + + "github.com/spf13/cobra" +) + +// func newHttpClient(cfg *config.Config) (client.Store, error) { +// db, err := http.NewClient(cfg.API.Address) +// if err != nil { +// return nil, err +// } +// if txId != 0 { +// return db.WithTxnID(txId), nil +// } +// return db, nil +// } + +func writeJSON(cmd *cobra.Command, out any) error { + enc := json.NewEncoder(cmd.OutOrStdout()) + enc.SetIndent("", " ") + return enc.Encode(out) +} diff --git a/cli/version.go b/cli/version.go index 8842697699..f61ecbc9d5 100644 --- a/cli/version.go +++ b/cli/version.go @@ -11,9 +11,7 @@ package cli import ( - "bytes" - - "encoding/json" + "strings" "github.com/spf13/cobra" @@ -31,25 +29,17 @@ func MakeVersionCommand() *cobra.Command { if err != nil { return err } - switch format { - case "json": - var buf bytes.Buffer - dvj, err := json.Marshal(dv) - if err != nil { - return err - } - err = json.Indent(&buf, dvj, "", " ") - if err != nil { - return err - } - cmd.Println(buf.String()) - default: - if full { - cmd.Println(dv.StringFull()) - } else { - cmd.Println(dv.String()) - } + + if strings.ToLower(format) == "json" { + return writeJSON(cmd, dv) } + + if full { + cmd.Println(dv.StringFull()) + } else { + cmd.Println(dv.String()) + } + return nil }, } diff --git a/cli/wrapper.go b/cli/wrapper.go new file mode 100644 index 0000000000..163694d5f8 --- /dev/null +++ b/cli/wrapper.go @@ -0,0 +1,311 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "context" + "encoding/json" + "fmt" + "net/http/httptest" + "strings" + + blockstore "github.com/ipfs/boxo/blockstore" + "github.com/libp2p/go-libp2p-core/peer" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/events" + "github.com/sourcenetwork/defradb/http" +) + +var _ client.DB = (*Wrapper)(nil) + +type Wrapper struct { + db client.DB + store client.Store + cmd *cliWrapper + handler *http.Handler + httpServer *httptest.Server +} + +func NewWrapper(db client.DB) *Wrapper { + handler := http.NewHandler(db, http.ServerOptions{}) + httpServer := httptest.NewServer(handler) + cmd := newCliWrapper(httpServer.URL) + + return &Wrapper{ + db: db, + store: db, + cmd: cmd, + httpServer: httpServer, + handler: handler, + } +} + +func (w *Wrapper) SetReplicator(ctx context.Context, rep client.Replicator) error { + args := []string{"client", "p2p", "replicator", "set"} + args = append(args, "--collection", strings.Join(rep.Schemas, ",")) + + addrs, err := peer.AddrInfoToP2pAddrs(&rep.Info) + if err != nil { + return err + } + args = append(args, addrs[0].String()) + + _, err = w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) DeleteReplicator(ctx context.Context, rep client.Replicator) error { + args := []string{"client", "p2p", "replicator", "delete"} + + addrs, err := peer.AddrInfoToP2pAddrs(&rep.Info) + if err != nil { + return err + } + args = append(args, addrs[0].String()) + + _, err = w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) GetAllReplicators(ctx context.Context) ([]client.Replicator, error) { + args := []string{"client", "p2p", "replicator", "getall"} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var reps []client.Replicator + if err := json.Unmarshal(data, &reps); err != nil { + return nil, err + } + return reps, nil +} + +func (w *Wrapper) AddP2PCollection(ctx context.Context, collectionID string) error { + args := []string{"client", "p2p", "collection", "add"} + args = append(args, collectionID) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) RemoveP2PCollection(ctx context.Context, collectionID string) error { + args := []string{"client", "p2p", "collection", "remove"} + args = append(args, collectionID) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) GetAllP2PCollections(ctx context.Context) ([]string, error) { + args := []string{"client", "p2p", "collection", "getall"} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var cols []string + if err := json.Unmarshal(data, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (w *Wrapper) BasicImport(ctx context.Context, filepath string) error { + args := []string{"client", "backup", "import"} + args = append(args, filepath) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) error { + args := []string{"client", "backup", "export"} + args = append(args, "--collections", strings.Join(config.Collections, ",")) + + if config.Format != "" { + args = append(args, "--format", config.Format) + } + if config.Pretty { + args = append(args, "--pretty") + } + args = append(args, config.Filepath) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) AddSchema(ctx context.Context, schema string) 
([]client.CollectionDescription, error) { + args := []string{"client", "schema", "add"} + args = append(args, schema) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var cols []client.CollectionDescription + if err := json.Unmarshal(data, &cols); err != nil { + return nil, err + } + return cols, nil +} + +func (w *Wrapper) PatchSchema(ctx context.Context, patch string) error { + args := []string{"client", "schema", "patch"} + args = append(args, patch) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error { + return w.LensRegistry().SetMigration(ctx, config) +} + +func (w *Wrapper) LensRegistry() client.LensRegistry { + return &lensWrapper{ + lens: w.store.LensRegistry(), + cmd: w.cmd, + } +} + +func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { + return w.store.GetCollectionByName(ctx, name) +} + +func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { + return w.store.GetCollectionBySchemaID(ctx, schemaId) +} + +func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { + return w.store.GetCollectionByVersionID(ctx, versionId) +} + +func (w *Wrapper) GetAllCollections(ctx context.Context) ([]client.Collection, error) { + return w.store.GetAllCollections(ctx) +} + +func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { + args := []string{"client", "index", "list"} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var index map[client.CollectionName][]client.IndexDescription + if err := json.Unmarshal(data, &index); err != nil { + return nil, err + } + return index, nil +} + +func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { + args := []string{"client", "query"} + args = append(args, query) + + result := &client.RequestResult{} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + var response http.GraphQLResponse + if err = json.Unmarshal(data, &response); err != nil { + result.GQL.Errors = []error{err} + return result + } + result.GQL.Data = response.Data + for _, err := range response.Errors { + result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) + } + return result +} + +func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + args := []string{"client", "tx", "create"} + if readOnly { + args = append(args, "--read-only") + } + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var res http.CreateTxResponse + if err := json.Unmarshal(data, &res); err != nil { + return nil, err + } + tx, err := w.handler.Transaction(res.ID) + if err != nil { + return nil, err + } + return &TxWrapper{tx, w.cmd}, nil +} + +func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { + args := []string{"client", "tx", "create"} + args = append(args, "--concurrent") + + if readOnly { + args = append(args, "--read-only") + } + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var res http.CreateTxResponse + if err := json.Unmarshal(data, &res); err != nil { + return nil, err + } + tx, err := w.handler.Transaction(res.ID) + if err != nil { + return nil, err + } + return &TxWrapper{tx, 
w.cmd}, nil +} + +func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { + return &Wrapper{ + db: w.db, + store: w.db.WithTxn(tx), + cmd: w.cmd.withTxn(tx), + } +} + +func (w *Wrapper) Root() datastore.RootStore { + return w.db.Root() +} + +func (w *Wrapper) Blockstore() blockstore.Blockstore { + return w.db.Blockstore() +} + +func (w *Wrapper) Close(ctx context.Context) { + w.httpServer.CloseClientConnections() + w.httpServer.Close() + w.db.Close(ctx) +} + +func (w *Wrapper) Events() events.Events { + return w.db.Events() +} + +func (w *Wrapper) MaxTxnRetries() int { + return w.db.MaxTxnRetries() +} + +func (w *Wrapper) PrintDump(ctx context.Context) error { + return w.db.PrintDump(ctx) +} diff --git a/cli/wrapper_cli.go b/cli/wrapper_cli.go new file mode 100644 index 0000000000..e6a71697d2 --- /dev/null +++ b/cli/wrapper_cli.go @@ -0,0 +1,63 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "bytes" + "context" + "fmt" + + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" +) + +type cliWrapper struct { + address string + txValue string +} + +func newCliWrapper(address string) *cliWrapper { + return &cliWrapper{ + address: address, + } +} + +func (w *cliWrapper) withTxn(tx datastore.Txn) *cliWrapper { + return &cliWrapper{ + address: w.address, + txValue: fmt.Sprintf("%d", tx.ID()), + } +} + +func (w *cliWrapper) execute(ctx context.Context, args []string) ([]byte, error) { + var stdOut bytes.Buffer + var stdErr bytes.Buffer + + if w.txValue != "" { + args = append(args, "--tx", w.txValue) + } + + cfg := config.DefaultConfig() + cfg.API.Address = w.address + + cmd := NewDefraCommand(cfg) + cmd.SetOut(&stdOut) + cmd.SetErr(&stdErr) + cmd.SetArgs(args) + + if err := cmd.Execute(); err != nil { + return nil, err + } + if stdErr.Len() > 0 { + return nil, fmt.Errorf("%s", stdErr.String()) + } + return stdOut.Bytes(), nil +} diff --git a/cli/wrapper_lens.go b/cli/wrapper_lens.go new file mode 100644 index 0000000000..c1e865aaa8 --- /dev/null +++ b/cli/wrapper_lens.go @@ -0,0 +1,87 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "context" + "encoding/json" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/immutable/enumerable" +) + +var _ client.LensRegistry = (*lensWrapper)(nil) + +type lensWrapper struct { + lens client.LensRegistry + cmd *cliWrapper +} + +func (w *lensWrapper) WithTxn(tx datastore.Txn) client.LensRegistry { + return &lensWrapper{ + lens: w.lens.WithTxn(tx), + cmd: w.cmd.withTxn(tx), + } +} + +func (w *lensWrapper) SetMigration(ctx context.Context, config client.LensConfig) error { + args := []string{"client", "schema", "migration", "set"} + args = append(args, config.SourceSchemaVersionID) + args = append(args, config.DestinationSchemaVersionID) + + lensCfg, err := json.Marshal(config.Lens) + if err != nil { + return err + } + args = append(args, string(lensCfg)) + + _, err = w.cmd.execute(ctx, args) + return err +} + +func (w *lensWrapper) ReloadLenses(ctx context.Context) error { + return w.lens.ReloadLenses(ctx) +} + +func (w *lensWrapper) MigrateUp( + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + return w.lens.MigrateUp(ctx, src, schemaVersionID) +} + +func (w *lensWrapper) MigrateDown( + ctx context.Context, + src enumerable.Enumerable[map[string]any], + schemaVersionID string, +) (enumerable.Enumerable[map[string]any], error) { + return w.lens.MigrateDown(ctx, src, schemaVersionID) +} + +func (w *lensWrapper) Config(ctx context.Context) ([]client.LensConfig, error) { + args := []string{"client", "schema", "migration", "get"} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var cfgs []client.LensConfig + if err := json.Unmarshal(data, &cfgs); err != nil { + return nil, err + } + return cfgs, nil +} + +func (w *lensWrapper) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { + return w.lens.HasMigration(ctx, schemaVersionID) +} diff --git a/cli/wrapper_tx.go b/cli/wrapper_tx.go new file mode 100644 index 0000000000..de85ece633 --- /dev/null +++ b/cli/wrapper_tx.go @@ -0,0 +1,76 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "context" + "fmt" + + "github.com/sourcenetwork/defradb/datastore" +) + +var _ datastore.Txn = (*TxWrapper)(nil) + +type TxWrapper struct { + tx datastore.Txn + cmd *cliWrapper +} + +func (w *TxWrapper) ID() uint64 { + return w.tx.ID() +} + +func (w *TxWrapper) Commit(ctx context.Context) error { + args := []string{"client", "tx", "commit"} + args = append(args, fmt.Sprintf("%d", w.tx.ID())) + + _, err := w.cmd.execute(ctx, args) + return err +} + +func (w *TxWrapper) Discard(ctx context.Context) { + args := []string{"client", "tx", "discard"} + args = append(args, fmt.Sprintf("%d", w.tx.ID())) + + w.cmd.execute(ctx, args) +} + +func (w *TxWrapper) OnSuccess(fn func()) { + w.tx.OnSuccess(fn) +} + +func (w *TxWrapper) OnError(fn func()) { + w.tx.OnError(fn) +} + +func (w *TxWrapper) OnDiscard(fn func()) { + w.tx.OnDiscard(fn) +} + +func (w *TxWrapper) Rootstore() datastore.DSReaderWriter { + return w.tx.Rootstore() +} + +func (w *TxWrapper) Datastore() datastore.DSReaderWriter { + return w.tx.Datastore() +} + +func (w *TxWrapper) Headstore() datastore.DSReaderWriter { + return w.tx.Headstore() +} + +func (w *TxWrapper) DAGstore() datastore.DAGStore { + return w.tx.DAGstore() +} + +func (w *TxWrapper) Systemstore() datastore.DSReaderWriter { + return w.tx.Systemstore() +} diff --git a/cmd/genclidocs/genclidocs.go b/cmd/genclidocs/main.go similarity index 100% rename from cmd/genclidocs/genclidocs.go rename to cmd/genclidocs/main.go diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index 7173befb6b..f1ae454919 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -30,12 +30,9 @@ Execute queries, add schema types, obtain node info, etc. * [defradb](defradb.md) - DefraDB Edge Database * [defradb client backup](defradb_client_backup.md) - Interact with the backup utility -* [defradb client blocks](defradb_client_blocks.md) - Interact with the database's blockstore * [defradb client dump](defradb_client_dump.md) - Dump the contents of DefraDB node-side * [defradb client index](defradb_client_index.md) - Manage collections' indexes of a running DefraDB instance -* [defradb client peerid](defradb_client_peerid.md) - Get the PeerID of the node -* [defradb client ping](defradb_client_ping.md) - Ping to test connection with a node +* [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system * [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request -* [defradb client rpc](defradb_client_rpc.md) - Interact with a DefraDB node via RPC * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index 7f67e58075..cd54a0085b 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -22,7 +22,7 @@ defradb client index create -c --collection --fields [-n - ``` -c, --collection string Collection name - --fields string Fields to index + --fields strings Fields to index -h, --help help for create -n, --name string Index name ``` diff --git a/docs/cli/defradb_client_p2p.md b/docs/cli/defradb_client_p2p.md new file mode 100644 index 0000000000..066b84aebf --- /dev/null +++ b/docs/cli/defradb_client_p2p.md @@ -0,0 +1,33 @@ +## defradb client p2p + +Interact with the DefraDB P2P system + +### Synopsis + +Interact with the DefraDB P2P system + +### Options + +``` + -h, --help help for p2p +``` + +### 
Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client](defradb_client.md) - Interact with a DefraDB node +* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system +* [defradb client p2p replicator](defradb_client_p2p_replicator.md) - Configure the replicator system + diff --git a/docs/cli/defradb_client_p2p_p2pcollection.md b/docs/cli/defradb_client_p2p_p2pcollection.md new file mode 100644 index 0000000000..0af831e401 --- /dev/null +++ b/docs/cli/defradb_client_p2p_p2pcollection.md @@ -0,0 +1,35 @@ +## defradb client p2p p2pcollection + +Configure the P2P collection system + +### Synopsis + +Add, delete, or get the list of P2P collections. +The selected collections synchronize their events on the pubsub network. + +### Options + +``` + -h, --help help for p2pcollection +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system +* [defradb client p2p p2pcollection add](defradb_client_p2p_p2pcollection_add.md) - Add P2P collections +* [defradb client p2p p2pcollection getall](defradb_client_p2p_p2pcollection_getall.md) - Get all P2P collections +* [defradb client p2p p2pcollection remove](defradb_client_p2p_p2pcollection_remove.md) - Remove P2P collections + diff --git a/docs/cli/defradb_client_p2p_p2pcollection_add.md b/docs/cli/defradb_client_p2p_p2pcollection_add.md new file mode 100644 index 0000000000..cf39c6cd4f --- /dev/null +++ b/docs/cli/defradb_client_p2p_p2pcollection_add.md @@ -0,0 +1,36 @@ +## defradb client p2p p2pcollection add + +Add P2P collections + +### Synopsis + +Add P2P collections to the synchronized pubsub topics. +The collections are synchronized between nodes of a pubsub network. + +``` +defradb client p2p p2pcollection add [collectionID] [flags] +``` + +### Options + +``` + -h, --help help for add +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system + diff --git a/docs/cli/defradb_client_p2p_p2pcollection_getall.md b/docs/cli/defradb_client_p2p_p2pcollection_getall.md new file mode 100644 index 0000000000..247f395007 --- /dev/null +++ b/docs/cli/defradb_client_p2p_p2pcollection_getall.md @@ -0,0 +1,36 @@ +## defradb client p2p p2pcollection getall + +Get all P2P collections + +### Synopsis + +Get all P2P collections in the pubsub topics. +This is the list of collections of the node that are synchronized on the pubsub network. + +``` +defradb client p2p p2pcollection getall [flags] +``` + +### Options + +``` + -h, --help help for getall +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system + diff --git a/docs/cli/defradb_client_p2p_p2pcollection_remove.md b/docs/cli/defradb_client_p2p_p2pcollection_remove.md new file mode 100644 index 0000000000..766805e374 --- /dev/null +++ b/docs/cli/defradb_client_p2p_p2pcollection_remove.md @@ -0,0 +1,36 @@ +## defradb client p2p p2pcollection remove + +Remove P2P collections + +### Synopsis + +Remove P2P collections from the followed pubsub topics. +The removed collections will no longer be synchronized between nodes. + +``` +defradb client p2p p2pcollection remove [collectionID] [flags] +``` + +### Options + +``` + -h, --help help for remove +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system + diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md new file mode 100644 index 0000000000..e4b9340ef1 --- /dev/null +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -0,0 +1,35 @@ +## defradb client p2p replicator + +Configure the replicator system + +### Synopsis + +Configure the replicator system. Add, delete, or get the list of persisted replicators. +A replicator replicates one or all collection(s) from one node to another. + +### Options + +``` + -h, --help help for replicator +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system +* [defradb client p2p replicator delete](defradb_client_p2p_replicator_delete.md) - Delete a replicator. It will stop synchronizing +* [defradb client p2p replicator getall](defradb_client_p2p_replicator_getall.md) - Get all replicators +* [defradb client p2p replicator set](defradb_client_p2p_replicator_set.md) - Set a P2P replicator + diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md new file mode 100644 index 0000000000..74475004cb --- /dev/null +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -0,0 +1,35 @@ +## defradb client p2p replicator delete + +Delete a replicator. It will stop synchronizing + +### Synopsis + +Delete a replicator. It will stop synchronizing. + +``` +defradb client p2p replicator delete [flags] +``` + +### Options + +``` + -h, --help help for delete +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p replicator](defradb_client_p2p_replicator.md) - Configure the replicator system + diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md new file mode 100644 index 0000000000..a24c3d8e53 --- /dev/null +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -0,0 +1,36 @@ +## defradb client p2p replicator getall + +Get all replicators + +### Synopsis + +Get all the replicators active in the P2P data sync system. +These are the replicators that are currently replicating data from one node to another. + +``` +defradb client p2p replicator getall [flags] +``` + +### Options + +``` + -h, --help help for getall +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p replicator](defradb_client_p2p_replicator.md) - Configure the replicator system + diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md new file mode 100644 index 0000000000..9fc45b5b9b --- /dev/null +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -0,0 +1,38 @@ +## defradb client p2p replicator set + +Set a P2P replicator + +### Synopsis + +Add a new target replicator. +A replicator replicates one or all collection(s) from this node to another. + + +``` +defradb client p2p replicator set [-c, --collection] [flags] +``` + +### Options + +``` + -c, --collection stringArray Define the collection for the replicator + -h, --help help for set +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client p2p replicator](defradb_client_p2p_replicator.md) - Configure the replicator system + diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index c36c8d4bce..615a5b5d29 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -29,7 +29,6 @@ Make changes, updates, or look for existing schema types. * [defradb client](defradb_client.md) - Interact with a DefraDB node * [defradb client schema add](defradb_client_schema_add.md) - Add new schema -* [defradb client schema list](defradb_client_schema_list.md) - List schema types with their respective fields * [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance * [defradb client schema patch](defradb_client_schema_patch.md) - Patch an existing schema type diff --git a/docs/cli/defradb_server-dump.md b/docs/cli/defradb_server-dump.md index 0b91e10232..58a799e229 100644 --- a/docs/cli/defradb_server-dump.md +++ b/docs/cli/defradb_server-dump.md @@ -9,8 +9,7 @@ defradb server-dump [flags] ### Options ``` - -h, --help help for server-dump - --store string Datastore to use. Options are badger, memory (default "badger") + -h, --help help for server-dump ``` ### Options inherited from parent commands diff --git a/http/client.go b/http/client.go index 8167f693d1..a58dd4d9db 100644 --- a/http/client.go +++ b/http/client.go @@ -36,11 +36,10 @@ type Client struct { } func NewClient(rawURL string) (*Client, error) { - baseURL, err := url.Parse(rawURL) + httpClient, err := newHttpClient(rawURL) if err != nil { return nil, err } - httpClient := newHttpClient(baseURL.JoinPath("/api/v0")) return &Client{httpClient}, nil } @@ -61,7 +60,7 @@ func (c *Client) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, erro if err := c.http.requestJson(req, &txRes); err != nil { return nil, err } - return &TxClient{txRes.ID, c.http}, nil + return &Transaction{txRes.ID, c.http}, nil } func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { @@ -81,7 +80,7 @@ func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore if err := c.http.requestJson(req, &txRes); err != nil { return nil, err } - return &TxClient{txRes.ID, c.http}, nil + return &Transaction{txRes.ID, c.http}, nil } func (c *Client) WithTxn(tx datastore.Txn) client.Store { @@ -89,6 +88,11 @@ func (c *Client) WithTxn(tx datastore.Txn) client.Store { return &Client{client} } +func (c *Client) WithTxnID(id uint64) client.Store { + client := c.http.withTxn(id) + return &Client{client} +} + func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.http.baseURL.JoinPath("p2p", "replicators") @@ -229,7 +233,7 @@ func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) err } func (c *Client) LensRegistry() client.LensRegistry { - return &LensClient{c.http} + return &LensRegistry{c.http} } func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { @@ -244,7 +248,7 @@ func 
(c *Client) GetCollectionByName(ctx context.Context, name client.Collection if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { @@ -259,7 +263,7 @@ func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) ( if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { @@ -274,7 +278,7 @@ func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { @@ -290,7 +294,7 @@ func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, er } collections := make([]client.Collection, len(descriptions)) for i, d := range descriptions { - collections[i] = &CollectionClient{c.http, d} + collections[i] = &Collection{c.http, d} } return collections, nil } diff --git a/http/client_collection.go b/http/client_collection.go index e54325e2bf..be504220f3 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -25,35 +25,35 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ client.Collection = (*CollectionClient)(nil) +var _ client.Collection = (*Collection)(nil) -// CollectionClient implements the client.Collection interface over HTTP. -type CollectionClient struct { +// Collection implements the client.Collection interface over HTTP. 
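+//
+// A minimal usage sketch, assuming a DefraDB HTTP API reachable at
+// "localhost:9181" and a context.Context ctx; the collection name and field
+// values are illustrative only:
+//
+//	store, err := NewClient("localhost:9181")
+//	if err != nil { /* handle error */ }
+//	col, err := store.GetCollectionByName(ctx, "Users")
+//	if err != nil { /* handle error */ }
+//	doc, err := client.NewDocFromMap(map[string]any{"name": "Bob"})
+//	if err != nil { /* handle error */ }
+//	err = col.Create(ctx, doc) // POST /api/v0/collections/Users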
+type Collection struct { http *httpClient desc client.CollectionDescription } -func (c *CollectionClient) Description() client.CollectionDescription { +func (c *Collection) Description() client.CollectionDescription { return c.desc } -func (c *CollectionClient) Name() string { +func (c *Collection) Name() string { return c.desc.Name } -func (c *CollectionClient) Schema() client.SchemaDescription { +func (c *Collection) Schema() client.SchemaDescription { return c.desc.Schema } -func (c *CollectionClient) ID() uint32 { +func (c *Collection) ID() uint32 { return c.desc.ID } -func (c *CollectionClient) SchemaID() string { +func (c *Collection) SchemaID() string { return c.desc.Schema.SchemaID } -func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) error { +func (c *Collection) Create(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) body, err := doc.String() @@ -72,7 +72,7 @@ func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) err return nil } -func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) var docMapList []map[string]any @@ -101,7 +101,7 @@ func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Docume return nil } -func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() @@ -129,7 +129,7 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err return nil } -func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error { +func (c *Collection) Save(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() @@ -157,7 +157,7 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error return nil } -func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { +func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, docKey.String()) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -171,7 +171,7 @@ func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bo return true, nil } -func (c *CollectionClient) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { +func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { _, err := c.Get(ctx, docKey, false) if err != nil { return false, err @@ -179,7 +179,7 @@ func (c *CollectionClient) Exists(ctx context.Context, docKey client.DocKey) (bo return true, nil } -func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { +func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) @@ -192,7 +192,7 @@ func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater s } } -func (c 
*CollectionClient) updateWith( +func (c *Collection) updateWith( ctx context.Context, request CollectionUpdateRequest, ) (*client.UpdateResult, error) { @@ -213,7 +213,7 @@ func (c *CollectionClient) updateWith( return &result, nil } -func (c *CollectionClient) UpdateWithFilter( +func (c *Collection) UpdateWithFilter( ctx context.Context, filter any, updater string, @@ -224,7 +224,7 @@ func (c *CollectionClient) UpdateWithFilter( }) } -func (c *CollectionClient) UpdateWithKey( +func (c *Collection) UpdateWithKey( ctx context.Context, key client.DocKey, updater string, @@ -235,7 +235,7 @@ func (c *CollectionClient) UpdateWithKey( }) } -func (c *CollectionClient) UpdateWithKeys( +func (c *Collection) UpdateWithKeys( ctx context.Context, docKeys []client.DocKey, updater string, @@ -250,7 +250,7 @@ func (c *CollectionClient) UpdateWithKeys( }) } -func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) @@ -263,7 +263,7 @@ func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client. } } -func (c *CollectionClient) deleteWith( +func (c *Collection) deleteWith( ctx context.Context, request CollectionDeleteRequest, ) (*client.DeleteResult, error) { @@ -284,19 +284,19 @@ func (c *CollectionClient) deleteWith( return &result, nil } -func (c *CollectionClient) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ Filter: filter, }) } -func (c *CollectionClient) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ Key: docKey.String(), }) } -func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { var keys []string for _, key := range docKeys { keys = append(keys, key.String()) @@ -306,7 +306,7 @@ func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client. 
}) } -func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, key.String()) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -320,14 +320,14 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet return client.NewDocFromMap(docMap) } -func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection { - return &CollectionClient{ +func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { + return &Collection{ http: c.http.withTxn(tx.ID()), desc: c.desc, } } -func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -373,7 +373,7 @@ func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.Doc return docKeyCh, nil } -func (c *CollectionClient) CreateIndex( +func (c *Collection) CreateIndex( ctx context.Context, indexDesc client.IndexDescription, ) (client.IndexDescription, error) { @@ -394,7 +394,7 @@ func (c *CollectionClient) CreateIndex( return index, nil } -func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) error { +func (c *Collection) DropIndex(ctx context.Context, indexName string) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes", indexName) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -405,7 +405,7 @@ func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) erro return err } -func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { +func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) diff --git a/http/client_lens.go b/http/client_lens.go index 8520a8401f..11921492f4 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -22,19 +22,19 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ client.LensRegistry = (*LensClient)(nil) +var _ client.LensRegistry = (*LensRegistry)(nil) -// LensClient implements the client.LensRegistry interface over HTTP. -type LensClient struct { +// LensRegistry implements the client.LensRegistry interface over HTTP. 
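+//
+// A minimal usage sketch, assuming a DefraDB HTTP API reachable at
+// "localhost:9181" and a context.Context ctx; the schema version ID is a
+// placeholder:
+//
+//	store, err := NewClient("localhost:9181")
+//	if err != nil { /* handle error */ }
+//	lens := store.LensRegistry()
+//	cfgs, err := lens.Config(ctx)                              // GET /api/v0/lens
+//	found, err := lens.HasMigration(ctx, "<schemaVersionID>")  // GET /api/v0/lens/<schemaVersionID>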
+type LensRegistry struct { http *httpClient } -func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { +func (c *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { http := c.http.withTxn(tx.ID()) - return &LensClient{http} + return &LensRegistry{http} } -func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { +func (c *LensRegistry) SetMigration(ctx context.Context, config client.LensConfig) error { methodURL := c.http.baseURL.JoinPath("lens") body, err := json.Marshal(config) @@ -49,7 +49,7 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) return err } -func (c *LensClient) ReloadLenses(ctx context.Context) error { +func (c *LensRegistry) ReloadLenses(ctx context.Context) error { methodURL := c.http.baseURL.JoinPath("lens", "reload") req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) @@ -60,7 +60,7 @@ func (c *LensClient) ReloadLenses(ctx context.Context) error { return err } -func (c *LensClient) MigrateUp( +func (c *LensRegistry) MigrateUp( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -82,7 +82,7 @@ func (c *LensClient) MigrateUp( return result, nil } -func (c *LensClient) MigrateDown( +func (c *LensRegistry) MigrateDown( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -104,7 +104,7 @@ func (c *LensClient) MigrateDown( return result, nil } -func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { +func (c *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { methodURL := c.http.baseURL.JoinPath("lens") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -118,7 +118,7 @@ func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { return cfgs, nil } -func (c *LensClient) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { +func (c *LensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) diff --git a/http/client_tx.go b/http/client_tx.go index 7592333f3b..ac16c29288 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -18,19 +18,27 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ datastore.Txn = (*TxClient)(nil) +var _ datastore.Txn = (*Transaction)(nil) -// TxClient implements the datastore.Txn interface over HTTP. -type TxClient struct { +// Transaction implements the datastore.Txn interface over HTTP. 
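+//
+// A minimal usage sketch, assuming a DefraDB HTTP API reachable at
+// "localhost:9181" and a context.Context ctx:
+//
+//	store, err := NewClient("localhost:9181")
+//	if err != nil { /* handle error */ }
+//	tx, err := store.NewTxn(ctx, false) // POST /api/v0/tx
+//	if err != nil { /* handle error */ }
+//	defer tx.Discard(ctx)               // DELETE /api/v0/tx/{id}
+//	txStore := store.WithTxn(tx)        // requests through txStore carry the transaction ID
+//	// ... perform operations with txStore ...
+//	if err := tx.Commit(ctx); err != nil { /* handle error */ } // POST /api/v0/tx/{id}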
+type Transaction struct { id uint64 http *httpClient } -func (c *TxClient) ID() uint64 { +func NewTransaction(rawURL string, id uint64) (*Transaction, error) { + httpClient, err := newHttpClient(rawURL) + if err != nil { + return nil, err + } + return &Transaction{id, httpClient}, nil +} + +func (c *Transaction) ID() uint64 { return c.id } -func (c *TxClient) Commit(ctx context.Context) error { +func (c *Transaction) Commit(ctx context.Context) error { methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) @@ -41,7 +49,7 @@ func (c *TxClient) Commit(ctx context.Context) error { return err } -func (c *TxClient) Discard(ctx context.Context) { +func (c *Transaction) Discard(ctx context.Context) { methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -51,34 +59,34 @@ func (c *TxClient) Discard(ctx context.Context) { c.http.request(req) //nolint:errcheck } -func (c *TxClient) OnSuccess(fn func()) { +func (c *Transaction) OnSuccess(fn func()) { panic("client side transaction") } -func (c *TxClient) OnError(fn func()) { +func (c *Transaction) OnError(fn func()) { panic("client side transaction") } -func (c *TxClient) OnDiscard(fn func()) { +func (c *Transaction) OnDiscard(fn func()) { panic("client side transaction") } -func (c *TxClient) Rootstore() datastore.DSReaderWriter { +func (c *Transaction) Rootstore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) Datastore() datastore.DSReaderWriter { +func (c *Transaction) Datastore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) Headstore() datastore.DSReaderWriter { +func (c *Transaction) Headstore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) DAGstore() datastore.DAGStore { +func (c *Transaction) DAGstore() datastore.DAGStore { panic("client side transaction") } -func (c *TxClient) Systemstore() datastore.DSReaderWriter { +func (c *Transaction) Systemstore() datastore.DSReaderWriter { panic("client side transaction") } diff --git a/http/handler.go b/http/handler.go index 35dc108bd3..078ea535a3 100644 --- a/http/handler.go +++ b/http/handler.go @@ -11,10 +11,12 @@ package http import ( + "fmt" "net/http" "sync" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" @@ -26,13 +28,13 @@ var Version string = "v0" // playgroundHandler is set when building with the playground build tag var playgroundHandler = http.HandlerFunc(http.NotFound) -type handler struct { +type Handler struct { db client.DB router *chi.Mux txs *sync.Map } -func newHandler(db client.DB, opts serverOptions) *handler { +func NewHandler(db client.DB, opts ServerOptions) *Handler { txs := &sync.Map{} tx_handler := &txHandler{} @@ -110,13 +112,21 @@ func newHandler(db client.DB, opts serverOptions) *handler { router.Handle("/*", playgroundHandler) - return &handler{ + return &Handler{ db: db, router: router, txs: txs, } } -func (h *handler) ServeHTTP(w http.ResponseWriter, req *http.Request) { +func (h *Handler) Transaction(id uint64) (datastore.Txn, error) { + tx, ok := h.txs.Load(id) + if !ok { + return nil, fmt.Errorf("invalid transaction id") + } + return tx.(datastore.Txn), nil +} + +func (h *Handler) ServeHTTP(w http.ResponseWriter, req 
*http.Request) { h.router.ServeHTTP(w, req) } diff --git a/http/http_client.go b/http/http_client.go index bb100a11a3..5359b8df4d 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -16,6 +16,7 @@ import ( "io" "net/http" "net/url" + "strings" "github.com/sourcenetwork/defradb/datastore/badger/v4" ) @@ -26,12 +27,19 @@ type httpClient struct { txValue string } -func newHttpClient(baseURL *url.URL) *httpClient { +func newHttpClient(rawURL string) (*httpClient, error) { + if !strings.HasPrefix(rawURL, "http") { + rawURL = "http://" + rawURL + } + baseURL, err := url.Parse(rawURL) + if err != nil { + return nil, err + } client := httpClient{ client: http.DefaultClient, - baseURL: baseURL, + baseURL: baseURL.JoinPath("/api/v0"), } - return &client + return &client, nil } func (c *httpClient) withTxn(value uint64) *httpClient { diff --git a/http/middleware.go b/http/middleware.go index b079227837..b9f512ff22 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -39,10 +39,10 @@ var ( ) // CorsMiddleware handles cross origin request -func CorsMiddleware(opts serverOptions) func(http.Handler) http.Handler { +func CorsMiddleware(opts ServerOptions) func(http.Handler) http.Handler { return cors.Handler(cors.Options{ AllowOriginFunc: func(r *http.Request, origin string) bool { - return slices.Contains[string](opts.allowedOrigins, strings.ToLower(origin)) + return slices.Contains[string](opts.AllowedOrigins, strings.ToLower(origin)) }, AllowedMethods: []string{"GET", "HEAD", "POST", "PATCH", "DELETE"}, AllowedHeaders: []string{"Content-Type"}, @@ -51,10 +51,10 @@ func CorsMiddleware(opts serverOptions) func(http.Handler) http.Handler { } // ApiMiddleware sets the required context values for all API requests. -func ApiMiddleware(db client.DB, txs *sync.Map, opts serverOptions) func(http.Handler) http.Handler { +func ApiMiddleware(db client.DB, txs *sync.Map, opts ServerOptions) func(http.Handler) http.Handler { return func(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - if opts.tls.HasValue() { + if opts.TLS.HasValue() { rw.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains") } diff --git a/http/server.go b/http/server.go index a6f092ef20..ccfefb08b1 100644 --- a/http/server.go +++ b/http/server.go @@ -49,7 +49,7 @@ const ( // Server struct holds the Handler for the HTTP API. type Server struct { - options serverOptions + options ServerOptions listener net.Listener certManager *autocert.Manager // address that is assigned to the server on listen @@ -58,28 +58,28 @@ type Server struct { http.Server } -type serverOptions struct { - // list of allowed origins for CORS. - allowedOrigins []string - // ID of the server node. - peerID string - // when the value is present, the server will run with tls - tls immutable.Option[tlsOptions] - // root directory for the node config. - rootDir string - // The domain for the API (optional). - domain immutable.Option[string] +type ServerOptions struct { + // AllowedOrigins is the list of allowed origins for CORS. + AllowedOrigins []string + // PeerID is the p2p id of the server node. + PeerID string + // TLS enables https when the value is present. + TLS immutable.Option[TLSOptions] + // RootDirectory is the directory for the node config. + RootDir string + // Domain is the domain for the API (optional). + Domain immutable.Option[string] } -type tlsOptions struct { - // Public key for TLS. Ignored if domain is set. - pubKey string - // Private key for TLS. 
Ignored if domain is set. - privKey string - // email address for the CA to send problem notifications (optional) - email string - // specify the tls port - port string +type TLSOptions struct { + // PublicKey is the public key for TLS. Ignored if domain is set. + PublicKey string + // PrivateKey is the private key for TLS. Ignored if domain is set. + PrivateKey string + // Email is the address for the CA to send problem notifications (optional) + Email string + // Port is the tls port + Port string } // NewServer instantiates a new server with the given http.Handler. @@ -96,7 +96,7 @@ func NewServer(db client.DB, options ...func(*Server)) *Server { opt(srv) } - srv.Handler = newHandler(db, srv.options) + srv.Handler = NewHandler(db, srv.options) return srv } @@ -128,7 +128,7 @@ func DefaultOpts() func(*Server) { // WithAllowedOrigins returns an option to set the allowed origins for CORS. func WithAllowedOrigins(origins ...string) func(*Server) { return func(s *Server) { - s.options.allowedOrigins = append(s.options.allowedOrigins, origins...) + s.options.AllowedOrigins = append(s.options.AllowedOrigins, origins...) } } @@ -147,7 +147,7 @@ func WithAddress(addr string) func(*Server) { ip := net.ParseIP(host) if ip == nil { s.Addr = httpPort - s.options.domain = immutable.Some(host) + s.options.Domain = immutable.Some(host) } } } @@ -156,58 +156,58 @@ func WithAddress(addr string) func(*Server) { // WithCAEmail returns an option to set the email address for the CA to send problem notifications. func WithCAEmail(email string) func(*Server) { return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.email = email - s.options.tls = immutable.Some(tlsOpt) + tlsOpt := s.options.TLS.Value() + tlsOpt.Email = email + s.options.TLS = immutable.Some(tlsOpt) } } // WithPeerID returns an option to set the identifier of the server node. func WithPeerID(id string) func(*Server) { return func(s *Server) { - s.options.peerID = id + s.options.PeerID = id } } // WithRootDir returns an option to set the root directory for the node config. func WithRootDir(rootDir string) func(*Server) { return func(s *Server) { - s.options.rootDir = rootDir + s.options.RootDir = rootDir } } // WithSelfSignedCert returns an option to set the public and private keys for TLS. func WithSelfSignedCert(pubKey, privKey string) func(*Server) { return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.pubKey = pubKey - tlsOpt.privKey = privKey - s.options.tls = immutable.Some(tlsOpt) + tlsOpt := s.options.TLS.Value() + tlsOpt.PublicKey = pubKey + tlsOpt.PrivateKey = privKey + s.options.TLS = immutable.Some(tlsOpt) } } // WithTLS returns an option to enable TLS. func WithTLS() func(*Server) { return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.port = httpsPort - s.options.tls = immutable.Some(tlsOpt) + tlsOpt := s.options.TLS.Value() + tlsOpt.Port = httpsPort + s.options.TLS = immutable.Some(tlsOpt) } } // WithTLSPort returns an option to set the port for TLS. func WithTLSPort(port int) func(*Server) { return func(s *Server) { - tlsOpt := s.options.tls.Value() - tlsOpt.port = fmt.Sprintf(":%d", port) - s.options.tls = immutable.Some(tlsOpt) + tlsOpt := s.options.TLS.Value() + tlsOpt.Port = fmt.Sprintf(":%d", port) + s.options.TLS = immutable.Some(tlsOpt) } } // Listen creates a new net.Listener and saves it on the receiver. 
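+//
+// A minimal sketch of configuring a server with the options above and binding
+// its listener, assuming a client.DB instance db and a context.Context ctx;
+// the address, origin, and key paths are illustrative only:
+//
+//	srv := NewServer(db,
+//		WithAddress("localhost:9181"),
+//		WithAllowedOrigins("https://app.source.network"),
+//		WithSelfSignedCert("pub.key", "priv.key"),
+//	)
+//	if err := srv.Listen(ctx); err != nil { /* handle error */ }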
func (s *Server) Listen(ctx context.Context) error { var err error - if s.options.tls.HasValue() { + if s.options.TLS.HasValue() { return s.listenWithTLS(ctx) } @@ -240,27 +240,27 @@ func (s *Server) listenWithTLS(ctx context.Context) error { ServerName: "DefraDB", } - if s.options.domain.HasValue() && s.options.domain.Value() != "" { - s.Addr = s.options.tls.Value().port + if s.options.Domain.HasValue() && s.options.Domain.Value() != "" { + s.Addr = s.options.TLS.Value().Port - if s.options.tls.Value().email == "" || s.options.tls.Value().email == config.DefaultAPIEmail { + if s.options.TLS.Value().Email == "" || s.options.TLS.Value().Email == config.DefaultAPIEmail { return ErrNoEmail } - certCache := path.Join(s.options.rootDir, "autocerts") + certCache := path.Join(s.options.RootDir, "autocerts") log.FeedbackInfo( ctx, "Generating auto certificate", - logging.NewKV("Domain", s.options.domain.Value()), + logging.NewKV("Domain", s.options.Domain.Value()), logging.NewKV("Certificate cache", certCache), ) m := &autocert.Manager{ Cache: autocert.DirCache(certCache), Prompt: autocert.AcceptTOS, - Email: s.options.tls.Value().email, - HostPolicy: autocert.HostWhitelist(s.options.domain.Value()), + Email: s.options.TLS.Value().Email, + HostPolicy: autocert.HostWhitelist(s.options.Domain.Value()), } cfg.GetCertificate = m.GetCertificate @@ -274,8 +274,8 @@ func (s *Server) listenWithTLS(ctx context.Context) error { log.FeedbackInfo(ctx, "Generating self signed certificate") cert, err := tls.LoadX509KeyPair( - s.options.tls.Value().privKey, - s.options.tls.Value().pubKey, + s.options.TLS.Value().PrivateKey, + s.options.TLS.Value().PublicKey, ) if err != nil { return errors.WithStack(err) diff --git a/http/server_test.go b/http/server_test.go index 36417e6b41..790f710249 100644 --- a/http/server_test.go +++ b/http/server_test.go @@ -197,7 +197,7 @@ func TestNewServerAndRunWithSelfSignedCert(t *testing.T) { func TestNewServerWithoutOptions(t *testing.T) { s := NewServer(nil) assert.Equal(t, "localhost:9181", s.Addr) - assert.Equal(t, []string(nil), s.options.allowedOrigins) + assert.Equal(t, []string(nil), s.options.AllowedOrigins) } func TestNewServerWithAddress(t *testing.T) { @@ -207,41 +207,41 @@ func TestNewServerWithAddress(t *testing.T) { func TestNewServerWithDomainAddress(t *testing.T) { s := NewServer(nil, WithAddress("example.com")) - assert.Equal(t, "example.com", s.options.domain.Value()) - assert.NotNil(t, s.options.tls) + assert.Equal(t, "example.com", s.options.Domain.Value()) + assert.NotNil(t, s.options.TLS) } func TestNewServerWithAllowedOrigins(t *testing.T) { s := NewServer(nil, WithAllowedOrigins("https://source.network", "https://app.source.network")) - assert.Equal(t, []string{"https://source.network", "https://app.source.network"}, s.options.allowedOrigins) + assert.Equal(t, []string{"https://source.network", "https://app.source.network"}, s.options.AllowedOrigins) } func TestNewServerWithCAEmail(t *testing.T) { s := NewServer(nil, WithCAEmail("me@example.com")) - assert.Equal(t, "me@example.com", s.options.tls.Value().email) + assert.Equal(t, "me@example.com", s.options.TLS.Value().Email) } func TestNewServerWithPeerID(t *testing.T) { s := NewServer(nil, WithPeerID("12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR")) - assert.Equal(t, "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", s.options.peerID) + assert.Equal(t, "12D3KooWFpi6VTYKLtxUftJKEyfX8jDfKi8n15eaygH8ggfYFZbR", s.options.PeerID) } func TestNewServerWithRootDir(t *testing.T) { dir := t.TempDir() s := 
NewServer(nil, WithRootDir(dir)) - assert.Equal(t, dir, s.options.rootDir) + assert.Equal(t, dir, s.options.RootDir) } func TestNewServerWithTLSPort(t *testing.T) { s := NewServer(nil, WithTLSPort(44343)) - assert.Equal(t, ":44343", s.options.tls.Value().port) + assert.Equal(t, ":44343", s.options.TLS.Value().Port) } func TestNewServerWithSelfSignedCert(t *testing.T) { s := NewServer(nil, WithSelfSignedCert("pub.key", "priv.key")) - assert.Equal(t, "pub.key", s.options.tls.Value().pubKey) - assert.Equal(t, "priv.key", s.options.tls.Value().privKey) - assert.NotNil(t, s.options.tls) + assert.Equal(t, "pub.key", s.options.TLS.Value().PublicKey) + assert.Equal(t, "priv.key", s.options.TLS.Value().PrivateKey) + assert.NotNil(t, s.options.TLS) } func TestNewHTTPRedirServer(t *testing.T) { diff --git a/http/wrapper.go b/http/wrapper.go index e8ae24dcd9..7dac6287cf 100644 --- a/http/wrapper.go +++ b/http/wrapper.go @@ -28,13 +28,13 @@ var _ client.DB = (*Wrapper)(nil) // single struct that implements the client.DB interface. type Wrapper struct { db client.DB - handler *handler + handler *Handler client *Client httpServer *httptest.Server } func NewWrapper(db client.DB) (*Wrapper, error) { - handler := newHandler(db, serverOptions{}) + handler := NewHandler(db, ServerOptions{}) httpServer := httptest.NewServer(handler) client, err := NewClient(httpServer.URL) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 2e8261979e..a624155ad0 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -25,6 +25,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" @@ -39,6 +40,7 @@ import ( const ( clientGoEnvName = "DEFRA_CLIENT_GO" clientHttpEnvName = "DEFRA_CLIENT_HTTP" + clientCliEnvName = "DEFRA_CLIENT_CLI" memoryBadgerEnvName = "DEFRA_BADGER_MEMORY" fileBadgerEnvName = "DEFRA_BADGER_FILE" fileBadgerPathEnvName = "DEFRA_BADGER_FILE_PATH" @@ -64,6 +66,7 @@ type ClientType string const ( goClientType ClientType = "go" httpClientType ClientType = "http" + cliClientType ClientType = "cli" ) var ( @@ -73,6 +76,7 @@ var ( inMemoryStore bool httpClient bool goClient bool + cliClient bool ) const subscriptionTimeout = 1 * time.Second @@ -115,6 +119,7 @@ func init() { // that don't have the flag defined httpClientValue, _ := os.LookupEnv(clientHttpEnvName) goClientValue, _ := os.LookupEnv(clientGoEnvName) + cliClientValue, _ := os.LookupEnv(clientCliEnvName) badgerFileValue, _ := os.LookupEnv(fileBadgerEnvName) badgerInMemoryValue, _ := os.LookupEnv(memoryBadgerEnvName) databaseDir, _ = os.LookupEnv(fileBadgerPathEnvName) @@ -127,6 +132,7 @@ func init() { httpClient = getBool(httpClientValue) goClient = getBool(goClientValue) + cliClient = getBool(cliClientValue) badgerFile = getBool(badgerFileValue) badgerInMemory = getBool(badgerInMemoryValue) inMemoryStore = getBool(inMemoryStoreValue) @@ -149,9 +155,10 @@ func init() { inMemoryStore = true } // default is to run against all - if !goClient && !httpClient && !DetectDbChanges { + if !goClient && !httpClient && !cliClient && !DetectDbChanges { goClient = true httpClient = true + cliClient = true } if DetectDbChanges { @@ -249,6 +256,10 @@ func GetClientTypes() []ClientType { clients = append(clients, goClientType) } + if cliClient { + clients = 
append(clients, cliClientType) + } + return clients } @@ -299,6 +310,9 @@ func GetDatabase(s *state) (client.DB, string, error) { case httpClientType: cdb, err = http.NewWrapper(cdb) + case cliClientType: + cdb = cli.NewWrapper(cdb) + case goClientType: // do nothing From 99fa2d415febdab014647c6c02dc3ffb01dce50e Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 30 Aug 2023 10:43:21 -0700 Subject: [PATCH 043/107] all cli tests passing --- cli/request.go | 7 +++++ cli/wrapper.go | 66 +++++++++++++++++++++++++++++++++++++++++++--- cli/wrapper_cli.go | 46 +++++++++++++++++++++++--------- go.mod | 2 +- 4 files changed, 105 insertions(+), 16 deletions(-) diff --git a/cli/request.go b/cli/request.go index c09722ffc7..0a56ae75f8 100644 --- a/cli/request.go +++ b/cli/request.go @@ -21,6 +21,11 @@ import ( "github.com/sourcenetwork/defradb/errors" ) +const ( + REQ_RESULTS_HEADER = "------ Request Results ------\n" + SUB_RESULTS_HEADER = "------ Subscription Results ------\n" +) + func MakeRequestCommand(cfg *config.Config) *cobra.Command { var filePath string var cmd = &cobra.Command{ @@ -72,8 +77,10 @@ To learn more about the DefraDB GraphQL Query Language, refer to https://docs.so errors = append(errors, err.Error()) } if result.Pub == nil { + cmd.Print(REQ_RESULTS_HEADER) return writeJSON(cmd, map[string]any{"data": result.GQL.Data, "errors": errors}) } + cmd.Print(SUB_RESULTS_HEADER) for item := range result.Pub.Stream() { writeJSON(cmd, item) //nolint:errcheck } diff --git a/cli/wrapper.go b/cli/wrapper.go index 163694d5f8..7131247f89 100644 --- a/cli/wrapper.go +++ b/cli/wrapper.go @@ -11,14 +11,16 @@ package cli import ( + "bufio" "context" "encoding/json" "fmt" + "io" "net/http/httptest" "strings" blockstore "github.com/ipfs/boxo/blockstore" - "github.com/libp2p/go-libp2p-core/peer" + "github.com/libp2p/go-libp2p/core/peer" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" @@ -41,6 +43,8 @@ func NewWrapper(db client.DB) *Wrapper { httpServer := httptest.NewServer(handler) cmd := newCliWrapper(httpServer.URL) + // TODO use http.Wrapper here to make a lot of this obsolete + return &Wrapper{ db: db, store: db, @@ -131,8 +135,10 @@ func (w *Wrapper) BasicImport(ctx context.Context, filepath string) error { func (w *Wrapper) BasicExport(ctx context.Context, config *client.BackupConfig) error { args := []string{"client", "backup", "export"} - args = append(args, "--collections", strings.Join(config.Collections, ",")) + if len(config.Collections) > 0 { + args = append(args, "--collections", strings.Join(config.Collections, ",")) + } if config.Format != "" { args = append(args, "--format", config.Format) } @@ -215,11 +221,36 @@ func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.Request result := &client.RequestResult{} - data, err := w.cmd.execute(ctx, args) + stdOut, stdErr, err := w.cmd.executeStream(ctx, args) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + buffer := bufio.NewReader(stdOut) + header, err := buffer.ReadString('\n') + if err != nil { + result.GQL.Errors = []error{err} + return result + } + if header == SUB_RESULTS_HEADER { + result.Pub = w.execRequestSubscription(ctx, buffer) + return result + } + data, err := io.ReadAll(buffer) if err != nil { result.GQL.Errors = []error{err} return result } + errData, err := io.ReadAll(stdErr) + if err != nil { + result.GQL.Errors = []error{err} + return result + } + if len(errData) > 0 { + result.GQL.Errors 
= []error{fmt.Errorf("%s", errData)} + return result + } + var response http.GraphQLResponse if err = json.Unmarshal(data, &response); err != nil { result.GQL.Errors = []error{err} @@ -232,6 +263,35 @@ func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.Request return result } +func (w *Wrapper) execRequestSubscription(ctx context.Context, r io.Reader) *events.Publisher[events.Update] { + pubCh := events.New[events.Update](0, 0) + pub, err := events.NewPublisher[events.Update](pubCh, 0) + if err != nil { + return nil + } + + go func() { + dec := json.NewDecoder(r) + + for { + var response http.GraphQLResponse + if err := dec.Decode(&response); err != nil { + return + } + var errors []error + for _, err := range response.Errors { + errors = append(errors, fmt.Errorf(err)) + } + pub.Publish(client.GQLResult{ + Errors: errors, + Data: response.Data, + }) + } + }() + + return pub +} + func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { args := []string{"client", "tx", "create"} if readOnly { diff --git a/cli/wrapper_cli.go b/cli/wrapper_cli.go index e6a71697d2..00c9cd3dd9 100644 --- a/cli/wrapper_cli.go +++ b/cli/wrapper_cli.go @@ -11,9 +11,9 @@ package cli import ( - "bytes" "context" "fmt" + "io" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/datastore" @@ -38,8 +38,27 @@ func (w *cliWrapper) withTxn(tx datastore.Txn) *cliWrapper { } func (w *cliWrapper) execute(ctx context.Context, args []string) ([]byte, error) { - var stdOut bytes.Buffer - var stdErr bytes.Buffer + stdOut, stdErr, err := w.executeStream(ctx, args) + if err != nil { + return nil, err + } + stdOutData, err := io.ReadAll(stdOut) + if err != nil { + return nil, err + } + stdErrData, err := io.ReadAll(stdErr) + if err != nil { + return nil, err + } + if len(stdErrData) != 0 { + return nil, fmt.Errorf("%s", stdErrData) + } + return stdOutData, nil +} + +func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadCloser, io.ReadCloser, error) { + stdOutRead, stdOutWrite := io.Pipe() + stdErrRead, stdErrWrite := io.Pipe() if w.txValue != "" { args = append(args, "--tx", w.txValue) @@ -49,15 +68,18 @@ func (w *cliWrapper) execute(ctx context.Context, args []string) ([]byte, error) cfg.API.Address = w.address cmd := NewDefraCommand(cfg) - cmd.SetOut(&stdOut) - cmd.SetErr(&stdErr) + cmd.SetOut(stdOutWrite) + cmd.SetErr(stdErrWrite) cmd.SetArgs(args) - if err := cmd.Execute(); err != nil { - return nil, err - } - if stdErr.Len() > 0 { - return nil, fmt.Errorf("%s", stdErr.String()) - } - return stdOut.Bytes(), nil + cmd.SilenceErrors = true + cmd.SilenceUsage = true + + go func() { + err := cmd.Execute() + stdOutWrite.CloseWithError(err) + stdErrWrite.CloseWithError(err) + }() + + return stdOutRead, stdErrRead, nil } diff --git a/go.mod b/go.mod index d9d6ade38d..087070597f 100644 --- a/go.mod +++ b/go.mod @@ -25,6 +25,7 @@ require ( github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 github.com/libp2p/go-libp2p v0.29.2 + github.com/libp2p/go-libp2p-core v0.20.0 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.0 github.com/libp2p/go-libp2p-pubsub v0.9.3 @@ -126,7 +127,6 @@ require ( github.com/libp2p/go-flow-metrics v0.1.0 // indirect github.com/libp2p/go-libp2p-asn-util v0.3.0 // indirect github.com/libp2p/go-libp2p-connmgr v0.4.0 // indirect - 
github.com/libp2p/go-libp2p-core v0.20.0 // indirect github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.0 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect From e13745e8b297be208c45696026be9ac9c8f828a1 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 30 Aug 2023 10:48:39 -0700 Subject: [PATCH 044/107] restore server dump cli command --- cli/server_dump.go | 48 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/cli/server_dump.go b/cli/server_dump.go index c88cb41290..0ba638d268 100644 --- a/cli/server_dump.go +++ b/cli/server_dump.go @@ -11,23 +11,65 @@ package cli import ( + "fmt" + "os" + "os/signal" + "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" + ds "github.com/sourcenetwork/defradb/datastore" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" + "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/logging" ) func MakeServerDumpCmd(cfg *config.Config) *cobra.Command { + var datastore string + cmd := &cobra.Command{ Use: "server-dump", Short: "Dumps the state of the entire database", RunE: func(cmd *cobra.Command, _ []string) error { - db, err := http.NewClient(cfg.API.Address) + log.FeedbackInfo(cmd.Context(), "Starting DefraDB process...") + + // setup signal handlers + signalCh := make(chan os.Signal, 1) + signal.Notify(signalCh, os.Interrupt) + + var rootstore ds.RootStore + var err error + if datastore == badgerDatastoreName { + info, err := os.Stat(cfg.Datastore.Badger.Path) + exists := (err == nil && info.IsDir()) + if !exists { + return errors.New(fmt.Sprintf( + "badger store does not exist at %s. Try with an existing directory", + cfg.Datastore.Badger.Path, + )) + } + log.FeedbackInfo(cmd.Context(), "Opening badger store", logging.NewKV("Path", cfg.Datastore.Badger.Path)) + rootstore, err = badgerds.NewDatastore(cfg.Datastore.Badger.Path, cfg.Datastore.Badger.Options) + if err != nil { + return errors.Wrap("could not open badger datastore", err) + } + } else { + return errors.New("server-side dump is only supported for the Badger datastore") + } + + db, err := db.NewDB(cmd.Context(), rootstore) if err != nil { - return err + return errors.Wrap("failed to initialize database", err) } + + log.FeedbackInfo(cmd.Context(), "Dumping DB state...") return db.PrintDump(cmd.Context()) }, } + cmd.Flags().StringVar( + &datastore, "store", cfg.Datastore.Store, + "Datastore to use. 
Options are badger, memory", + ) return cmd } From c157fb741f7625ff607f7484596e693268ccfc1f Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 30 Aug 2023 13:21:43 -0700 Subject: [PATCH 045/107] move config functions to utils --- cli/cli.go | 2 +- cli/client.go | 27 +++++++++++++++++++++++++-- cli/root.go | 18 +----------------- cli/start.go | 10 ++++++++++ cli/utils.go | 28 ++++++++++++++++++---------- cli/wrapper_cli.go | 9 ++++----- 6 files changed, 59 insertions(+), 35 deletions(-) diff --git a/cli/cli.go b/cli/cli.go index feae6f2c9c..dbef18a4b4 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -77,7 +77,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { MakeTxDiscardCommand(cfg), ) - client := MakeClientCommand() + client := MakeClientCommand(cfg) client.AddCommand( MakeDumpCommand(cfg), MakeRequestCommand(cfg), diff --git a/cli/client.go b/cli/client.go index 2456df8d43..de8b4a66bb 100644 --- a/cli/client.go +++ b/cli/client.go @@ -11,16 +11,39 @@ package cli import ( + "context" + + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/http" "github.com/spf13/cobra" ) -func MakeClientCommand() *cobra.Command { +func MakeClientCommand(cfg *config.Config) *cobra.Command { + var txID uint64 var cmd = &cobra.Command{ Use: "client", Short: "Interact with a DefraDB node", Long: `Interact with a DefraDB node. Execute queries, add schema types, obtain node info, etc.`, + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + if err := loadConfig(cfg); err != nil { + return err + } + db, err := http.NewClient(cfg.API.Address) + if err != nil { + return err + } + ctx := cmd.Context() + if txID != 0 { + ctx = context.WithValue(ctx, storeContextKey, db.WithTxnID(txID)) + } else { + ctx = context.WithValue(ctx, storeContextKey, db) + } + ctx = context.WithValue(ctx, dbContextKey, db) + cmd.SetContext(ctx) + return nil + }, } - + cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") return cmd } diff --git a/cli/root.go b/cli/root.go index ed0f454952..b844fc6e84 100644 --- a/cli/root.go +++ b/cli/root.go @@ -16,7 +16,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" ) type contextKey string @@ -27,7 +26,6 @@ var ( ) func MakeRootCommand(cfg *config.Config) *cobra.Command { - var txID uint64 var cmd = &cobra.Command{ Use: "defradb", Short: "DefraDB Edge Database", @@ -36,19 +34,7 @@ func MakeRootCommand(cfg *config.Config) *cobra.Command { Start a DefraDB node, interact with a local or remote node, and much more. `, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { - db, err := http.NewClient(cfg.API.Address) - if err != nil { - return err - } - ctx := cmd.Context() - if txID != 0 { - ctx = context.WithValue(ctx, storeContextKey, db.WithTxnID(txID)) - } else { - ctx = context.WithValue(ctx, storeContextKey, db) - } - ctx = context.WithValue(ctx, dbContextKey, db) - cmd.SetContext(ctx) - return nil + return loadConfig(cfg) }, } @@ -124,7 +110,5 @@ Start a DefraDB node, interact with a local or remote node, and much more. 
log.FeedbackFatalE(context.Background(), "Could not bind api.address", err) } - cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") - return cmd } diff --git a/cli/start.go b/cli/start.go index a13ea46a6c..c3b869fbf8 100644 --- a/cli/start.go +++ b/cli/start.go @@ -48,6 +48,16 @@ func MakeStartCommand(cfg *config.Config) *cobra.Command { Use: "start", Short: "Start a DefraDB node", Long: "Start a DefraDB node.", + // Load the root config if it exists, otherwise create it. + PersistentPreRunE: func(cmd *cobra.Command, _ []string) error { + if err := loadConfig(cfg); err != nil { + return err + } + if !cfg.ConfigFileExists() { + return createConfig(cfg) + } + return nil + }, RunE: func(cmd *cobra.Command, args []string) error { di, err := start(cmd.Context(), cfg) if err != nil { diff --git a/cli/utils.go b/cli/utils.go index cd5b9c6990..48f265cb69 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -13,19 +13,27 @@ package cli import ( "encoding/json" + "github.com/sourcenetwork/defradb/config" "github.com/spf13/cobra" ) -// func newHttpClient(cfg *config.Config) (client.Store, error) { -// db, err := http.NewClient(cfg.API.Address) -// if err != nil { -// return nil, err -// } -// if txId != 0 { -// return db.WithTxnID(txId), nil -// } -// return db, nil -// } +// loadConfig loads the rootDir containing the configuration file, +// otherwise warn about it and load a default configuration. +func loadConfig(cfg *config.Config) error { + if err := cfg.LoadRootDirFromFlagOrDefault(); err != nil { + return err + } + return cfg.LoadWithRootdir(cfg.ConfigFileExists()) +} + +// createConfig creates the config directories and writes +// the current config to a file. +func createConfig(cfg *config.Config) error { + if config.FolderExists(cfg.Rootdir) { + return cfg.WriteConfigFile() + } + return cfg.CreateRootDirAndConfigFile() +} func writeJSON(cmd *cobra.Command, out any) error { enc := json.NewEncoder(cmd.OutOrStdout()) diff --git a/cli/wrapper_cli.go b/cli/wrapper_cli.go index 00c9cd3dd9..7f204a48e3 100644 --- a/cli/wrapper_cli.go +++ b/cli/wrapper_cli.go @@ -14,6 +14,7 @@ import ( "context" "fmt" "io" + "strings" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/datastore" @@ -26,7 +27,7 @@ type cliWrapper struct { func newCliWrapper(address string) *cliWrapper { return &cliWrapper{ - address: address, + address: strings.TrimPrefix(address, "http://"), } } @@ -63,11 +64,9 @@ func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadC if w.txValue != "" { args = append(args, "--tx", w.txValue) } + args = append(args, "--url", w.address) - cfg := config.DefaultConfig() - cfg.API.Address = w.address - - cmd := NewDefraCommand(cfg) + cmd := NewDefraCommand(config.DefaultConfig()) cmd.SetOut(stdOutWrite) cmd.SetErr(stdErrWrite) cmd.SetArgs(args) From 33ccbf946e6d5fcbc4d02d6311e653b3d773c694 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 30 Aug 2023 17:21:55 -0700 Subject: [PATCH 046/107] add document commands to cli --- Makefile | 9 +- cli/backup_export.go | 5 +- cli/backup_import.go | 8 +- cli/cli.go | 45 ++-- cli/client.go | 17 +- cli/collection.go | 66 ++++++ cli/document.go | 25 +++ cli/document_create.go | 70 ++++++ cli/document_delete.go | 81 +++++++ cli/document_get.go | 56 +++++ cli/document_keys.go | 55 +++++ cli/document_save.go | 53 +++++ cli/document_update.go | 82 +++++++ cli/dump.go | 10 +- cli/index_create.go | 7 +- cli/index_drop.go | 7 +- cli/index_list.go | 15 +- 
cli/p2p_collection_add.go | 3 +- cli/p2p_collection_getall.go | 3 +- cli/p2p_collection_remove.go | 3 +- cli/p2p_replicator_delete.go | 3 +- cli/p2p_replicator_getall.go | 3 +- cli/p2p_replicator_set.go | 3 +- cli/request.go | 3 +- cli/root.go | 7 - cli/schema_add.go | 3 +- cli/schema_migration_get.go | 3 +- cli/schema_migration_set.go | 3 +- cli/schema_patch.go | 3 +- cli/utils.go | 46 +++- cli/wrapper.go | 70 ++++-- cli/wrapper_collection.go | 415 +++++++++++++++++++++++++++++++++++ cli/wrapper_lens.go | 25 ++- cli/wrapper_tx.go | 28 +-- go.mod | 2 +- http/client.go | 5 - logging/registry.go | 3 + 37 files changed, 1105 insertions(+), 140 deletions(-) create mode 100644 cli/collection.go create mode 100644 cli/document.go create mode 100644 cli/document_create.go create mode 100644 cli/document_delete.go create mode 100644 cli/document_get.go create mode 100644 cli/document_keys.go create mode 100644 cli/document_save.go create mode 100644 cli/document_update.go create mode 100644 cli/wrapper_collection.go diff --git a/Makefile b/Makefile index 786107ec45..db931572e3 100644 --- a/Makefile +++ b/Makefile @@ -33,8 +33,7 @@ TEST_FLAGS=-race -shuffle=on -timeout 300s PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations -CLI_TEST_DIRECTORY=tests/integration/cli -DEFAULT_TEST_DIRECTORIES=$$(go list ./... | grep -v -e $(LENS_TEST_DIRECTORY) -e $(CLI_TEST_DIRECTORY)) +DEFAULT_TEST_DIRECTORIES=$$(go list ./... | grep -v -e $(LENS_TEST_DIRECTORY)) default: @go run $(BUILD_FLAGS) cmd/defradb/main.go @@ -201,7 +200,6 @@ test\:names: test\:all: @$(MAKE) test:names @$(MAKE) test:lens - @$(MAKE) test:cli .PHONY: test\:verbose test\:verbose: @@ -232,11 +230,6 @@ test\:lens: @$(MAKE) deps:lens gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... $(TEST_FLAGS) -.PHONY: test\:cli -test\:cli: - @$(MAKE) deps:lens - gotestsum --format testname -- ./$(CLI_TEST_DIRECTORY)/... $(TEST_FLAGS) - # Using go-acc to ensure integration tests are included. # Usage: `make test:coverage` or `make test:coverage path="{pathToPackage}"` # Example: `make test:coverage path="./api/..."` diff --git a/cli/backup_export.go b/cli/backup_export.go index c2e9416d3a..f1ca1dc953 100644 --- a/cli/backup_export.go +++ b/cli/backup_export.go @@ -16,12 +16,11 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) const jsonFileType = "json" -func MakeBackupExportCommand(cfg *config.Config) *cobra.Command { +func MakeBackupExportCommand() *cobra.Command { var collections []string var pretty bool var format string @@ -38,7 +37,7 @@ If the --pretty flag is provided, the JSON will be pretty printed. 
Example: export data for the 'Users' collection: defradb client export --collection Users user_data.json`, Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) if !isValidExportFormat(format) { diff --git a/cli/backup_import.go b/cli/backup_import.go index 62c1e5d1fd..770fccc52e 100644 --- a/cli/backup_import.go +++ b/cli/backup_import.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { +func MakeBackupImportCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "import ", Short: "Import a JSON data file to the database", @@ -26,11 +25,8 @@ func MakeBackupImportCommand(cfg *config.Config) *cobra.Command { Example: import data to the database: defradb client import user_data.json`, Args: cobra.ExactArgs(1), - RunE: func(cmd *cobra.Command, args []string) (err error) { + RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) - if err != nil { - return err - } return store.BasicImport(cmd.Context(), args[0]) }, } diff --git a/cli/cli.go b/cli/cli.go index dbef18a4b4..71a08a2b92 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -26,16 +26,16 @@ var log = logging.MustNewLogger("cli") func NewDefraCommand(cfg *config.Config) *cobra.Command { p2p_collection := MakeP2PCollectionCommand() p2p_collection.AddCommand( - MakeP2PCollectionAddCommand(cfg), - MakeP2PCollectionRemoveCommand(cfg), - MakeP2PCollectionGetallCommand(cfg), + MakeP2PCollectionAddCommand(), + MakeP2PCollectionRemoveCommand(), + MakeP2PCollectionGetallCommand(), ) p2p_replicator := MakeP2PReplicatorCommand() p2p_replicator.AddCommand( - MakeP2PReplicatorGetallCommand(cfg), - MakeP2PReplicatorSetCommand(cfg), - MakeP2PReplicatorDeleteCommand(cfg), + MakeP2PReplicatorGetallCommand(), + MakeP2PReplicatorSetCommand(), + MakeP2PReplicatorDeleteCommand(), ) p2p := MakeP2PCommand() @@ -46,28 +46,28 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { schema_migrate := MakeSchemaMigrationCommand() schema_migrate.AddCommand( - MakeSchemaMigrationSetCommand(cfg), - MakeSchemaMigrationGetCommand(cfg), + MakeSchemaMigrationSetCommand(), + MakeSchemaMigrationGetCommand(), ) schema := MakeSchemaCommand() schema.AddCommand( - MakeSchemaAddCommand(cfg), - MakeSchemaPatchCommand(cfg), + MakeSchemaAddCommand(), + MakeSchemaPatchCommand(), schema_migrate, ) index := MakeIndexCommand() index.AddCommand( - MakeIndexCreateCommand(cfg), - MakeIndexDropCommand(cfg), - MakeIndexListCommand(cfg), + MakeIndexCreateCommand(), + MakeIndexDropCommand(), + MakeIndexListCommand(), ) backup := MakeBackupCommand() backup.AddCommand( - MakeBackupExportCommand(cfg), - MakeBackupImportCommand(cfg), + MakeBackupExportCommand(), + MakeBackupImportCommand(), ) tx := MakeTxCommand() @@ -77,15 +77,26 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { MakeTxDiscardCommand(cfg), ) + document := MakeDocumentCommand() + document.AddCommand( + MakeDocumentGetCommand(), + MakeDocumentKeysCommand(), + MakeDocumentDeleteCommand(), + MakeDocumentUpdateCommand(), + MakeDocumentSaveCommand(), + ) + client := MakeClientCommand(cfg) client.AddCommand( - MakeDumpCommand(cfg), - MakeRequestCommand(cfg), + MakeDumpCommand(), + MakeRequestCommand(), + MakeCollectionCommand(), schema, 
index, p2p, backup, tx, + document, ) root := MakeRootCommand(cfg) diff --git a/cli/client.go b/cli/client.go index de8b4a66bb..8866294f69 100644 --- a/cli/client.go +++ b/cli/client.go @@ -11,11 +11,9 @@ package cli import ( - "context" + "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" - "github.com/spf13/cobra" ) func MakeClientCommand(cfg *config.Config) *cobra.Command { @@ -29,19 +27,10 @@ Execute queries, add schema types, obtain node info, etc.`, if err := loadConfig(cfg); err != nil { return err } - db, err := http.NewClient(cfg.API.Address) - if err != nil { + if err := setTransactionContext(cmd, cfg, txID); err != nil { return err } - ctx := cmd.Context() - if txID != 0 { - ctx = context.WithValue(ctx, storeContextKey, db.WithTxnID(txID)) - } else { - ctx = context.WithValue(ctx, storeContextKey, db) - } - ctx = context.WithValue(ctx, dbContextKey, db) - cmd.SetContext(ctx) - return nil + return setStoreContext(cmd, cfg) }, } cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") diff --git a/cli/collection.go b/cli/collection.go new file mode 100644 index 0000000000..ddc7a3cfa2 --- /dev/null +++ b/cli/collection.go @@ -0,0 +1,66 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" +) + +func MakeCollectionCommand() *cobra.Command { + var name string + var schemaID string + var versionID string + var cmd = &cobra.Command{ + Use: "collection", + Short: "View detailed collection info.", + Long: `View detailed collection info.`, + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + switch { + case name != "": + col, err := store.GetCollectionByName(cmd.Context(), name) + if err != nil { + return err + } + return writeJSON(cmd, col.Description()) + case schemaID != "": + col, err := store.GetCollectionBySchemaID(cmd.Context(), schemaID) + if err != nil { + return err + } + return writeJSON(cmd, col.Description()) + case versionID != "": + col, err := store.GetCollectionByVersionID(cmd.Context(), versionID) + if err != nil { + return err + } + return writeJSON(cmd, col.Description()) + default: + cols, err := store.GetAllCollections(cmd.Context()) + if err != nil { + return err + } + colDesc := make([]client.CollectionDescription, len(cols)) + for i, col := range cols { + colDesc[i] = col.Description() + } + return writeJSON(cmd, colDesc) + } + }, + } + cmd.Flags().StringVar(&name, "name", "", "Get collection by name") + cmd.Flags().StringVar(&schemaID, "schema", "", "Get collection by schema ID") + cmd.Flags().StringVar(&versionID, "version", "", "Get collection by version ID") + return cmd +} diff --git a/cli/document.go b/cli/document.go new file mode 100644 index 0000000000..ca0e966ccd --- /dev/null +++ b/cli/document.go @@ -0,0 +1,25 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeDocumentCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "document", + Short: "Create, read, update, and delete documents.", + Long: `Create, read, update, and delete documents.`, + } + + return cmd +} diff --git a/cli/document_create.go b/cli/document_create.go new file mode 100644 index 0000000000..e806e7492e --- /dev/null +++ b/cli/document_create.go @@ -0,0 +1,70 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "encoding/json" + "fmt" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeDocumentCreateCommand() *cobra.Command { + var collection string + var cmd = &cobra.Command{ + Use: "create --collection ", + Short: "Create a new docment.", + Long: `Create a new docment.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + var docMap any + if err := json.Unmarshal([]byte(args[0]), &docMap); err != nil { + return err + } + + switch t := docMap.(type) { + case map[string]any: + doc, err := client.NewDocFromMap(t) + if err != nil { + return err + } + return col.Create(cmd.Context(), doc) + case []map[string]any: + docs := make([]*client.Document, len(t)) + for i, v := range t { + doc, err := client.NewDocFromMap(v) + if err != nil { + return err + } + docs[i] = doc + } + return col.CreateMany(cmd.Context(), docs) + default: + return fmt.Errorf("invalid document") + } + }, + } + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + return cmd +} diff --git a/cli/document_delete.go b/cli/document_delete.go new file mode 100644 index 0000000000..dbed21beee --- /dev/null +++ b/cli/document_delete.go @@ -0,0 +1,81 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
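The new create command accepts either a single JSON object or a JSON array for batch creation. A minimal sketch of driving it in-process, the same way the test wrapper builds and executes the command tree, is shown below; it assumes a node is already serving the HTTP API at the configured address, and the collection name and document body are placeholders rather than anything defined in this patch.

    package main

    import (
        "log"

        "github.com/sourcenetwork/defradb/cli"
        "github.com/sourcenetwork/defradb/config"
    )

    func main() {
        // Build the full command tree and invoke `client document create`
        // against a node assumed to be reachable at the default API address.
        cmd := cli.NewDefraCommand(config.DefaultConfig())
        cmd.SetArgs([]string{
            "client", "document", "create",
            "--collection", "Users", // hypothetical collection
            `{"name": "Bob", "age": 40}`,
        })
        if err := cmd.Execute(); err != nil {
            log.Fatal(err)
        }
    }
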
+ +package cli + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeDocumentDeleteCommand() *cobra.Command { + var collection string + var keys []string + var filter string + var cmd = &cobra.Command{ + Use: "delete --collection [--filter --key ]", + Short: "Delete documents by key or filter.", + Long: `Delete documents by key or filter`, + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + switch { + case len(keys) == 1: + docKey, err := client.NewDocKeyFromString(keys[0]) + if err != nil { + return err + } + res, err := col.DeleteWithKey(cmd.Context(), docKey) + if err != nil { + return err + } + return writeJSON(cmd, res) + case len(keys) > 1: + docKeys := make([]client.DocKey, len(keys)) + for i, v := range keys { + docKey, err := client.NewDocKeyFromString(v) + if err != nil { + return err + } + docKeys[i] = docKey + } + res, err := col.DeleteWithKeys(cmd.Context(), docKeys) + if err != nil { + return err + } + return writeJSON(cmd, res) + case filter != "": + res, err := col.DeleteWithFilter(cmd.Context(), filter) + if err != nil { + return err + } + return writeJSON(cmd, res) + default: + return fmt.Errorf("document key or filter must be defined") + } + }, + } + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringVar(&filter, "filter", "", "Document filter") + return cmd +} diff --git a/cli/document_get.go b/cli/document_get.go new file mode 100644 index 0000000000..77f81cf33d --- /dev/null +++ b/cli/document_get.go @@ -0,0 +1,56 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
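Both delete and the update command below take the target through a repeatable --key flag; multiple keys can also be packed into a single flag value by comma-joining them, which is the encoding the CLI wrapper later in this patch relies on. A small illustrative helper (deleteArgs is hypothetical, not part of the patch):

    package example

    import (
        "strings"

        "github.com/sourcenetwork/defradb/client"
    )

    // deleteArgs shapes a multi-key delete invocation the way the wrapper's
    // DeleteWithKeys does: document keys are comma-joined into one --key value.
    func deleteArgs(collection string, docKeys []client.DocKey) []string {
        keys := make([]string, len(docKeys))
        for i, k := range docKeys {
            keys[i] = k.String()
        }
        return []string{
            "client", "document", "delete",
            "--collection", collection,
            "--key", strings.Join(keys, ","),
        }
    }
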
+ +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeDocumentGetCommand() *cobra.Command { + var showDeleted bool + var collection string + var cmd = &cobra.Command{ + Use: "get --collection [--show-deleted]", + Short: "View detailed document info.", + Long: `View detailed document info.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + docKey, err := client.NewDocKeyFromString(args[0]) + if err != nil { + return err + } + doc, err := col.Get(cmd.Context(), docKey, showDeleted) + if err != nil { + return err + } + docMap, err := doc.ToMap() + if err != nil { + return err + } + return writeJSON(cmd, docMap) + }, + } + cmd.Flags().BoolVar(&showDeleted, "show-deleted", false, "Show deleted documents") + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + return cmd +} diff --git a/cli/document_keys.go b/cli/document_keys.go new file mode 100644 index 0000000000..f61954ed31 --- /dev/null +++ b/cli/document_keys.go @@ -0,0 +1,55 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/http" +) + +func MakeDocumentKeysCommand() *cobra.Command { + var collection string + var cmd = &cobra.Command{ + Use: "keys --collection ", + Short: "List all collection document keys.", + Long: `List all collection document keys`, + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + docCh, err := col.GetAllDocKeys(cmd.Context()) + if err != nil { + return err + } + for docKey := range docCh { + results := &http.DocKeyResult{ + Key: docKey.Key.String(), + } + if docKey.Err != nil { + results.Error = docKey.Err.Error() + } + writeJSON(cmd, results) //nolint:errcheck + } + return nil + }, + } + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + return cmd +} diff --git a/cli/document_save.go b/cli/document_save.go new file mode 100644 index 0000000000..472bfdaf11 --- /dev/null +++ b/cli/document_save.go @@ -0,0 +1,53 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
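The keys command streams one JSON object per document key rather than a single array, which is what allows the wrapper's GetAllDocKeys further down to decode results incrementally. A minimal consumer of that stream might look like this (readDocKeys is illustrative only):

    package example

    import (
        "encoding/json"
        "errors"
        "io"

        "github.com/sourcenetwork/defradb/http"
    )

    // readDocKeys drains a stream of JSON-encoded DocKeyResult objects,
    // stopping at EOF and surfacing any per-key error reported by the server.
    func readDocKeys(r io.Reader) ([]string, error) {
        dec := json.NewDecoder(r)
        var keys []string
        for {
            var res http.DocKeyResult
            err := dec.Decode(&res)
            if errors.Is(err, io.EOF) {
                return keys, nil
            }
            if err != nil {
                return nil, err
            }
            if res.Error != "" {
                return nil, errors.New(res.Error)
            }
            keys = append(keys, res.Key)
        }
    }
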
+ +package cli + +import ( + "encoding/json" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeDocumentSaveCommand() *cobra.Command { + var collection string + var cmd = &cobra.Command{ + Use: "save --collection ", + Short: "Create or update a docment.", + Long: `Create or update a docment.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + var docMap map[string]any + if err := json.Unmarshal([]byte(args[0]), &docMap); err != nil { + return err + } + doc, err := client.NewDocFromMap(docMap) + if err != nil { + return err + } + return col.Save(cmd.Context(), doc) + }, + } + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + return cmd +} diff --git a/cli/document_update.go b/cli/document_update.go new file mode 100644 index 0000000000..fec39baa1a --- /dev/null +++ b/cli/document_update.go @@ -0,0 +1,82 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "fmt" + + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeDocumentUpdateCommand() *cobra.Command { + var collection string + var keys []string + var filter string + var cmd = &cobra.Command{ + Use: "update --collection [--filter --key ] ", + Short: "Update documents by key or filter.", + Long: `Update documents by key or filter`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + col, err := store.GetCollectionByName(cmd.Context(), collection) + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + switch { + case len(keys) == 1: + docKey, err := client.NewDocKeyFromString(keys[0]) + if err != nil { + return err + } + res, err := col.UpdateWithKey(cmd.Context(), docKey, args[0]) + if err != nil { + return err + } + return writeJSON(cmd, res) + case len(keys) > 1: + docKeys := make([]client.DocKey, len(keys)) + for i, v := range keys { + docKey, err := client.NewDocKeyFromString(v) + if err != nil { + return err + } + docKeys[i] = docKey + } + res, err := col.UpdateWithKeys(cmd.Context(), docKeys, args[0]) + if err != nil { + return err + } + return writeJSON(cmd, res) + case filter != "": + res, err := col.UpdateWithFilter(cmd.Context(), filter, args[0]) + if err != nil { + return err + } + return writeJSON(cmd, res) + default: + return fmt.Errorf("document key or filter must be defined") + } + }, + } + cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") + cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringVar(&filter, "filter", "", "Document filter") + return cmd +} diff --git a/cli/dump.go 
b/cli/dump.go index ac612b4284..a3d155605b 100644 --- a/cli/dump.go +++ b/cli/dump.go @@ -13,19 +13,15 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/config" - "github.com/sourcenetwork/defradb/http" + "github.com/sourcenetwork/defradb/client" ) -func MakeDumpCommand(cfg *config.Config) *cobra.Command { +func MakeDumpCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "dump", Short: "Dump the contents of DefraDB node-side", RunE: func(cmd *cobra.Command, _ []string) (err error) { - db, err := http.NewClient(cfg.API.Address) - if err != nil { - return err - } + db := cmd.Context().Value(dbContextKey).(client.DB) return db.PrintDump(cmd.Context()) }, } diff --git a/cli/index_create.go b/cli/index_create.go index 596c0d643a..13ea19bab1 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -14,10 +14,10 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" ) -func MakeIndexCreateCommand(cfg *config.Config) *cobra.Command { +func MakeIndexCreateCommand() *cobra.Command { var collectionArg string var nameArg string var fieldsArg []string @@ -49,6 +49,9 @@ Example: create a named index for 'Users' collection on 'name' field: if err != nil { return err } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } desc, err = col.CreateIndex(cmd.Context(), desc) if err != nil { return err diff --git a/cli/index_drop.go b/cli/index_drop.go index 5601ae05f2..9af7cfdfc7 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -14,10 +14,10 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" ) -func MakeIndexDropCommand(cfg *config.Config) *cobra.Command { +func MakeIndexDropCommand() *cobra.Command { var collectionArg string var nameArg string var cmd = &cobra.Command{ @@ -35,6 +35,9 @@ Example: drop the index 'UsersByName' for 'Users' collection: if err != nil { return err } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } return col.DropIndex(cmd.Context(), nameArg) }, } diff --git a/cli/index_list.go b/cli/index_list.go index 3fb21c55e1..bf342d2d7f 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -14,10 +14,10 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" ) -func MakeIndexListCommand(cfg *config.Config) *cobra.Command { +func MakeIndexListCommand() *cobra.Command { var collectionArg string var cmd = &cobra.Command{ Use: "list [-c --collection ]", @@ -39,17 +39,20 @@ Example: show all index for 'Users' collection: if err != nil { return err } - cols, err := col.GetIndexes(cmd.Context()) + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + indexes, err := col.GetIndexes(cmd.Context()) if err != nil { return err } - return writeJSON(cmd, cols) + return writeJSON(cmd, indexes) default: - cols, err := store.GetAllIndexes(cmd.Context()) + indexes, err := store.GetAllIndexes(cmd.Context()) if err != nil { return err } - return writeJSON(cmd, cols) + return writeJSON(cmd, indexes) } }, } diff --git a/cli/p2p_collection_add.go 
b/cli/p2p_collection_add.go index 0e6dc202d0..c5417d80c5 100644 --- a/cli/p2p_collection_add.go +++ b/cli/p2p_collection_add.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PCollectionAddCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionAddCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "add [collectionID]", Short: "Add P2P collections", diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index a091e8d43f..85d6e32da0 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PCollectionGetallCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionGetallCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all P2P collections", diff --git a/cli/p2p_collection_remove.go b/cli/p2p_collection_remove.go index 762fd34cf8..9aae42b1b1 100644 --- a/cli/p2p_collection_remove.go +++ b/cli/p2p_collection_remove.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PCollectionRemoveCommand(cfg *config.Config) *cobra.Command { +func MakeP2PCollectionRemoveCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "remove [collectionID]", Short: "Remove P2P collections", diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go index 6958c2a650..6bf6425a51 100644 --- a/cli/p2p_replicator_delete.go +++ b/cli/p2p_replicator_delete.go @@ -15,10 +15,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PReplicatorDeleteCommand(cfg *config.Config) *cobra.Command { +func MakeP2PReplicatorDeleteCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "delete ", Short: "Delete a replicator. 
It will stop synchronizing", diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 2a363bcbdd..9041996902 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PReplicatorGetallCommand(cfg *config.Config) *cobra.Command { +func MakeP2PReplicatorGetallCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all replicators", diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go index f9472dac25..d839d4bed6 100644 --- a/cli/p2p_replicator_set.go +++ b/cli/p2p_replicator_set.go @@ -15,10 +15,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeP2PReplicatorSetCommand(cfg *config.Config) *cobra.Command { +func MakeP2PReplicatorSetCommand() *cobra.Command { var collections []string var cmd = &cobra.Command{ Use: "set [-c, --collection] ", diff --git a/cli/request.go b/cli/request.go index 0a56ae75f8..77e6b9a5b6 100644 --- a/cli/request.go +++ b/cli/request.go @@ -17,7 +17,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" ) @@ -26,7 +25,7 @@ const ( SUB_RESULTS_HEADER = "------ Subscription Results ------\n" ) -func MakeRequestCommand(cfg *config.Config) *cobra.Command { +func MakeRequestCommand() *cobra.Command { var filePath string var cmd = &cobra.Command{ Use: "query [query request]", diff --git a/cli/root.go b/cli/root.go index b844fc6e84..40290279a8 100644 --- a/cli/root.go +++ b/cli/root.go @@ -18,13 +18,6 @@ import ( "github.com/sourcenetwork/defradb/config" ) -type contextKey string - -var ( - dbContextKey = contextKey("db") - storeContextKey = contextKey("store") -) - func MakeRootCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "defradb", diff --git a/cli/schema_add.go b/cli/schema_add.go index 6fe597d77d..5bc2a83e2f 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -18,10 +18,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeSchemaAddCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaAddCommand() *cobra.Command { var schemaFile string var cmd = &cobra.Command{ Use: "add [schema]", diff --git a/cli/schema_migration_get.go b/cli/schema_migration_get.go index ec54773d63..5474ea09d8 100644 --- a/cli/schema_migration_get.go +++ b/cli/schema_migration_get.go @@ -14,10 +14,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeSchemaMigrationGetCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaMigrationGetCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "get", Short: "Gets the schema migrations within DefraDB", diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index 1a38e134c6..7a89d1681b 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -21,11 +21,10 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/errors" ) 
-func MakeSchemaMigrationSetCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaMigrationSetCommand() *cobra.Command { var lensFile string var cmd = &cobra.Command{ Use: "set [src] [dst] [cfg]", diff --git a/cli/schema_patch.go b/cli/schema_patch.go index 9984c9b2b4..ca5e0bf3ac 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -18,10 +18,9 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/config" ) -func MakeSchemaPatchCommand(cfg *config.Config) *cobra.Command { +func MakeSchemaPatchCommand() *cobra.Command { var patchFile string var cmd = &cobra.Command{ diff --git a/cli/utils.go b/cli/utils.go index 48f265cb69..6c8cebb34b 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -11,12 +11,56 @@ package cli import ( + "context" "encoding/json" - "github.com/sourcenetwork/defradb/config" "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/http" +) + +type contextKey string + +var ( + txContextKey = contextKey("tx") + dbContextKey = contextKey("db") + storeContextKey = contextKey("store") ) +// setTransactionContext sets the transaction for the current command context. +func setTransactionContext(cmd *cobra.Command, cfg *config.Config, txId uint64) error { + if txId == 0 { + return nil + } + tx, err := http.NewTransaction(cfg.API.Address, txId) + if err != nil { + return err + } + ctx := cmd.Context() + ctx = context.WithValue(ctx, txContextKey, tx) + cmd.SetContext(ctx) + return nil +} + +// setStoreContext sets the store for the current command context. +func setStoreContext(cmd *cobra.Command, cfg *config.Config) error { + db, err := http.NewClient(cfg.API.Address) + if err != nil { + return err + } + ctx := cmd.Context() + ctx = context.WithValue(ctx, dbContextKey, db) + if tx, ok := ctx.Value(txContextKey).(datastore.Txn); ok { + ctx = context.WithValue(ctx, storeContextKey, db.WithTxn(tx)) + } else { + ctx = context.WithValue(ctx, storeContextKey, db) + } + cmd.SetContext(ctx) + return nil +} + // loadConfig loads the rootDir containing the configuration file, // otherwise warn about it and load a default configuration. 
func loadConfig(cfg *config.Config) error { diff --git a/cli/wrapper.go b/cli/wrapper.go index 7131247f89..7fa95f72e7 100644 --- a/cli/wrapper.go +++ b/cli/wrapper.go @@ -179,26 +179,70 @@ func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) er } func (w *Wrapper) LensRegistry() client.LensRegistry { - return &lensWrapper{ - lens: w.store.LensRegistry(), - cmd: w.cmd, - } + return &LensRegistry{w.cmd, w.store.LensRegistry()} } func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { - return w.store.GetCollectionByName(ctx, name) + args := []string{"client", "collection"} + args = append(args, "--name", name) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var colDesc client.CollectionDescription + if err := json.Unmarshal(data, &colDesc); err != nil { + return nil, err + } + return &Collection{w.cmd, colDesc}, nil } func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { - return w.store.GetCollectionBySchemaID(ctx, schemaId) + args := []string{"client", "collection"} + args = append(args, "--schema", schemaId) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var colDesc client.CollectionDescription + if err := json.Unmarshal(data, &colDesc); err != nil { + return nil, err + } + return &Collection{w.cmd, colDesc}, nil } func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { - return w.store.GetCollectionByVersionID(ctx, versionId) + args := []string{"client", "collection"} + args = append(args, "--versionId", versionId) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var colDesc client.CollectionDescription + if err := json.Unmarshal(data, &colDesc); err != nil { + return nil, err + } + return &Collection{w.cmd, colDesc}, nil } func (w *Wrapper) GetAllCollections(ctx context.Context) ([]client.Collection, error) { - return w.store.GetAllCollections(ctx) + args := []string{"client", "collection"} + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var colDesc []client.CollectionDescription + if err := json.Unmarshal(data, &colDesc); err != nil { + return nil, err + } + cols := make([]client.Collection, len(colDesc)) + for i, v := range colDesc { + cols[i] = &Collection{w.cmd, v} + } + return cols, err } func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName][]client.IndexDescription, error) { @@ -208,11 +252,11 @@ func (w *Wrapper) GetAllIndexes(ctx context.Context) (map[client.CollectionName] if err != nil { return nil, err } - var index map[client.CollectionName][]client.IndexDescription - if err := json.Unmarshal(data, &index); err != nil { + var indexes map[client.CollectionName][]client.IndexDescription + if err := json.Unmarshal(data, &indexes); err != nil { return nil, err } - return index, nil + return indexes, nil } func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.RequestResult { @@ -310,7 +354,7 @@ func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, err if err != nil { return nil, err } - return &TxWrapper{tx, w.cmd}, nil + return &Transaction{tx, w.cmd}, nil } func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { @@ -333,7 +377,7 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor if err != nil { return nil, 
err } - return &TxWrapper{tx, w.cmd}, nil + return &Transaction{tx, w.cmd}, nil } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { diff --git a/cli/wrapper_collection.go b/cli/wrapper_collection.go new file mode 100644 index 0000000000..65ee035f13 --- /dev/null +++ b/cli/wrapper_collection.go @@ -0,0 +1,415 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "context" + "encoding/json" + "fmt" + "strings" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/http" +) + +var _ client.Collection = (*Collection)(nil) + +type Collection struct { + cmd *cliWrapper + desc client.CollectionDescription +} + +func (c *Collection) Description() client.CollectionDescription { + return c.desc +} + +func (c *Collection) Name() string { + return c.desc.Name +} + +func (c *Collection) Schema() client.SchemaDescription { + return c.desc.Schema +} + +func (c *Collection) ID() uint32 { + return c.desc.ID +} + +func (c *Collection) SchemaID() string { + return c.desc.Schema.SchemaID +} + +func (c *Collection) Create(ctx context.Context, doc *client.Document) error { + args := []string{"client", "document", "create"} + args = append(args, "--collection", c.desc.Name) + + docMap, err := doc.ToMap() + if err != nil { + return err + } + document, err := json.Marshal(docMap) + if err != nil { + return err + } + args = append(args, string(document)) + + _, err = c.cmd.execute(ctx, args) + if err != nil { + return err + } + doc.Clean() + return nil +} + +func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { + args := []string{"client", "document", "create"} + args = append(args, "--collection", c.desc.Name) + + docMapList := make([]map[string]any, len(docs)) + for i, doc := range docs { + docMap, err := doc.ToMap() + if err != nil { + return err + } + docMapList[i] = docMap + } + documents, err := json.Marshal(docMapList) + if err != nil { + return err + } + args = append(args, string(documents)) + + _, err = c.cmd.execute(ctx, args) + if err != nil { + return err + } + for _, doc := range docs { + doc.Clean() + } + return nil +} + +func (c *Collection) Update(ctx context.Context, doc *client.Document) error { + docMap, err := doc.ToMap() + if err != nil { + return err + } + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + } + updater, err := json.Marshal(docMap) + if err != nil { + return err + } + _, err = c.UpdateWithKey(ctx, doc.Key(), string(updater)) + if err != nil { + return err + } + doc.Clean() + return nil +} + +func (c *Collection) Save(ctx context.Context, doc *client.Document) error { + args := []string{"client", "document", "save"} + args = append(args, "--collection", c.desc.Name) + + docMap, err := doc.ToMap() + if err != nil { + return err + } + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + } + document, err := json.Marshal(docMap) + if err != nil { + return err + } + args = append(args, string(document)) + + _, err = 
c.cmd.execute(ctx, args) + if err != nil { + return err + } + doc.Clean() + return nil +} + +func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { + res, err := c.DeleteWithKey(ctx, docKey) + if err != nil { + return false, err + } + return res.Count == 1, nil +} + +func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { + _, err := c.Get(ctx, docKey, false) + if err != nil { + return false, err + } + return true, nil +} + +func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { + switch t := target.(type) { + case string, map[string]any, *request.Filter: + return c.UpdateWithFilter(ctx, t, updater) + case client.DocKey: + return c.UpdateWithKey(ctx, t, updater) + case []client.DocKey: + return c.UpdateWithKeys(ctx, t, updater) + default: + return nil, client.ErrInvalidUpdateTarget + } +} + +func (c *Collection) updateWith( + ctx context.Context, + args []string, +) (*client.UpdateResult, error) { + data, err := c.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var res client.UpdateResult + if err := json.Unmarshal(data, &res); err != nil { + return nil, err + } + return &res, nil +} + +func (c *Collection) UpdateWithFilter( + ctx context.Context, + filter any, + updater string, +) (*client.UpdateResult, error) { + args := []string{"client", "document", "update"} + args = append(args, "--collection", c.desc.Name) + + filterJSON, err := json.Marshal(filter) + if err != nil { + return nil, err + } + args = append(args, "--filter", string(filterJSON)) + args = append(args, updater) + + return c.updateWith(ctx, args) +} + +func (c *Collection) UpdateWithKey( + ctx context.Context, + key client.DocKey, + updater string, +) (*client.UpdateResult, error) { + args := []string{"client", "document", "update"} + args = append(args, "--collection", c.desc.Name) + args = append(args, "--key", key.String()) + args = append(args, updater) + + return c.updateWith(ctx, args) +} + +func (c *Collection) UpdateWithKeys( + ctx context.Context, + docKeys []client.DocKey, + updater string, +) (*client.UpdateResult, error) { + args := []string{"client", "document", "update"} + args = append(args, "--collection", c.desc.Name) + + keys := make([]string, len(docKeys)) + for i, v := range docKeys { + keys[i] = v.String() + } + args = append(args, "--key", strings.Join(keys, ",")) + args = append(args, updater) + + return c.updateWith(ctx, args) +} + +func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { + switch t := target.(type) { + case string, map[string]any, *request.Filter: + return c.DeleteWithFilter(ctx, t) + case client.DocKey: + return c.DeleteWithKey(ctx, t) + case []client.DocKey: + return c.DeleteWithKeys(ctx, t) + default: + return nil, client.ErrInvalidDeleteTarget + } +} + +func (c *Collection) deleteWith( + ctx context.Context, + args []string, +) (*client.DeleteResult, error) { + data, err := c.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var res client.DeleteResult + if err := json.Unmarshal(data, &res); err != nil { + return nil, err + } + return &res, nil +} + +func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { + args := []string{"client", "document", "delete"} + args = append(args, "--collection", c.desc.Name) + + filterJSON, err := json.Marshal(filter) + if err != nil { + return nil, err + } + args = append(args, "--filter", string(filterJSON)) + 
+ return c.deleteWith(ctx, args) +} + +func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { + args := []string{"client", "document", "delete"} + args = append(args, "--collection", c.desc.Name) + args = append(args, "--key", docKey.String()) + + return c.deleteWith(ctx, args) +} + +func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { + args := []string{"client", "document", "delete"} + args = append(args, "--collection", c.desc.Name) + + keys := make([]string, len(docKeys)) + for i, v := range docKeys { + keys[i] = v.String() + } + args = append(args, "--key", strings.Join(keys, ",")) + + return c.deleteWith(ctx, args) +} + +func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { + args := []string{"client", "document", "get"} + args = append(args, "--collection", c.desc.Name) + args = append(args, key.String()) + + if showDeleted { + args = append(args, "--show-deleted") + } + + data, err := c.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var docMap map[string]any + if err := json.Unmarshal(data, &docMap); err != nil { + return nil, err + } + return client.NewDocFromMap(docMap) +} + +func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { + return &Collection{ + cmd: c.cmd.withTxn(tx), + desc: c.desc, + } +} + +func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { + args := []string{"client", "document", "keys"} + args = append(args, "--collection", c.desc.Name) + + stdOut, _, err := c.cmd.executeStream(ctx, args) + if err != nil { + return nil, err + } + docKeyCh := make(chan client.DocKeysResult) + + go func() { + dec := json.NewDecoder(stdOut) + defer close(docKeyCh) + + for { + var res http.DocKeyResult + if err := dec.Decode(&res); err != nil { + return + } + key, err := client.NewDocKeyFromString(res.Key) + if err != nil { + return + } + docKey := client.DocKeysResult{ + Key: key, + } + if res.Error != "" { + docKey.Err = fmt.Errorf(res.Error) + } + docKeyCh <- docKey + } + }() + + return docKeyCh, nil +} + +func (c *Collection) CreateIndex( + ctx context.Context, + indexDesc client.IndexDescription, +) (index client.IndexDescription, err error) { + args := []string{"client", "index", "create"} + args = append(args, "--collection", c.desc.Name) + args = append(args, "--name", indexDesc.Name) + + fields := make([]string, len(indexDesc.Fields)) + for i := range indexDesc.Fields { + fields[i] = indexDesc.Fields[i].Name + } + args = append(args, "--fields", strings.Join(fields, ",")) + + data, err := c.cmd.execute(ctx, args) + if err != nil { + return index, err + } + if err := json.Unmarshal(data, &index); err != nil { + return index, err + } + return index, nil +} + +func (c *Collection) DropIndex(ctx context.Context, indexName string) error { + args := []string{"client", "index", "drop"} + args = append(args, "--collection", c.desc.Name) + args = append(args, "--name", indexName) + + _, err := c.cmd.execute(ctx, args) + return err +} + +func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { + args := []string{"client", "index", "list"} + args = append(args, "--collection", c.desc.Name) + + data, err := c.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var indexes []client.IndexDescription + if err := json.Unmarshal(data, &indexes); err != nil { + return nil, err + } + return indexes, nil +} 
diff --git a/cli/wrapper_lens.go b/cli/wrapper_lens.go index c1e865aaa8..738ea32223 100644 --- a/cli/wrapper_lens.go +++ b/cli/wrapper_lens.go @@ -14,26 +14,27 @@ import ( "context" "encoding/json" + "github.com/sourcenetwork/immutable/enumerable" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/immutable/enumerable" ) -var _ client.LensRegistry = (*lensWrapper)(nil) +var _ client.LensRegistry = (*LensRegistry)(nil) -type lensWrapper struct { - lens client.LensRegistry +type LensRegistry struct { cmd *cliWrapper + lens client.LensRegistry } -func (w *lensWrapper) WithTxn(tx datastore.Txn) client.LensRegistry { - return &lensWrapper{ +func (w *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { + return &LensRegistry{ lens: w.lens.WithTxn(tx), cmd: w.cmd.withTxn(tx), } } -func (w *lensWrapper) SetMigration(ctx context.Context, config client.LensConfig) error { +func (w *LensRegistry) SetMigration(ctx context.Context, config client.LensConfig) error { args := []string{"client", "schema", "migration", "set"} args = append(args, config.SourceSchemaVersionID) args = append(args, config.DestinationSchemaVersionID) @@ -48,11 +49,11 @@ func (w *lensWrapper) SetMigration(ctx context.Context, config client.LensConfig return err } -func (w *lensWrapper) ReloadLenses(ctx context.Context) error { +func (w *LensRegistry) ReloadLenses(ctx context.Context) error { return w.lens.ReloadLenses(ctx) } -func (w *lensWrapper) MigrateUp( +func (w *LensRegistry) MigrateUp( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -60,7 +61,7 @@ func (w *lensWrapper) MigrateUp( return w.lens.MigrateUp(ctx, src, schemaVersionID) } -func (w *lensWrapper) MigrateDown( +func (w *LensRegistry) MigrateDown( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -68,7 +69,7 @@ func (w *lensWrapper) MigrateDown( return w.lens.MigrateDown(ctx, src, schemaVersionID) } -func (w *lensWrapper) Config(ctx context.Context) ([]client.LensConfig, error) { +func (w *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { args := []string{"client", "schema", "migration", "get"} data, err := w.cmd.execute(ctx, args) @@ -82,6 +83,6 @@ func (w *lensWrapper) Config(ctx context.Context) ([]client.LensConfig, error) { return cfgs, nil } -func (w *lensWrapper) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { +func (w *LensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { return w.lens.HasMigration(ctx, schemaVersionID) } diff --git a/cli/wrapper_tx.go b/cli/wrapper_tx.go index de85ece633..6656c7b058 100644 --- a/cli/wrapper_tx.go +++ b/cli/wrapper_tx.go @@ -17,18 +17,18 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ datastore.Txn = (*TxWrapper)(nil) +var _ datastore.Txn = (*Transaction)(nil) -type TxWrapper struct { +type Transaction struct { tx datastore.Txn cmd *cliWrapper } -func (w *TxWrapper) ID() uint64 { +func (w *Transaction) ID() uint64 { return w.tx.ID() } -func (w *TxWrapper) Commit(ctx context.Context) error { +func (w *Transaction) Commit(ctx context.Context) error { args := []string{"client", "tx", "commit"} args = append(args, fmt.Sprintf("%d", w.tx.ID())) @@ -36,41 +36,41 @@ func (w *TxWrapper) Commit(ctx context.Context) error { return err } -func (w *TxWrapper) Discard(ctx context.Context) { +func (w *Transaction) Discard(ctx context.Context) { 
args := []string{"client", "tx", "discard"} args = append(args, fmt.Sprintf("%d", w.tx.ID())) - w.cmd.execute(ctx, args) + w.cmd.execute(ctx, args) //nolint:errcheck } -func (w *TxWrapper) OnSuccess(fn func()) { +func (w *Transaction) OnSuccess(fn func()) { w.tx.OnSuccess(fn) } -func (w *TxWrapper) OnError(fn func()) { +func (w *Transaction) OnError(fn func()) { w.tx.OnError(fn) } -func (w *TxWrapper) OnDiscard(fn func()) { +func (w *Transaction) OnDiscard(fn func()) { w.tx.OnDiscard(fn) } -func (w *TxWrapper) Rootstore() datastore.DSReaderWriter { +func (w *Transaction) Rootstore() datastore.DSReaderWriter { return w.tx.Rootstore() } -func (w *TxWrapper) Datastore() datastore.DSReaderWriter { +func (w *Transaction) Datastore() datastore.DSReaderWriter { return w.tx.Datastore() } -func (w *TxWrapper) Headstore() datastore.DSReaderWriter { +func (w *Transaction) Headstore() datastore.DSReaderWriter { return w.tx.Headstore() } -func (w *TxWrapper) DAGstore() datastore.DAGStore { +func (w *Transaction) DAGstore() datastore.DAGStore { return w.tx.DAGstore() } -func (w *TxWrapper) Systemstore() datastore.DSReaderWriter { +func (w *Transaction) Systemstore() datastore.DSReaderWriter { return w.tx.Systemstore() } diff --git a/go.mod b/go.mod index 087070597f..d9d6ade38d 100644 --- a/go.mod +++ b/go.mod @@ -25,7 +25,6 @@ require ( github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 github.com/libp2p/go-libp2p v0.29.2 - github.com/libp2p/go-libp2p-core v0.20.0 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.0 github.com/libp2p/go-libp2p-pubsub v0.9.3 @@ -127,6 +126,7 @@ require ( github.com/libp2p/go-flow-metrics v0.1.0 // indirect github.com/libp2p/go-libp2p-asn-util v0.3.0 // indirect github.com/libp2p/go-libp2p-connmgr v0.4.0 // indirect + github.com/libp2p/go-libp2p-core v0.20.0 // indirect github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.0 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect diff --git a/http/client.go b/http/client.go index a58dd4d9db..e852787830 100644 --- a/http/client.go +++ b/http/client.go @@ -88,11 +88,6 @@ func (c *Client) WithTxn(tx datastore.Txn) client.Store { return &Client{client} } -func (c *Client) WithTxnID(id uint64) client.Store { - client := c.http.withTxn(id) - return &Client{client} -} - func (c *Client) SetReplicator(ctx context.Context, rep client.Replicator) error { methodURL := c.http.baseURL.JoinPath("p2p", "replicators") diff --git a/logging/registry.go b/logging/registry.go index 7cd7b808a2..9410498a72 100644 --- a/logging/registry.go +++ b/logging/registry.go @@ -44,6 +44,9 @@ func setConfig(newConfig Config) Config { } func updateLoggers(config Config) { + registryMutex.Lock() + defer registryMutex.Unlock() + for loggerName, loggers := range registry { newLoggerConfig := config.forLogger(loggerName) From d5d9afb843e707f048bf873316d08e99ccf8f7cf Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 30 Aug 2023 17:57:30 -0700 Subject: [PATCH 047/107] add missing lens commands --- cli/cli.go | 3 ++ cli/schema_migration_down.go | 50 +++++++++++++++++++++++++++ cli/schema_migration_reload.go | 36 +++++++++++++++++++ cli/schema_migration_up.go | 50 +++++++++++++++++++++++++++ cli/wrapper.go | 2 +- cli/wrapper_lens.go | 63 ++++++++++++++++++++++++++++------ 6 files changed, 193 insertions(+), 11 
deletions(-) create mode 100644 cli/schema_migration_down.go create mode 100644 cli/schema_migration_reload.go create mode 100644 cli/schema_migration_up.go diff --git a/cli/cli.go b/cli/cli.go index 71a08a2b92..30e9270c58 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -48,6 +48,9 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { schema_migrate.AddCommand( MakeSchemaMigrationSetCommand(), MakeSchemaMigrationGetCommand(), + MakeSchemaMigrationReloadCommand(), + MakeSchemaMigrationUpCommand(), + MakeSchemaMigrationDownCommand(), ) schema := MakeSchemaCommand() diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go new file mode 100644 index 0000000000..854d9fdfef --- /dev/null +++ b/cli/schema_migration_down.go @@ -0,0 +1,50 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "encoding/json" + + "github.com/sourcenetwork/immutable/enumerable" + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeSchemaMigrationDownCommand() *cobra.Command { + var schemaVersionID string + var cmd = &cobra.Command{ + Use: "down --version ", + Short: "Reverse a migration on the specified schema version.", + Long: `Reverse a migration on the specified schema version.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + var src enumerable.Enumerable[map[string]any] + if err := json.Unmarshal([]byte(args[0]), &src); err != nil { + return err + } + lens := store.LensRegistry() + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + lens = lens.WithTxn(tx) + } + out, err := lens.MigrateDown(cmd.Context(), src, schemaVersionID) + if err != nil { + return err + } + return writeJSON(cmd, out) + }, + } + cmd.Flags().StringVar(&schemaVersionID, "version", "", "Schema version id") + return cmd +} diff --git a/cli/schema_migration_reload.go b/cli/schema_migration_reload.go new file mode 100644 index 0000000000..d009d23f76 --- /dev/null +++ b/cli/schema_migration_reload.go @@ -0,0 +1,36 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
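Both the down command above and the up command below expect a --version flag plus the documents to transform as a single JSON array argument, which is how the LensRegistry wrapper later in this patch invokes them. A hypothetical helper shaping that invocation:

    package example

    import "encoding/json"

    // migrationDownArgs builds the argument list for `client schema migration down`;
    // the schema version ID and documents are supplied by the caller.
    func migrationDownArgs(schemaVersionID string, docs []map[string]any) ([]string, error) {
        payload, err := json.Marshal(docs)
        if err != nil {
            return nil, err
        }
        return []string{
            "client", "schema", "migration", "down",
            "--version", schemaVersionID,
            string(payload),
        }, nil
    }
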
+ +package cli + +import ( + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeSchemaMigrationReloadCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "reload", + Short: "Reload the schema migrations within DefraDB", + Long: `Reload the schema migrations within DefraDB`, + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + lens := store.LensRegistry() + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + lens = lens.WithTxn(tx) + } + return lens.ReloadLenses(cmd.Context()) + }, + } + return cmd +} diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go new file mode 100644 index 0000000000..c56321d6ce --- /dev/null +++ b/cli/schema_migration_up.go @@ -0,0 +1,50 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "encoding/json" + + "github.com/sourcenetwork/immutable/enumerable" + "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/datastore" +) + +func MakeSchemaMigrationUpCommand() *cobra.Command { + var schemaVersionID string + var cmd = &cobra.Command{ + Use: "up --version ", + Short: "Runs a migration on the specified schema version.", + Long: `Runs a migration on the specified schema version.`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + store := cmd.Context().Value(storeContextKey).(client.Store) + + var src enumerable.Enumerable[map[string]any] + if err := json.Unmarshal([]byte(args[0]), &src); err != nil { + return err + } + lens := store.LensRegistry() + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + lens = lens.WithTxn(tx) + } + out, err := lens.MigrateUp(cmd.Context(), src, schemaVersionID) + if err != nil { + return err + } + return writeJSON(cmd, out) + }, + } + cmd.Flags().StringVar(&schemaVersionID, "version", "", "Schema version id") + return cmd +} diff --git a/cli/wrapper.go b/cli/wrapper.go index 7fa95f72e7..6aac0ec522 100644 --- a/cli/wrapper.go +++ b/cli/wrapper.go @@ -179,7 +179,7 @@ func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) er } func (w *Wrapper) LensRegistry() client.LensRegistry { - return &LensRegistry{w.cmd, w.store.LensRegistry()} + return &LensRegistry{w.cmd} } func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { diff --git a/cli/wrapper_lens.go b/cli/wrapper_lens.go index 738ea32223..d2b68e5e76 100644 --- a/cli/wrapper_lens.go +++ b/cli/wrapper_lens.go @@ -23,15 +23,11 @@ import ( var _ client.LensRegistry = (*LensRegistry)(nil) type LensRegistry struct { - cmd *cliWrapper - lens client.LensRegistry + cmd *cliWrapper } func (w *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { - return &LensRegistry{ - lens: w.lens.WithTxn(tx), - cmd: w.cmd.withTxn(tx), - } + return &LensRegistry{w.cmd.withTxn(tx)} } func (w *LensRegistry) SetMigration(ctx context.Context, config client.LensConfig) error { @@ -50,7 +46,10 @@ func (w 
*LensRegistry) SetMigration(ctx context.Context, config client.LensConfi } func (w *LensRegistry) ReloadLenses(ctx context.Context) error { - return w.lens.ReloadLenses(ctx) + args := []string{"client", "schema", "migration", "reload"} + + _, err := w.cmd.execute(ctx, args) + return err } func (w *LensRegistry) MigrateUp( @@ -58,7 +57,24 @@ func (w *LensRegistry) MigrateUp( src enumerable.Enumerable[map[string]any], schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - return w.lens.MigrateUp(ctx, src, schemaVersionID) + args := []string{"client", "schema", "migration", "up"} + args = append(args, "--version", schemaVersionID) + + srcJSON, err := json.Marshal(src) + if err != nil { + return nil, err + } + args = append(args, string(srcJSON)) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var out enumerable.Enumerable[map[string]any] + if err := json.Unmarshal(data, &out); err != nil { + return nil, err + } + return out, nil } func (w *LensRegistry) MigrateDown( @@ -66,7 +82,24 @@ func (w *LensRegistry) MigrateDown( src enumerable.Enumerable[map[string]any], schemaVersionID string, ) (enumerable.Enumerable[map[string]any], error) { - return w.lens.MigrateDown(ctx, src, schemaVersionID) + args := []string{"client", "schema", "migration", "down"} + args = append(args, "--version", schemaVersionID) + + srcJSON, err := json.Marshal(src) + if err != nil { + return nil, err + } + args = append(args, string(srcJSON)) + + data, err := w.cmd.execute(ctx, args) + if err != nil { + return nil, err + } + var out enumerable.Enumerable[map[string]any] + if err := json.Unmarshal(data, &out); err != nil { + return nil, err + } + return out, nil } func (w *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { @@ -84,5 +117,15 @@ func (w *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) } func (w *LensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { - return w.lens.HasMigration(ctx, schemaVersionID) + cfgs, err := w.Config(ctx) + if err != nil { + return false, err + } + found := false + for _, cfg := range cfgs { + if cfg.SourceSchemaVersionID == schemaVersionID { + found = true + } + } + return found, nil } From 3333e6aa36e9aa79fbf0ef0113a69700bee13b9c Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 31 Aug 2023 09:48:11 -0700 Subject: [PATCH 048/107] remove todo comment --- cli/wrapper.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/cli/wrapper.go b/cli/wrapper.go index 6aac0ec522..2191bd70ec 100644 --- a/cli/wrapper.go +++ b/cli/wrapper.go @@ -43,8 +43,6 @@ func NewWrapper(db client.DB) *Wrapper { httpServer := httptest.NewServer(handler) cmd := newCliWrapper(httpServer.URL) - // TODO use http.Wrapper here to make a lot of this obsolete - return &Wrapper{ db: db, store: db, From 8146640ef88145a28b32bcc5d10a2e9babd86e13 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 31 Aug 2023 10:05:36 -0700 Subject: [PATCH 049/107] update http client names to better match interfaces --- http/client.go | 14 ++++----- http/client_collection.go | 62 +++++++++++++++++++-------------------- http/client_lens.go | 22 +++++++------- http/client_tx.go | 28 +++++++++--------- 4 files changed, 63 insertions(+), 63 deletions(-) diff --git a/http/client.go b/http/client.go index 8167f693d1..4e983842c8 100644 --- a/http/client.go +++ b/http/client.go @@ -61,7 +61,7 @@ func (c *Client) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, erro if err := 
c.http.requestJson(req, &txRes); err != nil { return nil, err } - return &TxClient{txRes.ID, c.http}, nil + return &Transaction{txRes.ID, c.http}, nil } func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { @@ -81,7 +81,7 @@ func (c *Client) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore if err := c.http.requestJson(req, &txRes); err != nil { return nil, err } - return &TxClient{txRes.ID, c.http}, nil + return &Transaction{txRes.ID, c.http}, nil } func (c *Client) WithTxn(tx datastore.Txn) client.Store { @@ -229,7 +229,7 @@ func (c *Client) SetMigration(ctx context.Context, config client.LensConfig) err } func (c *Client) LensRegistry() client.LensRegistry { - return &LensClient{c.http} + return &LensRegistry{c.http} } func (c *Client) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { @@ -244,7 +244,7 @@ func (c *Client) GetCollectionByName(ctx context.Context, name client.Collection if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { @@ -259,7 +259,7 @@ func (c *Client) GetCollectionBySchemaID(ctx context.Context, schemaId string) ( if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { @@ -274,7 +274,7 @@ func (c *Client) GetCollectionByVersionID(ctx context.Context, versionId string) if err := c.http.requestJson(req, &description); err != nil { return nil, err } - return &CollectionClient{c.http, description}, nil + return &Collection{c.http, description}, nil } func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, error) { @@ -290,7 +290,7 @@ func (c *Client) GetAllCollections(ctx context.Context) ([]client.Collection, er } collections := make([]client.Collection, len(descriptions)) for i, d := range descriptions { - collections[i] = &CollectionClient{c.http, d} + collections[i] = &Collection{c.http, d} } return collections, nil } diff --git a/http/client_collection.go b/http/client_collection.go index e54325e2bf..be504220f3 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -25,35 +25,35 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ client.Collection = (*CollectionClient)(nil) +var _ client.Collection = (*Collection)(nil) -// CollectionClient implements the client.Collection interface over HTTP. -type CollectionClient struct { +// Collection implements the client.Collection interface over HTTP. 
+type Collection struct { http *httpClient desc client.CollectionDescription } -func (c *CollectionClient) Description() client.CollectionDescription { +func (c *Collection) Description() client.CollectionDescription { return c.desc } -func (c *CollectionClient) Name() string { +func (c *Collection) Name() string { return c.desc.Name } -func (c *CollectionClient) Schema() client.SchemaDescription { +func (c *Collection) Schema() client.SchemaDescription { return c.desc.Schema } -func (c *CollectionClient) ID() uint32 { +func (c *Collection) ID() uint32 { return c.desc.ID } -func (c *CollectionClient) SchemaID() string { +func (c *Collection) SchemaID() string { return c.desc.Schema.SchemaID } -func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) error { +func (c *Collection) Create(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) body, err := doc.String() @@ -72,7 +72,7 @@ func (c *CollectionClient) Create(ctx context.Context, doc *client.Document) err return nil } -func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Document) error { +func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) var docMapList []map[string]any @@ -101,7 +101,7 @@ func (c *CollectionClient) CreateMany(ctx context.Context, docs []*client.Docume return nil } -func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) error { +func (c *Collection) Update(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() @@ -129,7 +129,7 @@ func (c *CollectionClient) Update(ctx context.Context, doc *client.Document) err return nil } -func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error { +func (c *Collection) Save(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) docMap, err := doc.ToMap() @@ -157,7 +157,7 @@ func (c *CollectionClient) Save(ctx context.Context, doc *client.Document) error return nil } -func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { +func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, docKey.String()) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -171,7 +171,7 @@ func (c *CollectionClient) Delete(ctx context.Context, docKey client.DocKey) (bo return true, nil } -func (c *CollectionClient) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { +func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { _, err := c.Get(ctx, docKey, false) if err != nil { return false, err @@ -179,7 +179,7 @@ func (c *CollectionClient) Exists(ctx context.Context, docKey client.DocKey) (bo return true, nil } -func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { +func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) (*client.UpdateResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) @@ -192,7 +192,7 @@ func (c *CollectionClient) UpdateWith(ctx context.Context, target any, updater s } } -func (c 
*CollectionClient) updateWith( +func (c *Collection) updateWith( ctx context.Context, request CollectionUpdateRequest, ) (*client.UpdateResult, error) { @@ -213,7 +213,7 @@ func (c *CollectionClient) updateWith( return &result, nil } -func (c *CollectionClient) UpdateWithFilter( +func (c *Collection) UpdateWithFilter( ctx context.Context, filter any, updater string, @@ -224,7 +224,7 @@ func (c *CollectionClient) UpdateWithFilter( }) } -func (c *CollectionClient) UpdateWithKey( +func (c *Collection) UpdateWithKey( ctx context.Context, key client.DocKey, updater string, @@ -235,7 +235,7 @@ func (c *CollectionClient) UpdateWithKey( }) } -func (c *CollectionClient) UpdateWithKeys( +func (c *Collection) UpdateWithKeys( ctx context.Context, docKeys []client.DocKey, updater string, @@ -250,7 +250,7 @@ func (c *CollectionClient) UpdateWithKeys( }) } -func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.DeleteResult, error) { switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) @@ -263,7 +263,7 @@ func (c *CollectionClient) DeleteWith(ctx context.Context, target any) (*client. } } -func (c *CollectionClient) deleteWith( +func (c *Collection) deleteWith( ctx context.Context, request CollectionDeleteRequest, ) (*client.DeleteResult, error) { @@ -284,19 +284,19 @@ func (c *CollectionClient) deleteWith( return &result, nil } -func (c *CollectionClient) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ Filter: filter, }) } -func (c *CollectionClient) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ Key: docKey.String(), }) } -func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { var keys []string for _, key := range docKeys { keys = append(keys, key.String()) @@ -306,7 +306,7 @@ func (c *CollectionClient) DeleteWithKeys(ctx context.Context, docKeys []client. 
}) } -func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, key.String()) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -320,14 +320,14 @@ func (c *CollectionClient) Get(ctx context.Context, key client.DocKey, showDelet return client.NewDocFromMap(docMap) } -func (c *CollectionClient) WithTxn(tx datastore.Txn) client.Collection { - return &CollectionClient{ +func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { + return &Collection{ http: c.http.withTxn(tx.ID()), desc: c.desc, } } -func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -373,7 +373,7 @@ func (c *CollectionClient) GetAllDocKeys(ctx context.Context) (<-chan client.Doc return docKeyCh, nil } -func (c *CollectionClient) CreateIndex( +func (c *Collection) CreateIndex( ctx context.Context, indexDesc client.IndexDescription, ) (client.IndexDescription, error) { @@ -394,7 +394,7 @@ func (c *CollectionClient) CreateIndex( return index, nil } -func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) error { +func (c *Collection) DropIndex(ctx context.Context, indexName string) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes", indexName) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -405,7 +405,7 @@ func (c *CollectionClient) DropIndex(ctx context.Context, indexName string) erro return err } -func (c *CollectionClient) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { +func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, error) { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, "indexes") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) diff --git a/http/client_lens.go b/http/client_lens.go index 8520a8401f..11921492f4 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -22,19 +22,19 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ client.LensRegistry = (*LensClient)(nil) +var _ client.LensRegistry = (*LensRegistry)(nil) -// LensClient implements the client.LensRegistry interface over HTTP. -type LensClient struct { +// LensRegistry implements the client.LensRegistry interface over HTTP. 
+type LensRegistry struct { http *httpClient } -func (c *LensClient) WithTxn(tx datastore.Txn) client.LensRegistry { +func (c *LensRegistry) WithTxn(tx datastore.Txn) client.LensRegistry { http := c.http.withTxn(tx.ID()) - return &LensClient{http} + return &LensRegistry{http} } -func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) error { +func (c *LensRegistry) SetMigration(ctx context.Context, config client.LensConfig) error { methodURL := c.http.baseURL.JoinPath("lens") body, err := json.Marshal(config) @@ -49,7 +49,7 @@ func (c *LensClient) SetMigration(ctx context.Context, config client.LensConfig) return err } -func (c *LensClient) ReloadLenses(ctx context.Context) error { +func (c *LensRegistry) ReloadLenses(ctx context.Context) error { methodURL := c.http.baseURL.JoinPath("lens", "reload") req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) @@ -60,7 +60,7 @@ func (c *LensClient) ReloadLenses(ctx context.Context) error { return err } -func (c *LensClient) MigrateUp( +func (c *LensRegistry) MigrateUp( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -82,7 +82,7 @@ func (c *LensClient) MigrateUp( return result, nil } -func (c *LensClient) MigrateDown( +func (c *LensRegistry) MigrateDown( ctx context.Context, src enumerable.Enumerable[map[string]any], schemaVersionID string, @@ -104,7 +104,7 @@ func (c *LensClient) MigrateDown( return result, nil } -func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { +func (c *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { methodURL := c.http.baseURL.JoinPath("lens") req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -118,7 +118,7 @@ func (c *LensClient) Config(ctx context.Context) ([]client.LensConfig, error) { return cfgs, nil } -func (c *LensClient) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { +func (c *LensRegistry) HasMigration(ctx context.Context, schemaVersionID string) (bool, error) { methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) diff --git a/http/client_tx.go b/http/client_tx.go index 7592333f3b..8df82007a6 100644 --- a/http/client_tx.go +++ b/http/client_tx.go @@ -18,19 +18,19 @@ import ( "github.com/sourcenetwork/defradb/datastore" ) -var _ datastore.Txn = (*TxClient)(nil) +var _ datastore.Txn = (*Transaction)(nil) -// TxClient implements the datastore.Txn interface over HTTP. -type TxClient struct { +// Transaction implements the datastore.Txn interface over HTTP. 
+type Transaction struct { id uint64 http *httpClient } -func (c *TxClient) ID() uint64 { +func (c *Transaction) ID() uint64 { return c.id } -func (c *TxClient) Commit(ctx context.Context) error { +func (c *Transaction) Commit(ctx context.Context) error { methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), nil) @@ -41,7 +41,7 @@ func (c *TxClient) Commit(ctx context.Context) error { return err } -func (c *TxClient) Discard(ctx context.Context) { +func (c *Transaction) Discard(ctx context.Context) { methodURL := c.http.baseURL.JoinPath("tx", fmt.Sprintf("%d", c.id)) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) @@ -51,34 +51,34 @@ func (c *TxClient) Discard(ctx context.Context) { c.http.request(req) //nolint:errcheck } -func (c *TxClient) OnSuccess(fn func()) { +func (c *Transaction) OnSuccess(fn func()) { panic("client side transaction") } -func (c *TxClient) OnError(fn func()) { +func (c *Transaction) OnError(fn func()) { panic("client side transaction") } -func (c *TxClient) OnDiscard(fn func()) { +func (c *Transaction) OnDiscard(fn func()) { panic("client side transaction") } -func (c *TxClient) Rootstore() datastore.DSReaderWriter { +func (c *Transaction) Rootstore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) Datastore() datastore.DSReaderWriter { +func (c *Transaction) Datastore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) Headstore() datastore.DSReaderWriter { +func (c *Transaction) Headstore() datastore.DSReaderWriter { panic("client side transaction") } -func (c *TxClient) DAGstore() datastore.DAGStore { +func (c *Transaction) DAGstore() datastore.DAGStore { panic("client side transaction") } -func (c *TxClient) Systemstore() datastore.DSReaderWriter { +func (c *Transaction) Systemstore() datastore.DSReaderWriter { panic("client side transaction") } From fd20479fbb2c993fd5a6dc8b781534d0a55ce3c4 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 31 Aug 2023 10:56:15 -0700 Subject: [PATCH 050/107] improve http error parsing. 
fix bug in collection get http handler --- http/client.go | 11 +---- http/errors.go | 51 +++++++++++++++++++++++ http/handler_collection.go | 83 ++++++++++++++++++++------------------ http/handler_lens.go | 20 ++++----- http/handler_store.go | 72 +++++++++++++++++---------------- http/handler_tx.go | 14 +++---- http/http_client.go | 7 +--- http/utils.go | 14 +++++-- 8 files changed, 162 insertions(+), 110 deletions(-) create mode 100644 http/errors.go diff --git a/http/client.go b/http/client.go index 4e983842c8..e3fee6b75f 100644 --- a/http/client.go +++ b/http/client.go @@ -14,7 +14,6 @@ import ( "bytes" "context" "encoding/json" - "fmt" "io" "net/http" "net/url" @@ -347,9 +346,7 @@ func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestR return result } result.GQL.Data = response.Data - for _, err := range response.Errors { - result.GQL.Errors = append(result.GQL.Errors, fmt.Errorf(err)) - } + result.GQL.Errors = response.Errors return result } @@ -373,12 +370,8 @@ func (c *Client) execRequestSubscription(ctx context.Context, r io.ReadCloser) * if err := json.Unmarshal(evt.Data, &response); err != nil { return } - var errors []error - for _, err := range response.Errors { - errors = append(errors, fmt.Errorf(err)) - } pub.Publish(client.GQLResult{ - Errors: errors, + Errors: response.Errors, Data: response.Data, }) } diff --git a/http/errors.go b/http/errors.go new file mode 100644 index 0000000000..c2808603cf --- /dev/null +++ b/http/errors.go @@ -0,0 +1,51 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package http + +import ( + "encoding/json" + "errors" +) + +const ( + errInvalidRequestBody = "invalid request body" + errDocKeyDoesNotMatch = "document key does not match" + errStreamingNotSupported = "streaming not supported" + errMigrationNotFound = "migration not found" + errMissingRequest = "missing request" + errInvalidTransactionId = "invalid transaction id" +) + +var ( + ErrInvalidRequestBody = errors.New(errInvalidRequestBody) + ErrDocKeyDoesNotMatch = errors.New(errDocKeyDoesNotMatch) + ErrStreamingNotSupported = errors.New(errStreamingNotSupported) + ErrMigrationNotFound = errors.New(errMigrationNotFound) + ErrMissingRequest = errors.New(errMissingRequest) + ErrInvalidTransactionId = errors.New(errInvalidTransactionId) +) + +type errorResponse struct { + Error error `json:"error"` +} + +func (e errorResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(map[string]any{"error": e.Error.Error()}) +} + +func (e *errorResponse) UnmarshalJSON(data []byte) error { + var out map[string]any + if err := json.Unmarshal(data, &out); err != nil { + return err + } + e.Error = parseError(out["error"]) + return nil +} diff --git a/http/handler_collection.go b/http/handler_collection.go index 50af88d319..f89c69ec1d 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -41,7 +41,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { var body any if err := requestJSON(req, &body); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -51,29 +51,29 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { for _, docMap := range t { doc, err := client.NewDocFromMap(docMap) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } docList = append(docList, doc) } if err := col.CreateMany(req.Context(), docList); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) case map[string]any: doc, err := client.NewDocFromMap(t) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } if err := col.Create(req.Context(), doc); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) default: - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid request body"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) } } @@ -82,17 +82,17 @@ func (s *collectionHandler) Save(rw http.ResponseWriter, req *http.Request) { var docMap map[string]any if err := requestJSON(req, &docMap); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err = col.Save(req.Context(), doc) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } 
rw.WriteHeader(http.StatusOK) @@ -103,7 +103,7 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request var request CollectionDeleteRequest if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -111,19 +111,19 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request case request.Filter != nil: result, err := col.DeleteWith(req.Context(), request.Filter) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } result, err := col.DeleteWith(req.Context(), docKey) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) @@ -132,19 +132,19 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } docKeys = append(docKeys, docKey) } result, err := col.DeleteWith(req.Context(), docKeys) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) default: - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid delete request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) } } @@ -153,7 +153,7 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request var request CollectionUpdateRequest if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -161,19 +161,19 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request case request.Filter != nil: result, err := col.UpdateWith(req.Context(), request.Filter, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) case request.Key != "": docKey, err := client.NewDocKeyFromString(request.Key) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } result, err := col.UpdateWith(req.Context(), docKey, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) @@ -182,19 +182,19 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request for _, key := range request.Keys { docKey, err := client.NewDocKeyFromString(key) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + 
responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } docKeys = append(docKeys, docKey) } result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) default: - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid update request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) } } @@ -203,21 +203,21 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { var docMap map[string]any if err := requestJSON(req, &docMap); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } doc, err := client.NewDocFromMap(docMap) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } if doc.Key().String() != chi.URLParam(req, "key") { - responseJSON(rw, http.StatusBadRequest, errorResponse{"document key does not match"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrDocKeyDoesNotMatch}) return } err = col.Update(req.Context(), doc) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -228,12 +228,12 @@ func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } _, err = col.Delete(req.Context(), docKey) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -245,15 +245,20 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - _, err = col.Get(req.Context(), docKey, showDeleted) + doc, err := col.Get(req.Context(), docKey, showDeleted) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - rw.WriteHeader(http.StatusOK) + docMap, err := doc.ToMap() + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, docMap) } type DocKeyResult struct { @@ -266,13 +271,13 @@ func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ flusher, ok := rw.(http.Flusher) if !ok { - responseJSON(rw, http.StatusBadRequest, errorResponse{"streaming not supported"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) return } docKeyCh, err := col.GetAllDocKeys(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } @@ -304,12 +309,12 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques var indexDesc 
client.IndexDescription if err := requestJSON(req, &indexDesc); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } index, err := col.CreateIndex(req.Context(), indexDesc) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, index) @@ -320,7 +325,7 @@ func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request indexes, err := col.GetIndexes(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, indexes) @@ -331,7 +336,7 @@ func (s *collectionHandler) DropIndex(rw http.ResponseWriter, req *http.Request) err := col.DropIndex(req.Context(), chi.URLParam(req, "index")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) diff --git a/http/handler_lens.go b/http/handler_lens.go index cc0c08ef10..00d281572d 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -26,7 +26,7 @@ func (s *lensHandler) ReloadLenses(rw http.ResponseWriter, req *http.Request) { err := lens.ReloadLenses(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -37,12 +37,12 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { var cfg client.LensConfig if err := requestJSON(req, &cfg); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err := lens.SetMigration(req.Context(), cfg) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -53,12 +53,12 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { var src enumerable.Enumerable[map[string]any] if err := requestJSON(req, &src); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } result, err := lens.MigrateUp(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) @@ -69,12 +69,12 @@ func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { var src enumerable.Enumerable[map[string]any] if err := requestJSON(req, &src); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } result, err := lens.MigrateDown(req.Context(), src, chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) @@ -85,7 +85,7 @@ func (s *lensHandler) Config(rw http.ResponseWriter, req *http.Request) { cfgs, err := lens.Config(req.Context()) if err != nil { - 
responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, cfgs) @@ -96,11 +96,11 @@ func (s *lensHandler) HasMigration(rw http.ResponseWriter, req *http.Request) { exists, err := lens.HasMigration(req.Context(), chi.URLParam(req, "version")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } if !exists { - responseJSON(rw, http.StatusBadRequest, errorResponse{"migration not found"}) + responseJSON(rw, http.StatusNotFound, errorResponse{ErrMigrationNotFound}) return } rw.WriteHeader(http.StatusOK) diff --git a/http/handler_store.go b/http/handler_store.go index 22c63c90fa..d0cbdf42d2 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -29,12 +29,12 @@ func (s *storeHandler) SetReplicator(rw http.ResponseWriter, req *http.Request) var rep client.Replicator if err := requestJSON(req, &rep); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err := store.SetReplicator(req.Context(), rep) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -45,12 +45,12 @@ func (s *storeHandler) DeleteReplicator(rw http.ResponseWriter, req *http.Reques var rep client.Replicator if err := requestJSON(req, &rep); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err := store.DeleteReplicator(req.Context(), rep) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -61,7 +61,7 @@ func (s *storeHandler) GetAllReplicators(rw http.ResponseWriter, req *http.Reque reps, err := store.GetAllReplicators(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, reps) @@ -72,7 +72,7 @@ func (s *storeHandler) AddP2PCollection(rw http.ResponseWriter, req *http.Reques err := store.AddP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -83,7 +83,7 @@ func (s *storeHandler) RemoveP2PCollection(rw http.ResponseWriter, req *http.Req err := store.RemoveP2PCollection(req.Context(), chi.URLParam(req, "id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -94,7 +94,7 @@ func (s *storeHandler) GetAllP2PCollections(rw http.ResponseWriter, req *http.Re cols, err := store.GetAllP2PCollections(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, cols) @@ -105,12 +105,12 @@ func (s *storeHandler) BasicImport(rw http.ResponseWriter, req *http.Request) { var config client.BackupConfig if err := requestJSON(req, &config); 
err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err := store.BasicImport(req.Context(), config.Filepath) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -121,12 +121,12 @@ func (s *storeHandler) BasicExport(rw http.ResponseWriter, req *http.Request) { var config client.BackupConfig if err := requestJSON(req, &config); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err := store.BasicExport(req.Context(), &config) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -137,12 +137,12 @@ func (s *storeHandler) AddSchema(rw http.ResponseWriter, req *http.Request) { schema, err := io.ReadAll(req.Body) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } cols, err := store.AddSchema(req.Context(), string(schema)) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, cols) @@ -153,12 +153,12 @@ func (s *storeHandler) PatchSchema(rw http.ResponseWriter, req *http.Request) { patch, err := io.ReadAll(req.Body) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } err = store.PatchSchema(req.Context(), string(patch)) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -171,28 +171,28 @@ func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) case req.URL.Query().Has("name"): col, err := store.GetCollectionByName(req.Context(), req.URL.Query().Get("name")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, col.Description()) case req.URL.Query().Has("schema_id"): col, err := store.GetCollectionBySchemaID(req.Context(), req.URL.Query().Get("schema_id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, col.Description()) case req.URL.Query().Has("version_id"): col, err := store.GetCollectionByVersionID(req.Context(), req.URL.Query().Get("version_id")) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, col.Description()) default: cols, err := store.GetAllCollections(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } colDesc := make([]client.CollectionDescription, len(cols)) @@ -208,7 +208,7 @@ func (s *storeHandler) GetAllIndexes(rw http.ResponseWriter, req *http.Request) indexes, err := 
store.GetAllIndexes(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, indexes) @@ -218,7 +218,7 @@ func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) { db := req.Context().Value(dbContextKey).(client.DB) if err := db.PrintDump(req.Context()); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) @@ -229,8 +229,16 @@ type GraphQLRequest struct { } type GraphQLResponse struct { - Data any `json:"data"` - Errors []string `json:"errors,omitempty"` + Data any `json:"data"` + Errors []error `json:"errors,omitempty"` +} + +func (res GraphQLResponse) MarshalJSON() ([]byte, error) { + var errors []string + for _, err := range res.Errors { + errors = append(errors, err.Error()) + } + return json.Marshal(map[string]any{"data": res.Data, "errors": errors}) } func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { @@ -246,11 +254,9 @@ func (res *GraphQLResponse) UnmarshalJSON(data []byte) error { // fix errors type to match tests switch t := out["errors"].(type) { case []any: - var errors []string for _, v := range t { - errors = append(errors, v.(string)) + res.Errors = append(res.Errors, parseError(v)) } - res.Errors = errors default: res.Errors = nil } @@ -281,26 +287,22 @@ func (s *storeHandler) ExecRequest(rw http.ResponseWriter, req *http.Request) { request.Query = req.URL.Query().Get("query") case req.Body != nil: if err := requestJSON(req, &request); err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } default: - responseJSON(rw, http.StatusBadRequest, errorResponse{"missing request"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrMissingRequest}) return } result := store.ExecRequest(req.Context(), request.Query) - var errors []string - for _, err := range result.GQL.Errors { - errors = append(errors, err.Error()) - } if result.Pub == nil { - responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, errors}) + responseJSON(rw, http.StatusOK, GraphQLResponse{result.GQL.Data, result.GQL.Errors}) return } flusher, ok := rw.(http.Flusher) if !ok { - responseJSON(rw, http.StatusBadRequest, errorResponse{"streaming not supported"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrStreamingNotSupported}) return } diff --git a/http/handler_tx.go b/http/handler_tx.go index c7a83c28c4..b7f1c82545 100644 --- a/http/handler_tx.go +++ b/http/handler_tx.go @@ -34,7 +34,7 @@ func (h *txHandler) NewTxn(rw http.ResponseWriter, req *http.Request) { tx, err := db.NewTxn(req.Context(), readOnly) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } txs.Store(tx.ID(), tx) @@ -48,7 +48,7 @@ func (h *txHandler) NewConcurrentTxn(rw http.ResponseWriter, req *http.Request) tx, err := db.NewConcurrentTxn(req.Context(), readOnly) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } txs.Store(tx.ID(), tx) @@ -60,17 +60,17 @@ func (h *txHandler) Commit(rw http.ResponseWriter, req *http.Request) { txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - 
responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) return } txVal, ok := txs.Load(txId) if !ok { - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) return } err = txVal.(datastore.Txn).Commit(req.Context()) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err.Error()}) + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } txs.Delete(txId) @@ -82,12 +82,12 @@ func (h *txHandler) Discard(rw http.ResponseWriter, req *http.Request) { txId, err := strconv.ParseUint(chi.URLParam(req, "id"), 10, 64) if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) return } txVal, ok := txs.LoadAndDelete(txId) if !ok { - responseJSON(rw, http.StatusBadRequest, errorResponse{"invalid transaction id"}) + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidTransactionId}) return } txVal.(datastore.Txn).Discard(req.Context()) diff --git a/http/http_client.go b/http/http_client.go index bb100a11a3..5299002570 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -16,8 +16,6 @@ import ( "io" "net/http" "net/url" - - "github.com/sourcenetwork/defradb/datastore/badger/v4" ) type httpClient struct { @@ -73,10 +71,7 @@ func (c *httpClient) request(req *http.Request) ([]byte, error) { if err := json.Unmarshal(data, &errRes); err != nil { return nil, fmt.Errorf("%s", data) } - if errRes.Error == badger.ErrTxnConflict.Error() { - return nil, badger.ErrTxnConflict - } - return nil, fmt.Errorf("%s", errRes.Error) + return nil, errRes.Error } func (c *httpClient) requestJson(req *http.Request, out any) error { diff --git a/http/utils.go b/http/utils.go index b7d3a5bf8c..384fe456f1 100644 --- a/http/utils.go +++ b/http/utils.go @@ -12,13 +12,12 @@ package http import ( "encoding/json" + "fmt" "io" "net/http" -) -type errorResponse struct { - Error string `json:"error"` -} + "github.com/sourcenetwork/defradb/datastore/badger/v4" +) func requestJSON(req *http.Request, out any) error { data, err := io.ReadAll(req.Body) @@ -33,3 +32,10 @@ func responseJSON(rw http.ResponseWriter, status int, out any) { rw.WriteHeader(status) json.NewEncoder(rw).Encode(out) //nolint:errcheck } + +func parseError(msg any) error { + if msg == badger.ErrTxnConflict.Error() { + return badger.ErrTxnConflict + } + return fmt.Errorf("%s", msg) +} From 19c734bc7a6da1eba2e2ee108182a14bd41f5f39 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 31 Aug 2023 13:57:25 -0700 Subject: [PATCH 051/107] implement client document update to fix failing tests --- cli/cli.go | 1 + cli/document_update.go | 14 +++++++++++--- cli/wrapper_collection.go | 9 +++++++-- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/cli/cli.go b/cli/cli.go index 30e9270c58..f6caed242c 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -87,6 +87,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { MakeDocumentDeleteCommand(), MakeDocumentUpdateCommand(), MakeDocumentSaveCommand(), + MakeDocumentCreateCommand(), ) client := MakeClientCommand(cfg) diff --git a/cli/document_update.go b/cli/document_update.go index fec39baa1a..8478cc93dd 100644 --- a/cli/document_update.go +++ b/cli/document_update.go @@ -11,7 +11,7 @@ package cli import ( - "fmt" + 
"encoding/json" "github.com/spf13/cobra" @@ -24,7 +24,7 @@ func MakeDocumentUpdateCommand() *cobra.Command { var keys []string var filter string var cmd = &cobra.Command{ - Use: "update --collection [--filter --key ] ", + Use: "update --collection [--filter --key ] ", Short: "Update documents by key or filter.", Long: `Update documents by key or filter`, Args: cobra.ExactArgs(1), @@ -71,7 +71,15 @@ func MakeDocumentUpdateCommand() *cobra.Command { } return writeJSON(cmd, res) default: - return fmt.Errorf("document key or filter must be defined") + var docMap map[string]any + if err := json.Unmarshal([]byte(args[0]), &docMap); err != nil { + return err + } + doc, err := client.NewDocFromMap(docMap) + if err != nil { + return err + } + return col.Update(cmd.Context(), doc) } }, } diff --git a/cli/wrapper_collection.go b/cli/wrapper_collection.go index 65ee035f13..b6f9e2114b 100644 --- a/cli/wrapper_collection.go +++ b/cli/wrapper_collection.go @@ -100,6 +100,9 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er } func (c *Collection) Update(ctx context.Context, doc *client.Document) error { + args := []string{"client", "document", "update"} + args = append(args, "--collection", c.desc.Name) + docMap, err := doc.ToMap() if err != nil { return err @@ -109,11 +112,13 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { delete(docMap, field.Name()) } } - updater, err := json.Marshal(docMap) + document, err := json.Marshal(docMap) if err != nil { return err } - _, err = c.UpdateWithKey(ctx, doc.Key(), string(updater)) + args = append(args, string(document)) + + _, err = c.cmd.execute(ctx, args) if err != nil { return err } From 17fa8be5bfa9e04f775d04de2b418a0217f07e7d Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 31 Aug 2023 14:12:52 -0700 Subject: [PATCH 052/107] revert test case changes that are fixed now --- tests/integration/backup/one_to_one/import_test.go | 3 +-- .../query/one_to_many/with_group_related_id_alias_test.go | 1 - .../query/one_to_many/with_group_related_id_test.go | 1 - tests/integration/query/simple/with_order_test.go | 1 - 4 files changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index 294ce40784..85c63f9e99 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -237,8 +237,7 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { }, }, { - "name": "Game of chains", - "author": nil, + "name": "Game of chains", }, }, }, diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 54f456bdef..8e2223e324 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -24,7 +24,6 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAlias(t *t Request: `query { Book(groupBy: [author]) { - author_id _group { name rating diff --git a/tests/integration/query/one_to_many/with_group_related_id_test.go b/tests/integration/query/one_to_many/with_group_related_id_test.go index 5cc3956b39..535e8665cd 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_test.go @@ -22,7 +22,6 @@ func 
TestQueryOneToManyWithParentGroupByOnRelatedTypeIDFromManySide(t *testing.T Description: "One-to-many query with groupBy on related id (from many side).", Request: `query { Book(groupBy: [author_id]) { - author_id _group { name rating diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index e523a0e422..ae7e6c865f 100644 --- a/tests/integration/query/simple/with_order_test.go +++ b/tests/integration/query/simple/with_order_test.go @@ -22,7 +22,6 @@ func TestQuerySimpleWithEmptyOrder(t *testing.T) { Request: `query { Users(order: {}) { Name - Age } }`, Docs: map[int][]string{ From 5f4e04340a55f477d552c893704018d2fb3b7ee4 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 1 Sep 2023 10:16:52 -0700 Subject: [PATCH 053/107] add generic resultOptionsAreEqual test function --- tests/integration/results.go | 47 +++++++++++++----------------------- 1 file changed, 17 insertions(+), 30 deletions(-) diff --git a/tests/integration/results.go b/tests/integration/results.go index ba82e4f4b7..7119c5566d 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -77,40 +77,15 @@ func resultsAreEqual(expected any, actual any) bool { } return assert.ObjectsAreEqualValues(expected, actualVal) case immutable.Option[float64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) + return resultOptionsAreEqual[float64](expectedVal, actual) case immutable.Option[uint64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) + return resultOptionsAreEqual[uint64](expectedVal, actual) case immutable.Option[int64]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) + return resultOptionsAreEqual[int64](expectedVal, actual) case immutable.Option[bool]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) + return resultOptionsAreEqual[bool](expectedVal, actual) case immutable.Option[string]: - if expectedVal.HasValue() { - expected = expectedVal.Value() - } else { - expected = nil - } - return resultsAreEqual(expected, actual) + return resultOptionsAreEqual[string](expectedVal, actual) case []int64: return resultArraysAreEqual[int64](expectedVal, actual) case []uint64: @@ -140,6 +115,18 @@ func resultsAreEqual(expected any, actual any) bool { } } +// resultArraysAreEqual returns true if the value of the expected immutable.Option +// and actual result are of equal value. +// +// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +func resultOptionsAreEqual[S any](expected immutable.Option[S], actual any) bool { + var expectedVal any + if expected.HasValue() { + expectedVal = expected.Value() + } + return resultsAreEqual(expectedVal, actual) +} + // resultArraysAreEqual returns true if the array of expected results and actual results // are of equal value. // From 9f88fefb98a9b98d79a66f7ec918b4de74e4115c Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 1 Sep 2023 11:22:13 -0700 Subject: [PATCH 054/107] test utils cleanup. 
fix change detector defaults --- tests/integration/utils2.go | 169 ++++++++++++++++-------------------- 1 file changed, 73 insertions(+), 96 deletions(-) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 1f04ae3d43..9c200e7dae 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -17,6 +17,7 @@ import ( "os" "path" "reflect" + "strconv" "strings" "testing" "time" @@ -64,7 +65,11 @@ const ( type ClientType string const ( - goClientType ClientType = "go" + // goClientType enables running the test suite using + // the go implementation of the client.DB interface. + goClientType ClientType = "go" + // httpClientType enables running the test suite using + // the http implementation of the client.DB interface. httpClientType ClientType = "http" ) @@ -143,37 +148,30 @@ var previousTestCaseTestName string func init() { // We use environment variables instead of flags `go test ./...` throws for all packages // that don't have the flag defined - httpClientValue, _ := os.LookupEnv(clientHttpEnvName) - goClientValue, _ := os.LookupEnv(clientGoEnvName) - badgerFileValue, _ := os.LookupEnv(fileBadgerEnvName) - badgerInMemoryValue, _ := os.LookupEnv(memoryBadgerEnvName) - databaseDir, _ = os.LookupEnv(fileBadgerPathEnvName) - rootDatabaseDir, _ = os.LookupEnv(rootDBFilePathEnvName) - detectDbChangesValue, _ := os.LookupEnv(detectDbChangesEnvName) - inMemoryStoreValue, _ := os.LookupEnv(inMemoryEnvName) - repositoryValue, repositorySpecified := os.LookupEnv(repositoryEnvName) - setupOnlyValue, _ := os.LookupEnv(setupOnlyEnvName) - targetBranchValue, targetBranchSpecified := os.LookupEnv(targetBranchEnvName) - mutType, mutationTypeSpecified := os.LookupEnv(mutationTypeEnvName) - - httpClient = getBool(httpClientValue) - goClient = getBool(goClientValue) - badgerFile = getBool(badgerFileValue) - badgerInMemory = getBool(badgerInMemoryValue) - inMemoryStore = getBool(inMemoryStoreValue) - DetectDbChanges = getBool(detectDbChangesValue) - SetupOnly = getBool(setupOnlyValue) - - if !repositorySpecified { + httpClient, _ = strconv.ParseBool(os.Getenv(clientHttpEnvName)) + goClient, _ = strconv.ParseBool(os.Getenv(clientGoEnvName)) + badgerFile, _ = strconv.ParseBool(os.Getenv(fileBadgerEnvName)) + badgerInMemory, _ = strconv.ParseBool(os.Getenv(memoryBadgerEnvName)) + inMemoryStore, _ = strconv.ParseBool(os.Getenv(inMemoryEnvName)) + DetectDbChanges, _ = strconv.ParseBool(os.Getenv(detectDbChangesEnvName)) + SetupOnly, _ = strconv.ParseBool(os.Getenv(setupOnlyEnvName)) + + var repositoryValue string + if value, ok := os.LookupEnv(repositoryEnvName); ok { + repositoryValue = value + } else { repositoryValue = "https://github.com/sourcenetwork/defradb.git" } - if !targetBranchSpecified { + var targetBranchValue string + if value, ok := os.LookupEnv(targetBranchEnvName); ok { + targetBranchValue = value + } else { targetBranchValue = "develop" } - if mutationTypeSpecified { - mutationType = MutationType(mutType) + if value, ok := os.LookupEnv(mutationTypeEnvName); ok { + mutationType = MutationType(value) } else { // Default to testing mutations via Collection.Save - it should be simpler and // faster. 
We assume this is desirable when not explicitly testing any particular @@ -181,30 +179,26 @@ func init() { mutationType = CollectionSaveMutationType } - // default is to run against all - if !badgerInMemory && !badgerFile && !inMemoryStore && !DetectDbChanges { - badgerInMemory = true - // Testing against the file system is off by default - badgerFile = false - inMemoryStore = true - } - // default is to run against all - if !goClient && !httpClient && !DetectDbChanges { + // Set default values for the specified testing mode. + switch { + case DetectDbChanges: + // Change detector runs using only the go client type. goClient = true - httpClient = true - } - - if DetectDbChanges { + httpClient = false detectDbChangesInit(repositoryValue, targetBranchValue) - } -} -func getBool(val string) bool { - switch strings.ToLower(val) { - case "true": - return true default: - return false + // Default is to test all client types. + if !goClient && !httpClient { + goClient = true + httpClient = true + } + // Default is to test all but filesystem db types. + if !badgerInMemory && !badgerFile && !inMemoryStore { + badgerFile = false + badgerInMemory = true + inMemoryStore = true + } } } @@ -278,45 +272,10 @@ func newBadgerFileDB(ctx context.Context, t testing.TB, path string) (client.DB, return db, nil } -func GetClientTypes() []ClientType { - clients := []ClientType{} - - if httpClient { - clients = append(clients, httpClientType) - } - - if goClient { - clients = append(clients, goClientType) - } - - return clients -} - -func GetDatabaseTypes() []DatabaseType { - databases := []DatabaseType{} - - if badgerInMemory { - databases = append(databases, badgerIMType) - } - - if badgerFile { - databases = append(databases, badgerFileType) - } - - if inMemoryStore { - databases = append(databases, defraIMType) - } - - return databases -} - -func GetDatabase(s *state) (client.DB, string, error) { - var ( - cdb client.DB - path string - err error - ) - +// GetDatabase returns the database implementation for the current +// testing state. The database type and client type on the test state +// are used to select the datastore and client implementation to use. 
+func GetDatabase(s *state) (cdb client.DB, path string, err error) { switch s.dbt { case badgerIMType: cdb, err = NewBadgerMemoryDB(s.ctx, db.WithUpdateEvents()) @@ -328,7 +287,7 @@ func GetDatabase(s *state) (client.DB, string, error) { cdb, err = NewInMemoryDB(s.ctx) default: - return nil, "", fmt.Errorf("invalid database type: %v", s.dbt) + err = fmt.Errorf("invalid database type: %v", s.dbt) } if err != nil { @@ -340,17 +299,17 @@ func GetDatabase(s *state) (client.DB, string, error) { cdb, err = http.NewWrapper(cdb) case goClientType: - // do nothing + return default: - return nil, "", fmt.Errorf("invalid client type: %v", s.dbt) + err = fmt.Errorf("invalid client type: %v", s.dbt) } if err != nil { return nil, "", err } - return cdb, path, nil + return } // ExecuteTestCase executes the given TestCase against the configured database @@ -368,15 +327,33 @@ func ExecuteTestCase( return } - ctx := context.Background() - cts := GetClientTypes() - dbts := GetDatabaseTypes() - // Assert that this is not empty to protect against accidental mis-configurations, + var clients []ClientType + if httpClient { + clients = append(clients, httpClientType) + } + if goClient { + clients = append(clients, goClientType) + } + + var databases []DatabaseType + if badgerInMemory { + databases = append(databases, badgerIMType) + } + if badgerFile { + databases = append(databases, badgerFileType) + } + if inMemoryStore { + databases = append(databases, defraIMType) + } + + // Assert that these are not empty to protect against accidental mis-configurations, // otherwise an empty set would silently pass all the tests. - require.NotEmpty(t, dbts) + require.NotEmpty(t, databases) + require.NotEmpty(t, clients) - for _, ct := range cts { - for _, dbt := range dbts { + ctx := context.Background() + for _, ct := range clients { + for _, dbt := range databases { executeTestCase(ctx, t, collectionNames, testCase, dbt, ct) } } From 7b12e046968e3d4f9eef12b2e8edd3c6b935ed32 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 1 Sep 2023 11:44:59 -0700 Subject: [PATCH 055/107] add comments to ignored close errors in http client --- http/client_collection.go | 3 +++ http/http_client.go | 3 +++ 2 files changed, 6 insertions(+) diff --git a/http/client_collection.go b/http/client_collection.go index be504220f3..8990b3c1cc 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -344,6 +344,9 @@ func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysRe go func() { eventReader := sse.NewReadCloser(res.Body) + // ignore close errors because the headers + // and body are already written and we + // cannot do anything useful with the error defer eventReader.Close() //nolint:errcheck defer close(docKeyCh) diff --git a/http/http_client.go b/http/http_client.go index 5299002570..48323607ab 100644 --- a/http/http_client.go +++ b/http/http_client.go @@ -56,6 +56,9 @@ func (c *httpClient) request(req *http.Request) ([]byte, error) { if err != nil { return nil, err } + // ignore close errors because they have + // no perceivable effect on the end user + // and cannot be reconciled easily defer res.Body.Close() //nolint:errcheck data, err := io.ReadAll(res.Body) From 7f7f7477c49b0873b4fc8209ca4064c1e71c6575 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 1 Sep 2023 11:55:31 -0700 Subject: [PATCH 056/107] add http middleware context documentation --- http/middleware.go | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/http/middleware.go 
b/http/middleware.go index 0aa38c5bfe..7e66edb6e1 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -27,12 +27,29 @@ const TX_HEADER_NAME = "x-defradb-tx" type contextKey string var ( - txsContextKey = contextKey("txs") - dbContextKey = contextKey("db") - txContextKey = contextKey("tx") + // txsContextKey is the context key for the transaction *sync.Map + txsContextKey = contextKey("txs") + // dbContextKey is the context key for the client.DB + dbContextKey = contextKey("db") + // txContextKey is the context key for the datastore.Txn + // + // NOTE: this will only be set if a transaction id is specified + txContextKey = contextKey("tx") + // storeContextKey is the context key for the client.Store + // + // NOTE: if a transaction exists, all operations will be executed + // in the current transaction context storeContextKey = contextKey("store") - lensContextKey = contextKey("lens") - colContextKey = contextKey("col") + // lensContextKey is the context key for the client.LensRegistry + // + // NOTE: if a transaction exists, all operations will be executed + // in the current transaction context + lensContextKey = contextKey("lens") + // colContextKey is the context key for the client.Collection + // + // NOTE: if a transaction exists, all operations will be executed + // in the current transaction context + colContextKey = contextKey("col") ) // ApiMiddleware sets the required context values for all API requests. From 5f96d78080d65af8b7b24e787e19f62c1b9b21c5 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 1 Sep 2023 13:09:11 -0700 Subject: [PATCH 057/107] relax comparisons when testing non go clients --- tests/integration/explain.go | 51 ++++++++++++++++-------------------- tests/integration/lens.go | 2 +- tests/integration/results.go | 29 ++++++++++++++++++++ tests/integration/utils2.go | 40 ++++++++++------------------ 4 files changed, 66 insertions(+), 56 deletions(-) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 059bb36d43..1147b7f235 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -11,7 +11,6 @@ package tests import ( - "context" "reflect" "sort" "testing" @@ -127,50 +126,43 @@ func executeExplainRequest( for _, node := range getNodes(action.NodeID, s.nodes) { result := node.DB.ExecRequest(s.ctx, action.Request) - assertExplainRequestResults( - s.ctx, - s.t, - s.testCase.Description, - &result.GQL, - action, - ) + assertExplainRequestResults(s, &result.GQL, action) } } func assertExplainRequestResults( - ctx context.Context, - t *testing.T, - description string, + s *state, actualResult *client.GQLResult, action ExplainRequest, ) { // Check expected error matches actual error. If it does we are done. if AssertErrors( - t, - description, + s.t, + s.testCase.Description, actualResult.Errors, action.ExpectedError, ) { return } else if action.ExpectedError != "" { // If didn't find a match but did expected an error, then fail. - assert.Fail(t, "Expected an error however none was raised.", description) + assert.Fail(s.t, "Expected an error however none was raised.", s.testCase.Description) } // Note: if returned gql result is `nil` this panics (the panic seems useful while testing). 
resultantData := actualResult.Data.([]map[string]any) - log.Info(ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) + log.Info(s.ctx, "", logging.NewKV("FullExplainGraphResult", actualResult.Data)) // Check if the expected full explain graph (if provided) matches the actual full explain graph // that is returned, if doesn't match we would like to still see a diff comparison (handy while debugging). if lengthOfExpectedFullGraph := len(action.ExpectedFullGraph); action.ExpectedFullGraph != nil { - require.Equal(t, lengthOfExpectedFullGraph, len(resultantData), description) + require.Equal(s.t, lengthOfExpectedFullGraph, len(resultantData), s.testCase.Description) for index, actualResult := range resultantData { if lengthOfExpectedFullGraph > index { assertResultsEqual( - t, + s.t, + s.clientType, action.ExpectedFullGraph[index], actualResult, - description, + s.testCase.Description, ) } } @@ -179,16 +171,17 @@ func assertExplainRequestResults( // Ensure the complete high-level pattern matches, inother words check that all the // explain graph nodes are in the correct expected ordering. if action.ExpectedPatterns != nil { - require.Equal(t, len(action.ExpectedPatterns), len(resultantData), description) + require.Equal(s.t, len(action.ExpectedPatterns), len(resultantData), s.testCase.Description) for index, actualResult := range resultantData { // Trim away all attributes (non-plan nodes) from the returned full explain graph result. - actualResultWithoutAttributes := trimExplainAttributes(t, description, actualResult) + actualResultWithoutAttributes := trimExplainAttributes(s.t, s.testCase.Description, actualResult) assertResultsEqual( - t, + s.t, + s.clientType, action.ExpectedPatterns[index], actualResultWithoutAttributes, - description, + s.testCase.Description, ) } } @@ -197,14 +190,13 @@ func assertExplainRequestResults( // Note: This does not check if the node is in correct location or not. 
if action.ExpectedTargets != nil { for _, target := range action.ExpectedTargets { - assertExplainTargetCase(t, description, target, resultantData) + assertExplainTargetCase(s, target, resultantData) } } } func assertExplainTargetCase( - t *testing.T, - description string, + s *state, targetCase PlanNodeTargetCase, actualResults []map[string]any, ) { @@ -218,17 +210,18 @@ func assertExplainTargetCase( if !isFound { assert.Fail( - t, + s.t, "Expected target ["+targetCase.TargetNodeName+"], was not found in the explain graph.", - description, + s.testCase.Description, ) } assertResultsEqual( - t, + s.t, + s.clientType, targetCase.ExpectedAttributes, foundActualTarget, - description, + s.testCase.Description, ) } } diff --git a/tests/integration/lens.go b/tests/integration/lens.go index 29810e9e78..f9e9e006a6 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -101,7 +101,7 @@ func getMigrations( assert.Equal(s.t, expectedLens.Inverse, actualLens.Inverse) assert.Equal(s.t, expectedLens.Path, actualLens.Path) - assertResultsEqual(s.t, expectedLens.Arguments, actualLens.Arguments) + assertResultsEqual(s.t, s.clientType, expectedLens.Arguments, actualLens.Arguments) } } } diff --git a/tests/integration/results.go b/tests/integration/results.go index 7119c5566d..8b777af3e1 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -12,6 +12,7 @@ package tests import ( "encoding/json" + "testing" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" @@ -22,6 +23,34 @@ import ( // across all nodes due to strong eventual consistency. type AnyOf []any +// assertResultsAnyOf asserts that actual result is equal to at least one of the expected results. +// +// NOTE: the comparison is relaxed when using client types other than goClientType +func assertResultsAnyOf(t *testing.T, client ClientType, expected AnyOf, actual any, msgAndArgs ...any) { + switch client { + case goClientType: + assert.Contains(t, expected, actual, msgAndArgs...) + default: + if !resultsAreAnyOf(expected, actual) { + assert.Contains(t, expected, actual, msgAndArgs...) + } + } +} + +// assertResultsEqual asserts that actual result is equal to the expected result. +// +// NOTE: the comparison is relaxed when using client types other than goClientType +func assertResultsEqual(t *testing.T, client ClientType, expected any, actual any, msgAndArgs ...any) { + switch client { + case goClientType: + assert.EqualValues(t, expected, actual, msgAndArgs...) + default: + if !resultsAreEqual(expected, actual) { + assert.EqualValues(t, expected, actual, msgAndArgs...) + } + } +} + // resultsAreAnyOf returns true if any of the expected results are of equal value. // // NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 9c200e7dae..0e15543a9c 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1521,9 +1521,7 @@ func executeRequest( anyOfByFieldKey := map[docFieldKey][]any{} expectedErrorRaised = assertRequestResults( - s.ctx, - s.t, - s.testCase.Description, + s, &result.GQL, action.Results, action.ExpectedError, @@ -1588,9 +1586,7 @@ func executeSubscriptionRequest( // This assert should be executed from the main test routine // so that failures will be properly handled. 
expectedErrorRaised := assertRequestResults( - s.ctx, - s.t, - s.testCase.Description, + s, finalResult, action.Results, action.ExpectedError, @@ -1662,16 +1658,14 @@ type docFieldKey struct { } func assertRequestResults( - ctx context.Context, - t *testing.T, - description string, + s *state, result *client.GQLResult, expectedResults []map[string]any, expectedError string, nodeID int, anyOfByField map[docFieldKey][]any, ) bool { - if AssertErrors(t, description, result.Errors, expectedError) { + if AssertErrors(s.t, s.testCase.Description, result.Errors, expectedError) { return true } @@ -1682,9 +1676,9 @@ func assertRequestResults( // Note: if result.Data == nil this panics (the panic seems useful while testing). resultantData := result.Data.([]map[string]any) - log.Info(ctx, "", logging.NewKV("RequestResults", result.Data)) + log.Info(s.ctx, "", logging.NewKV("RequestResults", result.Data)) - require.Equal(t, len(expectedResults), len(resultantData), description) + require.Equal(s.t, len(expectedResults), len(resultantData), s.testCase.Description) for docIndex, result := range resultantData { expectedResult := expectedResults[docIndex] @@ -1693,14 +1687,20 @@ func assertRequestResults( switch r := expectedValue.(type) { case AnyOf: - assertResultsAnyOf(t, r, actualValue) + assertResultsAnyOf(s.t, s.clientType, r, actualValue) dfk := docFieldKey{docIndex, field} valueSet := anyOfByField[dfk] valueSet = append(valueSet, actualValue) anyOfByField[dfk] = valueSet default: - assertResultsEqual(t, expectedValue, actualValue, fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex)) + assertResultsEqual( + s.t, + s.clientType, + expectedValue, + actualValue, + fmt.Sprintf("node: %v, doc: %v", nodeID, docIndex), + ) } } } @@ -1708,18 +1708,6 @@ func assertRequestResults( return false } -func assertResultsAnyOf(t *testing.T, expected AnyOf, actual any, msgAndArgs ...any) { - if !resultsAreAnyOf(expected, actual) { - assert.Contains(t, expected, actual, msgAndArgs...) - } -} - -func assertResultsEqual(t *testing.T, expected any, actual any, msgAndArgs ...any) { - if !resultsAreEqual(expected, actual) { - assert.EqualValues(t, expected, actual, msgAndArgs...) - } -} - func assertExpectedErrorRaised(t *testing.T, description string, expectedError string, wasRaised bool) { if expectedError != "" && !wasRaised { assert.Fail(t, "Expected an error however none was raised.", description) From 31f9b98d7b9ba3ab8aabbb8eb6b38dedccf7a453 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 5 Sep 2023 08:51:49 -0700 Subject: [PATCH 058/107] add error documentation. 
restore lens test comment --- http/client.go | 6 ++++++ http/client_collection.go | 6 +++--- tests/integration/lens.go | 1 + 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/http/client.go b/http/client.go index e3fee6b75f..16a8924a65 100644 --- a/http/client.go +++ b/http/client.go @@ -333,6 +333,9 @@ func (c *Client) ExecRequest(ctx context.Context, query string) *client.RequestR result.Pub = c.execRequestSubscription(ctx, res.Body) return result } + // ignore close errors because they have + // no perceivable effect on the end user + // and cannot be reconciled easily defer res.Body.Close() //nolint:errcheck data, err := io.ReadAll(res.Body) @@ -359,6 +362,9 @@ func (c *Client) execRequestSubscription(ctx context.Context, r io.ReadCloser) * go func() { eventReader := sse.NewReadCloser(r) + // ignore close errors because the status + // and body of the request are already + // checked and it cannot be handled properly defer eventReader.Close() //nolint:errcheck for { diff --git a/http/client_collection.go b/http/client_collection.go index 8990b3c1cc..6d9c57c836 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -344,9 +344,9 @@ func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysRe go func() { eventReader := sse.NewReadCloser(res.Body) - // ignore close errors because the headers - // and body are already written and we - // cannot do anything useful with the error + // ignore close errors because the status + // and body of the request are already + // checked and it cannot be handled properly defer eventReader.Close() //nolint:errcheck defer close(docKeyCh) diff --git a/tests/integration/lens.go b/tests/integration/lens.go index f9e9e006a6..317864ab3e 100644 --- a/tests/integration/lens.go +++ b/tests/integration/lens.go @@ -77,6 +77,7 @@ func getMigrations( require.NoError(s.t, err) require.Equal(s.t, len(configs), len(action.ExpectedResults)) + // The order of the results is not deterministic, so do not assert on the element for _, expected := range action.ExpectedResults { var actual client.LensConfig var actualFound bool From cba08defd3312a19bf858ee276ed7e324b72d535 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 5 Sep 2023 10:35:20 -0700 Subject: [PATCH 059/107] only test go client by default. allow change detector with any client type --- Makefile | 18 ++++++++++++++---- tests/integration/utils2.go | 30 ++++++++++++------------------ 2 files changed, 26 insertions(+), 22 deletions(-) diff --git a/Makefile b/Makefile index 642f820247..6eb3456fcc 100644 --- a/Makefile +++ b/Makefile @@ -187,11 +187,15 @@ test\:build: .PHONY: test\:ci test\:ci: - DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true $(MAKE) test:all + DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all .PHONY: test\:ci-gql-mutations test\:ci-gql-mutations: - DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true $(MAKE) test:all + DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all .PHONY: test\:gql-mutations test\:gql-mutations: @@ -204,7 +208,9 @@ test\:gql-mutations: # UpdateDoc will call [Collection.Update]. 
.PHONY: test\:ci-col-named-mutations test\:ci-col-named-mutations: - DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true $(MAKE) test:all + DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + $(MAKE) test:all .PHONY: test\:col-named-mutations test\:col-named-mutations: @@ -214,6 +220,10 @@ test\:col-named-mutations: test\:go: go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) +.PHONY: test\:http +test\:http: + DEFRA_CLIENT_HTTP=true go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) + .PHONY: test\:names test\:names: gotestsum --format testname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) @@ -285,7 +295,7 @@ test\:coverage-html: .PHONY: test\:changes test\:changes: @$(MAKE) deps:lens - env DEFRA_DETECT_DATABASE_CHANGES=true gotestsum -- ./... -shuffle=on -p 1 + env DEFRA_DETECT_DATABASE_CHANGES=true DEFRA_CLIENT_GO=true gotestsum -- ./... -shuffle=on -p 1 .PHONY: validate\:codecov validate\:codecov: diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 0e15543a9c..09c320b588 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -179,26 +179,20 @@ func init() { mutationType = CollectionSaveMutationType } - // Set default values for the specified testing mode. - switch { - case DetectDbChanges: - // Change detector runs using only the go client type. + // Default is to test go client type. + if !goClient && !httpClient { goClient = true - httpClient = false - detectDbChangesInit(repositoryValue, targetBranchValue) + } - default: - // Default is to test all client types. - if !goClient && !httpClient { - goClient = true - httpClient = true - } - // Default is to test all but filesystem db types. - if !badgerInMemory && !badgerFile && !inMemoryStore { - badgerFile = false - badgerInMemory = true - inMemoryStore = true - } + // Default is to test all but filesystem db types. + if !badgerInMemory && !badgerFile && !inMemoryStore && !DetectDbChanges { + badgerFile = false + badgerInMemory = true + inMemoryStore = true + } + + if DetectDbChanges { + detectDbChangesInit(repositoryValue, targetBranchValue) } } From 4dded7d274dab42aff269032423d051e77b3faf7 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 5 Sep 2023 14:01:29 -0700 Subject: [PATCH 060/107] clean up generic function calls. remove comment notes. adjust assertResults default behavior. --- http/middleware.go | 14 ++++----- tests/integration/explain.go | 8 ++--- tests/integration/results.go | 58 ++++++++++++++++++------------------ 3 files changed, 40 insertions(+), 40 deletions(-) diff --git a/http/middleware.go b/http/middleware.go index 7e66edb6e1..28f1e0ff1e 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -33,22 +33,22 @@ var ( dbContextKey = contextKey("db") // txContextKey is the context key for the datastore.Txn // - // NOTE: this will only be set if a transaction id is specified + // This will only be set if a transaction id is specified. txContextKey = contextKey("tx") // storeContextKey is the context key for the client.Store // - // NOTE: if a transaction exists, all operations will be executed - // in the current transaction context + // If a transaction exists, all operations will be executed + // in the current transaction context. 
storeContextKey = contextKey("store") // lensContextKey is the context key for the client.LensRegistry // - // NOTE: if a transaction exists, all operations will be executed - // in the current transaction context + // If a transaction exists, all operations will be executed + // in the current transaction context. lensContextKey = contextKey("lens") // colContextKey is the context key for the client.Collection // - // NOTE: if a transaction exists, all operations will be executed - // in the current transaction context + // If a transaction exists, all operations will be executed + // in the current transaction context. colContextKey = contextKey("col") ) diff --git a/tests/integration/explain.go b/tests/integration/explain.go index 1147b7f235..44c457c0f8 100644 --- a/tests/integration/explain.go +++ b/tests/integration/explain.go @@ -307,10 +307,10 @@ func findTargetNode( } case []any: - return findTargetNodeFromArray[any](targetName, toSkip, includeChildNodes, r) + return findTargetNodeFromArray(targetName, toSkip, includeChildNodes, r) case []map[string]any: - return findTargetNodeFromArray[map[string]any](targetName, toSkip, includeChildNodes, r) + return findTargetNodeFromArray(targetName, toSkip, includeChildNodes, r) } return nil, totalMatchedSoFar, false @@ -384,10 +384,10 @@ func trimExplainAttributes( trimmedMap[key] = trimExplainAttributes(t, description, v) case []map[string]any: - trimmedMap[key] = trimExplainAttributesArray[map[string]any](t, description, v) + trimmedMap[key] = trimExplainAttributesArray(t, description, v) case []any: - trimmedMap[key] = trimExplainAttributesArray[any](t, description, v) + trimmedMap[key] = trimExplainAttributesArray(t, description, v) default: assert.Fail( diff --git a/tests/integration/results.go b/tests/integration/results.go index 8b777af3e1..f0af3928f1 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -25,35 +25,35 @@ type AnyOf []any // assertResultsAnyOf asserts that actual result is equal to at least one of the expected results. // -// NOTE: the comparison is relaxed when using client types other than goClientType +// The comparison is relaxed when using client types other than goClientType. func assertResultsAnyOf(t *testing.T, client ClientType, expected AnyOf, actual any, msgAndArgs ...any) { switch client { - case goClientType: - assert.Contains(t, expected, actual, msgAndArgs...) - default: + case httpClientType: if !resultsAreAnyOf(expected, actual) { assert.Contains(t, expected, actual, msgAndArgs...) } + default: + assert.Contains(t, expected, actual, msgAndArgs...) } } // assertResultsEqual asserts that actual result is equal to the expected result. // -// NOTE: the comparison is relaxed when using client types other than goClientType +// The comparison is relaxed when using client types other than goClientType. func assertResultsEqual(t *testing.T, client ClientType, expected any, actual any, msgAndArgs ...any) { switch client { - case goClientType: - assert.EqualValues(t, expected, actual, msgAndArgs...) - default: + case httpClientType: if !resultsAreEqual(expected, actual) { assert.EqualValues(t, expected, actual, msgAndArgs...) } + default: + assert.EqualValues(t, expected, actual, msgAndArgs...) } } // resultsAreAnyOf returns true if any of the expected results are of equal value. // -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +// Values of type json.Number and immutable.Option will be reduced to their underlying types. 
func resultsAreAnyOf(expected AnyOf, actual any) bool { for _, v := range expected { if resultsAreEqual(v, actual) { @@ -65,7 +65,7 @@ func resultsAreAnyOf(expected AnyOf, actual any) bool { // resultsAreEqual returns true if the expected and actual results are of equal value. // -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +// Values of type json.Number and immutable.Option will be reduced to their underlying types. func resultsAreEqual(expected any, actual any) bool { switch expectedVal := expected.(type) { case map[string]any: @@ -106,39 +106,39 @@ func resultsAreEqual(expected any, actual any) bool { } return assert.ObjectsAreEqualValues(expected, actualVal) case immutable.Option[float64]: - return resultOptionsAreEqual[float64](expectedVal, actual) + return resultOptionsAreEqual(expectedVal, actual) case immutable.Option[uint64]: - return resultOptionsAreEqual[uint64](expectedVal, actual) + return resultOptionsAreEqual(expectedVal, actual) case immutable.Option[int64]: - return resultOptionsAreEqual[int64](expectedVal, actual) + return resultOptionsAreEqual(expectedVal, actual) case immutable.Option[bool]: - return resultOptionsAreEqual[bool](expectedVal, actual) + return resultOptionsAreEqual(expectedVal, actual) case immutable.Option[string]: - return resultOptionsAreEqual[string](expectedVal, actual) + return resultOptionsAreEqual(expectedVal, actual) case []int64: - return resultArraysAreEqual[int64](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []uint64: - return resultArraysAreEqual[uint64](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []float64: - return resultArraysAreEqual[float64](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []string: - return resultArraysAreEqual[string](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []bool: - return resultArraysAreEqual[bool](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []any: - return resultArraysAreEqual[any](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []map[string]any: - return resultArraysAreEqual[map[string]any](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []immutable.Option[float64]: - return resultArraysAreEqual[immutable.Option[float64]](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []immutable.Option[uint64]: - return resultArraysAreEqual[immutable.Option[uint64]](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []immutable.Option[int64]: - return resultArraysAreEqual[immutable.Option[int64]](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []immutable.Option[bool]: - return resultArraysAreEqual[immutable.Option[bool]](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) case []immutable.Option[string]: - return resultArraysAreEqual[immutable.Option[string]](expectedVal, actual) + return resultArraysAreEqual(expectedVal, actual) default: return assert.ObjectsAreEqualValues(expected, actual) } @@ -147,7 +147,7 @@ func resultsAreEqual(expected any, actual any) bool { // resultArraysAreEqual returns true if the value of the expected immutable.Option // and actual result are of equal value. // -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. 
+// Values of type json.Number and immutable.Option will be reduced to their underlying types. func resultOptionsAreEqual[S any](expected immutable.Option[S], actual any) bool { var expectedVal any if expected.HasValue() { @@ -159,7 +159,7 @@ func resultOptionsAreEqual[S any](expected immutable.Option[S], actual any) bool // resultArraysAreEqual returns true if the array of expected results and actual results // are of equal value. // -// NOTE: Values of type json.Number and immutable.Option will be reduced to their underlying types. +// Values of type json.Number and immutable.Option will be reduced to their underlying types. func resultArraysAreEqual[S any](expected []S, actual any) bool { if len(expected) == 0 && actual == nil { return true From df79ef4a4e94dd9dff2479848a6f1df5325ce313 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 5 Sep 2023 14:05:32 -0700 Subject: [PATCH 061/107] fix lens http client parsing bug --- http/client_lens.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/http/client_lens.go b/http/client_lens.go index 11921492f4..3313338223 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -75,11 +75,11 @@ func (c *LensRegistry) MigrateUp( if err != nil { return nil, err } - var result enumerable.Enumerable[map[string]any] + var result []map[string]any if err := c.http.requestJson(req, &result); err != nil { return nil, err } - return result, nil + return enumerable.New(result), nil } func (c *LensRegistry) MigrateDown( @@ -97,11 +97,11 @@ func (c *LensRegistry) MigrateDown( if err != nil { return nil, err } - var result enumerable.Enumerable[map[string]any] + var result []map[string]any if err := c.http.requestJson(req, &result); err != nil { return nil, err } - return result, nil + return enumerable.New(result), nil } func (c *LensRegistry) Config(ctx context.Context) ([]client.LensConfig, error) { From 34dd82c8dba15b3ffc445bfc9be3033961cdbeb6 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 6 Sep 2023 08:10:54 -0700 Subject: [PATCH 062/107] update test makefile targets to include cli --- Makefile | 10 +++++++--- tests/integration/results.go | 4 ++-- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index dd395264f1..5e435c9843 100644 --- a/Makefile +++ b/Makefile @@ -187,13 +187,13 @@ test\:build: .PHONY: test\:ci test\:ci: DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true \ - DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true DEFRA_CLIENT_CLI=true \ $(MAKE) test:all .PHONY: test\:ci-gql-mutations test\:ci-gql-mutations: DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true \ - DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true DEFRA_CLIENT_CLI=true \ $(MAKE) test:all .PHONY: test\:gql-mutations @@ -208,7 +208,7 @@ test\:gql-mutations: .PHONY: test\:ci-col-named-mutations test\:ci-col-named-mutations: DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true \ - DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ + DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true DEFRA_CLIENT_CLI=true \ $(MAKE) test:all .PHONY: test\:col-named-mutations @@ -223,6 +223,10 @@ test\:go: test\:http: DEFRA_CLIENT_HTTP=true go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) +.PHONY: test\:cli +test\:cli: + DEFRA_CLIENT_CLI=true go test $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) + .PHONY: test\:names test\:names: gotestsum --format testname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) diff --git a/tests/integration/results.go 
b/tests/integration/results.go index f0af3928f1..4f0dce45d3 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -28,7 +28,7 @@ type AnyOf []any // The comparison is relaxed when using client types other than goClientType. func assertResultsAnyOf(t *testing.T, client ClientType, expected AnyOf, actual any, msgAndArgs ...any) { switch client { - case httpClientType: + case httpClientType, cliClientType: if !resultsAreAnyOf(expected, actual) { assert.Contains(t, expected, actual, msgAndArgs...) } @@ -42,7 +42,7 @@ func assertResultsAnyOf(t *testing.T, client ClientType, expected AnyOf, actual // The comparison is relaxed when using client types other than goClientType. func assertResultsEqual(t *testing.T, client ClientType, expected any, actual any, msgAndArgs ...any) { switch client { - case httpClientType: + case httpClientType, cliClientType: if !resultsAreEqual(expected, actual) { assert.EqualValues(t, expected, actual, msgAndArgs...) } From 05f6f809a82e773a9fb0547b266c5ccf1d275abd Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 6 Sep 2023 08:13:51 -0700 Subject: [PATCH 063/107] fix bug with body parsing logic in lens migrate handlers --- http/handler_lens.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/http/handler_lens.go b/http/handler_lens.go index 00d281572d..ccf8dd01a8 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -51,12 +51,12 @@ func (s *lensHandler) SetMigration(rw http.ResponseWriter, req *http.Request) { func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) - var src enumerable.Enumerable[map[string]any] + var src []map[string]any if err := requestJSON(req, &src); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := lens.MigrateUp(req.Context(), src, chi.URLParam(req, "version")) + result, err := lens.MigrateUp(req.Context(), enumerable.New(src), chi.URLParam(req, "version")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -67,12 +67,12 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { lens := req.Context().Value(lensContextKey).(client.LensRegistry) - var src enumerable.Enumerable[map[string]any] + var src []map[string]any if err := requestJSON(req, &src); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := lens.MigrateDown(req.Context(), src, chi.URLParam(req, "version")) + result, err := lens.MigrateDown(req.Context(), enumerable.New(src), chi.URLParam(req, "version")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return From 97efa002c0a8516cdc572cca5fe5b07bb6585642 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 6 Sep 2023 08:21:59 -0700 Subject: [PATCH 064/107] fix bug in http lens client migrate --- http/client_lens.go | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/http/client_lens.go b/http/client_lens.go index 3313338223..3c8c2fc903 100644 --- a/http/client_lens.go +++ b/http/client_lens.go @@ -67,7 +67,14 @@ func (c *LensRegistry) MigrateUp( ) (enumerable.Enumerable[map[string]any], error) { methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "up") - body, err := json.Marshal(src) + var data []map[string]any + err := enumerable.ForEach(src, func(item map[string]any) { + data = 
append(data, item) + }) + if err != nil { + return nil, err + } + body, err := json.Marshal(data) if err != nil { return nil, err } @@ -89,7 +96,14 @@ func (c *LensRegistry) MigrateDown( ) (enumerable.Enumerable[map[string]any], error) { methodURL := c.http.baseURL.JoinPath("lens", schemaVersionID, "down") - body, err := json.Marshal(src) + var data []map[string]any + err := enumerable.ForEach(src, func(item map[string]any) { + data = append(data, item) + }) + if err != nil { + return nil, err + } + body, err := json.Marshal(data) if err != nil { return nil, err } From 38696f81bf6a304efb453de9c3a32fa2ca720000 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 6 Sep 2023 08:24:24 -0700 Subject: [PATCH 065/107] fix logic in cli lens migrate --- cli/schema_migration_down.go | 4 ++-- cli/schema_migration_up.go | 4 ++-- cli/wrapper_lens.go | 18 ++++++++++++++++-- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 854d9fdfef..8b6d75a444 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -30,7 +30,7 @@ func MakeSchemaMigrationDownCommand() *cobra.Command { RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) - var src enumerable.Enumerable[map[string]any] + var src []map[string]any if err := json.Unmarshal([]byte(args[0]), &src); err != nil { return err } @@ -38,7 +38,7 @@ func MakeSchemaMigrationDownCommand() *cobra.Command { if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { lens = lens.WithTxn(tx) } - out, err := lens.MigrateDown(cmd.Context(), src, schemaVersionID) + out, err := lens.MigrateDown(cmd.Context(), enumerable.New(src), schemaVersionID) if err != nil { return err } diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index c56321d6ce..78ff18daeb 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -30,7 +30,7 @@ func MakeSchemaMigrationUpCommand() *cobra.Command { RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) - var src enumerable.Enumerable[map[string]any] + var src []map[string]any if err := json.Unmarshal([]byte(args[0]), &src); err != nil { return err } @@ -38,7 +38,7 @@ func MakeSchemaMigrationUpCommand() *cobra.Command { if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { lens = lens.WithTxn(tx) } - out, err := lens.MigrateUp(cmd.Context(), src, schemaVersionID) + out, err := lens.MigrateUp(cmd.Context(), enumerable.New(src), schemaVersionID) if err != nil { return err } diff --git a/cli/wrapper_lens.go b/cli/wrapper_lens.go index d2b68e5e76..679a792662 100644 --- a/cli/wrapper_lens.go +++ b/cli/wrapper_lens.go @@ -60,7 +60,14 @@ func (w *LensRegistry) MigrateUp( args := []string{"client", "schema", "migration", "up"} args = append(args, "--version", schemaVersionID) - srcJSON, err := json.Marshal(src) + var srcData []map[string]any + err := enumerable.ForEach(src, func(item map[string]any) { + srcData = append(srcData, item) + }) + if err != nil { + return nil, err + } + srcJSON, err := json.Marshal(srcData) if err != nil { return nil, err } @@ -85,7 +92,14 @@ func (w *LensRegistry) MigrateDown( args := []string{"client", "schema", "migration", "down"} args = append(args, "--version", schemaVersionID) - srcJSON, err := json.Marshal(src) + var srcData []map[string]any + err := enumerable.ForEach(src, func(item map[string]any) { + srcData = append(srcData, item) + }) 
+ if err != nil { + return nil, err + } + srcJSON, err := json.Marshal(srcData) if err != nil { return nil, err } From 7d27374db4f0733c5e1bfc6c5eff9b1f2a40c3cf Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 8 Sep 2023 11:50:09 -0700 Subject: [PATCH 066/107] regenerate cli docs --- cli/collection.go | 17 ++++++- cli/document_create.go | 13 +++-- cli/document_delete.go | 9 +++- cli/document_get.go | 8 ++- cli/document_keys.go | 6 ++- cli/document_save.go | 10 ++-- cli/document_update.go | 14 +++++- cli/tx_commit.go | 4 +- cli/tx_create.go | 4 +- cli/tx_discard.go | 4 +- docs/cli/defradb_client.md | 6 ++- docs/cli/defradb_client_backup.md | 1 + docs/cli/defradb_client_backup_export.md | 1 + docs/cli/defradb_client_backup_import.md | 1 + docs/cli/defradb_client_blocks_get.md | 31 ------------ ...t_ping.md => defradb_client_collection.md} | 29 +++++++++-- ...t_blocks.md => defradb_client_document.md} | 18 +++++-- docs/cli/defradb_client_document_create.md | 44 ++++++++++++++++ ...t.md => defradb_client_document_delete.md} | 26 ++++++---- docs/cli/defradb_client_document_get.md | 42 ++++++++++++++++ docs/cli/defradb_client_document_keys.md | 41 +++++++++++++++ docs/cli/defradb_client_document_save.md | 42 ++++++++++++++++ docs/cli/defradb_client_document_update.md | 50 +++++++++++++++++++ docs/cli/defradb_client_dump.md | 1 + docs/cli/defradb_client_index.md | 1 + docs/cli/defradb_client_index_create.md | 1 + docs/cli/defradb_client_index_drop.md | 1 + docs/cli/defradb_client_index_list.md | 1 + docs/cli/defradb_client_p2p.md | 3 +- ...on.md => defradb_client_p2p_collection.md} | 11 ++-- ...d => defradb_client_p2p_collection_add.md} | 7 +-- ...> defradb_client_p2p_collection_getall.md} | 7 +-- ...> defradb_client_p2p_collection_remove.md} | 7 +-- docs/cli/defradb_client_p2p_replicator.md | 1 + .../defradb_client_p2p_replicator_delete.md | 1 + .../defradb_client_p2p_replicator_getall.md | 1 + docs/cli/defradb_client_p2p_replicator_set.md | 5 +- docs/cli/defradb_client_query.md | 1 + docs/cli/defradb_client_rpc_addreplicator.md | 37 -------------- docs/cli/defradb_client_rpc_p2pcollection.md | 36 ------------- .../defradb_client_rpc_p2pcollection_add.md | 37 -------------- ...defradb_client_rpc_p2pcollection_getall.md | 37 -------------- ...defradb_client_rpc_p2pcollection_remove.md | 37 -------------- docs/cli/defradb_client_rpc_replicator.md | 36 ------------- .../defradb_client_rpc_replicator_delete.md | 38 -------------- .../defradb_client_rpc_replicator_getall.md | 37 -------------- docs/cli/defradb_client_schema.md | 1 + docs/cli/defradb_client_schema_add.md | 1 + docs/cli/defradb_client_schema_migration.md | 4 ++ .../defradb_client_schema_migration_down.md | 37 ++++++++++++++ .../defradb_client_schema_migration_get.md | 1 + .../defradb_client_schema_migration_reload.md | 36 +++++++++++++ .../defradb_client_schema_migration_set.md | 1 + .../cli/defradb_client_schema_migration_up.md | 37 ++++++++++++++ docs/cli/defradb_client_schema_patch.md | 1 + ...adb_client_rpc.md => defradb_client_tx.md} | 15 +++--- ..._peerid.md => defradb_client_tx_commit.md} | 13 ++--- docs/cli/defradb_client_tx_create.md | 38 ++++++++++++++ ...a_list.md => defradb_client_tx_discard.md} | 15 ++++-- docs/cli/defradb_server-dump.md | 3 +- 60 files changed, 567 insertions(+), 401 deletions(-) delete mode 100644 docs/cli/defradb_client_blocks_get.md rename docs/cli/{defradb_client_ping.md => defradb_client_collection.md} (53%) rename docs/cli/{defradb_client_blocks.md => defradb_client_document.md} (51%) create 
mode 100644 docs/cli/defradb_client_document_create.md rename docs/cli/{defradb_client_rpc_replicator_set.md => defradb_client_document_delete.md} (50%) create mode 100644 docs/cli/defradb_client_document_get.md create mode 100644 docs/cli/defradb_client_document_keys.md create mode 100644 docs/cli/defradb_client_document_save.md create mode 100644 docs/cli/defradb_client_document_update.md rename docs/cli/{defradb_client_p2p_p2pcollection.md => defradb_client_p2p_collection.md} (71%) rename docs/cli/{defradb_client_p2p_p2pcollection_add.md => defradb_client_p2p_collection_add.md} (80%) rename docs/cli/{defradb_client_p2p_p2pcollection_getall.md => defradb_client_p2p_collection_getall.md} (80%) rename docs/cli/{defradb_client_p2p_p2pcollection_remove.md => defradb_client_p2p_collection_remove.md} (79%) delete mode 100644 docs/cli/defradb_client_rpc_addreplicator.md delete mode 100644 docs/cli/defradb_client_rpc_p2pcollection.md delete mode 100644 docs/cli/defradb_client_rpc_p2pcollection_add.md delete mode 100644 docs/cli/defradb_client_rpc_p2pcollection_getall.md delete mode 100644 docs/cli/defradb_client_rpc_p2pcollection_remove.md delete mode 100644 docs/cli/defradb_client_rpc_replicator.md delete mode 100644 docs/cli/defradb_client_rpc_replicator_delete.md delete mode 100644 docs/cli/defradb_client_rpc_replicator_getall.md create mode 100644 docs/cli/defradb_client_schema_migration_down.md create mode 100644 docs/cli/defradb_client_schema_migration_reload.md create mode 100644 docs/cli/defradb_client_schema_migration_up.md rename docs/cli/{defradb_client_rpc.md => defradb_client_tx.md} (65%) rename docs/cli/{defradb_client_peerid.md => defradb_client_tx_commit.md} (73%) create mode 100644 docs/cli/defradb_client_tx_create.md rename docs/cli/{defradb_client_schema_list.md => defradb_client_tx_discard.md} (71%) diff --git a/cli/collection.go b/cli/collection.go index ddc7a3cfa2..09df92ed74 100644 --- a/cli/collection.go +++ b/cli/collection.go @@ -21,9 +21,22 @@ func MakeCollectionCommand() *cobra.Command { var schemaID string var versionID string var cmd = &cobra.Command{ - Use: "collection", + Use: "collection [--name --schema --version ]", Short: "View detailed collection info.", - Long: `View detailed collection info.`, + Long: `View detailed collection info. + +Example: view all collections + defradb client collection + +Example: view collection by name + defradb client collection --name User + +Example: view collection by schema id + defradb client collection --schema bae123 + +Example: view collection by version id + defradb client collection --version bae123 + `, RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_create.go b/cli/document_create.go index e806e7492e..372d6e2ab6 100644 --- a/cli/document_create.go +++ b/cli/document_create.go @@ -24,9 +24,16 @@ func MakeDocumentCreateCommand() *cobra.Command { var collection string var cmd = &cobra.Command{ Use: "create --collection ", - Short: "Create a new docment.", - Long: `Create a new docment.`, - Args: cobra.ExactArgs(1), + Short: "Create a new document.", + Long: `Create a new document. 
+ +Example: create document + defradb client collection create --collection User '{ "name": "Bob" }' + +Example: create documents + defradb client collection create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' + `, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_delete.go b/cli/document_delete.go index dbed21beee..73afc2d2bd 100644 --- a/cli/document_delete.go +++ b/cli/document_delete.go @@ -26,7 +26,14 @@ func MakeDocumentDeleteCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "delete --collection [--filter --key ]", Short: "Delete documents by key or filter.", - Long: `Delete documents by key or filter`, + Long: `Delete documents by key or filter and lists the number of documents deleted. + +Example: delete by key(s) + defradb client document delete --collection User --key bae123,bae456,... + +Example: delete by filter + defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' + `, RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_get.go b/cli/document_get.go index 77f81cf33d..a2bc5c9ff6 100644 --- a/cli/document_get.go +++ b/cli/document_get.go @@ -23,8 +23,12 @@ func MakeDocumentGetCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "get --collection [--show-deleted]", Short: "View detailed document info.", - Long: `View detailed document info.`, - Args: cobra.ExactArgs(1), + Long: `View detailed document info. + +Example: + defradb client document get --collection User bae123 + `, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_keys.go b/cli/document_keys.go index f61954ed31..c9f5f0eebd 100644 --- a/cli/document_keys.go +++ b/cli/document_keys.go @@ -23,7 +23,11 @@ func MakeDocumentKeysCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "keys --collection ", Short: "List all collection document keys.", - Long: `List all collection document keys`, + Long: `List all collection document keys. + +Example: + defradb client collection keys --collection User keys + `, RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_save.go b/cli/document_save.go index d8c85ec819..175b21609e 100644 --- a/cli/document_save.go +++ b/cli/document_save.go @@ -23,9 +23,13 @@ func MakeDocumentSaveCommand() *cobra.Command { var key string var cmd = &cobra.Command{ Use: "save --collection --key ", - Short: "Create or update a docment.", - Long: `Create or update a docment.`, - Args: cobra.ExactArgs(1), + Short: "Create or update a document.", + Long: `Create or update a document. 
+ +Example: + defradb client document save --collection User --key bae123 '{ "name": "Bob" }' + `, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_update.go b/cli/document_update.go index ab68b1f5c4..0e10fd282e 100644 --- a/cli/document_update.go +++ b/cli/document_update.go @@ -27,8 +27,18 @@ func MakeDocumentUpdateCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "update --collection [--filter --key --updater ] ", Short: "Update documents by key or filter.", - Long: `Update documents by key or filter`, - Args: cobra.RangeArgs(0, 1), + Long: `Update documents by key or filter. + +Example: + defradb client document update --collection User --key bae123 '{ "name": "Bob" }' + +Example: update by filter + defradb client document update --collection User --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' + +Example: update by keys + defradb client document update --collection User --key bae123,bae456 --updater '{ "verified": true }' + `, + Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/tx_commit.go b/cli/tx_commit.go index f5976453f4..260a274a08 100644 --- a/cli/tx_commit.go +++ b/cli/tx_commit.go @@ -22,8 +22,8 @@ import ( func MakeTxCommitCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "commit [id]", - Short: "Commit a DefraDB transaction", - Long: `Commit a DefraDB transaction`, + Short: "Commit a DefraDB transaction.", + Long: `Commit a DefraDB transaction.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) (err error) { id, err := strconv.ParseUint(args[0], 10, 64) diff --git a/cli/tx_create.go b/cli/tx_create.go index 5f5321c3e7..987a784077 100644 --- a/cli/tx_create.go +++ b/cli/tx_create.go @@ -23,8 +23,8 @@ func MakeTxCreateCommand(cfg *config.Config) *cobra.Command { var readOnly bool var cmd = &cobra.Command{ Use: "create", - Short: "Create a new DefraDB transaction", - Long: `Create a new DefraDB transaction`, + Short: "Create a new DefraDB transaction.", + Long: `Create a new DefraDB transaction.`, RunE: func(cmd *cobra.Command, args []string) (err error) { db := cmd.Context().Value(dbContextKey).(client.DB) diff --git a/cli/tx_discard.go b/cli/tx_discard.go index 83431983e8..351f919f53 100644 --- a/cli/tx_discard.go +++ b/cli/tx_discard.go @@ -22,8 +22,8 @@ import ( func MakeTxDiscardCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ Use: "discard [id]", - Short: "Discard a DefraDB transaction", - Long: `Discard a DefraDB transaction`, + Short: "Discard a DefraDB transaction.", + Long: `Discard a DefraDB transaction.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) (err error) { id, err := strconv.ParseUint(args[0], 10, 64) diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index f1ae454919..b538592ccc 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -10,7 +10,8 @@ Execute queries, add schema types, obtain node info, etc. ### Options ``` - -h, --help help for client + -h, --help help for client + --tx uint Transaction ID ``` ### Options inherited from parent commands @@ -30,9 +31,12 @@ Execute queries, add schema types, obtain node info, etc. 
* [defradb](defradb.md) - DefraDB Edge Database * [defradb client backup](defradb_client_backup.md) - Interact with the backup utility +* [defradb client collection](defradb_client_collection.md) - View detailed collection info. +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. * [defradb client dump](defradb_client_dump.md) - Dump the contents of DefraDB node-side * [defradb client index](defradb_client_index.md) - Manage collections' indexes of a running DefraDB instance * [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system * [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node +* [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions diff --git a/docs/cli/defradb_client_backup.md b/docs/cli/defradb_client_backup.md index baa08725e1..77e111795d 100644 --- a/docs/cli/defradb_client_backup.md +++ b/docs/cli/defradb_client_backup.md @@ -23,6 +23,7 @@ Currently only supports JSON format. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_backup_export.md b/docs/cli/defradb_client_backup_export.md index ea8a22d634..b7547ea641 100644 --- a/docs/cli/defradb_client_backup_export.md +++ b/docs/cli/defradb_client_backup_export.md @@ -37,6 +37,7 @@ defradb client backup export [-c --collections | -p --pretty | -f --format] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_blocks_get.md b/docs/cli/defradb_client_blocks_get.md deleted file mode 100644 index 38ff02b63c..0000000000 --- a/docs/cli/defradb_client_blocks_get.md +++ /dev/null @@ -1,31 +0,0 @@ -## defradb client blocks get - -Get a block by its CID from the blockstore - -``` -defradb client blocks get [CID] [flags] -``` - -### Options - -``` - -h, --help help for get -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client blocks](defradb_client_blocks.md) - Interact with the database's blockstore - diff --git a/docs/cli/defradb_client_ping.md b/docs/cli/defradb_client_collection.md similarity index 53% rename from docs/cli/defradb_client_ping.md rename to docs/cli/defradb_client_collection.md index 8edd7aff94..2a1e9058be 100644 --- a/docs/cli/defradb_client_ping.md +++ b/docs/cli/defradb_client_collection.md @@ -1,15 +1,35 @@ -## defradb client ping +## defradb client collection -Ping to test connection with a node +View detailed collection info. + +### Synopsis + +View detailed collection info. + +Example: view all collections + defradb client collection + +Example: view collection by name + defradb client collection --name User + +Example: view collection by schema id + defradb client collection --schema bae123 + +Example: view collection by version id + defradb client collection --version bae123 + ``` -defradb client ping [flags] +defradb client collection [--name --schema --version ] [flags] ``` ### Options ``` - -h, --help help for ping + -h, --help help for collection + --name string Get collection by name + --schema string Get collection by schema ID + --version string Get collection by version ID ``` ### Options inherited from parent commands @@ -22,6 +42,7 @@ defradb client ping [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_blocks.md b/docs/cli/defradb_client_document.md similarity index 51% rename from docs/cli/defradb_client_blocks.md rename to docs/cli/defradb_client_document.md index e05a853440..bc527357e7 100644 --- a/docs/cli/defradb_client_blocks.md +++ b/docs/cli/defradb_client_document.md @@ -1,11 +1,15 @@ -## defradb client blocks +## defradb client document -Interact with the database's blockstore +Create, read, update, and delete documents. + +### Synopsis + +Create, read, update, and delete documents. ### Options ``` - -h, --help help for blocks + -h, --help help for document ``` ### Options inherited from parent commands @@ -18,11 +22,17 @@ Interact with the database's blockstore --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO * [defradb client](defradb_client.md) - Interact with a DefraDB node -* [defradb client blocks get](defradb_client_blocks_get.md) - Get a block by its CID from the blockstore +* [defradb client document create](defradb_client_document_create.md) - Create a new document. +* [defradb client document delete](defradb_client_document_delete.md) - Delete documents by key or filter. 
+* [defradb client document get](defradb_client_document_get.md) - View detailed document info. +* [defradb client document keys](defradb_client_document_keys.md) - List all collection document keys. +* [defradb client document save](defradb_client_document_save.md) - Create or update a document. +* [defradb client document update](defradb_client_document_update.md) - Update documents by key or filter. diff --git a/docs/cli/defradb_client_document_create.md b/docs/cli/defradb_client_document_create.md new file mode 100644 index 0000000000..9b2fbd2e49 --- /dev/null +++ b/docs/cli/defradb_client_document_create.md @@ -0,0 +1,44 @@ +## defradb client document create + +Create a new document. + +### Synopsis + +Create a new document. + +Example: create document + defradb client collection create --collection User '{ "name": "Bob" }' + +Example: create documents + defradb client collection create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' + + +``` +defradb client document create --collection [flags] +``` + +### Options + +``` + -c, --collection string Collection name + -h, --help help for create +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. + diff --git a/docs/cli/defradb_client_rpc_replicator_set.md b/docs/cli/defradb_client_document_delete.md similarity index 50% rename from docs/cli/defradb_client_rpc_replicator_set.md rename to docs/cli/defradb_client_document_delete.md index 24b7add648..78ee1ef3d0 100644 --- a/docs/cli/defradb_client_rpc_replicator_set.md +++ b/docs/cli/defradb_client_document_delete.md @@ -1,29 +1,34 @@ -## defradb client rpc replicator set +## defradb client document delete -Set a P2P replicator +Delete documents by key or filter. ### Synopsis -Add a new target replicator. -A replicator replicates one or all collection(s) from this node to another. +Delete documents by key or filter and lists the number of documents deleted. + +Example: delete by key(s) + defradb client document delete --collection User --key bae123,bae456,... +Example: delete by filter + defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' + ``` -defradb client rpc replicator set [-f, --full | -c, --collection] [flags] +defradb client document delete --collection [--filter --key ] [flags] ``` ### Options ``` - -c, --collection stringArray Define the collection for the replicator - -f, --full Set the replicator to act on all collections - -h, --help help for set + -c, --collection string Collection name + --filter string Document filter + -h, --help help for delete + --key strings Document key ``` ### Options inherited from parent commands ``` - --addr string RPC endpoint address (default "0.0.0.0:9161") --logformat string Log format to use. 
Options are csv, json (default "csv") --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") @@ -31,10 +36,11 @@ defradb client rpc replicator set [-f, --full | -c, --collection] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client rpc replicator](defradb_client_rpc_replicator.md) - Configure the replicator system +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. diff --git a/docs/cli/defradb_client_document_get.md b/docs/cli/defradb_client_document_get.md new file mode 100644 index 0000000000..4d23e1e8f2 --- /dev/null +++ b/docs/cli/defradb_client_document_get.md @@ -0,0 +1,42 @@ +## defradb client document get + +View detailed document info. + +### Synopsis + +View detailed document info. + +Example: + defradb client document get --collection User bae123 + + +``` +defradb client document get --collection [--show-deleted] [flags] +``` + +### Options + +``` + -c, --collection string Collection name + -h, --help help for get + --show-deleted Show deleted documents +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. + diff --git a/docs/cli/defradb_client_document_keys.md b/docs/cli/defradb_client_document_keys.md new file mode 100644 index 0000000000..21dd4ca30e --- /dev/null +++ b/docs/cli/defradb_client_document_keys.md @@ -0,0 +1,41 @@ +## defradb client document keys + +List all collection document keys. + +### Synopsis + +List all collection document keys. + +Example: + defradb client collection keys --collection User keys + + +``` +defradb client document keys --collection [flags] +``` + +### Options + +``` + -c, --collection string Collection name + -h, --help help for keys +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. + diff --git a/docs/cli/defradb_client_document_save.md b/docs/cli/defradb_client_document_save.md new file mode 100644 index 0000000000..ff7bd71d41 --- /dev/null +++ b/docs/cli/defradb_client_document_save.md @@ -0,0 +1,42 @@ +## defradb client document save + +Create or update a document. + +### Synopsis + +Create or update a document. + +Example: + defradb client document save --collection User --key bae123 '{ "name": "Bob" }' + + +``` +defradb client document save --collection --key [flags] +``` + +### Options + +``` + -c, --collection string Collection name + -h, --help help for save + --key string Document key +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. + diff --git a/docs/cli/defradb_client_document_update.md b/docs/cli/defradb_client_document_update.md new file mode 100644 index 0000000000..5ed0c13b0b --- /dev/null +++ b/docs/cli/defradb_client_document_update.md @@ -0,0 +1,50 @@ +## defradb client document update + +Update documents by key or filter. + +### Synopsis + +Update documents by key or filter. + +Example: + defradb client document update --collection User --key bae123 '{ "name": "Bob" }' + +Example: update by filter + defradb client document update --collection User --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' + +Example: update by keys + defradb client document update --collection User --key bae123,bae456 --updater '{ "verified": true }' + + +``` +defradb client document update --collection [--filter --key --updater ] [flags] +``` + +### Options + +``` + -c, --collection string Collection name + --filter string Document filter + -h, --help help for update + --key strings Document key + --updater string Document updater +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. + diff --git a/docs/cli/defradb_client_dump.md b/docs/cli/defradb_client_dump.md index 862154bc17..3ebd35343c 100644 --- a/docs/cli/defradb_client_dump.md +++ b/docs/cli/defradb_client_dump.md @@ -22,6 +22,7 @@ defradb client dump [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_index.md b/docs/cli/defradb_client_index.md index 4babb57d46..a876bbcc4f 100644 --- a/docs/cli/defradb_client_index.md +++ b/docs/cli/defradb_client_index.md @@ -22,6 +22,7 @@ Manage (create, drop, or list) collection indexes on a DefraDB node. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index cd54a0085b..96b6418440 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -37,6 +37,7 @@ defradb client index create -c --collection --fields [-n - --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_index_drop.md b/docs/cli/defradb_client_index_drop.md index f551fe4658..c5171b756e 100644 --- a/docs/cli/defradb_client_index_drop.md +++ b/docs/cli/defradb_client_index_drop.md @@ -31,6 +31,7 @@ defradb client index drop -c --collection -n --name [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_index_list.md b/docs/cli/defradb_client_index_list.md index bf434d30f2..c7e96d4e4f 100644 --- a/docs/cli/defradb_client_index_list.md +++ b/docs/cli/defradb_client_index_list.md @@ -33,6 +33,7 @@ defradb client index list [-c --collection ] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_p2p.md 
b/docs/cli/defradb_client_p2p.md index 066b84aebf..1132ee22ad 100644 --- a/docs/cli/defradb_client_p2p.md +++ b/docs/cli/defradb_client_p2p.md @@ -22,12 +22,13 @@ Interact with the DefraDB P2P system --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO * [defradb client](defradb_client.md) - Interact with a DefraDB node -* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system +* [defradb client p2p collection](defradb_client_p2p_collection.md) - Configure the P2P collection system * [defradb client p2p replicator](defradb_client_p2p_replicator.md) - Configure the replicator system diff --git a/docs/cli/defradb_client_p2p_p2pcollection.md b/docs/cli/defradb_client_p2p_collection.md similarity index 71% rename from docs/cli/defradb_client_p2p_p2pcollection.md rename to docs/cli/defradb_client_p2p_collection.md index 0af831e401..6fec3171da 100644 --- a/docs/cli/defradb_client_p2p_p2pcollection.md +++ b/docs/cli/defradb_client_p2p_collection.md @@ -1,4 +1,4 @@ -## defradb client p2p p2pcollection +## defradb client p2p collection Configure the P2P collection system @@ -10,7 +10,7 @@ The selected collections synchronize their events on the pubsub network. ### Options ``` - -h, --help help for p2pcollection + -h, --help help for collection ``` ### Options inherited from parent commands @@ -23,13 +23,14 @@ The selected collections synchronize their events on the pubsub network. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO * [defradb client p2p](defradb_client_p2p.md) - Interact with the DefraDB P2P system -* [defradb client p2p p2pcollection add](defradb_client_p2p_p2pcollection_add.md) - Add P2P collections -* [defradb client p2p p2pcollection getall](defradb_client_p2p_p2pcollection_getall.md) - Get all P2P collections -* [defradb client p2p p2pcollection remove](defradb_client_p2p_p2pcollection_remove.md) - Remove P2P collections +* [defradb client p2p collection add](defradb_client_p2p_collection_add.md) - Add P2P collections +* [defradb client p2p collection getall](defradb_client_p2p_collection_getall.md) - Get all P2P collections +* [defradb client p2p collection remove](defradb_client_p2p_collection_remove.md) - Remove P2P collections diff --git a/docs/cli/defradb_client_p2p_p2pcollection_add.md b/docs/cli/defradb_client_p2p_collection_add.md similarity index 80% rename from docs/cli/defradb_client_p2p_p2pcollection_add.md rename to docs/cli/defradb_client_p2p_collection_add.md index cf39c6cd4f..b5f3586144 100644 --- a/docs/cli/defradb_client_p2p_p2pcollection_add.md +++ b/docs/cli/defradb_client_p2p_collection_add.md @@ -1,4 +1,4 @@ -## defradb client p2p p2pcollection add +## defradb client p2p collection add Add P2P collections @@ -8,7 +8,7 @@ Add P2P collections to the synchronized pubsub topics. The collections are synchronized between nodes of a pubsub network. 
``` -defradb client p2p p2pcollection add [collectionID] [flags] +defradb client p2p collection add [collectionID] [flags] ``` ### Options @@ -27,10 +27,11 @@ defradb client p2p p2pcollection add [collectionID] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system +* [defradb client p2p collection](defradb_client_p2p_collection.md) - Configure the P2P collection system diff --git a/docs/cli/defradb_client_p2p_p2pcollection_getall.md b/docs/cli/defradb_client_p2p_collection_getall.md similarity index 80% rename from docs/cli/defradb_client_p2p_p2pcollection_getall.md rename to docs/cli/defradb_client_p2p_collection_getall.md index 247f395007..46fcefc407 100644 --- a/docs/cli/defradb_client_p2p_p2pcollection_getall.md +++ b/docs/cli/defradb_client_p2p_collection_getall.md @@ -1,4 +1,4 @@ -## defradb client p2p p2pcollection getall +## defradb client p2p collection getall Get all P2P collections @@ -8,7 +8,7 @@ Get all P2P collections in the pubsub topics. This is the list of collections of the node that are synchronized on the pubsub network. ``` -defradb client p2p p2pcollection getall [flags] +defradb client p2p collection getall [flags] ``` ### Options @@ -27,10 +27,11 @@ defradb client p2p p2pcollection getall [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system +* [defradb client p2p collection](defradb_client_p2p_collection.md) - Configure the P2P collection system diff --git a/docs/cli/defradb_client_p2p_p2pcollection_remove.md b/docs/cli/defradb_client_p2p_collection_remove.md similarity index 79% rename from docs/cli/defradb_client_p2p_p2pcollection_remove.md rename to docs/cli/defradb_client_p2p_collection_remove.md index 766805e374..04492d2871 100644 --- a/docs/cli/defradb_client_p2p_p2pcollection_remove.md +++ b/docs/cli/defradb_client_p2p_collection_remove.md @@ -1,4 +1,4 @@ -## defradb client p2p p2pcollection remove +## defradb client p2p collection remove Remove P2P collections @@ -8,7 +8,7 @@ Remove P2P collections from the followed pubsub topics. The removed collections will no longer be synchronized between nodes. 
``` -defradb client p2p p2pcollection remove [collectionID] [flags] +defradb client p2p collection remove [collectionID] [flags] ``` ### Options @@ -27,10 +27,11 @@ defradb client p2p p2pcollection remove [collectionID] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client p2p p2pcollection](defradb_client_p2p_p2pcollection.md) - Configure the P2P collection system +* [defradb client p2p collection](defradb_client_p2p_collection.md) - Configure the P2P collection system diff --git a/docs/cli/defradb_client_p2p_replicator.md b/docs/cli/defradb_client_p2p_replicator.md index e4b9340ef1..26f4041802 100644 --- a/docs/cli/defradb_client_p2p_replicator.md +++ b/docs/cli/defradb_client_p2p_replicator.md @@ -23,6 +23,7 @@ A replicator replicates one or all collection(s) from one node to another. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_p2p_replicator_delete.md b/docs/cli/defradb_client_p2p_replicator_delete.md index 74475004cb..9ffbc115d3 100644 --- a/docs/cli/defradb_client_p2p_replicator_delete.md +++ b/docs/cli/defradb_client_p2p_replicator_delete.md @@ -26,6 +26,7 @@ defradb client p2p replicator delete [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_p2p_replicator_getall.md b/docs/cli/defradb_client_p2p_replicator_getall.md index a24c3d8e53..080011ae65 100644 --- a/docs/cli/defradb_client_p2p_replicator_getall.md +++ b/docs/cli/defradb_client_p2p_replicator_getall.md @@ -27,6 +27,7 @@ defradb client p2p replicator getall [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_p2p_replicator_set.md b/docs/cli/defradb_client_p2p_replicator_set.md index 9fc45b5b9b..23d7b81404 100644 --- a/docs/cli/defradb_client_p2p_replicator_set.md +++ b/docs/cli/defradb_client_p2p_replicator_set.md @@ -15,8 +15,8 @@ defradb client p2p replicator set [-c, --collection] [flags] ### Options ``` - -c, --collection stringArray Define the collection for the replicator - -h, --help help for set + -c, --collection strings Define the collection for the replicator + -h, --help help for set ``` ### Options inherited from parent commands @@ -29,6 +29,7 @@ defradb client p2p replicator set [-c, --collection] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint 
Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_query.md b/docs/cli/defradb_client_query.md index 8f5c3477c3..5e748229e2 100644 --- a/docs/cli/defradb_client_query.md +++ b/docs/cli/defradb_client_query.md @@ -41,6 +41,7 @@ defradb client query [query request] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_rpc_addreplicator.md b/docs/cli/defradb_client_rpc_addreplicator.md deleted file mode 100644 index e80b667f18..0000000000 --- a/docs/cli/defradb_client_rpc_addreplicator.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb client rpc addreplicator - -Add a new replicator - -### Synopsis - -Use this command if you wish to add a new target replicator -for the P2P data sync system. - -``` -defradb client rpc addreplicator [flags] -``` - -### Options - -``` - -h, --help help for addreplicator -``` - -### Options inherited from parent commands - -``` - --addr string gRPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default "$HOME/.defradb") - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc](defradb_client_rpc.md) - Interact with a DefraDB gRPC server - diff --git a/docs/cli/defradb_client_rpc_p2pcollection.md b/docs/cli/defradb_client_rpc_p2pcollection.md deleted file mode 100644 index ede32521d4..0000000000 --- a/docs/cli/defradb_client_rpc_p2pcollection.md +++ /dev/null @@ -1,36 +0,0 @@ -## defradb client rpc p2pcollection - -Configure the P2P collection system - -### Synopsis - -Add, delete, or get the list of P2P collections. -The selected collections synchronize their events on the pubsub network. - -### Options - -``` - -h, --help help for p2pcollection -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc](defradb_client_rpc.md) - Interact with a DefraDB node via RPC -* [defradb client rpc p2pcollection add](defradb_client_rpc_p2pcollection_add.md) - Add P2P collections -* [defradb client rpc p2pcollection getall](defradb_client_rpc_p2pcollection_getall.md) - Get all P2P collections -* [defradb client rpc p2pcollection remove](defradb_client_rpc_p2pcollection_remove.md) - Remove P2P collections - diff --git a/docs/cli/defradb_client_rpc_p2pcollection_add.md b/docs/cli/defradb_client_rpc_p2pcollection_add.md deleted file mode 100644 index 92ac0d82e6..0000000000 --- a/docs/cli/defradb_client_rpc_p2pcollection_add.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb client rpc p2pcollection add - -Add P2P collections - -### Synopsis - -Add P2P collections to the synchronized pubsub topics. -The collections are synchronized between nodes of a pubsub network. - -``` -defradb client rpc p2pcollection add [collectionID] [flags] -``` - -### Options - -``` - -h, --help help for add -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc p2pcollection](defradb_client_rpc_p2pcollection.md) - Configure the P2P collection system - diff --git a/docs/cli/defradb_client_rpc_p2pcollection_getall.md b/docs/cli/defradb_client_rpc_p2pcollection_getall.md deleted file mode 100644 index 946a2e0156..0000000000 --- a/docs/cli/defradb_client_rpc_p2pcollection_getall.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb client rpc p2pcollection getall - -Get all P2P collections - -### Synopsis - -Get all P2P collections in the pubsub topics. -This is the list of collections of the node that are synchronized on the pubsub network. - -``` -defradb client rpc p2pcollection getall [flags] -``` - -### Options - -``` - -h, --help help for getall -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc p2pcollection](defradb_client_rpc_p2pcollection.md) - Configure the P2P collection system - diff --git a/docs/cli/defradb_client_rpc_p2pcollection_remove.md b/docs/cli/defradb_client_rpc_p2pcollection_remove.md deleted file mode 100644 index 77658b4d50..0000000000 --- a/docs/cli/defradb_client_rpc_p2pcollection_remove.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb client rpc p2pcollection remove - -Remove P2P collections - -### Synopsis - -Remove P2P collections from the followed pubsub topics. -The removed collections will no longer be synchronized between nodes. - -``` -defradb client rpc p2pcollection remove [collectionID] [flags] -``` - -### Options - -``` - -h, --help help for remove -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc p2pcollection](defradb_client_rpc_p2pcollection.md) - Configure the P2P collection system - diff --git a/docs/cli/defradb_client_rpc_replicator.md b/docs/cli/defradb_client_rpc_replicator.md deleted file mode 100644 index e88933791c..0000000000 --- a/docs/cli/defradb_client_rpc_replicator.md +++ /dev/null @@ -1,36 +0,0 @@ -## defradb client rpc replicator - -Configure the replicator system - -### Synopsis - -Configure the replicator system. Add, delete, or get the list of persisted replicators. -A replicator replicates one or all collection(s) from one node to another. - -### Options - -``` - -h, --help help for replicator -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc](defradb_client_rpc.md) - Interact with a DefraDB node via RPC -* [defradb client rpc replicator delete](defradb_client_rpc_replicator_delete.md) - Delete a replicator. 
It will stop synchronizing -* [defradb client rpc replicator getall](defradb_client_rpc_replicator_getall.md) - Get all replicators -* [defradb client rpc replicator set](defradb_client_rpc_replicator_set.md) - Set a P2P replicator - diff --git a/docs/cli/defradb_client_rpc_replicator_delete.md b/docs/cli/defradb_client_rpc_replicator_delete.md deleted file mode 100644 index c851d2f508..0000000000 --- a/docs/cli/defradb_client_rpc_replicator_delete.md +++ /dev/null @@ -1,38 +0,0 @@ -## defradb client rpc replicator delete - -Delete a replicator. It will stop synchronizing - -### Synopsis - -Delete a replicator. It will stop synchronizing. - -``` -defradb client rpc replicator delete [-f, --full | -c, --collection] [flags] -``` - -### Options - -``` - -c, --collection stringArray Define the collection for the replicator - -f, --full Set the replicator to act on all collections - -h, --help help for delete -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc replicator](defradb_client_rpc_replicator.md) - Configure the replicator system - diff --git a/docs/cli/defradb_client_rpc_replicator_getall.md b/docs/cli/defradb_client_rpc_replicator_getall.md deleted file mode 100644 index 2449dba1fd..0000000000 --- a/docs/cli/defradb_client_rpc_replicator_getall.md +++ /dev/null @@ -1,37 +0,0 @@ -## defradb client rpc replicator getall - -Get all replicators - -### Synopsis - -Get all the replicators active in the P2P data sync system. -These are the replicators that are currently replicating data from one node to another. - -``` -defradb client rpc replicator getall [flags] -``` - -### Options - -``` - -h, --help help for getall -``` - -### Options inherited from parent commands - -``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client rpc replicator](defradb_client_rpc_replicator.md) - Configure the replicator system - diff --git a/docs/cli/defradb_client_schema.md b/docs/cli/defradb_client_schema.md index 615a5b5d29..6b04bb2a5d 100644 --- a/docs/cli/defradb_client_schema.md +++ b/docs/cli/defradb_client_schema.md @@ -22,6 +22,7 @@ Make changes, updates, or look for existing schema types. 
--logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_schema_add.md b/docs/cli/defradb_client_schema_add.md index b278431034..aa73039d0c 100644 --- a/docs/cli/defradb_client_schema_add.md +++ b/docs/cli/defradb_client_schema_add.md @@ -38,6 +38,7 @@ defradb client schema add [schema] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_schema_migration.md b/docs/cli/defradb_client_schema_migration.md index 0a20968378..91f2f324e3 100644 --- a/docs/cli/defradb_client_schema_migration.md +++ b/docs/cli/defradb_client_schema_migration.md @@ -22,12 +22,16 @@ Make set or look for existing schema migrations on a DefraDB node. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node +* [defradb client schema migration down](defradb_client_schema_migration_down.md) - Reverse a migration on the specified schema version. * [defradb client schema migration get](defradb_client_schema_migration_get.md) - Gets the schema migrations within DefraDB +* [defradb client schema migration reload](defradb_client_schema_migration_reload.md) - Reload the schema migrations within DefraDB * [defradb client schema migration set](defradb_client_schema_migration_set.md) - Set a schema migration within DefraDB +* [defradb client schema migration up](defradb_client_schema_migration_up.md) - Runs a migration on the specified schema version. diff --git a/docs/cli/defradb_client_schema_migration_down.md b/docs/cli/defradb_client_schema_migration_down.md new file mode 100644 index 0000000000..3d8a2eb6a5 --- /dev/null +++ b/docs/cli/defradb_client_schema_migration_down.md @@ -0,0 +1,37 @@ +## defradb client schema migration down + +Reverse a migration on the specified schema version. + +### Synopsis + +Reverse a migration on the specified schema version. + +``` +defradb client schema migration down --version [flags] +``` + +### Options + +``` + -h, --help help for down + --version string Schema version id +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance + diff --git a/docs/cli/defradb_client_schema_migration_get.md b/docs/cli/defradb_client_schema_migration_get.md index d2164ed6bd..20ed8edb91 100644 --- a/docs/cli/defradb_client_schema_migration_get.md +++ b/docs/cli/defradb_client_schema_migration_get.md @@ -31,6 +31,7 @@ defradb client schema migration get [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_schema_migration_reload.md b/docs/cli/defradb_client_schema_migration_reload.md new file mode 100644 index 0000000000..f9acfd2d19 --- /dev/null +++ b/docs/cli/defradb_client_schema_migration_reload.md @@ -0,0 +1,36 @@ +## defradb client schema migration reload + +Reload the schema migrations within DefraDB + +### Synopsis + +Reload the schema migrations within DefraDB + +``` +defradb client schema migration reload [flags] +``` + +### Options + +``` + -h, --help help for reload +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance + diff --git a/docs/cli/defradb_client_schema_migration_set.md b/docs/cli/defradb_client_schema_migration_set.md index 8013fd2a29..b9626bfeed 100644 --- a/docs/cli/defradb_client_schema_migration_set.md +++ b/docs/cli/defradb_client_schema_migration_set.md @@ -38,6 +38,7 @@ defradb client schema migration set [src] [dst] [cfg] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_schema_migration_up.md b/docs/cli/defradb_client_schema_migration_up.md new file mode 100644 index 0000000000..a637f2f28d --- /dev/null +++ b/docs/cli/defradb_client_schema_migration_up.md @@ -0,0 +1,37 @@ +## defradb client schema migration up + +Runs a migration on the specified schema version. + +### Synopsis + +Runs a migration on the specified schema version. + +``` +defradb client schema migration up --version [flags] +``` + +### Options + +``` + -h, --help help for up + --version string Schema version id +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client schema migration](defradb_client_schema_migration.md) - Interact with the schema migration system of a running DefraDB instance + diff --git a/docs/cli/defradb_client_schema_patch.md b/docs/cli/defradb_client_schema_patch.md index ec64d293e0..ba04faddf2 100644 --- a/docs/cli/defradb_client_schema_patch.md +++ b/docs/cli/defradb_client_schema_patch.md @@ -40,6 +40,7 @@ defradb client schema patch [schema] [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` diff --git a/docs/cli/defradb_client_rpc.md b/docs/cli/defradb_client_tx.md similarity index 65% rename from docs/cli/defradb_client_rpc.md rename to docs/cli/defradb_client_tx.md index d7046433c5..4feab4af7b 100644 --- a/docs/cli/defradb_client_rpc.md +++ b/docs/cli/defradb_client_tx.md @@ -1,16 +1,15 @@ -## defradb client rpc +## defradb client tx -Interact with a DefraDB node via RPC +Create, commit, and discard DefraDB transactions ### Synopsis -Interact with a DefraDB node via RPC. +Create, commit, and discard DefraDB transactions ### Options ``` - --addr string RPC endpoint address (default "0.0.0.0:9161") - -h, --help help for rpc + -h, --help help for tx ``` ### Options inherited from parent commands @@ -23,12 +22,14 @@ Interact with a DefraDB node via RPC. --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO * [defradb client](defradb_client.md) - Interact with a DefraDB node -* [defradb client rpc p2pcollection](defradb_client_rpc_p2pcollection.md) - Configure the P2P collection system -* [defradb client rpc replicator](defradb_client_rpc_replicator.md) - Configure the replicator system +* [defradb client tx commit](defradb_client_tx_commit.md) - Commit a DefraDB transaction. +* [defradb client tx create](defradb_client_tx_create.md) - Create a new DefraDB transaction. +* [defradb client tx discard](defradb_client_tx_discard.md) - Discard a DefraDB transaction. diff --git a/docs/cli/defradb_client_peerid.md b/docs/cli/defradb_client_tx_commit.md similarity index 73% rename from docs/cli/defradb_client_peerid.md rename to docs/cli/defradb_client_tx_commit.md index f4596111c8..21f0b50325 100644 --- a/docs/cli/defradb_client_peerid.md +++ b/docs/cli/defradb_client_tx_commit.md @@ -1,19 +1,19 @@ -## defradb client peerid +## defradb client tx commit -Get the PeerID of the node +Commit a DefraDB transaction. ### Synopsis -Get the PeerID of the node. +Commit a DefraDB transaction. 
``` -defradb client peerid [flags] +defradb client tx commit [id] [flags] ``` ### Options ``` - -h, --help help for peerid + -h, --help help for commit ``` ### Options inherited from parent commands @@ -26,10 +26,11 @@ defradb client peerid [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client](defradb_client.md) - Interact with a DefraDB node +* [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions diff --git a/docs/cli/defradb_client_tx_create.md b/docs/cli/defradb_client_tx_create.md new file mode 100644 index 0000000000..8ba600b611 --- /dev/null +++ b/docs/cli/defradb_client_tx_create.md @@ -0,0 +1,38 @@ +## defradb client tx create + +Create a new DefraDB transaction. + +### Synopsis + +Create a new DefraDB transaction. + +``` +defradb client tx create [flags] +``` + +### Options + +``` + --concurrent Transaction is concurrent + -h, --help help for create + --read-only Transaction is read only +``` + +### Options inherited from parent commands + +``` + --logformat string Log format to use. Options are csv, json (default "csv") + --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... + --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") + --lognocolor Disable colored log output + --logoutput string Log output path (default "stderr") + --logtrace Include stacktrace in error and fatal logs + --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID + --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") +``` + +### SEE ALSO + +* [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions + diff --git a/docs/cli/defradb_client_schema_list.md b/docs/cli/defradb_client_tx_discard.md similarity index 71% rename from docs/cli/defradb_client_schema_list.md rename to docs/cli/defradb_client_tx_discard.md index ffbe253e31..d1f0bb6025 100644 --- a/docs/cli/defradb_client_schema_list.md +++ b/docs/cli/defradb_client_tx_discard.md @@ -1,15 +1,19 @@ -## defradb client schema list +## defradb client tx discard -List schema types with their respective fields +Discard a DefraDB transaction. + +### Synopsis + +Discard a DefraDB transaction. 
``` -defradb client schema list [flags] +defradb client tx discard [id] [flags] ``` ### Options ``` - -h, --help help for list + -h, --help help for discard ``` ### Options inherited from parent commands @@ -22,10 +26,11 @@ defradb client schema list [flags] --logoutput string Log output path (default "stderr") --logtrace Include stacktrace in error and fatal logs --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) + --tx uint Transaction ID --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") ``` ### SEE ALSO -* [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node +* [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions diff --git a/docs/cli/defradb_server-dump.md b/docs/cli/defradb_server-dump.md index 58a799e229..0b91e10232 100644 --- a/docs/cli/defradb_server-dump.md +++ b/docs/cli/defradb_server-dump.md @@ -9,7 +9,8 @@ defradb server-dump [flags] ### Options ``` - -h, --help help for server-dump + -h, --help help for server-dump + --store string Datastore to use. Options are badger, memory (default "badger") ``` ### Options inherited from parent commands From 9db73fdede505b6cd2a0b6f47d0f4d7ea2d75ee0 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 8 Sep 2023 11:56:50 -0700 Subject: [PATCH 067/107] fix linter error --- cli/document_update.go | 6 ++++-- docs/cli/defradb_client_document_update.md | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/cli/document_update.go b/cli/document_update.go index 0e10fd282e..5475f3f631 100644 --- a/cli/document_update.go +++ b/cli/document_update.go @@ -33,10 +33,12 @@ Example: defradb client document update --collection User --key bae123 '{ "name": "Bob" }' Example: update by filter - defradb client document update --collection User --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' + defradb client document update --collection User \ + --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' Example: update by keys - defradb client document update --collection User --key bae123,bae456 --updater '{ "verified": true }' + defradb client document update --collection User \ + --key bae123,bae456 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { diff --git a/docs/cli/defradb_client_document_update.md b/docs/cli/defradb_client_document_update.md index 5ed0c13b0b..afcdd7aae7 100644 --- a/docs/cli/defradb_client_document_update.md +++ b/docs/cli/defradb_client_document_update.md @@ -10,10 +10,12 @@ Example: defradb client document update --collection User --key bae123 '{ "name": "Bob" }' Example: update by filter - defradb client document update --collection User --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' + defradb client document update --collection User \ + --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' Example: update by keys - defradb client document update --collection User --key bae123,bae456 --updater '{ "verified": true }' + defradb client document update --collection User \ + --key bae123,bae456 --updater '{ "verified": true }' ``` From df81848c19572287497d4097f4b1ebe93576ff06 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 8 Sep 2023 12:55:01 -0700 Subject: [PATCH 068/107] cli doc updates --- cli/document_create.go | 4 ++-- cli/document_delete.go | 2 +- cli/document_get.go | 2 +- 
cli/document_keys.go | 2 +- cli/document_save.go | 2 +- cli/document_update.go | 4 ++-- docs/cli/defradb_client_document_create.md | 4 ++-- docs/cli/defradb_client_document_delete.md | 2 +- docs/cli/defradb_client_document_get.md | 2 +- docs/cli/defradb_client_document_keys.md | 2 +- docs/cli/defradb_client_document_save.md | 2 +- docs/cli/defradb_client_document_update.md | 4 ++-- 12 files changed, 16 insertions(+), 16 deletions(-) diff --git a/cli/document_create.go b/cli/document_create.go index 372d6e2ab6..a49dbbf57e 100644 --- a/cli/document_create.go +++ b/cli/document_create.go @@ -28,10 +28,10 @@ func MakeDocumentCreateCommand() *cobra.Command { Long: `Create a new document. Example: create document - defradb client collection create --collection User '{ "name": "Bob" }' + defradb client document create --collection User '{ "name": "Bob" }' Example: create documents - defradb client collection create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' + defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { diff --git a/cli/document_delete.go b/cli/document_delete.go index 73afc2d2bd..687567cf46 100644 --- a/cli/document_delete.go +++ b/cli/document_delete.go @@ -29,7 +29,7 @@ func MakeDocumentDeleteCommand() *cobra.Command { Long: `Delete documents by key or filter and lists the number of documents deleted. Example: delete by key(s) - defradb client document delete --collection User --key bae123,bae456,... + defradb client document delete --collection User --key bae-123,bae-456 Example: delete by filter defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' diff --git a/cli/document_get.go b/cli/document_get.go index a2bc5c9ff6..ddab716326 100644 --- a/cli/document_get.go +++ b/cli/document_get.go @@ -26,7 +26,7 @@ func MakeDocumentGetCommand() *cobra.Command { Long: `View detailed document info. Example: - defradb client document get --collection User bae123 + defradb client document get --collection User bae-123 `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { diff --git a/cli/document_keys.go b/cli/document_keys.go index c9f5f0eebd..6cd22c21dd 100644 --- a/cli/document_keys.go +++ b/cli/document_keys.go @@ -26,7 +26,7 @@ func MakeDocumentKeysCommand() *cobra.Command { Long: `List all collection document keys. Example: - defradb client collection keys --collection User keys + defradb client document keys --collection User keys `, RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) diff --git a/cli/document_save.go b/cli/document_save.go index 175b21609e..f888734861 100644 --- a/cli/document_save.go +++ b/cli/document_save.go @@ -27,7 +27,7 @@ func MakeDocumentSaveCommand() *cobra.Command { Long: `Create or update a document. Example: - defradb client document save --collection User --key bae123 '{ "name": "Bob" }' + defradb client document save --collection User --key bae-123 '{ "name": "Bob" }' `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { diff --git a/cli/document_update.go b/cli/document_update.go index 5475f3f631..6d6a137190 100644 --- a/cli/document_update.go +++ b/cli/document_update.go @@ -30,7 +30,7 @@ func MakeDocumentUpdateCommand() *cobra.Command { Long: `Update documents by key or filter. 
Example: - defradb client document update --collection User --key bae123 '{ "name": "Bob" }' + defradb client document update --collection User --key bae-123 '{ "name": "Bob" }' Example: update by filter defradb client document update --collection User \ @@ -38,7 +38,7 @@ Example: update by filter Example: update by keys defradb client document update --collection User \ - --key bae123,bae456 --updater '{ "verified": true }' + --key bae-123,bae-456 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { diff --git a/docs/cli/defradb_client_document_create.md b/docs/cli/defradb_client_document_create.md index 9b2fbd2e49..99dbd0d7f5 100644 --- a/docs/cli/defradb_client_document_create.md +++ b/docs/cli/defradb_client_document_create.md @@ -7,10 +7,10 @@ Create a new document. Create a new document. Example: create document - defradb client collection create --collection User '{ "name": "Bob" }' + defradb client document create --collection User '{ "name": "Bob" }' Example: create documents - defradb client collection create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' + defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' ``` diff --git a/docs/cli/defradb_client_document_delete.md b/docs/cli/defradb_client_document_delete.md index 78ee1ef3d0..96a0b1e973 100644 --- a/docs/cli/defradb_client_document_delete.md +++ b/docs/cli/defradb_client_document_delete.md @@ -7,7 +7,7 @@ Delete documents by key or filter. Delete documents by key or filter and lists the number of documents deleted. Example: delete by key(s) - defradb client document delete --collection User --key bae123,bae456,... + defradb client document delete --collection User --key bae-123,bae-456 Example: delete by filter defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' diff --git a/docs/cli/defradb_client_document_get.md b/docs/cli/defradb_client_document_get.md index 4d23e1e8f2..600712ec0b 100644 --- a/docs/cli/defradb_client_document_get.md +++ b/docs/cli/defradb_client_document_get.md @@ -7,7 +7,7 @@ View detailed document info. View detailed document info. Example: - defradb client document get --collection User bae123 + defradb client document get --collection User bae-123 ``` diff --git a/docs/cli/defradb_client_document_keys.md b/docs/cli/defradb_client_document_keys.md index 21dd4ca30e..e436f4df6b 100644 --- a/docs/cli/defradb_client_document_keys.md +++ b/docs/cli/defradb_client_document_keys.md @@ -7,7 +7,7 @@ List all collection document keys. List all collection document keys. Example: - defradb client collection keys --collection User keys + defradb client document keys --collection User keys ``` diff --git a/docs/cli/defradb_client_document_save.md b/docs/cli/defradb_client_document_save.md index ff7bd71d41..41f59a860c 100644 --- a/docs/cli/defradb_client_document_save.md +++ b/docs/cli/defradb_client_document_save.md @@ -7,7 +7,7 @@ Create or update a document. Create or update a document. Example: - defradb client document save --collection User --key bae123 '{ "name": "Bob" }' + defradb client document save --collection User --key bae-123 '{ "name": "Bob" }' ``` diff --git a/docs/cli/defradb_client_document_update.md b/docs/cli/defradb_client_document_update.md index afcdd7aae7..3efc67ebf0 100644 --- a/docs/cli/defradb_client_document_update.md +++ b/docs/cli/defradb_client_document_update.md @@ -7,7 +7,7 @@ Update documents by key or filter. 
Update documents by key or filter. Example: - defradb client document update --collection User --key bae123 '{ "name": "Bob" }' + defradb client document update --collection User --key bae-123 '{ "name": "Bob" }' Example: update by filter defradb client document update --collection User \ @@ -15,7 +15,7 @@ Example: update by filter Example: update by keys defradb client document update --collection User \ - --key bae123,bae456 --updater '{ "verified": true }' + --key bae-123,bae-456 --updater '{ "verified": true }' ``` From 3f4c94025787bee4bdc77387e36572d10c18fa20 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 8 Sep 2023 14:30:28 -0700 Subject: [PATCH 069/107] setup test matrix. --- .github/workflows/run-tests.yml | 29 ++++++----- .github/workflows/test-collection-named.yml | 54 --------------------- .github/workflows/test-gql-mutations.yml | 48 ------------------ 3 files changed, 18 insertions(+), 113 deletions(-) delete mode 100644 .github/workflows/test-collection-named.yml delete mode 100644 .github/workflows/test-gql-mutations.yml diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index bfa696a283..89ed78a158 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -20,10 +20,24 @@ on: jobs: run-tests: - name: Run tests job + name: Run tests job matrix runs-on: ubuntu-latest + strategy: + matrix: + client-type: [go, http] + database-type: [badger-file, badger-memory] + mutation-type: [gql, collection-named, collection-save] + tests: [names, lens, cli] + + env: + DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} + DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} + DEFRA_BADGER_MEMORY: ${{ matrix.database-type == 'badger-memory' }} + DEFRA_BADGER_FILE: ${{ matrix.database-type == 'badger-file' }} + DEFRA_MUTATION_TYPE: ${{ matrix.mutation-type }} + steps: - name: Checkout code into the directory uses: actions/checkout@v3 @@ -38,13 +52,6 @@ jobs: run: | make deps:modules make deps:test - - - name: Build binary - run: make build - - # This is to ensure tests pass with a running server. - - name: Start server from binary - run: ./build/defradb start & - - - name: Run the tests, showing name of each test - run: make test:ci + + - name: Run the tests + run: make test:${{ matrix.tests }} diff --git a/.github/workflows/test-collection-named.yml b/.github/workflows/test-collection-named.yml deleted file mode 100644 index 5adabe4fdf..0000000000 --- a/.github/workflows/test-collection-named.yml +++ /dev/null @@ -1,54 +0,0 @@ -# Copyright 2023 Democratized Data Foundation -# -# Use of this software is governed by the Business Source License -# included in the file licenses/BSL.txt. -# -# As of the Change Date specified in that file, in accordance with -# the Business Source License, use of this software will be governed -# by the Apache License, Version 2.0, included in the file -# licenses/APL.txt. - -name: Run Collection Named Mutations Tests Workflow - -# This workflow runs the test suite with any supporting mutation test actions -# running their mutations via their corresponding named [Collection] call. -# -# For example, CreateDoc will call [Collection.Create], and -# UpdateDoc will call [Collection.Update]. 
- -on: - pull_request: - branches: - - master - - develop - - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - branches: - - master - - develop - -jobs: - test-collection-named-mutations: - name: Test Collection Named Mutations job - - runs-on: ubuntu-latest - - steps: - - name: Checkout code into the directory - uses: actions/checkout@v3 - - - name: Setup Go environment explicitly - uses: actions/setup-go@v3 - with: - go-version: "1.20" - check-latest: true - - - name: Build dependencies - run: | - make deps:modules - make deps:test - - - name: Run tests with Collection Named mutations - run: make test:ci-col-named-mutations diff --git a/.github/workflows/test-gql-mutations.yml b/.github/workflows/test-gql-mutations.yml deleted file mode 100644 index 827dd22098..0000000000 --- a/.github/workflows/test-gql-mutations.yml +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright 2022 Democratized Data Foundation -# -# Use of this software is governed by the Business Source License -# included in the file licenses/BSL.txt. -# -# As of the Change Date specified in that file, in accordance with -# the Business Source License, use of this software will be governed -# by the Apache License, Version 2.0, included in the file -# licenses/APL.txt. - -name: Run GQL Mutations Tests Workflow - -on: - pull_request: - branches: - - master - - develop - - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - branches: - - master - - develop - -jobs: - test-gql-mutations: - name: Test GQL mutations job - - runs-on: ubuntu-latest - - steps: - - name: Checkout code into the directory - uses: actions/checkout@v3 - - - name: Setup Go environment explicitly - uses: actions/setup-go@v3 - with: - go-version: "1.20" - check-latest: true - - - name: Build dependencies - run: | - make deps:modules - make deps:test - - - name: Run tests with gql mutations - run: make test:ci-gql-mutations From 25eb1cc5c3e36a27531b6cf61d767cf00580c6bf Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 11 Sep 2023 09:24:24 -0700 Subject: [PATCH 070/107] add code coverage to run-tests workflow. remove code-test-coverage workflow. replace Makefile actions with standard actions. --- .github/workflows/code-test-coverage.yml | 76 ------------------------ .github/workflows/run-tests.yml | 51 ++++++++++++++-- 2 files changed, 45 insertions(+), 82 deletions(-) delete mode 100644 .github/workflows/code-test-coverage.yml diff --git a/.github/workflows/code-test-coverage.yml b/.github/workflows/code-test-coverage.yml deleted file mode 100644 index 65c0a92f1f..0000000000 --- a/.github/workflows/code-test-coverage.yml +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2022 Democratized Data Foundation -# -# Use of this software is governed by the Business Source License -# included in the file licenses/BSL.txt. -# -# As of the Change Date specified in that file, in accordance with -# the Business Source License, use of this software will be governed -# by the Apache License, Version 2.0, included in the file -# licenses/APL.txt. 
- -name: Code Test Coverage Workflow - -on: - pull_request: - branches: - - master - - develop - - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - branches: - - master - - develop - -jobs: - code-test-coverage: - name: Code test coverage job - - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v3 - - - name: Setup Go environment explicitly - uses: actions/setup-go@v3 - with: - go-version: "1.20" - check-latest: true - - - name: Generate full test coverage report using go-acc - run: make test:coverage - - - name: Upload coverage to Codecov without token, retry on failure - env: - codecov_secret: ${{ secrets.CODECOV_TOKEN }} - if: env.codecov_secret == '' - uses: Wandalen/wretry.action@v1.0.36 - with: - attempt_limit: 5 - attempt_delay: 10000 - action: codecov/codecov-action@v3 - with: | - name: defradb-codecov - files: ./coverage.txt - flags: all-tests - os: 'linux' - fail_ci_if_error: true - verbose: true - - - name: Upload coverage to Codecov with token - env: - codecov_secret: ${{ secrets.CODECOV_TOKEN }} - if: env.codecov_secret != '' - uses: codecov/codecov-action@v3 - with: - token: ${{ env.codecov_secret }} - name: defradb-codecov - files: ./coverage.txt - flags: all-tests - os: 'linux' - fail_ci_if_error: true - verbose: true - # path_to_write_report: ./coverage/codecov_report.txt - # directory: ./coverage/reports/ diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 89ed78a158..e893e9169a 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -29,7 +29,6 @@ jobs: client-type: [go, http] database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] - tests: [names, lens, cli] env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} @@ -48,10 +47,50 @@ jobs: go-version: "1.20" check-latest: true - - name: Build dependencies + - name: Install Go dependencies run: | - make deps:modules - make deps:test - + go mod download + go install gotest.tools/gotestsum@latest + go install github.com/ory/go-acc@latest + + - name: Build Lens dependencies + run: | + rustup target add wasm32-unknown-unknown + cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_set_default/Cargo.toml" + cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_remove/Cargo.toml" + cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_copy/Cargo.toml" + - name: Run the tests - run: make test:${{ matrix.tests }} + run: gotestsum ./... --format testname -- -race -shuffle=on -timeout 300s + + - name: Generate test coverage + run: go-acc ./... 
--output=coverage.txt --covermode=atomic -- -failfast + + - name: Upload coverage to Codecov without token, retry on failure + env: + codecov_secret: ${{ secrets.CODECOV_TOKEN }} + if: env.codecov_secret == '' + uses: Wandalen/wretry.action@v1.0.36 + with: + attempt_limit: 5 + attempt_delay: 10000 + action: codecov/codecov-action@v3 + with: | + name: defradb-codecov + files: ./coverage.txt + os: 'linux' + fail_ci_if_error: true + verbose: true + + - name: Upload coverage to Codecov with token + env: + codecov_secret: ${{ secrets.CODECOV_TOKEN }} + if: env.codecov_secret != '' + uses: codecov/codecov-action@v3 + with: + token: ${{ env.codecov_secret }} + name: defradb-codecov + files: ./coverage.txt + os: 'linux' + fail_ci_if_error: true + verbose: true From 95200e6f6dfb81893fd6ec02c564da7380390963 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 12 Sep 2023 14:32:06 -0700 Subject: [PATCH 071/107] use makefile for run-tests workflow. use a single step for codecov upload --- .github/workflows/run-tests.yml | 56 +++++++++++++-------------------- Makefile | 6 ++++ 2 files changed, 27 insertions(+), 35 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index e893e9169a..029f18cfd4 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -47,50 +47,36 @@ jobs: go-version: "1.20" check-latest: true - - name: Install Go dependencies + - name: Build dependencies run: | - go mod download - go install gotest.tools/gotestsum@latest - go install github.com/ory/go-acc@latest - - - name: Build Lens dependencies - run: | - rustup target add wasm32-unknown-unknown - cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_set_default/Cargo.toml" - cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_remove/Cargo.toml" - cargo build --target wasm32-unknown-unknown --manifest-path "./tests/lenses/rust_wasm32_copy/Cargo.toml" + make deps:modules + make deps:test - name: Run the tests - run: gotestsum ./... --format testname -- -race -shuffle=on -timeout 300s - - - name: Generate test coverage - run: go-acc ./... 
--output=coverage.txt --covermode=atomic -- -failfast + run: make test:ci-matrix - - name: Upload coverage to Codecov without token, retry on failure - env: - codecov_secret: ${{ secrets.CODECOV_TOKEN }} - if: env.codecov_secret == '' - uses: Wandalen/wretry.action@v1.0.36 + - name: Upload coverage artifact + uses: actions/upload-artifact@v3 with: - attempt_limit: 5 - attempt_delay: 10000 - action: codecov/codecov-action@v3 - with: | - name: defradb-codecov - files: ./coverage.txt - os: 'linux' - fail_ci_if_error: true - verbose: true + name: ${{ matrix.client-type }}_${{ matrix.database-type }}_${{ matrix.mutation-type }} + path: coverage.txt + if-no-files-found: error + + upload-coverage: + name: Upload test code coverage + + needs: run-tests + + steps: + - name: Download coverage reports + uses: actions/download-artifact@v3 - - name: Upload coverage to Codecov with token - env: - codecov_secret: ${{ secrets.CODECOV_TOKEN }} - if: env.codecov_secret != '' + - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 with: - token: ${{ env.codecov_secret }} + token: ${{ secrets.CODECOV_TOKEN }} name: defradb-codecov - files: ./coverage.txt + flags: all-tests os: 'linux' fail_ci_if_error: true verbose: true diff --git a/Makefile b/Makefile index 6eb3456fcc..754c6738b3 100644 --- a/Makefile +++ b/Makefile @@ -30,6 +30,7 @@ BUILD_FLAGS+=-tags $(BUILD_TAGS) endif TEST_FLAGS=-race -shuffle=on -timeout 300s +COVER_FLAGS=-covermode=atomic -coverprofile=coverage.txt PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations @@ -191,6 +192,11 @@ test\:ci: DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ $(MAKE) test:all +.PHONY: test\:ci-matrix +test\:ci-matrix: + @$(MAKE) deps:lens + gotestsum --format testname -- ./... $(COVER_FLAGS) $(TEST_FLAGS) + .PHONY: test\:ci-gql-mutations test\:ci-gql-mutations: DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true \ From a6c6876936b572345602a11e41dd312625d6c022 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 12 Sep 2023 14:35:05 -0700 Subject: [PATCH 072/107] add missing runs-on property to run-tests workflow --- .github/workflows/run-tests.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 029f18cfd4..02519608a5 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -64,6 +64,8 @@ jobs: upload-coverage: name: Upload test code coverage + + runs-on: ubuntu-latest needs: run-tests From 4f4bb35501079dee3e6feaa7cd4d9c44ac756788 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Tue, 12 Sep 2023 15:52:24 -0700 Subject: [PATCH 073/107] set retention for coverage files. 
add coverpkg flag to test:ci Make target --- .github/workflows/run-tests.yml | 1 + Makefile | 20 +------------------- 2 files changed, 2 insertions(+), 19 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 02519608a5..dda8e1e41e 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -61,6 +61,7 @@ jobs: name: ${{ matrix.client-type }}_${{ matrix.database-type }}_${{ matrix.mutation-type }} path: coverage.txt if-no-files-found: error + retention-days: 1 upload-coverage: name: Upload test code coverage diff --git a/Makefile b/Makefile index 754c6738b3..cb96937219 100644 --- a/Makefile +++ b/Makefile @@ -30,7 +30,7 @@ BUILD_FLAGS+=-tags $(BUILD_TAGS) endif TEST_FLAGS=-race -shuffle=on -timeout 300s -COVER_FLAGS=-covermode=atomic -coverprofile=coverage.txt +COVER_FLAGS=-covermode=atomic -coverprofile=coverage.txt -coverpkg=./... PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations @@ -188,21 +188,9 @@ test\:build: .PHONY: test\:ci test\:ci: - DEFRA_BADGER_MEMORY=true DEFRA_BADGER_FILE=true \ - DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ - $(MAKE) test:all - -.PHONY: test\:ci-matrix -test\:ci-matrix: @$(MAKE) deps:lens gotestsum --format testname -- ./... $(COVER_FLAGS) $(TEST_FLAGS) -.PHONY: test\:ci-gql-mutations -test\:ci-gql-mutations: - DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true \ - DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \ - $(MAKE) test:all - .PHONY: test\:gql-mutations test\:gql-mutations: DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES) @@ -212,12 +200,6 @@ test\:gql-mutations: # # For example, CreateDoc will call [Collection.Create], and # UpdateDoc will call [Collection.Update]. 
-.PHONY: test\:ci-col-named-mutations
-test\:ci-col-named-mutations:
-	DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true \
-	DEFRA_CLIENT_GO=true DEFRA_CLIENT_HTTP=true \
-	$(MAKE) test:all
-
 .PHONY: test\:col-named-mutations
 test\:col-named-mutations:
 	DEFRA_MUTATION_TYPE=collection-named DEFRA_BADGER_MEMORY=true gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES)

From 9cfbbb7c9fa220d3b776b9eba182536580704f8f Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Tue, 12 Sep 2023 15:55:31 -0700
Subject: [PATCH 074/107] fix incorrect make command in run-tests workflow

---
 .github/workflows/run-tests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
index 029f18cfd4..11a87cde9f 100644
--- a/.github/workflows/run-tests.yml
+++ b/.github/workflows/run-tests.yml
@@ -53,7 +53,7 @@ jobs:
         make deps:test
 
       - name: Run the tests
-        run: make test:ci-matrix
+        run: make test:ci
 
       - name: Upload coverage artifact
        uses: actions/upload-artifact@v3

From 5dfb0b3b181ba95023783f13446b05c22cfa677e Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Wed, 13 Sep 2023 16:16:05 -0700
Subject: [PATCH 075/107] add peer info http endpoint and cli command

---
 cli/cli.go            |  1 +
 cli/p2p_info.go       | 34 ++++++++++++++++++++++++++++++++++
 http/client.go        | 14 ++++++++++++++
 http/handler.go       |  1 +
 http/handler_store.go | 12 ++++++++++++
 http/middleware.go    |  3 +++
 6 files changed, 65 insertions(+)
 create mode 100644 cli/p2p_info.go

diff --git a/cli/cli.go b/cli/cli.go
index f6caed242c..89ab5f3034 100644
--- a/cli/cli.go
+++ b/cli/cli.go
@@ -42,6 +42,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command {
 	p2p.AddCommand(
 		p2p_replicator,
 		p2p_collection,
+		MakeP2PInfoCommand(),
 	)
 
 	schema_migrate := MakeSchemaMigrationCommand()
diff --git a/cli/p2p_info.go b/cli/p2p_info.go
new file mode 100644
index 0000000000..588f03bb8a
--- /dev/null
+++ b/cli/p2p_info.go
@@ -0,0 +1,34 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+	"github.com/sourcenetwork/defradb/http"
+	"github.com/spf13/cobra"
+)
+
+func MakeP2PInfoCommand() *cobra.Command {
+	var cmd = &cobra.Command{
+		Use:   "info",
+		Short: "Get peer info from a DefraDB node",
+		Long:  `Get peer info from a DefraDB node`,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			db := cmd.Context().Value(dbContextKey).(*http.Client)
+
+			res, err := db.PeerInfo(cmd.Context())
+			if err != nil {
+				return err
+			}
+			return writeJSON(cmd, res)
+		},
+	}
+	return cmd
+}
diff --git a/http/client.go b/http/client.go
index 9ee261f49b..af96405158 100644
--- a/http/client.go
+++ b/http/client.go
@@ -396,6 +396,20 @@ func (c *Client) PrintDump(ctx context.Context) error {
 	return err
 }
 
+func (c *Client) PeerInfo(ctx context.Context) (*PeerInfoResponse, error) {
+	methodURL := c.http.baseURL.JoinPath("p2p", "info")
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+	var res PeerInfoResponse
+	if err := c.http.requestJson(req, &res); err != nil {
+		return nil, err
+	}
+	return &res, nil
+}
+
 func (c *Client) Close(ctx context.Context) {
 	// do nothing
 }
diff --git a/http/handler.go b/http/handler.go
index d5cbb23e44..7d7b31d528 100644
--- a/http/handler.go
+++ b/http/handler.go
@@ -93,6 +93,7 @@ func NewHandler(db client.DB, opts ServerOptions) *Handler {
 		graphQL.Post("/", store_handler.ExecRequest)
 	})
 	api.Route("/p2p", func(p2p chi.Router) {
+		p2p.Get("/info", store_handler.PeerInfo)
 		p2p.Route("/replicators", func(p2p_replicators chi.Router) {
 			p2p_replicators.Get("/", store_handler.GetAllReplicators)
 			p2p_replicators.Post("/", store_handler.SetReplicator)
diff --git a/http/handler_store.go b/http/handler_store.go
index d0cbdf42d2..85c934f24c 100644
--- a/http/handler_store.go
+++ b/http/handler_store.go
@@ -224,6 +224,18 @@ func (s *storeHandler) PrintDump(rw http.ResponseWriter, req *http.Request) {
 	rw.WriteHeader(http.StatusOK)
 }
 
+type PeerInfoResponse struct {
+	PeerID string `json:"peerID"`
+}
+
+func (s *storeHandler) PeerInfo(rw http.ResponseWriter, req *http.Request) {
+	var res PeerInfoResponse
+	if value, ok := req.Context().Value(peerIdContextKey).(string); ok {
+		res.PeerID = value
+	}
+	responseJSON(rw, http.StatusOK, &res)
+}
+
 type GraphQLRequest struct {
 	Query string `json:"query"`
 }
diff --git a/http/middleware.go b/http/middleware.go
index b8c823ed6d..0478b6c6bd 100644
--- a/http/middleware.go
+++ b/http/middleware.go
@@ -53,6 +53,8 @@ var (
 	// If a transaction exists, all operations will be executed
 	// in the current transaction context.
 	colContextKey = contextKey("col")
+	// peerIdContextKey contains the peerId of the DefraDB node.
+ peerIdContextKey = contextKey("peerId") ) // CorsMiddleware handles cross origin request @@ -78,6 +80,7 @@ func ApiMiddleware(db client.DB, txs *sync.Map, opts ServerOptions) func(http.Ha ctx := req.Context() ctx = context.WithValue(ctx, dbContextKey, db) ctx = context.WithValue(ctx, txsContextKey, txs) + ctx = context.WithValue(ctx, peerIdContextKey, opts.PeerID) next.ServeHTTP(rw, req.WithContext(ctx)) }) } From 2dd2c8435561491fbb5e693a99d339f3f1f02edd Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 14 Sep 2023 13:59:31 -0700 Subject: [PATCH 076/107] fix create many document cli and http handler --- cli/document_create.go | 29 +++++++++++++++++++++++++---- http/handler_collection.go | 9 +++++++-- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/cli/document_create.go b/cli/document_create.go index a49dbbf57e..e5f4100950 100644 --- a/cli/document_create.go +++ b/cli/document_create.go @@ -13,6 +13,7 @@ package cli import ( "encoding/json" "fmt" + "os" "github.com/spf13/cobra" @@ -22,6 +23,7 @@ import ( func MakeDocumentCreateCommand() *cobra.Command { var collection string + var file string var cmd = &cobra.Command{ Use: "create --collection ", Short: "Create a new document.", @@ -33,7 +35,7 @@ Example: create document Example: create documents defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' `, - Args: cobra.ExactArgs(1), + Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { store := cmd.Context().Value(storeContextKey).(client.Store) @@ -45,8 +47,22 @@ Example: create documents col = col.WithTxn(tx) } + var docData []byte + switch { + case len(args) == 1: + docData = []byte(args[0]) + case file != "": + data, err := os.ReadFile(file) + if err != nil { + return err + } + docData = data + default: + return fmt.Errorf("Document or file must be defined") + } + var docMap any - if err := json.Unmarshal([]byte(args[0]), &docMap); err != nil { + if err := json.Unmarshal(docData, &docMap); err != nil { return err } @@ -57,10 +73,14 @@ Example: create documents return err } return col.Create(cmd.Context(), doc) - case []map[string]any: + case []any: docs := make([]*client.Document, len(t)) for i, v := range t { - doc, err := client.NewDocFromMap(v) + docMap, ok := v.(map[string]any) + if !ok { + return fmt.Errorf("invalid document") + } + doc, err := client.NewDocFromMap(docMap) if err != nil { return err } @@ -72,6 +92,7 @@ Example: create documents } }, } + cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") return cmd } diff --git a/http/handler_collection.go b/http/handler_collection.go index 8f8ff8423b..607c1f1b21 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -47,9 +47,14 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { } switch t := body.(type) { - case []map[string]any: + case []any: var docList []*client.Document - for _, docMap := range t { + for _, v := range t { + docMap, ok := v.(map[string]any) + if !ok { + responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) + return + } doc, err := client.NewDocFromMap(docMap) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) From 936e931e5a8184860da8684ea90641ac14ced8ae Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 10:42:34 -0700 Subject: [PATCH 077/107] silence usage on error --- cli/root.go | 5 +++-- 1 
file changed, 3 insertions(+), 2 deletions(-) diff --git a/cli/root.go b/cli/root.go index 40290279a8..729b638f02 100644 --- a/cli/root.go +++ b/cli/root.go @@ -20,8 +20,9 @@ import ( func MakeRootCommand(cfg *config.Config) *cobra.Command { var cmd = &cobra.Command{ - Use: "defradb", - Short: "DefraDB Edge Database", + SilenceUsage: true, + Use: "defradb", + Short: "DefraDB Edge Database", Long: `DefraDB is the edge database to power the user-centric future. Start a DefraDB node, interact with a local or remote node, and much more. From 307ce52eb30a7a5b50996ede89e48e38fa4407f3 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 10:48:21 -0700 Subject: [PATCH 078/107] fix playground handler type --- http/handler.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/http/handler.go b/http/handler.go index 7d7b31d528..3d9951ec1d 100644 --- a/http/handler.go +++ b/http/handler.go @@ -26,7 +26,7 @@ import ( var Version string = "v0" // playgroundHandler is set when building with the playground build tag -var playgroundHandler = http.HandlerFunc(http.NotFound) +var playgroundHandler http.Handler = http.HandlerFunc(http.NotFound) type Handler struct { db client.DB From a34956140ffea01b617edf3723ce358e339aa5f4 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 11:49:12 -0700 Subject: [PATCH 079/107] replace go-acc with go 1.20 aggregate test coverage. add change detector to matrix workflow --- .github/workflows/run-tests.yml | 12 ++++++++- .gitignore | 1 + Makefile | 47 +++++++++++++++------------------ 3 files changed, 34 insertions(+), 26 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 11a87cde9f..4a7a408631 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -29,6 +29,11 @@ jobs: client-type: [go, http] database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] + include: + - client-type: go + database-type: badger-memory + mutation-type: collection-save + detect-changes: true env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} @@ -52,9 +57,14 @@ jobs: make deps:modules make deps:test - - name: Run the tests + - name: Run integration tests + if: ${{ !matrix.detect-changes }} run: make test:ci + - name: Run change detector tests + if: ${{ matrix.detect-changes }} + run: make test:changes + - name: Upload coverage artifact uses: actions/upload-artifact@v3 with: diff --git a/.gitignore b/.gitignore index b19a6d9259..81c1a16d62 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,7 @@ cmd/defradb/defradb cmd/genclidocs/genclidocs cmd/genmanpages/genmanpages coverage.txt +coverage tests/bench/*.log tests/bench/*.svg diff --git a/Makefile b/Makefile index 060819d248..473d2312b7 100644 --- a/Makefile +++ b/Makefile @@ -30,7 +30,10 @@ BUILD_FLAGS+=-tags $(BUILD_TAGS) endif TEST_FLAGS=-race -shuffle=on -timeout 300s -COVER_FLAGS=-covermode=atomic -coverprofile=coverage.txt -coverpkg=./... + +COVERAGE_DIRECTORY=$(PWD)/coverage +COVERAGE_FILE=coverage.txt +COVERAGE_FLAGS=-covermode=atomic -coverpkg=./... 
-args -test.gocoverdir=$(COVERAGE_DIRECTORY) PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations @@ -89,11 +92,6 @@ deps\:lens: rustup target add wasm32-unknown-unknown @$(MAKE) -C ./tests/lenses build -.PHONY: deps\:coverage -deps\:coverage: - go install github.com/ory/go-acc@latest - @$(MAKE) deps:lens - .PHONY: deps\:bench deps\:bench: go install golang.org/x/perf/cmd/benchstat@latest @@ -162,6 +160,11 @@ clean: clean\:test: go clean -testcache +.PHONY: clean\:coverage +clean\:coverage: + rm -rf $(COVERAGE_DIRECTORY) + rm -f $(COVERAGE_FILE) + # Example: `make tls-certs path="~/.defradb/certs"` .PHONY: tls-certs tls-certs: @@ -190,7 +193,10 @@ test\:build: .PHONY: test\:ci test\:ci: @$(MAKE) deps:lens - gotestsum --format testname -- ./... $(COVER_FLAGS) $(TEST_FLAGS) + @$(MAKE) clean:coverage + mkdir $(COVERAGE_DIRECTORY) + gotestsum --format testname -- ./... $(TEST_FLAGS) $(COVERAGE_FLAGS) + go tool covdata textfmt -i=$(COVERAGE_DIRECTORY) -o $(COVERAGE_FILE) .PHONY: test\:gql-mutations test\:gql-mutations: @@ -257,29 +263,20 @@ test\:cli: @$(MAKE) deps:lens gotestsum --format testname -- ./$(CLI_TEST_DIRECTORY)/... $(TEST_FLAGS) -# Using go-acc to ensure integration tests are included. -# Usage: `make test:coverage` or `make test:coverage path="{pathToPackage}"` -# Example: `make test:coverage path="./api/..."` + .PHONY: test\:coverage test\:coverage: - @$(MAKE) deps:coverage -ifeq ($(path),) - go-acc ./... --output=coverage.txt --covermode=atomic -- -failfast -coverpkg=./... - @echo "Show coverage information for each function in ./..." -else - go-acc $(path) --output=coverage.txt --covermode=atomic -- -failfast -coverpkg=$(path) - @echo "Show coverage information for each function in" path=$(path) -endif - go tool cover -func coverage.txt | grep total | awk '{print $$3}' + @$(MAKE) deps:lens + @$(MAKE) clean:coverage + mkdir $(COVERAGE_DIRECTORY) + go test ./... $(TEST_FLAGS) $(COVERAGE_FLAGS) + go tool covdata textfmt -i=$(COVERAGE_DIRECTORY) -o $(COVERAGE_FILE) + go tool cover -func $(COVERAGE_FILE) -# Usage: `make test:coverage-html` or `make test:coverage-html path="{pathToPackage}"` -# Example: `make test:coverage-html path="./api/..."` .PHONY: test\:coverage-html test\:coverage-html: - @$(MAKE) test:coverage path=$(path) - @echo "Generate coverage information in HTML" - go tool cover -html=coverage.txt - rm ./coverage.txt + @$(MAKE) test:coverage + go tool cover -html=$(COVERAGE_FILE) .PHONY: test\:changes test\:changes: From 6662cf3c26afefd9b10bced5d3a97400301f0196 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 11:58:38 -0700 Subject: [PATCH 080/107] fix missing Makefile target --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index 473d2312b7..628d21596a 100644 --- a/Makefile +++ b/Makefile @@ -117,7 +117,6 @@ deps: @$(MAKE) deps:modules && \ $(MAKE) deps:bench && \ $(MAKE) deps:chglog && \ - $(MAKE) deps:coverage && \ $(MAKE) deps:lint && \ $(MAKE) deps:test && \ $(MAKE) deps:mock From 80784f6e5024429d0cbed9fb0205c1a2185f28d5 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 12:05:10 -0700 Subject: [PATCH 081/107] remove detect-change workflow. 
update run-tests matrix to include change detector and non change detector runs --- .github/workflows/detect-change.yml | 55 ----------------------------- .github/workflows/run-tests.yml | 6 ++++ 2 files changed, 6 insertions(+), 55 deletions(-) delete mode 100644 .github/workflows/detect-change.yml diff --git a/.github/workflows/detect-change.yml b/.github/workflows/detect-change.yml deleted file mode 100644 index b6272c21cd..0000000000 --- a/.github/workflows/detect-change.yml +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright 2022 Democratized Data Foundation -# -# Use of this software is governed by the Business Source License -# included in the file licenses/BSL.txt. -# -# As of the Change Date specified in that file, in accordance with -# the Business Source License, use of this software will be governed -# by the Apache License, Version 2.0, included in the file -# licenses/APL.txt. - -name: Detect Change Workflow - -on: - pull_request: - branches: - - master - - develop - - push: - tags: - - 'v[0-9]+.[0-9]+.[0-9]+' - branches: - - master - - develop - -jobs: - detect-change: - name: Detect change job - - runs-on: ubuntu-latest - - steps: - - name: Checkout code into the directory - uses: actions/checkout@v3 - - - name: Setup Go environment explicitly - uses: actions/setup-go@v3 - with: - go-version: "1.20" - check-latest: true - - - name: Build dependencies - run: | - make deps:modules - make deps:test - - - name: Run detection for changes - run: make test:changes - - ## Uncomment to enable ability to SSH into the runner. - #- name: Setup upterm ssh session for debugging - # uses: lhotari/action-upterm@v1 - # with: - # limit-access-to-actor: true - # limit-access-to-users: shahzadlone diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 4a7a408631..37b139b742 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -30,10 +30,16 @@ jobs: database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] include: + # one runs with detect changes - client-type: go database-type: badger-memory mutation-type: collection-save detect-changes: true + # one runs without detect changes + - client-type: go + database-type: badger-memory + mutation-type: collection-save + detect-changes: false env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} From d9be82dc8a450541f9050c95bed1ccb2ebb7ad9b Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 12:14:43 -0700 Subject: [PATCH 082/107] add change-detector matrix variable to run-tests workflow --- .github/workflows/run-tests.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 37b139b742..6e32af1730 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -29,17 +29,12 @@ jobs: client-type: [go, http] database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] + detect-changes: [false] include: - # one runs with detect changes - client-type: go database-type: badger-memory mutation-type: collection-save detect-changes: true - # one runs without detect changes - - client-type: go - database-type: badger-memory - mutation-type: collection-save - detect-changes: false env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} From 5a4aa5ddf26eb4aedb6dd9902dcc23b0c7200fe5 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 12:27:44 -0700 Subject: [PATCH 083/107] increase 
test timeout in Makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index d1e0acf504..a18c61729f 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ ifdef BUILD_TAGS BUILD_FLAGS+=-tags $(BUILD_TAGS) endif -TEST_FLAGS=-race -shuffle=on -timeout 300s +TEST_FLAGS=-race -shuffle=on -timeout 600s PLAYGROUND_DIRECTORY=playground LENS_TEST_DIRECTORY=tests/integration/schema/migrations From 652bd3b597ca7d941f8d3e76ff84628a58cbe54f Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 13:01:02 -0700 Subject: [PATCH 084/107] add missing env variable to run-tests --- .github/workflows/run-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index b16a667455..8d0b0fa323 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -39,6 +39,7 @@ jobs: env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} + DEFRA_CLIENT_CLI: ${{ matrix.client-type == 'cli' }} DEFRA_BADGER_MEMORY: ${{ matrix.database-type == 'badger-memory' }} DEFRA_BADGER_FILE: ${{ matrix.database-type == 'badger-file' }} DEFRA_MUTATION_TYPE: ${{ matrix.mutation-type }} From 04955c2caa7a8915af0ae98b434fff194e506bd6 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 14:24:31 -0700 Subject: [PATCH 085/107] increase timeout on test:changes Makefile target --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index eccbed9e17..680767aad3 100644 --- a/Makefile +++ b/Makefile @@ -29,7 +29,7 @@ ifdef BUILD_TAGS BUILD_FLAGS+=-tags $(BUILD_TAGS) endif -TEST_FLAGS=-race -shuffle=on -timeout 300s +TEST_FLAGS=-race -shuffle=on -timeout 5m COVERAGE_DIRECTORY=$(PWD)/coverage COVERAGE_FILE=coverage.txt @@ -275,7 +275,7 @@ test\:coverage-html: .PHONY: test\:changes test\:changes: - gotestsum --format testname -- ./$(CHANGE_DETECTOR_TEST_DIRECTORY)/... --tags change_detector + gotestsum --format testname -- ./$(CHANGE_DETECTOR_TEST_DIRECTORY)/... 
-timeout 15m --tags change_detector .PHONY: validate\:codecov validate\:codecov: From 9f5c6ebf0f4e69e3e9e032fd519d78ca086971db Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 20 Sep 2023 14:47:50 -0700 Subject: [PATCH 086/107] disable change detector in test matrix for now --- .github/workflows/run-tests.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 8d0b0fa323..3ae598c80c 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -30,11 +30,11 @@ jobs: database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] detect-changes: [false] - include: - - client-type: go - database-type: badger-memory - mutation-type: collection-save - detect-changes: true + # include: + # - client-type: go + # database-type: badger-memory + # mutation-type: collection-save + # detect-changes: true env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} From 3b65bc4933acc7874b038343b58e586bc4f4bfe5 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 27 Sep 2023 11:40:53 -0700 Subject: [PATCH 087/107] enable change detector in test matrix --- .github/workflows/run-tests.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 3ae598c80c..8d0b0fa323 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -30,11 +30,11 @@ jobs: database-type: [badger-file, badger-memory] mutation-type: [gql, collection-named, collection-save] detect-changes: [false] - # include: - # - client-type: go - # database-type: badger-memory - # mutation-type: collection-save - # detect-changes: true + include: + - client-type: go + database-type: badger-memory + mutation-type: collection-save + detect-changes: true env: DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} From 28b087b23accc2b37f08489deafb4181ef2aced3 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 27 Sep 2023 11:55:06 -0700 Subject: [PATCH 088/107] don't upload test coverage when running change detector matrix job --- .github/workflows/run-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 8d0b0fa323..2124d186c4 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -68,6 +68,7 @@ jobs: run: make test:changes - name: Upload coverage artifact + if: ${{ !matrix.detect-changes }} uses: actions/upload-artifact@v3 with: name: ${{ matrix.client-type }}_${{ matrix.database-type }}_${{ matrix.mutation-type }} From 9977c382faa4e79562c76a7253ba4d35490f933a Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 27 Sep 2023 12:28:47 -0700 Subject: [PATCH 089/107] checkout code before uploading test coverage --- .github/workflows/run-tests.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 2124d186c4..17637c01ab 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -84,8 +84,13 @@ jobs: needs: run-tests steps: + - name: Checkout code into the directory + uses: actions/checkout@v3 + - name: Download coverage reports uses: actions/download-artifact@v3 + with: + path: coverage_reports - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 From a3e758620974eadecb84030afa72e35658cf48f1 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 27 
Sep 2023 14:33:04 -0700 Subject: [PATCH 090/107] restore cli version_test --- cli/version_test.go | 89 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 cli/version_test.go diff --git a/cli/version_test.go b/cli/version_test.go new file mode 100644 index 0000000000..4f62f3659b --- /dev/null +++ b/cli/version_test.go @@ -0,0 +1,89 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" +) + +// The version information comes from the build process which is not [easily] accessible from unit tests. +// Therefore we test that the command outputs the expected formats *without the version info*. + +// case: no args, meaning `--format text` +func TestVersionNoArg(t *testing.T) { + cmd := MakeVersionCommand() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + err := cmd.Execute() + assert.NoError(t, err) + t.Log(buf.String()) + assert.Contains(t, buf.String(), "defradb") + assert.Contains(t, buf.String(), "built with Go") +} + +// case: `--full`, meaning `--format text --full` +func TestVersionFull(t *testing.T) { + cmd := MakeVersionCommand() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--full"}) + err := cmd.Execute() + assert.NoError(t, err) + t.Log(buf.String()) + assert.Contains(t, buf.String(), "* HTTP API") + assert.Contains(t, buf.String(), "* DocKey versions") + assert.Contains(t, buf.String(), "* P2P multicodec") +} + +// case: `--format json` +func TestVersionJSON(t *testing.T) { + cmd := MakeVersionCommand() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--format", "json"}) + err := cmd.Execute() + assert.NoError(t, err) + t.Log(buf.String()) + assert.JSONEq(t, buf.String(), ` + { + "release": "", + "commit": "", + "commitdate": "", + "go": "", + "httpapi": "v0", + "dockeyversions": "1", + "netprotocol": "/defra/0.0.1" + }`) +} + +// case: `--format json --full` (is equivalent to previous one) +func TestVersionJSONFull(t *testing.T) { + cmd := MakeVersionCommand() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--format", "json", "--full"}) + err := cmd.Execute() + assert.NoError(t, err) + t.Log(buf.String()) + assert.JSONEq(t, buf.String(), ` + { + "release": "", + "commit": "", + "commitdate": "", + "go": "", + "httpapi": "v0", + "dockeyversions": "1", + "netprotocol": "/defra/0.0.1" + }`) +} From 2b32b0a6cd31f68dd9f15ae949039b2ef4bc6a88 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Wed, 27 Sep 2023 15:56:26 -0700 Subject: [PATCH 091/107] add cli context key documentation --- cli/utils.go | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/cli/utils.go b/cli/utils.go index cf61865edd..8e84095342 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -25,8 +25,16 @@ import ( type contextKey string var ( - txContextKey = contextKey("tx") - dbContextKey = contextKey("db") + // txContextKey is the context key for the datastore.Txn + // + // This will only be set if a transaction id is specified. 
+ txContextKey = contextKey("tx") + // dbContextKey is the context key for the client.DB + dbContextKey = contextKey("db") + // storeContextKey is the context key for the client.Store + // + // If a transaction exists, all operations will be executed + // in the current transaction context. storeContextKey = contextKey("store") ) @@ -39,8 +47,7 @@ func setTransactionContext(cmd *cobra.Command, cfg *config.Config, txId uint64) if err != nil { return err } - ctx := cmd.Context() - ctx = context.WithValue(ctx, txContextKey, tx) + ctx := context.WithValue(cmd.Context(), txContextKey, tx) cmd.SetContext(ctx) return nil } @@ -51,8 +58,7 @@ func setStoreContext(cmd *cobra.Command, cfg *config.Config) error { if err != nil { return err } - ctx := cmd.Context() - ctx = context.WithValue(ctx, dbContextKey, db) + ctx := context.WithValue(cmd.Context(), dbContextKey, db) if tx, ok := ctx.Value(txContextKey).(datastore.Txn); ok { ctx = context.WithValue(ctx, storeContextKey, db.WithTxn(tx)) } else { From 12206255ab2b8cf92d6721cfa77c0faf09b5091a Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 28 Sep 2023 10:07:46 -0700 Subject: [PATCH 092/107] rename run-tests workflow to run-tests-and-upload-coverage. rename Makefile target test:ci to test:coverage. --- ...sts.yml => run-tests-and-upload-coverage.yml} | 6 +++--- Makefile | 16 ++++++---------- 2 files changed, 9 insertions(+), 13 deletions(-) rename .github/workflows/{run-tests.yml => run-tests-and-upload-coverage.yml} (96%) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests-and-upload-coverage.yml similarity index 96% rename from .github/workflows/run-tests.yml rename to .github/workflows/run-tests-and-upload-coverage.yml index 17637c01ab..45b681e7aa 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests-and-upload-coverage.yml @@ -20,7 +20,7 @@ on: jobs: run-tests: - name: Run tests job matrix + name: Run tests matrix job runs-on: ubuntu-latest @@ -61,7 +61,7 @@ jobs: - name: Run integration tests if: ${{ !matrix.detect-changes }} - run: make test:ci + run: make test:coverage - name: Run change detector tests if: ${{ matrix.detect-changes }} @@ -77,7 +77,7 @@ jobs: retention-days: 1 upload-coverage: - name: Upload test code coverage + name: Upload test code coverage job runs-on: ubuntu-latest diff --git a/Makefile b/Makefile index 680767aad3..7268834d5a 100644 --- a/Makefile +++ b/Makefile @@ -188,14 +188,6 @@ test\:quick: test\:build: gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) -run=nope -.PHONY: test\:ci -test\:ci: - @$(MAKE) deps:lens - @$(MAKE) clean:coverage - mkdir $(COVERAGE_DIRECTORY) - gotestsum --format testname -- ./... $(TEST_FLAGS) $(COVERAGE_FLAGS) - go tool covdata textfmt -i=$(COVERAGE_DIRECTORY) -o $(COVERAGE_FILE) - .PHONY: test\:gql-mutations test\:gql-mutations: DEFRA_MUTATION_TYPE=gql DEFRA_BADGER_MEMORY=true gotestsum --format pkgname -- $(DEFAULT_TEST_DIRECTORIES) @@ -264,9 +256,13 @@ test\:coverage: @$(MAKE) deps:lens @$(MAKE) clean:coverage mkdir $(COVERAGE_DIRECTORY) - go test ./... $(TEST_FLAGS) $(COVERAGE_FLAGS) + gotestsum --format testname -- ./... 
$(TEST_FLAGS) $(COVERAGE_FLAGS) go tool covdata textfmt -i=$(COVERAGE_DIRECTORY) -o $(COVERAGE_FILE) - go tool cover -func $(COVERAGE_FILE) + +.PHONY: test\:coverage-func +test\:coverage-func: + @$(MAKE) test:coverage + go tool cover -func=$(COVERAGE_FILE) .PHONY: test\:coverage-html test\:coverage-html: From 9389969488673e6c7a1721b5649df6cd69e697db Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 28 Sep 2023 10:12:48 -0700 Subject: [PATCH 093/107] add exit code to defradb main command --- cmd/defradb/main.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/cmd/defradb/main.go b/cmd/defradb/main.go index 11a7f75947..318d53b63d 100644 --- a/cmd/defradb/main.go +++ b/cmd/defradb/main.go @@ -12,6 +12,8 @@ package main import ( + "os" + "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/config" ) @@ -19,5 +21,7 @@ import ( // Execute adds all child commands to the root command and sets flags appropriately. func main() { defraCmd := cli.NewDefraCommand(config.DefaultConfig()) - defraCmd.Execute() //nolint:errcheck + if err := defraCmd.Execute(); err != nil { + os.Exit(1) + } } From a79e88e9157637e330ad600ab8eae4856ce50315 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 28 Sep 2023 16:26:50 -0700 Subject: [PATCH 094/107] update run-tests-and-upload-coverage workflow name --- .github/workflows/run-tests-and-upload-coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run-tests-and-upload-coverage.yml b/.github/workflows/run-tests-and-upload-coverage.yml index 45b681e7aa..deebcaa1cd 100644 --- a/.github/workflows/run-tests-and-upload-coverage.yml +++ b/.github/workflows/run-tests-and-upload-coverage.yml @@ -8,7 +8,7 @@ # by the Apache License, Version 2.0, included in the file # licenses/APL.txt. 
-name: Run Tests Workflow
+name: Run Tests And Upload Coverage Workflow
 
 on:
   pull_request:

From 26d8f72c4ed1e067ec1559221d4096b7350a3388 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Thu, 28 Sep 2023 16:33:28 -0700
Subject: [PATCH 095/107] add comment about ignored error in cmd/defradb/main.go

---
 cmd/defradb/main.go | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/cmd/defradb/main.go b/cmd/defradb/main.go
index 318d53b63d..2406885a76 100644
--- a/cmd/defradb/main.go
+++ b/cmd/defradb/main.go
@@ -22,6 +22,11 @@ import (
 func main() {
 	defraCmd := cli.NewDefraCommand(config.DefaultConfig())
 	if err := defraCmd.Execute(); err != nil {
+		// this error is okay to discard because cobra
+		// logs any errors encountered during execution
+		//
+		// exiting with a non-zero status code signals
+		// that an error has occurred during execution
 		os.Exit(1)
 	}
 }

From f0c89b9e5088acfd4920cc2942921655103e6965 Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Fri, 29 Sep 2023 13:34:49 -0700
Subject: [PATCH 096/107] simplify cli store context value retrieval

---
 cli/backup_export.go           | 2 +-
 cli/backup_import.go           | 4 +---
 cli/collection.go              | 2 +-
 cli/document_create.go         | 2 +-
 cli/document_delete.go         | 2 +-
 cli/document_get.go            | 2 +-
 cli/document_keys.go           | 3 +--
 cli/document_update.go         | 2 +-
 cli/index_create.go            | 2 +-
 cli/index_drop.go              | 3 +--
 cli/index_list.go              | 3 +--
 cli/p2p_collection_add.go      | 4 +---
 cli/p2p_collection_getall.go   | 4 +---
 cli/p2p_collection_remove.go   | 4 +---
 cli/p2p_replicator_delete.go   | 2 +-
 cli/p2p_replicator_getall.go   | 4 +---
 cli/p2p_replicator_set.go      | 2 +-
 cli/request.go                 | 3 +--
 cli/schema_add.go              | 4 +---
 cli/schema_migration_down.go   | 3 +--
 cli/schema_migration_get.go    | 4 +---
 cli/schema_migration_reload.go | 3 +--
 cli/schema_migration_set.go    | 2 +-
 cli/schema_migration_up.go     | 3 +--
 cli/schema_patch.go            | 4 +---
 cli/schema_set_default.go      | 4 +---
 cli/utils.go                   | 7 +++++++
 27 files changed, 33 insertions(+), 51 deletions(-)

diff --git a/cli/backup_export.go b/cli/backup_export.go
index f1ca1dc953..9e8d1c056e 100644
--- a/cli/backup_export.go
+++ b/cli/backup_export.go
@@ -38,7 +38,7 @@ Example: export data for the 'Users' collection:
   defradb client export --collection Users user_data.json`,
 		Args: cobra.ExactArgs(1),
 		RunE: func(cmd *cobra.Command, args []string) error {
-			store := cmd.Context().Value(storeContextKey).(client.Store)
+			store := mustGetStoreContext(cmd)
 
 			if !isValidExportFormat(format) {
 				return ErrInvalidExportFormat
diff --git a/cli/backup_import.go b/cli/backup_import.go
index 770fccc52e..35af345a0a 100644
--- a/cli/backup_import.go
+++ b/cli/backup_import.go
@@ -12,8 +12,6 @@ package cli
 
 import (
 	"github.com/spf13/cobra"
-
-	"github.com/sourcenetwork/defradb/client"
 )
 
 func MakeBackupImportCommand() *cobra.Command {
@@ -26,7 +24,7 @@ Example: import data to the database:
   defradb client import user_data.json`,
 		Args: cobra.ExactArgs(1),
 		RunE: func(cmd *cobra.Command, args []string) error {
-			store := cmd.Context().Value(storeContextKey).(client.Store)
+			store := mustGetStoreContext(cmd)
 			return store.BasicImport(cmd.Context(), args[0])
 		},
 	}
diff --git a/cli/collection.go b/cli/collection.go
index 09df92ed74..a3b4d5cbb5 100644
--- a/cli/collection.go
+++ b/cli/collection.go
@@ -38,7 +38,7 @@ Example: view collection by version id
   defradb client collection --version bae123
 `,
 		RunE: func(cmd *cobra.Command, args []string) error {
-			store := cmd.Context().Value(storeContextKey).(client.Store)
+			store := mustGetStoreContext(cmd)
 
 			switch {
 			case name != "":
diff
--git a/cli/document_create.go b/cli/document_create.go index fbf4d6df28..39eb91d19f 100644 --- a/cli/document_create.go +++ b/cli/document_create.go @@ -37,7 +37,7 @@ Example: create documents `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collection) if err != nil { diff --git a/cli/document_delete.go b/cli/document_delete.go index 687567cf46..51f4141737 100644 --- a/cli/document_delete.go +++ b/cli/document_delete.go @@ -35,7 +35,7 @@ Example: delete by filter defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' `, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collection) if err != nil { diff --git a/cli/document_get.go b/cli/document_get.go index ddab716326..b1dde8c7d2 100644 --- a/cli/document_get.go +++ b/cli/document_get.go @@ -30,7 +30,7 @@ Example: `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collection) if err != nil { diff --git a/cli/document_keys.go b/cli/document_keys.go index 6cd22c21dd..942c0ea2c2 100644 --- a/cli/document_keys.go +++ b/cli/document_keys.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/http" ) @@ -29,7 +28,7 @@ Example: defradb client document keys --collection User keys `, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collection) if err != nil { diff --git a/cli/document_update.go b/cli/document_update.go index 6d6a137190..ab25dec5bc 100644 --- a/cli/document_update.go +++ b/cli/document_update.go @@ -42,7 +42,7 @@ Example: update by keys `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collection) if err != nil { diff --git a/cli/index_create.go b/cli/index_create.go index 13ea19bab1..42866267fc 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -35,7 +35,7 @@ Example: create a named index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name --name UsersByName`, ValidArgs: []string{"collection", "fields", "name"}, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var fields []client.IndexedFieldDescription for _, name := range fieldsArg { diff --git a/cli/index_drop.go b/cli/index_drop.go index 9af7cfdfc7..03639fb277 100644 --- a/cli/index_drop.go +++ b/cli/index_drop.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -29,7 +28,7 @@ Example: drop the index 'UsersByName' for 'Users' 
collection: defradb client index create --collection Users --name UsersByName`, ValidArgs: []string{"collection", "name"}, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { diff --git a/cli/index_list.go b/cli/index_list.go index bf342d2d7f..92ada3e007 100644 --- a/cli/index_list.go +++ b/cli/index_list.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -31,7 +30,7 @@ Example: show all index for 'Users' collection: defradb client index list --collection Users`, ValidArgs: []string{"collection"}, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) switch { case collectionArg != "": diff --git a/cli/p2p_collection_add.go b/cli/p2p_collection_add.go index c5417d80c5..6970e8daec 100644 --- a/cli/p2p_collection_add.go +++ b/cli/p2p_collection_add.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeP2PCollectionAddCommand() *cobra.Command { @@ -24,7 +22,7 @@ func MakeP2PCollectionAddCommand() *cobra.Command { The collections are synchronized between nodes of a pubsub network.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) return store.AddP2PCollection(cmd.Context(), args[0]) }, } diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index 85d6e32da0..2d4da7e530 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeP2PCollectionGetallCommand() *cobra.Command { @@ -24,7 +22,7 @@ func MakeP2PCollectionGetallCommand() *cobra.Command { This is the list of collections of the node that are synchronized on the pubsub network.`, Args: cobra.NoArgs, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) cols, err := store.GetAllP2PCollections(cmd.Context()) if err != nil { diff --git a/cli/p2p_collection_remove.go b/cli/p2p_collection_remove.go index 9aae42b1b1..ed67f5e7c6 100644 --- a/cli/p2p_collection_remove.go +++ b/cli/p2p_collection_remove.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeP2PCollectionRemoveCommand() *cobra.Command { @@ -24,7 +22,7 @@ func MakeP2PCollectionRemoveCommand() *cobra.Command { The removed collections will no longer be synchronized between nodes.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) return store.RemoveP2PCollection(cmd.Context(), args[0]) }, } diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go index 6bf6425a51..a89be2c788 100644 --- a/cli/p2p_replicator_delete.go +++ b/cli/p2p_replicator_delete.go @@ -24,7 +24,7 @@ func MakeP2PReplicatorDeleteCommand() *cobra.Command { Long: `Delete a replicator. 
It will stop synchronizing.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) addr, err := peer.AddrInfoFromString(args[0]) if err != nil { diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 9041996902..52a81d4a57 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeP2PReplicatorGetallCommand() *cobra.Command { @@ -23,7 +21,7 @@ func MakeP2PReplicatorGetallCommand() *cobra.Command { Long: `Get all the replicators active in the P2P data sync system. These are the replicators that are currently replicating data from one node to another.`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) reps, err := store.GetAllReplicators(cmd.Context()) if err != nil { diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go index d839d4bed6..63cc3a31f9 100644 --- a/cli/p2p_replicator_set.go +++ b/cli/p2p_replicator_set.go @@ -27,7 +27,7 @@ A replicator replicates one or all collection(s) from this node to another. `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) addr, err := peer.AddrInfoFromString(args[0]) if err != nil { diff --git a/cli/request.go b/cli/request.go index 77e6b9a5b6..56e33d7c4a 100644 --- a/cli/request.go +++ b/cli/request.go @@ -16,7 +16,6 @@ import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/errors" ) @@ -46,7 +45,7 @@ with the database more conveniently. 
To learn more about the DefraDB GraphQL Query Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var request string switch { diff --git a/cli/schema_add.go b/cli/schema_add.go index 5bc2a83e2f..b93427a883 100644 --- a/cli/schema_add.go +++ b/cli/schema_add.go @@ -16,8 +16,6 @@ import ( "os" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeSchemaAddCommand() *cobra.Command { @@ -38,7 +36,7 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var schema string switch { diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 8b6d75a444..186db11f80 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -28,7 +27,7 @@ func MakeSchemaMigrationDownCommand() *cobra.Command { Long: `Reverse a migration on the specified schema version.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var src []map[string]any if err := json.Unmarshal([]byte(args[0]), &src); err != nil { diff --git a/cli/schema_migration_get.go b/cli/schema_migration_get.go index 5474ea09d8..43b66599b7 100644 --- a/cli/schema_migration_get.go +++ b/cli/schema_migration_get.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeSchemaMigrationGetCommand() *cobra.Command { @@ -27,7 +25,7 @@ Example: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) cfgs, err := store.LensRegistry().Config(cmd.Context()) if err != nil { diff --git a/cli/schema_migration_reload.go b/cli/schema_migration_reload.go index d009d23f76..d04aebed65 100644 --- a/cli/schema_migration_reload.go +++ b/cli/schema_migration_reload.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -23,7 +22,7 @@ func MakeSchemaMigrationReloadCommand() *cobra.Command { Short: "Reload the schema migrations within DefraDB", Long: `Reload the schema migrations within DefraDB`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) lens := store.LensRegistry() if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index 7a89d1681b..90b4289313 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -43,7 +43,7 @@ Example: add from stdin: Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, Args: cobra.RangeArgs(2, 3), RunE: func(cmd *cobra.Command, 
args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var lensCfgJson string switch { diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index 78ff18daeb..b272694f43 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -16,7 +16,6 @@ import ( "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" ) @@ -28,7 +27,7 @@ func MakeSchemaMigrationUpCommand() *cobra.Command { Long: `Runs a migration on the specified schema version.`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var src []map[string]any if err := json.Unmarshal([]byte(args[0]), &src); err != nil { diff --git a/cli/schema_patch.go b/cli/schema_patch.go index 395577afdd..70f4283c85 100644 --- a/cli/schema_patch.go +++ b/cli/schema_patch.go @@ -16,8 +16,6 @@ import ( "os" "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeSchemaPatchCommand() *cobra.Command { @@ -41,7 +39,7 @@ Example: patch from stdin: To learn more about the DefraDB GraphQL Schema Language, refer to https://docs.source.network.`, RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) var patch string switch { diff --git a/cli/schema_set_default.go b/cli/schema_set_default.go index bde43c9269..cdb6bd8bd8 100644 --- a/cli/schema_set_default.go +++ b/cli/schema_set_default.go @@ -12,8 +12,6 @@ package cli import ( "github.com/spf13/cobra" - - "github.com/sourcenetwork/defradb/client" ) func MakeSchemaSetDefaultCommand() *cobra.Command { @@ -23,7 +21,7 @@ func MakeSchemaSetDefaultCommand() *cobra.Command { Long: `Set the default schema version`, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := cmd.Context().Value(storeContextKey).(client.Store) + store := mustGetStoreContext(cmd) return store.SetDefaultSchemaVersion(cmd.Context(), args[0]) }, } diff --git a/cli/utils.go b/cli/utils.go index 8e84095342..1a0e536b75 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -38,6 +38,13 @@ var ( storeContextKey = contextKey("store") ) +// mustGetStoreContext returns the store for the current command context. +// +// If a store is not set in the current context this function panics. +func mustGetStoreContext(cmd *cobra.Command) client.Store { + return cmd.Context().Value(storeContextKey).(client.Store) +} + // setTransactionContext sets the transaction for the current command context. 
func setTransactionContext(cmd *cobra.Command, cfg *config.Config, txId uint64) error { if txId == 0 { From 25f36ee2369ab598cb9d783272953c8e01791053 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 13:47:18 -0700 Subject: [PATCH 097/107] fix getall command names --- cli/cli.go | 4 ++-- cli/p2p_collection_getall.go | 2 +- cli/p2p_replicator_getall.go | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cli/cli.go b/cli/cli.go index 10b2a1145e..6a2cbe95b4 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -28,12 +28,12 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { p2p_collection.AddCommand( MakeP2PCollectionAddCommand(), MakeP2PCollectionRemoveCommand(), - MakeP2PCollectionGetallCommand(), + MakeP2PCollectionGetAllCommand(), ) p2p_replicator := MakeP2PReplicatorCommand() p2p_replicator.AddCommand( - MakeP2PReplicatorGetallCommand(), + MakeP2PReplicatorGetAllCommand(), MakeP2PReplicatorSetCommand(), MakeP2PReplicatorDeleteCommand(), ) diff --git a/cli/p2p_collection_getall.go b/cli/p2p_collection_getall.go index 2d4da7e530..c07a63f453 100644 --- a/cli/p2p_collection_getall.go +++ b/cli/p2p_collection_getall.go @@ -14,7 +14,7 @@ import ( "github.com/spf13/cobra" ) -func MakeP2PCollectionGetallCommand() *cobra.Command { +func MakeP2PCollectionGetAllCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all P2P collections", diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 52a81d4a57..527c8ab1ba 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -14,7 +14,7 @@ import ( "github.com/spf13/cobra" ) -func MakeP2PReplicatorGetallCommand() *cobra.Command { +func MakeP2PReplicatorGetAllCommand() *cobra.Command { var cmd = &cobra.Command{ Use: "getall", Short: "Get all replicators", From 8158750cb4ccb48d73a88d1ed0f132679a15de69 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 13:49:56 -0700 Subject: [PATCH 098/107] fix license header year --- cli/p2p_replicator_delete.go | 2 +- cli/p2p_replicator_getall.go | 2 +- cli/p2p_replicator_set.go | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cli/p2p_replicator_delete.go b/cli/p2p_replicator_delete.go index a89be2c788..7504d0c932 100644 --- a/cli/p2p_replicator_delete.go +++ b/cli/p2p_replicator_delete.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. diff --git a/cli/p2p_replicator_getall.go b/cli/p2p_replicator_getall.go index 527c8ab1ba..9192ed4d10 100644 --- a/cli/p2p_replicator_getall.go +++ b/cli/p2p_replicator_getall.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. diff --git a/cli/p2p_replicator_set.go b/cli/p2p_replicator_set.go index 63cc3a31f9..6b590b6ea7 100644 --- a/cli/p2p_replicator_set.go +++ b/cli/p2p_replicator_set.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. 
From dd34f90ec515d61d256fb959c7e9be28e45ddf15 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 17:10:50 -0700 Subject: [PATCH 099/107] rename cli document commands to collection --- cli/cli.go | 18 ++-- cli/collection.go | 90 +++++++++---------- ...ocument_create.go => collection_create.go} | 22 ++--- ...ocument_delete.go => collection_delete.go} | 22 ++--- cli/collection_describe.go | 56 ++++++++++++ cli/{document_get.go => collection_get.go} | 25 ++---- cli/{document_keys.go => collection_keys.go} | 30 +++---- ...ocument_update.go => collection_update.go} | 24 ++--- cli/document.go | 25 ------ cli/utils.go | 5 ++ cli/wrapper.go | 10 +-- cli/wrapper_collection.go | 44 ++++----- 12 files changed, 185 insertions(+), 186 deletions(-) rename cli/{document_create.go => collection_create.go} (73%) rename cli/{document_delete.go => collection_delete.go} (72%) create mode 100644 cli/collection_describe.go rename cli/{document_get.go => collection_get.go} (61%) rename cli/{document_keys.go => collection_keys.go} (56%) rename cli/{document_update.go => collection_update.go} (76%) delete mode 100644 cli/document.go diff --git a/cli/cli.go b/cli/cli.go index 6a2cbe95b4..0cb9fbb5bc 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -82,26 +82,26 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { MakeTxDiscardCommand(cfg), ) - document := MakeDocumentCommand() - document.AddCommand( - MakeDocumentGetCommand(), - MakeDocumentKeysCommand(), - MakeDocumentDeleteCommand(), - MakeDocumentUpdateCommand(), - MakeDocumentCreateCommand(), + collection := MakeCollectionCommand(cfg) + collection.AddCommand( + MakeCollectionGetCommand(), + MakeCollectionKeysCommand(), + MakeCollectionDeleteCommand(), + MakeCollectionUpdateCommand(), + MakeCollectionCreateCommand(), + MakeCollectionDescribeCommand(), ) client := MakeClientCommand(cfg) client.AddCommand( MakeDumpCommand(), MakeRequestCommand(), - MakeCollectionCommand(), schema, index, p2p, backup, tx, - document, + collection, ) root := MakeRootCommand(cfg) diff --git a/cli/collection.go b/cli/collection.go index a3b4d5cbb5..e21c29283b 100644 --- a/cli/collection.go +++ b/cli/collection.go @@ -11,69 +11,67 @@ package cli import ( + "context" + "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/datastore" ) -func MakeCollectionCommand() *cobra.Command { +func MakeCollectionCommand(cfg *config.Config) *cobra.Command { + var txID uint64 var name string var schemaID string var versionID string var cmd = &cobra.Command{ Use: "collection [--name --schema --version ]", - Short: "View detailed collection info.", - Long: `View detailed collection info. 
- -Example: view all collections - defradb client collection - -Example: view collection by name - defradb client collection --name User - -Example: view collection by schema id - defradb client collection --schema bae123 - -Example: view collection by version id - defradb client collection --version bae123 - `, - RunE: func(cmd *cobra.Command, args []string) error { + Short: "Interact with a collection.", + Long: `Create, read, update, and delete documents within a collection.`, + PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) { + // cobra does not chain pre run calls so we have to run them again here + if err := loadConfig(cfg); err != nil { + return err + } + if err := setTransactionContext(cmd, cfg, txID); err != nil { + return err + } + if err := setStoreContext(cmd, cfg); err != nil { + return err + } store := mustGetStoreContext(cmd) + var col client.Collection switch { - case name != "": - col, err := store.GetCollectionByName(cmd.Context(), name) - if err != nil { - return err - } - return writeJSON(cmd, col.Description()) - case schemaID != "": - col, err := store.GetCollectionBySchemaID(cmd.Context(), schemaID) - if err != nil { - return err - } - return writeJSON(cmd, col.Description()) case versionID != "": - col, err := store.GetCollectionByVersionID(cmd.Context(), versionID) - if err != nil { - return err - } - return writeJSON(cmd, col.Description()) + col, err = store.GetCollectionByVersionID(cmd.Context(), versionID) + + case schemaID != "": + col, err = store.GetCollectionBySchemaID(cmd.Context(), schemaID) + + case name != "": + col, err = store.GetCollectionByName(cmd.Context(), name) + default: - cols, err := store.GetAllCollections(cmd.Context()) - if err != nil { - return err - } - colDesc := make([]client.CollectionDescription, len(cols)) - for i, col := range cols { - colDesc[i] = col.Description() - } - return writeJSON(cmd, colDesc) + return nil } + + if err != nil { + return err + } + if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { + col = col.WithTxn(tx) + } + + ctx := context.WithValue(cmd.Context(), colContextKey, col) + cmd.SetContext(ctx) + return nil }, } - cmd.Flags().StringVar(&name, "name", "", "Get collection by name") - cmd.Flags().StringVar(&schemaID, "schema", "", "Get collection by schema ID") - cmd.Flags().StringVar(&versionID, "version", "", "Get collection by version ID") + cmd.PersistentFlags().Uint64Var(&txID, "tx", 0, "Transaction ID") + cmd.PersistentFlags().StringVar(&name, "name", "", "Collection name") + cmd.PersistentFlags().StringVar(&schemaID, "schema", "", "Collection schema ID") + cmd.PersistentFlags().StringVar(&versionID, "version", "", "Collection version ID") return cmd } diff --git a/cli/document_create.go b/cli/collection_create.go similarity index 73% rename from cli/document_create.go rename to cli/collection_create.go index 39eb91d19f..5fc93e842c 100644 --- a/cli/document_create.go +++ b/cli/collection_create.go @@ -18,33 +18,26 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) -func MakeDocumentCreateCommand() *cobra.Command { - var collection string +func MakeCollectionCreateCommand() *cobra.Command { var file string var cmd = &cobra.Command{ - Use: "create --collection ", + Use: "create ", Short: "Create a new document.", Long: `Create a new document. 
Example: create document - defradb client document create --collection User '{ "name": "Bob" }' + defradb client collection create --name User '{ "name": "Bob" }' Example: create documents - defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' + defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetStoreContext(cmd) - - col, err := store.GetCollectionByName(cmd.Context(), collection) - if err != nil { - return err - } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if !ok { + return cmd.Usage() } var docData []byte @@ -93,6 +86,5 @@ Example: create documents }, } cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") - cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") return cmd } diff --git a/cli/document_delete.go b/cli/collection_delete.go similarity index 72% rename from cli/document_delete.go rename to cli/collection_delete.go index 51f4141737..9032d0c935 100644 --- a/cli/document_delete.go +++ b/cli/collection_delete.go @@ -16,33 +16,26 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) -func MakeDocumentDeleteCommand() *cobra.Command { - var collection string +func MakeCollectionDeleteCommand() *cobra.Command { var keys []string var filter string var cmd = &cobra.Command{ - Use: "delete --collection [--filter --key ]", + Use: "delete [--filter --key ]", Short: "Delete documents by key or filter.", Long: `Delete documents by key or filter and lists the number of documents deleted. Example: delete by key(s) - defradb client document delete --collection User --key bae-123,bae-456 + defradb client collection delete --name User --key bae-123,bae-456 Example: delete by filter - defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' + defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' `, RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetStoreContext(cmd) - - col, err := store.GetCollectionByName(cmd.Context(), collection) - if err != nil { - return err - } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if !ok { + return cmd.Usage() } switch { @@ -81,7 +74,6 @@ Example: delete by filter } }, } - cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") return cmd diff --git a/cli/collection_describe.go b/cli/collection_describe.go new file mode 100644 index 0000000000..e93c575326 --- /dev/null +++ b/cli/collection_describe.go @@ -0,0 +1,56 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "github.com/sourcenetwork/defradb/client" + "github.com/spf13/cobra" +) + +func MakeCollectionDescribeCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "describe", + Short: "View collection description.", + Long: `Introspect collection types. + + Example: view all collections + defradb client collection describe + + Example: view collection by name + defradb client collection describe --name User + + Example: view collection by schema id + defradb client collection describe --schema bae123 + + Example: view collection by version id + defradb client collection describe --version bae123 + `, + RunE: func(cmd *cobra.Command, args []string) error { + store := mustGetStoreContext(cmd) + + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if ok { + return writeJSON(cmd, col.Description()) + } + // if no collection specified list all collections + cols, err := store.GetAllCollections(cmd.Context()) + if err != nil { + return err + } + colDesc := make([]client.CollectionDescription, len(cols)) + for i, col := range cols { + colDesc[i] = col.Description() + } + return writeJSON(cmd, colDesc) + }, + } + return cmd +} diff --git a/cli/document_get.go b/cli/collection_get.go similarity index 61% rename from cli/document_get.go rename to cli/collection_get.go index b1dde8c7d2..33a26e6c53 100644 --- a/cli/document_get.go +++ b/cli/collection_get.go @@ -14,31 +14,25 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) -func MakeDocumentGetCommand() *cobra.Command { +func MakeCollectionGetCommand() *cobra.Command { var showDeleted bool - var collection string var cmd = &cobra.Command{ - Use: "get --collection [--show-deleted]", - Short: "View detailed document info.", - Long: `View detailed document info. + Use: "get [--show-deleted]", + Short: "View document fields.", + Long: `View document fields. Example: - defradb client document get --collection User bae-123 + defradb client collection get --name User bae-123 `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetStoreContext(cmd) - - col, err := store.GetCollectionByName(cmd.Context(), collection) - if err != nil { - return err - } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if !ok { + return cmd.Usage() } + docKey, err := client.NewDocKeyFromString(args[0]) if err != nil { return err @@ -55,6 +49,5 @@ Example: }, } cmd.Flags().BoolVar(&showDeleted, "show-deleted", false, "Show deleted documents") - cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") return cmd } diff --git a/cli/document_keys.go b/cli/collection_keys.go similarity index 56% rename from cli/document_keys.go rename to cli/collection_keys.go index 942c0ea2c2..66656f12f4 100644 --- a/cli/document_keys.go +++ b/cli/collection_keys.go @@ -13,30 +13,25 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/http" ) -func MakeDocumentKeysCommand() *cobra.Command { - var collection string +func MakeCollectionKeysCommand() *cobra.Command { var cmd = &cobra.Command{ - Use: "keys --collection ", - Short: "List all collection document keys.", - Long: `List all collection document keys. 
+ Use: "keys", + Short: "List all document keys.", + Long: `List all document keys. Example: - defradb client document keys --collection User keys + defradb client collection keys --name User `, RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetStoreContext(cmd) - - col, err := store.GetCollectionByName(cmd.Context(), collection) - if err != nil { - return err - } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if !ok { + return cmd.Usage() } + docCh, err := col.GetAllDocKeys(cmd.Context()) if err != nil { return err @@ -48,11 +43,12 @@ Example: if docKey.Err != nil { results.Error = docKey.Err.Error() } - writeJSON(cmd, results) //nolint:errcheck + if err := writeJSON(cmd, results); err != nil { + return err + } } return nil }, } - cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") return cmd } diff --git a/cli/document_update.go b/cli/collection_update.go similarity index 76% rename from cli/document_update.go rename to cli/collection_update.go index ab25dec5bc..55e88dbebf 100644 --- a/cli/document_update.go +++ b/cli/collection_update.go @@ -16,40 +16,33 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/datastore" ) -func MakeDocumentUpdateCommand() *cobra.Command { - var collection string +func MakeCollectionUpdateCommand() *cobra.Command { var keys []string var filter string var updater string var cmd = &cobra.Command{ - Use: "update --collection [--filter --key --updater ] ", + Use: "update [--filter --key --updater ] ", Short: "Update documents by key or filter.", Long: `Update documents by key or filter. Example: - defradb client document update --collection User --key bae-123 '{ "name": "Bob" }' + defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' Example: update by filter - defradb client document update --collection User \ + defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' Example: update by keys - defradb client document update --collection User \ + defradb client collection update --name User \ --key bae-123,bae-456 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - store := mustGetStoreContext(cmd) - - col, err := store.GetCollectionByName(cmd.Context(), collection) - if err != nil { - return err - } - if tx, ok := cmd.Context().Value(txContextKey).(datastore.Txn); ok { - col = col.WithTxn(tx) + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + if !ok { + return cmd.Usage() } switch { @@ -101,7 +94,6 @@ Example: update by keys } }, } - cmd.Flags().StringVarP(&collection, "collection", "c", "", "Collection name") cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") diff --git a/cli/document.go b/cli/document.go deleted file mode 100644 index ca0e966ccd..0000000000 --- a/cli/document.go +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. 
-// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package cli - -import ( - "github.com/spf13/cobra" -) - -func MakeDocumentCommand() *cobra.Command { - var cmd = &cobra.Command{ - Use: "document", - Short: "Create, read, update, and delete documents.", - Long: `Create, read, update, and delete documents.`, - } - - return cmd -} diff --git a/cli/utils.go b/cli/utils.go index 1a0e536b75..8266ab4fd0 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -36,6 +36,11 @@ var ( // If a transaction exists, all operations will be executed // in the current transaction context. storeContextKey = contextKey("store") + // colContextKey is the context key for the client.Collection + // + // If a transaction exists, all operations will be executed + // in the current transaction context. + colContextKey = contextKey("col") ) // mustGetStoreContext returns the store for the current command context. diff --git a/cli/wrapper.go b/cli/wrapper.go index 11091a3425..4a0676b006 100644 --- a/cli/wrapper.go +++ b/cli/wrapper.go @@ -192,7 +192,7 @@ func (w *Wrapper) LensRegistry() client.LensRegistry { } func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.CollectionName) (client.Collection, error) { - args := []string{"client", "collection"} + args := []string{"client", "collection", "describe"} args = append(args, "--name", name) data, err := w.cmd.execute(ctx, args) @@ -207,7 +207,7 @@ func (w *Wrapper) GetCollectionByName(ctx context.Context, name client.Collectio } func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) (client.Collection, error) { - args := []string{"client", "collection"} + args := []string{"client", "collection", "describe"} args = append(args, "--schema", schemaId) data, err := w.cmd.execute(ctx, args) @@ -222,8 +222,8 @@ func (w *Wrapper) GetCollectionBySchemaID(ctx context.Context, schemaId string) } func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string) (client.Collection, error) { - args := []string{"client", "collection"} - args = append(args, "--versionId", versionId) + args := []string{"client", "collection", "describe"} + args = append(args, "--version", versionId) data, err := w.cmd.execute(ctx, args) if err != nil { @@ -237,7 +237,7 @@ func (w *Wrapper) GetCollectionByVersionID(ctx context.Context, versionId string } func (w *Wrapper) GetAllCollections(ctx context.Context) ([]client.Collection, error) { - args := []string{"client", "collection"} + args := []string{"client", "collection", "describe"} data, err := w.cmd.execute(ctx, args) if err != nil { diff --git a/cli/wrapper_collection.go b/cli/wrapper_collection.go index c8e1839ada..f32c661501 100644 --- a/cli/wrapper_collection.go +++ b/cli/wrapper_collection.go @@ -51,8 +51,8 @@ func (c *Collection) SchemaID() string { } func (c *Collection) Create(ctx context.Context, doc *client.Document) error { - args := []string{"client", "document", "create"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "create"} + args = append(args, "--name", c.desc.Name) // We must call this here, else the doc key on the given object will not match // that of the document saved in the database @@ -75,8 +75,8 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { } func (c *Collection) CreateMany(ctx context.Context, docs 
[]*client.Document) error { - args := []string{"client", "document", "create"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "create"} + args = append(args, "--name", c.desc.Name) docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { @@ -109,8 +109,8 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er } func (c *Collection) Update(ctx context.Context, doc *client.Document) error { - args := []string{"client", "document", "update"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "update"} + args = append(args, "--name", c.desc.Name) args = append(args, "--key", doc.Key().String()) document, err := documentJSON(doc) @@ -187,8 +187,8 @@ func (c *Collection) UpdateWithFilter( filter any, updater string, ) (*client.UpdateResult, error) { - args := []string{"client", "document", "update"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "update"} + args = append(args, "--name", c.desc.Name) args = append(args, "--updater", updater) filterJSON, err := json.Marshal(filter) @@ -205,8 +205,8 @@ func (c *Collection) UpdateWithKey( key client.DocKey, updater string, ) (*client.UpdateResult, error) { - args := []string{"client", "document", "update"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "update"} + args = append(args, "--name", c.desc.Name) args = append(args, "--key", key.String()) args = append(args, "--updater", updater) @@ -218,8 +218,8 @@ func (c *Collection) UpdateWithKeys( docKeys []client.DocKey, updater string, ) (*client.UpdateResult, error) { - args := []string{"client", "document", "update"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "update"} + args = append(args, "--name", c.desc.Name) args = append(args, "--updater", updater) keys := make([]string, len(docKeys)) @@ -260,8 +260,8 @@ func (c *Collection) deleteWith( } func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client.DeleteResult, error) { - args := []string{"client", "document", "delete"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "delete"} + args = append(args, "--name", c.desc.Name) filterJSON, err := json.Marshal(filter) if err != nil { @@ -273,16 +273,16 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. 
} func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { - args := []string{"client", "document", "delete"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "delete"} + args = append(args, "--name", c.desc.Name) args = append(args, "--key", docKey.String()) return c.deleteWith(ctx, args) } func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { - args := []string{"client", "document", "delete"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "delete"} + args = append(args, "--name", c.desc.Name) keys := make([]string, len(docKeys)) for i, v := range docKeys { @@ -294,8 +294,8 @@ func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey } func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { - args := []string{"client", "document", "get"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "get"} + args = append(args, "--name", c.desc.Name) args = append(args, key.String()) if showDeleted { @@ -321,8 +321,8 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { - args := []string{"client", "document", "keys"} - args = append(args, "--collection", c.desc.Name) + args := []string{"client", "collection", "keys"} + args = append(args, "--name", c.desc.Name) stdOut, _, err := c.cmd.executeStream(ctx, args) if err != nil { From 789476f71bd5a344de5f9da99bf396ce2d195787 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 17:24:44 -0700 Subject: [PATCH 100/107] cleanup cli errors --- cli/collection_create.go | 7 +- cli/collection_delete.go | 4 +- cli/collection_update.go | 4 +- cli/errors.go | 131 +++--------------------------------- cli/schema_migration_set.go | 6 +- 5 files changed, 16 insertions(+), 136 deletions(-) diff --git a/cli/collection_create.go b/cli/collection_create.go index 5fc93e842c..39e16f4407 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -12,7 +12,6 @@ package cli import ( "encoding/json" - "fmt" "os" "github.com/spf13/cobra" @@ -51,7 +50,7 @@ Example: create documents } docData = data default: - return fmt.Errorf("document or file must be defined") + return ErrNoDocOrFile } var docMap any @@ -71,7 +70,7 @@ Example: create documents for i, v := range t { docMap, ok := v.(map[string]any) if !ok { - return fmt.Errorf("invalid document") + return ErrInvalidDocument } doc, err := client.NewDocFromMap(docMap) if err != nil { @@ -81,7 +80,7 @@ Example: create documents } return col.CreateMany(cmd.Context(), docs) default: - return fmt.Errorf("invalid document") + return ErrInvalidDocument } }, } diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 9032d0c935..0c5bafcb65 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -11,8 +11,6 @@ package cli import ( - "fmt" - "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" @@ -70,7 +68,7 @@ Example: delete by filter } return writeJSON(cmd, res) default: - return fmt.Errorf("document key or filter must be defined") + return ErrNoDocKeyOrFilter } }, } diff --git a/cli/collection_update.go b/cli/collection_update.go index 55e88dbebf..6ad4626852 100644 --- a/cli/collection_update.go +++ 
b/cli/collection_update.go @@ -11,8 +11,6 @@ package cli import ( - "fmt" - "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" @@ -90,7 +88,7 @@ Example: update by keys } return col.Update(cmd.Context(), doc) default: - return fmt.Errorf("document key or filter must be defined") + return ErrNoDocKeyOrFilter } }, } diff --git a/cli/errors.go b/cli/errors.go index 17e4819a8b..a7d6cbd26b 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -11,133 +11,20 @@ package cli import ( - "strings" - "github.com/sourcenetwork/defradb/errors" ) -const ( - errMissingArg string = "missing argument" - errMissingArgs string = "missing arguments" - errTooManyArgs string = "too many arguments" - errEmptyStdin string = "empty stdin" - errEmptyFile string = "empty file" - errFailedToReadFile string = "failed to read file" - errFailedToReadStdin string = "failed to read stdin" - errFailedToCreateRPCClient string = "failed to create RPC client" - errFailedToAddReplicator string = "failed to add replicator, request failed" - errFailedToJoinEndpoint string = "failed to join endpoint" - errFailedToSendRequest string = "failed to send request" - errFailedToReadResponseBody string = "failed to read response body" - errFailedToCloseResponseBody string = "failed to close response body" - errFailedToStatStdOut string = "failed to stat stdout" - errFailedToHandleGQLErrors string = "failed to handle GraphQL errors" - errFailedToPrettyPrintResponse string = "failed to pretty print response" - errFailedToUnmarshalResponse string = "failed to unmarshal response" - errFailedParsePeerID string = "failed to parse PeerID" - errFailedToMarshalData string = "failed to marshal data" - errInvalidArgumentLength string = "invalid argument length" -) +const errInvalidLensConfig = "invalid lens configuration" -// Errors returnable from this package. -// -// This list is incomplete and undefined errors may also be returned. -// Errors returned from this package may be tested against these errors with errors.Is. 
var ( - ErrMissingArg = errors.New(errMissingArg) - ErrMissingArgs = errors.New(errMissingArgs) - ErrTooManyArgs = errors.New(errTooManyArgs) - ErrEmptyFile = errors.New(errEmptyFile) - ErrEmptyStdin = errors.New(errEmptyStdin) - ErrFailedToReadFile = errors.New(errFailedToReadFile) - ErrFailedToReadStdin = errors.New(errFailedToReadStdin) - ErrFailedToCreateRPCClient = errors.New(errFailedToCreateRPCClient) - ErrFailedToAddReplicator = errors.New(errFailedToAddReplicator) - ErrFailedToJoinEndpoint = errors.New(errFailedToJoinEndpoint) - ErrFailedToSendRequest = errors.New(errFailedToSendRequest) - ErrFailedToReadResponseBody = errors.New(errFailedToReadResponseBody) - ErrFailedToStatStdOut = errors.New(errFailedToStatStdOut) - ErrFailedToHandleGQLErrors = errors.New(errFailedToHandleGQLErrors) - ErrFailedToPrettyPrintResponse = errors.New(errFailedToPrettyPrintResponse) - ErrFailedToUnmarshalResponse = errors.New(errFailedToUnmarshalResponse) - ErrFailedParsePeerID = errors.New(errFailedParsePeerID) - ErrInvalidExportFormat = errors.New("invalid export format") - ErrInvalidArgumentLength = errors.New(errInvalidArgumentLength) + ErrNoDocOrFile = errors.New("document or file must be defined") + ErrInvalidDocument = errors.New("invalid document") + ErrNoDocKeyOrFilter = errors.New("document key or filter must be defined") + ErrInvalidExportFormat = errors.New("invalid export format") + ErrNoLensConfig = errors.New("lens config cannot be empty") + ErrInvalidLensConfig = errors.New("invalid lens configuration") ) -func NewErrMissingArg(name string) error { - return errors.New(errMissingArg, errors.NewKV("Name", name)) -} - -func NewErrMissingArgs(names []string) error { - return errors.New(errMissingArgs, errors.NewKV("Required", strings.Join(names, ", "))) -} - -func NewErrTooManyArgs(max, actual int) error { - return errors.New(errTooManyArgs, errors.NewKV("Max", max), errors.NewKV("Actual", actual)) -} - -func NewFailedToReadFile(inner error) error { - return errors.Wrap(errFailedToReadFile, inner) -} - -func NewFailedToReadStdin(inner error) error { - return errors.Wrap(errFailedToReadStdin, inner) -} - -func NewErrFailedToCreateRPCClient(inner error) error { - return errors.Wrap(errFailedToCreateRPCClient, inner) -} - -func NewErrFailedToAddReplicator(inner error) error { - return errors.Wrap(errFailedToAddReplicator, inner) -} - -func NewErrFailedToJoinEndpoint(inner error) error { - return errors.Wrap(errFailedToJoinEndpoint, inner) -} - -func NewErrFailedToSendRequest(inner error) error { - return errors.Wrap(errFailedToSendRequest, inner) -} - -func NewErrFailedToReadResponseBody(inner error) error { - return errors.Wrap(errFailedToReadResponseBody, inner) -} - -func NewErrFailedToCloseResponseBody(closeErr, other error) error { - if other != nil { - return errors.Wrap(errFailedToCloseResponseBody, closeErr, errors.NewKV("Other error", other)) - } - return errors.Wrap(errFailedToCloseResponseBody, closeErr) -} - -func NewErrFailedToStatStdOut(inner error) error { - return errors.Wrap(errFailedToStatStdOut, inner) -} - -func NewErrFailedToHandleGQLErrors(inner error) error { - return errors.Wrap(errFailedToHandleGQLErrors, inner) -} - -func NewErrFailedToPrettyPrintResponse(inner error) error { - return errors.Wrap(errFailedToPrettyPrintResponse, inner) -} - -func NewErrFailedToUnmarshalResponse(inner error) error { - return errors.Wrap(errFailedToUnmarshalResponse, inner) -} - -func NewErrFailedParsePeerID(inner error) error { - return errors.Wrap(errFailedParsePeerID, inner) -} - -// 
NewFailedToMarshalData returns an error indicating that a there was a problem with mashalling. -func NewFailedToMarshalData(inner error) error { - return errors.Wrap(errFailedToMarshalData, inner) -} - -// NewErrInvalidArgumentLength returns an error indicating an incorrect number of arguments. -func NewErrInvalidArgumentLength(inner error, expected int) error { - return errors.Wrap(errInvalidArgumentLength, inner, errors.NewKV("Expected", expected)) +func NewErrInvalidLensConfig(inner error) error { + return errors.Wrap(errInvalidLensConfig, inner) } diff --git a/cli/schema_migration_set.go b/cli/schema_migration_set.go index 90b4289313..280130b8db 100644 --- a/cli/schema_migration_set.go +++ b/cli/schema_migration_set.go @@ -12,7 +12,6 @@ package cli import ( "encoding/json" - "fmt" "io" "os" "strings" @@ -21,7 +20,6 @@ import ( "github.com/spf13/cobra" "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/errors" ) func MakeSchemaMigrationSetCommand() *cobra.Command { @@ -62,7 +60,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw case len(args) == 3: lensCfgJson = args[2] default: - return fmt.Errorf("lens config cannot be empty") + return ErrNoLensConfig } srcSchemaVersionID := args[0] @@ -73,7 +71,7 @@ Learn more about the DefraDB GraphQL Schema Language on https://docs.source.netw var lensCfg model.Lens if err := decoder.Decode(&lensCfg); err != nil { - return errors.Wrap("invalid lens configuration", err) + return NewErrInvalidLensConfig(err) } migrationCfg := client.LensConfig{ From d8b582711a1e9736153d10661aed8132bfd6c5ab Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 17:39:22 -0700 Subject: [PATCH 101/107] update cli schema migration up and down documentation --- cli/schema_migration_down.go | 13 +++++++++---- cli/schema_migration_up.go | 13 +++++++++---- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 186db11f80..d857b576a3 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -22,10 +22,15 @@ import ( func MakeSchemaMigrationDownCommand() *cobra.Command { var schemaVersionID string var cmd = &cobra.Command{ - Use: "down --version ", - Short: "Reverse a migration on the specified schema version.", - Long: `Reverse a migration on the specified schema version.`, - Args: cobra.ExactArgs(1), + Use: "down --version ", + Short: "Reverses the migration from the specified schema version.", + Long: `Reverses the migration from the specified schema version. +Documents is a list of documents to reverse the migration from. + +Example: + defradb client schema migration down --version bae123 '[{"name": "Bob"}]' + `, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index b272694f43..146ba7f67b 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -22,10 +22,15 @@ import ( func MakeSchemaMigrationUpCommand() *cobra.Command { var schemaVersionID string var cmd = &cobra.Command{ - Use: "up --version ", - Short: "Runs a migration on the specified schema version.", - Long: `Runs a migration on the specified schema version.`, - Args: cobra.ExactArgs(1), + Use: "up --version ", + Short: "Applies the migration to the specified schema version.", + Long: `Applies the migration to the specified schema version. 
+Documents is a list of documents to apply the migration to. + +Example: + defradb client schema migration up --version bae123 '[{"name": "Bob"}]' + `, + Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) From 3255d05b76dd6aa34d7711498f9225d5f3f79763 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 29 Sep 2023 18:00:23 -0700 Subject: [PATCH 102/107] move client wrappers to tests/clients package --- cli/collection_describe.go | 3 ++- cli/utils.go | 19 -------------- client/document.go | 20 ++++++++++++++ http/client_collection.go | 4 +-- http/utils.go | 19 -------------- {cli => tests/clients/cli}/wrapper.go | 3 ++- {cli => tests/clients/cli}/wrapper_cli.go | 3 ++- .../clients/cli}/wrapper_collection.go | 2 +- {cli => tests/clients/cli}/wrapper_lens.go | 0 {cli => tests/clients/cli}/wrapper_tx.go | 0 {http => tests/clients/http}/wrapper.go | 26 +++++++++---------- {http => tests/clients/http}/wrapper_tx.go | 0 tests/integration/utils2.go | 4 +-- 13 files changed, 44 insertions(+), 59 deletions(-) rename {cli => tests/clients/cli}/wrapper.go (99%) rename {cli => tests/clients/cli}/wrapper_cli.go (95%) rename {cli => tests/clients/cli}/wrapper_collection.go (99%) rename {cli => tests/clients/cli}/wrapper_lens.go (100%) rename {cli => tests/clients/cli}/wrapper_tx.go (100%) rename {http => tests/clients/http}/wrapper.go (90%) rename {http => tests/clients/http}/wrapper_tx.go (100%) diff --git a/cli/collection_describe.go b/cli/collection_describe.go index e93c575326..3cdd9788f2 100644 --- a/cli/collection_describe.go +++ b/cli/collection_describe.go @@ -11,8 +11,9 @@ package cli import ( - "github.com/sourcenetwork/defradb/client" "github.com/spf13/cobra" + + "github.com/sourcenetwork/defradb/client" ) func MakeCollectionDescribeCommand() *cobra.Command { diff --git a/cli/utils.go b/cli/utils.go index 8266ab4fd0..df24f9e5a8 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -103,22 +103,3 @@ func writeJSON(cmd *cobra.Command, out any) error { enc.SetIndent("", " ") return enc.Encode(out) } - -func documentJSON(doc *client.Document) ([]byte, error) { - docMap, err := doc.ToMap() - if err != nil { - return nil, err - } - delete(docMap, "_key") - - for field, value := range doc.Values() { - if !value.IsDirty() { - delete(docMap, field.Name()) - } - if value.IsDelete() { - docMap[field.Name()] = nil - } - } - - return json.Marshal(docMap) -} diff --git a/client/document.go b/client/document.go index c48ccfce88..bcb8ae6070 100644 --- a/client/document.go +++ b/client/document.go @@ -398,6 +398,26 @@ func (doc *Document) ToMap() (map[string]any, error) { return doc.toMapWithKey() } +// ToJSONPatch returns a json patch that can be used to update +// a document by calling SetWithJSON. +func (doc *Document) ToJSONPatch() ([]byte, error) { + docMap, err := doc.toMap() + if err != nil { + return nil, err + } + + for field, value := range doc.Values() { + if !value.IsDirty() { + delete(docMap, field.Name()) + } + if value.IsDelete() { + docMap[field.Name()] = nil + } + } + + return json.Marshal(docMap) +} + // Clean cleans the document by removing all dirty fields.
func (doc *Document) Clean() { for _, v := range doc.Fields() { diff --git a/http/client_collection.go b/http/client_collection.go index 835a223dbf..9641157d1b 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -93,7 +93,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er return err } - docMap, err := documentJSON(doc) + docMap, err := doc.ToJSONPatch() if err != nil { return err } @@ -120,7 +120,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er func (c *Collection) Update(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.desc.Name, doc.Key().String()) - body, err := documentJSON(doc) + body, err := doc.ToJSONPatch() if err != nil { return err } diff --git a/http/utils.go b/http/utils.go index a171e0ed38..c7b1507c4e 100644 --- a/http/utils.go +++ b/http/utils.go @@ -34,25 +34,6 @@ func responseJSON(rw http.ResponseWriter, status int, out any) { json.NewEncoder(rw).Encode(out) //nolint:errcheck } -func documentJSON(doc *client.Document) ([]byte, error) { - docMap, err := doc.ToMap() - if err != nil { - return nil, err - } - delete(docMap, "_key") - - for field, value := range doc.Values() { - if !value.IsDirty() { - delete(docMap, field.Name()) - } - if value.IsDelete() { - docMap[field.Name()] = nil - } - } - - return json.Marshal(docMap) -} - func parseError(msg any) error { switch msg { case client.ErrDocumentNotFound.Error(): diff --git a/cli/wrapper.go b/tests/clients/cli/wrapper.go similarity index 99% rename from cli/wrapper.go rename to tests/clients/cli/wrapper.go index 4a0676b006..f167b882d8 100644 --- a/cli/wrapper.go +++ b/tests/clients/cli/wrapper.go @@ -22,6 +22,7 @@ import ( blockstore "github.com/ipfs/boxo/blockstore" "github.com/libp2p/go-libp2p/core/peer" + "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" @@ -285,7 +286,7 @@ func (w *Wrapper) ExecRequest(ctx context.Context, query string) *client.Request result.GQL.Errors = []error{err} return result } - if header == SUB_RESULTS_HEADER { + if header == cli.SUB_RESULTS_HEADER { result.Pub = w.execRequestSubscription(ctx, buffer) return result } diff --git a/cli/wrapper_cli.go b/tests/clients/cli/wrapper_cli.go similarity index 95% rename from cli/wrapper_cli.go rename to tests/clients/cli/wrapper_cli.go index 7f204a48e3..1f73b20e25 100644 --- a/cli/wrapper_cli.go +++ b/tests/clients/cli/wrapper_cli.go @@ -16,6 +16,7 @@ import ( "io" "strings" + "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/config" "github.com/sourcenetwork/defradb/datastore" ) @@ -66,7 +67,7 @@ func (w *cliWrapper) executeStream(ctx context.Context, args []string) (io.ReadC } args = append(args, "--url", w.address) - cmd := NewDefraCommand(config.DefaultConfig()) + cmd := cli.NewDefraCommand(config.DefaultConfig()) cmd.SetOut(stdOutWrite) cmd.SetErr(stdErrWrite) cmd.SetArgs(args) diff --git a/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go similarity index 99% rename from cli/wrapper_collection.go rename to tests/clients/cli/wrapper_collection.go index f32c661501..3500bdce7c 100644 --- a/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -113,7 +113,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { args = append(args, 
"--name", c.desc.Name) args = append(args, "--key", doc.Key().String()) - document, err := documentJSON(doc) + document, err := doc.ToJSONPatch() if err != nil { return err } diff --git a/cli/wrapper_lens.go b/tests/clients/cli/wrapper_lens.go similarity index 100% rename from cli/wrapper_lens.go rename to tests/clients/cli/wrapper_lens.go diff --git a/cli/wrapper_tx.go b/tests/clients/cli/wrapper_tx.go similarity index 100% rename from cli/wrapper_tx.go rename to tests/clients/cli/wrapper_tx.go diff --git a/http/wrapper.go b/tests/clients/http/wrapper.go similarity index 90% rename from http/wrapper.go rename to tests/clients/http/wrapper.go index 67dc70136a..10b34129d8 100644 --- a/http/wrapper.go +++ b/tests/clients/http/wrapper.go @@ -12,7 +12,6 @@ package http import ( "context" - "fmt" "net/http/httptest" blockstore "github.com/ipfs/boxo/blockstore" @@ -20,6 +19,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/events" + "github.com/sourcenetwork/defradb/http" ) var _ client.DB = (*Wrapper)(nil) @@ -28,16 +28,16 @@ var _ client.DB = (*Wrapper)(nil) // single struct that implements the client.DB interface. type Wrapper struct { db client.DB - handler *Handler - client *Client + handler *http.Handler + client *http.Client httpServer *httptest.Server } func NewWrapper(db client.DB) (*Wrapper, error) { - handler := NewHandler(db, ServerOptions{}) + handler := http.NewHandler(db, http.ServerOptions{}) httpServer := httptest.NewServer(handler) - client, err := NewClient(httpServer.URL) + client, err := http.NewClient(httpServer.URL) if err != nil { return nil, err } @@ -131,11 +131,11 @@ func (w *Wrapper) NewTxn(ctx context.Context, readOnly bool) (datastore.Txn, err if err != nil { return nil, err } - server, ok := w.handler.txs.Load(client.ID()) - if !ok { - return nil, fmt.Errorf("failed to get server transaction") + server, err := w.handler.Transaction(client.ID()) + if err != nil { + return nil, err } - return &TxWrapper{server.(datastore.Txn), client}, nil + return &TxWrapper{server, client}, nil } func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastore.Txn, error) { @@ -143,11 +143,11 @@ func (w *Wrapper) NewConcurrentTxn(ctx context.Context, readOnly bool) (datastor if err != nil { return nil, err } - server, ok := w.handler.txs.Load(client.ID()) - if !ok { - return nil, fmt.Errorf("failed to get server transaction") + server, err := w.handler.Transaction(client.ID()) + if err != nil { + return nil, err } - return &TxWrapper{server.(datastore.Txn), client}, nil + return &TxWrapper{server, client}, nil } func (w *Wrapper) WithTxn(tx datastore.Txn) client.Store { diff --git a/http/wrapper_tx.go b/tests/clients/http/wrapper_tx.go similarity index 100% rename from http/wrapper_tx.go rename to tests/clients/http/wrapper_tx.go diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 6c047ff6c1..420e7f4c9c 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -26,17 +26,17 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/sourcenetwork/defradb/cli" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/datastore" badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db" 
"github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/http" "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" + "github.com/sourcenetwork/defradb/tests/clients/cli" + "github.com/sourcenetwork/defradb/tests/clients/http" ) const ( From 0ad7eb3e2977277ae51a8f5914b4c8f245c15f97 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 2 Oct 2023 09:06:08 -0700 Subject: [PATCH 103/107] update run-tests-and-upload-coverage workflow triggers --- .github/workflows/run-tests-and-upload-coverage.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/run-tests-and-upload-coverage.yml b/.github/workflows/run-tests-and-upload-coverage.yml index deebcaa1cd..f1f8724ced 100644 --- a/.github/workflows/run-tests-and-upload-coverage.yml +++ b/.github/workflows/run-tests-and-upload-coverage.yml @@ -11,13 +11,13 @@ name: Run Tests And Upload Coverage Workflow on: - pull_request: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' branches: - master - develop - push: - jobs: run-tests: name: Run tests matrix job From 0b3514ec9e3e24fa20bd7cdc0db5b712414dd92a Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 2 Oct 2023 09:16:21 -0700 Subject: [PATCH 104/107] add cli collection context helper --- cli/collection_create.go | 2 +- cli/collection_delete.go | 2 +- cli/collection_describe.go | 2 +- cli/collection_get.go | 2 +- cli/collection_keys.go | 3 +-- cli/collection_update.go | 2 +- cli/utils.go | 7 +++++++ 7 files changed, 13 insertions(+), 7 deletions(-) diff --git a/cli/collection_create.go b/cli/collection_create.go index 39e16f4407..3755b3ecb9 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -34,7 +34,7 @@ Example: create documents `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if !ok { return cmd.Usage() } diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 0c5bafcb65..85539d5eb3 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -31,7 +31,7 @@ Example: delete by filter defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' `, RunE: func(cmd *cobra.Command, args []string) error { - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if !ok { return cmd.Usage() } diff --git a/cli/collection_describe.go b/cli/collection_describe.go index 3cdd9788f2..e0f878d93e 100644 --- a/cli/collection_describe.go +++ b/cli/collection_describe.go @@ -37,7 +37,7 @@ func MakeCollectionDescribeCommand() *cobra.Command { RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if ok { return writeJSON(cmd, col.Description()) } diff --git a/cli/collection_get.go b/cli/collection_get.go index 33a26e6c53..d908bbdb7a 100644 --- a/cli/collection_get.go +++ b/cli/collection_get.go @@ -28,7 +28,7 @@ Example: `, Args: cobra.ExactArgs(1), RunE: func(cmd *cobra.Command, args []string) error { - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if !ok { return cmd.Usage() } diff --git a/cli/collection_keys.go b/cli/collection_keys.go 
index 66656f12f4..a453c16a86 100644 --- a/cli/collection_keys.go +++ b/cli/collection_keys.go @@ -13,7 +13,6 @@ package cli import ( "github.com/spf13/cobra" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/http" ) @@ -27,7 +26,7 @@ Example: defradb client collection keys --name User `, RunE: func(cmd *cobra.Command, args []string) error { - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if !ok { return cmd.Usage() } diff --git a/cli/collection_update.go b/cli/collection_update.go index 6ad4626852..557bd78434 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -38,7 +38,7 @@ Example: update by keys `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { - col, ok := cmd.Context().Value(colContextKey).(client.Collection) + col, ok := tryGetCollectionContext(cmd) if !ok { return cmd.Usage() } diff --git a/cli/utils.go b/cli/utils.go index df24f9e5a8..b9e4d1a710 100644 --- a/cli/utils.go +++ b/cli/utils.go @@ -50,6 +50,13 @@ func mustGetStoreContext(cmd *cobra.Command) client.Store { return cmd.Context().Value(storeContextKey).(client.Store) } +// tryGetCollectionContext returns the collection for the current command context +// and a boolean indicating if the collection was set. +func tryGetCollectionContext(cmd *cobra.Command) (client.Collection, bool) { + col, ok := cmd.Context().Value(colContextKey).(client.Collection) + return col, ok +} + // setTransactionContext sets the transaction for the current command context. func setTransactionContext(cmd *cobra.Command, cfg *config.Config, txId uint64) error { if txId == 0 { From 70b2420cef6ab8a1bb4de171d4162dccde6844cc Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 2 Oct 2023 09:57:49 -0700 Subject: [PATCH 105/107] fixes for cli lens migrate up and down --- cli/schema_migration_down.go | 30 +++++++++++++++++++++++++++--- cli/schema_migration_up.go | 30 +++++++++++++++++++++++++++--- http/handler_lens.go | 20 ++++++++++++++++++-- 3 files changed, 72 insertions(+), 8 deletions(-) diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index d857b576a3..6755a70373 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -12,6 +12,7 @@ package cli import ( "encoding/json" + "os" "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" @@ -20,6 +21,7 @@ import ( ) func MakeSchemaMigrationDownCommand() *cobra.Command { + var file string var schemaVersionID string var cmd = &cobra.Command{ Use: "down --version ", @@ -30,12 +32,26 @@ Documents is a list of documents to reverse the migration from. 
Example: defradb client schema migration down --version bae123 '[{"name": "Bob"}]' `, - Args: cobra.ExactArgs(1), + Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) + var srcData []byte + switch { + case file != "": + data, err := os.ReadFile(file) + if err != nil { + return err + } + srcData = data + case len(args) == 1: + srcData = []byte(args[0]) + default: + return ErrNoDocOrFile + } + var src []map[string]any - if err := json.Unmarshal([]byte(args[0]), &src); err != nil { + if err := json.Unmarshal(srcData, &src); err != nil { return err } lens := store.LensRegistry() @@ -46,9 +62,17 @@ Example: if err != nil { return err } - return writeJSON(cmd, out) + var value []map[string]any + err = enumerable.ForEach(out, func(item map[string]any) { + value = append(value, item) + }) + if err != nil { + return err + } + return writeJSON(cmd, value) }, } + cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") cmd.Flags().StringVar(&schemaVersionID, "version", "", "Schema version id") return cmd } diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index 146ba7f67b..d244721664 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -12,6 +12,7 @@ package cli import ( "encoding/json" + "os" "github.com/sourcenetwork/immutable/enumerable" "github.com/spf13/cobra" @@ -20,6 +21,7 @@ import ( ) func MakeSchemaMigrationUpCommand() *cobra.Command { + var file string var schemaVersionID string var cmd = &cobra.Command{ Use: "up --version ", @@ -30,12 +32,26 @@ Documents is a list of documents to apply the migration to. Example: defradb client schema migration down --version bae123 '[{"name": "Bob"}]' `, - Args: cobra.ExactArgs(1), + Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) + var srcData []byte + switch { + case file != "": + data, err := os.ReadFile(file) + if err != nil { + return err + } + srcData = data + case len(args) == 1: + srcData = []byte(args[0]) + default: + return ErrNoDocOrFile + } + var src []map[string]any - if err := json.Unmarshal([]byte(args[0]), &src); err != nil { + if err := json.Unmarshal(srcData, &src); err != nil { return err } lens := store.LensRegistry() @@ -46,9 +62,17 @@ Example: if err != nil { return err } - return writeJSON(cmd, out) + var value []map[string]any + err = enumerable.ForEach(out, func(item map[string]any) { + value = append(value, item) + }) + if err != nil { + return err + } + return writeJSON(cmd, value) }, } + cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)") cmd.Flags().StringVar(&schemaVersionID, "version", "", "Schema version id") return cmd } diff --git a/http/handler_lens.go b/http/handler_lens.go index ccf8dd01a8..d5ddb704c8 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -61,7 +61,15 @@ func (s *lensHandler) MigrateUp(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - responseJSON(rw, http.StatusOK, result) + var value []map[string]any + err = enumerable.ForEach(result, func(item map[string]any) { + value = append(value, item) + }) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, value) } func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req *http.Request) { @@ -77,7 +85,15 @@ func (s *lensHandler) MigrateDown(rw http.ResponseWriter, req 
*http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - responseJSON(rw, http.StatusOK, result) + var value []map[string]any + err = enumerable.ForEach(result, func(item map[string]any) { + value = append(value, item) + }) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + responseJSON(rw, http.StatusOK, value) } func (s *lensHandler) Config(rw http.ResponseWriter, req *http.Request) { From 1e654622a85abd7c5ca661c2eb9cc311bfe1633c Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 2 Oct 2023 10:11:58 -0700 Subject: [PATCH 106/107] add a few more stdin args --- cli/collection_create.go | 21 +++++++++++++++++---- cli/collection_describe.go | 16 ++++++++-------- cli/collection_update.go | 2 +- cli/schema_migration_down.go | 15 ++++++++++++++- cli/schema_migration_up.go | 17 +++++++++++++++-- 5 files changed, 55 insertions(+), 16 deletions(-) diff --git a/cli/collection_create.go b/cli/collection_create.go index 3755b3ecb9..4dca9be33a 100644 --- a/cli/collection_create.go +++ b/cli/collection_create.go @@ -12,6 +12,7 @@ package cli import ( "encoding/json" + "io" "os" "github.com/spf13/cobra" @@ -26,11 +27,17 @@ func MakeCollectionCreateCommand() *cobra.Command { Short: "Create a new document.", Long: `Create a new document. -Example: create document +Example: create from string defradb client collection create --name User '{ "name": "Bob" }' -Example: create documents +Example: create multiple from string defradb client collection create --name User '[{ "name": "Alice" }, { "name": "Bob" }]' + +Example: create from file + defradb client collection create --name User -f document.json + +Example: create from stdin + cat document.json | defradb client collection create --name User - `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -41,14 +48,20 @@ Example: create documents var docData []byte switch { - case len(args) == 1: - docData = []byte(args[0]) case file != "": data, err := os.ReadFile(file) if err != nil { return err } docData = data + case len(args) == 1 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + docData = data + case len(args) == 1: + docData = []byte(args[0]) default: return ErrNoDocOrFile } diff --git a/cli/collection_describe.go b/cli/collection_describe.go index e0f878d93e..1d6ee55821 100644 --- a/cli/collection_describe.go +++ b/cli/collection_describe.go @@ -22,17 +22,17 @@ func MakeCollectionDescribeCommand() *cobra.Command { Short: "View collection description.", Long: `Introspect collection types. 
- Example: view all collections - defradb client collection describe +Example: view all collections + defradb client collection describe - Example: view collection by name - defradb client collection describe --name User +Example: view collection by name + defradb client collection describe --name User - Example: view collection by schema id - defradb client collection describe --schema bae123 +Example: view collection by schema id + defradb client collection describe --schema bae123 - Example: view collection by version id - defradb client collection describe --version bae123 +Example: view collection by version id + defradb client collection describe --version bae123 `, RunE: func(cmd *cobra.Command, args []string) error { store := mustGetStoreContext(cmd) diff --git a/cli/collection_update.go b/cli/collection_update.go index 557bd78434..317a2e8119 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -25,7 +25,7 @@ func MakeCollectionUpdateCommand() *cobra.Command { Short: "Update documents by key or filter.", Long: `Update documents by key or filter. -Example: +Example: update from string defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' Example: update by filter diff --git a/cli/schema_migration_down.go b/cli/schema_migration_down.go index 6755a70373..1dcb5e64da 100644 --- a/cli/schema_migration_down.go +++ b/cli/schema_migration_down.go @@ -12,6 +12,7 @@ package cli import ( "encoding/json" + "io" "os" "github.com/sourcenetwork/immutable/enumerable" @@ -29,8 +30,14 @@ func MakeSchemaMigrationDownCommand() *cobra.Command { Long: `Reverses the migration from the specified schema version. Documents is a list of documents to reverse the migration from. -Example: +Example: migrate from string defradb client schema migration down --version bae123 '[{"name": "Bob"}]' + +Example: migrate from file + defradb client schema migration down --version bae123 -f documents.json + +Example: migrate from stdin + cat documents.json | defradb client schema migration down --version bae123 - `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -44,6 +51,12 @@ Example: return err } srcData = data + case len(args) == 1 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + srcData = data case len(args) == 1: srcData = []byte(args[0]) default: diff --git a/cli/schema_migration_up.go b/cli/schema_migration_up.go index d244721664..3b0b522349 100644 --- a/cli/schema_migration_up.go +++ b/cli/schema_migration_up.go @@ -12,6 +12,7 @@ package cli import ( "encoding/json" + "io" "os" "github.com/sourcenetwork/immutable/enumerable" @@ -29,8 +30,14 @@ func MakeSchemaMigrationUpCommand() *cobra.Command { Long: `Applies the migration to the specified schema version. Documents is a list of documents to apply the migration to. 
-Example: - defradb client schema migration down --version bae123 '[{"name": "Bob"}]' +Example: migrate from string + defradb client schema migration up --version bae123 '[{"name": "Bob"}]' + +Example: migrate from file + defradb client schema migration up --version bae123 -f documents.json + +Example: migrate from stdin + cat documents.json | defradb client schema migration up --version bae123 - `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -44,6 +51,12 @@ Example: return err } srcData = data + case len(args) == 1 && args[0] == "-": + data, err := io.ReadAll(cmd.InOrStdin()) + if err != nil { + return err + } + srcData = data case len(args) == 1: srcData = []byte(args[0]) default: From cd5d0aceb5392c85065d6b02fa5e4ceca014e39b Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Mon, 2 Oct 2023 10:19:40 -0700 Subject: [PATCH 107/107] fix http cors middleware --- http/middleware.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/http/middleware.go b/http/middleware.go index 0478b6c6bd..932797ff2c 100644 --- a/http/middleware.go +++ b/http/middleware.go @@ -61,7 +61,10 @@ var ( func CorsMiddleware(opts ServerOptions) func(http.Handler) http.Handler { return cors.Handler(cors.Options{ AllowOriginFunc: func(r *http.Request, origin string) bool { - return slices.Contains[string](opts.AllowedOrigins, strings.ToLower(origin)) + if slices.Contains(opts.AllowedOrigins, "*") { + return true + } + return slices.Contains(opts.AllowedOrigins, strings.ToLower(origin)) }, AllowedMethods: []string{"GET", "HEAD", "POST", "PATCH", "DELETE"}, AllowedHeaders: []string{"Content-Type"},
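
Note on the CORS change in the final patch: before this fix, a configured "*" origin only matched an Origin header that was literally "*" (which browsers never send), so wildcard configurations rejected every real origin; the new check short-circuits on the wildcard before falling back to the case-insensitive allow-list lookup. The standalone sketch below mirrors that logic for reference only — the package layout, example origins, and use of the Go 1.21 standard-library "slices" package (the repository itself may rely on golang.org/x/exp/slices) are illustrative assumptions, not part of the patch.

    package main

    import (
        "fmt"
        "slices" // Go 1.21+ stdlib; assumption — older toolchains would use golang.org/x/exp/slices
        "strings"
    )

    // allowOrigin mirrors the check added to CorsMiddleware: a "*" entry in the
    // allow-list accepts any origin; otherwise the request origin is lower-cased
    // and must appear verbatim in the allow-list.
    func allowOrigin(allowedOrigins []string, origin string) bool {
        if slices.Contains(allowedOrigins, "*") {
            return true
        }
        return slices.Contains(allowedOrigins, strings.ToLower(origin))
    }

    func main() {
        fmt.Println(allowOrigin([]string{"*"}, "https://example.com"))               // true: wildcard matches everything
        fmt.Println(allowOrigin([]string{"https://app.local"}, "HTTPS://APP.LOCAL")) // true: origin is lower-cased before the lookup
        fmt.Println(allowOrigin([]string{"https://app.local"}, "https://other.io"))  // false: not in the allow-list
    }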