diff --git a/benches/gitgc.ts b/benches/gitgc.ts new file mode 100644 index 000000000..3ab0f19fb --- /dev/null +++ b/benches/gitgc.ts @@ -0,0 +1,104 @@ +import b from 'benny'; +import packageJson from '../package.json'; + +async function main () { + let map = new Map(); + let obj = {}; + let arr = []; + let set = new Set(); + const summary = await b.suite( + 'gitgc', + b.add('map', async () => { + map = new Map(); + return async () => { + for (let i = 0; i < 1000; i++) { + map.set(i, undefined); + } + for (let i = 0; i < 1000; i++) { + map.delete(i); + } + for (const i of map) { + // NOOP + } + } + }), + b.add('obj', async () => { + obj = {}; + return async () => { + for (let i = 0; i < 1000; i++) { + obj[i] = undefined; + } + for (let i = 0; i < 1000; i++) { + delete obj[i]; + } + for (const i in obj) { + // NOOP + } + }; + }), + b.add('arr', async () => { + // you first have to count the number of objects + arr = []; + return async () => { + // you have to iterate for each object + // then for each value in length + for (let i = 0; i < 1000; i++) { + if (i === arr.length) { + // double the vector + arr.length = arr.length * 2 || 2; + } + arr[i] = { id: i, mark: false }; + // arr.push({ id: i, mark: false}); + } + // this has to iterate the length of the array + // but stop as soon as it reaches the end + // it gets complicate, but for 5x improvement + // it could be interesting + for (let i = 0; i < 1000; i++) { + arr[i].mark = true; + } + for (let i = 0; i < 1000; i++) { + if (arr[i].mark === false) { + // NOOP + } + } + }; + }), + b.add('set', async () => { + set = new Set(); + return async () => { + for (let i = 0; i < 1000; i++) { + set.add(i); + } + for (let i = 0; i < 1000; i++) { + set.delete(i); + } + for (const i of set) { + // NOOP + } + }; + }), + b.cycle(), + b.complete(), + b.save({ + file: 'gitgc', + folder: 'benches/results', + version: packageJson.version, + details: true, + }), + b.save({ + file: 'gitgc', + folder: 'benches/results', + format: 'chart.html', + }), + ); + return summary; +} + +if (require.main === module) { + (async () => { + await main(); + })(); +} + +export default main; diff --git a/benches/index.ts b/benches/index.ts new file mode 100644 index 000000000..98a870855 --- /dev/null +++ b/benches/index.ts @@ -0,0 +1,26 @@ +#!/usr/bin/env node + +import fs from 'fs'; +import si from 'systeminformation'; +import gitgc from './gitgc'; + +async function main(): Promise { + await gitgc(); + const systemData = await si.get({ + cpu: '*', + osInfo: 'platform, distro, release, kernel, arch', + system: 'model, manufacturer', + }); + await fs.promises.writeFile( + 'benches/results/system.json', + JSON.stringify(systemData, null, 2), + ); +} + +if (require.main === module) { + (async () => { + await main(); + })(); +} + +export default main; diff --git a/benches/results/gitgc.chart.html b/benches/results/gitgc.chart.html new file mode 100644 index 000000000..31d69d540 --- /dev/null +++ b/benches/results/gitgc.chart.html @@ -0,0 +1,116 @@ + + + + + + + + gitgc + + + +
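The `arr` case in `benches/gitgc.ts` above is the one its comments flag as interesting: instead of inserting and deleting keys the way the `map`, `obj`, and `set` cases do, it grows a plain array geometrically (`arr.length = arr.length * 2 || 2`) and "deletes" by flipping a `mark` flag, which is why it lands roughly 5-6x ahead in the results below (`arr` at ~80k ops/s against ~12-17k ops/s for the others). A minimal standalone sketch of that grow-and-mark pattern; the names here are illustrative and not part of the benchmark:

// Grow-and-mark vector: doubling the backing capacity amortises
// growth to O(1) per insert, and "deletion" is a flag flip rather
// than a structural delete.
type Entry = { id: number; mark: boolean };

class MarkedVector {
  protected entries: Array<Entry> = [];
  protected count = 0;

  public add(id: number): void {
    if (this.count === this.entries.length) {
      // Double the capacity, starting from 2
      this.entries.length = this.entries.length * 2 || 2;
    }
    this.entries[this.count++] = { id, mark: false };
  }

  public *live(): Generator<Entry> {
    // Iterate only the logical prefix, stopping at the last element
    for (let i = 0; i < this.count; i++) {
      if (!this.entries[i].mark) yield this.entries[i];
    }
  }
}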
+ +
+ + + \ No newline at end of file diff --git a/benches/results/gitgc.json b/benches/results/gitgc.json new file mode 100644 index 000000000..634754e7a --- /dev/null +++ b/benches/results/gitgc.json @@ -0,0 +1,451 @@ +{ + "name": "gitgc", + "date": "2022-01-28T05:51:50.845Z", + "version": "1.0.0", + "results": [ + { + "name": "map", + "ops": 12413, + "margin": 1.36, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 72, + "promise": true, + "details": { + "min": 0.00007077850282485876, + "max": 0.0000894375988700565, + "mean": 0.00008055894499724271, + "median": 0.00008118181255079868, + "standardDeviation": 0.000004759840515182626, + "marginOfError": 0.0000010994670651796875, + "relativeMarginOfError": 1.3647982421037401, + "standardErrorOfMean": 5.609525842753508e-7, + "sampleVariance": 2.2656081729974e-11, + "sampleResults": [ + 0.00007077850282485876, + 0.00007097972765957447, + 0.00007161048068669528, + 0.0000716254878397711, + 0.00007233768005540166, + 0.0000729321731044349, + 0.00007298697838616715, + 0.00007363390634005764, + 0.00007377773631123919, + 0.00007389356259204713, + 0.00007414712103746398, + 0.00007531327286356821, + 0.00007556276671619614, + 0.0000756751266568483, + 0.00007613181859070464, + 0.00007620914542728636, + 0.00007621787256371814, + 0.00007629743778110945, + 0.0000764165616641902, + 0.00007689579341317365, + 0.00007736970958083831, + 0.00007843196101949026, + 0.00007898940057636888, + 0.0000795086251874063, + 0.00007993748065476191, + 0.00008000828276877761, + 0.00008004185326953748, + 0.00008018392344497607, + 0.0000803963149717514, + 0.0000804585292353823, + 0.0000804720243204578, + 0.00008054201594896332, + 0.00008075200576368876, + 0.00008080592907801419, + 0.0000811265326953748, + 0.00008114411911357341, + 0.00008121950598802395, + 0.00008135791412742382, + 0.00008165932904148783, + 0.00008186257121439281, + 0.00008221428571428571, + 0.00008229257421289355, + 0.00008232091754122938, + 0.00008232919340329836, + 0.00008243745606060607, + 0.00008273944976076555, + 0.00008290637031484259, + 0.0000829488474025974, + 0.00008305361812778602, + 0.0000830936871257485, + 0.00008318165817091454, + 0.00008323229346092504, + 0.00008338557780979827, + 0.0000835751552238806, + 0.00008360263112391931, + 0.00008438872362555721, + 0.00008480622978723404, + 0.0000848238645066274, + 0.00008495250954478708, + 0.00008502449279538904, + 0.00008513051939058171, + 0.00008533426129943503, + 0.00008582617596566523, + 0.00008582906389301635, + 0.00008592566987179488, + 0.00008616907035928144, + 0.0000862462365269461, + 0.00008650909585121602, + 0.00008851563922155689, + 0.0000889068603896104, + 0.00008941313193403299, + 0.0000894375988700565 + ] + }, + "completed": true, + "percentSlower": 84.62 + }, + { + "name": "obj", + "ops": 17311, + "margin": 0.6, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 85, + "promise": true, + "details": { + "min": 0.00005589308370535714, + "max": 0.00006599035208098987, + "mean": 0.00005776653152777129, + "median": 0.000057438973003374575, + "standardDeviation": 0.0000016170787282694694, + "marginOfError": 3.437777562752602e-7, + "relativeMarginOfError": 0.5951158000718614, + "standardErrorOfMean": 1.7539681442615316e-7, + "sampleVariance": 2.6149436134216042e-12, + "sampleResults": [ + 0.00005589308370535714, + 0.000055923575892857144, + 0.00005592791731843576, + 0.00005611031620111732, + 
0.00005615302346368715, + 0.000056256111731843576, + 0.000056266712849162016, + 0.000056274237723214286, + 0.00005633941899441341, + 0.000056344213169642855, + 0.000056347307086614175, + 0.00005641687626546681, + 0.000056452138357705285, + 0.00005646863892013498, + 0.000056488960937499994, + 0.000056507857142857145, + 0.000056584775028121486, + 0.000056634034870641165, + 0.000056758037120359955, + 0.000056766039370078735, + 0.00005678472440944882, + 0.000056785659167604044, + 0.000056791463442069747, + 0.000056791497187851516, + 0.00005684813948256468, + 0.00005693674353205849, + 0.00005693990502793296, + 0.00005702166815642458, + 0.00005703376602924635, + 0.00005704974578177728, + 0.00005707559842519685, + 0.00005707628683914511, + 0.00005710713160854893, + 0.000057117591676040494, + 0.0000571776366704162, + 0.000057179823397075365, + 0.00005726050393700788, + 0.00005726404274465692, + 0.000057280159730033745, + 0.00005732342633928571, + 0.00005734832960893855, + 0.00005739825586592179, + 0.000057438973003374575, + 0.000057496350456621005, + 0.000057506095890410956, + 0.00005751210348706412, + 0.00005752209832402235, + 0.000057529072625698327, + 0.00005759033858267717, + 0.00005759104836895388, + 0.000057611492688413946, + 0.000057639962053571425, + 0.0000576807120359955, + 0.000057696478065241844, + 0.00005770783914510686, + 0.00005777350055865922, + 0.000057786456692913384, + 0.0000578248203125, + 0.00005786656355455568, + 0.000057876658482142856, + 0.00005790582793296089, + 0.00005817434420697413, + 0.00005818454218222722, + 0.00005825720433789954, + 0.000058297696089385474, + 0.000058414106145251395, + 0.00005842866929133858, + 0.000058655880446927374, + 0.00005872889876265466, + 0.00005874819347581553, + 0.00005887123734533183, + 0.00005892923172103487, + 0.00005896419441340782, + 0.00005902182564679415, + 0.000059149572067039104, + 0.00005934727374301676, + 0.00005941950506186727, + 0.000059622240223463686, + 0.000059698642004773274, + 0.00006059778994413408, + 0.00006075867181926279, + 0.00006084922159730034, + 0.00006127136782902137, + 0.00006371275195530727, + 0.00006599035208098987 + ] + }, + "completed": true, + "percentSlower": 78.55 + }, + { + "name": "arr", + "ops": 80712, + "margin": 0.68, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 85, + "promise": true, + "details": { + "min": 0.000011858622632575757, + "max": 0.000013705367660984848, + "mean": 0.000012389732753812623, + "median": 0.000012287901041666666, + "standardDeviation": 3.959461486703419e-7, + "marginOfError": 8.417492371653915e-8, + "relativeMarginOfError": 0.6793925695502712, + "standardErrorOfMean": 4.2946389651295486e-8, + "sampleVariance": 1.5677335264687652e-13, + "sampleResults": [ + 0.000011858622632575757, + 0.000011860888020833333, + 0.00001186896425189394, + 0.000011887816051136364, + 0.000011913577414772726, + 0.000011929929214015153, + 0.000011943966145833332, + 0.000011944056107954545, + 0.000011970370028409091, + 0.00001197090625, + 0.000011985623579545454, + 0.000011992682528409092, + 0.000012006678977272726, + 0.000012013747395833333, + 0.00001203840790719697, + 0.00001204254237689394, + 0.000012044332859848485, + 0.000012057760416666668, + 0.000012065281486742426, + 0.000012066254261363636, + 0.000012071526988636364, + 0.000012076806818181818, + 0.00001208674502840909, + 0.000012090615767045454, + 0.000012091645657926388, + 0.000012098692234848485, + 0.000012099003077651515, + 0.000012105143702651516, + 
0.000012107508522727273, + 0.000012108986268939394, + 0.000012133881865530303, + 0.000012167275568181818, + 0.000012188720712051961, + 0.000012190344933712122, + 0.000012195850142045455, + 0.000012197638494318183, + 0.000012204208806818182, + 0.000012224388037928519, + 0.000012224616240530303, + 0.000012235240056818181, + 0.000012253570312499999, + 0.000012279734611742424, + 0.000012287901041666666, + 0.000012296046164772728, + 0.000012322409801136362, + 0.00001234451112689394, + 0.00001236041737689394, + 0.000012381963541666667, + 0.000012423882102272727, + 0.000012433329308712121, + 0.000012448884232954545, + 0.000012452242897727274, + 0.000012495593986742423, + 0.000012501668560606061, + 0.000012513556581439395, + 0.000012514464962121213, + 0.00001256769981060606, + 0.000012587941761363636, + 0.000012596262310606061, + 0.000012598890388257575, + 0.000012639550206661804, + 0.000012652286458333333, + 0.000012652606770833334, + 0.000012654231534090909, + 0.00001265561671401515, + 0.000012705125328868692, + 0.000012709302556818183, + 0.00001272277959280303, + 0.00001272655800189394, + 0.000012763128859713105, + 0.000012780962121212122, + 0.000012802757575757576, + 0.000012806080729166667, + 0.000012806569128787879, + 0.00001281809659090909, + 0.0000128334453125, + 0.00001283447940340909, + 0.0000128418802020688, + 0.000012932291556410873, + 0.000012976824100378788, + 0.000012983733428030303, + 0.000013109263967803029, + 0.000013405590681003583, + 0.000013586537878787878, + 0.000013705367660984848 + ] + }, + "completed": true, + "percentSlower": 0 + }, + { + "name": "set", + "ops": 16847, + "margin": 1.94, + "options": { + "delay": 0.005, + "initCount": 1, + "minTime": 0.05, + "maxTime": 5, + "minSamples": 5 + }, + "samples": 78, + "promise": true, + "details": { + "min": 0.000050886196138211386, + "max": 0.00006873873163265307, + "mean": 0.00005935926529388902, + "median": 0.0000615916637755102, + "standardDeviation": 0.000005193393503907441, + "marginOfError": 0.0000011525505979750247, + "relativeMarginOfError": 1.9416523979343772, + "standardErrorOfMean": 5.880360193750126e-7, + "sampleVariance": 2.6971336086428002e-11, + "sampleResults": [ + 0.000050886196138211386, + 0.0000509471698685541, + 0.000050978451971688576, + 0.0000517820306122449, + 0.00005211355284552845, + 0.000052252162244897964, + 0.000052254794715447155, + 0.00005231770408163265, + 0.00005256923983739838, + 0.00005259830894308943, + 0.00005267006326530612, + 0.00005284759150657229, + 0.000052991025510204085, + 0.00005299677142857143, + 0.00005348157448979592, + 0.000053637285714285715, + 0.000053777775510204086, + 0.00005405282551020408, + 0.00005419058469387755, + 0.00005443484183673469, + 0.00005446711734693878, + 0.000054547145918367346, + 0.000054657097959183674, + 0.00005476208265306122, + 0.00005487187142857143, + 0.00005500928265306123, + 0.0000550447081632653, + 0.00005606326224489796, + 0.00005639379387755102, + 0.000056477297959183674, + 0.00005653203367346939, + 0.00005679514285714286, + 0.000057151315306122454, + 0.00005743637346938775, + 0.000058363009183673465, + 0.00005867430102040816, + 0.00005892348571428572, + 0.00006139411632653062, + 0.0000615379193877551, + 0.00006164540816326531, + 0.00006168543265306122, + 0.00006187692653061224, + 0.00006195303979591837, + 0.00006221790447154472, + 0.0000623579756097561, + 0.00006259268469387755, + 0.00006264848469387755, + 0.0000626569387755102, + 0.00006270273979591837, + 0.0000627145581632653, + 0.00006296146916076846, + 0.00006315933571428571, + 
0.00006328053469387754, + 0.00006328582959183674, + 0.00006333463617886179, + 0.00006341867138523762, + 0.0000635722275510204, + 0.00006357350510204081, + 0.00006367220204081632, + 0.0000636918612244898, + 0.0000637768081632653, + 0.00006379471836734694, + 0.00006397991203235592, + 0.00006435207653061225, + 0.00006444884081632652, + 0.00006463790853658536, + 0.00006486812142857144, + 0.00006516360161779576, + 0.00006516925204081633, + 0.00006520715918367347, + 0.00006554987653061224, + 0.00006571110102040817, + 0.0000660327887755102, + 0.00006606507448979592, + 0.00006640331199186991, + 0.00006700257330637006, + 0.00006723716260162602, + 0.00006873873163265307 + ] + }, + "completed": true, + "percentSlower": 79.13 + } + ], + "fastest": { + "name": "arr", + "index": 2 + }, + "slowest": { + "name": "map", + "index": 0 + } +} \ No newline at end of file diff --git a/benches/results/system.json b/benches/results/system.json new file mode 100644 index 000000000..312b2e10f --- /dev/null +++ b/benches/results/system.json @@ -0,0 +1,39 @@ +{ + "cpu": { + "manufacturer": "AMD", + "brand": "Ryzen 7 2700X Eight-Core Processor", + "vendor": "AMD", + "family": "23", + "model": "8", + "stepping": "2", + "revision": "", + "voltage": "", + "speed": 3.7, + "speedMin": 2.2, + "speedMax": 3.7, + "governor": "ondemand", + "cores": 16, + "physicalCores": 8, + "processors": 1, + "socket": "", + "flags": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl nonstop_tsc cpuid extd_apicid aperfmperf pni pclmulqdq monitor ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand lahf_lm cmp_legacy svm extapic cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw skinit wdt tce topoext perfctr_core perfctr_nb bpext perfctr_llc mwaitx cpb hw_pstate sme ssbd sev ibpb vmmcall sev_es fsgsbase bmi1 avx2 smep bmi2 rdseed adx smap clflushopt sha_ni xsaveopt xsavec xgetbv1 xsaves clzero irperf xsaveerptr arat npt lbrv svm_lock nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold avic v_vmsave_vmload vgif overflow_recov succor smca", + "virtualization": true, + "cache": { + "l1d": 262144, + "l1i": 524288, + "l2": 4194304, + "l3": 16777216 + } + }, + "osInfo": { + "platform": "linux", + "distro": "Matrix ML 1", + "release": "unknown", + "kernel": "5.10.81", + "arch": "x64" + }, + "system": { + "model": "System Product Name", + "manufacturer": "System manufacturer" + } +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 37fe8ac1c..eca4f5a4d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1593,10 +1593,11 @@ } }, "@matrixai/db": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.2.tgz", - "integrity": "sha512-wkVEEAJZaWS5Kbg6T/LcI6lS8AdWqszp8L1Dxmk7vwr1ihIkoIVQNSQ+FQryaFpor2eqh/wJaOKjDUpcHo+hEg==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@matrixai/db/-/db-1.1.5.tgz", + "integrity": "sha512-zPpP/J1A3TLRaQKaGa5smualzjW4Rin4K48cpU5/9ThyXfpVBBp/mrkbDfjL/O5z6YTcuGVf2+yLck8tF8kVUw==", "requires": { + "@matrixai/async-init": "^1.6.0", "@matrixai/logger": "^2.0.1", "@matrixai/workers": "^1.2.3", "abstract-leveldown": "^7.0.0", @@ -3338,14 +3339,15 @@ } }, "encryptedfs": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/encryptedfs/-/encryptedfs-3.2.1.tgz", - "integrity": 
"sha512-Rt8aFd32ZMXYkfZC9/H1wN+44zClCXXY/JKo4JtIVqPLyScsWv/hzmd5+ijh3vpCOs+YAWYIZSuhIiS4pGvkqA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/encryptedfs/-/encryptedfs-3.4.3.tgz", + "integrity": "sha512-OQqsGw3eNrMdFpiYRX17nMq1NKKebaA0KXyM9IRY9aPOxpaeOwcdvWnOcvvO9wCxZFNxgy/A2SOZdxnhCe3paA==", "requires": { - "@matrixai/db": "^1.0.1", - "@matrixai/logger": "^2.0.1", - "@matrixai/workers": "^1.2.3", - "async-mutex": "^0.3.1", + "@matrixai/async-init": "^1.6.0", + "@matrixai/db": "^1.1.5", + "@matrixai/logger": "^2.1.0", + "@matrixai/workers": "^1.2.5", + "async-mutex": "^0.3.2", "errno": "^0.1.7", "lexicographic-integer": "^1.1.0", "node-forge": "^0.10.0", @@ -3354,16 +3356,6 @@ "threads": "^1.6.5", "ts-custom-error": "^3.2.0", "util-callbackify": "^1.0.0" - }, - "dependencies": { - "async-mutex": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/async-mutex/-/async-mutex-0.3.2.tgz", - "integrity": "sha512-HuTK7E7MT7jZEh1P9GtRW9+aTWiDWWi9InbZ5hjxrnRa39KS4BW04+xLBhYNS2aXhHUIKZSw3gj4Pn1pj+qGAA==", - "requires": { - "tslib": "^2.3.1" - } - } } }, "end-of-stream": { diff --git a/package.json b/package.json index f370abaa0..cf62e9042 100644 --- a/package.json +++ b/package.json @@ -66,13 +66,14 @@ "lint": "eslint '{src,tests}/**/*.{js,ts}'", "lintfix": "eslint '{src,tests}/**/*.{js,ts}' --fix", "docs": "rm -r ./docs || true; typedoc --gitRevision master --tsconfig ./tsconfig.build.json --out ./docs src && touch ./docs/.nojekyll", + "bench": "rm -r ./benches/results || true; ts-node --require tsconfig-paths/register --compiler typescript-cached-transpile --transpile-only ./benches", "proto-generate": "scripts/proto-generate.sh", "polykey": "ts-node --require tsconfig-paths/register --compiler typescript-cached-transpile --transpile-only src/bin/polykey.ts" }, "dependencies": { "@grpc/grpc-js": "1.3.7", "@matrixai/async-init": "^1.6.0", - "@matrixai/db": "^1.1.2", + "@matrixai/db": "^1.1.5", "@matrixai/id": "^3.3.2", "@matrixai/logger": "^2.1.0", "@matrixai/workers": "^1.2.5", @@ -84,7 +85,7 @@ "commander": "^8.3.0", "cross-fetch": "^3.0.6", "cross-spawn": "^7.0.3", - "encryptedfs": "^3.2.0", + "encryptedfs": "^3.4.3", "fast-fuzzy": "^1.10.8", "fd-lock": "^1.2.0", "google-protobuf": "^3.14.0", diff --git a/src/GenericIdTypes.ts b/src/GenericIdTypes.ts deleted file mode 100644 index e4d2db430..000000000 --- a/src/GenericIdTypes.ts +++ /dev/null @@ -1,119 +0,0 @@ -import type { Codec } from 'multiformats/bases/base'; -import type { Id as InternalId } from '@matrixai/id/dist/Id'; -import { utils as idUtils } from '@matrixai/id'; -import { bases } from 'multiformats/basics'; -import { ErrorInvalidId } from './errors'; - -type MultibaseFormats = keyof typeof bases; -// / This is the internal form of the Id. -export type Id = InternalId; -// / This is the user readable string form of the Id. -export type IdString = string; -// This is the number of bytes a valid Id has -const idValidByteLength = 16; - -// Type guards for generic RandomId types. -function isId(arg: any): arg is T { - if (!(arg instanceof Uint8Array)) return false; - return arg.length === idValidByteLength; -} - -/** - * This will return arg as a valid VaultId or throw an error if it can't be converted. - * This will take a multibase string of the ID or the raw Buffer of the ID. 
- * @param arg - The variable we wish to convert - * @throws vaultErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId - * @returns VaultIdRaw - */ -function makeId(arg: any): T { - let id = arg; - // Checking and converting a string - if (typeof arg === 'string') { - // Covert the string to the Buffer form. - try { - id = idUtils.fromMultibase(arg); - if (id == null) throw new ErrorInvalidId(); - } catch (err) { - throw new ErrorInvalidId(); - } - } - - // If its a buffer we convert it to a Id. - if (arg instanceof Buffer) id = idUtils.fromBuffer(id); - - // Checking if valid buffer. - if (isId(id)) return id; - throw new ErrorInvalidId(); -} - -function isIdString( - arg: any, - validByteLength: number = idValidByteLength, -): arg is T { - if (typeof arg !== 'string') return false; - const id = fromMultibase(arg); - if (id == null) return false; - return id.length === validByteLength; -} - -function makeIdString( - arg: any, - validByteLength: number = idValidByteLength, - format: MultibaseFormats = 'base58btc', -): T { - const id = arg; - if (id instanceof Uint8Array) { - if (id.length !== validByteLength) throw new ErrorInvalidId(); - return toMultibase(arg, format) as T; - } - if (isIdString(id, validByteLength)) return id; - throw new ErrorInvalidId(); -} - -function idToString(id: Id): IdString { - return id.toString(); -} - -function stringToId(idString: IdString): Id { - return idUtils.fromString(idString)!; -} - -// Multibase helper functions. -const basesByPrefix: Record> = {}; -for (const k in bases) { - const codec = bases[k]; - basesByPrefix[codec.prefix] = codec; -} - -/** - * Encodes an multibase ID string - */ -function toMultibase(id: Uint8Array, format: MultibaseFormats): string { - const codec = bases[format]; - return codec.encode(id); -} - -/** - * Decodes a multibase encoded ID - * Do not use this for generic multibase strings - */ -function fromMultibase(idString: string): Uint8Array | undefined { - const prefix = idString[0]; - const codec = basesByPrefix[prefix]; - if (codec == null) { - return; - } - const buffer = codec.decode(idString); - return new Uint8Array(buffer); -} - -export { - isId, - makeId, - isIdString, - makeIdString, - idToString, - stringToId, - toMultibase, - fromMultibase, -}; diff --git a/src/PolykeyAgent.ts b/src/PolykeyAgent.ts index 87c605c77..5329b8747 100644 --- a/src/PolykeyAgent.ts +++ b/src/PolykeyAgent.ts @@ -310,13 +310,24 @@ class PolykeyAgent { sigchain, logger: logger.getChild(Discovery.name), })); + notificationsManager = + notificationsManager ?? + (await NotificationsManager.createNotificationsManager({ + acl, + db, + nodeConnectionManager, + nodeManager, + keyManager, + logger: logger.getChild(NotificationsManager.name), + fresh, + })); vaultManager = vaultManager ?? (await VaultManager.createVaultManager({ - vaultsKey: keyManager.vaultKey, vaultsPath, keyManager, nodeConnectionManager, + notificationsManager, gestaltGraph, acl, db, @@ -324,17 +335,6 @@ class PolykeyAgent { logger: logger.getChild(VaultManager.name), fresh, })); - notificationsManager = - notificationsManager ?? - (await NotificationsManager.createNotificationsManager({ - acl, - db, - nodeConnectionManager, - nodeManager, - keyManager, - logger: logger.getChild(NotificationsManager.name), - fresh, - })); sessionManager = sessionManager ?? 
(await SessionManager.createSessionManager({ @@ -561,6 +561,9 @@ class PolykeyAgent { sigchain: this.sigchain, nodeConnectionManager: this.nodeConnectionManager, notificationsManager: this.notificationsManager, + acl: this.acl, + gestaltGraph: this.gestaltGraph, + revProxy: this.revProxy, }); const clientService = createClientService({ pkAgent: this, @@ -575,6 +578,7 @@ class PolykeyAgent { sessionManager: this.sessionManager, vaultManager: this.vaultManager, sigchain: this.sigchain, + acl: this.acl, grpcServerClient: this.grpcServerClient, grpcServerAgent: this.grpcServerAgent, fwdProxy: this.fwdProxy, diff --git a/src/acl/ACL.ts b/src/acl/ACL.ts index f1f551059..358663d51 100644 --- a/src/acl/ACL.ts +++ b/src/acl/ACL.ts @@ -201,7 +201,8 @@ class ACL { const vaultPerms: Record> = {}; const ops: Array = []; for await (const o of this.aclVaultsDb.createReadStream()) { - const vaultId = (o as any).key as VaultId; + const vaultIdBuffer = (o as any).key as Buffer; + const vaultId = IdInternal.fromBuffer(vaultIdBuffer); const data = (o as any).value as Buffer; const nodeIds = await this.db.deserializeDecrypt>( data, diff --git a/src/acl/types.ts b/src/acl/types.ts index 92ae07d13..d5e0362e3 100644 --- a/src/acl/types.ts +++ b/src/acl/types.ts @@ -1,6 +1,6 @@ import type { Opaque } from '../types'; import type { GestaltAction } from '../gestalts/types'; -import type { VaultActions, VaultId } from '../vaults/types'; +import type { VaultActions, VaultIdString } from '../vaults/types'; import type { Id } from '@matrixai/id'; type PermissionId = Opaque<'PermissionId', Id>; @@ -8,7 +8,7 @@ type PermissionIdString = Opaque<'PermissionIdString', string>; type Permission = { gestalt: GestaltActions; - vaults: Record; // FIXME: the string union on VaultId is to prevent some false errors. 
+ vaults: Record<VaultIdString, VaultActions>; }; type GestaltActions = Partial<Record<GestaltAction, null>>; diff --git a/src/agent/GRPCClientAgent.ts b/src/agent/GRPCClientAgent.ts index 1289354c7..4190f66b6 100644 --- a/src/agent/GRPCClientAgent.ts +++ b/src/agent/GRPCClientAgent.ts @@ -99,18 +99,25 @@ class GRPCClientAgent extends GRPCClient { @ready(new agentErrors.ErrorAgentClientDestroyed()) public vaultsGitPackGet( ...args - ): ClientDuplexStream<vaultsPB.PackChunk, vaultsPB.PackChunk> { - return this.client.vaultsGitPackGet(...args); + ): AsyncGeneratorDuplexStreamClient< + vaultsPB.PackChunk, + vaultsPB.PackChunk, + ClientDuplexStream<vaultsPB.PackChunk, vaultsPB.PackChunk> + > { + return grpcUtils.promisifyDuplexStreamCall<vaultsPB.PackChunk, vaultsPB.PackChunk>( + this.client, + this.client.vaultsGitPackGet, + )(...args); } @ready(new agentErrors.ErrorAgentClientDestroyed()) public vaultsScan( ...args ): AsyncGeneratorReadableStreamClient< - vaultsPB.Vault, - ClientReadableStream<vaultsPB.Vault> + vaultsPB.List, + ClientReadableStream<vaultsPB.List> > { - return grpcUtils.promisifyReadableStreamCall<vaultsPB.Vault>( + return grpcUtils.promisifyReadableStreamCall<vaultsPB.List>( this.client, this.client.vaultsScan, )(...args); @@ -156,14 +163,6 @@ class GRPCClientAgent extends GRPCClient { )(...args); } - @ready(new agentErrors.ErrorAgentClientDestroyed()) - public vaultsPermissionsCheck(...args) { - return grpcUtils.promisifyUnaryCall<vaultsPB.NodePermissionAllowed>( - this.client, - this.client.vaultsPermissionsCheck, - )(...args); - } - @ready(new agentErrors.ErrorAgentClientDestroyed()) public nodesCrossSignClaim( ...args diff --git a/src/agent/errors.ts b/src/agent/errors.ts index b92f3b306..e4db4293c 100644 --- a/src/agent/errors.ts +++ b/src/agent/errors.ts @@ -1,4 +1,4 @@ -import { ErrorPolykey } from '../errors'; +import { ErrorPolykey, sysexits } from '../errors'; class ErrorAgent extends ErrorPolykey {} @@ -8,9 +8,15 @@ class ErrorAgentClientNotStarted extends ErrorAgent {} class ErrorAgentClientDestroyed extends ErrorAgent {} +class ErrorConnectionInfoMissing extends ErrorAgent { + description = 'Connection info was not provided'; + exitCode = sysexits.UNAVAILABLE; +} + export { ErrorAgent, ErrorAgentClientNotStarted, ErrorAgentRunning, ErrorAgentClientDestroyed, + ErrorConnectionInfoMissing, }; diff --git a/src/agent/index.ts b/src/agent/index.ts index f45d230fe..4e55eb824 100644 --- a/src/agent/index.ts +++ b/src/agent/index.ts @@ -1,3 +1,5 @@ export { default as createAgentService, AgentServiceService } from './service'; export { default as GRPCClientAgent } from './GRPCClientAgent'; export * as errors from './errors'; +export * as types from './types'; +export * as utils from './utils'; diff --git a/src/agent/service/echo.ts b/src/agent/service/echo.ts index 45b8f0279..b99923bbb 100644 --- a/src/agent/service/echo.ts +++ b/src/agent/service/echo.ts @@ -1,11 +1,13 @@ import type * as grpc from '@grpc/grpc-js'; +import type { ConnectionInfoGet } from 'agent/types'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function echo(_) { +function echo({ connectionInfoGet }: { connectionInfoGet: ConnectionInfoGet }) { return async ( call: grpc.ServerUnaryCall<utilsPB.EchoMessage, utilsPB.EchoMessage>, callback: grpc.sendUnaryData<utilsPB.EchoMessage>, ): Promise<void> => { + connectionInfoGet(call); const response = new utilsPB.EchoMessage(); response.setChallenge(call.request.getChallenge()); callback(null, response); diff --git a/src/agent/service/index.ts b/src/agent/service/index.ts index f20a9fdb8..d7427d601 100644 --- a/src/agent/service/index.ts +++ b/src/agent/service/index.ts @@ -7,7 +7,10 @@ import type { } from '../../nodes'; import type { NotificationsManager } from '../../notifications'; import type { Sigchain } from '../../sigchain'; +import type { ACL } from '../../acl'; +import
type { GestaltGraph } from '../../gestalts'; import type { IAgentServiceServer } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import type ReverseProxy from '../../network/ReverseProxy'; import echo from './echo'; import nodesChainDataGet from './nodesChainDataGet'; import nodesClaimsGet from './nodesClaimsGet'; @@ -17,9 +20,9 @@ import nodesHolePunchMessageSend from './nodesHolePunchMessageSend'; import notificationsSend from './notificationsSend'; import vaultsGitInfoGet from './vaultsGitInfoGet'; import vaultsGitPackGet from './vaultsGitPackGet'; -import vaultsPermissionsCheck from './vaultsPermissionsCheck'; import vaultsScan from './vaultsScan'; import { AgentServiceService } from '../../proto/js/polykey/v1/agent_service_grpc_pb'; +import * as agentUtils from '../utils'; function createService(container: { keyManager: KeyManager; @@ -29,9 +32,14 @@ function createService(container: { nodeGraph: NodeGraph; notificationsManager: NotificationsManager; sigchain: Sigchain; -}) { + acl: ACL; + gestaltGraph: GestaltGraph; + revProxy: ReverseProxy; +}): IAgentServiceServer { + const connectionInfoGet = agentUtils.connectionInfoGetter(container.revProxy); const container_ = { ...container, + connectionInfoGet: connectionInfoGet, }; const service: IAgentServiceServer = { echo: echo(container_), @@ -43,7 +51,6 @@ function createService(container: { notificationsSend: notificationsSend(container_), vaultsGitInfoGet: vaultsGitInfoGet(container_), vaultsGitPackGet: vaultsGitPackGet(container_), - vaultsPermissionsCheck: vaultsPermissionsCheck(container_), vaultsScan: vaultsScan(container_), }; return service; diff --git a/src/agent/service/vaultsGitInfoGet.ts b/src/agent/service/vaultsGitInfoGet.ts index 8ee13efed..6391b3f7b 100644 --- a/src/agent/service/vaultsGitInfoGet.ts +++ b/src/agent/service/vaultsGitInfoGet.ts @@ -1,36 +1,81 @@ import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; +import type ACL from '../../acl/ACL'; +import type { ConnectionInfoGet } from '../../agent/types'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; +import * as vaultsErrors from '../../vaults/errors'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as validationUtils from '../../validation/utils'; +import * as nodesUtils from '../../nodes/utils'; +import * as agentErrors from '../errors'; -function vaultsGitInfoGet({ vaultManager }: { vaultManager: VaultManager }) { +function vaultsGitInfoGet({ + vaultManager, + acl, + connectionInfoGet, +}: { + vaultManager: VaultManager; + acl: ACL; + connectionInfoGet: ConnectionInfoGet; +}) { return async ( - call: grpc.ServerWritableStream, + call: grpc.ServerWritableStream, ): Promise => { const genWritable = grpcUtils.generatorWritable(call); const request = call.request; - const vaultNameOrId = request.getNameOrId(); - let vaultId, vaultName; - try { - vaultId = vaultsUtils.makeVaultId(idUtils.fromString(vaultNameOrId)); - await vaultManager.openVault(vaultId); - vaultName = await vaultManager.getVaultName(vaultId); - } catch (err) { - if (err instanceof vaultsErrors.ErrorVaultUndefined) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as 
VaultName); - await vaultManager.openVault(vaultId); - vaultName = vaultNameOrId; - } else { - throw err; + const vaultMessage = request.getVault(); + if (vaultMessage == null) { + await genWritable.throw({ code: grpc.status.NOT_FOUND }); + return; + } + let vaultName; + const vaultNameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + vaultName = vaultNameOrId; + if (vaultId == null) { + try { + vaultId = validationUtils.parseVaultId(vaultNameOrId); + vaultName = (await vaultManager.getVaultMeta(vaultId))?.vaultName; + } catch (err) { + await genWritable.throw(new vaultsErrors.ErrorVaultsVaultUndefined()); + return; } } - // TODO: Check the permissions here + // Getting the NodeId from the ReverseProxy connection info + const connectionInfo = connectionInfoGet(call); + // If this is getting run the connection exists + // It SHOULD exist here + if (connectionInfo == null) { + throw new agentErrors.ErrorConnectionInfoMissing(); + } + const nodeId = connectionInfo.nodeId; + const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); + const actionType = validationUtils.parseVaultAction(request.getAction()); + const permissions = await acl.getNodePerm(nodeId); + if (permissions == null) { + await genWritable.throw( + new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ), + ); + return; + } + const vaultPerms = permissions.vaults[vaultId]; + if (vaultPerms?.[actionType] !== null) { + await genWritable.throw( + new vaultsErrors.ErrorVaultsPermissionDenied( + `${nodeIdEncoded} does not have permission to ${actionType} from vault ${vaultsUtils.encodeVaultId( + vaultId, + )}`, + ), + ); + return; + } const meta = new grpc.Metadata(); meta.set('vaultName', vaultName); - meta.set('vaultId', vaultsUtils.makeVaultIdPretty(vaultId)); + meta.set('vaultId', vaultsUtils.encodeVaultId(vaultId)); genWritable.stream.sendMetadata(meta); const response = new vaultsPB.PackChunk(); const responseGen = vaultManager.handleInfoRequest(vaultId); diff --git a/src/agent/service/vaultsGitPackGet.ts b/src/agent/service/vaultsGitPackGet.ts index 4fad805a1..0a180b4ff 100644 --- a/src/agent/service/vaultsGitPackGet.ts +++ b/src/agent/service/vaultsGitPackGet.ts @@ -1,69 +1,100 @@ import type * as grpc from '@grpc/grpc-js'; import type { VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; -import { promisify } from '../../utils'; -import { errors as grpcErrors } from '../../grpc'; -import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; +import type { ConnectionInfoGet } from '../../agent/types'; +import type ACL from '../../acl/ACL'; +import * as nodesUtils from '../../nodes/utils'; +import * as grpcErrors from '../../grpc/errors'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsErrors from '../../vaults/errors'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as validationUtils from '../../validation/utils'; +import * as agentErrors from '../errors'; -function vaultsGitPackGet({ vaultManager }: { vaultManager: VaultManager }) { +function vaultsGitPackGet({ + vaultManager, + acl, + connectionInfoGet, +}: { + vaultManager: VaultManager; + acl: ACL; + connectionInfoGet: ConnectionInfoGet; +}) { return async ( call: grpc.ServerDuplexStream, ) => { - const write = promisify(call.write).bind(call); - const 
clientBodyBuffers: Buffer[] = []; - call.on('data', (d) => { - clientBodyBuffers.push(d.getChunk_asU8()); - }); - - call.on('end', async () => { - const body = Buffer.concat(clientBodyBuffers); - const meta = call.metadata; - const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); - if (vaultNameOrId == null) { - throw new grpcErrors.ErrorGRPC('vault-name not in metadata.'); - } - let vaultId; - try { - vaultId = vaultsUtils.makeVaultId(vaultNameOrId); - await vaultManager.openVault(vaultId); - } catch (err) { - if ( - err instanceof vaultsErrors.ErrorVaultUndefined || - err instanceof SyntaxError - ) { - vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); - await vaultManager.openVault(vaultId); - } else { - throw err; - } - } - // TODO: Check the permissions here - const response = new vaultsPB.PackChunk(); - const [sideBand, progressStream] = await vaultManager.handlePackRequest( - vaultId, - Buffer.from(body), + const genDuplex = grpcUtils.generatorDuplex(call); + const clientBodyBuffers: Uint8Array[] = []; + const clientRequest = (await genDuplex.read()).value; + clientBodyBuffers.push(clientRequest!.getChunk_asU8()); + const body = Buffer.concat(clientBodyBuffers); + const meta = call.metadata; + // Getting the NodeId from the ReverseProxy connection info + const connectionInfo = connectionInfoGet(call); + // If this is getting run the connection exists + // It SHOULD exist here + if (connectionInfo == null) { + throw new agentErrors.ErrorConnectionInfoMissing(); + } + const nodeId = connectionInfo.nodeId; + const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); + // Getting vaultId + const vaultNameOrId = meta.get('vaultNameOrId').pop()!.toString(); + if (vaultNameOrId == null) { + throw new grpcErrors.ErrorGRPC('vault-name not in metadata'); + } + let vaultId = await vaultManager.getVaultId(vaultNameOrId as VaultName); + vaultId = vaultId ?? 
vaultsUtils.decodeVaultId(vaultNameOrId); + if (vaultId == null) { + await genDuplex.throw( + // Throwing permission error to hide information about vaults existence + new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ), + ); + return; + } + // Checking permissions + const permissions = await acl.getNodePerm(nodeId); + const vaultPerms = permissions?.vaults[vaultId]; + const actionType = validationUtils.parseVaultAction( + meta.get('vaultAction').pop(), + ); + if (vaultPerms?.[actionType] !== null) { + await genDuplex.throw( + new vaultsErrors.ErrorVaultsPermissionDenied( + `${nodeIdEncoded} does not have permission to ${actionType} from vault ${vaultsUtils.encodeVaultId( + vaultId, + )}`, + ), ); - response.setChunk(Buffer.from('0008NAK\n')); - await write(response); - const responseBuffers: Buffer[] = []; - await new Promise((resolve, reject) => { - sideBand.on('data', async (data: Buffer) => { - responseBuffers.push(data); - }); - sideBand.on('end', async () => { - response.setChunk(Buffer.concat(responseBuffers)); - await write(response); - resolve(); - }); - sideBand.on('error', (err) => { - reject(err); - }); - progressStream.write(Buffer.from('0014progress is at 50%\n')); - progressStream.end(); + return; + } + const response = new vaultsPB.PackChunk(); + const [sideBand, progressStream] = await vaultManager.handlePackRequest( + vaultId, + Buffer.from(body), + ); + response.setChunk(Buffer.from('0008NAK\n')); + await genDuplex.write(response); + const responseBuffers: Uint8Array[] = []; + await new Promise((resolve, reject) => { + sideBand.on('data', async (data: Uint8Array) => { + responseBuffers.push(data); + }); + sideBand.on('end', async () => { + response.setChunk(Buffer.concat(responseBuffers)); + await genDuplex.write(response); + resolve(); + }); + sideBand.on('error', (err) => { + reject(err); }); - call.end(); + progressStream.write(Buffer.from('0014progress is at 50%\n')); + progressStream.end(); }); + await genDuplex.next(null); }; } diff --git a/src/agent/service/vaultsPermissionsCheck.ts b/src/agent/service/vaultsPermissionsCheck.ts deleted file mode 100644 index 47c61d77e..000000000 --- a/src/agent/service/vaultsPermissionsCheck.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type * as grpc from '@grpc/grpc-js'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as grpcUtils } from '../../grpc'; - -function vaultsPermissionsCheck(_) { - return async ( - call: grpc.ServerUnaryCall< - vaultsPB.NodePermission, - vaultsPB.NodePermissionAllowed - >, - callback: grpc.sendUnaryData, - ): Promise => { - // Const response = new vaultsPB.NodePermissionAllowed(); - try { - // Const nodeId = makeNodeId(call.request.getNodeId()); - // const vaultId = makeVaultId(call.request.getVaultId()); - throw Error('Not Implemented'); - // FIXME: getVaultPermissions not implemented. 
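Both new handlers above gate access with `vaultPerms?.[actionType] !== null`, which reads strangely until you recall how the ACL encodes permissions: a granted vault action is stored as a key mapping to the literal value `null` (the `Partial<Record<..., null>>` shape from `src/acl/types.ts`), so a missing key yields `undefined` and means "denied". A hedged sketch of that convention; the action names are assumptions based on the vault domain:

// Granted actions map to null; absent keys mean no permission.
type VaultAction = 'clone' | 'pull';
type VaultActions = Partial<Record<VaultAction, null>>;

function isAllowed(
  perms: VaultActions | undefined,
  action: VaultAction,
): boolean {
  // undefined !== null, so a missing record or missing key is a denial
  return perms?.[action] === null;
}

isAllowed({ pull: null }, 'pull'); // true: 'pull' was granted
isAllowed({ pull: null }, 'clone'); // false: key absent, undefined
isAllowed(undefined, 'pull'); // false: no permissions at all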
- // const result = await vaultManager.getVaultPermissions(vaultId, nodeId); - // let result; - // if (result[nodeId] === undefined) { - // response.setPermission(false); - // } else if (result[nodeId]['pull'] === undefined) { - // response.setPermission(false); - // } else { - // response.setPermission(true); - // } - // callback(null, response); - } catch (e) { - callback(grpcUtils.fromError(e)); - } - }; -} - -export default vaultsPermissionsCheck; diff --git a/src/agent/service/vaultsScan.ts b/src/agent/service/vaultsScan.ts index f7e618664..cb4447b03 100644 --- a/src/agent/service/vaultsScan.ts +++ b/src/agent/service/vaultsScan.ts @@ -1,31 +1,47 @@ import type * as grpc from '@grpc/grpc-js'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -import { utils as grpcUtils } from '../../grpc'; +import type VaultManager from '../../vaults/VaultManager'; +import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import type { ConnectionInfoGet } from '../../agent/types'; +import * as agentErrors from '../../agent/errors'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as vaultsUtils from '../../vaults/utils'; +import * as grpcUtils from '../../grpc/utils'; -function vaultsScan(_) { +function vaultsScan({ + vaultManager, + connectionInfoGet, +}: { + vaultManager: VaultManager; + connectionInfoGet: ConnectionInfoGet; +}) { return async ( - call: grpc.ServerWritableStream, + call: grpc.ServerWritableStream, ): Promise => { const genWritable = grpcUtils.generatorWritable(call); - // Const response = new vaultsPB.Vault(); - // const id = makeNodeId(call.request.getNodeId()); + const listMessage = new vaultsPB.List(); + // Getting the NodeId from the ReverseProxy connection info + const connectionInfo = connectionInfoGet(call); + // If this is getting run the connection exists + // It SHOULD exist here + if (connectionInfo == null) { + throw new agentErrors.ErrorConnectionInfoMissing(); + } + const nodeId = connectionInfo.nodeId; try { - throw Error('Not implemented'); - // FIXME: handleVaultNamesRequest doesn't exist. 
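Each of these handlers recovers the caller's `NodeId` through `connectionInfoGet(call)`, built by the `connectionInfoGetter` helper added in `src/agent/utils.ts` below. The trick there is that `call.getPeer()` returns a bare `host:port` string with no scheme, so the helper prepends a dummy `pk://` scheme purely so that `new URL()` will split it. A sketch with an illustrative peer value:

// call.getPeer() yields something like '127.0.0.1:51317' (no scheme)
let urlString = '127.0.0.1:51317';
if (!/^.*:\/\//.test(urlString)) urlString = 'pk://' + urlString;
const url = new URL(urlString);
const host = url.hostname; // '127.0.0.1' — Host to query ReverseProxy with
const port = parseInt(url.port); // 51317 — proxy-side Port of the connection
// ReverseProxy.getConnectionInfoByProxy(host, port) then returns the
// ConnectionInfo (including the remote NodeId), or undefined if the
// connection is unknown — which the handlers treat as a hard error.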
- // const listResponse = vaultManager.handleVaultNamesRequest(id); - // let listResponse; - // for await (const vault of listResponse) { - // if (vault !== null) { - // response.setNameOrId(vault); - // await genWritable.next(response); - // } else { - // await genWritable.next(null); - // } - // } - // await genWritable.next(null); - } catch (err) { - await genWritable.throw(err); + const listResponse = vaultManager.handleScanVaults(nodeId); + for await (const { + vaultId, + vaultName, + vaultPermissions, + } of listResponse) { + listMessage.setVaultId(vaultsUtils.encodeVaultId(vaultId)); + listMessage.setVaultName(vaultName); + listMessage.setVaultPermissionsList(vaultPermissions); + await genWritable.next(listMessage); + } + await genWritable.next(null); + } catch (e) { + await genWritable.throw(e); } }; } diff --git a/src/agent/types.ts b/src/agent/types.ts new file mode 100644 index 000000000..ced17bbf1 --- /dev/null +++ b/src/agent/types.ts @@ -0,0 +1,8 @@ +import type { ConnectionInfo } from 'network/types'; +import type { ServerSurfaceCall } from '@grpc/grpc-js/build/src/server-call'; + +type ConnectionInfoGet = ( + call: ServerSurfaceCall, +) => ConnectionInfo | undefined; + +export type { ConnectionInfoGet }; diff --git a/src/agent/utils.ts b/src/agent/utils.ts new file mode 100644 index 000000000..6d6b6fdd2 --- /dev/null +++ b/src/agent/utils.ts @@ -0,0 +1,18 @@ +import type { Host, Port } from 'network/types'; +import type ReverseProxy from 'network/ReverseProxy'; +import type { ConnectionInfoGet } from './types'; +import type { ServerSurfaceCall } from '@grpc/grpc-js/build/src/server-call'; + +function connectionInfoGetter(revProxy: ReverseProxy): ConnectionInfoGet { + return (call: ServerSurfaceCall) => { + let urlString = call.getPeer(); + if (!/^.*:\/\//.test(urlString)) urlString = 'pk://' + urlString; + const url = new URL(urlString); + return revProxy.getConnectionInfoByProxy( + url.hostname as Host, + parseInt(url.port) as Port, + ); + }; +} + +export { connectionInfoGetter }; diff --git a/src/bin/errors.ts b/src/bin/errors.ts index 05cf5eff2..e3383163c 100644 --- a/src/bin/errors.ts +++ b/src/bin/errors.ts @@ -51,22 +51,6 @@ class ErrorCLIFileRead extends ErrorCLI { exitCode = sysexits.NOINPUT; } -class ErrorSecretPathFormat extends ErrorCLI { - description = "Secret name needs to be of format: ':'"; - exitCode = 64; -} - -class ErrorVaultNameAmbiguous extends ErrorCLI { - description = - 'There is more than 1 Vault with this name. 
Please specify a Vault ID'; - exitCode = 1; -} - -class ErrorSecretsUndefined extends ErrorCLI { - description = 'At least one secret must be specified as an argument'; - exitCode = 64; -} - class ErrorNodeFindFailed extends ErrorCLI { description = 'Failed to find the node in the DHT'; exitCode = 1; @@ -88,9 +72,6 @@ export { ErrorCLIPasswordFileRead, ErrorCLIRecoveryCodeFileRead, ErrorCLIFileRead, - ErrorSecretPathFormat, - ErrorVaultNameAmbiguous, - ErrorSecretsUndefined, ErrorNodeFindFailed, ErrorNodePingFailed, }; diff --git a/src/bin/secrets/CommandSecrets.ts b/src/bin/secrets/CommandSecrets.ts index 904592b93..0cf1c7661 100644 --- a/src/bin/secrets/CommandSecrets.ts +++ b/src/bin/secrets/CommandSecrets.ts @@ -8,6 +8,7 @@ import CommandList from './CommandList'; import CommandMkdir from './CommandMkdir'; import CommandRename from './CommandRename'; import CommandUpdate from './CommandUpdate'; +import commandStat from './CommandStat'; import CommandPolykey from '../CommandPolykey'; class CommandSecrets extends CommandPolykey { @@ -25,6 +26,7 @@ class CommandSecrets extends CommandPolykey { this.addCommand(new CommandMkdir(...args)); this.addCommand(new CommandRename(...args)); this.addCommand(new CommandUpdate(...args)); + this.addCommand(new commandStat(...args)); } } diff --git a/src/bin/secrets/CommandStat.ts b/src/bin/secrets/CommandStat.ts new file mode 100644 index 000000000..77d94cf6c --- /dev/null +++ b/src/bin/secrets/CommandStat.ts @@ -0,0 +1,87 @@ +import type { Stat } from 'encryptedfs'; +import type PolykeyClient from '../../PolykeyClient'; +import * as binProcessors from '../utils/processors'; +import * as parsers from '../utils/parsers'; +import * as binUtils from '../utils'; + +import CommandPolykey from '../CommandPolykey'; +import * as binOptions from '../utils/options'; + +class CommandStat extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('stat'); + this.description('Vaults Stat'); + this.argument( + '', + 'Path to where the secret, specified as :', + parsers.parseSecretPath, + ); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (secretPath, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const secretsPB = await import( + '../../proto/js/polykey/v1/secrets/secrets_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); + + const secretMessage = new secretsPB.Secret(); + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(secretPath[0]); + secretMessage.setVault(vaultMessage); + secretMessage.setSecretName(secretPath[1]); + // Get the secret's stat. 
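// retryAuthentication retries the call with fresh credentials if the
// session metadata in `meta` is rejected; the reply carries the
// EncryptedFS Stat serialised as JSON, which is parsed and printed
// field by field below.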
+ const response = await binUtils.retryAuthentication( + (auth) => pkClient.grpcClient.vaultsSecretsStat(secretMessage, auth), + meta, + ); + + const stat: Stat = JSON.parse(response.getJson()); + const data: string[] = [`Stats for "${secretPath[1]}"`]; + for (const key in stat) { + data.push(`${key}: ${stat[key]}`); + } + + // Print out the result. + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data, + }), + ); + } finally { + if (pkClient! != null) await pkClient.stop(); + } + }); + } +} + +export default CommandStat; diff --git a/src/bin/utils/options.ts b/src/bin/utils/options.ts index e7832b67c..b9238919b 100644 --- a/src/bin/utils/options.ts +++ b/src/bin/utils/options.ts @@ -155,6 +155,11 @@ const workers = new commander.Option( .argParser(binParsers.parseCoreCount) .default(undefined); +const pullVault = new commander.Option( + '-pv, --pull-vault ', + 'Name or Id of the vault to pull from', +); + export { nodePath, format, @@ -176,4 +181,5 @@ export { seedNodes, network, workers, + pullVault, }; diff --git a/src/bin/vaults/CommandClone.ts b/src/bin/vaults/CommandClone.ts index 55853a796..a91fb9680 100644 --- a/src/bin/vaults/CommandClone.ts +++ b/src/bin/vaults/CommandClone.ts @@ -11,7 +11,7 @@ class CommandClone extends CommandPolykey { super(...args); this.name('clone'); this.description('Clone a Vault from Another Node'); - this.argument('', 'Id of the vault to be cloned'); + this.argument('', 'Name or Id of the vault to be cloned'); this.argument( '', 'Id of the node to clone the vault from', diff --git a/src/bin/vaults/CommandLog.ts b/src/bin/vaults/CommandLog.ts index 12e1f6a3f..01a0c4839 100644 --- a/src/bin/vaults/CommandLog.ts +++ b/src/bin/vaults/CommandLog.ts @@ -63,8 +63,8 @@ class CommandLog extends CommandPolykey { meta, ); for await (const commit of stream) { - const timeStamp = commit.getTimeStamp(); - const date = new Date(timeStamp); + const timestamp = commit.getTimeStamp(); + const date = timestamp!.toDate(); data.push(`commit ${commit.getOid()}`); data.push(`committer ${commit.getCommitter()}`); data.push(`Date: ${date.toDateString()}`); diff --git a/src/bin/vaults/CommandPermissions.ts b/src/bin/vaults/CommandPermissions.ts index 96ab965f6..d45117249 100644 --- a/src/bin/vaults/CommandPermissions.ts +++ b/src/bin/vaults/CommandPermissions.ts @@ -1,115 +1,82 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from '../../PolykeyClient'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; -// import * as grpcErrors from '../../grpc/errors'; +import type PolykeyClient from '../../PolykeyClient'; +import * as binProcessors from '../utils/processors'; +import * as binUtils from '../utils'; -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; +import CommandPolykey from '../CommandPolykey'; +import * as binOptions from '../utils/options'; -// class CommandPermissions extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('permissions'); -// this.description('Vaults Permissions'); -// this.arguments(' [nodeId]'); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (vaultName, nodeId, 
options) => { +class CommandPermissions extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('permissions'); + this.alias('perms'); + this.description('Sets the permissions of a vault for Node Ids'); + this.argument('', 'Name or ID of the vault'); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (vaultName, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); -// }); -// } -// } + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); -// export default CommandPermissions; + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultName); -// OLD COMMAND -// const permissions = binUtils.createCommand('permissions', { -// description: { -// description: 'Sets the permissions of a vault for Node Ids', -// args: { -// vaultName: 'Name or ID of the vault', -// nodeId: '(optional) nodeId to check permission on', -// }, -// }, -// aliases: ['perms'], -// nodePath: true, -// verbose: true, -// format: true, -// }); -// permissions.arguments(' [nodeId]'); -// permissions.action(async (vaultName, nodeId, options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// clientConfig['nodePath'] = options.nodePath -// ? options.nodePath -// : utils.getDefaultNodePath(); + await pkClient.start(); -// const client = await PolykeyClient.createPolykeyClient(clientConfig); + const data: Array = []; + await binUtils.retryAuthentication(async (auth) => { + const permissionStream = pkClient.grpcClient.vaultsPermissionGet( + vaultMessage, + auth, + ); + for await (const permission of permissionStream) { + const nodeId = permission.getNode()?.getNodeId(); + const actions = permission.getVaultPermissionsList().join(', '); + data.push(`${nodeId}: ${actions}`); + } + return true; + }, meta); -// const vaultMessage = new vaultsPB.Vault(); -// vaultMessage.setNameOrId(vaultName); + if (data.length === 0) data.push('No permissions were found'); + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data: data, + }), + ); + } finally { + if (pkClient! 
!= null) await pkClient.stop(); + } + }); + } +} -// const nodeMessage = new nodesPB.Node(); -// nodeMessage.setNodeId(nodeId); - -// const getVaultMessage = new vaultsPB.PermGet(); -// getVaultMessage.setVault(vaultMessage); -// getVaultMessage.setNode(nodeMessage); - -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; - -// const data: Array = []; -// const response = await binUtils.streamCallCARL( -// client, -// setupStreamCall( -// client, -// client.grpcClient.vaultPermissions, -// ), -// )(getVaultMessage); - -// for await (const perm of response.data) { -// data.push(`${perm.getNodeId()}:\t\t${perm.getAction()}`); -// } -// await response.refresh; - -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 'json' : 'list', -// data: data, -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// } -// throw err; -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default permissions; +export default CommandPermissions; diff --git a/src/bin/vaults/CommandPull.ts b/src/bin/vaults/CommandPull.ts index b7aacd3a8..928db7734 100644 --- a/src/bin/vaults/CommandPull.ts +++ b/src/bin/vaults/CommandPull.ts @@ -11,61 +11,73 @@ class CommandPull extends CommandPolykey { super(...args); this.name('pull'); this.description('Pull a Vault from Another Node'); + this.argument('', 'Name of the vault to be pulled into'); this.argument( - '', - 'Id of the node to pull the vault from', + '[targetNodeId]', + '(Optional) target node to pull from', binParsers.parseNodeId, ); - this.argument('', 'Name of the vault to be pulled'); + this.addOption(binOptions.pullVault); this.addOption(binOptions.nodeId); this.addOption(binOptions.clientHost); this.addOption(binOptions.clientPort); - this.action(async (nodeId: NodeId, vaultName, options) => { - const { default: PolykeyClient } = await import('../../PolykeyClient'); - const nodesUtils = await import('../../nodes/utils'); - const vaultsPB = await import( - '../../proto/js/polykey/v1/vaults/vaults_pb' - ); - const nodesPB = await import('../../proto/js/polykey/v1/nodes/nodes_pb'); - const clientOptions = await binProcessors.processClientOptions( - options.nodePath, - options.nodeId, - options.clientHost, - options.clientPort, - this.fs, - this.logger.getChild(binProcessors.processClientOptions.name), - ); - const meta = await binProcessors.processAuthentication( - options.passwordFile, - this.fs, - ); - let pkClient: PolykeyClient; - this.exitHandlers.handlers.push(async () => { - if (pkClient != null) await pkClient.stop(); - }); - try { - pkClient = await PolykeyClient.createPolykeyClient({ - nodePath: options.nodePath, - nodeId: clientOptions.nodeId, - host: clientOptions.clientHost, - port: clientOptions.clientPort, - logger: this.logger.getChild(PolykeyClient.name), - }); - const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - const vaultPullMessage = new vaultsPB.Pull(); - vaultPullMessage.setVault(vaultMessage); - vaultPullMessage.setNode(nodeMessage); - 
nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); - vaultMessage.setNameOrId(vaultName); - await binUtils.retryAuthentication( - (auth) => pkClient.grpcClient.vaultsPull(vaultPullMessage, auth), - meta, + this.action( + async (vaultNameOrId, targetNodeId: NodeId | undefined, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const nodesUtils = await import('../../nodes/utils'); + const vaultsPB = await import( + '../../proto/js/polykey/v1/vaults/vaults_pb' + ); + const nodesPB = await import( + '../../proto/js/polykey/v1/nodes/nodes_pb' + ); + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), ); - } finally { - if (pkClient! != null) await pkClient.stop(); - } - }); + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); + let pkClient: PolykeyClient; + this.exitHandlers.handlers.push(async () => { + if (pkClient != null) await pkClient.stop(); + }); + try { + pkClient = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); + const vaultMessage = new vaultsPB.Vault(); + const pullVaultMessage = new vaultsPB.Vault(); + const nodeMessage = new nodesPB.Node(); + const vaultPullMessage = new vaultsPB.Pull(); + vaultPullMessage.setVault(vaultMessage); + vaultMessage.setNameOrId(vaultNameOrId); + if (targetNodeId != null) { + nodeMessage.setNodeId(nodesUtils.encodeNodeId(targetNodeId)); + vaultPullMessage.setNode(nodeMessage); + } + if (options.pullVault) { + vaultPullMessage.setPullVault(pullVaultMessage); + pullVaultMessage.setNameOrId(options.pullVault); + } + await binUtils.retryAuthentication( + (auth) => pkClient.grpcClient.vaultsPull(vaultPullMessage, auth), + meta, + ); + } finally { + if (pkClient! 
!= null) await pkClient.stop(); + } + }, + ); } } diff --git a/src/bin/vaults/CommandScan.ts b/src/bin/vaults/CommandScan.ts index e8887d35b..8477156ed 100644 --- a/src/bin/vaults/CommandScan.ts +++ b/src/bin/vaults/CommandScan.ts @@ -1,109 +1,75 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from '../../PolykeyClient'; -// import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; -// import * as grpcErrors from '../../grpc/errors'; +import type { Metadata } from '@grpc/grpc-js'; -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; +import CommandPolykey from '../CommandPolykey'; +import * as binUtils from '../utils'; +import * as binOptions from '../utils/options'; +import * as binProcessors from '../utils/processors'; -// class CommandScan extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('scan'); -// this.description('Vaults Scan'); -// this.requiredOption( -// '-ni, --node-id ', -// '(required) Id of the node to be scanned', -// ); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (options) => { +class CommandScan extends CommandPolykey { + constructor(...args: ConstructorParameters) { + super(...args); + this.name('scan'); + this.description('Scans a node to reveal their shared vaults'); + this.argument('', 'Id of the node to scan'); + this.addOption(binOptions.nodeId); + this.addOption(binOptions.clientHost); + this.addOption(binOptions.clientPort); + this.action(async (nodeId, options) => { + const { default: PolykeyClient } = await import('../../PolykeyClient'); + const nodesPB = await import('../../proto/js/polykey/v1/nodes/nodes_pb'); -// }); -// } -// } + const clientOptions = await binProcessors.processClientOptions( + options.nodePath, + options.nodeId, + options.clientHost, + options.clientPort, + this.fs, + this.logger.getChild(binProcessors.processClientOptions.name), + ); + const client = await PolykeyClient.createPolykeyClient({ + nodePath: options.nodePath, + nodeId: clientOptions.nodeId, + host: clientOptions.clientHost, + port: clientOptions.clientPort, + logger: this.logger.getChild(PolykeyClient.name), + }); -// export default CommandScan; + const meta = await binProcessors.processAuthentication( + options.passwordFile, + this.fs, + ); -// OLD COMMAND -// const commandScanVaults = binUtils.createCommand('scan', { -// description: 'Lists the vaults of another node', -// aliases: ['fetch'], -// nodePath: true, -// verbose: true, -// format: true, -// }); -// commandScanVaults.requiredOption( -// '-ni, --node-id ', -// '(required) Id of the node to be scanned', -// ); -// commandScanVaults.action(async (options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// if (options.nodePath) { -// clientConfig['nodePath'] = options.nodePath; -// } -// clientConfig['nodePath'] = options.nodePath -// ? 
options.nodePath -// : utils.getDefaultNodePath(); + try { + const grpcClient = client.grpcClient; + const nodeMessage = new nodesPB.Node(); + nodeMessage.setNodeId(nodeId); -// const client = await PolykeyClient.createPolykeyClient(clientConfig); -// const nodeMessage = new nodesPB.Node(); -// nodeMessage.setNodeId(options.nodeId); + const data = await binUtils.retryAuthentication( + async (meta: Metadata) => { + const data: Array = []; + const stream = grpcClient.vaultsScan(nodeMessage, meta); + for await (const vault of stream) { + const vaultName = vault.getVaultName(); + const vaultIdEncoded = vault.getVaultId(); + const permissions = vault.getVaultPermissionsList().join(','); + data.push(`${vaultName}\t\t${vaultIdEncoded}\t\t${permissions}`); + } + return data; + }, + meta, + ); -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; + process.stdout.write( + binUtils.outputFormatter({ + type: options.format === 'json' ? 'json' : 'list', + data: data, + }), + ); + } finally { + await client.stop(); + } + }); + } +} -// const data: Array = []; -// const response = await binUtils.streamCallCARL( -// client, -// setupStreamCall( -// client, -// client.grpcClient.vaultsScan, -// ), -// )(nodeMessage); - -// for await (const vault of response.data) { -// data.push(`${vault.getVaultName()}`); -// } -// await response.refresh; -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 'json' : 'list', -// data: data, -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// throw err; -// } -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default commandScanVaults; +export default CommandScan; diff --git a/src/bin/vaults/CommandShare.ts b/src/bin/vaults/CommandShare.ts index e71e8de20..a37658c0b 100644 --- a/src/bin/vaults/CommandShare.ts +++ b/src/bin/vaults/CommandShare.ts @@ -52,16 +52,17 @@ class CommandShare extends CommandPolykey { logger: this.logger.getChild(PolykeyClient.name), }); const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - const setVaultPermsMessage = new vaultsPB.PermSet(); - setVaultPermsMessage.setVault(vaultMessage); - setVaultPermsMessage.setNode(nodeMessage); vaultMessage.setNameOrId(vaultName); + const nodeMessage = new nodesPB.Node(); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); + const vaultsPermissionsList = new vaultsPB.Permissions(); + vaultsPermissionsList.setVault(vaultMessage); + vaultsPermissionsList.setNode(nodeMessage); + vaultsPermissionsList.setVaultPermissionsList(['pull', 'clone']); await binUtils.retryAuthentication( (auth) => - pkClient.grpcClient.vaultsPermissionsSet( - setVaultPermsMessage, + pkClient.grpcClient.vaultsPermissionSet( + vaultsPermissionsList, auth, ), meta, diff --git a/src/bin/vaults/CommandStat.ts b/src/bin/vaults/CommandStat.ts deleted file mode 100644 index 1b44f3c5d..000000000 --- a/src/bin/vaults/CommandStat.ts +++ /dev/null @@ -1,98 +0,0 @@ -// Import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -// import PolykeyClient from 
'../../PolykeyClient'; -// import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -// import * as utils from '../../utils'; -// import * as binUtils from '../utils'; - -// import * as grpcErrors from '../../grpc/errors'; - -// import CommandPolykey from '../CommandPolykey'; -// import * as binOptions from '../utils/options'; - -// class CommandStat extends CommandPolykey { -// constructor(...args: ConstructorParameters) { -// super(...args); -// this.name('stat'); -// this.description('Vaults Stat'); -// this.requiredOption( -// '-vn, --vault-name ', -// '(required) Name of the vault to get stats from', -// ); -// this.addOption(binOptions.nodeId); -// this.addOption(binOptions.clientHost); -// this.addOption(binOptions.clientPort); -// this.action(async (options) => { - -// }); -// } -// } - -// export default CommandStat; - -// OLD COMMAND -// const stat = binUtils.createCommand('stat', { -// description: 'Gets stats of an existing vault', -// nodePath: true, -// verbose: true, -// format: true, -// }); -// stat.requiredOption( -// '-vn, --vault-name ', -// '(required) Name of the vault to get stats from', -// ); -// stat.action(async (options) => { -// const clientConfig = {}; -// clientConfig['logger'] = new Logger('CLI Logger', LogLevel.WARN, [ -// new StreamHandler(), -// ]); -// if (options.verbose) { -// clientConfig['logger'].setLevel(LogLevel.DEBUG); -// } -// clientConfig['nodePath'] = options.nodePath -// ? options.nodePath -// : utils.getDefaultNodePath(); - -// const client = await PolykeyClient.createPolykeyClient(clientConfig); -// const vaultMessage = new vaultsPB.Vault(); -// vaultMessage.setNameOrId(options.vaultName); - -// try { -// await client.start({}); -// const grpcClient = client.grpcClient; -// const responseMessage = await binUtils.unaryCallCARL( -// client, -// attemptUnaryCall(client, grpcClient.vaultsSecretsStat), -// )(vaultMessage); - -// process.stdout.write( -// binUtils.outputFormatter({ -// type: options.format === 'json' ? 
'json' : 'list', -// data: [ -// `${vaultMessage.getNameOrId()}:\t\t${responseMessage.getStats()}`, -// ], -// }), -// ); -// } catch (err) { -// if (err instanceof grpcErrors.ErrorGRPCClientTimeout) { -// process.stderr.write(`${err.message}\n`); -// } else if (err instanceof grpcErrors.ErrorGRPCServerNotStarted) { -// process.stderr.write(`${err.message}\n`); -// } else { -// process.stderr.write( -// binUtils.outputFormatter({ -// type: 'error', -// description: err.description, -// message: err.message, -// }), -// ); -// throw err; -// } -// } finally { -// await client.stop(); -// options.nodePath = undefined; -// options.verbose = undefined; -// options.format = undefined; -// } -// }); - -// export default stat; diff --git a/src/bin/vaults/CommandUnshare.ts b/src/bin/vaults/CommandUnshare.ts index 097af67a5..5a189632d 100644 --- a/src/bin/vaults/CommandUnshare.ts +++ b/src/bin/vaults/CommandUnshare.ts @@ -51,17 +51,18 @@ class CommandUnshare extends CommandPolykey { port: clientOptions.clientPort, logger: this.logger.getChild(PolykeyClient.name), }); - const unsetVaultPermsMessage = new vaultsPB.PermUnset(); + const vaultsPermissionsMessage = new vaultsPB.Permissions(); const vaultMessage = new vaultsPB.Vault(); - const nodeMessage = new nodesPB.Node(); - unsetVaultPermsMessage.setVault(vaultMessage); - unsetVaultPermsMessage.setNode(nodeMessage); vaultMessage.setNameOrId(vaultName); + const nodeMessage = new nodesPB.Node(); nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); + vaultsPermissionsMessage.setVault(vaultMessage); + vaultsPermissionsMessage.setNode(nodeMessage); + vaultsPermissionsMessage.setVaultPermissionsList(['clone', 'pull']); await binUtils.retryAuthentication( (auth) => - pkClient.grpcClient.vaultsPermissionsUnset( - unsetVaultPermsMessage, + pkClient.grpcClient.vaultsPermissionUnset( + vaultsPermissionsMessage, auth, ), meta, diff --git a/src/bin/vaults/CommandVaults.ts b/src/bin/vaults/CommandVaults.ts index e7ba102f5..2c9a5d47c 100644 --- a/src/bin/vaults/CommandVaults.ts +++ b/src/bin/vaults/CommandVaults.ts @@ -3,12 +3,11 @@ import CommandCreate from './CommandCreate'; import CommandDelete from './CommandDelete'; import CommandList from './CommandList'; import CommandLog from './CommandLog'; -// Import CommandPermissions from './CommandPermissions'; +import CommandScan from './CommandScan'; +import CommandPermissions from './CommandPermissions'; import CommandPull from './CommandPull'; import CommandRename from './CommandRename'; -// Import CommandScan from './CommandScan'; import CommandShare from './CommandShare'; -// Import CommandStat from './CommandStat'; import CommandUnshare from './CommandUnshare'; import CommandVersion from './CommandVersion'; import CommandPolykey from '../CommandPolykey'; @@ -23,14 +22,13 @@ class CommandVaults extends CommandPolykey { this.addCommand(new CommandDelete(...args)); this.addCommand(new CommandList(...args)); this.addCommand(new CommandLog(...args)); - // This.addCommand(new CommandPermissions(...args)); + this.addCommand(new CommandPermissions(...args)); this.addCommand(new CommandPull(...args)); this.addCommand(new CommandRename(...args)); - // This.addCommand(new CommandScan(...args)); this.addCommand(new CommandShare(...args)); - // This.addCommand(new CommandStat(...args)); this.addCommand(new CommandUnshare(...args)); this.addCommand(new CommandVersion(...args)); + this.addCommand(new CommandScan(...args)); } } diff --git a/src/bootstrap/utils.ts b/src/bootstrap/utils.ts index feba00657..fc855bb02 100644 --- 
a/src/bootstrap/utils.ts +++ b/src/bootstrap/utils.ts @@ -160,17 +160,6 @@ async function bootstrapState({ sigchain, logger: logger.getChild(NodeManager.name), }); - const vaultManager = await VaultManager.createVaultManager({ - acl, - db, - gestaltGraph, - keyManager, - nodeConnectionManager, - vaultsKey: keyManager.vaultKey, - vaultsPath, - logger: logger.getChild(VaultManager.name), - fresh, - }); const notificationsManager = await NotificationsManager.createNotificationsManager({ acl, @@ -181,6 +170,17 @@ async function bootstrapState({ logger: logger.getChild(NotificationsManager.name), fresh, }); + const vaultManager = await VaultManager.createVaultManager({ + acl, + db, + gestaltGraph, + keyManager, + nodeConnectionManager, + vaultsPath, + notificationsManager, + logger: logger.getChild(VaultManager.name), + fresh, + }); const sessionManager = await SessionManager.createSessionManager({ db, keyManager, diff --git a/src/client/GRPCClientClient.ts b/src/client/GRPCClientClient.ts index ae0de84f1..3b07305ea 100644 --- a/src/client/GRPCClientClient.ts +++ b/src/client/GRPCClientClient.ts @@ -186,31 +186,26 @@ class GRPCClientClient extends GRPCClient { } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsPermissionsSet(...args) { - return grpcUtils.promisifyUnaryCall( + public vaultsPermissionGet(...args) { + return grpcUtils.promisifyReadableStreamCall( this.client, - this.client.vaultsPermissionsSet, + this.client.vaultsPermissionGet, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsPermissionsUnset(...args) { + public vaultsPermissionSet(...args) { return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissionsUnset, + this.client.vaultsPermissionSet, )(...args); } @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultPermissions( - ...args - ): AsyncGeneratorReadableStreamClient< - vaultsPB.Permission, - ClientReadableStream - > { - return grpcUtils.promisifyReadableStreamCall( + public vaultsPermissionUnset(...args) { + return grpcUtils.promisifyUnaryCall( this.client, - this.client.vaultsPermissions, + this.client.vaultsPermissionUnset, )(...args); } @@ -235,14 +230,6 @@ class GRPCClientClient extends GRPCClient { )(...args); } - @ready(new clientErrors.ErrorClientClientDestroyed()) - public vaultsSecretsStat(...args) { - return grpcUtils.promisifyUnaryCall( - this.client, - this.client.vaultsSecretsStat, - )(...args); - } - @ready(new clientErrors.ErrorClientClientDestroyed()) public vaultsSecretsDelete(...args) { return grpcUtils.promisifyUnaryCall( @@ -267,6 +254,14 @@ class GRPCClientClient extends GRPCClient { )(...args); } + @ready(new clientErrors.ErrorClientClientDestroyed()) + public vaultsSecretsStat(...args) { + return grpcUtils.promisifyUnaryCall( + this.client, + this.client.vaultsSecretsStat, + )(...args); + } + @ready(new clientErrors.ErrorClientClientDestroyed()) public vaultsSecretsRename(...args) { return grpcUtils.promisifyUnaryCall( diff --git a/src/client/service/gestaltsActionsGetByIdentity.ts b/src/client/service/gestaltsActionsGetByIdentity.ts index 1ee46b1fd..c4df02f2c 100644 --- a/src/client/service/gestaltsActionsGetByIdentity.ts +++ b/src/client/service/gestaltsActionsGetByIdentity.ts @@ -44,7 +44,7 @@ function gestaltsActionsGetByIdentity({ identityId, ); if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. 
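
[Review note] The `GRPCClientClient` changes above also flip call semantics: `vaultsPermissionGet` is now promisified with `promisifyReadableStreamCall` (server streaming), while `vaultsPermissionSet`/`vaultsPermissionUnset` stay unary. A minimal consumption sketch, mirroring the `CommandPermissions` action earlier in this diff; the started `pkClient` and the `auth` metadata threaded through `binUtils.retryAuthentication` are assumed from that context:

```ts
import type { Metadata } from '@grpc/grpc-js';
import type PolykeyClient from '../../PolykeyClient';
import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb';

// Sketch only: `pkClient` is a started PolykeyClient and `auth` is the
// session metadata, as in the CommandPermissions action in this diff
async function readVaultPermissions(
  pkClient: PolykeyClient,
  auth: Metadata,
  vaultName: string,
): Promise<Array<string>> {
  const vaultMessage = new vaultsPB.Vault();
  vaultMessage.setNameOrId(vaultName);
  const data: Array<string> = [];
  // promisifyReadableStreamCall makes the server stream async-iterable
  const stream = pkClient.grpcClient.vaultsPermissionGet(vaultMessage, auth);
  for await (const permission of stream) {
    const nodeId = permission.getNode()?.getNodeId();
    const actions = permission.getVaultPermissionsList().join(', ');
    data.push(`${nodeId}: ${actions}`);
  }
  return data;
}
```
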
+ // Node doesn't exist, so no permissions response.setActionList([]); } else { // Contains permission diff --git a/src/client/service/gestaltsActionsGetByNode.ts b/src/client/service/gestaltsActionsGetByNode.ts index 3d1f6b1c5..f4bcd4d5a 100644 --- a/src/client/service/gestaltsActionsGetByNode.ts +++ b/src/client/service/gestaltsActionsGetByNode.ts @@ -36,7 +36,7 @@ function gestaltsActionsGetByNode({ ); const result = await gestaltGraph.getGestaltActionsByNode(nodeId); if (result == null) { - // Node doesn't exist, so no permissions. might throw error instead TBD. + // Node doesn't exist, so no permissions response.setActionList([]); } else { // Contains permission diff --git a/src/client/service/index.ts b/src/client/service/index.ts index 70864a2f3..50e282fbb 100644 --- a/src/client/service/index.ts +++ b/src/client/service/index.ts @@ -13,6 +13,7 @@ import type { NotificationsManager } from '../../notifications'; import type { Discovery } from '../../discovery'; import type { Sigchain } from '../../sigchain'; import type { GRPCServer } from '../../grpc'; +import type ACL from '../../acl/ACL'; import type ForwardProxy from '../../network/ForwardProxy'; import type ReverseProxy from '../../network/ReverseProxy'; import type { IClientServiceServer } from '../../proto/js/polykey/v1/client_service_grpc_pb'; @@ -66,9 +67,9 @@ import vaultsCreate from './vaultsCreate'; import vaultsDelete from './vaultsDelete'; import vaultsList from './vaultsList'; import vaultsLog from './vaultsLog'; -import vaultsPermissions from './vaultsPermissions'; -import vaultsPermissionsSet from './vaultsPermissionsSet'; -import vaultsPermissionsUnset from './vaultsPermissionsUnset'; +import vaultsPermissionGet from './vaultsPermissionGet'; +import vaultsPermissionSet from './vaultsPermissionSet'; +import vaultsPermissionUnset from './vaultsPermissionUnset'; import vaultsPull from './vaultsPull'; import vaultsRename from './vaultsRename'; import vaultsScan from './vaultsScan'; @@ -104,6 +105,7 @@ function createService({ notificationsManager: NotificationsManager; discovery: Discovery; sigchain: Sigchain; + acl: ACL; grpcServerClient: GRPCServer; grpcServerAgent: GRPCServer; fwdProxy: ForwardProxy; @@ -169,9 +171,9 @@ function createService({ vaultsDelete: vaultsDelete(container), vaultsList: vaultsList(container), vaultsLog: vaultsLog(container), - vaultsPermissions: vaultsPermissions(container), - vaultsPermissionsSet: vaultsPermissionsSet(container), - vaultsPermissionsUnset: vaultsPermissionsUnset(container), + vaultsPermissionSet: vaultsPermissionSet(container), + vaultsPermissionUnset: vaultsPermissionUnset(container), + vaultsPermissionGet: vaultsPermissionGet(container), vaultsPull: vaultsPull(container), vaultsRename: vaultsRename(container), vaultsScan: vaultsScan(container), diff --git a/src/client/service/vaultsClone.ts b/src/client/service/vaultsClone.ts index d83cf771f..a35d70e7f 100644 --- a/src/client/service/vaultsClone.ts +++ b/src/client/service/vaultsClone.ts @@ -1,10 +1,19 @@ import type { Authenticate } from '../types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as validationUtils from '../../validation/utils'; +import * as vaultsUtils from '../../vaults/utils'; -function vaultsClone({ 
authenticate }: { authenticate: Authenticate }) { +function vaultsClone({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, @@ -25,13 +34,13 @@ function vaultsClone({ authenticate }: { authenticate: Authenticate }) { return; } // Vault id - // const vaultId = parseVaultInput(vaultMessage, vaultManager); + let vaultId; + const vaultNameOrId = vaultMessage.getNameOrId(); + vaultId = vaultsUtils.decodeVaultId(vaultNameOrId); + vaultId = vaultId ?? vaultNameOrId; // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - throw Error('Not implemented'); - // FIXME, not fully implemented - // await vaultManager.cloneVault(vaultId, id); + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); + await vaultManager.cloneVault(nodeId, vaultId); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsCreate.ts b/src/client/service/vaultsCreate.ts index bf1ddf33b..363e4a200 100644 --- a/src/client/service/vaultsCreate.ts +++ b/src/client/service/vaultsCreate.ts @@ -1,10 +1,10 @@ import type { Authenticate } from '../types'; -import type { Vault, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultId, VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsCreate({ @@ -19,14 +19,14 @@ function vaultsCreate({ callback: grpc.sendUnaryData, ): Promise => { const response = new vaultsPB.Vault(); - let vault: Vault; + let vaultId: VaultId; try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - vault = await vaultManager.createVault( + vaultId = await vaultManager.createVault( call.request.getNameOrId() as VaultName, ); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + response.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsDelete.ts b/src/client/service/vaultsDelete.ts index e8c80bb84..d2f029c4a 100644 --- a/src/client/service/vaultsDelete.ts +++ b/src/client/service/vaultsDelete.ts @@ -1,18 +1,11 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} +import * as validationUtils from '../../validation/utils'; function vaultsDelete({ vaultManager, @@ -32,8 +25,7 @@ function vaultsDelete({ call.sendMetadata(metadata); const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); await vaultManager.destroyVault(vaultId); response.setSuccess(true); callback(null, response); diff --git a/src/client/service/vaultsList.ts b/src/client/service/vaultsList.ts index b22723fa0..d81902976 100644 --- a/src/client/service/vaultsList.ts +++ b/src/client/service/vaultsList.ts @@ -1,9 +1,9 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsList({ @@ -23,12 +23,11 @@ function vaultsList({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaults = await vaultManager.listVaults(); for await (const [vaultName, vaultId] of vaults) { const vaultListMessage = new vaultsPB.List(); vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + vaultListMessage.setVaultId(vaultsUtils.encodeVaultId(vaultId)); await genWritable.next(((_) => vaultListMessage)()); } await genWritable.next(null); diff --git a/src/client/service/vaultsLog.ts b/src/client/service/vaultsLog.ts index 3c06e172d..99056911a 100644 --- a/src/client/service/vaultsLog.ts +++ b/src/client/service/vaultsLog.ts @@ -1,17 +1,11 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors } from '../../vaults'; +import { Timestamp } from 'google-protobuf/google/protobuf/timestamp_pb'; +import * as grpcUtils from '../../grpc/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; - -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} +import * as validationUtils from '../../validation/utils'; function vaultsLog({ vaultManager, @@ -27,7 +21,7 @@ function vaultsLog({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - // Getting the vault. 
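
[Review note] The `vaultsLog` hunk below is the first of many handlers in this diff switching from `openVault()` to the scoped `withVaults()` accessor: the vault handle only exists inside the callback, so acquisition and release no longer leak into each handler. A condensed sketch of the idiom, using the `VaultManager` types imported throughout this diff:

```ts
import type VaultManager from '../../vaults/VaultManager';
import type { VaultId } from '../../vaults/types';

// The handle passed to the callback is only valid for its duration;
// withVaults acquires and releases the vault around the callback
async function readVaultLog(
  vaultManager: VaultManager,
  vaultId: VaultId,
  commitId: string | undefined,
  depth: number,
) {
  return await vaultManager.withVaults([vaultId], async (vault) => {
    return await vault.log(commitId, depth);
  });
}
```
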
+ // Getting the vault const vaultsLogMessage = call.request; const vaultMessage = vaultsLogMessage.getVault(); if (vaultMessage == null) { @@ -36,21 +30,21 @@ function vaultsLog({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); // Getting the log const depth = vaultsLogMessage.getLogDepth(); let commitId: string | undefined = vaultsLogMessage.getCommitId(); commitId = commitId ? commitId : undefined; - const log = await vault.log(depth, commitId); - + const log = await vaultManager.withVaults([vaultId], async (vault) => { + return await vault.log(commitId, depth); + }); const vaultsLogEntryMessage = new vaultsPB.LogEntry(); for (const entry of log) { - vaultsLogEntryMessage.setOid(entry.oid); - vaultsLogEntryMessage.setCommitter(entry.committer); - vaultsLogEntryMessage.setTimeStamp(entry.timeStamp); + vaultsLogEntryMessage.setOid(entry.commitId); + vaultsLogEntryMessage.setCommitter(entry.committer.name); + const timestampMessage = new Timestamp(); + timestampMessage.fromDate(entry.committer.timestamp); + vaultsLogEntryMessage.setTimeStamp(timestampMessage); vaultsLogEntryMessage.setMessage(entry.message); await genWritable.next(vaultsLogEntryMessage); } diff --git a/src/client/service/vaultsPermissionGet.ts b/src/client/service/vaultsPermissionGet.ts new file mode 100644 index 000000000..23780000e --- /dev/null +++ b/src/client/service/vaultsPermissionGet.ts @@ -0,0 +1,67 @@ +import type { Authenticate } from '../types'; +import type VaultManager from '../../vaults/VaultManager'; +import type { VaultName } from '../../vaults/types'; +import type * as grpc from '@grpc/grpc-js'; +import type { VaultActions } from '../../vaults/types'; +import type ACL from '../../acl/ACL'; +import type { NodeId, NodeIdEncoded } from 'nodes/types'; +import { IdInternal } from '@matrixai/id'; +import * as grpcUtils from '../../grpc/utils'; +import * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; +import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as validationUtils from '../../validation/utils'; +import * as nodesUtils from '../../nodes/utils'; + +function vaultsPermissionGet({ + authenticate, + vaultManager, + acl, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; + acl: ACL; +}) { + return async ( + call: grpc.ServerWritableStream, + ): Promise => { + const genWritable = grpcUtils.generatorWritable(call); + try { + const vaultMessage = call.request; + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + // Getting vaultId + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); + + // Getting permissions + const rawPermissions = await acl.getVaultPerm(vaultId); + const permissionList: Record = {}; + // Getting the relevant information + for (const nodeId in rawPermissions) { + permissionList[nodeId] = rawPermissions[nodeId].vaults[vaultId]; + } + + const vaultPermissionsMessage = new vaultsPB.Permissions(); + vaultPermissionsMessage.setVault(vaultMessage); + const nodeMessage = new nodesPB.Node(); + + // Constructing the message + for (const nodeIdString in permissionList) { + const nodeId = IdInternal.fromString(nodeIdString); + nodeMessage.setNodeId(nodesUtils.encodeNodeId(nodeId)); + vaultPermissionsMessage.setNode(nodeMessage); + const actions = Object.keys(permissionList[nodeIdString]); + vaultPermissionsMessage.setVaultPermissionsList(actions); + await genWritable.next(vaultPermissionsMessage); + } + await genWritable.next(null); + return; + } catch (err) { + await genWritable.throw(err); + return; + } + }; +} + +export default vaultsPermissionGet; diff --git a/src/client/service/vaultsPermissionSet.ts b/src/client/service/vaultsPermissionSet.ts new file mode 100644 index 000000000..6b4768ee8 --- /dev/null +++ b/src/client/service/vaultsPermissionSet.ts @@ -0,0 +1,82 @@ +import type { Authenticate } from '../types'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; +import type GestaltGraph from '../../gestalts/GestaltGraph'; +import type ACL from '../../acl/ACL'; +import type NotificationsManager from '../../notifications/NotificationsManager'; +import type { VaultActions } from '../../vaults/types'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import * as vaultsUtils from '../../vaults/utils'; +import * as vaultsErrors from '../../vaults/errors'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsPermissionSet({ + vaultManager, + authenticate, + gestaltGraph, + acl, + notificationsManager, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + gestaltGraph: GestaltGraph; + acl: ACL; + notificationsManager: NotificationsManager; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + // Checking session token + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultsPermissionsMessage = call.request; + const vaultMessage = vaultsPermissionsMessage.getVault(); + const nodeMessage = vaultsPermissionsMessage.getNode(); + if (vaultMessage == null || nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Parsing VaultId + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); + // Parsing NodeId + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); + // Parsing actions + const actions = vaultsPermissionsMessage + .getVaultPermissionsList() + .map((vaultAction) => validationUtils.parseVaultAction(vaultAction)); + // Checking if vault exists + const vaultMeta = await vaultManager.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + // Setting permissions + const actionsSet: VaultActions = {}; + await gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + for (const action of actions) { + await acl.setVaultAction(vaultId, nodeId, action); + actionsSet[action] = null; + } + // Sending notification + await notificationsManager.sendNotification(nodeId, { + type: 'VaultShare', + vaultId: vaultsUtils.encodeVaultId(vaultId), + vaultName: vaultMeta.vaultName, + actions: actionsSet, + }); + // Formatting response + const response = new utilsPB.StatusMessage().setSuccess(true); + callback(null, response); + return; + } catch (e) { + callback(grpcUtils.fromError(e)); + return; + } + }; +} + +export default vaultsPermissionSet; diff --git a/src/client/service/vaultsPermissionUnset.ts b/src/client/service/vaultsPermissionUnset.ts new file mode 100644 index 000000000..d16d81d98 --- /dev/null +++ b/src/client/service/vaultsPermissionUnset.ts @@ -0,0 +1,81 @@ +import type { Authenticate } from '../types'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; +import type GestaltGraph from '../../gestalts/GestaltGraph'; +import type ACL from '../../acl/ACL'; +import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import * as vaultsErrors from '../../vaults/errors'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; + +function vaultsPermissionUnset({ + vaultManager, + authenticate, + gestaltGraph, + acl, +}: { + vaultManager: VaultManager; + authenticate: Authenticate; + gestaltGraph: GestaltGraph; + acl: ACL; +}) { + return async ( + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, + ): Promise => { + try { + // Checking session token + const metadata = await authenticate(call.metadata); + call.sendMetadata(metadata); + const vaultsPermissionsMessage = call.request; + const vaultMessage = vaultsPermissionsMessage.getVault(); + const nodeMessage = vaultsPermissionsMessage.getNode(); + if (vaultMessage == null || nodeMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + // Parsing VaultId + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); + // Parsing NodeId + const nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); + // Parsing actions + const actions = vaultsPermissionsMessage + .getVaultPermissionsList() + .map((vaultAction) => validationUtils.parseVaultAction(vaultAction)); + // Checking if vault exists + const vaultMeta = await vaultManager.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + // Unsetting permissions + await gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + for (const action of actions) { + await acl.unsetVaultAction(vaultId, nodeId, action); + } + // We need to check if there are still shared vaults + const nodePermissions = await acl.getNodePerm(nodeId); + // Remove scan permissions if no more shared vaults + if (nodePermissions != null) { + // Counting total number of permissions + const totalPermissions = Object.keys(nodePermissions.vaults) + .map((key) => Object.keys(nodePermissions.vaults[key]).length) + .reduce((prev, current) => current + prev); + // If no permissions are left then we remove the scan permission + if (totalPermissions === 0) { + await gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); + } + } + // Formatting response + const response = new utilsPB.StatusMessage().setSuccess(true); + callback(null, response); + return; + } catch (e) { + callback(grpcUtils.fromError(e)); + return; + } + }; +} + +export default vaultsPermissionUnset; diff --git a/src/client/service/vaultsPermissions.ts b/src/client/service/vaultsPermissions.ts deleted file mode 100644 index 8fba60112..000000000 --- a/src/client/service/vaultsPermissions.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { Authenticate } from '../types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; - -function vaultsPermissions({ authenticate }: { authenticate: Authenticate }) { - return async ( - call: grpc.ServerWritableStream, - ): Promise => { - const genWritable = grpcUtils.generatorWritable(call); - - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const node = nodeMessage.getNodeId(); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - await genWritable.throw({ code: grpc.status.NOT_FOUND }); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // let perms: Record; - throw Error('Not implemented'); - // FIXME - // if (isNodeId(node)) { - // Perms = await vaultManager.getVaultPermissions(id, node); - // } else { - // Perms = await vaultManager.getVaultPermissions(id); - // } - // const permissionMessage = new vaultsPB.Permission(); - // For (const nodeId in perms) { - // permissionMessage.setNodeId(nodeId); - // if (perms[nodeId]['pull'] !== undefined) { - // permissionMessage.setAction('pull'); - // } - // await genWritable.next(permissionMessage); - // } - await genWritable.next(null); - return; - } catch (e) { - await genWritable.throw(e); - return; - } - }; -} - -export default vaultsPermissions; diff --git a/src/client/service/vaultsPermissionsSet.ts b/src/client/service/vaultsPermissionsSet.ts deleted file mode 100644 index 05ddb055f..000000000 --- a/src/client/service/vaultsPermissionsSet.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { Authenticate 
} from '../types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; - -function vaultsPermissionsSet({ - authenticate, -}: { - authenticate: Authenticate; -}) { - return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not Implemented'); - // Await vaultManager.setVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (e) { - callback(grpcUtils.fromError(e)); - return; - } - }; -} - -export default vaultsPermissionsSet; diff --git a/src/client/service/vaultsPermissionsUnset.ts b/src/client/service/vaultsPermissionsUnset.ts deleted file mode 100644 index fd2e8429f..000000000 --- a/src/client/service/vaultsPermissionsUnset.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { Authenticate } from '../types'; -import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; -import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; - -function vaultsPermissionsUnset({ - authenticate, -}: { - authenticate: Authenticate; -}) { - return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, - ): Promise => { - try { - const metadata = await authenticate(call.metadata); - call.sendMetadata(metadata); - - const nodeMessage = call.request.getNode(); - if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const node = makeNodeId(nodeMessage.getNodeId()); - const vaultMessage = call.request.getVault(); - if (vaultMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; - } - // Const id = await parseVaultInput(vaultMessage, vaultManager); - throw Error('Not implemented'); - // Await vaultManager.unsetVaultPermissions(node, id); // FIXME - const response = new utilsPB.StatusMessage(); - response.setSuccess(true); - callback(null, response); - return; - } catch (e) { - callback(grpcUtils.fromError(e)); - return; - } - }; -} - -export default vaultsPermissionsUnset; diff --git a/src/client/service/vaultsPull.ts b/src/client/service/vaultsPull.ts index a9dcf663a..8c18e1a29 100644 --- a/src/client/service/vaultsPull.ts +++ b/src/client/service/vaultsPull.ts @@ -1,10 +1,20 @@ import type { Authenticate } from '../types'; +import type VaultManager from '../../vaults/VaultManager'; +import type { VaultName } from '../../vaults/types'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; +import * as grpcUtils from '../../grpc/utils'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; +import * as validationUtils from '../../validation/utils'; +import * as 
vaultsUtils from '../../vaults/utils'; -function vaultsPull({ authenticate }: { authenticate: Authenticate }) { +function vaultsPull({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { return async ( call: grpc.ServerUnaryCall, callback: grpc.sendUnaryData, @@ -19,17 +29,30 @@ function vaultsPull({ authenticate }: { authenticate: Authenticate }) { callback({ code: grpc.status.NOT_FOUND }, null); return; } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); + let nodeId; const nodeMessage = call.request.getNode(); if (nodeMessage == null) { - callback({ code: grpc.status.NOT_FOUND }, null); - return; + nodeId = null; + } else { + nodeId = validationUtils.parseNodeId(nodeMessage.getNodeId()); } - // Vault name - // const vaultId = await parseVaultInput(vaultMessage, vaultManager); - // Node id - // const id = makeNodeId(nodeMessage.getNodeId()); - - // Await vaultManager.pullVault(vaultId, id); + let pullVault; + const pullVaultMessage = call.request.getPullVault(); + if (pullVaultMessage == null) { + pullVault = null; + } else { + pullVault = vaultsUtils.decodeVaultId(pullVaultMessage.getNameOrId()); + pullVault = pullVault ?? pullVaultMessage.getNameOrId(); + if (pullVault == null) pullVault = pullVaultMessage.getNameOrId(); + } + await vaultManager.pullVault({ + vaultId, + pullNodeId: nodeId, + pullVaultNameOrId: pullVault, + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsRename.ts b/src/client/service/vaultsRename.ts index 42e1aee97..506162989 100644 --- a/src/client/service/vaultsRename.ts +++ b/src/client/service/vaultsRename.ts @@ -1,18 +1,12 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultsUtils from '../../vaults/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsRename({ vaultManager, authenticate, @@ -35,11 +29,10 @@ function vaultsRename({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const newName = call.request.getNewName() as VaultName; await vaultManager.renameVault(vaultId, newName); - response.setNameOrId(vaultsUtils.makeVaultIdPretty(vaultId)); + response.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsScan.ts b/src/client/service/vaultsScan.ts index 0845809ed..3d8d73a7e 100644 --- a/src/client/service/vaultsScan.ts +++ b/src/client/service/vaultsScan.ts @@ -1,9 +1,12 @@ import type { Authenticate } from '../types'; -import type { VaultManager } from '../../vaults'; +import type { NodeId } from '../../nodes/types'; import type * as nodesPB from '../../proto/js/polykey/v1/nodes/nodes_pb'; import type * as grpc from '@grpc/grpc-js'; -import { utils as grpcUtils } from '../../grpc'; -import { utils as vaultsUtils } from '../../vaults'; +import type VaultManager from '../../vaults/VaultManager'; +import * as grpcUtils from '../../grpc/utils'; +import { validateSync } from '../../validation'; +import * as validationUtils from '../../validation/utils'; +import { matchSync } from '../../utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; function vaultsScan({ @@ -20,13 +23,32 @@ function vaultsScan({ try { const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaults = await vaultManager.listVaults(); - vaults.forEach(async (vaultId, vaultName) => { - const vaultListMessage = new vaultsPB.List(); + const { + nodeId, + }: { + nodeId: NodeId; + } = validateSync( + (keyPath, value) => { + return matchSync(keyPath)( + [['nodeId'], () => validationUtils.parseNodeId(value)], + () => value, + ); + }, + { + nodeId: call.request.getNodeId(), + }, + ); + const vaultListMessage = new vaultsPB.List(); + for await (const { + vaultIdEncoded, + vaultName, + vaultPermissions, + } of vaultManager.scanVaults(nodeId)) { vaultListMessage.setVaultName(vaultName); - vaultListMessage.setVaultId(vaultsUtils.makeVaultIdPretty(vaultId)); + vaultListMessage.setVaultId(vaultIdEncoded); + vaultListMessage.setVaultPermissionsList(vaultPermissions); await genWritable.next(vaultListMessage); - }); + } await genWritable.next(null); return; } catch (e) { diff --git a/src/client/service/vaultsSecretsDelete.ts b/src/client/service/vaultsSecretsDelete.ts index 8c042e845..07a56a92d 100644 --- a/src/client/service/vaultsSecretsDelete.ts +++ b/src/client/service/vaultsSecretsDelete.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsDelete({ vaultManager, authenticate, @@ -29,7 +23,6 @@ function vaultsSecretsDelete({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -37,11 +30,11 @@ function vaultsSecretsDelete({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); const secretName = call.request.getSecretName(); - await vaultOps.deleteSecret(vault, secretName); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.deleteSecret(vault, secretName); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsEdit.ts b/src/client/service/vaultsSecretsEdit.ts index 2142d5014..8f45362b2 100644 --- a/src/client/service/vaultsSecretsEdit.ts +++ b/src/client/service/vaultsSecretsEdit.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsEdit({ vaultManager, authenticate, @@ -29,7 +23,6 @@ function vaultsSecretsEdit({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const secretMessage = call.request; if (secretMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -42,12 +35,12 @@ function vaultsSecretsEdit({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const secretName = secretMessage.getSecretName(); const secretContent = Buffer.from(secretMessage.getSecretContent()); - await vaultOps.updateSecret(vault, secretName, secretContent); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.updateSecret(vault, secretName, secretContent); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsGet.ts b/src/client/service/vaultsSecretsGet.ts index c3fd06cbc..fa836e1b0 100644 --- a/src/client/service/vaultsSecretsGet.ts +++ b/src/client/service/vaultsSecretsGet.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsGet({ vaultManager, authenticate, @@ -29,7 +23,6 @@ function vaultsSecretsGet({ const response = new secretsPB.Secret(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -37,11 +30,14 @@ function vaultsSecretsGet({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const secretName = call.request.getSecretName(); - const secretContent = await vaultOps.getSecret(vault, secretName); + const secretContent = await vaultManager.withVaults( + [vaultId], + async (vault) => { + return await vaultOps.getSecret(vault, secretName); + }, + ); response.setSecretContent(secretContent); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsList.ts b/src/client/service/vaultsSecretsList.ts index 42eaea942..db2a1cc36 100644 --- a/src/client/service/vaultsSecretsList.ts +++ b/src/client/service/vaultsSecretsList.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as grpc from '@grpc/grpc-js'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsList({ vaultManager, authenticate, @@ -31,10 +25,13 @@ function vaultsSecretsList({ const vaultMessage = call.request; const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - const secrets = await vaultOps.listSecrets(vault); + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); + const secrets = await vaultManager.withVaults( + [vaultId], + async (vault) => { + return await vaultOps.listSecrets(vault); + }, + ); let secretMessage: secretsPB.Secret; for (const secret of secrets) { secretMessage = new secretsPB.Secret(); diff --git a/src/client/service/vaultsSecretsMkdir.ts b/src/client/service/vaultsSecretsMkdir.ts index 5c51f0673..fca32d4f9 100644 --- a/src/client/service/vaultsSecretsMkdir.ts +++ b/src/client/service/vaultsSecretsMkdir.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? 
(idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsMkdir({ vaultManager, authenticate, @@ -29,7 +23,6 @@ function vaultsSecretsMkdir({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMkdirMessge = call.request; const vaultMessage = vaultMkdirMessge.getVault(); if (vaultMessage == null) { @@ -38,11 +31,11 @@ function vaultsSecretsMkdir({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); - await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { - recursive: vaultMkdirMessge.getRecursive(), + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.mkdir(vault, vaultMkdirMessge.getDirName(), { + recursive: vaultMkdirMessge.getRecursive(), + }); }); response.setSuccess(true); callback(null, response); diff --git a/src/client/service/vaultsSecretsNew.ts b/src/client/service/vaultsSecretsNew.ts index 2e3e7d18b..3c22baa7a 100644 --- a/src/client/service/vaultsSecretsNew.ts +++ b/src/client/service/vaultsSecretsNew.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsNew({ vaultManager, authenticate, @@ -29,7 +23,6 @@ function vaultsSecretsNew({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -37,12 +30,12 @@ function vaultsSecretsNew({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const secret = call.request.getSecretName(); const content = Buffer.from(call.request.getSecretContent()); - await vaultOps.addSecret(vault, secret, content); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, secret, content); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsNewDir.ts b/src/client/service/vaultsSecretsNewDir.ts index 33d9b6968..31a075e01 100644 --- a/src/client/service/vaultsSecretsNewDir.ts +++ b/src/client/service/vaultsSecretsNewDir.ts @@ -1,20 +1,14 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type { FileSystem } from '../../types'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsNewDir({ vaultManager, authenticate, @@ -32,7 +26,6 @@ function vaultsSecretsNewDir({ const response = new utilsPB.StatusMessage(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultMessage = call.request.getVault(); if (vaultMessage == null) { callback({ code: grpc.status.NOT_FOUND }, null); @@ -40,11 +33,11 @@ function vaultsSecretsNewDir({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const secretsPath = call.request.getSecretDirectory(); - await vaultOps.addSecretDirectory(vault, secretsPath, fs); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecretDirectory(vault, secretsPath, fs); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsRename.ts b/src/client/service/vaultsSecretsRename.ts index 2fe81c7b5..7de527519 100644 --- a/src/client/service/vaultsSecretsRename.ts +++ b/src/client/service/vaultsSecretsRename.ts @@ -1,19 +1,13 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import type * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { vaultOps, errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; import * as utilsPB from '../../proto/js/polykey/v1/utils/utils_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsSecretsRename({ vaultManager, authenticate, @@ -41,12 +35,12 @@ function vaultsSecretsRename({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await vaultManager.openVault(vaultId); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); const oldSecret = secretMessage.getSecretName(); const newSecret = call.request.getNewName(); - await vaultOps.renameSecret(vault, oldSecret, newSecret); + await vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.renameSecret(vault, oldSecret, newSecret); + }); response.setSuccess(true); callback(null, response); return; diff --git a/src/client/service/vaultsSecretsStat.ts b/src/client/service/vaultsSecretsStat.ts index f7250cdad..e657d4009 100644 --- a/src/client/service/vaultsSecretsStat.ts +++ b/src/client/service/vaultsSecretsStat.ts @@ -1,25 +1,40 @@ -import type * as grpc from '@grpc/grpc-js'; +import type VaultManager from '../../vaults/VaultManager'; +import type { VaultName } from '../../vaults/types'; import type { Authenticate } from '../types'; -import { utils as grpcUtils } from '../../grpc'; -import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; +import * as grpc from '@grpc/grpc-js'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; +import * as vaultOps from '../../vaults/VaultOps'; +import * as secretsPB from '../../proto/js/polykey/v1/secrets/secrets_pb'; -function vaultsSecretsStat({ authenticate }: { authenticate: Authenticate }) { +function vaultsSecretsStat({ + authenticate, + vaultManager, +}: { + authenticate: Authenticate; + vaultManager: VaultManager; +}) { return async ( - call: grpc.ServerUnaryCall, - callback: grpc.sendUnaryData, + call: grpc.ServerUnaryCall, + callback: grpc.sendUnaryData, ): Promise => { try { - const response = new vaultsPB.Stat(); + const response = new secretsPB.Stat(); const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - - // Const vaultMessage = call.request; - // Const id = await parseVaultInput(vaultMessage, vaultManager); - // const vault = await vaultManager.openVault(id); - // FIXME, reimplement this. - throw Error('Not Implemented'); - // Const stats = await vaultManager.vaultStats(id); - // response.setStats(JSON.stringify(stats));); + const vaultMessage = call.request.getVault(); + if (vaultMessage == null) { + callback({ code: grpc.status.NOT_FOUND }, null); + return; + } + const nameOrId = vaultMessage.getNameOrId(); + let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); + vaultId = vaultId ?? 
validationUtils.parseVaultId(nameOrId); + const secretName = call.request.getSecretName(); + const stat = await vaultManager.withVaults([vaultId], async (vault) => { + return await vaultOps.statSecret(vault, secretName); + }); + response.setJson(JSON.stringify(stat)); callback(null, response); return; } catch (e) { diff --git a/src/client/service/vaultsVersion.ts b/src/client/service/vaultsVersion.ts index 4533dca9e..4338966da 100644 --- a/src/client/service/vaultsVersion.ts +++ b/src/client/service/vaultsVersion.ts @@ -1,18 +1,11 @@ import type { Authenticate } from '../types'; -import type { VaultId, VaultName } from '../../vaults/types'; -import type { VaultManager } from '../../vaults'; +import type { VaultName } from '../../vaults/types'; +import type VaultManager from '../../vaults/VaultManager'; import * as grpc from '@grpc/grpc-js'; -import { utils as idUtils } from '@matrixai/id'; -import { utils as grpcUtils } from '../../grpc'; -import { errors as vaultsErrors } from '../../vaults'; +import * as validationUtils from '../../validation/utils'; +import * as grpcUtils from '../../grpc/utils'; import * as vaultsPB from '../../proto/js/polykey/v1/vaults/vaults_pb'; -function decodeVaultId(input: string): VaultId | undefined { - return idUtils.fromMultibase(input) - ? (idUtils.fromMultibase(input) as VaultId) - : undefined; -} - function vaultsVersion({ vaultManager, authenticate, @@ -29,9 +22,7 @@ function vaultsVersion({ // Checking session token const metadata = await authenticate(call.metadata); call.sendMetadata(metadata); - const vaultsVersionMessage = call.request; - // Getting vault ID const vaultMessage = vaultsVersionMessage.getVault(); if (vaultMessage == null) { @@ -40,23 +31,22 @@ function vaultsVersion({ } const nameOrId = vaultMessage.getNameOrId(); let vaultId = await vaultManager.getVaultId(nameOrId as VaultName); - if (!vaultId) vaultId = decodeVaultId(nameOrId); - if (!vaultId) throw new vaultsErrors.ErrorVaultUndefined(); - + vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId); // Doing the deed - const vault = await vaultManager.openVault(vaultId); - const latestOid = (await vault.log())[0].oid; const versionId = vaultsVersionMessage.getVersionId(); - - await vault.version(versionId); - const currentVersionId = (await vault.log(0, versionId))[0]?.oid; - - // Checking if latest version ID. + const [latestOid, currentVersionId] = await vaultManager.withVaults( + [vaultId], + async (vault) => { + const latestOid = (await vault.log())[0].commitId; + await vault.version(versionId); + const currentVersionId = (await vault.log(versionId, 0))[0]?.commitId; + return [latestOid, currentVersionId]; + }, + ); + // Checking if latest version ID const isLatestVersion = latestOid === currentVersionId; - // Creating message response.setIsLatestVersion(isLatestVersion); - // Sending message callback(null, response); return; diff --git a/src/config.ts b/src/config.ts index 09f88b66a..c8322e2f2 100644 --- a/src/config.ts +++ b/src/config.ts @@ -62,6 +62,7 @@ const config = { dbBase: 'db', keysBase: 'keys', vaultsBase: 'vaults', + efsBase: 'efs', tokenBase: 'token', keysConfig: { rootKeyPairBits: 4096, diff --git a/src/discovery/types.ts b/src/discovery/types.ts index 9c32ed947..c91021c7e 100644 --- a/src/discovery/types.ts +++ b/src/discovery/types.ts @@ -1,5 +1,5 @@ import type { Opaque } from '../types'; -import type { Id } from '../GenericIdTypes'; +import type { Id } from '@matrixai/id'; /** * Used to preserve order in the Discovery Queue. 
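The vault secret handlers above all share one refactor: the ad-hoc `decodeVaultId` helper and `vaultManager.openVault` call are replaced by `validationUtils.parseVaultId` plus `vaultManager.withVaults`, which scopes vault access to a callback. A minimal sketch of the shared pattern (import paths and the error-throwing behaviour of `parseVaultId` are assumptions for illustration):

```ts
import type { VaultName } from './vaults/types';
import type VaultManager from './vaults/VaultManager';
import * as validationUtils from './validation/utils';
import * as vaultOps from './vaults/VaultOps';

// Resolve a user-supplied vault name or encoded vault ID, then run the
// secret operation while withVaults holds the vault open.
async function readSecret(
  vaultManager: VaultManager,
  nameOrId: string,
  secretName: string,
) {
  // Try a name lookup first, then fall back to parsing an encoded ID;
  // parseVaultId is assumed to throw a validation error on malformed input.
  let vaultId = await vaultManager.getVaultId(nameOrId as VaultName);
  vaultId = vaultId ?? validationUtils.parseVaultId(nameOrId);
  return await vaultManager.withVaults([vaultId], async (vault) => {
    return await vaultOps.getSecret(vault, secretName);
  });
}
```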
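The reworked `vaultsVersion` handler follows the same pattern and also renames the log field from `oid` to `commitId`. A sketch of just that version check, assuming the `vault.log`/`vault.version` signatures used in the diff:

```ts
import type { VaultId } from './vaults/types';
import type VaultManager from './vaults/VaultManager';

// Switch the vault to versionId, then compare the commit we land on
// against the head commit to decide whether we are at the latest version.
async function isLatestVersion(
  vaultManager: VaultManager,
  vaultId: VaultId,
  versionId: string,
): Promise<boolean> {
  const [latestOid, currentVersionId] = await vaultManager.withVaults(
    [vaultId],
    async (vault) => {
      const latestOid = (await vault.log())[0].commitId;
      await vault.version(versionId);
      const currentVersionId = (await vault.log(versionId, 0))[0]?.commitId;
      return [latestOid, currentVersionId];
    },
  );
  return latestOid === currentVersionId;
}
```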
diff --git a/src/discovery/utils.ts b/src/discovery/utils.ts index b8a9f9808..b0c774a63 100644 --- a/src/discovery/utils.ts +++ b/src/discovery/utils.ts @@ -1,19 +1,13 @@ import type { DiscoveryQueueId, DiscoveryQueueIdGenerator } from './types'; import { IdSortable } from '@matrixai/id'; -import { makeId } from '../GenericIdTypes'; - -function makeDiscoveryQueueId(arg: any) { - return makeId(arg); -} function createDiscoveryQueueIdGenerator( lastId?: DiscoveryQueueId, ): DiscoveryQueueIdGenerator { - const idSortableGenerator = new IdSortable({ + const idSortableGenerator = new IdSortable({ lastId, }); - return (): DiscoveryQueueId => - makeDiscoveryQueueId(idSortableGenerator.get()); + return (): DiscoveryQueueId => idSortableGenerator.get(); } -export { makeDiscoveryQueueId, createDiscoveryQueueIdGenerator }; +export { createDiscoveryQueueIdGenerator }; diff --git a/src/git/GitRequest.ts b/src/git/GitRequest.ts deleted file mode 100644 index 14f304d66..000000000 --- a/src/git/GitRequest.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Responsible for converting HTTP messages from isomorphic-git into requests and sending them to a specific node. - */ - -class GitRequest { - private requestInfo: ( - vaultNameOrId: string, - ) => AsyncIterableIterator; - private requestPack: ( - vaultNameOrId: string, - body: any, - ) => AsyncIterableIterator; - private requestVaultNames: () => Promise; - - constructor( - requestInfo: (vaultNameOrId: string) => AsyncIterableIterator, - requestPack: ( - vaultNameOrId: string, - body: Buffer, - ) => AsyncIterableIterator, - requestVaultNames: () => Promise, - ) { - this.requestInfo = requestInfo; - this.requestPack = requestPack; - this.requestVaultNames = requestVaultNames; - } - - /** - * The custom http request method to feed into isomorphic-git's [custom http object](https://isomorphic-git.org/docs/en/http) - * In the future this will need to be changed in order to handle the receive-pack command from isomorphic-git. 
This will be - * in the url passed into the request function and is needed for push functionality - */ - public async request({ - url, - method = 'GET', - headers = {}, - body = Buffer.from(''), - }) { - const u = new URL(url); - - // Parse request - if (method === 'GET') { - const match = u.pathname.match(/\/(.+)\/info\/refs$/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - const infoResponse = this.requestInfo(vaultNameOrId); - - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const match = u.pathname.match(/\/(.+)\/git-(.+)/); - if (!match || /\.\./.test(match[1])) { - throw new Error('Error'); - } - - const vaultNameOrId = match![1]; - - const packResponse = this.requestPack(vaultNameOrId, body[0]); - - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - } - - public async scanVaults() { - return await this.requestVaultNames(); - } -} - -export default GitRequest; diff --git a/src/git/index.ts b/src/git/index.ts index dae0d1ba1..006019213 100644 --- a/src/git/index.ts +++ b/src/git/index.ts @@ -1,4 +1,3 @@ -export { default as GitRequest } from './GitRequest'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/git/utils.ts b/src/git/utils.ts index d565ddc15..d7d6b55e2 100644 --- a/src/git/utils.ts +++ b/src/git/utils.ts @@ -1,17 +1,17 @@ import type { - Refs, - SymRefs, Ack, + DeflatedObject, Identity, Pack, PackIndex, - DeflatedObject, - WrappedObject, RawObject, + Refs, + SymRefs, + WrappedObject, } from './types'; import type { - ReadCommitResult, CommitObject, + ReadCommitResult, TreeEntry, TreeObject, } from 'isomorphic-git'; @@ -22,8 +22,13 @@ import pako from 'pako'; import Hash from 'sha.js/sha1'; import { PassThrough } from 'readable-stream'; import createHash from 'sha.js'; -import * as gitErrors from './errors'; +import { errors as gitErrors } from './'; +import * as vaultsUtils from '../vaults/utils'; +/** + * List of paths to check for a specific ref. + * @param ref Reference string + */ const refpaths = (ref: string) => [ `${ref}`, `refs/${ref}`, @@ -121,6 +126,10 @@ function compareRefNames(refa: string, refb: string): number { return tmp; } +/** + * Parses the packed-refs file. + * @param text - contents of the packed refs file. + */ function textToPackedRefs(text: string): Refs { const refs: Refs = {}; if (text) { @@ -152,14 +161,30 @@ function textToPackedRefs(text: string): Refs { return refs; } +/** + * Reads and parses the packed-refs file. + * @param fs Filesystem implementation + * @param gitdir Git '.git' directory + */ async function packedRefs(fs: EncryptedFS, gitdir: string): Promise { - const text = await fs.promises.readFile(path.join(gitdir, 'packed-refs'), { - encoding: 'utf8', - }); - const refs = textToPackedRefs(text.toString()); - return refs; + let text: string | Buffer = '# pack-refs with: peeled fully-peeled sorted'; + try { + text = await fs.promises.readFile(path.join(gitdir, 'packed-refs'), { + encoding: 'utf8', + }); + } catch (err) { + if (err.code !== 'ENOENT') throw err; + // If no file then ignore and return default. + } + return textToPackedRefs(text!.toString()); } +/** + * Obtains a list of all refs by recursively reading the FS. 
+ * @param fs Filesystem implementation + * @param gitdir Git '.git' directory + * @param filepath Path to start listing from. + */ async function listRefs( fs: EncryptedFS, gitdir: string, @@ -168,7 +193,7 @@ const packedMap = packedRefs(fs, gitdir); let files: string[] = []; try { - for await (const file of readdirRecursively( + for await (const file of vaultsUtils.readdirRecursively( fs, path.join(gitdir, filepath), )) { @@ -194,33 +219,28 @@ return files; } -async function* readdirRecursively( - efs: EncryptedFS, - dir: string, - dirs?: boolean, -) { - const dirents = await efs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); // Makes string | buffer a string. - secretPath = path.join(dir, res); - if ((await efs.stat(secretPath)).isDirectory() && dirent !== '.git') { - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursively(efs, secretPath, dirs); - } else if ((await efs.stat(secretPath)).isFile()) { - yield secretPath; - } - } -} - -async function resolve( - fs: EncryptedFS, - gitdir: string, - ref: string, - depth?: number, -): Promise { +/** + * Resolves a ref to its SHA hash by walking the fs and packed refs. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param ref Ref we wish to resolve. + * @param depth How deep to search. + * @returns {String} The resolved SHA hash. + */ +async function resolve({ + fs, + dir = '.', + gitdir = '.git', + ref, + depth, +}: { + fs: EncryptedFS; + dir?: string; + gitdir?: string; + ref: string; + depth?: number; +}): Promise { if (depth !== undefined) { depth--; if (depth === -1) { @@ -230,7 +250,7 @@ // Is it a ref pointer? if (ref.startsWith('ref: ')) { ref = ref.slice('ref: '.length); - return resolve(fs, gitdir, ref, depth); + return resolve({ fs, dir, gitdir, ref, depth }); } // Is it a complete and valid SHA? if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { @@ -248,24 +268,37 @@ await fs.promises.readFile(path.join(gitdir, ref), { encoding: 'utf8', }) - ).toString() || packedMap[ref].line; // FIXME: not sure what is going on here. + ).toString() || packedMap[ref].line; } catch (err) { if (err.code === 'ENOENT') { throw new gitErrors.ErrorGitUndefinedRefs(`Ref ${ref} cannot be found`); } } if (sha != null) { - return resolve(fs, gitdir, sha.trim(), depth); // FIXME: sha is string or config? + return resolve({ fs, dir, gitdir, ref: sha.trim(), depth }); } } throw new gitErrors.ErrorGitUndefinedRefs(`ref ${ref} corrupted`); } -async function uploadPack( - fs: EncryptedFS, - gitdir: string = '.git', +/** + * Obtains a list of all the refs in the repository and formats it. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param advertiseRefs Whether we want to advertise the refs.
+ */ +async function uploadPack({ + fs, + dir = '.', + gitdir = '.git', advertiseRefs = false, -): Promise | undefined> { +}: { + fs: EncryptedFS; + dir?: string; + gitdir?: string; + advertiseRefs: boolean; +}): Promise> { try { if (advertiseRefs) { const capabilities = ['side-band-64k']; @@ -274,16 +307,24 @@ const refs = {}; keys.unshift('HEAD'); for (const key of keys) { - refs[key] = await resolve(fs, gitdir, key); + refs[key] = await resolve({ fs, dir, gitdir, ref: key }); } const symrefs = {}; - symrefs['HEAD'] = await resolve(fs, gitdir, 'HEAD', 2); + symrefs['HEAD'] = await resolve({ + fs, + dir, + gitdir, + ref: 'HEAD', + depth: 2, + }); const write = { capabilities: capabilities, refs: refs, symrefs: symrefs, }; return writeRefsAdResponse(write); + } else { + return []; + } } catch (err) { err.caller = 'git.uploadPack'; @@ -291,28 +332,41 @@ } } +/** + * Given a list of refs, this works out the missing commits and sends them over as a stream. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param refs List of refs we want. + * @param depth How deep we want to search commits for. + * @param haves List of oids we already have and can be excluded from the stream. + */ async function packObjects({ fs, + dir = '.', gitdir = '.git', refs, depth = undefined, haves = undefined, }: { fs: EncryptedFS; + dir: string; gitdir: string; refs: string[]; depth?: number; haves?: string[]; }): Promise { - const oids = new Set(); + const oids = new Set(); // List of oids for commits we wish to send. const shallows = new Set(); const unshallows = new Set(); - const acks: Ack[] = []; + const acks: Ack[] = []; // A list of the commits that were found but that the requester already has. - haves = haves ? haves : []; + haves = haves ? haves : []; // The list of commits the requester already has. const since = undefined; + // For each desired ref. for (const ref of refs) { - const commits = await log({ fs, gitdir, ref, depth, since }); - const oldshallows: string[] = []; + // Obtain a list of the relevant commits + const commits = await log({ fs, dir, gitdir, ref, depth, since }); + const oldshallows: string[] = []; // Never actually updated within this function. for (let i = 0; i < commits.length; i++) { const commit = commits[i]; if (haves.includes(commit.oid)) { @@ -334,18 +388,34 @@ } } } - const objects = await listObjects({ fs, gitdir, oids: Array.from(oids) }); + // Getting all of the Oids within the tree of the desired Oids. + const objects = await listObjects({ + fs, + dir, + gitdir, + oids: Array.from(oids), + }); const packstream = new PassThrough(); - await pack({ fs, gitdir, oids: [...objects], outputStream: packstream }); + // Packing, gzipping and returning a stream of all the desired data through packstream. + await pack({ fs, dir, gitdir, oids: [...objects], outputStream: packstream }); return { packstream, shallows, unshallows, acks }; } +/** + * Walks the git objects and returns a list of blobs, commits and trees. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param oids List of starting oids. + */ async function listObjects({ fs, + dir = '.', gitdir = '.git', oids, }: { fs: EncryptedFS; + dir: string; gitdir: string; oids: string[]; }): Promise> { @@ -358,7 +428,7 @@ // tell us which oids are Blobs and which are Trees.
And we // do not need to recurse through commit parents. async function walk(oid: string): Promise { - const gitObject = await readObject({ fs, gitdir, oid }); + const gitObject = await readObject({ fs, dir, gitdir, oid }); if (gitObject.type === 'commit') { commits.add(oid); const commit = commitFrom(Buffer.from(gitObject.object)); @@ -454,8 +524,19 @@ return _entries; } +/** + * Returns a commit log for a given ref. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param ref Ref we're getting the commit log for. + * @param depth How many commits to fetch. + * @param since Date to start from. + * @param signing Whether to include signing information. + */ async function log({ fs, + dir = '.', gitdir = '.git', ref = 'HEAD', depth, @@ -463,6 +544,7 @@ signing = false, }: { fs: EncryptedFS; + dir: string; gitdir: string; ref: string; depth?: number; @@ -475,8 +557,8 @@ // TODO: In the future, we may want to have an API where we return a // async iterator that emits commits. const commits: ReadCommitResult[] = []; - const oid = await resolve(fs, gitdir, ref); - const tips = [await logCommit(fs, gitdir, oid, signing)]; + const oid = await resolve({ fs, dir, gitdir, ref }); + const tips = [await logCommit({ fs, dir, gitdir, oid, signing })]; // eslint-disable-next-line while (true) { @@ -502,7 +584,13 @@ // Add the parents of this commit to the queue // Note: for the case of a commit with no parents, it will concat an empty array, having no net effect. for (const oid of commit.parent) { - const commitResult1 = await logCommit(fs, gitdir, oid, signing); + const commitResult1 = await logCommit({ + fs, + dir, + gitdir, + oid, + signing, + }); if (!tips.map((commit) => commit.oid).includes(commitResult1.oid)) { tips.push(commitResult1); } @@ -525,13 +613,20 @@ function compareAge(a: ReadCommitResult, b: ReadCommitResult): number { return a.commit.committer.timestamp - b.commit.committer.timestamp; } -async function logCommit( - fs: EncryptedFS, - gitdir: string, - oid: string, - signing: boolean, -): Promise { - const gitObject = await readObject({ fs, gitdir, oid }); +async function logCommit({ + fs, + dir = '.', + gitdir = '.git', + oid, + signing, +}: { + fs: EncryptedFS; + dir: string; + gitdir: string; + oid: string; + signing: boolean; +}): Promise { + const gitObject = await readObject({ fs, dir, gitdir, oid }); if (gitObject.type !== 'commit') { throw new gitErrors.ErrorGitUndefinedType( `Expected type to be commit, but instead found ${gitObject.type}`, @@ -734,12 +829,14 @@ function commitFrom(commit: string | Buffer): string { async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format?: 'parsed' | 'content'; @@ -747,12 +844,14 @@ }): Promise; async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format: 'deflated'; @@ -760,12 +859,14 @@ }): Promise; async function readObject({ fs, + dir, gitdir, oid, format, encoding, }: { fs: EncryptedFS; + dir: string; gitdir: string; oid: string; format: 'wrapped'; @@ -773,12 +874,14 @@ }): Promise; async function readObject({ fs, - gitdir, + dir = '.', + gitdir = '.git', oid, format = 'parsed', encoding, }: { fs: EncryptedFS; + dir: string;
gitdir: string; oid: string; format?: 'wrapped' | 'parsed' | 'deflated' | 'content'; @@ -787,7 +890,8 @@ const _format = format === 'parsed' ? 'content' : format; // Curry the current read method so that the packfile un-deltification // process can acquire external ref-deltas. - const getExternalRefDelta = (oid: string) => readObject({ fs, gitdir, oid }); + const getExternalRefDelta = (oid: string) => + readObject({ fs, dir, gitdir, oid }); let result; // Empty tree - hard-coded so we can use it as a shorthand. // Note: I think the canonical git implementation must do this too because @@ -1191,13 +1295,23 @@ }; } +/** + * Prepares the requested objects and sends all the required data through the output stream. + * @param fs Filesystem implementation + * @param dir Git working directory + * @param gitdir Git '.git' directory + * @param oids Desired Oids to be sent. + * @param outputStream Data output stream. + */ async function pack({ fs, + dir = '.', gitdir = '.git', oids, outputStream, }: { fs: EncryptedFS; + dir: string; gitdir: string; oids: string[]; outputStream: PassThrough; @@ -1250,7 +1364,7 @@ const paddedChunk = '0'.repeat(8 - unpaddedChunk.length) + unpaddedChunk; write(paddedChunk, 'hex'); for (const oid of oids) { - const { type, object } = await readObject({ fs, gitdir, oid }); + const { type, object } = await readObject({ fs, dir, gitdir, oid }); writeObject(object as Uint8Array, type); } // Write SHA1 checksum diff --git a/src/keys/KeyManager.ts b/src/keys/KeyManager.ts index dea504132..268dce9fe 100644 --- a/src/keys/KeyManager.ts +++ b/src/keys/KeyManager.ts @@ -12,7 +12,6 @@ import type { FileSystem } from '../types'; import type { NodeId } from '../nodes/types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; -import type { VaultKey } from '../vaults/types'; import path from 'path'; import { Buffer } from 'buffer'; import Logger from '@matrixai/logger'; @@ -33,35 +32,12 @@ interface KeyManager extends CreateDestroyStartStop {} new keysErrors.ErrorKeyManagerDestroyed(), ) class KeyManager { - public readonly keysPath: string; - public readonly rootPubPath: string; - public readonly rootKeyPath: string; - public readonly rootCertPath: string; - public readonly rootCertsPath: string; - public readonly dbKeyPath: string; - public readonly vaultKeyPath: string; - - protected fs: FileSystem; - protected logger: Logger; - protected rootKeyPairChange: RootKeyPairChange; - protected rootKeyPair: KeyPair; - protected recoveryCode: RecoveryCode | undefined; - protected _dbKey: Buffer; - protected _vaultKey: Buffer; - protected rootCert: Certificate; - protected workerManager?: PolykeyWorkerManagerInterface; - protected rootKeyPairBits: number; - protected rootCertDuration: number; - protected dbKeyBits: number; - protected vaultKeyBits: number; - static async createKeyManager({ keysPath, password, rootKeyPairBits = 4096, rootCertDuration = 31536000, dbKeyBits = 256, - vaultKeyBits = 256, rootKeyPairChange = async () => {}, fs = require('fs'), logger = new Logger(this.name), @@ -73,7 +49,6 @@ class KeyManager { rootKeyPairBits?: number; rootCertDuration?: number; dbKeyBits?: number; - vaultKeyBits?: number; rootKeyPairChange?: RootKeyPairChange; fs?: FileSystem; logger?: Logger; @@ -87,7 +62,6 @@ class KeyManager { rootCertDuration, rootKeyPairBits, dbKeyBits, - vaultKeyBits, rootKeyPairChange, fs, logger, @@ -101,12 +75,30 @@ class KeyManager
{ return keyManager; } + public readonly keysPath: string; + public readonly rootPubPath: string; + public readonly rootKeyPath: string; + public readonly rootCertPath: string; + public readonly rootCertsPath: string; + public readonly dbKeyPath: string; + + protected fs: FileSystem; + protected logger: Logger; + protected rootKeyPairChange: RootKeyPairChange; + protected rootKeyPair: KeyPair; + protected recoveryCode: RecoveryCode | undefined; + protected _dbKey: Buffer; + protected rootCert: Certificate; + protected workerManager?: PolykeyWorkerManagerInterface; + protected rootKeyPairBits: number; + protected rootCertDuration: number; + protected dbKeyBits: number; + constructor({ keysPath, rootKeyPairBits, rootCertDuration, dbKeyBits, - vaultKeyBits, rootKeyPairChange, fs, logger, @@ -115,7 +107,6 @@ class KeyManager { rootKeyPairBits: number; rootCertDuration: number; dbKeyBits: number; - vaultKeyBits: number; rootKeyPairChange: RootKeyPairChange; fs: FileSystem; logger: Logger; @@ -127,11 +118,9 @@ class KeyManager { this.rootCertPath = path.join(keysPath, 'root.crt'); this.rootCertsPath = path.join(keysPath, 'root_certs'); this.dbKeyPath = path.join(keysPath, 'db.key'); - this.vaultKeyPath = path.join(keysPath, 'vault.key'); this.rootKeyPairBits = rootKeyPairBits; this.rootCertDuration = rootCertDuration; this.dbKeyBits = dbKeyBits; - this.vaultKeyBits = vaultKeyBits; this.rootKeyPairChange = rootKeyPairChange; this.fs = fs; } @@ -182,7 +171,6 @@ class KeyManager { this.recoveryCode = recoveryCode; this.rootCert = rootCert; this._dbKey = await this.setupKey(this.dbKeyPath, this.dbKeyBits); - this._vaultKey = await this.setupKey(this.vaultKeyPath, this.vaultKeyBits); this.logger.info(`Started ${this.constructor.name}`); } @@ -205,11 +193,6 @@ class KeyManager { return this._dbKey; } - @ready(new keysErrors.ErrorKeyManagerNotRunning()) - get vaultKey(): VaultKey { - return this._vaultKey as VaultKey; - } - @ready(new keysErrors.ErrorKeyManagerNotRunning()) public getRootKeyPair(): KeyPair { return keysUtils.keyPairCopy(this.rootKeyPair); @@ -410,7 +393,6 @@ class KeyManager { ): Promise { this.logger.info('Renewing root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const recoveryCodeNew = keysUtils.generateRecoveryCode(); const rootKeyPair = await this.generateKeyPair(bits, recoveryCodeNew); const now = new Date(); @@ -445,7 +427,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.recoveryCode = recoveryCodeNew; @@ -476,7 +457,6 @@ class KeyManager { ): Promise { this.logger.info('Resetting root key pair'); const keysDbKeyPlain = await this.readKey(this.dbKeyPath); - const keysVaultKeyPlain = await this.readKey(this.vaultKeyPath); const recoveryCodeNew = keysUtils.generateRecoveryCode(); const rootKeyPair = await this.generateKeyPair(bits, recoveryCodeNew); const rootCert = keysUtils.generateCertificate( @@ -493,7 +473,6 @@ class KeyManager { this.writeRootKeyPair(rootKeyPair, password), this.writeRootCert(rootCert), this.writeKey(keysDbKeyPlain, this.dbKeyPath, rootKeyPair), - this.writeKey(keysVaultKeyPlain, this.vaultKeyPath, rootKeyPair), ]); this.rootKeyPair = rootKeyPair; this.recoveryCode = recoveryCodeNew; diff --git a/src/notifications/NotificationsManager.ts 
b/src/notifications/NotificationsManager.ts index 69766e41e..44974ae67 100644 --- a/src/notifications/NotificationsManager.ts +++ b/src/notifications/NotificationsManager.ts @@ -137,14 +137,14 @@ class NotificationsManager { this.notificationsDb = notificationsDb; this.notificationsMessagesDb = notificationsMessagesDb; - // Getting latest ID and creating ID generator FIXME, does this need to be a transaction? + // Getting latest ID and creating ID generator let latestId: NotificationId | undefined; const keyStream = this.notificationsMessagesDb.createKeyStream({ limit: 1, reverse: true, }); for await (const o of keyStream) { - latestId = IdInternal.fromBuffer(o); + latestId = IdInternal.fromBuffer(o as Buffer); } this.notificationIdGenerator = createNotificationIdGenerator(latestId); this.logger.info(`Started ${this.constructor.name}`); diff --git a/src/notifications/types.ts b/src/notifications/types.ts index 428000a3a..a9d9b6dd4 100644 --- a/src/notifications/types.ts +++ b/src/notifications/types.ts @@ -1,7 +1,7 @@ +import type { Id } from '@matrixai/id'; import type { Opaque } from '../types'; import type { NodeIdEncoded } from '../nodes/types'; -import type { VaultName, VaultActions } from '../vaults/types'; -import type { Id, IdString } from '../GenericIdTypes'; +import type { VaultName, VaultActions, VaultIdEncoded } from '../vaults/types'; type NotificationId = Opaque<'NotificationId', Id>; @@ -12,7 +12,7 @@ type GestaltInvite = { }; type VaultShare = { type: 'VaultShare'; - vaultId: IdString; + vaultId: VaultIdEncoded; vaultName: VaultName; actions: VaultActions; }; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts index ea1c11386..068ddd535 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.d.ts @@ -16,7 +16,6 @@ interface IAgentServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsGitInfoGet extends grpc.MethodDefinition { +interface IAgentServiceService_IVaultsGitInfoGet extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/VaultsGitInfoGet"; requestStream: false; responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } @@ -52,23 +51,14 @@ interface IAgentServiceService_IVaultsGitPackGet extends grpc.MethodDefinition

; responseDeserialize: grpc.deserialize; } -interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { +interface IAgentServiceService_IVaultsScan extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/VaultsScan"; requestStream: false; responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IAgentServiceService_IVaultsPermissionsCheck extends grpc.MethodDefinition { - path: "/polykey.v1.AgentService/VaultsPermissionsCheck"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; } interface IAgentServiceService_INodesClosestLocalNodesGet extends grpc.MethodDefinition { path: "/polykey.v1.AgentService/NodesClosestLocalNodesGet"; @@ -129,10 +119,9 @@ export const AgentServiceService: IAgentServiceService; export interface IAgentServiceServer extends grpc.UntypedServiceImplementation { echo: grpc.handleUnaryCall; - vaultsGitInfoGet: grpc.handleServerStreamingCall; + vaultsGitInfoGet: grpc.handleServerStreamingCall; vaultsGitPackGet: grpc.handleBidiStreamingCall; - vaultsScan: grpc.handleServerStreamingCall; - vaultsPermissionsCheck: grpc.handleUnaryCall; + vaultsScan: grpc.handleServerStreamingCall; nodesClosestLocalNodesGet: grpc.handleUnaryCall; nodesClaimsGet: grpc.handleUnaryCall; nodesChainDataGet: grpc.handleUnaryCall; @@ -145,16 +134,13 @@ export interface IAgentServiceClient { echo(request: polykey_v1_utils_utils_pb.EchoMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; - vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, options?: Partial): grpc.ClientReadableStream; + vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsGitPackGet(): grpc.ClientDuplexStream; vaultsGitPackGet(options: Partial): grpc.ClientDuplexStream; vaultsGitPackGet(metadata: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermissionsCheck(request: 
polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; @@ -180,15 +166,12 @@ export class AgentServiceClient extends grpc.Client implements IAgentServiceClie public echo(request: polykey_v1_utils_utils_pb.EchoMessage, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; public echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; public echo(request: polykey_v1_utils_utils_pb.EchoMessage, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.EchoMessage) => void): grpc.ClientUnaryCall; - public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; - public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, options?: Partial): grpc.ClientReadableStream; + public vaultsGitInfoGet(request: polykey_v1_vaults_vaults_pb.InfoRequest, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsGitPackGet(options?: Partial): grpc.ClientDuplexStream; public vaultsGitPackGet(metadata?: grpc.Metadata, options?: Partial): grpc.ClientDuplexStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermissionsCheck(request: polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; - public vaultsPermissionsCheck(request: 
polykey_v1_vaults_vaults_pb.NodePermission, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.NodePermissionAllowed) => void): grpc.ClientUnaryCall; + public vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_utils_utils_pb.EmptyMessage, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; public nodesClosestLocalNodesGet(request: polykey_v1_nodes_nodes_pb.Node, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_nodes_nodes_pb.NodeTable) => void): grpc.ClientUnaryCall; diff --git a/src/proto/js/polykey/v1/agent_service_grpc_pb.js b/src/proto/js/polykey/v1/agent_service_grpc_pb.js index 782ed2f8e..387b87b83 100644 --- a/src/proto/js/polykey/v1/agent_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/agent_service_grpc_pb.js @@ -117,26 +117,26 @@ function deserialize_polykey_v1_utils_EmptyMessage(buffer_arg) { return polykey_v1_utils_utils_pb.EmptyMessage.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_NodePermission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermission)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermission'); +function serialize_polykey_v1_vaults_InfoRequest(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.InfoRequest)) { + throw new Error('Expected argument of type polykey.v1.vaults.InfoRequest'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_NodePermission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermission.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_InfoRequest(buffer_arg) { + return polykey_v1_vaults_vaults_pb.InfoRequest.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_NodePermissionAllowed(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.NodePermissionAllowed)) { - throw new Error('Expected argument of type polykey.v1.vaults.NodePermissionAllowed'); +function serialize_polykey_v1_vaults_List(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.List)) { + throw new Error('Expected argument of type polykey.v1.vaults.List'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_NodePermissionAllowed(buffer_arg) { - return polykey_v1_vaults_vaults_pb.NodePermissionAllowed.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_List(buffer_arg) { + return polykey_v1_vaults_vaults_pb.List.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_polykey_v1_vaults_PackChunk(arg) { @@ -150,17 +150,6 @@ function deserialize_polykey_v1_vaults_PackChunk(buffer_arg) { return polykey_v1_vaults_vaults_pb.PackChunk.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Vault(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Vault)) { - throw new Error('Expected argument of type 
polykey.v1.vaults.Vault'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Vault(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Vault.deserializeBinary(new Uint8Array(buffer_arg)); -} - var AgentServiceService = exports.AgentServiceService = { // Echo @@ -180,10 +169,10 @@ vaultsGitInfoGet: { path: '/polykey.v1.AgentService/VaultsGitInfoGet', requestStream: false, responseStream: true, - requestType: polykey_v1_vaults_vaults_pb.Vault, + requestType: polykey_v1_vaults_vaults_pb.InfoRequest, responseType: polykey_v1_vaults_vaults_pb.PackChunk, - requestSerialize: serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, + requestSerialize: serialize_polykey_v1_vaults_InfoRequest, + requestDeserialize: deserialize_polykey_v1_vaults_InfoRequest, responseSerialize: serialize_polykey_v1_vaults_PackChunk, responseDeserialize: deserialize_polykey_v1_vaults_PackChunk, }, @@ -202,23 +191,12 @@ vaultsGitInfoGet: { path: '/polykey.v1.AgentService/VaultsScan', requestStream: false, responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.Vault, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_Vault, - responseDeserialize: deserialize_polykey_v1_vaults_Vault, - }, - vaultsPermissionsCheck: { - path: '/polykey.v1.AgentService/VaultsPermissionsCheck', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.NodePermission, - responseType: polykey_v1_vaults_vaults_pb.NodePermissionAllowed, - requestSerialize: serialize_polykey_v1_vaults_NodePermission, - requestDeserialize: deserialize_polykey_v1_vaults_NodePermission, - responseSerialize: serialize_polykey_v1_vaults_NodePermissionAllowed, - responseDeserialize: deserialize_polykey_v1_vaults_NodePermissionAllowed, + requestType: polykey_v1_utils_utils_pb.EmptyMessage, + responseType: polykey_v1_vaults_vaults_pb.List, + requestSerialize: serialize_polykey_v1_utils_EmptyMessage, + requestDeserialize: deserialize_polykey_v1_utils_EmptyMessage, + responseSerialize: serialize_polykey_v1_vaults_List, + responseDeserialize: deserialize_polykey_v1_vaults_List, }, // Nodes nodesClosestLocalNodesGet: { diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts index d558b7140..42258e4bc 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.d.ts @@ -43,21 +43,21 @@ interface IClientServiceService extends grpc.ServiceDefinition; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsScan"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsList extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsList"; requestStream: false; @@ -328,15 +319,6 @@ interface IClientServiceService_IVaultsSecretsMkdir extends grpc.MethodDefinitio responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsSecretsStat extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsSecretsStat"; - 
requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} interface IClientServiceService_IVaultsSecretsDelete extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsSecretsDelete"; requestStream: false; @@ -391,32 +373,14 @@ interface IClientServiceService_IVaultsSecretsNewDir extends grpc.MethodDefiniti responseSerialize: grpc.serialize; responseDeserialize: grpc.deserialize; } -interface IClientServiceService_IVaultsPermissionsSet extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsSet"; +interface IClientServiceService_IvaultsSecretsStat extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/vaultsSecretsStat"; requestStream: false; responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_IVaultsPermissionsUnset extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissionsUnset"; - requestStream: false; - responseStream: false; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; -} -interface IClientServiceService_IVaultsPermissions extends grpc.MethodDefinition { - path: "/polykey.v1.ClientService/VaultsPermissions"; - requestStream: false; - responseStream: true; - requestSerialize: grpc.serialize; - requestDeserialize: grpc.deserialize; - responseSerialize: grpc.serialize; - responseDeserialize: grpc.deserialize; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; } interface IClientServiceService_IVaultsVersion extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/VaultsVersion"; @@ -436,6 +400,42 @@ interface IClientServiceService_IVaultsLog extends grpc.MethodDefinition; responseDeserialize: grpc.deserialize; } +interface IClientServiceService_IVaultsScan extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsScan"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionSet extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionSet"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionUnset extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionUnset"; + requestStream: false; + responseStream: false; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} +interface IClientServiceService_IVaultsPermissionGet extends grpc.MethodDefinition { + path: "/polykey.v1.ClientService/VaultsPermissionGet"; + requestStream: false; + responseStream: true; + requestSerialize: grpc.serialize; + requestDeserialize: grpc.deserialize; + responseSerialize: grpc.serialize; + responseDeserialize: grpc.deserialize; +} interface 
IClientServiceService_IIdentitiesAuthenticate extends grpc.MethodDefinition { path: "/polykey.v1.ClientService/IdentitiesAuthenticate"; requestStream: false; @@ -689,21 +689,21 @@ export interface IClientServiceServer extends grpc.UntypedServiceImplementation vaultsDelete: grpc.handleUnaryCall; vaultsPull: grpc.handleUnaryCall; vaultsClone: grpc.handleUnaryCall; - vaultsScan: grpc.handleServerStreamingCall; vaultsSecretsList: grpc.handleServerStreamingCall; vaultsSecretsMkdir: grpc.handleUnaryCall; - vaultsSecretsStat: grpc.handleUnaryCall; vaultsSecretsDelete: grpc.handleUnaryCall; vaultsSecretsEdit: grpc.handleUnaryCall; vaultsSecretsGet: grpc.handleUnaryCall; vaultsSecretsRename: grpc.handleUnaryCall; vaultsSecretsNew: grpc.handleUnaryCall; vaultsSecretsNewDir: grpc.handleUnaryCall; - vaultsPermissionsSet: grpc.handleUnaryCall; - vaultsPermissionsUnset: grpc.handleUnaryCall; - vaultsPermissions: grpc.handleServerStreamingCall; + vaultsSecretsStat: grpc.handleUnaryCall; vaultsVersion: grpc.handleUnaryCall; vaultsLog: grpc.handleServerStreamingCall; + vaultsScan: grpc.handleServerStreamingCall; + vaultsPermissionSet: grpc.handleUnaryCall; + vaultsPermissionUnset: grpc.handleUnaryCall; + vaultsPermissionGet: grpc.handleServerStreamingCall; identitiesAuthenticate: grpc.handleServerStreamingCall; identitiesAuthenticatedGet: grpc.handleServerStreamingCall; identitiesTokenPut: grpc.handleUnaryCall; @@ -802,16 +802,11 @@ export interface IClientServiceClient { vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -830,19 +825,24 @@ export interface IClientServiceClient { vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: 
grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; + vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; @@ -987,16 +987,11 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsClone(request: 
polykey_v1_vaults_vaults_pb.Clone, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsClone(request: polykey_v1_vaults_vaults_pb.Clone, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; - public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsList(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsMkdir(request: polykey_v1_vaults_vaults_pb.Mkdir, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; - public vaultsSecretsStat(request: polykey_v1_vaults_vaults_pb.Vault, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsDelete(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; @@ -1015,19 +1010,24 @@ export class ClientServiceClient extends grpc.Client implements IClientServiceCl public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: 
polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; public vaultsSecretsNewDir(request: polykey_v1_secrets_secrets_pb.Directory, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsSet(request: polykey_v1_vaults_vaults_pb.PermSet, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissionsUnset(request: polykey_v1_vaults_vaults_pb.PermUnset, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, options?: Partial): grpc.ClientReadableStream; - public vaultsPermissions(request: polykey_v1_vaults_vaults_pb.PermGet, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; + public vaultsSecretsStat(request: polykey_v1_secrets_secrets_pb.Secret, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_secrets_secrets_pb.Stat) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsVersion(request: polykey_v1_vaults_vaults_pb.Version, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_vaults_vaults_pb.VersionResult) => void): grpc.ClientUnaryCall; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, options?: Partial): grpc.ClientReadableStream; public vaultsLog(request: polykey_v1_vaults_vaults_pb.Log, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: 
polykey_v1_nodes_nodes_pb.Node, options?: Partial): grpc.ClientReadableStream; + public vaultsScan(request: polykey_v1_nodes_nodes_pb.Node, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionSet(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionUnset(request: polykey_v1_vaults_vaults_pb.Permissions, metadata: grpc.Metadata, options: Partial, callback: (error: grpc.ServiceError | null, response: polykey_v1_utils_utils_pb.StatusMessage) => void): grpc.ClientUnaryCall; + public vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, options?: Partial): grpc.ClientReadableStream; + public vaultsPermissionGet(request: polykey_v1_vaults_vaults_pb.Vault, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticate(request: polykey_v1_identities_identities_pb.Provider, metadata?: grpc.Metadata, options?: Partial): grpc.ClientReadableStream; public identitiesAuthenticatedGet(request: polykey_v1_identities_identities_pb.OptionalProvider, options?: Partial): grpc.ClientReadableStream; diff --git a/src/proto/js/polykey/v1/client_service_grpc_pb.js b/src/proto/js/polykey/v1/client_service_grpc_pb.js index 59bfc3d72..6b6e4bf1b 100644 --- a/src/proto/js/polykey/v1/client_service_grpc_pb.js +++ b/src/proto/js/polykey/v1/client_service_grpc_pb.js @@ -300,6 +300,17 @@ function deserialize_polykey_v1_secrets_Secret(buffer_arg) { return polykey_v1_secrets_secrets_pb.Secret.deserializeBinary(new Uint8Array(buffer_arg)); } +function serialize_polykey_v1_secrets_Stat(arg) { + if (!(arg instanceof polykey_v1_secrets_secrets_pb.Stat)) { + throw new Error('Expected argument of type polykey.v1.secrets.Stat'); + } + return Buffer.from(arg.serializeBinary()); +} + +function deserialize_polykey_v1_secrets_Stat(buffer_arg) { + return polykey_v1_secrets_secrets_pb.Stat.deserializeBinary(new Uint8Array(buffer_arg)); +} + function serialize_polykey_v1_sessions_Password(arg) { if (!(arg instanceof polykey_v1_sessions_sessions_pb.Password)) { throw new Error('Expected argument of type polykey.v1.sessions.Password'); @@ -388,48 +399,15 @@ function deserialize_polykey_v1_vaults_Mkdir(buffer_arg) { return polykey_v1_vaults_vaults_pb.Mkdir.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_PermGet(arg) { - if (!(arg 
instanceof polykey_v1_vaults_vaults_pb.PermGet)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermGet'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermGet(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermGet.deserializeBinary(new Uint8Array(buffer_arg)); -} - -function serialize_polykey_v1_vaults_PermSet(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermSet)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermSet'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermSet(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermSet.deserializeBinary(new Uint8Array(buffer_arg)); -} - -function serialize_polykey_v1_vaults_PermUnset(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.PermUnset)) { - throw new Error('Expected argument of type polykey.v1.vaults.PermUnset'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_PermUnset(buffer_arg) { - return polykey_v1_vaults_vaults_pb.PermUnset.deserializeBinary(new Uint8Array(buffer_arg)); -} - -function serialize_polykey_v1_vaults_Permission(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Permission)) { - throw new Error('Expected argument of type polykey.v1.vaults.Permission'); +function serialize_polykey_v1_vaults_Permissions(arg) { + if (!(arg instanceof polykey_v1_vaults_vaults_pb.Permissions)) { + throw new Error('Expected argument of type polykey.v1.vaults.Permissions'); } return Buffer.from(arg.serializeBinary()); } -function deserialize_polykey_v1_vaults_Permission(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Permission.deserializeBinary(new Uint8Array(buffer_arg)); +function deserialize_polykey_v1_vaults_Permissions(buffer_arg) { + return polykey_v1_vaults_vaults_pb.Permissions.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_polykey_v1_vaults_Pull(arg) { @@ -454,17 +432,6 @@ function deserialize_polykey_v1_vaults_Rename(buffer_arg) { return polykey_v1_vaults_vaults_pb.Rename.deserializeBinary(new Uint8Array(buffer_arg)); } -function serialize_polykey_v1_vaults_Stat(arg) { - if (!(arg instanceof polykey_v1_vaults_vaults_pb.Stat)) { - throw new Error('Expected argument of type polykey.v1.vaults.Stat'); - } - return Buffer.from(arg.serializeBinary()); -} - -function deserialize_polykey_v1_vaults_Stat(buffer_arg) { - return polykey_v1_vaults_vaults_pb.Stat.deserializeBinary(new Uint8Array(buffer_arg)); -} - function serialize_polykey_v1_vaults_Vault(arg) { if (!(arg instanceof polykey_v1_vaults_vaults_pb.Vault)) { throw new Error('Expected argument of type polykey.v1.vaults.Vault'); @@ -768,17 +735,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsScan: { - path: '/polykey.v1.ClientService/VaultsScan', - requestStream: false, - responseStream: true, - requestType: polykey_v1_nodes_nodes_pb.Node, - responseType: polykey_v1_vaults_vaults_pb.List, - requestSerialize: serialize_polykey_v1_nodes_Node, - requestDeserialize: deserialize_polykey_v1_nodes_Node, - responseSerialize: serialize_polykey_v1_vaults_List, - responseDeserialize: deserialize_polykey_v1_vaults_List, - }, vaultsSecretsList: { path: '/polykey.v1.ClientService/VaultsSecretsList', requestStream: false, @@ -801,17 +757,6 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: 
deserialize_polykey_v1_utils_StatusMessage, }, - vaultsSecretsStat: { - path: '/polykey.v1.ClientService/VaultsSecretsStat', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.Vault, - responseType: polykey_v1_vaults_vaults_pb.Stat, - requestSerialize: serialize_polykey_v1_vaults_Vault, - requestDeserialize: deserialize_polykey_v1_vaults_Vault, - responseSerialize: serialize_polykey_v1_vaults_Stat, - responseDeserialize: deserialize_polykey_v1_vaults_Stat, - }, vaultsSecretsDelete: { path: '/polykey.v1.ClientService/VaultsSecretsDelete', requestStream: false, @@ -878,38 +823,16 @@ vaultsList: { responseSerialize: serialize_polykey_v1_utils_StatusMessage, responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, }, - vaultsPermissionsSet: { - path: '/polykey.v1.ClientService/VaultsPermissionsSet', - requestStream: false, - responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.PermSet, - responseType: polykey_v1_utils_utils_pb.StatusMessage, - requestSerialize: serialize_polykey_v1_vaults_PermSet, - requestDeserialize: deserialize_polykey_v1_vaults_PermSet, - responseSerialize: serialize_polykey_v1_utils_StatusMessage, - responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, - }, - vaultsPermissionsUnset: { - path: '/polykey.v1.ClientService/VaultsPermissionsUnset', + vaultsSecretsStat: { + path: '/polykey.v1.ClientService/VaultsSecretsStat', requestStream: false, responseStream: false, - requestType: polykey_v1_vaults_vaults_pb.PermUnset, - responseType: polykey_v1_utils_utils_pb.StatusMessage, - requestSerialize: serialize_polykey_v1_vaults_PermUnset, - requestDeserialize: deserialize_polykey_v1_vaults_PermUnset, - responseSerialize: serialize_polykey_v1_utils_StatusMessage, - responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, - }, - vaultsPermissions: { - path: '/polykey.v1.ClientService/VaultsPermissions', - requestStream: false, - responseStream: true, - requestType: polykey_v1_vaults_vaults_pb.PermGet, - responseType: polykey_v1_vaults_vaults_pb.Permission, - requestSerialize: serialize_polykey_v1_vaults_PermGet, - requestDeserialize: deserialize_polykey_v1_vaults_PermGet, - responseSerialize: serialize_polykey_v1_vaults_Permission, - responseDeserialize: deserialize_polykey_v1_vaults_Permission, + requestType: polykey_v1_secrets_secrets_pb.Secret, + responseType: polykey_v1_secrets_secrets_pb.Stat, + requestSerialize: serialize_polykey_v1_secrets_Secret, + requestDeserialize: deserialize_polykey_v1_secrets_Secret, + responseSerialize: serialize_polykey_v1_secrets_Stat, + responseDeserialize: deserialize_polykey_v1_secrets_Stat, }, vaultsVersion: { path: '/polykey.v1.ClientService/VaultsVersion', @@ -933,6 +856,50 @@ vaultsList: { responseSerialize: serialize_polykey_v1_vaults_LogEntry, responseDeserialize: deserialize_polykey_v1_vaults_LogEntry, }, + vaultsScan: { + path: '/polykey.v1.ClientService/VaultsScan', + requestStream: false, + responseStream: true, + requestType: polykey_v1_nodes_nodes_pb.Node, + responseType: polykey_v1_vaults_vaults_pb.List, + requestSerialize: serialize_polykey_v1_nodes_Node, + requestDeserialize: deserialize_polykey_v1_nodes_Node, + responseSerialize: serialize_polykey_v1_vaults_List, + responseDeserialize: deserialize_polykey_v1_vaults_List, + }, + vaultsPermissionSet: { + path: '/polykey.v1.ClientService/VaultsPermissionSet', + requestStream: false, + responseStream: false, + requestType: polykey_v1_vaults_vaults_pb.Permissions, + responseType:
polykey_v1_utils_utils_pb.StatusMessage, + requestSerialize: serialize_polykey_v1_vaults_Permissions, + requestDeserialize: deserialize_polykey_v1_vaults_Permissions, + responseSerialize: serialize_polykey_v1_utils_StatusMessage, + responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, + }, + vaultsPermissionUnset: { + path: '/polykey.v1.ClientService/VaultsPermissionUnset', + requestStream: false, + responseStream: false, + requestType: polykey_v1_vaults_vaults_pb.Permissions, + responseType: polykey_v1_utils_utils_pb.StatusMessage, + requestSerialize: serialize_polykey_v1_vaults_Permissions, + requestDeserialize: deserialize_polykey_v1_vaults_Permissions, + responseSerialize: serialize_polykey_v1_utils_StatusMessage, + responseDeserialize: deserialize_polykey_v1_utils_StatusMessage, + }, + vaultsPermissionGet: { + path: '/polykey.v1.ClientService/VaultsPermissionGet', + requestStream: false, + responseStream: true, + requestType: polykey_v1_vaults_vaults_pb.Vault, + responseType: polykey_v1_vaults_vaults_pb.Permissions, + requestSerialize: serialize_polykey_v1_vaults_Vault, + requestDeserialize: deserialize_polykey_v1_vaults_Vault, + responseSerialize: serialize_polykey_v1_vaults_Permissions, + responseDeserialize: deserialize_polykey_v1_vaults_Permissions, + }, // Identities identitiesAuthenticate: { path: '/polykey.v1.ClientService/IdentitiesAuthenticate', diff --git a/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts b/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts index e4f06b338..60133c0c0 100644 --- a/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts +++ b/src/proto/js/polykey/v1/permissions/permissions_pb.d.ts @@ -30,6 +30,34 @@ export namespace Actions { } } +export class NodeActions extends jspb.Message { + + hasNode(): boolean; + clearNode(): void; + getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; + setNode(value?: polykey_v1_nodes_nodes_pb.Node): NodeActions; + clearActionsList(): void; + getActionsList(): Array; + setActionsList(value: Array): NodeActions; + addActions(value: string, index?: number): string; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): NodeActions.AsObject; + static toObject(includeInstance: boolean, msg: NodeActions): NodeActions.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: NodeActions, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): NodeActions; + static deserializeBinaryFromReader(message: NodeActions, reader: jspb.BinaryReader): NodeActions; +} + +export namespace NodeActions { + export type AsObject = { + node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + actionsList: Array, + } +} + export class ActionSet extends jspb.Message { hasNode(): boolean; diff --git a/src/proto/js/polykey/v1/permissions/permissions_pb.js b/src/proto/js/polykey/v1/permissions/permissions_pb.js index 29dd4ba20..53e129985 100644 --- a/src/proto/js/polykey/v1/permissions/permissions_pb.js +++ b/src/proto/js/polykey/v1/permissions/permissions_pb.js @@ -21,6 +21,7 @@ goog.object.extend(proto, polykey_v1_identities_identities_pb); goog.exportSymbol('proto.polykey.v1.permissions.ActionSet', null, global); goog.exportSymbol('proto.polykey.v1.permissions.ActionSet.NodeOrProviderCase', null, global); goog.exportSymbol('proto.polykey.v1.permissions.Actions', null, global); +goog.exportSymbol('proto.polykey.v1.permissions.NodeActions', null, global); 
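// Usage sketch (not part of the generated output) for the NodeActions message
// introduced above: it pairs a polykey.v1.nodes.Node with a repeated list of
// action strings. The accessors used below (setNode, addActions,
// getActionsList, serializeBinary, deserializeBinary) are exactly those
// declared in permissions_pb.d.ts in this diff; the require paths, the
// Node.setNodeId call, and the action values are assumptions, since nodes_pb
// and the proto's action vocabulary are not shown here.
const permissionsPb = require('../../../polykey/v1/permissions/permissions_pb.js');
const nodesPb = require('../../../polykey/v1/nodes/nodes_pb.js');

const node = new nodesPb.Node();
node.setNodeId('v...'); // hypothetical node id value
const nodeActions = new permissionsPb.NodeActions();
nodeActions.setNode(node);
nodeActions.addActions('scan'); // example action names; the message stores opaque strings
nodeActions.addActions('pull');
const bytes = nodeActions.serializeBinary(); // Uint8Array in protobuf wire format
const decoded = permissionsPb.NodeActions.deserializeBinary(bytes);
console.log(decoded.getActionsList()); // ['scan', 'pull']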
/** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -42,6 +43,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.polykey.v1.permissions.Actions.displayName = 'proto.polykey.v1.permissions.Actions'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. + * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.polykey.v1.permissions.NodeActions = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.permissions.NodeActions.repeatedFields_, null); +}; +goog.inherits(proto.polykey.v1.permissions.NodeActions, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.polykey.v1.permissions.NodeActions.displayName = 'proto.polykey.v1.permissions.NodeActions'; +} /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -220,6 +242,213 @@ proto.polykey.v1.permissions.Actions.prototype.clearActionList = function() { +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.polykey.v1.permissions.NodeActions.repeatedFields_ = [2]; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.permissions.NodeActions.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.permissions.NodeActions.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.permissions.NodeActions} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.permissions.NodeActions.toObject = function(includeInstance, msg) { + var f, obj = { + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + actionsList: (f = jspb.Message.getRepeatedField(msg, 2)) == null ? undefined : f + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. 
+ * @return {!proto.polykey.v1.permissions.NodeActions} + */ +proto.polykey.v1.permissions.NodeActions.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.permissions.NodeActions; + return proto.polykey.v1.permissions.NodeActions.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.permissions.NodeActions} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.permissions.NodeActions} + */ +proto.polykey.v1.permissions.NodeActions.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new polykey_v1_nodes_nodes_pb.Node; + reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); + msg.setNode(value); + break; + case 2: + var value = /** @type {string} */ (reader.readString()); + msg.addActions(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.polykey.v1.permissions.NodeActions.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.permissions.NodeActions.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.polykey.v1.permissions.NodeActions} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.permissions.NodeActions.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getNode(); + if (f != null) { + writer.writeMessage( + 1, + f, + polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter + ); + } + f = message.getActionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 2, + f + ); + } +}; + + +/** + * optional polykey.v1.nodes.Node node = 1; + * @return {?proto.polykey.v1.nodes.Node} + */ +proto.polykey.v1.permissions.NodeActions.prototype.getNode = function() { + return /** @type{?proto.polykey.v1.nodes.Node} */ ( + jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 1)); +}; + + +/** + * @param {?proto.polykey.v1.nodes.Node|undefined} value + * @return {!proto.polykey.v1.permissions.NodeActions} returns this +*/ +proto.polykey.v1.permissions.NodeActions.prototype.setNode = function(value) { + return jspb.Message.setWrapperField(this, 1, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.clearNode = function() { + return this.setNode(undefined); +}; + + +/** + * Returns whether this field is set. 
+ * @return {boolean} + */ +proto.polykey.v1.permissions.NodeActions.prototype.hasNode = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * repeated string actions = 2; + * @return {!Array} + */ +proto.polykey.v1.permissions.NodeActions.prototype.getActionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 2)); +}; + + +/** + * @param {!Array} value + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.setActionsList = function(value) { + return jspb.Message.setField(this, 2, value || []); +}; + + +/** + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.addActions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 2, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.polykey.v1.permissions.NodeActions} returns this + */ +proto.polykey.v1.permissions.NodeActions.prototype.clearActionsList = function() { + return this.setActionsList([]); +}; + + + /** * Oneof group definitions for this message. Each group defines the field * numbers belonging to that group. When of these fields' value is set, all diff --git a/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts b/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts index 4fb4ca872..1e9d951d8 100644 --- a/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts +++ b/src/proto/js/polykey/v1/secrets/secrets_pb.d.ts @@ -89,3 +89,23 @@ export namespace Directory { secretDirectory: string, } } + +export class Stat extends jspb.Message { + getJson(): string; + setJson(value: string): Stat; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): Stat.AsObject; + static toObject(includeInstance: boolean, msg: Stat): Stat.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: Stat, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): Stat; + static deserializeBinaryFromReader(message: Stat, reader: jspb.BinaryReader): Stat; +} + +export namespace Stat { + export type AsObject = { + json: string, + } +} diff --git a/src/proto/js/polykey/v1/secrets/secrets_pb.js b/src/proto/js/polykey/v1/secrets/secrets_pb.js index 58fec23fc..5008028d8 100644 --- a/src/proto/js/polykey/v1/secrets/secrets_pb.js +++ b/src/proto/js/polykey/v1/secrets/secrets_pb.js @@ -19,6 +19,7 @@ goog.object.extend(proto, polykey_v1_vaults_vaults_pb); goog.exportSymbol('proto.polykey.v1.secrets.Directory', null, global); goog.exportSymbol('proto.polykey.v1.secrets.Rename', null, global); goog.exportSymbol('proto.polykey.v1.secrets.Secret', null, global); +goog.exportSymbol('proto.polykey.v1.secrets.Stat', null, global); /** * Generated by JsPbCodeGenerator. * @param {Array=} opt_data Optional initial data array, typically from a @@ -82,6 +83,27 @@ if (goog.DEBUG && !COMPILED) { */ proto.polykey.v1.secrets.Directory.displayName = 'proto.polykey.v1.secrets.Directory'; } +/** + * Generated by JsPbCodeGenerator. + * @param {Array=} opt_data Optional initial data array, typically from a + * server response, or constructed directly in Javascript. The array is used + * in place and becomes part of the constructed object. It is not cloned. 
+ * If no data is provided, the constructed object will be empty, but still + * valid. + * @extends {jspb.Message} + * @constructor + */ +proto.polykey.v1.secrets.Stat = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, null, null); +}; +goog.inherits(proto.polykey.v1.secrets.Stat, jspb.Message); +if (goog.DEBUG && !COMPILED) { + /** + * @public + * @override + */ + proto.polykey.v1.secrets.Stat.displayName = 'proto.polykey.v1.secrets.Stat'; +} @@ -679,4 +701,134 @@ proto.polykey.v1.secrets.Directory.prototype.setSecretDirectory = function(value }; + + + +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. + * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.secrets.Stat.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.secrets.Stat.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.secrets.Stat} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.secrets.Stat.toObject = function(includeInstance, msg) { + var f, obj = { + json: jspb.Message.getFieldWithDefault(msg, 1, "") + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.polykey.v1.secrets.Stat} + */ +proto.polykey.v1.secrets.Stat.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.secrets.Stat; + return proto.polykey.v1.secrets.Stat.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.secrets.Stat} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.secrets.Stat} + */ +proto.polykey.v1.secrets.Stat.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = /** @type {string} */ (reader.readString()); + msg.setJson(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). 
+ * @return {!Uint8Array} + */ +proto.polykey.v1.secrets.Stat.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.secrets.Stat.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. + * @param {!proto.polykey.v1.secrets.Stat} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.secrets.Stat.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getJson(); + if (f.length > 0) { + writer.writeString( + 1, + f + ); + } +}; + + +/** + * optional string json = 1; + * @return {string} + */ +proto.polykey.v1.secrets.Stat.prototype.getJson = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); +}; + + +/** + * @param {string} value + * @return {!proto.polykey.v1.secrets.Stat} returns this + */ +proto.polykey.v1.secrets.Stat.prototype.setJson = function(value) { + return jspb.Message.setProto3StringField(this, 1, value); +}; + + goog.object.extend(exports, proto.polykey.v1.secrets); diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts index 072887bfe..63e148525 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.d.ts @@ -6,6 +6,7 @@ import * as jspb from "google-protobuf"; import * as polykey_v1_nodes_nodes_pb from "../../../polykey/v1/nodes/nodes_pb"; +import * as google_protobuf_timestamp_pb from "google-protobuf/google/protobuf/timestamp_pb"; export class Vault extends jspb.Message { getNameOrId(): string; @@ -32,6 +33,10 @@ export class List extends jspb.Message { setVaultName(value: string): List; getVaultId(): string; setVaultId(value: string): List; + clearVaultPermissionsList(): void; + getVaultPermissionsList(): Array; + setVaultPermissionsList(value: Array): List; + addVaultPermissions(value: string, index?: number): string; serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): List.AsObject; @@ -47,6 +52,7 @@ export namespace List { export type AsObject = { vaultName: string, vaultId: string, + vaultPermissionsList: Array, } } @@ -117,6 +123,11 @@ export class Pull extends jspb.Message { getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; setNode(value?: polykey_v1_nodes_nodes_pb.Node): Pull; + hasPullVault(): boolean; + clearPullVault(): void; + getPullVault(): Vault | undefined; + setPullVault(value?: Vault): Pull; + serializeBinary(): Uint8Array; toObject(includeInstance?: boolean): Pull.AsObject; static toObject(includeInstance: boolean, msg: Pull): Pull.AsObject; @@ -131,6 +142,7 @@ export namespace Pull { export type AsObject = { vault?: Vault.AsObject, node?: polykey_v1_nodes_nodes_pb.Node.AsObject, + pullVault?: Vault.AsObject, } } @@ -183,113 +195,37 @@ export namespace Stat { } } -export class PermSet extends jspb.Message { +export class Permissions extends jspb.Message { hasVault(): boolean; clearVault(): void; getVault(): Vault | undefined; - setVault(value?: Vault): PermSet; + setVault(value?: Vault): Permissions; hasNode(): boolean; clearNode(): void; getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermSet; + setNode(value?: polykey_v1_nodes_nodes_pb.Node): Permissions; + clearVaultPermissionsList(): void; + getVaultPermissionsList(): Array; + 
setVaultPermissionsList(value: Array): Permissions; + addVaultPermissions(value: string, index?: number): string; serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermSet.AsObject; - static toObject(includeInstance: boolean, msg: PermSet): PermSet.AsObject; + toObject(includeInstance?: boolean): Permissions.AsObject; + static toObject(includeInstance: boolean, msg: Permissions): Permissions.AsObject; static extensions: {[key: number]: jspb.ExtensionFieldInfo}; static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermSet, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermSet; - static deserializeBinaryFromReader(message: PermSet, reader: jspb.BinaryReader): PermSet; + static serializeBinaryToWriter(message: Permissions, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): Permissions; + static deserializeBinaryFromReader(message: Permissions, reader: jspb.BinaryReader): Permissions; } -export namespace PermSet { +export namespace Permissions { export type AsObject = { vault?: Vault.AsObject, node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - } -} - -export class PermUnset extends jspb.Message { - - hasVault(): boolean; - clearVault(): void; - getVault(): Vault | undefined; - setVault(value?: Vault): PermUnset; - - hasNode(): boolean; - clearNode(): void; - getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermUnset; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermUnset.AsObject; - static toObject(includeInstance: boolean, msg: PermUnset): PermUnset.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermUnset, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermUnset; - static deserializeBinaryFromReader(message: PermUnset, reader: jspb.BinaryReader): PermUnset; -} - -export namespace PermUnset { - export type AsObject = { - vault?: Vault.AsObject, - node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - } -} - -export class PermGet extends jspb.Message { - - hasVault(): boolean; - clearVault(): void; - getVault(): Vault | undefined; - setVault(value?: Vault): PermGet; - - hasNode(): boolean; - clearNode(): void; - getNode(): polykey_v1_nodes_nodes_pb.Node | undefined; - setNode(value?: polykey_v1_nodes_nodes_pb.Node): PermGet; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): PermGet.AsObject; - static toObject(includeInstance: boolean, msg: PermGet): PermGet.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: PermGet, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): PermGet; - static deserializeBinaryFromReader(message: PermGet, reader: jspb.BinaryReader): PermGet; -} - -export namespace PermGet { - export type AsObject = { - vault?: Vault.AsObject, - node?: polykey_v1_nodes_nodes_pb.Node.AsObject, - } -} - -export class Permission extends jspb.Message { - getNodeId(): string; - setNodeId(value: string): Permission; - getAction(): string; - setAction(value: string): Permission; - - serializeBinary(): Uint8Array; - toObject(includeInstance?: boolean): Permission.AsObject; - static toObject(includeInstance: 
boolean, msg: Permission): Permission.AsObject; - static extensions: {[key: number]: jspb.ExtensionFieldInfo}; - static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; - static serializeBinaryToWriter(message: Permission, writer: jspb.BinaryWriter): void; - static deserializeBinary(bytes: Uint8Array): Permission; - static deserializeBinaryFromReader(message: Permission, reader: jspb.BinaryReader): Permission; -} - -export namespace Permission { - export type AsObject = { - nodeId: string, - action: string, + vaultPermissionsList: Array, } } @@ -373,8 +309,11 @@ export class LogEntry extends jspb.Message { setOid(value: string): LogEntry; getCommitter(): string; setCommitter(value: string): LogEntry; - getTimeStamp(): number; - setTimeStamp(value: number): LogEntry; + + hasTimeStamp(): boolean; + clearTimeStamp(): void; + getTimeStamp(): google_protobuf_timestamp_pb.Timestamp | undefined; + setTimeStamp(value?: google_protobuf_timestamp_pb.Timestamp): LogEntry; getMessage(): string; setMessage(value: string): LogEntry; @@ -392,11 +331,37 @@ export namespace LogEntry { export type AsObject = { oid: string, committer: string, - timeStamp: number, + timeStamp?: google_protobuf_timestamp_pb.Timestamp.AsObject, message: string, } } +export class InfoRequest extends jspb.Message { + + hasVault(): boolean; + clearVault(): void; + getVault(): Vault | undefined; + setVault(value?: Vault): InfoRequest; + getAction(): string; + setAction(value: string): InfoRequest; + + serializeBinary(): Uint8Array; + toObject(includeInstance?: boolean): InfoRequest.AsObject; + static toObject(includeInstance: boolean, msg: InfoRequest): InfoRequest.AsObject; + static extensions: {[key: number]: jspb.ExtensionFieldInfo}; + static extensionsBinary: {[key: number]: jspb.ExtensionFieldBinaryInfo}; + static serializeBinaryToWriter(message: InfoRequest, writer: jspb.BinaryWriter): void; + static deserializeBinary(bytes: Uint8Array): InfoRequest; + static deserializeBinaryFromReader(message: InfoRequest, reader: jspb.BinaryReader): InfoRequest; +} + +export namespace InfoRequest { + export type AsObject = { + vault?: Vault.AsObject, + action: string, + } +} + export class PackChunk extends jspb.Message { getChunk(): Uint8Array | string; getChunk_asU8(): Uint8Array; diff --git a/src/proto/js/polykey/v1/vaults/vaults_pb.js b/src/proto/js/polykey/v1/vaults/vaults_pb.js index 6fbd3c4ac..6b793dc63 100644 --- a/src/proto/js/polykey/v1/vaults/vaults_pb.js +++ b/src/proto/js/polykey/v1/vaults/vaults_pb.js @@ -16,7 +16,10 @@ var global = Function('return this')(); var polykey_v1_nodes_nodes_pb = require('../../../polykey/v1/nodes/nodes_pb.js'); goog.object.extend(proto, polykey_v1_nodes_nodes_pb); +var google_protobuf_timestamp_pb = require('google-protobuf/google/protobuf/timestamp_pb.js'); +goog.object.extend(proto, google_protobuf_timestamp_pb); goog.exportSymbol('proto.polykey.v1.vaults.Clone', null, global); +goog.exportSymbol('proto.polykey.v1.vaults.InfoRequest', null, global); goog.exportSymbol('proto.polykey.v1.vaults.List', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Log', null, global); goog.exportSymbol('proto.polykey.v1.vaults.LogEntry', null, global); @@ -25,10 +28,7 @@ goog.exportSymbol('proto.polykey.v1.vaults.NodePermission', null, global); goog.exportSymbol('proto.polykey.v1.vaults.NodePermissionAllowed', null, global); goog.exportSymbol('proto.polykey.v1.vaults.PackChunk', null, global); goog.exportSymbol('proto.polykey.v1.vaults.PackRequest', null, global); 
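// Sketch of the LogEntry change above: the time_stamp field is now a
// google.protobuf.Timestamp message rather than a plain number, so callers go
// through the wrapper accessors declared in vaults_pb.d.ts (hasTimeStamp,
// getTimeStamp, setTimeStamp). This is illustrative only; the require paths
// are assumptions, and the seconds/nanos arithmetic shows one way to bridge
// to a JS Date using the generated field accessors.
const { Timestamp } = require('google-protobuf/google/protobuf/timestamp_pb.js');
const vaultsPb = require('../../../polykey/v1/vaults/vaults_pb.js');

const entry = new vaultsPb.LogEntry();
const ms = Date.now();
const ts = new Timestamp();
ts.setSeconds(Math.floor(ms / 1000)); // whole seconds since the Unix epoch
ts.setNanos((ms % 1000) * 1e6); // remaining milliseconds as nanoseconds
entry.setTimeStamp(ts);
// Reading it back: getTimeStamp() may return undefined when the field is unset.
const when = entry.getTimeStamp();
const date = when && new Date(when.getSeconds() * 1000 + when.getNanos() / 1e6);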
-goog.exportSymbol('proto.polykey.v1.vaults.PermGet', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.PermSet', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.PermUnset', null, global); -goog.exportSymbol('proto.polykey.v1.vaults.Permission', null, global); +goog.exportSymbol('proto.polykey.v1.vaults.Permissions', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Pull', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Rename', null, global); goog.exportSymbol('proto.polykey.v1.vaults.Stat', null, global); @@ -67,7 +67,7 @@ if (goog.DEBUG && !COMPILED) { * @constructor */ proto.polykey.v1.vaults.List = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.vaults.List.repeatedFields_, null); }; goog.inherits(proto.polykey.v1.vaults.List, jspb.Message); if (goog.DEBUG && !COMPILED) { @@ -192,58 +192,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.PermSet = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); -}; -goog.inherits(proto.polykey.v1.vaults.PermSet, jspb.Message); -if (goog.DEBUG && !COMPILED) { - /** - * @public - * @override - */ - proto.polykey.v1.vaults.PermSet.displayName = 'proto.polykey.v1.vaults.PermSet'; -} -/** - * Generated by JsPbCodeGenerator. - * @param {Array=} opt_data Optional initial data array, typically from a - * server response, or constructed directly in Javascript. The array is used - * in place and becomes part of the constructed object. It is not cloned. - * If no data is provided, the constructed object will be empty, but still - * valid. - * @extends {jspb.Message} - * @constructor - */ -proto.polykey.v1.vaults.PermUnset = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); -}; -goog.inherits(proto.polykey.v1.vaults.PermUnset, jspb.Message); -if (goog.DEBUG && !COMPILED) { - /** - * @public - * @override - */ - proto.polykey.v1.vaults.PermUnset.displayName = 'proto.polykey.v1.vaults.PermUnset'; -} -/** - * Generated by JsPbCodeGenerator. - * @param {Array=} opt_data Optional initial data array, typically from a - * server response, or constructed directly in Javascript. The array is used - * in place and becomes part of the constructed object. It is not cloned. - * If no data is provided, the constructed object will be empty, but still - * valid. - * @extends {jspb.Message} - * @constructor - */ -proto.polykey.v1.vaults.PermGet = function(opt_data) { - jspb.Message.initialize(this, opt_data, 0, -1, null, null); +proto.polykey.v1.vaults.Permissions = function(opt_data) { + jspb.Message.initialize(this, opt_data, 0, -1, proto.polykey.v1.vaults.Permissions.repeatedFields_, null); }; -goog.inherits(proto.polykey.v1.vaults.PermGet, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.Permissions, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.PermGet.displayName = 'proto.polykey.v1.vaults.PermGet'; + proto.polykey.v1.vaults.Permissions.displayName = 'proto.polykey.v1.vaults.Permissions'; } /** * Generated by JsPbCodeGenerator. 
@@ -255,16 +213,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.Permission = function(opt_data) { +proto.polykey.v1.vaults.Version = function(opt_data) { jspb.Message.initialize(this, opt_data, 0, -1, null, null); }; -goog.inherits(proto.polykey.v1.vaults.Permission, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.Version, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.Permission.displayName = 'proto.polykey.v1.vaults.Permission'; + proto.polykey.v1.vaults.Version.displayName = 'proto.polykey.v1.vaults.Version'; } /** * Generated by JsPbCodeGenerator. @@ -276,16 +234,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.Version = function(opt_data) { +proto.polykey.v1.vaults.VersionResult = function(opt_data) { jspb.Message.initialize(this, opt_data, 0, -1, null, null); }; -goog.inherits(proto.polykey.v1.vaults.Version, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.VersionResult, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.Version.displayName = 'proto.polykey.v1.vaults.Version'; + proto.polykey.v1.vaults.VersionResult.displayName = 'proto.polykey.v1.vaults.VersionResult'; } /** * Generated by JsPbCodeGenerator. @@ -297,16 +255,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.VersionResult = function(opt_data) { +proto.polykey.v1.vaults.Log = function(opt_data) { jspb.Message.initialize(this, opt_data, 0, -1, null, null); }; -goog.inherits(proto.polykey.v1.vaults.VersionResult, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.Log, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.VersionResult.displayName = 'proto.polykey.v1.vaults.VersionResult'; + proto.polykey.v1.vaults.Log.displayName = 'proto.polykey.v1.vaults.Log'; } /** * Generated by JsPbCodeGenerator. @@ -318,16 +276,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.Log = function(opt_data) { +proto.polykey.v1.vaults.LogEntry = function(opt_data) { jspb.Message.initialize(this, opt_data, 0, -1, null, null); }; -goog.inherits(proto.polykey.v1.vaults.Log, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.LogEntry, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.Log.displayName = 'proto.polykey.v1.vaults.Log'; + proto.polykey.v1.vaults.LogEntry.displayName = 'proto.polykey.v1.vaults.LogEntry'; } /** * Generated by JsPbCodeGenerator. @@ -339,16 +297,16 @@ if (goog.DEBUG && !COMPILED) { * @extends {jspb.Message} * @constructor */ -proto.polykey.v1.vaults.LogEntry = function(opt_data) { +proto.polykey.v1.vaults.InfoRequest = function(opt_data) { jspb.Message.initialize(this, opt_data, 0, -1, null, null); }; -goog.inherits(proto.polykey.v1.vaults.LogEntry, jspb.Message); +goog.inherits(proto.polykey.v1.vaults.InfoRequest, jspb.Message); if (goog.DEBUG && !COMPILED) { /** * @public * @override */ - proto.polykey.v1.vaults.LogEntry.displayName = 'proto.polykey.v1.vaults.LogEntry'; + proto.polykey.v1.vaults.InfoRequest.displayName = 'proto.polykey.v1.vaults.InfoRequest'; } /** * Generated by JsPbCodeGenerator. @@ -565,6 +523,13 @@ proto.polykey.v1.vaults.Vault.prototype.setNameOrId = function(value) { +/** + * List of repeated fields within this message type. 
+ * @private {!Array} + * @const + */ +proto.polykey.v1.vaults.List.repeatedFields_ = [3]; + if (jspb.Message.GENERATE_TO_OBJECT) { @@ -597,7 +562,8 @@ proto.polykey.v1.vaults.List.prototype.toObject = function(opt_includeInstance) proto.polykey.v1.vaults.List.toObject = function(includeInstance, msg) { var f, obj = { vaultName: jspb.Message.getFieldWithDefault(msg, 1, ""), - vaultId: jspb.Message.getFieldWithDefault(msg, 2, "") + vaultId: jspb.Message.getFieldWithDefault(msg, 2, ""), + vaultPermissionsList: (f = jspb.Message.getRepeatedField(msg, 3)) == null ? undefined : f }; if (includeInstance) { @@ -642,6 +608,10 @@ proto.polykey.v1.vaults.List.deserializeBinaryFromReader = function(msg, reader) var value = /** @type {string} */ (reader.readString()); msg.setVaultId(value); break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.addVaultPermissions(value); + break; default: reader.skipField(); break; @@ -685,6 +655,13 @@ proto.polykey.v1.vaults.List.serializeBinaryToWriter = function(message, writer) f ); } + f = message.getVaultPermissionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 3, + f + ); + } }; @@ -724,6 +701,43 @@ proto.polykey.v1.vaults.List.prototype.setVaultId = function(value) { }; +/** + * repeated string vault_permissions = 3; + * @return {!Array} + */ +proto.polykey.v1.vaults.List.prototype.getVaultPermissionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 3)); +}; + + +/** + * @param {!Array} value + * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.setVaultPermissionsList = function(value) { + return jspb.Message.setField(this, 3, value || []); +}; + + +/** + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.addVaultPermissions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 3, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. 
+ * @return {!proto.polykey.v1.vaults.List} returns this + */ +proto.polykey.v1.vaults.List.prototype.clearVaultPermissionsList = function() { + return this.setVaultPermissionsList([]); +}; + + @@ -1149,7 +1163,8 @@ proto.polykey.v1.vaults.Pull.prototype.toObject = function(opt_includeInstance) proto.polykey.v1.vaults.Pull.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + pullVault: (f = msg.getPullVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f) }; if (includeInstance) { @@ -1196,6 +1211,11 @@ proto.polykey.v1.vaults.Pull.deserializeBinaryFromReader = function(msg, reader) reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); msg.setNode(value); break; + case 3: + var value = new proto.polykey.v1.vaults.Vault; + reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); + msg.setPullVault(value); + break; default: reader.skipField(); break; @@ -1241,6 +1261,14 @@ proto.polykey.v1.vaults.Pull.serializeBinaryToWriter = function(message, writer) polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter ); } + f = message.getPullVault(); + if (f != null) { + writer.writeMessage( + 3, + f, + proto.polykey.v1.vaults.Vault.serializeBinaryToWriter + ); + } }; @@ -1318,6 +1346,43 @@ proto.polykey.v1.vaults.Pull.prototype.hasNode = function() { }; +/** + * optional Vault pull_vault = 3; + * @return {?proto.polykey.v1.vaults.Vault} + */ +proto.polykey.v1.vaults.Pull.prototype.getPullVault = function() { + return /** @type{?proto.polykey.v1.vaults.Vault} */ ( + jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 3)); +}; + + +/** + * @param {?proto.polykey.v1.vaults.Vault|undefined} value + * @return {!proto.polykey.v1.vaults.Pull} returns this +*/ +proto.polykey.v1.vaults.Pull.prototype.setPullVault = function(value) { + return jspb.Message.setWrapperField(this, 3, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.Pull} returns this + */ +proto.polykey.v1.vaults.Pull.prototype.clearPullVault = function() { + return this.setPullVault(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.Pull.prototype.hasPullVault = function() { + return jspb.Message.getField(this, 3) != null; +}; + + @@ -1651,6 +1716,13 @@ proto.polykey.v1.vaults.Stat.prototype.setStats = function(value) { +/** + * List of repeated fields within this message type. + * @private {!Array} + * @const + */ +proto.polykey.v1.vaults.Permissions.repeatedFields_ = [3]; + if (jspb.Message.GENERATE_TO_OBJECT) { @@ -1666,8 +1738,8 @@ if (jspb.Message.GENERATE_TO_OBJECT) { * http://goto/soy-param-migration * @return {!Object} */ -proto.polykey.v1.vaults.PermSet.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermSet.toObject(opt_includeInstance, this); +proto.polykey.v1.vaults.Permissions.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.Permissions.toObject(opt_includeInstance, this); }; @@ -1676,14 +1748,15 @@ proto.polykey.v1.vaults.PermSet.prototype.toObject = function(opt_includeInstanc * @param {boolean|undefined} includeInstance Deprecated. 
Whether to include * the JSPB instance for transitional soy proto support: * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermSet} msg The msg instance to transform. + * @param {!proto.polykey.v1.vaults.Permissions} msg The msg instance to transform. * @return {!Object} * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermSet.toObject = function(includeInstance, msg) { +proto.polykey.v1.vaults.Permissions.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f), + vaultPermissionsList: (f = jspb.Message.getRepeatedField(msg, 3)) == null ? undefined : f }; if (includeInstance) { @@ -1697,23 +1770,23 @@ proto.polykey.v1.vaults.PermSet.toObject = function(includeInstance, msg) { /** * Deserializes binary data (in protobuf wire format). * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermSet} + * @return {!proto.polykey.v1.vaults.Permissions} */ -proto.polykey.v1.vaults.PermSet.deserializeBinary = function(bytes) { +proto.polykey.v1.vaults.Permissions.deserializeBinary = function(bytes) { var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermSet; - return proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader(msg, reader); + var msg = new proto.polykey.v1.vaults.Permissions; + return proto.polykey.v1.vaults.Permissions.deserializeBinaryFromReader(msg, reader); }; /** * Deserializes binary data (in protobuf wire format) from the * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermSet} msg The message object to deserialize into. + * @param {!proto.polykey.v1.vaults.Permissions} msg The message object to deserialize into. * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.PermSet} + * @return {!proto.polykey.v1.vaults.Permissions} */ -proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, reader) { +proto.polykey.v1.vaults.Permissions.deserializeBinaryFromReader = function(msg, reader) { while (reader.nextField()) { if (reader.isEndGroup()) { break; @@ -1730,6 +1803,10 @@ proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, read reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); msg.setNode(value); break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.addVaultPermissions(value); + break; default: reader.skipField(); break; @@ -1743,9 +1820,9 @@ proto.polykey.v1.vaults.PermSet.deserializeBinaryFromReader = function(msg, read * Serializes the message to binary data (in protobuf wire format). 
* @return {!Uint8Array} */ -proto.polykey.v1.vaults.PermSet.prototype.serializeBinary = function() { +proto.polykey.v1.vaults.Permissions.prototype.serializeBinary = function() { var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter(this, writer); + proto.polykey.v1.vaults.Permissions.serializeBinaryToWriter(this, writer); return writer.getResultBuffer(); }; @@ -1753,11 +1830,11 @@ proto.polykey.v1.vaults.PermSet.prototype.serializeBinary = function() { /** * Serializes the given message to binary data (in protobuf wire * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.PermSet} message + * @param {!proto.polykey.v1.vaults.Permissions} message * @param {!jspb.BinaryWriter} writer * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writer) { +proto.polykey.v1.vaults.Permissions.serializeBinaryToWriter = function(message, writer) { var f = undefined; f = message.getVault(); if (f != null) { @@ -1775,6 +1852,13 @@ proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writ polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter ); } + f = message.getVaultPermissionsList(); + if (f.length > 0) { + writer.writeRepeatedString( + 3, + f + ); + } }; @@ -1782,7 +1866,7 @@ proto.polykey.v1.vaults.PermSet.serializeBinaryToWriter = function(message, writ * optional Vault vault = 1; * @return {?proto.polykey.v1.vaults.Vault} */ -proto.polykey.v1.vaults.PermSet.prototype.getVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.getVault = function() { return /** @type{?proto.polykey.v1.vaults.Vault} */ ( jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); }; @@ -1790,18 +1874,18 @@ proto.polykey.v1.vaults.PermSet.prototype.getVault = function() { /** * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.setVault = function(value) { +proto.polykey.v1.vaults.Permissions.prototype.setVault = function(value) { return jspb.Message.setWrapperField(this, 1, value); }; /** * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.clearVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.clearVault = function() { return this.setVault(undefined); }; @@ -1810,7 +1894,7 @@ proto.polykey.v1.vaults.PermSet.prototype.clearVault = function() { * Returns whether this field is set. 
* @return {boolean} */ -proto.polykey.v1.vaults.PermSet.prototype.hasVault = function() { +proto.polykey.v1.vaults.Permissions.prototype.hasVault = function() { return jspb.Message.getField(this, 1) != null; }; @@ -1819,7 +1903,7 @@ proto.polykey.v1.vaults.PermSet.prototype.hasVault = function() { * optional polykey.v1.nodes.Node node = 2; * @return {?proto.polykey.v1.nodes.Node} */ -proto.polykey.v1.vaults.PermSet.prototype.getNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.getNode = function() { return /** @type{?proto.polykey.v1.nodes.Node} */ ( jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); }; @@ -1827,18 +1911,18 @@ proto.polykey.v1.vaults.PermSet.prototype.getNode = function() { /** * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.setNode = function(value) { +proto.polykey.v1.vaults.Permissions.prototype.setNode = function(value) { return jspb.Message.setWrapperField(this, 2, value); }; /** * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermSet} returns this + * @return {!proto.polykey.v1.vaults.Permissions} returns this */ -proto.polykey.v1.vaults.PermSet.prototype.clearNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.clearNode = function() { return this.setNode(undefined); }; @@ -1847,11 +1931,48 @@ proto.polykey.v1.vaults.PermSet.prototype.clearNode = function() { * Returns whether this field is set. * @return {boolean} */ -proto.polykey.v1.vaults.PermSet.prototype.hasNode = function() { +proto.polykey.v1.vaults.Permissions.prototype.hasNode = function() { return jspb.Message.getField(this, 2) != null; }; +/** + * repeated string vault_permissions = 3; + * @return {!Array} + */ +proto.polykey.v1.vaults.Permissions.prototype.getVaultPermissionsList = function() { + return /** @type {!Array} */ (jspb.Message.getRepeatedField(this, 3)); +}; + + +/** + * @param {!Array} value + * @return {!proto.polykey.v1.vaults.Permissions} returns this + */ +proto.polykey.v1.vaults.Permissions.prototype.setVaultPermissionsList = function(value) { + return jspb.Message.setField(this, 3, value || []); +}; + + +/** + * @param {string} value + * @param {number=} opt_index + * @return {!proto.polykey.v1.vaults.Permissions} returns this + */ +proto.polykey.v1.vaults.Permissions.prototype.addVaultPermissions = function(value, opt_index) { + return jspb.Message.addToRepeatedField(this, 3, value, opt_index); +}; + + +/** + * Clears the list making it empty but non-null. + * @return {!proto.polykey.v1.vaults.Permissions} returns this + */ +proto.polykey.v1.vaults.Permissions.prototype.clearVaultPermissionsList = function() { + return this.setVaultPermissionsList([]); +}; + + @@ -1868,8 +1989,8 @@ if (jspb.Message.GENERATE_TO_OBJECT) { * http://goto/soy-param-migration * @return {!Object} */ -proto.polykey.v1.vaults.PermUnset.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermUnset.toObject(opt_includeInstance, this); +proto.polykey.v1.vaults.Version.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.Version.toObject(opt_includeInstance, this); }; @@ -1878,14 +1999,14 @@ proto.polykey.v1.vaults.PermUnset.prototype.toObject = function(opt_includeInsta * @param {boolean|undefined} includeInstance Deprecated. 
Whether to include * the JSPB instance for transitional soy proto support: * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermUnset} msg The msg instance to transform. + * @param {!proto.polykey.v1.vaults.Version} msg The msg instance to transform. * @return {!Object} * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermUnset.toObject = function(includeInstance, msg) { +proto.polykey.v1.vaults.Version.toObject = function(includeInstance, msg) { var f, obj = { vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + versionId: jspb.Message.getFieldWithDefault(msg, 2, "") }; if (includeInstance) { @@ -1899,23 +2020,23 @@ proto.polykey.v1.vaults.PermUnset.toObject = function(includeInstance, msg) { /** * Deserializes binary data (in protobuf wire format). * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermUnset} + * @return {!proto.polykey.v1.vaults.Version} */ -proto.polykey.v1.vaults.PermUnset.deserializeBinary = function(bytes) { +proto.polykey.v1.vaults.Version.deserializeBinary = function(bytes) { var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermUnset; - return proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader(msg, reader); + var msg = new proto.polykey.v1.vaults.Version; + return proto.polykey.v1.vaults.Version.deserializeBinaryFromReader(msg, reader); }; /** * Deserializes binary data (in protobuf wire format) from the * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermUnset} msg The message object to deserialize into. + * @param {!proto.polykey.v1.vaults.Version} msg The message object to deserialize into. * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.PermUnset} + * @return {!proto.polykey.v1.vaults.Version} */ -proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader = function(msg, reader) { +proto.polykey.v1.vaults.Version.deserializeBinaryFromReader = function(msg, reader) { while (reader.nextField()) { if (reader.isEndGroup()) { break; @@ -1928,9 +2049,8 @@ proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader = function(msg, re msg.setVault(value); break; case 2: - var value = new polykey_v1_nodes_nodes_pb.Node; - reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); - msg.setNode(value); + var value = /** @type {string} */ (reader.readString()); + msg.setVersionId(value); break; default: reader.skipField(); @@ -1945,9 +2065,9 @@ proto.polykey.v1.vaults.PermUnset.deserializeBinaryFromReader = function(msg, re * Serializes the message to binary data (in protobuf wire format). * @return {!Uint8Array} */ -proto.polykey.v1.vaults.PermUnset.prototype.serializeBinary = function() { +proto.polykey.v1.vaults.Version.prototype.serializeBinary = function() { var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter(this, writer); + proto.polykey.v1.vaults.Version.serializeBinaryToWriter(this, writer); return writer.getResultBuffer(); }; @@ -1955,11 +2075,11 @@ proto.polykey.v1.vaults.PermUnset.prototype.serializeBinary = function() { /** * Serializes the given message to binary data (in protobuf wire * format), writing to the given BinaryWriter. 
- * @param {!proto.polykey.v1.vaults.PermUnset} message + * @param {!proto.polykey.v1.vaults.Version} message * @param {!jspb.BinaryWriter} writer * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter = function(message, writer) { +proto.polykey.v1.vaults.Version.serializeBinaryToWriter = function(message, writer) { var f = undefined; f = message.getVault(); if (f != null) { @@ -1969,12 +2089,11 @@ proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter = function(message, wr proto.polykey.v1.vaults.Vault.serializeBinaryToWriter ); } - f = message.getNode(); - if (f != null) { - writer.writeMessage( + f = message.getVersionId(); + if (f.length > 0) { + writer.writeString( 2, - f, - polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter + f ); } }; @@ -1984,7 +2103,7 @@ proto.polykey.v1.vaults.PermUnset.serializeBinaryToWriter = function(message, wr * optional Vault vault = 1; * @return {?proto.polykey.v1.vaults.Vault} */ -proto.polykey.v1.vaults.PermUnset.prototype.getVault = function() { +proto.polykey.v1.vaults.Version.prototype.getVault = function() { return /** @type{?proto.polykey.v1.vaults.Vault} */ ( jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); }; @@ -1992,18 +2111,18 @@ proto.polykey.v1.vaults.PermUnset.prototype.getVault = function() { /** * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermUnset} returns this + * @return {!proto.polykey.v1.vaults.Version} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.setVault = function(value) { +proto.polykey.v1.vaults.Version.prototype.setVault = function(value) { return jspb.Message.setWrapperField(this, 1, value); }; /** * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermUnset} returns this + * @return {!proto.polykey.v1.vaults.Version} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.clearVault = function() { +proto.polykey.v1.vaults.Version.prototype.clearVault = function() { return this.setVault(undefined); }; @@ -2012,45 +2131,26 @@ proto.polykey.v1.vaults.PermUnset.prototype.clearVault = function() { * Returns whether this field is set. * @return {boolean} */ -proto.polykey.v1.vaults.PermUnset.prototype.hasVault = function() { +proto.polykey.v1.vaults.Version.prototype.hasVault = function() { return jspb.Message.getField(this, 1) != null; }; /** - * optional polykey.v1.nodes.Node node = 2; - * @return {?proto.polykey.v1.nodes.Node} - */ -proto.polykey.v1.vaults.PermUnset.prototype.getNode = function() { - return /** @type{?proto.polykey.v1.nodes.Node} */ ( - jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); -}; - - -/** - * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermUnset} returns this -*/ -proto.polykey.v1.vaults.PermUnset.prototype.setNode = function(value) { - return jspb.Message.setWrapperField(this, 2, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermUnset} returns this + * optional string version_id = 2; + * @return {string} */ -proto.polykey.v1.vaults.PermUnset.prototype.clearNode = function() { - return this.setNode(undefined); +proto.polykey.v1.vaults.Version.prototype.getVersionId = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); }; /** - * Returns whether this field is set. 
- * @return {boolean} + * @param {string} value + * @return {!proto.polykey.v1.vaults.Version} returns this */ -proto.polykey.v1.vaults.PermUnset.prototype.hasNode = function() { - return jspb.Message.getField(this, 2) != null; +proto.polykey.v1.vaults.Version.prototype.setVersionId = function(value) { + return jspb.Message.setProto3StringField(this, 2, value); }; @@ -2070,8 +2170,8 @@ if (jspb.Message.GENERATE_TO_OBJECT) { * http://goto/soy-param-migration * @return {!Object} */ -proto.polykey.v1.vaults.PermGet.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.PermGet.toObject(opt_includeInstance, this); +proto.polykey.v1.vaults.VersionResult.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.VersionResult.toObject(opt_includeInstance, this); }; @@ -2080,14 +2180,13 @@ proto.polykey.v1.vaults.PermGet.prototype.toObject = function(opt_includeInstanc * @param {boolean|undefined} includeInstance Deprecated. Whether to include * the JSPB instance for transitional soy proto support: * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.PermGet} msg The msg instance to transform. + * @param {!proto.polykey.v1.vaults.VersionResult} msg The msg instance to transform. * @return {!Object} * @suppress {unusedLocalVariables} f is only used for nested messages */ -proto.polykey.v1.vaults.PermGet.toObject = function(includeInstance, msg) { +proto.polykey.v1.vaults.VersionResult.toObject = function(includeInstance, msg) { var f, obj = { - vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - node: (f = msg.getNode()) && polykey_v1_nodes_nodes_pb.Node.toObject(includeInstance, f) + isLatestVersion: jspb.Message.getBooleanFieldWithDefault(msg, 1, false) }; if (includeInstance) { @@ -2101,23 +2200,23 @@ proto.polykey.v1.vaults.PermGet.toObject = function(includeInstance, msg) { /** * Deserializes binary data (in protobuf wire format). * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.PermGet} + * @return {!proto.polykey.v1.vaults.VersionResult} */ -proto.polykey.v1.vaults.PermGet.deserializeBinary = function(bytes) { +proto.polykey.v1.vaults.VersionResult.deserializeBinary = function(bytes) { var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.PermGet; - return proto.polykey.v1.vaults.PermGet.deserializeBinaryFromReader(msg, reader); + var msg = new proto.polykey.v1.vaults.VersionResult; + return proto.polykey.v1.vaults.VersionResult.deserializeBinaryFromReader(msg, reader); }; /** * Deserializes binary data (in protobuf wire format) from the * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.PermGet} msg The message object to deserialize into. + * @param {!proto.polykey.v1.vaults.VersionResult} msg The message object to deserialize into. * @param {!jspb.BinaryReader} reader The BinaryReader to use. 
- * @return {!proto.polykey.v1.vaults.PermGet} + * @return {!proto.polykey.v1.vaults.VersionResult} */ -proto.polykey.v1.vaults.PermGet.deserializeBinaryFromReader = function(msg, reader) { +proto.polykey.v1.vaults.VersionResult.deserializeBinaryFromReader = function(msg, reader) { while (reader.nextField()) { if (reader.isEndGroup()) { break; @@ -2125,550 +2224,8 @@ proto.polykey.v1.vaults.PermGet.deserializeBinaryFromReader = function(msg, read var field = reader.getFieldNumber(); switch (field) { case 1: - var value = new proto.polykey.v1.vaults.Vault; - reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); - msg.setVault(value); - break; - case 2: - var value = new polykey_v1_nodes_nodes_pb.Node; - reader.readMessage(value,polykey_v1_nodes_nodes_pb.Node.deserializeBinaryFromReader); - msg.setNode(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). - * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.PermGet.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.PermGet.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.PermGet} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.PermGet.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getVault(); - if (f != null) { - writer.writeMessage( - 1, - f, - proto.polykey.v1.vaults.Vault.serializeBinaryToWriter - ); - } - f = message.getNode(); - if (f != null) { - writer.writeMessage( - 2, - f, - polykey_v1_nodes_nodes_pb.Node.serializeBinaryToWriter - ); - } -}; - - -/** - * optional Vault vault = 1; - * @return {?proto.polykey.v1.vaults.Vault} - */ -proto.polykey.v1.vaults.PermGet.prototype.getVault = function() { - return /** @type{?proto.polykey.v1.vaults.Vault} */ ( - jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); -}; - - -/** - * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.PermGet} returns this -*/ -proto.polykey.v1.vaults.PermGet.prototype.setVault = function(value) { - return jspb.Message.setWrapperField(this, 1, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.PermGet} returns this - */ -proto.polykey.v1.vaults.PermGet.prototype.clearVault = function() { - return this.setVault(undefined); -}; - - -/** - * Returns whether this field is set. - * @return {boolean} - */ -proto.polykey.v1.vaults.PermGet.prototype.hasVault = function() { - return jspb.Message.getField(this, 1) != null; -}; - - -/** - * optional polykey.v1.nodes.Node node = 2; - * @return {?proto.polykey.v1.nodes.Node} - */ -proto.polykey.v1.vaults.PermGet.prototype.getNode = function() { - return /** @type{?proto.polykey.v1.nodes.Node} */ ( - jspb.Message.getWrapperField(this, polykey_v1_nodes_nodes_pb.Node, 2)); -}; - - -/** - * @param {?proto.polykey.v1.nodes.Node|undefined} value - * @return {!proto.polykey.v1.vaults.PermGet} returns this -*/ -proto.polykey.v1.vaults.PermGet.prototype.setNode = function(value) { - return jspb.Message.setWrapperField(this, 2, value); -}; - - -/** - * Clears the message field making it undefined. 
- * @return {!proto.polykey.v1.vaults.PermGet} returns this - */ -proto.polykey.v1.vaults.PermGet.prototype.clearNode = function() { - return this.setNode(undefined); -}; - - -/** - * Returns whether this field is set. - * @return {boolean} - */ -proto.polykey.v1.vaults.PermGet.prototype.hasNode = function() { - return jspb.Message.getField(this, 2) != null; -}; - - - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.Permission.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.Permission.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.Permission} msg The msg instance to transform. - * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Permission.toObject = function(includeInstance, msg) { - var f, obj = { - nodeId: jspb.Message.getFieldWithDefault(msg, 1, ""), - action: jspb.Message.getFieldWithDefault(msg, 2, "") - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.Permission} - */ -proto.polykey.v1.vaults.Permission.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.Permission; - return proto.polykey.v1.vaults.Permission.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.Permission} msg The message object to deserialize into. - * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.Permission} - */ -proto.polykey.v1.vaults.Permission.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = /** @type {string} */ (reader.readString()); - msg.setNodeId(value); - break; - case 2: - var value = /** @type {string} */ (reader.readString()); - msg.setAction(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). 
- * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.Permission.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.Permission.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.Permission} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Permission.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getNodeId(); - if (f.length > 0) { - writer.writeString( - 1, - f - ); - } - f = message.getAction(); - if (f.length > 0) { - writer.writeString( - 2, - f - ); - } -}; - - -/** - * optional string node_id = 1; - * @return {string} - */ -proto.polykey.v1.vaults.Permission.prototype.getNodeId = function() { - return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 1, "")); -}; - - -/** - * @param {string} value - * @return {!proto.polykey.v1.vaults.Permission} returns this - */ -proto.polykey.v1.vaults.Permission.prototype.setNodeId = function(value) { - return jspb.Message.setProto3StringField(this, 1, value); -}; - - -/** - * optional string action = 2; - * @return {string} - */ -proto.polykey.v1.vaults.Permission.prototype.getAction = function() { - return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); -}; - - -/** - * @param {string} value - * @return {!proto.polykey.v1.vaults.Permission} returns this - */ -proto.polykey.v1.vaults.Permission.prototype.setAction = function(value) { - return jspb.Message.setProto3StringField(this, 2, value); -}; - - - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.Version.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.Version.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.Version} msg The msg instance to transform. - * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Version.toObject = function(includeInstance, msg) { - var f, obj = { - vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), - versionId: jspb.Message.getFieldWithDefault(msg, 2, "") - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. 
- * @return {!proto.polykey.v1.vaults.Version} - */ -proto.polykey.v1.vaults.Version.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.Version; - return proto.polykey.v1.vaults.Version.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.Version} msg The message object to deserialize into. - * @param {!jspb.BinaryReader} reader The BinaryReader to use. - * @return {!proto.polykey.v1.vaults.Version} - */ -proto.polykey.v1.vaults.Version.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = new proto.polykey.v1.vaults.Vault; - reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); - msg.setVault(value); - break; - case 2: - var value = /** @type {string} */ (reader.readString()); - msg.setVersionId(value); - break; - default: - reader.skipField(); - break; - } - } - return msg; -}; - - -/** - * Serializes the message to binary data (in protobuf wire format). - * @return {!Uint8Array} - */ -proto.polykey.v1.vaults.Version.prototype.serializeBinary = function() { - var writer = new jspb.BinaryWriter(); - proto.polykey.v1.vaults.Version.serializeBinaryToWriter(this, writer); - return writer.getResultBuffer(); -}; - - -/** - * Serializes the given message to binary data (in protobuf wire - * format), writing to the given BinaryWriter. - * @param {!proto.polykey.v1.vaults.Version} message - * @param {!jspb.BinaryWriter} writer - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.Version.serializeBinaryToWriter = function(message, writer) { - var f = undefined; - f = message.getVault(); - if (f != null) { - writer.writeMessage( - 1, - f, - proto.polykey.v1.vaults.Vault.serializeBinaryToWriter - ); - } - f = message.getVersionId(); - if (f.length > 0) { - writer.writeString( - 2, - f - ); - } -}; - - -/** - * optional Vault vault = 1; - * @return {?proto.polykey.v1.vaults.Vault} - */ -proto.polykey.v1.vaults.Version.prototype.getVault = function() { - return /** @type{?proto.polykey.v1.vaults.Vault} */ ( - jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); -}; - - -/** - * @param {?proto.polykey.v1.vaults.Vault|undefined} value - * @return {!proto.polykey.v1.vaults.Version} returns this -*/ -proto.polykey.v1.vaults.Version.prototype.setVault = function(value) { - return jspb.Message.setWrapperField(this, 1, value); -}; - - -/** - * Clears the message field making it undefined. - * @return {!proto.polykey.v1.vaults.Version} returns this - */ -proto.polykey.v1.vaults.Version.prototype.clearVault = function() { - return this.setVault(undefined); -}; - - -/** - * Returns whether this field is set. 
- * @return {boolean} - */ -proto.polykey.v1.vaults.Version.prototype.hasVault = function() { - return jspb.Message.getField(this, 1) != null; -}; - - -/** - * optional string version_id = 2; - * @return {string} - */ -proto.polykey.v1.vaults.Version.prototype.getVersionId = function() { - return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 2, "")); -}; - - -/** - * @param {string} value - * @return {!proto.polykey.v1.vaults.Version} returns this - */ -proto.polykey.v1.vaults.Version.prototype.setVersionId = function(value) { - return jspb.Message.setProto3StringField(this, 2, value); -}; - - - - - -if (jspb.Message.GENERATE_TO_OBJECT) { -/** - * Creates an object representation of this proto. - * Field names that are reserved in JavaScript and will be renamed to pb_name. - * Optional fields that are not set will be set to undefined. - * To access a reserved field use, foo.pb_, eg, foo.pb_default. - * For the list of reserved names please see: - * net/proto2/compiler/js/internal/generator.cc#kKeyword. - * @param {boolean=} opt_includeInstance Deprecated. whether to include the - * JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @return {!Object} - */ -proto.polykey.v1.vaults.VersionResult.prototype.toObject = function(opt_includeInstance) { - return proto.polykey.v1.vaults.VersionResult.toObject(opt_includeInstance, this); -}; - - -/** - * Static version of the {@see toObject} method. - * @param {boolean|undefined} includeInstance Deprecated. Whether to include - * the JSPB instance for transitional soy proto support: - * http://goto/soy-param-migration - * @param {!proto.polykey.v1.vaults.VersionResult} msg The msg instance to transform. - * @return {!Object} - * @suppress {unusedLocalVariables} f is only used for nested messages - */ -proto.polykey.v1.vaults.VersionResult.toObject = function(includeInstance, msg) { - var f, obj = { - isLatestVersion: jspb.Message.getBooleanFieldWithDefault(msg, 1, false) - }; - - if (includeInstance) { - obj.$jspbMessageInstance = msg; - } - return obj; -}; -} - - -/** - * Deserializes binary data (in protobuf wire format). - * @param {jspb.ByteSource} bytes The bytes to deserialize. - * @return {!proto.polykey.v1.vaults.VersionResult} - */ -proto.polykey.v1.vaults.VersionResult.deserializeBinary = function(bytes) { - var reader = new jspb.BinaryReader(bytes); - var msg = new proto.polykey.v1.vaults.VersionResult; - return proto.polykey.v1.vaults.VersionResult.deserializeBinaryFromReader(msg, reader); -}; - - -/** - * Deserializes binary data (in protobuf wire format) from the - * given reader into the given message object. - * @param {!proto.polykey.v1.vaults.VersionResult} msg The message object to deserialize into. - * @param {!jspb.BinaryReader} reader The BinaryReader to use. 
- * @return {!proto.polykey.v1.vaults.VersionResult} - */ -proto.polykey.v1.vaults.VersionResult.deserializeBinaryFromReader = function(msg, reader) { - while (reader.nextField()) { - if (reader.isEndGroup()) { - break; - } - var field = reader.getFieldNumber(); - switch (field) { - case 1: - var value = /** @type {boolean} */ (reader.readBool()); - msg.setIsLatestVersion(value); + var value = /** @type {boolean} */ (reader.readBool()); + msg.setIsLatestVersion(value); break; default: reader.skipField(); @@ -2972,7 +2529,7 @@ proto.polykey.v1.vaults.LogEntry.toObject = function(includeInstance, msg) { var f, obj = { oid: jspb.Message.getFieldWithDefault(msg, 1, ""), committer: jspb.Message.getFieldWithDefault(msg, 2, ""), - timeStamp: jspb.Message.getFieldWithDefault(msg, 4, 0), + timeStamp: (f = msg.getTimeStamp()) && google_protobuf_timestamp_pb.Timestamp.toObject(includeInstance, f), message: jspb.Message.getFieldWithDefault(msg, 3, "") }; @@ -3019,7 +2576,8 @@ proto.polykey.v1.vaults.LogEntry.deserializeBinaryFromReader = function(msg, rea msg.setCommitter(value); break; case 4: - var value = /** @type {number} */ (reader.readUint64()); + var value = new google_protobuf_timestamp_pb.Timestamp; + reader.readMessage(value,google_protobuf_timestamp_pb.Timestamp.deserializeBinaryFromReader); msg.setTimeStamp(value); break; case 3: @@ -3070,10 +2628,11 @@ proto.polykey.v1.vaults.LogEntry.serializeBinaryToWriter = function(message, wri ); } f = message.getTimeStamp(); - if (f !== 0) { - writer.writeUint64( + if (f != null) { + writer.writeMessage( 4, - f + f, + google_protobuf_timestamp_pb.Timestamp.serializeBinaryToWriter ); } f = message.getMessage(); @@ -3123,20 +2682,39 @@ proto.polykey.v1.vaults.LogEntry.prototype.setCommitter = function(value) { /** - * optional uint64 time_stamp = 4; - * @return {number} + * optional google.protobuf.Timestamp time_stamp = 4; + * @return {?proto.google.protobuf.Timestamp} */ proto.polykey.v1.vaults.LogEntry.prototype.getTimeStamp = function() { - return /** @type {number} */ (jspb.Message.getFieldWithDefault(this, 4, 0)); + return /** @type{?proto.google.protobuf.Timestamp} */ ( + jspb.Message.getWrapperField(this, google_protobuf_timestamp_pb.Timestamp, 4)); }; /** - * @param {number} value + * @param {?proto.google.protobuf.Timestamp|undefined} value * @return {!proto.polykey.v1.vaults.LogEntry} returns this - */ +*/ proto.polykey.v1.vaults.LogEntry.prototype.setTimeStamp = function(value) { - return jspb.Message.setProto3IntField(this, 4, value); + return jspb.Message.setWrapperField(this, 4, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.LogEntry} returns this + */ +proto.polykey.v1.vaults.LogEntry.prototype.clearTimeStamp = function() { + return this.setTimeStamp(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.LogEntry.prototype.hasTimeStamp = function() { + return jspb.Message.getField(this, 4) != null; }; @@ -3161,6 +2739,187 @@ proto.polykey.v1.vaults.LogEntry.prototype.setMessage = function(value) { +if (jspb.Message.GENERATE_TO_OBJECT) { +/** + * Creates an object representation of this proto. + * Field names that are reserved in JavaScript and will be renamed to pb_name. + * Optional fields that are not set will be set to undefined. + * To access a reserved field use, foo.pb_, eg, foo.pb_default. + * For the list of reserved names please see: + * net/proto2/compiler/js/internal/generator.cc#kKeyword. 
+ * @param {boolean=} opt_includeInstance Deprecated. whether to include the + * JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @return {!Object} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.toObject = function(opt_includeInstance) { + return proto.polykey.v1.vaults.InfoRequest.toObject(opt_includeInstance, this); +}; + + +/** + * Static version of the {@see toObject} method. + * @param {boolean|undefined} includeInstance Deprecated. Whether to include + * the JSPB instance for transitional soy proto support: + * http://goto/soy-param-migration + * @param {!proto.polykey.v1.vaults.InfoRequest} msg The msg instance to transform. + * @return {!Object} + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.vaults.InfoRequest.toObject = function(includeInstance, msg) { + var f, obj = { + vault: (f = msg.getVault()) && proto.polykey.v1.vaults.Vault.toObject(includeInstance, f), + action: jspb.Message.getFieldWithDefault(msg, 3, "") + }; + + if (includeInstance) { + obj.$jspbMessageInstance = msg; + } + return obj; +}; +} + + +/** + * Deserializes binary data (in protobuf wire format). + * @param {jspb.ByteSource} bytes The bytes to deserialize. + * @return {!proto.polykey.v1.vaults.InfoRequest} + */ +proto.polykey.v1.vaults.InfoRequest.deserializeBinary = function(bytes) { + var reader = new jspb.BinaryReader(bytes); + var msg = new proto.polykey.v1.vaults.InfoRequest; + return proto.polykey.v1.vaults.InfoRequest.deserializeBinaryFromReader(msg, reader); +}; + + +/** + * Deserializes binary data (in protobuf wire format) from the + * given reader into the given message object. + * @param {!proto.polykey.v1.vaults.InfoRequest} msg The message object to deserialize into. + * @param {!jspb.BinaryReader} reader The BinaryReader to use. + * @return {!proto.polykey.v1.vaults.InfoRequest} + */ +proto.polykey.v1.vaults.InfoRequest.deserializeBinaryFromReader = function(msg, reader) { + while (reader.nextField()) { + if (reader.isEndGroup()) { + break; + } + var field = reader.getFieldNumber(); + switch (field) { + case 1: + var value = new proto.polykey.v1.vaults.Vault; + reader.readMessage(value,proto.polykey.v1.vaults.Vault.deserializeBinaryFromReader); + msg.setVault(value); + break; + case 3: + var value = /** @type {string} */ (reader.readString()); + msg.setAction(value); + break; + default: + reader.skipField(); + break; + } + } + return msg; +}; + + +/** + * Serializes the message to binary data (in protobuf wire format). + * @return {!Uint8Array} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.serializeBinary = function() { + var writer = new jspb.BinaryWriter(); + proto.polykey.v1.vaults.InfoRequest.serializeBinaryToWriter(this, writer); + return writer.getResultBuffer(); +}; + + +/** + * Serializes the given message to binary data (in protobuf wire + * format), writing to the given BinaryWriter. 
+ * @param {!proto.polykey.v1.vaults.InfoRequest} message + * @param {!jspb.BinaryWriter} writer + * @suppress {unusedLocalVariables} f is only used for nested messages + */ +proto.polykey.v1.vaults.InfoRequest.serializeBinaryToWriter = function(message, writer) { + var f = undefined; + f = message.getVault(); + if (f != null) { + writer.writeMessage( + 1, + f, + proto.polykey.v1.vaults.Vault.serializeBinaryToWriter + ); + } + f = message.getAction(); + if (f.length > 0) { + writer.writeString( + 3, + f + ); + } +}; + + +/** + * optional Vault vault = 1; + * @return {?proto.polykey.v1.vaults.Vault} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.getVault = function() { + return /** @type{?proto.polykey.v1.vaults.Vault} */ ( + jspb.Message.getWrapperField(this, proto.polykey.v1.vaults.Vault, 1)); +}; + + +/** + * @param {?proto.polykey.v1.vaults.Vault|undefined} value + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this +*/ +proto.polykey.v1.vaults.InfoRequest.prototype.setVault = function(value) { + return jspb.Message.setWrapperField(this, 1, value); +}; + + +/** + * Clears the message field making it undefined. + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this + */ +proto.polykey.v1.vaults.InfoRequest.prototype.clearVault = function() { + return this.setVault(undefined); +}; + + +/** + * Returns whether this field is set. + * @return {boolean} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.hasVault = function() { + return jspb.Message.getField(this, 1) != null; +}; + + +/** + * optional string action = 3; + * @return {string} + */ +proto.polykey.v1.vaults.InfoRequest.prototype.getAction = function() { + return /** @type {string} */ (jspb.Message.getFieldWithDefault(this, 3, "")); +}; + + +/** + * @param {string} value + * @return {!proto.polykey.v1.vaults.InfoRequest} returns this + */ +proto.polykey.v1.vaults.InfoRequest.prototype.setAction = function(value) { + return jspb.Message.setProto3StringField(this, 3, value); +}; + + + + + if (jspb.Message.GENERATE_TO_OBJECT) { /** * Creates an object representation of this proto. 
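A note on the regenerated bindings above: the renamed Permissions message and the LogEntry timestamp change keep the standard jspb accessor surface. The following TypeScript sketch is a minimal illustration of the intended round trip; it assumes the vaults_pb import path used later in this changeset, a sibling nodes_pb module whose Node message has a setNodeId setter (not shown here), and the generated Timestamp bindings from the google-protobuf package.

import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb';
import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb';
import { Timestamp } from 'google-protobuf/google/protobuf/timestamp_pb';

// Permissions now carries the vault, the node, and the repeated
// vault_permissions field (3) with the usual list accessors
const vault = new vaultsPB.Vault();
vault.setNameOrId('my-vault');
const node = new nodesPB.Node();
node.setNodeId('<node-id>'); // assumed setter; placeholder value
const perms = new vaultsPB.Permissions();
perms.setVault(vault);
perms.setNode(node);
perms.addVaultPermissions('pull');
perms.addVaultPermissions('clone');

// The repeated field survives a binary round trip
const decoded = vaultsPB.Permissions.deserializeBinary(perms.serializeBinary());
console.log(decoded.getVaultPermissionsList()); // ['pull', 'clone']

// LogEntry.time_stamp is now a google.protobuf.Timestamp wrapper rather
// than a uint64, so it is set as a message and probed with hasTimeStamp()
const entry = new vaultsPB.LogEntry();
const ts = new Timestamp();
ts.setSeconds(Math.floor(Date.now() / 1000));
entry.setTimeStamp(ts);
console.log(entry.hasTimeStamp()); // true
console.log(entry.getTimeStamp()?.getSeconds());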
diff --git a/src/proto/schemas/polykey/v1/agent_service.proto b/src/proto/schemas/polykey/v1/agent_service.proto
index 712ee5d8a..a4c824360 100644
--- a/src/proto/schemas/polykey/v1/agent_service.proto
+++ b/src/proto/schemas/polykey/v1/agent_service.proto
@@ -13,10 +13,9 @@ service AgentService {
   rpc Echo(polykey.v1.utils.EchoMessage) returns (polykey.v1.utils.EchoMessage);

   // Vaults
-  rpc VaultsGitInfoGet (polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.PackChunk);
+  rpc VaultsGitInfoGet (polykey.v1.vaults.InfoRequest) returns (stream polykey.v1.vaults.PackChunk);
   rpc VaultsGitPackGet(stream polykey.v1.vaults.PackChunk) returns (stream polykey.v1.vaults.PackChunk);
-  rpc VaultsScan (polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.Vault);
-  rpc VaultsPermissionsCheck (polykey.v1.vaults.NodePermission) returns (polykey.v1.vaults.NodePermissionAllowed);
+  rpc VaultsScan (polykey.v1.utils.EmptyMessage) returns (stream polykey.v1.vaults.List);

   // Nodes
   rpc NodesClosestLocalNodesGet (polykey.v1.nodes.Node) returns (polykey.v1.nodes.NodeTable);
diff --git a/src/proto/schemas/polykey/v1/client_service.proto b/src/proto/schemas/polykey/v1/client_service.proto
index eb5aa9ced..57788c678 100644
--- a/src/proto/schemas/polykey/v1/client_service.proto
+++ b/src/proto/schemas/polykey/v1/client_service.proto
@@ -46,21 +46,21 @@ service ClientService {
   rpc VaultsDelete(polykey.v1.vaults.Vault) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsPull(polykey.v1.vaults.Pull) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsClone(polykey.v1.vaults.Clone) returns (polykey.v1.utils.StatusMessage);
-  rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List);
   rpc VaultsSecretsList(polykey.v1.vaults.Vault) returns (stream polykey.v1.secrets.Secret);
   rpc VaultsSecretsMkdir(polykey.v1.vaults.Mkdir) returns (polykey.v1.utils.StatusMessage);
-  rpc VaultsSecretsStat(polykey.v1.vaults.Vault) returns (polykey.v1.vaults.Stat);
   rpc VaultsSecretsDelete(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsSecretsEdit(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsSecretsGet(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Secret);
   rpc VaultsSecretsRename(polykey.v1.secrets.Rename) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsSecretsNew(polykey.v1.secrets.Secret) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsSecretsNewDir(polykey.v1.secrets.Directory) returns (polykey.v1.utils.StatusMessage);
-  rpc VaultsPermissionsSet(polykey.v1.vaults.PermSet) returns (polykey.v1.utils.StatusMessage);
-  rpc VaultsPermissionsUnset(polykey.v1.vaults.PermUnset) returns (polykey.v1.utils.StatusMessage);
-  rpc VaultsPermissions(polykey.v1.vaults.PermGet) returns (stream polykey.v1.vaults.Permission);
+  rpc VaultsSecretsStat(polykey.v1.secrets.Secret) returns (polykey.v1.secrets.Stat);
+  rpc VaultsPermissionGet(polykey.v1.vaults.Vault) returns (stream polykey.v1.vaults.Permissions);
+  rpc VaultsPermissionSet(polykey.v1.vaults.Permissions) returns (polykey.v1.utils.StatusMessage);
+  rpc VaultsPermissionUnset(polykey.v1.vaults.Permissions) returns (polykey.v1.utils.StatusMessage);
   rpc VaultsVersion(polykey.v1.vaults.Version) returns (polykey.v1.vaults.VersionResult);
   rpc VaultsLog(polykey.v1.vaults.Log) returns (stream polykey.v1.vaults.LogEntry);
+  rpc VaultsScan(polykey.v1.nodes.Node) returns (stream polykey.v1.vaults.List);

   // Identities
   rpc IdentitiesAuthenticate(polykey.v1.identities.Provider) returns (stream polykey.v1.identities.AuthenticationProcess);
diff --git a/src/proto/schemas/polykey/v1/permissions/permissions.proto b/src/proto/schemas/polykey/v1/permissions/permissions.proto
index 65441b294..8285f92f0 100644
--- a/src/proto/schemas/polykey/v1/permissions/permissions.proto
+++ b/src/proto/schemas/polykey/v1/permissions/permissions.proto
@@ -9,6 +9,11 @@ message Actions {
   repeated string action = 1;
 }

+message NodeActions {
+  polykey.v1.nodes.Node node = 1;
+  repeated string actions = 2;
+}
+
 message ActionSet {
   oneof node_or_provider {
     polykey.v1.nodes.Node node = 1;
diff --git a/src/proto/schemas/polykey/v1/secrets/secrets.proto b/src/proto/schemas/polykey/v1/secrets/secrets.proto
index a8f5cc207..b13943466 100644
--- a/src/proto/schemas/polykey/v1/secrets/secrets.proto
+++ b/src/proto/schemas/polykey/v1/secrets/secrets.proto
@@ -19,3 +19,7 @@ message Directory {
   polykey.v1.vaults.Vault vault = 1;
   string secret_directory = 2;
 }
+
+message Stat {
+  string json = 1;
+}
diff --git a/src/proto/schemas/polykey/v1/vaults/vaults.proto b/src/proto/schemas/polykey/v1/vaults/vaults.proto
index efd2d45b0..3ea6926af 100644
--- a/src/proto/schemas/polykey/v1/vaults/vaults.proto
+++ b/src/proto/schemas/polykey/v1/vaults/vaults.proto
@@ -1,6 +1,7 @@
 syntax = "proto3";

 import "polykey/v1/nodes/nodes.proto";
+import "google/protobuf/timestamp.proto";

 package polykey.v1.vaults;

@@ -16,6 +17,7 @@ message Vault {
 message List {
   string vault_name = 1;
   string vault_id = 2;
+  repeated string vault_permissions = 3;
 }

 message Rename {
@@ -32,6 +34,7 @@ message Mkdir {
 message Pull {
   Vault vault = 1;
   polykey.v1.nodes.Node node = 2;
+  Vault pull_vault = 3;
 }

 message Clone {
@@ -43,24 +46,10 @@
   string stats = 1;
 }

-message PermSet {
+message Permissions {
   Vault vault = 1;
   polykey.v1.nodes.Node node = 2;
-}
-
-message PermUnset {
-  Vault vault = 1;
-  polykey.v1.nodes.Node node = 2;
-}
-
-message PermGet {
-  Vault vault = 1;
-  polykey.v1.nodes.Node node = 2;
-}
-
-message Permission {
-  string node_id = 1;
-  string action = 2;
+  repeated string vault_permissions = 3;
 }

 message Version {
@@ -81,11 +70,15 @@ message Log {
 message LogEntry {
   string oid = 1;
   string committer = 2;
-  uint64 time_stamp = 4;
+  google.protobuf.Timestamp time_stamp = 4;
   string message = 3;
 }

 // Agent specific.
+message InfoRequest {
+  Vault vault = 1;
+  string action = 3;
+}

 message PackChunk {
   bytes chunk = 1;
diff --git a/src/sigchain/Sigchain.ts b/src/sigchain/Sigchain.ts
index 57f85ca06..fd4beaa35 100644
--- a/src/sigchain/Sigchain.ts
+++ b/src/sigchain/Sigchain.ts
@@ -431,7 +431,7 @@ class Sigchain {
     const claimStream = this.sigchainClaimsDb.createKeyStream();
     let seq = 1;
     for await (const o of claimStream) {
-      map[seq] = IdInternal.fromBuffer(o);
+      map[seq] = IdInternal.fromBuffer(o as Buffer);
       seq++;
     }
     return map;
@@ -439,7 +439,7 @@ class Sigchain {

   @ready(new sigchainErrors.ErrorSigchainNotRunning())
   public async clearDB() {
-    this.sigchainDb.clear();
+    await this.sigchainDb.clear();

     await this._transaction(async () => {
       await this.db.put(
@@ -458,7 +458,7 @@ class Sigchain {
       reverse: true,
     });
     for await (const o of keyStream) {
-      latestId = IdInternal.fromBuffer(o);
+      latestId = IdInternal.fromBuffer(o as Buffer);
     }
     return latestId;
   });
diff --git a/src/utils/sysexits.ts b/src/utils/sysexits.ts
index d48e6dacf..935c1810e 100644
--- a/src/utils/sysexits.ts
+++ b/src/utils/sysexits.ts
@@ -2,20 +2,84 @@ const sysexits = Object.freeze({
   OK: 0,
   GENERAL: 1,
   // Sysexit standard starts at 64 to avoid conflicts
+  /**
+   * The command was used incorrectly, e.g., with the wrong number of arguments,
+   * a bad flag, a bad syntax in a parameter, or whatever.
+   */
   USAGE: 64,
+  /**
+   * The input data was incorrect in some way. This should only be used for
+   * user's data and not system files.
+   */
   DATAERR: 65,
+  /**
+   * An input file (not a system file) did not exist or was not readable.
+   * This could also include errors like "No message" to a mailer
+   * (if it cared to catch it).
+   */
   NOINPUT: 66,
+  /**
+   * The user specified did not exist. This might be used for mail addresses
+   * or remote logins.
+   */
   NOUSER: 67,
+  /**
+   * The host specified did not exist. This is used in mail addresses or
+   * network requests.
+   */
   NOHOST: 68,
+  /**
+   * A service is unavailable. This can occur if a support program or file
+   * does not exist. This can also be used as a catchall message when
+   * something you wanted to do does not work, but you do not know why.
+   */
   UNAVAILABLE: 69,
+  /**
+   * An internal software error has been detected. This should be limited to
+   * non-operating system related errors as far as possible.
+   */
   SOFTWARE: 70,
+  /**
+   * An operating system error has been detected. This is intended to be used
+   * for such things as "cannot fork", "cannot create pipe", or the like.
+   * It includes things like getuid returning a user that does not exist in
+   * the passwd file.
+   */
   OSERR: 71,
+  /**
+   * Some system file (e.g., /etc/passwd, /var/run/utx.active, etc.)
+   * does not exist, cannot be opened, or has some sort of error
+   * (e.g., syntax error).
+   */
   OSFILE: 72,
+  /**
+   * A (user specified) output file cannot be created.
+   */
   CANTCREAT: 73,
+  /**
+   * An error occurred while doing I/O on some file.
+   */
   IOERR: 74,
+  /**
+   * Temporary failure, indicating something that is not really an error.
+   * In sendmail, this means that a mailer (e.g.) could not create a connection,
+   * and the request should be reattempted later.
+   */
   TEMPFAIL: 75,
+  /**
+   * The remote system returned something that was "not possible" during a
+   * protocol exchange.
+   */
   PROTOCOL: 76,
+  /**
+   * You did not have sufficient permission to perform the operation. This is
+   * not intended for file system problems, which should use EX_NOINPUT or
+   * EX_CANTCREAT, but rather for higher level permissions.
+ */ NOPERM: 77, + /** + * Something was found in an unconfigured or misconfigured state. + */ CONFIG: 78, CANNOT_EXEC: 126, COMMAND_NOT_FOUND: 127, diff --git a/src/validation/utils.ts b/src/validation/utils.ts index 5c68e3d71..3ce13f258 100644 --- a/src/validation/utils.ts +++ b/src/validation/utils.ts @@ -9,7 +9,7 @@ import type { NodeId, SeedNodes } from '../nodes/types'; import type { ProviderId, IdentityId } from '../identities/types'; import type { GestaltAction, GestaltId } from '../gestalts/types'; -import type { VaultAction } from '../vaults/types'; +import type { VaultAction, VaultId } from '../vaults/types'; import type { Host, Hostname, Port } from '../network/types'; import type { ClaimId } from '../claims/types'; import * as validationErrors from './errors'; @@ -82,6 +82,16 @@ function parseClaimId(data: any): ClaimId { return data; } +function parseVaultId(data: any): VaultId { + data = vaultsUtils.decodeVaultId(data); + if (data == null) { + throw new validationErrors.ErrorParse( + 'Vault ID must be a multibase base58btc encoded string', + ); + } + return data; +} + function parseGestaltAction(data: any): GestaltAction { if (!gestaltsUtils.isGestaltAction(data)) { throw new validationErrors.ErrorParse( @@ -249,6 +259,7 @@ export { parseNodeId, parseGestaltId, parseClaimId, + parseVaultId, parseGestaltAction, parseVaultAction, parseProviderId, diff --git a/src/vaults/Vault.ts b/src/vaults/Vault.ts new file mode 100644 index 000000000..8c3981c6c --- /dev/null +++ b/src/vaults/Vault.ts @@ -0,0 +1,15 @@ +import type VaultInternal from './VaultInternal'; + +interface Vault { + vaultDataDir: VaultInternal['vaultDataDir']; + vaultGitDir: VaultInternal['vaultGitDir']; + vaultId: VaultInternal['vaultId']; + writeF: VaultInternal['writeF']; + writeG: VaultInternal['writeG']; + readF: VaultInternal['readF']; + readG: VaultInternal['readG']; + log: VaultInternal['log']; + version: VaultInternal['version']; +} + +export type { Vault }; diff --git a/src/vaults/VaultInternal.ts b/src/vaults/VaultInternal.ts index ff737fc11..63611b1a8 100644 --- a/src/vaults/VaultInternal.ts +++ b/src/vaults/VaultInternal.ts @@ -1,358 +1,1038 @@ +import type { ReadCommitResult } from 'isomorphic-git'; +import type { EncryptedFS } from 'encryptedfs'; +import type { DB, DBDomain, DBLevel } from '@matrixai/db'; import type { - VaultId, + CommitId, + CommitLog, FileSystemReadable, FileSystemWritable, - CommitLog, + VaultAction, + VaultId, + VaultIdEncoded, + VaultName, + VaultRef, } from './types'; -import type { MutexInterface } from 'async-mutex'; - -import type { EncryptedFS } from 'encryptedfs'; -import type { KeyManager } from '../keys'; +import type KeyManager from '../keys/KeyManager'; +import type { NodeId, NodeIdEncoded } from '../nodes/types'; +import type NodeConnectionManager from '../nodes/NodeConnectionManager'; +import type { ResourceAcquire } from '../utils/context'; +import type GRPCClientAgent from '../agent/GRPCClientAgent'; +import type { POJO } from '../types'; import path from 'path'; import git from 'isomorphic-git'; -import { Mutex } from 'async-mutex'; +import * as grpc from '@grpc/grpc-js'; import Logger from '@matrixai/logger'; -import { CreateDestroy, ready } from '@matrixai/async-init/dist/CreateDestroy'; -import * as vaultsUtils from './utils'; +import { + CreateDestroyStartStop, + ready, +} from '@matrixai/async-init/dist/CreateDestroyStartStop'; import * as vaultsErrors from './errors'; -import { makeVaultIdPretty } from './utils'; -import { utils as nodesUtils } from
'../nodes'; +import * as vaultsUtils from './utils'; +import { tagLast } from './types'; +import * as nodesUtils from '../nodes/utils'; +import * as validationUtils from '../validation/utils'; +import { withF, withG } from '../utils/context'; +import { RWLock } from '../utils/locks'; +import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; +import { never } from '../utils/utils'; -const lastTag = 'last'; +export type RemoteInfo = { + remoteNode: NodeIdEncoded; + remoteVault: VaultIdEncoded; +}; -interface VaultInternal extends CreateDestroy {} -@CreateDestroy() +interface VaultInternal extends CreateDestroyStartStop {} +@CreateDestroyStartStop( + new vaultsErrors.ErrorVaultRunning(), + new vaultsErrors.ErrorVaultDestroyed(), +) class VaultInternal { - public readonly baseDir: string; - public readonly gitDir: string; - public readonly vaultId: VaultId; - - protected efsRoot: EncryptedFS; - protected efsVault: EncryptedFS; - protected logger: Logger; - protected lock: MutexInterface; - protected workingDir: string; - protected keyManager: KeyManager; - - public static async create({ + public static async createVaultInternal({ vaultId, + vaultName, + db, + vaultsDb, + vaultsDbDomain, keyManager, efs, logger = new Logger(this.name), fresh = false, }: { vaultId: VaultId; + vaultName?: VaultName; + db: DB; + vaultsDb: DBLevel; + vaultsDbDomain: DBDomain; keyManager: KeyManager; efs: EncryptedFS; logger?: Logger; fresh?: boolean; - }) { - logger.info(`Creating ${this.name}`); - if (fresh) { - try { - await efs.rmdir(makeVaultIdPretty(vaultId), { recursive: true }); - } catch (err) { - if (err.code !== 'ENOENT') { - throw err; - } - } - await efs.mkdir(path.join(makeVaultIdPretty(vaultId), 'contents'), { - recursive: true, - }); - const efsVault = await efs.chroot( - path.join(makeVaultIdPretty(vaultId), 'contents'), - ); - await efsVault.start(); - // Creating a new vault. 
- await git.init({ - fs: efs, - dir: path.join(makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(makeVaultIdPretty(vaultId), '.git'), - }); - const workingDir = await git.commit({ - fs: efs, - dir: path.join(makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(makeVaultIdPretty(vaultId), '.git'), - author: { - name: makeVaultIdPretty(vaultId), + }): Promise { + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + logger.info(`Creating ${this.name} - ${vaultIdEncoded}`); + const vault = new VaultInternal({ + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + efs, + logger, + }); + await vault.start({ fresh, vaultName }); + logger.info(`Created ${this.name} - ${vaultIdEncoded}`); + return vault; + } + + public static async cloneVaultInternal({ + targetNodeId, + targetVaultNameOrId, + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + nodeConnectionManager, + efs, + logger = new Logger(this.name), + }: { + targetNodeId: NodeId; + targetVaultNameOrId: VaultId | VaultName; + vaultId: VaultId; + db: DB; + vaultsDb: DBLevel; + vaultsDbDomain: DBDomain; + efs: EncryptedFS; + keyManager: KeyManager; + nodeConnectionManager: NodeConnectionManager; + logger?: Logger; + }): Promise { + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + logger.info(`Cloning ${this.name} - ${vaultIdEncoded}`); + const vault = new VaultInternal({ + vaultId, + db, + vaultsDb, + vaultsDbDomain, + keyManager, + efs, + logger, + }); + // This error flag will contain the error returned by the cloning grpc stream + let error; + // Make the directory where the .git files will be auto generated and + // where the contents will be cloned to ('contents' file) + await efs.mkdir(vault.vaultDataDir, { recursive: true }); + let vaultName: VaultName; + let remoteVaultId: VaultId; + let remote: RemoteInfo; + try { + [vaultName, remoteVaultId] = await nodeConnectionManager.withConnF( + targetNodeId, + async (connection) => { + const client = connection.getClient(); + const [request, vaultName, remoteVaultId] = await vault.request( + client, + targetVaultNameOrId, + 'clone', + ); + await git.clone({ + fs: efs, + http: { request }, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + url: 'http://', + singleBranch: true, + }); + return [vaultName, remoteVaultId]; }, - message: 'Initial Commit', - }); - await efs.writeFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'packed-refs'), - '# pack-refs with: peeled fully-peeled sorted', - ); - await efs.writeFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'), - workingDir, - ); - const vault = new VaultInternal({ - vaultId, - keyManager, - efs, - efsVault, - workingDir, - logger, - }); - logger.info(`Initialising vault at '${makeVaultIdPretty(vaultId)}'`); - return vault; - } else { - // Loading an existing vault. 
- const efsVault = await efs.chroot( - path.join(makeVaultIdPretty(vaultId), 'contents'), ); - await efsVault.start(); - const workingDir = (await efs.readFile( - path.join(makeVaultIdPretty(vaultId), '.git', 'workingDir'), - { - encoding: 'utf8', - }, - )) as string; - const vault = new VaultInternal({ - vaultId, - keyManager, - efs, - efsVault, - workingDir, - logger, - }); - logger.info(`Created ${this.name} at '${makeVaultIdPretty(vaultId)}'`); - return vault; + remote = { + remoteNode: nodesUtils.encodeNodeId(targetNodeId), + remoteVault: vaultsUtils.encodeVaultId(remoteVaultId), + }; + } catch (e) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (e instanceof git.Errors.SmartHttpError && error) { + throw error; + } + throw e; } + + await vault.start({ vaultName }); + // Setting the remote in the metadata + await vault.db.put( + vault.vaultMetadataDbDomain, + VaultInternal.remoteKey, + remote, + ); + logger.info(`Cloned ${this.name} - ${vaultIdEncoded}`); + return vault; } + static dirtyKey = 'dirty'; + static remoteKey = 'remote'; + static nameKey = 'key'; + + public readonly vaultId: VaultId; + public readonly vaultIdEncoded: string; + public readonly vaultDataDir: string; + public readonly vaultGitDir: string; + + protected logger: Logger; + protected db: DB; + protected vaultsDbDomain: DBDomain; + protected vaultsDb: DBLevel; + protected vaultMetadataDbDomain: DBDomain; + protected vaultMetadataDb: DBLevel; + protected keyManager: KeyManager; + protected vaultsNamesDomain: DBDomain; + protected efs: EncryptedFS; + protected efsVault: EncryptedFS; + protected lock: RWLock = new RWLock(); + + public readLock: ResourceAcquire = async () => { + const release = await this.lock.acquireRead(); + return [async () => release()]; + }; + + public writeLock: ResourceAcquire = async () => { + const release = await this.lock.acquireWrite(); + return [async () => release()]; + }; + constructor({ vaultId, + db, + vaultsDbDomain, + vaultsDb, keyManager, efs, - efsVault, - workingDir, logger, }: { vaultId: VaultId; + db: DB; + vaultsDbDomain: DBDomain; + vaultsDb: DBLevel; keyManager: KeyManager; efs: EncryptedFS; - efsVault: EncryptedFS; - workingDir: string; logger: Logger; }) { - this.baseDir = path.join(makeVaultIdPretty(vaultId), 'contents'); - this.gitDir = path.join(makeVaultIdPretty(vaultId), '.git'); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + this.logger = logger; this.vaultId = vaultId; + this.vaultIdEncoded = vaultIdEncoded; + this.vaultDataDir = path.join(vaultIdEncoded, 'data'); + this.vaultGitDir = path.join(vaultIdEncoded, '.git'); + this.db = db; + this.vaultsDbDomain = vaultsDbDomain; + this.vaultsDb = vaultsDb; this.keyManager = keyManager; - this.efsRoot = efs; - this.efsVault = efsVault; - this.workingDir = workingDir; - this.logger = logger; - this.lock = new Mutex(); + this.efs = efs; + } + + /** + * + * @param fresh Clears all state before starting + * @param vaultName Name of the vault, Only used when creating a new vault + */ + public async start({ + fresh = false, + vaultName, + }: { + fresh?: boolean; + vaultName?: VaultName; + } = {}): Promise { + this.logger.info( + `Starting ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + this.vaultMetadataDbDomain = [...this.vaultsDbDomain, this.vaultIdEncoded]; + this.vaultsNamesDomain = [...this.vaultsDbDomain, 'names']; + this.vaultMetadataDb = await this.db.level( + this.vaultIdEncoded, + this.vaultsDb, + ); 
+ // Let's backup any metadata + + if (fresh) { + await this.vaultMetadataDb.clear(); + try { + await this.efs.rmdir(this.vaultIdEncoded, { + recursive: true, + }); + } catch (e) { + if (e.code !== 'ENOENT') { + throw e; + } + } + } + await this.mkdirExists(this.vaultIdEncoded); + await this.mkdirExists(this.vaultDataDir); + await this.mkdirExists(this.vaultGitDir); + await this.setupMeta({ vaultName }); + await this.setupGit(); + this.efsVault = await this.efs.chroot(this.vaultDataDir); + this.logger.info( + `Started ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + } + + private async mkdirExists(directory: string) { + try { + await this.efs.mkdir(directory, { recursive: true }); + } catch (e) { + if (e.code !== 'EEXIST') { + throw e; + } + } + } + + public async stop(): Promise { + this.logger.info( + `Stopping ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + this.logger.info( + `Stopped ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); } public async destroy(): Promise { this.logger.info( - `Destroying ${this.constructor.name} at '${makeVaultIdPretty( + `Destroying ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + const vaultDb = await this.db.level(this.vaultIdEncoded, this.vaultsDb); + await vaultDb.clear(); + try { + await this.efs.rmdir(this.vaultIdEncoded, { + recursive: true, + }); + } catch (e) { + if (e.code !== 'ENOENT') throw e; + // Otherwise ignore + } + this.logger.info( + `Destroyed ${this.constructor.name} - ${this.vaultIdEncoded}`, + ); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async log( + ref: string | VaultRef = 'HEAD', + limit?: number, + ): Promise> { + if (!vaultsUtils.validateRef(ref)) { + throw new vaultsErrors.ErrorVaultReferenceInvalid(); + } + if (ref === vaultsUtils.tagLast) { + ref = vaultsUtils.canonicalBranch; + } + const commits = await git.log({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref, + depth: limit, + }); + return commits.map(({ oid, commit }: ReadCommitResult) => { + return { + commitId: oid as CommitId, + parent: commit.parent as Array, + author: { + name: commit.author.name, + timestamp: new Date(commit.author.timestamp * 1000), + }, + committer: { + name: commit.committer.name, + timestamp: new Date(commit.committer.timestamp * 1000), + }, + message: commit.message, + }; + }); + } + + /** + * Checks out the vault repository to specific commit ID or special tags + * This changes the working directory and updates the HEAD reference + */ + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async version(ref: string | VaultRef = tagLast): Promise { + if (!vaultsUtils.validateRef(ref)) { + throw new vaultsErrors.ErrorVaultReferenceInvalid(); + } + if (ref === vaultsUtils.tagLast) { + ref = vaultsUtils.canonicalBranch; + } + try { + await git.checkout({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref, + force: true, + }); + } catch (e) { + if ( + e instanceof git.Errors.NotFoundError || + e instanceof git.Errors.CommitNotFetchedError + ) { + throw new vaultsErrors.ErrorVaultReferenceMissing(); + } + throw e; + } + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async readF(f: (fs: FileSystemReadable) => Promise): Promise { + return withF([this.readLock], async () => { + return await f(this.efsVault); + }); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public readG( + g: (fs: FileSystemReadable) => AsyncGenerator, + ): AsyncGenerator { + const efsVault = this.efsVault; + return withG([this.readLock], 
async function* () { + return yield* g(efsVault); + }); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async writeF( + f: (fs: FileSystemWritable) => Promise, + ): Promise { + // Check the persisted remote info: if a remote is defined, this vault is a + // "mirrored" vault that tracks another node's vault and cannot be mutated locally + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + )) != null + ) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultRemoteDefined(); + } + return withF([this.writeLock], async () => { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + true, + ); + try { + await f(this.efsVault); + // After doing mutation we need to commit the new history + await this.createCommit(); + } catch (e) { + // Error implies dirty state + await this.cleanWorkingDirectory(); + throw e; + } + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + false, + ); + }); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public writeG( + g: (fs: FileSystemWritable) => AsyncGenerator, + ): AsyncGenerator { + const efsVault = this.efsVault; + const db = this.db; + const vaultDbDomain = this.vaultMetadataDbDomain; + const createCommit = () => this.createCommit(); + const cleanWorkingDirectory = () => this.cleanWorkingDirectory(); + return withG([this.writeLock], async function* () { + if ((await db.get(vaultDbDomain, VaultInternal.remoteKey)) != null) { + // Mirrored vaults are immutable + throw new vaultsErrors.ErrorVaultRemoteDefined(); + } + await db.put(vaultDbDomain, VaultInternal.dirtyKey, true); + + let result; + // Run the provided generator against the vault contents + try { + result = yield* g(efsVault); + // Once the generator finishes, commit the mutation to the vault history + await createCommit(); + } catch (e) { + // Error implies dirty state + await cleanWorkingDirectory(); + throw e; + } + await db.put(vaultDbDomain, VaultInternal.dirtyKey, false); + return result; + }); + } + + @ready(new vaultsErrors.ErrorVaultNotRunning()) + public async pullVault({ + nodeConnectionManager, + pullNodeId, + pullVaultNameOrId, + }: { + nodeConnectionManager: NodeConnectionManager; + pullNodeId?: NodeId; + pullVaultNameOrId?: VaultId | VaultName; + }) { + // This error flag will contain the error returned by the pulling grpc stream + let error; + // Keeps track of whether the metadata needs changing to avoid unnecessary db ops + // 0 = no change, 1 = change with vault Id, 2 = change with vault name + let metaChange = 0; + const remoteInfo = await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + ); + if (remoteInfo == null) throw new vaultsErrors.ErrorVaultRemoteUndefined(); + + if (pullNodeId == null) { + pullNodeId = nodesUtils.decodeNodeId(remoteInfo.remoteNode)!; + } else { + metaChange = 1; + remoteInfo.remoteNode = nodesUtils.encodeNodeId(pullNodeId); + } + if (pullVaultNameOrId == null) { + pullVaultNameOrId = vaultsUtils.decodeVaultId(remoteInfo.remoteVault!)!; + } else { + metaChange = 1; + if (typeof pullVaultNameOrId === 'string') { + metaChange = 2; + } else { + remoteInfo.remoteVault = vaultsUtils.encodeVaultId(pullVaultNameOrId); + } + } + this.logger.info( + `Pulling Vault ${vaultsUtils.encodeVaultId( this.vaultId, - )}'`, + )} from Node ${pullNodeId}`, ); - const release = await this.lock.acquire(); + let
remoteVaultId: VaultId; try { - await this.efsRoot.writeFile( - path.join(makeVaultIdPretty(this.vaultId), '.git', 'workingDirectory'), - this.workingDir, + remoteVaultId = await nodeConnectionManager.withConnF( + pullNodeId!, + async (connection) => { + const client = connection.getClient(); + const [request, , remoteVaultId] = await this.request( + client, + pullVaultNameOrId!, + 'pull', + ); + await withF([this.writeLock], async () => { + await git.pull({ + fs: this.efs, + http: { request }, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + url: `http://`, + ref: 'HEAD', + singleBranch: true, + author: { + name: nodesUtils.encodeNodeId(pullNodeId!), + }, + }); + }); + return remoteVaultId; + }, + ); + } catch (err) { + // If the error flag set and we have the generalised SmartHttpError from + // isomorphic git then we need to throw the polykey error + if (err instanceof git.Errors.SmartHttpError && error) { + throw error; + } else if (err instanceof git.Errors.MergeNotSupportedError) { + throw new vaultsErrors.ErrorVaultsMergeConflict(); + } + throw err; + } + if (metaChange !== 0) { + if (metaChange === 2) { + remoteInfo.remoteVault = vaultsUtils.encodeVaultId(remoteVaultId); + } + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.remoteKey, + remoteInfo, ); - } finally { - release(); } this.logger.info( - `Destroyed ${this.constructor.name} at '${makeVaultIdPretty( + `Pulled Vault ${vaultsUtils.encodeVaultId( this.vaultId, - )}'`, + )} from Node ${pullNodeId}`, ); } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async commit( - f: (fs: FileSystemWritable) => Promise, - ): Promise { - const release = await this.lock.acquire(); - const message: string[] = []; + /** + * Setup the vault metadata + */ + protected async setupMeta({ + vaultName, + }: { + vaultName?: VaultName; + }): Promise { + // Setup the vault metadata + // and you need to make certain preparations + // the meta gets created first + // if the SoT is the database + // are we supposed to check this? 
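+ // Note that the defaults below are only written when the keys are missing, + // so restarting an existing vault preserves its metadata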
+ + // If this is not existing + // setup default vaults db + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + )) == null + ) { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + false, + ); + } + + // Set up vault Name + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.nameKey, + )) == null && + vaultName != null + ) { + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.nameKey, + vaultName, + ); + } + + // Remote: [NodeId, VaultId] | undefined + // dirty: boolean + // name: string | undefined + } + + protected async setupGit(): Promise { + // Initialization is idempotent + // It works even with an existing git repository + await git.init({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + defaultBranch: vaultsUtils.canonicalBranch, + }); + let commitIdLatest: CommitId | undefined; try { - await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: this.workingDir, + const commits = await git.log({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranch, + depth: 1, }); - await f(this.efsVault); - const statusMatrix = await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, + commitIdLatest = commits[0]?.oid as CommitId | undefined; + } catch (e) { + // Initialized repositories do not have any commits + // It complains that `refs/heads/master` file does not exist + if (!(e instanceof git.Errors.NotFoundError)) { + throw e; + } + } + if (commitIdLatest == null) { + // All vault repositories start with an initial commit + commitIdLatest = (await git.commit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + author: vaultsUtils.commitAuthor(this.keyManager.getNodeId()), + message: 'Initial Commit', + ref: 'HEAD', + })) as CommitId; + // Update master ref + await git.writeRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + value: commitIdLatest, + force: true, }); - for (let file of statusMatrix) { - if (file[1] === file[2] && file[2] === file[3]) { - await git.resetIndex({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - file = ( - await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepaths: [file[0]], - }) - ).pop()!; - if (file[1] === file[2] && file[2] === file[3]) continue; - } - if (file[2] !== file[3]) { - let status: 'added' | 'modified' | 'deleted'; - if (file[2] === 0) { - status = 'deleted'; - await git.remove({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - } else { - await git.add({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], - }); - if (file[1] === 1) { - status = 'modified'; - } else { - status = 'added'; - } + } else { + // Checking for dirty + if ( + (await this.db.get( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + )) === true + ) { + // Force checkout out to the latest commit + // This ensures that any uncommitted state is dropped + await this.cleanWorkingDirectory(); + // Do global GC operation + await this.garbageCollectGitObjects(); + + // Setting dirty back to false + await this.db.put( + this.vaultMetadataDbDomain, + VaultInternal.dirtyKey, + false, + ); + } + } + return commitIdLatest; + } + + protected async request( + client: GRPCClientAgent, + vaultNameOrId: VaultId 
| VaultName, + vaultAction: VaultAction, + ): Promise { + const requestMessage = new vaultsPB.InfoRequest(); + const vaultMessage = new vaultsPB.Vault(); + requestMessage.setAction(vaultAction); + if (typeof vaultNameOrId === 'string') { + vaultMessage.setNameOrId(vaultNameOrId); + } else { + // To have consistency between GET and POST, send the user + // readable form of the vault Id + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultNameOrId)); + } + requestMessage.setVault(vaultMessage); + const response = client.vaultsGitInfoGet(requestMessage); + let vaultName, remoteVaultId; + response.stream.on('metadata', async (meta) => { + // Receive the Id of the remote vault + vaultName = meta.get('vaultName').pop(); + if (vaultName) vaultName = vaultName.toString(); + const vId = meta.get('vaultId').pop(); + if (vId) remoteVaultId = validationUtils.parseVaultId(vId.toString()); + }); + // Collect the response buffers from the GET request + const infoResponse: Uint8Array[] = []; + for await (const resp of response) { + infoResponse.push(resp.getChunk_asU8()); + } + const metadata = new grpc.Metadata(); + metadata.set('vaultAction', vaultAction); + if (typeof vaultNameOrId === 'string') { + metadata.set('vaultNameOrId', vaultNameOrId); + } else { + // Metadata only accepts the user readable form of the vault Id + // as the string form has illegal characters + metadata.set('vaultNameOrId', vaultsUtils.encodeVaultId(vaultNameOrId)); + } + return [ + async function ({ + url, + method = 'GET', + headers = {}, + body = [Buffer.from('')], + }: { + url: string; + method: string; + headers: POJO; + body: Buffer[]; + }) { + if (method === 'GET') { + // Send back the GET request info response + return { + url: url, + method: method, + body: infoResponse, + headers: headers, + statusCode: 200, + statusMessage: 'OK', + }; + } else if (method === 'POST') { + const responseBuffers: Array = []; + const stream = client.vaultsGitPackGet(metadata); + const chunk = new vaultsPB.PackChunk(); + // Body is usually an async generator but in the cases we are using, + // only the first value is used + chunk.setChunk(body[0]); + // Tell the server what commit we need + await stream.write(chunk); + let packResponse = (await stream.read()).value; + while (packResponse != null) { + responseBuffers.push(packResponse.getChunk_asU8()); + packResponse = (await stream.read()).value; } - message.push(file[0] + ' ' + status); + return { + url: url, + method: method, + body: responseBuffers, + headers: headers, + statusCode: 200, + statusMessage: 'OK', + }; + } else { + never(); } - } - if (message.length !== 0) { - this.workingDir = await git.commit({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - author: { - name: nodesUtils.encodeNodeId(this.keyManager.getNodeId()), - }, - message: message.toString(), + }, + vaultName, + remoteVaultId, + ]; + } + + /** + * Creates a commit while moving the canonicalBranch reference + */ + protected async createCommit() { + // Checking if commit is appending or branching + const headRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: 'HEAD', + }); + const masterRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + }); + const nodeIdEncoded = nodesUtils.encodeNodeId(this.keyManager.getNodeId()); + // Staging changes and creating commit message + const message: string[] = []; + // Get the status of each file in the working directory + 
// https://isomorphic-git.org/docs/en/statusMatrix + const statusMatrix = await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + }); + for (let [ + filePath, + HEADStatus, + workingDirStatus, + stageStatus, + ] of statusMatrix) { + // Reset the index of files that are marked as 'unmodified' + // The working directory, HEAD and staging area are all the same + // https://github.com/MatrixAI/js-polykey/issues/260 + if (HEADStatus === workingDirStatus && workingDirStatus === stageStatus) { + await git.resetIndex({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, }); + // Check if the file is still 'unmodified' and leave + // it out of the commit if it is + [filePath, HEADStatus, workingDirStatus, stageStatus] = ( + await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepaths: [filePath], + }) + ).pop()!; + if ( + HEADStatus === workingDirStatus && + workingDirStatus === stageStatus + ) { + continue; + } } - } finally { - const statusMatrix = await git.statusMatrix({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - }); - for await (const file of statusMatrix) { - if (file[2] === 0) { + // We want files in the working directory that are both different + // from the head commit and the staged changes + // If working directory and stage status are not equal then filepath has un-staged + // changes in the working directory relative to both the HEAD and staging + // area that need to be added + // https://isomorphic-git.org/docs/en/statusMatrix + if (workingDirStatus !== stageStatus) { + let status: 'added' | 'modified' | 'deleted'; + // If the working directory status is 0 then the file has + // been deleted + if (workingDirStatus === 0) { + status = 'deleted'; await git.remove({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, }); } else { await git.add({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - filepath: file[0], + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, }); + // Check whether the file already exists inside the HEAD + // commit and if it does then it is unmodified + if (HEADStatus === 1) { + status = 'modified'; + } else { + status = 'added'; + } } + message.push(`${filePath} ${status}`); } - await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: this.workingDir, - }); - release(); } - } - - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async access( - f: (fs: FileSystemReadable) => Promise, - ): Promise { - const release = await this.lock.acquire(); - try { - return await f(this.efsVault); - } finally { - release(); + // Skip commit if no changes were made + if (message.length !== 0) { + // Creating commit + const commitRef = await git.commit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + author: { + name: nodeIdEncoded, + }, + message: message.toString(), + ref: 'HEAD', + }); + // Updating branch pointer + await git.writeRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + value: commitRef, + force: true, + }); + // We clean old history if a commit was made on previous version + if (headRef !== masterRef) { + // Delete old commits following chain from masterRef -> headRef + let currentRef = masterRef; + 
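+ // This is the branching case detected above (headRef !== masterRef): the + // superseded commits are unreachable from the new master and can be removed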
while (currentRef !== headRef) { + // Read commit info + const commit = await git.readCommit({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + oid: currentRef, + }); + // Delete commit + await vaultsUtils.deleteObject( + this.efs, + this.vaultGitDir, + commit.oid, + ); + // Getting new ref + const nextRef = commit.commit.parent.pop(); + if (nextRef == null) break; + currentRef = nextRef; + } + } } } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async log(depth?: number, commit?: string): Promise> { - const commit_ = commit?.toLowerCase() === lastTag ? 'HEAD' : commit; - const log = await git.log({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - depth: depth, - ref: commit_, + /** + * Cleans the git working directory by checking out the canonicalBranch + */ + protected async cleanWorkingDirectory() { + // Check the status matrix for any un-staged file changes + // which are considered dirty commits + const statusMatrix = await git.statusMatrix({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, }); - return log.map((readCommit) => { - return { - oid: readCommit.oid, - committer: readCommit.commit.committer.name, - timeStamp: readCommit.commit.committer.timestamp * 1000, // Needs to be in milliseconds for Date. - message: readCommit.commit.message, - }; + for await (const [filePath, , workingDirStatus] of statusMatrix) { + // For all files stage all changes, this is needed + // so that we can check out all untracked files as well + if (workingDirStatus === 0) { + await git.remove({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + } else { + await git.add({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + filepath: filePath, + }); + } + } + // Remove the staged dirty commits by checking out + await git.checkout({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranchRef, + force: true, }); } - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async version(commit: string): Promise { - // Checking for special tags. - const commit_ = commit.toLowerCase() === lastTag ? 'HEAD' : commit; - // TODO: add a tag for the start of the histoy so we can use that as the operator. 
- - try { - await git.checkout({ - fs: this.efsRoot, - dir: this.baseDir, - gitdir: this.gitDir, - ref: commit_, - noUpdateHead: true, + /** + * Deletes any git objects that can't be reached from the canonicalBranch + */ + protected async garbageCollectGitObjects() { + // To garbage collect the git objects, + // we need to walk all objects connected to the master branch + // and delete the object files that are not touched by this walk + const touchedOids = {}; + const masterRef = await git.resolveRef({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + ref: vaultsUtils.canonicalBranch, + }); + const queuedOids: string[] = [masterRef]; + while (queuedOids.length > 0) { + const currentOid = queuedOids.shift()!; + if (touchedOids[currentOid] === null) continue; + const result = await git.readObject({ + fs: this.efs, + dir: this.vaultDataDir, + gitdir: this.vaultGitDir, + oid: currentOid, }); - this.workingDir = commit_; - } catch (err) { - if (err.code === 'NotFoundError') { - throw new vaultsErrors.ErrorVaultCommitUndefined(); + touchedOids[result.oid] = result.type; + if (result.format !== 'parsed') continue; + switch (result.type) { + case 'commit': + { + const object = result.object; + queuedOids.push(...object.parent); + queuedOids.push(object.tree); + } + break; + case 'tree': + { + const object = result.object; + for (const item of object) { + touchedOids[item.oid] = item.type; + } + } + break; + default: { + never(); + } + } + } + // Walking all objects + const objectPath = path.join(this.vaultGitDir, 'objects'); + const buckets = (await this.efs.readdir(objectPath)).filter((item) => { + return item !== 'info' && item !== 'pack'; + }); + for (const bucket of buckets) { + const bucketPath = path.join(objectPath, bucket.toString()); + const oids = await this.efs.readdir(bucketPath); + for (const shortOid of oids) { + const oidPath = path.join(bucketPath, shortOid.toString()); + const oid = bucket.toString() + shortOid.toString(); + if (touchedOids[oid] === undefined) { + // Removing unused objects + await this.efs.unlink(oidPath); + } } - throw err; } } - - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async readWorkingDirectory(): Promise { - const workingDir = ( - await git.log({ - fs: this.efsRoot, - dir: path.join(vaultsUtils.makeVaultIdPretty(this.vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(this.vaultId), '.git'), - depth: 1, - }) - ).pop()!; - await this.efsRoot.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(this.vaultId), - '.git', - 'workingDir', - ), - workingDir.oid, - ); - } - - @ready(new vaultsErrors.ErrorVaultDestroyed()) - public async applySchema() {} } export default VaultInternal; diff --git a/src/vaults/VaultManager.ts b/src/vaults/VaultManager.ts index 613603117..ed65478f5 100644 --- a/src/vaults/VaultManager.ts +++ b/src/vaults/VaultManager.ts @@ -1,46 +1,63 @@ -import type { DB, DBLevel } from '@matrixai/db'; +import type { DB, DBDomain, DBLevel } from '@matrixai/db'; import type { VaultId, VaultName, - VaultMap, - VaultKey, - VaultList, - Vault, + VaultActions, + VaultIdString, + VaultIdEncoded, } from './types'; +import type { Vault } from './Vault'; import type { FileSystem } from '../types'; -import type { NodeId } from '../nodes/types'; import type { PolykeyWorkerManagerInterface } from '../workers/types'; +import type { NodeId } from '../nodes/types'; +import type KeyManager from '../keys/KeyManager'; +import type NodeConnectionManager from '../nodes/NodeConnectionManager'; +import type 
GestaltGraph from '../gestalts/GestaltGraph'; +import type NotificationsManager from '../notifications/NotificationsManager'; +import type ACL from '../acl/ACL'; -import type { MutexInterface } from 'async-mutex'; -import type { POJO } from 'encryptedfs'; -import type { KeyManager } from '../keys'; -import type { GestaltGraph } from '../gestalts'; -import type { ACL } from '../acl'; -import type { NotificationsManager } from '../notifications'; -import type { NodeConnection, NodeConnectionManager } from '../nodes'; -import type { GRPCClientAgent } from '../agent'; +import type { RemoteInfo } from './VaultInternal'; +import type { ResourceAcquire } from '../utils/context'; +import type { VaultAction } from './types'; import path from 'path'; -import Logger from '@matrixai/logger'; -import { Mutex } from 'async-mutex'; -import git from 'isomorphic-git'; import { PassThrough } from 'readable-stream'; -import * as grpc from '@grpc/grpc-js'; -import { EncryptedFS } from 'encryptedfs'; +import { EncryptedFS, errors as encryptedFsErrors } from 'encryptedfs'; +import Logger from '@matrixai/logger'; import { CreateDestroyStartStop, ready, } from '@matrixai/async-init/dist/CreateDestroyStartStop'; -import { utils as idUtils } from '@matrixai/id'; -import * as vaultsUtils from './utils'; -import * as vaultsErrors from './errors'; +import { IdInternal } from '@matrixai/id'; import VaultInternal from './VaultInternal'; -import { makeVaultId } from './utils'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as utils from '../utils'; +import * as vaultsUtils from '../vaults/utils'; +import * as vaultsErrors from '../vaults/errors'; import * as gitUtils from '../git/utils'; import * as gitErrors from '../git/errors'; -import * as gestaltErrors from '../gestalts/errors'; -import { utils as nodesUtils } from '../nodes'; +import * as nodesUtils from '../nodes/utils'; +import * as keysUtils from '../keys/utils'; +import config from '../config'; +import { mkdirExists } from '../utils/utils'; +import { RWLock } from '../utils/locks'; +import { withF, withG } from '../utils/context'; +import * as utilsPB from '../proto/js/polykey/v1/utils/utils_pb'; + +/** + * Object map pattern for each vault + */ +type VaultMap = Map< + VaultIdString, + { + vault?: VaultInternal; + lock: RWLock; + } +>; + +type VaultList = Map; +type VaultMetadata = { + dirty: boolean; + vaultName: VaultName; + remoteInfo?: RemoteInfo; +}; interface VaultManager extends CreateDestroyStartStop {} @CreateDestroyStartStop( @@ -48,150 +65,187 @@ interface VaultManager extends CreateDestroyStartStop {} new vaultsErrors.ErrorVaultManagerDestroyed(), ) class VaultManager { - public readonly vaultsPath: string; - - protected fs: FileSystem; - protected nodeConnectionManager: NodeConnectionManager; - protected gestaltGraph: GestaltGraph; - protected acl: ACL; - protected notificationsManager: NotificationsManager; - protected efs: EncryptedFS; - protected db: DB; - protected logger: Logger; - protected vaultsKey: VaultKey; - protected vaultsMap: VaultMap; - protected vaultsDbDomain: string; - protected vaultsNamesDbDomain: Array; - protected vaultsDb: DBLevel; - protected vaultsNamesDb: DBLevel; - protected keyManager: KeyManager; - static async createVaultManager({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + keyBits = 256, fs = require('fs'), logger = new Logger(this.name), fresh = false, }: { vaultsPath: string; + db: DB; + acl: ACL; 
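+ // Note: ACL, GestaltGraph and NotificationsManager are dependencies used by + // the vault sharing methods (shareVault/unshareVault) further below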
keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; gestaltGraph: GestaltGraph; - acl: ACL; - db: DB; - vaultsKey: VaultKey; + notificationsManager: NotificationsManager; + keyBits?: 128 | 192 | 256; fs?: FileSystem; logger?: Logger; fresh?: boolean; }) { logger.info(`Creating ${this.name}`); + logger.info(`Setting vaults path to ${vaultsPath}`); const vaultManager = new VaultManager({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + keyBits, fs, logger, }); - logger.info(`Created ${this.name}`); await vaultManager.start({ fresh }); + logger.info(`Created ${this.name}`); return vaultManager; } + public readonly vaultsPath: string; + public readonly efsPath: string; + public readonly keyBits: 128 | 192 | 256; + + protected fs: FileSystem; + protected logger: Logger; + protected db: DB; + protected acl: ACL; + protected keyManager: KeyManager; + protected nodeConnectionManager: NodeConnectionManager; + protected gestaltGraph: GestaltGraph; + protected notificationsManager: NotificationsManager; + protected vaultsDbDomain: DBDomain = [this.constructor.name]; + protected vaultsDb: DBLevel; + protected vaultsNamesDbDomain: DBDomain = [...this.vaultsDbDomain, 'names']; + protected vaultsNamesDb: DBLevel; + protected vaultsNamesLock: RWLock = new RWLock(); + // VaultId -> VaultMetadata + protected vaultMap: VaultMap = new Map(); + protected vaultKey: Buffer; + protected efs: EncryptedFS; + constructor({ vaultsPath, + db, + acl, keyManager, nodeConnectionManager, gestaltGraph, - acl, - db, - vaultsKey, + notificationsManager, + keyBits, fs, logger, }: { vaultsPath: string; + db: DB; + acl: ACL; keyManager: KeyManager; nodeConnectionManager: NodeConnectionManager; gestaltGraph: GestaltGraph; - acl: ACL; - db: DB; - vaultsKey: VaultKey; + notificationsManager: NotificationsManager; + keyBits: 128 | 192 | 256; fs: FileSystem; logger: Logger; }) { + this.logger = logger; this.vaultsPath = vaultsPath; + this.efsPath = path.join(this.vaultsPath, config.defaults.efsBase); + this.db = db; + this.acl = acl; this.keyManager = keyManager; this.nodeConnectionManager = nodeConnectionManager; this.gestaltGraph = gestaltGraph; - this.acl = acl; - this.db = db; - this.vaultsMap = new Map(); + this.notificationsManager = notificationsManager; + this.keyBits = keyBits; this.fs = fs; - this.vaultsKey = vaultsKey; - this.logger = logger; } public async start({ fresh = false, - }: { fresh?: boolean } = {}): Promise { + }: { + fresh?: boolean; + } = {}): Promise { try { this.logger.info(`Starting ${this.constructor.name}`); - this.vaultsDbDomain = 'VaultManager'; - this.vaultsDb = await this.db.level(this.vaultsDbDomain); - this.vaultsNamesDbDomain = [this.vaultsDbDomain, 'names']; - this.vaultsNamesDb = await this.db.level( + const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); + const vaultsNamesDb = await this.db.level( this.vaultsNamesDbDomain[1], - this.vaultsDb, + vaultsDb, ); if (fresh) { - await this.vaultsDb.clear(); + await vaultsDb.clear(); await this.fs.promises.rm(this.vaultsPath, { force: true, recursive: true, }); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); } - await utils.mkdirExists(this.fs, this.vaultsPath); - this.efs = await EncryptedFS.createEncryptedFS({ - dbPath: this.vaultsPath, - dbKey: this.vaultsKey, - logger: this.logger, - }); - await this.efs.start(); + await mkdirExists(this.fs, this.vaultsPath); + const vaultKey = await this.setupKey(this.keyBits); + let 
efs; + try { + efs = await EncryptedFS.createEncryptedFS({ + dbPath: this.efsPath, + dbKey: vaultKey, + logger: this.logger.getChild('EncryptedFileSystem'), + }); + } catch (e) { + if (e instanceof encryptedFsErrors.ErrorEncryptedFSKey) { + throw new vaultsErrors.ErrorVaultManagerKey(); + } + throw new vaultsErrors.ErrorVaultManagerEFS(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); + } + this.vaultsDb = vaultsDb; + this.vaultsNamesDb = vaultsNamesDb; + this.vaultKey = vaultKey; + this.efs = efs; this.logger.info(`Started ${this.constructor.name}`); } catch (e) { this.logger.warn(`Failed Starting ${this.constructor.name}`); - await this.efs.stop(); + await this.efs?.stop(); throw e; } } public async stop(): Promise { this.logger.info(`Stopping ${this.constructor.name}`); - // Destroying managed vaults. - for (const vault of this.vaultsMap.values()) { - await vault.vault?.destroy(); + + // Iterate over vaults in memory and destroy them, ensuring that + // the working directory commit state is saved + + for (const [vaultIdString, vaultAndLock] of this.vaultMap) { + const vaultId = IdInternal.fromString(vaultIdString); + await withF([this.getWriteLock(vaultId)], async () => { + await vaultAndLock.vault?.stop(); + }); + this.vaultMap.delete(vaultIdString); } + await this.efs.stop(); + this.vaultMap = new Map(); this.logger.info(`Stopped ${this.constructor.name}`); } public async destroy(): Promise { this.logger.info(`Destroying ${this.constructor.name}`); - // We want to remove any state for the vault manager. - // this includes clearing out all DB domains and destroying the EFS. - const vaultsDb = await this.db.level(this.vaultsDbDomain); - await vaultsDb.clear(); await this.efs.destroy(); - this.logger.info(`Removing vaults directory at '${this.vaultsPath}'`); + // If the DB was stopped, the existing sublevel `this.vaultsDb` will not be valid + // Therefore we recreate the sublevel here + const vaultsDb = await this.db.level(this.vaultsDbDomain[0]); + // Clearing all vaults db data + await vaultsDb.clear(); + // Is it necessary to remove the vaults domain? 
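+ // Removing the vaults directory here also removes the on-disk EFS database, + // since efsPath is located under vaultsPath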
await this.fs.promises.rm(this.vaultsPath, { force: true, recursive: true, @@ -207,335 +261,364 @@ class VaultManager { this.efs.unsetWorkerManager(); } - public async transaction( - f: (vaultManager: VaultManager) => Promise, - lock: MutexInterface, - ): Promise { - const release = await lock.acquire(); - try { - return await f(this); - } finally { - release(); - } + protected getLock(vaultId: VaultId): RWLock { + const vaultIdString = vaultId.toString() as VaultIdString; + const vaultAndLock = this.vaultMap.get(vaultIdString); + if (vaultAndLock != null) return vaultAndLock.lock; + const lock = new RWLock(); + this.vaultMap.set(vaultIdString, { lock }); + return lock; } - protected async _transaction( - f: () => Promise, - vaults: Array = [], - ): Promise { - const releases: Array = []; - for (const vault of vaults) { - const lock = this.vaultsMap.get(idUtils.toString(vault)); - if (lock) releases.push(await lock.lock.acquire()); - } - try { - return await f(); - } finally { - // Release them in the opposite order - releases.reverse(); - for (const r of releases) { - r(); - } - } + protected getReadLock(vaultId: VaultId): ResourceAcquire { + const lock = this.getLock(vaultId); + return async () => { + const release = await lock.acquireRead(); + return [async () => release()]; + }; } - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async getVaultName(vaultId: VaultId): Promise { - const vaultMeta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - ); - if (vaultMeta == null) throw new vaultsErrors.ErrorVaultUndefined(); - return vaultMeta.name; + protected getWriteLock(vaultId: VaultId): ResourceAcquire { + const lock = this.getLock(vaultId); + return async () => { + const release = await lock.acquireWrite(); + return [async () => release()]; + }; } + /** + * Constructs a new vault instance with a given name and + * stores it in memory + */ + + // this should actually + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async createVault(vaultName: VaultName): Promise { + public async createVault(vaultName: VaultName): Promise { + // Adding vault to name map const vaultId = await this.generateVaultId(); - const lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); - return await this._transaction(async () => { - await this.db.put(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId), { - name: vaultName, - }); - const vault = await VaultInternal.create({ + await this.vaultsNamesLock.withWrite(async () => { + const vaultIdBuffer = await this.db.get( + this.vaultsNamesDbDomain, + vaultName, + true, + ); + // Check if the vault name already exists; + if (vaultIdBuffer != null) { + throw new vaultsErrors.ErrorVaultsVaultDefined(); + } + await this.db.put( + this.vaultsNamesDbDomain, + vaultName, + vaultId.toBuffer(), + true, + ); + }); + const lock = new RWLock(); + const vaultIdString = vaultId.toString() as VaultIdString; + this.vaultMap.set(vaultIdString, { lock }); + return await withF([this.getWriteLock(vaultId)], async () => { + // Creating vault + const vault = await VaultInternal.createVaultInternal({ vaultId, + vaultName, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, fresh: true, }); - this.vaultsMap.set(idUtils.toString(vaultId), { lock, vault }); - return vault; - }, [vaultId]); + // Adding vault to object map + this.vaultMap.set(vaultIdString, { lock, vault }); + return 
vault.vaultId; + }); } + /** + * Retrieves the vault metadata using the vault Id + * and parses it to return the associated vault name + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async destroyVault(vaultId: VaultId) { - await this._transaction(async () => { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) return; - await this.db.del(this.vaultsNamesDbDomain, idUtils.toBuffer(vaultId)); - this.vaultsMap.delete(idUtils.toString(vaultId)); - await this.efs.rmdir(vaultsUtils.makeVaultIdPretty(vaultId), { - recursive: true, - }); - }, [vaultId]); + public async getVaultMeta( + vaultId: VaultId, + ): Promise { + // First check if the metadata exists + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + const vaultDbDomain = [...this.vaultsDbDomain, vaultIdEncoded]; + const vaultDb = await this.db.level(vaultIdEncoded, this.vaultsDb); + // Return if metadata has no data + if ((await this.db.count(vaultDb)) === 0) return; + // Obtain the metadata + const dirty = (await this.db.get( + vaultDbDomain, + VaultInternal.dirtyKey, + ))!; + const vaultName = (await this.db.get( + vaultDbDomain, + VaultInternal.nameKey, + ))!; + const remoteInfo = await this.db.get( + vaultDbDomain, + VaultInternal.remoteKey, + ); + return { + dirty, + vaultName, + remoteInfo, + }; } + /** + * Removes the metadata and EFS state of a vault using a + * given vault Id + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async openVault(vaultId: VaultId): Promise { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); - return await this.getVault(vaultId); + public async destroyVault(vaultId: VaultId) { + const vaultMeta = await this.getVaultMeta(vaultId); + if (vaultMeta == null) return; + const vaultName = vaultMeta.vaultName; + this.logger.info(`Destroying Vault ${vaultsUtils.encodeVaultId(vaultId)}`); + const vaultIdString = vaultId.toString() as VaultIdString; + await withF([this.getWriteLock(vaultId)], async () => { + const vault = await this.getVault(vaultId); + // Destroying vault state and metadata + await vault.stop(); + await vault.destroy(); + // Removing from map + this.vaultMap.delete(vaultIdString); + // Removing name->id mapping + await this.vaultsNamesLock.withWrite(async () => { + await this.db.del(this.vaultsNamesDbDomain, vaultName); + }); + }); + this.logger.info(`Destroyed Vault ${vaultsUtils.encodeVaultId(vaultId)}`); } + /** + * Removes vault from the vault map + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async closeVault(vaultId: VaultId) { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); - const vault = await this.getVault(vaultId); - await vault.destroy(); - this.vaultsMap.delete(idUtils.toString(vaultId)); + if ((await this.getVaultName(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined(); + } + const vaultIdString = vaultId.toString() as VaultIdString; + await withF([this.getWriteLock(vaultId)], async () => { + const vault = await this.getVault(vaultId); + await vault.stop(); + this.vaultMap.delete(vaultIdString); + }); } + /** + * Lists the vault name and associated vault Id of all + * the vaults stored + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async listVaults(): Promise { const vaults: VaultList = new Map(); - for await (const o of this.vaultsNamesDb.createReadStream({})) { - const dbMeta = (o as any).value; - const dbId = (o
as any).key; - const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); - vaults.set(vaultMeta.name, makeVaultId(dbId)); + // Stream of vaultName -> VaultId key-value pairs + for await (const vaultNameBuffer of this.vaultsNamesDb.createKeyStream()) { + const vaultName = vaultNameBuffer.toString() as VaultName; + const vaultId = (await this.getVaultId(vaultName))!; + vaults.set(vaultName, vaultId); } return vaults; } + /** + * Changes the vault name metadata of a vault Id + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async renameVault( vaultId: VaultId, newVaultName: VaultName, ): Promise { - await this._transaction(async () => { - const meta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - ); - if (!meta) throw new vaultsErrors.ErrorVaultUndefined(); - meta.name = newVaultName; - await this.db.put( + await withF([this.getWriteLock(vaultId)], async () => { + this.logger.info(`Renaming Vault ${vaultsUtils.encodeVaultId(vaultId)}`); + // Checking if new name exists + if (await this.getVaultId(newVaultName)) { + throw new vaultsErrors.ErrorVaultsVaultDefined(); + } + // Checking if vault exists + const vaultMetadata = await this.getVaultMeta(vaultId); + if (vaultMetadata == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined(); + } + const oldVaultName = vaultMetadata.vaultName; + // Updating metadata with new name + const vaultDbDomain = [ + ...this.vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await this.db.put(vaultDbDomain, VaultInternal.nameKey, newVaultName); + // Updating name->id map + await this.vaultsNamesLock.withWrite(async () => { + await this.db.del(this.vaultsNamesDbDomain, oldVaultName); + await this.db.put( + this.vaultsNamesDbDomain, + newVaultName, + vaultId.toBuffer(), + true, + ); + }); + }); + } + + /** + * Retrieves the vault Id associated with a vault name + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async getVaultId(vaultName: VaultName): Promise { + return await this.vaultsNamesLock.withWrite(async () => { + const vaultIdBuffer = await this.db.get( this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - meta, + vaultName, + true, ); - }, [vaultId]); + if (vaultIdBuffer == null) return; + return IdInternal.fromBuffer(vaultIdBuffer); + }); } + /** + * Retrieves the vault name associated with a vault Id + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async getVaultId(vaultName: VaultName): Promise { - for await (const o of this.vaultsNamesDb.createReadStream({})) { - const dbMeta = (o as any).value; - const dbId = (o as any).key; - const vaultMeta = await this.db.deserializeDecrypt(dbMeta, false); - if (vaultName === vaultMeta.name) { - return makeVaultId(dbId); - } + public async getVaultName(vaultId: VaultId): Promise { + const metadata = await this.getVaultMeta(vaultId); + return metadata?.vaultName; + } + + /** + * Returns a dictionary of VaultActions for each node + * @param vaultId + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async getVaultPermission( + vaultId: VaultId, + ): Promise> { + const rawPermissions = await this.acl.getVaultPerm(vaultId); + const permissions: Record = {}; + // Getting the relevant information + for (const nodeId in rawPermissions) { + permissions[nodeId] = rawPermissions[nodeId].vaults[vaultId]; } + return permissions; } + /** + * Sets clone, pull and scan permissions of a vault for a + * gestalt and sends a notification to that gestalt + */ @ready(new
vaultsErrors.ErrorVaultManagerNotRunning()) public async shareVault(vaultId: VaultId, nodeId: NodeId): Promise { - const vaultName = await this.getVaultName(vaultId); - if (!vaultName) throw new vaultsErrors.ErrorVaultUndefined(); - return await this.gestaltGraph._transaction(async () => { - return await this.acl._transaction(async () => { - const gestalt = await this.gestaltGraph.getGestaltByNode(nodeId); - if (gestalt == null) { - throw new gestaltErrors.ErrorGestaltsGraphNodeIdMissing(); - } - const nodes = gestalt.nodes; - for (const node in nodes) { - await this.acl.setNodeAction(nodeId, 'scan'); - await this.acl.setVaultAction( - vaultId, - nodesUtils.decodeNodeId(nodes[node].id)!, - 'pull', - ); - await this.acl.setVaultAction( - vaultId, - nodesUtils.decodeNodeId(nodes[node].id)!, - 'clone', - ); - } - await this.notificationsManager.sendNotification(nodeId, { - type: 'VaultShare', - vaultId: idUtils.toString(vaultId), - vaultName, - actions: { - clone: null, - pull: null, - }, - }); - }); + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + // Node Id permissions translated to other nodes in + // a gestalt by other domains + await this.gestaltGraph.setGestaltActionByNode(nodeId, 'scan'); + await this.acl.setVaultAction(vaultId, nodeId, 'pull'); + await this.acl.setVaultAction(vaultId, nodeId, 'clone'); + await this.notificationsManager.sendNotification(nodeId, { + type: 'VaultShare', + vaultId: vaultsUtils.encodeVaultId(vaultId), + vaultName: vaultMeta.vaultName, + actions: { + clone: null, + pull: null, + }, }); } + /** + * Unsets clone, pull and scan permissions of a vault for a + * gestalt + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async unshareVault(vaultId: VaultId, nodeId: NodeId): Promise { + const vaultMeta = await this.getVaultMeta(vaultId); + if (!vaultMeta) throw new vaultsErrors.ErrorVaultsVaultUndefined(); + await this.gestaltGraph.unsetGestaltActionByNode(nodeId, 'scan'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'pull'); + await this.acl.unsetVaultAction(vaultId, nodeId, 'clone'); + } + + /** + * Clones the contents of a remote vault into a new local + * vault instance + */ @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) public async cloneVault( nodeId: NodeId, vaultNameOrId: VaultId | VaultName, - ): Promise { - let vaultName, remoteVaultId; - return await this.nodeConnectionManager.withConnF( - nodeId, - async (connection) => { - const client = connection.getClient(); - const vaultId = await this.generateVaultId(); - const lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); - return await this._transaction(async () => { - await this.efs.mkdir( - path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - { recursive: true }, - ); - const request = async ({ - url, - method = 'GET', - headers = {}, - body = [Buffer.from('')], - }: { - url: string; - method: string; - headers: POJO; - body: Buffer[]; - }) => { - if (method === 'GET') { - const infoResponse = { - async *[Symbol.iterator]() { - const request = new vaultsPB.Vault(); - if (typeof vaultNameOrId === 'string') { - request.setNameOrId(vaultNameOrId); - } else { - request.setNameOrId(idUtils.toString(vaultNameOrId)); - } - const response = client.vaultsGitInfoGet(request); - response.stream.on('metadata', async (meta) => { - vaultName = meta.get('vaultName').pop()!.toString(); - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); - }); - 
for await (const resp of response) { - yield resp.getChunk_asU8(); - } - }, - }; - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const packResponse = { - async *[Symbol.iterator]() { - const responseBuffers: Array = []; - const meta = new grpc.Metadata(); - if (typeof vaultNameOrId === 'string') { - meta.set('vaultNameOrId', vaultNameOrId); - } else { - meta.set( - 'vaultNameOrId', - vaultsUtils.makeVaultIdPretty(vaultNameOrId), - ); - } - const stream = client.vaultsGitPackGet(meta); - const write = utils.promisify(stream.write).bind(stream); - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body[0]); - write(chunk); - stream.end(); - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); - }, - }; - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); - } - }; - await git.clone({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - url: 'http://', - singleBranch: true, - }); - await this.efs.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - '.git', - 'packed-refs', - ), - '# pack-refs with: peeled fully-peeled sorted', - ); - const workingDir = ( - await git.log({ - fs: this.efs, - dir: path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - 'contents', - ), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - depth: 1, - }) - ).pop()!; - await this.efs.writeFile( - path.join( - vaultsUtils.makeVaultIdPretty(vaultId), - '.git', - 'workingDir', - ), - workingDir.oid, - ); - const vault = await VaultInternal.create({ - vaultId, - keyManager: this.keyManager, - efs: this.efs, - logger: this.logger.getChild(VaultInternal.name), - }); - this.vaultsMap.set(idUtils.toString(vaultId), { lock, vault }); - await this.db.put( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - { - name: vaultName, - defaultPullNode: nodeId, - defaultPullVault: idUtils.toBuffer(remoteVaultId), - }, - ); - return vault; - }, [vaultId]); - }, + ): Promise { + const vaultId = await this.generateVaultId(); + const lock = new RWLock(); + const vaultIdString = vaultId.toString() as VaultIdString; + this.vaultMap.set(vaultIdString, { lock }); + this.logger.info( + `Cloning Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, ); + return await withF([this.getWriteLock(vaultId)], async () => { + const vault = await VaultInternal.cloneVaultInternal({ + targetNodeId: nodeId, + targetVaultNameOrId: vaultNameOrId, + vaultId, + db: this.db, + nodeConnectionManager: this.nodeConnectionManager, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, + keyManager: this.keyManager, + efs: this.efs, + logger: this.logger.getChild(VaultInternal.name), + }); + this.vaultMap.set(vaultIdString, { lock, vault }); + const vaultMetadata = (await this.getVaultMeta(vaultId))!; + const baseVaultName = vaultMetadata.vaultName; + // Need to check if the name is taken, allow up to 50 attempts + let newVaultName = baseVaultName; + let attempts = 1; + while (true) { + const existingVaultId = await this.db.get( + this.vaultsNamesDbDomain, + newVaultName, + ); + if (existingVaultId == null) break; + newVaultName = `${baseVaultName}-${attempts}`; + if (attempts >= 50) { + throw new vaultsErrors.ErrorVaultsNameConflict( + `Too many copies of ${baseVaultName}`, + ); + } + attempts++; + } + // Set the vaultName -> vaultId mapping + await this.db.put( + this.vaultsNamesDbDomain, + newVaultName, + vaultId.toBuffer(), + true, + ); + // Update vault metadata + await this.db.put( + [...this.vaultsDbDomain, vaultsUtils.encodeVaultId(vaultId)], + VaultInternal.nameKey, + newVaultName, + ); + this.logger.info( + `Cloned Vault ${vaultsUtils.encodeVaultId(vaultId)} on Node ${nodeId}`, + ); + return vault.vaultId; + }); }
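A rough usage sketch of cloneVault; vaultManager and targetNodeId are assumed names for illustration. Cloning the same remote vault twice demonstrates the name de-duplication loop above.

// First clone takes the remote vault's own name, e.g. 'my-vault';
// a second clone of the same vault would be stored as 'my-vault-1'
const vaultId = await vaultManager.cloneVault(targetNodeId, 'my-vault');
const meta = await vaultManager.getVaultMeta(vaultId);
console.log(meta?.vaultName); // 'my-vault', or 'my-vault-<n>' on conflict
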
+ /** + * Pulls the contents of a remote vault into an existing vault + * instance + */ public async pullVault({ vaultId, pullNodeId, @@ -544,164 +627,170 @@ class VaultManager { vaultId: VaultId; pullNodeId?: NodeId; pullVaultNameOrId?: VaultId | VaultName; - }): Promise { - throw Error('Not fully implemented.'); - let metaChange = 0; - let vaultMeta, remoteVaultId; - return await this._transaction(async () => { - if (pullNodeId == null || pullVaultNameOrId == null) { - vaultMeta = await this.db.get( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), + }): Promise { + if ((await this.getVaultName(vaultId)) == null) return; + await withF([this.getWriteLock(vaultId)], async () => { + const vault = await this.getVault(vaultId); + await vault.pullVault({ + nodeConnectionManager: this.nodeConnectionManager, + pullNodeId, + pullVaultNameOrId, + }); + }); + } + + /** + * Handler for receiving HTTP GET requests when being + * cloned or pulled from + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async *handleInfoRequest(vaultId: VaultId): AsyncGenerator { + const efs = this.efs; + const vault = await this.getVault(vaultId); + return yield* withG( + [this.getReadLock(vaultId), vault.readLock], + async function* (): AsyncGenerator { + // Adherence to git protocol + yield Buffer.from( + gitUtils.createGitPacketLine('# service=git-upload-pack\n'), ); - if (!vaultMeta) throw new vaultsErrors.ErrorVaultUnlinked(); - if (pullNodeId == null) { - pullNodeId = vaultMeta.defaultPullNode; - } else { - metaChange = 1; - vaultMeta.defaultPullNode = pullNodeId; + yield Buffer.from('0000'); + // Read the commit state of the vault + const uploadPack = await gitUtils.uploadPack({ + fs: efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + advertiseRefs: true, + }); + for (const buffer of uploadPack) { + yield buffer; } - if (pullVaultNameOrId == null) { - pullVaultNameOrId = makeVaultId( - idUtils.fromBuffer(Buffer.from(vaultMeta.defaultPullVault.data)), + }, + ); + }
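The two yields above produce git's smart-protocol preamble. As a sketch of the framing, this mirrors what gitUtils.createGitPacketLine presumably computes; the helper below is illustrative, not taken from this codebase.

// git pkt-line framing: each line is prefixed with its total length
// (payload plus the 4-digit header itself) as 4 lowercase hex digits;
// '0000' is the flush-pkt that terminates a section
function gitPacketLine(line: string): string {
  const length = (line.length + 4).toString(16).padStart(4, '0');
  return length + line;
}
gitPacketLine('# service=git-upload-pack\n'); // => '001e# service=git-upload-pack\n'
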
+ + /** + * Handler for receiving HTTP POST requests when being + * cloned or pulled from + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async handlePackRequest( + vaultId: VaultId, + body: Buffer, + ): Promise<[PassThrough, PassThrough]> { + const vault = await this.getVault(vaultId); + return await withF( + [this.getReadLock(vaultId), vault.readLock], + async () => { + if (body.toString().slice(4, 8) === 'want') { + // Parse the request to get the wanted git object + const wantedObjectId = body.toString().slice(9, 49); + const packResult = await gitUtils.packObjects({ + fs: this.efs, + dir: path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + gitdir: path.join(vaultsUtils.encodeVaultId(vaultId), '.git'), + refs: [wantedObjectId], + }); + // Generate a contents and progress stream + const readable = new PassThrough(); + const progressStream = new PassThrough(); + const sideBand = gitUtils.mux( 'side-band-64', readable, packResult.packstream, progressStream, ); + return [sideBand, progressStream]; } else { - metaChange = 1; - if (typeof pullVaultNameOrId === 'string') { - metaChange = 2; - } else { - vaultMeta.defaultPullVault = idUtils.toBuffer(pullVaultNameOrId); - } + throw new gitErrors.ErrorGitUnimplementedMethod( + `Request of type '${body + .toString() + .slice(4, 8)}' not valid, expected 'want'`, + ); } - } - // TODO: this will need a generator variant of nodeConnectionManager.withConnection() to fix. - // const nodeConnection = await this.nodeConnectionManager.getConnectionToNode( - // pullNodeId!, - // ); - let nodeConnection: NodeConnection; - const client = nodeConnection!.getClient(); - const request = async ({ - url, - method = 'GET', - headers = {}, - body = [Buffer.from('')], - }: { - url: string; - method: string; - headers: POJO; - body: Buffer[]; - }) => { - if (method === 'GET') { - const infoResponse = { - async *[Symbol.iterator]() { - const request = new vaultsPB.Vault(); - if (typeof pullVaultNameOrId === 'string') { - request.setNameOrId(pullVaultNameOrId); - } else { - request.setNameOrId(idUtils.toString(pullVaultNameOrId!)); - } - const response = client.vaultsGitInfoGet(request); - response.stream.on('metadata', async (meta) => { - remoteVaultId = makeVaultId( - meta.get('vaultId').pop()!.toString(), - ); - }); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } - }, - }; - return { - url: url, - method: method, - body: infoResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else if (method === 'POST') { - const packResponse = { - async *[Symbol.iterator]() { - const responseBuffers: Array = []; - const meta = new grpc.Metadata(); - if (typeof pullVaultNameOrId === 'string') { - meta.set('vaultNameOrId', pullVaultNameOrId); - } else { - meta.set( - 'vaultNameOrId', - vaultsUtils.makeVaultIdPretty(pullVaultNameOrId), - ); - } - const stream = client.vaultsGitPackGet(meta); - const write = utils.promisify(stream.write).bind(stream); - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body[0]); - write(chunk); - stream.end(); - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); - }, - }; - return { - url: url, - method: method, - body: packResponse, - headers: headers, - statusCode: 200, - statusMessage: 'OK', - }; - } else { - throw new Error('Method not supported'); + }, + ); + } + + /** + * Retrieves all the vaults for a peer's node + */ + public async *scanVaults(targetNodeId: NodeId): AsyncGenerator<{ + vaultName: VaultName; + vaultIdEncoded: VaultIdEncoded; + vaultPermissions: VaultAction[]; + }> { + // Create a connection to another node + return yield* this.nodeConnectionManager.withConnG( + targetNodeId, + async function* (connection): AsyncGenerator<{ + vaultName: VaultName; + vaultIdEncoded: VaultIdEncoded; + vaultPermissions: VaultAction[]; + }> { + const client = connection.getClient(); + const genReadable = client.vaultsScan(new utilsPB.EmptyMessage()); + for await (const vault of genReadable) { + const vaultName = vault.getVaultName() as VaultName; + const vaultIdEncoded = vault.getVaultId() as VaultIdEncoded; + const vaultPermissions = + vault.getVaultPermissionsList() as VaultAction[]; + yield { vaultName, vaultIdEncoded, vaultPermissions }; + } + }, + ); + }
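A short consumer sketch for the generator above, assuming a constructed vaultManager and a reachable targetNodeId; both names are illustrative.

// Enumerate what a remote node is willing to share with us
for await (const vault of vaultManager.scanVaults(targetNodeId)) {
  const { vaultName, vaultIdEncoded, vaultPermissions } = vault;
  console.log(`${vaultName} (${vaultIdEncoded}): ${vaultPermissions.join(',')}`);
}
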
+ + /** + * Yields all the vaults that are shared with a given NodeId. + */ + public async *handleScanVaults(nodeId: NodeId): AsyncGenerator<{ + vaultId: VaultId; + vaultName: VaultName; + vaultPermissions: VaultAction[]; + }> { + // Checking permission + const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); + const permissions = await this.acl.getNodePerm(nodeId); + if (permissions == null) { + throw new vaultsErrors.ErrorVaultsPermissionDenied( + `No permissions found for ${nodeIdEncoded}`, + ); + } + if (permissions.gestalt.scan === undefined) { + throw new vaultsErrors.ErrorVaultsPermissionDenied( + `Scanning is not allowed for ${nodeIdEncoded}`, + ); + } + + // Getting the list of vaults + const vaults = permissions.vaults; + for (const vaultIdString of Object.keys(vaults)) { + // Getting vault permissions + const vaultId = IdInternal.fromString(vaultIdString); + const vaultPermissions = Object.keys( + vaults[vaultIdString], + ) as VaultAction[]; + // Getting the vault name + const metadata = await this.getVaultMeta(vaultId); + const vaultName = metadata!.vaultName; + const element = { + vaultId, + vaultName, + vaultPermissions, }; - try { - await git.pull({ - fs: this.efs, - http: { request }, - dir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), 'contents'), - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - url: `http://`, - ref: 'HEAD', - singleBranch: true, - author: { - name: nodesUtils.encodeNodeId(pullNodeId!), - }, - }); - } catch (err) { - if (err instanceof git.Errors.MergeNotSupportedError) { - throw new vaultsErrors.ErrorVaultMergeConflict( - 'Merge Conflicts are not supported yet', - ); - } - throw err; - } - if (metaChange !== 0) { - if (metaChange === 2) vaultMeta.defaultPullVault = remoteVaultId; - await this.db.put( - this.vaultsNamesDbDomain, - idUtils.toBuffer(vaultId), - vaultMeta, - ); - } - const vault = await this.getVault(vaultId); - await vault.readWorkingDirectory(); - return vault; - }, [vaultId]); + yield element; + } } + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) protected async generateVaultId(): Promise { let vaultId = vaultsUtils.generateVaultId(); let i = 0; - while (await this.efs.exists(idUtils.toString(vaultId))) { + while (await this.efs.exists(vaultsUtils.encodeVaultId(vaultId))) { i++; if (i > 50) { - throw new vaultsErrors.ErrorCreateVaultId( + throw new vaultsErrors.ErrorVaultsCreateVaultId( 'Could not create a unique vaultId after 50 attempts', ); } @@ -711,115 +800,71 @@ class VaultManager { } @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async *handleInfoRequest( - vaultId: VaultId, - ): AsyncGenerator { - const service = 'upload-pack'; - yield Buffer.from( - gitUtils.createGitPacketLine('# service=git-' + service + '\n'), - ); - yield Buffer.from('0000'); - for (const buffer of (await gitUtils.uploadPack( - this.efs, - path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - true, - )) ?? 
[]) { - yield buffer; - } - } - - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async handlePackRequest( - vaultId: VaultId, - body: Buffer, - ): Promise { - if (body.toString().slice(4, 8) === 'want') { - const wantedObjectId = body.toString().slice(9, 49); - const packResult = await gitUtils.packObjects({ - fs: this.efs, - gitdir: path.join(vaultsUtils.makeVaultIdPretty(vaultId), '.git'), - refs: [wantedObjectId], - }); - const readable = new PassThrough(); - const progressStream = new PassThrough(); - const sideBand = gitUtils.mux( - 'side-band-64', - readable, - packResult.packstream, - progressStream, - ); - return [sideBand, progressStream]; - } else { - throw new gitErrors.ErrorGitUnimplementedMethod( - `Request of type '${body - .toString() - .slice(4, 8)}' not valid, expected 'want'`, - ); - } - } - - /** - * Retrieves all the vaults for a peers node - */ - @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) - public async scanVaults(targetNodeId: NodeId): Promise> { - // Create the handler for git to scan from - return this.nodeConnectionManager.withConnF( - targetNodeId, - async (connection) => { - const client = connection.getClient(); - const gitRequest = await vaultsUtils.constructGitHandler( - client, - this.keyManager.getNodeId(), - ); - return await gitRequest.scanVaults(); - }, - ); - } - protected async getVault(vaultId: VaultId): Promise { let vault: VaultInternal | undefined; - let lock: MutexInterface; - let vaultAndLock = this.vaultsMap.get(idUtils.toString(vaultId)); + let lock: RWLock; + const vaultIdString = vaultId.toString() as VaultIdString; + let vaultAndLock = this.vaultMap.get(vaultIdString); if (vaultAndLock != null) { ({ vault, lock } = vaultAndLock); + // Lock and vault exist if (vault != null) { return vault; } + // Only lock exists let release; try { - release = await lock.acquire(); - ({ vault, lock } = vaultAndLock); + release = await lock.acquireWrite(); + ({ vault } = vaultAndLock); if (vault != null) { return vault; } - vault = await VaultInternal.create({ + // Only create if the vault state already exists + if ((await this.getVaultMeta(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault ${vaultsUtils.encodeVaultId(vaultId)} doesn't exist`, + ); + } + vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: this.keyManager, efs: this.efs, logger: this.logger.getChild(VaultInternal.name), + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, }); vaultAndLock.vault = vault; - this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); return vault; } finally { release(); } } else { - lock = new Mutex(); + // Neither vault nor lock exists + lock = new RWLock(); vaultAndLock = { lock }; - this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); let release; try { - release = await lock.acquire(); - vault = await VaultInternal.create({ + release = await lock.acquireWrite(); + // Only create if the vault state already exists + if ((await this.getVaultMeta(vaultId)) == null) { + throw new vaultsErrors.ErrorVaultsVaultUndefined( + `Vault ${vaultsUtils.encodeVaultId(vaultId)} doesn't exist`, + ); + } + vault = await VaultInternal.createVaultInternal({ vaultId, keyManager: this.keyManager, efs: this.efs, + db: this.db, + vaultsDb: this.vaultsDb, + vaultsDbDomain: this.vaultsDbDomain, logger: this.logger.getChild(VaultInternal.name), }); vaultAndLock.vault = vault; 
- this.vaultsMap.set(idUtils.toString(vaultId), vaultAndLock); + this.vaultMap.set(vaultIdString, vaultAndLock); return vault; } finally { release(); @@ -827,14 +872,75 @@ class VaultManager { } } - protected async getLock(vaultId: VaultId): Promise { - const vaultLock = this.vaultsMap.get(idUtils.toString(vaultId)); - let lock = vaultLock?.lock; - if (!lock) { - lock = new Mutex(); - this.vaultsMap.set(idUtils.toString(vaultId), { lock }); + // THIS can also be replaced with generic withF and withG + + /** + * Takes a function and runs it with the listed vaults. Locking is handled automatically + * @param vaultIds List of vault IDs for the vaults you wish to use + * @param f Function you wish to run with the provided vaults + */ + @ready(new vaultsErrors.ErrorVaultManagerNotRunning()) + public async withVaults( + vaultIds: VaultId[], + f: (...args: Vault[]) => Promise, + ): Promise { + // Stages: + // 1. Obtain vaults + // 2. Call function with vaults while locking the vaults + // 3. Catch any problems and perform cleanup in finally + // 4. Return result + + const vaults = await Promise.all( + vaultIds.map(async (vaultId) => { + return await this.getVault(vaultId); + }), + ); + + // Obtaining locks + const vaultLocks = vaultIds.map((vaultId) => { + return this.getReadLock(vaultId); + }); + + // Running the function with locking + return await withF(vaultLocks, () => { + return f(...vaults); + }); + } + + protected async setupKey(bits: 128 | 192 | 256): Promise { + let key: Buffer | undefined; + key = await this.db.get(this.vaultsDbDomain, 'key', true); + // If the EFS already exists, but the key doesn't, then we have lost the key + if (key == null && (await this.existsEFS())) { + throw new vaultsErrors.ErrorVaultManagerKey(); + } + if (key != null) { + return key; + } + this.logger.info('Generating vaults key'); + key = await this.generateKey(bits); + await this.db.put(this.vaultsDbDomain, 'key', key, true); + return key; + } + + protected async generateKey(bits: 128 | 192 | 256): Promise { + return await keysUtils.generateKey(bits); + } + + protected async existsEFS(): Promise { + try { + return (await this.fs.promises.readdir(this.efsPath)).length > 0; + } catch (e) { + if (e.code === 'ENOENT') { + return false; + } + throw new vaultsErrors.ErrorVaultManagerEFS(e.message, { + errno: e.errno, + syscall: e.syscall, + code: e.code, + path: e.path, + }); } - return lock; } }
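A brief usage sketch for withVaults; the ids and the vaultOps import are assumptions for illustration. Both vaults stay locked for the duration of the callback.

// Copy a secret between two vaults under a single locking scope
await vaultManager.withVaults([vaultId1, vaultId2], async (vault1, vault2) => {
  const secret = await vaultOps.getSecret(vault1, 'credentials');
  await vaultOps.addSecret(vault2, 'credentials', secret);
});
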
diff --git a/src/vaults/VaultOps.ts b/src/vaults/VaultOps.ts index a2d90921f..703f09752 100644 --- a/src/vaults/VaultOps.ts +++ b/src/vaults/VaultOps.ts @@ -2,34 +2,31 @@ * Adds a secret to the vault */ import type Logger from '@matrixai/logger'; -import type { - FileOptions, - SecretContent, - SecretList, - SecretName, - Vault, -} from './types'; -import type { FileSystem } from '../types'; +import type { Vault } from './Vault'; import path from 'path'; import * as vaultsErrors from './errors'; import * as vaultsUtils from './utils'; +type FileOptions = { + recursive?: boolean; +}; + // TODO: tests -// - add succeded +// - add succeeded // - secret exists // - secret with directory -// Might just drop the return type. -// I don't see a case where it would be false without an error. +// Might just drop the return type +// I don't see a case where it would be false without an error // - Add locking? async function addSecret( vault: Vault, - secretName: SecretName, - content: SecretContent, + secretName: string, + content: Buffer | string, logger?: Logger, ): Promise { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { if (await efs.exists(secretName)) { - throw new vaultsErrors.ErrorSecretDefined( + throw new vaultsErrors.ErrorSecretsSecretDefined( `${secretName} already exists, try updating instead`, ); } @@ -56,14 +53,14 @@ async function addSecret( // - invalid name async function updateSecret( vault: Vault, - secretName: SecretName, - content: SecretContent, + secretName: string, + content: Buffer | string, logger?: Logger, ): Promise { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { // Throw error if secret does not exist if (!(await efs.exists(secretName))) { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( 'Secret does not exist, try adding it instead.', ); } @@ -83,15 +80,15 @@ async function updateSecret( // - invalid name async function renameSecret( vault: Vault, - currSecretName: SecretName, - newSecretName: SecretName, + currSecretName: string, + newSecretName: string, logger?: Logger, ): Promise { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { await efs.rename(currSecretName, newSecretName); }); logger?.info( `Renamed secret at ${currSecretName} to ${newSecretName} in vault ${vault.vaultId}`, ); } @@ -102,17 +99,29 @@ async function renameSecret( // - read existing file // - try to read non-existent file // - read directory? -async function getSecret( - vault: Vault, - secretName: SecretName, -): Promise { +async function getSecret(vault: Vault, secretName: string): Promise { try { - return await vault.access(async (efs) => { + return await vault.readF(async (efs) => { return (await efs.readFile(secretName)) as Buffer; }); } catch (err) { if (err.code === 'ENOENT') { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( + `Secret with name: ${secretName} does not exist`, + ); + } + throw err; + } +} + +async function statSecret(vault: Vault, secretName: string) { + try { + return await vault.readF(async (efs) => { + return await efs.stat(secretName); + }); + } catch (err) { + if (err.code === 'ENOENT') { + throw new vaultsErrors.ErrorSecretsSecretUndefined( `Secret with name: ${secretName} does not exist`, ); }
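A quick usage sketch of these helpers, assuming a vault: Vault obtained via VaultManager (illustrative only).

// Each helper opens its own readF/writeF transaction on the vault
await addSecret(vault, 'api-token', 'abc123');
const token = await getSecret(vault, 'api-token'); // Buffer
const info = await statSecret(vault, 'api-token');
await renameSecret(vault, 'api-token', 'api-token-old');
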
@@ -126,14 +135,14 @@ async function getSecret( // TODO: tests // - delete a secret // - Secret doesn't exist -// - delete a full and empty directory with and without recursive. +// - delete a full and empty directory with and without recursive async function deleteSecret( vault: Vault, - secretName: SecretName, + secretName: string, fileOptions?: FileOptions, logger?: Logger, ): Promise { - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { if ((await efs.stat(secretName)).isDirectory()) { await efs.rmdir(secretName, fileOptions); logger?.info(`Deleted directory at '${secretName}'`); @@ -142,7 +151,7 @@ await efs.unlink(secretName); logger?.info(`Deleted secret at '${secretName}'`); } else { - throw new vaultsErrors.ErrorSecretUndefined( + throw new vaultsErrors.ErrorSecretsSecretUndefined( `path '${secretName}' does not exist in vault`, ); } @@ -155,18 +164,18 @@ */ async function mkdir( vault: Vault, - dirPath: SecretName, + dirPath: string, fileOptions?: FileOptions, logger?: Logger, ): Promise { const recursive = !!fileOptions?.recursive; - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { try { await efs.mkdir(dirPath, fileOptions); } catch (err) { if (err.code === 'ENOENT' && !recursive) { - throw new vaultsErrors.ErrorRecursive( + throw new vaultsErrors.ErrorVaultsRecursive( `Could not create directory '${dirPath}' without recursive option`, ); } @@ -181,16 +190,16 @@ } // TODO: tests // - adding existing directory // - adding non-existent directory -// - adding a file. +// - adding a file async function addSecretDirectory( vault: Vault, - secretDirectory: SecretName, - fs: FileSystem, + secretDirectory: string, + fs = require('fs'), logger?: Logger, ): Promise { const absoluteDirPath = path.resolve(secretDirectory); - await vault.commit(async (efs) => { + await vault.writeF(async (efs) => { for await (const secretPath of vaultsUtils.readdirRecursively( fs, absoluteDirPath, @@ -235,12 +244,12 @@ * Retrieves a list of the secrets in a vault */ // TODO: tests -// - read secrets. 
+// - read secrets // - no secrets -async function listSecrets(vault: Vault): Promise { - return await vault.access(async (efs) => { - const secrets: SecretList = []; - for await (const secret of vaultsUtils.readdirRecursivelyEFS(efs, '.')) { +async function listSecrets(vault: Vault): Promise { + return await vault.readF(async (efs) => { + const secrets: string[] = []; + for await (const secret of vaultsUtils.readdirRecursively(efs)) { secrets.push(secret); } return secrets; @@ -252,6 +261,7 @@ export { updateSecret, renameSecret, getSecret, + statSecret, deleteSecret, mkdir, addSecretDirectory, diff --git a/src/vaults/errors.ts b/src/vaults/errors.ts index 69e4e3968..43e877caf 100644 --- a/src/vaults/errors.ts +++ b/src/vaults/errors.ts @@ -1,81 +1,139 @@ -import { ErrorPolykey } from '../errors'; +import { ErrorPolykey, sysexits } from '../errors'; class ErrorVaults extends ErrorPolykey {} -class ErrorSecrets extends ErrorPolykey {} +class ErrorVaultManagerRunning extends ErrorVaults { + description = 'VaultManager is running'; + exitCode = sysexits.USAGE; +} -class ErrorVaultManagerRunning extends ErrorVaults {} +class ErrorVaultManagerNotRunning extends ErrorVaults { + description = 'VaultManager is not running'; + exitCode = sysexits.USAGE; +} -class ErrorVaultManagerNotRunning extends ErrorVaults {} +class ErrorVaultManagerDestroyed extends ErrorVaults { + description = 'VaultManager is destroyed'; + exitCode = sysexits.USAGE; +} -class ErrorVaultManagerDestroyed extends ErrorVaults {} +class ErrorVaultManagerKey extends ErrorVaults { + description = 'Vault key is invalid'; + exitCode = sysexits.CONFIG; +} -class ErrorVaultUndefined extends ErrorVaults { - description: string = 'Vault does not exist'; - exitCode: number = 10; +class ErrorVaultManagerEFS extends ErrorVaults { + description = 'EFS failed'; + exitCode = sysexits.UNAVAILABLE; } -class ErrorVaultDefined extends ErrorVaults {} +class ErrorVault extends ErrorVaults {} -class ErrorRemoteVaultUndefined extends ErrorVaults {} +class ErrorVaultRunning extends ErrorVault { + description = 'Vault is running'; + exitCode = sysexits.USAGE; +} -class ErrorVaultUninitialised extends ErrorVaults {} +class ErrorVaultNotRunning extends ErrorVault { + description = 'Vault is not running'; + exitCode = sysexits.USAGE; +} -class ErrorVaultNotStarted extends ErrorVaults {} +class ErrorVaultDestroyed extends ErrorVault { + description = 'Vault is destroyed'; + exitCode = sysexits.USAGE; +} -class ErrorVaultDestroyed extends ErrorVaults {} +class ErrorVaultReferenceInvalid extends ErrorVault { + description = 'Reference is invalid'; + exitCode = sysexits.USAGE; +} -class ErrorRecursive extends ErrorVaults {} +class ErrorVaultReferenceMissing extends ErrorVault { + description = 'Reference does not exist'; + exitCode = sysexits.USAGE; +} -class ErrorVaultModified extends ErrorVaults {} +class ErrorVaultRemoteDefined extends ErrorVaults { + description = 'Vault is a clone of a remote vault and can not be mutated'; + exitCode = sysexits.USAGE; +} -class ErrorMalformedVaultDBValue extends ErrorVaults {} +class ErrorVaultRemoteUndefined extends ErrorVaults { + description = 'Vault has no remote set and can not be pulled'; + exitCode = sysexits.USAGE; +} -class ErrorVaultUnlinked extends ErrorVaults {} +class ErrorVaultsVaultUndefined extends ErrorVaults { + description = 'Vault does not exist'; + exitCode = sysexits.USAGE; +} -class ErrorCreateVaultId extends ErrorVaults {} +class ErrorVaultsVaultDefined extends ErrorVaults { + description = 
'Vault already exists'; + exitCode = sysexits.USAGE; +} -class ErrorInvalidVaultId extends ErrorVaults {} // TODO: Assign a proper error code and message. +class ErrorVaultsRecursive extends ErrorVaults { + description = 'Recursive option was not set'; + exitCode = sysexits.USAGE; +} -class ErrorVaultMergeConflict extends ErrorVaults {} +class ErrorVaultsCreateVaultId extends ErrorVaults { + description = 'Failed to create unique VaultId'; + exitCode = sysexits.SOFTWARE; +} -class ErrorVaultCommitUndefined extends ErrorVaults { - description: string = 'Commit does not exist'; - exitCode: number = 10; +class ErrorVaultsMergeConflict extends ErrorVaults { + description = 'Merge Conflicts are not supported yet'; + exitCode = sysexits.SOFTWARE; } -class ErrorSecretUndefined extends ErrorSecrets {} +class ErrorVaultsPermissionDenied extends ErrorVaults { + description = 'Permission was denied'; + exitCode = sysexits.NOPERM; +} -class ErrorSecretDefined extends ErrorSecrets {} +class ErrorVaultsNameConflict extends ErrorVaults { + description = 'Unique name could not be created'; + exitCode = sysexits.UNAVAILABLE; +} -class ErrorReadingSecret extends ErrorSecrets {} +class ErrorSecrets extends ErrorPolykey {} -class ErrorGitFile extends ErrorSecrets {} +class ErrorSecretsSecretUndefined extends ErrorSecrets { + description = 'Secret does not exist'; + exitCode = sysexits.USAGE; +} -class ErrorVaultsInvalidVaultId extends ErrorVaults {} +class ErrorSecretsSecretDefined extends ErrorSecrets { + description = 'Secret already exists'; + exitCode = sysexits.USAGE; +} export { ErrorVaults, ErrorVaultManagerRunning, ErrorVaultManagerNotRunning, ErrorVaultManagerDestroyed, - ErrorVaultUndefined, - ErrorVaultDefined, - ErrorRemoteVaultUndefined, - ErrorVaultUninitialised, - ErrorVaultNotStarted, + ErrorVaultManagerKey, + ErrorVaultManagerEFS, + ErrorVault, + ErrorVaultRunning, + ErrorVaultNotRunning, ErrorVaultDestroyed, - ErrorRecursive, - ErrorVaultModified, - ErrorMalformedVaultDBValue, - ErrorVaultUnlinked, - ErrorCreateVaultId, - ErrorInvalidVaultId, - ErrorVaultMergeConflict, - ErrorVaultCommitUndefined, - ErrorSecretUndefined, - ErrorSecretDefined, - ErrorReadingSecret, - ErrorGitFile, - ErrorVaultsInvalidVaultId, + ErrorVaultReferenceInvalid, + ErrorVaultReferenceMissing, + ErrorVaultRemoteDefined, + ErrorVaultRemoteUndefined, + ErrorVaultsVaultUndefined, + ErrorVaultsVaultDefined, + ErrorVaultsRecursive, + ErrorVaultsCreateVaultId, + ErrorVaultsMergeConflict, + ErrorVaultsPermissionDenied, + ErrorVaultsNameConflict, + ErrorSecrets, + ErrorSecretsSecretUndefined, + ErrorSecretsSecretDefined, }; diff --git a/src/vaults/index.ts b/src/vaults/index.ts index 95188f863..84fc46769 100644 --- a/src/vaults/index.ts +++ b/src/vaults/index.ts @@ -1,5 +1,6 @@ export { default as VaultManager } from './VaultManager'; export { default as VaultInternal } from './VaultInternal'; +export type { Vault } from './Vault'; export * as utils from './utils'; export * as types from './types'; export * as errors from './errors'; diff --git a/src/vaults/types.ts b/src/vaults/types.ts index 559b89bd8..f4eee7644 100644 --- a/src/vaults/types.ts +++ b/src/vaults/types.ts @@ -1,173 +1,157 @@ -import type VaultInternal from './VaultInternal'; -import type { Opaque } from '../types'; -import type { NodeId } from '../nodes/types'; -import type { MutexInterface } from 'async-mutex'; +import type { Id } from '@matrixai/id'; +import type { EncryptedFS } from 'encryptedfs'; import type { Callback, Path } from 
'encryptedfs/dist/types'; import type { FdIndex } from 'encryptedfs/dist/fd/types'; -import type { EncryptedFS } from 'encryptedfs'; -import type { Id, IdString } from '../GenericIdTypes'; +import type { Opaque } from '../types'; const vaultActions = ['clone', 'pull'] as const; +type VaultAction = typeof vaultActions[number]; + /** - * Randomly generated vault ID for each new vault + * Special tags that are managed by VaultInternal + * They are used to refer to specific commits + * These may or may not be implemented using Git tags */ -type VaultId = Opaque<'VaultId', Id>; - -type VaultIdPretty = Opaque<'VaultIdPretty', IdString>; - -type VaultName = Opaque<'VaultName', string>; - -type VaultKey = Opaque<'VaultKey', Buffer>; +const tagLast = 'last'; /** - * Actions relating to what is possible with vaults + * Tuple of static references */ -type VaultAction = typeof vaultActions[number]; - -type VaultList = Map; - -type VaultMetadata = { - name: VaultName; - id: VaultId; - remoteNode: NodeId; - remoteVault: VaultId; -}; +const refs = ['HEAD', tagLast] as const; -type SecretName = string; - -type SecretList = string[]; +type VaultId = Opaque<'VaultId', Id>; +type VaultIdEncoded = Opaque<'VaultIdEncoded', string>; +type VaultIdString = Opaque<'VaultIdString', string>; -type SecretContent = Buffer | string; +type VaultRef = typeof refs[number]; -type VaultMap = Map< - string, - { - vault?: VaultInternal; - lock: MutexInterface; - } ->; +type CommitId = Opaque<'CommitId', string>; -type FileOptions = { - recursive?: boolean; +type CommitLog = { + commitId: CommitId; + parent: Array; + author: { + name: string; + timestamp: Date; + }; + committer: { + name: string; + timestamp: Date; + }; + message: string; }; -type VaultActions = Partial>; + +/** + * Read-only interface for EncryptedFS + * Note that the open flags type is not complete + * Combinations of the flags can be used as well + */ interface FileSystemReadable { - chdir: typeof EncryptedFS.prototype.chdir; - access: typeof EncryptedFS.prototype.access; - chmod: typeof EncryptedFS.prototype.chmod; - chown: typeof EncryptedFS.prototype.chown; - chownr: typeof EncryptedFS.prototype.chownr; - close: typeof EncryptedFS.prototype.close; - createReadStream: typeof EncryptedFS.prototype.createReadStream; - exists: typeof EncryptedFS.prototype.exists; - fchmod: typeof EncryptedFS.prototype.fchmod; - fchown: typeof EncryptedFS.prototype.fchown; - fstat: typeof EncryptedFS.prototype.fstat; - futimes: typeof EncryptedFS.prototype.futimes; - lchmod: typeof EncryptedFS.prototype.lchmod; - lchown: typeof EncryptedFS.prototype.lchown; - lseek: typeof EncryptedFS.prototype.lseek; - lstat: typeof EncryptedFS.prototype.lstat; + constants: EncryptedFS['constants']; + promises: FileSystemReadable; + access: EncryptedFS['access']; + close: EncryptedFS['close']; + createReadStream: EncryptedFS['createReadStream']; + exists: EncryptedFS['exists']; + fstat: EncryptedFS['fstat']; + lseek: EncryptedFS['lseek']; + lstat: EncryptedFS['lstat']; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], mode?: number, ): Promise; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | 
EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], callback: Callback<[FdIndex]>, ): Promise; open( path: Path, - flags: 'r' | 'rs' | 'r+' | 'rs+', + flags: + | 'r' + | EncryptedFS['constants']['O_RDONLY'] + | EncryptedFS['constants']['O_DIRECTORY'] + | EncryptedFS['constants']['O_NOATIME'] + | EncryptedFS['constants']['O_DIRECT'] + | EncryptedFS['constants']['O_NONBLOCK'], mode: number, callback: Callback<[FdIndex]>, ): Promise; - read: typeof EncryptedFS.prototype.read; - readdir: typeof EncryptedFS.prototype.readdir; - readFile: typeof EncryptedFS.prototype.readFile; - readlink: typeof EncryptedFS.prototype.readlink; - realpath: typeof EncryptedFS.prototype.realpath; - stat: typeof EncryptedFS.prototype.stat; - utimes: typeof EncryptedFS.prototype.utimes; + read: EncryptedFS['read']; + readdir: EncryptedFS['readdir']; + readFile: EncryptedFS['readFile']; + readlink: EncryptedFS['readlink']; + realpath: EncryptedFS['realpath']; + stat: EncryptedFS['stat']; } +/** + * Readable & Writable interface for EncryptedFS + */ interface FileSystemWritable extends FileSystemReadable { - chdir: typeof EncryptedFS.prototype.chdir; - access: typeof EncryptedFS.prototype.access; - appendFile: typeof EncryptedFS.prototype.appendFile; - chmod: typeof EncryptedFS.prototype.chmod; - chown: typeof EncryptedFS.prototype.chown; - chownr: typeof EncryptedFS.prototype.chownr; - close: typeof EncryptedFS.prototype.close; - copyFile: typeof EncryptedFS.prototype.copyFile; - createWriteStream: typeof EncryptedFS.prototype.createWriteStream; - fallocate: typeof EncryptedFS.prototype.fallocate; - fchmod: typeof EncryptedFS.prototype.fchmod; - fchown: typeof EncryptedFS.prototype.fchown; - ftruncate: typeof EncryptedFS.prototype.ftruncate; - futimes: typeof EncryptedFS.prototype.futimes; - lchmod: typeof EncryptedFS.prototype.lchmod; - lchown: typeof EncryptedFS.prototype.lchown; - link: typeof EncryptedFS.prototype.link; - lseek: typeof EncryptedFS.prototype.lseek; - mkdir: typeof EncryptedFS.prototype.mkdir; - mkdtemp: typeof EncryptedFS.prototype.mkdtemp; - mknod: typeof EncryptedFS.prototype.mknod; - open: typeof EncryptedFS.prototype.open; - rename: typeof EncryptedFS.prototype.rename; - rmdir: typeof EncryptedFS.prototype.rmdir; - symlink: typeof EncryptedFS.prototype.symlink; - truncate: typeof EncryptedFS.prototype.truncate; - unlink: typeof EncryptedFS.prototype.unlink; - utimes: typeof EncryptedFS.prototype.utimes; - write: typeof EncryptedFS.prototype.write; - writeFile: typeof EncryptedFS.prototype.writeFile; + promises: FileSystemWritable; + appendFile: EncryptedFS['appendFile']; + chmod: EncryptedFS['chmod']; + chown: EncryptedFS['chown']; + chownr: EncryptedFS['chownr']; + copyFile: EncryptedFS['copyFile']; + createWriteStream: EncryptedFS['createWriteStream']; + fallocate: EncryptedFS['fallocate']; + fchmod: EncryptedFS['fchmod']; + fchown: EncryptedFS['fchown']; + fdatasync: EncryptedFS['fdatasync']; + fsync: EncryptedFS['fsync']; + ftruncate: EncryptedFS['ftruncate']; + futimes: EncryptedFS['futimes']; + lchmod: EncryptedFS['lchmod']; + lchown: EncryptedFS['lchown']; + link: EncryptedFS['link']; + mkdir: EncryptedFS['mkdir']; + mkdtemp: EncryptedFS['mkdtemp']; + mknod: EncryptedFS['mknod']; + open: EncryptedFS['open']; + rename: EncryptedFS['rename']; + rmdir: EncryptedFS['rmdir']; + symlink: EncryptedFS['symlink']; + truncate: EncryptedFS['truncate']; + unlink: EncryptedFS['unlink']; + utimes: EncryptedFS['utimes']; + write: EncryptedFS['write']; + writeFile: 
EncryptedFS['writeFile']; } -type CommitType = typeof VaultInternal.prototype.commit; -type AccessType = typeof VaultInternal.prototype.access; -type LogType = typeof VaultInternal.prototype.log; -type VersionType = typeof VaultInternal.prototype.version; -interface Vault { - baseDir: typeof VaultInternal.prototype.baseDir; - gitDir: typeof VaultInternal.prototype.gitDir; - vaultId: typeof VaultInternal.prototype.vaultId; - commit(...arg: Parameters): ReturnType; - access: AccessType; - log(...arg: Parameters): ReturnType; - version(...arg: Parameters): ReturnType; -} +type VaultName = string; -type CommitLog = { - oid: string; - committer: string; - timeStamp: number; - message: string; -}; +type VaultActions = Partial>; export { vaultActions }; export type { VaultId, - VaultIdPretty, + VaultIdEncoded, + VaultIdString, + VaultRef, VaultAction, - VaultKey, - VaultName, - VaultList, - VaultMap, - VaultMetadata, - VaultActions, - SecretName, - SecretList, - SecretContent, - FileOptions, + CommitId, + CommitLog, FileSystemReadable, FileSystemWritable, - Vault, - CommitLog, + VaultName, + VaultActions, }; + +export { tagLast, refs }; diff --git a/src/vaults/utils.ts b/src/vaults/utils.ts index b9987c04f..5758f91e9 100644 --- a/src/vaults/utils.ts +++ b/src/vaults/utils.ts @@ -1,248 +1,78 @@ -import type { EncryptedFS } from 'encryptedfs'; import type { VaultId, - VaultKey, - VaultList, - VaultName, + VaultIdEncoded, + VaultRef, VaultAction, - FileSystemReadable, - VaultIdPretty, + CommitId, } from './types'; -import type { FileSystem } from '../types'; import type { NodeId } from '../nodes/types'; -import type { GRPCClientAgent } from '../agent'; -import path from 'path'; -import { IdRandom } from '@matrixai/id'; -import * as grpc from '@grpc/grpc-js'; -import { vaultActions } from './types'; -import * as vaultsErrors from './errors'; -import { GitRequest } from '../git'; -import { promisify } from '../utils'; -import * as vaultsPB from '../proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '../proto/js/polykey/v1/nodes/nodes_pb'; -import * as keysUtils from '../keys/utils'; -import { isIdString, isId, makeIdString, makeId } from '../GenericIdTypes'; -import { utils as nodesUtils } from '../nodes'; - -async function generateVaultKey(bits: number = 256): Promise { - return (await keysUtils.generateKey(bits)) as VaultKey; -} -function isVaultId(arg: any) { - return isId(arg); -} +import type { EncryptedFS } from 'encryptedfs'; +import path from 'path'; +import { IdInternal, IdRandom } from '@matrixai/id'; +import { tagLast, refs, vaultActions } from './types'; +import * as nodesUtils from '../nodes/utils'; /** - * This will return arg as a valid VaultId or throw an error if it can't be converted. - * This will take a multibase string of the ID or the raw Buffer of the ID. 
- * @param arg - The variable we wish to convert - * @throws vaultsErrors.ErrorInvalidVaultId if the arg can't be converted into a VaultId - * @returns VaultId + * Vault history is designed for linear-history + * The canonical branch represents the one and only true timeline + * In the future, we can introduce non-linear history + * Where branches are automatically made when new timelines are created */ -function makeVaultId(arg: any): VaultId { - return makeId(arg); -} +const canonicalBranch = 'master'; +const canonicalBranchRef = 'refs/heads/' + canonicalBranch; -function isVaultIdPretty(arg: any): arg is VaultIdPretty { - return isIdString(arg); -} - -function makeVaultIdPretty(arg: any): VaultIdPretty { - return makeIdString(arg); -} +const vaultIdGenerator = new IdRandom(); -const randomIdGenerator = new IdRandom(); function generateVaultId(): VaultId { - return makeVaultId(randomIdGenerator.get()); + return vaultIdGenerator.get(); } -async function fileExists(fs: FileSystem, path: string): Promise { - try { - const fh = await fs.promises.open(path, 'r'); - await fh.close(); - } catch (err) { - if (err.code === 'ENOENT') { - return false; - } - } - return true; -} - -async function* readdirRecursively(fs, dir: string) { - const dirents = await fs.promises.readdir(dir, { withFileTypes: true }); - for (const dirent of dirents) { - const res = path.resolve(dir, dirent.name); - if (dirent.isDirectory()) { - yield* readdirRecursively(fs, res); - } else if (dirent.isFile()) { - yield res; - } - } -} - -async function* readdirRecursivelyEFS( - efs: FileSystemReadable, - dir: string, - dirs?: boolean, -) { - const dirents = await efs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); // Makes string | buffer a string. 
- secretPath = path.join(dir, res); - if ((await efs.stat(secretPath)).isDirectory() && dirent !== '.git') { - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursivelyEFS(efs, secretPath, dirs); - } else if ((await efs.stat(secretPath)).isFile()) { - yield secretPath; - } - } -} - -async function* readdirRecursivelyEFS2( - fs: EncryptedFS, - dir: string, - dirs?: boolean, -): AsyncGenerator { - const dirents = await fs.readdir(dir); - let secretPath: string; - for (const dirent of dirents) { - const res = dirent.toString(); - secretPath = path.join(dir, res); - if (dirent !== '.git') { - try { - await fs.readdir(secretPath); - if (dirs === true) { - yield secretPath; - } - yield* readdirRecursivelyEFS2(fs, secretPath, dirs); - } catch (err) { - if (err.code === 'ENOTDIR') { - yield secretPath; - } - } - } - } +function encodeVaultId(vaultId: VaultId): VaultIdEncoded { + return vaultId.toMultibase('base58btc') as VaultIdEncoded; } -/** - * Searches a list of vaults for the given vault Id and associated name - * @throws If the vault Id does not exist - */ -function searchVaultName(vaultList: VaultList, vaultId: VaultId): VaultName { - let vaultName: VaultName | undefined; - - // Search each element in the list of vaults - for (const elem in vaultList) { - // List is of form \t - const value = vaultList[elem].split('\t'); - if (value[1] === vaultId) { - vaultName = value[0]; - break; - } - } - if (vaultName == null) { - throw new vaultsErrors.ErrorRemoteVaultUndefined( - `${vaultId} does not exist on connected node`, - ); - } - return vaultName; +function decodeVaultId(vaultIdEncoded: any): VaultId | undefined { + if (typeof vaultIdEncoded !== 'string') return; + const vaultId = IdInternal.fromMultibase(vaultIdEncoded); + if (vaultId == null) return; + // All VaultIds are 16 bytes long + if (vaultId.length !== 16) return; + return vaultId; } /** - * Creates a GitRequest object from the desired node connection. - * @param client GRPC connection to desired node - * @param nodeId */ -async function constructGitHandler( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const gitRequest = new GitRequest( - ((vaultNameOrId: string) => requestInfo(vaultNameOrId, client)).bind(this), - ((vaultNameOrId: string, body: Buffer) => - requestPack(vaultNameOrId, body, client)).bind(this), - (() => requestVaultNames(client, nodeId)).bind(this), - ); - return gitRequest; +function validateRef(ref: any): ref is VaultRef { + return refs.includes(ref) || validateCommitId(ref); }
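A sketch of the expected round-trip for the id helpers above (in-memory only, using the names defined here).

const vaultId = generateVaultId();        // 16 random bytes
const encoded = encodeVaultId(vaultId);   // base58btc multibase string, 'z...'
const decoded = decodeVaultId(encoded);   // the same 16 bytes, or undefined on bad input
validateRef('HEAD');                      // true: 'HEAD' is a static reference
validateRef(encoded);                     // false: neither a static ref nor a 40-hex commit id
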
/** - * Requests remote info from the connected node for the named vault. - * @param vaultId ID of the desired vault - * @param client A connection object to the node - * @returns Async Generator of Uint8Arrays representing the Info Response + * Commit IDs are SHA-1 hashes encoded as 40-character lowercase hexadecimal strings */ -async function* requestInfo( - vaultNameOrId: string, - client: GRPCClientAgent, -): AsyncGenerator { - const request = new vaultsPB.Vault(); - request.setNameOrId(vaultNameOrId); - const response = client.vaultsGitInfoGet(request); - for await (const resp of response) { - yield resp.getChunk_asU8(); - } +function validateCommitId(commitId: any): commitId is CommitId { + return /^[a-f0-9]{40}$/.test(commitId); } -/** - * Requests a pack from the connected node for the named vault - * @param vaultId ID of vault - * @param body contains the pack request - * @param client A connection object to the node - * @returns AsyncGenerator of Uint8Arrays representing the Pack Response - */ -async function* requestPack( - vaultNameOrId: string, - body: Buffer, - client: GRPCClientAgent, -): AsyncGenerator { - const responseBuffers: Array = []; - - const meta = new grpc.Metadata(); - // FIXME make it a VaultIdReadable - meta.set('vaultNameOrId', vaultNameOrId); - - const stream = client.vaultsGitPackGet(meta); - const write = promisify(stream.write).bind(stream); - - stream.on('data', (d) => { - responseBuffers.push(d.getChunk_asU8()); - }); - - const chunk = new vaultsPB.PackChunk(); - chunk.setChunk(body); - write(chunk); - stream.end(); - - yield await new Promise((resolve) => { - stream.once('end', () => { - resolve(Buffer.concat(responseBuffers)); - }); - }); +function commitAuthor(nodeId: NodeId): { name: string; email: string } { + return { + name: nodesUtils.encodeNodeId(nodeId), + email: '', + }; } -/** - * Requests the vault names from the connected node. 
- * @param client A connection object to the node - * @param nodeId - */ -async function requestVaultNames( - client: GRPCClientAgent, - nodeId: NodeId, -): Promise { - const request = new nodesPB.Node(); - request.setNodeId(nodesUtils.encodeNodeId(nodeId)); - const vaultList = client.vaultsScan(request); - const data: string[] = []; - for await (const vault of vaultList) { - const vaultMessage = vault.getNameOrId(); - data.push(vaultMessage); +async function* readdirRecursively(fs, dir = '.') { + const dirents = await fs.promises.readdir(dir); + for (const dirent of dirents) { + const res = path.join(dir, dirent.toString()); + const stat = await fs.promises.stat(res); + if (stat.isDirectory()) { + yield* readdirRecursively(fs, res); + } else if (stat.isFile()) { + yield res; + } } - - return data; } function isVaultAction(action: any): action is VaultAction { @@ -250,18 +80,29 @@ function isVaultAction(action: any): action is VaultAction { return (vaultActions as Readonly>).includes(action); } +async function deleteObject(fs: EncryptedFS, gitdir: string, ref: string) { + const bucket = ref.slice(0, 2); + const shortref = ref.slice(2); + const objectPath = path.join(gitdir, 'objects', bucket, shortref); + try { + await fs.unlink(objectPath); + } catch (e) { + if (e.code !== 'ENOENT') throw e; + } +} + export { - isVaultId, - isVaultIdPretty, - makeVaultId, - makeVaultIdPretty, - generateVaultKey, + tagLast, + refs, + canonicalBranch, + canonicalBranchRef, generateVaultId, - fileExists, - readdirRecursively, - readdirRecursivelyEFS, - readdirRecursivelyEFS2, - constructGitHandler, - searchVaultName, + encodeVaultId, + decodeVaultId, + validateRef, + validateCommitId, + commitAuthor, isVaultAction, + readdirRecursively, + deleteObject, }; diff --git a/tests/GenericIdTypes.test.ts b/tests/GenericIdTypes.test.ts deleted file mode 100644 index fa3fa6d42..000000000 --- a/tests/GenericIdTypes.test.ts +++ /dev/null @@ -1,66 +0,0 @@ -import type { Id } from '@matrixai/id/dist/Id'; -import type { IdString } from '@/GenericIdTypes'; -import type { Opaque } from '@/types'; -import { utils as idUtils } from '@matrixai/id'; -import { makeIdString, makeId } from '@/GenericIdTypes'; -import { ErrorInvalidId } from '@/errors'; - -describe('GenericID Type utility functions', () => { - type TestRawType = Opaque<'testRawType', Id>; - type TestType = Opaque<'testType', IdString>; - - const validString = 'zUGWu8zn6VSa6dYrty8DJdm'; - const invalidString = 'notAValidString'; - const validBuffer = Buffer.alloc(16); - const invalidBuffer = Buffer.alloc(20); - const validTestRawType = idUtils.fromString( - 'Vaultxxxxxxxxxxx', - ) as TestRawType; - - // Testing generation. - // test('can generate a Id', async () => { - // const idGen = new IdRandom(); - // const id = idGen.get(); - // console.log(id.toString()); - // console.log(Buffer.from(id).toString()); - // }); - // // Testing conversions. 
- // test('random tests', () => { - // const idGen = new IdRandom(); - // const id = idGen.get(); - // const idString = id.toString(); - // console.log(idString); - // - // const testString = 'vault1xxxxxxxxxx'; - // console.log(idUtils.fromString(testString)) - // console.log(idUtils.toString(idUtils.fromString(testString)!)) - // }); - - test('makeId converts a buffer', () => { - expect(() => makeId(validTestRawType)).not.toThrow(); - }); - test('makeId converts a buffer', () => { - expect(() => makeId(validBuffer)).not.toThrow(); - }); - test('makeId converts a string', () => { - expect(() => makeId(validString)).not.toThrow(); - }); - test('makeId throws error for invalid buffer.', () => { - expect(() => makeId(invalidBuffer)).toThrow(ErrorInvalidId); - }); - test('makeId throws error for invalid string.', () => { - expect(() => makeId(invalidString)).toThrow(ErrorInvalidId); - }); - test('makeIdString converts a Buffer.', () => { - expect(() => makeIdString(validBuffer)).not.toThrow(); - }); - test('makeIdString converts a string.', () => { - expect(() => makeIdString(validString)).not.toThrow(); - }); - test('makeIdString throws error for invalid buffer.', () => { - expect(() => makeIdString(invalidBuffer)).toThrow(ErrorInvalidId); - }); - test('makeIdString throws error for invalid buffer.', () => { - expect(() => makeIdString(invalidString)).toThrow(ErrorInvalidId); - }); -}); diff --git a/tests/acl/ACL.test.ts b/tests/acl/ACL.test.ts index a6f8c46f6..a75819f2f 100644 --- a/tests/acl/ACL.test.ts +++ b/tests/acl/ACL.test.ts @@ -7,7 +7,6 @@ import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { DB } from '@matrixai/db'; -import { utils as idUtils } from '@matrixai/id'; import { ACL, errors as aclErrors } from '@/acl'; import { utils as keysUtils } from '@/keys'; import { utils as vaultsUtils } from '@/vaults'; @@ -51,10 +50,10 @@ describe(ACL.name, () => { }, }, }); - vaultId1 = vaultsUtils.makeVaultId(idUtils.fromString('vault1xxxxxxxxxx')); - vaultId2 = vaultsUtils.makeVaultId(idUtils.fromString('vault2xxxxxxxxxx')); - vaultId3 = vaultsUtils.makeVaultId(idUtils.fromString('vault3xxxxxxxxxx')); - vaultId4 = vaultsUtils.makeVaultId(idUtils.fromString('vault4xxxxxxxxxx')); + vaultId1 = vaultsUtils.generateVaultId(); + vaultId2 = vaultsUtils.generateVaultId(); + vaultId3 = vaultsUtils.generateVaultId(); + vaultId4 = vaultsUtils.generateVaultId(); }); afterEach(async () => { await db.stop(); @@ -144,7 +143,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }); // Gestalt2 @@ -153,7 +152,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault2xxxxxxxxxx: { clone: null }, + [vaultId2]: { clone: null }, }, }); // Check g1 perm @@ -215,7 +214,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }); await acl.unsetVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -227,7 +226,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, + [vaultId1]: {}, }, }); await acl.setVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -237,13 +236,13 @@ describe(ACL.name, () => { expect(vaultPerm[nodeIdG1First].vaults[vaultId1]).toHaveProperty('clone'); const vaultPerms = await acl.getVaultPerms(); expect(vaultPerms).toEqual({ - vault1xxxxxxxxxx: { + [vaultId1]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null, 
clone: null }, + [vaultId1]: { pull: null, clone: null }, }, }, }, @@ -257,7 +256,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }; await acl.setNodesPerm( @@ -288,7 +287,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, }, }); await acl.setVaultAction(vaultId1, nodeIdG1First, 'pull'); @@ -302,39 +301,39 @@ describe(ACL.name, () => { expect(vaultPerm1[nodeIdG1First].vaults[vaultId1]).toHaveProperty('pull'); const vaultPerms = await acl.getVaultPerms(); expect(vaultPerms).toMatchObject({ - vault1xxxxxxxxxx: { + [vaultId1]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, - vault2xxxxxxxxxx: { + [vaultId2]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, - vault3xxxxxxxxxx: { + [vaultId3]: { [nodeIdG1First]: { gestalt: { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null, pull: null }, - vault2xxxxxxxxxx: { clone: null, pull: null }, - vault3xxxxxxxxxx: { clone: null, pull: null }, + [vaultId1]: { clone: null, pull: null }, + [vaultId2]: { clone: null, pull: null }, + [vaultId3]: { clone: null, pull: null }, }, }, }, @@ -355,7 +354,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { pull: null }, + [vaultId1]: { pull: null }, }, }; await acl.setNodesPerm( @@ -395,8 +394,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault2xxxxxxxxxx: { pull: null }, + [vaultId1]: { clone: null }, + [vaultId2]: { pull: null }, }, }, }); @@ -409,7 +408,7 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault2xxxxxxxxxx: { pull: null }, + [vaultId2]: { pull: null }, }, }, }); @@ -457,8 +456,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, [nodeIdG1Fourth]: { @@ -466,8 +465,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, [nodeIdG1Third]: { @@ -475,8 +474,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: {}, - vault4xxxxxxxxxx: { pull: null }, + [vaultId1]: {}, + [vaultId4]: { pull: null }, }, }, }); @@ -486,8 +485,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault4xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, + [vaultId4]: { clone: null }, }, }, [nodeIdG2Second]: { @@ -495,8 +494,8 @@ describe(ACL.name, () => { notify: null, }, vaults: { - vault1xxxxxxxxxx: { clone: null }, - vault4xxxxxxxxxx: { clone: null }, + [vaultId1]: { clone: null }, + [vaultId4]: { clone: null }, }, }, }); diff --git a/tests/agent/GRPCClientAgent.test.ts b/tests/agent/GRPCClientAgent.test.ts index eca793c49..408775ab8 100644 --- a/tests/agent/GRPCClientAgent.test.ts 
+++ b/tests/agent/GRPCClientAgent.test.ts @@ -1,6 +1,6 @@ -import type { TLSConfig } from '@/network/types'; -import type { NodeIdEncoded, NodeInfo } from '@/nodes/types'; +import type { Host, Port, TLSConfig } from '@/network/types'; import type * as grpc from '@grpc/grpc-js'; +import type { NodeId } from '@/nodes/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -19,37 +19,23 @@ import GRPCClientAgent from '@/agent/GRPCClientAgent'; import VaultManager from '@/vaults/VaultManager'; import NotificationsManager from '@/notifications/NotificationsManager'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; -import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; -import * as nodesPB from '@/proto/js/polykey/v1/nodes/nodes_pb'; import * as agentErrors from '@/agent/errors'; import * as keysUtils from '@/keys/utils'; -import * as nodesUtils from '@/nodes/utils'; import * as testAgentUtils from './utils'; -import * as testUtils from '../utils'; describe(GRPCClientAgent.name, () => { + const host = '127.0.0.1' as Host; const password = 'password'; const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ new StreamHandler(), ]); - const node1: NodeInfo = { - id: 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug' as NodeIdEncoded, - chain: {}, - }; - const nodeId1 = nodesUtils.decodeNodeId(node1.id)!; - let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; beforeAll(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); mockedGenerateDeterministicKeyPair = jest .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); + .mockImplementation((bits, _) => keysUtils.generateKeyPair(bits)); }); afterAll(async () => { - mockedGenerateKeyPair.mockRestore(); mockedGenerateDeterministicKeyPair.mockRestore(); }); let client: GRPCClientAgent; @@ -94,6 +80,8 @@ describe(GRPCClientAgent.name, () => { }); await fwdProxy.start({ tlsConfig, + egressHost: host, + proxyHost: host, }); revProxy = new ReverseProxy({ logger: logger, @@ -159,10 +147,10 @@ describe(GRPCClientAgent.name, () => { keyManager: keyManager, vaultsPath: vaultsPath, nodeConnectionManager: nodeConnectionManager, - vaultsKey: keyManager.vaultKey, db: db, acl: acl, gestaltGraph: gestaltGraph, + notificationsManager: notificationsManager, fs: fs, logger: logger, }); @@ -174,12 +162,22 @@ describe(GRPCClientAgent.name, () => { sigchain, nodeGraph, notificationsManager, + acl, + gestaltGraph, + revProxy, + }); + await revProxy.start({ + ingressHost: host, + serverHost: host, + serverPort: port as Port, + tlsConfig: tlsConfig, }); client = await testAgentUtils.openTestAgentClient(port); }, global.defaultTimeout); afterEach(async () => { await testAgentUtils.closeTestAgentClient(client); await testAgentUtils.closeTestAgentServer(server); + await revProxy.stop(); await vaultManager.stop(); await notificationsManager.stop(); await sigchain.stop(); @@ -208,46 +206,6 @@ describe(GRPCClientAgent.name, () => { const response = await client.echo(echoMessage); expect(response.getChallenge()).toBe('yes'); }); - test.skip('can check permissions', async () => { - // FIXME: permissions not implemented on vaults. 
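// The beforeAll/afterAll pair above trades determinism for speed: deriving a
// key pair from a recovery seed is expensive, so the spy reroutes it to plain
// random generation. The same pattern in isolation (a sketch; the ignored
// second argument is assumed to be the recovery seed):
let mockedKeyGen: jest.SpyInstance;
beforeAll(() => {
  mockedKeyGen = jest
    .spyOn(keysUtils, 'generateDeterministicKeyPair')
    .mockImplementation((bits, _seed) => keysUtils.generateKeyPair(bits));
});
afterAll(() => {
  // Restore the real derivation so later suites are unaffected
  mockedKeyGen.mockRestore();
});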
- // const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - // await vaultManager.unsetVaultPermissions('12345' as NodeId, vault.vaultId); - const vaultPermMessage = new vaultsPB.NodePermission(); - vaultPermMessage.setNodeId(nodesUtils.encodeNodeId(nodeId1)); - // VaultPermMessage.setVaultId(vault.vaultId); - const response = await client.vaultsPermissionsCheck(vaultPermMessage); - expect(response.getPermission()).toBeFalsy(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - const response2 = await client.vaultsPermissionsCheck(vaultPermMessage); - expect(response2.getPermission()).toBeTruthy(); - // Await vaultManager.deleteVault(vault.vaultId); - }); - test.skip('can scan vaults', async () => { - // FIXME, permissions not implemented on vaults - // const vault = await vaultManager.createVault('TestAgentVault' as VaultName); - await gestaltGraph.setNode(node1); - const nodeIdMessage = new nodesPB.Node(); - nodeIdMessage.setNodeId(nodesUtils.encodeNodeId(nodeId1)); - const response = client.vaultsScan(nodeIdMessage); - const data: string[] = []; - for await (const resp of response) { - const chunk = resp.getNameOrId(); - data.push(Buffer.from(chunk).toString()); - } - expect(data).toStrictEqual([]); - fail(); - // Await vaultManager.setVaultPermissions('12345' as NodeId, vault.vaultId); - // const response2 = client.vaultsScan(nodeIdMessage); - // Const data2: string[] = []; - // for await (const resp of response2) { - // Const chunk = resp.getNameOrId(); - // Data2.push(Buffer.from(chunk).toString()); - // } - // Expect(data2).toStrictEqual([`${vault.vaultName}\t${vault.vaultId}`]); - // await vaultManager.deleteVault(vault.vaultId); - }); test('Can connect over insecure connection.', async () => { const echoMessage = new utilsPB.EchoMessage(); echoMessage.setChallenge('yes'); @@ -256,4 +214,120 @@ describe(GRPCClientAgent.name, () => { expect(response.getChallenge()).toBe('yes'); expect(client.secured).toBeFalsy(); }); + describe('With connection through proxies', () => { + const logger = new Logger(`${GRPCClientAgent.name} test`, LogLevel.WARN, [ + new StreamHandler(), + ]); + const localHost = '127.0.0.1' as Host; + + let clientWithProxies1: GRPCClientAgent; + let clientFwdProxy1: ForwardProxy; + let clientKeyManager1: KeyManager; + let nodeId1: NodeId; + + let clientWithProxies2: GRPCClientAgent; + let clientFwdProxy2: ForwardProxy; + let clientKeyManager2: KeyManager; + let nodeId2: NodeId; + + beforeEach(async () => { + dataDir = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + // Setting up clients + clientFwdProxy1 = new ForwardProxy({ + authToken: 'auth', + logger, + }); + clientKeyManager1 = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'clientKeys1'), + password: 'password', + logger, + }); + nodeId1 = clientKeyManager1.getNodeId(); + await clientFwdProxy1.start({ + tlsConfig: { + keyPrivatePem: clientKeyManager1.getRootKeyPairPem().privateKey, + certChainPem: await clientKeyManager1.getRootCertChainPem(), + }, + egressHost: localHost, + proxyHost: localHost, + }); + clientWithProxies1 = await GRPCClientAgent.createGRPCClientAgent({ + host: localHost, + nodeId: keyManager.getNodeId(), + port: revProxy.getIngressPort(), + proxyConfig: { + host: clientFwdProxy1.getProxyHost(), + port: clientFwdProxy1.getProxyPort(), + authToken: clientFwdProxy1.authToken, + }, + 
timeout: 5000, + logger, + }); + + clientFwdProxy2 = new ForwardProxy({ + authToken: 'auth', + logger, + }); + clientKeyManager2 = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'clientKeys2'), + password: 'password', + logger, + }); + nodeId2 = clientKeyManager2.getNodeId(); + await clientFwdProxy2.start({ + tlsConfig: { + keyPrivatePem: clientKeyManager2.getRootKeyPairPem().privateKey, + certChainPem: await clientKeyManager2.getRootCertChainPem(), + }, + egressHost: localHost, + proxyHost: localHost, + }); + clientWithProxies2 = await GRPCClientAgent.createGRPCClientAgent({ + host: localHost, + logger, + nodeId: keyManager.getNodeId(), + port: revProxy.getIngressPort(), + proxyConfig: { + host: clientFwdProxy2.getProxyHost(), + port: clientFwdProxy2.getProxyPort(), + authToken: clientFwdProxy2.authToken, + }, + timeout: 5000, + }); + }); + afterEach(async () => { + await testAgentUtils.closeTestAgentClient(clientWithProxies1); + await clientFwdProxy1.stop(); + await clientKeyManager1.stop(); + await testAgentUtils.closeTestAgentClient(clientWithProxies2); + await clientFwdProxy2.stop(); + await clientKeyManager2.stop(); + }); + test('connectionInfoGetter returns correct information for each connection', async () => { + // We can't directly spy on the connectionInfoGetter result + // but we can check that it called `getConnectionInfoByProxy` properly + const getConnectionInfoByProxySpy = jest.spyOn( + ReverseProxy.prototype, + 'getConnectionInfoByProxy', + ); + await clientWithProxies1.echo(new utilsPB.EchoMessage()); + await clientWithProxies2.echo(new utilsPB.EchoMessage()); + // It should've returned the expected information + const returnedInfo1 = getConnectionInfoByProxySpy.mock.results[0].value; + expect(returnedInfo1.ingressPort).toEqual(revProxy.getIngressPort()); + expect(returnedInfo1.ingressHost).toEqual(localHost); + expect(returnedInfo1.egressPort).toEqual(clientFwdProxy1.getEgressPort()); + expect(returnedInfo1.egressHost).toEqual(localHost); + expect(returnedInfo1.nodeId).toStrictEqual(nodeId1); + // Checking second call + const returnedInfo2 = getConnectionInfoByProxySpy.mock.results[1].value; + expect(returnedInfo2.ingressPort).toEqual(revProxy.getIngressPort()); + expect(returnedInfo2.ingressHost).toEqual(localHost); + expect(returnedInfo2.egressPort).toEqual(clientFwdProxy2.getEgressPort()); + expect(returnedInfo2.egressHost).toEqual(localHost); + expect(returnedInfo2.nodeId).toStrictEqual(nodeId2); + }); + }); }); diff --git a/tests/agent/utils.ts b/tests/agent/utils.ts index 6b91930dd..6bfda2465 100644 --- a/tests/agent/utils.ts +++ b/tests/agent/utils.ts @@ -1,4 +1,4 @@ -import type { Host, Port } from '@/network/types'; +import type { Host, Port, ProxyConfig } from '@/network/types'; import type { IAgentServiceServer } from '@/proto/js/polykey/v1/agent_service_grpc_pb'; import type { KeyManager } from '@/keys'; @@ -6,6 +6,10 @@ import type { VaultManager } from '@/vaults'; import type { NodeGraph, NodeConnectionManager, NodeManager } from '@/nodes'; import type { Sigchain } from '@/sigchain'; import type { NotificationsManager } from '@/notifications'; +import type { ACL } from '@/acl'; +import type { GestaltGraph } from '@/gestalts'; +import type { NodeId } from 'nodes/types'; +import type { ReverseProxy } from 'network/index'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import * as grpc from '@grpc/grpc-js'; import { promisify } from '@/utils'; @@ -24,6 +28,9 @@ async function openTestAgentServer({ nodeGraph, sigchain, 
notificationsManager, + acl, + gestaltGraph, + revProxy, }: { keyManager: KeyManager; vaultManager: VaultManager; @@ -32,15 +39,21 @@ async function openTestAgentServer({ nodeGraph: NodeGraph; sigchain: Sigchain; notificationsManager: NotificationsManager; + acl: ACL; + gestaltGraph: GestaltGraph; + revProxy: ReverseProxy; }) { const agentService: IAgentServiceServer = createAgentService({ keyManager, vaultManager, nodeManager, nodeGraph, - sigchain: sigchain, - notificationsManager: notificationsManager, - nodeConnectionManager: nodeConnectionManager, + sigchain, + notificationsManager, + nodeConnectionManager, + acl, + gestaltGraph, + revProxy, }); const server = new grpc.Server(); @@ -59,16 +72,21 @@ async function closeTestAgentServer(server) { await tryShutdown(); } -async function openTestAgentClient(port: number): Promise { +async function openTestAgentClient( + port: number, + nodeId?: NodeId, + proxyConfig?: ProxyConfig, +): Promise { const logger = new Logger('AgentClientTest', LogLevel.WARN, [ new StreamHandler(), ]); const agentClient = await GRPCClientAgent.createGRPCClientAgent({ - nodeId: testUtils.generateRandomNodeId(), + nodeId: nodeId ?? testUtils.generateRandomNodeId(), host: '127.0.0.1' as Host, port: port as Port, logger: logger, destroyCallback: async () => {}, + proxyConfig, timeout: 30000, }); return agentClient; diff --git a/tests/bin/secrets/secrets.test.ts b/tests/bin/secrets/secrets.test.ts index c72aee00d..aeee174d9 100644 --- a/tests/bin/secrets/secrets.test.ts +++ b/tests/bin/secrets/secrets.test.ts @@ -56,7 +56,7 @@ describe('CLI secrets', () => { 'should create secrets', async () => { const vaultName = 'Vault1' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretPath = path.join(dataDir, 'secret'); await fs.promises.writeFile(secretPath, 'this is a secret'); @@ -72,11 +72,13 @@ describe('CLI secrets', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('this is a secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('this is a secret'); + }); }, global.defaultTimeout * 2, ); @@ -84,28 +86,33 @@ describe('CLI secrets', () => { describe('commandDeleteSecret', () => { test('should delete secrets', async () => { const vaultName = 'Vault2' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); - - let list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + }); command = ['secrets', 'delete', '-np', dataDir, `${vaultName}:MySecret`]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - 
list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([]); + }); }); }); describe('commandGetSecret', () => { test('should retrieve secrets', async () => { const vaultName = 'Vault3' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); command = ['secrets', 'get', '-np', dataDir, `${vaultName}:MySecret`]; @@ -116,11 +123,13 @@ describe('CLI secrets', () => { describe('commandListSecrets', () => { test('should list secrets', async () => { const vaultName = 'Vault4' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret1', 'this is the secret 1'); - await vaultOps.addSecret(vault, 'MySecret2', 'this is the secret 2'); - await vaultOps.addSecret(vault, 'MySecret3', 'this is the secret 3'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret1', 'this is the secret 1'); + await vaultOps.addSecret(vault, 'MySecret2', 'this is the secret 2'); + await vaultOps.addSecret(vault, 'MySecret3', 'this is the secret 3'); + }); command = ['secrets', 'list', '-np', dataDir, vaultName]; @@ -131,7 +140,7 @@ describe('CLI secrets', () => { describe('commandNewDir', () => { test('should make a directory', async () => { const vaultName = 'Vault5' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); command = [ 'secrets', @@ -145,25 +154,33 @@ describe('CLI secrets', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - await vaultOps.addSecret(vault, 'dir1/MySecret1', 'this is the secret 1'); - await vaultOps.addSecret( - vault, - 'dir1/dir2/MySecret2', - 'this is the secret 2', - ); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret( + vault, + 'dir1/MySecret1', + 'this is the secret 1', + ); + await vaultOps.addSecret( + vault, + 'dir1/dir2/MySecret2', + 'this is the secret 2', + ); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual( - ['dir1/MySecret1', 'dir1/dir2/MySecret2'].sort(), - ); + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual( + ['dir1/MySecret1', 'dir1/dir2/MySecret2'].sort(), + ); + }); }); }); describe('commandRenameSecret', () => { test('should rename secrets', async () => { const vaultName = 'Vault6' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); command = [ 'secrets', @@ -177,23 +194,26 @@ describe('CLI secrets', () => { const result = await 
testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MyRenamedSecret']); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MyRenamedSecret']); + }); }); }); describe('commandUpdateSecret', () => { test('should update secrets', async () => { const vaultName = 'Vault7' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretPath = path.join(dataDir, 'secret'); await fs.promises.writeFile(secretPath, 'updated-content'); - await vaultOps.addSecret(vault, 'MySecret', 'original-content'); - - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('original-content'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'original-content'); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('original-content'); + }); command = [ 'secrets', @@ -207,17 +227,19 @@ describe('CLI secrets', () => { const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); - const list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual(['MySecret']); - expect( - (await vaultOps.getSecret(vault, 'MySecret')).toString(), - ).toStrictEqual('updated-content'); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual(['MySecret']); + expect( + (await vaultOps.getSecret(vault, 'MySecret')).toString(), + ).toStrictEqual('updated-content'); + }); }); }); describe('commandNewDirSecret', () => { test('should add a directory of secrets', async () => { const vaultName = 'Vault8' as VaultName; - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const secretDir = path.join(dataDir, 'secrets'); await fs.promises.mkdir(secretDir); @@ -234,20 +256,43 @@ describe('CLI secrets', () => { 'this is the secret 3', ); - let list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([]); + }); command = ['secrets', 'dir', '-np', dataDir, secretDir, vaultName]; const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result2.exitCode).toBe(0); - list = await vaultOps.listSecrets(vault); - expect(list.sort()).toStrictEqual([ - 'secrets/secret-1', - 'secrets/secret-2', - 'secrets/secret-3', - ]); + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + const list = await vaultOps.listSecrets(vault); + expect(list.sort()).toStrictEqual([ + 'secrets/secret-1', + 'secrets/secret-2', + 'secrets/secret-3', + ]); + }); + }); + }); + describe('commandStat', () => { + test('should retrieve secrets', async () => { + const vaultName = 'Vault9'; + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); + + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vaultOps.addSecret(vault, 'MySecret', 'this is the secret'); + }); + + command = ['secrets', 'stat', 
'-np', dataDir, `${vaultName}:MySecret`]; + + const result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(0); + expect(result.stdout).toContain('nlink: 1'); + expect(result.stdout).toContain('blocks: 1'); + expect(result.stdout).toContain('blksize: 4096'); + expect(result.stdout).toContain('size: 18'); }); }); }); diff --git a/tests/bin/vaults/vaults.test.ts b/tests/bin/vaults/vaults.test.ts index db23e80fc..6a5c1a974 100644 --- a/tests/bin/vaults/vaults.test.ts +++ b/tests/bin/vaults/vaults.test.ts @@ -1,17 +1,25 @@ -import type { NodeIdEncoded, NodeInfo } from '@/nodes/types'; -import type { Vault, VaultName } from '@/vaults/types'; +import type { NodeIdEncoded, NodeAddress, NodeInfo } from '@/nodes/types'; +import type { VaultId, VaultName } from '@/vaults/types'; import os from 'os'; import path from 'path'; import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import PolykeyAgent from '@/PolykeyAgent'; -import { makeVaultIdPretty } from '@/vaults/utils'; -import { utils as nodesUtils } from '@/nodes'; -import * as keysUtils from '@/keys/utils'; +import * as nodesUtils from '@/nodes/utils'; +import * as vaultsUtils from '@/vaults/utils'; +import sysexits from '@/utils/sysexits'; +import NotificationsManager from '@/notifications/NotificationsManager'; import * as testBinUtils from '../utils'; +import * as testUtils from '../../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); /** - * This test file has been optimised to use only one instance of PolykeyAgent where posible. + * This test file has been optimised to use only one instance of PolykeyAgent where possible. * Setting up the PolykeyAgent has been done in a beforeAll block. * Keep this in mind when adding or editing tests. * Any side effects need to be undone when the test has completed. 
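// Per the note above, suites in this file share a single PolykeyAgent, so
// every test must undo its own side effects. A sketch of the cleanup idiom
// this implies, assuming the shared `polykeyAgent` from this file and that
// listVaults resolves to a VaultName-to-VaultId map, as its uses here suggest:
afterEach(async () => {
  // Destroy every vault a test created so the next test starts clean
  const vaults = await polykeyAgent.vaultManager.listVaults();
  for (const vaultId of vaults.values()) {
    await polykeyAgent.vaultManager.destroyVault(vaultId);
  }
});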
@@ -33,16 +41,12 @@ describe('CLI vaults', () => { let vaultNumber: number; let vaultName: VaultName; - // Constants const nodeId1Encoded = 'vrsc24a1er424epq77dtoveo93meij0pc8ig4uvs9jbeld78n9nl0' as NodeIdEncoded; - const nodeId1 = nodesUtils.decodeNodeId(nodeId1Encoded)!; const nodeId2Encoded = 'vrcacp9vsb4ht25hds6s4lpp2abfaso0mptcfnh499n35vfcn2gkg' as NodeIdEncoded; - // Const nodeId2 = nodesUtils.decodeNodeId(nodeId2Encoded); const nodeId3Encoded = 'v359vgrgmqf1r5g4fvisiddjknjko6bmm4qv7646jr7fi9enbfuug' as NodeIdEncoded; - // Const nodeId3 = nodesUtils.decodeNodeId(nodeId3Encoded); const node1: NodeInfo = { id: nodeId1Encoded, @@ -63,16 +67,7 @@ describe('CLI vaults', () => { return `vault-${vaultNumber}` as VaultName; } - const mockedGenerateDeterministicKeyPair = jest.spyOn( - keysUtils, - 'generateDeterministicKeyPair', - ); - beforeAll(async () => { - mockedGenerateDeterministicKeyPair.mockImplementation((bits, _) => { - return keysUtils.generateKeyPair(bits); - }); - dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); @@ -161,7 +156,7 @@ describe('CLI vaults', () => { command = [ 'vaults', 'rename', - 'InvalidVaultId', // Vault does not exist + 'z4iAXFwgHGeyUrdC5CiCNU4', // Vault does not exist 'RenamedVault', '-np', dataDir, @@ -172,7 +167,7 @@ describe('CLI vaults', () => { const result = await testBinUtils.pkStdio([...command], {}, dataDir); // Exit code of the exception - expect(result.exitCode).toBe(10); + expect(result.exitCode).toBe(sysexits.USAGE); const list = (await polykeyAgent.vaultManager.listVaults()).keys(); const namesList: string[] = []; @@ -203,380 +198,382 @@ describe('CLI vaults', () => { expect(namesList).not.toContain(vaultName); }); }); - describe.skip('commandVaultStats', () => { - test('should return the stats of a vault', async () => { - command = ['vaults', 'stat', '-np', dataDir, vaultName]; - await polykeyAgent.vaultManager.createVault(vaultName); - const id = polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); + test( + 'should clone and pull a vault', + async () => { + const dataDir2 = await fs.promises.mkdtemp( + path.join(os.tmpdir(), 'polykey-test-'), + ); + const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ + password, + nodePath: dataDir2, + logger: logger, + }); + const vaultId = await targetPolykeyAgent.vaultManager.createVault( + vaultName, + ); + await targetPolykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret 1', 'secret the first'); + }); + }, + ); + + await targetPolykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), + chain: {}, + }); + const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); + await polykeyAgent.nodeManager.setNode(targetNodeId, { + host: targetPolykeyAgent.revProxy.getIngressHost(), + port: targetPolykeyAgent.revProxy.getIngressPort(), + }); + await targetPolykeyAgent.nodeManager.setNode( + polykeyAgent.keyManager.getNodeId(), + { + host: polykeyAgent.revProxy.getIngressHost(), + port: polykeyAgent.revProxy.getIngressPort(), + }, + ); + await polykeyAgent.acl.setNodePerm(targetNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + const nodeId = polykeyAgent.keyManager.getNodeId(); + await targetPolykeyAgent.gestaltGraph.setGestaltActionByNode( + nodeId, + 'scan', + ); + await targetPolykeyAgent.acl.setVaultAction(vaultId, nodeId, 
'clone'); + await targetPolykeyAgent.acl.setVaultAction(vaultId, nodeId, 'pull'); - const result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(0); - }); - }); - describe.skip('commandSetPermsVault', () => { - test('should share a vault', async () => { command = [ 'vaults', - 'share', + 'clone', '-np', dataDir, - vaultName, - nodesUtils.encodeNodeId(nodeId1), + vaultsUtils.encodeVaultId(vaultId), + targetNodeIdEncoded, ]; - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - const result = await testBinUtils.pkStdio([...command], {}, dataDir); + let result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - fail(); - // FIXME methods not implemented. - // const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // const sharedNodesString = JSON.stringify(sharedNodes); - // expect(sharedNodesString).toContain(node1.id); - // expect(sharedNodesString).not.toContain(node2.id); - }); - }); - describe.skip('commandUnsetPermsVault', () => { - test('should un-share a vault', async () => { + + const clonedVaultId = await polykeyAgent.vaultManager.getVaultId( + vaultName, + ); + + await polykeyAgent.vaultManager.withVaults( + [clonedVaultId!], + async (clonedVault) => { + const file = await clonedVault.readF(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the first'); + }, + ); + + await polykeyAgent.vaultManager.destroyVault(clonedVaultId!); command = [ 'vaults', - 'unshare', + 'clone', '-np', dataDir, vaultName, - nodesUtils.encodeNodeId(nodeId1), + nodesUtils.encodeNodeId(targetNodeId), ]; - // Creating vault. - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - - // Init sharing. - fail(); - // FIXME methods not implemented. - // await polykeyAgent.vaults.setVaultPermissions(node1.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, id!); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, id!); - - const result = await testBinUtils.pkStdio([...command]); + result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - // Const sharedNodes = await polykeyAgent.vaults.getVaultPermissions( - // id!, - // undefined, - // ); - // expect(sharedNodes[node1.id]['pull']).toBeUndefined(); - // expect(sharedNodes[node2.id]['pull']).toBeNull(); - // expect(sharedNodes[node3.id]['pull']).toBeNull(); - }); - }); - describe.skip('commandVaultPermissions', () => { - test('should get permissions of a vault', async () => { - command = ['vaults', 'perms', '-np', dataDir, vaultName]; - - await polykeyAgent.vaultManager.createVault(vaultName); - const id = await polykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - fail(); - // FIXME methods not implemented. 
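// The calls above are the new permission model for vault replication in
// miniature: a gestalt-level 'scan' action to list vaults, plus per-vault
// 'clone' and 'pull' actions in the ACL. Condensed into one sketch; the names
// `owner` and `requesterId` are illustrative, and type imports are elided:
async function grantVaultAccess(
  owner: PolykeyAgent,
  vaultId: VaultId,
  requesterId: NodeId,
): Promise<void> {
  // The requester must exist in the gestalt graph before actions can be set
  await owner.gestaltGraph.setNode({
    id: nodesUtils.encodeNodeId(requesterId),
    chain: {},
  });
  await owner.gestaltGraph.setGestaltActionByNode(requesterId, 'scan');
  await owner.acl.setVaultAction(vaultId, requesterId, 'clone');
  await owner.acl.setVaultAction(vaultId, requesterId, 'pull');
}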
- // await polykeyAgent.vaults.setVaultPermissions(node1.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node2.id, vault.vaultId); - // await polykeyAgent.vaults.setVaultPermissions(node3.id, vault.vaultId); + const secondClonedVaultId = (await polykeyAgent.vaultManager.getVaultId( + vaultName, + ))!; + await polykeyAgent.vaultManager.withVaults( + [secondClonedVaultId!], + async (secondClonedVault) => { + const file = await secondClonedVault.readF(async (efs) => { + return await efs.readFile('secret 1', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the first'); + }, + ); - // await polykeyAgent.vaults.unsetVaultPermissions(node2.id, vault.vaultId); + await targetPolykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret 2', 'secret the second'); + }); + }, + ); - const result = await testBinUtils.pkStdio([...command]); + command = ['vaults', 'pull', '-np', dataDir, vaultName]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - }); - }); - describe.skip('commandPullVault', () => { - test( - 'should clone a vault', - async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, - }); - const vault = await targetPolykeyAgent.vaultManager.createVault( - vaultName, - ); - const id = await targetPolykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); - - await targetPolykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), - chain: {}, - }); - fail(); - // FIXME methods not implemented. 
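// Vault handles are no longer returned by createVault; all access now goes
// through vaultManager.withVaults, which scopes the handle to a callback and
// releases it afterwards. The read/write idiom used throughout the updated
// tests, as a sketch (secret name and content are placeholders):
const vaultId = await polykeyAgent.vaultManager.createVault(
  'example' as VaultName,
);
await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => {
  // writeF commits the mutation; readF gives read-only access
  await vault.writeF(async (efs) => {
    await efs.writeFile('secret-name', 'secret-content');
  });
  const content = await vault.readF(async (efs) => {
    return await efs.readFile('secret-name', { encoding: 'utf8' });
  });
  expect(content).toBe('secret-content');
});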
- // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); - - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeGraph.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( - targetNodeId, - async () => {}, - ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, - ); - - command = [ - 'vaults', - 'clone', - '-np', - dataDir, - '-ni', - nodesUtils.encodeNodeId(targetNodeId), - '-vi', - makeVaultIdPretty(vault.vaultId), - ]; - - // Vault does not exist on the source PolykeyAgent so the pull command throws an error which - // caught, the error is checked and if it is ErrorVaultUndefined, then the Agent attempts a - // clone instead - const result = await testBinUtils.pkStdio([...command]); - expect(result.exitCode).toBe(0); - // Const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // expect(JSON.stringify(list)).toContain(vaultName); + await polykeyAgent.vaultManager.withVaults( + [secondClonedVaultId!], + async (secondClonedVault) => { + const file = await secondClonedVault.readF(async (efs) => { + return await efs.readFile('secret 2', { encoding: 'utf8' }); + }); + expect(file).toBe('secret the second'); + }, + ); - await targetPolykeyAgent.stop(); - await targetPolykeyAgent.destroy(); - await fs.promises.rm(dataDir2, { - force: true, - recursive: true, - }); - }, - global.defaultTimeout * 2, - ); - test( - 'should pull a vault', - async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, - }); - await targetPolykeyAgent.vaultManager.createVault(vaultName); + command = [ + 'vaults', + 'pull', + '-np', + dataDir, + '-pv', + 'InvalidName', + vaultsUtils.encodeVaultId(secondClonedVaultId), + targetNodeIdEncoded, + ]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(sysexits.USAGE); + expect(result.stderr).toContain('ErrorVaultsVaultUndefined'); - const id = await targetPolykeyAgent.vaultManager.getVaultId(vaultName); - expect(id).toBeTruthy(); + command = [ + 'vaults', + 'pull', + '-np', + dataDir, + '-pv', + vaultName, + vaultsUtils.encodeVaultId(secondClonedVaultId), + 'InvalidNodeId', + ]; + result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(sysexits.USAGE); - await targetPolykeyAgent.gestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), + await targetPolykeyAgent.stop(); + await targetPolykeyAgent.destroy(); + await fs.promises.rm(dataDir2, { + force: true, + recursive: true, + }); + }, + global.defaultTimeout * 3, + ); + describe('commandShare', () => { + test('Should share a vault', async () => { + const mockedSendNotification = jest.spyOn( + NotificationsManager.prototype, + 
'sendNotification', + ); + try { + // We don't want to actually send a notification + mockedSendNotification.mockImplementation(async (_) => {}); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); + const targetNodeId = testUtils.generateRandomNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId), chain: {}, }); - fail(); - // FIXME methods not implemented. - // await targetPolykeyAgent.vaults.setVaultPermissions( - // polykeyAgent.nodes.getNodeId(), - // vault.vaultId, - // ); - - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeGraph.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( - targetNodeId, - async () => {}, - ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, - ); - // Await polykeyAgent.vaults.cloneVault(vault.vaultId, targetNodeId); - - // await vault.addSecret('MySecret', 'This secret will be pulled'); - - // const list = (await polykeyAgent.vaults.listVaults()).map( - // (vault) => vault, - // ); - // const filteredList = list.filter((value) => { - // return value.name === vaultName; - // }); - // expect(filteredList.length).toBe(1); - // const clonedVault = await polykeyAgent.vaults.getVault( - // filteredList[0].id, - // ); - // await expect(clonedVault.listSecrets()).resolves.toStrictEqual([]); + expect( + (await polykeyAgent.acl.getNodePerm(targetNodeId))?.vaults[vaultId], + ).toBeUndefined(); command = [ 'vaults', - 'pull', + 'share', '-np', dataDir, - '-vn', - vaultName, - '-ni', - nodesUtils.encodeNodeId(targetNodeId), + vaultIdEncoded, + targetNodeIdEncoded, ]; - - const result = await testBinUtils.pkStdio([...command]); + const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toBe(0); - // Await expect(clonedVault.listSecrets()).resolves.toStrictEqual([ - // 'MySecret', - // ]); - // await expect(clonedVault.getSecret('MySecret')).resolves.toStrictEqual( - // 'This secret will be pulled', - // ); - - await targetPolykeyAgent.stop(); - await targetPolykeyAgent.destroy(); - await fs.promises.rm(dataDir2, { recursive: true }); - }, - global.defaultTimeout * 2, - ); + // Check permission + const permissions1 = (await polykeyAgent.acl.getNodePerm(targetNodeId)) + ?.vaults[vaultId]; + expect(permissions1).toBeDefined(); + expect(permissions1.pull).toBeDefined(); + expect(permissions1.clone).toBeDefined(); + } finally { + mockedSendNotification.mockRestore(); + } + }); }); - describe.skip('commandScanVault', () => { - test('should scan a node for vaults', async () => { - const dataDir2 = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), + describe('commandUnshare', () => { + test('Should unshare a vault', async () => { + const vaultId1 = await polykeyAgent.vaultManager.createVault(vaultName); + const 
vaultId2 = await polykeyAgent.vaultManager.createVault( + vaultName + '1', ); - const targetPolykeyAgent = await PolykeyAgent.createPolykeyAgent({ - password, - nodePath: dataDir2, - logger: logger, + const vaultIdEncoded1 = vaultsUtils.encodeVaultId(vaultId1); + const vaultIdEncoded2 = vaultsUtils.encodeVaultId(vaultId2); + const targetNodeId = testUtils.generateRandomNodeId(); + const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId); + await polykeyAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId), + chain: {}, }); - const targetNodeId = targetPolykeyAgent.keyManager.getNodeId(); - const targetHost = targetPolykeyAgent.revProxy.getIngressHost(); - const targetPort = targetPolykeyAgent.revProxy.getIngressPort(); - await polykeyAgent.nodeManager.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - }); - // Client agent: Start sending hole-punching packets to the target - await polykeyAgent.nodeConnectionManager.withConnF( + // Creating permissions + await polykeyAgent.gestaltGraph.setGestaltActionByNode( targetNodeId, - async () => {}, - ); - const clientEgressHost = polykeyAgent.fwdProxy.getEgressHost(); - const clientEgressPort = polykeyAgent.fwdProxy.getEgressPort(); - // Server agent: start sending hole-punching packets back to the 'client' - // agent (in order to establish a connection) - await targetPolykeyAgent.nodeConnectionManager.holePunchReverse( - clientEgressHost, - clientEgressPort, + 'scan', ); + await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'clone'); + await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'pull'); + await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'clone'); + await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'pull'); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault1` as VaultName, - ); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault2` as VaultName, - ); - await targetPolykeyAgent.vaultManager.createVault( - `${vaultName}-Vault3` as VaultName, - ); + command = [ + 'vaults', + 'unshare', + '-np', + dataDir, + vaultIdEncoded1, + targetNodeIdEncoded, + ]; + const result = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result.exitCode).toBe(0); - const targetVaults = ( - await targetPolykeyAgent.vaultManager.listVaults() - ).keys(); - const namesList: string[] = []; - for await (const name of targetVaults) { - namesList.push(name); - } - expect(namesList.length).toBe(3); + // Check permission + const permissions = (await polykeyAgent.acl.getNodePerm(targetNodeId)) + ?.vaults[vaultId1]; + expect(permissions).toBeDefined(); + expect(permissions.pull).toBeUndefined(); + expect(permissions.clone).toBeUndefined(); + + expect( + (await polykeyAgent.acl.getNodePerm(targetNodeId))?.gestalt['scan'], + ).toBeDefined(); command = [ 'vaults', - 'scan', + 'unshare', '-np', dataDir, - '-ni', - nodesUtils.encodeNodeId(targetNodeId), + vaultIdEncoded2, + targetNodeIdEncoded, ]; - const result = await testBinUtils.pkStdio([...command]); + const result2 = await testBinUtils.pkStdio([...command], {}, dataDir); + expect(result2.exitCode).toBe(0); + + // Check permission + const permissions2 = (await polykeyAgent.acl.getNodePerm(targetNodeId)) + ?.vaults[vaultId2]; + expect(permissions2).toBeDefined(); + expect(permissions2.pull).toBeUndefined(); + expect(permissions2.clone).toBeUndefined(); + + // And the scan permission should be removed + expect( + (await polykeyAgent.acl.getNodePerm(targetNodeId))?.gestalt['scan'], 
+      ).toBeUndefined();
+    });
+  });
+  describe('commandPermissions', () => {
+    test('should get the permissions of a vault', async () => {
+      const vaultId1 = await polykeyAgent.vaultManager.createVault(vaultName);
+      const vaultId2 = await polykeyAgent.vaultManager.createVault(
+        vaultName + '1',
+      );
+      const vaultIdEncoded1 = vaultsUtils.encodeVaultId(vaultId1);
+      const vaultIdEncoded2 = vaultsUtils.encodeVaultId(vaultId2);
+      const targetNodeId = testUtils.generateRandomNodeId();
+      const targetNodeIdEncoded = nodesUtils.encodeNodeId(targetNodeId);
+      await polykeyAgent.gestaltGraph.setNode({
+        id: nodesUtils.encodeNodeId(targetNodeId),
+        chain: {},
+      });
+
+      // Creating permissions
+      await polykeyAgent.gestaltGraph.setGestaltActionByNode(
+        targetNodeId,
+        'scan',
+      );
+      await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'clone');
+      await polykeyAgent.acl.setVaultAction(vaultId1, targetNodeId, 'pull');
+      await polykeyAgent.acl.setVaultAction(vaultId2, targetNodeId, 'pull');
+
+      command = ['vaults', 'permissions', '-np', dataDir, vaultIdEncoded1];
+      const result = await testBinUtils.pkStdio([...command], {}, dataDir);
+      expect(result.exitCode).toBe(0);
+      expect(result.stdout).toContain(targetNodeIdEncoded);
+      expect(result.stdout).toContain('clone');
+      expect(result.stdout).toContain('pull');
-      await targetPolykeyAgent.stop();
-      await targetPolykeyAgent.destroy();
-      await fs.promises.rmdir(dataDir2, { recursive: true });
+      command = ['vaults', 'permissions', '-np', dataDir, vaultIdEncoded2];
+      const result2 = await testBinUtils.pkStdio([...command], {}, dataDir);
+      expect(result2.exitCode).toBe(0);
+      expect(result2.stdout).toContain(targetNodeIdEncoded);
+      expect(result2.stdout).not.toContain('clone');
+      expect(result2.stdout).toContain('pull');
     });
   });
   describe('commandVaultVersion', () => {
     test('should switch the version of a vault', async () => {
-      const vault = await polykeyAgent.vaultManager.createVault(vaultName);
+      const vaultId = await polykeyAgent.vaultManager.createVault(vaultName);
       const id = polykeyAgent.vaultManager.getVaultId(vaultName);
       expect(id).toBeTruthy();

       const secret1 = { name: 'Secret-1', content: 'Secret-1-content' };
       const secret2 = { name: 'Secret-1', content: 'Secret-2-content' };

-      await vault.commit(async (efs) => {
-        await efs.writeFile(secret1.name, secret1.content);
-      });
-      const ver1Oid = (await vault.log(1))[0].oid;
-
-      await vault.commit(async (efs) => {
-        await efs.writeFile(secret2.name, secret2.content);
-      });
+      const ver1Oid = await polykeyAgent.vaultManager.withVaults(
+        [vaultId],
+        async (vault) => {
+          await vault.writeF(async (efs) => {
+            await efs.writeFile(secret1.name, secret1.content);
+          });
+          const ver1Oid = (await vault.log(undefined, 1))[0].commitId;
+
+          await vault.writeF(async (efs) => {
+            await efs.writeFile(secret2.name, secret2.content);
+          });
+          return ver1Oid;
+        },
+      );

       const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid];

       const result = await testBinUtils.pkStdio([...command], {}, dataDir);
       expect(result.exitCode).toBe(0);

-      const fileContents = await vault.access(async (efs) => {
-        return (await efs.readFile(secret1.name)).toString();
+      await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => {
+        const fileContents = await vault.readF(async (efs) => {
+          return (await efs.readFile(secret1.name)).toString();
+        });
+        expect(fileContents).toStrictEqual(secret1.content);
       });
-      expect(fileContents).toStrictEqual(secret1.content);
     });
     test('should switch the version of a vault to the latest version', async
() => { - const vault = await polykeyAgent.vaultManager.createVault(vaultName); + const vaultId = await polykeyAgent.vaultManager.createVault(vaultName); const id = polykeyAgent.vaultManager.getVaultId(vaultName); expect(id).toBeTruthy(); const secret1 = { name: 'Secret-1', content: 'Secret-1-content' }; const secret2 = { name: 'Secret-1', content: 'Secret-2-content' }; - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - const ver1Oid = (await vault.log(1))[0].oid; - - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); + const ver1Oid = await polykeyAgent.vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + const ver1Oid = (await vault.log(undefined, 1))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + return ver1Oid; + }, + ); const command = ['vaults', 'version', '-np', dataDir, vaultName, ver1Oid]; @@ -603,9 +600,9 @@ describe('CLI vaults', () => { ]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(10); + expect(result.exitCode).toBe(sysexits.USAGE); - expect(result.stderr).toContain('ErrorVaultCommitUndefined'); + expect(result.stderr).toContain('ErrorVaultReferenceInvalid'); }); test('should throw an error if the vault is not found', async () => { const command = [ @@ -613,65 +610,67 @@ describe('CLI vaults', () => { 'version', '-np', dataDir, - 'A' + vaultName, + 'zLnM7puKobbh4YXEz66StAq', 'NOT_A_VALID_CHECKOUT_ID', ]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); - expect(result.exitCode).toBe(10); - expect(result.stderr).toContain('ErrorVaultUndefined'); + expect(result.exitCode).toBe(sysexits.USAGE); + expect(result.stderr).toContain('ErrorVaultsVaultUndefined'); }); }); describe('commandVaultLog', () => { const secret1 = { name: 'secret1', content: 'Secret-1-content' }; const secret2 = { name: 'secret2', content: 'Secret-2-content' }; - let vault: Vault; - let commit1Oid: string; - let commit2Oid: string; - let commit3Oid: string; + let vaultId: VaultId; + let writeF1Oid: string; + let writeF2Oid: string; + let writeF3Oid: string; beforeEach(async () => { - vault = await polykeyAgent.vaultManager.createVault(vaultName); + vaultId = await polykeyAgent.vaultManager.createVault(vaultName); - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - }); - commit1Oid = (await vault.log(0))[0].oid; + await polykeyAgent.vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + writeF1Oid = (await vault.log(undefined, 0))[0].commitId; - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); - commit2Oid = (await vault.log(0))[0].oid; + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + writeF2Oid = (await vault.log(undefined, 0))[0].commitId; - await vault.commit(async (efs) => { - await efs.unlink(secret2.name); + await vault.writeF(async (efs) => { + await efs.unlink(secret2.name); + }); + writeF3Oid = (await vault.log(undefined, 0))[0].commitId; }); - commit3Oid = (await vault.log(0))[0].oid; }); afterEach(async () => { - await polykeyAgent.vaultManager.destroyVault(vault.vaultId); + await 
polykeyAgent.vaultManager.destroyVault(vaultId); }); - test('Should get all commits', async () => { + test('Should get all writeFs', async () => { const command = ['vaults', 'log', '-np', dataDir, vaultName]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).toContain(commit3Oid); + expect(result.stdout).toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).toContain(writeF3Oid); }); test('should get a part of the log', async () => { const command = ['vaults', 'log', '-np', dataDir, '-d', '2', vaultName]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).not.toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).toContain(commit3Oid); + expect(result.stdout).not.toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).toContain(writeF3Oid); }); - test('should get a specific commit', async () => { + test('should get a specific writeF', async () => { const command = [ 'vaults', 'log', @@ -681,15 +680,126 @@ describe('CLI vaults', () => { '1', vaultName, '-ci', - commit2Oid, + writeF2Oid, ]; const result = await testBinUtils.pkStdio([...command], {}, dataDir); expect(result.exitCode).toEqual(0); - expect(result.stdout).not.toContain(commit1Oid); - expect(result.stdout).toContain(commit2Oid); - expect(result.stdout).not.toContain(commit3Oid); + expect(result.stdout).not.toContain(writeF1Oid); + expect(result.stdout).toContain(writeF2Oid); + expect(result.stdout).not.toContain(writeF3Oid); }); test.todo('test formatting of the output'); }); + describe('commandScanNode', () => { + test( + 'should return the vaults names and ids of the remote vault', + async () => { + let remoteOnline: PolykeyAgent | undefined; + try { + remoteOnline = await PolykeyAgent.createPolykeyAgent({ + password, + logger, + nodePath: path.join(dataDir, 'remoteOnline'), + }); + const remoteOnlineNodeId = remoteOnline.keyManager.getNodeId(); + const remoteOnlineNodeIdEncoded = + nodesUtils.encodeNodeId(remoteOnlineNodeId); + await polykeyAgent.nodeManager.setNode(remoteOnlineNodeId, { + host: remoteOnline.revProxy.getIngressHost(), + port: remoteOnline.revProxy.getIngressPort(), + } as NodeAddress); + + await remoteOnline.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(polykeyAgent.keyManager.getNodeId()), + chain: {}, + }); + + const commands1 = [ + 'vaults', + 'scan', + remoteOnlineNodeIdEncoded, + '-np', + dataDir, + ]; + const result1 = await testBinUtils.pkStdio( + commands1, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result1.exitCode).toEqual(sysexits.NOPERM); + expect(result1.stderr).toContain( + 'ErrorVaultsPermissionDenied: Permission was denied - Scanning is not allowed for', + ); + + await remoteOnline.gestaltGraph.setGestaltActionByNode( + polykeyAgent.keyManager.getNodeId(), + 'notify', + ); + + const commands2 = [ + 'vaults', + 'scan', + remoteOnlineNodeIdEncoded, + '-np', + dataDir, + ]; + const result2 = await testBinUtils.pkStdio( + commands2, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result2.exitCode).toEqual(sysexits.NOPERM); + expect(result2.stderr).toContain( + 'ErrorVaultsPermissionDenied: Permission was denied - Scanning is not allowed for', + ); + + await remoteOnline.gestaltGraph.setGestaltActionByNode( + 
polykeyAgent.keyManager.getNodeId(), + 'scan', + ); + + const vault1Id = await remoteOnline.vaultManager.createVault( + 'Vault1' as VaultName, + ); + const vault2Id = await remoteOnline.vaultManager.createVault( + 'Vault2' as VaultName, + ); + const vault3Id = await remoteOnline.vaultManager.createVault( + 'Vault3' as VaultName, + ); + const nodeId = polykeyAgent.keyManager.getNodeId(); + await remoteOnline.acl.setVaultAction(vault1Id, nodeId, 'clone'); + await remoteOnline.acl.setVaultAction(vault2Id, nodeId, 'pull'); + await remoteOnline.acl.setVaultAction(vault2Id, nodeId, 'clone'); + const commands3 = [ + 'vaults', + 'scan', + remoteOnlineNodeIdEncoded, + '-np', + dataDir, + ]; + const result3 = await testBinUtils.pkStdio( + commands3, + { PK_PASSWORD: 'password' }, + dataDir, + ); + expect(result3.exitCode).toBe(0); + expect(result3.stdout).toContain( + `Vault1\t\t${vaultsUtils.encodeVaultId(vault1Id)}\t\tclone`, + ); + expect(result3.stdout).toContain( + `Vault2\t\t${vaultsUtils.encodeVaultId(vault2Id)}\t\tpull,clone`, + ); + expect(result3.stdout).not.toContain( + `Vault3\t\t${vaultsUtils.encodeVaultId(vault3Id)}`, + ); + } finally { + await remoteOnline?.stop(); + await remoteOnline?.destroy(); + } + }, + global.defaultTimeout * 2, + ); + }); }); diff --git a/tests/client/rpcVaults.test.ts b/tests/client/rpcVaults.test.ts index 5b051f25f..34192efde 100644 --- a/tests/client/rpcVaults.test.ts +++ b/tests/client/rpcVaults.test.ts @@ -1,7 +1,8 @@ import type * as grpc from '@grpc/grpc-js'; -import type { VaultManager } from '@/vaults'; -import type { Vault, VaultName } from '@/vaults/types'; +import type VaultManager from '@/vaults/VaultManager'; +import type { VaultId, VaultName } from '@/vaults/types'; import type { ClientServiceClient } from '@/proto/js/polykey/v1/client_service_grpc_pb'; +import type { Stat } from 'encryptedfs'; import os from 'os'; import path from 'path'; import fs from 'fs'; @@ -10,13 +11,14 @@ import { PolykeyAgent } from '@'; import * as utilsPB from '@/proto/js/polykey/v1/utils/utils_pb'; import * as vaultsPB from '@/proto/js/polykey/v1/vaults/vaults_pb'; import * as secretsPB from '@/proto/js/polykey/v1/secrets/secrets_pb'; -import { KeyManager } from '@/keys'; -import { ForwardProxy } from '@/network'; +import KeyManager from '@/keys/KeyManager'; +import ForwardProxy from '@/network/ForwardProxy'; import * as grpcUtils from '@/grpc/utils'; import * as vaultErrors from '@/vaults/errors'; import * as vaultsUtils from '@/vaults/utils'; -import { vaultOps } from '@/vaults'; -import * as testUtils from './utils'; +import * as vaultOps from '@/vaults/VaultOps'; +import * as nodesUtils from '@/nodes/utils'; +import * as clientUtils from './utils'; jest.mock('@/keys/utils', () => ({ ...jest.requireActual('@/keys/utils'), @@ -77,16 +79,16 @@ describe('Vaults client service', () => { vaultManager = pkAgent.vaultManager; - [server, port] = await testUtils.openTestClientServer({ + [server, port] = await clientUtils.openTestClientServer({ pkAgent, secure: false, }); - client = await testUtils.openSimpleClientClient(port); + client = await clientUtils.openSimpleClientClient(port); }, global.polykeyStartupTimeout); afterAll(async () => { - await testUtils.closeTestClientServer(server); - testUtils.closeSimpleClientClient(client); + await clientUtils.closeTestClientServer(server); + clientUtils.closeSimpleClientClient(client); await pkAgent.stop(); await pkAgent.destroy(); @@ -99,7 +101,7 @@ describe('Vaults client service', () => { }); beforeEach(async () => { const 
sessionToken = await pkAgent.sessionManager.createToken(); - callCredentials = testUtils.createCallCredentials(sessionToken); + callCredentials = clientUtils.createCallCredentials(sessionToken); }); afterEach(async () => { const aliveVaults = await vaultManager.listVaults(); @@ -135,9 +137,9 @@ describe('Vaults client service', () => { const vaultId = await createVault(vaultMessage, callCredentials); const vaultNames = await vaultManager.listVaults(); expect(vaultNames.get(vaultList[0])).toBeTruthy(); - expect(vaultNames.get(vaultList[0])).toStrictEqual( - vaultsUtils.makeVaultId(vaultId.getNameOrId()), - ); + expect( + vaultsUtils.encodeVaultId(vaultNames.get(vaultList[0])!), + ).toStrictEqual(vaultId.getNameOrId()); }); test('should delete vaults', async () => { const deleteVault = grpcUtils.promisifyUnaryCall( @@ -162,21 +164,21 @@ describe('Vaults client service', () => { client, client.vaultsRename, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId1 = await vaultManager.createVault(vaultList[0]); const vaultRenameMessage = new vaultsPB.Rename(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); vaultRenameMessage.setVault(vaultMessage); vaultRenameMessage.setNewName(vaultList[1]); - const vaultId = await renameVault(vaultRenameMessage, callCredentials); - expect(vaultsUtils.makeVaultId(vaultId.getNameOrId())).toStrictEqual( - vault.vaultId, + const vaultId2 = await renameVault(vaultRenameMessage, callCredentials); + expect(vaultsUtils.decodeVaultId(vaultId2.getNameOrId())).toStrictEqual( + vaultId1, ); const renamedVaultId = await vaultManager.getVaultId(vaultList[1]); - expect(renamedVaultId).toEqual(vault.vaultId); + expect(renamedVaultId).toEqual(vaultId1); }); describe('Version', () => { const secretVer1 = { @@ -187,11 +189,11 @@ describe('Vaults client service', () => { name: secretList[0], content: 'Secret-1-content-ver2', }; - let vault: Vault; + let vaultId: VaultId; let vaultsVersion; beforeEach(async () => { - vault = await vaultManager.createVault(vaultList[0]); + vaultId = await vaultManager.createVault(vaultList[0]); vaultsVersion = grpcUtils.promisifyUnaryCall( client, client.vaultsVersion, @@ -200,13 +202,20 @@ describe('Vaults client service', () => { test('should switch a vault to a version', async () => { // Commit some history - await vault.commit(async (efs) => { - await efs.writeFile(secretVer1.name, secretVer1.content); - }); - const ver1Oid = (await vault.log())[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile(secretVer2.name, secretVer2.content); - }); + const ver1Oid = await vaultManager.withVaults( + [vaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretVer1.name, secretVer1.content); + }); + const ver1Oid = (await vault.log())[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile(secretVer2.name, secretVer2.content); + }); + return ver1Oid; + }, + ); + // Revert the version const vaultMessage = new vaultsPB.Vault(); vaultMessage.setNameOrId(vaultList[0]); @@ -221,22 +230,33 @@ describe('Vaults client service', () => { ); expect(version.getIsLatestVersion()).toBeFalsy(); // Read old history - await vault.access(async (efs) => { - expect( - (await efs.readFile(secretVer1.name)).toString(), - ).toStrictEqual(secretVer1.content); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async 
(efs) => { + expect( + (await efs.readFile(secretVer1.name)).toString(), + ).toStrictEqual(secretVer1.content); + }); }); }); test('should fail to find a non existent version', async () => { // Revert the version const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); const vaultVersionMessage = new vaultsPB.Version(); vaultVersionMessage.setVault(vaultMessage); vaultVersionMessage.setVersionId('invalidOid'); const version = vaultsVersion(vaultVersionMessage, callCredentials); await expect(version).rejects.toThrow( - vaultErrors.ErrorVaultCommitUndefined, + vaultErrors.ErrorVaultReferenceInvalid, + ); + + vaultVersionMessage.setVersionId( + '7660aa9a2fee90e875c2d19e5deefe882ca1d4d9', + ); + const version2 = vaultsVersion(vaultVersionMessage, callCredentials); + await expect(version2).rejects.toThrow( + vaultErrors.ErrorVaultReferenceMissing, ); }); }); @@ -244,7 +264,7 @@ describe('Vaults client service', () => { let vaultLog; const secret1 = { name: secretList[0], content: 'Secret-1-content' }; const secret2 = { name: secretList[1], content: 'Secret-2-content' }; - let vault: Vault; + let vaultId: VaultId; let commit1Oid: string; let commit2Oid: string; let commit3Oid: string; @@ -254,22 +274,24 @@ describe('Vaults client service', () => { client, client.vaultsLog, ); - vault = await vaultManager.createVault(vaultList[0]); - - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); + vaultId = await vaultManager.createVault(vaultList[0]); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + commit1Oid = (await vault.log(undefined, 0))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + commit2Oid = (await vault.log(undefined, 0))[0].commitId; + + await vault.writeF(async (efs) => { + await efs.unlink(secret2.name); + }); + commit3Oid = (await vault.log(undefined, 0))[0].commitId; }); - commit1Oid = (await vault.log(0))[0].oid; - - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); - commit2Oid = (await vault.log(0))[0].oid; - - await vault.commit(async (efs) => { - await efs.unlink(secret2.name); - }); - commit3Oid = (await vault.log(0))[0].oid; }); test('should get the full log', async () => { @@ -325,6 +347,124 @@ describe('Vaults client service', () => { expect(logMessages[0].getOid()).toEqual(commit2Oid); }); }); + test('should get vault permissions', async () => { + const vaultsPermissionsGet = + grpcUtils.promisifyReadableStreamCall( + client, + client.vaultsPermissionGet, + ); + + let remoteKeynode1: PolykeyAgent | undefined; + let remoteKeynode2: PolykeyAgent | undefined; + try { + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(dataDir, 'remoteKeynode1'), + }); + remoteKeynode2 = await PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(dataDir, 'remoteKeynode2'), + }); + const targetNodeId1 = remoteKeynode1.keyManager.getNodeId(); + const targetNodeId2 = remoteKeynode2.keyManager.getNodeId(); + const pkAgentNodeId = pkAgent.keyManager.getNodeId(); + await pkAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId1), + chain: {}, + }); + await 
pkAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(targetNodeId2), + chain: {}, + }); + + await pkAgent.nodeManager.setNode(targetNodeId1, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), + }); + await pkAgent.nodeManager.setNode(targetNodeId2, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), + }); + + await remoteKeynode1.nodeManager.setNode(pkAgentNodeId, { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + }); + await remoteKeynode2.nodeManager.setNode(pkAgentNodeId, { + host: pkAgent.revProxy.getIngressHost(), + port: pkAgent.revProxy.getIngressPort(), + }); + await remoteKeynode1.acl.setNodePerm(pkAgentNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + await remoteKeynode2.acl.setNodePerm(pkAgentNodeId, { + gestalt: { + notify: null, + }, + vaults: {}, + }); + + const vaultId1 = await vaultManager.createVault(vaultList[0]); + const vaultId2 = await vaultManager.createVault(vaultList[1]); + + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId1, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId1, 'clone'); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId1, 'pull'); + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId2, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId2, 'clone'); + await pkAgent.acl.setVaultAction(vaultId1, targetNodeId2, 'pull'); + await pkAgent.gestaltGraph.setGestaltActionByNode( + targetNodeId1, + 'scan', + ); + await pkAgent.acl.setVaultAction(vaultId2, targetNodeId1, 'clone'); + await pkAgent.acl.setVaultAction(vaultId2, targetNodeId1, 'pull'); + + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId1)); + + const permissionsStream = vaultsPermissionsGet( + vaultMessage, + callCredentials, + ); + const list: Record[] = []; + for await (const permission of permissionsStream) { + const permissionsList = permission.getVaultPermissionsList(); + expect(permissionsList).toContain('pull'); + expect(permissionsList).toContain('clone'); + list.push(permission.toObject()); + } + expect(list).toHaveLength(2); + + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId2)); + const permissionStream2 = vaultsPermissionsGet( + vaultMessage, + callCredentials, + ); + for await (const permission of permissionStream2) { + const permissionsList = permission.getVaultPermissionsList(); + expect(permissionsList).toContain('pull'); + expect(permissionsList).toContain('clone'); + const node = permission.getNode(); + const nodeId = node?.getNodeId(); + expect(nodeId).toEqual(nodesUtils.encodeNodeId(targetNodeId1)); + } + } finally { + await remoteKeynode1?.stop(); + await remoteKeynode2?.stop(); + } + }); }); describe('Secrets', () => { test('should make a directory in a vault', async () => { @@ -333,17 +473,19 @@ describe('Vaults client service', () => { client.vaultsSecretsMkdir, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const dirPath = 'dir/dir1/dir2'; const vaultMkdirMessage = new vaultsPB.Mkdir(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); vaultMkdirMessage.setVault(vaultMessage); vaultMkdirMessage.setDirName(dirPath); vaultMkdirMessage.setRecursive(true); await 
mkdirVault(vaultMkdirMessage, callCredentials); - await vault.access(async (efs) => { - expect(await efs.exists(dirPath)).toBeTruthy(); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async (efs) => { + expect(await efs.exists(dirPath)).toBeTruthy(); + }); }); }); test('should list secrets in a vault', async () => { @@ -353,14 +495,17 @@ describe('Vaults client service', () => { client.vaultsSecretsList, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - for (const secretName of secretList) { - await efs.writeFile(secretName, secretName); - } + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + for (const secretName of secretList) { + await efs.writeFile(secretName, secretName); + } + }); }); + const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); const secretsStream = listSecretsVault(vaultMessage, callCredentials); const names: Array = []; for await (const secret of secretsStream) { @@ -375,22 +520,27 @@ describe('Vaults client service', () => { client.vaultsSecretsDelete, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - for (const secretName of secretList) { - await efs.writeFile(secretName, secretName); - } + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + for (const secretName of secretList) { + await efs.writeFile(secretName, secretName); + } + }); }); + const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); await deleteSecretVault(secretMessage, callCredentials); - const secrets = await vault.access(async (efs) => { - return await efs.readdir('.', { encoding: 'utf8' }); + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vault.readF(async (efs) => { + return await efs.readdir('.', { encoding: 'utf8' }); + }); + expect(secrets.sort()).toEqual(secretList.slice(1).sort()); }); - expect(secrets.sort()).toEqual(secretList.slice(1).sort()); }); test('should edit secrets in a vault', async () => { const editSecretVault = @@ -398,21 +548,26 @@ describe('Vaults client service', () => { client, client.vaultsSecretsEdit, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); secretMessage.setSecretContent(Buffer.from('content-change')); await editSecretVault(secretMessage, callCredentials); - 
await vault.access(async (efs) => { - expect((await efs.readFile(secretList[0])).toString()).toStrictEqual( - 'content-change', - ); + + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.readF(async (efs) => { + expect((await efs.readFile(secretList[0])).toString()).toStrictEqual( + 'content-change', + ); + }); }); }); test('should get secrets in a vault', async () => { @@ -420,13 +575,15 @@ describe('Vaults client service', () => { client, client.vaultsSecretsGet, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); const secret = await getSecretVault(secretMessage, callCredentials); @@ -439,24 +596,29 @@ describe('Vaults client service', () => { client, client.vaultsSecretsRename, ); - const vault = await vaultManager.createVault(vaultList[0]); - await vault.commit(async (efs) => { - await efs.writeFile(secretList[0], secretList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); }); const secretRenameMessage = new secretsPB.Rename(); const vaultMessage = new vaultsPB.Vault(); const secretMessage = new secretsPB.Secret(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setSecretName(secretList[0]); secretMessage.setVault(vaultMessage); secretRenameMessage.setNewName(secretList[1]); secretRenameMessage.setOldSecret(secretMessage); await renameSecretVault(secretRenameMessage, callCredentials); - const secrets = await vault.access(async (efs) => { - return await efs.readdir('.'); + + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(secrets.sort()).toEqual(secretList.splice(1, 1).sort()); }); - expect(secrets.sort()).toEqual(secretList.splice(1, 1).sort()); }); test('should add secrets in a vault', async () => { const newSecretVault = @@ -465,20 +627,23 @@ describe('Vaults client service', () => { client.vaultsSecretsNew, ); - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const secretMessage = new secretsPB.Secret(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretMessage.setVault(vaultMessage); secretMessage.setSecretName(secretList[0]); secretMessage.setSecretContent(Buffer.from(secretList[0])); await newSecretVault(secretMessage, callCredentials); - const secret = await vault.access(async (efs) => { - return await efs.readFile(secretList[0], { encoding: 'utf8' }); + await vaultManager.withVaults([vaultId], async (vault) => { + const secret 
= await vault.readF(async (efs) => { + return await efs.readFile(secretList[0], { encoding: 'utf8' }); + }); + expect(secret).toBe(secretList[0]); }); - expect(secret).toBe(secretList[0]); }); - test.only('should add a directory of secrets in a vault', async () => { + test('should add a directory of secrets in a vault', async () => { const newDirSecretVault = grpcUtils.promisifyUnaryCall( client, @@ -492,82 +657,42 @@ describe('Vaults client service', () => { // Write secret to file await fs.promises.writeFile(secretFile, secret); } - const vault = await vaultManager.createVault(vaultList[0]); + const vaultId = await vaultManager.createVault(vaultList[0]); const secretDirectoryMessage = new secretsPB.Directory(); const vaultMessage = new vaultsPB.Vault(); - vaultMessage.setNameOrId(vaultsUtils.makeVaultIdPretty(vault.vaultId)); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); secretDirectoryMessage.setVault(vaultMessage); secretDirectoryMessage.setSecretDirectory(secretDir); await newDirSecretVault(secretDirectoryMessage, callCredentials); - const secrets = await vaultOps.listSecrets(vault); - expect(secrets.sort()).toEqual( - secretList.map((secret) => path.join('secretDir', secret)).sort(), + await vaultManager.withVaults([vaultId], async (vault) => { + const secrets = await vaultOps.listSecrets(vault); + expect(secrets.sort()).toEqual( + secretList.map((secret) => path.join('secretDir', secret)).sort(), + ); + }); + }); + test('should stat a file', async () => { + const getSecretStat = grpcUtils.promisifyUnaryCall( + client, + client.vaultsSecretsStat, ); + const vaultId = await vaultManager.createVault(vaultList[0]); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile(secretList[0], secretList[0]); + }); + }); + const secretMessage = new secretsPB.Secret(); + const vaultMessage = new vaultsPB.Vault(); + vaultMessage.setNameOrId(vaultsUtils.encodeVaultId(vaultId)); + secretMessage.setVault(vaultMessage); + secretMessage.setSecretName(secretList[0]); + const result = await getSecretStat(secretMessage, callCredentials); + const stat: Stat = JSON.parse(result.getJson()); + expect(stat.size).toBe(7); + expect(stat.blksize).toBe(4096); + expect(stat.blocks).toBe(1); + expect(stat.nlink).toBe(1); }); - // TODO: Permissions not supported yet. - // test.skip('should add permissions to a vault', async () => { - // fail('Functionality not fully implemented'); - // const vaultName = 'vault1' as VaultName; - // const vaultsSetPerms = - // grpcUtils.promisifyUnaryCall( - // client, - // client.vaultsPermissionsSet, - // ); - - // // Creating a vault - // await vaultManager.createVault(vaultName); - - // // Creating a gestalts state - // await createGestaltState(); - - // const setVaultPermMessage = new vaultsPB.PermSet(); - // const nodeMessage = new nodesPB.Node(); - // const vaultMessage = new vaultsPB.Vault(); - // nodeMessage.setNodeId(node2.id); - // vaultMessage.setNameOrId(vaultName); - // setVaultPermMessage.setVault(vaultMessage); - // setVaultPermMessage.setNode(nodeMessage); - // await vaultsSetPerms(setVaultPermMessage, callCredentials); - - // // FIXME: this is not implemented yet. 
- // const result = 'Not implemented'; //Await vaultManager.getVaultPermissions(vaultId); - // const stringResult = JSON.stringify(result); - // expect(stringResult).toContain(node2.id); - // expect(stringResult).toContain('pull'); - // }); - // test.skip('should remove permissions to a vault', async () => { - // const vaultName = 'vault1' as VaultName; - // const vaultsUnsetPerms = - // grpcUtils.promisifyUnaryCall( - // client, - // client.vaultsPermissionsUnset, - // ); - - // // Creating a vault. - // const vault = await vaultManager.createVault(vaultName); - // const vaults = await vaultManager.listVaults(); - // const vaultId = vault.vaultId; - - // // Creating a gestalts state - // await createGestaltState(); - // fail('Functionality not fully implemented'); - // // FIXME: not implemented yet - // // await vaultManager.setVaultPermissions(node2.id, vaultId); - - // const unsetVaultPermMessage = new vaultsPB.PermUnset(); - // const nodeMessage = new nodesPB.Node(); - // const vaultMessage = new vaultsPB.Vault(); - // nodeMessage.setNodeId(node2.id); - // vaultMessage.setNameOrId(vaults[0].name); - // unsetVaultPermMessage.setVault(vaultMessage); - // unsetVaultPermMessage.setNode(nodeMessage); - // await vaultsUnsetPerms(unsetVaultPermMessage, callCredentials); - - // // FIXME: not implemented yet - // // const result = await vaultManager.getVaultPermissions(vaultId); - // // const stringResult = JSON.stringify(result); - // // expect(stringResult).toContain(node2.id); - // // expect(stringResult.includes('pull')).toBeFalsy(); - // }); }); }); diff --git a/tests/client/service/notificationsRead.test.ts b/tests/client/service/notificationsRead.test.ts index c3882bc8a..c5442b973 100644 --- a/tests/client/service/notificationsRead.test.ts +++ b/tests/client/service/notificationsRead.test.ts @@ -1,5 +1,5 @@ import type { Host, Port } from '@/network/types'; -import type { VaultName } from '@/vaults/types'; +import type { VaultIdEncoded, VaultName } from '@/vaults/types'; import fs from 'fs'; import path from 'path'; import os from 'os'; @@ -106,7 +106,7 @@ describe('notificationsRead', () => { { data: { type: 'VaultShare', - vaultId: 'vault', + vaultId: 'vault' as VaultIdEncoded, vaultName: 'vault' as VaultName, actions: { clone: null, diff --git a/tests/client/utils.ts b/tests/client/utils.ts index 7c55b5c2e..71d81d943 100644 --- a/tests/client/utils.ts +++ b/tests/client/utils.ts @@ -36,6 +36,7 @@ async function openTestClientServer({ notificationsManager: pkAgent.notificationsManager, discovery: pkAgent.discovery, sigchain: pkAgent.sigchain, + acl: pkAgent.acl, fwdProxy: pkAgent.fwdProxy, revProxy: pkAgent.revProxy, grpcServerClient: pkAgent.grpcServerClient, diff --git a/tests/git/utils.test.ts b/tests/git/utils.test.ts index 357070cd6..3f4b6cf9b 100644 --- a/tests/git/utils.test.ts +++ b/tests/git/utils.test.ts @@ -69,7 +69,11 @@ describe('Git utils', () => { }); describe('list refs', () => { test('on master', async () => { - const refs = await gitUtils.listRefs(efs, '.git', 'refs/heads'); + const refs = await gitUtils.listRefs( + efs, + '.git', + path.join('refs', 'heads'), + ); expect(refs).toEqual(['master']); }); }); @@ -85,11 +89,10 @@ describe('Git utils', () => { expect(gitEncodedString.equals(Buffer.from('0004'))).toBe(true); }); test('an upload pack', async () => { - const uploadPackBuffers = (await gitUtils.uploadPack( - efs, - '.git', - true, - )) as Buffer[]; + const uploadPackBuffers = (await gitUtils.uploadPack({ + fs: efs, + advertiseRefs: true, + })) as Buffer[]; 
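// A minimal migration sketch for callers of the old positional gitUtils API,
// assuming only the option names visible in this diff; that the git directory
// defaults sensibly when omitted is an inference from the tests no longer
// passing '.git' explicitly:
//
//   // Before: positional arguments (fs, gitdir, advertiseRefs / ref, depth)
//   await gitUtils.uploadPack(efs, '.git', true);
//   await gitUtils.resolve(efs, '.git', 'HEAD', 2);
//
//   // After: a single options object; omitted options fall back to defaults
//   await gitUtils.uploadPack({ fs: efs, advertiseRefs: true });
//   await gitUtils.resolve({ fs: efs, ref: 'HEAD', depth: 2 });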
const uploadPack = Buffer.concat(uploadPackBuffers); expect(uploadPack.toString('utf8')).toBe( `007d${firstCommit.oid} HEAD\0side-band-64k symref=HEAD:refs/heads/master agent=git/isomorphic-git@1.8.1 @@ -100,20 +103,23 @@ describe('Git utils', () => { }); describe('resolve refs', () => { test('to a commit oid', async () => { - const ref = await gitUtils.resolve(efs, '.git', commits[0].oid); + const ref = await gitUtils.resolve({ + fs: efs, + ref: commits[0].oid, + }); expect(ref).toBe(firstCommit.oid); }); test('to HEAD', async () => { - const ref = await gitUtils.resolve(efs, '.git', 'HEAD'); + const ref = await gitUtils.resolve({ fs: efs, ref: 'HEAD' }); expect(ref).toBe(firstCommit.oid); }); test('to HEAD including depth', async () => { - const ref = await gitUtils.resolve(efs, '.git', 'HEAD', 2); + const ref = await gitUtils.resolve({ fs: efs, ref: 'HEAD', depth: 2 }); expect(ref).toBe('refs/heads/master'); }); test('to non-existant refs', async () => { await expect(() => - gitUtils.resolve(efs, '.git', 'this-is-not-a-ref'), + gitUtils.resolve({ fs: efs, ref: 'this-is-not-a-ref' }), ).rejects.toThrow(gitErrors.ErrorGitUndefinedRefs); }); }); @@ -122,6 +128,7 @@ describe('Git utils', () => { await expect(() => gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', }), @@ -130,6 +137,7 @@ describe('Git utils', () => { test('parsed', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, }); @@ -139,6 +147,7 @@ describe('Git utils', () => { test('content', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'content', @@ -163,6 +172,7 @@ describe('Git utils', () => { test('wrapped', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'wrapped', @@ -187,6 +197,7 @@ describe('Git utils', () => { test('deflated', async () => { const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'deflated', @@ -209,6 +220,7 @@ describe('Git utils', () => { ); const ref = await gitUtils.readObject({ fs: efs, + dir: '.', gitdir: '.git', oid: firstCommit.oid, format: 'deflated', diff --git a/tests/nodes/NodeConnection.test.ts b/tests/nodes/NodeConnection.test.ts index 9d50ae94a..16dee5e65 100644 --- a/tests/nodes/NodeConnection.test.ts +++ b/tests/nodes/NodeConnection.test.ts @@ -253,8 +253,8 @@ describe(`${NodeConnection.name} test`, () => { serverVaultManager = await VaultManager.createVaultManager({ keyManager: serverKeyManager, vaultsPath: serverVaultsPath, - nodeConnectionManager: serverNodeConnectionManager, - vaultsKey: serverKeyManager.vaultKey, + nodeConnectionManager: dummyNodeConnectionManager, + notificationsManager: serverNotificationsManager, db: serverDb, acl: serverACL, gestaltGraph: serverGestaltGraph, @@ -279,6 +279,9 @@ describe(`${NodeConnection.name} test`, () => { nodeGraph: serverNodeGraph, sigchain: serverSigchain, notificationsManager: serverNotificationsManager, + acl: serverACL, + gestaltGraph: serverGestaltGraph, + revProxy: serverRevProxy, }); agentServer = new GRPCServer({ logger: logger, diff --git a/tests/nodes/NodeManager.test.ts b/tests/nodes/NodeManager.test.ts index 11680548b..cbca0e52e 100644 --- a/tests/nodes/NodeManager.test.ts +++ b/tests/nodes/NodeManager.test.ts @@ -129,7 +129,6 @@ describe(`${NodeManager.name} test`, () => { recursive: true, }); }); - test( 'pings node', 
async () => { diff --git a/tests/notifications/NotificationsManager.test.ts b/tests/notifications/NotificationsManager.test.ts index d52aa0968..8153178a0 100644 --- a/tests/notifications/NotificationsManager.test.ts +++ b/tests/notifications/NotificationsManager.test.ts @@ -212,7 +212,7 @@ describe('NotificationsManager', () => { }; const vaultNotification: NotificationData = { type: 'VaultShare', - vaultId: vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -276,7 +276,7 @@ describe('NotificationsManager', () => { }; const vaultNotification: NotificationData = { type: 'VaultShare', - vaultId: vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -341,7 +341,7 @@ describe('NotificationsManager', () => { const notification3: Notification = { data: { type: 'VaultShare', - vaultId: vaultsUtils.generateVaultId().toString(), + vaultId: vaultsUtils.encodeVaultId(vaultsUtils.generateVaultId()), vaultName: 'vaultName' as VaultName, actions: { clone: null, diff --git a/tests/notifications/utils.test.ts b/tests/notifications/utils.test.ts index a57e393bf..5a3b8a617 100644 --- a/tests/notifications/utils.test.ts +++ b/tests/notifications/utils.test.ts @@ -2,8 +2,7 @@ import type { Notification, NotificationData } from '@/notifications/types'; import type { VaultActions, VaultName } from '@/vaults/types'; import { createPublicKey } from 'crypto'; import { EmbeddedJWK, jwtVerify, exportJWK } from 'jose'; -import { IdInternal } from '@matrixai/id'; -import { sleep } from '@/utils'; + import * as keysUtils from '@/keys/utils'; import * as notificationsUtils from '@/notifications/utils'; import * as notificationsErrors from '@/notifications/errors'; @@ -14,9 +13,8 @@ import * as testUtils from '../utils'; describe('Notifications utils', () => { const nodeId = testUtils.generateRandomNodeId(); const nodeIdEncoded = nodesUtils.encodeNodeId(nodeId); - const vaultId = vaultsUtils - .makeVaultId(IdInternal.fromString('vaultIdxxxxxxxxx')) - .toString(); + const vaultId = vaultsUtils.generateVaultId(); + const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId); test('generates notification ids', async () => { const generator = notificationsUtils.createNotificationIdGenerator(); @@ -39,7 +37,6 @@ describe('Notifications utils', () => { currentId = generator(); expect(Buffer.compare(lastId, currentId)).toBeTruthy(); lastId = currentId; - await sleep(10); } }); @@ -62,7 +59,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -112,7 +109,7 @@ describe('Notifications utils', () => { result = await jwtVerify(signedVaultShareNotification, EmbeddedJWK, {}); expect(result.payload.data).toEqual({ type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName', actions: { clone: null, @@ -143,7 +140,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, @@ -197,7 +194,7 @@ describe('Notifications utils', () => { ); expect(decodedVaultShareNotification.data).toEqual({ type: 'VaultShare', - vaultId: vaultId, + 
vaultId: vaultIdEncoded, vaultName: 'vaultName', actions: { clone: null, @@ -237,7 +234,7 @@ describe('Notifications utils', () => { const vaultShareNotification: Notification = { data: { type: 'VaultShare', - vaultId: vaultId, + vaultId: vaultIdEncoded, vaultName: 'vaultName' as VaultName, actions: { clone: null, diff --git a/tests/vaults/VaultInternal.test.ts b/tests/vaults/VaultInternal.test.ts index defec75f8..34f03d70c 100644 --- a/tests/vaults/VaultInternal.test.ts +++ b/tests/vaults/VaultInternal.test.ts @@ -1,611 +1,944 @@ -import type { Vault, VaultId, VaultKey } from '@/vaults/types'; +import type { VaultId } from '@/vaults/types'; +import type { Vault } from '@/vaults/Vault'; +import type KeyManager from '@/keys/KeyManager'; +import type { DBDomain, DBLevel } from '@matrixai/db'; import os from 'os'; import path from 'path'; import fs from 'fs'; +import { DB } from '@matrixai/db'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { EncryptedFS } from 'encryptedfs'; -import { VaultInternal } from '@/vaults'; - -import { generateVaultId, generateVaultKey } from '@/vaults/utils'; +import git from 'isomorphic-git'; +import { tagLast } from '@/vaults/types'; +import VaultInternal from '@/vaults/VaultInternal'; import * as vaultsErrors from '@/vaults/errors'; import { sleep } from '@/utils'; -import { KeyManager, utils as keysUtils } from '@/keys'; -import * as testUtils from '../utils'; +import * as keysUtils from '@/keys/utils'; +import * as vaultsUtils from '@/vaults/utils'; +import * as testsUtils from '../utils'; + +jest.mock('@/keys/utils', () => ({ + ...jest.requireActual('@/keys/utils'), + generateDeterministicKeyPair: + jest.requireActual('@/keys/utils').generateKeyPair, +})); describe('VaultInternal', () => { + const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); + let dataDir: string; - let dbPath: string; + let efsDbPath: string; let vault: VaultInternal; - let dbKey: VaultKey; + let dbKey: Buffer; let vaultId: VaultId; let efs: EncryptedFS; - const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); - let keyManager: KeyManager; - let mockedGenerateKeyPair: jest.SpyInstance; - let mockedGenerateDeterministicKeyPair: jest.SpyInstance; - beforeEach(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); - mockedGenerateDeterministicKeyPair = jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); + let db: DB; + let vaultsDb: DBLevel; + let vaultsDbDomain: DBDomain; + + const fakeKeyManager = { + getNodeId: () => { + return testsUtils.generateRandomNodeId(); + }, + } as KeyManager; + const secret1 = { name: 'secret-1', content: 'secret-content-1' }; + const secret2 = { name: 'secret-2', content: 'secret-content-2' }; + const secret3 = { name: 'secret-3', content: 'secret-content-3' }; + + const runGen = async (gen) => { + for await (const _ of gen) { + // Do nothing + } + }; + beforeEach(async () => { dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - dbKey = await generateVaultKey(); - dbPath = path.join(dataDir, 'db'); - await fs.promises.mkdir(dbPath); - vaultId = generateVaultId(); + dbKey = await keysUtils.generateKey(); + efsDbPath = path.join(dataDir, 'efsDb'); + await fs.promises.mkdir(efsDbPath); efs = await 
EncryptedFS.createEncryptedFS({ - dbPath, + dbPath: efsDbPath, dbKey, logger, }); await efs.start(); - const keysPath = path.join(dataDir, 'KEYS'); - keyManager = await KeyManager.createKeyManager({ - keysPath, - password: 'password', + + db = await DB.createDB({ + crypto: { + key: await keysUtils.generateKey(), + ops: { + encrypt: keysUtils.encryptWithKey, + decrypt: keysUtils.decryptWithKey, + }, + }, + dbPath: path.join(dataDir, 'db'), + fs: fs, logger: logger, }); - vault = await VaultInternal.create({ + vaultsDbDomain = ['vaults']; + vaultsDb = await db.level(vaultsDbDomain[0]); + + vaultId = vaultsUtils.generateVaultId(); + vault = await VaultInternal.createVaultInternal({ vaultId, - keyManager, + keyManager: fakeKeyManager, efs, logger, fresh: true, + db, + vaultsDb, + vaultsDbDomain, + vaultName: 'testVault', }); }); afterEach(async () => { - mockedGenerateKeyPair.mockRestore(); - mockedGenerateDeterministicKeyPair.mockRestore(); + await vault.stop(); await vault.destroy(); + await db.stop(); + await db.destroy(); await efs.stop(); await efs.destroy(); - await keyManager.stop(); - await keyManager.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - test('is type correct', async () => { - expect(vault).toBeInstanceOf(VaultInternal); - }); - - describe('version', () => { - test('can change to the current commit', async () => { - let commit = (await vault.log(1))[0]; - await vault.version(commit.oid); - const files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - await vault.commit(async (efs) => { - await efs.writeFile('test', 'testdata'); - }); - commit = (await vault.log(1))[0]; - await vault.version(commit.oid); - const file = await vault.access(async (efs) => { - return await efs.readFile('test', { encoding: 'utf8' }); - }); - expect(file).toBe('testdata'); - }); - test('can change commits and preserve the log with no intermediate vault mutation', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(initCommit); - const endCommit = (await vault.log(1))[0].oid; - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - await vault.version(endCommit); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - }); - test( - 'does not allow changing to an unrecognised commit', - async () => { - await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( - vaultsErrors.ErrorVaultCommitUndefined, - ); - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - const secondCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - const fourthCommit = (await vault.log(1))[0].oid; - await vault.version(secondCommit); - await vault.commit(async (efs) => { - const fd = await efs.open('test3', 'w'); - await efs.write(fd, 'testdata6', 3, 6); - await efs.close(fd); - }); - await vault.version(fourthCommit); - await vault.commit(async (efs) => { - await 
efs.writeFile('test4', 'testdata4'); - }); - }, - global.defaultTimeout * 2, - ); - test('can change to the HEAD commit', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(initCommit); - await vault.version('HEAD'); - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - await vault.version(initCommit); - await vault.version('last'); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test2', 'test3']); - }); - test('adjusts HEAD after vault mutation, discarding forward and preserving backwards history', async () => { - const initCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test1', 'testdata1'); - }); - const secondCommit = (await vault.log(1))[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('test2', 'testdata2'); - }); - await vault.commit(async (efs) => { - await efs.writeFile('test3', 'testdata3'); - }); - await vault.version(secondCommit); - await vault.commit(async (efs) => { - await efs.writeFile('test4', 'testdata4'); - }); - let files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual(['test1', 'test4']); - await vault.version(initCommit); - files = await vault.access(async (efs) => { - return await efs.readdir('.'); - }); - expect(files).toEqual([]); - }); - }); - test('VaultInternal readiness', async () => { - await vault.destroy(); + await vault.stop(); await expect(async () => { await vault.log(); - }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); + }).rejects.toThrow(vaultsErrors.ErrorVaultNotRunning); + await vault.destroy(); await expect(async () => { - await vault.readWorkingDirectory(); + await vault.start(); }).rejects.toThrow(vaultsErrors.ErrorVaultDestroyed); }); + test('is type correct', async () => { + expect(vault).toBeInstanceOf(VaultInternal); + }); test('creating state on disk', async () => { expect(await fs.promises.readdir(dataDir)).toContain('db'); }); - test('Accessing a change', async () => { - await vault.commit(async (efs) => { + test('accessing a change', async () => { + await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); - await vault.access(async (efs) => { + await vault.readF(async (efs) => { expect(await efs.readdir('.')).toContain('secret-1'); expect((await efs.readFile('secret-1')).toString()).toStrictEqual( 'secret-content', ); }); }); - test('Vault maintains data across VaultInternal instances', async () => { - await vault.commit(async (efs) => { + test('maintains data across VaultInternal instances', async () => { + await vault.writeF(async (efs) => { await efs.writeFile('secret-1', 'secret-content'); }); - await vault.destroy(); - vault = await VaultInternal.create({ + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); + }); + await vault.stop(); + vault = await VaultInternal.createVaultInternal({ vaultId, - keyManager, + keyManager: fakeKeyManager, efs, logger, fresh: false, + db, + vaultName: 'testVault1', + vaultsDb, + vaultsDbDomain, }); - await vault.access(async (efs) => { + await 
vault.readF(async (efs) => { expect((await efs.readFile('secret-1')).toString()).toStrictEqual( 'secret-content', ); }); }); - describe('Writing operations', () => { - const secret1 = { name: 'secret-1', content: 'secret-content-1' }; - const secret2 = { name: 'secret-2', content: 'secret-content-2' }; - test('Write operation allowed', async () => { - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); + // Mutation and history + test('can change to the current commit', async () => { + let commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const files = await vault.readF(async (efs) => { + return await efs.readdir('.'); }); - test('Concurrent write operations prevented', async () => { - await Promise.all([ - vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content-1'); - }), - vault.commit(async (efs) => { - await efs.writeFile('secret-2', 'secret-content-2'); - }), - vault.commit(async (efs) => { - await efs.writeFile('secret-3', 'secret-content-3'); - }), - ]); - - await vault.access(async (efs) => { - const directory = await efs.readdir('.'); - expect(directory).toContain('secret-1'); - expect(directory).toContain('secret-2'); - expect(directory).toContain('secret-3'); - }); - const log = await vault.log(); - expect(log.length).toEqual(4); + expect(files).toEqual([]); + await vault.writeF(async (efs) => { + await efs.writeFile('test', 'testdata'); }); - test('Write locks read', async () => { - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - - await Promise.all([ - vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'SUPER-DUPER-SECRET-CONTENT'); - }), - vault.access(async (efs) => { - expect((await efs.readFile('secret-1')).toString()).toEqual( - 'SUPER-DUPER-SECRET-CONTENT', - ); - }), - ]); + commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const file = await vault.readF(async (efs) => { + return await efs.readFile('test', { encoding: 'utf8' }); + }); + expect(file).toBe('testdata'); + }); + test('can change commits and preserve the log with no intermediate vault mutation', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + const endCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.version(initCommit); + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + await vault.version(endCommit); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + }); + test('does not allow changing to an unrecognised commit', async () => { + await expect(() => vault.version('unrecognisedcommit')).rejects.toThrow( + vaultsErrors.ErrorVaultReferenceInvalid, + ); + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + const fourthCommit = (await vault.log(undefined, 
1))[0].commitId; + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + const fd = await efs.open('test3', 'w'); + await efs.write(fd, 'testdata6', 3, 6); + await efs.close(fd); + }); + await expect(vault.version(fourthCommit)).rejects.toThrow( + vaultsErrors.ErrorVaultReferenceMissing, + ); + }); + test('can change to the latest commit', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + await vault.version(initCommit); + await vault.version(tagLast); + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + await vault.version(initCommit); + await vault.version('last'); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test2', 'test3']); + }); + test('adjusts HEAD after vault mutation, discarding forward and preserving backwards history', async () => { + const initCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('test4', 'testdata4'); + }); + let files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual(['test1', 'test4']); + await vault.version(initCommit); + files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + }); + test('write operation allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + }); + test('read operation allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + }); + }); + test('concurrent write operations prevented', async () => { + await Promise.all([ + vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content-1'); + }), + vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content-2'); + }), + vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret-content-3'); + }), + ]); + + await vault.readF(async (efs) => { + const directory = await efs.readdir('.'); + expect(directory).toContain('secret-1'); + expect(directory).toContain('secret-2'); + expect(directory).toContain('secret-3'); + }); + const log = await vault.log(); + expect(log.length).toEqual(4); + }); + test('commit added if mutation in writeF', async () => { + const commit = (await vault.log())[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).not.toStrictEqual(commit); + }); + test('no commit added 
if no mutation in write', async () => { + const commit = (await vault.log())[0].commitId; + await vault.writeF(async (_efs) => {}); + const log = await vault.log(); + expect(log).toHaveLength(1); + expect(log[0].message).not.toContain('secret-1'); + expect(log[0].commitId).toStrictEqual(commit); + }); + test('commit message contains all actions made in the commit', async () => { + // Adding + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + let log = await vault.log(); + expect(log[0].message).toContain(`${secret1.name} added`); + expect(log[0].message).toContain(`${secret2.name} added`); + // Checking contents + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + expect((await efs.readFile(secret2.name)).toString()).toEqual( + secret2.content, + ); }); - test('Commit added if mutation in write', async () => { - const commit = (await vault.log())[0].oid; - await vault.commit(async (efs) => { - await efs.writeFile('secret-1', 'secret-content'); - }); - const log = await vault.log(); - expect(log).toHaveLength(2); - expect(log[0].message).toContain('secret-1'); - expect(log[0].oid).not.toStrictEqual(commit); - }); - test('No commit added if no mutation in write', async () => { - const commit = (await vault.log())[0].oid; - await vault.commit(async (_efs) => {}); - const log = await vault.log(); - expect(log).toHaveLength(1); - expect(log[0].message).not.toContain('secret-1'); - expect(log[0].oid).toStrictEqual(commit); - }); - test('Commit message contains all actions made in the commit', async () => { - // Adding - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.writeFile(secret2.name, secret2.content); - }); - let log = await vault.log(); - expect(log[0].message).toContain(`${secret1.name} added`); - expect(log[0].message).toContain(`${secret2.name} added`); - // Checking contents - await vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - expect((await efs.readFile(secret2.name)).toString()).toEqual( - secret2.content, - ); - }); - - // Modifying - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, `${secret2.content} new content`); - }); - log = await vault.log(); - expect(log[0].message).toContain(`${secret2.name} modified`); - // Checking changes - await vault.access(async (efs) => { - expect((await efs.readFile(secret2.name)).toString()).toEqual( - `${secret2.content} new content`, - ); - }); - - // Moving and removing - await vault.commit(async (efs) => { - await efs.rename(secret1.name, `${secret1.name}-new`); - await efs.unlink(secret2.name); - }); - // Checking changes. 
- await vault.access(async (efs) => { - expect(await efs.exists(secret1.name)).toBeFalsy(); - expect(await efs.exists(`${secret1.name}-new`)).toBeTruthy(); - expect(await efs.exists(secret2.name)).toBeFalsy(); - }); - - log = await vault.log(); - expect(log[0].message).toContain(`${secret1.name}-new added`); - expect(log[0].message).toContain(`${secret1.name} deleted`); - expect(log[0].message).toContain(`${secret2.name} deleted`); - }); - test('No mutation to vault when part of a commit operation fails', async () => { - // Failing commit operation - await expect(() => - vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.rename('notValid', 'randomName'); // Throws - }), - ).rejects.toThrow(); - - // Make sure secret1 wasn't written when the above commit failed. - await vault.access(async (efs) => { - expect(await efs.readdir('.')).not.toContain(secret1.name); - }); - // No new commit. - expect(await vault.log()).toHaveLength(1); + // Modifying + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, `${secret2.content} new content`); + }); + log = await vault.log(); + expect(log[0].message).toContain(`${secret2.name} modified`); + // Checking changes + await vault.readF(async (efs) => { + expect((await efs.readFile(secret2.name)).toString()).toEqual( + `${secret2.content} new content`, + ); + }); - // Succeeding commit operation. - await vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - }); + // Moving and removing + await vault.writeF(async (efs) => { + await efs.rename(secret1.name, `${secret1.name}-new`); + await efs.unlink(secret2.name); + }); + // Checking changes + await vault.readF(async (efs) => { + expect(await efs.exists(secret1.name)).toBeFalsy(); + expect(await efs.exists(`${secret1.name}-new`)).toBeTruthy(); + expect(await efs.exists(secret2.name)).toBeFalsy(); + }); - // Secret 1 shouldn't exist while secret2 exists. - await vault.access(async (efs) => { - const directory = await efs.readdir('.'); - expect(directory).not.toContain(secret1.name); // - expect(directory).toContain(secret2.name); - }); + log = await vault.log(); + expect(log[0].message).toContain(`${secret1.name}-new added`); + expect(log[0].message).toContain(`${secret1.name} deleted`); + expect(log[0].message).toContain(`${secret2.name} deleted`); + }); + test('no mutation to vault when part of a commit operation fails', async () => { + // Failing commit operation + await expect(() => + vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.rename('notValid', 'randomName'); // Throws + }), + ).rejects.toThrow(); - // Has a new commit. - expect(await vault.log()).toHaveLength(2); + // Make sure secret1 wasn't written when the above commit failed + await vault.readF(async (efs) => { + expect(await efs.readdir('.')).not.toContain(secret1.name); }); - test('Locking occurs when making a commit.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; - }); - let firstCommitResolved = false; - let firstCommitResolveTime; + // No new commit + expect(await vault.log()).toHaveLength(1); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await delayPromise; // Hold the lock hostage. 
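// The "hold the lock hostage" trick above relies on a promise whose resolver
// is captured outside the executor. A minimal standalone sketch of that
// deferred-promise pattern, assuming nothing beyond standard Promise
// semantics (no Polykey-specific APIs):
//
//   function deferred<T = void>(): { p: Promise<T>; resolve: (v: T) => void } {
//     let resolve!: (v: T) => void;
//     const p = new Promise<T>((res) => {
//       resolve = res;
//     });
//     return { p, resolve };
//   }
//
//   // Usage: start an operation that awaits `p` while holding the lock,
//   // assert that a competing operation is still blocked, then call
//   // `resolve()` to release both.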
- firstCommitResolved = true; - firstCommitResolveTime = Date.now(); - }); + // Succeeding commit operation + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); - // Now that we are holding the lock hostage, - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - // We want to check if any action resolves before the lock is released. - - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.commit(async (efs) => { - await efs.writeFile(secret2.name, secret2.content); - secondCommitResolved = true; - await sleep(2); - secondCommitResolveTime = Date.now(); - }); + // Secret 1 shouldn't exist while secret2 exists + await vault.readF(async (efs) => { + const directory = await efs.readdir('.'); + expect(directory).not.toContain(secret1.name); // + expect(directory).toContain(secret2.name); + }); - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. - await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - // Commit order should be commit2 -> commit1 -> init - const log = await vault.log(); - expect(log[0].message).toContain(secret2.name); - expect(log[1].message).toContain(secret1.name); - }); - }); - describe('Reading operations', () => { - const secret1 = { name: 'secret-1', content: 'secret-content-1' }; - const secret2 = { name: 'secret-2', content: 'secret-content-2' }; - - beforeEach(async () => { - await vault.commit(async (efs) => { - await efs.writeFile(secret1.name, secret1.content); - await efs.writeFile(secret2.name, secret2.content); - }); + // Has a new commit + expect(await vault.log()).toHaveLength(2); + }); + test('concurrent read operations allowed', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); }); - test('Read operation allowed', async () => { - await vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }); + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + expect((await efs.readFile(secret2.name)).toString()).toEqual( + secret2.content, + ); + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); }); - test('Concurrent read operations allowed', async () => { - await vault.access(async (efs) => { + + await Promise.all([ + vault.readF(async (efs) => { expect((await efs.readFile(secret1.name)).toString()).toEqual( secret1.content, ); + }), + vault.readF(async (efs) => { expect((await efs.readFile(secret2.name)).toString()).toEqual( secret2.content, ); + }), + vault.readF(async (efs) => { expect((await efs.readFile(secret1.name)).toString()).toEqual( secret1.content, ); - }); - - await Promise.all([ - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - vault.access(async (efs) => { - expect((await 
efs.readFile(secret2.name)).toString()).toEqual( - secret2.content, - ); - }), - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - ]); - }); - test('Read locks write', async () => { - await Promise.all([ - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }), - vault.commit(async (efs) => { - await efs.writeFile(secret1.name, 'NEW-CONTENT'); - }), - vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - 'NEW-CONTENT', - ); - }), - ]); + }), + ]); + }); + test('no commit after read', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); }); - test('No commit after read', async () => { - const commit = (await vault.log())[0].oid; - await vault.access(async (efs) => { - expect((await efs.readFile(secret1.name)).toString()).toEqual( - secret1.content, - ); - }); - const log = await vault.log(); - expect(log).toHaveLength(2); - expect(log[0].oid).toStrictEqual(commit); - }); - test('Locking occurs when making an access.', async () => { - // We want to check if the locking is happening. so we need a way to see if an operation is being blocked. - let resolveDelay; - const delayPromise = new Promise((resolve, _reject) => { - resolveDelay = resolve; - }); - let firstCommitResolved = false; - let firstCommitResolveTime; - - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); - - const commit1 = vault.access(async (efs) => { - await efs.readFile(secret1.name); - await delayPromise; // Hold the lock hostage. - firstCommitResolved = true; - firstCommitResolveTime = Date.now(); - }); - - // Now that we are holding the lock hostage, - // we want to check if any action resolves before the lock is released. - // @ts-ignore - expect(vault.lock.isLocked()).toBeTruthy(); - - let secondCommitResolved = false; - let secondCommitResolveTime; - const commit2 = vault.access(async (efs) => { - await efs.readFile(secret2.name); - secondCommitResolved = true; - await sleep(10); - secondCommitResolveTime = Date.now(); - }); - - // Give plenty of time for a commit to resolve. - await sleep(200); - - // Now we want to check for the expected conditions. - // 1. Both commist have not completed. - // commit 1 is holding the lock. - expect(firstCommitResolved).toBeFalsy(); - expect(secondCommitResolved).toBeFalsy(); - - // 2. We release the hostage so both should resolve. - await sleep(200); - resolveDelay(); - await commit1; - await commit2; - expect(firstCommitResolved).toBeTruthy(); - expect(secondCommitResolved).toBeTruthy(); - expect(secondCommitResolveTime).toBeGreaterThan(firstCommitResolveTime); - // @ts-ignore - expect(vault.lock.isLocked()).toBeFalsy(); + const commit = (await vault.log())[0].commitId; + await vault.readF(async (efs) => { + expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); }); + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).toStrictEqual(commit); }); - test('Vault only exposes limited commands of VaultInternal', async () => { + test('only exposes limited commands of VaultInternal', async () => { // Converting a vault to the interface const vaultInterface = vault as Vault; - // Using the avaliable functions. 
- await vaultInterface.commit(async (efs) => { + // Using the available functions + await vaultInterface.writeF(async (efs) => { await efs.writeFile('test', 'testContent'); }); - await vaultInterface.access(async (efs) => { + await vaultInterface.readF(async (efs) => { const content = (await efs.readFile('test')).toString(); expect(content).toStrictEqual('testContent'); }); - expect(vaultInterface.baseDir).toBeTruthy(); - expect(vaultInterface.gitDir).toBeTruthy(); + expect(vaultInterface.vaultDataDir).toBeTruthy(); + expect(vaultInterface.vaultGitDir).toBeTruthy(); expect(vaultInterface.vaultId).toBeTruthy(); - expect(vaultInterface.commit).toBeTruthy(); - expect(vaultInterface.access).toBeTruthy(); + expect(vaultInterface.writeF).toBeTruthy(); + expect(vaultInterface.writeG).toBeTruthy(); + expect(vaultInterface.readF).toBeTruthy(); + expect(vaultInterface.readG).toBeTruthy(); expect(vaultInterface.log).toBeTruthy(); expect(vaultInterface.version).toBeTruthy(); // Can we convert back? const vaultNormal = vaultInterface as VaultInternal; - expect(vaultNormal.destroy).toBeTruthy(); // This exists again. + expect(vaultNormal.destroy).toBeTruthy(); // This exists again + }); + test('cannot commit when the remote field is set', async () => { + // Write remote metadata + await db.put( + [...vaultsDbDomain, vaultsUtils.encodeVaultId(vaultId)], + VaultInternal.remoteKey, + { remoteNode: '', remoteVault: '' }, + ); + const commit = (await vault.log(undefined, 1))[0]; + await vault.version(commit.commitId); + const files = await vault.readF(async (efs) => { + return await efs.readdir('.'); + }); + expect(files).toEqual([]); + await expect( + vault.writeF(async (efs) => { + await efs.writeFile('test', 'testdata'); + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); + }); + test('cannot checkout old commits after branching commit', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('test1', 'testdata1'); + }); + const secondCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test2', 'testdata2'); + }); + const thirdCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.writeF(async (efs) => { + await efs.writeFile('test3', 'testdata3'); + }); + const fourthCommit = (await vault.log(undefined, 1))[0].commitId; + await vault.version(secondCommit); + await vault.writeF(async (efs) => { + await efs.writeFile('test4', 'testdata4'); + }); + await expect(() => { + return vault.version(thirdCommit); + }).rejects.toThrow(); + await expect(() => { + return vault.version(fourthCommit); + }).rejects.toThrow(); + }); + test('can recover from dirty state', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); + }); + // Write files to the working directory + // @ts-ignore: kidnap vault EFS + const vaultEFS = vault.efsVault; + await vaultEFS.writeFile('dirty', 'dirtyData'); + await vaultEFS.writeFile('secret-1', 'dirtyData'); + // Setting the dirty flag to true + const vaultMetadataDbDomain = [ + ...vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, true); + + // Restarting vault + await vault.stop(); + await vault.start({}); + + // Checking if working directory was cleaned + // and head was moved to latest commit + await vault.readF(async (efs) => { + const files = await efs.readdir('.'); + 
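// Only committed state should remain after recovery +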
expect(files).toContain('secret-1'); + expect((await efs.readFile('secret-1')).toString()).toEqual( + 'secret-content', + ); + expect(files).toContain('secret-2'); + expect(files).not.toContain('dirty'); + }); + }); + test('clean errant commits recovering from dirty state', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret-content'); + }); + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret-content'); + }); + // Creating out of history commits + // @ts-ignore: kidnap vault EFS + const vaultEFS = vault.efs; + const log = await vault.log(); + const ref = log[1].commitId; + await efs.writeFile(path.join(vault.vaultDataDir, 'newfile1'), 'hello'); + const newRef1 = await git.commit({ + fs: vaultEFS, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: ref, + }); + await efs.writeFile(path.join(vault.vaultDataDir, 'newfile2'), 'world'); + const newRef2 = await git.commit({ + fs: vaultEFS, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: newRef1, + }); + + // Setting dirty flag true + const vaultMetadataDbDomain = [ + ...vaultsDbDomain, + vaultsUtils.encodeVaultId(vaultId), + ]; + await db.put(vaultMetadataDbDomain, VaultInternal.dirtyKey, true); + + // Restarting vault + await vault.stop(); + await vault.start({}); + + // Checking if errant commits were cleaned up + await expect(vault.version(newRef1)).rejects.toThrow(); + await expect(vault.version(newRef2)).rejects.toThrow(); + }); + test('commit added if mutation in writeG', async () => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG(async function* (efs): AsyncGenerator { + yield await efs.writeFile('secret-1', 'secret-content'); + }); + for await (const _ of gen) { + // Do nothing + } + const log = await vault.log(); + expect(log).toHaveLength(2); + expect(log[0].commitId).not.toStrictEqual(commit); + }); + test('no commit added if no mutation in writeG', async () => { + const commit = (await vault.log())[0].commitId; + const gen = vault.writeG(async function* (_efs): AsyncGenerator {}); + for await (const _ of gen) { + // Do nothing + } + const log = await vault.log(); + expect(log).toHaveLength(1); + expect(log[0].message).not.toContain('secret-1'); + expect(log[0].commitId).toStrictEqual(commit); + }); + test('no mutation to vault when part of a commit operation fails in writeG', async () => { + const gen = vault.writeG(async function* (efs): AsyncGenerator { + yield await efs.writeFile(secret1.name, secret1.content); + yield await efs.rename('notValid', 'randomName'); // Throws + }); + // Failing commit operation + await expect(() => runGen(gen)).rejects.toThrow(); + + // Make sure secret1 wasn't written when the above commit failed + await vault.readF(async (efs) => { + expect(await efs.readdir('.')).not.toContain(secret1.name); + }); + // No new commit + expect(await vault.log()).toHaveLength(1); + }); + test('no commit after readG', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + await efs.writeFile(secret2.name, secret2.content); + }); + const commit = (await vault.log())[0].commitId; + const gen = await vault.readG(async function* (efs): AsyncGenerator { + yield expect((await efs.readFile(secret1.name)).toString()).toEqual( + secret1.content, + ); + }); + await runGen(gen); + const log = await vault.log(); + 
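// Read generators must not create commits: same log length and same head +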
expect(log).toHaveLength(2); + expect(log[0].commitId).toStrictEqual(commit); + }); + test('garbage collection', async () => { + await vault.writeF(async (efs) => { + await efs.writeFile(secret1.name, secret1.content); + }); + await vault.writeF(async (efs) => { + await efs.writeFile(secret2.name, secret2.content); + }); + await vault.writeF(async (efs) => { + await efs.writeFile(secret3.name, secret3.content); + }); + // @ts-ignore: kidnap efs + const vaultEfs = vault.efs; + // @ts-ignore: kidnap efs + const vaultEfsData = vault.efsVault; + const quickCommit = async (ref: string, secret: string) => { + await vaultEfsData.writeFile(secret, secret); + await git.add({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + filepath: secret, + }); + return await git.commit({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + author: { + name: 'test', + email: 'test', + }, + message: 'test', + ref: ref, + }); + }; + const log = await vault.log(); + let num = 5; + const refs: string[] = []; + for (const logElement of log) { + refs.push(await quickCommit(logElement.commitId, `secret-${num++}`)); + } + // @ts-ignore + await vault.garbageCollectGitObjects(); + + for (const ref of refs) { + await expect( + git.checkout({ + fs: vaultEfs, + dir: vault.vaultDataDir, + gitdir: vault.vaultGitDir, + ref, + }), + ).rejects.toThrow(git.Errors.CommitNotFetchedError); + } + }); + // Locking tests + const waitDelay = 200; + test('writeF respects read and write locking', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a write lock + const releaseWrite = await lock.acquireWrite(); + + let finished = false; + const writeP = vault.writeF(async () => { + finished = true; + }); + await sleep(waitDelay); + expect(finished).toBe(false); + releaseWrite(); + await writeP; + expect(finished).toBe(true); + + const releaseRead = await lock.acquireRead(); + finished = false; + const writeP2 = vault.writeF(async () => { + finished = true; + }); + await sleep(waitDelay); + releaseRead(); + await writeP2; + expect(finished).toBe(true); + }); + test('writeG respects read and write locking', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a write lock + const releaseWrite = await lock.acquireWrite(); + + let finished = false; + const writeGen = vault.writeG(async function* () { + yield; + finished = true; + yield; + }); + const runP = runGen(writeGen); + await sleep(waitDelay); + expect(finished).toBe(false); + releaseWrite(); + await runP; + expect(finished).toBe(true); + + const releaseRead = await lock.acquireRead(); + finished = false; + const writeGen2 = vault.writeG(async function* () { + yield; + finished = true; + yield; + }); + const runP2 = runGen(writeGen2); + await sleep(waitDelay); + releaseRead(); + await runP2; + expect(finished).toBe(true); + }); + test('readF respects write locking', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a write lock + const releaseWrite = await lock.acquireWrite(); + + let finished = false; + const writeP = vault.readF(async () => { + finished = true; + }); + await sleep(waitDelay); + expect(finished).toBe(false); + releaseWrite(); + await writeP; + expect(finished).toBe(true); + }); + test('readG respects write locking', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a write lock + const releaseWrite = await lock.acquireWrite(); + let finished = false; + const writeGen = vault.readG(async function* () { + yield; + 
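// Only reached once the held write lock is released +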
finished = true; + yield; + }); + const runP = runGen(writeGen); + await sleep(waitDelay); + expect(finished).toBe(false); + releaseWrite(); + await runP; + expect(finished).toBe(true); + }); + test('readF allows concurrent reads', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a read lock + const releaseRead = await lock.acquireRead(); + const finished: boolean[] = []; + const doThing = async () => { + finished.push(true); + }; + await Promise.all([ + vault.readF(doThing), + vault.readF(doThing), + vault.readF(doThing), + vault.readF(doThing), + ]); + expect(finished.length).toBe(4); + releaseRead(); + }); + test('readG allows concurrent reads', async () => { + // @ts-ignore: kidnap lock + const lock = vault.lock; + // Hold a read lock + const releaseRead = await lock.acquireRead(); + const finished: boolean[] = []; + const doThing = async function* () { + yield; + finished.push(true); + yield; + }; + await Promise.all([ + runGen(vault.readG(doThing)), + runGen(vault.readG(doThing)), + runGen(vault.readG(doThing)), + runGen(vault.readG(doThing)), + ]); + expect(finished.length).toBe(4); + releaseRead(); + }); + // Life-cycle + test('can create with CreateVaultInternal', async () => { + let vault1: VaultInternal | undefined; + try { + const vaultId1 = vaultsUtils.generateVaultId(); + vault1 = await VaultInternal.createVaultInternal({ + db, + efs, + keyManager: fakeKeyManager, + vaultId: vaultId1, + vaultsDb, + vaultsDbDomain, + logger, + }); + // Data exists for vault now + expect(await efs.readdir('.')).toContain( + vaultsUtils.encodeVaultId(vaultId1), + ); + } finally { + await vault1?.stop(); + await vault1?.destroy(); + } + }); + test('can create an existing vault with CreateVaultInternal', async () => { + let vault1: VaultInternal | undefined; + let vault2: VaultInternal | undefined; + try { + const vaultId1 = vaultsUtils.generateVaultId(); + vault1 = await VaultInternal.createVaultInternal({ + db, + efs, + keyManager: fakeKeyManager, + vaultId: vaultId1, + vaultsDb, + vaultsDbDomain, + logger, + }); + // Data exists for vault now + expect(await efs.readdir('.')).toContain( + vaultsUtils.encodeVaultId(vaultId1), + ); + await vault1.stop(); + // Data persists + expect(await efs.readdir('.')).toContain( + vaultsUtils.encodeVaultId(vaultId1), + ); + + // Re-opening the vault + vault2 = await VaultInternal.createVaultInternal({ + db, + efs, + keyManager: fakeKeyManager, + vaultId: vaultId1, + vaultsDb, + vaultsDbDomain, + logger, + }); + + // Data still exists and no new data was created + expect(await efs.readdir('.')).toContain( + vaultsUtils.encodeVaultId(vaultId1), + ); + expect(await efs.readdir('.')).toHaveLength(2); + } finally { + await vault1?.stop(); + await vault1?.destroy(); + await vault2?.stop(); + await vault2?.destroy(); + } + }); + test('stop is idempotent', async () => { + // Should complete with no errors + await vault.stop(); + await vault.stop(); + }); + test('destroy is idempotent', async () => { + await vault.stop(); + await vault.destroy(); + await vault.destroy(); }); }); diff --git a/tests/vaults/VaultManager.test.ts b/tests/vaults/VaultManager.test.ts index 8235ad70e..fe5cd97b8 100644 --- a/tests/vaults/VaultManager.test.ts +++ b/tests/vaults/VaultManager.test.ts @@ -1,224 +1,155 @@ -import type { NodeId, NodeAddress, NodeInfo } from '@/nodes/types'; +import type { NodeId, NodeIdEncoded } from '@/nodes/types'; +import type { + VaultAction, + VaultId, + VaultIdString, + VaultName, +} from '@/vaults/types'; +import type 
NotificationsManager from '@/notifications/NotificationsManager'; +import type ReverseProxy from '@/network/ReverseProxy'; import type { Host, Port, TLSConfig } from '@/network/types'; -import type { VaultId, VaultKey, VaultName } from '@/vaults/types'; -import type { ChainData } from '@/sigchain/types'; -import type { IAgentServiceServer } from '@/proto/js/polykey/v1/agent_service_grpc_pb'; +import fs from 'fs'; import os from 'os'; import path from 'path'; -import fs from 'fs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; +import { IdInternal } from '@matrixai/id'; import { DB } from '@matrixai/db'; -import { utils as idUtils } from '@matrixai/id'; - -import { KeyManager, utils as keysUtils } from '@/keys'; -import { NodeConnectionManager, NodeGraph, NodeManager } from '@/nodes'; -import { Sigchain } from '@/sigchain'; -import { VaultManager, vaultOps } from '@/vaults'; -import { ACL } from '@/acl'; -import { GestaltGraph } from '@/gestalts'; -import { ForwardProxy, ReverseProxy } from '@/network'; -import GRPCServer from '@/grpc/GRPCServer'; -import { AgentServiceService, createAgentService } from '@/agent'; -import { NotificationsManager } from '@/notifications'; - -import { errors as vaultErrors } from '@/vaults'; -import { utils as vaultUtils } from '@/vaults'; -import { utils as nodesUtils } from '@/nodes'; -import * as testUtils from '../utils'; +import { destroyed, running } from '@matrixai/async-init'; +import git from 'isomorphic-git'; +import ACL from '@/acl/ACL'; +import GestaltGraph from '@/gestalts/GestaltGraph'; +import NodeConnectionManager from '@/nodes/NodeConnectionManager'; +import KeyManager from '@/keys/KeyManager'; +import PolykeyAgent from '@/PolykeyAgent'; +import VaultManager from '@/vaults/VaultManager'; +import * as vaultsErrors from '@/vaults/errors'; +import NodeGraph from '@/nodes/NodeGraph'; +import * as nodesUtils from '@/nodes/utils'; +import ForwardProxy from '@/network/ForwardProxy'; +import * as vaultsUtils from '@/vaults/utils'; +import * as keysUtils from '@/keys/utils'; +import { sleep } from '@/utils'; +import VaultInternal from '@/vaults/VaultInternal'; +import * as testsUtils from '../utils'; + +const mockedGenerateDeterministicKeyPair = jest + .spyOn(keysUtils, 'generateDeterministicKeyPair') + .mockImplementation((bits, _) => { + return keysUtils.generateKeyPair(bits); + }); describe('VaultManager', () => { - const password = 'password'; const logger = new Logger('VaultManager Test', LogLevel.WARN, [ new StreamHandler(), ]); - const nonExistantVaultId = idUtils.fromString('DoesNotExist') as VaultId; - let dataDir: string; - let vaultsPath: string; - let vaultsKey: VaultKey; - let keyManager: KeyManager; - let db: DB; - let acl: ACL; - let gestaltGraph: GestaltGraph; - let nodeGraph: NodeGraph; - let nodeConnectionManager: NodeConnectionManager; - let nodeManager: NodeManager; - let vaultManager: VaultManager; - let sigchain: Sigchain; - - // FIXME, try not to do this, they can all have the localhost, - // but use the generated port when the server is started. 
- const sourceHost = '127.0.0.1' as Host; - const sourcePort = 11112 as Port; - const targetHost = '127.0.0.2' as Host; - const targetPort = 11113 as Port; - const altHost = '127.0.0.3' as Host; - const altPort = 11114 as Port; - const altHostIn = '127.0.0.4' as Host; - const altPortIn = 11115 as Port; - - let fwdProxy: ForwardProxy; - let revProxy: ReverseProxy; - let altRevProxy: ReverseProxy; + const nonExistentVaultId = IdInternal.fromString('DoesNotExistxxxx'); + const password = 'password'; + let remoteVaultId: VaultId; + + let remoteKeynode1Id: NodeId; + let remoteKeynode1IdEncoded: NodeIdEncoded; + let remoteKeynode2Id: NodeId; + let remoteKeynode2IdEncoded: NodeIdEncoded; + + const secretNames = ['Secret1', 'Secret2', 'Secret3', 'Secret4']; const vaultName = 'TestVault' as VaultName; const secondVaultName = 'SecondTestVault' as VaultName; const thirdVaultName = 'ThirdTestVault' as VaultName; - let mockedGenerateKeyPair: jest.SpyInstance; - let mockedGenerateDeterministicKeyPair: jest.SpyInstance; - - beforeAll(async () => { - const globalKeyPair = await testUtils.setupGlobalKeypair(); - mockedGenerateKeyPair = jest - .spyOn(keysUtils, 'generateKeyPair') - .mockResolvedValue(globalKeyPair); - mockedGenerateDeterministicKeyPair = jest - .spyOn(keysUtils, 'generateDeterministicKeyPair') - .mockResolvedValue(globalKeyPair); - - fwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, - }); - revProxy = new ReverseProxy({ - logger: logger, - }); - altRevProxy = new ReverseProxy({ - logger: logger, - }); - }); + let dataDir: string; + let vaultsPath: string; + let db: DB; + + // We only ever use this to get the NodeId; no need to create a whole one + const nodeId = testsUtils.generateRandomNodeId(); + const dummyKeyManager = { + getNodeId: () => nodeId, + } as KeyManager; + beforeEach(async () => { + mockedGenerateDeterministicKeyPair.mockImplementation((bits, _) => { + return keysUtils.generateKeyPair(bits); + }); dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - const keysPath = path.join(dataDir, 'keys'); - const dbPath = path.join(dataDir, 'db'); - vaultsPath = path.join(dataDir, 'vaults'); - vaultsKey = await vaultUtils.generateVaultKey(); - keyManager = await KeyManager.createKeyManager({ - password, - keysPath: keysPath, - logger: logger, - }); - - await fwdProxy.start({ - tlsConfig: { - keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, - certChainPem: await keyManager.getRootCertChainPem(), - }, - egressHost: sourceHost, - egressPort: sourcePort, - }); - + vaultsPath = path.join(dataDir, 'VAULTS'); db = await DB.createDB({ - dbPath: dbPath, - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, - }); - - sigchain = await Sigchain.createSigchain({ - keyManager: keyManager, - db: db, - logger: logger, - }); - nodeGraph = await NodeGraph.createNodeGraph({ - db: db, - keyManager: keyManager, - logger: logger, - }); - nodeConnectionManager = new NodeConnectionManager({ - keyManager, - nodeGraph, - fwdProxy: fwdProxy, - revProxy: revProxy, - logger: logger, - }); - await nodeConnectionManager.start(); - nodeManager = new NodeManager({ - db: db, - sigchain: sigchain, - keyManager: keyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: nodeConnectionManager, - logger: logger, - }); - - acl = await ACL.createACL({ - db: db, - logger: logger, - }); - - gestaltGraph = await GestaltGraph.createGestaltGraph({ - db: db, - acl: acl, - logger: 
logger, - }); - - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger: logger, - fresh: true, + dbPath: path.join(dataDir, 'DB'), + logger: logger.getChild(DB.name), }); }); + afterEach(async () => { - await vaultManager.stop(); - await gestaltGraph.stop(); - await acl.stop(); await db.stop(); - await nodeConnectionManager.stop(); - await nodeGraph.stop(); - await keyManager.stop(); + await db.destroy(); await fs.promises.rm(dataDir, { force: true, recursive: true, }); }); - afterAll(async () => { - mockedGenerateKeyPair.mockRestore(); - mockedGenerateDeterministicKeyPair.mockRestore(); - await fwdProxy.stop(); - }); test('VaultManager readiness', async () => { - await expect(vaultManager.destroy()).rejects.toThrow( - vaultErrors.ErrorVaultManagerRunning, - ); - // Should be a noop - await vaultManager.start(); - await vaultManager.stop(); - await vaultManager.destroy(); - await expect(vaultManager.start()).rejects.toThrow( - vaultErrors.ErrorVaultManagerDestroyed, - ); - await expect(async () => { - await vaultManager.listVaults(); - }).rejects.toThrow(vaultErrors.ErrorVaultManagerNotRunning); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + await expect(vaultManager.destroy()).rejects.toThrow( + vaultsErrors.ErrorVaultManagerRunning, + ); + // Should be a noop + await vaultManager.start(); + await vaultManager.stop(); + await vaultManager.destroy(); + await expect(vaultManager.start()).rejects.toThrow( + vaultsErrors.ErrorVaultManagerDestroyed, + ); + await expect(async () => { + await vaultManager.listVaults(); + }).rejects.toThrow(vaultsErrors.ErrorVaultManagerNotRunning); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); - test('is type correct', () => { - expect(vaultManager).toBeInstanceOf(VaultManager); + test('is type correct', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + expect(vaultManager).toBeInstanceOf(VaultManager); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); - test( - 'can create many vaults and open a vault', - async () => { - const vault = await vaultManager.createVault(vaultName); - const theVault = await vaultManager.openVault(vault.vaultId); - expect(vault).toBe(theVault); - await expect(() => - vaultManager.openVault(nonExistantVaultId), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + test('can create many vaults and open a vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { const vaultNames = [ 'Vault1', 'Vault2', @@ -235,75 +166,119 @@ describe('VaultManager', () 
=> { 'Vault13', 'Vault14', 'Vault15', - 'Vault16', - 'Vault17', - 'Vault18', - 'Vault19', - 'Vault20', ]; for (const vaultName of vaultNames) { await vaultManager.createVault(vaultName as VaultName); } - expect((await vaultManager.listVaults()).size).toEqual( - vaultNames.length + 1, - ); - }, - global.defaultTimeout * 2, - ); - test('can open the same vault twice and perform mutations', async () => { - const vault = await vaultManager.createVault(vaultName); - const vaultCopyOne = await vaultManager.openVault(vault.vaultId); - const vaultCopyTwo = await vaultManager.openVault(vault.vaultId); - expect(vaultCopyOne).toBe(vaultCopyTwo); - await vaultCopyOne.commit(async (efs) => { - await efs.writeFile('test', 'test'); - }); - const read = await vaultCopyTwo.access(async (efs) => { - return (await efs.readFile('test', { encoding: 'utf8' })) as string; - }); - expect(read).toBe('test'); + expect((await vaultManager.listVaults()).size).toEqual(vaultNames.length); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can rename a vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await vaultManager.renameVault(vault.vaultId, secondVaultName as VaultName); - await expect(vaultManager.openVault(vault.vaultId)).resolves.toBe(vault); - await expect(vaultManager.getVaultId(vaultName)).resolves.toBeUndefined(); - await expect( - vaultManager.getVaultId(secondVaultName), - ).resolves.toStrictEqual(vault.vaultId); - await expect(() => - vaultManager.renameVault(nonExistantVaultId, 'DNE' as VaultName), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + // We can rename the vault here + await vaultManager.renameVault(vaultId, secondVaultName); + await expect(vaultManager.getVaultId(vaultName)).resolves.toBeUndefined(); + await expect( + vaultManager.getVaultId(secondVaultName), + ).resolves.toStrictEqual(vaultId); + // Can't rename a non-existent vault + await expect(() => + vaultManager.renameVault(nonExistentVaultId, 'DNE' as VaultName), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + await vaultManager.createVault(thirdVaultName); + // Can't rename a vault to a name that already exists + await expect( + vaultManager.renameVault(vaultId, thirdVaultName), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultDefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can delete a vault', async () => { - const firstVault = await vaultManager.createVault(vaultName); - const secondVault = await vaultManager.createVault(secondVaultName); - await vaultManager.destroyVault(secondVault.vaultId); - await expect(vaultManager.openVault(firstVault.vaultId)).resolves.toBe( - firstVault, - ); - await expect(() => - vaultManager.openVault(secondVault.vaultId), - ).rejects.toThrow(vaultErrors.ErrorVaultUndefined); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: 
logger.getChild(VaultManager.name), + }); + try { + expect((await vaultManager.listVaults()).size).toBe(0); + const secondVaultId = await vaultManager.createVault(secondVaultName); + // @ts-ignore: protected method + const vault = await vaultManager.getVault(secondVaultId); + await vaultManager.destroyVault(secondVaultId); + // The mapping should be gone + expect((await vaultManager.listVaults()).size).toBe(0); + // The vault should be destroyed + expect(vault[destroyed]).toBe(true); + // Metadata should be gone + expect(await vaultManager.getVaultMeta(secondVaultId)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can list vaults', async () => { - const firstVault = await vaultManager.createVault(vaultName); - const secondVault = await vaultManager.createVault(secondVaultName); - const vaultNames: Array = []; - const vaultIds: Array = []; - const vaultList = await vaultManager.listVaults(); - vaultList.forEach((vaultId, vaultName) => { - vaultNames.push(vaultName); - vaultIds.push(vaultId.toString()); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - expect(vaultNames.sort()).toEqual([vaultName, secondVaultName].sort()); - expect(vaultIds.sort()).toEqual( - [firstVault.vaultId.toString(), secondVault.vaultId.toString()].sort(), - ); + try { + const firstVaultId = await vaultManager.createVault(vaultName); + const secondVaultId = await vaultManager.createVault(secondVaultName); + const vaultNames: Array = []; + const vaultIds: Array = []; + const vaultList = await vaultManager.listVaults(); + vaultList.forEach((vaultId, vaultName) => { + vaultNames.push(vaultName); + vaultIds.push(vaultId.toString()); + }); + expect(vaultNames.sort()).toEqual([vaultName, secondVaultName].sort()); + expect(vaultIds.sort()).toEqual( + [firstVaultId.toString(), secondVaultId.toString()].sort(), + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); - test( - 'able to read and load existing metadata', - async () => { + test('able to read and load existing metadata', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { const vaultNames = [ 'Vault1', 'Vault2', @@ -322,819 +297,1621 @@ describe('VaultManager', () => { const vaults = await vaultManager.listVaults(); const vaultId = vaults.get('Vault1' as VaultName) as VaultId; expect(vaultId).not.toBeUndefined(); - const vault = await vaultManager.openVault(vaultId); - expect(vault).toBeTruthy(); await vaultManager.stop(); - await db.stop(); - await db.start(); - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - gestaltGraph, - acl, - db, - logger, - }); + await vaultManager.start(); const restartedVaultNames: Array = []; const vaultList = await vaultManager.listVaults(); vaultList.forEach((_, vaultName) => { restartedVaultNames.push(vaultName); }); expect(restartedVaultNames.sort()).toEqual(vaultNames.sort()); - }, - 
global.defaultTimeout * 2, - ); - test.skip('cannot concurrently create the same vault', async () => { - const vaults = Promise.all([ - vaultManager.createVault(vaultName), - vaultManager.createVault(vaultName), - ]); - await expect(() => vaults).rejects.toThrow(vaultErrors.ErrorVaultDefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('cannot concurrently create vaults with the same name', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaults = Promise.all([ + vaultManager.createVault(vaultName), + vaultManager.createVault(vaultName), + ]); + await expect(() => vaults).rejects.toThrow( + vaultsErrors.ErrorVaultsVaultDefined, + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can concurrently rename the same vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await Promise.all([ - vaultManager.renameVault(vault.vaultId, secondVaultName), - vaultManager.renameVault(vault.vaultId, thirdVaultName), - ]); - await expect(vaultManager.getVaultName(vault.vaultId)).resolves.toBe( - thirdVaultName, - ); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await Promise.all([ + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.renameVault(vaultId, thirdVaultName), + ]); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) + ?.vaultName; + expect(vaultNameTest).toBe(thirdVaultName); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can concurrently open and rename the same vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await Promise.all([ - vaultManager.renameVault(vault.vaultId, secondVaultName), - vaultManager.openVault(vault.vaultId), - ]); - await expect(vaultManager.getVaultName(vault.vaultId)).resolves.toBe( - secondVaultName, - ); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await Promise.all([ + vaultManager.renameVault(vaultId, secondVaultName), + vaultManager.withVaults([vaultId], async (vault) => vault.vaultId), + ]); + const vaultNameTest = (await vaultManager.getVaultMeta(vaultId)) + ?.vaultName; + expect(vaultNameTest).toBe(secondVaultName); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); test('can save the commit state of a vault', async () => { - const vault = await vaultManager.createVault(vaultName); - await vault.commit(async (efs) => { - await efs.writeFile('test', 'test'); - }); - await vaultManager.closeVault(vault.vaultId); - 
await vaultManager.stop(); - vaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, + const vaultManager = await VaultManager.createVaultManager({ vaultsPath, - vaultsKey, - nodeConnectionManager, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger, + logger: logger.getChild(VaultManager.name), }); - const vaultLoaded = await vaultManager.openVault(vault.vaultId); - const read = await vaultLoaded.access(async (efs) => { - return await efs.readFile('test', { encoding: 'utf8' }); - }); - expect(read).toBe('test'); - }); - test( - 'able to recover metadata after complex operations', - async () => { - const vaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault5', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault9', - 'Vault10', - ]; - const alteredVaultNames = [ - 'Vault1', - 'Vault2', - 'Vault3', - 'Vault4', - 'Vault6', - 'Vault7', - 'Vault8', - 'Vault10', - 'ThirdImpact', - 'Cake', - ]; - for (const vaultName of vaultNames) { - await vaultManager.createVault(vaultName as VaultName); - } - const v10 = await vaultManager.getVaultId('Vault10' as VaultName); - expect(v10).not.toBeUndefined(); - await vaultManager.destroyVault(v10!); - const v5 = await vaultManager.getVaultId('Vault5' as VaultName); - expect(v5).not.toBeUndefined(); - await vaultManager.destroyVault(v5!); - const v9 = await vaultManager.getVaultId('Vault9' as VaultName); - expect(v9).toBeTruthy(); - await vaultManager.renameVault(v9!, 'Vault10' as VaultName); - await vaultManager.createVault('ThirdImpact' as VaultName); - await vaultManager.createVault('Cake' as VaultName); - const vn: Array = []; - (await vaultManager.listVaults()).forEach((_, vaultName) => - vn.push(vaultName), - ); - expect(vn.sort()).toEqual(alteredVaultNames.sort()); + try { + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.withVaults([vaultId], async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('test', 'test'); + }); + }); + await vaultManager.stop(); - await db.stop(); + await vaultManager.start(); - await db.start(); - const vaultManagerReloaded = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath, - vaultsKey, - nodeConnectionManager, - db, - acl: acl, - gestaltGraph: gestaltGraph, - fs, - logger, - }); - await vaultManagerReloaded.createVault('Pumpkin' as VaultName); - const v102 = await vaultManagerReloaded.getVaultId( - 'Vault10' as VaultName, - ); - expect(v102).not.toBeUndefined(); - alteredVaultNames.push('Pumpkin'); - expect((await vaultManagerReloaded.listVaults()).size).toEqual( - alteredVaultNames.length, - ); - const vnAltered: Array = []; - (await vaultManagerReloaded.listVaults()).forEach((_, vaultName) => - vnAltered.push(vaultName), + const read = await vaultManager.withVaults( + [vaultId], + async (vaultLoaded) => { + return await vaultLoaded.readF(async (efs) => { + return await efs.readFile('test', { encoding: 'utf8' }); + }); + }, ); - expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); - await vaultManagerReloaded.stop(); - }, - global.defaultTimeout * 2, - ); - // Test('able to update the default node repo to pull from', async () => { - // await vaultManager.start({}); - // const vault1 = await vaultManager.createVault('MyTestVault'); - // const vault2 = await vaultManager.createVault('MyOtherTestVault'); 
- // const noNode = await vaultManager.getDefaultNode(vault1.vaultId); - // expect(noNode).toBeUndefined(); - // await vaultManager.setDefaultNode(vault1.vaultId, 'abc' as NodeId); - // const node = await vaultManager.getDefaultNode(vault1.vaultId); - // const noNode2 = await vaultManager.getDefaultNode(vault2.vaultId); - // expect(node).toBe('abc'); - // expect(noNode2).toBeUndefined(); - // await vaultManager.stop(); - // }); - // test('checking gestalt permissions for vaults', async () => { - // const node1: NodeInfo = { - // id: '123' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node2: NodeInfo = { - // id: '345' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node3: NodeInfo = { - // id: '678' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const node4: NodeInfo = { - // id: '890' as NodeId, - // chain: { nodes: {}, identities: {} } as ChainData, - // }; - // const id1: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'abc' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - // const id2: IdentityInfo = { - // providerId: 'github.com' as ProviderId, - // identityId: 'def' as IdentityId, - // claims: { - // nodes: {}, - // } as ChainData, - // }; - // - // await gestaltGraph.setNode(node1); - // await gestaltGraph.setNode(node2); - // await gestaltGraph.setNode(node3); - // await gestaltGraph.setNode(node4); - // await gestaltGraph.setIdentity(id1); - // await gestaltGraph.setIdentity(id2); - // await gestaltGraph.linkNodeAndNode(node1, node2); - // await gestaltGraph.linkNodeAndIdentity(node1, id1); - // await gestaltGraph.linkNodeAndIdentity(node4, id2); - // - // await vaultManager.start({}); - // const vault = await vaultManager.createVault('Test'); - // await vaultManager.setVaultPermissions('123' as NodeId, vault.vaultId); - // let record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeNull(); - // expect(record['345']['pull']).toBeNull(); - // expect(record['678']).toBeUndefined(); - // expect(record['890']).toBeUndefined(); - // - // await vaultManager.unsetVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeUndefined(); - // - // await gestaltGraph.unlinkNodeAndNode(node1.id, node2.id); - // await vaultManager.setVaultPermissions('345' as NodeId, vault.vaultId); - // record = await vaultManager.getVaultPermissions(vault.vaultId); - // expect(record).not.toBeUndefined(); - // expect(record['123']['pull']).toBeUndefined(); - // expect(record['345']['pull']).toBeNull(); - // - // await vaultManager.stop(); - // }); - // /* TESTING TODO: - // * Changing the default node to pull from - // */ - describe('interacting with another node to', () => { - let targetDataDir: string, altDataDir: string; - let targetKeyManager: KeyManager, altKeyManager: KeyManager; - let targetFwdProxy: ForwardProxy; - let targetDb: DB, altDb: DB; - let targetACL: ACL, altACL: ACL; - let targetGestaltGraph: GestaltGraph, altGestaltGraph: GestaltGraph; - let targetNodeGraph: NodeGraph, altNodeGraph: NodeGraph; - let targetNodeConnectionManager: NodeConnectionManager, - altNodeConnectionManager: NodeConnectionManager; - let targetNodeManager: 
NodeManager, altNodeManager: NodeManager; - let targetVaultManager: VaultManager, altVaultManager: VaultManager; - let targetSigchain: Sigchain, altSigchain: Sigchain; - let targetNotificationsManager: NotificationsManager, - altNotificationsManager: NotificationsManager; - - let targetNodeId: NodeId, altNodeId: NodeId; - let revTLSConfig: TLSConfig, altRevTLSConfig: TLSConfig; - - let targetAgentService: IAgentServiceServer, - altAgentService: IAgentServiceServer; - let targetAgentServer: GRPCServer, altAgentServer: GRPCServer; - - let node: NodeInfo; - - let altFwdProxy: ForwardProxy; + expect(read).toBe('test'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Do actions on a vault using `withVault`', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vaults = [vault1, vault2]; - beforeAll(async () => { - altFwdProxy = new ForwardProxy({ - authToken: 'abc', - logger: logger, + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + expect(vault1.vaultId).toEqual(vaults[0]); + expect(vault2.vaultId).toEqual(vaults[1]); + await vault1.writeF(async (fs) => { + await fs.writeFile('test', 'test1'); + }); + await vault2.writeF(async (fs) => { + await fs.writeFile('test', 'test2'); + }); }); - }); - beforeEach(async () => { - targetDataDir = await fs.promises.mkdtemp( + await vaultManager.withVaults(vaults, async (vault1, vault2) => { + const a = await vault1.readF((fs) => { + return fs.readFile('test'); + }); + const b = await vault2.readF((fs) => { + return fs.readFile('test'); + }); + + expect(a.toString()).toEqual('test1'); + expect(b.toString()).toEqual('test2'); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + describe('With remote agents', () => { + let allDataDir: string; + let keyManager: KeyManager; + let fwdProxy: ForwardProxy; + let nodeGraph: NodeGraph; + let nodeConnectionManager: NodeConnectionManager; + let remoteKeynode1: PolykeyAgent, remoteKeynode2: PolykeyAgent; + let localNodeId: NodeId; + let localNodeIdEncoded: NodeIdEncoded; + + beforeAll(async () => { + // Creating agents + allDataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - targetKeyManager = await KeyManager.createKeyManager({ + + remoteKeynode1 = await PolykeyAgent.createPolykeyAgent({ password, - keysPath: path.join(targetDataDir, 'keys'), - fs: fs, - logger: logger, + logger: logger.getChild('Remote Keynode 1'), + nodePath: path.join(allDataDir, 'remoteKeynode1'), }); - targetNodeId = targetKeyManager.getNodeId(); - revTLSConfig = { - keyPrivatePem: targetKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await targetKeyManager.getRootCertChainPem(), - }; - node = { - id: nodesUtils.encodeNodeId(keyManager.getNodeId()), - chain: { nodes: {}, identities: {} } as ChainData, - }; - targetFwdProxy = new ForwardProxy({ - authToken: '', - logger: logger, + remoteKeynode1Id = remoteKeynode1.keyManager.getNodeId(); + remoteKeynode1IdEncoded = nodesUtils.encodeNodeId(remoteKeynode1Id); + remoteKeynode2 = await 
PolykeyAgent.createPolykeyAgent({ + password, + logger: logger.getChild('Remote Keynode 2'), + nodePath: path.join(allDataDir, 'remoteKeynode2'), }); - targetDb = await DB.createDB({ - dbPath: path.join(targetDataDir, 'db'), - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, + remoteKeynode2Id = remoteKeynode2.keyManager.getNodeId(); + remoteKeynode2IdEncoded = nodesUtils.encodeNodeId(remoteKeynode2Id); + + // Adding details to each agent + await remoteKeynode1.nodeGraph.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), }); - targetSigchain = await Sigchain.createSigchain({ - keyManager: targetKeyManager, - db: targetDb, - logger: logger, + await remoteKeynode2.nodeGraph.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), }); - targetNodeGraph = await NodeGraph.createNodeGraph({ - db: targetDb, - keyManager: targetKeyManager, - logger: logger, + + await remoteKeynode1.gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, }); - targetNodeConnectionManager = new NodeConnectionManager({ - keyManager: targetKeyManager, - nodeGraph: targetNodeGraph, - fwdProxy: targetFwdProxy, - revProxy: revProxy, - logger: logger, + await remoteKeynode2.gestaltGraph.setNode({ + id: remoteKeynode1IdEncoded, + chain: {}, }); - targetNodeManager = new NodeManager({ - db: targetDb, - sigchain: targetSigchain, - keyManager: targetKeyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: targetNodeConnectionManager, - logger: logger, + }); + afterAll(async () => { + await remoteKeynode2.stop(); + await remoteKeynode2.destroy(); + await remoteKeynode1.stop(); + await remoteKeynode1.destroy(); + await fs.promises.rm(allDataDir, { + recursive: true, + force: true, }); - targetACL = await ACL.createACL({ - db: targetDb, - logger: logger, + }); + beforeEach(async () => { + remoteVaultId = await remoteKeynode1.vaultManager.createVault(vaultName); + + await remoteKeynode1.gestaltGraph.stop(); + await remoteKeynode1.gestaltGraph.start({ fresh: true }); + await remoteKeynode1.acl.stop(); + await remoteKeynode1.acl.start({ fresh: true }); + + nodeGraph = await NodeGraph.createNodeGraph({ + db, + keyManager: dummyKeyManager, + logger, }); - targetNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: targetACL, - db: targetDb, - nodeConnectionManager: targetNodeConnectionManager, - nodeManager: targetNodeManager, - keyManager: targetKeyManager, - messageCap: 5, - logger: logger, - }); - targetGestaltGraph = await GestaltGraph.createGestaltGraph({ - db: targetDb, - acl: targetACL, - logger: logger, + fwdProxy = new ForwardProxy({ + authToken: 'auth', + logger, }); - await targetGestaltGraph.setNode(node); - const targetVaultKey = await vaultUtils.generateVaultKey(); - targetVaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath: path.join(targetDataDir, 'vaults'), - vaultsKey: targetVaultKey, - nodeConnectionManager: targetNodeConnectionManager, - db: targetDb, - acl: targetACL, - gestaltGraph: targetGestaltGraph, - logger: logger, - fresh: true, + + keyManager = await KeyManager.createKeyManager({ + keysPath: path.join(allDataDir, 'allKeyManager'), + password: 'password', + logger, }); - targetAgentService = createAgentService({ - keyManager: targetKeyManager, - vaultManager: 
targetVaultManager, - nodeManager: targetNodeManager, - nodeGraph: targetNodeGraph, - sigchain: targetSigchain, - notificationsManager: targetNotificationsManager, - nodeConnectionManager, + localNodeId = keyManager.getNodeId(); + localNodeIdEncoded = nodesUtils.encodeNodeId(localNodeId); + + const tlsConfig: TLSConfig = { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }; + + await fwdProxy.start({ tlsConfig }); + const dummyRevProxy = { + getIngressHost: () => 'localhost' as Host, + getIngressPort: () => 0 as Port, + } as ReverseProxy; + + nodeConnectionManager = new NodeConnectionManager({ + keyManager, + nodeGraph, + fwdProxy, + revProxy: dummyRevProxy, + logger, }); - targetAgentServer = new GRPCServer({ - logger: logger, + await nodeConnectionManager.start(); + + await nodeGraph.setNode(remoteKeynode1Id, { + host: remoteKeynode1.revProxy.getIngressHost(), + port: remoteKeynode1.revProxy.getIngressPort(), }); - await targetAgentServer.start({ - services: [[AgentServiceService, targetAgentService]], - host: targetHost, + await nodeGraph.setNode(remoteKeynode2Id, { + host: remoteKeynode2.revProxy.getIngressHost(), + port: remoteKeynode2.revProxy.getIngressPort(), }); + }); + afterEach(async () => { + await remoteKeynode1.vaultManager.destroyVault(remoteVaultId); + await nodeConnectionManager.stop(); + await fwdProxy.stop(); + await nodeGraph.stop(); + await nodeGraph.destroy(); + await keyManager.stop(); + await keyManager.destroy(); + }); - altDataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - altKeyManager = await KeyManager.createKeyManager({ - password, - keysPath: path.join(altDataDir, 'keys'), - fs: fs, - logger: logger, + test('clone vaults from a remote keynode using a vault name', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altNodeId = altKeyManager.getNodeId(); - await targetGestaltGraph.setNode({ - id: nodesUtils.encodeNodeId(altNodeId), - chain: {}, + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + 
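// Both remote secrets should be present in the clone +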
expect(secretsList).toContain('secret-2'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('clone vaults from a remote keynode using a vault name with no history', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altRevTLSConfig = { - keyPrivatePem: altKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await altKeyManager.getRootCertChainPem(), - }; - await altFwdProxy.start({ - tlsConfig: { - keyPrivatePem: altKeyManager.getRootKeyPairPem().privateKey, - certChainPem: await altKeyManager.getRootCertChainPem(), - }, - egressHost: altHost, - egressPort: altPort, + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('fails to clone a non-existent vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altDb = await DB.createDB({ - dbPath: path.join(altDataDir, 'db'), - logger: logger, - crypto: { - key: keyManager.dbKey, - ops: { - encrypt: keysUtils.encryptWithKey, - decrypt: keysUtils.decryptWithKey, - }, - }, + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await expect(() => + vaultManager.cloneVault( + remoteKeynode1Id, + 'not-existing' as VaultName, + ), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('clone and pull vaults using a vault id', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altSigchain = await Sigchain.createSigchain({ - keyManager: altKeyManager, - db: altDb, - logger: logger, + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await 
remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + const [file, secretsList] = await vaultManager.withVaults( + [vaultId], + async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + return [file, secretsList]; + }); + }, + ); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('should reject cloning when permissions are not set', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altNodeGraph = await NodeGraph.createNodeGraph({ - db: altDb, - keyManager: altKeyManager, - logger: logger, + try { + // Should reject with no permissions set + await expect(() => + vaultManager.cloneVault(remoteKeynode1Id, remoteVaultId), + ).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // No new vault created + expect((await vaultManager.listVaults()).size).toBe(0); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('should reject Pulling when permissions are not set', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altNodeConnectionManager = new NodeConnectionManager({ - keyManager: altKeyManager, - nodeGraph: altNodeGraph, - fwdProxy: altFwdProxy, - revProxy: altRevProxy, - logger: logger, + try { + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + + const clonedVaultId = await vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + + await expect(() => + vaultManager.pullVault({ vaultId: clonedVaultId }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('can pull a cloned vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await altNodeConnectionManager.start(); - altNodeManager = new NodeManager({ - db: altDb, - sigchain: altSigchain, - 
keyManager: altKeyManager, - nodeGraph: nodeGraph, - nodeConnectionManager: altNodeConnectionManager, - logger: logger, + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).not.toContain('secret-2'); + }); + }); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Pulling vault + await vaultManager.pullVault({ + vaultId: vaultId, + }); + + // Should have new data + await vaultManager.withVaults([vaultId], async (vaultClone) => { + return await vaultClone.readF(async (efs) => { + const file = await efs.readFile('secret-1', { encoding: 'utf8' }); + const secretsList = await efs.readdir('.'); + expect(file).toBe('secret1'); + expect(secretsList).toContain('secret-1'); + expect(secretsList).toContain('secret-2'); + }); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('manage pulling from different remotes', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altACL = await ACL.createACL({ - db: altDb, - logger: logger, + try { + // Initial history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[0], 'success?'); + await efs.writeFile(secretNames[1], 'success?'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await remoteKeynode1.gestaltGraph.setNode({ + id: remoteKeynode2IdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + remoteKeynode2Id, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + remoteKeynode2Id, + 'pull', + ); + 
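+ // Remote keynode 2 clones the vault from remote keynode 1 first, so the local node can then clone and pull the same vault through a second remote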
+ const clonedVaultRemote2Id = + await remoteKeynode2.vaultManager.cloneVault( + remoteKeynode1Id, + remoteVaultId, + ); + + await remoteKeynode2.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode2.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'clone', + ); + await remoteKeynode2.acl.setVaultAction( + clonedVaultRemote2Id, + localNodeId, + 'pull', + ); + const vaultCloneId = await vaultManager.cloneVault( + remoteKeynode2Id, + clonedVaultRemote2Id, + ); + + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[2], 'success?'); + }); + }, + ); + await vaultManager.pullVault({ + vaultId: vaultCloneId, + pullNodeId: remoteKeynode1Id, + pullVaultNameOrId: vaultName, + }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.slice(0, 3).sort(), + ); + }); + }); + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (remoteVault) => { + await remoteVault.writeF(async (efs) => { + await efs.writeFile(secretNames[3], 'second success?'); + }); + }, + ); + await vaultManager.pullVault({ vaultId: vaultCloneId }); + await vaultManager.withVaults([vaultCloneId], async (vaultClone) => { + await vaultClone.readF(async (efs) => { + expect((await efs.readdir('.')).sort()).toStrictEqual( + secretNames.sort(), + ); + }); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('able to recover metadata after complex operations', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altNotificationsManager = - await NotificationsManager.createNotificationsManager({ - acl: altACL, - db: altDb, - nodeConnectionManager: altNodeConnectionManager, - nodeManager: altNodeManager, - keyManager: altKeyManager, - messageCap: 5, - logger: logger, + try { + const vaultNames = ['Vault1', 'Vault2', 'Vault3', 'Vault4', 'Vault5']; + const alteredVaultNames = [ + 'Vault1', + 'Vault2', + 'Vault3', + 'Vault6', + 'Vault10', + ]; + for (const vaultName of vaultNames) { + await vaultManager.createVault(vaultName as VaultName); + } + const v5 = await vaultManager.getVaultId('Vault5' as VaultName); + expect(v5).not.toBeUndefined(); + await vaultManager.destroyVault(v5!); + const v4 = await vaultManager.getVaultId('Vault4' as VaultName); + expect(v4).toBeTruthy(); + await vaultManager.renameVault(v4!, 'Vault10' as VaultName); + const v6 = await vaultManager.createVault('Vault6' as VaultName); + + await vaultManager.withVaults([v6], async (vault6) => { + await vault6.writeF(async (efs) => { + await efs.writeFile('reloaded', 'reload'); + }); }); - altGestaltGraph = await GestaltGraph.createGestaltGraph({ - db: altDb, - acl: altACL, - logger: logger, + + const vn: Array<VaultName> = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vn.push(vaultName), + ); + expect(vn.sort()).toEqual(alteredVaultNames.sort()); + await vaultManager.stop(); + await vaultManager.start(); + await vaultManager.createVault('Vault7' as VaultName); + + 
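// Vault10 (Vault4 renamed before the restart) should still resolve from persisted metadata + 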
const v10 = await vaultManager.getVaultId('Vault10' as VaultName); + expect(v10).not.toBeUndefined(); + alteredVaultNames.push('Vault7'); + expect((await vaultManager.listVaults()).size).toEqual( + alteredVaultNames.length, + ); + const vnAltered: Array<VaultName> = []; + (await vaultManager.listVaults()).forEach((_, vaultName) => + vnAltered.push(vaultName), + ); + expect(vnAltered.sort()).toEqual(alteredVaultNames.sort()); + const file = await vaultManager.withVaults( + [v6], + async (reloadedVault) => { + return await reloadedVault.readF(async (efs) => { + return await efs.readFile('reloaded', { encoding: 'utf8' }); + }); + }, + ); + + expect(file).toBe('reload'); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('throw when trying to commit to a cloned vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - await altGestaltGraph.setNode(node); - const altVaultKey = await vaultUtils.generateVaultKey(); - altVaultManager = await VaultManager.createVaultManager({ - keyManager: keyManager, - vaultsPath: path.join(altDataDir, 'vaults'), - vaultsKey: altVaultKey, - nodeConnectionManager: altNodeConnectionManager, - db: altDb, - acl: altACL, - gestaltGraph: altGestaltGraph, - logger: logger, + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + await vaultManager.withVaults([vaultId], async (vaultClone) => { + await expect( + vaultClone.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultRemoteDefined); + }); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test("pulling a vault that isn't remote", async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altAgentService = createAgentService({ - keyManager: altKeyManager, - vaultManager: altVaultManager, - nodeManager: altNodeManager, - nodeGraph: altNodeGraph, - sigchain: altSigchain, - notificationsManager: altNotificationsManager, + try { + // Creating a local vault that has no remote set + const vaultId = await vaultManager.createVault('testVault1'); + await expect(vaultManager.pullVault({ vaultId })).rejects.toThrow( + vaultsErrors.ErrorVaultRemoteUndefined, + ); + } finally { + await vaultManager?.stop(); + await 
vaultManager?.destroy(); + } + }); + test('pullVault respects locking', async () => { + // This should respect the VaultManager read lock + // and the VaultInternal write lock + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, nodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); - altAgentServer = new GRPCServer({ - logger: logger, + const pullVaultMock = jest.spyOn(VaultInternal.prototype, 'pullVault'); + const gitPullMock = jest.spyOn(git, 'pull'); + try { + // Creating some state at the remote + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-1', 'secret1'); + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // Setting permissions + await remoteKeynode1.gestaltGraph.setNode({ + id: localNodeIdEncoded, + chain: {}, + }); + await remoteKeynode1.gestaltGraph.setGestaltActionByNode( + localNodeId, + 'scan', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'clone', + ); + await remoteKeynode1.acl.setVaultAction( + remoteVaultId, + localNodeId, + 'pull', + ); + + await vaultManager.cloneVault(remoteKeynode1Id, vaultName); + const vaultId = await vaultManager.getVaultId(vaultName); + if (vaultId === undefined) fail('VaultId is not found.'); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-2', 'secret2'); + }); + }, + ); + + // @ts-ignore: kidnap the vaultManager map and grab the lock + const vaultsMap = vaultManager.vaultMap; + const vaultAndLock = vaultsMap.get(vaultId.toString() as VaultIdString); + const lock = vaultAndLock!.lock; + const releaseWrite = await lock.acquireWrite(); + + // Pulling vault respects VaultManager write lock + const pullP = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(pullVaultMock).not.toHaveBeenCalled(); + await releaseWrite(); + await pullP; + expect(pullVaultMock).toHaveBeenCalled(); + pullVaultMock.mockClear(); + + // Creating new history + await remoteKeynode1.vaultManager.withVaults( + [remoteVaultId], + async (vault) => { + await vault.writeF(async (efs) => { + await efs.writeFile('secret-3', 'secret3'); + }); + }, + ); + + // Respects VaultInternal write lock + const vault = vaultAndLock!.vault!; + // @ts-ignore: kidnap vault lock + const vaultLock = vault.lock; + const releaseVaultWrite = await vaultLock.acquireWrite(); + // Pulling vault respects the VaultInternal write lock + gitPullMock.mockClear(); + const pullP2 = vaultManager.pullVault({ + vaultId: vaultId, + }); + await sleep(200); + expect(gitPullMock).not.toHaveBeenCalled(); + await releaseVaultWrite(); + await pullP2; + expect(gitPullMock).toHaveBeenCalled(); + } finally { + pullVaultMock.mockRestore(); + gitPullMock.mockRestore(); + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + }); + test('handleScanVaults should list all vaults with permissions', async () => { + // 1. 
we need to set up state + const acl = await ACL.createACL({ + db, + logger, + }); + const gestaltGraph = await GestaltGraph.createGestaltGraph({ + db, + acl, + logger, + }); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + nodeConnectionManager: {} as NodeConnectionManager, + acl, + gestaltGraph, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Setting up state + const nodeId1 = testsUtils.generateRandomNodeId(); + const nodeId2 = testsUtils.generateRandomNodeId(); + await gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId1), + chain: {}, }); - await altAgentServer.start({ - services: [[AgentServiceService, altAgentService]], - host: altHostIn, + await gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId2), + chain: {}, }); + await gestaltGraph.setGestaltActionByNode(nodeId1, 'scan'); + + const vault1 = await vaultManager.createVault('testVault1' as VaultName); + const vault2 = await vaultManager.createVault('testVault2' as VaultName); + const vault3 = await vaultManager.createVault('testVault3' as VaultName); - await revProxy.start({ - serverHost: targetHost, - serverPort: targetAgentServer.getPort(), - ingressHost: targetHost, - ingressPort: targetPort, - tlsConfig: revTLSConfig, + // Setting permissions + await acl.setVaultAction(vault1, nodeId1, 'clone'); + await acl.setVaultAction(vault1, nodeId1, 'pull'); + await acl.setVaultAction(vault2, nodeId1, 'clone'); + // No permissions for vault3 + + // scanning vaults + const gen = vaultManager.handleScanVaults(nodeId1); + const vaults: Record = {}; + for await (const vault of gen) { + vaults[vault.vaultId] = [vault.vaultName, vault.vaultPermissions]; + } + expect(vaults[vault1]).toEqual(['testVault1', ['clone', 'pull']]); + expect(vaults[vault2]).toEqual(['testVault2', ['clone']]); + expect(vaults[vault3]).toBeUndefined(); + + // Should throw due to no permission + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // Should throw due to lack of scan permission + await gestaltGraph.setGestaltActionByNode(nodeId2, 'notify'); + await expect(async () => { + for await (const _ of vaultManager.handleScanVaults(nodeId2)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + } finally { + await vaultManager.stop(); + await vaultManager.destroy(); + await gestaltGraph.stop(); + await gestaltGraph.destroy(); + await acl.stop(); + await acl.destroy(); + } + }); + test('ScanVaults should get all vaults with permissions from remote node', async () => { + // 1. 
we need to set up state + const remoteAgent = await PolykeyAgent.createPolykeyAgent({ + password: 'password', + nodePath: path.join(dataDir, 'remoteNode'), + logger, + }); + const acl = await ACL.createACL({ + db, + logger, + }); + const gestaltGraph = await GestaltGraph.createGestaltGraph({ + db, + acl, + logger, + }); + const nodeGraph = await NodeGraph.createNodeGraph({ + db, + keyManager: dummyKeyManager, + logger, + }); + const fwdProxy = new ForwardProxy({ + authToken: 'auth', + logger, + }); + const keyManager = await KeyManager.createKeyManager({ + keysPath: path.join(dataDir, 'keys'), + password: 'password', + logger, + }); + await fwdProxy.start({ + tlsConfig: { + keyPrivatePem: keyManager.getRootKeyPairPem().privateKey, + certChainPem: await keyManager.getRootCertChainPem(), + }, + }); + const nodeConnectionManager = new NodeConnectionManager({ + keyManager, + logger, + nodeGraph, + fwdProxy, + revProxy: {} as ReverseProxy, + connConnectTime: 1000, + }); + await nodeConnectionManager.start(); + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager, + nodeConnectionManager, + acl, + gestaltGraph, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // Setting up state + const targetNodeId = remoteAgent.keyManager.getNodeId(); + const nodeId1 = keyManager.getNodeId(); + + // Letting nodeGraph know where the remote agent is + await nodeGraph.setNode(targetNodeId, { + host: 'localhost' as Host, + port: remoteAgent.revProxy.getIngressPort(), }); - await altRevProxy.start({ - serverHost: altHostIn, - serverPort: altAgentServer.getPort(), - ingressHost: altHostIn, - ingressPort: altPortIn, - tlsConfig: altRevTLSConfig, + await remoteAgent.gestaltGraph.setNode({ + id: nodesUtils.encodeNodeId(nodeId1), + chain: {}, }); - }, global.polykeyStartupTimeout * 2); - afterEach(async () => { - await revProxy.closeConnection(altHost, altPort); - await revProxy.closeConnection(sourceHost, sourcePort); - await altRevProxy.closeConnection(sourceHost, sourcePort); - await fwdProxy.closeConnection( - fwdProxy.getEgressHost(), - fwdProxy.getEgressPort(), + const vault1 = await remoteAgent.vaultManager.createVault( + 'testVault1' as VaultName, ); - await altFwdProxy.closeConnection( - altFwdProxy.getEgressHost(), - altFwdProxy.getEgressPort(), + const vault2 = await remoteAgent.vaultManager.createVault( + 'testVault2' as VaultName, ); - await revProxy.stop(); - await altRevProxy.stop(); - await targetAgentServer.stop(); - await targetVaultManager.stop(); - await targetGestaltGraph.stop(); - await targetNotificationsManager.stop(); - await targetACL.stop(); - await targetDb.stop(); - await targetNodeConnectionManager.stop(); - await targetNodeGraph.stop(); - await targetKeyManager.stop(); - await fs.promises.rm(targetDataDir, { - force: true, - recursive: true, - }); - await altAgentServer.stop(); - await altGestaltGraph.stop(); - await altVaultManager.stop(); - await altNotificationsManager.stop(); - await altACL.stop(); - await altDb.stop(); - await altNodeConnectionManager.stop(); - await altNodeGraph.stop(); - await altKeyManager.stop(); - await fs.promises.rm(altDataDir, { - force: true, - recursive: true, + const vault3 = await remoteAgent.vaultManager.createVault( + 'testVault3' as VaultName, + ); + + // Scanning vaults + + // Should throw due to no permission + await expect(async () => { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { + // Should throw + } + 
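// Iterating the generator is what triggers the remote permission check + 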
}).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + // Should throw due to lack of scan permission + await remoteAgent.gestaltGraph.setGestaltActionByNode(nodeId1, 'notify'); + await expect(async () => { + for await (const _ of vaultManager.scanVaults(targetNodeId)) { + // Should throw + } + }).rejects.toThrow(vaultsErrors.ErrorVaultsPermissionDenied); + + // Setting permissions + await remoteAgent.gestaltGraph.setGestaltActionByNode(nodeId1, 'scan'); + await remoteAgent.acl.setVaultAction(vault1, nodeId1, 'clone'); + await remoteAgent.acl.setVaultAction(vault1, nodeId1, 'pull'); + await remoteAgent.acl.setVaultAction(vault2, nodeId1, 'clone'); + // No permissions for vault3 + + const gen = vaultManager.scanVaults(targetNodeId); + const vaults: Record = {}; + for await (const vault of gen) { + vaults[vault.vaultIdEncoded] = [ + vault.vaultName, + vault.vaultPermissions, + ]; + } + + expect(vaults[vaultsUtils.encodeVaultId(vault1)]).toEqual([ + 'testVault1', + ['clone', 'pull'], + ]); + expect(vaults[vaultsUtils.encodeVaultId(vault2)]).toEqual([ + 'testVault2', + ['clone'], + ]); + expect(vaults[vaultsUtils.encodeVaultId(vault3)]).toBeUndefined(); + } finally { + await vaultManager.stop(); + await vaultManager.destroy(); + await nodeConnectionManager.stop(); + await fwdProxy.stop(); + await nodeGraph.stop(); + await nodeGraph.destroy(); + await gestaltGraph.stop(); + await gestaltGraph.destroy(); + await acl.stop(); + await acl.destroy(); + await remoteAgent.stop(); + await remoteAgent.destroy(); + } + }); + test('stopping respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const vault = vaultAndLock.vault!; + const release = await lock.acquireWrite(); + // Try to close the vault + const closeP = vaultManager.closeVault(vaultId); + await sleep(1000); + // Shouldn't be closed + expect(vault[running]).toBe(true); + expect( + vaultMap.get(vaultId.toString() as VaultIdString)!.vault, + ).toBeDefined(); + // Release the lock + release(); + await closeP; + expect(vault[running]).toBe(false); + expect(vaultMap.get(vaultId.toString() as VaultIdString)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('destroying respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const vault = vaultAndLock.vault!; + const release = 
await lock.acquireWrite(); + // Try to destroy + const destroyP = vaultManager.destroyVault(vaultId); + await sleep(1000); + // Shouldn't be destroyed + expect(vault[destroyed]).toBe(false); + expect( + vaultMap.get(vaultId.toString() as VaultIdString)!.vault, + ).toBeDefined(); + // Release the lock + release(); + await destroyP; + expect(vault[destroyed]).toBe(true); + expect(vaultMap.get(vaultId.toString() as VaultIdString)).toBeUndefined(); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('withVault respects locks', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + // Create the vault + const vaultId = await vaultManager.createVault('vaultName'); + // Getting and holding the lock + const vaultAndLock = vaultMap.get(vaultId.toString() as VaultIdString)!; + const lock = vaultAndLock.lock; + const release = await lock.acquireWrite(); + // Try to use vault + let finished = false; + const withP = vaultManager.withVaults([vaultId], async () => { + finished = true; }); + await sleep(1000); + // Callback shouldn't have run yet + expect(finished).toBe(false); + // Release the lock + release(); + await withP; + expect(finished).toBe(true); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Creation adds a vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + try { + await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('Concurrently creating vault with same name only creates 1 vault', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + await expect( + Promise.all([ + vaultManager.createVault(vaultName), + vaultManager.createVault(vaultName), + ]), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultDefined); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('vaults persist', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + await vaultManager.closeVault(vaultId); + // 
@ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(0); - afterAll(async () => { - await altFwdProxy.stop(); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); + + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + expect(vaultMap.size).toBe(1); + expect(vault1).toEqual(vault2); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('vaults can be removed from map', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = await vaultManager.createVault(vaultName); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(1); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId); + await vaultManager.closeVault(vaultId); + expect(vaultMap.size).toBe(0); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId); + expect(vault1).not.toEqual(vault2); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('stopping vaultManager empties map and stops all vaults', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId1 = await vaultManager.createVault('vault1'); + const vaultId2 = await vaultManager.createVault('vault2'); + // @ts-ignore: kidnapping the map + const vaultMap = vaultManager.vaultMap; + expect(vaultMap.size).toBe(2); + // @ts-ignore: protected method + const vault1 = await vaultManager.getVault(vaultId1); + // @ts-ignore: protected method + const vault2 = await vaultManager.getVault(vaultId2); + await vaultManager.stop(); + expect(vaultMap.size).toBe(0); + expect(vault1[running]).toBe(false); + expect(vault2[running]).toBe(false); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('destroying vaultManager destroys all data', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), }); + let vaultManager2: VaultManager | undefined; + try { + const vaultId = await vaultManager.createVault('vault1'); + await vaultManager.stop(); + await vaultManager.destroy(); + // Vaults DB should be empty + const vaultsDb = await db.level(VaultManager.name); + expect(await db.count(vaultsDb)).toBe(0); + vaultManager2 = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); - test( 
- 'clone and pull vaults', - async () => { - // Await vaultManager.createVault(vaultName); - // await vaultManager.createVault('MyFirstVault copy'); - const vault = await targetVaultManager.createVault(vaultName); - // Await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - const names: string[] = []; - for (let i = 0; i < 1; i++) { - const name = 'secret ' + i.toString(); - names.push(name); - const content = 'Success?'; - await vaultOps.addSecret(vault, name, content); - } - await nodeManager.setNode(targetNodeId, { - host: targetHost, - port: targetPort, - } as NodeAddress); - await nodeConnectionManager.withConnF(targetNodeId, async () => {}); - await revProxy.openConnection(sourceHost, sourcePort); - await vaultManager.cloneVault(targetNodeId, vault.vaultId); - const vaultId = await vaultManager.getVaultId(vaultName); - const vaultClone = await vaultManager.openVault(vaultId!); - let file = await vaultClone.access(async (efs) => { - return await efs.readFile('secret 0', { encoding: 'utf8' }); - }); - expect(file).toBe('Success?'); - // Expect(vaultsList[2].name).toStrictEqual('MyFirstVault copy copy'); - // await expect( - // vaultManager.getDefaultNode(vaultsList[2].id), - // ).resolves.toBe(targetNodeId); - // const clonedVault = await vaultManager.getVault(vaultsList[2].id); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'Success?', - // ); - // expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - for (let i = 1; i < 2; i++) { - const name = 'secret ' + i.toString(); - names.push(name); - const content = 'Second Success?'; - await vaultOps.addSecret(vault, name, content); + // @ts-ignore: protected method + await expect(vaultManager2.getVault(vaultId)).rejects.toThrow( + vaultsErrors.ErrorVaultsVaultUndefined, + ); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + await vaultManager2?.stop(); + await vaultManager2?.destroy(); + } + }); + test("withVaults should throw if vaultId doesn't exist", async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + try { + const vaultId = vaultsUtils.generateVaultId(); + await expect( + vaultManager.withVaults([vaultId], async () => { + // Do nothing + }), + ).rejects.toThrow(vaultsErrors.ErrorVaultsVaultUndefined); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } + }); + test('generateVaultId handles vault conflicts', async () => { + const vaultManager = await VaultManager.createVaultManager({ + vaultsPath, + keyManager: dummyKeyManager, + gestaltGraph: {} as GestaltGraph, + nodeConnectionManager: {} as NodeConnectionManager, + acl: {} as ACL, + notificationsManager: {} as NotificationsManager, + db, + logger: logger.getChild(VaultManager.name), + }); + const generateVaultIdMock = jest.spyOn(vaultsUtils, 'generateVaultId'); + try { + // Generate 100 ids + const vaultIds: VaultId[] = []; + for (let i = 0; i < 100; i++) { + vaultIds.push( + // @ts-ignore: protected method + vaultsUtils.encodeVaultId(await vaultManager.generateVaultId()), + ); + } + const duplicates = vaultIds.filter( + (item, index) => vaultIds.indexOf(item) !== index, + ); + expect(duplicates.length).toBe(0); + + const vaultId 
= await vaultManager.createVault('testvault'); + // Now only returns duplicates + generateVaultIdMock.mockReturnValue(vaultId); + const asd = async () => { + for (let i = 0; i < 100; i++) { + // @ts-ignore: protected method + await vaultManager.generateVaultId(); } - await vaultManager.pullVault({ vaultId: vaultClone.vaultId }); - file = await vaultClone.access(async (efs) => { - return await efs.readFile('secret 1', { encoding: 'utf8' }); - }); - expect(file).toBe('Second Success?'); - // Expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // names.sort(), - // ); - // expect(await clonedVault.getSecret('secret 19')).toStrictEqual( - // 'Second Success?', - // ); - }, - global.defaultTimeout * 2, - ); - // TODO: what is this? do we need it? - // Test( - // 'reject clone and pull ops when permissions are not set', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await vault.addSecret('MyFirstSecret', 'Success?'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await expect(() => - // vaultManager.cloneVault(vault.vaultId, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList).toStrictEqual([]); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // await targetVaultManager.unsetVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // vault.addSecret('MySecondSecret', 'SecondSuccess?'); - // await expect(() => - // vaultManager.pullVault(vaultList[0].id, targetNodeId), - // ).rejects.toThrow(gitErrors.ErrorGitPermissionDenied); - // const list = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(list[0].id); - // expect((await clonedVault.listSecrets()).sort()).toStrictEqual( - // ['MyFirstSecret'].sort(), - // ); - // await vaultManager.stop(); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'handle vault conflicts', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await vault.mkdir('dir', { recursive: true }); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // const vaultList = await vaultManager.listVaults(); - // const clonedVault = await vaultManager.getVault(vaultList[0].id); - // await clonedVault.renameSecret('secret 9', 'secret 10'); - // await vault.renameSecret('secret 9', 'causing merge conflict'); - // await expect(() => - // vaultManager.pullVault(clonedVault.vaultId), - // 
).rejects.toThrow(vaultErrors.ErrorVaultMergeConflict); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull from a default node', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // await altClonedVault.addSecret('secret 10', 'default pull?'); - // await vaultManager.pullVault(clonedVault.vaultId); - // expect(await clonedVault.getSecret('secret 10')).toStrictEqual( - // 'default pull?', - // ); - // }, - // global.defaultTimeout * 2, - // ); - // test( - // 'clone and pull within a system of 3 nodes', - // async () => { - // await vaultManager.start({}); - // const vault = await targetVaultManager.createVault('MyFirstVault'); - // await targetVaultManager.setVaultPermissions( - // altNodeManager.getNodeId(), - // vault.vaultId, - // ); - // await targetVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // vault.vaultId, - // ); - // const names: string[] = []; - // for (let i = 0; i < 10; i++) { - // const name = 'secret ' + i.toString(); - // names.push(name); - // const content = 'Success?'; - // await vault.addSecret(name, content); - // } - // await altNodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await altNodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(altHost, altPort); - // await altVaultManager.cloneVault(vault.vaultId, targetNodeId); - // const altVaultsList = await 
altVaultManager.listVaults(); - // expect(altVaultsList[0].name).toStrictEqual('MyFirstVault'); - // await nodeManager.setNode(targetNodeId, { - // ip: targetHost, - // port: targetPort, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(targetNodeId); - // await revProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.cloneVault(vault.vaultId, targetNodeId); - // await altVaultManager.setVaultPermissions( - // nodeManager.getNodeId(), - // altVaultsList[0].id, - // ); - // const vaultsList = await vaultManager.listVaults(); - // expect(vaultsList[0].name).toStrictEqual('MyFirstVault'); - // const clonedVault = await vaultManager.getVault(vaultsList[0].id); - // const altClonedVault = await altVaultManager.getVault( - // altVaultsList[0].id, - // ); - // await altClonedVault.updateSecret('secret 9', 'this is new'); - // await nodeManager.setNode(altNodeId, { - // ip: altHostIn, - // port: altPortIn, - // } as NodeAddress); - // await nodeManager.getConnectionToNode(altNodeId); - // await altRevProxy.openConnection(sourceHost, sourcePort); - // await vaultManager.pullVault(clonedVault.vaultId, altNodeId); - // expect(await clonedVault.getSecret('secret 9')).toStrictEqual( - // 'this is new', - // ); - // }, - // global.defaultTimeout * 2, - // ); + }; + await expect(async () => { + return await asd(); + }).rejects.toThrow(vaultsErrors.ErrorVaultsCreateVaultId); + } finally { + await vaultManager?.stop(); + await vaultManager?.destroy(); + } }); }); diff --git a/tests/vaults/VaultOps.test.ts b/tests/vaults/VaultOps.test.ts index b07c08029..e376eb306 100644 --- a/tests/vaults/VaultOps.test.ts +++ b/tests/vaults/VaultOps.test.ts @@ -1,37 +1,41 @@ -import type { Vault, VaultId } from '@/vaults/types'; +import type { VaultId } from '@/vaults/types'; +import type { Vault } from '@/vaults/Vault'; +import type KeyManager from '@/keys/KeyManager'; +import type { DBDomain, DBLevel } from '@matrixai/db'; import fs from 'fs'; import path from 'path'; import os from 'os'; import { EncryptedFS } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; -import { utils as idUtils } from '@matrixai/id'; +import { DB } from '@matrixai/db'; import * as errors from '@/vaults/errors'; -import { VaultInternal, vaultOps } from '@/vaults'; -import { KeyManager } from '@/keys'; -import { generateVaultId } from '@/vaults/utils'; +import VaultInternal from '@/vaults/VaultInternal'; +import * as vaultOps from '@/vaults/VaultOps'; +import * as vaultsUtils from '@/vaults/utils'; import * as keysUtils from '@/keys/utils'; import * as testUtils from '../utils'; describe('VaultOps', () => { - const password = 'password'; const logger = new Logger('VaultOps', LogLevel.WARN, [new StreamHandler()]); - // Const probeLogger = new Logger('vaultOpsProbe', LogLevel.INFO, [ - // new StreamHandler(), - // ]); let dataDir: string; - - let keyManager: KeyManager; let baseEfs: EncryptedFS; - let vaultId: VaultId; let vaultInternal: VaultInternal; let vault: Vault; + let db: DB; + let vaultsDb: DBLevel; + let vaultsDbDomain: DBDomain; + const dummyKeyManager = { + getNodeId: () => { + return testUtils.generateRandomNodeId(); + }, + } as KeyManager; let mockedGenerateKeyPair: jest.SpyInstance; let mockedGenerateDeterministicKeyPair: jest.SpyInstance; - beforeAll(async () => { + beforeEach(async () => { const globalKeyPair = await testUtils.setupGlobalKeypair(); mockedGenerateKeyPair = jest .spyOn(keysUtils, 'generateKeyPair') @@ -43,60 +47,57 @@ describe('VaultOps', () => { 
dataDir = await fs.promises.mkdtemp( path.join(os.tmpdir(), 'polykey-test-'), ); - const keysPath = path.join(dataDir, 'keys'); - - keyManager = await KeyManager.createKeyManager({ - keysPath, - password, - logger, - }); - - const dbPath = path.join(dataDir, 'db'); + const dbPath = path.join(dataDir, 'efsDb'); + const dbKey = await keysUtils.generateKey(); baseEfs = await EncryptedFS.createEncryptedFS({ - dbKey: keyManager.dbKey, + dbKey, dbPath, logger, }); await baseEfs.start(); - }); - afterAll(async () => { - mockedGenerateKeyPair.mockRestore(); - mockedGenerateDeterministicKeyPair.mockRestore(); - await baseEfs.stop(); - await baseEfs.destroy(); - await keyManager.stop(); - await keyManager.destroy(); - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - - beforeEach(async () => { - vaultId = generateVaultId(); - await baseEfs.mkdir(path.join(idUtils.toString(vaultId), 'contents'), { - recursive: true, - }); - vaultInternal = await VaultInternal.create({ - keyManager: keyManager, + vaultId = vaultsUtils.generateVaultId(); + await baseEfs.mkdir( + path.join(vaultsUtils.encodeVaultId(vaultId), 'contents'), + { + recursive: true, + }, + ); + db = await DB.createDB({ dbPath: path.join(dataDir, 'db'), logger }); + vaultsDbDomain = ['vaults']; + vaultsDb = await db.level(vaultsDbDomain[0]); + vaultInternal = await VaultInternal.createVaultInternal({ + keyManager: dummyKeyManager, vaultId, efs: baseEfs, logger: logger.getChild(VaultInternal.name), fresh: true, + db, + vaultsDbDomain, + vaultsDb, + vaultName: 'VaultName', }); vault = vaultInternal as Vault; }); + afterEach(async () => { + await vaultInternal.stop(); await vaultInternal.destroy(); + await db.stop(); + await db.destroy(); + mockedGenerateKeyPair.mockRestore(); + mockedGenerateDeterministicKeyPair.mockRestore(); + await baseEfs.stop(); + await baseEfs.destroy(); + await fs.promises.rm(dataDir, { + force: true, + recursive: true, + }); }); test('adding a secret', async () => { - // Await vault.access(async efs => { - // console.log(await efs.readdir('.')); - // }) await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); - const dir = await vault.access(async (efs) => { + const dir = await vault.readF(async (efs) => { return await efs.readdir('.'); }); expect(dir).toContain('secret-1'); @@ -107,7 +108,7 @@ describe('VaultOps', () => { expect(secret.toString()).toBe('secret-content'); await expect(() => vaultOps.getSecret(vault, 'doesnotexist'), - ).rejects.toThrow(errors.ErrorSecretUndefined); + ).rejects.toThrow(errors.ErrorSecretsSecretUndefined); }); test('able to make directories', async () => { await vaultOps.mkdir(vault, 'dir-1', { recursive: true }); @@ -120,7 +121,7 @@ describe('VaultOps', () => { path.join('dir-3', 'dir-4', 'secret-1'), 'secret-content', ); - await vault.access(async (efs) => { + await vault.readF(async (efs) => { const dir = await efs.readdir('.'); expect(dir).toContain('dir-1'); expect(dir).toContain('dir-2'); @@ -143,9 +144,9 @@ describe('VaultOps', () => { (await vaultOps.getSecret(vault, name)).toString(), ).toStrictEqual(content); - await expect( - vault.access((efs) => efs.readdir('.')), - ).resolves.toContain(name); + await expect(vault.readF((efs) => efs.readdir('.'))).resolves.toContain( + name, + ); } }, global.defaultTimeout * 4, @@ -207,14 +208,14 @@ describe('VaultOps', () => { await expect(() => vaultOps.deleteSecret(vault, 'dir-1')).rejects.toThrow(); await vaultOps.deleteSecret(vault, path.join('dir-1', 'secret-2')); await vaultOps.deleteSecret(vault, 
'dir-1'); - await expect( - vault.access((efs) => efs.readdir('.')), - ).resolves.not.toContain('secret-1'); + await expect(vault.readF((efs) => efs.readdir('.'))).resolves.not.toContain( + 'secret-1', + ); }); test('deleting a secret within a directory', async () => { await expect(() => vaultOps.mkdir(vault, path.join('dir-1', 'dir-2')), - ).rejects.toThrow(errors.ErrorRecursive); + ).rejects.toThrow(errors.ErrorVaultsRecursive); await vaultOps.mkdir(vault, path.join('dir-1', 'dir-2'), { recursive: true, }); @@ -223,16 +224,11 @@ describe('VaultOps', () => { path.join('dir-1', 'dir-2', 'secret-1'), 'secret-content', ); - await vaultOps.deleteSecret( - vault, - path.join('dir-1', 'dir-2'), - { - recursive: true, - }, - logger, - ); + await vaultOps.deleteSecret(vault, path.join('dir-1', 'dir-2'), { + recursive: true, + }); await expect( - vault.access((efs) => efs.readdir('dir-1')), + vault.readF((efs) => efs.readdir('dir-1')), ).resolves.not.toContain('dir-2'); }); test( @@ -247,7 +243,7 @@ describe('VaultOps', () => { ).toStrictEqual(content); await vaultOps.deleteSecret(vault, name, { recursive: true }); await expect( - vault.access((efs) => efs.readdir('.')), + vault.readF((efs) => efs.readdir('.')), ).resolves.not.toContain(name); } }, @@ -256,7 +252,7 @@ describe('VaultOps', () => { test('renaming a secret', async () => { await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); await vaultOps.renameSecret(vault, 'secret-1', 'secret-change'); - const dir = vault.access((efs) => efs.readdir('.')); + const dir = vault.readF((efs) => efs.readdir('.')); await expect(dir).resolves.not.toContain('secret-1'); await expect(dir).resolves.toContain('secret-change'); }); @@ -273,9 +269,9 @@ describe('VaultOps', () => { path.join(dirPath, 'secret-1'), path.join(dirPath, 'secret-change'), ); - await expect( - vault.access((efs) => efs.readdir(dirPath)), - ).resolves.toContain(`secret-change`); + await expect(vault.readF((efs) => efs.readdir(dirPath))).resolves.toContain( + `secret-change`, + ); }); test('listing secrets', async () => { await vaultOps.addSecret(vault, 'secret-1', 'secret-content'); @@ -384,7 +380,7 @@ describe('VaultOps', () => { await vaultOps.addSecretDirectory(vault, secretDir, fs); await expect( - vault.access((efs) => efs.readdir(secretDirName)), + vault.readF((efs) => efs.readdir(secretDirName)), ).resolves.toContain('secret'); await fs.promises.rm(secretDir, { @@ -516,7 +512,7 @@ describe('VaultOps', () => { for (let j = 0; j < 8; j++) { await expect( - vault.access((efs) => efs.readdir(secretDirName)), + vault.readF((efs) => efs.readdir(secretDirName)), ).resolves.toContain('secret ' + j.toString()); } expect( diff --git a/tests/vaults/old/Vault.test.ts.old b/tests/vaults/old/Vault.test.ts.old deleted file mode 100644 index 96202daa5..000000000 --- a/tests/vaults/old/Vault.test.ts.old +++ /dev/null @@ -1,565 +0,0 @@ -import type { NodeId } from '@/nodes/types'; -import type { VaultId, VaultIdRaw, VaultKey, VaultName } from "@/vaults/types"; - -import os from 'os'; -import path from 'path'; -import fs from 'fs'; -import git from 'isomorphic-git'; -import Vault from '@/vaults/old/Vault'; -import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; - -import { generateVaultId, generateVaultKey } from '@/vaults/utils'; -import { getRandomBytes } from '@/keys/utils'; -import { EncryptedFS } from 'encryptedfs'; -import * as errors from '@/vaults/errors'; -import * as utils from '@/utils'; - -describe.skip('Vault is', () => { - let dataDir: string; - let vault: 
Vault; - let key: VaultKey; - let vaultId: VaultId; - let efsDir: string; - const logger = new Logger('Vault', LogLevel.WARN, [new StreamHandler()]); - const name = 'vault-1' as VaultName; - - beforeEach(async () => { - dataDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'polykey-test-'), - ); - key = await generateVaultKey(); - vaultId = generateVaultId(); - efsDir = path.join(dataDir, vaultId); - await fs.promises.mkdir(efsDir); - vault = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - }); - - afterEach(async () => { - await fs.promises.rm(dataDir, { - force: true, - recursive: true, - }); - }); - - test('type correct', async () => { - expect(vault).toBeInstanceOf(Vault); - }); - test('creating the vault directory', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - }); - test('able to destroy an empty vault', async () => { - await vault.start({ key }); - await expect(fs.promises.readdir(dataDir)).resolves.toContain(vaultId); - await vault.stop(); - await expect(fs.promises.readdir(dataDir)).resolves.not.toContain(vaultId); - }); - test('adding a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding a secret and getting it', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - const secret = await vault.getSecret('secret-1'); - expect(secret).toBe('secret-content'); - await expect(() => vault.getSecret('doesnotexist')).rejects.toThrow( - errors.ErrorSecretUndefined, - ); - await vault.stop(); - }); - test('able to make directories', async () => { - await vault.start({ key }); - await vault.mkdir('dir-1', { recursive: true }); - await vault.mkdir('dir-2', { recursive: true }); - await vault.mkdir(path.join('dir-3', 'dir-4'), { recursive: true }); - await vault.addSecret( - path.join('dir-3', 'dir-4', 'secret-1'), - 'secret-content', - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('dir-2.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-3.data')), - ).resolves.toContain('dir-4.data'); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-3.data', 'dir-4.data'), - ), - ).resolves.toContain('secret-1.data'); - await vault.stop(); - }); - test('adding and committing a secret 10 times', async () => { - await vault.start({ key }); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('updating secret content', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.updateSecret('secret-1', 'secret-content-change'); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual( - 'secret-content-change', - ); - await vault.stop(); - }); - test('updating secret content within a directory', async () => { - await vault.start({ key }); - await 
vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.updateSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content-change', - ); - await expect( - vault.getSecret(path.join('dir-1', 'dir-2', 'secret-1')), - ).resolves.toStrictEqual('secret-content-change'); - await vault.stop(); - }); - test('updating a secret 10 times', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - for (let i = 0; i < 10; i++) { - const content = 'secret-content'; - await vault.updateSecret('secret-1', content); - await expect(vault.getSecret('secret-1')).resolves.toStrictEqual(content); - } - await vault.stop(); - }); - test('deleting a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.mkdir('dir-1'); - await vault.deleteSecret('secret-1'); - await expect(() => vault.deleteSecret('dir-1')).rejects.toThrow( - errors.ErrorRecursive, - ); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await vault.stop(); - }); - test('deleting a secret within a directory', async () => { - await vault.start({ key }); - await expect(() => vault.mkdir(path.join('dir-1', 'dir-2'))).rejects.toThrow( - errors.ErrorRecursive, - ); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.deleteSecret(path.join('dir-1', 'dir-2'), { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, 'dir-1.data')), - ).resolves.not.toContain('dir2-1.data'); - await vault.stop(); - }); - test('deleting a secret 10 times', async () => { - await vault.start({ key }); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = 'secret-content'; - await vault.addSecret(name, content); - await expect(vault.getSecret(name)).resolves.toStrictEqual(content); - await vault.deleteSecret(name, { recursive: true }); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain(`${name}.data`); - } - await vault.stop(); - }); - test('renaming a vault', async () => { - await vault.start({ key }); - await vault.renameVault('vault-change' as VaultName); - expect(vault.vaultName).toEqual('vault-change'); - await vault.stop(); - }); - test('renaming a secret', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.renameSecret('secret-1', 'secret-change'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.not.toContain('secret-1.data'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-change.data'); - await vault.stop(); - }); - test('renaming a secret within a directory', async () => { - await vault.start({ key }); - await vault.mkdir(path.join('dir-1', 'dir-2'), { recursive: true }); - await vault.addSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - 'secret-content', - ); - await vault.renameSecret( - path.join('dir-1', 'dir-2', 'secret-1'), - path.join('dir-1', 'dir-2', 'secret-change'), - ); - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, 'dir-1.data', 'dir-2.data'), - ), - ).resolves.toContain(`secret-change.data`); - await vault.stop(); - }); - test('listing secrets', async () => { - 
await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await vault.addSecret('secret-2', 'secret-content'); - await vault.mkdir(path.join('dir1', 'dir2'), { recursive: true }); - await vault.addSecret( - path.join('dir1', 'dir2', 'secret-3'), - 'secret-content', - ); - expect((await vault.listSecrets()).sort()).toStrictEqual( - ['secret-1', 'secret-2', 'dir1/dir2/secret-3'].sort(), - ); - await vault.stop(); - }); - test('listing secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 10; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - } - await vault.start({ key }); - await vault.addSecretDirectory(secretDir); - expect(await vault.listSecrets()).toStrictEqual([ - path.join(secretDirName, `secret 0`), - path.join(secretDirName, `secret 1`), - path.join(secretDirName, `secret 2`), - path.join(secretDirName, `secret 3`), - path.join(secretDirName, `secret 4`), - path.join(secretDirName, `secret 5`), - path.join(secretDirName, `secret 6`), - path.join(secretDirName, `secret 7`), - path.join(secretDirName, `secret 8`), - path.join(secretDirName, `secret 9`), - ]); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hiddenSecret', '.hiddenDir/.hiddenInSecret'].sort(), - ); - await vault.stop(); - }); - test('updating and deleting hidden files and directories', async () => { - await vault.start({ key }); - await vault.addSecret('.hiddenSecret', 'hidden_contents'); - await vault.mkdir('.hiddenDir', { recursive: true }); - await vault.addSecret('.hiddenDir/.hiddenInSecret', 'hidden_inside'); - await vault.updateSecret('.hiddenSecret', 'change_contents'); - await vault.updateSecret('.hiddenDir/.hiddenInSecret', 'change_inside'); - await vault.renameSecret('.hiddenSecret', '.hidingSecret'); - await vault.renameSecret('.hiddenDir', '.hidingDir'); - let list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - ['.hidingSecret', '.hidingDir/.hiddenInSecret'].sort(), - ); - await expect(vault.getSecret('.hidingSecret')).resolves.toStrictEqual( - 'change_contents', - ); - await expect( - vault.getSecret('.hidingDir/.hiddenInSecret'), - ).resolves.toStrictEqual('change_inside'); - await vault.deleteSecret('.hidingSecret', { recursive: true }); - await vault.deleteSecret('.hidingDir', { recursive: true }); - list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual([].sort()); - await vault.stop(); - }); - test( - 'adding and committing a secret 100 times on efs', - async () => { - const efs = await EncryptedFS.createEncryptedFS({ - dbKey: await getRandomBytes(32), - dbPath: dataDir, - }); - const exists = utils.promisify(efs.exists).bind(efs); - const mkdir = utils.promisify(efs.mkdir).bind(efs); - const writeFile = utils.promisify(efs.writeFile).bind(efs); - const vaultId = vault.vaultId; - await mkdir(path.join(dataDir, vaultId), { - recursive: true, - }); - await git.init({ - fs: 
efs, - dir: path.join(dataDir, vaultId), - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: 'Initial Commit', - }); - await writeFile( - path.join(path.join(dataDir, vaultId), '.git', 'packed-refs'), - '# pack-refs with: peeled fully-peeled sorted', - ); - for (let i = 0; i < 100; i++) { - const name = 'secret ' + i.toString(); - const content = await getRandomBytes(5); - const writePath = path.join(dataDir, vaultId, name); - await writeFile(writePath, content, {}); - await git.add({ - fs: efs, - dir: path.join(dataDir, vaultId), - filepath: name, - }); - await git.commit({ - fs: efs, - dir: path.join(dataDir, vaultId), - author: { - name: vaultId, - }, - message: `Add secret: ${name}`, - }); - - await expect(exists(path.join(dataDir, vaultId, name))).resolves.toBe( - true, - ); - } - }, - global.defaultTimeout * 2, - ); - test('adding a directory of 1 secret', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - const name = 'secret'; - const content = await getRandomBytes(5); - await fs.promises.writeFile(path.join(secretDir, name), content); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId, `${secretDirName}.data`)), - ).resolves.toContain('secret.data'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('getting the stats of a vault', async () => { - await vault.start({ key }); - const stats = await vault.stats(); - expect(stats).toBeInstanceOf(fs.Stats); - await vault.stop(); - }); - test('adding a directory with subdirectories and files', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - - await fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.addSecretDirectory(path.join(secretDir)); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('testing the errors handling of adding secret directories', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - await fs.promises.mkdir(path.join(secretDir, 'dir1')); - await fs.promises.mkdir(path.join(secretDir, 'dir1', 'dir2')); - await fs.promises.mkdir(path.join(secretDir, 'dir3')); - await 
fs.promises.writeFile(path.join(secretDir, 'secret1'), 'secret1'); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'secret2'), - 'secret2', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir1', 'dir2', 'secret3'), - 'secret3', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret4'), - 'secret4', - ); - await fs.promises.writeFile( - path.join(secretDir, 'dir3', 'secret5'), - 'secret5', - ); - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: true }); - await vault.addSecret( - path.join(secretDirName, 'secret1'), - 'blocking-secret', - ); - await vault.addSecretDirectory(secretDir); - const list = await vault.listSecrets(); - expect(list.sort()).toStrictEqual( - [ - path.join(secretDirName, 'secret1'), - path.join(secretDirName, 'dir1', 'secret2'), - path.join(secretDirName, 'dir1', 'dir2', 'secret3'), - path.join(secretDirName, 'dir3', 'secret4'), - path.join(secretDirName, 'dir3', 'secret5'), - ].sort(), - ); - await vault.start({ key }); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('adding a directory of 100 secrets with some secrets already existing', async () => { - const secretDir = await fs.promises.mkdtemp( - path.join(os.tmpdir(), 'secret-directory-'), - ); - const secretDirName = path.basename(secretDir); - for (let i = 0; i < 50; i++) { - const name = 'secret ' + i.toString(); - const content = 'this is secret ' + i.toString(); - await fs.promises.writeFile( - path.join(secretDir, name), - Buffer.from(content), - ); - } - await vault.start({ key }); - await vault.mkdir(secretDirName, { recursive: false }); - await vault.addSecret( - path.join(secretDirName, 'secret 8'), - 'secret-content', - ); - await vault.addSecret( - path.join(secretDirName, 'secret 9'), - 'secret-content', - ); - await vault.addSecretDirectory(secretDir); - - for (let j = 0; j < 8; j++) { - await expect( - fs.promises.readdir( - path.join(dataDir, vaultId, `${secretDirName}.data`), - ), - ).resolves.toContain('secret ' + j.toString() + '.data'); - } - await expect( - vault.getSecret(path.join(secretDirName, 'secret 8')), - ).resolves.toStrictEqual('this is secret 8'); - await expect( - vault.getSecret(path.join(secretDirName, 'secret 9')), - ).resolves.toStrictEqual('this is secret 9'); - await vault.stop(); - await fs.promises.rm(secretDir, { - force: true, - recursive: true, - }); - }); - test('able to persist data across multiple vault objects', async () => { - await vault.start({ key }); - await vault.addSecret('secret-1', 'secret-content'); - await expect( - fs.promises.readdir(path.join(dataDir, vaultId)), - ).resolves.toContain('secret-1.data'); - const vault2 = new Vault({ - vaultId: vaultId, - vaultName: name, - baseDir: efsDir, - fs: fs, - logger: logger, - }); - await vault2.start({ key }); - const content = await vault2.getSecret('secret-1'); - expect(content).toBe('secret-content'); - await vault2.stop(); - }); - // Test('able to erase dirty commits on start up', async () => { - // await vault.start({ key }); - // await vault.addSecret('secret-1', 'secret-content'); - // await vault.mkdir('dir-1', { recursive: true }); - // await vault.addSecret('dir-1/secret-1', 'secret-content'); - // await vault.start({ key }); - // await fs.promises.writeFile(path.join(dataDir, `${vault.vaultId}:nodeID`), 'dirty-commit'); - // const vault2 = new Vault({ - // vaultId: vaultId, - // vaultName: name, - // baseDir: efsDir, - // fs: fs, - // logger: logger, - // }); - // await vault2.start({ key 
}); - // await vault2.stop(); - // }); -}); diff --git a/tests/vaults/utils.test.ts b/tests/vaults/utils.test.ts index 6cb6d6280..a2333467b 100644 --- a/tests/vaults/utils.test.ts +++ b/tests/vaults/utils.test.ts @@ -1,3 +1,4 @@ +import type { VaultId } from '@/vaults/types'; import fs from 'fs'; import os from 'os'; import path from 'path'; @@ -5,9 +6,8 @@ import { EncryptedFS } from 'encryptedfs'; import Logger, { LogLevel, StreamHandler } from '@matrixai/logger'; import { IdRandom } from '@matrixai/id'; -import * as utils from '@/utils'; import * as vaultsUtils from '@/vaults/utils'; -import { isVaultId } from '@/vaults/utils'; +import * as keysUtils from '@/keys/utils'; describe('Vaults utils', () => { const logger = new Logger('Vaults utils tests', LogLevel.WARN, [ @@ -28,116 +28,61 @@ describe('Vaults utils', () => { }); }); - test('VaultId type guard works', async () => { - // Const nodeId = makeNodeId('A'.repeat(44)); - const vaultId = vaultsUtils.generateVaultId(); - expect(isVaultId(vaultId)).toBeTruthy(); - }); - // TODO: this may be fully removed later. check if splitting is needed for vaultIds - // test('vaultIds can be split', async () => { - // const nodeId = 'alkjsddfjknacqqquiry32741834id'; - // const id = vaultsUtils.generateVaultId(); - // expect(id).toContain(nodeId); - // const vaultId = vaultsUtils.splitVaultId(id); - // expect(vaultId).not.toContain(nodeId); - // }); - test.skip('EFS can be read recursively', async () => { - const key = await vaultsUtils.generateVaultKey(); + test('EFS can be read recursively', async () => { + const key = await keysUtils.generateKey(256); const efs = await EncryptedFS.createEncryptedFS({ dbKey: key, dbPath: dataDir, logger, }); - const mkdir = utils.promisify(efs.mkdir).bind(efs); - const writeFile = utils.promisify(efs.writeFile).bind(efs); - await mkdir('dir', { recursive: true }); - await mkdir('dir/dir2/dir3', { recursive: true }); - await writeFile('dir/file', 'content'); + await efs.promises.mkdir(path.join('dir', 'dir2', 'dir3'), { + recursive: true, + }); + const filePath1 = path.join('dir', 'file'); + await efs.promises.writeFile(filePath1, 'content'); let files: string[] = []; - for await (const file of vaultsUtils.readdirRecursivelyEFS( - efs, - '', - false, - )) { + for await (const file of vaultsUtils.readdirRecursively(efs, './')) { files.push(file); } - expect(files.sort()).toStrictEqual(['dir/file'].sort()); + expect(files).toStrictEqual([filePath1]); files = []; - for await (const file of vaultsUtils.readdirRecursivelyEFS(efs, '', true)) { + const filePath2 = path.join('dir', 'dir2', 'dir3', 'file'); + await efs.promises.writeFile(filePath2, 'content'); + for await (const file of vaultsUtils.readdirRecursively(efs)) { files.push(file); } - expect(files.sort()).toStrictEqual( - ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(), - ); + expect(files.sort()).toStrictEqual([filePath1, filePath2].sort()); }); - // Test('a persisted EFS object can be read recursively', async () => { - // const key = await vaultsUtils.generateVaultKey(); - // const efs = new EncryptedFS(key, fs, dataDir); - // const mkdir = utils.promisify(efs.mkdir).bind(efs); - // const writeFile = utils.promisify(efs.writeFile).bind(efs); - // await mkdir('dir', { recursive: true }); - // await mkdir('dir/dir2/dir3', { recursive: true }); - // await writeFile('dir/file', 'content'); - // const efs2 = new EncryptedFS(key, fs, dataDir); - // let files: string[] = []; - // for await (const file of vaultsUtils.readdirRecursivelyEFS( - // efs2, - // '', 
-  //     false,
-  //   )) {
-  //     files.push(file);
-  //   }
-  //   expect(files.sort()).toStrictEqual(['dir/file'].sort());
-  //   files = [];
-  //   for await (const file of vaultsUtils.readdirRecursivelyEFS(
-  //     efs2,
-  //     '',
-  //     true,
-  //   )) {
-  //     files.push(file);
-  //   }
-  //   expect(files.sort()).toStrictEqual(
-  //     ['dir', 'dir/dir2', 'dir/dir2/dir3', 'dir/file'].sort(),
-  //   );
-  // });
-  test.skip('can search for a vault name', async () => {
-    // Const vaultList = ['a\tb', 'b\ta', '', 'c\tc', 'e\tf'];
-
-    fail();
-    // FIXME secret methods not implemented.
-    // expect(vaultsUtils.searchVaultName(vaultList, 'b' as VaultId)).toEqual('a');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'a' as VaultId)).toEqual('b');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'c' as VaultId)).toEqual('c');
-    // expect(vaultsUtils.searchVaultName(vaultList, 'f' as VaultId)).toEqual('e');
-    // expect(() =>
-    //   vaultsUtils.searchVaultName(vaultList, 'd' as VaultId),
-    // ).toThrow(vaultsErrors.ErrorRemoteVaultUndefined);
+  test('fs can be read recursively', async () => {
+    await fs.promises.mkdir(path.join(dataDir, 'dir'), { recursive: true });
+    await fs.promises.mkdir(path.join(dataDir, 'dir', 'dir2', 'dir3'), {
+      recursive: true,
+    });
+    const filePath1 = path.join(dataDir, 'dir', 'file');
+    await fs.promises.writeFile(filePath1, 'content');
+    let files: string[] = [];
+    for await (const file of vaultsUtils.readdirRecursively(fs, dataDir)) {
+      files.push(file);
+    }
+    expect(files).toStrictEqual([filePath1]);
+    files = [];
+    const filePath2 = path.join(dataDir, 'dir', 'dir2', 'dir3', 'file');
+    await fs.promises.writeFile(filePath2, 'content');
+    for await (const file of vaultsUtils.readdirRecursively(fs, dataDir)) {
+      files.push(file);
+    }
+    expect(files.sort()).toStrictEqual([filePath1, filePath2].sort());
   });
-  test('makeVaultId converts a buffer', async () => {
-    const randomIdGen = new IdRandom();
-    Buffer.from(randomIdGen.get());
+  test('decodeVaultId does not throw an error', async () => {
+    const randomIdGen = new IdRandom<VaultId>();
+    const vaultId = randomIdGen.get();
+    const vaultIdEncoded = vaultsUtils.encodeVaultId(vaultId);
+
+    expect(vaultsUtils.decodeVaultId(vaultIdEncoded)).toBeDefined();
+    expect(vaultsUtils.decodeVaultId('invalidVaultIdEncoded')).toBeUndefined();
+    expect(
+      vaultsUtils.decodeVaultId('zF4VfF3uRhSqgxTOOLONGxTRdVKauV9'),
+    ).toBeUndefined();
+    expect(vaultsUtils.decodeVaultId('zF4VfxTOOSHORTxTV9')).toBeUndefined();
   });
 });
-
-// Test('vaultIds are alphanumeric', async () => {
-//   const id1 = utils.generateVaultId('abc');
-//
-//   expect(isAlphaNumeric(id1)).toBe(true);
-// });
-//
-// function isAlphaNumeric(str) {
-//   let code, i, len;
-//
-//   for (i = 0, len = str.length; i < len; i++) {
-//     code = str.charCodeAt(i);
-//     if (
-//       !(code > 47 && code < 58) && // numeric (0-9)
-//       !(code > 64 && code < 91) && // upper alpha (A-Z)
-//       !(code > 96 && code < 123)
-//     ) {
-//       // lower alpha (a-z)
-//       return false;
-//     }
-//   }
-//   return true;
-// }
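As context for the two `readdirRecursively` tests above: a minimal sketch of how such an async generator could be written, assuming a structural `FsLike` type so that both Node's `fs` and an `EncryptedFS` instance can be passed in. `FsLike`, the exact signature, and the default `'.'` directory are illustrative assumptions, not the actual `@/vaults/utils` implementation; the sketch only shows why the tests expect file paths, and not directories, to be yielded depth-first.

import path from 'path';

// Structural type covering the subset of fs/EncryptedFS used here (assumed).
type FsLike = {
  promises: {
    readdir(p: string): Promise<Array<string>>;
    stat(p: string): Promise<{ isDirectory(): boolean; isFile(): boolean }>;
  };
};

// Walks `dir` depth-first and yields paths of regular files only;
// directories are recursed into but never yielded themselves.
async function* readdirRecursively(
  fs: FsLike,
  dir: string = '.',
): AsyncGenerator<string> {
  for (const entry of await fs.promises.readdir(dir)) {
    const entryPath = path.join(dir, entry);
    const stat = await fs.promises.stat(entryPath);
    if (stat.isDirectory()) {
      yield* readdirRecursively(fs, entryPath);
    } else if (stat.isFile()) {
      yield entryPath;
    }
  }
}

Used as in the tests, `for await (const file of readdirRecursively(fs, dataDir)) files.push(file);` collects exactly `[path.join(dataDir, 'dir', 'file')]` after only that one file has been written, since paths are joined onto the starting directory as the walk descends.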