feat: support compressed keychain data V2 format (#9)
* feat: support compressed keychain data V2 format

* chore: minor comments & clean up

* chore(misc)

* chore: do integrity check on ShareID in V2 prefix part and in the inflated JSON itself

* chore: rename shadowed loop iter var

* chore(compress): edit a comment

* chore: print deflated & inflated sizes on share decompress

* chore: add 'Processing share' log output for regular JSON shares, improve logs in general
notatestuser authored on Jul 4, 2024
1 parent: ad303d3 · commit: 4da7e2c
Showing 2 changed files with 71 additions and 6 deletions.
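From the parsing logic added to main.go below, a V2 share string is laid out as

_V2_<ShareID>_<base64(DEFLATE-compressed share JSON)>

where the ShareID before the delimiter must match the ShareID field inside the decompressed JSON; this layout summary is inferred from the code below, and the angle-bracketed names are placeholders, not identifiers from the commit.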
compress.go (new file): 30 additions & 0 deletions
@@ -0,0 +1,30 @@
+package main
+
+import (
+	"bytes"
+	"compress/flate"
+	"fmt"
+	"io"
+)
+
+// DEFLATE (customized)
+
+// deflateCommonJSONDict is a custom dictionary for the DEFLATE algorithm, based on samples of our JSON save data format.
+// It reduces the size of the compressed data (in some cases significantly) because common tokens can be referenced in this dictionary string rather than being included in the output save files.
+// DO NOT CHANGE THIS VALUE WITHOUT MIGRATING SAVED DATA PROPERLY! Changing it will break the ability to decompress older save data.
+const deflateCommonJSONDict = `null` +
+	`{"PaillierSK":{"N":6922045424785223,"LambdaN":4363699717840427,"PhiN":1145683160139719},"NTildei":8522668679230366,"H1i":431112616415448,"H2i":2218581434585855,"Alpha":1644458411253359,"Beta":2055026955915508,"P":1241053165406178,"Q":1516049695813965,"Xi":8108379843691545,"ShareID":332537562,"Ks":[8215999875339097],"NTildej":[8884582175310771],"H1j":[4444713407350296],"H2j":[7785566466619086,3388458350150109],"BigXj":[{"Curve":"secp256k1","Coords":[1159753063359249,8401050585979724]},{"Curve":"secp256k1","Coords":[4204142946914243,1580053746046931]}],"PaillierPKs":[{"N":6991977320107385},{"N":1990415854994626}],"ECDSAPub":{"Curve":"secp256k1","Coords":[4388167466892256,5461155207642833]}}` +
+	`{"Xi":3754872620939198,"ShareID":1643074317,"Ks":[2807299711782590,4735268842394955],"BigXj":[{"Curve":"ed25519","Coords":[5485415139763324,743952773955764]},{"Curve":"ed25519","Coords":[8068345193554698,8977361460270075]}],"EDDSAPub":{"Curve":"ed25519","Coords":[8317261857323617,796509558082006]}}` +
+	`secp256k1` + `nist256p1` + `ed25519` + `P384` + `P521` +
+	`Anomalous` + `M-221` + `E-222` + `M-511` + `E-521` + `NIST P-224` + `Curve1174` + `curve25519` + `BN(2,254)` + `brainpoolP256t1` + `ANSSI` + `FRP256v1` + `NIST P-256` + `E-382` + `M-383` + `Curve383187` + `brainpoolP384t1` + `NIST P-384` + `Curve41417` + `Ed448-Goldilocks` +
+	`LocalSecrets` + `LocalPreParams`
+
+// inflateSaveDataJSON decompresses TSS save data in JSON format using the DEFLATE algorithm with a custom dictionary.
+func inflateSaveDataJSON(compressed []byte) ([]byte, error) {
+	reader := flate.NewReaderDict(bytes.NewReader(compressed), []byte(deflateCommonJSONDict))
+	decompressed, err := io.ReadAll(reader)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read from flate reader: %v", err)
+	}
+	return decompressed, reader.Close()
+}
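The commit adds only the inflate (decompression) path. For context, a matching deflate path is sketched below under the hypothetical name deflateSaveDataJSON (it does not appear in this diff); the key constraint is that it must compress with the exact same deflateCommonJSONDict, or inflation will not reproduce the original JSON.

// deflateSaveDataJSON is a hypothetical counterpart to inflateSaveDataJSON, shown for illustration only.
// It passes the same dictionary to flate.NewWriterDict that inflateSaveDataJSON passes to flate.NewReaderDict.
func deflateSaveDataJSON(jsonData []byte) ([]byte, error) {
	var buf bytes.Buffer
	writer, err := flate.NewWriterDict(&buf, flate.BestCompression, []byte(deflateCommonJSONDict))
	if err != nil {
		return nil, fmt.Errorf("failed to create flate writer: %v", err)
	}
	if _, err = writer.Write(jsonData); err != nil {
		return nil, fmt.Errorf("failed to write to flate writer: %v", err)
	}
	// Close flushes all pending compressed data into buf before the bytes are read out.
	if err = writer.Close(); err != nil {
		return nil, fmt.Errorf("failed to close flate writer: %v", err)
	}
	return buf.Bytes(), nil
}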
main.go: 41 additions & 6 deletions
@@ -27,10 +27,6 @@ import (
 	"golang.org/x/crypto/sha3"
 )
 
-const (
-	WORDS = 24
-)
-
 type (
 	SavedData struct {
 		Vaults map[string]CipheredVaultMap `json:"vaults"`
@@ -60,6 +56,11 @@ type (
 	VaultAllShares map[string][]*keygen.LocalPartySaveData
 )
 
+const (
+	WORDS         = 24
+	v2MagicPrefix = "_V2_"
+)
+
 func main() {
 	reader := bufio.NewReader(os.Stdin)
 
@@ -232,12 +233,46 @@ func main() {
 			vaultAllShares[vID] = make([]*keygen.LocalPartySaveData, 0, len(clearVaults[vID].Shares))
 		}
 		shareDatas := make([]*keygen.LocalPartySaveData, len(clearVaults[vID].Shares))
-		for i, strShare := range clearVaults[vID].Shares {
+		for j, strShare := range clearVaults[vID].Shares {
+			// handle compressed "V2" format (ECDSA)
+			if strings.HasPrefix(strShare, v2MagicPrefix) {
+				strShare = strings.TrimPrefix(strShare, v2MagicPrefix)
+				expShareID, b64Part, found := strings.Cut(strShare, "_")
+				if !found {
+					panic("failed to split on share ID delim in V2 save data")
+				}
+				deflated, err2 := base64.StdEncoding.DecodeString(b64Part)
+				if err2 != nil {
+					panic(errors2.Wrapf(err2, "failed to decode base64 part of V2 save data"))
+				}
+				inflated, err2 := inflateSaveDataJSON(deflated)
+				if err2 != nil {
+					panic(errors2.Wrapf(err2, "failed to inflate compressed V2 save data"))
+				}
+				// shareID integrity check
+				abridgedData := new(struct {
+					ShareID *big.Int `json:"shareID"`
+				})
+				if err2 = json.Unmarshal(inflated, abridgedData); err2 != nil {
+					panic(errors2.Wrapf(err2, "invalid data format - is this an old backup file? (code: 4)"))
+				}
+				if abridgedData.ShareID.String() != expShareID {
+					panic(fmt.Sprintf("share ID mismatch in V2 save data with ShareID %s", abridgedData.ShareID))
+				}
+				strShare = string(inflated)
+
+				// log deflated vs inflated sizes in KB
+				fmt.Printf("Processing share %s.\t %.1f KB → %.1f KB\n",
+					abridgedData.ShareID, float64(len(deflated))/1024, float64(len(inflated))/1024)
+			}
+			// proceed with regular json unmarshal
 			shareData := new(keygen.LocalPartySaveData)
 			if err = json.Unmarshal([]byte(strShare), shareData); err != nil {
 				panic(errors2.Wrapf(err, "invalid data format - is this an old backup file? (code: 4)"))
 			}
-			shareDatas[i] = shareData
+			// log a variation of this line if the share is legacy (check the original
+			// string: strShare holds the inflated JSON for V2 shares by this point)
+			if !strings.HasPrefix(clearVaults[vID].Shares[j], v2MagicPrefix) {
+				fmt.Printf("Processing share %s.\t %.1f KB\n",
+					shareData.ShareID, float64(len(strShare))/1024)
+			}
+			shareDatas[j] = shareData
 		}
 		vaultAllShares[vID] = append(vaultAllShares[vID], shareDatas...)
 	}
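For completeness, here is an illustrative helper (hypothetical, not part of the commit; it relies on the deflateSaveDataJSON sketch given after compress.go above) that assembles a V2 share string in exactly the shape the loop above parses: prefix, ShareID, delimiter, then base64 of the deflated JSON.

// buildV2Share is a hypothetical helper, shown for illustration only.
func buildV2Share(shareJSON []byte, shareID string) (string, error) {
	deflated, err := deflateSaveDataJSON(shareJSON) // hypothetical encoder from the sketch above
	if err != nil {
		return "", err
	}
	// layout: _V2_<ShareID>_<base64(deflated JSON)>
	return v2MagicPrefix + shareID + "_" + base64.StdEncoding.EncodeToString(deflated), nil
}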
