From 01dd5c7f7b285284c3c1ef8fe62ba60a6f43ba88 Mon Sep 17 00:00:00 2001
From: Giuseppe Bertone
Date: Thu, 17 Sep 2020 10:23:56 +0200
Subject: [PATCH] cmd/geth: added counters to the geth inspect report (#21495)

* database: added counters

* Improved stats for ancient db

* Small improvement

* Better message and added percentage while counting receipts

* Fast counting for receipts

* Added info message

* Show both receipts items count from ancient db and counted receipts

* Fixed default case

* Removed counter for receipts in ancient store

* Removed counting of receipts present in leveldb
---
 core/rawdb/database.go | 190 ++++++++++++++++++++++++-----------------
 1 file changed, 114 insertions(+), 76 deletions(-)

diff --git a/core/rawdb/database.go b/core/rawdb/database.go
index 316b5addf3c4..b1ac3e95878f 100644
--- a/core/rawdb/database.go
+++ b/core/rawdb/database.go
@@ -238,6 +238,36 @@ func NewLevelDBDatabaseWithFreezer(file string, cache int, handles int, freezer
     return frdb, nil
 }
 
+type counter uint64
+
+func (c counter) String() string {
+    return fmt.Sprintf("%d", c)
+}
+
+func (c counter) Percentage(current uint64) string {
+    return fmt.Sprintf("%d", current*100/uint64(c))
+}
+
+// stat stores sizes and count for a parameter
+type stat struct {
+    size  common.StorageSize
+    count counter
+}
+
+// Add size to the stat and increase the counter by 1
+func (s *stat) Add(size common.StorageSize) {
+    s.size += size
+    s.count++
+}
+
+func (s *stat) Size() string {
+    return s.size.String()
+}
+
+func (s *stat) Count() string {
+    return s.count.String()
+}
+
 // InspectDatabase traverses the entire database and checks the size
 // of all different categories of data.
 func InspectDatabase(db ethdb.Database) error {
@@ -250,36 +280,38 @@ func InspectDatabase(db ethdb.Database) error {
         logged = time.Now()
 
         // Key-value store statistics
-        total           common.StorageSize
-        headerSize      common.StorageSize
-        bodySize        common.StorageSize
-        receiptSize     common.StorageSize
-        tdSize          common.StorageSize
-        numHashPairing  common.StorageSize
-        hashNumPairing  common.StorageSize
-        trieSize        common.StorageSize
-        codeSize        common.StorageSize
-        txlookupSize    common.StorageSize
-        accountSnapSize common.StorageSize
-        storageSnapSize common.StorageSize
-        preimageSize    common.StorageSize
-        bloomBitsSize   common.StorageSize
-        cliqueSnapsSize common.StorageSize
+        headers         stat
+        bodies          stat
+        receipts        stat
+        tds             stat
+        numHashPairings stat
+        hashNumPairings stat
+        tries           stat
+        codes           stat
+        txLookups       stat
+        accountSnaps    stat
+        storageSnaps    stat
+        preimages       stat
+        bloomBits       stat
+        cliqueSnaps     stat
 
         // Ancient store statistics
-        ancientHeaders  common.StorageSize
-        ancientBodies   common.StorageSize
-        ancientReceipts common.StorageSize
-        ancientHashes   common.StorageSize
-        ancientTds      common.StorageSize
+        ancientHeadersSize  common.StorageSize
+        ancientBodiesSize   common.StorageSize
+        ancientReceiptsSize common.StorageSize
+        ancientTdsSize      common.StorageSize
+        ancientHashesSize   common.StorageSize
 
         // Les statistic
-        chtTrieNodes   common.StorageSize
-        bloomTrieNodes common.StorageSize
+        chtTrieNodes   stat
+        bloomTrieNodes stat
 
         // Meta- and unaccounted data
-        metadata    common.StorageSize
-        unaccounted common.StorageSize
+        metadata    stat
+        unaccounted stat
+
+        // Totals
+        total common.StorageSize
     )
     // Inspect key-value database first.
     for it.Next() {
@@ -289,98 +321,104 @@ func InspectDatabase(db ethdb.Database) error {
         )
         total += size
         switch {
-        case bytes.HasPrefix(key, headerPrefix) && bytes.HasSuffix(key, headerTDSuffix):
-            tdSize += size
-        case bytes.HasPrefix(key, headerPrefix) && bytes.HasSuffix(key, headerHashSuffix):
-            numHashPairing += size
         case bytes.HasPrefix(key, headerPrefix) && len(key) == (len(headerPrefix)+8+common.HashLength):
-            headerSize += size
-        case bytes.HasPrefix(key, headerNumberPrefix) && len(key) == (len(headerNumberPrefix)+common.HashLength):
-            hashNumPairing += size
+            headers.Add(size)
         case bytes.HasPrefix(key, blockBodyPrefix) && len(key) == (len(blockBodyPrefix)+8+common.HashLength):
-            bodySize += size
+            bodies.Add(size)
         case bytes.HasPrefix(key, blockReceiptsPrefix) && len(key) == (len(blockReceiptsPrefix)+8+common.HashLength):
-            receiptSize += size
+            receipts.Add(size)
+        case bytes.HasPrefix(key, headerPrefix) && bytes.HasSuffix(key, headerTDSuffix):
+            tds.Add(size)
+        case bytes.HasPrefix(key, headerPrefix) && bytes.HasSuffix(key, headerHashSuffix):
+            numHashPairings.Add(size)
+        case bytes.HasPrefix(key, headerNumberPrefix) && len(key) == (len(headerNumberPrefix)+common.HashLength):
+            hashNumPairings.Add(size)
+        case len(key) == common.HashLength:
+            tries.Add(size)
+        case bytes.HasPrefix(key, codePrefix) && len(key) == len(codePrefix)+common.HashLength:
+            codes.Add(size)
         case bytes.HasPrefix(key, txLookupPrefix) && len(key) == (len(txLookupPrefix)+common.HashLength):
-            txlookupSize += size
+            txLookups.Add(size)
         case bytes.HasPrefix(key, SnapshotAccountPrefix) && len(key) == (len(SnapshotAccountPrefix)+common.HashLength):
-            accountSnapSize += size
+            accountSnaps.Add(size)
         case bytes.HasPrefix(key, SnapshotStoragePrefix) && len(key) == (len(SnapshotStoragePrefix)+2*common.HashLength):
-            storageSnapSize += size
+            storageSnaps.Add(size)
        case bytes.HasPrefix(key, preimagePrefix) && len(key) == (len(preimagePrefix)+common.HashLength):
-            preimageSize += size
+            preimages.Add(size)
         case bytes.HasPrefix(key, bloomBitsPrefix) && len(key) == (len(bloomBitsPrefix)+10+common.HashLength):
-            bloomBitsSize += size
+            bloomBits.Add(size)
         case bytes.HasPrefix(key, []byte("clique-")) && len(key) == 7+common.HashLength:
-            cliqueSnapsSize += size
+            cliqueSnaps.Add(size)
         case bytes.HasPrefix(key, []byte("cht-")) && len(key) == 4+common.HashLength:
-            chtTrieNodes += size
+            chtTrieNodes.Add(size)
         case bytes.HasPrefix(key, []byte("blt-")) && len(key) == 4+common.HashLength:
-            bloomTrieNodes += size
-        case bytes.HasPrefix(key, codePrefix) && len(key) == len(codePrefix)+common.HashLength:
-            codeSize += size
-        case len(key) == common.HashLength:
-            trieSize += size
+            bloomTrieNodes.Add(size)
         default:
             var accounted bool
             for _, meta := range [][]byte{databaseVerisionKey, headHeaderKey, headBlockKey, headFastBlockKey, fastTrieProgressKey} {
                 if bytes.Equal(key, meta) {
-                    metadata += size
+                    metadata.Add(size)
                     accounted = true
                     break
                 }
             }
             if !accounted {
-                unaccounted += size
+                unaccounted.Add(size)
             }
         }
-        count += 1
+        count++
         if count%1000 == 0 && time.Since(logged) > 8*time.Second {
             log.Info("Inspecting database", "count", count, "elapsed", common.PrettyDuration(time.Since(start)))
             logged = time.Now()
         }
     }
     // Inspect append-only file store then.
-    ancients := []*common.StorageSize{&ancientHeaders, &ancientBodies, &ancientReceipts, &ancientHashes, &ancientTds}
+    ancientSizes := []*common.StorageSize{&ancientHeadersSize, &ancientBodiesSize, &ancientReceiptsSize, &ancientHashesSize, &ancientTdsSize}
     for i, category := range []string{freezerHeaderTable, freezerBodiesTable, freezerReceiptTable, freezerHashTable, freezerDifficultyTable} {
         if size, err := db.AncientSize(category); err == nil {
-            *ancients[i] += common.StorageSize(size)
+            *ancientSizes[i] += common.StorageSize(size)
             total += common.StorageSize(size)
         }
     }
+    // Get number of ancient rows inside the freezer
+    ancients := counter(0)
+    if count, err := db.Ancients(); err == nil {
+        ancients = counter(count)
+    }
     // Display the database statistic.
     stats := [][]string{
-        {"Key-Value store", "Headers", headerSize.String()},
-        {"Key-Value store", "Bodies", bodySize.String()},
-        {"Key-Value store", "Receipts", receiptSize.String()},
-        {"Key-Value store", "Difficulties", tdSize.String()},
-        {"Key-Value store", "Block number->hash", numHashPairing.String()},
-        {"Key-Value store", "Block hash->number", hashNumPairing.String()},
-        {"Key-Value store", "Transaction index", txlookupSize.String()},
-        {"Key-Value store", "Bloombit index", bloomBitsSize.String()},
-        {"Key-Value store", "Contract codes", codeSize.String()},
-        {"Key-Value store", "Trie nodes", trieSize.String()},
-        {"Key-Value store", "Trie preimages", preimageSize.String()},
-        {"Key-Value store", "Account snapshot", accountSnapSize.String()},
-        {"Key-Value store", "Storage snapshot", storageSnapSize.String()},
-        {"Key-Value store", "Clique snapshots", cliqueSnapsSize.String()},
-        {"Key-Value store", "Singleton metadata", metadata.String()},
-        {"Ancient store", "Headers", ancientHeaders.String()},
-        {"Ancient store", "Bodies", ancientBodies.String()},
-        {"Ancient store", "Receipts", ancientReceipts.String()},
-        {"Ancient store", "Difficulties", ancientTds.String()},
-        {"Ancient store", "Block number->hash", ancientHashes.String()},
-        {"Light client", "CHT trie nodes", chtTrieNodes.String()},
-        {"Light client", "Bloom trie nodes", bloomTrieNodes.String()},
+        {"Key-Value store", "Headers", headers.Size(), headers.Count()},
+        {"Key-Value store", "Bodies", bodies.Size(), bodies.Count()},
+        {"Key-Value store", "Receipt lists", receipts.Size(), receipts.Count()},
+        {"Key-Value store", "Difficulties", tds.Size(), tds.Count()},
+        {"Key-Value store", "Block number->hash", numHashPairings.Size(), numHashPairings.Count()},
+        {"Key-Value store", "Block hash->number", hashNumPairings.Size(), hashNumPairings.Count()},
+        {"Key-Value store", "Transaction index", txLookups.Size(), txLookups.Count()},
+        {"Key-Value store", "Bloombit index", bloomBits.Size(), bloomBits.Count()},
+        {"Key-Value store", "Contract codes", codes.Size(), codes.Count()},
+        {"Key-Value store", "Trie nodes", tries.Size(), tries.Count()},
+        {"Key-Value store", "Trie preimages", preimages.Size(), preimages.Count()},
+        {"Key-Value store", "Account snapshot", accountSnaps.Size(), accountSnaps.Count()},
+        {"Key-Value store", "Storage snapshot", storageSnaps.Size(), storageSnaps.Count()},
+        {"Key-Value store", "Clique snapshots", cliqueSnaps.Size(), cliqueSnaps.Count()},
+        {"Key-Value store", "Singleton metadata", metadata.Size(), metadata.Count()},
+        {"Ancient store", "Headers", ancientHeadersSize.String(), ancients.String()},
+        {"Ancient store", "Bodies", ancientBodiesSize.String(), ancients.String()},
+        {"Ancient store", "Receipt lists", ancientReceiptsSize.String(), ancients.String()},
+        {"Ancient store", "Difficulties", ancientTdsSize.String(), ancients.String()},
+        {"Ancient store", "Block number->hash", ancientHashesSize.String(), ancients.String()},
+        {"Light client", "CHT trie nodes", chtTrieNodes.Size(), chtTrieNodes.Count()},
+        {"Light client", "Bloom trie nodes", bloomTrieNodes.Size(), bloomTrieNodes.Count()},
     }
     table := tablewriter.NewWriter(os.Stdout)
-    table.SetHeader([]string{"Database", "Category", "Size"})
-    table.SetFooter([]string{"", "Total", total.String()})
+    table.SetHeader([]string{"Database", "Category", "Size", "Items"})
+    table.SetFooter([]string{"", "Total", total.String(), " "})
     table.AppendBulk(stats)
     table.Render()
 
-    if unaccounted > 0 {
-        log.Error("Database contains unaccounted data", "size", unaccounted)
+    if unaccounted.size > 0 {
+        log.Error("Database contains unaccounted data", "size", unaccounted.size, "count", unaccounted.count)
     }
+
     return nil
 }
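
Editor's note: the heart of this patch is the stat/counter pair that InspectDatabase now updates once per matched key. The snippet below is a minimal standalone sketch of that accumulation pattern, not part of the patch itself; it swaps common.StorageSize for a plain uint64 byte count so it compiles without any go-ethereum imports, and the sample header sizes are made up.

package main

import "fmt"

// counter mirrors the helper type added by the patch.
type counter uint64

func (c counter) String() string { return fmt.Sprintf("%d", c) }

// stat mirrors the patch's stat type, except that size is a plain
// uint64 byte count here instead of common.StorageSize, purely to
// keep this sketch dependency-free.
type stat struct {
    size  uint64
    count counter
}

// Add accumulates one entry's size and bumps the item counter,
// which is what InspectDatabase now does for every matched key.
func (s *stat) Add(size uint64) {
    s.size += size
    s.count++
}

func main() {
    var headers stat
    for _, sz := range []uint64{531, 540, 528} { // made-up per-header sizes
        headers.Add(sz)
    }
    // One stat feeds one row of the inspect table: the size column
    // plus the new "Items" column.
    fmt.Printf("Headers: %d bytes across %s items\n", headers.size, headers.count)
}

In the real code each category keeps its own stat, so size and item count stay consistent per table row, while the freezer rows reuse a single ancients counter taken from db.Ancients() because every ancient table holds the same number of entries.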