This repository has been archived by the owner on Oct 18, 2023. It is now read-only.

change bucket type to string (erigontech#894)
AskAlexSharov authored Aug 10, 2020
1 parent 7a1b892 commit d9d9e14
Showing 59 changed files with 289 additions and 797 deletions.
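The commit migrates bucket identifiers across the code base from `[]byte` to `string`. The sketch below shows the general shape of the change; the constant names follow `common/dbutils`, but the short values are hypothetical placeholders, not the real bucket prefixes.

```go
// Before: bucket names were byte slices, so using them as map keys or
// comparing them required string()/bytes.Equal and defensive copies.
//
//	var CurrentStateBucket = []byte("CST")
//
// After: bucket names are plain string constants; they compare with ==,
// key maps directly, and are converted with []byte(...) only at the
// boundary of APIs that still expect byte slices.
package dbutils

const (
	CurrentStateBucket    = "CST" // hypothetical value, for illustration
	AccountsHistoryBucket = "hAT" // hypothetical value, for illustration
)
```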
10 changes: 5 additions & 5 deletions cmd/hack/hack.go
@@ -314,7 +314,7 @@ func accountSavings(db *bolt.DB) (int, int) {
emptyRoots := 0
emptyCodes := 0
db.View(func(tx *bolt.Tx) error {
b := tx.Bucket(dbutils.CurrentStateBucket)
b := tx.Bucket([]byte(dbutils.CurrentStateBucket))
c := b.Cursor()
for k, v := c.First(); k != nil; k, v = c.Next() {
if len(k) != 32 {
@@ -350,7 +350,7 @@ func bucketStats(chaindata string) {
fmt.Printf(",BranchPageN,BranchOverflowN,LeafPageN,LeafOverflowN,KeyN,Depth,BranchAlloc,BranchInuse,LeafAlloc,LeafInuse,BucketN,InlineBucketN,InlineBucketInuse\n")
_ = db.View(func(tx *bolt.Tx) error {
for _, bucket := range bucketList {
b := tx.Bucket(bucket)
b := tx.Bucket([]byte(bucket))
bs := b.Stats()
fmt.Printf("%s,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d\n", string(bucket),
bs.BranchPageN, bs.BranchOverflowN, bs.LeafPageN, bs.LeafOverflowN, bs.KeyN, bs.Depth, bs.BranchAlloc, bs.BranchInuse,
@@ -775,7 +775,7 @@ func testStartup() {
fmt.Printf("Took %v\n", time.Since(startTime))
}

func dbSlice(chaindata string, bucket []byte, prefix []byte) {
func dbSlice(chaindata string, bucket string, prefix []byte) {
db := ethdb.MustOpen(chaindata)
defer db.Close()
if err := db.KV().View(context.Background(), func(tx ethdb.Tx) error {
@@ -1019,7 +1019,7 @@ func printBranches(block uint64) {
var hashes []common.Hash
numberEnc := make([]byte, 8)
binary.BigEndian.PutUint64(numberEnc, block)
if err := ethDb.Walk([]byte("h"), numberEnc, 8*8, func(k, v []byte) (bool, error) {
if err := ethDb.Walk("h", numberEnc, 8*8, func(k, v []byte) (bool, error) {
if len(k) == 8+32 {
hashes = append(hashes, common.BytesToHash(k[8:]))
}
@@ -2122,7 +2122,7 @@ func main() {
getModifiedAccounts(*chaindata)
}
if *action == "slice" {
dbSlice(*chaindata, []byte(*bucket), common.FromHex(*hash))
dbSlice(*chaindata, *bucket, common.FromHex(*hash))
}
if *action == "resetState" {
resetState(*chaindata)
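In `cmd/hack/hack.go` the bolt transactions are where the conversion now lives: the bolt API used here still takes `[]byte` bucket names (as the new `[]byte(dbutils.CurrentStateBucket)` call sites show), so the string constants are wrapped at the call site. A minimal, self-contained sketch of that boundary, written against upstream bbolt (whose `Tx.Bucket` likewise takes a byte slice) rather than the fork vendored in this repository:

```go
package main

import (
	"fmt"
	"log"

	bolt "go.etcd.io/bbolt"
)

// Bucket names are strings now; the conversion to []byte happens only
// where the storage layer demands it.
const currentStateBucket = "CST" // hypothetical value, for illustration

func countKeys(db *bolt.DB) (int, error) {
	n := 0
	err := db.View(func(tx *bolt.Tx) error {
		b := tx.Bucket([]byte(currentStateBucket)) // string -> []byte at the bolt boundary
		if b == nil {
			return nil // bucket not created yet
		}
		return b.ForEach(func(k, v []byte) error {
			n++
			return nil
		})
	})
	return n, err
}

func main() {
	db, err := bolt.Open("example.db", 0600, nil)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	n, err := countKeys(db)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("keys in bucket:", n)
}
```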
4 changes: 2 additions & 2 deletions cmd/integration/commands/refetence_db.go
@@ -11,7 +11,7 @@ import (
"github.com/spf13/cobra"
)

var stateBuckets = [][]byte{
var stateBuckets = []string{
dbutils.CurrentStateBucket,
dbutils.AccountChangeSetBucket,
dbutils.StorageChangeSetBucket,
@@ -111,7 +111,7 @@ func compareBucketBetweenDatabases(ctx context.Context, chaindata string, refere

if err := db.KV().View(context.Background(), func(tx ethdb.Tx) error {
return refDB.KV().View(context.Background(), func(refTX ethdb.Tx) error {
return compareBuckets(ctx, tx.Bucket([]byte(bucket)), refTX.Bucket([]byte(bucket)))
return compareBuckets(ctx, tx.Bucket(bucket), refTX.Bucket(bucket))
})
}); err != nil {
return err
13 changes: 7 additions & 6 deletions cmd/pics/state.go
@@ -160,6 +160,7 @@ func stateDatabaseComparison(first ethdb.KV, second ethdb.KV, number int) error
if err = second.View(context.Background(), func(readTx ethdb.Tx) error {
return first.View(context.Background(), func(firstTx ethdb.Tx) error {
for _, bucketName := range dbutils.Buckets {
bucketName := bucketName
b := readTx.Bucket(bucketName)
firstB := firstTx.Bucket(bucketName)
if err2 := b.Cursor().Walk(func(k, v []byte) (bool, error) {
@@ -184,19 +185,19 @@ func stateDatabaseComparison(first ethdb.KV, second ethdb.KV, number int) error
key := keyKeyBytes.ToHex()
var f1 *os.File
var ok bool
if f1, ok = perBucketFiles[string(bucketName)]; !ok {
if f1, ok = perBucketFiles[bucketName]; !ok {
f1, err = os.Create(fmt.Sprintf("changes_%d_%s_%d.dot", number, bucketName, len(perBucketFiles)))
if err != nil {
return false, err
}
visual.StartGraph(f1, true)
var clusterLabel string
var ok bool
if clusterLabel, ok = bucketLabels[string(bucketName)]; !ok {
clusterLabel = string(bucketName)
if clusterLabel, ok = bucketLabels[bucketName]; !ok {
clusterLabel = bucketName
}
visual.StartCluster(f1, 0, clusterLabel)
perBucketFiles[string(common.CopyBytes(bucketName))] = f1
perBucketFiles[bucketName] = f1
}
visual.Horizontal(f1, key, len(key), fmt.Sprintf("k_%d", i), visual.HexIndexColors, visual.HexFontColors, 0)
if len(val) > 0 {
@@ -222,9 +223,9 @@ func stateDatabaseComparison(first ethdb.KV, second ethdb.KV, number int) error
} else {
noValues[i] = struct{}{}
}
lst := m[string(bucketName)]
lst := m[bucketName]
lst = append(lst, i)
m[string(common.CopyBytes(bucketName))] = lst
m[bucketName] = lst
i++
return true, nil
}); err2 != nil {
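The `cmd/pics/state.go` hunk shows a second benefit: a `string` bucket name is an immutable, comparable value, so it can key `perBucketFiles` and `m` directly, and the `string(...)` / `common.CopyBytes(...)` dance that guarded against the underlying byte slice being reused disappears. A small sketch of the map-key pattern, with made-up bucket names:

```go
package main

import "fmt"

func main() {
	// With []byte names, keying a map meant string(name) plus a defensive
	// copy when the slice could be reused by the iterator. With string
	// names the value itself is the key.
	perBucket := make(map[string][]int)

	buckets := []string{"AT", "ST", "AT"} // hypothetical bucket names
	for i, bucket := range buckets {
		perBucket[bucket] = append(perBucket[bucket], i)
	}

	fmt.Println(perBucket) // map[AT:[0 2] ST:[1]]
}
```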
2 changes: 1 addition & 1 deletion cmd/state/commands/check_index.go
@@ -16,6 +16,6 @@ var checkIndexCMD = &cobra.Command{
Use: "checkIndex",
Short: "Index checker",
RunE: func(cmd *cobra.Command, args []string) error {
return verify.CheckIndex(chaindata, []byte(changeSetBucket), []byte(indexBucket))
return verify.CheckIndex(chaindata, changeSetBucket, indexBucket)
},
}
2 changes: 1 addition & 1 deletion cmd/state/commands/index_stats.go
@@ -19,6 +19,6 @@ var indexStatsCmd = &cobra.Command{
if statsfile == "stateless.csv" {
statsfile = ""
}
return stats.IndexStats(chaindata, []byte(indexBucket), statsfile)
return stats.IndexStats(chaindata, indexBucket, statsfile)
},
}
2 changes: 1 addition & 1 deletion cmd/state/commands/regenerate_index.go
@@ -15,6 +15,6 @@ var regenerateIndexCmd = &cobra.Command{
Use: "regenerateIndex",
Short: "Generate index for accounts/storage based on changesets",
RunE: func(cmd *cobra.Command, args []string) error {
return generate.RegenerateIndex(chaindata, []byte(changeSetBucket))
return generate.RegenerateIndex(chaindata, changeSetBucket)
},
}
4 changes: 2 additions & 2 deletions cmd/state/generate/regenerate_index.go
@@ -14,7 +14,7 @@ import (
"github.com/ledgerwatch/turbo-geth/log"
)

func RegenerateIndex(chaindata string, csBucket []byte) error {
func RegenerateIndex(chaindata string, csBucket string) error {
db := ethdb.MustOpen(chaindata)
ch := make(chan os.Signal, 1)
quitCh := make(chan struct{})
@@ -31,7 +31,7 @@ func RegenerateIndex(chaindata string, csBucket []byte) error {
}

ig := core.NewIndexGenerator(db, quitCh)
cs, ok := changeset.Mapper[string(csBucket)]
cs, ok := changeset.Mapper[csBucket]
if !ok {
return errors.New("unknown changeset")
}
4 changes: 2 additions & 2 deletions cmd/state/stateless/state.go
@@ -1719,7 +1719,7 @@ func oldStorage() {
itemsByAddress := make(map[common.Address]int)
count := 0
err = db.View(func(tx *bolt.Tx) error {
b := tx.Bucket(dbutils.CurrentStateBucket)
b := tx.Bucket([]byte(dbutils.CurrentStateBucket))
if b == nil {
return nil
}
@@ -1739,7 +1739,7 @@ })
})
check(err)
err = db.View(func(tx *bolt.Tx) error {
b := tx.Bucket(dbutils.AccountsHistoryBucket)
b := tx.Bucket([]byte(dbutils.AccountsHistoryBucket))
if b == nil {
return nil
}
10 changes: 5 additions & 5 deletions cmd/state/stateless/state_snapshot.go
@@ -22,7 +22,7 @@ import (

type bucketWriter struct {
db ethdb.Database
bucket []byte
bucket string
pending ethdb.DbWithPendingMutations
written uint64
}
@@ -69,7 +69,7 @@ func (bw *bucketWriter) commit() error {
return nil
}

func newBucketWriter(db ethdb.Database, bucket []byte) *bucketWriter {
func newBucketWriter(db ethdb.Database, bucket string) *bucketWriter {
return &bucketWriter{
db: db,
bucket: bucket,
@@ -79,7 +79,7 @@ func newBucketWriter(db ethdb.Database, bucket []byte) *bucketWriter {
}

func copyDatabase(fromDB ethdb.Database, toDB ethdb.Database) error {
for _, bucket := range [][]byte{dbutils.CurrentStateBucket, dbutils.CodeBucket, dbutils.DatabaseInfoBucket} {
for _, bucket := range []string{dbutils.CurrentStateBucket, dbutils.CodeBucket, dbutils.DatabaseInfoBucket} {
fmt.Printf(" - copying bucket '%s'...\n", string(bucket))
writer := newBucketWriter(toDB, bucket)

@@ -124,8 +124,8 @@ func loadSnapshot(db ethdb.Database, filename string, createDb CreateDbFunc) {
func loadCodes(db *bolt.DB, codeDb ethdb.Database) error {
var account accounts.Account
err := db.Update(func(tx *bolt.Tx) error {
b := tx.Bucket(dbutils.CurrentStateBucket)
cb, err := tx.CreateBucket(dbutils.CodeBucket, true)
b := tx.Bucket([]byte(dbutils.CurrentStateBucket))
cb, err := tx.CreateBucket([]byte(dbutils.CodeBucket), true)
if err != nil {
return err
}
8 changes: 4 additions & 4 deletions cmd/state/stateless/state_snapshot_test.go
@@ -20,7 +20,7 @@ func generateData(prefix string) testData {
return testData(data)
}

func writeDataToDb(t *testing.T, db ethdb.Database, bucket []byte, data testData) {
func writeDataToDb(t *testing.T, db ethdb.Database, bucket string, data testData) {
for k, v := range data {
err := db.Put(bucket, []byte(k), v)
if err != nil {
@@ -29,7 +29,7 @@ func writeDataToDb(t *testing.T, db ethdb.Database, bucket []byte, data testData
}
}

func checkDataInDb(t *testing.T, db ethdb.Database, bucket []byte, data testData) {
func checkDataInDb(t *testing.T, db ethdb.Database, bucket string, data testData) {
for k, v := range data {
val, err := db.Get(bucket, []byte(k))
if err != nil {
@@ -101,7 +101,7 @@ func doTestcase(t *testing.T, testCase map[string]testData) {
defer destDb.Close()

for bucket, data := range testCase {
writeDataToDb(t, sourceDb, []byte(bucket), data)
writeDataToDb(t, sourceDb, bucket, data)
}

err := copyDatabase(sourceDb, destDb)
@@ -111,6 +111,6 @@ }
}

for bucket, data := range testCase {
checkDataInDb(t, destDb, []byte(bucket), data)
checkDataInDb(t, destDb, bucket, data)
}
}
2 changes: 1 addition & 1 deletion cmd/state/stateless/witness_db.go
@@ -12,7 +12,7 @@ import (
)

var (
witnessesBucket = []byte("witnesses")
witnessesBucket = "witnesses"
)

type WitnessDBWriter struct {
5 changes: 3 additions & 2 deletions cmd/state/stats/index_stats.go
@@ -8,18 +8,19 @@ import (
"os"
"sort"
"strconv"
"strings"
"time"

"github.com/ledgerwatch/turbo-geth/common"
"github.com/ledgerwatch/turbo-geth/common/dbutils"
"github.com/ledgerwatch/turbo-geth/ethdb"
)

func IndexStats(chaindata string, indexBucket []byte, statsFile string) error {
func IndexStats(chaindata string, indexBucket string, statsFile string) error {
db := ethdb.MustOpen(chaindata)
startTime := time.Now()
lenOfKey := common.HashLength
if bytes.HasPrefix(indexBucket, dbutils.StorageHistoryBucket) {
if strings.HasPrefix(indexBucket, dbutils.StorageHistoryBucket) {
lenOfKey = common.HashLength*2 + common.IncarnationLength
}

7 changes: 3 additions & 4 deletions cmd/state/verify/check_indexes.go
@@ -1,7 +1,6 @@
package verify

import (
"bytes"
"fmt"
"time"

@@ -11,18 +10,18 @@ import (
"github.com/ledgerwatch/turbo-geth/ethdb"
)

func CheckIndex(chaindata string, changeSetBucket []byte, indexBucket []byte) error {
func CheckIndex(chaindata string, changeSetBucket string, indexBucket string) error {
db := ethdb.MustOpen(chaindata)
startTime := time.Now()

var walker func([]byte) changeset.Walker
if bytes.Equal(dbutils.AccountChangeSetBucket, changeSetBucket) {
if dbutils.AccountChangeSetBucket == changeSetBucket {
walker = func(cs []byte) changeset.Walker {
return changeset.AccountChangeSetBytes(cs)
}
}

if bytes.Equal(dbutils.StorageChangeSetBucket, changeSetBucket) {
if dbutils.StorageChangeSetBucket == changeSetBucket {
walker = func(cs []byte) changeset.Walker {
return changeset.StorageChangeSetBytes(cs)
}
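With string buckets, `verify.CheckIndex` compares the bucket name with `==` and drops the `bytes` import. A hedged sketch of the same dispatch, using hypothetical bucket values and a switch in place of the two ifs from the diff:

```go
package main

import (
	"errors"
	"fmt"
)

const (
	accountChangeSetBucket = "ACS" // hypothetical values, for illustration
	storageChangeSetBucket = "SCS"
)

// walkerName picks a changeset walker by bucket name; plain string
// comparison replaces the earlier bytes.Equal calls.
func walkerName(changeSetBucket string) (string, error) {
	switch changeSetBucket {
	case accountChangeSetBucket:
		return "AccountChangeSetBytes", nil
	case storageChangeSetBucket:
		return "StorageChangeSetBytes", nil
	default:
		return "", errors.New("unknown changeset bucket: " + changeSetBucket)
	}
}

func main() {
	name, err := walkerName(accountChangeSetBucket)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Println("using walker:", name)
}
```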
10 changes: 5 additions & 5 deletions common/changeset/changeset.go
@@ -109,14 +109,14 @@ func Len(b []byte) int {
}

var Mapper = map[string]struct {
IndexBucket []byte
IndexBucket string
WalkerAdapter func(v []byte) Walker
KeySize int
Template string
New func() *ChangeSet
Encode func(*ChangeSet) ([]byte, error)
}{
string(dbutils.AccountChangeSetBucket): {
dbutils.AccountChangeSetBucket: {
IndexBucket: dbutils.AccountsHistoryBucket,
WalkerAdapter: func(v []byte) Walker {
return AccountChangeSetBytes(v)
@@ -126,7 +126,7 @@ var Mapper = map[string]struct {
New: NewAccountChangeSet,
Encode: EncodeAccounts,
},
string(dbutils.StorageChangeSetBucket): {
dbutils.StorageChangeSetBucket: {
IndexBucket: dbutils.StorageHistoryBucket,
WalkerAdapter: func(v []byte) Walker {
return StorageChangeSetBytes(v)
@@ -136,7 +136,7 @@ New: NewStorageChangeSet,
New: NewStorageChangeSet,
Encode: EncodeStorage,
},
string(dbutils.PlainAccountChangeSetBucket): {
dbutils.PlainAccountChangeSetBucket: {
IndexBucket: dbutils.AccountsHistoryBucket,
WalkerAdapter: func(v []byte) Walker {
return AccountChangeSetPlainBytes(v)
@@ -146,7 +146,7 @@ New: NewAccountChangeSetPlain,
New: NewAccountChangeSetPlain,
Encode: EncodeAccountsPlain,
},
string(dbutils.PlainStorageChangeSetBucket): {
dbutils.PlainStorageChangeSetBucket: {
IndexBucket: dbutils.StorageHistoryBucket,
WalkerAdapter: func(v []byte) Walker {
return StorageChangeSetPlainBytes(v)
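Finally, `changeset.Mapper` is now keyed by the bucket name itself, so lookups such as `changeset.Mapper[csBucket]` in `RegenerateIndex` above need no `string(...)` conversion. A simplified, self-contained sketch of a registry with that shape (field set trimmed, values hypothetical):

```go
package main

import "fmt"

// bucketMeta mirrors the shape of a Mapper entry in reduced form.
type bucketMeta struct {
	IndexBucket string
	KeySize     int
}

var mapper = map[string]bucketMeta{
	"ACS": {IndexBucket: "hAT", KeySize: 20}, // hypothetical entries
	"SCS": {IndexBucket: "hST", KeySize: 72},
}

func main() {
	csBucket := "ACS"
	meta, ok := mapper[csBucket] // direct lookup on the string bucket name
	if !ok {
		fmt.Println("unknown changeset bucket")
		return
	}
	fmt.Printf("index bucket %q, key size %d\n", meta.IndexBucket, meta.KeySize)
}
```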