From edfee91d0c0a05add3a376c1a876c3b87096da6f Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 14 Apr 2022 11:13:51 +0300 Subject: [PATCH 01/69] implemented count check --- .../clusters-checker/cmd/checker/config.toml | 11 ++ tools/clusters-checker/cmd/checker/main.go | 108 ++++++++++++ tools/clusters-checker/go.mod | 21 +++ tools/clusters-checker/go.sum | 122 +++++++++++++ tools/clusters-checker/pkg/checkers/count.go | 50 ++++++ .../clusters-checker/pkg/checkers/factory.go | 34 ++++ .../pkg/checkers/interface.go | 11 ++ .../clusters-checker/pkg/checkers/process.go | 30 ++++ tools/clusters-checker/pkg/client/client.go | 165 ++++++++++++++++++ tools/clusters-checker/pkg/client/count.go | 29 +++ tools/clusters-checker/pkg/config/config.go | 17 ++ 11 files changed, 598 insertions(+) create mode 100644 tools/clusters-checker/cmd/checker/config.toml create mode 100644 tools/clusters-checker/cmd/checker/main.go create mode 100644 tools/clusters-checker/go.mod create mode 100644 tools/clusters-checker/go.sum create mode 100644 tools/clusters-checker/pkg/checkers/count.go create mode 100644 tools/clusters-checker/pkg/checkers/factory.go create mode 100644 tools/clusters-checker/pkg/checkers/interface.go create mode 100644 tools/clusters-checker/pkg/checkers/process.go create mode 100644 tools/clusters-checker/pkg/client/client.go create mode 100644 tools/clusters-checker/pkg/client/count.go create mode 100644 tools/clusters-checker/pkg/config/config.go diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml new file mode 100644 index 00000000..a5bbc929 --- /dev/null +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -0,0 +1,11 @@ +[config] + [source-cluster] + url = "https://index.elrond.com" + user = "" + password = "" + [destination-cluster] + url = "https://index.elrond.com" + user = "" + password = "" + [compare] + indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] diff --git a/tools/clusters-checker/cmd/checker/main.go b/tools/clusters-checker/cmd/checker/main.go new file mode 100644 index 00000000..6945e6f0 --- /dev/null +++ b/tools/clusters-checker/cmd/checker/main.go @@ -0,0 +1,108 @@ +package main + +import ( + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/checkers" + "io/ioutil" + "os" + "path" + + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" + logger "github.com/ElrondNetwork/elrond-go-logger" + "github.com/pelletier/go-toml" + "github.com/urfave/cli" +) + +const configFileName = "config.toml" + +var ( + log = logger.GetOrCreate("main") + + // defines the path to the config folder + configPath = cli.StringFlag{ + Name: "config-path", + Usage: "The path to the config folder", + Value: "./", + } +) + +const helpTemplate = `NAME: + {{.Name}} - {{.Usage}} +USAGE: + {{.HelpName}} {{if .VisibleFlags}}[global options]{{end}} + {{if len .Authors}} +AUTHOR: + {{range .Authors}}{{ . 
}}{{end}} + {{end}}{{if .Commands}} +GLOBAL OPTIONS: + {{range .VisibleFlags}}{{.}} + {{end}} +VERSION: + {{.Version}} + {{end}} +` + +func main() { + app := cli.NewApp() + cli.AppHelpTemplate = helpTemplate + app.Name = "Cluster checker" + app.Version = "v1.0.0" + app.Usage = "Cluster checker" + app.Flags = []cli.Flag{ + configPath, + } + app.Authors = []cli.Author{ + { + Name: "The Elrond Team", + Email: "contact@elrond.com", + }, + } + + _ = logger.SetLogLevel("*:DEBUG") + + app.Action = checkClusters + + err := app.Run(os.Args) + if err != nil { + log.Error(err.Error()) + os.Exit(1) + } + +} + +func checkClusters(ctx *cli.Context) { + cfgPath := ctx.String(configPath.Name) + cfg, err := loadConfigFile(cfgPath) + if err != nil { + log.Error("cannot load config file", "error", err.Error()) + } + + clusterChecker, err := checkers.CreateClusterChecker(cfg) + if err != nil { + log.Error("cannot create cluster checker", "error", err.Error()) + } + + err = clusterChecker.CompareCounts() + if err != nil { + log.Error("cannot check counts", "error", err.Error()) + } + +} + +func loadConfigFile(pathStr string) (*config.Config, error) { + tomlBytes, err := loadBytesFromFile(path.Join(pathStr, configFileName)) + if err != nil { + return nil, err + } + + var cfg config.Config + err = toml.Unmarshal(tomlBytes, &cfg) + if err != nil { + return nil, err + } + + return &cfg, nil +} + +func loadBytesFromFile(file string) ([]byte, error) { + return ioutil.ReadFile(file) +} diff --git a/tools/clusters-checker/go.mod b/tools/clusters-checker/go.mod new file mode 100644 index 00000000..9e439f0a --- /dev/null +++ b/tools/clusters-checker/go.mod @@ -0,0 +1,21 @@ +module github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker + +go 1.17 + +require ( + github.com/ElrondNetwork/elrond-go-logger v1.0.7 + github.com/elastic/go-elasticsearch/v7 v7.17.1 + github.com/pelletier/go-toml v1.9.3 + github.com/tidwall/gjson v1.14.0 + github.com/urfave/cli v1.22.5 +) + +require ( + github.com/ElrondNetwork/elrond-go-core v1.0.0 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d // indirect + github.com/gogo/protobuf v1.3.2 // indirect + github.com/russross/blackfriday/v2 v2.0.1 // indirect + github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect + github.com/tidwall/match v1.1.1 // indirect + github.com/tidwall/pretty v1.2.0 // indirect +) diff --git a/tools/clusters-checker/go.sum b/tools/clusters-checker/go.sum new file mode 100644 index 00000000..e5ec143c --- /dev/null +++ b/tools/clusters-checker/go.sum @@ -0,0 +1,122 @@ +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/ElrondNetwork/elrond-go-core v1.0.0 h1:jMgNCQAG2ncOoc+I7u8mQBxbMT/eCuFNSrX1YIKN5v8= +github.com/ElrondNetwork/elrond-go-core v1.0.0/go.mod h1:FQMem7fFF4+8pQ6lVsBZq6yO+smD0nV23P4bJpmPjTo= +github.com/ElrondNetwork/elrond-go-logger v1.0.4/go.mod h1:e5D+c97lKUfFdAzFX7rrI2Igl/z4Y0RkKYKWyzprTGk= +github.com/ElrondNetwork/elrond-go-logger v1.0.7 h1:Ldl1rVS0RGKc1IsW8jIaGCb6Zwei04gsMvyjL05X6mE= +github.com/ElrondNetwork/elrond-go-logger v1.0.7/go.mod h1:cBfgx0ST/CJx8jrxJSC5aiSrvkGzcnF7sK06RD8mFxQ= +github.com/ElrondNetwork/elrond-vm-common v1.1.0/go.mod h1:w3i6f8uiuRkE68Ie/gebRcLgTuHqvruJSYrFyZWuLrE= +github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod 
h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= +github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= +github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= +github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= +github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts= +github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= +github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= +github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= +github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= +github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI= +github.com/elastic/go-elasticsearch/v7 v7.17.1 h1:49mHcHx7lpCL8cW1aioEwSEVKQF3s+Igi4Ye/QTWwmk= +github.com/elastic/go-elasticsearch/v7 v7.17.1/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jrick/logrotate v1.0.0/go.mod 
h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/shirou/gopsutil v0.0.0-20190901111213-e4ec7b275ada/go.mod h1:WWnYX4lzhCH5h/3YBfyVA3VbLYjlMZZAQcW9ojMexNc= +github.com/shirou/w32 v0.0.0-20160930032740-bb4de0191aa4/go.mod h1:qsXQc7+bwAM3Q1u/4XEfrquwF8Lw7D7y5cD8CuHnfIc= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tidwall/gjson v1.14.0 h1:6aeJ0bzojgWLa82gDQHcx3S0Lr/O51I9bJ5nv6JFx5w= +github.com/tidwall/gjson v1.14.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= +github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU= +github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto 
v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/tools/clusters-checker/pkg/checkers/count.go b/tools/clusters-checker/pkg/checkers/count.go new file mode 100644 index 00000000..cfe8ac34 --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/count.go @@ -0,0 +1,50 @@ +package checkers + +import "math" + +func (cc *clusterChecker) CompareCounts() error { + for _, index := range cc.indices { + err := cc.compareCount(index) + if err != nil { + return err + } + } + + return nil +} + +func (cc *clusterChecker) compareCount(index string) error { + countSourceCluster, err := cc.clientSource.DoCountRequest(index, nil) + if err != nil { + return err + } + + countDestinationCluster, err := cc.clientDestination.DoCountRequest(index, nil) + if err != nil { + return err + } + + difference := int64(countSourceCluster) - int64(countDestinationCluster) + + if difference == 0 { + log.Info("number of documents are the same", "index", index, + "source cluster", countSourceCluster, + "destination cluster", countDestinationCluster, + ) + } else if difference < 0 { + log.Info("number of documents", "index", index, + "source cluster", countSourceCluster, + "destination cluster", countDestinationCluster, + "in destination cluster are more elements, difference", math.Abs(float64(difference)), + ) + + } else { + log.Info("number of documents", "index", index, + "source cluster", countSourceCluster, + "destination cluster", countDestinationCluster, + "in source cluster are more elements, difference", math.Abs(float64(difference)), + ) + } + + return nil +} diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go new file mode 100644 index 00000000..7fb53de6 --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/factory.go @@ -0,0 +1,34 @@ +package checkers + +import ( + "fmt" + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/client" + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" + "github.com/elastic/go-elasticsearch/v7" +) + +func CreateClusterChecker(cfg *config.Config) (*clusterChecker, error) { + clientSource, err := client.NewElasticClient(elasticsearch.Config{ + Addresses: []string{cfg.SourceCluster.URL}, + Username: cfg.SourceCluster.User, + Password: cfg.SourceCluster.Password, + }) + if err != nil { + return nil, 
fmt.Errorf("cannot create source client %s", err.Error()) + } + + clientDestination, err := client.NewElasticClient(elasticsearch.Config{ + Addresses: []string{cfg.DestinationCluster.URL}, + Username: cfg.DestinationCluster.User, + Password: cfg.DestinationCluster.Password, + }) + if err != nil { + return nil, fmt.Errorf("cannot create destination client %s", err.Error()) + } + + return &clusterChecker{ + clientSource: clientSource, + clientDestination: clientDestination, + indices: cfg.Compare.Indices, + }, nil +} diff --git a/tools/clusters-checker/pkg/checkers/interface.go b/tools/clusters-checker/pkg/checkers/interface.go new file mode 100644 index 00000000..0cf7758c --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/interface.go @@ -0,0 +1,11 @@ +package checkers + +// ESClient defines what a ES client should do +type ESClient interface { + DoCountRequest(index string, body []byte) (uint64, error) + DoScrollRequestAllDocuments( + index string, + body []byte, + handlerFunc func(responseBytes []byte) error, + ) error +} diff --git a/tools/clusters-checker/pkg/checkers/process.go b/tools/clusters-checker/pkg/checkers/process.go new file mode 100644 index 00000000..16130fde --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/process.go @@ -0,0 +1,30 @@ +package checkers + +import ( + logger "github.com/ElrondNetwork/elrond-go-logger" +) + +var ( + log = logger.GetOrCreate("pkg/checkers") +) + +type clusterChecker struct { + clientSource ESClient + clientDestination ESClient + indices []string +} + +func (cc *clusterChecker) CompareIndices() error { + for _, index := range cc.indices { + err := cc.compareIndex(index) + if err != nil { + return err + } + } + + return nil +} + +func (cc *clusterChecker) compareIndex(index string) error { + return nil +} diff --git a/tools/clusters-checker/pkg/client/client.go b/tools/clusters-checker/pkg/client/client.go new file mode 100644 index 00000000..3ccd0e9d --- /dev/null +++ b/tools/clusters-checker/pkg/client/client.go @@ -0,0 +1,165 @@ +package client + +import ( + "bytes" + "context" + "fmt" + logger "github.com/ElrondNetwork/elrond-go-logger" + "io/ioutil" + "math" + "net/http" + "time" + + "github.com/elastic/go-elasticsearch/v7" + "github.com/elastic/go-elasticsearch/v7/esapi" + "github.com/tidwall/gjson" +) + +var ( + log = logger.GetOrCreate("clusters-checker/pkg/client") + + httpStatusesForRetry = []int{429, 502, 503, 504} +) + +type esClient struct { + client *elasticsearch.Client + // countScroll is used to be incremented after each scroll so the scroll duration is different each time, + // bypassing any possible caching based on the same request + countScroll int +} + +// NewElasticClient will create a new instance of an esClient +func NewElasticClient(cfg elasticsearch.Config) (*esClient, error) { + if len(cfg.RetryOnStatus) == 0 { + cfg.RetryOnStatus = httpStatusesForRetry + cfg.RetryBackoff = func(i int) time.Duration { + // A simple exponential delay + d := time.Duration(math.Exp2(float64(i))) * time.Second + log.Info("elastic: retry backoff", "attempt", i, "sleep duration", d) + return d + } + cfg.MaxRetries = 5 + } + + elasticClient, err := elasticsearch.NewClient(cfg) + if err != nil { + return nil, err + } + + return &esClient{ + client: elasticClient, + countScroll: 0, + }, nil +} + +// DoScrollRequestAllDocuments will perform a documents request using scroll api +func (esc *esClient) DoScrollRequestAllDocuments( + index string, + body []byte, + handlerFunc func(responseBytes 
[]byte) error,
+) error {
+	esc.countScroll++
+	res, err := esc.client.Search(
+		esc.client.Search.WithSize(9999),
+		esc.client.Search.WithScroll(10*time.Minute+time.Duration(esc.countScroll)*time.Millisecond),
+		esc.client.Search.WithContext(context.Background()),
+		esc.client.Search.WithIndex(index),
+		esc.client.Search.WithBody(bytes.NewBuffer(body)),
+	)
+	if err != nil {
+		return err
+	}
+
+	bodyBytes, err := getBytesFromResponse(res)
+	if err != nil {
+		return err
+	}
+
+	err = handlerFunc(bodyBytes)
+	if err != nil {
+		return err
+	}
+
+	scrollID := gjson.Get(string(bodyBytes), "_scroll_id")
+	return esc.iterateScroll(scrollID.String(), handlerFunc)
+}
+
+func (esc *esClient) iterateScroll(
+	scrollID string,
+	handlerFunc func(responseBytes []byte) error,
+) error {
+	if scrollID == "" {
+		return nil
+	}
+	defer func() {
+		err := esc.clearScroll(scrollID)
+		if err != nil {
+			log.Warn("cannot clear scroll", "error", err)
+		}
+	}()
+
+	for {
+		scrollBodyBytes, errScroll := esc.getScrollResponse(scrollID)
+		if errScroll != nil {
+			return errScroll
+		}
+
+		numberOfHits := gjson.Get(string(scrollBodyBytes), "hits.hits.#")
+		if numberOfHits.Int() < 1 {
+			return nil
+		}
+		err := handlerFunc(scrollBodyBytes)
+		if err != nil {
+			return err
+		}
+	}
+}
+
+func (esc *esClient) getScrollResponse(scrollID string) ([]byte, error) {
+	esc.countScroll++
+	res, err := esc.client.Scroll(
+		esc.client.Scroll.WithScrollID(scrollID),
+		esc.client.Scroll.WithScroll(2*time.Minute+time.Duration(esc.countScroll)*time.Millisecond),
+	)
+	if err != nil {
+		return nil, err
+	}
+
+	return getBytesFromResponse(res)
+}
+
+func (esc *esClient) clearScroll(scrollID string) error {
+	resp, err := esc.client.ClearScroll(
+		esc.client.ClearScroll.WithScrollID(scrollID),
+	)
+	if err != nil {
+		return err
+	}
+	defer closeBody(resp)
+
+	if resp.IsError() && resp.StatusCode != http.StatusNotFound {
+		return fmt.Errorf("error response: %s", resp)
+	}
+
+	return nil
+}
+
+func getBytesFromResponse(res *esapi.Response) ([]byte, error) {
+	if res.IsError() {
+		return nil, fmt.Errorf("error response: %s", res)
+	}
+	defer closeBody(res)
+
+	bodyBytes, err := ioutil.ReadAll(res.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	return bodyBytes, nil
+}
+
+func closeBody(res *esapi.Response) {
+	if res != nil && res.Body != nil {
+		_ = res.Body.Close()
+	}
+}
diff --git a/tools/clusters-checker/pkg/client/count.go b/tools/clusters-checker/pkg/client/count.go
new file mode 100644
index 00000000..e17d847c
--- /dev/null
+++ b/tools/clusters-checker/pkg/client/count.go
@@ -0,0 +1,26 @@
+package client
+
+import (
+	"bytes"
+	"github.com/tidwall/gjson"
+)
+
+// DoCountRequest will get the number of elements that correspond to the provided query
+func (esc *esClient) DoCountRequest(index string, body []byte) (uint64, error) {
+	res, err := esc.client.Count(
+		esc.client.Count.WithIndex(index),
+		esc.client.Count.WithBody(bytes.NewBuffer(body)),
+	)
+	if err != nil {
+		return 0, err
+	}
+
+	bodyBytes, err := getBytesFromResponse(res)
+	if err != nil {
+		return 0, err
+	}
+
+	countRes := gjson.Get(string(bodyBytes), "count")
+
+	return countRes.Uint(), nil
+}
diff --git a/tools/clusters-checker/pkg/config/config.go b/tools/clusters-checker/pkg/config/config.go
new file mode 100644
index 00000000..19b40cff
--- /dev/null
+++ b/tools/clusters-checker/pkg/config/config.go
@@ -0,0 +1,17 @@
+package config
+
+type Config struct {
+	SourceCluster struct {
+		URL      string `toml:"url"`
+		User     string 
`toml:"user"` + Password string `toml:"password"` + } `toml:"source-cluster"` + DestinationCluster struct { + URL string `toml:"url"` + User string `toml:"user"` + Password string `toml:"password"` + } `toml:"destination-cluster"` + Compare struct { + Indices []string `toml:"indices"` + } `toml:"compare"` +} From b28d0c4ea960a1df4689d30e5aae654ccc57d221 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 14 Apr 2022 21:23:55 +0300 Subject: [PATCH 02/69] continue the implementation --- .../clusters-checker/cmd/checker/config.toml | 6 +- tools/clusters-checker/cmd/checker/main.go | 9 +- tools/clusters-checker/go.mod | 2 +- tools/clusters-checker/go.sum | 4 +- tools/clusters-checker/pkg/checkers/count.go | 9 +- tools/clusters-checker/pkg/checkers/data.go | 12 ++ .../clusters-checker/pkg/checkers/factory.go | 11 +- .../pkg/checkers/interface.go | 4 + .../clusters-checker/pkg/checkers/process.go | 30 ---- .../checkers/process_indices_no_timestamp.go | 88 +++++++++ .../process_indices_with_timestamp.go | 168 ++++++++++++++++++ tools/clusters-checker/pkg/checkers/query.go | 53 ++++++ tools/clusters-checker/pkg/checkers/utils.go | 24 +++ tools/clusters-checker/pkg/client/client.go | 97 +++++++++- tools/clusters-checker/pkg/client/get.go | 47 +++++ tools/clusters-checker/pkg/config/config.go | 3 +- 16 files changed, 519 insertions(+), 48 deletions(-) create mode 100644 tools/clusters-checker/pkg/checkers/data.go delete mode 100644 tools/clusters-checker/pkg/checkers/process.go create mode 100644 tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go create mode 100644 tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go create mode 100644 tools/clusters-checker/pkg/checkers/query.go create mode 100644 tools/clusters-checker/pkg/checkers/utils.go create mode 100644 tools/clusters-checker/pkg/client/get.go diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index a5bbc929..17c390d3 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -4,8 +4,10 @@ user = "" password = "" [destination-cluster] - url = "https://index.elrond.com" + url = "" user = "" password = "" [compare] - indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] + #indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] + indices-with-timestamp = ["receipts"] + indices-no-timestamp = ["accounts"] diff --git a/tools/clusters-checker/cmd/checker/main.go b/tools/clusters-checker/cmd/checker/main.go index 6945e6f0..5cca44ce 100644 --- a/tools/clusters-checker/cmd/checker/main.go +++ b/tools/clusters-checker/cmd/checker/main.go @@ -81,9 +81,14 @@ func checkClusters(ctx *cli.Context) { log.Error("cannot create cluster checker", "error", err.Error()) } - err = clusterChecker.CompareCounts() + //err = clusterChecker.CompareCounts() + //if err != nil { + // log.Error("cannot check counts", "error", err.Error()) + //} + + err = clusterChecker.CompareIndicesWithTimestamp() if err != nil { - log.Error("cannot check counts", "error", err.Error()) + log.Error("cannot check indices", "error", err.Error()) } } diff --git 
a/tools/clusters-checker/go.mod b/tools/clusters-checker/go.mod index 9e439f0a..06dc3d56 100644 --- a/tools/clusters-checker/go.mod +++ b/tools/clusters-checker/go.mod @@ -4,7 +4,7 @@ go 1.17 require ( github.com/ElrondNetwork/elrond-go-logger v1.0.7 - github.com/elastic/go-elasticsearch/v7 v7.17.1 + github.com/elastic/go-elasticsearch/v7 v7.12.0 github.com/pelletier/go-toml v1.9.3 github.com/tidwall/gjson v1.14.0 github.com/urfave/cli v1.22.5 diff --git a/tools/clusters-checker/go.sum b/tools/clusters-checker/go.sum index e5ec143c..db0462cd 100644 --- a/tools/clusters-checker/go.sum +++ b/tools/clusters-checker/go.sum @@ -23,8 +23,8 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI= -github.com/elastic/go-elasticsearch/v7 v7.17.1 h1:49mHcHx7lpCL8cW1aioEwSEVKQF3s+Igi4Ye/QTWwmk= -github.com/elastic/go-elasticsearch/v7 v7.17.1/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= +github.com/elastic/go-elasticsearch/v7 v7.12.0 h1:j4tvcMrZJLp39L2NYvBb7f+lHKPqPHSL3nvB8+/DV+s= +github.com/elastic/go-elasticsearch/v7 v7.12.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/go-ole/go-ole v1.2.1/go.mod h1:7FAglXiTm7HKlQRDeOQ6ZNUHidzCWXuZWq/1dTyBNF8= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= diff --git a/tools/clusters-checker/pkg/checkers/count.go b/tools/clusters-checker/pkg/checkers/count.go index cfe8ac34..6a94ce33 100644 --- a/tools/clusters-checker/pkg/checkers/count.go +++ b/tools/clusters-checker/pkg/checkers/count.go @@ -3,7 +3,14 @@ package checkers import "math" func (cc *clusterChecker) CompareCounts() error { - for _, index := range cc.indices { + for _, index := range cc.indicesNoTimestamp { + err := cc.compareCount(index) + if err != nil { + return err + } + } + + for _, index := range cc.indicesWithTimestamp { err := cc.compareCount(index) if err != nil { return err diff --git a/tools/clusters-checker/pkg/checkers/data.go b/tools/clusters-checker/pkg/checkers/data.go new file mode 100644 index 00000000..cdad92ff --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/data.go @@ -0,0 +1,12 @@ +package checkers + +import "encoding/json" + +type generalElasticResponse struct { + Hits struct { + Hits []struct { + ID string `json:"_id"` + Source json.RawMessage `json:"_source"` + } `json:"hits"` + } `json:"hits"` +} diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go index 7fb53de6..07026ab3 100644 --- a/tools/clusters-checker/pkg/checkers/factory.go +++ b/tools/clusters-checker/pkg/checkers/factory.go @@ -1,6 +1,7 @@ package checkers import ( + "encoding/json" "fmt" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/client" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" @@ -27,8 +28,12 @@ func CreateClusterChecker(cfg *config.Config) (*clusterChecker, error) { } return &clusterChecker{ - clientSource: clientSource, - clientDestination: clientDestination, - indices: 
cfg.Compare.Indices, + clientSource: clientSource, + clientDestination: clientDestination, + indicesWithTimestamp: cfg.Compare.IndicesWithTimestamp, + indicesNoTimestamp: cfg.Compare.IndicesNoTimestamp, + + missingFromSource: map[string]json.RawMessage{}, + missingFromDestination: map[string]json.RawMessage{}, }, nil } diff --git a/tools/clusters-checker/pkg/checkers/interface.go b/tools/clusters-checker/pkg/checkers/interface.go index 0cf7758c..23be2363 100644 --- a/tools/clusters-checker/pkg/checkers/interface.go +++ b/tools/clusters-checker/pkg/checkers/interface.go @@ -2,7 +2,11 @@ package checkers // ESClient defines what a ES client should do type ESClient interface { + InitializeScroll(index string, body []byte, response interface{}) (string, bool, error) + DoScrollRequestV2(scrollID string, response interface{}) (string, bool, error) + DoCountRequest(index string, body []byte) (uint64, error) + DoGetRequest(index string, body []byte, response interface{}, size int) error DoScrollRequestAllDocuments( index string, body []byte, diff --git a/tools/clusters-checker/pkg/checkers/process.go b/tools/clusters-checker/pkg/checkers/process.go deleted file mode 100644 index 16130fde..00000000 --- a/tools/clusters-checker/pkg/checkers/process.go +++ /dev/null @@ -1,30 +0,0 @@ -package checkers - -import ( - logger "github.com/ElrondNetwork/elrond-go-logger" -) - -var ( - log = logger.GetOrCreate("pkg/checkers") -) - -type clusterChecker struct { - clientSource ESClient - clientDestination ESClient - indices []string -} - -func (cc *clusterChecker) CompareIndices() error { - for _, index := range cc.indices { - err := cc.compareIndex(index) - if err != nil { - return err - } - } - - return nil -} - -func (cc *clusterChecker) compareIndex(index string) error { - return nil -} diff --git a/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go new file mode 100644 index 00000000..0a891b08 --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go @@ -0,0 +1,88 @@ +package checkers + +import ( + "encoding/json" + + logger "github.com/ElrondNetwork/elrond-go-logger" +) + +var ( + log = logger.GetOrCreate("pkg/checkers") +) + +func (cc *clusterChecker) CompareIndicesNoTimestamp() error { + for _, index := range cc.indicesNoTimestamp { + err := cc.compareIndex(index) + if err != nil { + return err + } + } + + return nil +} + +func (cc *clusterChecker) compareIndex(index string) error { + count := 0 + handlerFunc := func(responseBytes []byte) error { + count++ + genericResponse := &generalElasticResponse{} + err := json.Unmarshal(responseBytes, genericResponse) + if err != nil { + return err + } + + log.Info("comparing", "bulk size", len(genericResponse.Hits.Hits), "count", count) + + return cc.processResponse(index, genericResponse) + } + + return cc.clientSource.DoScrollRequestAllDocuments(index, getAll(true), handlerFunc) +} + +func (cc *clusterChecker) processResponse(index string, genericResponse *generalElasticResponse) error { + mapResponseSource, ids := convertResponseInMap(genericResponse) + + genericResponseDestination := &generalElasticResponse{} + err := cc.clientDestination.DoGetRequest(index, queryMultipleObj(ids, true), genericResponseDestination, len(ids)) + if err != nil { + return err + } + + mapResponseDestination, _ := convertResponseInMap(genericResponseDestination) + + compareResults(index, mapResponseSource, mapResponseDestination) + + return nil +} + 
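+// processResponse looks up each page of source documents in the destination
+// cluster by ID. queryMultipleObj (defined in query.go) builds a terms query on
+// _id, so the body passed to DoGetRequest is shaped roughly like this sketch
+// (the IDs here are illustrative only):
+//
+//	{
+//	  "query": { "terms": { "_id": ["doc-id-1", "doc-id-2"] } },
+//	  "_source": true
+//	}
+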
+func compareResults(index string, sourceRes, destinationRes map[string]json.RawMessage) { + for id, rawDataSource := range sourceRes { + rawDataDestination, found := destinationRes[id] + if !found { + log.Warn("cannot find document", "index", index, "id", id) + continue + } + + equal, err := areEqualJSON(rawDataSource, rawDataDestination) + if err != nil { + log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id) + continue + } + + if !equal { + log.Warn("different documents", "index", index, "id", id) + } + } +} + +func convertResponseInMap(response *generalElasticResponse) (map[string]json.RawMessage, []string) { + mapResponse := make(map[string]json.RawMessage, len(response.Hits.Hits)) + ids := make([]string, 0, len(response.Hits.Hits)) + + for _, hit := range response.Hits.Hits { + ids = append(ids, hit.ID) + mapResponse[hit.ID] = hit.Source + } + + return mapResponse, ids +} diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go new file mode 100644 index 00000000..9174fb59 --- /dev/null +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -0,0 +1,168 @@ +package checkers + +import "encoding/json" + +type clusterChecker struct { + missingFromSource map[string]json.RawMessage + missingFromDestination map[string]json.RawMessage + + clientSource ESClient + clientDestination ESClient + indicesNoTimestamp []string + indicesWithTimestamp []string +} + +func (cc *clusterChecker) CompareIndicesWithTimestamp() error { + for _, index := range cc.indicesWithTimestamp { + err := cc.compareIndexWithTimestamp(index) + if err != nil { + return err + } + } + + return nil +} + +func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { + rspSource := &generalElasticResponse{} + nextScrollIDSource, _, err := cc.clientSource.InitializeScroll( + index, + getAllSortTimestampASC(true), + rspSource, + ) + if err != nil { + return err + } + + rspDestination := &generalElasticResponse{} + nextScrollIDDestination, _, err := cc.clientDestination.InitializeScroll( + index, + getAllSortTimestampASC(true), + rspDestination, + ) + if err != nil { + return err + } + + cc.compareResults(index, rspSource, rspDestination) + + cc.continueReading(index, nextScrollIDSource, nextScrollIDDestination) + + return nil +} + +func (cc *clusterChecker) continueReading(index string, scrollIDSource, scrollIDDestination string) { + sourceID := scrollIDSource + destinationID := scrollIDDestination + var errSource, errDestination error + var doneSource, doneDestination bool + + count := 0 + for { + count++ + + chanResponseSource := make(chan *generalElasticResponse) + chanResponseDestination := make(chan *generalElasticResponse) + + go func() { + responseS := &generalElasticResponse{} + var nextScroll string + if !doneSource { + nextScroll, doneSource, errSource = cc.clientSource.DoScrollRequestV2(sourceID, responseS) + if errSource != nil { + log.Error("cannot read from source", "index", index, "error", errSource.Error()) + } + } + sourceID = nextScroll + chanResponseSource <- responseS + }() + + go func() { + var nextScroll string + responseD := &generalElasticResponse{} + if !doneDestination { + nextScroll, doneDestination, errDestination = cc.clientDestination.DoScrollRequestV2(destinationID, responseD) + if errDestination != nil { + log.Error("cannot read from destination", "index", index, "error", errDestination.Error()) + } + } + destinationID = nextScroll + 
chanResponseDestination <- responseD
+		}()
+
+		rspFromSource := <-chanResponseSource
+		rspFromDestination := <-chanResponseDestination
+
+		cc.compareResults(index, rspFromSource, rspFromDestination)
+		log.Info("comparing results", "count", count)
+		if count%10 == 0 {
+			cc.checkMaps(index, false)
+		}
+
+		if doneSource && doneDestination {
+			cc.checkMaps(index, true)
+			return
+		}
+	}
+}
+
+func (cc *clusterChecker) compareResults(index string, respSource, respDestination *generalElasticResponse) {
+	mapSource, _ := convertResponseInMap(respSource)
+	mapDestination, _ := convertResponseInMap(respDestination)
+
+	for id, rawDataSource := range mapSource {
+		rawDataDestination, found := mapDestination[id]
+		if !found {
+			cc.missingFromSource[id] = rawDataSource
+			continue
+		}
+
+		delete(mapDestination, id)
+
+		equal, err := areEqualJSON(rawDataSource, rawDataDestination)
+		if err != nil {
+			log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id)
+			continue
+		}
+
+		if !equal {
+			log.Warn("different documents", "index", index, "id", id)
+			continue
+		}
+	}
+
+	for id, rawDataSource := range mapDestination {
+		cc.missingFromDestination[id] = rawDataSource
+	}
+}
+
+func (cc *clusterChecker) checkMaps(index string, finish bool) {
+	for id, rawDataSource := range cc.missingFromSource {
+		rawDataDestination, found := cc.missingFromDestination[id]
+		if !found {
+			if finish {
+				log.Warn("cannot find document source", "index", index, "id", id)
+			}
+			continue
+		}
+
+		delete(cc.missingFromSource, id)
+		delete(cc.missingFromDestination, id)
+
+		equal, err := areEqualJSON(rawDataSource, rawDataDestination)
+		if err != nil {
+			log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id)
+			continue
+		}
+
+		if !equal {
+			log.Warn("different documents", "index", index, "id", id)
+			continue
+		}
+	}
+	if finish {
+		for id := range cc.missingFromDestination {
+			log.Warn("cannot find document destination", "index", index, "id", id)
+		}
+	}
+}
diff --git a/tools/clusters-checker/pkg/checkers/query.go b/tools/clusters-checker/pkg/checkers/query.go
new file mode 100644
index 00000000..889c2792
--- /dev/null
+++ b/tools/clusters-checker/pkg/checkers/query.go
@@ -0,0 +1,53 @@
+package checkers
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"strconv"
+)
+
+type object = map[string]interface{}
+
+func getAll(withSource bool) []byte {
+	query := fmt.Sprintf(`{"query": {"match_all": {}},"_source": %s}`, strconv.FormatBool(withSource))
+
+	return []byte(query)
+}
+
+func queryMultipleObj(ids []string, withSource bool) []byte {
+	query := object{
+		"query": object{
+			"terms": object{
+				"_id": ids,
+			},
+		},
+		"_source": withSource,
+	}
+
+	var buff bytes.Buffer
+	_ = json.NewEncoder(&buff).Encode(query)
+
+	return buff.Bytes()
+}
+
+func getAllSortTimestampASC(withSource bool) []byte {
+	obj := object{
+		"query": object{
+			"match_all": object{},
+		},
+		"_source": withSource,
+		"sort": []interface{}{
+			object{
+				"timestamp": object{
+					"order": "asc",
+				},
+			},
+		},
+	}
+
+	var buff bytes.Buffer
+	_ = json.NewEncoder(&buff).Encode(obj)
+
+	return buff.Bytes()
+}
diff --git a/tools/clusters-checker/pkg/checkers/utils.go b/tools/clusters-checker/pkg/checkers/utils.go
new file mode 100644
index 00000000..6cba8283
--- /dev/null
+++ b/tools/clusters-checker/pkg/checkers/utils.go
@@ -0,0 +1,24 @@
+package checkers
+
+import (
+	"encoding/json"
+	"fmt"
+	"reflect"
+)
+
+func areEqualJSON(s1, s2 json.RawMessage) (bool, error) {
+	var o1 interface{}
+	var o2 interface{}
+
+	var err error
+	err = json.Unmarshal(s1, &o1)
+	if err != nil {
+		return false, fmt.Errorf("error unmarshalling s1: %s", err.Error())
+	}
+	err = json.Unmarshal(s2, &o2)
+	if err != nil {
+		return false, fmt.Errorf("error unmarshalling s2: %s", err.Error())
+	}
+
+	return reflect.DeepEqual(o1, o2), nil
+}
diff --git a/tools/clusters-checker/pkg/client/client.go b/tools/clusters-checker/pkg/client/client.go
index 3ccd0e9d..33027bac 100644
--- a/tools/clusters-checker/pkg/client/client.go
+++ b/tools/clusters-checker/pkg/client/client.go
@@ -3,13 +3,15 @@ package client
 import (
 	"bytes"
 	"context"
+	"encoding/json"
 	"fmt"
-	logger "github.com/ElrondNetwork/elrond-go-logger"
 	"io/ioutil"
 	"math"
 	"net/http"
+	"sync"
 	"time"
 
+	logger "github.com/ElrondNetwork/elrond-go-logger"
 	"github.com/elastic/go-elasticsearch/v7"
 	"github.com/elastic/go-elasticsearch/v7/esapi"
 	"github.com/tidwall/gjson"
@@ -26,6 +28,8 @@ type esClient struct {
 	// countScroll is used to be incremented after each scroll so the scroll duration is different each time,
 	// bypassing any possible caching based on the same request
 	countScroll int
+	countSearch int
+	mutex       sync.Mutex
 }
 
 // NewElasticClient will create a new instance of an esClient
@@ -49,19 +53,93 @@ func NewElasticClient(cfg elasticsearch.Config) (*esClient, error) {
 	return &esClient{
 		client:      elasticClient,
 		countScroll: 0,
+		mutex:       sync.Mutex{},
 	}, nil
 }
 
+func (esc *esClient) InitializeScroll(index string, body []byte, response interface{}) (string, bool, error) {
+	res, err := esc.client.Search(
+		esc.client.Search.WithSize(9000),
+		esc.client.Search.WithScroll(10*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond),
+		esc.client.Search.WithIndex(index),
+		esc.client.Search.WithBody(bytes.NewBuffer(body)),
+	)
+	if err != nil {
+		return "", false, err
+	}
+	if res.IsError() || res.StatusCode >= 400 {
+		return "", false, fmt.Errorf("%s", res.String())
+	}
+
+	bodyBytes, err := getBytesFromResponse(res)
+	if err != nil {
+		return "", false, err
+	}
+	scrollID := gjson.Get(string(bodyBytes), "_scroll_id").String()
+	numberOfHits := gjson.Get(string(bodyBytes), "hits.hits.#")
+	isDone := numberOfHits.Int() == 0
+
+	if isDone {
+		defer func() {
+			errC := esc.clearScroll(scrollID)
+			if errC != nil {
+				log.Warn("cannot clear scroll", "error", errC)
+			}
+		}()
+	}
+
+	err = json.Unmarshal(bodyBytes, response)
+	if err != nil {
+		return "", false, err
+	}
+
+	return scrollID, isDone, nil
+}
+
+func (esc *esClient) DoScrollRequestV2(scrollID string, response interface{}) (string, bool, error) {
+	res, err := esc.client.Scroll(
+		esc.client.Scroll.WithScrollID(scrollID),
+		esc.client.Scroll.WithScroll(2*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond),
+	)
+	if err != nil {
+		return "", false, err
+	}
+
+	bodyBytes, err := getBytesFromResponse(res)
+	if err != nil {
+		return "", false, err
+	}
+
+	nextScrollID := gjson.Get(string(bodyBytes), "_scroll_id").String()
+	numberOfHits := gjson.Get(string(bodyBytes), "hits.hits.#")
+	isDone := numberOfHits.Int() == 0
+
+	if isDone {
+		defer func() {
+			errC := esc.clearScroll(scrollID)
+			if errC != nil {
+				log.Warn("cannot clear scroll", "error", errC)
+			}
+		}()
+	}
+
+	err = json.Unmarshal(bodyBytes, response)
+	if err != nil {
+		return "", false, err
+	}
+
+	return nextScrollID, isDone, nil
+}
+
 // DoScrollRequestAllDocuments will perform a documents request using scroll api
 func (esc *esClient) DoScrollRequestAllDocuments(
 	index string,
 	body []byte,
 	handlerFunc 
func(responseBytes []byte) error, ) error { - esc.countScroll++ res, err := esc.client.Search( - esc.client.Search.WithSize(9999), - esc.client.Search.WithScroll(10*time.Minute+time.Duration(esc.countScroll)*time.Millisecond), + esc.client.Search.WithSize(9000), + esc.client.Search.WithScroll(10*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond), esc.client.Search.WithContext(context.Background()), esc.client.Search.WithIndex(index), esc.client.Search.WithBody(bytes.NewBuffer(body)), @@ -116,10 +194,9 @@ func (esc *esClient) iterateScroll( } func (esc *esClient) getScrollResponse(scrollID string) ([]byte, error) { - esc.countScroll++ res, err := esc.client.Scroll( esc.client.Scroll.WithScrollID(scrollID), - esc.client.Scroll.WithScroll(2*time.Minute+time.Duration(esc.countScroll)*time.Millisecond), + esc.client.Scroll.WithScroll(2*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond), ) if err != nil { return nil, err @@ -163,3 +240,11 @@ func closeBody(res *esapi.Response) { _ = res.Body.Close() } } + +func (esc *esClient) updateAndGetCountScroll() int { + esc.mutex.Lock() + defer esc.mutex.Unlock() + + esc.countScroll++ + return esc.countScroll +} diff --git a/tools/clusters-checker/pkg/client/get.go b/tools/clusters-checker/pkg/client/get.go new file mode 100644 index 00000000..43d4a6bc --- /dev/null +++ b/tools/clusters-checker/pkg/client/get.go @@ -0,0 +1,47 @@ +package client + +import ( + "bytes" + "encoding/json" + "fmt" + "io/ioutil" + "time" +) + +func (esc *esClient) DoGetRequest(index string, body []byte, response interface{}, size int) error { + res, err := esc.client.Search( + esc.client.Search.WithIndex(index), + esc.client.Search.WithBody(bytes.NewBuffer(body)), + esc.client.Search.WithRequestCache(false), + esc.client.Search.WithSize(size), + esc.client.Search.WithTimeout(10*time.Minute+time.Duration(esc.updateAndGetCount())*time.Millisecond), + ) + if err != nil { + return err + } + if res.IsError() || res.StatusCode >= 400 { + return fmt.Errorf("%s", res.String()) + } + + defer closeBody(res) + + bodyBytes, err := ioutil.ReadAll(res.Body) + if err != nil { + return err + } + + err = json.Unmarshal(bodyBytes, response) + if err != nil { + return err + } + + return nil +} + +func (esc *esClient) updateAndGetCount() int { + esc.mutex.Lock() + defer esc.mutex.Unlock() + + esc.countSearch++ + return esc.countSearch +} diff --git a/tools/clusters-checker/pkg/config/config.go b/tools/clusters-checker/pkg/config/config.go index 19b40cff..cbcecad0 100644 --- a/tools/clusters-checker/pkg/config/config.go +++ b/tools/clusters-checker/pkg/config/config.go @@ -12,6 +12,7 @@ type Config struct { Password string `toml:"password"` } `toml:"destination-cluster"` Compare struct { - Indices []string `toml:"indices"` + IndicesWithTimestamp []string `toml:"indices-with-timestamp"` + IndicesNoTimestamp []string `toml:"indices-no-timestamp"` } `toml:"compare"` } From e8af7815b02e98a06bc71d680ac211ce86b940b1 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 14 Apr 2022 21:49:46 +0300 Subject: [PATCH 03/69] more logs --- tools/clusters-checker/cmd/checker/config.toml | 4 ++-- .../pkg/checkers/process_indices_with_timestamp.go | 4 ++++ tools/clusters-checker/pkg/client/client.go | 6 ++++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index 17c390d3..45e9e88a 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ 
b/tools/clusters-checker/cmd/checker/config.toml @@ -9,5 +9,5 @@ password = "" [compare] #indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] - indices-with-timestamp = ["receipts"] - indices-no-timestamp = ["accounts"] + indices-with-timestamp = ["transactions", "blocks", "miniblocks", "rounds", "accountshistory", "receipts","scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens"] + indices-no-timestamp = ["rating", "validators", "accounts", "epochinfo", "epochinfo", "tags", "logs", "delegators"] diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 9174fb59..98974dfb 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -137,6 +137,10 @@ func (cc *clusterChecker) compareResults(index string, respSource, respDestinati } func (cc *clusterChecker) checkMaps(index string, finish bool) { + log.Info("missing from source", + "num", len(cc.missingFromDestination), + "missing from destination num", len(cc.missingFromDestination), + ) for id, rawDataSource := range cc.missingFromSource { rawDataDestination, found := cc.missingFromDestination[id] if !found { diff --git a/tools/clusters-checker/pkg/client/client.go b/tools/clusters-checker/pkg/client/client.go index 33027bac..a08d2125 100644 --- a/tools/clusters-checker/pkg/client/client.go +++ b/tools/clusters-checker/pkg/client/client.go @@ -97,6 +97,8 @@ func (esc *esClient) InitializeScroll(index string, body []byte, response interf } func (esc *esClient) DoScrollRequestV2(scrollID string, response interface{}) (string, bool, error) { + defer logExecutionTime(time.Now(), "esClient.DoScrollRequestV2") + res, err := esc.client.Scroll( esc.client.Scroll.WithScrollID(scrollID), esc.client.Scroll.WithScroll(2*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond), @@ -131,6 +133,10 @@ func (esc *esClient) DoScrollRequestV2(scrollID string, response interface{}) (s return nextScrollID, isDone, nil } +func logExecutionTime(start time.Time, message string) { + log.Debug(message, "duration in seconds", time.Since(start).Seconds()) +} + // DoScrollRequestAllDocuments will perform a documents request using scroll api func (esc *esClient) DoScrollRequestAllDocuments( index string, From 4125cf5b7d4765a5f8551fa09286e5bba13614cd Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 14 Apr 2022 21:52:41 +0300 Subject: [PATCH 04/69] sort imports --- tools/clusters-checker/cmd/checker/main.go | 2 +- tools/clusters-checker/pkg/checkers/factory.go | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tools/clusters-checker/cmd/checker/main.go b/tools/clusters-checker/cmd/checker/main.go index 5cca44ce..2cce7634 100644 --- a/tools/clusters-checker/cmd/checker/main.go +++ b/tools/clusters-checker/cmd/checker/main.go @@ -1,11 +1,11 @@ package main import ( - "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/checkers" "io/ioutil" "os" "path" + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/checkers" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" logger "github.com/ElrondNetwork/elrond-go-logger" 
"github.com/pelletier/go-toml" diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go index 07026ab3..06751632 100644 --- a/tools/clusters-checker/pkg/checkers/factory.go +++ b/tools/clusters-checker/pkg/checkers/factory.go @@ -3,6 +3,7 @@ package checkers import ( "encoding/json" "fmt" + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/client" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" "github.com/elastic/go-elasticsearch/v7" From ce206fd5c3b6aac3078695c52bde26ffcd41ef39 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 15 Apr 2022 11:13:17 +0300 Subject: [PATCH 05/69] check with interval --- .../clusters-checker/cmd/checker/config.toml | 12 ++- tools/clusters-checker/cmd/checker/main.go | 81 ++++++++++++++++--- tools/clusters-checker/pkg/checkers/count.go | 6 +- .../clusters-checker/pkg/checkers/factory.go | 24 +++++- .../pkg/checkers/interface.go | 5 ++ .../checkers/process_indices_no_timestamp.go | 18 ++--- .../process_indices_with_timestamp.go | 45 +++++++---- tools/clusters-checker/pkg/checkers/query.go | 9 ++- tools/clusters-checker/pkg/client/client.go | 3 +- tools/clusters-checker/pkg/config/config.go | 4 + 10 files changed, 159 insertions(+), 48 deletions(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index 45e9e88a..ffd29c02 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -9,5 +9,13 @@ password = "" [compare] #indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] - indices-with-timestamp = ["transactions", "blocks", "miniblocks", "rounds", "accountshistory", "receipts","scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens"] - indices-no-timestamp = ["rating", "validators", "accounts", "epochinfo", "epochinfo", "tags", "logs", "delegators"] + interval = [ + {start = 1596117600, stop = 1613397600}, # Day 0 --- Day 200 + {start = 1613397601, stop = 1630677600}, # Day 200 - Day 400 + {start = 1630677601, stop = 1639317601}, # Day 400 - Day 500 + {start = 1639317602, stop = 1647957602}, # Day 500 - Day 600 + {start = 1647957603, stop = 1667288039}, # Day 600 - Day XXX + ] + indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens"] + #indices-with-timestamp = [ "blocks"] + indices-no-timestamp = ["rating", "validators", "epochinfo", "epochinfo", "tags", "logs", "delegators"] diff --git a/tools/clusters-checker/cmd/checker/main.go b/tools/clusters-checker/cmd/checker/main.go index 2cce7634..c5802f64 100644 --- a/tools/clusters-checker/cmd/checker/main.go +++ b/tools/clusters-checker/cmd/checker/main.go @@ -4,6 +4,7 @@ import ( "io/ioutil" "os" "path" + "sync" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/checkers" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" @@ -23,6 +24,18 @@ var ( Usage: "The path to the config folder", Value: "./", } + checkCounts = cli.BoolFlag{ + Name: "check-counts", + Usage: "If set, the checker wil verify the counts between clusters", + } + checkWithTimestamp = cli.BoolFlag{ + 
Name: "check-with-timestamp", + Usage: "If set, the checker wil verify all the indices from list with timestamp", + } + checkNoTimestamp = cli.BoolFlag{ + Name: "check-no-timestamp", + Usage: "If set, the checker wil verify the indices from list with no timestamp", + } ) const helpTemplate = `NAME: @@ -44,11 +57,11 @@ VERSION: func main() { app := cli.NewApp() cli.AppHelpTemplate = helpTemplate - app.Name = "Cluster checker" + app.Name = "Clusters checker" app.Version = "v1.0.0" - app.Usage = "Cluster checker" + app.Usage = "Clusters checker" app.Flags = []cli.Flag{ - configPath, + configPath, checkCounts, checkNoTimestamp, checkWithTimestamp, } app.Authors = []cli.Author{ { @@ -76,21 +89,65 @@ func checkClusters(ctx *cli.Context) { log.Error("cannot load config file", "error", err.Error()) } - clusterChecker, err := checkers.CreateClusterChecker(cfg) - if err != nil { - log.Error("cannot create cluster checker", "error", err.Error()) + checkCountsFlag := ctx.Bool(checkCounts.Name) + if checkCountsFlag { + clusterChecker, errC := checkers.CreateClusterChecker(cfg, 0, "instance_0") + if errC != nil { + log.Error("cannot create cluster checker", "error", errC.Error()) + } + + errC = clusterChecker.CompareCounts() + if errC != nil { + log.Error("cannot check counts", "error", errC.Error()) + } + + return + } + + checkIndicesNoTimestampFlag := ctx.Bool(checkNoTimestamp.Name) + if checkIndicesNoTimestampFlag { + clusterChecker, errC := checkers.CreateClusterChecker(cfg, 0, "instance_0") + if errC != nil { + log.Error("cannot create cluster checker", "error", errC.Error()) + } + + errC = clusterChecker.CompareIndicesNoTimestamp() + if errC != nil { + log.Error("cannot check indices", "error", errC.Error()) + } + + return } - //err = clusterChecker.CompareCounts() - //if err != nil { - // log.Error("cannot check counts", "error", err.Error()) - //} + checkWithTimestampFlag := ctx.Bool(checkWithTimestamp.Name) + if checkWithTimestampFlag { + checkClustersIndexesWithInterval(cfg) + return + } + + log.Error("no flag has been provided") +} - err = clusterChecker.CompareIndicesWithTimestamp() +func checkClustersIndexesWithInterval(cfg *config.Config) { + wg := sync.WaitGroup{} + ccs, err := checkers.CreateMultipleCheckers(cfg) if err != nil { - log.Error("cannot check indices", "error", err.Error()) + log.Error("cannot create cluster checker", "error", err.Error()) + } + + wg.Add(len(ccs)) + for _, c := range ccs { + go func(che checkers.Checker) { + errC := che.CompareIndicesWithTimestamp() + if errC != nil { + log.Error("cannot check indices", "error", errC.Error()) + } + wg.Done() + }(c) } + wg.Wait() + } func loadConfigFile(pathStr string) (*config.Config, error) { diff --git a/tools/clusters-checker/pkg/checkers/count.go b/tools/clusters-checker/pkg/checkers/count.go index 6a94ce33..0e9e5620 100644 --- a/tools/clusters-checker/pkg/checkers/count.go +++ b/tools/clusters-checker/pkg/checkers/count.go @@ -34,19 +34,19 @@ func (cc *clusterChecker) compareCount(index string) error { difference := int64(countSourceCluster) - int64(countDestinationCluster) if difference == 0 { - log.Info("number of documents are the same", "index", index, + log.Info(cc.logPrefix+": number of documents are the same", "index", index, "source cluster", countSourceCluster, "destination cluster", countDestinationCluster, ) } else if difference < 0 { - log.Info("number of documents", "index", index, + log.Info(cc.logPrefix+": number of documents", "index", index, "source cluster", countSourceCluster, "destination cluster", 
countDestinationCluster, "in destination cluster are more elements, difference", math.Abs(float64(difference)), ) } else { - log.Info("number of documents", "index", index, + log.Info(cc.logPrefix+": number of documents", "index", index, "source cluster", countSourceCluster, "destination cluster", countDestinationCluster, "in source cluster are more elements, difference", math.Abs(float64(difference)), diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go index 06751632..43cf227d 100644 --- a/tools/clusters-checker/pkg/checkers/factory.go +++ b/tools/clusters-checker/pkg/checkers/factory.go @@ -3,13 +3,13 @@ package checkers import ( "encoding/json" "fmt" - "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/client" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" "github.com/elastic/go-elasticsearch/v7" + "strconv" ) -func CreateClusterChecker(cfg *config.Config) (*clusterChecker, error) { +func CreateClusterChecker(cfg *config.Config, timestampIndex int, logPrefix string) (*clusterChecker, error) { clientSource, err := client.NewElasticClient(elasticsearch.Config{ Addresses: []string{cfg.SourceCluster.URL}, Username: cfg.SourceCluster.User, @@ -36,5 +36,25 @@ func CreateClusterChecker(cfg *config.Config) (*clusterChecker, error) { missingFromSource: map[string]json.RawMessage{}, missingFromDestination: map[string]json.RawMessage{}, + + startTimestamp: cfg.Compare.IntervalSettings[timestampIndex].Start, + stopTimestamp: cfg.Compare.IntervalSettings[timestampIndex].Stop, + logPrefix: logPrefix, }, nil } + +func CreateMultipleCheckers(cfg *config.Config) ([]*clusterChecker, error) { + checkers := make([]*clusterChecker, 0, len(cfg.Compare.IntervalSettings)) + + for idx := 0; idx < len(cfg.Compare.IntervalSettings); idx++ { + logPrefix := "instance_" + strconv.FormatUint(uint64(idx), 10) + cc, err := CreateClusterChecker(cfg, idx, logPrefix) + if err != nil { + return nil, err + } + + checkers = append(checkers, cc) + } + + return checkers, nil +} diff --git a/tools/clusters-checker/pkg/checkers/interface.go b/tools/clusters-checker/pkg/checkers/interface.go index 23be2363..719f45ac 100644 --- a/tools/clusters-checker/pkg/checkers/interface.go +++ b/tools/clusters-checker/pkg/checkers/interface.go @@ -13,3 +13,8 @@ type ESClient interface { handlerFunc func(responseBytes []byte) error, ) error } + +type Checker interface { + CompareIndicesNoTimestamp() error + CompareIndicesWithTimestamp() error +} diff --git a/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go index 0a891b08..2f1fdcde 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go @@ -2,12 +2,6 @@ package checkers import ( "encoding/json" - - logger "github.com/ElrondNetwork/elrond-go-logger" -) - -var ( - log = logger.GetOrCreate("pkg/checkers") ) func (cc *clusterChecker) CompareIndicesNoTimestamp() error { @@ -31,7 +25,7 @@ func (cc *clusterChecker) compareIndex(index string) error { return err } - log.Info("comparing", "bulk size", len(genericResponse.Hits.Hits), "count", count) + log.Info(cc.logPrefix+": comparing", "bulk size", len(genericResponse.Hits.Hits), "count", count) return cc.processResponse(index, genericResponse) } @@ -50,27 +44,27 @@ func (cc *clusterChecker) processResponse(index string, 
genericResponse *general mapResponseDestination, _ := convertResponseInMap(genericResponseDestination) - compareResults(index, mapResponseSource, mapResponseDestination) + cc.compareResultsNo(index, mapResponseSource, mapResponseDestination) return nil } -func compareResults(index string, sourceRes, destinationRes map[string]json.RawMessage) { +func (cc *clusterChecker) compareResultsNo(index string, sourceRes, destinationRes map[string]json.RawMessage) { for id, rawDataSource := range sourceRes { rawDataDestination, found := destinationRes[id] if !found { - log.Warn("cannot find document", "index", index, "id", id) + log.Warn(cc.logPrefix+": cannot find document", "index", index, "id", id) continue } equal, err := areEqualJSON(rawDataSource, rawDataDestination) if err != nil { - log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id) + log.Error(cc.logPrefix+": cannot compare json", "error", err.Error(), "index", index, "id", id) continue } if !equal { - log.Warn("different documents", "index", index, "id", id) + log.Warn(cc.logPrefix+": different documents", "index", index, "id", id) } } } diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 98974dfb..0f3a9406 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -1,6 +1,13 @@ package checkers -import "encoding/json" +import ( + "encoding/json" + logger "github.com/ElrondNetwork/elrond-go-logger" +) + +var ( + log = logger.GetOrCreate("checkers") +) type clusterChecker struct { missingFromSource map[string]json.RawMessage @@ -10,6 +17,10 @@ type clusterChecker struct { clientDestination ESClient indicesNoTimestamp []string indicesWithTimestamp []string + + startTimestamp, stopTimestamp int + + logPrefix string } func (cc *clusterChecker) CompareIndicesWithTimestamp() error { @@ -27,7 +38,7 @@ func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { rspSource := &generalElasticResponse{} nextScrollIDSource, _, err := cc.clientSource.InitializeScroll( index, - getAllSortTimestampASC(true), + getAllSortTimestampASC(true, cc.startTimestamp, cc.stopTimestamp), rspSource, ) if err != nil { @@ -37,7 +48,7 @@ func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { rspDestination := &generalElasticResponse{} nextScrollIDDestination, _, err := cc.clientDestination.InitializeScroll( index, - getAllSortTimestampASC(true), + getAllSortTimestampASC(true, cc.startTimestamp, cc.stopTimestamp), rspDestination, ) if err != nil { @@ -70,7 +81,7 @@ func (cc *clusterChecker) continueReading(index string, scrollIDSource, scrollID if !doneSource { nextScroll, doneSource, errSource = cc.clientSource.DoScrollRequestV2(sourceID, responseS) if errSource != nil { - log.Error("cannot read from source", "index", index, "error", errSource.Error()) + log.Error(cc.logPrefix+": cannot read from source", "index", index, "error", errSource.Error()) } } sourceID = nextScroll @@ -83,7 +94,7 @@ func (cc *clusterChecker) continueReading(index string, scrollIDSource, scrollID if !doneDestination { nextScroll, doneDestination, errDestination = cc.clientDestination.DoScrollRequestV2(destinationID, responseD) if errDestination != nil { - log.Error("cannot read from destination", "index", index, "error", errDestination.Error()) + log.Error(cc.logPrefix+": cannot read from destination", "index", index, 
"error", errDestination.Error()) } } destinationID = nextScroll @@ -94,7 +105,7 @@ func (cc *clusterChecker) continueReading(index string, scrollIDSource, scrollID rspFromDestination := <-chanResponseDestination cc.compareResults(index, rspFromSource, rspFromDestination) - log.Info("comparing results", "count", count) + log.Info(cc.logPrefix+": comparing results", "index", index, "count", count) if count%10 == 0 { cc.checkMaps(index, false) } @@ -121,12 +132,12 @@ func (cc *clusterChecker) compareResults(index string, respSource, respDestinati equal, err := areEqualJSON(rawDataSource, rawDataDestination) if err != nil { - log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id) + log.Error(cc.logPrefix+": cannot compare json", "error", err.Error(), "index", index, "id", id) continue } if !equal { - log.Warn("different documents", "index", index, "id", id) + log.Warn(cc.logPrefix+": different documents", "index", index, "id", id) continue } } @@ -137,7 +148,7 @@ func (cc *clusterChecker) compareResults(index string, respSource, respDestinati } func (cc *clusterChecker) checkMaps(index string, finish bool) { - log.Info("missing from source", + log.Info(cc.logPrefix+": missing from source", "num", len(cc.missingFromDestination), "missing from destination num", len(cc.missingFromDestination), ) @@ -145,7 +156,7 @@ func (cc *clusterChecker) checkMaps(index string, finish bool) { rawDataDestination, found := cc.missingFromDestination[id] if !found { if finish { - log.Warn("cannot find document source", "index", index, "id", id) + log.Warn(cc.logPrefix+": cannot find document source", "index", index, "id", id) } continue } @@ -155,18 +166,24 @@ func (cc *clusterChecker) checkMaps(index string, finish bool) { equal, err := areEqualJSON(rawDataSource, rawDataDestination) if err != nil { - log.Error("cannot compare json", "error", err.Error(), "index", index, "id", id) + log.Error(cc.logPrefix+": cannot compare json", "error", err.Error(), "index", index, "id", id) continue } if !equal { - log.Warn("different documents", "index", index, "id", id) + log.Warn(cc.logPrefix+": different documents", "index", index, "id", id) continue } } if finish { - for id, _ := range cc.missingFromDestination { - log.Warn("cannot find document destination", "index", index, "id", id) + for id := range cc.missingFromDestination { + log.Warn(cc.logPrefix+": cannot find document destination", "index", index, "id", id) } } + + if finish { + log.Info(cc.logPrefix + ": DONE") + cc.missingFromDestination = make(map[string]json.RawMessage, 0) + cc.missingFromSource = make(map[string]json.RawMessage, 0) + } } diff --git a/tools/clusters-checker/pkg/checkers/query.go b/tools/clusters-checker/pkg/checkers/query.go index 889c2792..bf090db0 100644 --- a/tools/clusters-checker/pkg/checkers/query.go +++ b/tools/clusters-checker/pkg/checkers/query.go @@ -31,10 +31,15 @@ func queryMultipleObj(ids []string, withSource bool) []byte { return buff.Bytes() } -func getAllSortTimestampASC(withSource bool) []byte { +func getAllSortTimestampASC(withSource bool, start, stop int) []byte { obj := object{ "query": object{ - "match_all": object{}, + "range": object{ + "timestamp": object{ + "gte": start, + "lte": stop, + }, + }, }, "_source": withSource, "sort": []interface{}{ diff --git a/tools/clusters-checker/pkg/client/client.go b/tools/clusters-checker/pkg/client/client.go index a08d2125..663d45c9 100644 --- a/tools/clusters-checker/pkg/client/client.go +++ b/tools/clusters-checker/pkg/client/client.go @@ -7,6 +7,7 @@ 
import ( "fmt" "io/ioutil" "math" + "math/rand" "net/http" "sync" "time" @@ -251,6 +252,6 @@ func (esc *esClient) updateAndGetCountScroll() int { esc.mutex.Lock() defer esc.mutex.Unlock() - esc.countScroll++ + esc.countScroll += 1 + rand.Intn(10) return esc.countScroll } diff --git a/tools/clusters-checker/pkg/config/config.go b/tools/clusters-checker/pkg/config/config.go index cbcecad0..dafee7df 100644 --- a/tools/clusters-checker/pkg/config/config.go +++ b/tools/clusters-checker/pkg/config/config.go @@ -12,6 +12,10 @@ type Config struct { Password string `toml:"password"` } `toml:"destination-cluster"` Compare struct { + IntervalSettings []struct { + Start int `toml:"start"` + Stop int `toml:"stop"` + } `toml:"interval"` IndicesWithTimestamp []string `toml:"indices-with-timestamp"` IndicesNoTimestamp []string `toml:"indices-no-timestamp"` } `toml:"compare"` From 662d83fe7d8948b1043ef42921ca8aaf89e9dbc8 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 15 Apr 2022 11:16:43 +0300 Subject: [PATCH 06/69] fix imports --- tools/clusters-checker/cmd/checker/config.toml | 2 +- tools/clusters-checker/pkg/checkers/factory.go | 3 ++- .../pkg/checkers/process_indices_with_timestamp.go | 1 + tools/clusters-checker/pkg/client/count.go | 1 + 4 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index ffd29c02..3446e825 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -18,4 +18,4 @@ ] indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens"] #indices-with-timestamp = [ "blocks"] - indices-no-timestamp = ["rating", "validators", "epochinfo", "epochinfo", "tags", "logs", "delegators"] + indices-no-timestamp = ["rating", "validators", "epochinfo", "epochinfo", "tags", "logs", "delegators", "accounts"] diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go index 43cf227d..190dee6f 100644 --- a/tools/clusters-checker/pkg/checkers/factory.go +++ b/tools/clusters-checker/pkg/checkers/factory.go @@ -3,10 +3,11 @@ package checkers import ( "encoding/json" "fmt" + "strconv" + "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/client" "github.com/ElrondNetwork/elastic-indexer-go/tools/clusters-checker/pkg/config" "github.com/elastic/go-elasticsearch/v7" - "strconv" ) func CreateClusterChecker(cfg *config.Config, timestampIndex int, logPrefix string) (*clusterChecker, error) { diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 0f3a9406..5038d3f5 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -2,6 +2,7 @@ package checkers import ( "encoding/json" + logger "github.com/ElrondNetwork/elrond-go-logger" ) diff --git a/tools/clusters-checker/pkg/client/count.go b/tools/clusters-checker/pkg/client/count.go index e17d847c..b2435c95 100644 --- a/tools/clusters-checker/pkg/client/count.go +++ b/tools/clusters-checker/pkg/client/count.go @@ -2,6 +2,7 @@ package client import ( "bytes" + "github.com/tidwall/gjson" ) From 6a2b097d1a90263383ae223b0168fbfe71a3f740 Mon Sep 17 00:00:00 2001 From: Iuga 
Mihai Date: Fri, 15 Apr 2022 12:51:01 +0300 Subject: [PATCH 07/69] small fix --- .../pkg/checkers/process_indices_with_timestamp.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 5038d3f5..5fb7a0ad 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -37,7 +37,7 @@ func (cc *clusterChecker) CompareIndicesWithTimestamp() error { func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { rspSource := &generalElasticResponse{} - nextScrollIDSource, _, err := cc.clientSource.InitializeScroll( + nextScrollIDSource, doneSource, err := cc.clientSource.InitializeScroll( index, getAllSortTimestampASC(true, cc.startTimestamp, cc.stopTimestamp), rspSource, @@ -47,7 +47,7 @@ func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { } rspDestination := &generalElasticResponse{} - nextScrollIDDestination, _, err := cc.clientDestination.InitializeScroll( + nextScrollIDDestination, doneDestination, err := cc.clientDestination.InitializeScroll( index, getAllSortTimestampASC(true, cc.startTimestamp, cc.stopTimestamp), rspDestination, @@ -57,6 +57,9 @@ func (cc *clusterChecker) compareIndexWithTimestamp(index string) error { } cc.compareResults(index, rspSource, rspDestination) + if doneSource && doneDestination { + return nil + } cc.continueReading(index, nextScrollIDSource, nextScrollIDDestination) From 09abb286d71baed2db32600770e970b9c766a9f2 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 15 Apr 2022 14:03:15 +0300 Subject: [PATCH 08/69] small refactor --- .../pkg/checkers/process_indices_with_timestamp.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 5fb7a0ad..95d36f03 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -6,6 +6,10 @@ import ( logger "github.com/ElrondNetwork/elrond-go-logger" ) +const ( + checkAccumulateInterval = 10 +) + var ( log = logger.GetOrCreate("checkers") ) @@ -110,7 +114,7 @@ func (cc *clusterChecker) continueReading(index string, scrollIDSource, scrollID cc.compareResults(index, rspFromSource, rspFromDestination) log.Info(cc.logPrefix+": comparing results", "index", index, "count", count) - if count%10 == 0 { + if count%checkAccumulateInterval == 0 { cc.checkMaps(index, false) } From 416f284adbd76587eec147fbbacd618f1c6b71b5 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 18 Apr 2022 12:51:02 +0300 Subject: [PATCH 09/69] small fix --- tools/clusters-checker/pkg/checkers/count.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/clusters-checker/pkg/checkers/count.go b/tools/clusters-checker/pkg/checkers/count.go index 0e9e5620..0df7a44b 100644 --- a/tools/clusters-checker/pkg/checkers/count.go +++ b/tools/clusters-checker/pkg/checkers/count.go @@ -42,14 +42,14 @@ func (cc *clusterChecker) compareCount(index string) error { log.Info(cc.logPrefix+": number of documents", "index", index, "source cluster", countSourceCluster, "destination cluster", countDestinationCluster, - "in destination cluster are more elements, difference", 
math.Abs(float64(difference)), + "in destination cluster are more elements, difference", uint64(math.Abs(float64(difference))), ) } else { log.Info(cc.logPrefix+": number of documents", "index", index, "source cluster", countSourceCluster, "destination cluster", countDestinationCluster, - "in source cluster are more elements, difference", math.Abs(float64(difference)), + "in source cluster are more elements, difference", uint64(math.Abs(float64(difference))), ) } From b904195dc4c325c499740510d92c8aac286c28f4 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 18 Apr 2022 14:50:04 +0300 Subject: [PATCH 10/69] fix log --- .../pkg/checkers/process_indices_with_timestamp.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go index 95d36f03..6f0d62ff 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_with_timestamp.go @@ -164,7 +164,7 @@ func (cc *clusterChecker) checkMaps(index string, finish bool) { rawDataDestination, found := cc.missingFromDestination[id] if !found { if finish { - log.Warn(cc.logPrefix+": cannot find document source", "index", index, "id", id) + log.Warn(cc.logPrefix+": cannot find document destination", "index", index, "id", id) } continue } @@ -185,7 +185,7 @@ func (cc *clusterChecker) checkMaps(index string, finish bool) { } if finish { for id := range cc.missingFromDestination { - log.Warn(cc.logPrefix+": cannot find document destination", "index", index, "id", id) + log.Warn(cc.logPrefix+": cannot find document source", "index", index, "id", id) } } From abfc216e5109b4faf7c87eb526fe25421b235b1f Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 27 Apr 2022 12:59:41 +0300 Subject: [PATCH 11/69] cleanup config --- tools/clusters-checker/cmd/checker/config.toml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index 3446e825..1c078bf1 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -8,7 +8,6 @@ user = "" password = "" [compare] - #indices = ["rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", "logs", "delegators"] interval = [ {start = 1596117600, stop = 1613397600}, # Day 0 --- Day 200 {start = 1613397601, stop = 1630677600}, # Day 200 - Day 400 @@ -16,6 +15,6 @@ {start = 1639317602, stop = 1647957602}, # Day 500 - Day 600 {start = 1647957603, stop = 1667288039}, # Day 600 - Day XXX ] - indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens"] + #indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens", "accounts"] #indices-with-timestamp = [ "blocks"] - indices-no-timestamp = ["rating", "validators", "epochinfo", "epochinfo", "tags", "logs", "delegators", "accounts"] + indices-no-timestamp = ["rating", "validators", "epochinfo", "tags", "logs", "delegators"] From 604b8c29284805905bca131f4b20dea697c18d36 Mon Sep 17 00:00:00 2001 From: 
Iuga Mihai Date: Wed, 27 Apr 2022 13:22:49 +0300 Subject: [PATCH 12/69] fix rating index --- tools/clusters-checker/cmd/checker/config.toml | 5 ++--- tools/clusters-checker/pkg/checkers/interface.go | 1 + .../pkg/checkers/process_indices_no_timestamp.go | 12 +++++++++++- tools/clusters-checker/pkg/client/client.go | 3 ++- 4 files changed, 16 insertions(+), 5 deletions(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index 1c078bf1..641aecf1 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -15,6 +15,5 @@ {start = 1639317602, stop = 1647957602}, # Day 500 - Day 600 {start = 1647957603, stop = 1667288039}, # Day 600 - Day XXX ] - #indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens", "accounts"] - #indices-with-timestamp = [ "blocks"] - indices-no-timestamp = ["rating", "validators", "epochinfo", "tags", "logs", "delegators"] + indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens", "accounts", "logs"] + indices-no-timestamp = ["rating", "validators", "epochinfo", "tags", "delegators"] diff --git a/tools/clusters-checker/pkg/checkers/interface.go b/tools/clusters-checker/pkg/checkers/interface.go index 719f45ac..af6de57d 100644 --- a/tools/clusters-checker/pkg/checkers/interface.go +++ b/tools/clusters-checker/pkg/checkers/interface.go @@ -11,6 +11,7 @@ type ESClient interface { index string, body []byte, handlerFunc func(responseBytes []byte) error, + size int, ) error } diff --git a/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go index 2f1fdcde..28771100 100644 --- a/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go +++ b/tools/clusters-checker/pkg/checkers/process_indices_no_timestamp.go @@ -4,6 +4,11 @@ import ( "encoding/json" ) +const ( + defaultSize = 9000 + sizeRating = 5000 +) + func (cc *clusterChecker) CompareIndicesNoTimestamp() error { for _, index := range cc.indicesNoTimestamp { err := cc.compareIndex(index) @@ -30,7 +35,12 @@ func (cc *clusterChecker) compareIndex(index string) error { return cc.processResponse(index, genericResponse) } - return cc.clientSource.DoScrollRequestAllDocuments(index, getAll(true), handlerFunc) + size := defaultSize + if index == "rating" { + size = sizeRating + } + + return cc.clientSource.DoScrollRequestAllDocuments(index, getAll(true), handlerFunc, size) } func (cc *clusterChecker) processResponse(index string, genericResponse *generalElasticResponse) error { diff --git a/tools/clusters-checker/pkg/client/client.go b/tools/clusters-checker/pkg/client/client.go index 663d45c9..d7aeb5d2 100644 --- a/tools/clusters-checker/pkg/client/client.go +++ b/tools/clusters-checker/pkg/client/client.go @@ -143,9 +143,10 @@ func (esc *esClient) DoScrollRequestAllDocuments( index string, body []byte, handlerFunc func(responseBytes []byte) error, + size int, ) error { res, err := esc.client.Search( - esc.client.Search.WithSize(9000), + esc.client.Search.WithSize(size), esc.client.Search.WithScroll(10*time.Minute+time.Duration(esc.updateAndGetCountScroll())*time.Millisecond), esc.client.Search.WithContext(context.Background()), esc.client.Search.WithIndex(index), From 
7040788c8bb3c1149b6999c8befd1f4abb34e274 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 3 May 2022 13:50:10 +0300 Subject: [PATCH 13/69] integrate partial execution --- go.mod | 2 +- go.sum | 4 +-- process/logsevents/serialize.go | 34 ++++++++++++++---- process/transactions/transactionsGrouper.go | 36 +++++++++++++++++-- process/transactions/transactionsProcessor.go | 6 ++-- 5 files changed, 67 insertions(+), 15 deletions(-) diff --git a/go.mod b/go.mod index 77999405..72fd5d70 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/ElrondNetwork/elastic-indexer-go go 1.17 require ( - github.com/ElrondNetwork/elrond-go-core v1.1.14 + github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711 github.com/ElrondNetwork/elrond-go-logger v1.0.5 github.com/ElrondNetwork/elrond-vm-common v1.2.9 github.com/elastic/go-elasticsearch/v7 v7.12.0 diff --git a/go.sum b/go.sum index b7e33794..a52a7bef 100644 --- a/go.sum +++ b/go.sum @@ -1,7 +1,7 @@ github.com/ElrondNetwork/elrond-go-core v1.0.0/go.mod h1:FQMem7fFF4+8pQ6lVsBZq6yO+smD0nV23P4bJpmPjTo= github.com/ElrondNetwork/elrond-go-core v1.1.7/go.mod h1:O9FkkTT2H9kxCzfn40TbhoCDXzGmUrRVusMomhK/Y3g= -github.com/ElrondNetwork/elrond-go-core v1.1.14 h1:JKpeI+1US4FuE8NwN3dqe0HUTYKLQuYKvwbTqhGt334= -github.com/ElrondNetwork/elrond-go-core v1.1.14/go.mod h1:Yz8JK5sGBctw7+gU8j2mZHbzQ09Ek4XHJ4Uinq1N6nM= +github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711 h1:pU3ZyHL/gMg/2cN+DxG3tpalVT+iJfKysE6S7GwzB4Y= +github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711/go.mod h1:Yz8JK5sGBctw7+gU8j2mZHbzQ09Ek4XHJ4Uinq1N6nM= github.com/ElrondNetwork/elrond-go-logger v1.0.4/go.mod h1:e5D+c97lKUfFdAzFX7rrI2Igl/z4Y0RkKYKWyzprTGk= github.com/ElrondNetwork/elrond-go-logger v1.0.5 h1:tB/HBvV9IVeCaSrGakX+GLGu7K5UPLv8gA0TNKPOTOU= github.com/ElrondNetwork/elrond-go-logger v1.0.5/go.mod h1:cBfgx0ST/CJx8jrxJSC5aiSrvkGzcnF7sK06RD8mFxQ= diff --git a/process/logsevents/serialize.go b/process/logsevents/serialize.go index e158448c..0de8a8ae 100644 --- a/process/logsevents/serialize.go +++ b/process/logsevents/serialize.go @@ -64,12 +64,19 @@ func serializeDeploy(deployInfo *data.ScDeployInfo) ([]byte, error) { return nil, errPrepareU } + codeToExecute := ` + if (!ctx._source.containsKey('upgrades')) { + ctx._source.upgrades = [params.elem]; + } else { + ctx._source.upgrades.add(params.elem); + } +` serializedDataStr := fmt.Sprintf(`{"script": {`+ - `"source": "if (!ctx._source.containsKey('upgrades')) { ctx._source.upgrades = [ params.elem ]; } else { ctx._source.upgrades.add(params.elem); }",`+ + `"source": "%s",`+ `"lang": "painless",`+ `"params": {"elem": %s}},`+ `"upsert": %s}`, - string(upgradeSerialized), string(serializedData)) + converters.FormatPainlessSource(codeToExecute), string(upgradeSerialized), string(serializedData)) return []byte(serializedDataStr), nil } @@ -102,12 +109,19 @@ func serializeToken(tokenData *data.TokenInfo, index string) ([]byte, []byte, er return nil, nil, err } + codeToExecute := ` + if (ctx._source.containsKey('roles')) { + HashMap roles = ctx._source.roles; + ctx._source = params.token; + ctx._source.roles = roles + } +` serializedDataStr := fmt.Sprintf(`{"script": {`+ - `"source": "if (ctx._source.containsKey('roles')) {HashMap roles = ctx._source.roles; ctx._source = params.token; ctx._source.roles = roles}",`+ + `"source": "%s",`+ `"lang": 
"painless",`+ `"params": {"token": %s}},`+ `"upsert": %s}`, - string(serializedTokenData), string(serializedTokenData)) + converters.FormatPainlessSource(codeToExecute), string(serializedTokenData), string(serializedTokenData)) return meta, []byte(serializedDataStr), nil } @@ -129,12 +143,20 @@ func serializeTokenTransferOwnership(tokenData *data.TokenInfo, index string) ([ return nil, nil, err } + codeToExecute := ` + if (!ctx._source.containsKey('ownersHistory')) { + ctx._source.ownersHistory = [params.elem] + } else { + ctx._source.ownersHistory.add(params.elem) + } + ctx._source.currentOwner = params.owner +` serializedDataStr := fmt.Sprintf(`{"script": {`+ - `"source": "if (!ctx._source.containsKey('ownersHistory')) { ctx._source.ownersHistory = [ params.elem ] } else { ctx._source.ownersHistory.add(params.elem) } ctx._source.currentOwner = params.owner ",`+ + `"source": "%s",`+ `"lang": "painless",`+ `"params": {"elem": %s, "owner": "%s"}},`+ `"upsert": %s}`, - string(ownerDataSerialized), tokenData.CurrentOwner, string(tokenDataSerialized)) + converters.FormatPainlessSource(codeToExecute), string(ownerDataSerialized), tokenData.CurrentOwner, string(tokenDataSerialized)) return meta, []byte(serializedDataStr), nil } diff --git a/process/transactions/transactionsGrouper.go b/process/transactions/transactionsGrouper.go index f7092bc8..e8e48bac 100644 --- a/process/transactions/transactionsGrouper.go +++ b/process/transactions/transactionsGrouper.go @@ -41,6 +41,7 @@ func newTxsGrouper( } func (tg *txsGrouper) groupNormalTxs( + mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, txs map[string]coreData.TransactionHandler, @@ -54,7 +55,11 @@ func (tg *txsGrouper) groupNormalTxs( } mbStatus := computeStatus(tg.selfShardID, mb.ReceiverShardID) - for _, txHash := range mb.TxHashes { + for index, txHash := range mb.TxHashes { + if shouldIgnoreNotExecutedTx(mbIndex, index, header) { + continue + } + dbTx, ok := tg.prepareNormalTxForDB(mbHash, mb, mbStatus, txHash, txs, header) if !ok { continue @@ -69,6 +74,21 @@ func (tg *txsGrouper) groupNormalTxs( return transactions, nil } +func shouldIgnoreNotExecutedTx(mbIndex, txIndex int, header coreData.HeaderHandler) bool { + miniblockHeaders := header.GetMiniBlockHeaderHandlers() + if len(miniblockHeaders) <= mbIndex { + return false + } + + firstProcessed := miniblockHeaders[mbIndex].GetIndexOfFirstTxProcessed() + lastProcessed := miniblockHeaders[mbIndex].GetIndexOfLastTxProcessed() + if int32(txIndex) < firstProcessed || int32(txIndex) > lastProcessed { + return true + } + + return false +} + func (tg *txsGrouper) prepareNormalTxForDB( mbHash []byte, mb *block.MiniBlock, @@ -93,6 +113,7 @@ func (tg *txsGrouper) prepareNormalTxForDB( } func (tg *txsGrouper) groupRewardsTxs( + mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, txs map[string]coreData.TransactionHandler, @@ -105,7 +126,11 @@ func (tg *txsGrouper) groupRewardsTxs( } mbStatus := computeStatus(tg.selfShardID, mb.ReceiverShardID) - for _, txHash := range mb.TxHashes { + for index, txHash := range mb.TxHashes { + if shouldIgnoreNotExecutedTx(mbIndex, index, header) { + continue + } + rewardDBTx, ok := tg.prepareRewardTxForDB(mbHash, mb, mbStatus, txHash, txs, header) if !ok { continue @@ -144,6 +169,7 @@ func (tg *txsGrouper) prepareRewardTxForDB( } func (tg *txsGrouper) groupInvalidTxs( + mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, txs map[string]coreData.TransactionHandler, @@ -155,7 +181,11 @@ func (tg *txsGrouper) groupInvalidTxs( 
return nil, err } - for _, txHash := range mb.TxHashes { + for index, txHash := range mb.TxHashes { + if shouldIgnoreNotExecutedTx(mbIndex, index, header) { + continue + } + invalidDBTx, ok := tg.prepareInvalidTxForDB(mbHash, mb, txHash, txs, header) if !ok { continue diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 27b3ea41..01c959a5 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -103,21 +103,21 @@ func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( continue } - txs, errGroup := tdp.txsGrouper.groupNormalTxs(mb, header, pool.Txs, alteredAccounts) + txs, errGroup := tdp.txsGrouper.groupNormalTxs(mbIndex, mb, header, pool.Txs, alteredAccounts) if errGroup != nil { log.Warn("txsDatabaseProcessor.groupNormalTxs", "error", errGroup) continue } mergeTxsMaps(normalTxs, txs) case block.RewardsBlock: - txs, errGroup := tdp.txsGrouper.groupRewardsTxs(mb, header, pool.Rewards, alteredAccounts) + txs, errGroup := tdp.txsGrouper.groupRewardsTxs(mbIndex, mb, header, pool.Rewards, alteredAccounts) if errGroup != nil { log.Warn("txsDatabaseProcessor.groupRewardsTxs", "error", errGroup) continue } mergeTxsMaps(rewardsTxs, txs) case block.InvalidBlock: - txs, errGroup := tdp.txsGrouper.groupInvalidTxs(mb, header, pool.Invalid, alteredAccounts) + txs, errGroup := tdp.txsGrouper.groupInvalidTxs(mbIndex, mb, header, pool.Invalid, alteredAccounts) if errGroup != nil { log.Warn("txsDatabaseProcessor.groupInvalidTxs", "error", errGroup) continue From 61358d399cac849c8e5758827038438a16d2284c Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 3 May 2022 13:59:33 +0300 Subject: [PATCH 14/69] fix unit tests --- process/logsevents/serialize_test.go | 6 +++--- process/transactions/transactionsGrouper_test.go | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/process/logsevents/serialize_test.go b/process/logsevents/serialize_test.go index a13f4b7c..851b2166 100644 --- a/process/logsevents/serialize_test.go +++ b/process/logsevents/serialize_test.go @@ -57,7 +57,7 @@ func TestLogsAndEventsProcessor_SerializeSCDeploys(t *testing.T) { require.Nil(t, err) expectedRes := `{ "update" : { "_index":"scdeploys", "_id" : "scAddr" } } -{"script": {"source": "if (!ctx._source.containsKey('upgrades')) { ctx._source.upgrades = [ params.elem ]; } else { ctx._source.upgrades.add(params.elem); }","lang": "painless","params": {"elem": {"upgradeTxHash":"hash","upgrader":"creator","timestamp":123}}},"upsert": {"deployTxHash":"hash","deployer":"creator","timestamp":123,"upgrades":[]}} +{"script": {"source": "if (!ctx._source.containsKey('upgrades')) {ctx._source.upgrades = [params.elem];} else {ctx._source.upgrades.add(params.elem);}","lang": "painless","params": {"elem": {"upgradeTxHash":"hash","upgrader":"creator","timestamp":123}}},"upsert": {"deployTxHash":"hash","deployer":"creator","timestamp":123,"upgrades":[]}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -104,9 +104,9 @@ func TestSerializeTokens(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : { "_index":"tokens", "_id" : "TKN-01234" } } -{"script": {"source": "if (ctx._source.containsKey('roles')) {HashMap roles = ctx._source.roles; ctx._source = params.token; ctx._source.roles = roles}","lang": "painless","params": {"token": 
{"name":"TokenName","ticker":"TKN","token":"TKN-01234","issuer":"erd123","currentOwner":"erd123","type":"SemiFungibleESDT","timestamp":50000,"ownersHistory":[{"address":"erd123","timestamp":50000}]}}},"upsert": {"name":"TokenName","ticker":"TKN","token":"TKN-01234","issuer":"erd123","currentOwner":"erd123","type":"SemiFungibleESDT","timestamp":50000,"ownersHistory":[{"address":"erd123","timestamp":50000}]}} +{"script": {"source": "if (ctx._source.containsKey('roles')) {HashMap roles = ctx._source.roles;ctx._source = params.token;ctx._source.roles = roles}","lang": "painless","params": {"token": {"name":"TokenName","ticker":"TKN","token":"TKN-01234","issuer":"erd123","currentOwner":"erd123","type":"SemiFungibleESDT","timestamp":50000,"ownersHistory":[{"address":"erd123","timestamp":50000}]}}},"upsert": {"name":"TokenName","ticker":"TKN","token":"TKN-01234","issuer":"erd123","currentOwner":"erd123","type":"SemiFungibleESDT","timestamp":50000,"ownersHistory":[{"address":"erd123","timestamp":50000}]}} { "update" : { "_index":"tokens", "_id" : "TKN2-51234" } } -{"script": {"source": "if (!ctx._source.containsKey('ownersHistory')) { ctx._source.ownersHistory = [ params.elem ] } else { ctx._source.ownersHistory.add(params.elem) } ctx._source.currentOwner = params.owner ","lang": "painless","params": {"elem": {"address":"abde123456","timestamp":60000}, "owner": "abde123456"}},"upsert": {"name":"Token2","ticker":"TKN2","token":"TKN2-51234","issuer":"erd1231213123","currentOwner":"abde123456","type":"NonFungibleESDT","timestamp":60000,"ownersHistory":[{"address":"abde123456","timestamp":60000}]}} +{"script": {"source": "if (!ctx._source.containsKey('ownersHistory')) {ctx._source.ownersHistory = [params.elem]} else {ctx._source.ownersHistory.add(params.elem)}ctx._source.currentOwner = params.owner","lang": "painless","params": {"elem": {"address":"abde123456","timestamp":60000}, "owner": "abde123456"}},"upsert": {"name":"Token2","ticker":"TKN2","token":"TKN2-51234","issuer":"erd1231213123","currentOwner":"abde123456","type":"NonFungibleESDT","timestamp":60000,"ownersHistory":[{"address":"abde123456","timestamp":60000}]}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/transactions/transactionsGrouper_test.go b/process/transactions/transactionsGrouper_test.go index 20f5b329..64f38199 100644 --- a/process/transactions/transactionsGrouper_test.go +++ b/process/transactions/transactionsGrouper_test.go @@ -39,7 +39,7 @@ func TestGroupNormalTxs(t *testing.T) { } alteredAddresses := data.NewAlteredAccounts() - normalTxs, _ := grouper.groupNormalTxs(mb, header, txs, alteredAddresses) + normalTxs, _ := grouper.groupNormalTxs(0, mb, header, txs, alteredAddresses) require.Len(t, normalTxs, 2) require.Equal(t, 4, alteredAddresses.Len()) } @@ -68,7 +68,7 @@ func TestGroupRewardsTxs(t *testing.T) { } alteredAddresses := data.NewAlteredAccounts() - normalTxs, _ := grouper.groupRewardsTxs(mb, header, txs, alteredAddresses) + normalTxs, _ := grouper.groupRewardsTxs(0, mb, header, txs, alteredAddresses) require.Len(t, normalTxs, 2) require.Equal(t, 2, alteredAddresses.Len()) } @@ -99,7 +99,7 @@ func TestGroupInvalidTxs(t *testing.T) { } alteredAddresses := data.NewAlteredAccounts() - normalTxs, _ := grouper.groupInvalidTxs(mb, header, txs, alteredAddresses) + normalTxs, _ := grouper.groupInvalidTxs(0, mb, header, txs, alteredAddresses) require.Len(t, normalTxs, 2) require.Equal(t, 2, alteredAddresses.Len()) } From 1e8de9f41c7a11df1d2a98dd46f66504a75ab8cb Mon Sep 17 00:00:00 2001 
From: Iuga Mihai
Date: Mon, 30 May 2022 11:15:15 +0300
Subject: [PATCH 15/69] optimize

---
 process/transactions/transactionsGrouper.go | 37 ++++++++++-----------
 1 file changed, 17 insertions(+), 20 deletions(-)

diff --git a/process/transactions/transactionsGrouper.go b/process/transactions/transactionsGrouper.go
index e8e48bac..f8968586 100644
--- a/process/transactions/transactionsGrouper.go
+++ b/process/transactions/transactionsGrouper.go
@@ -54,12 +54,9 @@ func (tg *txsGrouper) groupNormalTxs(
 		return nil, err
 	}
 
+	executedTxHashes := extractExecutedTxHashes(mbIndex, mb.TxHashes, header)
 	mbStatus := computeStatus(tg.selfShardID, mb.ReceiverShardID)
-	for index, txHash := range mb.TxHashes {
-		if shouldIgnoreNotExecutedTx(mbIndex, index, header) {
-			continue
-		}
-
+	for _, txHash := range executedTxHashes {
 		dbTx, ok := tg.prepareNormalTxForDB(mbHash, mb, mbStatus, txHash, txs, header)
 		if !ok {
 			continue
@@ -74,19 +71,25 @@ func (tg *txsGrouper) groupNormalTxs(
 	return transactions, nil
 }
 
-func shouldIgnoreNotExecutedTx(mbIndex, txIndex int, header coreData.HeaderHandler) bool {
+func extractExecutedTxHashes(mbIndex int, mbTxHashes [][]byte, header coreData.HeaderHandler) [][]byte {
 	miniblockHeaders := header.GetMiniBlockHeaderHandlers()
 	if len(miniblockHeaders) <= mbIndex {
-		return false
+		return mbTxHashes
 	}
 
 	firstProcessed := miniblockHeaders[mbIndex].GetIndexOfFirstTxProcessed()
 	lastProcessed := miniblockHeaders[mbIndex].GetIndexOfLastTxProcessed()
-	if int32(txIndex) < firstProcessed || int32(txIndex) > lastProcessed {
-		return true
+
+	executedTxHashes := make([][]byte, 0)
+	for txIndex, txHash := range mbTxHashes {
+		if int32(txIndex) < firstProcessed || int32(txIndex) > lastProcessed {
+			continue
+		}
+
+		executedTxHashes = append(executedTxHashes, txHash)
 	}
 
-	return false
+	return executedTxHashes
 }
 
 func (tg *txsGrouper) prepareNormalTxForDB(
@@ -126,11 +129,8 @@ func (tg *txsGrouper) groupRewardsTxs(
 	}
 
 	mbStatus := computeStatus(tg.selfShardID, mb.ReceiverShardID)
-	for index, txHash := range mb.TxHashes {
-		if shouldIgnoreNotExecutedTx(mbIndex, index, header) {
-			continue
-		}
-
+	executedTxHashes := extractExecutedTxHashes(mbIndex, mb.TxHashes, header)
+	for _, txHash := range executedTxHashes {
 		rewardDBTx, ok := tg.prepareRewardTxForDB(mbHash, mb, mbStatus, txHash, txs, header)
 		if !ok {
 			continue
@@ -181,11 +181,8 @@ func (tg *txsGrouper) groupInvalidTxs(
 		return nil, err
 	}
 
-	for index, txHash := range mb.TxHashes {
-		if shouldIgnoreNotExecutedTx(mbIndex, index, header) {
-			continue
-		}
-
+	executedTxHashes := extractExecutedTxHashes(mbIndex, mb.TxHashes, header)
+	for _, txHash := range executedTxHashes {
 		invalidDBTx, ok := tg.prepareInvalidTxForDB(mbHash, mb, txHash, txs, header)
 		if !ok {
 			continue
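The refactor above replaces the per-hash `shouldIgnoreNotExecutedTx` check with a single pass that keeps only the hashes whose position falls inside the `[IndexOfFirstTxProcessed, IndexOfLastTxProcessed]` window reported by the miniblock header. A standalone illustration of the filtering semantics, with hypothetical values:

    package main

    import "fmt"

    // extractExecuted mirrors the loop at the core of extractExecutedTxHashes:
    // a hash survives only if its index is inside the processed window.
    func extractExecuted(hashes [][]byte, first, last int32) [][]byte {
    	executed := make([][]byte, 0, len(hashes))
    	for i, hash := range hashes {
    		if int32(i) < first || int32(i) > last {
    			continue
    		}
    		executed = append(executed, hash)
    	}
    	return executed
    }

    func main() {
    	hashes := [][]byte{[]byte("tx0"), []byte("tx1"), []byte("tx2"), []byte("tx3")}
    	// the header says only txs 1..2 of this miniblock were executed in this block
    	for _, hash := range extractExecuted(hashes, 1, 2) {
    		fmt.Println(string(hash)) // tx1, tx2
    	}
    }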
From 028b006368b15663fa0646201d85cd5d4366bad1 Mon Sep 17 00:00:00 2001
From: Iuga Mihai
Date: Mon, 6 Jun 2022 17:04:36 +0300
Subject: [PATCH 16/69] extend github action

---
 .github/workflows/pr-build.yml             | 4 ++--
 .github/workflows/pr-integration-tests.yml | 4 ++--
 .github/workflows/pr-tests.yml             | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml
index 4c816dab..febc9c77 100644
--- a/.github/workflows/pr-build.yml
+++ b/.github/workflows/pr-build.yml
@@ -3,9 +3,9 @@ name: Go build
 
 on:
   push:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
   pull_request:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
 
 jobs:
   build:
diff --git a/.github/workflows/pr-integration-tests.yml b/.github/workflows/pr-integration-tests.yml
index 4658d8e2..c2a60703 100644
--- a/.github/workflows/pr-integration-tests.yml
+++ b/.github/workflows/pr-integration-tests.yml
@@ -2,9 +2,9 @@ name: Integration tests
 
 on:
   push:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
   pull_request:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
 
 jobs:
   test-1:
diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml
index bdd749be..1cda49d1 100644
--- a/.github/workflows/pr-tests.yml
+++ b/.github/workflows/pr-tests.yml
@@ -2,9 +2,9 @@ name: Tests
 
 on:
   push:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
   pull_request:
-    branches: [ master, feat/* ]
+    branches: [ master, development, feat/* ]
 
 jobs:
   test:

From 6cea394bf1ec4a2863a8d855ba32a064d26b8cdf Mon Sep 17 00:00:00 2001
From: Iuga Mihai
Date: Mon, 6 Jun 2022 17:18:04 +0300
Subject: [PATCH 17/69] fixes after review

---
 tools/clusters-checker/cmd/checker/main.go     | 5 +++++
 tools/clusters-checker/pkg/checkers/count.go   | 2 +-
 tools/clusters-checker/pkg/checkers/factory.go | 1 +
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/tools/clusters-checker/cmd/checker/main.go b/tools/clusters-checker/cmd/checker/main.go
index c5802f64..c70fa6ab 100644
--- a/tools/clusters-checker/cmd/checker/main.go
+++ b/tools/clusters-checker/cmd/checker/main.go
@@ -87,6 +87,7 @@ func checkClusters(ctx *cli.Context) {
 	cfg, err := loadConfigFile(cfgPath)
 	if err != nil {
 		log.Error("cannot load config file", "error", err.Error())
+		return
 	}
 
 	checkCountsFlag := ctx.Bool(checkCounts.Name)
@@ -94,11 +95,13 @@ func checkClusters(ctx *cli.Context) {
 		clusterChecker, errC := checkers.CreateClusterChecker(cfg, 0, "instance_0")
 		if errC != nil {
 			log.Error("cannot create cluster checker", "error", errC.Error())
+			return
 		}
 
 		errC = clusterChecker.CompareCounts()
 		if errC != nil {
 			log.Error("cannot check counts", "error", errC.Error())
+			return
 		}
 
 		return
@@ -109,11 +112,13 @@ func checkClusters(ctx *cli.Context) {
 		clusterChecker, errC := checkers.CreateClusterChecker(cfg, 0, "instance_0")
 		if errC != nil {
 			log.Error("cannot create cluster checker", "error", errC.Error())
+			return
 		}
 
 		errC = clusterChecker.CompareIndicesNoTimestamp()
 		if errC != nil {
 			log.Error("cannot check indices", "error", errC.Error())
+			return
 		}
 
 		return
diff --git a/tools/clusters-checker/pkg/checkers/count.go b/tools/clusters-checker/pkg/checkers/count.go
index 0df7a44b..8e8362f0 100644
--- a/tools/clusters-checker/pkg/checkers/count.go
+++ b/tools/clusters-checker/pkg/checkers/count.go
@@ -34,7 +34,7 @@ func (cc *clusterChecker) compareCount(index string) error {
 	difference := int64(countSourceCluster) - int64(countDestinationCluster)
 	if difference == 0 {
-		log.Info(cc.logPrefix+": number of documents are the same", "index", index,
+		log.Info(cc.logPrefix+": number of documents is the same", "index", index,
 			"source cluster", countSourceCluster,
 			"destination cluster", countDestinationCluster,
 		)
diff --git a/tools/clusters-checker/pkg/checkers/factory.go b/tools/clusters-checker/pkg/checkers/factory.go
index 190dee6f..951b8d1b 100644
--- a/tools/clusters-checker/pkg/checkers/factory.go
+++ b/tools/clusters-checker/pkg/checkers/factory.go
@@ -10,6 +10,7 @@ import (
 	"github.com/elastic/go-elasticsearch/v7"
 )
 
+// CreateClusterChecker will create a new instance of clusterChecker structure
 func CreateClusterChecker(cfg *config.Config, timestampIndex int, logPrefix string) 
(*clusterChecker, error) { clientSource, err := client.NewElasticClient(elasticsearch.Config{ Addresses: []string{cfg.SourceCluster.URL}, From db85cef5d791637e95b2a62604aae7361aba555c Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 15 Jun 2022 17:32:38 +0300 Subject: [PATCH 18/69] fix --- tools/clusters-checker/pkg/client/count.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/tools/clusters-checker/pkg/client/count.go b/tools/clusters-checker/pkg/client/count.go index b2435c95..eb4e0684 100644 --- a/tools/clusters-checker/pkg/client/count.go +++ b/tools/clusters-checker/pkg/client/count.go @@ -15,9 +15,6 @@ func (esc *esClient) DoCountRequest(index string, body []byte) (uint64, error) { if err != nil { return 0, err } - if err != nil { - return 0, err - } bodyBytes, err := getBytesFromResponse(res) if err != nil { From 97e7b4505e9441dc6a78c15db7dc1c2d3d2b0677 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 16 Jun 2022 14:04:42 +0300 Subject: [PATCH 19/69] add operations --- tools/clusters-checker/cmd/checker/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/clusters-checker/cmd/checker/config.toml b/tools/clusters-checker/cmd/checker/config.toml index 641aecf1..824793d3 100644 --- a/tools/clusters-checker/cmd/checker/config.toml +++ b/tools/clusters-checker/cmd/checker/config.toml @@ -15,5 +15,5 @@ {start = 1639317602, stop = 1647957602}, # Day 500 - Day 600 {start = 1647957603, stop = 1667288039}, # Day 600 - Day XXX ] - indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens", "accounts", "logs"] + indices-with-timestamp = [ "receipts", "transactions", "blocks", "miniblocks", "rounds", "accountshistory", "scresults", "accountsesdt", "accountsesdthistory", "scdeploys", "tokens", "accounts", "logs", "operations"] indices-no-timestamp = ["rating", "validators", "epochinfo", "tags", "delegators"] From 3db265a14c650b201dc5a3cff429bfbd9f072cfd Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 12:36:40 +0300 Subject: [PATCH 20/69] fix nft processor --- process/logsevents/nftsProcessor.go | 3 ++- process/logsevents/nftsProcessor_test.go | 32 ++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/process/logsevents/nftsProcessor.go b/process/logsevents/nftsProcessor.go index 766a84b8..e0b512b9 100644 --- a/process/logsevents/nftsProcessor.go +++ b/process/logsevents/nftsProcessor.go @@ -38,6 +38,7 @@ func newNFTsProcessor( core.BuiltInFunctionESDTNFTAddQuantity: {}, core.BuiltInFunctionESDTNFTCreate: {}, core.BuiltInFunctionMultiESDTNFTTransfer: {}, + core.BuiltInFunctionESDTWipe: {}, }, } } @@ -134,7 +135,7 @@ func (np *nftsProcessor) processNFTEventOnSender( bech32Addr := np.pubKeyConverter.Encode(sender) eventIdentifier := string(event.GetIdentifier()) - if eventIdentifier == core.BuiltInFunctionESDTNFTBurn { + if eventIdentifier == core.BuiltInFunctionESDTNFTBurn || eventIdentifier == core.BuiltInFunctionESDTWipe { tokensSupply.Add(&data.TokenInfo{ Token: token, Identifier: converters.ComputeTokenIdentifier(token, nonceBig.Uint64()), diff --git a/process/logsevents/nftsProcessor_test.go b/process/logsevents/nftsProcessor_test.go index 822ee43e..224a648b 100644 --- a/process/logsevents/nftsProcessor_test.go +++ b/process/logsevents/nftsProcessor_test.go @@ -107,3 +107,35 @@ func TestNftsProcessor_processLogAndEventsNFTs_TransferNFT(t *testing.T) { NFTNonce: 19, }, 
alteredAddrReceiver[0]) } + +func TestNftsProcessor_processLogAndEventsNFTs_Wipe(t *testing.T) { + t.Parallel() + + nonce := uint64(20) + nftsProc := newNFTsProcessor(&mock.ShardCoordinatorMock{}, &mock.PubkeyConverterMock{}, &mock.MarshalizerMock{}) + + events := &transaction.Event{ + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTWipe), + Topics: [][]byte{[]byte("nft-0123"), big.NewInt(0).SetUint64(nonce).Bytes(), big.NewInt(1).Bytes(), []byte("receiver")}, + } + + altered := data.NewAlteredAccounts() + + res := nftsProc.processEvent(&argsProcessEvent{ + event: events, + accounts: altered, + timestamp: 10000, + }) + require.Equal(t, "nft-0123-14", res.identifier) + require.Equal(t, "1", res.value) + require.Equal(t, true, res.processed) + + alteredAddrSender, ok := altered.Get("61646472") + require.True(t, ok) + require.Equal(t, &data.AlteredAccount{ + IsNFTOperation: true, + TokenIdentifier: "nft-0123", + NFTNonce: 20, + }, alteredAddrSender[0]) +} From a73c8e96322ad10daaa36240f0799fdbd3a40f42 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 12:36:53 +0300 Subject: [PATCH 21/69] collections index initial impl --- constants.go | 2 ++ process/elasticProcessor.go | 1 + process/templatesAndPolicies/noKibana.go | 1 + process/templatesAndPolicies/withKibana.go | 1 + templates/noKibana/collections.go | 16 ++++++++++++++++ templates/withKibana/collections.go | 16 ++++++++++++++++ 6 files changed, 37 insertions(+) create mode 100644 templates/noKibana/collections.go create mode 100644 templates/withKibana/collections.go diff --git a/constants.go b/constants.go index e3b8c40e..3b41d7da 100644 --- a/constants.go +++ b/constants.go @@ -43,6 +43,8 @@ const ( DelegatorsIndex = "delegators" // OperationsIndex is the Elasticsearch index for transactions and smart contract results OperationsIndex = "operations" + // CollectionsIndex is the Elasticsearch index for collections + CollectionsIndex = "collections" // TransactionsPolicy is the Elasticsearch policy for the transactions TransactionsPolicy = "transactions_policy" diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 48bdfb5b..7ce3a4b0 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -32,6 +32,7 @@ var ( elasticIndexer.TransactionsIndex, elasticIndexer.BlockIndex, elasticIndexer.MiniblocksIndex, elasticIndexer.RatingIndex, elasticIndexer.RoundsIndex, elasticIndexer.ValidatorsIndex, elasticIndexer.AccountsIndex, elasticIndexer.AccountsHistoryIndex, elasticIndexer.ReceiptsIndex, elasticIndexer.ScResultsIndex, elasticIndexer.AccountsESDTHistoryIndex, elasticIndexer.AccountsESDTIndex, elasticIndexer.EpochInfoIndex, elasticIndexer.SCDeploysIndex, elasticIndexer.TokensIndex, elasticIndexer.TagsIndex, elasticIndexer.LogsIndex, elasticIndexer.DelegatorsIndex, elasticIndexer.OperationsIndex, + elasticIndexer.CollectionsIndex, } ) diff --git a/process/templatesAndPolicies/noKibana.go b/process/templatesAndPolicies/noKibana.go index aa0c6f00..ce96e2a9 100644 --- a/process/templatesAndPolicies/noKibana.go +++ b/process/templatesAndPolicies/noKibana.go @@ -39,6 +39,7 @@ func (tr *templatesAndPolicyReaderNoKibana) GetElasticTemplatesAndPolicies() (ma indexTemplates[indexer.LogsIndex] = noKibana.Logs.ToBuffer() indexTemplates[indexer.DelegatorsIndex] = noKibana.Delegators.ToBuffer() indexTemplates[indexer.OperationsIndex] = noKibana.Operations.ToBuffer() + indexTemplates[indexer.CollectionsIndex] = noKibana.Collections.ToBuffer() return indexTemplates, indexPolicies, nil 
} diff --git a/process/templatesAndPolicies/withKibana.go b/process/templatesAndPolicies/withKibana.go index c2d0bdc0..1976c1bc 100644 --- a/process/templatesAndPolicies/withKibana.go +++ b/process/templatesAndPolicies/withKibana.go @@ -45,6 +45,7 @@ func getTemplatesKibana() map[string]*bytes.Buffer { indexTemplates[indexer.LogsIndex] = withKibana.Logs.ToBuffer() indexTemplates[indexer.DelegatorsIndex] = withKibana.Delegators.ToBuffer() indexTemplates[indexer.OperationsIndex] = withKibana.Operations.ToBuffer() + indexTemplates[indexer.CollectionsIndex] = withKibana.Collections.ToBuffer() return indexTemplates } diff --git a/templates/noKibana/collections.go b/templates/noKibana/collections.go new file mode 100644 index 00000000..f5c0d6df --- /dev/null +++ b/templates/noKibana/collections.go @@ -0,0 +1,16 @@ +package noKibana + +// Collections will hold the configuration for the collections index +var Collections = Object{ + "index_patterns": Array{ + "collections-*", + }, + "settings": Object{ + "number_of_shards": 3, + "number_of_replicas": 0, + }, + + "mappings": Object{ + "dynamic": false, + }, +} diff --git a/templates/withKibana/collections.go b/templates/withKibana/collections.go new file mode 100644 index 00000000..02346cf6 --- /dev/null +++ b/templates/withKibana/collections.go @@ -0,0 +1,16 @@ +package withKibana + +// Collections will hold the configuration for the collections index +var Collections = Object{ + "index_patterns": Array{ + "collections-*", + }, + "settings": Object{ + "number_of_shards": 3, + "number_of_replicas": 0, + }, + + "mappings": Object{ + "dynamic": false, + }, +} From c7aa4b1d927968c0ca27fba7498e468031b537d3 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 12:38:46 +0300 Subject: [PATCH 22/69] collections --- .../config/noKibana/collections.json | 13 +++++++++++++ .../config/withKibana/collections.json | 13 +++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json create mode 100644 tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json diff --git a/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json b/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json new file mode 100644 index 00000000..0c4e0573 --- /dev/null +++ b/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json @@ -0,0 +1,13 @@ +{ + "index_patterns": [ + "collections-*" + ], + "settings": { + "number_of_shards": 3, + "number_of_replicas": 0 + }, + + "mappings": { + "dynamic": false + } +} \ No newline at end of file diff --git a/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json b/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json new file mode 100644 index 00000000..0c4e0573 --- /dev/null +++ b/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json @@ -0,0 +1,13 @@ +{ + "index_patterns": [ + "collections-*" + ], + "settings": { + "number_of_shards": 3, + "number_of_replicas": 0 + }, + + "mappings": { + "dynamic": false + } +} \ No newline at end of file From 5de1320001c4153aefdc666fa653f408de2a26ea Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 12:39:55 +0300 Subject: [PATCH 23/69] empty --- .../cmd/indices-creator/config/noKibana/collections.json | 2 +- .../cmd/indices-creator/config/withKibana/collections.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json b/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json index 0c4e0573..fee59d37 100644 --- a/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json +++ b/tools/indices-creator/cmd/indices-creator/config/noKibana/collections.json @@ -10,4 +10,4 @@ "mappings": { "dynamic": false } -} \ No newline at end of file +} diff --git a/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json b/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json index 0c4e0573..fee59d37 100644 --- a/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json +++ b/tools/indices-creator/cmd/indices-creator/config/withKibana/collections.json @@ -10,4 +10,4 @@ "mappings": { "dynamic": false } -} \ No newline at end of file +} From 97fd2297204e7020f8b961df8686d693034a014f Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 17:17:51 +0300 Subject: [PATCH 24/69] implement serialize for collections --- integrationtests/utils.go | 7 ++-- process/accounts/serialize.go | 45 ++++++++++++++++++++---- process/collections/serialize.go | 60 ++++++++++++++++++++++++++++++++ process/elasticProcessor.go | 6 ++++ 4 files changed, 109 insertions(+), 9 deletions(-) create mode 100644 process/collections/serialize.go diff --git a/integrationtests/utils.go b/integrationtests/utils.go index f6710cb6..af37ca3b 100644 --- a/integrationtests/utils.go +++ b/integrationtests/utils.go @@ -47,9 +47,10 @@ func CreateElasticProcessor( AccountsDB: accountsDB, ShardCoordinator: shardCoordinator, TransactionFeeCalculator: feeProcessor, - EnabledIndexes: []string{indexer.TransactionsIndex, indexer.LogsIndex, indexer.AccountsESDTIndex, indexer.ScResultsIndex, indexer.ReceiptsIndex, indexer.BlockIndex, indexer.AccountsIndex, indexer.TokensIndex, indexer.TagsIndex}, - Denomination: 18, - IsInImportDBMode: false, + EnabledIndexes: []string{indexer.TransactionsIndex, indexer.LogsIndex, indexer.AccountsESDTIndex, indexer.ScResultsIndex, + indexer.ReceiptsIndex, indexer.BlockIndex, indexer.AccountsIndex, indexer.TokensIndex, indexer.TagsIndex, indexer.CollectionsIndex}, + Denomination: 18, + IsInImportDBMode: false, } return factory.CreateElasticProcessor(args) diff --git a/process/accounts/serialize.go b/process/accounts/serialize.go index e8d0c272..e9065061 100644 --- a/process/accounts/serialize.go +++ b/process/accounts/serialize.go @@ -88,12 +88,25 @@ func prepareDeleteAccountInfo(acct *data.AccountInfo, isESDT bool, index string) meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) + codeToExecute := ` + if (ctx.op == 'create') { + ctx.op = 'noop' + } else { + if (ctx._source.containsKey('timestamp')) { + if (ctx._source.timestamp <= params.timestamp) { + ctx.op = 'delete' + } + } else { + ctx.op = 'delete' + } + } +` serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ - `"source": "if ( ctx.op == 'create' ) { ctx.op = 'noop' } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.timestamp ) { ctx.op = 'delete' } } else { ctx.op = 'delete' } }",`+ + `"source": "%s",`+ `"lang": "painless",`+ `"params": {"timestamp": %d}},`+ `"upsert": {}}`, - acct.Timestamp, + converters.FormatPainlessSource(codeToExecute), acct.Timestamp, ) return meta, []byte(serializedDataStr) @@ -116,12 +129,25 @@ func prepareSerializedAccountInfo( } meta := []byte(fmt.Sprintf(`{ "update" : 
{"_index": "%s", "_id" : "%s" } }%s`, index, id, "\n")) + codeToExecute := ` + if (ctx.op == 'create') { + ctx._source = params.account + } else { + if (ctx._source.containsKey('timestamp')) { + if (ctx._source.timestamp <= params.account.timestamp) { + ctx._source = params.account + } + } else { + ctx._source = params.account + } + } +` serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ - `"source": "if ( ctx.op == 'create' ) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.account.timestamp ) { ctx._source = params.account } } else { ctx._source = params.account } }",`+ + `"source": "%s",`+ `"lang": "painless",`+ `"params": { "account": %s }},`+ `"upsert": {}}`, - serializedAccount, + converters.FormatPainlessSource(codeToExecute), serializedAccount, ) return meta, []byte(serializedDataStr), nil @@ -183,12 +209,19 @@ func (ap *accountsProcessor) SerializeTypeForProvidedIDs( for _, id := range ids { meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) + codeToExecute := ` + if (ctx.op == 'create') { + ctx.op = 'noop' + } else { + ctx._source.type = params.type + } +` serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ - `"source": "if ( ctx.op == 'create' ) { ctx.op = 'noop' } else { ctx._source.type = params.type }",`+ + `"source": "%s",`+ `"lang": "painless",`+ `"params": {"type": "%s"}},`+ `"upsert": {}}`, - tokenType) + converters.FormatPainlessSource(codeToExecute), tokenType) err := buffSlice.PutData(meta, []byte(serializedDataStr)) if err != nil { diff --git a/process/collections/serialize.go b/process/collections/serialize.go new file mode 100644 index 00000000..8151f09e --- /dev/null +++ b/process/collections/serialize.go @@ -0,0 +1,60 @@ +package collections + +import ( + "encoding/hex" + "fmt" + "math/big" + + "github.com/ElrondNetwork/elastic-indexer-go/converters" + "github.com/ElrondNetwork/elastic-indexer-go/data" + "github.com/ElrondNetwork/elrond-go-core/core" +) + +// ExtractAndSerializeCollectionsData will extra the accounts with NFT/SFT and serialize +func ExtractAndSerializeCollectionsData( + accountsESDT map[string]*data.AccountInfo, + buffSlice *data.BufferSlice, + index string, +) error { + for _, acct := range accountsESDT { + shouldIgnore := acct.Type != core.NonFungibleESDT && acct.Type != core.SemiFungibleESDT + if shouldIgnore { + continue + } + + nonceBig := big.NewInt(0).SetUint64(acct.TokenNonce) + hexEncodedNonce := hex.EncodeToString(nonceBig.Bytes()) + + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, acct.Address, "\n")) + codeToExecute := ` + if (params.value != '0') { + if (!ctx._source.containsKey(params.col)) { + ctx._source[params.col] = new HashMap(); + } + ctx._source[params.col][params.nonce] = params.value + } else { + if (ctx._source.containsKey(params.col)) { + ctx._source[params.col].delete(params.nonce); + if (ctx._source[params.col].length == 0) { + ctx._source.delete(params.col) + } + } + } +` + + collection := fmt.Sprintf(`{"%s":{"%s": "%s"}}`, acct.TokenName, hexEncodedNonce, acct.Balance) + serializedDataStr := fmt.Sprintf(`{"script": {`+ + `"source": "%s",`+ + `"lang": "painless",`+ + `"params": { "col": "%s", "nonce": "%s", "value": "%s"}},`+ + `"upsert": %s}`, + converters.FormatPainlessSource(codeToExecute), acct.TokenName, hexEncodedNonce, acct.Balance, collection) + + err := buffSlice.PutData(meta, 
[]byte(serializedDataStr)) + if err != nil { + return err + } + } + + return nil +} diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 7ce3a4b0..8f6204f4 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -7,6 +7,7 @@ import ( elasticIndexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/data" + "github.com/ElrondNetwork/elastic-indexer-go/process/collections" "github.com/ElrondNetwork/elastic-indexer-go/process/tags" "github.com/ElrondNetwork/elastic-indexer-go/process/tokeninfo" "github.com/ElrondNetwork/elrond-go-core/core" @@ -599,6 +600,11 @@ func (ei *elasticProcessor) saveAccountsESDT( return err } + err = collections.ExtractAndSerializeCollectionsData(accountsESDTMap, buffSlice, elasticIndexer.CollectionsIndex) + if err != nil { + return err + } + err = ei.indexAccountsESDT(accountsESDTMap, updatesNFTsData, buffSlice) if err != nil { return err From 2b8ce9df5dce1f81c06e788b16475db97cbe0080 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 23 Jun 2022 17:29:08 +0300 Subject: [PATCH 25/69] fix unit tests --- process/accounts/serialize_test.go | 10 +++++----- process/logsevents/nftsProcessor_test.go | 7 ++++--- process/templatesAndPolicies/noKibana_test.go | 2 +- process/templatesAndPolicies/withKibana_test.go | 2 +- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/process/accounts/serialize_test.go b/process/accounts/serialize_test.go index dc8f4c0a..ef3aea7b 100644 --- a/process/accounts/serialize_test.go +++ b/process/accounts/serialize_test.go @@ -56,7 +56,7 @@ func TestSerializeAccounts(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accounts", "_id" : "addr1" } } -{"scripted_upsert": true, "script": {"source": "if ( ctx.op == 'create' ) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.account.timestamp ) { ctx._source = params.account } } else { ctx._source = params.account } }","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -83,7 +83,7 @@ func TestSerializeAccountsESDTNonceZero(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-abcd-00" } } -{"scripted_upsert": true, "script": {"source": "if ( ctx.op == 'create' ) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.account.timestamp ) { ctx._source = params.account } } else { ctx._source = params.account } }","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": 
"if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -109,7 +109,7 @@ func TestSerializeAccountsESDT(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-05" } } -{"scripted_upsert": true, "script": {"source": "if ( ctx.op == 'create' ) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.account.timestamp ) { ctx._source = params.account } } else { ctx._source = params.account } }","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000"} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000"} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -149,7 +149,7 @@ func TestSerializeAccountsNFTWithMedaData(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-16" } } -{"scripted_upsert": true, "script": {"source": "if ( ctx.op == 'create' ) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.account.timestamp ) { ctx._source = params.account } } else { ctx._source = params.account } }","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false}} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false}} }},"upsert": {}} ` require.Equal(t, 
expectedRes, buffSlice.Buffers()[0].String()) } @@ -174,7 +174,7 @@ func TestSerializeAccountsESDTDelete(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index":"accountsesdt", "_id" : "addr1-token-0001-00" } } -{"scripted_upsert": true, "script": {"source": "if ( ctx.op == 'create' ) { ctx.op = 'noop' } else { if (ctx._source.containsKey('timestamp')) { if (ctx._source.timestamp <= params.timestamp ) { ctx.op = 'delete' } } else { ctx.op = 'delete' } }","lang": "painless","params": {"timestamp": 0}},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx.op = 'noop'} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.timestamp) {ctx.op = 'delete'}} else {ctx.op = 'delete'}}","lang": "painless","params": {"timestamp": 0}},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/logsevents/nftsProcessor_test.go b/process/logsevents/nftsProcessor_test.go index 224a648b..9cc8bb6c 100644 --- a/process/logsevents/nftsProcessor_test.go +++ b/process/logsevents/nftsProcessor_test.go @@ -123,9 +123,10 @@ func TestNftsProcessor_processLogAndEventsNFTs_Wipe(t *testing.T) { altered := data.NewAlteredAccounts() res := nftsProc.processEvent(&argsProcessEvent{ - event: events, - accounts: altered, - timestamp: 10000, + event: events, + accounts: altered, + timestamp: 10000, + tokensSupply: data.NewTokensInfo(), }) require.Equal(t, "nft-0123-14", res.identifier) require.Equal(t, "1", res.value) diff --git a/process/templatesAndPolicies/noKibana_test.go b/process/templatesAndPolicies/noKibana_test.go index 0fe8f1c1..dd000be2 100644 --- a/process/templatesAndPolicies/noKibana_test.go +++ b/process/templatesAndPolicies/noKibana_test.go @@ -14,5 +14,5 @@ func TestTemplatesAndPolicyReaderNoKibana_GetElasticTemplatesAndPolicies(t *test templates, policies, err := reader.GetElasticTemplatesAndPolicies() require.Nil(t, err) require.Len(t, policies, 0) - require.Len(t, templates, 20) + require.Len(t, templates, 21) } diff --git a/process/templatesAndPolicies/withKibana_test.go b/process/templatesAndPolicies/withKibana_test.go index 6e63d584..0a9ab191 100644 --- a/process/templatesAndPolicies/withKibana_test.go +++ b/process/templatesAndPolicies/withKibana_test.go @@ -14,5 +14,5 @@ func TestTemplatesAndPolicyReaderWithKibana_GetElasticTemplatesAndPolicies(t *te templates, policies, err := reader.GetElasticTemplatesAndPolicies() require.Nil(t, err) require.Len(t, policies, 12) - require.Len(t, templates, 20) + require.Len(t, templates, 21) } From 6a279d9aefd2a69d1802fa3536d9554cc9b774e8 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 24 Jun 2022 12:16:55 +0300 Subject: [PATCH 26/69] integration test --- integrationtests/collectionsIndex_test.go | 209 ++++++++++++++++++ .../collectionsIndex/collections-1.json | 5 + .../collectionsIndex/collections-2.json | 6 + process/collections/serialize.go | 14 +- 4 files changed, 229 insertions(+), 5 deletions(-) create mode 100644 integrationtests/collectionsIndex_test.go create mode 100644 integrationtests/testdata/collectionsIndex/collections-1.json create mode 100644 integrationtests/testdata/collectionsIndex/collections-2.json diff --git a/integrationtests/collectionsIndex_test.go b/integrationtests/collectionsIndex_test.go new file mode 100644 index 00000000..61b7f51b --- /dev/null +++ b/integrationtests/collectionsIndex_test.go @@ -0,0 +1,209 @@ +package integrationtests + +import ( + "encoding/json" + 
indexerdata "github.com/ElrondNetwork/elastic-indexer-go" + "math/big" + "testing" + + "github.com/ElrondNetwork/elastic-indexer-go/mock" + "github.com/ElrondNetwork/elrond-go-core/core" + coreData "github.com/ElrondNetwork/elrond-go-core/data" + dataBlock "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go-core/data/esdt" + "github.com/ElrondNetwork/elrond-go-core/data/indexer" + "github.com/ElrondNetwork/elrond-go-core/data/transaction" + vmcommon "github.com/ElrondNetwork/elrond-vm-common" + "github.com/stretchr/testify/require" +) + +func TestCollectionsIndexInsertAndDelete(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + // ################ ISSUE NON FUNGIBLE TOKEN ########################## + shardCoordinator := &mock.ShardCoordinatorMock{ + SelfID: core.MetachainShardId, + } + + accounts := &mock.AccountsStub{} + feeComputer := &mock.EconomicsHandlerMock{} + esProc, err := CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + body := &dataBlock.Body{} + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + } + + pool := &indexer.Pool{ + Logs: []*coreData.LogData{ + { + TxHash: "h1", + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte("issueSemiFungible"), + Topics: [][]byte{[]byte("SSSS-dddd"), []byte("SEMI-semi"), []byte("SSS"), []byte(core.SemiFungibleESDT)}, + }, + nil, + }, + }, + }, + }, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + // ################ CREATE SEMI FUNGIBLE TOKEN 1 ########################## + shardCoordinator = &mock.ShardCoordinatorMock{ + SelfID: 0, + } + + esdtToken := &esdt.ESDigitalToken{ + Value: big.NewInt(1000), + Properties: []byte("ok"), + TokenMetaData: &esdt.MetaData{ + Creator: []byte("creator"), + }, + } + + addr := "aaaabbbbcccccccc" + mockAccount := &mock.UserAccountStub{ + RetrieveValueFromDataTrieTrackerCalled: func(key []byte) ([]byte, error) { + return json.Marshal(esdtToken) + }, + AddressBytesCalled: func() []byte { + return []byte(addr) + }, + } + accounts = &mock.AccountsStub{ + LoadAccountCalled: func(container []byte) (vmcommon.AccountHandler, error) { + return mockAccount, nil + }, + } + esProc, err = CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + header = &dataBlock.Header{ + Round: 51, + TimeStamp: 5600, + } + + esdtData := &esdt.ESDigitalToken{ + TokenMetaData: &esdt.MetaData{ + Creator: []byte("creator"), + }, + } + esdtDataBytes, _ := json.Marshal(esdtData) + + pool = &indexer.Pool{ + Logs: []*coreData.LogData{ + { + TxHash: "h1", + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("aaaabbbb"), + Identifier: []byte(core.BuiltInFunctionESDTNFTCreate), + Topics: [][]byte{[]byte("SSSS-dddd"), big.NewInt(2).Bytes(), big.NewInt(1).Bytes(), esdtDataBytes}, + }, + nil, + }, + }, + }, + }, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + ids := []string{"61616161626262626363636363636363"} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.CollectionsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/collectionsIndex/collections-1.json"), string(genericResponse.Docs[0].Source)) + + // ################ CREATE 
SEMI FUNGIBLE TOKEN 2 ########################## + pool = &indexer.Pool{ + Logs: []*coreData.LogData{ + { + TxHash: "h1", + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("aaaabbbb"), + Identifier: []byte(core.BuiltInFunctionESDTNFTCreate), + Topics: [][]byte{[]byte("SSSS-dddd"), big.NewInt(22).Bytes(), big.NewInt(1).Bytes(), esdtDataBytes}, + }, + nil, + }, + }, + }, + }, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + ids = []string{"61616161626262626363636363636363"} + genericResponse = &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.CollectionsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/collectionsIndex/collections-2.json"), string(genericResponse.Docs[0].Source)) + + // ################ TRANSFER SEMI FUNGIBLE TOKEN 2 ########################## + esdtToken = &esdt.ESDigitalToken{ + Value: big.NewInt(0), + Properties: []byte("ok"), + TokenMetaData: &esdt.MetaData{ + Creator: []byte("creator"), + }, + } + + addr = "aaaabbbbcccccccc" + mockAccount = &mock.UserAccountStub{ + RetrieveValueFromDataTrieTrackerCalled: func(key []byte) ([]byte, error) { + return json.Marshal(esdtToken) + }, + AddressBytesCalled: func() []byte { + return []byte(addr) + }, + } + accounts = &mock.AccountsStub{ + LoadAccountCalled: func(container []byte) (vmcommon.AccountHandler, error) { + return mockAccount, nil + }, + } + esProc, err = CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + pool = &indexer.Pool{ + Logs: []*coreData.LogData{ + { + TxHash: "h1", + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte(addr), + Identifier: []byte(core.BuiltInFunctionESDTNFTTransfer), + Topics: [][]byte{[]byte("SSSS-dddd"), big.NewInt(22).Bytes(), big.NewInt(1).Bytes(), []byte("746573742d616464726573732d62616c616e63652d31")}, + }, + nil, + }, + }, + }, + }, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + ids = []string{"61616161626262626363636363636363"} + genericResponse = &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.CollectionsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/collectionsIndex/collections-1.json"), string(genericResponse.Docs[0].Source)) +} diff --git a/integrationtests/testdata/collectionsIndex/collections-1.json b/integrationtests/testdata/collectionsIndex/collections-1.json new file mode 100644 index 00000000..5d0b25be --- /dev/null +++ b/integrationtests/testdata/collectionsIndex/collections-1.json @@ -0,0 +1,5 @@ +{ + "SSSS-dddd":{ + "02":"1000" + } +} diff --git a/integrationtests/testdata/collectionsIndex/collections-2.json b/integrationtests/testdata/collectionsIndex/collections-2.json new file mode 100644 index 00000000..cd7cb59a --- /dev/null +++ b/integrationtests/testdata/collectionsIndex/collections-2.json @@ -0,0 +1,6 @@ +{ + "SSSS-dddd":{ + "02":"1000", + "16":"1000" + } +} diff --git a/process/collections/serialize.go b/process/collections/serialize.go index 8151f09e..cc16803e 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -19,7 +19,9 @@ func ExtractAndSerializeCollectionsData( for _, acct := range accountsESDT { shouldIgnore := acct.Type != core.NonFungibleESDT && acct.Type != core.SemiFungibleESDT if shouldIgnore { - continue + if acct.Balance != "0" || acct.TokenNonce == 0 { + continue + } } nonceBig := 
big.NewInt(0).SetUint64(acct.TokenNonce) @@ -27,23 +29,25 @@ func ExtractAndSerializeCollectionsData( meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, acct.Address, "\n")) codeToExecute := ` - if (params.value != '0') { + if ((ctx.op == 'create') && (params.value == '0')) { + ctx.op = 'noop'; + } else if (params.value != '0') { if (!ctx._source.containsKey(params.col)) { ctx._source[params.col] = new HashMap(); } ctx._source[params.col][params.nonce] = params.value } else { if (ctx._source.containsKey(params.col)) { - ctx._source[params.col].delete(params.nonce); + ctx._source[params.col].remove(params.nonce); if (ctx._source[params.col].length == 0) { - ctx._source.delete(params.col) + ctx._source.remove(params.col) } } } ` collection := fmt.Sprintf(`{"%s":{"%s": "%s"}}`, acct.TokenName, hexEncodedNonce, acct.Balance) - serializedDataStr := fmt.Sprintf(`{"script": {`+ + serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ `"source": "%s",`+ `"lang": "painless",`+ `"params": { "col": "%s", "nonce": "%s", "value": "%s"}},`+ From 7dab841ed7375a2152be7bbdf25ae3dacbc47be8 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 24 Jun 2022 12:17:11 +0300 Subject: [PATCH 27/69] fix import --- integrationtests/collectionsIndex_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integrationtests/collectionsIndex_test.go b/integrationtests/collectionsIndex_test.go index 61b7f51b..2d191b01 100644 --- a/integrationtests/collectionsIndex_test.go +++ b/integrationtests/collectionsIndex_test.go @@ -2,10 +2,10 @@ package integrationtests import ( "encoding/json" - indexerdata "github.com/ElrondNetwork/elastic-indexer-go" "math/big" "testing" + indexerdata "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/mock" "github.com/ElrondNetwork/elrond-go-core/core" coreData "github.com/ElrondNetwork/elrond-go-core/data" From a1a578100cd053b46d1b26dbad2772508b53e494 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 24 Jun 2022 13:59:52 +0300 Subject: [PATCH 28/69] small fix --- process/collections/serialize.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/collections/serialize.go b/process/collections/serialize.go index cc16803e..8f10748b 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -39,7 +39,7 @@ func ExtractAndSerializeCollectionsData( } else { if (ctx._source.containsKey(params.col)) { ctx._source[params.col].remove(params.nonce); - if (ctx._source[params.col].length == 0) { + if (ctx._source[params.col].size() == 0) { ctx._source.remove(params.col) } } From d68de8802f730ab8ced3c18e9a588f4084c21c40 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 24 Jun 2022 14:34:19 +0300 Subject: [PATCH 29/69] delete if empty --- process/collections/serialize.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/process/collections/serialize.go b/process/collections/serialize.go index 8f10748b..45cd8e00 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -42,6 +42,9 @@ func ExtractAndSerializeCollectionsData( if (ctx._source[params.col].size() == 0) { ctx._source.remove(params.col) } + if (ctx._source.size() == 0) { + ctx.op = 'delete'; + } } } ` From 8e215b172d862949c08ca60621c870ef86d7efbe Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 27 Jun 2022 15:02:30 +0300 Subject: [PATCH 30/69] remove data field parser --- factory/indexerFactory_test.go | 2 +- 
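Recapping patches 28 and 29 before the unrelated parser removal below: both are one-line fixes to the collections painless script. `delete` and `length` read like JavaScript idioms; painless Maps expose `remove` and `size()`, and patch 29 additionally drops the whole document once its last collection entry disappears. A sketch of the script as it stands after both fixes, reassembled here for readability only (indentation is illustrative; the committed source is compacted by converters.FormatPainlessSource):

package main

// collectionsScriptAfterFixes mirrors, for illustration, the painless source
// built in process/collections/serialize.go once patches 28-29 are applied.
// params.col is the collection ticker, params.nonce the hex-encoded nonce,
// and params.value the new balance ("0" meaning the token left the account).
const collectionsScriptAfterFixes = `
if ((ctx.op == 'create') && (params.value == '0')) {
    ctx.op = 'noop';
} else if (params.value != '0') {
    if (!ctx._source.containsKey(params.col)) {
        ctx._source[params.col] = new HashMap();
    }
    ctx._source[params.col][params.nonce] = params.value
} else {
    if (ctx._source.containsKey(params.col)) {
        ctx._source[params.col].remove(params.nonce);
        if (ctx._source[params.col].size() == 0) {
            ctx._source.remove(params.col)
        }
        if (ctx._source.size() == 0) {
            ctx.op = 'delete';
        }
    }
}
`

func main() { _ = collectionsScriptAfterFixes }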
go.mod | 2 +- go.sum | 5 +- interface.go | 3 + process/elasticProcessor_test.go | 4 +- .../factory/elasticProcessorFactory_test.go | 2 +- process/transactions/datafield/dtos.go | 25 -- .../datafield/parseMultiESDTNFTTransfer.go | 44 --- .../parseMultiESDTNFTTransfer_test.go | 78 ---- .../datafield/parseSingleESDTTransfer.go | 28 -- .../datafield/parseSingleESDTTransfer_test.go | 73 ---- .../datafield/parseSingleNFTTransfer.go | 43 --- .../datafield/parseSingleNFTTransfer_test.go | 93 ----- process/transactions/datafield/parser.go | 240 ------------ process/transactions/datafield/parser_test.go | 361 ------------------ process/transactions/datafield/utils.go | 96 ----- process/transactions/datafield/utils_test.go | 16 - process/transactions/interface.go | 2 +- .../smartContractResultsProcessor.go | 3 +- .../smartContractResultsProcessor_test.go | 5 +- process/transactions/transactionDBBuilder.go | 5 +- .../transactions/transactionDBBuilder_test.go | 1 + .../transactions/transactionsGrouper_test.go | 2 +- process/transactions/transactionsProcessor.go | 6 +- .../transactionsProcessor_test.go | 2 +- 25 files changed, 25 insertions(+), 1116 deletions(-) delete mode 100644 process/transactions/datafield/dtos.go delete mode 100644 process/transactions/datafield/parseMultiESDTNFTTransfer.go delete mode 100644 process/transactions/datafield/parseMultiESDTNFTTransfer_test.go delete mode 100644 process/transactions/datafield/parseSingleESDTTransfer.go delete mode 100644 process/transactions/datafield/parseSingleESDTTransfer_test.go delete mode 100644 process/transactions/datafield/parseSingleNFTTransfer.go delete mode 100644 process/transactions/datafield/parseSingleNFTTransfer_test.go delete mode 100644 process/transactions/datafield/parser.go delete mode 100644 process/transactions/datafield/parser_test.go delete mode 100644 process/transactions/datafield/utils.go delete mode 100644 process/transactions/datafield/utils_test.go diff --git a/factory/indexerFactory_test.go b/factory/indexerFactory_test.go index 980f5487..8dbfb1bc 100644 --- a/factory/indexerFactory_test.go +++ b/factory/indexerFactory_test.go @@ -22,7 +22,7 @@ func createMockIndexerFactoryArgs() *ArgsIndexerFactory { Password: "", Marshalizer: &mock.MarshalizerMock{}, Hasher: &mock.HasherMock{}, - AddressPubkeyConverter: &mock.PubkeyConverterMock{}, + AddressPubkeyConverter: mock.NewPubkeyConverterMock(32), ValidatorPubkeyConverter: &mock.PubkeyConverterMock{}, TemplatesPath: "../testdata", EnabledIndexes: []string{"blocks", "transactions", "miniblocks", "validators", "round", "accounts", "rating"}, diff --git a/go.mod b/go.mod index dfaec0ee..72762edb 100644 --- a/go.mod +++ b/go.mod @@ -5,7 +5,7 @@ go 1.17 require ( github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711 github.com/ElrondNetwork/elrond-go-logger v1.0.7 - github.com/ElrondNetwork/elrond-vm-common v1.2.9 + github.com/ElrondNetwork/elrond-vm-common v1.3.7 github.com/elastic/go-elasticsearch/v7 v7.12.0 github.com/stretchr/testify v1.7.0 github.com/tidwall/gjson v1.14.0 diff --git a/go.sum b/go.sum index aa24a554..2591cd8e 100644 --- a/go.sum +++ b/go.sum @@ -3,12 +3,11 @@ github.com/ElrondNetwork/elrond-go-core v1.1.7/go.mod h1:O9FkkTT2H9kxCzfn40TbhoC github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711 h1:pU3ZyHL/gMg/2cN+DxG3tpalVT+iJfKysE6S7GwzB4Y= github.com/ElrondNetwork/elrond-go-core v1.1.16-0.20220414130405-e3cc29bc7711/go.mod 
h1:Yz8JK5sGBctw7+gU8j2mZHbzQ09Ek4XHJ4Uinq1N6nM= github.com/ElrondNetwork/elrond-go-logger v1.0.4/go.mod h1:e5D+c97lKUfFdAzFX7rrI2Igl/z4Y0RkKYKWyzprTGk= -github.com/ElrondNetwork/elrond-go-logger v1.0.5/go.mod h1:cBfgx0ST/CJx8jrxJSC5aiSrvkGzcnF7sK06RD8mFxQ= github.com/ElrondNetwork/elrond-go-logger v1.0.7 h1:Ldl1rVS0RGKc1IsW8jIaGCb6Zwei04gsMvyjL05X6mE= github.com/ElrondNetwork/elrond-go-logger v1.0.7/go.mod h1:cBfgx0ST/CJx8jrxJSC5aiSrvkGzcnF7sK06RD8mFxQ= github.com/ElrondNetwork/elrond-vm-common v1.1.0/go.mod h1:w3i6f8uiuRkE68Ie/gebRcLgTuHqvruJSYrFyZWuLrE= -github.com/ElrondNetwork/elrond-vm-common v1.2.9 h1:TiW7HFBdtraJnSOXC+OjTIRbOqwG9t3PkL8aTqkAUZM= -github.com/ElrondNetwork/elrond-vm-common v1.2.9/go.mod h1:B/Y8WiqHyDd7xsjNYsaYbVMp1jQgQ+z4jTJkFvj/EWI= +github.com/ElrondNetwork/elrond-vm-common v1.3.7 h1:nd3pTbZD+JQfhCYX4n2QlysGbIsllGjdMU4BuFfiyUk= +github.com/ElrondNetwork/elrond-vm-common v1.3.7/go.mod h1:seROQuR7RJCoCS7mgRXVAlvjztltY1c+UroAgWr/USE= github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= diff --git a/interface.go b/interface.go index a2cfc8c4..30ca2511 100644 --- a/interface.go +++ b/interface.go @@ -51,8 +51,11 @@ type FeesProcessorHandler interface { // ShardCoordinator defines what a shard state coordinator should hold type ShardCoordinator interface { + NumberOfShards() uint32 ComputeId(address []byte) uint32 SelfId() uint32 + SameShard(firstAddress, secondAddress []byte) bool + CommunicationIdentifier(destShardID uint32) string IsInterfaceNil() bool } diff --git a/process/elasticProcessor_test.go b/process/elasticProcessor_test.go index 689132b7..d4c83d6b 100644 --- a/process/elasticProcessor_test.go +++ b/process/elasticProcessor_test.go @@ -419,7 +419,7 @@ func TestElasticseachSaveTransactions(t *testing.T) { txPool := newTestTxPool() args := &transactions.ArgsTransactionProcessor{ - AddressPubkeyConverter: &mock.PubkeyConverterMock{}, + AddressPubkeyConverter: mock.NewPubkeyConverterMock(32), TxFeeCalculator: &mock.EconomicsHandlerStub{}, ShardCoordinator: &mock.ShardCoordinatorMock{}, Hasher: &mock.HasherMock{}, @@ -571,7 +571,7 @@ func TestElasticProcessor_RemoveTransactions(t *testing.T) { } args := &transactions.ArgsTransactionProcessor{ - AddressPubkeyConverter: &mock.PubkeyConverterMock{}, + AddressPubkeyConverter: mock.NewPubkeyConverterMock(32), TxFeeCalculator: &mock.EconomicsHandlerStub{}, ShardCoordinator: &mock.ShardCoordinatorMock{}, Hasher: &mock.HasherMock{}, diff --git a/process/factory/elasticProcessorFactory_test.go b/process/factory/elasticProcessorFactory_test.go index b7da73fe..1c42fbec 100644 --- a/process/factory/elasticProcessorFactory_test.go +++ b/process/factory/elasticProcessorFactory_test.go @@ -12,7 +12,7 @@ func TestCreateElasticProcessor(t *testing.T) { args := ArgElasticProcessorFactory{ Marshalizer: &mock.MarshalizerMock{}, Hasher: &mock.HasherMock{}, - AddressPubkeyConverter: &mock.PubkeyConverterMock{}, + AddressPubkeyConverter: mock.NewPubkeyConverterMock(32), ValidatorPubkeyConverter: &mock.PubkeyConverterMock{}, DBClient: &mock.DatabaseWriterStub{}, AccountsDB: &mock.AccountsStub{}, diff --git a/process/transactions/datafield/dtos.go b/process/transactions/datafield/dtos.go deleted file mode 
100644 index d845b476..00000000 --- a/process/transactions/datafield/dtos.go +++ /dev/null @@ -1,25 +0,0 @@ -package datafield - -import ( - indexer "github.com/ElrondNetwork/elastic-indexer-go" - "github.com/ElrondNetwork/elrond-go-core/core" - "github.com/ElrondNetwork/elrond-go-core/marshal" -) - -// ResponseParseData is the response with results after the data field was parsed -type ResponseParseData struct { - Operation string - Function string - ESDTValues []string - Tokens []string - Receivers []string - ReceiversShardID []uint32 - IsRelayed bool -} - -// ArgsOperationDataFieldParser holds all the components required to create a new instance of data field parser -type ArgsOperationDataFieldParser struct { - PubKeyConverter core.PubkeyConverter - Marshalizer marshal.Marshalizer - ShardCoordinator indexer.ShardCoordinator -} diff --git a/process/transactions/datafield/parseMultiESDTNFTTransfer.go b/process/transactions/datafield/parseMultiESDTNFTTransfer.go deleted file mode 100644 index d5b21694..00000000 --- a/process/transactions/datafield/parseMultiESDTNFTTransfer.go +++ /dev/null @@ -1,44 +0,0 @@ -package datafield - -import ( - "github.com/ElrondNetwork/elastic-indexer-go/converters" - "github.com/ElrondNetwork/elrond-go-core/core" -) - -func (odp *operationDataFieldParser) parseMultiESDTNFTTransfer(args [][]byte, sender, receiver []byte) *ResponseParseData { - responseParse := &ResponseParseData{ - Operation: core.BuiltInFunctionMultiESDTNFTTransfer, - } - - parsedESDTTransfers, err := odp.esdtTransferParser.ParseESDTTransfers(sender, receiver, core.BuiltInFunctionMultiESDTNFTTransfer, args) - if err != nil { - return responseParse - } - - if core.IsSmartContractAddress(parsedESDTTransfers.RcvAddr) && isASCIIString(parsedESDTTransfers.CallFunction) { - responseParse.Function = parsedESDTTransfers.CallFunction - } - - receiverEncoded := odp.pubKeyConverter.Encode(parsedESDTTransfers.RcvAddr) - receiverShardID := odp.shardCoordinator.ComputeId(parsedESDTTransfers.RcvAddr) - - for _, esdtTransferData := range parsedESDTTransfers.ESDTTransfers { - if !isASCIIString(string(esdtTransferData.ESDTTokenName)) { - return &ResponseParseData{ - Operation: core.BuiltInFunctionMultiESDTNFTTransfer, - } - } - - token := string(esdtTransferData.ESDTTokenName) - if esdtTransferData.ESDTTokenNonce != 0 { - token = converters.ComputeTokenIdentifier(string(esdtTransferData.ESDTTokenName), esdtTransferData.ESDTTokenNonce) - } - - responseParse.Tokens = append(responseParse.Tokens, token) - responseParse.ESDTValues = append(responseParse.ESDTValues, esdtTransferData.ESDTValue.String()) - responseParse.Receivers = append(responseParse.Receivers, receiverEncoded) - responseParse.ReceiversShardID = append(responseParse.ReceiversShardID, receiverShardID) - } - - return responseParse -} diff --git a/process/transactions/datafield/parseMultiESDTNFTTransfer_test.go b/process/transactions/datafield/parseMultiESDTNFTTransfer_test.go deleted file mode 100644 index 18e0a3ef..00000000 --- a/process/transactions/datafield/parseMultiESDTNFTTransfer_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package datafield - -import ( - "testing" - - "github.com/ElrondNetwork/elastic-indexer-go/mock" - "github.com/stretchr/testify/require" -) - -func TestMultiESDTNFTTransferParse(t *testing.T) { - t.Parallel() - - args := &ArgsOperationDataFieldParser{ - PubKeyConverter: pubKeyConv, - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: &mock.ShardCoordinatorMock{}, - } 
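A note for reading the removed tests that follow: every transaction data field uses the `function@hexArg1@hexArg2@...` grammar, with each argument hex-encoded. A self-contained sketch of that encoding (buildDataField is a hypothetical helper for illustration, not part of this repo):

package main

import (
	"encoding/hex"
	"fmt"
	"strings"
)

// buildDataField joins the function name and the hex-encoded raw arguments
// with '@', yielding data fields shaped like the ones asserted in these tests.
func buildDataField(function string, args ...[]byte) []byte {
	parts := []string{function}
	for _, arg := range args {
		parts = append(parts, hex.EncodeToString(arg))
	}
	return []byte(strings.Join(parts, "@"))
}

func main() {
	// Prints "ESDTTransfer@544f4b454e@01": transfer 1 unit of TOKEN.
	fmt.Println(string(buildDataField("ESDTTransfer", []byte("TOKEN"), []byte{0x01})))
}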
- - parser, _ := NewOperationDataFieldParser(args) - - t.Run("MultiNFTTransferWithSCCall", func(t *testing.T) { - t.Parallel() - - dataField := []byte("MultiESDTNFTTransfer@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@02@4c4b4d45582d616162393130@0d3d@058184103ad80ffb19f7@4c4b4641524d2d396431656138@1ecf06@0423fc01830d455ee5510c@656e7465724661726d416e644c6f636b5265776172647350726f7879@00000000000000000500656d0acc53561c5d6f6fd7d7e82bf13247014f615483") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "MultiESDTNFTTransfer", - Function: "enterFarmAndLockRewardsProxy", - ESDTValues: []string{"26000978570569047546359", "5005634793810936671326476"}, - Tokens: []string{"LKMEX-aab910-0d3d", "LKFARM-9d1ea8-1ecf06"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl", "erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl"}, - ReceiversShardID: []uint32{0, 0}, - }, res) - }) - - t.Run("MultiNFTTransfer", func(t *testing.T) { - t.Parallel() - - dataField := []byte("MultiESDTNFTTransfer@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@02@4d4949552d61626364@00@01@4d4949552d616263646566@02@05") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "MultiESDTNFTTransfer", - ESDTValues: []string{"1", "5"}, - Tokens: []string{"MIIU-abcd", "MIIU-abcdef-02"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl", "erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl"}, - ReceiversShardID: []uint32{0, 0}, - }, res) - }) - - t.Run("MultiNFTTransferNonHexArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("MultiESDTNFTTransfer@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@02@4d4949552d61626364@00@01@4d4949552d616263646566@02@05@1") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "transfer", - }, res) - }) - t.Run("MultiNFTTransferInvalidNumberOfArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("MultiESDTNFTTransfer@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@02@4d4949552d61626364@00@01@4d4949552d616263646566@02") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "MultiESDTNFTTransfer", - }, res) - }) - - t.Run("MultiNFTTransferEmptyArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("MultiESDTNFTTransfer@@@@@@@") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "MultiESDTNFTTransfer", - }, res) - }) -} diff --git a/process/transactions/datafield/parseSingleESDTTransfer.go b/process/transactions/datafield/parseSingleESDTTransfer.go deleted file mode 100644 index 96c8be67..00000000 --- a/process/transactions/datafield/parseSingleESDTTransfer.go +++ /dev/null @@ -1,28 +0,0 @@ -package datafield - -import ( - "github.com/ElrondNetwork/elrond-go-core/core" -) - -func (odp *operationDataFieldParser) parseESDTTransfer(args [][]byte, sender, receiver []byte) *ResponseParseData { - responseParse := &ResponseParseData{ - Operation: core.BuiltInFunctionESDTTransfer, - } - - parsedESDTTransfers, err := odp.esdtTransferParser.ParseESDTTransfers(sender, receiver, core.BuiltInFunctionESDTTransfer, args) - if err != nil { - return responseParse - } - - if core.IsSmartContractAddress(receiver) && isASCIIString(parsedESDTTransfers.CallFunction) { - 
responseParse.Function = parsedESDTTransfers.CallFunction - } - - if len(parsedESDTTransfers.ESDTTransfers) == 0 || !isASCIIString(string(parsedESDTTransfers.ESDTTransfers[0].ESDTTokenName)) { - return responseParse - } - responseParse.Tokens = append(responseParse.Tokens, string(parsedESDTTransfers.ESDTTransfers[0].ESDTTokenName)) - responseParse.ESDTValues = append(responseParse.ESDTValues, parsedESDTTransfers.ESDTTransfers[0].ESDTValue.String()) - - return responseParse -} diff --git a/process/transactions/datafield/parseSingleESDTTransfer_test.go b/process/transactions/datafield/parseSingleESDTTransfer_test.go deleted file mode 100644 index c3f61d4f..00000000 --- a/process/transactions/datafield/parseSingleESDTTransfer_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package datafield - -import ( - "testing" - - "github.com/ElrondNetwork/elastic-indexer-go/mock" - "github.com/stretchr/testify/require" -) - -func TestParseESDTTransfer(t *testing.T) { - t.Parallel() - - args := &ArgsOperationDataFieldParser{ - PubKeyConverter: pubKeyConv, - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: &mock.ShardCoordinatorMock{}, - } - - parser, _ := NewOperationDataFieldParser(args) - - t.Run("TransferNonHexArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTTransfer@1234@011") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: operationTransfer, - }, res) - }) - - t.Run("TransferNotEnoughtArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTTransfer@1234") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTTransfer", - }, res) - }) - - t.Run("TransferEmptyArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTTransfer@544f4b454e@") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTTransfer", - Tokens: []string{"TOKEN"}, - ESDTValues: []string{"0"}, - }, res) - }) - - t.Run("TransferWithSCCall", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTTransfer@544f4b454e@01@63616c6c4d65") - res := parser.Parse(dataField, sender, receiverSC) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTTransfer", - Function: "callMe", - ESDTValues: []string{"1"}, - Tokens: []string{"TOKEN"}, - }, res) - }) - - t.Run("TransferNonAsciStringToken", func(t *testing.T) { - dataField := []byte("ESDTTransfer@055de6a779bbac0000@01") - res := parser.Parse(dataField, sender, receiverSC) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTTransfer", - }, res) - }) -} diff --git a/process/transactions/datafield/parseSingleNFTTransfer.go b/process/transactions/datafield/parseSingleNFTTransfer.go deleted file mode 100644 index d269b146..00000000 --- a/process/transactions/datafield/parseSingleNFTTransfer.go +++ /dev/null @@ -1,43 +0,0 @@ -package datafield - -import ( - "bytes" - "github.com/ElrondNetwork/elastic-indexer-go/converters" - "github.com/ElrondNetwork/elrond-go-core/core" -) - -func (odp *operationDataFieldParser) parseESDTNFTTransfer(args [][]byte, sender, receiver []byte) *ResponseParseData { - responseParse := &ResponseParseData{ - Operation: core.BuiltInFunctionESDTNFTTransfer, - } - - parsedESDTTransfers, err := odp.esdtTransferParser.ParseESDTTransfers(sender, receiver, core.BuiltInFunctionESDTNFTTransfer, args) - if err != nil { - return responseParse - } - - if len(parsedESDTTransfers.ESDTTransfers) == 0 || 
!isASCIIString(string(parsedESDTTransfers.ESDTTransfers[0].ESDTTokenName)) { - return responseParse - } - - if core.IsSmartContractAddress(parsedESDTTransfers.RcvAddr) && isASCIIString(parsedESDTTransfers.CallFunction) { - responseParse.Function = parsedESDTTransfers.CallFunction - } - - rcvAddr := receiver - if bytes.Equal(sender, receiver) { - rcvAddr = parsedESDTTransfers.RcvAddr - } - - esdtNFTTransfer := parsedESDTTransfers.ESDTTransfers[0] - receiverEncoded := odp.pubKeyConverter.Encode(rcvAddr) - receiverShardID := odp.shardCoordinator.ComputeId(rcvAddr) - token := converters.ComputeTokenIdentifier(string(esdtNFTTransfer.ESDTTokenName), esdtNFTTransfer.ESDTTokenNonce) - - responseParse.Tokens = append(responseParse.Tokens, token) - responseParse.ESDTValues = append(responseParse.ESDTValues, esdtNFTTransfer.ESDTValue.String()) - responseParse.Receivers = append(responseParse.Receivers, receiverEncoded) - responseParse.ReceiversShardID = append(responseParse.ReceiversShardID, receiverShardID) - - return responseParse -} diff --git a/process/transactions/datafield/parseSingleNFTTransfer_test.go b/process/transactions/datafield/parseSingleNFTTransfer_test.go deleted file mode 100644 index fbd78350..00000000 --- a/process/transactions/datafield/parseSingleNFTTransfer_test.go +++ /dev/null @@ -1,93 +0,0 @@ -package datafield - -import ( - "testing" - - "github.com/ElrondNetwork/elastic-indexer-go/mock" - "github.com/ElrondNetwork/elrond-go-core/core/pubkeyConverter" - logger "github.com/ElrondNetwork/elrond-go-logger" - "github.com/stretchr/testify/require" -) - -var log = logger.GetOrCreate("parse-tests") - -var pubKeyConv, _ = pubkeyConverter.NewBech32PubkeyConverter(32, log) - -var sender, _ = pubKeyConv.Decode("erd1kqdm94ef5dr9nz3208rrsdzkgwkz53saj4t5chx26cm4hlq8qz8qqd9207") -var receiver, _ = pubKeyConv.Decode("erd1kszzq4egxj5m3t22vt2s8vplmxmqrstghecmnk3tq9mn5fdy7pqqgvzkug") -var receiverSC, _ = pubKeyConv.Decode("erd1qqqqqqqqqqqqqpgqp699jngundfqw07d8jzkepucvpzush6k3wvqyc44rx") - -func TestESDTNFTTransfer(t *testing.T) { - t.Parallel() - - args := &ArgsOperationDataFieldParser{ - PubKeyConverter: pubKeyConv, - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: &mock.ShardCoordinatorMock{}, - } - - parser, _ := NewOperationDataFieldParser(args) - - t.Run("NFTTransferNotOkNonHexArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTTransfer@@11316@01") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "transfer", - }, res) - }) - - t.Run("NFTTransferNotEnoughArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTTransfer@@1131@01") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTTransfer", - }, res) - }) - - t.Run("NftTransferOk", func(t *testing.T) { - t.Parallel() - - dataField := 
[]byte("ESDTNFTTransfer@444541442d373966386431@1136@01@08011202000122bc0308b622120c556e646561642023343430361a2000000000000000000500a536e203953414ff92e0a2fdb9b9c0d987fac394242920e8072a2e516d5a39447237447051516b79336e51484a6a4e646b6a393570574c547542384273596a6f4e4c71326262587764324c68747470733a2f2f697066732e696f2f697066732f516d5a39447237447051516b79336e51484a6a4e646b6a393570574c547542384273596a6f4e4c713262625877642f313939302e706e67324d68747470733a2f2f697066732e696f2f697066732f516d5a39447237447051516b79336e51484a6a4e646b6a393570574c547542384273596a6f4e4c713262625877642f313939302e6a736f6e325368747470733a2f2f697066732e696f2f697066732f516d5a39447237447051516b79336e51484a6a4e646b6a393570574c547542384273596a6f4e4c713262625877642f636f6c6c656374696f6e2e6a736f6e3a62746167733a556e646561642c54726561737572652048756e742c456c726f6e643b6d657461646174613a516d5a39447237447051516b79336e51484a6a4e646b6a393570574c547542384273596a6f4e4c713262625877642f313939302e6a736f6e") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTTransfer", - ESDTValues: []string{"1"}, - Tokens: []string{"DEAD-79f8d1-1136"}, - Receivers: []string{"erd1kszzq4egxj5m3t22vt2s8vplmxmqrstghecmnk3tq9mn5fdy7pqqgvzkug"}, - ReceiversShardID: []uint32{0}, - }, res) - }) - - t.Run("NFTTransferWithSCCallOk", func(t *testing.T) { - t.Parallel() - - dataField := []byte(`ESDTNFTTransfer@4c4b4641524d2d396431656138@1e47f1@018c88873c27e96447@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@636c61696d5265776172647350726f7879@0000000000000000050026751893d6789be9e5a99863ba9eeaa8088dd25f5483`) - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTTransfer", - Function: "claimRewardsProxy", - ESDTValues: []string{"28573236528289506375"}, - Tokens: []string{"LKFARM-9d1ea8-1e47f1"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl"}, - ReceiversShardID: []uint32{0}, - }, res) - }) - - t.Run("NFTTransferInvalidTx", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTTransfer@53434f56452d3561363336652d3031@0de0b6b3a7640000@0de0b6b3a7640000@01@055de6a779bbac0000@14c36e6f35b4ea4c6818580000@53434f56452d3561363336652d3031") - res := parser.Parse(dataField, sender, receiverSC) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTTransfer", - ESDTValues: []string{"1000000000000000000"}, - Tokens: []string{"SCOVE-5a636e-01-0de0b6b3a7640000"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgqp699jngundfqw07d8jzkepucvpzush6k3wvqyc44rx"}, - ReceiversShardID: []uint32{0}, - }, res) - }) -} diff --git a/process/transactions/datafield/parser.go b/process/transactions/datafield/parser.go deleted file mode 100644 index 032c9f96..00000000 --- a/process/transactions/datafield/parser.go +++ /dev/null @@ -1,240 +0,0 @@ -package datafield - -import ( - "encoding/json" - "math/big" - - indexer "github.com/ElrondNetwork/elastic-indexer-go" - "github.com/ElrondNetwork/elastic-indexer-go/converters" - "github.com/ElrondNetwork/elrond-go-core/core" - "github.com/ElrondNetwork/elrond-go-core/core/check" - "github.com/ElrondNetwork/elrond-go-core/data/transaction" - vmcommon "github.com/ElrondNetwork/elrond-vm-common" - "github.com/ElrondNetwork/elrond-vm-common/parsers" -) - -const ( - operationTransfer = `transfer` - operationDeploy = `scDeploy` - minArgumentsQuantityOperationESDT = 2 - minArgumentsQuantityOperationNFT = 3 - numArgsRelayedV2 = 4 -) - 
-type operationDataFieldParser struct { - builtInFunctionsList []string - - argsParser vmcommon.CallArgsParser - pubKeyConverter core.PubkeyConverter - shardCoordinator indexer.ShardCoordinator - esdtTransferParser vmcommon.ESDTTransferParser -} - -// NewOperationDataFieldParser will return a new instance of operationDataFieldParser -func NewOperationDataFieldParser(args *ArgsOperationDataFieldParser) (*operationDataFieldParser, error) { - if check.IfNil(args.ShardCoordinator) { - return nil, indexer.ErrNilShardCoordinator - } - if check.IfNil(args.PubKeyConverter) { - return nil, indexer.ErrNilPubkeyConverter - } - if check.IfNil(args.Marshalizer) { - return nil, indexer.ErrNilMarshalizer - } - - argsParser := parsers.NewCallArgsParser() - esdtTransferParser, err := parsers.NewESDTTransferParser(args.Marshalizer) - if err != nil { - return nil, err - } - - return &operationDataFieldParser{ - argsParser: argsParser, - pubKeyConverter: args.PubKeyConverter, - shardCoordinator: args.ShardCoordinator, - esdtTransferParser: esdtTransferParser, - builtInFunctionsList: getAllBuiltInFunctions(), - }, nil -} - -// Parse will parse the provided data field -func (odp *operationDataFieldParser) Parse(dataField []byte, sender, receiver []byte) *ResponseParseData { - return odp.parse(dataField, sender, receiver, false) -} - -func (odp *operationDataFieldParser) parse(dataField []byte, sender, receiver []byte, ignoreRelayed bool) *ResponseParseData { - responseParse := &ResponseParseData{ - Operation: operationTransfer, - } - - isSCDeploy := len(dataField) > 0 && isEmptyAddr(odp.pubKeyConverter, receiver) - if isSCDeploy { - responseParse.Operation = operationDeploy - return responseParse - } - - function, args, err := odp.argsParser.ParseData(string(dataField)) - if err != nil { - return responseParse - } - - switch function { - case core.BuiltInFunctionESDTTransfer: - return odp.parseESDTTransfer(args, sender, receiver) - case core.BuiltInFunctionESDTNFTTransfer: - return odp.parseESDTNFTTransfer(args, sender, receiver) - case core.BuiltInFunctionMultiESDTNFTTransfer: - return odp.parseMultiESDTNFTTransfer(args, sender, receiver) - case core.BuiltInFunctionESDTLocalBurn, core.BuiltInFunctionESDTLocalMint: - return parseQuantityOperationESDT(args, function) - case core.BuiltInFunctionESDTWipe, core.BuiltInFunctionESDTFreeze, core.BuiltInFunctionESDTUnFreeze: - return parseBlockingOperationESDT(args, function) - case core.BuiltInFunctionESDTNFTCreate, core.BuiltInFunctionESDTNFTBurn, core.BuiltInFunctionESDTNFTAddQuantity: - return parseQuantityOperationNFT(args, function) - case core.RelayedTransaction, core.RelayedTransactionV2: - if ignoreRelayed { - return &ResponseParseData{ - IsRelayed: true, - } - } - return odp.parseRelayed(function, args, receiver) - } - - isBuiltInFunc := isBuiltInFunction(odp.builtInFunctionsList, function) - if isBuiltInFunc { - responseParse.Operation = function - } - - if function != "" && core.IsSmartContractAddress(receiver) && isASCIIString(function) && !isBuiltInFunc { - responseParse.Function = function - } - - return responseParse -} - -func (odp *operationDataFieldParser) parseRelayed(function string, args [][]byte, receiver []byte) *ResponseParseData { - if len(args) == 0 { - return &ResponseParseData{ - IsRelayed: true, - } - } - - tx, ok := extractInnerTx(function, args, receiver) - if !ok { - return &ResponseParseData{ - IsRelayed: true, - } - } - - res := odp.parse(tx.Data, tx.SndAddr, tx.RcvAddr, true) - if res.IsRelayed { - return &ResponseParseData{ - 
IsRelayed: true, - } - } - - receivers := []string{odp.pubKeyConverter.Encode(tx.RcvAddr)} - receiversShardID := []uint32{odp.shardCoordinator.ComputeId(tx.RcvAddr)} - if res.Operation == core.BuiltInFunctionMultiESDTNFTTransfer || res.Operation == core.BuiltInFunctionESDTNFTTransfer { - receivers = res.Receivers - receiversShardID = res.ReceiversShardID - } - - return &ResponseParseData{ - Operation: res.Operation, - Function: res.Function, - ESDTValues: res.ESDTValues, - Tokens: res.Tokens, - Receivers: receivers, - ReceiversShardID: receiversShardID, - IsRelayed: true, - } -} - -func extractInnerTx(function string, args [][]byte, receiver []byte) (*transaction.Transaction, bool) { - tx := &transaction.Transaction{} - - if function == core.RelayedTransaction { - err := json.Unmarshal(args[0], &tx) - - return tx, err == nil - } - - if len(args) != numArgsRelayedV2 { - return nil, false - } - - // sender of the inner tx is the receiver of the relayed tx - tx.SndAddr = receiver - tx.RcvAddr = args[0] - tx.Data = args[2] - - return tx, true -} - -func parseBlockingOperationESDT(args [][]byte, funcName string) *ResponseParseData { - responseData := &ResponseParseData{ - Operation: funcName, - } - - if len(args) == 0 { - return responseData - } - - token, nonce := extractTokenIdentifierAndNonce(args[0]) - if !isASCIIString(string(token)) { - return responseData - } - - tokenStr := string(token) - if nonce != 0 { - tokenStr = converters.ComputeTokenIdentifier(tokenStr, nonce) - } - - responseData.Tokens = append(responseData.Tokens, tokenStr) - return responseData -} - -func parseQuantityOperationESDT(args [][]byte, funcName string) *ResponseParseData { - responseData := &ResponseParseData{ - Operation: funcName, - } - - if len(args) < minArgumentsQuantityOperationESDT { - return responseData - } - - token := string(args[0]) - if !isASCIIString(token) { - return responseData - } - - responseData.Tokens = append(responseData.Tokens, token) - responseData.ESDTValues = append(responseData.ESDTValues, big.NewInt(0).SetBytes(args[1]).String()) - - return responseData -} - -func parseQuantityOperationNFT(args [][]byte, funcName string) *ResponseParseData { - responseData := &ResponseParseData{ - Operation: funcName, - } - - if len(args) < minArgumentsQuantityOperationNFT { - return responseData - } - - token := string(args[0]) - if !isASCIIString(token) { - return responseData - } - - nonce := big.NewInt(0).SetBytes(args[1]).Uint64() - tokenIdentifier := converters.ComputeTokenIdentifier(token, nonce) - responseData.Tokens = append(responseData.Tokens, tokenIdentifier) - - value := big.NewInt(0).SetBytes(args[2]).String() - responseData.ESDTValues = append(responseData.ESDTValues, value) - - return responseData -} diff --git a/process/transactions/datafield/parser_test.go b/process/transactions/datafield/parser_test.go deleted file mode 100644 index 1e83bfd9..00000000 --- a/process/transactions/datafield/parser_test.go +++ /dev/null @@ -1,361 +0,0 @@ -package datafield - -import ( - "encoding/hex" - "github.com/ElrondNetwork/elrond-go-core/core" - "testing" - - indexer "github.com/ElrondNetwork/elastic-indexer-go" - "github.com/ElrondNetwork/elastic-indexer-go/mock" - "github.com/stretchr/testify/require" -) - -func createMockArgumentsOperationParser() *ArgsOperationDataFieldParser { - return &ArgsOperationDataFieldParser{ - PubKeyConverter: &mock.PubkeyConverterMock{}, - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: &mock.ShardCoordinatorMock{}, - } -} - 
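One convention worth spelling out, since it appears both in the removed tests above (identifiers such as LKMEX-aab910-0d3d) and as the "02" and "16" keys of the collections documents earlier (nonces 2 and 22): token identifiers carry the nonce as big-endian hex appended to the collection ticker. A sketch inferred from those expected values (the repo's converters.ComputeTokenIdentifier is assumed to behave equivalently):

package main

import (
	"encoding/hex"
	"fmt"
	"math/big"
)

// computeTokenIdentifier appends the big-endian hex encoding of the nonce to
// the collection ticker, e.g. ("LKMEX-aab910", 0x0d3d) -> "LKMEX-aab910-0d3d".
func computeTokenIdentifier(token string, nonce uint64) string {
	nonceBytes := big.NewInt(0).SetUint64(nonce).Bytes()
	return token + "-" + hex.EncodeToString(nonceBytes)
}

func main() {
	fmt.Println(computeTokenIdentifier("LKMEX-aab910", 0x0d3d)) // LKMEX-aab910-0d3d
	fmt.Println(computeTokenIdentifier("SSSS-dddd", 22))        // SSSS-dddd-16
}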
-func TestNewOperationDataFieldParser(t *testing.T) { - t.Parallel() - - t.Run("NilMarshalizer", func(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - arguments.Marshalizer = nil - - _, err := NewOperationDataFieldParser(arguments) - require.Equal(t, indexer.ErrNilMarshalizer, err) - }) - - t.Run("NilPubKeyConverter", func(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - arguments.PubKeyConverter = nil - - _, err := NewOperationDataFieldParser(arguments) - require.Equal(t, indexer.ErrNilPubkeyConverter, err) - }) - - t.Run("NilShardCoordinator", func(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - arguments.ShardCoordinator = nil - - _, err := NewOperationDataFieldParser(arguments) - require.Equal(t, indexer.ErrNilShardCoordinator, err) - }) - - t.Run("ShouldWork", func(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - - parser, err := NewOperationDataFieldParser(arguments) - require.NotNil(t, parser) - require.Nil(t, err) - }) -} - -func TestParseSCDeploy(t *testing.T) { - arguments := createMockArgumentsOperationParser() - parser, _ := NewOperationDataFieldParser(arguments) - - t.Run("ScDeploy", func(t *testing.T) { - t.Parallel() - - dataField := []byte("0101020304050607") - rcvAddr := make([]byte, 0) - - res := parser.Parse(dataField, sender, rcvAddr) - require.Equal(t, &ResponseParseData{ - Operation: operationDeploy, - }, res) - }) -} - -func TestParseQuantityOperationsESDT(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - parser, _ := NewOperationDataFieldParser(arguments) - - t.Run("ESDTLocalBurn", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTLocalBurn@4d4949552d616263646566@0102") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTLocalBurn", - ESDTValues: []string{"258"}, - Tokens: []string{"MIIU-abcdef"}, - }, res) - }) - - t.Run("ESDTLocalMint", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTLocalMint@4d4949552d616263646566@1122") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTLocalMint", - ESDTValues: []string{"4386"}, - Tokens: []string{"MIIU-abcdef"}, - }, res) - }) - - t.Run("ESDTLocalMintNotEnoughArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTLocalMint@4d4949552d616263646566") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTLocalMint", - }, res) - }) -} - -func TestParseQuantityOperationsNFT(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - parser, _ := NewOperationDataFieldParser(arguments) - - t.Run("ESDTNFTCreate", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTCreate@4d494841492d316630666638@01@54657374@03e8@516d664132487465726e674d6242655467506b3261327a6f4d357965616f33456f61373678513775346d63646947@746167733a746573742c667265652c66756e3b6d657461646174613a5468697320697320612074657374206465736372697074696f6e20666f7220616e20617765736f6d65206e6674@0101") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTCreate", - ESDTValues: []string{"1415934836"}, - Tokens: []string{"MIHAI-1f0ff8-01"}, - }, res) - }) - - t.Run("ESDTNFTBurn", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTBurn@54494b4954414b41@0102@123456") - res := 
parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTBurn", - ESDTValues: []string{"1193046"}, - Tokens: []string{"TIKITAKA-0102"}, - }, res) - }) - - t.Run("ESDTNFTAddQuantity", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTAddQuantity@54494b4954414b41@02@03") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTAddQuantity", - ESDTValues: []string{"3"}, - Tokens: []string{"TIKITAKA-02"}, - }, res) - }) - - t.Run("ESDTNFTAddQuantityNotEnoughtArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTAddQuantity@54494b4954414b41@02") - res := parser.Parse(dataField, sender, sender) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTAddQuantity", - }, res) - }) -} - -func TestParseBlockingOperationESDT(t *testing.T) { - t.Parallel() - - arguments := createMockArgumentsOperationParser() - parser, _ := NewOperationDataFieldParser(arguments) - - t.Run("ESDTFreeze", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTFreeze@54494b4954414b41") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTFreeze", - Tokens: []string{"TIKITAKA"}, - }, res) - }) - - t.Run("ESDTFreezeNFT", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTFreeze@544f4b454e2d616263642d3031") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTFreeze", - Tokens: []string{"TOKEN-abcd-01"}, - }, res) - }) - - t.Run("ESDTWipe", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTWipe@534b4537592d37336262636404") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTWipe", - Tokens: []string{"SKE7Y-73bbcd-04"}, - }, res) - }) - - t.Run("ESDTWipe", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTFreeze") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTFreeze", - }, res) - }) - - t.Run("SCCall", func(t *testing.T) { - t.Parallel() - - dataField := []byte("callMe@01") - res := parser.Parse(dataField, sender, receiverSC) - require.Equal(t, &ResponseParseData{ - Operation: operationTransfer, - Function: "callMe", - }, res) - }) - - t.Run("ESDTTransferRole", func(t *testing.T) { - t.Parallel() - - dataField := []byte("ESDTNFTCreateRoleTransfer@01010101@020202") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - Operation: "ESDTNFTCreateRoleTransfer", - }, res) - }) - -} - -func TestOperationDataFieldParser_ParseRelayed(t *testing.T) { - t.Parallel() - - args := &ArgsOperationDataFieldParser{ - PubKeyConverter: pubKeyConv, - Marshalizer: &mock.MarshalizerMock{}, - ShardCoordinator: &mock.ShardCoordinatorMock{}, - } - - parser, _ := NewOperationDataFieldParser(args) - - t.Run("RelayedTxOk", func(t *testing.T) { - t.Parallel() - - dataField := 
[]byte("relayedTx@7b226e6f6e6365223a362c2276616c7565223a302c227265636569766572223a2241414141414141414141414641436e626331733351534939726e6d697a69684d7a3631665539446a71786b3d222c2273656e646572223a2248714b386459464a43474144346a756d4e4e742b314530745a6579736376714c7a38624c47574e774177453d222c226761735072696365223a313030303030303030302c226761734c696d6974223a31353030303030302c2264617461223a2252564e45564652795957357a5a6d56795144517a4e446330597a51304d6d517a4f544d794d7a677a4e444d354d7a4a414d444e6c4f4541324d6a63314e7a6b304d7a59344e6a55334d7a6330514745774d4441774d444177222c22636861696e4944223a2252413d3d222c2276657273696f6e223a312c227369676e6174757265223a2262367331755349396f6d4b63514448344337624f534a632f62343166577a3961584d777334526966552b71343870486d315430636f72744b727443484a4258724f67536b3651333254546f7a6e4e2b7074324f4644413d3d227d") - - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - Operation: "ESDTTransfer", - Function: "buyChest", - Tokens: []string{"CGLD-928492"}, - ESDTValues: []string{"1000"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgq98dhxkehgy3rmtne5t8zsnx04404858r4vvsamdlsv"}, - ReceiversShardID: []uint32{0}, - }, res) - }) - - t.Run("RelayedTxV2Ok", func(t *testing.T) { - t.Parallel() - - dataField := []byte(core.RelayedTransactionV2 + - "@" + - hex.EncodeToString(receiverSC) + - "@" + - "0A" + - "@" + - hex.EncodeToString([]byte("callMe@02")) + - "@" + - "01a2") - - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - Operation: "transfer", - Function: "callMe", - Receivers: []string{"erd1qqqqqqqqqqqqqpgqp699jngundfqw07d8jzkepucvpzush6k3wvqyc44rx"}, - ReceiversShardID: []uint32{0}, - }, res) - }) - - t.Run("RelayedTxV2NotEnoughtArgs", func(t *testing.T) { - t.Parallel() - - dataField := []byte(core.RelayedTransactionV2 + "@abcd") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - }, res) - }) - - t.Run("RelayedTxV1NoArguments", func(t *testing.T) { - t.Parallel() - - dataField := []byte(core.RelayedTransaction) - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - }, res) - }) - - t.Run("RelayedTxV2WithRelayedTxIn", func(t *testing.T) { - t.Parallel() - - dataField := []byte(core.RelayedTransactionV2 + - "@" + - hex.EncodeToString(receiverSC) + - "@" + - "0A" + - "@" + - hex.EncodeToString([]byte(core.RelayedTransaction)) + - "@" + - "01a2") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - }, res) - }) - - t.Run("RelayedTxV2WithNFTTransfer", func(t *testing.T) { - t.Parallel() - - nftTransferData := []byte("ESDTNFTTransfer@4c4b4641524d2d396431656138@34ae14@728faa2c8883760aaf53bb@000000000000000005001e2a1428dd1e3a5146b3960d9e0f4a50369904ee5483@636c61696d5265776172647350726f7879@00000000000000000500a655b2b534218d6d8cfa1f219960be2f462e92565483") - dataField := []byte(core.RelayedTransactionV2 + - "@" + - hex.EncodeToString(receiver) + - "@" + - "0A" + - "@" + - hex.EncodeToString(nftTransferData) + - "@" + - "01a2") - res := parser.Parse(dataField, sender, receiver) - require.Equal(t, &ResponseParseData{ - IsRelayed: true, - Operation: "ESDTNFTTransfer", - ESDTValues: []string{"138495980998569893315957691"}, - Tokens: []string{"LKFARM-9d1ea8-34ae14"}, - Receivers: []string{"erd1qqqqqqqqqqqqqpgqrc4pg2xarca9z34njcxeur622qmfjp8w2jps89fxnl"}, - ReceiversShardID: []uint32{0}, - Function: 
"claimRewardsProxy", - }, res) - }) -} diff --git a/process/transactions/datafield/utils.go b/process/transactions/datafield/utils.go deleted file mode 100644 index 0d2ad926..00000000 --- a/process/transactions/datafield/utils.go +++ /dev/null @@ -1,96 +0,0 @@ -package datafield - -import ( - "bytes" - "fmt" - "math/big" - "unicode" - - "github.com/ElrondNetwork/elrond-go-core/core" -) - -const ( - esdtIdentifierSeparator = "-" - esdtRandomSequenceLength = 6 -) - -func getAllBuiltInFunctions() []string { - return []string{ - core.BuiltInFunctionClaimDeveloperRewards, - core.BuiltInFunctionChangeOwnerAddress, - core.BuiltInFunctionSetUserName, - core.BuiltInFunctionSaveKeyValue, - core.BuiltInFunctionESDTTransfer, - core.BuiltInFunctionESDTBurn, - core.BuiltInFunctionESDTFreeze, - core.BuiltInFunctionESDTUnFreeze, - core.BuiltInFunctionESDTWipe, - core.BuiltInFunctionESDTPause, - core.BuiltInFunctionESDTUnPause, - core.BuiltInFunctionSetESDTRole, - core.BuiltInFunctionUnSetESDTRole, - core.BuiltInFunctionESDTSetLimitedTransfer, - core.BuiltInFunctionESDTUnSetLimitedTransfer, - core.BuiltInFunctionESDTLocalMint, - core.BuiltInFunctionESDTLocalBurn, - core.BuiltInFunctionESDTNFTTransfer, - core.BuiltInFunctionESDTNFTCreate, - core.BuiltInFunctionESDTNFTAddQuantity, - core.BuiltInFunctionESDTNFTCreateRoleTransfer, - core.BuiltInFunctionESDTNFTBurn, - core.BuiltInFunctionESDTNFTAddURI, - core.BuiltInFunctionESDTNFTUpdateAttributes, - core.BuiltInFunctionMultiESDTNFTTransfer, - core.ESDTRoleLocalMint, - core.ESDTRoleLocalBurn, - core.ESDTRoleNFTCreate, - core.ESDTRoleNFTCreateMultiShard, - core.ESDTRoleNFTAddQuantity, - core.ESDTRoleNFTBurn, - core.ESDTRoleNFTAddURI, - core.ESDTRoleNFTUpdateAttributes, - core.ESDTRoleTransfer, - } -} - -func isBuiltInFunction(builtInFunctionsList []string, function string) bool { - for _, builtInFunction := range builtInFunctionsList { - if builtInFunction == function { - return true - } - } - - return false -} - -func extractTokenIdentifierAndNonce(arg []byte) ([]byte, uint64) { - argsSplit := bytes.Split(arg, []byte(esdtIdentifierSeparator)) - if len(argsSplit) < 2 { - return arg, 0 - } - - if len(argsSplit[1]) <= esdtRandomSequenceLength { - return arg, 0 - } - - identifier := []byte(fmt.Sprintf("%s-%s", argsSplit[0], argsSplit[1][:esdtRandomSequenceLength])) - nonce := big.NewInt(0).SetBytes(argsSplit[1][esdtRandomSequenceLength:]) - - return identifier, nonce.Uint64() -} - -func isEmptyAddr(pubKeyConverter core.PubkeyConverter, receiver []byte) bool { - emptyAddr := make([]byte, pubKeyConverter.Len()) - - return bytes.Equal(receiver, emptyAddr) -} - -func isASCIIString(input string) bool { - for i := 0; i < len(input); i++ { - if input[i] > unicode.MaxASCII { - return false - } - } - - return true -} diff --git a/process/transactions/datafield/utils_test.go b/process/transactions/datafield/utils_test.go deleted file mode 100644 index 1e8a40cd..00000000 --- a/process/transactions/datafield/utils_test.go +++ /dev/null @@ -1,16 +0,0 @@ -package datafield - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestIsASCIIString(t *testing.T) { - t.Parallel() - - require.True(t, isASCIIString("hello")) - require.True(t, isASCIIString("TOKEN-abcd")) - require.False(t, isASCIIString(string([]byte{12, 255}))) - require.False(t, isASCIIString(string([]byte{12, 188}))) -} diff --git a/process/transactions/interface.go b/process/transactions/interface.go index 15a362b0..85d13296 100644 --- a/process/transactions/interface.go +++ 
b/process/transactions/interface.go @@ -1,6 +1,6 @@ package transactions -import "github.com/ElrondNetwork/elastic-indexer-go/process/transactions/datafield" +import datafield "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" // DataFieldParser defines what a data field parser should be able to do type DataFieldParser interface { diff --git a/process/transactions/smartContractResultsProcessor.go b/process/transactions/smartContractResultsProcessor.go index 7e049190..0cee7e2d 100644 --- a/process/transactions/smartContractResultsProcessor.go +++ b/process/transactions/smartContractResultsProcessor.go @@ -14,6 +14,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/smartContractResult" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + datafield "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" ) type smartContractResultsProcessor struct { @@ -169,7 +170,7 @@ func (proc *smartContractResultsProcessor) prepareSmartContractResult( Function: res.Function, ESDTValues: res.ESDTValues, Tokens: res.Tokens, - Receivers: res.Receivers, + Receivers: datafield.EncodeBytesSlice(proc.pubKeyConverter.Encode, res.Receivers), ReceiversShardIDs: res.ReceiversShardID, IsRelayed: res.IsRelayed, OriginalSender: originalSenderAddr, diff --git a/process/transactions/smartContractResultsProcessor_test.go b/process/transactions/smartContractResultsProcessor_test.go index 9f645e11..998392a0 100644 --- a/process/transactions/smartContractResultsProcessor_test.go +++ b/process/transactions/smartContractResultsProcessor_test.go @@ -7,15 +7,15 @@ import ( "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elastic-indexer-go/mock" - "github.com/ElrondNetwork/elastic-indexer-go/process/transactions/datafield" "github.com/ElrondNetwork/elrond-go-core/data/block" "github.com/ElrondNetwork/elrond-go-core/data/smartContractResult" + datafield "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" "github.com/stretchr/testify/require" ) func createDataFieldParserMock() DataFieldParser { args := &datafield.ArgsOperationDataFieldParser{ - PubKeyConverter: &mock.PubkeyConverterMock{}, + AddressLength: 32, Marshalizer: &mock.MarshalizerMock{}, ShardCoordinator: &mock.ShardCoordinatorMock{}, } @@ -65,6 +65,7 @@ func TestPrepareSmartContractResult(t *testing.T) { ReceiverShard: 1, Operation: "transfer", SenderAddressBytes: sndAddr, + Receivers: []string{}, } require.Equal(t, expectedTx, scRes) diff --git a/process/transactions/transactionDBBuilder.go b/process/transactions/transactionDBBuilder.go index 196f7b3b..848d33c4 100644 --- a/process/transactions/transactionDBBuilder.go +++ b/process/transactions/transactionDBBuilder.go @@ -5,7 +5,7 @@ import ( "fmt" "time" - "github.com/ElrondNetwork/elastic-indexer-go" + indexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elrond-go-core/core" coreData "github.com/ElrondNetwork/elrond-go-core/data" @@ -13,6 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/data/receipt" "github.com/ElrondNetwork/elrond-go-core/data/rewardTx" "github.com/ElrondNetwork/elrond-go-core/data/transaction" + datafield "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" ) const emptyString = "" @@ -78,7 +79,7 @@ func (dtb 
*dbTransactionBuilder) prepareTransaction( Function: res.Function, ESDTValues: res.ESDTValues, Tokens: res.Tokens, - Receivers: res.Receivers, + Receivers: datafield.EncodeBytesSlice(dtb.addressPubkeyConverter.Encode, res.Receivers), ReceiversShardIDs: res.ReceiversShardID, IsRelayed: res.IsRelayed, Version: tx.Version, diff --git a/process/transactions/transactionDBBuilder_test.go b/process/transactions/transactionDBBuilder_test.go index 8744ae94..5160c66a 100644 --- a/process/transactions/transactionDBBuilder_test.go +++ b/process/transactions/transactionDBBuilder_test.go @@ -83,6 +83,7 @@ func TestGetMoveBalanceTransaction(t *testing.T) { SenderUserName: []byte("snd"), Operation: "transfer", Version: 1, + Receivers: []string{}, } dbTx := cp.prepareTransaction(tx, txHash, mbHash, mb, header, status) diff --git a/process/transactions/transactionsGrouper_test.go b/process/transactions/transactionsGrouper_test.go index 64f38199..9fce1b45 100644 --- a/process/transactions/transactionsGrouper_test.go +++ b/process/transactions/transactionsGrouper_test.go @@ -77,7 +77,7 @@ func TestGroupInvalidTxs(t *testing.T) { t.Parallel() parser := createDataFieldParserMock() - txBuilder := newTransactionDBBuilder(&mock.PubkeyConverterMock{}, &mock.ShardCoordinatorMock{}, &mock.EconomicsHandlerStub{}, parser) + txBuilder := newTransactionDBBuilder(mock.NewPubkeyConverterMock(32), &mock.ShardCoordinatorMock{}, &mock.EconomicsHandlerStub{}, parser) grouper := newTxsGrouper(txBuilder, false, 0, &mock.HasherMock{}, &mock.MarshalizerMock{}) txHash1 := []byte("txHash1") diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 01c959a5..26c908a9 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -2,11 +2,10 @@ package transactions import ( "encoding/hex" + "github.com/ElrondNetwork/elrond-go-core/core" indexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/data" - "github.com/ElrondNetwork/elastic-indexer-go/process/transactions/datafield" - "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" coreData "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -14,6 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" logger "github.com/ElrondNetwork/elrond-go-logger" + "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" ) var log = logger.GetOrCreate("indexer/process/transactions") @@ -45,7 +45,7 @@ func NewTransactionsProcessor(args *ArgsTransactionProcessor) (*txsDatabaseProce } argsParser := &datafield.ArgsOperationDataFieldParser{ - PubKeyConverter: args.AddressPubkeyConverter, + AddressLength: args.AddressPubkeyConverter.Len(), Marshalizer: args.Marshalizer, ShardCoordinator: args.ShardCoordinator, } diff --git a/process/transactions/transactionsProcessor_test.go b/process/transactions/transactionsProcessor_test.go index a77e4803..a78bfa97 100644 --- a/process/transactions/transactionsProcessor_test.go +++ b/process/transactions/transactionsProcessor_test.go @@ -23,7 +23,7 @@ import ( func createMockArgsTxsDBProc() *ArgsTransactionProcessor { args := &ArgsTransactionProcessor{ - AddressPubkeyConverter: &mock.PubkeyConverterMock{}, + AddressPubkeyConverter: mock.NewPubkeyConverterMock(10), 
TxFeeCalculator: &mock.EconomicsHandlerStub{}, ShardCoordinator: &mock.ShardCoordinatorMock{}, Hasher: &mock.HasherMock{}, From a3b6cc84dd8c45a60d0dcad8462446046ae9246e Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 27 Jun 2022 15:03:09 +0300 Subject: [PATCH 31/69] small fix --- process/transactions/transactionsProcessor.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 26c908a9..e3cb786a 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -2,10 +2,10 @@ package transactions import ( "encoding/hex" - "github.com/ElrondNetwork/elrond-go-core/core" indexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/data" + "github.com/ElrondNetwork/elrond-go-core/core" "github.com/ElrondNetwork/elrond-go-core/core/check" coreData "github.com/ElrondNetwork/elrond-go-core/data" "github.com/ElrondNetwork/elrond-go-core/data/block" @@ -13,7 +13,7 @@ import ( "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" logger "github.com/ElrondNetwork/elrond-go-logger" - "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" + datafield "github.com/ElrondNetwork/elrond-vm-common/parsers/dataField" ) var log = logger.GetOrCreate("indexer/process/transactions") From 79adec7bab879e7737eb08fecff84e69d67ca158 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 27 Jun 2022 15:35:46 +0300 Subject: [PATCH 32/69] fixes after review --- process/accounts/serialize.go | 6 +++--- process/collections/serialize.go | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/process/accounts/serialize.go b/process/accounts/serialize.go index e9065061..a7926ae5 100644 --- a/process/accounts/serialize.go +++ b/process/accounts/serialize.go @@ -89,7 +89,7 @@ func prepareDeleteAccountInfo(acct *data.AccountInfo, isESDT bool, index string) meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) codeToExecute := ` - if (ctx.op == 'create') { + if ('create' == ctx.op) { ctx.op = 'noop' } else { if (ctx._source.containsKey('timestamp')) { @@ -130,7 +130,7 @@ func prepareSerializedAccountInfo( meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, id, "\n")) codeToExecute := ` - if (ctx.op == 'create') { + if ('create' == ctx.op) { ctx._source = params.account } else { if (ctx._source.containsKey('timestamp')) { @@ -210,7 +210,7 @@ func (ap *accountsProcessor) SerializeTypeForProvidedIDs( meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) codeToExecute := ` - if (ctx.op == 'create') { + if ('create' == ctx.op) { ctx.op = 'noop' } else { ctx._source.type = params.type diff --git a/process/collections/serialize.go b/process/collections/serialize.go index 45cd8e00..fcd86d5c 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -29,9 +29,9 @@ func ExtractAndSerializeCollectionsData( meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, acct.Address, "\n")) codeToExecute := ` - if ((ctx.op == 'create') && (params.value == '0')) { + if (('create' == ctx.op) && ('0' == params.value)) { ctx.op = 'noop'; - } else if (params.value != '0') { + } else if ('0' == params.value) { if 
(!ctx._source.containsKey(params.col)) { ctx._source[params.col] = new HashMap(); } From e3a7be83998127c70ae641049b51919e346ce223 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 27 Jun 2022 16:07:13 +0300 Subject: [PATCH 33/69] use converters.JsonEscape --- process/accounts/serialize.go | 16 +++++++------- process/block/serialize.go | 3 ++- process/collections/serialize.go | 14 +++++++++--- process/logsevents/serialize.go | 36 +++++++++++++++++++------------ process/miniblocks/serialize.go | 9 ++++---- process/operations/serialize.go | 5 +++-- process/tags/serialize.go | 2 +- process/transactions/serialize.go | 36 +++++++++++++++++++------------ 8 files changed, 74 insertions(+), 47 deletions(-) diff --git a/process/accounts/serialize.go b/process/accounts/serialize.go index a7926ae5..40a466e5 100644 --- a/process/accounts/serialize.go +++ b/process/accounts/serialize.go @@ -8,10 +8,10 @@ import ( "github.com/ElrondNetwork/elastic-indexer-go/data" ) -// SerializeNFTCreateInfo will serialize the provided nft create information in a way that Elastic Search expects a bulk request +// SerializeNFTCreateInfo will serialize the provided nft create information in a way that Elasticsearch expects a bulk request func (ap *accountsProcessor) SerializeNFTCreateInfo(tokensInfo []*data.TokenInfo, buffSlice *data.BufferSlice, index string) error { for _, tokenData := range tokensInfo { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, tokenData.Identifier, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(tokenData.Identifier), "\n")) serializedData, errMarshal := json.Marshal(tokenData) if errMarshal != nil { return errMarshal @@ -86,7 +86,7 @@ func prepareDeleteAccountInfo(acct *data.AccountInfo, isESDT bool, index string) id += fmt.Sprintf("-%s-%s", acct.TokenName, hexEncodedNonce) } - meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) codeToExecute := ` if ('create' == ctx.op) { @@ -128,7 +128,7 @@ func prepareSerializedAccountInfo( return nil, nil, err } - meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) codeToExecute := ` if ('create' == ctx.op) { ctx._source = params.account @@ -153,7 +153,7 @@ func prepareSerializedAccountInfo( return meta, []byte(serializedDataStr), nil } -// SerializeAccountsHistory will serialize accounts history in a way that Elastic Search expects a bulk request +// SerializeAccountsHistory will serialize accounts history in a way that Elasticsearch expects a bulk request func (ap *accountsProcessor) SerializeAccountsHistory( accounts map[string]*data.AccountBalanceHistory, buffSlice *data.BufferSlice, @@ -189,7 +189,7 @@ func prepareSerializedAccountBalanceHistory( } id += fmt.Sprintf("-%d", account.Timestamp) - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) serializedData, err := json.Marshal(account) if err != nil { @@ -207,7 +207,7 @@ func (ap *accountsProcessor) SerializeTypeForProvidedIDs( index string, ) error { for _, id := range ids { - meta := 
[]byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) codeToExecute := ` if ('create' == ctx.op) { @@ -221,7 +221,7 @@ func (ap *accountsProcessor) SerializeTypeForProvidedIDs( `"lang": "painless",`+ `"params": {"type": "%s"}},`+ `"upsert": {}}`, - converters.FormatPainlessSource(codeToExecute), tokenType) + converters.FormatPainlessSource(codeToExecute), converters.JsonEscape(tokenType)) err := buffSlice.PutData(meta, []byte(serializedDataStr)) if err != nil { diff --git a/process/block/serialize.go b/process/block/serialize.go index 3f06b836..371bd2e3 100644 --- a/process/block/serialize.go +++ b/process/block/serialize.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/ElrondNetwork/elastic-indexer-go" + "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elrond-go-core/core/check" coreData "github.com/ElrondNetwork/elrond-go-core/data" @@ -17,7 +18,7 @@ func (bp *blockProcessor) SerializeBlock(elasticBlock *data.Block, buffSlice *da return indexer.ErrNilElasticBlock } - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, elasticBlock.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(elasticBlock.Hash), "\n")) serializedData, errMarshal := json.Marshal(elasticBlock) if errMarshal != nil { return errMarshal diff --git a/process/collections/serialize.go b/process/collections/serialize.go index fcd86d5c..e0922dcb 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -27,7 +27,7 @@ func ExtractAndSerializeCollectionsData( nonceBig := big.NewInt(0).SetUint64(acct.TokenNonce) hexEncodedNonce := hex.EncodeToString(nonceBig.Bytes()) - meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, acct.Address, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(acct.Address), "\n")) codeToExecute := ` if (('create' == ctx.op) && ('0' == params.value)) { ctx.op = 'noop'; @@ -49,13 +49,21 @@ func ExtractAndSerializeCollectionsData( } ` - collection := fmt.Sprintf(`{"%s":{"%s": "%s"}}`, acct.TokenName, hexEncodedNonce, acct.Balance) + tokenName := converters.JsonEscape(acct.TokenName) + tokenNonceHex := converters.JsonEscape(hexEncodedNonce) + balanceStr := converters.JsonEscape(acct.Balance) + + collection := fmt.Sprintf(`{"%s":{"%s": "%s"}}`, + tokenName, + tokenNonceHex, + balanceStr, + ) serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ `"source": "%s",`+ `"lang": "painless",`+ `"params": { "col": "%s", "nonce": "%s", "value": "%s"}},`+ `"upsert": %s}`, - converters.FormatPainlessSource(codeToExecute), acct.TokenName, hexEncodedNonce, acct.Balance, collection) + converters.FormatPainlessSource(codeToExecute), tokenName, tokenNonceHex, balanceStr, collection) err := buffSlice.PutData(meta, []byte(serializedDataStr)) if err != nil { diff --git a/process/logsevents/serialize.go b/process/logsevents/serialize.go index cb8bfc11..f68dbf6a 100644 --- a/process/logsevents/serialize.go +++ b/process/logsevents/serialize.go @@ -14,7 +14,7 @@ import ( // SerializeLogs will serialize the provided logs in a way that Elastic Search expects a bulk request func (logsAndEventsProcessor) 
SerializeLogs(logs []*data.Logs, buffSlice *data.BufferSlice, index string) error { for _, lg := range logs { - meta := []byte(fmt.Sprintf(`{ "index" : {"_index":"%s", "_id" : "%s" } }%s`, index, lg.ID, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(lg.ID), "\n")) serializedData, errMarshal := json.Marshal(lg) if errMarshal != nil { return errMarshal @@ -32,7 +32,7 @@ func (logsAndEventsProcessor) SerializeLogs(logs []*data.Logs, buffSlice *data.B // SerializeSCDeploys will serialize the provided smart contract deploys in a way that Elastic Search expects a bulk request func (logsAndEventsProcessor) SerializeSCDeploys(deploys map[string]*data.ScDeployInfo, buffSlice *data.BufferSlice, index string) error { for scAddr, deployInfo := range deploys { - meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, scAddr, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(scAddr), "\n")) serializedData, err := serializeDeploy(deployInfo) if err != nil { @@ -82,7 +82,7 @@ func serializeDeploy(deployInfo *data.ScDeployInfo) ([]byte, error) { return []byte(serializedDataStr), nil } -// SerializeTokens will serialize the provided tokens data in a way that Elastic Search expects a bulk request +// SerializeTokens will serialize the provided tokens' data in a way that Elasticsearch expects a bulk request func (logsAndEventsProcessor) SerializeTokens(tokens []*data.TokenInfo, updateNFTData []*data.NFTDataUpdate, buffSlice *data.BufferSlice, index string) error { for _, tokenData := range tokens { meta, serializedData, err := serializeToken(tokenData, index) @@ -104,7 +104,7 @@ func serializeToken(tokenData *data.TokenInfo, index string) ([]byte, []byte, er return serializeTokenTransferOwnership(tokenData, index) } - meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, tokenData.Token, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(tokenData.Token), "\n")) serializedTokenData, err := json.Marshal(tokenData) if err != nil { return nil, nil, err @@ -128,7 +128,7 @@ func serializeToken(tokenData *data.TokenInfo, index string) ([]byte, []byte, er } func serializeTokenTransferOwnership(tokenData *data.TokenInfo, index string) ([]byte, []byte, error) { - meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, tokenData.Token, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(tokenData.Token), "\n")) tokenDataSerialized, err := json.Marshal(tokenData) if err != nil { return nil, nil, err @@ -157,12 +157,12 @@ func serializeTokenTransferOwnership(tokenData *data.TokenInfo, index string) ([ `"lang": "painless",`+ `"params": {"elem": %s, "owner": "%s"}},`+ `"upsert": %s}`, - converters.FormatPainlessSource(codeToExecute), string(ownerDataSerialized), tokenData.CurrentOwner, string(tokenDataSerialized)) + converters.FormatPainlessSource(codeToExecute), string(ownerDataSerialized), converters.JsonEscape(tokenData.CurrentOwner), string(tokenDataSerialized)) return meta, []byte(serializedDataStr), nil } -// SerializeDelegators will serialize the provided delegators in a way that Elastic Search expects a bulk request +// SerializeDelegators will serialize the provided delegators in a way that Elasticsearch expects a bulk request func (lep *logsAndEventsProcessor) 
SerializeDelegators(delegators map[string]*data.Delegator, buffSlice *data.BufferSlice, index string) error { for _, delegator := range delegators { meta, serializedData, err := lep.prepareSerializedDelegator(delegator, index) @@ -182,11 +182,11 @@ func (lep *logsAndEventsProcessor) SerializeDelegators(delegators map[string]*da func (lep *logsAndEventsProcessor) prepareSerializedDelegator(delegator *data.Delegator, index string) ([]byte, []byte, error) { id := lep.computeDelegatorID(delegator) if delegator.ShouldDelete { - meta := []byte(fmt.Sprintf(`{ "delete" : { "_index": "%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "delete" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) return meta, nil, nil } - meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, id, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(id), "\n")) serializedData, errMarshal := json.Marshal(delegator) if errMarshal != nil { return nil, nil, errMarshal @@ -210,7 +210,7 @@ func (lep *logsAndEventsProcessor) SerializeSupplyData(tokensSupply data.TokensH continue } - meta := []byte(fmt.Sprintf(`{ "delete" : { "_index": "%s", "_id" : "%s" } }%s`, index, supplyData.Identifier, "\n")) + meta := []byte(fmt.Sprintf(`{ "delete" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(supplyData.Identifier), "\n")) err := buffSlice.PutData(meta, nil) if err != nil { return err @@ -246,7 +246,7 @@ func (lep *logsAndEventsProcessor) SerializeRolesData( } func serializeRoleData(buffSlice *data.BufferSlice, rd *tokeninfo.RoleData, role string, index string) error { - meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, rd.Token, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(rd.Token), "\n")) var serializedDataStr string if rd.Set { codeToExecute := ` @@ -270,7 +270,12 @@ func serializeRoleData(buffSlice *data.BufferSlice, rd *tokeninfo.RoleData, role `"lang": "painless",`+ `"params": { "role": "%s", "address": "%s"}},`+ `"upsert": { "roles": {"%s": ["%s"]}}}`, - converters.FormatPainlessSource(codeToExecute), role, rd.Address, role, rd.Address) + converters.FormatPainlessSource(codeToExecute), + converters.JsonEscape(role), + converters.JsonEscape(rd.Address), + converters.JsonEscape(role), + converters.JsonEscape(rd.Address), + ) } else { codeToExecute := ` if (ctx._source.containsKey('roles')) { @@ -284,14 +289,17 @@ func serializeRoleData(buffSlice *data.BufferSlice, rd *tokeninfo.RoleData, role `"lang": "painless",`+ `"params": { "role": "%s", "address": "%s" }},`+ `"upsert": {} }`, - converters.FormatPainlessSource(codeToExecute), role, rd.Address) + converters.FormatPainlessSource(codeToExecute), + converters.JsonEscape(role), + converters.JsonEscape(rd.Address), + ) } return buffSlice.PutData(meta, []byte(serializedDataStr)) } func serializePropertiesData(buffSlice *data.BufferSlice, index string, tokenProp *tokeninfo.PropertiesData) error { - meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, tokenProp.Token, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(tokenProp.Token), "\n")) propertiesBytes, err := json.Marshal(tokenProp.Properties) if err != nil { diff --git a/process/miniblocks/serialize.go b/process/miniblocks/serialize.go index 
b1b45812..9b52ec94 100644 --- a/process/miniblocks/serialize.go +++ b/process/miniblocks/serialize.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" + "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elrond-go-core/data/block" ) @@ -32,23 +33,23 @@ func (mp *miniblocksProcessor) SerializeBulkMiniBlocks( func (mp *miniblocksProcessor) prepareMiniblockData(miniblockDB *data.Miniblock, isInDB bool, index string) ([]byte, []byte, error) { if !isInDB { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s"} }%s`, index, miniblockDB.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s"} }%s`, index, converters.JsonEscape(miniblockDB.Hash), "\n")) serializedData, err := json.Marshal(miniblockDB) return meta, serializedData, err } // prepare data for update operation - meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, miniblockDB.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(miniblockDB.Hash), "\n")) if mp.selfShardID == miniblockDB.SenderShardID && miniblockDB.ProcessingTypeOnDestination != block.Processed.String() { // prepare for update sender block hash - serializedData := []byte(fmt.Sprintf(`{ "doc" : { "senderBlockHash" : "%s", "procTypeS": "%s" } }`, miniblockDB.SenderBlockHash, miniblockDB.ProcessingTypeOnSource)) + serializedData := []byte(fmt.Sprintf(`{ "doc" : { "senderBlockHash" : "%s", "procTypeS": "%s" } }`, converters.JsonEscape(miniblockDB.SenderBlockHash), converters.JsonEscape(miniblockDB.ProcessingTypeOnSource))) return meta, serializedData, nil } // prepare for update receiver block hash - serializedData := []byte(fmt.Sprintf(`{ "doc" : { "receiverBlockHash" : "%s", "procTypeD": "%s" } }`, miniblockDB.ReceiverBlockHash, miniblockDB.ProcessingTypeOnDestination)) + serializedData := []byte(fmt.Sprintf(`{ "doc" : { "receiverBlockHash" : "%s", "procTypeD": "%s" } }`, converters.JsonEscape(miniblockDB.ReceiverBlockHash), converters.JsonEscape(miniblockDB.ProcessingTypeOnDestination))) return meta, serializedData, nil } diff --git a/process/operations/serialize.go b/process/operations/serialize.go index 229680d5..07fea407 100644 --- a/process/operations/serialize.go +++ b/process/operations/serialize.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" + "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" ) @@ -28,7 +29,7 @@ func (op *operationsProcessor) prepareSerializedDataForAScResult( scr *data.ScResult, index string, ) ([]byte, []byte, error) { - metaData := []byte(fmt.Sprintf(`{"update":{"_index":"%s","_id":"%s"}}%s`, index, scr.Hash, "\n")) + metaData := []byte(fmt.Sprintf(`{"update":{"_index":"%s","_id":"%s"}}%s`, index, converters.JsonEscape(scr.Hash), "\n")) marshaledSCR, err := json.Marshal(scr) if err != nil { return nil, nil, err @@ -44,7 +45,7 @@ func (op *operationsProcessor) prepareSerializedDataForAScResult( return metaData, serializedData, nil } - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s","_id" : "%s" } }%s`, index, scr.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s","_id" : "%s" } }%s`, index, converters.JsonEscape(scr.Hash), "\n")) return meta, marshaledSCR, nil } diff --git a/process/tags/serialize.go b/process/tags/serialize.go index 6b439123..d5ab7b8a 100644 --- a/process/tags/serialize.go +++ 
b/process/tags/serialize.go @@ -15,7 +15,7 @@ func (tc *tagsCount) Serialize(buffSlice *data.BufferSlice, index string) error } base64Tag := base64.StdEncoding.EncodeToString([]byte(tag)) - meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, base64Tag, "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(base64Tag), "\n")) codeToExecute := ` ctx._source.count += params.count; diff --git a/process/transactions/serialize.go b/process/transactions/serialize.go index 52af3cf4..e77571a5 100644 --- a/process/transactions/serialize.go +++ b/process/transactions/serialize.go @@ -6,6 +6,7 @@ import ( "math/big" "strings" + "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elrond-go-core/core" ) @@ -13,7 +14,7 @@ import ( // SerializeScResults will serialize the provided smart contract results in a way that Elastic Search expects a bulk request func (tdp *txsDatabaseProcessor) SerializeScResults(scResults []*data.ScResult, buffSlice *data.BufferSlice, index string) error { for _, sc := range scResults { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, sc.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(sc.Hash), "\n")) serializedData, errPrepareSc := json.Marshal(sc) if errPrepareSc != nil { return errPrepareSc @@ -31,7 +32,7 @@ func (tdp *txsDatabaseProcessor) SerializeScResults(scResults []*data.ScResult, // SerializeReceipts will serialize the receipts in a way that Elastic Search expects a bulk request func (tdp *txsDatabaseProcessor) SerializeReceipts(receipts []*data.Receipt, buffSlice *data.BufferSlice, index string) error { for _, rec := range receipts { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, rec.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(rec.Hash), "\n")) serializedData, errPrepareReceipt := json.Marshal(rec) if errPrepareReceipt != nil { return errPrepareReceipt @@ -71,7 +72,7 @@ func (tdp *txsDatabaseProcessor) SerializeTransactionWithRefund( tx.GasUsed = gasUsed tx.Fee = fee.String() - meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, txHash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index": "%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(txHash), "\n")) serializedData, errPrepare := json.Marshal(tx) if errPrepare != nil { return errPrepare @@ -86,7 +87,7 @@ func (tdp *txsDatabaseProcessor) SerializeTransactionWithRefund( return nil } -// SerializeTransactions will serialize the transactions in a way that Elastic Search expects a bulk request +// SerializeTransactions will serialize the transactions in a way that Elasticsearch expects a bulk request func (tdp *txsDatabaseProcessor) SerializeTransactions( transactions []*data.Transaction, txHashStatus map[string]string, @@ -126,7 +127,10 @@ func serializeTxHashStatus(buffSlice *data.BufferSlice, txHashStatus map[string] return err } - serializedData := []byte(fmt.Sprintf(`{"script": {"source": "ctx._source.status = params.status","lang": "painless","params": {"status": "%s"}},"upsert": %s }`, status, string(marshaledTx))) + codeToExecute := ` + ctx._source.status = params.status +` + serializedData := []byte(fmt.Sprintf(`{"script": {"source": 
"%s","lang": "painless","params": {"status": "%s"}},"upsert": %s }`, converters.FormatPainlessSource(codeToExecute), converters.JsonEscape(status), string(marshaledTx))) err = buffSlice.PutData(metaData, serializedData) if err != nil { return err @@ -141,7 +145,7 @@ func prepareSerializedDataForATransaction( selfShardID uint32, index string, ) ([]byte, []byte, error) { - metaData := []byte(fmt.Sprintf(`{"update":{ "_index":"%s", "_id":"%s"}}%s`, index, tx.Hash, "\n")) + metaData := []byte(fmt.Sprintf(`{"update":{ "_index":"%s", "_id":"%s"}}%s`, index, converters.JsonEscape(tx.Hash), "\n")) marshaledTx, err := json.Marshal(tx) if err != nil { return nil, nil, err @@ -166,19 +170,23 @@ func prepareSerializedDataForATransaction( } // transaction is intra-shard, invalid or cross-shard destination me - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, tx.Hash, "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(tx.Hash), "\n")) return meta, marshaledTx, nil } func prepareNFTESDTTransferOrMultiESDTTransfer(marshaledTx []byte) ([]byte, error) { - serializedData := []byte(fmt.Sprintf(`{"script":{"source":"`+ - `def status = ctx._source.status;`+ - `ctx._source = params.tx;`+ - `ctx._source.status = status;`+ - `","lang": "painless","params":`+ - `{"tx": %s}},"upsert":%s}`, - string(marshaledTx), string(marshaledTx))) + codeToExecute := ` + if ('create' == ctx.op) { + ctx._source = params.tx; + } else { + def status = ctx._source.status; + ctx._source = params.tx; + ctx._source.status = status; + } +` + serializedData := []byte(fmt.Sprintf(`{"scripted_upsert": true, "script":{"source":"%s","lang": "painless","params":{"tx": %s}},"upsert":{}}`, + converters.FormatPainlessSource(codeToExecute), string(marshaledTx))) return serializedData, nil } From e768a559bb510afac249495b5336407d2334c4f9 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 27 Jun 2022 16:18:34 +0300 Subject: [PATCH 34/69] fix unit tests --- integrationtests/collectionsIndex_test.go | 2 ++ process/accounts/serialize_test.go | 10 +++++----- process/collections/serialize.go | 2 +- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/integrationtests/collectionsIndex_test.go b/integrationtests/collectionsIndex_test.go index 2d191b01..a09f3afa 100644 --- a/integrationtests/collectionsIndex_test.go +++ b/integrationtests/collectionsIndex_test.go @@ -1,3 +1,5 @@ +//go:build integrationtests + package integrationtests import ( diff --git a/process/accounts/serialize_test.go b/process/accounts/serialize_test.go index ef3aea7b..6ecc5644 100644 --- a/process/accounts/serialize_test.go +++ b/process/accounts/serialize_test.go @@ -56,7 +56,7 @@ func TestSerializeAccounts(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accounts", "_id" : "addr1" } } -{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= 
params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -83,7 +83,7 @@ func TestSerializeAccountsESDTNonceZero(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-abcd-00" } } -{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -109,7 +109,7 @@ func TestSerializeAccountsESDT(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-05" } } -{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000"} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000"} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -149,7 +149,7 @@ func TestSerializeAccountsNFTWithMedaData(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-16" } } -{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": 
{"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false}} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false}} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -174,7 +174,7 @@ func TestSerializeAccountsESDTDelete(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index":"accountsesdt", "_id" : "addr1-token-0001-00" } } -{"scripted_upsert": true, "script": {"source": "if (ctx.op == 'create') {ctx.op = 'noop'} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.timestamp) {ctx.op = 'delete'}} else {ctx.op = 'delete'}}","lang": "painless","params": {"timestamp": 0}},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx.op = 'noop'} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.timestamp) {ctx.op = 'delete'}} else {ctx.op = 'delete'}}","lang": "painless","params": {"timestamp": 0}},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/collections/serialize.go b/process/collections/serialize.go index e0922dcb..6c68ca98 100644 --- a/process/collections/serialize.go +++ b/process/collections/serialize.go @@ -31,7 +31,7 @@ func ExtractAndSerializeCollectionsData( codeToExecute := ` if (('create' == ctx.op) && ('0' == params.value)) { ctx.op = 'noop'; - } else if ('0' == params.value) { + } else if ('0' != params.value) { if (!ctx._source.containsKey(params.col)) { ctx._source[params.col] = new HashMap(); } From 6f5d9a3f6854b3063894ec58d414d2839de1314c Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 28 Jun 2022 11:19:09 +0300 Subject: [PATCH 35/69] extra protection JsonEscape --- converters/json.go | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/converters/json.go b/converters/json.go index b52cc79a..67ba105e 100644 --- a/converters/json.go +++ b/converters/json.go @@ -1,12 +1,24 @@ package converters -import "encoding/json" +import ( + "encoding/json" + + logger "github.com/ElrondNetwork/elrond-go-logger" +) + +const defaultStr = "default" + +var log = logger.GetOrCreate("indexer/converters") // JsonEscape will format the provided string in a json compatible string func JsonEscape(i string) string { b, err := json.Marshal(i) if err != nil { - return "" + 
log.Warn("converters.JsonEscape something went wrong", + "input", i, + "error", err, + ) + return defaultStr } // Trim the beginning and trailing " character From f6f25618f938d35d4835f0c70fd57ad3f68ca529 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 14 Jul 2022 17:14:31 +0300 Subject: [PATCH 36/69] fix gas used operations index --- .../nftTransferCrossShard_test.go | 8 +++++ .../op-nft-transfer-sc-call-after-refund.json | 35 +++++++++++++++++++ integrationtests/utils.go | 3 +- process/elasticProcessor.go | 21 ++++++----- process/operations/operationsProcessor.go | 29 ++++++++------- process/transactions/serialize.go | 7 ++++ 6 files changed, 81 insertions(+), 22 deletions(-) create mode 100644 integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json diff --git a/integrationtests/nftTransferCrossShard_test.go b/integrationtests/nftTransferCrossShard_test.go index ad6075d4..6240cb7c 100644 --- a/integrationtests/nftTransferCrossShard_test.go +++ b/integrationtests/nftTransferCrossShard_test.go @@ -131,6 +131,14 @@ func TestNFTTransferCrossShardWithSCCall(t *testing.T) { readExpectedResult("./testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json"), string(genericResponse.Docs[0].Source), ) + + genericResponse = &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.OperationsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, + readExpectedResult("./testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json"), + string(genericResponse.Docs[0].Source), + ) } // TODO check also indexes that are altered diff --git a/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json new file mode 100644 index 00000000..cb204cef --- /dev/null +++ b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json @@ -0,0 +1,35 @@ +{ + "miniBlockHash": "b30aaa656bf101a7fb87f6c02a9da9e70cd053a79de24f5d14276232757d9766", + "nonce": 79, + "round": 50, + "value": "0", + "receiver": "657264316566397878336b336d3839617a6634633478633938777063646e78356830636e787936656d34377236646334616c756430757771783234663530", + "sender": "657264316566397878336b336d3839617a6634633478633938777063646e78356830636e787936656d34377236646334616c756430757771783234663530", + "receiverShard": 0, + "senderShard": 0, + "gasPrice": 1000000000, + "gasLimit": 150000000, + "gasUsed": 139832352, + "fee": "1802738520000000", + "data": "RVNEVE5GVFRyYW5zZmVyQDRjNGI0NjQxNTI0ZDJkMzM2NjM0NjYzOTYyQDAxNjUzNEA2ZjFlNmYwMWJjNzYyN2Y1YWVAMDAwMDAwMDAwMDAwMDAwMDA1MDBmMWM4ZjJmZGM1OGE2M2M2YjIwMWZjMmVkNjI5OTYyZDNkZmEzM2ZlN2NlYkA2MzZmNmQ3MDZmNzU2ZTY0NTI2NTc3NjE3MjY0NzM1MDcyNmY3ODc5QDAwMDAwMDAwMDAwMDAwMDAwNTAwNGY3OWVjNDRiYjEzMzcyYjVhYzlkOTk2ZDc0OTEyMGY0NzY0Mjc2MjdjZWI=", + "signature": "", + "timestamp": 5040, + "status": "success", + "searchOrder": 0, + "hasScResults": true, + "tokens": [ + "LKFARM-3f4f9b-016534" + ], + "esdtValues": [ + "2049781565634260759982" + ], + "receivers": [ + "00000000000000000500f1c8f2fdc58a63c6b201fc2ed629962d3dfa33fe7ceb" + ], + "receiversShardIDs": [ + 0 + ], + "operation": "ESDTNFTTransfer", + "type": "normal", + "function": "compoundRewardsProxy" +} diff --git a/integrationtests/utils.go b/integrationtests/utils.go index af37ca3b..08b78381 100644 --- a/integrationtests/utils.go +++ b/integrationtests/utils.go @@ -48,7 +48,8 @@ func CreateElasticProcessor( ShardCoordinator: 
shardCoordinator, TransactionFeeCalculator: feeProcessor, EnabledIndexes: []string{indexer.TransactionsIndex, indexer.LogsIndex, indexer.AccountsESDTIndex, indexer.ScResultsIndex, - indexer.ReceiptsIndex, indexer.BlockIndex, indexer.AccountsIndex, indexer.TokensIndex, indexer.TagsIndex, indexer.CollectionsIndex}, + indexer.ReceiptsIndex, indexer.BlockIndex, indexer.AccountsIndex, indexer.TokensIndex, indexer.TagsIndex, indexer.CollectionsIndex, + indexer.OperationsIndex}, Denomination: 18, IsInImportDBMode: false, } diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 8f6204f4..3710bf43 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -364,7 +364,12 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.indexTransactionsWithRefund(preparedResults.TxHashRefund, buffers) + err = ei.prepareAndIndexOperations(preparedResults.Transactions, preparedResults.TxHashStatus, header, preparedResults.ScResults, buffers) + if err != nil { + return err + } + + err = ei.indexTransactionsAndOperationsWithRefund(preparedResults.TxHashRefund, buffers) if err != nil { return err } @@ -410,11 +415,6 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.prepareAndIndexOperations(preparedResults.Transactions, preparedResults.TxHashStatus, header, preparedResults.ScResults, buffers) - if err != nil { - return err - } - err = ei.indexNFTBurnInfo(logsData.TokensSupply, buffers) if err != nil { return err @@ -449,7 +449,7 @@ func (ei *elasticProcessor) prepareAndIndexDelegators(delegators map[string]*dat return ei.logsAndEventsProc.SerializeDelegators(delegators, buffSlice, elasticIndexer.DelegatorsIndex) } -func (ei *elasticProcessor) indexTransactionsWithRefund(txsHashRefund map[string]*data.RefundData, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) indexTransactionsAndOperationsWithRefund(txsHashRefund map[string]*data.RefundData, buffSlice *data.BufferSlice) error { if len(txsHashRefund) == 0 { return nil } @@ -474,7 +474,12 @@ func (ei *elasticProcessor) indexTransactionsWithRefund(txsHashRefund map[string txsFromDB[txRes.ID] = &txRes.Source } - return ei.transactionsProc.SerializeTransactionWithRefund(txsFromDB, txsHashRefund, buffSlice, elasticIndexer.TransactionsIndex) + err = ei.transactionsProc.SerializeTransactionWithRefund(txsFromDB, txsHashRefund, buffSlice, elasticIndexer.TransactionsIndex) + if err != nil { + return err + } + + return ei.transactionsProc.SerializeTransactionWithRefund(txsFromDB, txsHashRefund, buffSlice, elasticIndexer.OperationsIndex) } func (ei *elasticProcessor) prepareAndIndexLogs(logsAndEvents []*coreData.LogData, timestamp uint64, buffSlice *data.BufferSlice) error { diff --git a/process/operations/operationsProcessor.go b/process/operations/operationsProcessor.go index 2e6feb44..c35164a9 100644 --- a/process/operations/operationsProcessor.go +++ b/process/operations/operationsProcessor.go @@ -34,38 +34,41 @@ func (op *operationsProcessor) ProcessTransactionsAndSCRs( txs []*data.Transaction, scrs []*data.ScResult, ) ([]*data.Transaction, []*data.ScResult) { + newTxsSlice := make([]*data.Transaction, 0) + newScrsSlice := make([]*data.ScResult, 0) + for idx, tx := range txs { if !op.shouldIndex(txs[idx].ReceiverShard) { - // remove tx from slice - txs = append(txs[:idx], txs[idx+1:]...) 
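			// (note on the fix here: append(txs[:idx], txs[idx+1:]...) inside a range
			// loop shifts the remaining elements left, so the element right after each
			// removal is skipped on the next iteration, and the caller's input slice is
			// mutated in place; collecting shallow copies of the kept entries into
			// newTxsSlice/newScrsSlice avoids both problems)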
continue } - tx.SmartContractResults = nil - tx.Type = string(transaction.TxTypeNormal) + copiedTx := *tx + copiedTx.SmartContractResults = nil + copiedTx.Type = string(transaction.TxTypeNormal) + newTxsSlice = append(newTxsSlice, &copiedTx) } for idx := 0; idx < len(scrs); idx++ { if !op.shouldIndex(scrs[idx].ReceiverShard) { - // remove scr from slice - scrs = append(scrs[:idx], scrs[idx+1:]...) continue } - scr := scrs[idx] - scr.Type = string(transaction.TxTypeUnsigned) + copiedScr := *scrs[idx] + copiedScr.Type = string(transaction.TxTypeUnsigned) - setCanBeIgnoredField(scr) + setCanBeIgnoredField(&copiedScr) selfShard := op.shardCoordinator.SelfId() - if selfShard == scr.ReceiverShard { - scr.Status = transaction.TxStatusSuccess.String() + if selfShard == copiedScr.ReceiverShard { + copiedScr.Status = transaction.TxStatusSuccess.String() } else { - scr.Status = transaction.TxStatusPending.String() + copiedScr.Status = transaction.TxStatusPending.String() } + + newScrsSlice = append(newScrsSlice, &copiedScr) } - return txs, scrs + return newTxsSlice, newScrsSlice } func (op *operationsProcessor) shouldIndex(destinationShardID uint32) bool { diff --git a/process/transactions/serialize.go b/process/transactions/serialize.go index e77571a5..00ff0c55 100644 --- a/process/transactions/serialize.go +++ b/process/transactions/serialize.go @@ -6,9 +6,11 @@ import ( "math/big" "strings" + elasticIndexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elrond-go-core/core" + "github.com/ElrondNetwork/elrond-go-core/data/transaction" ) // SerializeScResults will serialize the provided smart contract results in a way that Elastic Search expects a bulk request @@ -68,6 +70,11 @@ func (tdp *txsDatabaseProcessor) SerializeTransactionWithRefund( if !ok { continue } + + if index == elasticIndexer.OperationsIndex { + tx.Type = string(transaction.TxTypeNormal) + } + gasUsed, fee := tdp.txFeeCalculator.ComputeGasUsedAndFeeBasedOnRefundValue(tx, refundValueBig) tx.GasUsed = gasUsed tx.Fee = fee.String() From bb027956b42eccc176e7d7124013a86162221061 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 15 Jul 2022 12:17:53 +0300 Subject: [PATCH 37/69] fix gas used txs with informative logs and refund --- data/transaction.go | 1 + integrationtests/claimRewards_test.go | 128 ++++++++++++++++++ .../claimRewards/tx-claim-rewards.json | 21 +++ .../logsevents/informativeLogsProcessor.go | 9 +- .../transactions/scrsDataToTransactions.go | 1 + 5 files changed, 157 insertions(+), 3 deletions(-) create mode 100644 integrationtests/claimRewards_test.go create mode 100644 integrationtests/testdata/claimRewards/tx-claim-rewards.json diff --git a/data/transaction.go b/data/transaction.go index 3ff5fd2e..e26b7fdd 100644 --- a/data/transaction.go +++ b/data/transaction.go @@ -44,6 +44,7 @@ type Transaction struct { ReceiverAddressBytes []byte `json:"-"` Hash string `json:"-"` BlockHash string `json:"-"` + HadRefund bool `json:"-"` } // GetGasLimit will return transaction gas limit diff --git a/integrationtests/claimRewards_test.go b/integrationtests/claimRewards_test.go new file mode 100644 index 00000000..9151605e --- /dev/null +++ b/integrationtests/claimRewards_test.go @@ -0,0 +1,128 @@ +package integrationtests + +import ( + "encoding/hex" + "math/big" + "testing" + + indexerdata "github.com/ElrondNetwork/elastic-indexer-go" + 
"github.com/ElrondNetwork/elastic-indexer-go/mock" + "github.com/ElrondNetwork/elrond-go-core/core" + coreData "github.com/ElrondNetwork/elrond-go-core/data" + dataBlock "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go-core/data/indexer" + "github.com/ElrondNetwork/elrond-go-core/data/smartContractResult" + "github.com/ElrondNetwork/elrond-go-core/data/transaction" + "github.com/stretchr/testify/require" +) + +func TestTransactionWithClaimRewardsGasRefund(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + accounts := &mock.AccountsStub{} + feeComputer := &mock.EconomicsHandlerMock{} + shardCoordinator := &mock.ShardCoordinatorMock{ + SelfID: core.MetachainShardId, + } + + esProc, err := CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + txHash := []byte("claimRewards") + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + } + + scrHash1 := []byte("scrRefundGasReward") + body := &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + Type: dataBlock.TxBlock, + SenderShardID: 0, + ReceiverShardID: core.MetachainShardId, + TxHashes: [][]byte{txHash}, + }, + { + Type: dataBlock.SmartContractResultBlock, + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + TxHashes: [][]byte{scrHash1}, + }, + }, + } + + refundValue, _ := big.NewInt(0).SetString("49320000000000", 10) + scr1 := &smartContractResult.SmartContractResult{ + Nonce: 618, + GasPrice: 1000000000, + SndAddr: []byte("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqq8hlllls7a6h81"), + RcvAddr: []byte("erd13tfnxanefpjltv9kesf6e6f4n4muvkdqrk0we52nelsjw3lf2t5q8l45u1"), + Data: []byte("@6f6b"), + PrevTxHash: txHash, + OriginalTxHash: txHash, + Value: refundValue, + } + + rewards, _ := big.NewInt(0).SetString("2932360285576807", 10) + scrHash2 := []byte("scrRewards") + scr2 := &smartContractResult.SmartContractResult{ + Nonce: 0, + GasPrice: 1000000000, + SndAddr: []byte("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqq8hlllls7a6h81"), + RcvAddr: []byte("erd13tfnxanefpjltv9kesf6e6f4n4muvkdqrk0we52nelsjw3lf2t5q8l45u1"), + PrevTxHash: txHash, + OriginalTxHash: txHash, + Value: rewards, + } + + tx1 := &transaction.Transaction{ + Nonce: 617, + SndAddr: []byte("erd13tfnxanefpjltv9kesf6e6f4n4muvkdqrk0we52nelsjw3lf2t5q8l45u1"), + RcvAddr: []byte("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqq8hlllls7a6h81"), + GasLimit: 6000000, + GasPrice: 1000000000, + Data: []byte("claimRewards"), + Value: big.NewInt(0), + } + + pool := &indexer.Pool{ + Txs: map[string]coreData.TransactionHandler{ + string(txHash): tx1, + }, + Scrs: map[string]coreData.TransactionHandler{ + string(scrHash2): scr2, + string(scrHash1): scr1, + }, + Logs: []*coreData.LogData{ + { + TxHash: string(txHash), + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte("writeLog"), + Topics: [][]byte{[]byte("something")}, + }, + }, + }, + }, + }, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + ids := []string{hex.EncodeToString(txHash)} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/claimRewards/tx-claim-rewards.json"), + string(genericResponse.Docs[0].Source), + ) +} diff --git 
a/integrationtests/testdata/claimRewards/tx-claim-rewards.json b/integrationtests/testdata/claimRewards/tx-claim-rewards.json new file mode 100644 index 00000000..d54773b7 --- /dev/null +++ b/integrationtests/testdata/claimRewards/tx-claim-rewards.json @@ -0,0 +1,21 @@ +{ + "miniBlockHash": "582fecdda564e76162bbb07d797c9ee3780a82fab226f5faced83a6cce2cf5d9", + "nonce": 617, + "round": 50, + "value": "0", + "receiver": "6572643171717171717171717171717171717170717171717171717171717171717171717171717171717171717171717138686c6c6c6c73376136683831", + "sender": "657264313374666e78616e6566706a6c7476396b65736636653666346e346d75766b6471726b30776535326e656c736a77336c6632743571386c34357531", + "receiverShard": 4294967295, + "senderShard": 0, + "gasPrice": 1000000000, + "gasLimit": 6000000, + "gasUsed": 1068000, + "fee": "78000000000000", + "data": "Y2xhaW1SZXdhcmRz", + "signature": "", + "timestamp": 5040, + "status": "success", + "searchOrder": 0, + "hasScResults": true, + "operation": "transfer" +} diff --git a/process/logsevents/informativeLogsProcessor.go b/process/logsevents/informativeLogsProcessor.go index 9d559b6b..6f104db8 100644 --- a/process/logsevents/informativeLogsProcessor.go +++ b/process/logsevents/informativeLogsProcessor.go @@ -44,9 +44,12 @@ func (ilp *informativeLogsProcessor) processEvent(args *argsProcessEvent) argOut switch identifier { case writeLogOperation: { - gasLimit, fee := ilp.txFeeCalculator.ComputeGasUsedAndFeeBasedOnRefundValue(tx, big.NewInt(0)) - tx.GasUsed = gasLimit - tx.Fee = fee.String() + if !tx.HadRefund { + gasLimit, fee := ilp.txFeeCalculator.ComputeGasUsedAndFeeBasedOnRefundValue(tx, big.NewInt(0)) + tx.GasUsed = gasLimit + tx.Fee = fee.String() + } + tx.Status = transaction.TxStatusSuccess.String() } case signalErrorOperation: diff --git a/process/transactions/scrsDataToTransactions.go b/process/transactions/scrsDataToTransactions.go index 4910bc9c..0bb5c8df 100644 --- a/process/transactions/scrsDataToTransactions.go +++ b/process/transactions/scrsDataToTransactions.go @@ -78,6 +78,7 @@ func (st *scrsDataToTransactions) addScResultInfoIntoTx(dbScResult *data.ScResul gasUsed, fee := st.txFeeCalculator.ComputeGasUsedAndFeeBasedOnRefundValue(tx, refundValue) tx.GasUsed = gasUsed tx.Fee = fee.String() + tx.HadRefund = true } return From d9d8ce7ec3ebc3b46d4e6eb6a5029b81639b7bae Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 18 Jul 2022 14:32:31 +0300 Subject: [PATCH 38/69] silent retry in case of conflict code --- factory/indexerFactory.go | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/factory/indexerFactory.go b/factory/indexerFactory.go index cf884a90..012f5f2d 100644 --- a/factory/indexerFactory.go +++ b/factory/indexerFactory.go @@ -2,6 +2,9 @@ package factory import ( "fmt" + "math" + "net/http" + "time" indexer "github.com/ElrondNetwork/elastic-indexer-go" "github.com/ElrondNetwork/elastic-indexer-go/client" @@ -11,9 +14,12 @@ import ( "github.com/ElrondNetwork/elrond-go-core/core/check" "github.com/ElrondNetwork/elrond-go-core/hashing" "github.com/ElrondNetwork/elrond-go-core/marshal" + logger "github.com/ElrondNetwork/elrond-go-logger" "github.com/elastic/go-elasticsearch/v7" ) +var log = logger.GetOrCreate("indexer/factory") + // ArgsIndexerFactory holds all dependencies required by the data indexer factory in order to create // new instances type ArgsIndexerFactory struct { @@ -72,10 +78,16 @@ func NewIndexer(args *ArgsIndexerFactory) 
(indexer.Indexer, error) { func createElasticProcessor(args *ArgsIndexerFactory) (indexer.ElasticProcessor, error) { databaseClient, err := client.NewElasticClient(elasticsearch.Config{ - Addresses: []string{args.Url}, - Username: args.UserName, - Password: args.Password, - Logger: &logging.CustomLogger{}, + Addresses: []string{args.Url}, + Username: args.UserName, + Password: args.Password, + Logger: &logging.CustomLogger{}, + RetryOnStatus: []int{http.StatusConflict}, + RetryBackoff: func(attempt int) time.Duration { + d := time.Duration(math.Exp2(float64(attempt))) * time.Second + log.Debug("elastic: retry backoff", "attempt", attempt, "sleep duration", d) + return d + }, }) if err != nil { return nil, err From a15f64e834c7afed0b231b08016eaf098d863b5b Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 19 Jul 2022 13:59:29 +0300 Subject: [PATCH 39/69] fixes after review --- factory/indexerFactory.go | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/factory/indexerFactory.go b/factory/indexerFactory.go index 012f5f2d..503e8777 100644 --- a/factory/indexerFactory.go +++ b/factory/indexerFactory.go @@ -76,6 +76,13 @@ func NewIndexer(args *ArgsIndexerFactory) (indexer.Indexer, error) { return indexer.NewDataIndexer(arguments) } +func retryBackOff(attempt int) time.Duration { + d := time.Duration(math.Exp2(float64(attempt))) * time.Second + log.Debug("elastic: retry backoff", "attempt", attempt, "sleep duration", d) + + return d +} + func createElasticProcessor(args *ArgsIndexerFactory) (indexer.ElasticProcessor, error) { databaseClient, err := client.NewElasticClient(elasticsearch.Config{ Addresses: []string{args.Url}, @@ -83,11 +90,7 @@ func createElasticProcessor(args *ArgsIndexerFactory) (indexer.ElasticProcessor, Password: args.Password, Logger: &logging.CustomLogger{}, RetryOnStatus: []int{http.StatusConflict}, - RetryBackoff: func(attempt int) time.Duration { - d := time.Duration(math.Exp2(float64(attempt))) * time.Second - log.Debug("elastic: retry backoff", "attempt", attempt, "sleep duration", d) - return d - }, + RetryBackoff: retryBackOff, }) if err != nil { return nil, err From 7c4e811300d574811b90eb588956ecd612701f8a Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 20 Jul 2022 14:25:59 +0300 Subject: [PATCH 40/69] extra field block structure --- data/block.go | 75 ++++++++++++++++----------- integrationtests/claimRewards_test.go | 2 + process/block/blockProcessor.go | 41 +++++++++++++++ process/block/blockProcessor_test.go | 47 +++++++++++++++-- process/miniblocks/serialize.go | 7 ++- process/miniblocks/serialize_test.go | 8 +-- 6 files changed, 141 insertions(+), 39 deletions(-) diff --git a/data/block.go b/data/block.go index cbc8c7e3..ef8e33a4 100644 --- a/data/block.go +++ b/data/block.go @@ -8,33 +8,34 @@ import ( // to be saved for a block. 
It has all the default fields // plus some extra information for ease of search and filter type Block struct { - Nonce uint64 `json:"nonce"` - Round uint64 `json:"round"` - Epoch uint32 `json:"epoch"` - Hash string `json:"-"` - MiniBlocksHashes []string `json:"miniBlocksHashes"` - NotarizedBlocksHashes []string `json:"notarizedBlocksHashes"` - Proposer uint64 `json:"proposer"` - Validators []uint64 `json:"validators"` - PubKeyBitmap string `json:"pubKeyBitmap"` - Size int64 `json:"size"` - SizeTxs int64 `json:"sizeTxs"` - Timestamp time.Duration `json:"timestamp"` - StateRootHash string `json:"stateRootHash"` - PrevHash string `json:"prevHash"` - ShardID uint32 `json:"shardId"` - TxCount uint32 `json:"txCount"` - NotarizedTxsCount uint32 `json:"notarizedTxsCount"` - AccumulatedFees string `json:"accumulatedFees"` - DeveloperFees string `json:"developerFees"` - EpochStartBlock bool `json:"epochStartBlock"` - SearchOrder uint64 `json:"searchOrder"` - EpochStartInfo *EpochStartInfo `json:"epochStartInfo,omitempty"` - GasProvided uint64 `json:"gasProvided"` - GasRefunded uint64 `json:"gasRefunded"` - GasPenalized uint64 `json:"gasPenalized"` - MaxGasLimit uint64 `json:"maxGasLimit"` - ScheduledData *ScheduledData `json:"scheduledData,omitempty"` + Nonce uint64 `json:"nonce"` + Round uint64 `json:"round"` + Epoch uint32 `json:"epoch"` + Hash string `json:"-"` + MiniBlocksHashes []string `json:"miniBlocksHashes"` + NotarizedBlocksHashes []string `json:"notarizedBlocksHashes"` + Proposer uint64 `json:"proposer"` + Validators []uint64 `json:"validators"` + PubKeyBitmap string `json:"pubKeyBitmap"` + Size int64 `json:"size"` + SizeTxs int64 `json:"sizeTxs"` + Timestamp time.Duration `json:"timestamp"` + StateRootHash string `json:"stateRootHash"` + PrevHash string `json:"prevHash"` + ShardID uint32 `json:"shardId"` + TxCount uint32 `json:"txCount"` + NotarizedTxsCount uint32 `json:"notarizedTxsCount"` + AccumulatedFees string `json:"accumulatedFees"` + DeveloperFees string `json:"developerFees"` + EpochStartBlock bool `json:"epochStartBlock"` + SearchOrder uint64 `json:"searchOrder"` + EpochStartInfo *EpochStartInfo `json:"epochStartInfo,omitempty"` + GasProvided uint64 `json:"gasProvided"` + GasRefunded uint64 `json:"gasRefunded"` + GasPenalized uint64 `json:"gasPenalized"` + MaxGasLimit uint64 `json:"maxGasLimit"` + ScheduledData *ScheduledData `json:"scheduledData,omitempty"` + EpochStartShardsData []*EpochStartShardData `json:"epochStartShardsData,omitempty"` } // ScheduledData is a structure that hold information about scheduled events @@ -59,13 +60,27 @@ type EpochStartInfo struct { PrevEpochStartHash string `json:"prevEpochStartHash"` } +// EpochStartShardData is a structure that hold information about epoch start meta block shard data +type EpochStartShardData struct { + ShardID uint32 `json:"shardID,omitempty"` + Epoch uint32 `json:"epoch,omitempty"` + Round uint64 `json:"round,omitempty"` + Nonce uint64 `json:"nonce,omitempty"` + HeaderHash string `json:"headerHash,omitempty"` + RootHash string `json:"rootHash,omitempty"` + ScheduledRootHash string `json:"scheduledRootHash,omitempty"` + FirstPendingMetaBlock string `json:"firstPendingMetaBlock,omitempty"` + LastFinishedMetaBlock string `json:"lastFinishedMetaBlock,omitempty"` + PendingMiniBlockHeaders []*Miniblock `json:"pendingMiniBlockHeaders,omitempty"` +} + // Miniblock is a structure containing miniblock information type Miniblock struct { - Hash string `json:"-"` + Hash string `json:"hash,omitempty"` SenderShardID uint32 
`json:"senderShard"` ReceiverShardID uint32 `json:"receiverShard"` - SenderBlockHash string `json:"senderBlockHash"` - ReceiverBlockHash string `json:"receiverBlockHash"` + SenderBlockHash string `json:"senderBlockHash,omitempty"` + ReceiverBlockHash string `json:"receiverBlockHash,omitempty"` Type string `json:"type"` ProcessingTypeOnSource string `json:"procTypeS,omitempty"` ProcessingTypeOnDestination string `json:"procTypeD,omitempty"` diff --git a/integrationtests/claimRewards_test.go b/integrationtests/claimRewards_test.go index 9151605e..bcfcd881 100644 --- a/integrationtests/claimRewards_test.go +++ b/integrationtests/claimRewards_test.go @@ -1,3 +1,5 @@ +//go:build integrationtests + package integrationtests import ( diff --git a/process/block/blockProcessor.go b/process/block/blockProcessor.go index b7464073..60ccfc7f 100644 --- a/process/block/blockProcessor.go +++ b/process/block/blockProcessor.go @@ -161,6 +161,47 @@ func (bp *blockProcessor) addEpochStartInfoForMeta(header coreData.HeaderHandler PrevEpochStartRound: metaHeaderEconomics.PrevEpochStartRound, PrevEpochStartHash: hex.EncodeToString(metaHeaderEconomics.PrevEpochStartHash), } + if len(metaHeader.EpochStart.LastFinalizedHeaders) == 0 { + return + } + + epochStartShardsData := metaHeader.EpochStart.LastFinalizedHeaders + block.EpochStartShardsData = make([]*data.EpochStartShardData, 0, len(metaHeader.EpochStart.LastFinalizedHeaders)) + for _, epochStartShardData := range epochStartShardsData { + bp.addEpochStartShardDataForMeta(epochStartShardData, block) + } +} + +func (bp *blockProcessor) addEpochStartShardDataForMeta(epochStartShardData nodeBlock.EpochStartShardData, block *data.Block) { + shardData := &data.EpochStartShardData{ + ShardID: epochStartShardData.ShardID, + Epoch: epochStartShardData.Epoch, + Round: epochStartShardData.Round, + Nonce: epochStartShardData.Nonce, + HeaderHash: hex.EncodeToString(epochStartShardData.HeaderHash), + RootHash: hex.EncodeToString(epochStartShardData.RootHash), + ScheduledRootHash: hex.EncodeToString(epochStartShardData.ScheduledRootHash), + FirstPendingMetaBlock: hex.EncodeToString(epochStartShardData.FirstPendingMetaBlock), + LastFinishedMetaBlock: hex.EncodeToString(epochStartShardData.LastFinishedMetaBlock), + } + + if len(epochStartShardData.PendingMiniBlockHeaders) == 0 { + block.EpochStartShardsData = append(block.EpochStartShardsData, shardData) + return + } + + shardData.PendingMiniBlockHeaders = make([]*data.Miniblock, 0, len(epochStartShardData.PendingMiniBlockHeaders)) + for _, pendingMb := range epochStartShardData.PendingMiniBlockHeaders { + shardData.PendingMiniBlockHeaders = append(shardData.PendingMiniBlockHeaders, &data.Miniblock{ + Hash: hex.EncodeToString(pendingMb.Hash), + SenderShardID: pendingMb.SenderShardID, + ReceiverShardID: pendingMb.ReceiverShardID, + Type: pendingMb.Type.String(), + Reserved: pendingMb.Reserved, + }) + } + + block.EpochStartShardsData = append(block.EpochStartShardsData, shardData) } func (bp *blockProcessor) getEncodedMBSHashes(body *block.Body) []string { diff --git a/process/block/blockProcessor_test.go b/process/block/blockProcessor_test.go index 2e93b07a..21d9e271 100644 --- a/process/block/blockProcessor_test.go +++ b/process/block/blockProcessor_test.go @@ -164,7 +164,26 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { dbBlock, err := bp.PrepareBlockForDB(&dataBlock.MetaBlock{ TxCount: 1000, EpochStart: dataBlock.EpochStart{ - LastFinalizedHeaders: []dataBlock.EpochStartShardData{{}}, + 
LastFinalizedHeaders: []dataBlock.EpochStartShardData{{ + ShardID: 1, + Nonce: 1234, + Round: 1500, + Epoch: 10, + HeaderHash: []byte("hh"), + RootHash: []byte("rh"), + ScheduledRootHash: []byte("sch"), + FirstPendingMetaBlock: []byte("fpmb"), + LastFinishedMetaBlock: []byte("lfmb"), + PendingMiniBlockHeaders: []dataBlock.MiniBlockHeader{ + { + Hash: []byte("mbh"), + SenderShardID: 0, + ReceiverShardID: 1, + Type: dataBlock.TxBlock, + Reserved: []byte("rrr"), + }, + }, + }}, Economics: dataBlock.Economics{ TotalSupply: big.NewInt(100), TotalToDistribute: big.NewInt(55), @@ -190,13 +209,13 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { Nonce: 0, Round: 0, Epoch: 0, - Hash: "ae3fe1896d1ecc5fa685a8042b7410378c4ea8451b451f8ade319d7c0b7976e6", + Hash: "a6d891a7692e19f97ad2993b7804708995d2d9deb008692d1e43084a79d04da5", MiniBlocksHashes: []string{}, NotarizedBlocksHashes: nil, Proposer: 0, Validators: nil, PubKeyBitmap: "", - Size: 388, + Size: 623, SizeTxs: 0, Timestamp: 0, StateRootHash: "", @@ -214,6 +233,28 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { PrevEpochStartRound: 222, PrevEpochStartHash: "7072657645706f6368", }, + EpochStartShardsData: []*data.EpochStartShardData{ + { + ShardID: 1, + Epoch: 10, + Round: 1500, + Nonce: 1234, + HeaderHash: "6868", + RootHash: "7268", + ScheduledRootHash: "736368", + FirstPendingMetaBlock: "66706d62", + LastFinishedMetaBlock: "6c666d62", + PendingMiniBlockHeaders: []*data.Miniblock{ + { + Hash: "6d6268", + SenderShardID: 0, + ReceiverShardID: 1, + Type: "TxBlock", + Reserved: []byte("rrr"), + }, + }, + }, + }, NotarizedTxsCount: 830, TxCount: 170, AccumulatedFees: "0", diff --git a/process/miniblocks/serialize.go b/process/miniblocks/serialize.go index 9b52ec94..0a436a8a 100644 --- a/process/miniblocks/serialize.go +++ b/process/miniblocks/serialize.go @@ -32,15 +32,18 @@ func (mp *miniblocksProcessor) SerializeBulkMiniBlocks( } func (mp *miniblocksProcessor) prepareMiniblockData(miniblockDB *data.Miniblock, isInDB bool, index string) ([]byte, []byte, error) { + mbHash := miniblockDB.Hash + miniblockDB.Hash = "" + if !isInDB { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s"} }%s`, index, converters.JsonEscape(miniblockDB.Hash), "\n")) + meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s"} }%s`, index, converters.JsonEscape(mbHash), "\n")) serializedData, err := json.Marshal(miniblockDB) return meta, serializedData, err } // prepare data for update operation - meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(miniblockDB.Hash), "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(mbHash), "\n")) if mp.selfShardID == miniblockDB.SenderShardID && miniblockDB.ProcessingTypeOnDestination != block.Processed.String() { // prepare for update sender block hash serializedData := []byte(fmt.Sprintf(`{ "doc" : { "senderBlockHash" : "%s", "procTypeS": "%s" } }`, converters.JsonEscape(miniblockDB.SenderBlockHash), converters.JsonEscape(miniblockDB.ProcessingTypeOnSource))) diff --git a/process/miniblocks/serialize_test.go b/process/miniblocks/serialize_test.go index 3d5d0369..a98b6134 100644 --- a/process/miniblocks/serialize_test.go +++ b/process/miniblocks/serialize_test.go @@ -23,9 +23,9 @@ func TestMiniblocksProcessor_SerializeBulkMiniBlocks(t *testing.T) { mp.SerializeBulkMiniBlocks(miniblocks, nil, buffSlice, "miniblocks") expectedBuff := `{ 
"index" : { "_index":"miniblocks", "_id" : "h1"} } -{"senderShard":0,"receiverShard":1,"senderBlockHash":"","receiverBlockHash":"","type":"","timestamp":0} +{"senderShard":0,"receiverShard":1,"type":"","timestamp":0} { "index" : { "_index":"miniblocks", "_id" : "h2"} } -{"senderShard":0,"receiverShard":2,"senderBlockHash":"","receiverBlockHash":"","type":"","timestamp":0} +{"senderShard":0,"receiverShard":2,"type":"","timestamp":0} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } @@ -48,7 +48,7 @@ func TestMiniblocksProcessor_SerializeBulkMiniBlocksInDB(t *testing.T) { expectedBuff := `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } { "doc" : { "senderBlockHash" : "", "procTypeS": "" } } { "index" : { "_index":"miniblocks", "_id" : "h2"} } -{"senderShard":0,"receiverShard":2,"senderBlockHash":"","receiverBlockHash":"","type":"","timestamp":0} +{"senderShard":0,"receiverShard":2,"type":"","timestamp":0} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } @@ -85,7 +85,7 @@ func TestSerializeMiniblock_IntraShardScheduled(t *testing.T) { }, buffSlice, "miniblocks") expectedBuff := `{ "index" : { "_index":"miniblocks", "_id" : "h1"} } -{"senderShard":1,"receiverShard":1,"senderBlockHash":"senderBlock","receiverBlockHash":"","type":"","procTypeS":"Scheduled","timestamp":0} +{"senderShard":1,"receiverShard":1,"senderBlockHash":"senderBlock","type":"","procTypeS":"Scheduled","timestamp":0} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) From 2984885f4f8c5b35c7da94d4f68c2cc22662ce7c Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 20 Jul 2022 14:33:55 +0300 Subject: [PATCH 41/69] upgrade version integration tests --- .github/workflows/pr-integration-tests.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pr-integration-tests.yml b/.github/workflows/pr-integration-tests.yml index c2a60703..242184b0 100644 --- a/.github/workflows/pr-integration-tests.yml +++ b/.github/workflows/pr-integration-tests.yml @@ -41,8 +41,8 @@ jobs: - name: Get dependencies run: | go get -v -t -d ./... - - name: Run integration tests with Elasticsearch `v8.1.1` - run: make integration-tests ES_VERSION=8.1.1 + - name: Run integration tests with Elasticsearch `v8.3.2` + run: make integration-tests ES_VERSION=8.3.2 test-3: name: OpenSearch v1.2.4 runs-on: ubuntu-latest @@ -78,5 +78,5 @@ jobs: - name: Get dependencies run: | go get -v -t -d ./... 
- - name: Run integration tests with OpenSearch `v1.3.2` - run: make integration-tests-open-search OPEN_VERSION=1.3.2 + - name: Run integration tests with OpenSearch `v2.1.0` + run: make integration-tests-open-search OPEN_VERSION=2.1.0 From 7326448834bf8ae4c7c735716e075e8e933926c5 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 20 Jul 2022 14:35:32 +0300 Subject: [PATCH 42/69] small fix --- .github/workflows/pr-integration-tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr-integration-tests.yml b/.github/workflows/pr-integration-tests.yml index 242184b0..361b534f 100644 --- a/.github/workflows/pr-integration-tests.yml +++ b/.github/workflows/pr-integration-tests.yml @@ -26,7 +26,7 @@ jobs: - name: Run integration tests with Elasticsearch `v7.16.2` run: make integration-tests ES_VERSION=7.16.2 test-2: - name: Elasticsearch v8.1.1 + name: Elasticsearch v8.3.2 runs-on: ubuntu-latest steps: - name: Set up Go 1.x @@ -63,7 +63,7 @@ jobs: run: make integration-tests-open-search OPEN_VERSION=1.2.4 test-4: - name: OpenSearch v1.3.2 + name: OpenSearch v2.1.0 runs-on: ubuntu-latest steps: - name: Set up Go 1.x From 1aeab7ce6be83cfb255f3af5db1515916bf45b1d Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 12:16:31 +0300 Subject: [PATCH 43/69] shard id in accounts and accountsesdt index --- data/account.go | 8 +-- process/accounts/accountsProcessor.go | 8 ++- process/accounts/accountsProcessor_test.go | 62 +++++++++++----------- process/accounts/serialize_test.go | 10 ++-- process/factory/elasticProcessorFactory.go | 1 + 5 files changed, 49 insertions(+), 40 deletions(-) diff --git a/data/account.go b/data/account.go index 351caafe..059d7c6f 100644 --- a/data/account.go +++ b/data/account.go @@ -16,15 +16,16 @@ type AccountInfo struct { TokenIdentifier string `json:"identifier,omitempty"` TokenNonce uint64 `json:"tokenNonce,omitempty"` Properties string `json:"properties,omitempty"` - IsSender bool `json:"-"` - IsSmartContract bool `json:"-"` - IsNFTCreate bool `json:"-"` TotalBalanceWithStake string `json:"totalBalanceWithStake,omitempty"` TotalBalanceWithStakeNum float64 `json:"totalBalanceWithStakeNum,omitempty"` Data *TokenMetaData `json:"data,omitempty"` Timestamp time.Duration `json:"timestamp,omitempty"` Type string `json:"type,omitempty"` CurrentOwner string `json:"currentOwner,omitempty"` + ShardID uint32 `json:"shardID"` + IsSender bool `json:"-"` + IsSmartContract bool `json:"-"` + IsNFTCreate bool `json:"-"` } // TokenMetaData holds data about a token metadata @@ -51,6 +52,7 @@ type AccountBalanceHistory struct { TokenNonce uint64 `json:"tokenNonce,omitempty"` IsSender bool `json:"isSender,omitempty"` IsSmartContract bool `json:"isSmartContract,omitempty"` + ShardID uint32 `json:"shardID"` } // Account is a structure that is needed for regular accounts diff --git a/process/accounts/accountsProcessor.go b/process/accounts/accountsProcessor.go index eaf99fd1..eb80e814 100644 --- a/process/accounts/accountsProcessor.go +++ b/process/accounts/accountsProcessor.go @@ -20,12 +20,13 @@ import ( var log = logger.GetOrCreate("indexer/process/accounts") -// accountsProcessor a is structure responsible for processing accounts +// accountsProcessor is structure responsible for processing accounts type accountsProcessor struct { internalMarshalizer marshal.Marshalizer addressPubkeyConverter core.PubkeyConverter accountsDB indexer.AccountsAdapter balanceConverter indexer.BalanceConverter + shardID uint32 } // NewAccountsProcessor will 
create a new instance of accounts processor @@ -34,6 +35,7 @@ func NewAccountsProcessor( addressPubkeyConverter core.PubkeyConverter, accountsDB indexer.AccountsAdapter, balanceConverter indexer.BalanceConverter, + shardID uint32, ) (*accountsProcessor, error) { if check.IfNil(marshalizer) { return nil, indexer.ErrNilMarshalizer @@ -53,6 +55,7 @@ func NewAccountsProcessor( addressPubkeyConverter: addressPubkeyConverter, accountsDB: accountsDB, balanceConverter: balanceConverter, + shardID: shardID, }, nil } @@ -156,6 +159,7 @@ func (ap *accountsProcessor) PrepareRegularAccountsMap(timestamp uint64, account TotalBalanceWithStake: converters.BigIntToString(balance), TotalBalanceWithStakeNum: balanceAsFloat, Timestamp: time.Duration(timestamp), + ShardID: ap.shardID, } accountsMap[address] = acc @@ -199,6 +203,7 @@ func (ap *accountsProcessor) PrepareAccountsMapESDT( IsSmartContract: core.IsSmartContractAddress(accountESDT.Account.AddressBytes()), Data: tokenMetaData, Timestamp: time.Duration(timestamp), + ShardID: ap.shardID, } if acc.TokenNonce == 0 { @@ -237,6 +242,7 @@ func (ap *accountsProcessor) PrepareAccountsHistory( IsSender: userAccount.IsSender, IsSmartContract: userAccount.IsSmartContract, Identifier: converters.ComputeTokenIdentifier(userAccount.TokenName, userAccount.TokenNonce), + ShardID: ap.shardID, } keyInMap := fmt.Sprintf("%s-%s-%d", acc.Address, acc.Token, acc.TokenNonce) accountsMap[keyInMap] = acc diff --git a/process/accounts/accountsProcessor_test.go b/process/accounts/accountsProcessor_test.go index ffc9eb18..8306f3dd 100644 --- a/process/accounts/accountsProcessor_test.go +++ b/process/accounts/accountsProcessor_test.go @@ -28,41 +28,41 @@ func TestNewAccountsProcessor(t *testing.T) { tests := []struct { name string - argsFunc func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) + argsFunc func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) exError error }{ { name: "NilBalanceConverter", - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { - return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, nil + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { + return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, nil, 0 }, exError: indexer.ErrNilBalanceConverter, }, { name: "NilMarshalizer", - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { - return nil, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { + return nil, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter, 0 }, exError: indexer.ErrNilMarshalizer, }, { name: "NilPubKeyConverter", - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { - return &mock.MarshalizerMock{}, nil, &mock.AccountsStub{}, balanceConverter + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { + return &mock.MarshalizerMock{}, nil, &mock.AccountsStub{}, balanceConverter, 0 }, exError: indexer.ErrNilPubkeyConverter, }, { name: "NilAccounts", - argsFunc: func() (marshal.Marshalizer, 
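			// (every argsFunc in this test table now returns an extra trailing uint32:
			// the shard ID added as the fifth parameter of NewAccountsProcessor; the
			// tests simply pass 0 throughout)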
core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { - return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, nil, balanceConverter + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { + return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, nil, balanceConverter, 0 }, exError: indexer.ErrNilAccountsDB, }, { name: "ShouldWork", - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { - return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { + return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter, 0 }, exError: nil, }, @@ -79,7 +79,7 @@ func TestNewAccountsProcessor(t *testing.T) { func TestAccountsProcessor_GetAccountsWithNil(t *testing.T) { t.Parallel() - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) regularAccounts, esdtAccounts := ap.GetAccounts(nil) require.Len(t, regularAccounts, 0) @@ -89,7 +89,7 @@ func TestAccountsProcessor_GetAccountsWithNil(t *testing.T) { func TestAccountsProcessor_PrepareRegularAccountsMapWithNil(t *testing.T) { t.Parallel() - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) accountsInfo := ap.PrepareRegularAccountsMap(0, nil) require.Len(t, accountsInfo, 0) @@ -98,7 +98,7 @@ func TestAccountsProcessor_PrepareRegularAccountsMapWithNil(t *testing.T) { func TestGetESDTInfo_CannotRetriveValueShoudError(t *testing.T) { t.Parallel() - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) require.NotNil(t, ap) localErr := errors.New("local error") @@ -117,7 +117,7 @@ func TestGetESDTInfo_CannotRetriveValueShoudError(t *testing.T) { func TestGetESDTInfo(t *testing.T) { t.Parallel() - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) require.NotNil(t, ap) esdtToken := &esdt.ESDigitalToken{ @@ -143,7 +143,7 @@ func TestGetESDTInfo(t *testing.T) { func TestGetESDTInfoNFT(t *testing.T) { t.Parallel() - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) require.NotNil(t, ap) esdtToken := &esdt.ESDigitalToken{ @@ -174,7 +174,7 @@ func TestGetESDTInfoNFTWithMetaData(t *testing.T) { t.Parallel() pubKeyConverter := mock.NewPubkeyConverterMock(32) - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, pubKeyConverter, &mock.AccountsStub{}, balanceConverter) + ap, _ := 
NewAccountsProcessor(&mock.MarshalizerMock{}, pubKeyConverter, &mock.AccountsStub{}, balanceConverter, 0) require.NotNil(t, ap) nftName := "Test-nft" @@ -223,7 +223,7 @@ func TestAccountsProcessor_GetAccountsEGLDAccounts(t *testing.T) { return mockAccount, nil }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter, 0) require.NotNil(t, ap) alteredAccounts := data.NewAlteredAccounts() @@ -253,7 +253,7 @@ func TestAccountsProcessor_GetAccountsESDTAccount(t *testing.T) { return mockAccount, nil }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter, 0) require.NotNil(t, ap) alteredAccounts := data.NewAlteredAccounts() @@ -282,7 +282,7 @@ func TestAccountsProcessor_GetAccountsESDTAccountNewAccountShouldBeInRegularAcco return mockAccount, nil }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter, 0) require.NotNil(t, ap) alteredAccounts := data.NewAlteredAccounts() @@ -327,7 +327,7 @@ func TestAccountsProcessor_PrepareAccountsMapEGLD(t *testing.T) { return mockAccount, nil }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter, 0) require.NotNil(t, ap) res := ap.PrepareRegularAccountsMap(123, []*data.Account{egldAccount}) @@ -370,7 +370,7 @@ func TestAccountsProcessor_PrepareAccountsMapESDT(t *testing.T) { return mockAccount, nil }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), accountsStub, balanceConverter, 0) require.NotNil(t, ap) accountsESDT := []*data.AccountESDT{ @@ -424,7 +424,7 @@ func TestAccountsProcessor_PrepareAccountsHistory(t *testing.T) { }, } - ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter) + ap, _ := NewAccountsProcessor(&mock.MarshalizerMock{}, mock.NewPubkeyConverterMock(32), &mock.AccountsStub{}, balanceConverter, 0) res := ap.PrepareAccountsHistory(100, accounts) accountBalanceHistory := res["addr1-token-112-10"] @@ -445,40 +445,40 @@ func TestAccountsProcessor_GetUserAccountErrors(t *testing.T) { localErr := errors.New("local error") tests := []struct { name string - argsFunc func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) + argsFunc func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) inputAddress string exError error }{ { name: "InvalidAddress", exError: localErr, - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { return &mock.MarshalizerMock{}, &mock.PubkeyConverterStub{ 
DecodeCalled: func(humanReadable string) ([]byte, error) { return nil, localErr - }}, &mock.AccountsStub{}, balanceConverter + }}, &mock.AccountsStub{}, balanceConverter, 0 }, }, { name: "CannotLoadAccount", exError: localErr, - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{ LoadAccountCalled: func(container []byte) (vmcommon.AccountHandler, error) { return nil, localErr }, - }, balanceConverter + }, balanceConverter, 0 }, }, { name: "CannotCastAccount", exError: indexer.ErrCannotCastAccountHandlerToUserAccount, - argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter) { + argsFunc: func() (marshal.Marshalizer, core.PubkeyConverter, indexer.AccountsAdapter, indexer.BalanceConverter, uint32) { return &mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{ LoadAccountCalled: func(container []byte) (vmcommon.AccountHandler, error) { return nil, nil }, - }, balanceConverter + }, balanceConverter, 0 }, }, } @@ -512,7 +512,7 @@ func TestGetESDTInfoNFTAndMetadataFromSystemAccount(t *testing.T) { }, }, nil }, - }, balanceConverter) + }, balanceConverter, 0) require.NotNil(t, ap) tokenIdentifier := "token-001" diff --git a/process/accounts/serialize_test.go b/process/accounts/serialize_test.go index 6ecc5644..1d867187 100644 --- a/process/accounts/serialize_test.go +++ b/process/accounts/serialize_test.go @@ -56,7 +56,7 @@ func TestSerializeAccounts(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accounts", "_id" : "addr1" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1,"shardID":0} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -83,7 +83,7 @@ func TestSerializeAccountsESDTNonceZero(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-abcd-00" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123} }},"upsert": {}} +{"scripted_upsert": true, "script": 
{"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-abcd","properties":"000","timestamp":123,"shardID":0} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -109,7 +109,7 @@ func TestSerializeAccountsESDT(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-05" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000"} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","tokenNonce":5,"properties":"000","shardID":0} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -149,7 +149,7 @@ func TestSerializeAccountsNFTWithMedaData(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accountsesdt", "_id" : "addr1-token-0001-16" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false}} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"10000000000000","balanceNum":1,"token":"token-0001","identifier":"token-0001-5","tokenNonce":22,"properties":"000","data":{"name":"nft","creator":"010101","royalties":1,"hash":"aGFzaA==","uris":["dXJp"],"tags":["test","free","fun"],"attributes":"dGFnczp0ZXN0LGZyZWUsZnVuO2Rlc2NyaXB0aW9uOlRoaXMgaXMgYSB0ZXN0IGRlc2NyaXB0aW9uIGZvciBhbiBhd2Vzb21lIG5mdA==","metadata":"metadata-test","nonEmptyURIs":true,"whiteListedStorage":false},"shardID":0} }},"upsert": {}} ` 
require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } @@ -199,7 +199,7 @@ func TestSerializeAccountsHistory(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "index" : { "_index":"accountshistory", "_id" : "account1-token-0001-00-10" } } -{"address":"account1","timestamp":10,"balance":"123","token":"token-0001","isSender":true,"isSmartContract":true} +{"address":"account1","timestamp":10,"balance":"123","token":"token-0001","isSender":true,"isSmartContract":true,"shardID":0} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/factory/elasticProcessorFactory.go b/process/factory/elasticProcessorFactory.go index 794f21cc..e39041ed 100644 --- a/process/factory/elasticProcessorFactory.go +++ b/process/factory/elasticProcessorFactory.go @@ -61,6 +61,7 @@ func CreateElasticProcessor(arguments ArgElasticProcessorFactory) (indexer.Elast arguments.AddressPubkeyConverter, arguments.AccountsDB, balanceConverter, + arguments.ShardCoordinator.SelfId(), ) if err != nil { return nil, err From 4734751c77400d288f34ec72da304062a23fef3d Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 12:22:15 +0300 Subject: [PATCH 44/69] delete rewards txs in case of metachain observer and rollback --- process/transactions/transactionsProcessor.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index e3cb786a..840fb7ec 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -181,7 +181,8 @@ func (tdp *txsDatabaseProcessor) GetRewardsTxsHashesHexEncoded(header coreData.H } isDstMe := selfShardID == miniblock.ReceiverShardID - if isDstMe { + notMeta := header.GetShardID() != core.MetachainShardId + if isDstMe && notMeta { // reward miniblock is always cross-shard continue } From 26a5708d2686255620b06f1077d21eab4be3c9f1 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 12:22:50 +0300 Subject: [PATCH 45/69] remove comment --- process/transactions/transactionsProcessor.go | 1 - 1 file changed, 1 deletion(-) diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 840fb7ec..f65de5a1 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -183,7 +183,6 @@ func (tdp *txsDatabaseProcessor) GetRewardsTxsHashesHexEncoded(header coreData.H isDstMe := selfShardID == miniblock.ReceiverShardID notMeta := header.GetShardID() != core.MetachainShardId if isDstMe && notMeta { - // reward miniblock is always cross-shard continue } From 2844dab23bab1dcab0b76a9df73aef2305235992 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 14:56:19 +0300 Subject: [PATCH 46/69] remove also accounts esdt in case of rollback --- client/elasticClient.go | 20 +++++-------------- client/queries.go | 10 ---------- converters/json.go | 15 ++++++++++++++ converters/json_test.go | 19 ++++++++++++++++++ process/elasticProcessor.go | 34 ++++++++++++++++++++++++++++---- process/interface.go | 2 +- workItems/interface.go | 1 + workItems/workItemRemoveBlock.go | 2 +- 8 files changed, 72 insertions(+), 31 deletions(-) diff --git a/client/elasticClient.go b/client/elasticClient.go index 62585665..c34b8729 100644 --- a/client/elasticClient.go +++ b/client/elasticClient.go @@ -15,8 +15,6 @@ import ( "github.com/elastic/go-elasticsearch/v7/esapi" ) -// TODO add more 
unit tests - const ( errPolicyAlreadyExists = "document already exists" ) @@ -152,31 +150,23 @@ func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, return nil } -// DoBulkRemove will do a bulk remove to elasticsearch server -func (ec *elasticClient) DoBulkRemove(index string, hashes []string) error { - obj := prepareHashesForBulkRemove(hashes) - body, err := encode(obj) - if err != nil { - return err - } - +// DoQueryRemove will do a query remove to elasticsearch server +func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { res, err := ec.es.DeleteByQuery( []string{index}, - &body, + body, ec.es.DeleteByQuery.WithIgnoreUnavailable(true), ) if err != nil { - log.Warn("elasticClient.DoBulkRemove", - "cannot do bulk remove", err.Error()) + log.Warn("elasticClient.DoQueryRemove", "cannot do query remove", err.Error()) return err } var decodedBody objectsMap err = parseResponse(res, &decodedBody, elasticDefaultErrorResponseHandler) if err != nil { - log.Warn("elasticClient.DoBulkRemove", - "error parsing response", err.Error()) + log.Warn("elasticClient.DoQueryRemove", "error parsing response", err.Error()) return err } diff --git a/client/queries.go b/client/queries.go index 2451b765..386b9cf3 100644 --- a/client/queries.go +++ b/client/queries.go @@ -28,13 +28,3 @@ func getDocumentsByIDsQuery(hashes []string, withSource bool) objectsMap { "docs": interfaceSlice, } } - -func prepareHashesForBulkRemove(hashes []string) objectsMap { - return objectsMap{ - "query": objectsMap{ - "ids": objectsMap{ - "values": hashes, - }, - }, - } -} diff --git a/converters/json.go b/converters/json.go index 67ba105e..a6c7cbbc 100644 --- a/converters/json.go +++ b/converters/json.go @@ -1,7 +1,9 @@ package converters import ( + "bytes" "encoding/json" + "fmt" logger "github.com/ElrondNetwork/elrond-go-logger" ) @@ -24,3 +26,16 @@ func JsonEscape(i string) string { // Trim the beginning and trailing " character return string(b[1 : len(b)-1]) } + +// PrepareHashesForQueryRemove will prepare the provided hashes for query remove +func PrepareHashesForQueryRemove(hashes []string) *bytes.Buffer { + if len(hashes) == 0 { + hashes = []string{} + } + + hashesM, _ := json.Marshal(hashes) + query := `{"query": {"ids": {"values": %s}}}` + deleteQuery := fmt.Sprintf(query, hashesM) + + return bytes.NewBuffer([]byte(deleteQuery)) +} diff --git a/converters/json_test.go b/converters/json_test.go index fa387a62..2786f148 100644 --- a/converters/json_test.go +++ b/converters/json_test.go @@ -17,3 +17,22 @@ func TestJsonEscape(t *testing.T) { require.Equal(t, `tag\u003e`, JsonEscape(`tag>`)) require.Equal(t, ",.\\u003c.\\u003e\\u003c\\u003c\\u003c\\u003c\\u003e\\u003e\\u003e\\u003e\\u003e", JsonEscape(",.<.><<<<>>>>>")) } + +func TestPrepareHashesForQueryRemove(t *testing.T) { + t.Parallel() + + res := PrepareHashesForQueryRemove([]string{"1", "2"}) + require.Equal(t, `{"query": {"ids": {"values": ["1","2"]}}}`, res.String()) + + res = PrepareHashesForQueryRemove(nil) + require.Equal(t, `{"query": {"ids": {"values": []}}}`, res.String()) + + res = PrepareHashesForQueryRemove([]string{}) + require.Equal(t, `{"query": {"ids": {"values": []}}}`, res.String()) + + res = PrepareHashesForQueryRemove([]string{`"""`, "1111", `~''`}) + require.Equal(t, `{"query": {"ids": {"values": ["\"\"\"","1111","~''"]}}}`, res.String()) + + res = PrepareHashesForQueryRemove([]string{""}) + require.Equal(t, `{"query": {"ids": {"values": [""]}}}`, res.String()) +} diff --git 
a/process/elasticProcessor.go b/process/elasticProcessor.go index 3710bf43..86a3b4a2 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -4,8 +4,8 @@ import ( "bytes" "encoding/hex" "fmt" - elasticIndexer "github.com/ElrondNetwork/elastic-indexer-go" + "github.com/ElrondNetwork/elastic-indexer-go/converters" "github.com/ElrondNetwork/elastic-indexer-go/data" "github.com/ElrondNetwork/elastic-indexer-go/process/collections" "github.com/ElrondNetwork/elastic-indexer-go/process/tags" @@ -293,7 +293,10 @@ func (ei *elasticProcessor) RemoveHeader(header coreData.HeaderHandler) error { return err } - return ei.elasticClient.DoBulkRemove(elasticIndexer.BlockIndex, []string{hex.EncodeToString(headerHash)}) + return ei.elasticClient.DoQueryRemove( + elasticIndexer.BlockIndex, + converters.PrepareHashesForQueryRemove([]string{hex.EncodeToString(headerHash)}), + ) } // RemoveMiniblocks will remove all miniblocks that are in the header from the elasticsearch server @@ -303,7 +306,10 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body return nil } - return ei.elasticClient.DoBulkRemove(elasticIndexer.MiniblocksIndex, encodedMiniblocksHashes) + return ei.elasticClient.DoQueryRemove( + elasticIndexer.MiniblocksIndex, + converters.PrepareHashesForQueryRemove(encodedMiniblocksHashes), + ) } // RemoveTransactions will remove transactions that are in miniblocks from the elasticsearch server @@ -313,7 +319,27 @@ func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, bo return nil } - return ei.elasticClient.DoBulkRemove(elasticIndexer.TransactionsIndex, encodedTxsHashes) + return ei.elasticClient.DoQueryRemove( + elasticIndexer.TransactionsIndex, + converters.PrepareHashesForQueryRemove(encodedTxsHashes), + ) +} + +// RemoveAccountsESDT will remove data from the accountsesdt and accountsesdthistory indices +func (ei *elasticProcessor) RemoveAccountsESDT(headerTimestamp uint64) error { + query := fmt.Sprintf(`{"query": {"bool": {"must": [{"match": {"shardID": {"query": %d,"operator": "AND"}}},{"match": {"timestamp": {"query": %d,"operator": "AND"}}}]}}}`, ei.selfShardID, headerTimestamp) + err := ei.elasticClient.DoQueryRemove( + elasticIndexer.AccountsESDTIndex, + bytes.NewBuffer([]byte(query)), + ) + if err != nil { + return err + } + + return ei.elasticClient.DoQueryRemove( + elasticIndexer.AccountsESDTHistoryIndex, + bytes.NewBuffer([]byte(query)), + ) } // SaveMiniblocks will prepare and save information about miniblocks in elasticsearch server diff --git a/process/interface.go b/process/interface.go index 7d31fb81..357334ff 100644 --- a/process/interface.go +++ b/process/interface.go @@ -15,7 +15,7 @@ type DatabaseClientHandler interface { DoRequest(req *esapi.IndexRequest) error DoBulkRequest(buff *bytes.Buffer, index string) error - DoBulkRemove(index string, hashes []string) error + DoQueryRemove(index string, buff *bytes.Buffer) error DoMultiGet(ids []string, index string, withSource bool, res interface{}) error DoScrollRequest(index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) error) error DoCountRequest(index string, body []byte) (uint64, error) diff --git a/workItems/interface.go b/workItems/interface.go index c7c98770..1a817ca6 100644 --- a/workItems/interface.go +++ b/workItems/interface.go @@ -34,6 +34,7 @@ type removeIndexer interface { RemoveHeader(header coreData.HeaderHandler) error RemoveMiniblocks(header coreData.HeaderHandler, body *block.Body)
error RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error + RemoveAccountsESDT(headerTimestamp uint64) error } type saveRounds interface { diff --git a/workItems/workItemRemoveBlock.go b/workItems/workItemRemoveBlock.go index 5dd88ff7..db66f14a 100644 --- a/workItems/workItemRemoveBlock.go +++ b/workItems/workItemRemoveBlock.go @@ -51,5 +51,5 @@ func (wirb *itemRemoveBlock) Save() error { return err } - return nil + return wirb.indexer.RemoveAccountsESDT(wirb.headerHandler.GetTimeStamp()) } From be3d256e825b0b8555ab8619648a0bb85b47c520 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 15:09:16 +0300 Subject: [PATCH 47/69] fix unit tests --- interface.go | 1 + mock/databaseWriterStub.go | 10 +++++----- mock/elasticProcessorStub.go | 10 ++++++++++ process/elasticProcessor_test.go | 18 +++++++++++------- 4 files changed, 27 insertions(+), 12 deletions(-) diff --git a/interface.go b/interface.go index 30ca2511..255b3e54 100644 --- a/interface.go +++ b/interface.go @@ -32,6 +32,7 @@ type ElasticProcessor interface { RemoveHeader(header coreData.HeaderHandler) error RemoveMiniblocks(header coreData.HeaderHandler, body *block.Body) error RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error + RemoveAccountsESDT(headerTimestamp uint64) error SaveMiniblocks(header coreData.HeaderHandler, body *block.Body) error SaveTransactions(body *block.Body, header coreData.HeaderHandler, pool *indexer.Pool) error SaveValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error diff --git a/mock/databaseWriterStub.go b/mock/databaseWriterStub.go index 57171396..90d80bd5 100644 --- a/mock/databaseWriterStub.go +++ b/mock/databaseWriterStub.go @@ -10,7 +10,7 @@ import ( type DatabaseWriterStub struct { DoRequestCalled func(req *esapi.IndexRequest) error DoBulkRequestCalled func(buff *bytes.Buffer, index string) error - DoBulkRemoveCalled func(index string, hashes []string) error + DoQueryRemoveCalled func(index string, body *bytes.Buffer) error DoMultiGetCalled func(ids []string, index string, withSource bool, response interface{}) error CheckAndCreateIndexCalled func(index string) error DoScrollRequestCalled func(index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) error) error @@ -54,10 +54,10 @@ func (dwm *DatabaseWriterStub) DoMultiGet(hashes []string, index string, withSou return nil } -// DoBulkRemove - -func (dwm *DatabaseWriterStub) DoBulkRemove(index string, hashes []string) error { - if dwm.DoBulkRemoveCalled != nil { - return dwm.DoBulkRemoveCalled(index, hashes) +// DoQueryRemove - +func (dwm *DatabaseWriterStub) DoQueryRemove(index string, body *bytes.Buffer) error { + if dwm.DoQueryRemoveCalled != nil { + return dwm.DoQueryRemoveCalled(index, body) } return nil diff --git a/mock/elasticProcessorStub.go b/mock/elasticProcessorStub.go index a85ba636..3d74f136 100644 --- a/mock/elasticProcessorStub.go +++ b/mock/elasticProcessorStub.go @@ -26,6 +26,16 @@ type ElasticProcessorStub struct { SaveRoundsInfoCalled func(infos []*data.RoundInfo) error SaveShardValidatorsPubKeysCalled func(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error SaveAccountsCalled func(timestamp uint64, acc []*data.Account) error + RemoveAccountsESDTCalled func(headerTimestamp uint64) error +} + +// RemoveAccountsESDT - +func (eim *ElasticProcessorStub) RemoveAccountsESDT(headerTimestamp uint64) error { + if eim.RemoveAccountsESDTCalled != nil { + return eim.RemoveAccountsESDTCalled(headerTimestamp) + } + + 
return nil } // SaveHeader - diff --git a/process/elasticProcessor_test.go b/process/elasticProcessor_test.go index d4c83d6b..e9eabc93 100644 --- a/process/elasticProcessor_test.go +++ b/process/elasticProcessor_test.go @@ -9,6 +9,7 @@ import ( "io/ioutil" "math/big" "strconv" + "strings" "testing" elasticIndexer "github.com/ElrondNetwork/elastic-indexer-go" @@ -50,7 +51,7 @@ func newElasticsearchProcessor(elasticsearchWriter DatabaseClientHandler, argume func createMockElasticProcessorArgs() *ArgElasticProcessor { balanceConverter, _ := converters.NewBalanceConverter(10) - acp, _ := accounts.NewAccountsProcessor(&mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter) + acp, _ := accounts.NewAccountsProcessor(&mock.MarshalizerMock{}, &mock.PubkeyConverterMock{}, &mock.AccountsStub{}, balanceConverter, 0) bp, _ := block.NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) mp, _ := miniblocks.NewMiniblocksProcessor(0, &mock.HasherMock{}, &mock.MarshalizerMock{}, false) vp, _ := validators.NewValidatorsProcessor(mock.NewPubkeyConverterMock(32), 0) @@ -266,7 +267,7 @@ func TestElasticProcessor_RemoveHeader(t *testing.T) { args := createMockElasticProcessorArgs() args.DBClient = &mock.DatabaseWriterStub{ - DoBulkRemoveCalled: func(index string, hashes []string) error { + DoQueryRemoveCalled: func(index string, body *bytes.Buffer) error { called = true return nil }, @@ -307,10 +308,11 @@ func TestElasticProcessor_RemoveMiniblocks(t *testing.T) { mbHash3, _ := core.CalculateHash(&mock.MarshalizerMock{}, &mock.HasherMock{}, mb3) args.DBClient = &mock.DatabaseWriterStub{ - DoBulkRemoveCalled: func(index string, hashes []string) error { + DoQueryRemoveCalled: func(index string, body *bytes.Buffer) error { called = true - require.Equal(t, hashes[0], hex.EncodeToString(mbHash2)) - require.Equal(t, hashes[1], hex.EncodeToString(mbHash3)) + bodyStr := body.String() + require.True(t, strings.Contains(bodyStr, hex.EncodeToString(mbHash2))) + require.True(t, strings.Contains(bodyStr, hex.EncodeToString(mbHash3))) return nil }, } @@ -562,9 +564,11 @@ func TestElasticProcessor_RemoveTransactions(t *testing.T) { txsHashes := [][]byte{[]byte("txHas1"), []byte("txHash2")} expectedHashes := []string{hex.EncodeToString(txsHashes[0]), hex.EncodeToString(txsHashes[1])} dbWriter := &mock.DatabaseWriterStub{ - DoBulkRemoveCalled: func(index string, hashes []string) error { + DoQueryRemoveCalled: func(index string, body *bytes.Buffer) error { + bodyStr := body.String() require.Equal(t, elasticIndexer.TransactionsIndex, index) - require.Equal(t, expectedHashes, expectedHashes) + require.True(t, strings.Contains(bodyStr, expectedHashes[0])) + require.True(t, strings.Contains(bodyStr, expectedHashes[1])) called = true return nil }, From e4e11e846a41617882ce3cb204ac093dd469a2b8 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 19 Aug 2022 15:22:21 +0300 Subject: [PATCH 48/69] fix integration tests --- .../accountsBalanceNftTransfer/balance-nft-after-create.json | 3 ++- .../accountsBalanceNftTransfer/balance-nft-after-transfer.json | 3 ++- .../account-balance-esdt-first-update.json | 3 ++- .../account-balance-esdt-second-update.json | 3 ++- .../account-balance-first-update.json | 3 ++- .../account-balance-second-update.json | 3 ++- .../accountsESDTWithTokenType/account-esdt-with-type.json | 3 ++- .../accountsESDTWithTokenType/account-esdt-without-type.json | 3 ++- .../testdata/accountsESDTWithTokenType/account-esdt.json | 3 ++- 
.../createNFTWithTags/accounts-esdt-address-balance.json | 3 ++- 10 files changed, 20 insertions(+), 10 deletions(-) diff --git a/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-create.json b/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-create.json index 8f0d0aaa..e83e56e3 100644 --- a/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-create.json +++ b/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-create.json @@ -5,5 +5,6 @@ "balanceNum": 1e-15, "tokenNonce": 7440483, "token": "NFT-abcdef", - "timestamp": 5600 + "timestamp": 5600, + "shardID": 0 } diff --git a/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-transfer.json b/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-transfer.json index 261616e6..0adfa4ab 100644 --- a/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-transfer.json +++ b/integrationtests/testdata/accountsBalanceNftTransfer/balance-nft-after-transfer.json @@ -5,5 +5,6 @@ "balanceNum": 1e-15, "tokenNonce": 7440483, "token": "NFT-abcdef", - "timestamp": 5600 + "timestamp": 5600, + "shardID": 0 } diff --git a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json index 02b03b73..1da62b33 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json @@ -4,5 +4,6 @@ "balanceNum": 1e-15, "token": "TTTT-abcd", "timestamp": 5600, - "type": "FungibleESDT" + "type": "FungibleESDT", + "shardID": 0 } diff --git a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-second-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-second-update.json index 6f764f38..469a1d04 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-second-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-second-update.json @@ -4,5 +4,6 @@ "balanceNum": 1e-15, "timestamp": 6000, "token": "TTTT-abcd", - "type": "FungibleESDT" + "type": "FungibleESDT", + "shardID": 0 } diff --git a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json index 429fedbf..336d110e 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json @@ -3,5 +3,6 @@ "balance": "0", "balanceNum": 0, "totalBalanceWithStake": "0", - "timestamp": 5600 + "timestamp": 5600, + "shardID": 0 } diff --git a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json index f3b9b98f..ebfe0be4 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json @@ -3,5 +3,6 @@ "balance": "2000", "balanceNum": 0, "timestamp": 6000, - "totalBalanceWithStake": "2000" + "totalBalanceWithStake": "2000", + 
"shardID": 0 } diff --git a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-with-type.json b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-with-type.json index bc768da1..27b8fbdf 100644 --- a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-with-type.json +++ b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-with-type.json @@ -12,5 +12,6 @@ "whiteListedStorage": false }, "timestamp": 5600, - "type": "SemiFungibleESDT" + "type": "SemiFungibleESDT", + "shardID": 0 } diff --git a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-without-type.json b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-without-type.json index 625e2c71..ba3608b2 100644 --- a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-without-type.json +++ b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt-without-type.json @@ -11,5 +11,6 @@ "nonEmptyURIs": false, "whiteListedStorage": false }, - "timestamp": 5600 + "timestamp": 5600, + "shardID": 0 } diff --git a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt.json b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt.json index a0b5acd4..3f9e4418 100644 --- a/integrationtests/testdata/accountsESDTWithTokenType/account-esdt.json +++ b/integrationtests/testdata/accountsESDTWithTokenType/account-esdt.json @@ -13,5 +13,6 @@ }, "timestamp": 5600, "type": "SemiFungibleESDT", - "currentOwner":"61646472" + "currentOwner":"61646472", + "shardID": 0 } diff --git a/integrationtests/testdata/createNFTWithTags/accounts-esdt-address-balance.json b/integrationtests/testdata/createNFTWithTags/accounts-esdt-address-balance.json index 820c79c2..7730e4f1 100644 --- a/integrationtests/testdata/createNFTWithTags/accounts-esdt-address-balance.json +++ b/integrationtests/testdata/createNFTWithTags/accounts-esdt-address-balance.json @@ -21,5 +21,6 @@ "tokenNonce": 1, "properties": "6f6b", "token": "DESK-abcd", - "timestamp": 5600 + "timestamp": 5600, + "shardID": 0 } From e17d553b0a446ca864fd2a79b7dea16a6102046b Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 22 Aug 2022 11:17:48 +0300 Subject: [PATCH 49/69] integration test and small refactor --- client/elasticClient.go | 54 +++++---- client/elasticClientScroll.go | 30 ++--- integrationtests/accountsESDTRollback_test.go | 104 ++++++++++++++++++ .../account-after-create.json | 16 +++ 4 files changed, 170 insertions(+), 34 deletions(-) create mode 100644 integrationtests/accountsESDTRollback_test.go create mode 100644 integrationtests/testdata/accountsESDTRollback/account-after-create.json diff --git a/client/elasticClient.go b/client/elasticClient.go index c34b8729..b725a6a1 100644 --- a/client/elasticClient.go +++ b/client/elasticClient.go @@ -28,7 +28,7 @@ type ( type elasticClient struct { elasticBaseUrl string - es *elasticsearch.Client + client *elasticsearch.Client // countScroll is used to be incremented after each scroll so the scroll duration is different each time, // bypassing any possible caching based on the same request @@ -47,7 +47,7 @@ func NewElasticClient(cfg elasticsearch.Config) (*elasticClient, error) { } ec := &elasticClient{ - es: es, + client: es, elasticBaseUrl: cfg.Addresses[0], } @@ -92,7 +92,7 @@ func (ec *elasticClient) CheckAndCreateAlias(alias string, indexName string) err // DoRequest will do a request to elastic server func (ec *elasticClient) DoRequest(req *esapi.IndexRequest) error { - res, err := req.Do(context.Background(), ec.es) + res, err := 
req.Do(context.Background(), ec.client) if err != nil { return err } @@ -106,10 +106,10 @@ func (ec *elasticClient) DoBulkRequest(buff *bytes.Buffer, index string) error { options := make([]func(*esapi.BulkRequest), 0) if index != "" { - options = append(options, ec.es.Bulk.WithIndex(index)) + options = append(options, ec.client.Bulk.WithIndex(index)) } - res, err := ec.es.Bulk( + res, err := ec.client.Bulk( reader, options..., ) @@ -130,9 +130,9 @@ func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, return err } - res, err := ec.es.Mget( + res, err := ec.client.Mget( &body, - ec.es.Mget.WithIndex(index), + ec.client.Mget.WithIndex(index), ) if err != nil { log.Warn("elasticClient.DoMultiGet", @@ -152,10 +152,15 @@ func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, // DoQueryRemove will do a query remove to elasticsearch server func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { - res, err := ec.es.DeleteByQuery( + if err := ec.doRefresh(index); err != nil { + log.Warn("elasticClient.doRefresh", "cannot refresh", err.Error()) + } + + res, err := ec.client.DeleteByQuery( []string{index}, body, - ec.es.DeleteByQuery.WithIgnoreUnavailable(true), + ec.client.DeleteByQuery.WithIgnoreUnavailable(true), + ec.client.DeleteByQuery.WithConflicts("proceed"), ) if err != nil { @@ -163,8 +168,7 @@ func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { return err } - var decodedBody objectsMap - err = parseResponse(res, &decodedBody, elasticDefaultErrorResponseHandler) + err = parseResponse(res, nil, elasticDefaultErrorResponseHandler) if err != nil { log.Warn("elasticClient.DoQueryRemove", "error parsing response", err.Error()) return err @@ -173,15 +177,27 @@ func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { return nil } +func (ec *elasticClient) doRefresh(index string) error { + res, err := ec.client.Indices.Refresh( + ec.client.Indices.Refresh.WithIndex(index), + ec.client.Indices.Refresh.WithIgnoreUnavailable(true), + ) + if err != nil { + return err + } + + return parseResponse(res, nil, elasticDefaultErrorResponseHandler) +} + // TemplateExists checks whether a template is already created func (ec *elasticClient) templateExists(index string) bool { - res, err := ec.es.Indices.ExistsTemplate([]string{index}) + res, err := ec.client.Indices.ExistsTemplate([]string{index}) return exists(res, err) } // IndexExists checks if a given index already exists func (ec *elasticClient) indexExists(index string) bool { - res, err := ec.es.Indices.Exists([]string{index}) + res, err := ec.client.Indices.Exists([]string{index}) return exists(res, err) } @@ -195,7 +211,7 @@ func (ec *elasticClient) PolicyExists(policy string) bool { ) req := newRequest(http.MethodGet, policyRoute, nil) - res, err := ec.es.Transport.Perform(req) + res, err := ec.client.Transport.Perform(req) if err != nil { log.Warn("elasticClient.PolicyExists", "error performing request", err.Error()) @@ -227,7 +243,7 @@ func (ec *elasticClient) aliasExists(alias string) bool { ) req := newRequest(http.MethodHead, aliasRoute, nil) - res, err := ec.es.Transport.Perform(req) + res, err := ec.client.Transport.Perform(req) if err != nil { log.Warn("elasticClient.AliasExists", "error performing request", err.Error()) @@ -245,7 +261,7 @@ func (ec *elasticClient) aliasExists(alias string) bool { // CreateIndex creates an elasticsearch index func (ec *elasticClient) createIndex(index string) error { - res, err
:= ec.es.Indices.Create(index) + res, err := ec.client.Indices.Create(index) if err != nil { return err } @@ -264,7 +280,7 @@ func (ec *elasticClient) createPolicy(policyName string, policy *bytes.Buffer) e req := newRequest(http.MethodPut, policyRoute, policy) req.Header[headerContentType] = headerContentTypeJSON req.Header[headerXSRF] = []string{"false"} - res, err := ec.es.Transport.Perform(req) + res, err := ec.client.Transport.Perform(req) if err != nil { return err } @@ -291,7 +307,7 @@ func (ec *elasticClient) createPolicy(policyName string, policy *bytes.Buffer) e // CreateIndexTemplate creates an elasticsearch index template func (ec *elasticClient) createIndexTemplate(templateName string, template io.Reader) error { - res, err := ec.es.Indices.PutTemplate(templateName, template) + res, err := ec.client.Indices.PutTemplate(templateName, template) if err != nil { return err } @@ -301,7 +317,7 @@ func (ec *elasticClient) createIndexTemplate(templateName string, template io.Re // CreateAlias creates an index alias func (ec *elasticClient) createAlias(alias string, index string) error { - res, err := ec.es.Indices.PutAlias([]string{index}, alias) + res, err := ec.client.Indices.PutAlias([]string{index}, alias) if err != nil { return err } diff --git a/client/elasticClientScroll.go b/client/elasticClientScroll.go index b90a6b8b..1bda8a88 100644 --- a/client/elasticClientScroll.go +++ b/client/elasticClientScroll.go @@ -15,9 +15,9 @@ import ( // DoCountRequest will get the number of elements that correspond with the provided query func (ec *elasticClient) DoCountRequest(index string, body []byte) (uint64, error) { - res, err := ec.es.Count( - ec.es.Count.WithIndex(index), - ec.es.Count.WithBody(bytes.NewBuffer(body)), + res, err := ec.client.Count( + ec.client.Count.WithIndex(index), + ec.client.Count.WithBody(bytes.NewBuffer(body)), ) if err != nil { return 0, err @@ -44,13 +44,13 @@ func (ec *elasticClient) DoScrollRequest( handlerFunc func(responseBytes []byte) error, ) error { ec.countScroll++ - res, err := ec.es.Search( - ec.es.Search.WithSize(9000), - ec.es.Search.WithScroll(10*time.Minute+time.Duration(ec.countScroll)*time.Millisecond), - ec.es.Search.WithContext(context.Background()), - ec.es.Search.WithIndex(index), - ec.es.Search.WithBody(bytes.NewBuffer(body)), - ec.es.Search.WithSource(strconv.FormatBool(withSource)), + res, err := ec.client.Search( + ec.client.Search.WithSize(9000), + ec.client.Search.WithScroll(10*time.Minute+time.Duration(ec.countScroll)*time.Millisecond), + ec.client.Search.WithContext(context.Background()), + ec.client.Search.WithIndex(index), + ec.client.Search.WithBody(bytes.NewBuffer(body)), + ec.client.Search.WithSource(strconv.FormatBool(withSource)), ) if err != nil { return err @@ -103,9 +103,9 @@ func (ec *elasticClient) iterateScroll( func (ec *elasticClient) getScrollResponse(scrollID string) ([]byte, error) { ec.countScroll++ - res, err := ec.es.Scroll( - ec.es.Scroll.WithScrollID(scrollID), - ec.es.Scroll.WithScroll(2*time.Minute+time.Duration(ec.countScroll)*time.Millisecond), + res, err := ec.client.Scroll( + ec.client.Scroll.WithScrollID(scrollID), + ec.client.Scroll.WithScroll(2*time.Minute+time.Duration(ec.countScroll)*time.Millisecond), ) if err != nil { return nil, err @@ -115,8 +115,8 @@ func (ec *elasticClient) getScrollResponse(scrollID string) ([]byte, error) { } func (ec *elasticClient) clearScroll(scrollID string) error { - resp, err := ec.es.ClearScroll( - ec.es.ClearScroll.WithScrollID(scrollID), + resp, err := 
ec.client.ClearScroll( + ec.client.ClearScroll.WithScrollID(scrollID), ) if err != nil { return err diff --git a/integrationtests/accountsESDTRollback_test.go b/integrationtests/accountsESDTRollback_test.go new file mode 100644 index 00000000..2fd4afdd --- /dev/null +++ b/integrationtests/accountsESDTRollback_test.go @@ -0,0 +1,104 @@ +//go:build integrationtests + +package integrationtests + +import ( + "encoding/json" + "math/big" + "testing" + + indexerdata "github.com/ElrondNetwork/elastic-indexer-go" + "github.com/ElrondNetwork/elastic-indexer-go/mock" + "github.com/ElrondNetwork/elrond-go-core/core" + coreData "github.com/ElrondNetwork/elrond-go-core/data" + dataBlock "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go-core/data/esdt" + "github.com/ElrondNetwork/elrond-go-core/data/indexer" + "github.com/ElrondNetwork/elrond-go-core/data/transaction" + vmcommon "github.com/ElrondNetwork/elrond-vm-common" + "github.com/stretchr/testify/require" +) + +func TestAccountsESDTDeleteOnRollback(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + feeComputer := &mock.EconomicsHandlerMock{} + + shardCoordinator := &mock.ShardCoordinatorMock{ + SelfID: 1, + ComputeIdCalled: func(address []byte) uint32 { + return 1 + }, + } + + esdtToken := &esdt.ESDigitalToken{ + Value: big.NewInt(1000), + Properties: []byte("ok"), + TokenMetaData: &esdt.MetaData{ + Creator: []byte("creator"), + }, + } + addr := "aaaabbbb" + mockAccount := &mock.UserAccountStub{ + RetrieveValueFromDataTrieTrackerCalled: func(key []byte) ([]byte, error) { + return json.Marshal(esdtToken) + }, + AddressBytesCalled: func() []byte { + return []byte(addr) + }, + } + accounts := &mock.AccountsStub{ + LoadAccountCalled: func(container []byte) (vmcommon.AccountHandler, error) { + return mockAccount, nil + }, + } + + esProc, err := CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + // CREATE SEMI-FUNGIBLE TOKEN + esdtDataBytes, _ := json.Marshal(esdtToken) + pool := &indexer.Pool{ + Logs: []*coreData.LogData{ + { + TxHash: "h1", + LogHandler: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("aaaabbbb"), + Identifier: []byte(core.BuiltInFunctionESDTNFTCreate), + Topics: [][]byte{[]byte("TOKEN-eeee"), big.NewInt(2).Bytes(), big.NewInt(1).Bytes(), esdtDataBytes}, + }, + nil, + }, + }, + }, + }, + } + + body := &dataBlock.Body{} + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + } + + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + ids := []string{"6161616162626262-TOKEN-eeee-02"} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/accountsESDTRollback/account-after-create.json"), string(genericResponse.Docs[0].Source)) + + // DO ROLLBACK + err = esProc.RemoveAccountsESDT(5040) + require.Nil(t, err) + + err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + require.Nil(t, err) + require.False(t, genericResponse.Docs[0].Found) +} diff --git a/integrationtests/testdata/accountsESDTRollback/account-after-create.json b/integrationtests/testdata/accountsESDTRollback/account-after-create.json new file mode 100644 index 00000000..b4cec8ec --- /dev/null +++ 
b/integrationtests/testdata/accountsESDTRollback/account-after-create.json @@ -0,0 +1,16 @@ +{ + "identifier": "TOKEN-eeee-02", + "address": "6161616162626262", + "balance": "1000", + "balanceNum": 1.0E-15, + "data": { + "creator": "63726561746f72", + "whiteListedStorage": false, + "nonEmptyURIs": false + }, + "shardID": 1, + "tokenNonce": 2, + "properties": "6f6b", + "token": "TOKEN-eeee", + "timestamp": 5040 +} \ No newline at end of file From 80ea9365eb55c390165d34550eeed00da518d12e Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 22 Aug 2022 11:18:03 +0300 Subject: [PATCH 50/69] empty line --- .../testdata/accountsESDTRollback/account-after-create.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integrationtests/testdata/accountsESDTRollback/account-after-create.json b/integrationtests/testdata/accountsESDTRollback/account-after-create.json index b4cec8ec..8a59d7d8 100644 --- a/integrationtests/testdata/accountsESDTRollback/account-after-create.json +++ b/integrationtests/testdata/accountsESDTRollback/account-after-create.json @@ -13,4 +13,4 @@ "properties": "6f6b", "token": "TOKEN-eeee", "timestamp": 5040 -} \ No newline at end of file +} From b469c353ae99fd93362601b534adec51ea645772 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Mon, 22 Aug 2022 15:09:42 +0300 Subject: [PATCH 51/69] small fix --- process/elasticProcessor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 86a3b4a2..a2deb6d5 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -327,7 +327,7 @@ func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, bo // RemoveAccountsESDT will remove data from the accountsesdt and accountsesdthistory indices func (ei *elasticProcessor) RemoveAccountsESDT(headerTimestamp uint64) error { - query := fmt.Sprintf(`{"query": {"bool": {"must": [{"match": {"shardID": {"query": %d,"operator": "AND"}}},{"match": {"timestamp": {"query": %d,"operator": "AND"}}}]}}}`, ei.selfShardID, headerTimestamp) + query := fmt.Sprintf(`{"query": {"bool": {"must": [{"match": {"shardID": {"query": %d,"operator": "AND"}}},{"match": {"timestamp": {"query": "%d","operator": "AND"}}}]}}}`, ei.selfShardID, headerTimestamp) err := ei.elasticClient.DoQueryRemove( elasticIndexer.AccountsESDTIndex, bytes.NewBuffer([]byte(query)), ) From 598cd45eaaa2482c820673d4a2239717d7dafc64 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 23 Aug 2022 14:01:10 +0300 Subject: [PATCH 52/69] use header hash from arg instead of computing it --- interface.go | 1 + mock/elasticProcessorStub.go | 4 +++- process/block/blockProcessor.go | 13 ++++++------- process/block/blockProcessor_test.go | 15 ++++++++------- process/elasticProcessor.go | 3 ++- process/elasticProcessor_test.go | 6 +++--- process/interface.go | 1 + workItems/interface.go | 1 + workItems/workItemBlock.go | 1 + workItems/workItemBlock_test.go | 6 +++--- 10 files changed, 29 insertions(+), 22 deletions(-) diff --git a/interface.go b/interface.go index 255b3e54..9ae5d3ba 100644 --- a/interface.go +++ b/interface.go @@ -22,6 +22,7 @@ type ElasticProcessor interface { SaveHeader( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, diff --git a/mock/elasticProcessorStub.go b/mock/elasticProcessorStub.go index 3d74f136..c74f3869 100644 --- a/mock/elasticProcessorStub.go +++
b/mock/elasticProcessorStub.go @@ -10,6 +10,7 @@ import ( // ElasticProcessorStub - type ElasticProcessorStub struct { SaveHeaderCalled func( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, @@ -40,6 +41,7 @@ func (eim *ElasticProcessorStub) RemoveAccountsESDT(headerTimestamp uint64) erro // SaveHeader - func (eim *ElasticProcessorStub) SaveHeader( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, @@ -47,7 +49,7 @@ func (eim *ElasticProcessorStub) SaveHeader( gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { if eim.SaveHeaderCalled != nil { - return eim.SaveHeaderCalled(header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize) + return eim.SaveHeaderCalled(headerHash, header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize) } return nil } diff --git a/process/block/blockProcessor.go b/process/block/blockProcessor.go index 60ccfc7f..216ab2db 100644 --- a/process/block/blockProcessor.go +++ b/process/block/blockProcessor.go @@ -44,6 +44,7 @@ func NewBlockProcessor(hasher hashing.Hasher, marshalizer marshal.Marshalizer) ( // PrepareBlockForDB will prepare a database block and serialize it for database func (bp *blockProcessor) PrepareBlockForDB( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, @@ -58,7 +59,7 @@ func (bp *blockProcessor) PrepareBlockForDB( return nil, indexer.ErrNilBlockBody } - blockSizeInBytes, headerHash, err := bp.computeBlockSizeAndHeaderHash(header, body) + blockSizeInBytes, err := bp.computeBlockSize(header, body) if err != nil { return nil, err } @@ -221,21 +222,19 @@ func (bp *blockProcessor) getEncodedMBSHashes(body *block.Body) []string { return miniblocksHashes } -func (bp *blockProcessor) computeBlockSizeAndHeaderHash(header coreData.HeaderHandler, body *block.Body) (int, []byte, error) { +func (bp *blockProcessor) computeBlockSize(header coreData.HeaderHandler, body *block.Body) (int, error) { headerBytes, err := bp.marshalizer.Marshal(header) if err != nil { - return 0, nil, err + return 0, err } bodyBytes, err := bp.marshalizer.Marshal(body) if err != nil { - return 0, nil, err + return 0, err } blockSize := len(headerBytes) + len(bodyBytes) - headerHash := bp.hasher.Compute(string(headerBytes)) - - return blockSize, headerHash, nil + return blockSize, nil } func (bp *blockProcessor) getLeaderIndex(signersIndexes []uint64) uint64 { diff --git a/process/block/blockProcessor_test.go b/process/block/blockProcessor_test.go index 21d9e271..1640a457 100644 --- a/process/block/blockProcessor_test.go +++ b/process/block/blockProcessor_test.go @@ -60,6 +60,7 @@ func TestBlockProcessor_PrepareBlockForDBShouldWork(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) dbBlock, err := bp.PrepareBlockForDB( + []byte("hash"), &dataBlock.Header{}, []uint64{0, 1, 2}, &dataBlock.Body{ @@ -75,7 +76,7 @@ func TestBlockProcessor_PrepareBlockForDBShouldWork(t *testing.T) { require.Nil(t, err) expectedBlock := &data.Block{ - Hash: "c7c81a1b22b67680f35837b474387ddfe10f67e104034c80f94ab9e5a0a089fb", + Hash: "68617368", Validators: []uint64{0x0, 0x1, 0x2}, EpochStartBlock: false, SearchOrder: 0x3fc, @@ -93,7 +94,7 @@ func TestBlockProcessor_PrepareBlockForDBNilHeader(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB(nil, nil, &dataBlock.Body{}, nil, 
coreIndexerData.HeaderGasConsumption{}, 0) + dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), nil, nil, &dataBlock.Body{}, nil, coreIndexerData.HeaderGasConsumption{}, 0) require.Equal(t, indexer.ErrNilHeaderHandler, err) require.Nil(t, dbBlock) } @@ -103,7 +104,7 @@ func TestBlockProcessor_PrepareBlockForDBNilBody(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB(&dataBlock.MetaBlock{}, nil, nil, nil, coreIndexerData.HeaderGasConsumption{}, 0) + dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, nil, nil, coreIndexerData.HeaderGasConsumption{}, 0) require.Equal(t, indexer.ErrNilBlockBody, err) require.Nil(t, dbBlock) } @@ -118,7 +119,7 @@ func TestBlockProcessor_PrepareBlockForDBMarshalFailHeader(t *testing.T) { }, }) - dbBlock, err := bp.PrepareBlockForDB(&dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, coreIndexerData.HeaderGasConsumption{}, 0) + dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, coreIndexerData.HeaderGasConsumption{}, 0) require.Equal(t, expectedErr, err) require.Nil(t, dbBlock) } @@ -140,7 +141,7 @@ func TestBlockProcessor_PrepareBlockForDBMarshalFailBlock(t *testing.T) { }, }) - dbBlock, err := bp.PrepareBlockForDB(&dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, coreIndexerData.HeaderGasConsumption{}, 0) + dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, coreIndexerData.HeaderGasConsumption{}, 0) require.Equal(t, expectedErr, err) require.Nil(t, dbBlock) } @@ -161,7 +162,7 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB(&dataBlock.MetaBlock{ + dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{ TxCount: 1000, EpochStart: dataBlock.EpochStart{ LastFinalizedHeaders: []dataBlock.EpochStartShardData{{ @@ -209,7 +210,7 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { Nonce: 0, Round: 0, Epoch: 0, - Hash: "a6d891a7692e19f97ad2993b7804708995d2d9deb008692d1e43084a79d04da5", + Hash: "68617368", MiniBlocksHashes: []string{}, NotarizedBlocksHashes: nil, Proposer: 0, diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index a2deb6d5..32b11106 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -247,6 +247,7 @@ func getTemplateByName(templateName string, templateList map[string]*bytes.Buffe // SaveHeader will prepare and save information about a header in elasticsearch server func (ei *elasticProcessor) SaveHeader( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, @@ -258,7 +259,7 @@ func (ei *elasticProcessor) SaveHeader( return nil } - elasticBlock, err := ei.blockProc.PrepareBlockForDB(header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize) + elasticBlock, err := ei.blockProc.PrepareBlockForDB(headerHash, header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize) if err != nil { return err } diff --git a/process/elasticProcessor_test.go b/process/elasticProcessor_test.go index e9eabc93..08ef93ec 100644 --- a/process/elasticProcessor_test.go +++ b/process/elasticProcessor_test.go @@ -362,7 +362,7 @@ func TestElasticseachDatabaseSaveHeader_RequestError(t *testing.T) { arguments.BlockProc, _ = 
block.NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveHeader(header, signerIndexes, &dataBlock.Body{}, nil, indexer.HeaderGasConsumption{}, 1) + err := elasticDatabase.SaveHeader([]byte("hh"), header, signerIndexes, &dataBlock.Body{}, nil, indexer.HeaderGasConsumption{}, 1) require.Equal(t, localErr, err) } @@ -403,7 +403,7 @@ func TestElasticseachDatabaseSaveHeader_CheckRequestBody(t *testing.T) { arguments.BlockProc, _ = block.NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveHeader(header, signerIndexes, blockBody, nil, indexer.HeaderGasConsumption{}, 1) + err := elasticDatabase.SaveHeader([]byte("hh"), header, signerIndexes, blockBody, nil, indexer.HeaderGasConsumption{}, 1) require.Nil(t, err) } @@ -630,7 +630,7 @@ func TestElasticProcessor_IndexEpochInfoData(t *testing.T) { body := &dataBlock.Body{} metaHeader := &dataBlock.MetaBlock{} - err = elasticSearchProc.SaveHeader(metaHeader, nil, body, nil, indexer.HeaderGasConsumption{}, 0) + err = elasticSearchProc.SaveHeader([]byte("hh"), metaHeader, nil, body, nil, indexer.HeaderGasConsumption{}, 0) require.Nil(t, err) require.True(t, called) } diff --git a/process/interface.go b/process/interface.go index 357334ff..a726d67a 100644 --- a/process/interface.go +++ b/process/interface.go @@ -46,6 +46,7 @@ type DBAccountHandler interface { // DBBlockHandler defines the actions that a block handler should do type DBBlockHandler interface { PrepareBlockForDB( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, diff --git a/workItems/interface.go b/workItems/interface.go index 1a817ca6..8aa081d8 100644 --- a/workItems/interface.go +++ b/workItems/interface.go @@ -15,6 +15,7 @@ type WorkItemHandler interface { type saveBlockIndexer interface { SaveHeader( + headerHash []byte, header coreData.HeaderHandler, signersIndexes []uint64, body *block.Body, diff --git a/workItems/workItemBlock.go b/workItems/workItemBlock.go index 46bd1474..61f0083e 100644 --- a/workItems/workItemBlock.go +++ b/workItems/workItemBlock.go @@ -65,6 +65,7 @@ func (wib *itemBlock) Save() error { txsSizeInBytes := ComputeSizeOfTxs(wib.marshalizer, wib.argsSaveBlock.TransactionsPool) err := wib.indexer.SaveHeader( + wib.argsSaveBlock.HeaderHash, wib.argsSaveBlock.Header, wib.argsSaveBlock.SignersIndexes, body, diff --git a/workItems/workItemBlock_test.go b/workItems/workItemBlock_test.go index f2fbf69a..fb43a163 100644 --- a/workItems/workItemBlock_test.go +++ b/workItems/workItemBlock_test.go @@ -54,7 +54,7 @@ func TestItemBlock_SaveHeaderShouldErr(t *testing.T) { localErr := errors.New("local err") itemBlock := workItems.NewItemBlock( &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { + SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { return localErr }, }, @@ -75,7 +75,7 @@ func TestItemBlock_SaveNoMiniblocksShoulCallSaveHeader(t *testing.T) { countCalled := 0 itemBlock := workItems.NewItemBlock( &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(header data.HeaderHandler, 
signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { + SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { countCalled++ return nil }, @@ -148,7 +148,7 @@ func TestItemBlock_SaveShouldWork(t *testing.T) { countCalled := 0 itemBlock := workItems.NewItemBlock( &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { + SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData indexer.HeaderGasConsumption, txsSize int) error { countCalled++ return nil }, From 59067cb07136b28c5f4085dc0654edce446e82b5 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 24 Aug 2022 12:08:55 +0300 Subject: [PATCH 53/69] change the logic of how logs are indexed --- process/logsevents/serialize.go | 25 +++++++++++++++++++++++-- process/logsevents/serialize_test.go | 4 ++-- 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/process/logsevents/serialize.go b/process/logsevents/serialize.go index f68dbf6a..37000829 100644 --- a/process/logsevents/serialize.go +++ b/process/logsevents/serialize.go @@ -14,13 +14,34 @@ import ( // SerializeLogs will serialize the provided logs in a way that Elastic Search expects a bulk request func (logsAndEventsProcessor) SerializeLogs(logs []*data.Logs, buffSlice *data.BufferSlice, index string) error { for _, lg := range logs { - meta := []byte(fmt.Sprintf(`{ "index" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(lg.ID), "\n")) + meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(lg.ID), "\n")) serializedData, errMarshal := json.Marshal(lg) if errMarshal != nil { return errMarshal } - err := buffSlice.PutData(meta, serializedData) + codeToExecute := ` + if ('create' == ctx.op) { + ctx._source = params.log + } else { + if (ctx._source.containsKey('timestamp')) { + if (ctx._source.timestamp <= params.account.timestamp) { + ctx._source = params.log + } + } else { + ctx._source = params.log + } + } ` + serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ `"source": "%s",`+ `"lang": "painless",`+ `"params": { "log": %s }},`+ `"upsert": {}}`, converters.FormatPainlessSource(codeToExecute), serializedData, ) + + err := buffSlice.PutData(meta, []byte(serializedDataStr)) if err != nil { return err } diff --git a/process/logsevents/serialize_test.go b/process/logsevents/serialize_test.go index 851b2166..6028f38d 100644 --- a/process/logsevents/serialize_test.go +++ b/process/logsevents/serialize_test.go @@ -35,8 +35,8 @@ func TestLogsAndEventsProcessor_SerializeLogs(t *testing.T) { err := (&logsAndEventsProcessor{}).SerializeLogs(logs, buffSlice, "logs") require.Nil(t, err) - expectedRes := `{ "index" : {"_index":"logs", "_id" : "747848617368" } } -{"address":"61646472657373","events":[{"address":"61646472","identifier":"ESDTNFTTransfer","topics":["bXktdG9rZW4=","AQ==","cmVjZWl2ZXI="],"data":"ZGF0YQ==","order":0}],"timestamp":1234} + expectedRes := `{ "update" : { "_index":"logs", "_id" : "747848617368" } } +{"scripted_upsert": true,
"script": {"source": "if ('create' == ctx.op) {ctx._source = params.log} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.log}} else {ctx._source = params.log}}","lang": "painless","params": { "log": {"address":"61646472657373","events":[{"address":"61646472","identifier":"ESDTNFTTransfer","topics":["bXktdG9rZW4=","AQ==","cmVjZWl2ZXI="],"data":"ZGF0YQ==","order":0}],"timestamp":1234} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } From d8a9de19df01d8dbb4fc2f950d58b08f337531b3 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 24 Aug 2022 12:50:18 +0300 Subject: [PATCH 54/69] fix integration tests --- process/logsevents/serialize.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/logsevents/serialize.go b/process/logsevents/serialize.go index 37000829..c01f0334 100644 --- a/process/logsevents/serialize.go +++ b/process/logsevents/serialize.go @@ -25,7 +25,7 @@ func (logsAndEventsProcessor) SerializeLogs(logs []*data.Logs, buffSlice *data.B ctx._source = params.log } else { if (ctx._source.containsKey('timestamp')) { - if (ctx._source.timestamp <= params.account.timestamp) { + if (ctx._source.timestamp <= params.log.timestamp) { ctx._source = params.log } } else { From 1bfa2dcfcb3fa4caad0a8e812678de83ff43323f Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 24 Aug 2022 13:46:33 +0300 Subject: [PATCH 55/69] integration tests index logs --- integrationtests/logsCrossShard_test.go | 139 ++++++++++++++++++ .../logsCrossShard/log-at-destination.json | 27 ++++ .../logsCrossShard/log-at-source.json | 17 +++ process/logsevents/serialize_test.go | 2 +- 4 files changed, 184 insertions(+), 1 deletion(-) create mode 100644 integrationtests/logsCrossShard_test.go create mode 100644 integrationtests/testdata/logsCrossShard/log-at-destination.json create mode 100644 integrationtests/testdata/logsCrossShard/log-at-source.json diff --git a/integrationtests/logsCrossShard_test.go b/integrationtests/logsCrossShard_test.go new file mode 100644 index 00000000..982c7fc5 --- /dev/null +++ b/integrationtests/logsCrossShard_test.go @@ -0,0 +1,139 @@ +package integrationtests + +import ( + "encoding/hex" + "math/big" + "testing" + + indexerdata "github.com/ElrondNetwork/elastic-indexer-go" + "github.com/ElrondNetwork/elastic-indexer-go/mock" + "github.com/ElrondNetwork/elrond-go-core/core" + coreData "github.com/ElrondNetwork/elrond-go-core/data" + dataBlock "github.com/ElrondNetwork/elrond-go-core/data/block" + "github.com/ElrondNetwork/elrond-go-core/data/indexer" + "github.com/ElrondNetwork/elrond-go-core/data/transaction" + "github.com/stretchr/testify/require" +) + +func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + accounts := &mock.AccountsStub{} + feeComputer := &mock.EconomicsHandlerMock{} + shardCoordinator := &mock.ShardCoordinatorMock{} + + esProc, err := CreateElasticProcessor(esClient, accounts, shardCoordinator, feeComputer) + require.Nil(t, err) + + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + } + body := &dataBlock.Body{} + + // INDEX ON SOURCE + pool := &indexer.Pool{ + Logs: []*coreData.LogData{ + { + LogHandler: &transaction.Log{ + Address: []byte("addr-1"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: 
[]byte(core.BuiltInFunctionESDTTransfer), + Topics: [][]byte{[]byte("ESDT-abcd"), big.NewInt(0).Bytes(), big.NewInt(1).Bytes()}, + }, + nil, + }, + }, + TxHash: "h1", + }, + }, + } + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + ids := []string{hex.EncodeToString([]byte("h1"))} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, + readExpectedResult("./testdata/logsCrossShard/log-at-source.json"), + string(genericResponse.Docs[0].Source), + ) + + // INDEX ON DESTINATION + header = &dataBlock.Header{ + Round: 50, + TimeStamp: 6040, + } + pool = &indexer.Pool{ + Logs: []*coreData.LogData{ + { + LogHandler: &transaction.Log{ + Address: []byte("addr-1"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTTransfer), + Topics: [][]byte{[]byte("ESDT-abcd"), big.NewInt(0).Bytes(), big.NewInt(1).Bytes()}, + }, + { + + Address: []byte("addr-3"), + Identifier: []byte("do-something"), + Topics: [][]byte{[]byte("topic1"), []byte("topic2")}, + }, + nil, + }, + }, + TxHash: "h1", + }, + }, + } + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, + readExpectedResult("./testdata/logsCrossShard/log-at-destination.json"), + string(genericResponse.Docs[0].Source), + ) + + // INDEX ON SOURCE AGAIN SHOULD NOT CHANGE + header = &dataBlock.Header{ + Round: 50, + TimeStamp: 5000, + } + pool = &indexer.Pool{ + Logs: []*coreData.LogData{ + { + LogHandler: &transaction.Log{ + Address: []byte("addr-1"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTTransfer), + Topics: [][]byte{[]byte("ESDT-abcd"), big.NewInt(0).Bytes(), big.NewInt(1).Bytes()}, + }, + nil, + }, + }, + TxHash: "h1", + }, + }, + } + err = esProc.SaveTransactions(body, header, pool) + require.Nil(t, err) + + err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, + readExpectedResult("./testdata/logsCrossShard/log-at-destination.json"), + string(genericResponse.Docs[0].Source), + ) +} diff --git a/integrationtests/testdata/logsCrossShard/log-at-destination.json b/integrationtests/testdata/logsCrossShard/log-at-destination.json new file mode 100644 index 00000000..e748e1fd --- /dev/null +++ b/integrationtests/testdata/logsCrossShard/log-at-destination.json @@ -0,0 +1,27 @@ +{ + "address": "616464722d31", + "events": [ + { + "identifier": "ESDTTransfer", + "address": "61646472", + "data": null, + "topics": [ + "RVNEVC1hYmNk", + "", + "AQ==" + ], + "order": 0 + }, + { + "identifier": "do-something", + "address": "616464722d33", + "data": null, + "topics": [ + "dG9waWMx", + "dG9waWMy" + ], + "order": 1 + } + ], + "timestamp": 6040 +} diff --git a/integrationtests/testdata/logsCrossShard/log-at-source.json b/integrationtests/testdata/logsCrossShard/log-at-source.json new file mode 100644 index 00000000..8283acb8 --- /dev/null +++ b/integrationtests/testdata/logsCrossShard/log-at-source.json @@ -0,0 +1,17 @@ +{ + "address": "616464722d31", + "events": [ + { + "identifier": "ESDTTransfer", + "address": "61646472", + "data": null, + "topics": [ + "RVNEVC1hYmNk", + "", + "AQ==" + ], + "order": 0 + } + ], + "timestamp": 5040 +} diff --git a/process/logsevents/serialize_test.go 
b/process/logsevents/serialize_test.go index 6028f38d..b9d9e5cc 100644 --- a/process/logsevents/serialize_test.go +++ b/process/logsevents/serialize_test.go @@ -36,7 +36,7 @@ func TestLogsAndEventsProcessor_SerializeLogs(t *testing.T) { require.Nil(t, err) expectedRes := `{ "update" : { "_index":"logs", "_id" : "747848617368" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.log} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.log}} else {ctx._source = params.log}}","lang": "painless","params": { "log": {"address":"61646472657373","events":[{"address":"61646472","identifier":"ESDTNFTTransfer","topics":["bXktdG9rZW4=","AQ==","cmVjZWl2ZXI="],"data":"ZGF0YQ==","order":0}],"timestamp":1234} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.log} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.log.timestamp) {ctx._source = params.log}} else {ctx._source = params.log}}","lang": "painless","params": { "log": {"address":"61646472657373","events":[{"address":"61646472","identifier":"ESDTNFTTransfer","topics":["bXktdG9rZW4=","AQ==","cmVjZWl2ZXI="],"data":"ZGF0YQ==","order":0}],"timestamp":1234} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } From f0c615d54cfc232fe8d332afc9092c1348ebf3e0 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 24 Aug 2022 14:08:30 +0300 Subject: [PATCH 56/69] fix integration tests --- integrationtests/logsCrossShard_test.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/integrationtests/logsCrossShard_test.go b/integrationtests/logsCrossShard_test.go index 982c7fc5..968780a8 100644 --- a/integrationtests/logsCrossShard_test.go +++ b/integrationtests/logsCrossShard_test.go @@ -49,14 +49,14 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "h1", + TxHash: "cross-log", }, }, } err = esProc.SaveTransactions(body, header, pool) require.Nil(t, err) - ids := []string{hex.EncodeToString([]byte("h1"))} + ids := []string{hex.EncodeToString([]byte("cross-log"))} genericResponse := &GenericResponse{} err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) require.Nil(t, err) @@ -90,7 +90,7 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "h1", + TxHash: "cross-log", }, }, } @@ -123,7 +123,7 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "h1", + TxHash: "cross-log", }, }, } From 94bfd2754b437a0a179b53a33878a4fa374bd931 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Wed, 24 Aug 2022 14:16:03 +0300 Subject: [PATCH 57/69] fixes after first review --- client/elasticClient.go | 5 ++++- converters/json.go | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/client/elasticClient.go b/client/elasticClient.go index b725a6a1..76b2d60a 100644 --- a/client/elasticClient.go +++ b/client/elasticClient.go @@ -15,7 +15,10 @@ import ( "github.com/elastic/go-elasticsearch/v7/esapi" ) +// TODO add more unit tests + const ( + esConflictsPolicy = "proceed" errPolicyAlreadyExists = "document already exists" ) @@ -160,7 +163,7 @@ func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { []string{index}, body, ec.client.DeleteByQuery.WithIgnoreUnavailable(true), - 
ec.client.DeleteByQuery.WithConflicts("proceed"), + ec.client.DeleteByQuery.WithConflicts(esConflictsPolicy), ) if err != nil { diff --git a/converters/json.go b/converters/json.go index a6c7cbbc..d380b2e2 100644 --- a/converters/json.go +++ b/converters/json.go @@ -33,9 +33,9 @@ func PrepareHashesForQueryRemove(hashes []string) *bytes.Buffer { hashes = []string{} } - hashesM, _ := json.Marshal(hashes) + serializedHashes, _ := json.Marshal(hashes) query := `{"query": {"ids": {"values": %s}}}` - deleteQuery := fmt.Sprintf(query, hashesM) + deleteQuery := fmt.Sprintf(query, serializedHashes) return bytes.NewBuffer([]byte(deleteQuery)) } From e61484018dfae3904c8025fb106c827732338b8b Mon Sep 17 00:00:00 2001 From: Iuga Mihai <50499646+miiu96@users.noreply.github.com> Date: Wed, 24 Aug 2022 16:09:04 +0300 Subject: [PATCH 58/69] Update process/accounts/accountsProcessor.go Co-authored-by: Sorin Stanculeanu <34831323+sstanculeanu@users.noreply.github.com> --- process/accounts/accountsProcessor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/accounts/accountsProcessor.go b/process/accounts/accountsProcessor.go index eb80e814..00ca7336 100644 --- a/process/accounts/accountsProcessor.go +++ b/process/accounts/accountsProcessor.go @@ -20,7 +20,7 @@ import ( var log = logger.GetOrCreate("indexer/process/accounts") -// accountsProcessor is structure responsible for processing accounts +// accountsProcessor is a structure responsible for processing accounts type accountsProcessor struct { internalMarshalizer marshal.Marshalizer addressPubkeyConverter core.PubkeyConverter From 5b666c0fd6cd96d6b4629199c06332853f3701d5 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 25 Aug 2022 13:07:36 +0300 Subject: [PATCH 59/69] initial paid fee in transaction structure --- data/transaction.go | 1 + process/transactions/transactionDBBuilder.go | 2 ++ 2 files changed, 3 insertions(+) diff --git a/data/transaction.go b/data/transaction.go index e26b7fdd..80873e35 100644 --- a/data/transaction.go +++ b/data/transaction.go @@ -21,6 +21,7 @@ type Transaction struct { GasLimit uint64 `json:"gasLimit"` GasUsed uint64 `json:"gasUsed"` Fee string `json:"fee"` + InitialPaidFee string `json:"initialPaidFee,omitempty"` Data []byte `json:"data"` Signature string `json:"signature"` Timestamp time.Duration `json:"timestamp"` diff --git a/process/transactions/transactionDBBuilder.go b/process/transactions/transactionDBBuilder.go index 848d33c4..1188212d 100644 --- a/process/transactions/transactionDBBuilder.go +++ b/process/transactions/transactionDBBuilder.go @@ -49,6 +49,7 @@ func (dtb *dbTransactionBuilder) prepareTransaction( ) *data.Transaction { gasUsed := dtb.txFeeCalculator.ComputeGasLimit(tx) fee := dtb.txFeeCalculator.ComputeTxFeeBasedOnGasUsed(tx, gasUsed) + initialPaidFee := dtb.txFeeCalculator.ComputeTxFeeBasedOnGasUsed(tx, tx.GasLimit) isScCall := core.IsSmartContractAddress(tx.RcvAddr) res := dtb.dataFieldParser.Parse(tx.Data, tx.SndAddr, tx.RcvAddr) @@ -70,6 +71,7 @@ func (dtb *dbTransactionBuilder) prepareTransaction( Timestamp: time.Duration(header.GetTimeStamp()), Status: txStatus, GasUsed: gasUsed, + InitialPaidFee: initialPaidFee.String(), Fee: fee.String(), ReceiverUserName: tx.RcvUserName, SenderUserName: tx.SndUserName, From 52b2a95af65462be027689c9119dc6f3b2cbc030 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 25 Aug 2022 13:07:51 +0300 Subject: [PATCH 60/69] fix unit and integration tests --- integrationtests/esdtTransfer_test.go | 2 +- 
integrationtests/nftTransferCrossWithScCall_test.go | 2 +- integrationtests/relayedTx_test.go | 6 +++--- integrationtests/scCallIntraShard_test.go | 4 ++-- .../testdata/claimRewards/tx-claim-rewards.json | 1 + .../op-nft-transfer-sc-call-after-refund.json | 1 + .../nftTransferCrossShard/tx-complete-with-status.json | 1 + .../tx-nft-transfer-failed-on-dst.json | 1 + .../tx-nft-transfer-sc-call-after-refund.json | 1 + .../tx-nft-transfer-sc-call-source.json | 1 + .../testdata/nftTransferCrossShard/tx-nft-transfer.json | 1 + integrationtests/transactions_test.go | 2 +- process/transactions/transactionDBBuilder_test.go | 1 + 13 files changed, 16 insertions(+), 8 deletions(-) diff --git a/integrationtests/esdtTransfer_test.go b/integrationtests/esdtTransfer_test.go index 18617b27..7161802f 100644 --- a/integrationtests/esdtTransfer_test.go +++ b/integrationtests/esdtTransfer_test.go @@ -18,7 +18,7 @@ import ( ) const ( - expectedESDTTransferTX = `{"miniBlockHash":"1ecea6dff9ab9a785a2d55720e88c1bbd7d9c56310a035d16163e373879cd0e1","nonce":6,"round":50,"value":"0","receiver":"657264313375377a79656b7a7664767a656b38373638723567617539703636373775667070736a756b6c7539653674377978377268673473363865327a65","sender":"65726431656636343730746a64746c67706139663667336165346e7365646d6a6730677636773733763332787476686b6666663939336871373530786c39","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":104011,"gasUsed":104011,"fee":"104000110000000","data":"RVNEVFRyYW5zZmVyQDU0NDc0ZTJkMzgzODYyMzgzMzY2QDBh","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"tokens":["TGN-88b83f"],"esdtValues":["10"],"operation":"ESDTTransfer"}` + expectedESDTTransferTX = `{ "initialPaidFee":"104000110000000","miniBlockHash":"1ecea6dff9ab9a785a2d55720e88c1bbd7d9c56310a035d16163e373879cd0e1","nonce":6,"round":50,"value":"0","receiver":"657264313375377a79656b7a7664767a656b38373638723567617539703636373775667070736a756b6c7539653674377978377268673473363865327a65","sender":"65726431656636343730746a64746c67706139663667336165346e7365646d6a6730677636773733763332787476686b6666663939336871373530786c39","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":104011,"gasUsed":104011,"fee":"104000110000000","data":"RVNEVFRyYW5zZmVyQDU0NDc0ZTJkMzgzODYyMzgzMzY2QDBh","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"tokens":["TGN-88b83f"],"esdtValues":["10"],"operation":"ESDTTransfer"}` ) func TestESDTTransferTooMuchGasProvided(t *testing.T) { diff --git a/integrationtests/nftTransferCrossWithScCall_test.go b/integrationtests/nftTransferCrossWithScCall_test.go index f7776a17..15164998 100644 --- a/integrationtests/nftTransferCrossWithScCall_test.go +++ b/integrationtests/nftTransferCrossWithScCall_test.go @@ -18,7 +18,7 @@ import ( ) const ( - expectedCrossShardTransferWithSCCall = 
`{"miniBlockHash":"99a07aab4f6722a1473b33bd7bb35e339c69339c400737b14a94ad8bceaa1734","nonce":79,"round":50,"value":"0","receiver":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":5000000,"gasUsed":5000000,"fee":"595490000000000","data":"RVNEVE5GVFRyYW5zZmVyQDRkNDU1ODQ2NDE1MjRkMmQ2MzYzNjIzMjM1MzJAMDc4YkAwMzQ3NTQzZTViNTljOWJlODY3MEAwODAxMTIwYjAwMDM0NzU0M2U1YjU5YzliZTg2NzAyMjY2MDg4YjBmMWEyMDAwMDAwMDAwMDAwMDAwMDAwNTAwNTc1NGU0ZjZiYTBiOTRlZmQ3MWEwZTRkZDQ4MTRlZTI0ZTVmNzUyOTdjZWIzMjAwM2EzZDAwMDAwMDA3MDFiNjQwODYzNjU4N2MwMDAwMDAwMDAwMDAwNDEwMDAwMDAwMDAwMDAwMDQxMDAxMDAwMDAwMDAwYTAzNDc1NDNlNWI1OWM5YmU4NjcwMDAwMDAwMDAwMDAwMDAwYTAzNDc1NDNlNWI1OWM5YmU4NjcwQDYzNmM2MTY5NmQ1MjY1Nzc2MTcyNjQ3Mw==","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"tokens":["MEXFARM-ccb252-078b"],"esdtValues":["15482888667631250736752"],"receivers":["0801120b000347543e5b59c9be86702266088b0f1a20000000000000000005005754e4f6ba0b94efd71a0e4dd4814ee24e5f75297ceb32003a3d0000000701b6408636587c0000000000000410000000000000041001000000000a0347543e5b59c9be8670000000000000000a0347543e5b59c9be8670"],"receiversShardIDs":[0],"operation":"ESDTNFTTransfer"}` + expectedCrossShardTransferWithSCCall = `{ "initialPaidFee":"595490000000000","miniBlockHash":"99a07aab4f6722a1473b33bd7bb35e339c69339c400737b14a94ad8bceaa1734","nonce":79,"round":50,"value":"0","receiver":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":5000000,"gasUsed":5000000,"fee":"595490000000000","data":"RVNEVE5GVFRyYW5zZmVyQDRkNDU1ODQ2NDE1MjRkMmQ2MzYzNjIzMjM1MzJAMDc4YkAwMzQ3NTQzZTViNTljOWJlODY3MEAwODAxMTIwYjAwMDM0NzU0M2U1YjU5YzliZTg2NzAyMjY2MDg4YjBmMWEyMDAwMDAwMDAwMDAwMDAwMDAwNTAwNTc1NGU0ZjZiYTBiOTRlZmQ3MWEwZTRkZDQ4MTRlZTI0ZTVmNzUyOTdjZWIzMjAwM2EzZDAwMDAwMDA3MDFiNjQwODYzNjU4N2MwMDAwMDAwMDAwMDAwNDEwMDAwMDAwMDAwMDAwMDQxMDAxMDAwMDAwMDAwYTAzNDc1NDNlNWI1OWM5YmU4NjcwMDAwMDAwMDAwMDAwMDAwYTAzNDc1NDNlNWI1OWM5YmU4NjcwQDYzNmM2MTY5NmQ1MjY1Nzc2MTcyNjQ3Mw==","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"tokens":["MEXFARM-ccb252-078b"],"esdtValues":["15482888667631250736752"],"receivers":["0801120b000347543e5b59c9be86702266088b0f1a20000000000000000005005754e4f6ba0b94efd71a0e4dd4814ee24e5f75297ceb32003a3d0000000701b6408636587c0000000000000410000000000000041001000000000a0347543e5b59c9be8670000000000000000a0347543e5b59c9be8670"],"receiversShardIDs":[0],"operation":"ESDTNFTTransfer"}` ) func TestNFTTransferCrossShardWithScCall(t *testing.T) { diff --git a/integrationtests/relayedTx_test.go b/integrationtests/relayedTx_test.go index b32b94cc..7ee520b0 100644 --- a/integrationtests/relayedTx_test.go +++ b/integrationtests/relayedTx_test.go @@ -18,10 +18,10 @@ import ( ) const ( - expectedRelayedTxSource = 
`{"miniBlockHash":"fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a","nonce":1196667,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":16610000,"gasUsed":16610000,"fee":"1760000000000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMyMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjQxNDE0MTQxNDE0MTQxNDE0MTQxNDE0NjQxNDk3NDY3MzczODM1MmY3MzZjNzM1NTQxNDg2ODZiNTczMzQ1Njk2MjRjNmU0NzUyNGI3NjQ5NmY0ZTRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzUzMDMwMzAzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI2MzMyNDYzMjVhNTU0NjMwNjQ0NzU2N2E2NDQ3NDYzMDYxNTczOTc1NTE0NDQ2Njg1OTdhNDkzMTRkNmE1OTM1NTk2ZDUxMzM1YTQ0NDk3NzU5MzI0YTY5NTk1NDRkMzE1OTZkNTY2YzRmNDQ1OTMxNGQ0NDY0Njg0ZjU3NGU2YTRlN2E2NzdhNWE0NzU1Nzc0ZjQ0NWE2OTRlNDQ0NTMzNGU1NDZiMzQ1YTU0NTE3YTU5NTQ0ZTZiNWE2YTU2NmE1OTMyNDU3OTVhNTQ2ODY4NGQ2YTZjNDE0ZDZhNTEzNDRlNTQ2NzdhNGQ1NzRlNmQ0ZDU0NDUzMDRkNTQ1NjZkNTk2YTQxMzU0ZDZhNjM3NzRlNDQ1MTMyNGU1NzU1MzI0ZTdhNTk3YTU5NTc0ZDMxNGY0NDQ1MzQ1YTU0NjczMTRlNDc1MTM0NTk1NzUyNmQ0ZTU0NDE3YTU5NmE2MzM1NGQ2YTZjNmI0ZjU0NTI2YzRlNmQ0OTc5NGU2YTQ5Nzc1YTY3M2QzZDIyMmMyMjYzNjg2MTY5NmU0OTQ0MjIzYTIyNGQ1MTNkM2QyMjJjMjI3NjY1NzI3MzY5NmY2ZTIyM2EzMTJjMjI3MzY5Njc2ZTYxNzQ3NTcyNjUyMjNhMjI1MjM5NDYyYjM0NTQ2MzUyNDE1YTM4NmQ3NzcxMzI0NTU5MzAzMTYzNTk2YzMzNzY2MjcxNmM0NjY1NzE3NjM4N2E3NjQ3NGE3NzVhNjgzMzU5NGQ0ZjU1NmI0MjM0NjQzNDUxNTc0ZTY2Mzc2NzQ0NjI2YzQ4NDgzMjU3NmI3MTYxNGE3NjYxNDg0NTc0NDM1NjYxNzA0OTcxMzM2NTM1NjU2MjM4NGU0MTc3M2QzZDIyN2Q=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["000000000000000005008b60efce7fb25b140078645b71226cb9c644abc8a0d3"],"receiversShardIDs":[0],"operation":"transfer","function":"saveAttestation","isRelayed":true}` - expectedRelayedTxAfterRefund = 
`{"miniBlockHash":"fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a","nonce":1196667,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":16610000,"gasUsed":7982817,"fee":"1673728170000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMyMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjQxNDE0MTQxNDE0MTQxNDE0MTQxNDE0NjQxNDk3NDY3MzczODM1MmY3MzZjNzM1NTQxNDg2ODZiNTczMzQ1Njk2MjRjNmU0NzUyNGI3NjQ5NmY0ZTRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzUzMDMwMzAzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI2MzMyNDYzMjVhNTU0NjMwNjQ0NzU2N2E2NDQ3NDYzMDYxNTczOTc1NTE0NDQ2Njg1OTdhNDkzMTRkNmE1OTM1NTk2ZDUxMzM1YTQ0NDk3NzU5MzI0YTY5NTk1NDRkMzE1OTZkNTY2YzRmNDQ1OTMxNGQ0NDY0Njg0ZjU3NGU2YTRlN2E2NzdhNWE0NzU1Nzc0ZjQ0NWE2OTRlNDQ0NTMzNGU1NDZiMzQ1YTU0NTE3YTU5NTQ0ZTZiNWE2YTU2NmE1OTMyNDU3OTVhNTQ2ODY4NGQ2YTZjNDE0ZDZhNTEzNDRlNTQ2NzdhNGQ1NzRlNmQ0ZDU0NDUzMDRkNTQ1NjZkNTk2YTQxMzU0ZDZhNjM3NzRlNDQ1MTMyNGU1NzU1MzI0ZTdhNTk3YTU5NTc0ZDMxNGY0NDQ1MzQ1YTU0NjczMTRlNDc1MTM0NTk1NzUyNmQ0ZTU0NDE3YTU5NmE2MzM1NGQ2YTZjNmI0ZjU0NTI2YzRlNmQ0OTc5NGU2YTQ5Nzc1YTY3M2QzZDIyMmMyMjYzNjg2MTY5NmU0OTQ0MjIzYTIyNGQ1MTNkM2QyMjJjMjI3NjY1NzI3MzY5NmY2ZTIyM2EzMTJjMjI3MzY5Njc2ZTYxNzQ3NTcyNjUyMjNhMjI1MjM5NDYyYjM0NTQ2MzUyNDE1YTM4NmQ3NzcxMzI0NTU5MzAzMTYzNTk2YzMzNzY2MjcxNmM0NjY1NzE3NjM4N2E3NjQ3NGE3NzVhNjgzMzU5NGQ0ZjU1NmI0MjM0NjQzNDUxNTc0ZTY2Mzc2NzQ0NjI2YzQ4NDgzMjU3NmI3MTYxNGE3NjYxNDg0NTc0NDM1NjYxNzA0OTcxMzM2NTM1NjU2MjM4NGU0MTc3M2QzZDIyN2Q=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["000000000000000005008b60efce7fb25b140078645b71226cb9c644abc8a0d3"],"receiversShardIDs":[0],"operation":"transfer","function":"saveAttestation","isRelayed":true}` + expectedRelayedTxSource = 
`{"initialPaidFee":"1760000000000000","miniBlockHash":"fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a","nonce":1196667,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":16610000,"gasUsed":16610000,"fee":"1760000000000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMyMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjQxNDE0MTQxNDE0MTQxNDE0MTQxNDE0NjQxNDk3NDY3MzczODM1MmY3MzZjNzM1NTQxNDg2ODZiNTczMzQ1Njk2MjRjNmU0NzUyNGI3NjQ5NmY0ZTRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzUzMDMwMzAzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI2MzMyNDYzMjVhNTU0NjMwNjQ0NzU2N2E2NDQ3NDYzMDYxNTczOTc1NTE0NDQ2Njg1OTdhNDkzMTRkNmE1OTM1NTk2ZDUxMzM1YTQ0NDk3NzU5MzI0YTY5NTk1NDRkMzE1OTZkNTY2YzRmNDQ1OTMxNGQ0NDY0Njg0ZjU3NGU2YTRlN2E2NzdhNWE0NzU1Nzc0ZjQ0NWE2OTRlNDQ0NTMzNGU1NDZiMzQ1YTU0NTE3YTU5NTQ0ZTZiNWE2YTU2NmE1OTMyNDU3OTVhNTQ2ODY4NGQ2YTZjNDE0ZDZhNTEzNDRlNTQ2NzdhNGQ1NzRlNmQ0ZDU0NDUzMDRkNTQ1NjZkNTk2YTQxMzU0ZDZhNjM3NzRlNDQ1MTMyNGU1NzU1MzI0ZTdhNTk3YTU5NTc0ZDMxNGY0NDQ1MzQ1YTU0NjczMTRlNDc1MTM0NTk1NzUyNmQ0ZTU0NDE3YTU5NmE2MzM1NGQ2YTZjNmI0ZjU0NTI2YzRlNmQ0OTc5NGU2YTQ5Nzc1YTY3M2QzZDIyMmMyMjYzNjg2MTY5NmU0OTQ0MjIzYTIyNGQ1MTNkM2QyMjJjMjI3NjY1NzI3MzY5NmY2ZTIyM2EzMTJjMjI3MzY5Njc2ZTYxNzQ3NTcyNjUyMjNhMjI1MjM5NDYyYjM0NTQ2MzUyNDE1YTM4NmQ3NzcxMzI0NTU5MzAzMTYzNTk2YzMzNzY2MjcxNmM0NjY1NzE3NjM4N2E3NjQ3NGE3NzVhNjgzMzU5NGQ0ZjU1NmI0MjM0NjQzNDUxNTc0ZTY2Mzc2NzQ0NjI2YzQ4NDgzMjU3NmI3MTYxNGE3NjYxNDg0NTc0NDM1NjYxNzA0OTcxMzM2NTM1NjU2MjM4NGU0MTc3M2QzZDIyN2Q=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["000000000000000005008b60efce7fb25b140078645b71226cb9c644abc8a0d3"],"receiversShardIDs":[0],"operation":"transfer","function":"saveAttestation","isRelayed":true}` + expectedRelayedTxAfterRefund = 
`{"initialPaidFee":"1760000000000000","miniBlockHash":"fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a","nonce":1196667,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":16610000,"gasUsed":7982817,"fee":"1673728170000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMyMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjQxNDE0MTQxNDE0MTQxNDE0MTQxNDE0NjQxNDk3NDY3MzczODM1MmY3MzZjNzM1NTQxNDg2ODZiNTczMzQ1Njk2MjRjNmU0NzUyNGI3NjQ5NmY0ZTRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzUzMDMwMzAzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI2MzMyNDYzMjVhNTU0NjMwNjQ0NzU2N2E2NDQ3NDYzMDYxNTczOTc1NTE0NDQ2Njg1OTdhNDkzMTRkNmE1OTM1NTk2ZDUxMzM1YTQ0NDk3NzU5MzI0YTY5NTk1NDRkMzE1OTZkNTY2YzRmNDQ1OTMxNGQ0NDY0Njg0ZjU3NGU2YTRlN2E2NzdhNWE0NzU1Nzc0ZjQ0NWE2OTRlNDQ0NTMzNGU1NDZiMzQ1YTU0NTE3YTU5NTQ0ZTZiNWE2YTU2NmE1OTMyNDU3OTVhNTQ2ODY4NGQ2YTZjNDE0ZDZhNTEzNDRlNTQ2NzdhNGQ1NzRlNmQ0ZDU0NDUzMDRkNTQ1NjZkNTk2YTQxMzU0ZDZhNjM3NzRlNDQ1MTMyNGU1NzU1MzI0ZTdhNTk3YTU5NTc0ZDMxNGY0NDQ1MzQ1YTU0NjczMTRlNDc1MTM0NTk1NzUyNmQ0ZTU0NDE3YTU5NmE2MzM1NGQ2YTZjNmI0ZjU0NTI2YzRlNmQ0OTc5NGU2YTQ5Nzc1YTY3M2QzZDIyMmMyMjYzNjg2MTY5NmU0OTQ0MjIzYTIyNGQ1MTNkM2QyMjJjMjI3NjY1NzI3MzY5NmY2ZTIyM2EzMTJjMjI3MzY5Njc2ZTYxNzQ3NTcyNjUyMjNhMjI1MjM5NDYyYjM0NTQ2MzUyNDE1YTM4NmQ3NzcxMzI0NTU5MzAzMTYzNTk2YzMzNzY2MjcxNmM0NjY1NzE3NjM4N2E3NjQ3NGE3NzVhNjgzMzU5NGQ0ZjU1NmI0MjM0NjQzNDUxNTc0ZTY2Mzc2NzQ0NjI2YzQ4NDgzMjU3NmI3MTYxNGE3NjYxNDg0NTc0NDM1NjYxNzA0OTcxMzM2NTM1NjU2MjM4NGU0MTc3M2QzZDIyN2Q=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["000000000000000005008b60efce7fb25b140078645b71226cb9c644abc8a0d3"],"receiversShardIDs":[0],"operation":"transfer","function":"saveAttestation","isRelayed":true}` - expectedRelayedTxIntra = 
`{"miniBlockHash":"2709174224d13e49fd76a70b48bd3db7838ca715bcfe09be59cef043241d7ef3","nonce":1196665,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":15406000,"gasUsed":10556000,"fee":"2257820000000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMwMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjcyNmI2ZTUzNGE0NzdhMzQzNzY5NTM0ZTc5NGI0MzY0MmY1MDRmNzE3MDc1Nzc2YjU0Nzc2ODQ1MzQzMDZkN2E0NzZhNTg1YTUxNjg2ZTYyMmI3MjRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzMzMjMzMzIzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI1NTMyNDYzMjVhNTU3NDZjNjU1NjVhNjg2MjQ4NTY2YzUxNDQ1OTc5NGU2YjU1MzI0ZDZiNDEzMjRkNmE1YTQ2NGU2YTQ5N2E0ZDU0NGQzNTRlNmE1NTMyNGQ1NDRkMzI0ZTZiNGQzMjUxNTQ2Mzc4NGQ3YTZiMzI1MTdhNjMzMDRlNmE1NTMzNGU0NDYzMzA0ZTdhNjczMjRlNTQ0ZDMzNGU3YTUxN2E0ZTdhNTkzMzRlNmI1NTdhNGQ0NDYzNzc0ZDdhNDk3YTRmNTQ2NDQyNGU3YTU5MzM0ZTU0NWE0NDRlNmE0NTMzNGU1NDU5MzI0ZTZhNjMzMzRlNTQ0ZDMwNGU3YTQ1N2E0ZTdhNTkzMzRlN2E0YTQxNGU2YTU1MzM0ZTQ0NTkzNDUxNDQ0ZDc3NGU3YTY3N2E0ZDU0NGQzMDRlNDQ1MTMyNGQ2YTRkNzg0ZTZhNTU3YTRkNDQ0ZDMxNGU2YTU5N2E0ZTU0NTE3YTRlNmE0NTdhNGU1NDRkMzE0ZTZhNDk3YTRkNTQ0ZDdhNGU0NDQ1MzA0ZDdhNTk3ODRkN2E2MzMyNGQ3YTRkMzA0ZTZhNGQ3YTRkNDQ0ZDc4NGU2YTU5N2E0ZTU0NTkzMDRlNDQ1NTdhNGU1NDRkMzE0ZTQ0NDk3YTRmNTQ1MTdhNGU0NDQ1MzI0ZTZhNTk3ODRkN2E2MzdhNGY1NTQxMzI0ZDZhNjMzMDRlNmE0ZTQxNGU2YTQ5MzI0ZDdhNGQ3ODRlN2E0NTMyNGQ1NDU5MzE0ZDdhNTU3YTRmNDQ0ZDdhNGQ3YTRkMzM0ZDZhNjM3OTRlN2E1OTdhNGU0NDRkNzc0ZDdhNDE3YTRlNTQ1YTQ0NGU2YTU5MzI1MjQ0NGQzMTRlN2E1OTMyNTI0NDRkMzI0ZTdhNTUzMjUxNTQ2MzMyNGU2YTUxN2E0ZTQ0NTk3YTRlN2E1MTdhNGY1NDVhNDI0ZTZhNGQ3YTRlNTQ1YTQyNGU3YTYzMzM1MTU0NGQzNDRlNmE1NTMzNGY0NDYzNzcyMjJjMjI2MzY4NjE2OTZlNDk0NDIyM2EyMjRkNTEzZDNkMjIyYzIyNzY2NTcyNzM2OTZmNmUyMjNhMzEyYzIyNzM2OTY3NmU2MTc0NzU3MjY1MjIzYTIyNzE2NjcwNGE0Nzc2NzM0NDQ0NDI1NTUxNGUyZjUyNTU0NzRmNTA1Mzc1NTIzMjQ4NGY0YTYxNGI3MDM4NDUzNjYzNGU1NDc3MzAzMzQzMzc2OTM0NTU3Nzc2MmY0YzU0NzM2ZDJiNmE3MDQyMzk3NTZjNDgzOTY2NTMyYjQ0NzE2MTcyNzE0ZjYyNDg0MTcwMzg2NjZkNzIzMDZhNDE1NTMxNzM2ZTM1NDE2NzNkM2QyMjdk","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["ae49d2246cf8ee248dc8a09dfcf3aaa6ec244f0844e349b31a35d94219dbfab3"],"receiversShardIDs":[0],"operation":"SaveKeyValue","isRelayed":true}` + expectedRelayedTxIntra = 
`{"initialPaidFee":"2306320000000000","miniBlockHash":"2709174224d13e49fd76a70b48bd3db7838ca715bcfe09be59cef043241d7ef3","nonce":1196665,"round":50,"value":"0","receiver":"6572643134657961796672766c72687a66727767357a776c65756132356d6b7a676e6367676e33356e766336786876357978776d6c326573306633646874","sender":"657264316b376a3665776a736c61347a73677638763666366665336476726b677633643064396a6572637a773435687a6564687965643873683275333475","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":15406000,"gasUsed":10556000,"fee":"2257820000000000","data":"cmVsYXllZFR4QDdiMjI2ZTZmNmU2MzY1MjIzYTMwMmMyMjc2NjE2Yzc1NjUyMjNhMzAyYzIyNzI2NTYzNjU2OTc2NjU3MjIyM2EyMjcyNmI2ZTUzNGE0NzdhMzQzNzY5NTM0ZTc5NGI0MzY0MmY1MDRmNzE3MDc1Nzc2YjU0Nzc2ODQ1MzQzMDZkN2E0NzZhNTg1YTUxNjg2ZTYyMmI3MjRkM2QyMjJjMjI3MzY1NmU2NDY1NzIyMjNhMjI3MjZiNmU1MzRhNDc3YTM0Mzc2OTUzNGU3OTRiNDM2NDJmNTA0ZjcxNzA3NTc3NmI1NDc3Njg0NTM0MzA2ZDdhNDc2YTU4NWE1MTY4NmU2MjJiNzI0ZDNkMjIyYzIyNjc2MTczNTA3MjY5NjM2NTIyM2EzMTMwMzAzMDMwMzAzMDMwMzAzMDJjMjI2NzYxNzM0YzY5NmQ2OTc0MjIzYTMxMzMzMjMzMzIzMDMwMzAyYzIyNjQ2MTc0NjEyMjNhMjI1NTMyNDYzMjVhNTU3NDZjNjU1NjVhNjg2MjQ4NTY2YzUxNDQ1OTc5NGU2YjU1MzI0ZDZiNDEzMjRkNmE1YTQ2NGU2YTQ5N2E0ZDU0NGQzNTRlNmE1NTMyNGQ1NDRkMzI0ZTZiNGQzMjUxNTQ2Mzc4NGQ3YTZiMzI1MTdhNjMzMDRlNmE1NTMzNGU0NDYzMzA0ZTdhNjczMjRlNTQ0ZDMzNGU3YTUxN2E0ZTdhNTkzMzRlNmI1NTdhNGQ0NDYzNzc0ZDdhNDk3YTRmNTQ2NDQyNGU3YTU5MzM0ZTU0NWE0NDRlNmE0NTMzNGU1NDU5MzI0ZTZhNjMzMzRlNTQ0ZDMwNGU3YTQ1N2E0ZTdhNTkzMzRlN2E0YTQxNGU2YTU1MzM0ZTQ0NTkzNDUxNDQ0ZDc3NGU3YTY3N2E0ZDU0NGQzMDRlNDQ1MTMyNGQ2YTRkNzg0ZTZhNTU3YTRkNDQ0ZDMxNGU2YTU5N2E0ZTU0NTE3YTRlNmE0NTdhNGU1NDRkMzE0ZTZhNDk3YTRkNTQ0ZDdhNGU0NDQ1MzA0ZDdhNTk3ODRkN2E2MzMyNGQ3YTRkMzA0ZTZhNGQ3YTRkNDQ0ZDc4NGU2YTU5N2E0ZTU0NTkzMDRlNDQ1NTdhNGU1NDRkMzE0ZTQ0NDk3YTRmNTQ1MTdhNGU0NDQ1MzI0ZTZhNTk3ODRkN2E2MzdhNGY1NTQxMzI0ZDZhNjMzMDRlNmE0ZTQxNGU2YTQ5MzI0ZDdhNGQ3ODRlN2E0NTMyNGQ1NDU5MzE0ZDdhNTU3YTRmNDQ0ZDdhNGQ3YTRkMzM0ZDZhNjM3OTRlN2E1OTdhNGU0NDRkNzc0ZDdhNDE3YTRlNTQ1YTQ0NGU2YTU5MzI1MjQ0NGQzMTRlN2E1OTMyNTI0NDRkMzI0ZTdhNTUzMjUxNTQ2MzMyNGU2YTUxN2E0ZTQ0NTk3YTRlN2E1MTdhNGY1NDVhNDI0ZTZhNGQ3YTRlNTQ1YTQyNGU3YTYzMzM1MTU0NGQzNDRlNmE1NTMzNGY0NDYzNzcyMjJjMjI2MzY4NjE2OTZlNDk0NDIyM2EyMjRkNTEzZDNkMjIyYzIyNzY2NTcyNzM2OTZmNmUyMjNhMzEyYzIyNzM2OTY3NmU2MTc0NzU3MjY1MjIzYTIyNzE2NjcwNGE0Nzc2NzM0NDQ0NDI1NTUxNGUyZjUyNTU0NzRmNTA1Mzc1NTIzMjQ4NGY0YTYxNGI3MDM4NDUzNjYzNGU1NDc3MzAzMzQzMzc2OTM0NTU3Nzc2MmY0YzU0NzM2ZDJiNmE3MDQyMzk3NTZjNDgzOTY2NTMyYjQ0NzE2MTcyNzE0ZjYyNDg0MTcwMzg2NjZkNzIzMDZhNDE1NTMxNzM2ZTM1NDE2NzNkM2QyMjdk","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"receivers":["ae49d2246cf8ee248dc8a09dfcf3aaa6ec244f0844e349b31a35d94219dbfab3"],"receiversShardIDs":[0],"operation":"SaveKeyValue","isRelayed":true}` ) func TestRelayedTransactionGasUsedCrossShard(t *testing.T) { diff --git a/integrationtests/scCallIntraShard_test.go b/integrationtests/scCallIntraShard_test.go index c8315e47..2cf3230e 100644 --- a/integrationtests/scCallIntraShard_test.go +++ b/integrationtests/scCallIntraShard_test.go @@ -18,8 +18,8 @@ import ( ) const ( - claimRewardsTx = 
`{"miniBlockHash":"60b38b11110d28d1b361359f9688bb041bb9180219a612a83ff00dcc0db4d607","nonce":101,"round":50,"value":"0","receiver":"65726431717171717171717171717171717067717877616b7432673775396174736e723033677163676d68637633387074376d6b64393471367368757774","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":250000000,"gasUsed":33891715,"fee":"406237150000000","data":"Y2xhaW1SZXdhcmRz","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"operation":"transfer"}` - scCallFailTx = `{"miniBlockHash":"5d04f80b044352bfbbde123702323eae07fdd8ca77f24f256079006058b6e7b4","nonce":46,"round":50,"value":"5000000000000000000","receiver":"6572643171717171717171717171717171717170717171717171717171717171717171717171717171717171717171717166686c6c6c6c73637274353672","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":12000000,"gasUsed":12000000,"fee":"181380000000000","data":"ZGVsZWdhdGU=","signature":"","timestamp":5040,"status":"fail","searchOrder":0,"hasScResults":true,"operation":"transfer"}` + claimRewardsTx = `{"initialPaidFee":"2567320000000000","miniBlockHash":"60b38b11110d28d1b361359f9688bb041bb9180219a612a83ff00dcc0db4d607","nonce":101,"round":50,"value":"0","receiver":"65726431717171717171717171717171717067717877616b7432673775396174736e723033677163676d68637633387074376d6b64393471367368757774","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":250000000,"gasUsed":33891715,"fee":"406237150000000","data":"Y2xhaW1SZXdhcmRz","signature":"","timestamp":5040,"status":"success","searchOrder":0,"hasScResults":true,"operation":"transfer"}` + scCallFailTx = `{"initialPaidFee":"181380000000000","miniBlockHash":"5d04f80b044352bfbbde123702323eae07fdd8ca77f24f256079006058b6e7b4","nonce":46,"round":50,"value":"5000000000000000000","receiver":"6572643171717171717171717171717171717170717171717171717171717171717171717171717171717171717171717166686c6c6c6c73637274353672","sender":"65726431757265376561323437636c6a3679716a673830756e7a36787a6a686c6a327a776d3467746736737564636d747364326377337873373468617376","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":12000000,"gasUsed":12000000,"fee":"181380000000000","data":"ZGVsZWdhdGU=","signature":"","timestamp":5040,"status":"fail","searchOrder":0,"hasScResults":true,"operation":"transfer"}` ) func TestTransactionWithSCCallFail(t *testing.T) { diff --git a/integrationtests/testdata/claimRewards/tx-claim-rewards.json b/integrationtests/testdata/claimRewards/tx-claim-rewards.json index d54773b7..0f729fd3 100644 --- a/integrationtests/testdata/claimRewards/tx-claim-rewards.json +++ b/integrationtests/testdata/claimRewards/tx-claim-rewards.json @@ -11,6 +11,7 @@ "gasLimit": 6000000, "gasUsed": 1068000, "fee": "78000000000000", + "initialPaidFee":"127320000000000", "data": "Y2xhaW1SZXdhcmRz", "signature": "", "timestamp": 5040, diff --git a/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json index cb204cef..cb33ea19 100644 --- 
a/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json +++ b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json @@ -11,6 +11,7 @@ "gasLimit": 150000000, "gasUsed": 139832352, "fee": "1802738520000000", + "initialPaidFee":"1904415000000000", "data": "RVNEVE5GVFRyYW5zZmVyQDRjNGI0NjQxNTI0ZDJkMzM2NjM0NjYzOTYyQDAxNjUzNEA2ZjFlNmYwMWJjNzYyN2Y1YWVAMDAwMDAwMDAwMDAwMDAwMDA1MDBmMWM4ZjJmZGM1OGE2M2M2YjIwMWZjMmVkNjI5OTYyZDNkZmEzM2ZlN2NlYkA2MzZmNmQ3MDZmNzU2ZTY0NTI2NTc3NjE3MjY0NzM1MDcyNmY3ODc5QDAwMDAwMDAwMDAwMDAwMDAwNTAwNGY3OWVjNDRiYjEzMzcyYjVhYzlkOTk2ZDc0OTEyMGY0NzY0Mjc2MjdjZWI=", "signature": "", "timestamp": 5040, diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json b/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json index e9e52931..3632ab87 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json @@ -29,5 +29,6 @@ "gasPrice": 1000000000, "timestamp": 5040, "status": "fail", + "initialPaidFee": "279185000000000", "searchOrder": 0 } diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json index 184c7051..a805e3c8 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json @@ -11,6 +11,7 @@ "gasLimit": 5000000, "gasUsed": 963500, "fee": "235850000000000", + "initialPaidFee": "276215000000000", "data": "RVNEVE5GVFRyYW5zZmVyQDUzNmY2ZDY1NzQ2ODY5NmU2NzJkNjE2MjYzNjQ2NTY2QDAxQDAxQDAwMDAwMDAwMDAwMDAwMDAwNTAwYTdhMDI3NzFhYTA3MDkwZTYwN2YwMmIyNWY0ZDZkMjQxYmZmMzJiOTkwYTI=", "signature": "", "timestamp": 5040, diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json index 3dc0faa2..2a886119 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json @@ -11,6 +11,7 @@ "gasLimit": 150000000, "gasUsed": 139832352, "fee": "1802738520000000", + "initialPaidFee": "1904415000000000", "data": "RVNEVE5GVFRyYW5zZmVyQDRjNGI0NjQxNTI0ZDJkMzM2NjM0NjYzOTYyQDAxNjUzNEA2ZjFlNmYwMWJjNzYyN2Y1YWVAMDAwMDAwMDAwMDAwMDAwMDA1MDBmMWM4ZjJmZGM1OGE2M2M2YjIwMWZjMmVkNjI5OTYyZDNkZmEzM2ZlN2NlYkA2MzZmNmQ3MDZmNzU2ZTY0NTI2NTc3NjE3MjY0NzM1MDcyNmY3ODc5QDAwMDAwMDAwMDAwMDAwMDAwNTAwNGY3OWVjNDRiYjEzMzcyYjVhYzlkOTk2ZDc0OTEyMGY0NzY0Mjc2MjdjZWI=", "signature": "", "timestamp": 5040, diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json index 664f35ed..86cbb229 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json @@ -30,5 +30,6 @@ "gasPrice": 1000000000, "timestamp": 5040, "status": "success", + "initialPaidFee": "1904415000000000", "searchOrder": 0 } diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json index 6246c5f8..ea21f90e 100644 --- 
a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json @@ -6,6 +6,7 @@ "data": "RVNEVE5GVFRyYW5zZmVyQDUzNmY2ZDY1NzQ2ODY5NmU2NzJkNjE2MjYzNjQ2NTY2QDAxQDAxQDAwMDAwMDAwMDAwMDAwMDAwNTAwYTdhMDI3NzFhYTA3MDkwZTYwN2YwMmIyNWY0ZDZkMjQxYmZmMzJiOTkwYTI=", "signature": "", "fee": "235850000000000", + "initialPaidFee": "276215000000000", "esdtValues": [ "1" ], diff --git a/integrationtests/transactions_test.go b/integrationtests/transactions_test.go index a62a4ff3..324d4bb6 100644 --- a/integrationtests/transactions_test.go +++ b/integrationtests/transactions_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/require" ) -const moveBalanceTransaction = `{"miniBlockHash":"24c374c9405540e88a36959ea83eede6ad50f6872f82d2e2a2280975615e1811","nonce":1,"round":50,"value":"1234","receiver":"7265636569766572","sender":"73656e646572","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":70000,"gasUsed":62000,"fee":"62000000000000","data":"dHJhbnNmZXI=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"operation":"transfer"}` +const moveBalanceTransaction = `{"initialPaidFee":"62080000000000","miniBlockHash":"24c374c9405540e88a36959ea83eede6ad50f6872f82d2e2a2280975615e1811","nonce":1,"round":50,"value":"1234","receiver":"7265636569766572","sender":"73656e646572","receiverShard":0,"senderShard":0,"gasPrice":1000000000,"gasLimit":70000,"gasUsed":62000,"fee":"62000000000000","data":"dHJhbnNmZXI=","signature":"","timestamp":5040,"status":"success","searchOrder":0,"operation":"transfer"}` func TestElasticIndexerSaveTransactions(t *testing.T) { setLogLevelDebug() diff --git a/process/transactions/transactionDBBuilder_test.go b/process/transactions/transactionDBBuilder_test.go index 5160c66a..10cf06c5 100644 --- a/process/transactions/transactionDBBuilder_test.go +++ b/process/transactions/transactionDBBuilder_test.go @@ -73,6 +73,7 @@ func TestGetMoveBalanceTransaction(t *testing.T) { GasPrice: gasPrice, GasLimit: gasLimit, GasUsed: uint64(500), + InitialPaidFee: "100", Data: tx.Data, Signature: hex.EncodeToString(tx.Signature), Timestamp: time.Duration(header.GetTimeStamp()), From 882990cc532cabeb374b400bb11a3a6316c51b2f Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 25 Aug 2022 16:44:10 +0300 Subject: [PATCH 61/69] change remove function --- mock/dbTransactionsHandlerStub.go | 6 ++-- process/elasticProcessor.go | 30 ++++++++++++++++--- process/interface.go | 2 +- process/transactions/transactionsProcessor.go | 23 +++++++------- .../transactionsProcessor_test.go | 4 +-- 5 files changed, 44 insertions(+), 21 deletions(-) diff --git a/mock/dbTransactionsHandlerStub.go b/mock/dbTransactionsHandlerStub.go index 4f9706fc..fe972c7e 100644 --- a/mock/dbTransactionsHandlerStub.go +++ b/mock/dbTransactionsHandlerStub.go @@ -27,9 +27,9 @@ func (tps *DBTransactionProcessorStub) PrepareTransactionsForDatabase(body *bloc return nil } -// GetRewardsTxsHashesHexEncoded - -func (tps *DBTransactionProcessorStub) GetRewardsTxsHashesHexEncoded(_ coreData.HeaderHandler, _ *block.Body) []string { - return nil +// GetHashesHexEncodedForRemove - +func (tps *DBTransactionProcessorStub) GetHashesHexEncodedForRemove(_ coreData.HeaderHandler, _ *block.Body) ([]string, []string) { + return nil, nil } // SerializeReceipts - diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 32b11106..78b6608d 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -315,14 
+315,36 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body // RemoveTransactions will remove transaction that are in miniblock from the elasticsearch server func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error { - encodedTxsHashes := ei.transactionsProc.GetRewardsTxsHashesHexEncoded(header, body) - if len(encodedTxsHashes) == 0 { + encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHashesHexEncodedForRemove(header, body) + + together := make([]string, 0) + if len(encodedTxsHashes) != 0 { + err := ei.elasticClient.DoQueryRemove( + elasticIndexer.TransactionsIndex, + converters.PrepareHashesForQueryRemove(encodedTxsHashes), + ) + if err != nil { + return err + } + } + if len(encodedScrsHashes) != 0 { + err := ei.elasticClient.DoQueryRemove( + elasticIndexer.ScResultsIndex, + converters.PrepareHashesForQueryRemove(encodedScrsHashes), + ) + if err != nil { + return err + } + } + + together = append(encodedTxsHashes, encodedScrsHashes...) + if len(together) == 0 { return nil } return ei.elasticClient.DoQueryRemove( - elasticIndexer.TransactionsIndex, - converters.PrepareHashesForQueryRemove(encodedTxsHashes), + elasticIndexer.OperationsIndex, + converters.PrepareHashesForQueryRemove(together), ) } diff --git a/process/interface.go b/process/interface.go index a726d67a..bc596e01 100644 --- a/process/interface.go +++ b/process/interface.go @@ -67,7 +67,7 @@ type DBTransactionsHandler interface { header coreData.HeaderHandler, pool *indexer.Pool, ) *data.PreparedResults - GetRewardsTxsHashesHexEncoded(header coreData.HeaderHandler, body *block.Body) []string + GetHashesHexEncodedForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) SerializeReceipts(receipts []*data.Receipt, buffSlice *data.BufferSlice, index string) error SerializeTransactions(transactions []*data.Transaction, txHashStatus map[string]string, selfShardID uint32, buffSlice *data.BufferSlice, index string) error diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index f65de5a1..f9f0b491 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -162,35 +162,36 @@ func (tdp *txsDatabaseProcessor) setTransactionSearchOrder(transactions map[stri return transactions } -// GetRewardsTxsHashesHexEncoded will return reward transactions hashes from body hex encoded -func (tdp *txsDatabaseProcessor) GetRewardsTxsHashesHexEncoded(header coreData.HeaderHandler, body *block.Body) []string { +// GetHashesHexEncodedForRemove will return transactions hashes from body hex encoded +func (tdp *txsDatabaseProcessor) GetHashesHexEncodedForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) { if body == nil || check.IfNil(header) || len(header.GetMiniBlockHeadersHashes()) == 0 { - return nil + return nil, nil } selfShardID := header.GetShardID() encodedTxsHashes := make([]string, 0) + encodedScrsHashes := make([]string, 0) for _, miniblock := range body.MiniBlocks { - if miniblock.Type != block.RewardsBlock { + shouldIgnore := miniblock.SenderShardID != miniblock.ReceiverShardID && miniblock.SenderShardID == selfShardID + if shouldIgnore { + // ignore cross-shard miniblocks at source continue } if tdp.txsGrouper.isInImportMode { - // do not delete rewards transactions on import DB + // do not delete transactions on import DB continue } - isDstMe := selfShardID == miniblock.ReceiverShardID - notMeta := 
header.GetShardID() != core.MetachainShardId - if isDstMe && notMeta { + txsHashesFromMiniblock := getTxsHashesFromMiniblockHexEncoded(miniblock) + if miniblock.Type == block.SmartContractResultBlock { + encodedScrsHashes = append(encodedScrsHashes, txsHashesFromMiniblock...) continue } - - txsHashesFromMiniblock := getTxsHashesFromMiniblockHexEncoded(miniblock) encodedTxsHashes = append(encodedTxsHashes, txsHashesFromMiniblock...) } - return encodedTxsHashes + return encodedTxsHashes, encodedScrsHashes } func shouldIgnoreProcessedMBScheduled(header coreData.HeaderHandler, mbIndex int) bool { diff --git a/process/transactions/transactionsProcessor_test.go b/process/transactions/transactionsProcessor_test.go index a78bfa97..ec10c677 100644 --- a/process/transactions/transactionsProcessor_test.go +++ b/process/transactions/transactionsProcessor_test.go @@ -509,7 +509,7 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { txDBProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) - res := txDBProc.GetRewardsTxsHashesHexEncoded(nil, nil) + res, _ := txDBProc.GetHashesHexEncodedForRemove(nil, nil) require.Nil(t, res) header := &block.Header{ @@ -544,7 +544,7 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { expectedHashes := []string{ "6831", "6832", } - txsHashes := txDBProc.GetRewardsTxsHashesHexEncoded(header, body) + txsHashes, _ := txDBProc.GetHashesHexEncodedForRemove(header, body) require.Equal(t, expectedHashes, txsHashes) } From 39bc4d03b37a41333ee46451fa1ee6e06aa4912a Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 10:18:27 +0300 Subject: [PATCH 62/69] extend unit tests and fixes --- .../transactionsProcessor_test.go | 35 +++++++++++++++++-- 1 file changed, 32 insertions(+), 3 deletions(-) diff --git a/process/transactions/transactionsProcessor_test.go b/process/transactions/transactionsProcessor_test.go index ec10c677..a9e6c2fa 100644 --- a/process/transactions/transactionsProcessor_test.go +++ b/process/transactions/transactionsProcessor_test.go @@ -534,18 +534,47 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { }, { TxHashes: [][]byte{ - []byte("h2"), + []byte("h3"), }, Type: block.TxBlock, }, + { + TxHashes: [][]byte{ + []byte("h4"), + }, + Type: block.TxBlock, + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + }, + { + TxHashes: [][]byte{ + []byte("h5"), + }, + Type: block.TxBlock, + SenderShardID: 2, + ReceiverShardID: core.MetachainShardId, + }, + { + TxHashes: [][]byte{ + []byte("h6"), + }, + Type: block.SmartContractResultBlock, + SenderShardID: 2, + ReceiverShardID: core.MetachainShardId, + }, }, } expectedHashes := []string{ - "6831", "6832", + "6831", "6832", "6833", "6835", } - txsHashes, _ := txDBProc.GetHashesHexEncodedForRemove(header, body) + expectedScrHashes := []string{ + "6836", + } + + txsHashes, scrHashes := txDBProc.GetHashesHexEncodedForRemove(header, body) require.Equal(t, expectedHashes, txsHashes) + require.Equal(t, expectedScrHashes, scrHashes) } func TestTxsDatabaseProcessor_PrepareTransactionsForDatabaseInvalidTxWithSCR(t *testing.T) { From 67faa54db750b5eb804e16b286dfce2aa893142d Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 10:46:18 +0300 Subject: [PATCH 63/69] fixes --- integrationtests/logsCrossShard_test.go | 2 ++ process/elasticProcessor_test.go | 2 +- process/transactions/transactionsProcessor.go | 13 ++++++++++--- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/integrationtests/logsCrossShard_test.go b/integrationtests/logsCrossShard_test.go index 
968780a8..93018f23 100644 --- a/integrationtests/logsCrossShard_test.go +++ b/integrationtests/logsCrossShard_test.go @@ -1,3 +1,5 @@ +//go:build integrationtests + package integrationtests import ( diff --git a/process/elasticProcessor_test.go b/process/elasticProcessor_test.go index 08ef93ec..b1db6c50 100644 --- a/process/elasticProcessor_test.go +++ b/process/elasticProcessor_test.go @@ -566,7 +566,7 @@ func TestElasticProcessor_RemoveTransactions(t *testing.T) { dbWriter := &mock.DatabaseWriterStub{ DoQueryRemoveCalled: func(index string, body *bytes.Buffer) error { bodyStr := body.String() - require.Equal(t, elasticIndexer.TransactionsIndex, index) + require.True(t, elasticIndexer.TransactionsIndex == index || elasticIndexer.OperationsIndex == index) require.True(t, strings.Contains(bodyStr, expectedHashes[0])) require.True(t, strings.Contains(bodyStr, expectedHashes[1])) called = true diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index f9f0b491..c168cedd 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -172,9 +172,8 @@ func (tdp *txsDatabaseProcessor) GetHashesHexEncodedForRemove(header coreData.He encodedTxsHashes := make([]string, 0) encodedScrsHashes := make([]string, 0) for _, miniblock := range body.MiniBlocks { - shouldIgnore := miniblock.SenderShardID != miniblock.ReceiverShardID && miniblock.SenderShardID == selfShardID - if shouldIgnore { - // ignore cross-shard miniblocks at source + if isCrossShardAtSourceAndNoRewardsMB(selfShardID, miniblock) { + // ignore cross-shard miniblocks at source ( exception to this rule are rewards miniblocks) continue } @@ -194,6 +193,14 @@ func (tdp *txsDatabaseProcessor) GetHashesHexEncodedForRemove(header coreData.He return encodedTxsHashes, encodedScrsHashes } +func isCrossShardAtSourceAndNoRewardsMB(selfShardID uint32, miniblock *block.MiniBlock) bool { + isCrossShard := miniblock.SenderShardID != miniblock.ReceiverShardID + isAtSource := miniblock.SenderShardID == selfShardID + noRewardsMb := miniblock.Type != block.RewardsBlock + + return isCrossShard && isAtSource && noRewardsMb +} + func shouldIgnoreProcessedMBScheduled(header coreData.HeaderHandler, mbIndex int) bool { miniblockHeaders := header.GetMiniBlockHeaderHandlers() if len(miniblockHeaders) <= mbIndex { From 2a0b2f115a22478303edbdb2b3eea724b4e4adc3 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 10:59:12 +0300 Subject: [PATCH 64/69] fixes after review --- process/elasticProcessor.go | 3 +-- process/elasticProcessor_test.go | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index 78b6608d..d014bdcf 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -317,7 +317,6 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error { encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHashesHexEncodedForRemove(header, body) - together := make([]string, 0) if len(encodedTxsHashes) != 0 { err := ei.elasticClient.DoQueryRemove( elasticIndexer.TransactionsIndex, @@ -337,7 +336,7 @@ func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, bo } } - together = append(encodedTxsHashes, encodedScrsHashes...) + together := append(encodedTxsHashes, encodedScrsHashes...) 
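// Reviewer note on the line above: append may reuse the backing array of
// encodedTxsHashes when it has spare capacity, so `together` can alias the
// first slice. That is harmless here because encodedTxsHashes is not read
// again afterwards, and the next commit drops the temporary slice entirely
// by funnelling all three removals through a single guarded helper.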
if len(together) == 0 { return nil } diff --git a/process/elasticProcessor_test.go b/process/elasticProcessor_test.go index b1db6c50..518c4111 100644 --- a/process/elasticProcessor_test.go +++ b/process/elasticProcessor_test.go @@ -566,7 +566,7 @@ func TestElasticProcessor_RemoveTransactions(t *testing.T) { dbWriter := &mock.DatabaseWriterStub{ DoQueryRemoveCalled: func(index string, body *bytes.Buffer) error { bodyStr := body.String() - require.True(t, elasticIndexer.TransactionsIndex == index || elasticIndexer.OperationsIndex == index) + require.Contains(t, []string{elasticIndexer.TransactionsIndex, elasticIndexer.OperationsIndex}, index) require.True(t, strings.Contains(bodyStr, expectedHashes[0])) require.True(t, strings.Contains(bodyStr, expectedHashes[1])) called = true From 8f72e256e269e502222ed30aca18222c852334d8 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 12:00:18 +0300 Subject: [PATCH 65/69] fixes after second review --- mock/dbTransactionsHandlerStub.go | 4 +-- process/elasticProcessor.go | 36 ++++++++----------- process/interface.go | 2 +- process/transactions/transactionsProcessor.go | 4 +-- .../transactionsProcessor_test.go | 4 +-- 5 files changed, 22 insertions(+), 28 deletions(-) diff --git a/mock/dbTransactionsHandlerStub.go b/mock/dbTransactionsHandlerStub.go index fe972c7e..4c7f5f3b 100644 --- a/mock/dbTransactionsHandlerStub.go +++ b/mock/dbTransactionsHandlerStub.go @@ -27,8 +27,8 @@ func (tps *DBTransactionProcessorStub) PrepareTransactionsForDatabase(body *bloc return nil } -// GetHashesHexEncodedForRemove - -func (tps *DBTransactionProcessorStub) GetHashesHexEncodedForRemove(_ coreData.HeaderHandler, _ *block.Body) ([]string, []string) { +// GetHexEncodedHashesForRemove - +func (tps *DBTransactionProcessorStub) GetHexEncodedHashesForRemove(_ coreData.HeaderHandler, _ *block.Body) ([]string, []string) { return nil, nil } diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index d014bdcf..ec1fa6b5 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -315,35 +315,29 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body // RemoveTransactions will remove transaction that are in miniblock from the elasticsearch server func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error { - encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHashesHexEncodedForRemove(header, body) + encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHexEncodedHashesForRemove(header, body) - if len(encodedTxsHashes) != 0 { - err := ei.elasticClient.DoQueryRemove( - elasticIndexer.TransactionsIndex, - converters.PrepareHashesForQueryRemove(encodedTxsHashes), - ) - if err != nil { - return err - } + err := ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.TransactionsIndex, encodedTxsHashes) + if err != nil { + return err } - if len(encodedScrsHashes) != 0 { - err := ei.elasticClient.DoQueryRemove( - elasticIndexer.ScResultsIndex, - converters.PrepareHashesForQueryRemove(encodedScrsHashes), - ) - if err != nil { - return err - } + + err = ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.ScResultsIndex, encodedScrsHashes) + if err != nil { + return err } - together := append(encodedTxsHashes, encodedScrsHashes...) 
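// Reviewer note: folding the length check into removeBasedOnHashesIfNotEmpty
// (added below) gives every index the same guard; without it, an empty
// {"ids": {"values": []}} query would still cost a round trip to
// Elasticsearch while matching nothing.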
- if len(together) == 0 { + return ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.OperationsIndex, append(encodedTxsHashes, encodedScrsHashes...)) +} + +func (ei *elasticProcessor) removeBasedOnHashesIfNotEmpty(index string, hashes []string) error { + if len(hashes) == 0 { return nil } return ei.elasticClient.DoQueryRemove( - elasticIndexer.OperationsIndex, - converters.PrepareHashesForQueryRemove(together), + index, + converters.PrepareHashesForQueryRemove(hashes), ) } diff --git a/process/interface.go b/process/interface.go index bc596e01..1c30e5a5 100644 --- a/process/interface.go +++ b/process/interface.go @@ -67,7 +67,7 @@ type DBTransactionsHandler interface { header coreData.HeaderHandler, pool *indexer.Pool, ) *data.PreparedResults - GetHashesHexEncodedForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) + GetHexEncodedHashesForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) SerializeReceipts(receipts []*data.Receipt, buffSlice *data.BufferSlice, index string) error SerializeTransactions(transactions []*data.Transaction, txHashStatus map[string]string, selfShardID uint32, buffSlice *data.BufferSlice, index string) error diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index c168cedd..74083e8a 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -162,8 +162,8 @@ func (tdp *txsDatabaseProcessor) setTransactionSearchOrder(transactions map[stri return transactions } -// GetHashesHexEncodedForRemove will return transactions hashes from body hex encoded -func (tdp *txsDatabaseProcessor) GetHashesHexEncodedForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) { +// GetHexEncodedHashesForRemove will return hex encoded transactions hashes and smart contract results hashes from body +func (tdp *txsDatabaseProcessor) GetHexEncodedHashesForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) { if body == nil || check.IfNil(header) || len(header.GetMiniBlockHeadersHashes()) == 0 { return nil, nil } diff --git a/process/transactions/transactionsProcessor_test.go b/process/transactions/transactionsProcessor_test.go index a9e6c2fa..fb6c1929 100644 --- a/process/transactions/transactionsProcessor_test.go +++ b/process/transactions/transactionsProcessor_test.go @@ -509,7 +509,7 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { txDBProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) - res, _ := txDBProc.GetHashesHexEncodedForRemove(nil, nil) + res, _ := txDBProc.GetHexEncodedHashesForRemove(nil, nil) require.Nil(t, res) header := &block.Header{ @@ -572,7 +572,7 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { "6836", } - txsHashes, scrHashes := txDBProc.GetHashesHexEncodedForRemove(header, body) + txsHashes, scrHashes := txDBProc.GetHexEncodedHashesForRemove(header, body) require.Equal(t, expectedHashes, txsHashes) require.Equal(t, expectedScrHashes, scrHashes) } From f58f648b4ce71dae98df38ba8ecfe390db40c86b Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 12:15:21 +0300 Subject: [PATCH 66/69] small rename --- process/elasticProcessor.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/process/elasticProcessor.go b/process/elasticProcessor.go index ec1fa6b5..efed1aa0 100644 --- a/process/elasticProcessor.go +++ b/process/elasticProcessor.go @@ -317,20 +317,20 @@ func (ei *elasticProcessor) 
RemoveMiniblocks(header coreData.HeaderHandler, body func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error { encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHexEncodedHashesForRemove(header, body) - err := ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.TransactionsIndex, encodedTxsHashes) + err := ei.removeIfHashesNotEmpty(elasticIndexer.TransactionsIndex, encodedTxsHashes) if err != nil { return err } - err = ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.ScResultsIndex, encodedScrsHashes) + err = ei.removeIfHashesNotEmpty(elasticIndexer.ScResultsIndex, encodedScrsHashes) if err != nil { return err } - return ei.removeBasedOnHashesIfNotEmpty(elasticIndexer.OperationsIndex, append(encodedTxsHashes, encodedScrsHashes...)) + return ei.removeIfHashesNotEmpty(elasticIndexer.OperationsIndex, append(encodedTxsHashes, encodedScrsHashes...)) } -func (ei *elasticProcessor) removeBasedOnHashesIfNotEmpty(index string, hashes []string) error { +func (ei *elasticProcessor) removeIfHashesNotEmpty(index string, hashes []string) error { if len(hashes) == 0 { return nil } From 6e1ac8c2a739cf9a89b26b416db31224a3924058 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Fri, 26 Aug 2022 12:16:22 +0300 Subject: [PATCH 67/69] small rename --- process/transactions/transactionsProcessor.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 74083e8a..02592fc1 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -162,7 +162,7 @@ func (tdp *txsDatabaseProcessor) setTransactionSearchOrder(transactions map[stri return transactions } -// GetHexEncodedHashesForRemove will return hex encoded transactions hashes and smart contract results hashes from body +// GetHexEncodedHashesForRemove will return hex encoded transaction hashes and smart contract result hashes from body func (tdp *txsDatabaseProcessor) GetHexEncodedHashesForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) { if body == nil || check.IfNil(header) || len(header.GetMiniBlockHeadersHashes()) == 0 { return nil, nil From 4c645f6f624978a35d5aaaa5177c5477f4d90567 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Thu, 15 Sep 2022 15:02:18 +0300 Subject: [PATCH 68/69] extend block structure --- data/block.go | 8 ++++++++ process/block/blockProcessor.go | 12 ++++++++++++ process/block/blockProcessor_test.go | 12 ++++++++++++ 3 files changed, 32 insertions(+) diff --git a/data/block.go b/data/block.go index ef8e33a4..fb0c5810 100644 --- a/data/block.go +++ b/data/block.go @@ -13,6 +13,7 @@ type Block struct { Epoch uint32 `json:"epoch"` Hash string `json:"-"` MiniBlocksHashes []string `json:"miniBlocksHashes"` + MiniBlocksDetails []*MiniBlocksDetails `json:"miniBlocksDetails,omitempty"` NotarizedBlocksHashes []string `json:"notarizedBlocksHashes"` Proposer uint64 `json:"proposer"` Validators []uint64 `json:"validators"` @@ -38,6 +39,13 @@ type Block struct { EpochStartShardsData []*EpochStartShardData `json:"epochStartShardsData,omitempty"` } +// MiniBlocksDetails is a structure that hold information about mini-blocks execution details +type MiniBlocksDetails struct { + IndexFirstProcessedTx int32 `json:"firstProcessedTx"` + IndexLastProcessedTx int32 `json:"lastProcessedTx"` + MBIndex int `json:"mbIndex"` +} + // ScheduledData is a structure that hold information about scheduled events type ScheduledData 
struct { ScheduledRootHash string `json:"rootHash,omitempty"` diff --git a/process/block/blockProcessor.go b/process/block/blockProcessor.go index 216ab2db..34249d81 100644 --- a/process/block/blockProcessor.go +++ b/process/block/blockProcessor.go @@ -109,6 +109,7 @@ func (bp *blockProcessor) PrepareBlockForDB( } bp.addEpochStartInfoForMeta(header, elasticBlock) + putMiniblocksDetailsInBlock(header, elasticBlock) return elasticBlock, nil } @@ -222,6 +223,17 @@ func (bp *blockProcessor) getEncodedMBSHashes(body *block.Body) []string { return miniblocksHashes } +func putMiniblocksDetailsInBlock(header coreData.HeaderHandler, block *data.Block) { + mbHeaders := header.GetMiniBlockHeaderHandlers() + for idx, mbHeader := range mbHeaders { + block.MiniBlocksDetails = append(block.MiniBlocksDetails, &data.MiniBlocksDetails{ + IndexFirstProcessedTx: mbHeader.GetIndexOfFirstTxProcessed(), + IndexLastProcessedTx: mbHeader.GetIndexOfLastTxProcessed(), + MBIndex: idx, + }) + } +} + func (bp *blockProcessor) computeBlockSize(header coreData.HeaderHandler, body *block.Body) (int, error) { headerBytes, err := bp.marshalizer.Marshal(header) if err != nil { diff --git a/process/block/blockProcessor_test.go b/process/block/blockProcessor_test.go index 1640a457..40fbc617 100644 --- a/process/block/blockProcessor_test.go +++ b/process/block/blockProcessor_test.go @@ -234,6 +234,18 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { PrevEpochStartRound: 222, PrevEpochStartHash: "7072657645706f6368", }, + MiniBlocksDetails: []*data.MiniBlocksDetails{ + { + IndexFirstProcessedTx: 0, + IndexLastProcessedTx: 49, + MBIndex: 0, + }, + { + IndexFirstProcessedTx: 0, + IndexLastProcessedTx: 119, + MBIndex: 1, + }, + }, EpochStartShardsData: []*data.EpochStartShardData{ { ShardID: 1, From d4b2b44729f32b0022be8662defcb24f1e41a595 Mon Sep 17 00:00:00 2001 From: Iuga Mihai Date: Tue, 20 Sep 2022 10:40:30 +0300 Subject: [PATCH 69/69] delete cross shard scrs at source --- process/transactions/transactionsProcessor.go | 11 ++++++----- process/transactions/transactionsProcessor_test.go | 10 +++++++++- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/process/transactions/transactionsProcessor.go b/process/transactions/transactionsProcessor.go index 02592fc1..f2a46139 100644 --- a/process/transactions/transactionsProcessor.go +++ b/process/transactions/transactionsProcessor.go @@ -172,8 +172,9 @@ func (tdp *txsDatabaseProcessor) GetHexEncodedHashesForRemove(header coreData.He encodedTxsHashes := make([]string, 0) encodedScrsHashes := make([]string, 0) for _, miniblock := range body.MiniBlocks { - if isCrossShardAtSourceAndNoRewardsMB(selfShardID, miniblock) { - // ignore cross-shard miniblocks at source ( exception to this rule are rewards miniblocks) + shouldIgnore := isCrossShardAtSourceNormalTx(selfShardID, miniblock) + if shouldIgnore { + // ignore cross-shard miniblocks at source with normal txs continue } @@ -193,12 +194,12 @@ func (tdp *txsDatabaseProcessor) GetHexEncodedHashesForRemove(header coreData.He return encodedTxsHashes, encodedScrsHashes } -func isCrossShardAtSourceAndNoRewardsMB(selfShardID uint32, miniblock *block.MiniBlock) bool { +func isCrossShardAtSourceNormalTx(selfShardID uint32, miniblock *block.MiniBlock) bool { isCrossShard := miniblock.SenderShardID != miniblock.ReceiverShardID isAtSource := miniblock.SenderShardID == selfShardID - noRewardsMb := miniblock.Type != block.RewardsBlock + txBlock := miniblock.Type == block.TxBlock - return isCrossShard && isAtSource 
&& noRewardsMb + return isCrossShard && isAtSource && txBlock } func shouldIgnoreProcessedMBScheduled(header coreData.HeaderHandler, mbIndex int) bool { diff --git a/process/transactions/transactionsProcessor_test.go b/process/transactions/transactionsProcessor_test.go index fb6c1929..90dcb825 100644 --- a/process/transactions/transactionsProcessor_test.go +++ b/process/transactions/transactionsProcessor_test.go @@ -562,6 +562,14 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { SenderShardID: 2, ReceiverShardID: core.MetachainShardId, }, + { + TxHashes: [][]byte{ + []byte("h7"), + }, + Type: block.SmartContractResultBlock, + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + }, }, } @@ -569,7 +577,7 @@ func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { "6831", "6832", "6833", "6835", } expectedScrHashes := []string{ - "6836", + "6836", "6837", } txsHashes, scrHashes := txDBProc.GetHexEncodedHashesForRemove(header, body)
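The remove-function patches above (61, 63 and 69) converge on a single rollback rule: a miniblock is skipped only when it is cross-shard, observed at its source shard, and carries normal transactions; rewards miniblocks and smart contract results are never skipped by the predicate, and their hashes are deleted from their own index plus the operations index. Below is a minimal standalone sketch of that rule, restated outside the indexer so the shard matrix is easy to eyeball. The shard values, the metachain constant and the type names are simplified stand-ins rather than the real elrond-go-core API, and the inline cases mirror the h4 to h7 expectations from the unit test above, which is inferred from the expected hashes to index from the metachain.

package main

import "fmt"

// Simplified stand-ins for the elrond-go-core identifiers; the real indexer
// takes these from block.MiniBlock and core.MetachainShardId.
const metachainShardID = uint32(0xFFFFFFFF)

type mbType int

const (
	txBlock mbType = iota
	scResultBlock
	rewardsBlock
)

type miniBlock struct {
	sender, receiver uint32
	kind             mbType
}

// ignoredOnRemove mirrors isCrossShardAtSourceNormalTx from the last patch:
// a miniblock escapes removal only when it is cross-shard, observed at its
// source shard, and holds normal transactions.
func ignoredOnRemove(self uint32, mb miniBlock) bool {
	isCrossShard := mb.sender != mb.receiver
	isAtSource := mb.sender == self
	isNormalTx := mb.kind == txBlock
	return isCrossShard && isAtSource && isNormalTx
}

func main() {
	self := metachainShardID // the test above exercises the metachain view
	for _, mb := range []miniBlock{
		{metachainShardID, 0, txBlock},       // cross tx at source: ignored (h4)
		{2, metachainShardID, txBlock},       // cross tx at destination: removed (h5)
		{2, metachainShardID, scResultBlock}, // cross SCR at destination: removed (h6)
		{metachainShardID, 0, scResultBlock}, // cross SCR at source: removed too (h7)
	} {
		fmt.Printf("%+v ignored=%v\n", mb, ignoredOnRemove(self, mb))
	}
}

The import-mode branch is deliberately left out of the sketch: in import mode the processor skips deletion entirely before this predicate is ever consulted.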