diff --git a/cmd/api/docs/docs.go b/cmd/api/docs/docs.go index a3eba714f..2939edfb8 100644 --- a/cmd/api/docs/docs.go +++ b/cmd/api/docs/docs.go @@ -2352,129 +2352,6 @@ var doc = `{ } } }, - "/v1/domains/{network}": { - "get": { - "description": "Show all tezos domains for network", - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "tags": [ - "domains" - ], - "summary": "Show all tezos domains for network", - "operationId": "list-domains", - "parameters": [ - { - "type": "string", - "description": "Network", - "name": "network", - "in": "path", - "required": true - }, - { - "maximum": 10, - "type": "integer", - "description": "Transfers count", - "name": "size", - "in": "query" - }, - { - "type": "integer", - "description": "Offset", - "name": "offset", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/handlers.DomainsResponse" - } - }, - "400": { - "description": "Bad Request", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - } - } - } - }, - "/v1/domains/{network}/resolve": { - "get": { - "description": "Resolve domain by address and vice versa", - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "tags": [ - "domains" - ], - "summary": "Resolve domain", - "operationId": "resolve-domain", - "parameters": [ - { - "type": "string", - "description": "Network", - "name": "network", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "Domain name", - "name": "name", - "in": "query" - }, - { - "maxLength": 36, - "minLength": 36, - "type": "string", - "description": "Address", - "name": "address", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/handlers.TezosDomain" - } - }, - "204": { - "description": "No Content", - "schema": { - "$ref": "#/definitions/gin.H" - } - }, - "400": { - "description": "Bad Request", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - } - } - } - }, "/v1/global_constants/{network}/{address}": { "get": { "description": "Get global constant", @@ -2565,7 +2442,7 @@ var doc = `{ } } }, - "/v1/operation/{hash}/{index}/storage_diff": { + "/v1/operation/{id}/diff": { "get": { "consumes": [ "application/json" @@ -2576,22 +2453,13 @@ var doc = `{ "tags": [ "operations" ], - "summary": "Get code line where operation failed", - "operationId": "get-operation-storage-diff", + "summary": "Get operation storage diff", + "operationId": "get-operation-diff", "parameters": [ - { - "maxLength": 51, - "minLength": 51, - "type": "string", - "description": "Operation group hash", - "name": "hash", - "in": "path", - "required": true - }, { "type": "integer", - "description": "Content index", - "name": "index", + "description": "Internal BCD operation ID", + "name": "id", "in": "path", "required": true } @@ -2600,7 +2468,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/handlers.GetErrorLocationResponse" + "$ref": "#/definitions/ast.MiguelNode" } }, "400": { @@ -2620,6 +2488,7 @@ var doc = `{ }, "/v1/operation/{id}/error_location": { "get": { + "description": "Get code line where operation failed", "consumes": [ "application/json" ], @@ -2835,12 +2704,6 @@ var doc = `{ 
"description": "Comma-separated list of indices for searching. Values: contract, operation, bigmapdiff", "name": "i", "in": "query" - }, - { - "type": "string", - "description": "Comma-separated list of languages for searching. Values: smartpy, liquidity, ligo, lorentz, michelson", - "name": "l", - "in": "query" } ], "responses": { @@ -3352,6 +3215,13 @@ var doc = `{ "name": "token_id", "in": "query", "required": true + }, + { + "type": "string", + "description": "DApp slug", + "name": "slug", + "in": "query", + "required": true } ], "responses": { @@ -4032,10 +3902,6 @@ var doc = `{ "id": { "type": "integer" }, - "language": { - "type": "string", - "x-nullable": true - }, "last_action": { "type": "string", "x-nullable": true @@ -4099,20 +3965,6 @@ var doc = `{ } } }, - "handlers.DomainsResponse": { - "type": "object", - "properties": { - "domains": { - "type": "array", - "items": { - "$ref": "#/definitions/handlers.TezosDomain" - } - }, - "total": { - "type": "integer" - } - } - }, "handlers.EntrypointSchema": { "type": "object", "properties": { @@ -4285,12 +4137,6 @@ var doc = `{ "type": "integer", "example": 100 }, - "languages": { - "type": "object", - "additionalProperties": { - "type": "integer" - } - }, "operations_count": { "type": "integer", "example": 100 @@ -4434,8 +4280,8 @@ var doc = `{ "type": "string" }, "storage_diff": { - "type": "object", - "x-nullable": true + "x-nullable": true, + "$ref": "#/definitions/ast.MiguelNode" }, "storage_limit": { "type": "integer", @@ -4583,10 +4429,6 @@ var doc = `{ "id": { "type": "integer" }, - "language": { - "type": "string", - "x-nullable": true - }, "last_action": { "type": "string", "x-nullable": true @@ -4763,33 +4605,6 @@ var doc = `{ } } }, - "handlers.TezosDomain": { - "type": "object", - "properties": { - "address": { - "type": "string" - }, - "data": { - "type": "object", - "additionalProperties": true - }, - "expiration": { - "type": "string" - }, - "level": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "network": { - "type": "string" - }, - "timestamp": { - "type": "string" - } - } - }, "handlers.Token": { "type": "object", "properties": { diff --git a/cmd/api/docs/swagger.json b/cmd/api/docs/swagger.json index 327df4d42..6c793852b 100644 --- a/cmd/api/docs/swagger.json +++ b/cmd/api/docs/swagger.json @@ -2334,129 +2334,6 @@ } } }, - "/v1/domains/{network}": { - "get": { - "description": "Show all tezos domains for network", - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "tags": [ - "domains" - ], - "summary": "Show all tezos domains for network", - "operationId": "list-domains", - "parameters": [ - { - "type": "string", - "description": "Network", - "name": "network", - "in": "path", - "required": true - }, - { - "maximum": 10, - "type": "integer", - "description": "Transfers count", - "name": "size", - "in": "query" - }, - { - "type": "integer", - "description": "Offset", - "name": "offset", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/handlers.DomainsResponse" - } - }, - "400": { - "description": "Bad Request", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - } - } - } - }, - "/v1/domains/{network}/resolve": { - "get": { - "description": "Resolve domain by address and vice versa", - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - 
"tags": [ - "domains" - ], - "summary": "Resolve domain", - "operationId": "resolve-domain", - "parameters": [ - { - "type": "string", - "description": "Network", - "name": "network", - "in": "path", - "required": true - }, - { - "type": "string", - "description": "Domain name", - "name": "name", - "in": "query" - }, - { - "maxLength": 36, - "minLength": 36, - "type": "string", - "description": "Address", - "name": "address", - "in": "query" - } - ], - "responses": { - "200": { - "description": "OK", - "schema": { - "$ref": "#/definitions/handlers.TezosDomain" - } - }, - "204": { - "description": "No Content", - "schema": { - "$ref": "#/definitions/gin.H" - } - }, - "400": { - "description": "Bad Request", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "$ref": "#/definitions/handlers.Error" - } - } - } - } - }, "/v1/global_constants/{network}/{address}": { "get": { "description": "Get global constant", @@ -2547,7 +2424,7 @@ } } }, - "/v1/operation/{hash}/{index}/storage_diff": { + "/v1/operation/{id}/diff": { "get": { "consumes": [ "application/json" @@ -2558,22 +2435,13 @@ "tags": [ "operations" ], - "summary": "Get code line where operation failed", - "operationId": "get-operation-storage-diff", + "summary": "Get operation storage diff", + "operationId": "get-operation-diff", "parameters": [ - { - "maxLength": 51, - "minLength": 51, - "type": "string", - "description": "Operation group hash", - "name": "hash", - "in": "path", - "required": true - }, { "type": "integer", - "description": "Content index", - "name": "index", + "description": "Internal BCD operation ID", + "name": "id", "in": "path", "required": true } @@ -2582,7 +2450,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/handlers.GetErrorLocationResponse" + "$ref": "#/definitions/ast.MiguelNode" } }, "400": { @@ -2602,6 +2470,7 @@ }, "/v1/operation/{id}/error_location": { "get": { + "description": "Get code line where operation failed", "consumes": [ "application/json" ], @@ -2817,12 +2686,6 @@ "description": "Comma-separated list of indices for searching. Values: contract, operation, bigmapdiff", "name": "i", "in": "query" - }, - { - "type": "string", - "description": "Comma-separated list of languages for searching. 
Values: smartpy, liquidity, ligo, lorentz, michelson", - "name": "l", - "in": "query" } ], "responses": { @@ -3334,6 +3197,13 @@ "name": "token_id", "in": "query", "required": true + }, + { + "type": "string", + "description": "DApp slug", + "name": "slug", + "in": "query", + "required": true } ], "responses": { @@ -4014,10 +3884,6 @@ "id": { "type": "integer" }, - "language": { - "type": "string", - "x-nullable": true - }, "last_action": { "type": "string", "x-nullable": true @@ -4081,20 +3947,6 @@ } } }, - "handlers.DomainsResponse": { - "type": "object", - "properties": { - "domains": { - "type": "array", - "items": { - "$ref": "#/definitions/handlers.TezosDomain" - } - }, - "total": { - "type": "integer" - } - } - }, "handlers.EntrypointSchema": { "type": "object", "properties": { @@ -4267,12 +4119,6 @@ "type": "integer", "example": 100 }, - "languages": { - "type": "object", - "additionalProperties": { - "type": "integer" - } - }, "operations_count": { "type": "integer", "example": 100 @@ -4416,8 +4262,8 @@ "type": "string" }, "storage_diff": { - "type": "object", - "x-nullable": true + "x-nullable": true, + "$ref": "#/definitions/ast.MiguelNode" }, "storage_limit": { "type": "integer", @@ -4565,10 +4411,6 @@ "id": { "type": "integer" }, - "language": { - "type": "string", - "x-nullable": true - }, "last_action": { "type": "string", "x-nullable": true @@ -4745,33 +4587,6 @@ } } }, - "handlers.TezosDomain": { - "type": "object", - "properties": { - "address": { - "type": "string" - }, - "data": { - "type": "object", - "additionalProperties": true - }, - "expiration": { - "type": "string" - }, - "level": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "network": { - "type": "string" - }, - "timestamp": { - "type": "string" - } - } - }, "handlers.Token": { "type": "object", "properties": { diff --git a/cmd/api/docs/swagger.yaml b/cmd/api/docs/swagger.yaml index cbf9157ab..32dfd3e55 100644 --- a/cmd/api/docs/swagger.yaml +++ b/cmd/api/docs/swagger.yaml @@ -319,9 +319,6 @@ definitions: type: string id: type: integer - language: - type: string - x-nullable: true last_action: type: string x-nullable: true @@ -366,15 +363,6 @@ definitions: count: type: integer type: object - handlers.DomainsResponse: - properties: - domains: - items: - $ref: '#/definitions/handlers.TezosDomain' - type: array - total: - type: integer - type: object handlers.EntrypointSchema: properties: default_model: @@ -490,10 +478,6 @@ definitions: fa_count: example: 100 type: integer - languages: - additionalProperties: - type: integer - type: object operations_count: example: 100 type: integer @@ -599,7 +583,7 @@ definitions: status: type: string storage_diff: - type: object + $ref: '#/definitions/ast.MiguelNode' x-nullable: true storage_limit: type: integer @@ -703,9 +687,6 @@ definitions: type: string id: type: integer - language: - type: string - x-nullable: true last_action: type: string x-nullable: true @@ -826,24 +807,6 @@ definitions: $ref: '#/definitions/tzip.View' type: array type: object - handlers.TezosDomain: - properties: - address: - type: string - data: - additionalProperties: true - type: object - expiration: - type: string - level: - type: integer - name: - type: string - network: - type: string - timestamp: - type: string - type: object handlers.Token: properties: artifact_uri: @@ -3063,89 +3026,6 @@ paths: summary: Get diff between two contracts tags: - contract - /v1/domains/{network}: - get: - consumes: - - application/json - description: Show all tezos domains for network - 
operationId: list-domains - parameters: - - description: Network - in: path - name: network - required: true - type: string - - description: Transfers count - in: query - maximum: 10 - name: size - type: integer - - description: Offset - in: query - name: offset - type: integer - produces: - - application/json - responses: - "200": - description: OK - schema: - $ref: '#/definitions/handlers.DomainsResponse' - "400": - description: Bad Request - schema: - $ref: '#/definitions/handlers.Error' - "500": - description: Internal Server Error - schema: - $ref: '#/definitions/handlers.Error' - summary: Show all tezos domains for network - tags: - - domains - /v1/domains/{network}/resolve: - get: - consumes: - - application/json - description: Resolve domain by address and vice versa - operationId: resolve-domain - parameters: - - description: Network - in: path - name: network - required: true - type: string - - description: Domain name - in: query - name: name - type: string - - description: Address - in: query - maxLength: 36 - minLength: 36 - name: address - type: string - produces: - - application/json - responses: - "200": - description: OK - schema: - $ref: '#/definitions/handlers.TezosDomain' - "204": - description: No Content - schema: - $ref: '#/definitions/gin.H' - "400": - description: Bad Request - schema: - $ref: '#/definitions/handlers.Error' - "500": - description: Internal Server Error - schema: - $ref: '#/definitions/handlers.Error' - summary: Resolve domain - tags: - - domains /v1/global_constants/{network}/{address}: get: consumes: @@ -3207,22 +3087,15 @@ paths: summary: Show indexer head tags: - head - /v1/operation/{hash}/{index}/storage_diff: + /v1/operation/{id}/diff: get: consumes: - application/json - operationId: get-operation-storage-diff + operationId: get-operation-diff parameters: - - description: Operation group hash - in: path - maxLength: 51 - minLength: 51 - name: hash - required: true - type: string - - description: Content index + - description: Internal BCD operation ID in: path - name: index + name: id required: true type: integer produces: @@ -3231,7 +3104,7 @@ paths: "200": description: OK schema: - $ref: '#/definitions/handlers.GetErrorLocationResponse' + $ref: '#/definitions/ast.MiguelNode' "400": description: Bad Request schema: @@ -3240,13 +3113,14 @@ paths: description: Internal Server Error schema: $ref: '#/definitions/handlers.Error' - summary: Get code line where operation failed + summary: Get operation storage diff tags: - operations /v1/operation/{id}/error_location: get: consumes: - application/json + description: Get code line where operation failed operationId: get-operation-error-location parameters: - description: Internal BCD operation ID @@ -3388,11 +3262,6 @@ paths: in: query name: i type: string - - description: 'Comma-separated list of languages for searching. 
Values: smartpy, - liquidity, ligo, lorentz, michelson' - in: query - name: l - type: string produces: - application/json responses: @@ -3745,6 +3614,11 @@ paths: name: token_id required: true type: integer + - description: DApp slug + in: query + name: slug + required: true + type: string produces: - application/json responses: diff --git a/cmd/api/handlers/account.go b/cmd/api/handlers/account.go index cc2292cd0..40c53346f 100644 --- a/cmd/api/handlers/account.go +++ b/cmd/api/handlers/account.go @@ -39,12 +39,12 @@ func (ctx *Context) GetInfo(c *gin.Context) { if ctx.handleError(c, err, 0) { return } - block, err := ctx.CachedCurrentBlock(req.NetworkID()) + block, err := ctx.Cache.CurrentBlock(req.NetworkID()) if ctx.handleError(c, err, 0) { return } - balance, err := ctx.CachedTezosBalance(req.NetworkID(), req.Address, block.Level) + balance, err := ctx.Cache.TezosBalance(req.NetworkID(), req.Address, block.Level) if ctx.handleError(c, err, 0) { return } @@ -258,7 +258,7 @@ func (ctx *Context) GetAccountTokensCountByContractWithMetadata(c *gin.Context) response := make(map[string]TokensCountWithMetadata) for address, count := range res { - metadata, err := ctx.CachedContractMetadata(req.NetworkID(), address) + metadata, err := ctx.Cache.ContractMetadata(req.NetworkID(), address) if err != nil { if !ctx.Storage.IsRecordNotFound(err) && ctx.handleError(c, err, 0) { return @@ -269,7 +269,7 @@ func (ctx *Context) GetAccountTokensCountByContractWithMetadata(c *gin.Context) } } } - contract, err := ctx.CachedContract(metadata.Network, metadata.Address) + contract, err := ctx.Cache.Contract(metadata.Network, metadata.Address) if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/bigmap.go b/cmd/api/handlers/bigmap.go index d5f337f9b..6db691064 100644 --- a/cmd/api/handlers/bigmap.go +++ b/cmd/api/handlers/bigmap.go @@ -6,6 +6,7 @@ import ( "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/bcd/ast" + "github.com/baking-bad/bcdhub/internal/bcd/consts" "github.com/baking-bad/bcdhub/internal/bcd/formatter" "github.com/baking-bad/bcdhub/internal/models/bigmapaction" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" @@ -55,11 +56,11 @@ func (ctx *Context) GetBigMap(c *gin.Context) { res.Address = actions[0].Address } } else { - script, err := ctx.getScript(req.NetworkID(), res.Address, bcd.SymLinkBabylon) + script, err := ctx.Contracts.ScriptPart(req.NetworkID(), res.Address, bcd.SymLinkBabylon, consts.STORAGE) if ctx.handleError(c, err, 0) { return } - storage, err := script.StorageType() + storage, err := ast.NewTypedAstFromBytes(script) if ctx.handleError(c, err, 0) { return } @@ -67,7 +68,7 @@ func (ctx *Context) GetBigMap(c *gin.Context) { if ctx.handleError(c, err, 0) { return } - proto, err := ctx.CachedProtocolByID(operation.Network, operation.ProtocolID) + proto, err := ctx.Cache.ProtocolByID(operation.Network, operation.ProtocolID) if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/code.go b/cmd/api/handlers/code.go index 706405543..1b2f618d0 100644 --- a/cmd/api/handlers/code.go +++ b/cmd/api/handlers/code.go @@ -3,8 +3,8 @@ package handlers import ( "net/http" + "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/bcd/formatter" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/gin-gonic/gin" "github.com/pkg/errors" @@ 
-39,11 +39,11 @@ func (ctx *Context) GetContractCode(c *gin.Context) { network := types.NewNetwork(req.Network) if req.Protocol == "" { - state, err := ctx.CachedCurrentBlock(network) + state, err := ctx.Cache.CurrentBlock(network) if ctx.handleError(c, err, 0) { return } - proto, err := ctx.CachedProtocolByID(state.Network, state.ProtocolID) + proto, err := ctx.Cache.ProtocolByID(state.Network, state.ProtocolID) if ctx.handleError(c, err, 0) { return } @@ -89,17 +89,20 @@ func (ctx *Context) GetDiff(c *gin.Context) { c.SecureJSON(http.StatusOK, resp) } -func (ctx *Context) getContractCodeJSON(network types.Network, address, protocol string) (res gjson.Result, err error) { - data, err := fetch.Contract(network, address, protocol, ctx.SharePath) +func (ctx *Context) getContractCodeJSON(network types.Network, address string, protocol string) (res gjson.Result, err error) { + symLink, err := bcd.GetProtoSymLink(protocol) if err != nil { - return + return res, err + } + script, err := ctx.Cache.ScriptBytes(network, address, symLink) + if err != nil { + return res, err } - contract := gjson.ParseBytes(data) + contract := gjson.ParseBytes(script) if !contract.IsArray() && !contract.IsObject() { return res, errors.Errorf("Unknown contract: %s", address) } - // return macros.FindMacros(contractJSON) return contract, nil } @@ -111,11 +114,11 @@ func (ctx *Context) getContractCodeDiff(left, right CodeDiffLeg) (res CodeDiffRe if leg.Protocol == "" { protocol, ok := currentProtocols[leg.Network] if !ok { - state, err := ctx.CachedCurrentBlock(leg.Network) + state, err := ctx.Cache.CurrentBlock(leg.Network) if err != nil { return res, err } - proto, err := ctx.CachedProtocolByID(state.Network, state.ProtocolID) + proto, err := ctx.Cache.ProtocolByID(state.Network, state.ProtocolID) if err != nil { return res, err } diff --git a/cmd/api/handlers/context.go b/cmd/api/handlers/context.go index 1405f03ad..a317fcefd 100644 --- a/cmd/api/handlers/context.go +++ b/cmd/api/handlers/context.go @@ -18,7 +18,6 @@ func NewContext(cfg config.Config) (*Context, error) { config.WithStorage(cfg.Storage, cfg.API.ProjectName, int64(cfg.API.PageSize), cfg.API.Connections.Open, cfg.API.Connections.Idle), config.WithRPC(cfg.RPC), config.WithSearch(cfg.Storage), - config.WithShare(cfg.SharePath), config.WithMempool(cfg.Services), config.WithLoadErrorDescriptions(), config.WithConfigCopy(cfg), diff --git a/cmd/api/handlers/contract.go b/cmd/api/handlers/contract.go index 3e2bca4c0..34b9c3b3a 100644 --- a/cmd/api/handlers/contract.go +++ b/cmd/api/handlers/contract.go @@ -84,8 +84,8 @@ func (ctx *Context) contractPostprocessing(contract contract.Contract) (Contract var res Contract res.FromModel(contract) - res.Alias = ctx.CachedAlias(contract.Network, contract.Address) - res.DelegateAlias = ctx.CachedAlias(contract.Network, contract.Delegate.String()) + res.Alias = ctx.Cache.Alias(contract.Network, contract.Address) + res.DelegateAlias = ctx.Cache.Alias(contract.Network, contract.Delegate.String()) if alias, err := ctx.TZIP.Get(contract.Network, contract.Address); err == nil { res.Slug = alias.Slug diff --git a/cmd/api/handlers/dapp.go b/cmd/api/handlers/dapp.go index b3905940e..b40f866d9 100644 --- a/cmd/api/handlers/dapp.go +++ b/cmd/api/handlers/dapp.go @@ -160,7 +160,7 @@ func (ctx *Context) appendDAppInfo(dapp dapp.DApp, withDetails bool) (DApp, erro result.Contracts = append(result.Contracts, DAppContract{ Network: contract.Network.String(), Address: contract.Address, - Alias: ctx.CachedAlias(contract.Network, 
contract.Address), + Alias: ctx.Cache.Alias(contract.Network, contract.Address), ReleaseDate: contract.Timestamp.UTC(), }) diff --git a/cmd/api/handlers/mempool.go b/cmd/api/handlers/mempool.go index 470bfb109..6869a8805 100644 --- a/cmd/api/handlers/mempool.go +++ b/cmd/api/handlers/mempool.go @@ -89,8 +89,8 @@ func (ctx *Context) prepareMempoolTransaction(network modelTypes.Network, tx mem Hash: tx.Hash, Network: network.String(), Timestamp: time.Unix(tx.UpdatedAt, 0).UTC(), - SourceAlias: ctx.CachedAlias(network, tx.Source), - DestinationAlias: ctx.CachedAlias(network, tx.Destination), + SourceAlias: ctx.Cache.Alias(network, tx.Source), + DestinationAlias: ctx.Cache.Alias(network, tx.Destination), Kind: tx.Kind, Source: tx.Source, Fee: tx.Fee, @@ -135,7 +135,7 @@ func (ctx *Context) prepareMempoolOrigination(network modelTypes.Network, origin Hash: origination.Hash, Network: network.String(), Timestamp: time.Unix(origination.UpdatedAt, 0).UTC(), - SourceAlias: ctx.CachedAlias(network, origination.Source), + SourceAlias: ctx.Cache.Alias(network, origination.Source), Kind: origination.Kind, Source: origination.Source, Fee: origination.Fee, @@ -158,7 +158,7 @@ func (ctx *Context) prepareMempoolOrigination(network modelTypes.Network, origin func (ctx *Context) buildMempoolOperationParameters(data []byte, op *Operation) error { network := modelTypes.NewNetwork(op.Network) - proto, err := ctx.CachedProtocolByHash(network, op.Protocol) + proto, err := ctx.Cache.ProtocolByHash(network, op.Protocol) if err != nil { return err } diff --git a/cmd/api/handlers/migrations.go b/cmd/api/handlers/migrations.go index 17086b02b..3a933cd2f 100644 --- a/cmd/api/handlers/migrations.go +++ b/cmd/api/handlers/migrations.go @@ -43,11 +43,11 @@ func (ctx *Context) GetContractMigrations(c *gin.Context) { func prepareMigrations(ctx *Context, data []migration.Migration) ([]Migration, error) { result := make([]Migration, len(data)) for i := range data { - proto, err := ctx.CachedProtocolByID(data[i].Network, data[i].ProtocolID) + proto, err := ctx.Cache.ProtocolByID(data[i].Network, data[i].ProtocolID) if err != nil && !ctx.Storage.IsRecordNotFound(err) { return nil, err } - prevProto, err := ctx.CachedProtocolByID(data[i].Network, data[i].PrevProtocolID) + prevProto, err := ctx.Cache.ProtocolByID(data[i].Network, data[i].PrevProtocolID) if err != nil && !ctx.Storage.IsRecordNotFound(err) { return nil, err } diff --git a/cmd/api/handlers/operations.go b/cmd/api/handlers/operations.go index be9e32f91..014580e76 100644 --- a/cmd/api/handlers/operations.go +++ b/cmd/api/handlers/operations.go @@ -12,7 +12,6 @@ import ( formattererror "github.com/baking-bad/bcdhub/internal/bcd/formatter/error" "github.com/baking-bad/bcdhub/internal/bcd/tezerrors" "github.com/baking-bad/bcdhub/internal/bcd/types" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/operation" @@ -134,78 +133,53 @@ func (ctx *Context) GetOperation(c *gin.Context) { c.SecureJSON(http.StatusOK, resp) } -// GetContentDiff godoc -// @Summary Get storage diff for content in OPG -// @Description Get storage diff for content in OPG +// GetOperationErrorLocation godoc +// @Summary Get code line where operation failed +// @Description Get code line where operation failed // @Tags operations -// @ID get-operation-storage-diff -// @Param hash path string true 
"Operation group hash" minlength(51) maxlength(51) -// @Param index path integer true "Content index" mininum(0) +// @ID get-operation-error-location +// @Param id path integer true "Internal BCD operation ID" // @Accept json // @Produce json -// @Success 200 {object} map[string]interface{} +// @Success 200 {object} GetErrorLocationResponse // @Failure 400 {object} Error // @Failure 500 {object} Error -// @Router /v1/operation/{hash}/{index}/storage_diff [get] -func (ctx *Context) GetContentDiff(c *gin.Context) { - var req getContent +// @Router /v1/operation/{id}/error_location [get] +func (ctx *Context) GetOperationErrorLocation(c *gin.Context) { + var req getOperationByIDRequest if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - - operations, err := ctx.Operations.Get(map[string]interface{}{ - "hash": req.Hash, - "content_index": req.ContentIndex, - }, 1, false) - if ctx.handleError(c, err, 0) { - return - } - if len(operations) == 0 { - ctx.handleError(c, errors.Errorf("unknown operation content: %s %d", req.Hash, req.ContentIndex), http.StatusNotFound) + operation := operation.Operation{ID: req.ID} + if err := ctx.Storage.GetByID(&operation); ctx.handleError(c, err, 0) { return } - operation := operations[0] - data, err := ctx.BigMapDiffs.GetForOperations(operation.ID) - if ctx.handleError(c, err, 0) { + if !tezerrors.HasScriptRejectedError(operation.Errors) { + ctx.handleError(c, errors.Errorf("No reject script error in operation"), http.StatusBadRequest) return } - if len(operation.DeffatedStorage) > 0 && (operation.IsCall() || operation.IsOrigination()) && operation.IsApplied() { - protocol, err := ctx.Protocols.GetByID(operation.ProtocolID) - if ctx.handleError(c, err, 0) { - return - } - - script, err := ctx.getScript(operation.Network, operation.Destination, protocol.SymLink) - if ctx.handleError(c, err, 0) { - return - } - var op Operation - op.FromModel(operation) - - if err := ctx.setStorageDiff(operation.Destination, operation.DeffatedStorage, &op, data, script); ctx.handleError(c, err, 0) { - return - } - c.SecureJSON(http.StatusOK, op.StorageDiff) + response, err := ctx.getErrorLocation(operation, 2) + if ctx.handleError(c, err, 0) { return } - c.SecureJSON(http.StatusOK, nil) + c.SecureJSON(http.StatusOK, response) } -// GetOperationErrorLocation godoc -// @Summary Get code line where operation failed -// @Description Get code line where operation failed +// GetOperationDiff godoc +// @Summary Get operation storage diff +// @DescriptionGet Get operation storage diff // @Tags operations -// @ID get-operation-error-location +// @ID get-operation-diff // @Param id path integer true "Internal BCD operation ID" // @Accept json // @Produce json -// @Success 200 {object} GetErrorLocationResponse +// @Success 200 {object} ast.MiguelNode // @Failure 400 {object} Error // @Failure 500 {object} Error -// @Router /v1/operation/{id}/error_location [get] -func (ctx *Context) GetOperationErrorLocation(c *gin.Context) { +// @Router /v1/operation/{id}/diff [get] +func (ctx *Context) GetOperationDiff(c *gin.Context) { var req getOperationByIDRequest if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return @@ -215,16 +189,36 @@ func (ctx *Context) GetOperationErrorLocation(c *gin.Context) { return } - if !tezerrors.HasScriptRejectedError(operation.Errors) { - ctx.handleError(c, errors.Errorf("No reject script error in operation"), http.StatusBadRequest) - return - } + var result Operation + result.FromModel(operation) - response, 
err := ctx.getErrorLocation(operation, 2) - if ctx.handleError(c, err, 0) { - return + if len(operation.DeffatedStorage) > 0 && (operation.IsCall() || operation.IsOrigination()) && operation.IsApplied() { + proto, err := ctx.Cache.ProtocolByID(operation.Network, operation.ProtocolID) + if ctx.handleError(c, err, 0) { + return + } + result.Protocol = proto.Hash + + storageBytes, err := ctx.Contracts.ScriptPart(operation.Network, operation.Destination, proto.SymLink, consts.STORAGE) + if ctx.handleError(c, err, 0) { + return + } + + storageType, err := ast.NewTypedAstFromBytes(storageBytes) + if ctx.handleError(c, err, 0) { + return + } + + bmd, err := ctx.BigMapDiffs.GetForOperation(operation.ID) + if ctx.handleError(c, err, 0) { + return + } + + if err := ctx.setStorageDiff(operation.Destination, operation.DeffatedStorage, &result, bmd, storageType); ctx.handleError(c, err, 0) { + return + } } - c.SecureJSON(http.StatusOK, response) + c.SecureJSON(http.StatusOK, result.StorageDiff) } func (ctx *Context) getOperationFromMempool(hash string) *Operation { @@ -318,10 +312,10 @@ func (ctx *Context) prepareOperation(operation operation.Operation, bmd []bigmap var op Operation op.FromModel(operation) - op.SourceAlias = ctx.CachedAlias(operation.Network, operation.Source) - op.DestinationAlias = ctx.CachedAlias(operation.Network, operation.Destination) + op.SourceAlias = ctx.Cache.Alias(operation.Network, operation.Source) + op.DestinationAlias = ctx.Cache.Alias(operation.Network, operation.Destination) - proto, err := ctx.CachedProtocolByID(operation.Network, operation.ProtocolID) + proto, err := ctx.Cache.ProtocolByID(operation.Network, operation.ProtocolID) if err != nil { return op, err } @@ -337,8 +331,12 @@ func (ctx *Context) prepareOperation(operation operation.Operation, bmd []bigmap } if withStorageDiff { + storageType, err := script.StorageType() + if err != nil { + return op, err + } if len(operation.DeffatedStorage) > 0 && (operation.IsCall() || operation.IsOrigination()) && operation.IsApplied() { - if err := ctx.setStorageDiff(op.Destination, operation.DeffatedStorage, &op, bmd, script); err != nil { + if err := ctx.setStorageDiff(op.Destination, operation.DeffatedStorage, &op, bmd, storageType); err != nil { return op, err } } @@ -429,11 +427,7 @@ func setParatemetersWithType(params *types.Parameters, script *ast.Script, op *O return nil } -func (ctx *Context) setStorageDiff(address string, storage []byte, op *Operation, bmd []bigmapdiff.BigMapDiff, script *ast.Script) error { - storageType, err := script.StorageType() - if err != nil { - return err - } +func (ctx *Context) setStorageDiff(address string, storage []byte, op *Operation, bmd []bigmapdiff.BigMapDiff, storageType *ast.TypedAst) error { storageDiff, err := ctx.getStorageDiff(bmd, address, storage, storageType, op) if err != nil { return err @@ -442,7 +436,7 @@ func (ctx *Context) setStorageDiff(address string, storage []byte, op *Operation return nil } -func (ctx *Context) getStorageDiff(bmd []bigmapdiff.BigMapDiff, address string, storage []byte, storageType *ast.TypedAst, op *Operation) (interface{}, error) { +func (ctx *Context) getStorageDiff(bmd []bigmapdiff.BigMapDiff, address string, storage []byte, storageType *ast.TypedAst, op *Operation) (*ast.MiguelNode, error) { currentStorage := &ast.TypedAst{ Nodes: []ast.Node{ast.Copy(storageType.Nodes[0])}, } @@ -499,11 +493,11 @@ func getEnrichStorage(storageType *ast.TypedAst, bmd []bigmapdiff.BigMapDiff) er } func (ctx *Context) getErrorLocation(operation 
operation.Operation, window int) (GetErrorLocationResponse, error) { - proto, err := ctx.CachedProtocolByID(operation.Network, operation.ProtocolID) + proto, err := ctx.Cache.ProtocolByID(operation.Network, operation.ProtocolID) if err != nil { return GetErrorLocationResponse{}, err } - code, err := fetch.ContractBySymLink(operation.Network, operation.Destination, proto.SymLink, ctx.SharePath) + code, err := ctx.getScriptBytes(operation.Network, operation.Destination, proto.SymLink) if err != nil { return GetErrorLocationResponse{}, err } diff --git a/cmd/api/handlers/project.go b/cmd/api/handlers/project.go index 8f8856527..e917e10b1 100644 --- a/cmd/api/handlers/project.go +++ b/cmd/api/handlers/project.go @@ -105,8 +105,8 @@ func (ctx *Context) GetSimilarContracts(c *gin.Context) { } response.Contracts[i].FromModel(similar[i], diff) - response.Contracts[i].Alias = ctx.CachedAlias(similar[i].Network, similar[i].Address) - response.Contracts[i].DelegateAlias = ctx.CachedAlias(similar[i].Network, similar[i].Delegate.String()) + response.Contracts[i].Alias = ctx.Cache.Alias(similar[i].Network, similar[i].Address) + response.Contracts[i].DelegateAlias = ctx.Cache.Alias(similar[i].Network, similar[i].Delegate.String()) } c.SecureJSON(http.StatusOK, response) diff --git a/cmd/api/handlers/requests.go b/cmd/api/handlers/requests.go index 6fa4c0c6f..3729c8bc3 100644 --- a/cmd/api/handlers/requests.go +++ b/cmd/api/handlers/requests.go @@ -170,12 +170,6 @@ type getOperationByIDRequest struct { ID int64 `uri:"id" binding:"required"` } -type getContent struct { - OPGRequest - - ContentIndex int64 `uri:"content" example:"1"` -} - type runOperationRequest struct { Data map[string]interface{} `json:"data" binding:"required"` Name string `json:"name" binding:"required"` diff --git a/cmd/api/handlers/responses.go b/cmd/api/handlers/responses.go index 8314f5f4e..3655b65ce 100644 --- a/cmd/api/handlers/responses.go +++ b/cmd/api/handlers/responses.go @@ -42,7 +42,7 @@ type Operation struct { PaidStorageSizeDiff int64 `json:"paid_storage_size_diff,omitempty" extensions:"x-nullable" example:"300"` Errors []*tezerrors.Error `json:"errors,omitempty" extensions:"x-nullable"` Parameters interface{} `json:"parameters,omitempty" extensions:"x-nullable"` - StorageDiff interface{} `json:"storage_diff,omitempty" extensions:"x-nullable"` + StorageDiff *ast.MiguelNode `json:"storage_diff,omitempty" extensions:"x-nullable"` RawMempool interface{} `json:"rawMempool,omitempty" extensions:"x-nullable"` Timestamp time.Time `json:"timestamp"` Protocol string `json:"protocol"` @@ -161,8 +161,6 @@ type Contract struct { func (c *Contract) FromModel(contract contract.Contract) { c.Address = contract.Address c.Delegate = contract.Delegate.String() - c.Entrypoints = contract.Entrypoints - c.Hash = contract.Hash c.TxCount = contract.TxCount c.LastAction = contract.LastAction @@ -170,11 +168,19 @@ func (c *Contract) FromModel(contract contract.Contract) { c.Manager = contract.Manager.String() c.MigrationsCount = contract.MigrationsCount c.Network = contract.Network.String() - c.ProjectID = contract.ProjectID.String() c.Tags = contract.Tags.ToArray() c.Timestamp = contract.Timestamp - c.FailStrings = contract.FailStrings - c.Annotations = contract.Annotations + + script := contract.Alpha + if contract.BabylonID > 0 { + script = contract.Babylon + } + + c.Hash = script.Hash + c.FailStrings = script.FailStrings + c.Annotations = script.Annotations + c.Entrypoints = script.Entrypoints + c.ProjectID = script.ProjectID.String() c.ID = 
contract.ID } @@ -452,8 +458,8 @@ func (c *SameContractsResponse) FromModel(same contract.SameResponse, ctx *Conte for i := range same.Contracts { var contract Contract contract.FromModel(same.Contracts[i]) - contract.Alias = ctx.CachedAlias(same.Contracts[i].Network, same.Contracts[i].Address) - contract.DelegateAlias = ctx.CachedAlias(same.Contracts[i].Network, same.Contracts[i].Delegate.String()) + contract.Alias = ctx.Cache.Alias(same.Contracts[i].Network, same.Contracts[i].Address) + contract.DelegateAlias = ctx.Cache.Alias(same.Contracts[i].Network, same.Contracts[i].Delegate.String()) c.Contracts[i] = contract } } diff --git a/cmd/api/handlers/run_code.go b/cmd/api/handlers/run_code.go index acdc35f9d..b42103c7d 100644 --- a/cmd/api/handlers/run_code.go +++ b/cmd/api/handlers/run_code.go @@ -32,7 +32,7 @@ func (ctx *Context) RunOperation(c *gin.Context) { network := types.NewNetwork(req.Network) - state, err := ctx.CachedCurrentBlock(network) + state, err := ctx.Cache.CurrentBlock(network) if ctx.handleError(c, err, 0) { return } @@ -57,7 +57,7 @@ func (ctx *Context) RunOperation(c *gin.Context) { return } - response, err := rpc.RunOperation( + response, err := rpc.RunOperationLight( state.ChainID, state.Hash, reqRunOp.Source, @@ -140,7 +140,7 @@ func (ctx *Context) RunCode(c *gin.Context) { return } - state, err := ctx.CachedCurrentBlock(req.NetworkID()) + state, err := ctx.Cache.CurrentBlock(req.NetworkID()) if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/script.go b/cmd/api/handlers/script.go index e2bea6aa7..d96096d14 100644 --- a/cmd/api/handlers/script.go +++ b/cmd/api/handlers/script.go @@ -2,7 +2,6 @@ package handlers import ( "github.com/baking-bad/bcdhub/internal/bcd/ast" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/models/types" ) @@ -11,18 +10,22 @@ func (ctx *Context) getScript(network types.Network, address, symLink string) (* if err != nil { return nil, err } - return ast.NewScript(data) + return ast.NewScriptWithoutCode(data) } func (ctx *Context) getScriptBytes(network types.Network, address, symLink string) ([]byte, error) { if symLink == "" { - state, err := ctx.CachedCurrentBlock(network) + state, err := ctx.Cache.CurrentBlock(network) if err != nil { return nil, err } symLink = state.Protocol.SymLink } - return fetch.ContractBySymLink(network, address, symLink, ctx.SharePath) + script, err := ctx.Contracts.Script(network, address, symLink) + if err != nil { + return nil, err + } + return script.Full() } func (ctx *Context) getParameterType(network types.Network, address, symLink string) (*ast.TypedAst, error) { diff --git a/cmd/api/handlers/storage.go b/cmd/api/handlers/storage.go index 52526f0a5..45ff8c8d8 100644 --- a/cmd/api/handlers/storage.go +++ b/cmd/api/handlers/storage.go @@ -45,7 +45,7 @@ func (ctx *Context) GetContractStorage(c *gin.Context) { var protocol string if sReq.Level == 0 { - block, err := ctx.CachedCurrentBlock(network) + block, err := ctx.Cache.CurrentBlock(network) if ctx.handleError(c, err, 0) { return } @@ -64,7 +64,7 @@ func (ctx *Context) GetContractStorage(c *gin.Context) { return } - proto, err := ctx.CachedProtocolByHash(network, protocol) + proto, err := ctx.Cache.ProtocolByHash(network, protocol) if ctx.handleError(c, err, 0) { return } @@ -214,7 +214,7 @@ func (ctx *Context) GetContractStorageRich(c *gin.Context) { return } - proto, err := ctx.CachedProtocolByID(ops[0].Network, ops[0].ProtocolID) + proto, err := 
ctx.Cache.ProtocolByID(ops[0].Network, ops[0].ProtocolID) if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/tokens.go b/cmd/api/handlers/tokens.go index 86fdd7caa..eb3f0b976 100644 --- a/cmd/api/handlers/tokens.go +++ b/cmd/api/handlers/tokens.go @@ -198,8 +198,8 @@ func (ctx *Context) contractToTokens(contracts []contract.Contract, network type Address: contracts[i].Address, Manager: contracts[i].Manager.String(), Delegate: contracts[i].Delegate.String(), - Alias: ctx.CachedAlias(contracts[i].Network, contracts[i].Address), - DelegateAlias: ctx.CachedAlias(contracts[i].Network, contracts[i].Delegate.String()), + Alias: ctx.Cache.Alias(contracts[i].Network, contracts[i].Address), + DelegateAlias: ctx.Cache.Alias(contracts[i].Network, contracts[i].Delegate.String()), LastAction: contracts[i].LastAction, TxCount: contracts[i].TxCount, } diff --git a/cmd/api/handlers/transfers.go b/cmd/api/handlers/transfers.go index ebb4cbf6a..336fc5ef1 100644 --- a/cmd/api/handlers/transfers.go +++ b/cmd/api/handlers/transfers.go @@ -67,10 +67,10 @@ func (ctx *Context) transfersPostprocessing(transfers domains.TransfersResponse, response.Transfers[i] = TransferFromModel(transfers.Transfers[i]) response.Transfers[i].Token = &token - response.Transfers[i].Alias = ctx.CachedAlias(transfers.Transfers[i].Network, transfers.Transfers[i].Contract) - response.Transfers[i].InitiatorAlias = ctx.CachedAlias(transfers.Transfers[i].Network, transfers.Transfers[i].Initiator) - response.Transfers[i].FromAlias = ctx.CachedAlias(transfers.Transfers[i].Network, transfers.Transfers[i].From) - response.Transfers[i].ToAlias = ctx.CachedAlias(transfers.Transfers[i].Network, transfers.Transfers[i].To) + response.Transfers[i].Alias = ctx.Cache.Alias(transfers.Transfers[i].Network, transfers.Transfers[i].Contract) + response.Transfers[i].InitiatorAlias = ctx.Cache.Alias(transfers.Transfers[i].Network, transfers.Transfers[i].Initiator) + response.Transfers[i].FromAlias = ctx.Cache.Alias(transfers.Transfers[i].Network, transfers.Transfers[i].From) + response.Transfers[i].ToAlias = ctx.Cache.Alias(transfers.Transfers[i].Network, transfers.Transfers[i].To) } return } diff --git a/cmd/api/handlers/views.go b/cmd/api/handlers/views.go index 02c6a527d..537045979 100644 --- a/cmd/api/handlers/views.go +++ b/cmd/api/handlers/views.go @@ -116,7 +116,7 @@ func (ctx *Context) ExecuteView(c *gin.Context) { return } - state, err := ctx.CachedCurrentBlock(req.NetworkID()) + state, err := ctx.Cache.CurrentBlock(req.NetworkID()) if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/main.go b/cmd/api/main.go index 7bbbf1092..7aadf8dcb 100644 --- a/cmd/api/main.go +++ b/cmd/api/main.go @@ -89,7 +89,6 @@ func (api *app) makeRouter() { v1.GET("head", cache.CachePage(store, time.Second*10, api.Context.GetHead)) v1.GET("opg/:hash", api.Context.GetOperation) - v1.GET("opg/:hash/:content/storage_diff", api.Context.GetContentDiff) v1.GET("operation/:id/error_location", api.Context.GetOperationErrorLocation) v1.GET("pick_random", api.Context.GetRandomContract) v1.GET("search", api.Context.Search) @@ -98,6 +97,12 @@ func (api *app) makeRouter() { v1.POST("diff", api.Context.GetDiff) + operation := v1.Group("operation/:id") + { + operation.GET("error_location", api.Context.GetOperationErrorLocation) + operation.GET("diff", api.Context.GetOperationDiff) + } + stats := v1.Group("stats") { stats.GET("", cache.CachePage(store, time.Second*30, api.Context.GetStats)) diff --git a/cmd/indexer/indexer/boost.go 
b/cmd/indexer/indexer/boost.go index 4a5ba1826..489c6f024 100644 --- a/cmd/indexer/indexer/boost.go +++ b/cmd/indexer/indexer/boost.go @@ -9,7 +9,6 @@ import ( "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/index" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/block" @@ -34,7 +33,6 @@ type BoostIndexer struct { *config.Context rpc noderpc.INode - externalIndexer index.Indexer state block.Block currentProtocol protocol.Protocol @@ -42,13 +40,10 @@ type BoostIndexer struct { Network types.Network indicesInit sync.Once - - boost bool - skipDelegatorBlocks bool } // NewBoostIndexer - -func NewBoostIndexer(ctx context.Context, internalCtx config.Context, rpcConfig config.RPCConfig, network types.Network, opts ...BoostIndexerOption) (*BoostIndexer, error) { +func NewBoostIndexer(ctx context.Context, internalCtx config.Context, rpcConfig config.RPCConfig, network types.Network) (*BoostIndexer, error) { logger.Info().Str("network", network.String()).Msg("Creating indexer object...") rpc := noderpc.NewWaitNodeRPC( @@ -62,10 +57,6 @@ func NewBoostIndexer(ctx context.Context, internalCtx config.Context, rpcConfig rpc: rpc, } - for _, opt := range opts { - opt(bi) - } - if err := bi.init(ctx, bi.Context.StorageDB); err != nil { return nil, err } @@ -74,12 +65,6 @@ func NewBoostIndexer(ctx context.Context, internalCtx config.Context, rpcConfig } func (bi *BoostIndexer) init(ctx context.Context, db *core.Postgres) error { - if bi.boost { - if err := bi.fetchExternalProtocols(ctx); err != nil { - return err - } - } - currentState, err := bi.Blocks.Last(bi.Network) if err != nil { return err @@ -174,13 +159,8 @@ func (bi *BoostIndexer) setUpdateTicker(seconds int) { } // Index - -func (bi *BoostIndexer) Index(ctx context.Context, levels []int64) error { - if len(levels) == 0 { - return nil - } - helpers.SetTagSentry("network", bi.Network.String()) - - for _, level := range levels { +func (bi *BoostIndexer) Index(ctx context.Context, head noderpc.Header) error { + for level := bi.state.Level + 1; level <= head.Level; level++ { helpers.SetTagSentry("block", fmt.Sprintf("%d", level)) select { @@ -194,7 +174,7 @@ func (bi *BoostIndexer) Index(ctx context.Context, levels []int64) error { return err } - if bi.state.Level > 0 && head.Predecessor != bi.state.Hash && !bi.boost { + if bi.state.Level > 0 && head.Predecessor != bi.state.Hash { return errRollback } @@ -213,9 +193,9 @@ func (bi *BoostIndexer) handleBlock(ctx context.Context, head noderpc.Header) er result := parsers.NewResult() err := bi.StorageDB.DB.RunInTransaction(ctx, func(tx *pg.Tx) error { - logger.Info().Str("network", bi.Network.String()).Msgf("indexing %d block", head.Level) + logger.Info().Str("network", bi.Network.String()).Msgf("indexing %7d block", head.Level) - if head.Protocol != bi.currentProtocol.Hash { + if head.Protocol != bi.currentProtocol.Hash || (bi.Network == types.Mainnet && head.Level == 1) { logger.Info().Str("network", bi.Network.String()).Msgf("New protocol detected: %s -> %s", bi.currentProtocol.Hash, head.Protocol) if err := bi.migrate(head, tx); err != nil { @@ -244,7 +224,7 @@ func (bi *BoostIndexer) handleBlock(ctx context.Context, head noderpc.Header) er // Rollback - func (bi *BoostIndexer) Rollback(ctx context.Context) error { - 
logger.Warning().Str("network", bi.Network.String()).Msgf("Rollback from %d", bi.state.Level) + logger.Warning().Str("network", bi.Network.String()).Msgf("Rollback from %7d", bi.state.Level) lastLevel, err := bi.getLastRollbackBlock() if err != nil { @@ -256,14 +236,14 @@ func (bi *BoostIndexer) Rollback(ctx context.Context) error { return err } - helpers.CatchErrorSentry(errors.Errorf("[%s] Rollback from %d to %d", bi.Network, bi.state.Level, lastLevel)) + helpers.CatchErrorSentry(errors.Errorf("[%s] Rollback from %7d to %7d", bi.Network, bi.state.Level, lastLevel)) newState, err := bi.Blocks.Last(bi.Network) if err != nil { return err } bi.state = newState - logger.Info().Str("network", bi.Network.String()).Msgf("New indexer state: %d", bi.state.Level) + logger.Info().Str("network", bi.Network.String()).Msgf("New indexer state: %7d", bi.state.Level) logger.Info().Str("network", bi.Network.String()).Msg("Rollback finished") return nil } @@ -284,7 +264,7 @@ func (bi *BoostIndexer) getLastRollbackBlock() (int64, error) { } if block.Predecessor == headAtLevel.Predecessor { - logger.Warning().Str("network", bi.Network.String()).Msgf("Found equal predecessors at level: %d", block.Level) + logger.Warning().Str("network", bi.Network.String()).Msgf("Found equal predecessors at level: %7d", block.Level) end = true lastLevel = block.Level - 1 } @@ -292,28 +272,6 @@ func (bi *BoostIndexer) getLastRollbackBlock() (int64, error) { return lastLevel, nil } -func (bi *BoostIndexer) getBoostBlocks(head noderpc.Header) ([]int64, error) { - levels, err := bi.externalIndexer.GetContractOperationBlocks(bi.state.Level, head.Level, bi.skipDelegatorBlocks) - if err != nil { - return nil, err - } - - protocols, err := bi.externalIndexer.GetProtocols() - if err != nil { - return nil, err - } - - protocolLevels := make([]int64, 0) - for i := range protocols { - if protocols[i].StartLevel > bi.state.Level && protocols[i].StartLevel > 0 { - protocolLevels = append(protocolLevels, protocols[i].StartLevel) - } - } - - result := helpers.Merge2ArraysInt64(levels, protocolLevels) - return result, err -} - func (bi *BoostIndexer) process(ctx context.Context) error { head, err := bi.rpc.GetHead() if err != nil { @@ -324,25 +282,11 @@ func (bi *BoostIndexer) process(ctx context.Context) error { return errors.Errorf("Invalid chain_id: %s (state) != %s (head)", bi.state.ChainID, head.ChainID) } - logger.Info().Str("network", bi.Network.String()).Msgf("Current node state: %d", head.Level) - logger.Info().Str("network", bi.Network.String()).Msgf("Current indexer state: %d", bi.state.Level) + logger.Info().Str("network", bi.Network.String()).Msgf("Current node state: %7d", head.Level) + logger.Info().Str("network", bi.Network.String()).Msgf("Current indexer state: %7d", bi.state.Level) if head.Level > bi.state.Level { - levels := make([]int64, 0) - if bi.boost { - levels, err = bi.getBoostBlocks(head) - if err != nil { - return err - } - } else { - for i := bi.state.Level + 1; i <= head.Level; i++ { - levels = append(levels, i) - } - } - - logger.Info().Str("network", bi.Network.String()).Msgf("Found %d new levels", len(levels)) - - if err := bi.Index(ctx, levels); err != nil { + if err := bi.Index(ctx, head); err != nil { if errors.Is(err, errBcdQuit) { return nil } @@ -357,9 +301,6 @@ func (bi *BoostIndexer) process(ctx context.Context) error { return err } - if bi.boost { - bi.boost = false - } logger.Info().Str("network", bi.Network.String()).Msg("Synced") return nil } else if head.Level < bi.state.Level { @@ -370,9 +311,8 @@ 
func (bi *BoostIndexer) process(ctx context.Context) error { return errSameLevel } - func (bi *BoostIndexer) createBlock(head noderpc.Header, tx pg.DBI) error { - proto, err := bi.CachedProtocolByHash(bi.Network, head.Protocol) + proto, err := bi.Cache.ProtocolByHash(bi.Network, head.Protocol) if err != nil { return err } @@ -398,7 +338,7 @@ func (bi *BoostIndexer) getDataFromBlock(head noderpc.Header) (*parsers.Result, if head.Level <= 1 { return result, nil } - opg, err := bi.rpc.GetOPG(head.Level) + opg, err := bi.rpc.GetLightOPG(head.Level) if err != nil { return nil, err } @@ -499,9 +439,9 @@ func (bi *BoostIndexer) standartMigration(newProtocol protocol.Protocol, head no if err != nil { return err } - logger.Info().Str("network", bi.Network.String()).Msgf("Now %d contracts are indexed", len(contracts)) + logger.Info().Str("network", bi.Network.String()).Msgf("Now %2d contracts are indexed", len(contracts)) - migrationParser := migrations.NewMigrationParser(bi.Storage, bi.BigMapDiffs, bi.Config.SharePath) + migrationParser := migrations.NewMigrationParser(bi.Storage, bi.BigMapDiffs) for i := range contracts { logger.Info().Str("network", bi.Network.String()).Msgf("Migrate %s...", contracts[i].Address) @@ -524,7 +464,7 @@ func (bi *BoostIndexer) vestingMigration(head noderpc.Header, tx pg.DBI) error { return err } - p := migrations.NewVestingParser(bi.Context, bi.Config.SharePath) + p := migrations.NewVestingParser(bi.Context) for _, address := range addresses { if !bcd.IsContract(address) { diff --git a/cmd/indexer/indexer/create.go b/cmd/indexer/indexer/create.go index bdfc771c6..17defa17b 100644 --- a/cmd/indexer/indexer/create.go +++ b/cmd/indexer/indexer/create.go @@ -20,21 +20,13 @@ func CreateIndexers(ctx context.Context, internalCtx *config.Context, cfg config } indexers := make([]Indexer, 0) - for network, options := range cfg.Indexer.Networks { - boostOptions := make([]BoostIndexerOption, 0) - if options.Boost != "" { - boostOptions = append(boostOptions, WithBoost(options.Boost, network, cfg)) - } - if cfg.Indexer.SkipDelegatorBlocks { - boostOptions = append(boostOptions, WithSkipDelegatorBlocks()) - } - + for network := range cfg.Indexer.Networks { rpc, ok := cfg.RPC[network] if !ok { return nil, errors.Errorf("Unknown network %s", network) } - bi, err := NewBoostIndexer(ctx, *internalCtx, rpc, types.NewNetwork(network), boostOptions...) 
+ bi, err := NewBoostIndexer(ctx, *internalCtx, rpc, types.NewNetwork(network)) if err != nil { return nil, err } diff --git a/cmd/indexer/indexer/interface.go b/cmd/indexer/indexer/interface.go index da520be8f..52ad124e0 100644 --- a/cmd/indexer/indexer/interface.go +++ b/cmd/indexer/indexer/interface.go @@ -3,11 +3,13 @@ package indexer import ( "context" "sync" + + "github.com/baking-bad/bcdhub/internal/noderpc" ) // Indexer - type Indexer interface { Sync(ctx context.Context, wg *sync.WaitGroup) - Index(ctx context.Context, levels []int64) error + Index(ctx context.Context, head noderpc.Header) error Rollback(ctx context.Context) error } diff --git a/cmd/indexer/indexer/options.go b/cmd/indexer/indexer/options.go deleted file mode 100644 index c4b5c5774..000000000 --- a/cmd/indexer/indexer/options.go +++ /dev/null @@ -1,37 +0,0 @@ -package indexer - -import ( - "fmt" - "time" - - "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/index" -) - -// BoostIndexerOption - -type BoostIndexerOption func(*BoostIndexer) - -// WithBoost - -func WithBoost(externalType, network string, cfg config.Config) BoostIndexerOption { - return func(bi *BoostIndexer) { - if externalType == "" { - return - } - - bi.boost = true - switch externalType { - case "tzkt": - bi.externalIndexer = index.NewTzKT(cfg.TzKT[network].URI, time.Duration(cfg.TzKT[network].Timeout)*time.Second) - return - default: - panic(fmt.Errorf("unsupported external indexer type: %s", externalType)) - } - } -} - -// WithSkipDelegatorBlocks - -func WithSkipDelegatorBlocks() BoostIndexerOption { - return func(bi *BoostIndexer) { - bi.skipDelegatorBlocks = true - } -} diff --git a/cmd/indexer/indexer/protocol.go b/cmd/indexer/indexer/protocol.go index e5b291fc7..1c32570e2 100644 --- a/cmd/indexer/indexer/protocol.go +++ b/cmd/indexer/indexer/protocol.go @@ -1,11 +1,8 @@ package indexer import ( - "context" - "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/protocol" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/noderpc" @@ -43,56 +40,3 @@ func setProtocolConstants(rpc noderpc.INode, proto *protocol.Protocol) error { return nil } - -func (bi *BoostIndexer) fetchExternalProtocols(ctx context.Context) error { - logger.Info().Str("network", bi.Network.String()).Msg("Fetching external protocols") - existingProtocols, err := bi.Protocols.GetByNetworkWithSort(bi.Network, "start_level", "desc") - if err != nil { - return err - } - - exists := make(map[string]bool) - for _, existingProtocol := range existingProtocols { - exists[existingProtocol.Hash] = true - } - - extProtocols, err := bi.externalIndexer.GetProtocols() - if err != nil { - return err - } - - protocols := make([]models.Model, 0) - for i := range extProtocols { - if _, ok := exists[extProtocols[i].Hash]; ok { - continue - } - symLink, err := bcd.GetProtoSymLink(extProtocols[i].Hash) - if err != nil { - return err - } - alias := extProtocols[i].Alias - if alias == "" { - alias = extProtocols[i].Hash[:8] - } - - newProtocol := &protocol.Protocol{ - Hash: extProtocols[i].Hash, - Alias: alias, - StartLevel: extProtocols[i].StartLevel, - EndLevel: extProtocols[i].LastLevel, - SymLink: symLink, - Network: bi.Network, - Constants: &protocol.Constants{ - CostPerByte: extProtocols[i].Constants.CostPerByte, - 
HardStorageLimitPerOperation: extProtocols[i].Constants.HardStorageLimitPerOperation, - HardGasLimitPerOperation: extProtocols[i].Constants.HardGasLimitPerOperation, - TimeBetweenBlocks: extProtocols[i].Constants.TimeBetweenBlocks, - }, - } - - protocols = append(protocols, newProtocol) - logger.Info().Str("network", bi.Network.String()).Msgf("Fetched %s", alias) - } - - return bi.Storage.Save(ctx, protocols) -} diff --git a/cmd/indexer/main.go b/cmd/indexer/main.go index ac3845b9b..5b1471aac 100644 --- a/cmd/indexer/main.go +++ b/cmd/indexer/main.go @@ -4,7 +4,6 @@ import ( "context" "os" "os/signal" - "runtime" "sync" "syscall" @@ -33,7 +32,6 @@ func main() { config.WithConfigCopy(cfg), config.WithStorage(cfg.Storage, "indexer", 10, cfg.Indexer.Connections.Open, cfg.Indexer.Connections.Idle), config.WithSearch(cfg.Storage), - config.WithShare(cfg.SharePath), ) defer internalCtx.Close() @@ -45,13 +43,6 @@ func main() { return } - countCPU := runtime.NumCPU() - if countCPU > len(indexers)+1 { - countCPU = len(indexers) + 1 - } - logger.Warning().Msgf("Indexer started on %d CPU cores", countCPU) - runtime.GOMAXPROCS(countCPU) - sigChan := make(chan os.Signal, 1) signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM, syscall.SIGINT) diff --git a/cmd/metrics/main.go b/cmd/metrics/main.go index a4d989f0c..ca6569b82 100644 --- a/cmd/metrics/main.go +++ b/cmd/metrics/main.go @@ -35,7 +35,6 @@ func main() { config.WithStorage(cfg.Storage, cfg.Metrics.ProjectName, 0, cfg.Metrics.Connections.Open, cfg.Metrics.Connections.Idle), config.WithRPC(cfg.RPC), config.WithSearch(cfg.Storage), - config.WithShare(cfg.SharePath), config.WithDomains(cfg.Domains), config.WithConfigCopy(cfg), ) @@ -76,6 +75,13 @@ func main() { time.Second*15, bulkSize, ), + services.NewStorageBased( + "contracts", + ctx.Services, + services.NewContractsHandler(ctx), + time.Second*15, + bulkSize, + ), services.NewStorageBased( "big_map_diffs", ctx.Services, diff --git a/cmd/metrics/services/big_map_diffs.go b/cmd/metrics/services/big_map_diffs.go index 020197072..13fc4cf3f 100644 --- a/cmd/metrics/services/big_map_diffs.go +++ b/cmd/metrics/services/big_map_diffs.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/logger" @@ -19,12 +20,12 @@ func NewBigMapDiffHandler(ctx *config.Context) *BigMapDiffHandler { } // Handle - -func (oh *BigMapDiffHandler) Handle(ctx context.Context, items []models.Model) error { +func (oh *BigMapDiffHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { if len(items) == 0 { return nil } - logger.Info().Msgf("%2d big map diffs are processed", len(items)) + logger.Info().Msgf("%3d big map diffs are processed", len(items)) return saveSearchModels(oh.Context, items) } diff --git a/cmd/metrics/services/contract_metadata.go b/cmd/metrics/services/contract_metadata.go index ad56994aa..595d847fb 100644 --- a/cmd/metrics/services/contract_metadata.go +++ b/cmd/metrics/services/contract_metadata.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/handlers" @@ -21,16 +22,18 @@ type ContractMetadataHandler struct { func NewContractMetadataHandler(ctx *config.Context) *ContractMetadataHandler { return &ContractMetadataHandler{ ctx, - handlers.NewContractMetadata(ctx.BigMapDiffs, ctx.Blocks, ctx.Storage, ctx.TZIP, ctx.RPC, ctx.SharePath, 
ctx.Config.IPFSGateways), + handlers.NewContractMetadata(ctx.BigMapDiffs, ctx.Blocks, ctx.Contracts, ctx.Storage, ctx.TZIP, ctx.RPC, ctx.Config.IPFSGateways), } } // Handle - -func (cm *ContractMetadataHandler) Handle(ctx context.Context, items []models.Model) error { +func (cm *ContractMetadataHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { if len(items) == 0 { return nil } + var localWg sync.WaitGroup + updates := make([]models.Model, 0) for i := range items { bmd, ok := items[i].(*domains.BigMapDiff) @@ -38,24 +41,34 @@ func (cm *ContractMetadataHandler) Handle(ctx context.Context, items []models.Mo return errors.Errorf("[ContractMetadata.Handle] invalid type: expected *bigmapdiff.BigMapDiff got %T", items[i]) } - storageType, err := cm.CachedStorageType(bmd.Network, bmd.Contract, bmd.Protocol.SymLink) + storageType, err := cm.Cache.StorageType(bmd.Network, bmd.Contract, bmd.Protocol.SymLink) if err != nil { return errors.Errorf("[ContractMetadata.Handle] can't get storage type for '%s' in %s: %s", bmd.Contract, bmd.Network.String(), err) } - res, err := cm.handler.Do(bmd, storageType) - if err != nil { - return errors.Errorf("[ContractMetadata.Handle] compute error message: %s", err) - } - - updates = append(updates, res...) + wg.Add(1) + localWg.Add(1) + func() { + defer func() { + wg.Done() + localWg.Done() + }() + res, err := cm.handler.Do(bmd, storageType) + if err != nil { + logger.Warning().Err(err).Msgf("ContractMetadata.Handle") + return + } + updates = append(updates, res...) + }() } + localWg.Wait() + if len(updates) == 0 { return nil } - logger.Info().Msgf("%2d contract metadata are processed", len(updates)) + logger.Info().Msgf("%3d contract metadata are processed", len(updates)) if err := saveSearchModels(cm.Context, updates); err != nil { return err diff --git a/cmd/metrics/services/contracts.go b/cmd/metrics/services/contracts.go new file mode 100644 index 000000000..66f057223 --- /dev/null +++ b/cmd/metrics/services/contracts.go @@ -0,0 +1,45 @@ +package services + +import ( + "context" + "sync" + + "github.com/baking-bad/bcdhub/internal/config" + "github.com/baking-bad/bcdhub/internal/logger" + "github.com/baking-bad/bcdhub/internal/models" +) + +// ContractsHandler - +type ContractsHandler struct { + *config.Context +} + +// NewContractsHandler - +func NewContractsHandler(ctx *config.Context) *ContractsHandler { + return &ContractsHandler{ctx} +} + +// Handle - +func (ch *ContractsHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { + if len(items) == 0 { + return nil + } + + logger.Info().Msgf("%3d contracts are processed", len(items)) + + return saveSearchModels(ch.Context, items) +} + +// Chunk - +func (ch *ContractsHandler) Chunk(lastID, size int64) ([]models.Model, error) { + operations, err := getContracts(ch.StorageDB.DB, lastID, size) + if err != nil { + return nil, err + } + + data := make([]models.Model, len(operations)) + for i := range operations { + data[i] = &operations[i] + } + return data, nil +} diff --git a/cmd/metrics/services/operations.go b/cmd/metrics/services/operations.go index 26c7d7cae..bbb12f9bb 100644 --- a/cmd/metrics/services/operations.go +++ b/cmd/metrics/services/operations.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/logger" @@ -19,12 +20,12 @@ func NewOperationsHandler(ctx *config.Context) *OperationsHandler { } // 
Handle - -func (oh *OperationsHandler) Handle(ctx context.Context, items []models.Model) error { +func (oh *OperationsHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { if len(items) == 0 { return nil } - logger.Info().Msgf("%2d operations are processed", len(items)) + logger.Info().Msgf("%3d operations are processed", len(items)) return saveSearchModels(oh.Context, items) } diff --git a/cmd/metrics/services/projects.go b/cmd/metrics/services/projects.go index 4c032c315..40a19dcfc 100644 --- a/cmd/metrics/services/projects.go +++ b/cmd/metrics/services/projects.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/logger" @@ -22,45 +23,28 @@ func NewProjectsHandler(ctx *config.Context) *ProjectsHandler { } // Handle - -func (p *ProjectsHandler) Handle(ctx context.Context, items []models.Model) error { +func (p *ProjectsHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { if len(items) == 0 { return nil } - contracts := make([]*contract.Contract, len(items)) + scripts := make([]contract.Script, len(items)) for i := range items { - c, ok := items[i].(*contract.Contract) + c, ok := items[i].(*contract.Script) if !ok { - return errors.Errorf("[Projects.Handle] invalid entity type: wait *contract.Contract got %T", items[i]) + return errors.Errorf("[Projects.Handle] invalid entity type: wait *contract.Script got %T", items[i]) } - contracts[i] = c + scripts[i] = *c } - updates := make([]models.Model, 0) - searchModels := make([]models.Model, 0) - for i := range contracts { - res, err := p.process(contracts[i], contracts[:i]) - if err != nil { + for i := range scripts { + if err := p.process(&scripts[i], scripts[:i]); err != nil { return errors.Errorf("[Projects.Handle] compute error message: %s", err) } - - if len(res) > 0 { - updates = append(updates, res...) - searchModels = append(searchModels, res...) 
- } else { - searchModels = append(searchModels, contracts[i]) - } - } - if len(searchModels) > 0 { - if err := saveSearchModels(p.Context, searchModels); err != nil { - return err - } - } - - if len(updates) > 0 { - logger.Info().Msgf("%2d contracts are processed", len(updates)) - return p.Storage.Save(ctx, updates) + if len(scripts) > 0 { + logger.Info().Msgf("%3d scripts are processed", len(scripts)) + return p.Scripts.UpdateProjectID(scripts) } return nil @@ -68,26 +52,22 @@ func (p *ProjectsHandler) Handle(ctx context.Context, items []models.Model) erro // Chunk - func (p *ProjectsHandler) Chunk(lastID, size int64) ([]models.Model, error) { - contracts, err := getContracts(p.StorageDB.DB, lastID, size) + scripts, err := getScripts(p.StorageDB.DB, lastID, size) if err != nil { return nil, err } - data := make([]models.Model, len(contracts)) - for i := range contracts { - data[i] = &contracts[i] + data := make([]models.Model, len(scripts)) + for i := range scripts { + data[i] = &scripts[i] } return data, nil } -func (p *ProjectsHandler) process(contract *contract.Contract, chunk []*contract.Contract) ([]models.Model, error) { - if contract.ProjectID.Valid { - return nil, nil - } - - if err := metrics.SetContractProjectID(p.Contracts, contract, chunk); err != nil { - return nil, errors.Errorf("error during set contract projectID: %s", err) +func (p *ProjectsHandler) process(script *contract.Script, chunk []contract.Script) error { + if script.ProjectID.Valid { + return nil } - return []models.Model{contract}, nil + return metrics.SetScriptProjectID(p.Scripts, script, chunk) } diff --git a/cmd/metrics/services/queries.go b/cmd/metrics/services/queries.go index 016f25b62..4282cf481 100644 --- a/cmd/metrics/services/queries.go +++ b/cmd/metrics/services/queries.go @@ -11,8 +11,8 @@ import ( "github.com/go-pg/pg/v10" ) -func getContracts(db pg.DBI, lastID, size int64) (resp []contract.Contract, err error) { - query := db.Model((*contract.Contract)(nil)).Order("id asc") +func getScripts(db pg.DBI, lastID, size int64) (resp []contract.Script, err error) { + query := db.Model((*contract.Script)(nil)).Order("id asc") if lastID > 0 { query.Where("id > ?", lastID) } @@ -23,6 +23,18 @@ func getContracts(db pg.DBI, lastID, size int64) (resp []contract.Contract, err return } +func getContracts(db pg.DBI, lastID, size int64) (resp []contract.Contract, err error) { + query := db.Model((*contract.Contract)(nil)).Order("id asc") + if lastID > 0 { + query.Where("contract.id > ?", lastID) + } + if size == 0 || size > 1000 { + size = 10 + } + err = query.Limit(int(size)).Relation("Alpha").Relation("Babylon").Select(&resp) + return +} + func getOperations(db pg.DBI, lastID, size int64) (resp []operation.Operation, err error) { query := db.Model((*operation.Operation)(nil)).Order("id asc") if lastID > 0 { @@ -53,12 +65,12 @@ func saveSearchModels(ctx *config.Context, items []models.Model) error { for i := range data { switch typ := data[i].(type) { case *search.Contract: - typ.Alias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Address) - typ.DelegateAlias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Delegate) + typ.Alias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Address) + typ.DelegateAlias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Delegate) case *search.Operation: - typ.SourceAlias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Source) - typ.DestinationAlias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Destination) - typ.DelegateAlias = 
ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Delegate) + typ.SourceAlias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Source) + typ.DestinationAlias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Destination) + typ.DelegateAlias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Delegate) } } diff --git a/cmd/metrics/services/service.go b/cmd/metrics/services/service.go index b232ddb7f..bc263f7b8 100644 --- a/cmd/metrics/services/service.go +++ b/cmd/metrics/services/service.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/models" ) @@ -16,5 +17,5 @@ type Service interface { // Handler - type Handler interface { Chunk(lastID, size int64) ([]models.Model, error) - Handle(ctx context.Context, items []models.Model) error + Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error } diff --git a/cmd/metrics/services/storage_based.go b/cmd/metrics/services/storage_based.go index 9a6edc177..dc863a5a8 100644 --- a/cmd/metrics/services/storage_based.go +++ b/cmd/metrics/services/storage_based.go @@ -70,7 +70,7 @@ func (s *StorageBased) work(ctx context.Context) { ticker := time.NewTicker(s.updatePeriod) defer ticker.Stop() - isFull, err := s.do(ctx) + isFull, err := s.do(ctx, &s.wg) if err != nil { logger.Err(err) } @@ -82,7 +82,7 @@ func (s *StorageBased) work(ctx context.Context) { return case <-ticker.C: - isFull, err = s.do(ctx) + isFull, err = s.do(ctx, &s.wg) if err != nil { logger.Err(err) continue @@ -90,7 +90,7 @@ func (s *StorageBased) work(ctx context.Context) { default: if isFull { - isFull, err = s.do(ctx) + isFull, err = s.do(ctx, &s.wg) if err != nil { logger.Err(err) continue @@ -102,13 +102,13 @@ func (s *StorageBased) work(ctx context.Context) { } } -func (s *StorageBased) do(ctx context.Context) (bool, error) { +func (s *StorageBased) do(ctx context.Context, wg *sync.WaitGroup) (bool, error) { items, err := s.handler.Chunk(s.state.LastID, s.bulkSize) if err != nil { return false, err } - if err := s.handler.Handle(ctx, items); err != nil { + if err := s.handler.Handle(ctx, items, wg); err != nil { return false, err } diff --git a/cmd/metrics/services/token_metadata.go b/cmd/metrics/services/token_metadata.go index 472888568..dc095d60c 100644 --- a/cmd/metrics/services/token_metadata.go +++ b/cmd/metrics/services/token_metadata.go @@ -2,6 +2,7 @@ package services import ( "context" + "sync" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/handlers" @@ -20,16 +21,17 @@ type TokenMetadataHandler struct { // NewTokenMetadataHandler - func NewTokenMetadataHandler(ctx *config.Context) *TokenMetadataHandler { return &TokenMetadataHandler{ - ctx, - handlers.NewTokenMetadata(ctx.BigMapDiffs, ctx.Blocks, ctx.TokenMetadata, ctx.Storage, ctx.RPC, ctx.SharePath, ctx.Config.IPFSGateways), + Context: ctx, + handler: handlers.NewTokenMetadata(ctx.BigMapDiffs, ctx.Blocks, ctx.Contracts, ctx.TokenMetadata, ctx.Storage, ctx.RPC, ctx.Config.IPFSGateways), } } // Handle - -func (tm *TokenMetadataHandler) Handle(ctx context.Context, items []models.Model) error { +func (tm *TokenMetadataHandler) Handle(ctx context.Context, items []models.Model, wg *sync.WaitGroup) error { if len(items) == 0 { return nil } + var localWg sync.WaitGroup updates := make([]models.Model, 0) for i := range items { @@ -38,24 +40,34 @@ func (tm *TokenMetadataHandler) Handle(ctx context.Context, items []models.Model return errors.Errorf("[TokenMetadata.Handle] invalid type: 
expected *domains.BigMapDiff got %T", items[i]) } - storageType, err := tm.CachedStorageType(bmd.Network, bmd.Contract, bmd.Protocol.SymLink) + storageType, err := tm.Cache.StorageType(bmd.Network, bmd.Contract, bmd.Protocol.SymLink) if err != nil { return errors.Errorf("[TokenMetadata.Handle] can't get storage type for '%s' in %s: %s", bmd.Contract, bmd.Network.String(), err) } - res, err := tm.handler.Do(bmd, storageType) - if err != nil { - return errors.Errorf("[TokenMetadata.Handle] compute error message: %s", err) - } - - updates = append(updates, res...) + wg.Add(1) + localWg.Add(1) + func(wg *sync.WaitGroup) { + defer func() { + wg.Done() + localWg.Done() + }() + res, err := tm.handler.Do(bmd, storageType) + if err != nil { + logger.Warning().Err(err).Msgf("TokenMetadata.Handle") + return + } + updates = append(updates, res...) + }(wg) } + localWg.Wait() + if len(updates) == 0 { return nil } - logger.Info().Msgf("%2d token metadata are processed", len(updates)) + logger.Info().Msgf("%3d token metadata are processed", len(updates)) if err := saveSearchModels(tm.Context, updates); err != nil { return err diff --git a/configs/development.yml b/configs/development.yml index fe954f9dc..729590feb 100644 --- a/configs/development.yml +++ b/configs/development.yml @@ -2,6 +2,7 @@ rpc: mainnet: uri: https://rpc.tzkt.io/mainnet timeout: 20 + # cache: /etc/bcd granadanet: uri: https://rpc.tzkt.io/granadanet timeout: 20 @@ -81,7 +82,6 @@ indexer: skip_delegator_blocks: true networks: mainnet: - boost: tzkt # granadanet: # hangzhou2net: connections: diff --git a/internal/bcd/raw_script.go b/internal/bcd/raw_script.go new file mode 100644 index 000000000..65043cf69 --- /dev/null +++ b/internal/bcd/raw_script.go @@ -0,0 +1,74 @@ +package bcd + +import ( + "bytes" + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/bcd/consts" + "github.com/pkg/errors" +) + +// RawScript - +type RawScript struct { + Code []byte + Parameter []byte + Storage []byte + Views []byte +} + +type prim struct { + Prim string `json:"prim"` + Args json.RawMessage `json:"args"` +} + +// UnmarshalJSON - +func (s *RawScript) UnmarshalJSON(data []byte) error { + var raw []json.RawMessage + if err := json.Unmarshal(data, &raw); err != nil { + return err + } + + if len(raw) < 3 { + return errors.Errorf("length of script types must be 3 but got %d", len(raw)) + } + + var views bytes.Buffer + if err := views.WriteByte('['); err != nil { + return err + } + for i := range raw { + var p prim + if err := json.Unmarshal(raw[i], &p); err != nil { + return err + } + switch p.Prim { + case consts.PARAMETER: + s.Parameter = p.Args + case consts.STORAGE: + s.Storage = p.Args + case consts.CODE: + s.Code = p.Args + case consts.View: + if views.Len() > 1 { + if err := views.WriteByte(','); err != nil { + return err + } + } + if _, err := views.Write(raw[i]); err != nil { + return err + } + default: + return errors.Errorf("unknown script high level primitive: %s", p.Prim) + } + } + + if err := views.WriteByte(']'); err != nil { + return err + } + + if views.Len() > 2 { + s.Views = views.Bytes() + } + + return nil +} diff --git a/internal/bcd/raw_script_test.go b/internal/bcd/raw_script_test.go new file mode 100644 index 000000000..4c30d1985 --- /dev/null +++ b/internal/bcd/raw_script_test.go @@ -0,0 +1,45 @@ +package bcd + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRawScript_UnmarshalJSON(t *testing.T) { + tests := []struct { + name string + data []byte + wantErr bool + wantCode 
[]byte + wantParameter []byte + wantStorage []byte + }{ + { + name: "test 1", + data: []byte(`[{"prim":"code","args":[{"prim":"code"}]},{"prim":"storage","args":[{"prim":"storage"}]},{"prim":"parameter","args":[{"prim":"parameter"}]}]`), + wantCode: []byte(`[{"prim":"code"}]`), + wantParameter: []byte(`[{"prim":"parameter"}]`), + wantStorage: []byte(`[{"prim":"storage"}]`), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var s RawScript + if err := s.UnmarshalJSON(tt.data); (err != nil) != tt.wantErr { + t.Errorf("RawScript.UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !assert.ElementsMatch(t, tt.wantCode, s.Code) { + return + } + if !assert.ElementsMatch(t, tt.wantParameter, s.Parameter) { + return + } + if !assert.ElementsMatch(t, tt.wantStorage, s.Storage) { + return + } + }) + } +} diff --git a/internal/cache/cache.go b/internal/cache/cache.go index 0d67698a6..58143b9f3 100644 --- a/internal/cache/cache.go +++ b/internal/cache/cache.go @@ -2,36 +2,113 @@ package cache import ( "fmt" + "time" + "github.com/baking-bad/bcdhub/internal/bcd" + "github.com/baking-bad/bcdhub/internal/bcd/ast" + "github.com/baking-bad/bcdhub/internal/bcd/consts" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/protocol" "github.com/baking-bad/bcdhub/internal/models/types" + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/karlseguin/ccache" + "github.com/microcosm-cc/bluemonday" + "github.com/pkg/errors" ) // Cache - type Cache struct { *ccache.Cache + rpc map[types.Network]noderpc.INode + + blocks block.Repository + contracts contract.Repository + protocols protocol.Repository + sanitizer *bluemonday.Policy + tzip tzip.Repository } // NewCache - -func NewCache() *Cache { +func NewCache(rpc map[types.Network]noderpc.INode, blocks block.Repository, contracts contract.Repository, protocols protocol.Repository, tzip tzip.Repository, sanitizer *bluemonday.Policy) *Cache { return &Cache{ ccache.New(ccache.Configure().MaxSize(100000)), + rpc, + blocks, + contracts, + protocols, + sanitizer, + tzip, } } -// AliasKey - -func (cache *Cache) AliasKey(network types.Network, address string) string { - return fmt.Sprintf("alias:%d:%s", network, address) +// Alias - +func (cache *Cache) Alias(network types.Network, address string) string { + if !bcd.IsContract(address) { + return "" + } + key := fmt.Sprintf("alias:%d:%s", network, address) + item, err := cache.Fetch(key, time.Minute*30, func() (interface{}, error) { + return cache.tzip.Get(network, address) + }) + if err != nil { + return "" + } + + if data, ok := item.Value().(*tzip.TZIP); ok && data != nil { + return cache.sanitizer.Sanitize(data.Name) + } + return "" } -// ContractMetadataKey - -func (cache *Cache) ContractMetadataKey(network types.Network, address string) string { - return fmt.Sprintf("contract_metadata:%d:%s", network, address) +// ContractMetadata - +func (cache *Cache) ContractMetadata(network types.Network, address string) (*tzip.TZIP, error) { + if !bcd.IsContract(address) { + return nil, nil + } + key := fmt.Sprintf("contract_metadata:%d:%s", network, address) + item, err := cache.Fetch(key, time.Minute*30, func() (interface{}, error) { + return cache.tzip.Get(network, address) + }) + if err != nil { + return nil, err 
+ } + + return item.Value().(*tzip.TZIP), nil } -// ContractKey - -func (cache *Cache) ContractKey(network types.Network, address string) string { - return fmt.Sprintf("contract:%d:%s", network, address) +// Events - +func (cache *Cache) Events(network types.Network, address string) (tzip.Events, error) { + if !bcd.IsContract(address) { + return nil, nil + } + key := fmt.Sprintf("contract_metadata:%d:%s", network, address) + item, err := cache.Fetch(key, time.Hour, func() (interface{}, error) { + return cache.tzip.Events(network, address) + }) + if err != nil { + return nil, err + } + + return item.Value().(tzip.Events), nil +} + +// Contract - +func (cache *Cache) Contract(network types.Network, address string) (*contract.Contract, error) { + if !bcd.IsContract(address) { + return nil, nil + } + + key := fmt.Sprintf("contract:%d:%s", network, address) + item, err := cache.Fetch(key, time.Minute*10, func() (interface{}, error) { + return cache.contracts.Get(network, address) + }) + if err != nil { + return nil, err + } + cntr := item.Value().(contract.Contract) + return &cntr, nil } // ProjectIDByHash - @@ -39,37 +116,97 @@ func (cache *Cache) ProjectIDByHash(hash string) string { return fmt.Sprintf("project_id:%s", hash) } -// BlockKey - -func (cache *Cache) BlockKey(network types.Network) string { - return fmt.Sprintf("block:%d", network) +// CurrentBlock - +func (cache *Cache) CurrentBlock(network types.Network) (block.Block, error) { + key := fmt.Sprintf("block:%d", network) + item, err := cache.Fetch(key, time.Second*15, func() (interface{}, error) { + return cache.blocks.Last(network) + }) + if err != nil { + return block.Block{}, err + } + return item.Value().(block.Block), nil } -// TezosBalanceKey - -func (cache *Cache) TezosBalanceKey(network types.Network, address string, level int64) string { - return fmt.Sprintf("tezos_balance:%d:%s:%d", network, address, level) -} +//nolint +// TezosBalance - +func (cache *Cache) TezosBalance(network types.Network, address string, level int64) (int64, error) { + node, ok := cache.rpc[network] + if !ok { + return 0, errors.Errorf("unknown network: %s", network.String()) + } -// ScriptKey - -func (cache *Cache) ScriptKey(network types.Network, address string) string { - return fmt.Sprintf("script:%d:%s", network, address) + key := fmt.Sprintf("tezos_balance:%d:%s:%d", network, address, level) + item, err := cache.Fetch(key, 30*time.Second, func() (interface{}, error) { + + return node.GetContractBalance(address, level) + }) + if err != nil { + return 0, err + } + return item.Value().(int64), nil } -// ScriptBytesKey - -func (cache *Cache) ScriptBytesKey(network types.Network, address string) string { - return fmt.Sprintf("script_bytes:%d:%s", network, address) +// ScriptBytes - +func (cache *Cache) ScriptBytes(network types.Network, address, symLink string) ([]byte, error) { + if !bcd.IsContract(address) { + return nil, nil + } + + key := fmt.Sprintf("script_bytes:%d:%s", network, address) + item, err := cache.Fetch(key, time.Hour, func() (interface{}, error) { + script, err := cache.contracts.Script(network, address, symLink) + if err != nil { + return nil, err + } + return script.Full() + }) + if err != nil { + return nil, err + } + return item.Value().([]byte), nil } // StorageType - -func (cache *Cache) StorageType(network types.Network, address string) string { - return fmt.Sprintf("storage:%d:%s", network, address) +func (cache *Cache) StorageType(network types.Network, address, symLink string) (*ast.TypedAst, error) { + if 
!bcd.IsContract(address) { + return nil, nil + } + + key := fmt.Sprintf("storage:%d:%s", network, address) + item, err := cache.Fetch(key, time.Hour, func() (interface{}, error) { + data, err := cache.contracts.ScriptPart(network, address, symLink, consts.STORAGE) + if err != nil { + return nil, err + } + return ast.NewTypedAstFromBytes(data) + }) + if err != nil { + return nil, err + } + return item.Value().(*ast.TypedAst), nil } -// ProtocolByHashKey - -func (cache *Cache) ProtocolByHashKey(network types.Network, id int64) string { - return fmt.Sprintf("protocol_hash:%d:%d", network, id) +// ProtocolByID - +func (cache *Cache) ProtocolByID(network types.Network, id int64) (protocol.Protocol, error) { + key := fmt.Sprintf("protocol_id:%d:%d", network, id) + item, err := cache.Fetch(key, time.Hour, func() (interface{}, error) { + return cache.protocols.GetByID(id) + }) + if err != nil { + return protocol.Protocol{}, err + } + return item.Value().(protocol.Protocol), nil } -// ProtocolByIDKey - -func (cache *Cache) ProtocolByIDKey(network types.Network, hash string) string { - return fmt.Sprintf("protocol_id:%d:%s", network, hash) +// ProtocolByHash - +func (cache *Cache) ProtocolByHash(network types.Network, hash string) (protocol.Protocol, error) { + key := fmt.Sprintf("protocol_hash:%d:%s", network, hash) + item, err := cache.Fetch(key, time.Hour, func() (interface{}, error) { + return cache.protocols.Get(network, hash, -1) + }) + if err != nil { + return protocol.Protocol{}, err + } + return item.Value().(protocol.Protocol), nil } diff --git a/internal/classification/functions/simple.go b/internal/classification/functions/simple.go index 0e3b2b9ef..a0c9ade63 100644 --- a/internal/classification/functions/simple.go +++ b/internal/classification/functions/simple.go @@ -10,7 +10,7 @@ type Simple struct { func NewSimple() Simple { return Simple{ coeffs: []float64{ - 0.15, 0.1, 0.1, 0.1, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.1, 0.1, + 0.15, 0.15, 0.15, 0.05, 0.05, 0.05, 0.05, 0.05, 0.1, 0.1, 0.1, }, intercepts: 0.85, } diff --git a/internal/classification/metrics/array_metric.go b/internal/classification/metrics/array_metric.go index 9065b19ee..abcd255be 100644 --- a/internal/classification/metrics/array_metric.go +++ b/internal/classification/metrics/array_metric.go @@ -22,7 +22,7 @@ func NewArray(field string) *Array { } // Compute - -func (m *Array) Compute(a, b contract.Contract) Feature { +func (m *Array) Compute(a, b contract.Script) Feature { f := Feature{ Name: strings.ToLower(m.Field), } @@ -66,7 +66,7 @@ func (m *Array) Compute(a, b contract.Contract) Feature { return f } -func (m *Array) getContractFieldArray(c contract.Contract) ([]interface{}, error) { +func (m *Array) getContractFieldArray(c contract.Script) ([]interface{}, error) { r := reflect.ValueOf(c) f := reflect.Indirect(r).FieldByName(m.Field) diff --git a/internal/classification/metrics/bin_mask_metrics.go b/internal/classification/metrics/bin_mask_metrics.go index 9155dc48a..a458ee80c 100644 --- a/internal/classification/metrics/bin_mask_metrics.go +++ b/internal/classification/metrics/bin_mask_metrics.go @@ -23,7 +23,7 @@ func NewBinMask(field string) *BinMask { } // Compute - -func (m *BinMask) Compute(a, b contract.Contract) Feature { +func (m *BinMask) Compute(a, b contract.Script) Feature { f := Feature{ Name: strings.ToLower(m.Field), } @@ -49,7 +49,7 @@ func (m *BinMask) Compute(a, b contract.Contract) Feature { return f } -func (m *BinMask) getContractFieldBinMask(c contract.Contract) (int64, error) { +func (m 
*BinMask) getContractFieldBinMask(c contract.Script) (int64, error) { r := reflect.ValueOf(c) f := reflect.Indirect(r).FieldByName(m.Field) diff --git a/internal/classification/metrics/bin_mask_metrics_test.go b/internal/classification/metrics/bin_mask_metrics_test.go index dc688afc0..0de0c6fb9 100644 --- a/internal/classification/metrics/bin_mask_metrics_test.go +++ b/internal/classification/metrics/bin_mask_metrics_test.go @@ -8,8 +8,8 @@ import ( func TestBinMask_Compute(t *testing.T) { type args struct { - a contract.Contract - b contract.Contract + a contract.Script + b contract.Script } tests := []struct { name string @@ -19,22 +19,22 @@ func TestBinMask_Compute(t *testing.T) { { name: "test 1", args: args{ - a: contract.Contract{Tags: 192}, - b: contract.Contract{Tags: 192}, + a: contract.Script{Tags: 192}, + b: contract.Script{Tags: 192}, }, want: 1, }, { name: "test 2", args: args{ - a: contract.Contract{Tags: 3}, - b: contract.Contract{Tags: 0}, + a: contract.Script{Tags: 3}, + b: contract.Script{Tags: 0}, }, want: 0.133333, }, { name: "test 3", args: args{ - a: contract.Contract{Tags: 7}, - b: contract.Contract{Tags: 3}, + a: contract.Script{Tags: 7}, + b: contract.Script{Tags: 3}, }, want: 0.066666, }, diff --git a/internal/classification/metrics/bool_metric.go b/internal/classification/metrics/bool_metric.go index e92d876fe..d151600bb 100644 --- a/internal/classification/metrics/bool_metric.go +++ b/internal/classification/metrics/bool_metric.go @@ -20,7 +20,7 @@ func NewBool(field string) *Bool { } // Compute - -func (m *Bool) Compute(a, b contract.Contract) Feature { +func (m *Bool) Compute(a, b contract.Script) Feature { f := Feature{ Name: strings.ToLower(m.Field), } @@ -33,7 +33,7 @@ func (m *Bool) Compute(a, b contract.Contract) Feature { return f } -func (m *Bool) getContractField(c contract.Contract) interface{} { +func (m *Bool) getContractField(c contract.Script) interface{} { r := reflect.ValueOf(c) return reflect.Indirect(r).FieldByName(m.Field).Interface() } diff --git a/internal/classification/metrics/fingerprint.go b/internal/classification/metrics/fingerprint.go index 7bc2500d1..e9fcd2f8f 100644 --- a/internal/classification/metrics/fingerprint.go +++ b/internal/classification/metrics/fingerprint.go @@ -21,10 +21,11 @@ func NewFingerprint(section string) *Fingerprint { } // Compute - -func (m *Fingerprint) Compute(a, b contract.Contract) Feature { +func (m *Fingerprint) Compute(a, b contract.Script) Feature { f := Feature{ Name: fmt.Sprintf("fingerprint_%s", m.Section), } + var x, y []byte switch m.Section { case consts.PARAMETER: @@ -129,7 +130,7 @@ func NewFingerprintLength(section string) *FingerprintLength { } // Compute - -func (m *FingerprintLength) Compute(a, b contract.Contract) Feature { +func (m *FingerprintLength) Compute(a, b contract.Script) Feature { f := Feature{ Name: fmt.Sprintf("fingerprint_length_%s", m.Section), } diff --git a/internal/classification/metrics/interface.go b/internal/classification/metrics/interface.go index 930b8f467..654d9adc2 100644 --- a/internal/classification/metrics/interface.go +++ b/internal/classification/metrics/interface.go @@ -4,7 +4,7 @@ import "github.com/baking-bad/bcdhub/internal/models/contract" // Metric - type Metric interface { - Compute(a, b contract.Contract) Feature + Compute(a, b contract.Script) Feature } // Feature - diff --git a/internal/classification/metrics/manager.go b/internal/classification/metrics/manager.go deleted file mode 100644 index dcde48530..000000000 --- 
a/internal/classification/metrics/manager.go +++ /dev/null @@ -1,23 +0,0 @@ -package metrics - -import "github.com/baking-bad/bcdhub/internal/models/contract" - -// Manager - -type Manager struct{} - -// NewManager - -func NewManager() *Manager { - return &Manager{} -} - -// Compute - -func (m *Manager) Compute(a, b contract.Contract) Feature { - f := Feature{ - Name: "manager", - } - - if a.Manager == b.Manager && a.Network == b.Network { - f.Value = 1 - } - return f -} diff --git a/internal/classification/metrics/manager_test.go b/internal/classification/metrics/manager_test.go deleted file mode 100644 index 8e141992a..000000000 --- a/internal/classification/metrics/manager_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package metrics - -import ( - "reflect" - "testing" - - "github.com/baking-bad/bcdhub/internal/models/contract" - "github.com/baking-bad/bcdhub/internal/models/types" -) - -func TestManager_Compute(t *testing.T) { - type args struct { - a contract.Contract - b contract.Contract - } - tests := []struct { - name string - args args - want Feature - }{ - { - name: "Case 1", - args: args{ - a: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 0, - }, - b: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 0, - }, - }, - want: Feature{ - Name: "manager", - Value: 1.0, - }, - }, { - name: "Case 2", - args: args{ - a: contract.Contract{ - Manager: types.NullString{ - Str: "other", - Valid: true, - }, - Network: 1, - }, - b: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 1, - }, - }, - want: Feature{ - Name: "manager", - Value: 0.0, - }, - }, { - name: "Case 3", - args: args{ - a: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 1, - }, - b: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 2, - }, - }, - want: Feature{ - Name: "manager", - Value: 0.0, - }, - }, { - name: "Case 4", - args: args{ - a: contract.Contract{ - Manager: types.NullString{ - Str: "other", - Valid: true, - }, - Network: 1, - }, - b: contract.Contract{ - Manager: types.NullString{ - Str: "test", - Valid: true, - }, - Network: 2, - }, - }, - want: Feature{ - Name: "manager", - Value: 0.0, - }, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - m := &Manager{} - if got := m.Compute(tt.args.a, tt.args.b); !reflect.DeepEqual(got, tt.want) { - t.Errorf("Manager.Compute() = %v, want %v", got, tt.want) - } - }) - } -} diff --git a/internal/config/cache.go b/internal/config/cache.go deleted file mode 100644 index 7b2afa037..000000000 --- a/internal/config/cache.go +++ /dev/null @@ -1,190 +0,0 @@ -package config - -import ( - "time" - - "github.com/baking-bad/bcdhub/internal/bcd" - "github.com/baking-bad/bcdhub/internal/bcd/ast" - "github.com/baking-bad/bcdhub/internal/fetch" - "github.com/baking-bad/bcdhub/internal/models/block" - "github.com/baking-bad/bcdhub/internal/models/contract" - "github.com/baking-bad/bcdhub/internal/models/protocol" - "github.com/baking-bad/bcdhub/internal/models/types" - "github.com/baking-bad/bcdhub/internal/models/tzip" -) - -// CachedAlias - -func (ctx *Context) CachedAlias(network types.Network, address string) string { - if !bcd.IsContract(address) { - return "" - } - key := ctx.Cache.AliasKey(network, address) - item, err := ctx.Cache.Fetch(key, 
time.Minute*30, func() (interface{}, error) { - return ctx.TZIP.Get(network, address) - }) - if err != nil { - return "" - } - - if data, ok := item.Value().(*tzip.TZIP); ok && data != nil { - return ctx.Sanitizer.Sanitize(data.Name) - } - return "" -} - -// CachedContractMetadata - -func (ctx *Context) CachedContractMetadata(network types.Network, address string) (*tzip.TZIP, error) { - if !bcd.IsContract(address) { - return nil, nil - } - key := ctx.Cache.ContractMetadataKey(network, address) - item, err := ctx.Cache.Fetch(key, time.Minute*30, func() (interface{}, error) { - return ctx.TZIP.Get(network, address) - }) - if err != nil { - return nil, err - } - - return item.Value().(*tzip.TZIP), nil -} - -// CachedCurrentBlock - -func (ctx *Context) CachedCurrentBlock(network types.Network) (block.Block, error) { - key := ctx.Cache.BlockKey(network) - item, err := ctx.Cache.Fetch(key, time.Second*15, func() (interface{}, error) { - return ctx.Blocks.Last(network) - }) - if err != nil { - return block.Block{}, err - } - return item.Value().(block.Block), nil -} - -// CachedTezosBalance - -func (ctx *Context) CachedTezosBalance(network types.Network, address string, level int64) (int64, error) { - key := ctx.Cache.TezosBalanceKey(network, address, level) - item, err := ctx.Cache.Fetch(key, 30*time.Second, func() (interface{}, error) { - rpc, err := ctx.GetRPC(network) - if err != nil { - return 0, err - } - return rpc.GetContractBalance(address, level) - }) - if err != nil { - return 0, err - } - return item.Value().(int64), nil -} - -// CachedContract - -func (ctx *Context) CachedContract(network types.Network, address string) (*contract.Contract, error) { - if !bcd.IsContract(address) { - return nil, nil - } - - key := ctx.Cache.ContractKey(network, address) - item, err := ctx.Cache.Fetch(key, time.Minute*10, func() (interface{}, error) { - return ctx.Contracts.Get(network, address) - }) - if err != nil { - return nil, err - } - cntr := item.Value().(contract.Contract) - return &cntr, nil -} - -// CachedScript - -func (ctx *Context) CachedScript(network types.Network, address, symLink string) (*ast.Script, error) { - if !bcd.IsContract(address) { - return nil, nil - } - - key := ctx.Cache.ScriptKey(network, address) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - script, err := ctx.CachedScriptBytes(network, address, symLink) - if err != nil { - return nil, err - } - return ast.NewScriptWithoutCode(script) - }) - if err != nil { - return nil, err - } - return item.Value().(*ast.Script), nil -} - -// CachedScriptBytes - -func (ctx *Context) CachedScriptBytes(network types.Network, address, symLink string) ([]byte, error) { - if !bcd.IsContract(address) { - return nil, nil - } - - key := ctx.Cache.ScriptBytesKey(network, address) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - return fetch.ContractBySymLink(network, address, symLink, ctx.SharePath) - }) - if err != nil { - return nil, err - } - return item.Value().([]byte), nil -} - -// CachedStorageType - -func (ctx *Context) CachedStorageType(network types.Network, address, symLink string) (*ast.TypedAst, error) { - if !bcd.IsContract(address) { - return nil, nil - } - - key := ctx.Cache.StorageType(network, address) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - data, err := ctx.CachedScriptBytes(network, address, symLink) - if err != nil { - return nil, err - } - script, err := ast.NewScriptWithoutCode(data) - if err != nil { - return nil, err - 
} - return script.StorageType() - }) - if err != nil { - return nil, err - } - return item.Value().(*ast.TypedAst), nil -} - -// CachedProtocolByHash - -func (ctx *Context) CachedProtocolByHash(network types.Network, hash string) (protocol.Protocol, error) { - key := ctx.Cache.ProtocolByIDKey(network, hash) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - return ctx.Protocols.Get(network, hash, -1) - }) - if err != nil { - return protocol.Protocol{}, err - } - return item.Value().(protocol.Protocol), nil -} - -// CachedProtocolByID - -func (ctx *Context) CachedProtocolByID(network types.Network, id int64) (protocol.Protocol, error) { - key := ctx.Cache.ProtocolByHashKey(network, id) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - return ctx.Protocols.GetByID(id) - }) - if err != nil { - return protocol.Protocol{}, err - } - return item.Value().(protocol.Protocol), nil -} - -// CachedProtocolByID - -func (ctx *Context) CachedProjectIDByHash(hash string) (string, error) { - key := ctx.Cache.ProjectIDByHash(hash) - item, err := ctx.Cache.Fetch(key, time.Hour, func() (interface{}, error) { - return ctx.Contracts.GetProjectIDByHash(hash) - }) - if err != nil { - return "", err - } - return item.Value().(string), nil -} diff --git a/internal/config/config.go b/internal/config/config.go index 217daf383..abbf3caea 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -33,13 +33,10 @@ type Config struct { API APIConfig `yaml:"api"` Indexer struct { - Networks map[string]struct { - Boost string `yaml:"boost"` - } `yaml:"networks"` - ProjectName string `yaml:"project_name"` - SentryEnabled bool `yaml:"sentry_enabled"` - SkipDelegatorBlocks bool `yaml:"skip_delegator_blocks"` - Connections Connections `yaml:"connections"` + Networks map[string]struct{} `yaml:"networks"` + ProjectName string `yaml:"project_name"` + SentryEnabled bool `yaml:"sentry_enabled"` + Connections Connections `yaml:"connections"` } `yaml:"indexer"` Metrics struct { @@ -64,6 +61,7 @@ type Config struct { type RPCConfig struct { URI string `yaml:"uri"` Timeout int `yaml:"timeout"` + Cache string `yaml:"cache"` } // TzKTConfig - diff --git a/internal/config/context.go b/internal/config/context.go index 08e08e8b6..d6740a1b7 100644 --- a/internal/config/context.go +++ b/internal/config/context.go @@ -37,7 +37,6 @@ type Context struct { StorageDB *core.Postgres Config Config - SharePath string TzipSchema string TezosDomainsContracts map[types.Network]string @@ -59,6 +58,7 @@ type Context struct { TZIP tzip.Repository Domains domains.Repository Services service.Repository + Scripts contract.ScriptRepository Searcher search.Searcher @@ -69,7 +69,6 @@ type Context struct { // NewContext - func NewContext(opts ...ContextOption) *Context { ctx := &Context{ - Cache: cache.NewCache(), Sanitizer: bluemonday.UGCPolicy(), } ctx.Sanitizer.AllowAttrs("em") @@ -77,6 +76,10 @@ func NewContext(opts ...ContextOption) *Context { for _, opt := range opts { opt(ctx) } + + ctx.Cache = cache.NewCache( + ctx.RPC, ctx.Blocks, ctx.Contracts, ctx.Protocols, ctx.TZIP, ctx.Sanitizer, + ) return ctx } diff --git a/internal/config/options.go b/internal/config/options.go index c50cc31e6..f198db3b5 100644 --- a/internal/config/options.go +++ b/internal/config/options.go @@ -42,10 +42,18 @@ func WithRPC(rpcConfig map[string]RPCConfig) ContextOption { rpc := make(map[types.Network]noderpc.INode) for name, rpcProvider := range rpcConfig { network := types.NewNetwork(name) - rpc[network] = 
noderpc.NewPool( - []string{rpcProvider.URI}, - noderpc.WithTimeout(time.Second*time.Duration(rpcProvider.Timeout)), - ) + if rpcProvider.Cache != "" { + rpc[network] = noderpc.NewFS( + rpcProvider.URI, + rpcProvider.Cache, + name, + ) + } else { + rpc[network] = noderpc.NewPool( + []string{rpcProvider.URI}, + noderpc.WithTimeout(time.Second*time.Duration(rpcProvider.Timeout)), + ) + } } ctx.RPC = rpc } @@ -64,13 +72,15 @@ func WithStorage(cfg StorageConfig, appName string, maxPageSize int64, maxConnCo pgCore.WithMaxConnections(maxConnCount), // pgCore.WithQueryLogging(), ) + + contractStorage := contract.NewStorage(pg) ctx.StorageDB = pg ctx.Storage = pg ctx.BigMapActions = bigmapaction.NewStorage(pg) ctx.Blocks = block.NewStorage(pg) ctx.BigMapDiffs = bigmapdiff.NewStorage(pg) ctx.DApps = dapp.NewStorage(pg) - ctx.Contracts = contract.NewStorage(pg) + ctx.Contracts = contractStorage ctx.Migrations = migration.NewStorage(pg) ctx.Operations = operation.NewStorage(pg) ctx.Protocols = protocol.NewStorage(pg) @@ -81,6 +91,7 @@ func WithStorage(cfg StorageConfig, appName string, maxPageSize int64, maxConnCo ctx.GlobalConstants = global_constant.NewStorage(pg) ctx.Domains = domains.NewStorage(pg) ctx.Services = service.NewStorage(pg) + ctx.Scripts = contractStorage } } @@ -94,16 +105,6 @@ func WithSearch(cfg StorageConfig) ContextOption { } -// WithShare - -func WithShare(path string) ContextOption { - return func(ctx *Context) { - if path == "" { - panic("Empty share path in config") - } - ctx.SharePath = path - } -} - // WithConfigCopy - func WithConfigCopy(cfg Config) ContextOption { return func(ctx *Context) { diff --git a/internal/fetch/contract.go b/internal/fetch/contract.go deleted file mode 100644 index 8527c42f0..000000000 --- a/internal/fetch/contract.go +++ /dev/null @@ -1,116 +0,0 @@ -package fetch - -import ( - "fmt" - "io/ioutil" - "os" - "path" - "strings" - - "github.com/baking-bad/bcdhub/internal/bcd" - "github.com/baking-bad/bcdhub/internal/models/types" - "github.com/pkg/errors" -) - -var ( - delegatorContract = []byte(`[{"prim":"parameter","args":[{"prim":"or","args":[{"prim":"lambda","args":[{"prim":"unit"},{"prim":"list","args":[{"prim":"operation"}]}],"annots":["%do"]},{"prim":"unit","annots":["%default"]}]}]},{"prim":"storage","args":[{"prim":"key_hash"}]},{"prim":"code","args":[[[[{"prim":"DUP"},{"prim":"CAR"},{"prim":"DIP","args":[[{"prim":"CDR"}]]}]],{"prim":"IF_LEFT","args":[[{"prim":"PUSH","args":[{"prim":"mutez"},{"int":"0"}]},{"prim":"AMOUNT"},[[{"prim":"COMPARE"},{"prim":"EQ"}],{"prim":"IF","args":[[],[[{"prim":"UNIT"},{"prim":"FAILWITH"}]]]}],[{"prim":"DIP","args":[[{"prim":"DUP"}]]},{"prim":"SWAP"}],{"prim":"IMPLICIT_ACCOUNT"},{"prim":"ADDRESS"},{"prim":"SENDER"},[[{"prim":"COMPARE"},{"prim":"EQ"}],{"prim":"IF","args":[[],[[{"prim":"UNIT"},{"prim":"FAILWITH"}]]]}],{"prim":"UNIT"},{"prim":"EXEC"},{"prim":"PAIR"}],[{"prim":"DROP"},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]}]]}]`) -) - -// RemoveContract - -func RemoveContract(network types.Network, address, protocol, filesDirectory string) error { - if filesDirectory == "" { - return errors.Errorf("Invalid filesDirectory: %s", filesDirectory) - } - protoSymLink, err := bcd.GetProtoSymLink(protocol) - if err != nil { - return err - } - - filePath, err := getFilePath(network, address, protoSymLink, filesDirectory) - if err != nil { - return err - } - if _, err = os.Stat(filePath); err == nil { - return os.Remove(filePath) - } else if !os.IsNotExist(err) { - return err - 
} - return nil -} - -// RemoveAllContracts - -func RemoveAllContracts(network fmt.Stringer, filesDirectory string) error { - if filesDirectory == "" { - return errors.Errorf("Invalid filesDirectory: %s", filesDirectory) - } - - if err := chechPath(filesDirectory); err != nil { - return err - } - - dirPath := path.Join(filesDirectory, "contracts", network.String()) - if _, err := os.Stat(dirPath); err == nil { - return os.RemoveAll(dirPath) - } else if !os.IsNotExist(err) { - return err - } - return nil -} - -// Contract - reads contract from file system -func Contract(network types.Network, address, protocol, filesDirectory string) ([]byte, error) { - if protocol == "" { - protocol = bcd.GetCurrentProtocol() - } - protoSymLink, err := bcd.GetProtoSymLink(protocol) - if err != nil { - return nil, err - } - - filePath, err := getFilePath(network, address, protoSymLink, filesDirectory) - if err != nil { - return nil, err - } - if _, err = os.Stat(filePath); err != nil { - if os.IsNotExist(err) { - return delegatorContract, nil - } else { - return nil, err - } - } - return ContractBySymLink(network, address, protoSymLink, filesDirectory) -} - -// ContractBySymLink - reads contract from file system -func ContractBySymLink(network types.Network, address, symLink, filesDirectory string) ([]byte, error) { - filePath, err := getFilePath(network, address, symLink, filesDirectory) - if err != nil { - return nil, err - } - if _, err := os.Stat(filePath); err != nil { - if os.IsNotExist(err) { - return delegatorContract, nil - } else { - return nil, err - } - } - return ioutil.ReadFile(filePath) -} - -func getFilePath(network types.Network, address, symLink, filesDirectory string) (string, error) { - if err := chechPath(filesDirectory); err != nil { - return "", err - } - if err := chechPath(address); err != nil { - return "", err - } - name := fmt.Sprintf("%s_%s.json", address, symLink) - return path.Join(filesDirectory, "contracts", network.String(), name), nil -} - -func chechPath(path string) error { - if strings.Count(path, ".") > 1 { - return errors.Errorf("you can't change directory in share path: %s", path) - } - return nil -} diff --git a/internal/handlers/token_metadata.go b/internal/handlers/token_metadata.go index 6b3af66d8..c60efada5 100644 --- a/internal/handlers/token_metadata.go +++ b/internal/handlers/token_metadata.go @@ -6,6 +6,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/domains" "github.com/baking-bad/bcdhub/internal/models/tokenmetadata" "github.com/baking-bad/bcdhub/internal/models/types" @@ -21,10 +22,10 @@ type TokenMetadata struct { } // NewTokenMetadata - -func NewTokenMetadata(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, tm tokenmetadata.Repository, storage models.GeneralRepository, rpcs map[types.Network]noderpc.INode, sharePath string, ipfs []string) *TokenMetadata { +func NewTokenMetadata(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, contractsRepo contract.Repository, tm tokenmetadata.Repository, storage models.GeneralRepository, rpcs map[types.Network]noderpc.INode, ipfs []string) *TokenMetadata { parsers := make(map[types.Network]tokens.Parser) for network, rpc := range rpcs { - parsers[network] = tokens.NewParser(bigMapRepo, blockRepo, tm, storage, rpc, 
sharePath, network, ipfs...) + parsers[network] = tokens.NewParser(bigMapRepo, blockRepo, contractsRepo, tm, storage, rpc, network, ipfs...) } return &TokenMetadata{ storage, parsers, diff --git a/internal/handlers/tzip.go b/internal/handlers/tzip.go index 7a0b09cbf..bf3246d0e 100644 --- a/internal/handlers/tzip.go +++ b/internal/handlers/tzip.go @@ -6,6 +6,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/domains" "github.com/baking-bad/bcdhub/internal/models/types" tzipModel "github.com/baking-bad/bcdhub/internal/models/tzip" @@ -21,12 +22,11 @@ type ContractMetadata struct { } // NewContractMetadata - -func NewContractMetadata(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, storage models.GeneralRepository, repo tzipModel.Repository, rpcs map[types.Network]noderpc.INode, sharePath string, ipfs []string) *ContractMetadata { +func NewContractMetadata(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, contractsRepo contract.Repository, storage models.GeneralRepository, repo tzipModel.Repository, rpcs map[types.Network]noderpc.INode, ipfs []string) *ContractMetadata { parsers := make(map[types.Network]tzip.Parser) for network, rpc := range rpcs { - parsers[network] = tzip.NewParser(bigMapRepo, blockRepo, storage, rpc, tzip.ParserConfig{ + parsers[network] = tzip.NewParser(bigMapRepo, blockRepo, contractsRepo, storage, rpc, tzip.ParserConfig{ IPFSGateways: ipfs, - SharePath: sharePath, }) } return &ContractMetadata{ @@ -53,7 +53,7 @@ func (t *ContractMetadata) handle(bmd *domains.BigMapDiff) ([]models.Model, erro BigMapDiff: *bmd.BigMapDiff, }) if err != nil { - logger.Warning().Fields(bmd.LogFields()).Err(err).Msg("") + logger.Warning().Fields(bmd.LogFields()).Err(err).Msg("ContractMetadata.handle") return nil, nil } if model == nil { diff --git a/internal/index/data.go b/internal/index/data.go deleted file mode 100644 index d25354792..000000000 --- a/internal/index/data.go +++ /dev/null @@ -1,39 +0,0 @@ -package index - -import "time" - -// Head - -type Head struct { - Level int64 - Hash string - Timestamp time.Time -} - -// Contract - -type Contract struct { - Level int64 - Timestamp time.Time - Counter int - Balance int64 - Manager string - Delegate string - Address string - Kind string -} - -// Protocol - -type Protocol struct { - Hash string - StartLevel int64 - LastLevel int64 - Alias string - Constants Constants -} - -// Constants - -type Constants struct { - CostPerByte int64 - HardGasLimitPerOperation int64 - HardStorageLimitPerOperation int64 - TimeBetweenBlocks int64 -} diff --git a/internal/index/interface.go b/internal/index/interface.go deleted file mode 100644 index f53418f04..000000000 --- a/internal/index/interface.go +++ /dev/null @@ -1,9 +0,0 @@ -package index - -// Indexer - -type Indexer interface { - GetHead() (Head, error) - GetContracts(startLevel int64) ([]Contract, error) - GetContractOperationBlocks(startBlock int64, endBlock int64, skipDelegatorBlocks bool) ([]int64, error) - GetProtocols() ([]Protocol, error) -} diff --git a/internal/index/tzkt.go b/internal/index/tzkt.go deleted file mode 100644 index 44779f8a6..000000000 --- a/internal/index/tzkt.go +++ /dev/null @@ -1,129 +0,0 @@ -package index - -import ( - "time" - - 
"github.com/baking-bad/bcdhub/internal/tzkt" -) - -// TzKT - -type TzKT struct { - api *tzkt.TzKT - - lastContractsPage int64 -} - -// NewTzKT - -func NewTzKT(host string, timeout time.Duration) *TzKT { - return &TzKT{ - api: tzkt.NewTzKT(host, timeout), - } -} - -// GetHead - -func (t *TzKT) GetHead() (Head, error) { - resp := Head{} - head, err := t.api.GetHead() - if err != nil { - return resp, err - } - resp.Level = head.Level - resp.Hash = head.Hash - resp.Timestamp = head.Timestamp.UTC() - return resp, err -} - -// GetContracts - -func (t *TzKT) GetContracts(startLevel int64) ([]Contract, error) { - resp := make([]Contract, 0) - - end := false - for !end { - contracts, err := t.api.GetAccounts(tzkt.ContractKindSmart, t.lastContractsPage, 1000) - if err != nil { - return nil, err - } - for _, contract := range contracts { - if contract.FirstActivity <= startLevel { - continue - } - - data := Contract{ - Address: contract.Address, - Level: contract.FirstActivity, - Timestamp: contract.FirstActivityTime.UTC(), - Balance: contract.Balance, - } - if contract.Creator != nil { - data.Manager = contract.Creator.Address - } - if contract.Delegate != nil { - data.Delegate = contract.Delegate.Address - } - resp = append(resp, data) - - } - if len(contracts) == 1000 { - t.lastContractsPage++ - } else { - end = true - } - } - - return resp, nil -} - -// GetContractOperationBlocks - -func (t *TzKT) GetContractOperationBlocks(startBlock, endBlock int64, skipDelegatorBlocks bool) ([]int64, error) { - start := startBlock - end := false - - result := make([]int64, 0) - for !end { - blocks, err := t.api.GetContractOperationBlocks(start, 10000, true, !skipDelegatorBlocks) - if err != nil { - return nil, err - } - - if len(blocks) == 0 { - end = true - continue - } - - for i := range blocks { - if blocks[i] <= endBlock { - result = append(result, blocks[i]) - } else { - return result, nil - } - } - - start = blocks[len(blocks)-1] - } - - return result, nil -} - -// GetProtocols - -func (t *TzKT) GetProtocols() ([]Protocol, error) { - protocols, err := t.api.GetProtocols() - if err != nil { - return nil, err - } - res := make([]Protocol, len(protocols)) - for i := range protocols { - res[i] = Protocol{ - Hash: protocols[i].Hash, - StartLevel: protocols[i].StartLevel, - LastLevel: protocols[i].LastLevel, - Alias: protocols[i].Metadata.Alias, - Constants: Constants{ - CostPerByte: protocols[i].Constants.CostPerByte, - HardGasLimitPerOperation: protocols[i].Constants.HardGasLimitPerOperation, - HardStorageLimitPerOperation: protocols[i].Constants.HardStorageLimitPerOperation, - TimeBetweenBlocks: protocols[i].Constants.TimeBetweenBlocks, - }, - } - } - return res, nil -} diff --git a/internal/metrics/contract.go b/internal/metrics/contract.go index 2e6c3cc16..9e88b1e88 100644 --- a/internal/metrics/contract.go +++ b/internal/metrics/contract.go @@ -9,23 +9,23 @@ import ( clmetrics "github.com/baking-bad/bcdhub/internal/classification/metrics" ) -// SetContractProjectID - -func SetContractProjectID(contracts contract.Repository, c *contract.Contract, chunk []*contract.Contract) error { +// SetScriptProjectID - +func SetScriptProjectID(scripts contract.ScriptRepository, c *contract.Script, chunk []contract.Script) error { projectID := getContractProjectID(*c, chunk) if projectID != "" { c.ProjectID = types.NewNullString(&projectID) return nil } - var offset int64 - size := int64(100) + var offset int + size := 100 var end bool for !end { - buckets, err := contracts.GetProjectsLastContract(*c, 
size, offset) + buckets, err := scripts.GetScripts(size, offset) if err != nil { return err } - end = len(buckets) < int(size) + end = len(buckets) < size if !end { projectID := getContractProjectID(*c, buckets) @@ -43,14 +43,9 @@ func SetContractProjectID(contracts contract.Repository, c *contract.Contract, c return nil } -func getContractProjectID(c contract.Contract, buckets []*contract.Contract) string { +func getContractProjectID(c contract.Script, buckets []contract.Script) string { for i := len(buckets) - 1; i > -1; i-- { - if c.Hash == buckets[i].Hash && buckets[i].ProjectID.Valid { - return buckets[i].ProjectID.String() - } - } - for i := len(buckets) - 1; i > -1; i-- { - if buckets[i].ProjectID.Valid && compare(c, *buckets[i]) { + if buckets[i].ProjectID.Valid && compare(c, buckets[i]) { return buckets[i].ProjectID.String() } } @@ -59,7 +54,6 @@ func getContractProjectID(c contract.Contract, buckets []*contract.Contract) str } var precomputedMetrics = []clmetrics.Metric{ - clmetrics.NewManager(), clmetrics.NewBinMask("Tags"), clmetrics.NewArray("FailStrings"), clmetrics.NewArray("Annotations"), @@ -75,7 +69,7 @@ var fingerprintMetrics = []clmetrics.Metric{ clmetrics.NewFingerprint("code"), } -func compare(a, b contract.Contract) bool { +func compare(a, b contract.Script) bool { features := make([]float64, len(precomputedMetrics)) for i := range precomputedMetrics { diff --git a/internal/models/bigmapdiff/repository.go b/internal/models/bigmapdiff/repository.go index 9abf3b1ef..0b19736ac 100644 --- a/internal/models/bigmapdiff/repository.go +++ b/internal/models/bigmapdiff/repository.go @@ -6,7 +6,7 @@ import "github.com/baking-bad/bcdhub/internal/models/types" type Repository interface { Get(ctx GetContext) ([]Bucket, error) GetByAddress(network types.Network, address string) ([]BigMapDiff, error) - GetForOperation(id int64) ([]*BigMapDiff, error) + GetForOperation(id int64) ([]BigMapDiff, error) GetForOperations(ids ...int64) ([]BigMapDiff, error) GetByPtr(network types.Network, contract string, ptr int64) ([]BigMapState, error) GetByPtrAndKeyHash(ptr int64, network types.Network, keyHash string, size int64, offset int64) ([]BigMapDiff, int64, error) diff --git a/internal/models/consts.go b/internal/models/consts.go index fa4c00f9d..fcec19929 100644 --- a/internal/models/consts.go +++ b/internal/models/consts.go @@ -30,6 +30,7 @@ const ( DocOperations = "operations" DocProtocol = "protocols" DocServices = "states" + DocScripts = "scripts" DocTokenBalances = "token_balances" DocTokenMetadata = "token_metadata" DocTransfers = "transfers" @@ -50,6 +51,7 @@ func AllDocuments() []string { DocMigrations, DocOperations, DocProtocol, + DocScripts, DocTokenBalances, DocTokenMetadata, DocTransfers, @@ -69,7 +71,8 @@ func AllModels() []Model { &transfer.Transfer{}, &operation.Operation{}, &global_constant.GlobalConstant{}, - &contract.ContractConstants{}, + &contract.Script{}, + &contract.ScriptConstants{}, &contract.Contract{}, &migration.Migration{}, &tokenbalance.TokenBalance{}, @@ -82,6 +85,6 @@ func AllModels() []Model { // ManyToMany - func ManyToMany() []interface{} { return []interface{}{ - &contract.ContractConstants{}, + &contract.ScriptConstants{}, } } diff --git a/internal/models/contract/contract_constants.go b/internal/models/contract/contract_constants.go deleted file mode 100644 index 9798bd5be..000000000 --- a/internal/models/contract/contract_constants.go +++ /dev/null @@ -1,27 +0,0 @@ -package contract - -import "github.com/go-pg/pg/v10" - -// 
ContractConstants - -type ContractConstants struct { - // nolint - tableName struct{} `pg:"contract_constants"` - - ContractId int64 - GlobalConstantId int64 -} - -// GetID - -func (ContractConstants) GetID() int64 { - return 0 -} - -// GetIndex - -func (ContractConstants) GetIndex() string { - return "contracts" -} - -// Save - -func (ContractConstants) Save(tx pg.DBI) error { - return nil -} diff --git a/internal/models/contract/model.go b/internal/models/contract/model.go index 738e9b90e..471baaaad 100644 --- a/internal/models/contract/model.go +++ b/internal/models/contract/model.go @@ -3,10 +3,8 @@ package contract import ( "time" - "github.com/baking-bad/bcdhub/internal/models/global_constant" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/go-pg/pg/v10" - "github.com/lib/pq" ) // Contract - entity for contract @@ -19,26 +17,19 @@ type Contract struct { Level int64 Timestamp time.Time - Hash string - FingerprintCode []byte - FingerprintParameter []byte - FingerprintStorage []byte - Tags types.Tags `pg:",use_zero"` - Entrypoints pq.StringArray `pg:",type:text[]"` - FailStrings pq.StringArray `pg:",type:text[]"` - Annotations pq.StringArray `pg:",type:text[]"` - Hardcoded pq.StringArray `pg:",type:text[]"` - - Address string - Manager types.NullString `pg:",type:varchar(36)"` - Delegate types.NullString `pg:",type:varchar(36)"` - ProjectID types.NullString `pg:",type:varchar(36)"` + Address string + Manager types.NullString `pg:",type:varchar(36)"` + Delegate types.NullString `pg:",type:varchar(36)"` TxCount int64 `pg:",use_zero"` LastAction time.Time - MigrationsCount int64 `pg:",use_zero"` + MigrationsCount int64 `pg:",use_zero"` + Tags types.Tags `pg:",use_zero"` - Constants []global_constant.GlobalConstant `pg:",many2many:contract_constants"` + AlphaID int64 + Alpha Script `pg:",rel:has-one"` + BabylonID int64 + Babylon Script `pg:",rel:has-one"` } // NewEmptyContract - diff --git a/internal/models/contract/repository.go b/internal/models/contract/repository.go index 4fbe3de2c..97142d92d 100644 --- a/internal/models/contract/repository.go +++ b/internal/models/contract/repository.go @@ -8,9 +8,24 @@ type Repository interface { GetMany(by map[string]interface{}) ([]Contract, error) GetRandom(network types.Network) (Contract, error) GetTokens(network types.Network, tokenInterface string, offset, size int64) ([]Contract, int64, error) - GetProjectsLastContract(c Contract, size, offset int64) ([]*Contract, error) + GetSameContracts(contact Contract, manager string, size, offset int64) (SameResponse, error) GetSimilarContracts(Contract, int64, int64) ([]Similar, int, error) Stats(c Contract) (Stats, error) - GetProjectIDByHash(hash string) (result string, err error) + + Script(network types.Network, address string, symLink string) (Script, error) + + // ScriptPart - returns part of script type. Part can be `storage`, `parameter` or `code`. 
+ ScriptPart(network types.Network, address string, symLink, part string) ([]byte, error) +} + +// ScriptRepository - +type ScriptRepository interface { + GetScripts(limit, offset int) ([]Script, error) + ByHash(hash string) (Script, error) + UpdateProjectID(script []Script) error + + Code(id int64) ([]byte, error) + Parameter(id int64) ([]byte, error) + Storage(id int64) ([]byte, error) } diff --git a/internal/models/contract/script.go b/internal/models/contract/script.go new file mode 100644 index 000000000..075ebf76d --- /dev/null +++ b/internal/models/contract/script.go @@ -0,0 +1,79 @@ +package contract + +import ( + "bytes" + + "github.com/baking-bad/bcdhub/internal/models/global_constant" + "github.com/baking-bad/bcdhub/internal/models/types" + "github.com/go-pg/pg/v10" + "github.com/lib/pq" +) + +// Scripts - +type Script struct { + // nolint + tableName struct{} `pg:"scripts"` + + ID int64 + Hash string `pg:",unique,type:varchar(64)"` + ProjectID types.NullString `pg:",type:varchar(36)"` + Code []byte `pg:",type:bytea"` + Parameter []byte `pg:",type:bytea"` + Storage []byte `pg:",type:bytea"` + Views []byte `pg:",type:bytea"` + FingerprintCode []byte `pg:",type:bytea"` + FingerprintParameter []byte `pg:",type:bytea"` + FingerprintStorage []byte `pg:",type:bytea"` + Entrypoints pq.StringArray `pg:",type:text[]"` + FailStrings pq.StringArray `pg:",type:text[]"` + Annotations pq.StringArray `pg:",type:text[]"` + Hardcoded pq.StringArray `pg:",type:text[]"` + Tags types.Tags `pg:",use_zero"` + + Constants []global_constant.GlobalConstant `pg:",many2many:script_constants"` +} + +// GetID - +func (s *Script) GetID() int64 { + return s.ID +} + +// GetIndex - +func (s *Script) GetIndex() string { + return "scripts" +} + +// Save - +func (s *Script) Save(tx pg.DBI) error { + _, err := tx.Model(s). + Where("hash = ?hash"). + OnConflict("DO NOTHING"). 
+ Returning("id").SelectOrInsert() + return err +} + +// Full - +func (s *Script) Full() ([]byte, error) { + var buf bytes.Buffer + buf.WriteString(`[{"prim":"code","args":`) + if _, err := buf.Write(s.Code); err != nil { + return nil, err + } + buf.WriteString(`},{"prim":"parameter","args":`) + if _, err := buf.Write(s.Parameter); err != nil { + return nil, err + } + buf.WriteString(`},{"prim":"storage","args":`) + if _, err := buf.Write(s.Storage); err != nil { + return nil, err + } + buf.WriteByte('}') + if len(s.Views) > 2 { + if _, err := buf.Write(s.Views[1 : len(s.Views)-1]); err != nil { + return nil, err + } + } + buf.WriteByte(']') + + return buf.Bytes(), nil +} diff --git a/internal/models/contract/script_constants.go b/internal/models/contract/script_constants.go new file mode 100644 index 000000000..5791f8ab6 --- /dev/null +++ b/internal/models/contract/script_constants.go @@ -0,0 +1,27 @@ +package contract + +import "github.com/go-pg/pg/v10" + +// ScriptConstants - +type ScriptConstants struct { + // nolint + tableName struct{} `pg:"script_constants"` + + ScriptId int64 + GlobalConstantId int64 +} + +// GetID - +func (ScriptConstants) GetID() int64 { + return 0 +} + +// GetIndex - +func (ScriptConstants) GetIndex() string { + return "script_constants" +} + +// Save - +func (ScriptConstants) Save(tx pg.DBI) error { + return nil +} diff --git a/internal/models/mock/bigmapdiff/mock.go b/internal/models/mock/bigmapdiff/mock.go index 33802af97..06e33c780 100644 --- a/internal/models/mock/bigmapdiff/mock.go +++ b/internal/models/mock/bigmapdiff/mock.go @@ -1,11 +1,11 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: bigmapdiff/repository.go +// Source: internal/models/bigmapdiff/repository.go // Package bigmapdiff is a generated GoMock package. 
package bigmapdiff import ( - bmd "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + model "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" types "github.com/baking-bad/bcdhub/internal/models/types" gomock "github.com/golang/mock/gomock" reflect "reflect" @@ -35,10 +35,10 @@ func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { } // Get mocks base method -func (m *MockRepository) Get(ctx bmd.GetContext) ([]bmd.Bucket, error) { +func (m *MockRepository) Get(ctx model.GetContext) ([]model.Bucket, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Get", ctx) - ret0, _ := ret[0].([]bmd.Bucket) + ret0, _ := ret[0].([]model.Bucket) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -50,10 +50,10 @@ func (mr *MockRepositoryMockRecorder) Get(ctx interface{}) *gomock.Call { } // GetByAddress mocks base method -func (m *MockRepository) GetByAddress(network types.Network, address string) ([]bmd.BigMapDiff, error) { +func (m *MockRepository) GetByAddress(network types.Network, address string) ([]model.BigMapDiff, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetByAddress", network, address) - ret0, _ := ret[0].([]bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -65,10 +65,10 @@ func (mr *MockRepositoryMockRecorder) GetByAddress(network, address interface{}) } // GetForOperation mocks base method -func (m *MockRepository) GetForOperation(id int64) ([]*bmd.BigMapDiff, error) { +func (m *MockRepository) GetForOperation(id int64) ([]model.BigMapDiff, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetForOperation", id) - ret0, _ := ret[0].([]*bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -80,14 +80,14 @@ func (mr *MockRepositoryMockRecorder) GetForOperation(id interface{}) *gomock.Ca } // GetForOperations mocks base method -func (m *MockRepository) GetForOperations(ids ...int64) ([]bmd.BigMapDiff, error) { +func (m *MockRepository) GetForOperations(ids ...int64) ([]model.BigMapDiff, error) { m.ctrl.T.Helper() varargs := []interface{}{} for _, a := range ids { varargs = append(varargs, a) } ret := m.ctrl.Call(m, "GetForOperations", varargs...) 
- ret0, _ := ret[0].([]bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -99,10 +99,10 @@ func (mr *MockRepositoryMockRecorder) GetForOperations(ids ...interface{}) *gomo } // GetByPtr mocks base method -func (m *MockRepository) GetByPtr(network types.Network, contract string, ptr int64) ([]bmd.BigMapState, error) { +func (m *MockRepository) GetByPtr(network types.Network, contract string, ptr int64) ([]model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetByPtr", network, contract, ptr) - ret0, _ := ret[0].([]bmd.BigMapState) + ret0, _ := ret[0].([]model.BigMapState) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -114,10 +114,10 @@ func (mr *MockRepositoryMockRecorder) GetByPtr(network, contract, ptr interface{ } // GetByPtrAndKeyHash mocks base method -func (m *MockRepository) GetByPtrAndKeyHash(ptr int64, network types.Network, keyHash string, size, offset int64) ([]bmd.BigMapDiff, int64, error) { +func (m *MockRepository) GetByPtrAndKeyHash(ptr int64, network types.Network, keyHash string, size, offset int64) ([]model.BigMapDiff, int64, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetByPtrAndKeyHash", ptr, network, keyHash, size, offset) - ret0, _ := ret[0].([]bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(int64) ret2, _ := ret[2].(error) return ret0, ret1, ret2 @@ -130,10 +130,10 @@ func (mr *MockRepositoryMockRecorder) GetByPtrAndKeyHash(ptr, network, keyHash, } // GetForAddress mocks base method -func (m *MockRepository) GetForAddress(network types.Network, address string) ([]bmd.BigMapState, error) { +func (m *MockRepository) GetForAddress(network types.Network, address string) ([]model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetForAddress", network, address) - ret0, _ := ret[0].([]bmd.BigMapState) + ret0, _ := ret[0].([]model.BigMapState) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -145,10 +145,10 @@ func (mr *MockRepositoryMockRecorder) GetForAddress(network, address interface{} } // GetValuesByKey mocks base method -func (m *MockRepository) GetValuesByKey(keyHash string) ([]bmd.BigMapDiff, error) { +func (m *MockRepository) GetValuesByKey(keyHash string) ([]model.BigMapDiff, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetValuesByKey", keyHash) - ret0, _ := ret[0].([]bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -175,10 +175,10 @@ func (mr *MockRepositoryMockRecorder) Count(network, ptr interface{}) *gomock.Ca } // Current mocks base method -func (m *MockRepository) Current(network types.Network, keyHash string, ptr int64) (bmd.BigMapState, error) { +func (m *MockRepository) Current(network types.Network, keyHash string, ptr int64) (model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Current", network, keyHash, ptr) - ret0, _ := ret[0].(bmd.BigMapState) + ret0, _ := ret[0].(model.BigMapState) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -190,10 +190,10 @@ func (mr *MockRepositoryMockRecorder) Current(network, keyHash, ptr interface{}) } // CurrentByContract mocks base method -func (m *MockRepository) CurrentByContract(network types.Network, contract string) ([]bmd.BigMapState, error) { +func (m *MockRepository) CurrentByContract(network types.Network, contract string) ([]model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "CurrentByContract", network, contract) - ret0, _ := ret[0].([]bmd.BigMapState) + ret0, _ := ret[0].([]model.BigMapState) 
ret1, _ := ret[1].(error) return ret0, ret1 } @@ -205,10 +205,10 @@ func (mr *MockRepositoryMockRecorder) CurrentByContract(network, contract interf } // Previous mocks base method -func (m *MockRepository) Previous(arg0 []bmd.BigMapDiff) ([]bmd.BigMapDiff, error) { +func (m *MockRepository) Previous(arg0 []model.BigMapDiff) ([]model.BigMapDiff, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Previous", arg0) - ret0, _ := ret[0].([]bmd.BigMapDiff) + ret0, _ := ret[0].([]model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -220,10 +220,10 @@ func (mr *MockRepositoryMockRecorder) Previous(arg0 interface{}) *gomock.Call { } // GetStats mocks base method -func (m *MockRepository) GetStats(network types.Network, ptr int64) (bmd.Stats, error) { +func (m *MockRepository) GetStats(network types.Network, ptr int64) (model.Stats, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetStats", network, ptr) - ret0, _ := ret[0].(bmd.Stats) + ret0, _ := ret[0].(model.Stats) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -235,10 +235,10 @@ func (mr *MockRepositoryMockRecorder) GetStats(network, ptr interface{}) *gomock } // StatesChangedAfter mocks base method -func (m *MockRepository) StatesChangedAfter(network types.Network, level int64) ([]bmd.BigMapState, error) { +func (m *MockRepository) StatesChangedAfter(network types.Network, level int64) ([]model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "StatesChangedAfter", network, level) - ret0, _ := ret[0].([]bmd.BigMapState) + ret0, _ := ret[0].([]model.BigMapState) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -250,10 +250,10 @@ func (mr *MockRepositoryMockRecorder) StatesChangedAfter(network, level interfac } // LastDiff mocks base method -func (m *MockRepository) LastDiff(network types.Network, ptr int64, keyHash string, skipRemoved bool) (bmd.BigMapDiff, error) { +func (m *MockRepository) LastDiff(network types.Network, ptr int64, keyHash string, skipRemoved bool) (model.BigMapDiff, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "LastDiff", network, ptr, keyHash, skipRemoved) - ret0, _ := ret[0].(bmd.BigMapDiff) + ret0, _ := ret[0].(model.BigMapDiff) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -265,10 +265,10 @@ func (mr *MockRepositoryMockRecorder) LastDiff(network, ptr, keyHash, skipRemove } // Keys mocks base method -func (m *MockRepository) Keys(ctx bmd.GetContext) ([]bmd.BigMapState, error) { +func (m *MockRepository) Keys(ctx model.GetContext) ([]model.BigMapState, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Keys", ctx) - ret0, _ := ret[0].([]bmd.BigMapState) + ret0, _ := ret[0].([]model.BigMapState) ret1, _ := ret[1].(error) return ret0, ret1 } diff --git a/internal/models/mock/contract/mock.go b/internal/models/mock/contract/mock.go index 61dc95d69..6f480ae68 100644 --- a/internal/models/mock/contract/mock.go +++ b/internal/models/mock/contract/mock.go @@ -1,11 +1,11 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: contract/repository.go +// Source: internal/models/contract/repository.go // Package contract is a generated GoMock package. 
package contract import ( - contractModel "github.com/baking-bad/bcdhub/internal/models/contract" + model "github.com/baking-bad/bcdhub/internal/models/contract" types "github.com/baking-bad/bcdhub/internal/models/types" gomock "github.com/golang/mock/gomock" reflect "reflect" @@ -35,10 +35,10 @@ func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { } // Get mocks base method -func (m *MockRepository) Get(network types.Network, address string) (contractModel.Contract, error) { +func (m *MockRepository) Get(network types.Network, address string) (model.Contract, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Get", network, address) - ret0, _ := ret[0].(contractModel.Contract) + ret0, _ := ret[0].(model.Contract) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -50,10 +50,10 @@ func (mr *MockRepositoryMockRecorder) Get(network, address interface{}) *gomock. } // GetMany mocks base method -func (m *MockRepository) GetMany(by map[string]interface{}) ([]contractModel.Contract, error) { +func (m *MockRepository) GetMany(by map[string]interface{}) ([]model.Contract, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetMany", by) - ret0, _ := ret[0].([]contractModel.Contract) + ret0, _ := ret[0].([]model.Contract) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -65,10 +65,10 @@ func (mr *MockRepositoryMockRecorder) GetMany(by interface{}) *gomock.Call { } // GetRandom mocks base method -func (m *MockRepository) GetRandom(network types.Network) (contractModel.Contract, error) { +func (m *MockRepository) GetRandom(network types.Network) (model.Contract, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetRandom", network) - ret0, _ := ret[0].(contractModel.Contract) + ret0, _ := ret[0].(model.Contract) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -80,10 +80,10 @@ func (mr *MockRepositoryMockRecorder) GetRandom(network interface{}) *gomock.Cal } // GetTokens mocks base method -func (m *MockRepository) GetTokens(network types.Network, tokenInterface string, offset, size int64) ([]contractModel.Contract, int64, error) { +func (m *MockRepository) GetTokens(network types.Network, tokenInterface string, offset, size int64) ([]model.Contract, int64, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetTokens", network, tokenInterface, offset, size) - ret0, _ := ret[0].([]contractModel.Contract) + ret0, _ := ret[0].([]model.Contract) ret1, _ := ret[1].(int64) ret2, _ := ret[2].(error) return ret0, ret1, ret2 @@ -95,26 +95,11 @@ func (mr *MockRepositoryMockRecorder) GetTokens(network, tokenInterface, offset, return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokens", reflect.TypeOf((*MockRepository)(nil).GetTokens), network, tokenInterface, offset, size) } -// GetProjectsLastContract mocks base method -func (m *MockRepository) GetProjectsLastContract(c contractModel.Contract, size, offset int64) ([]*contractModel.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProjectsLastContract", c, size, offset) - ret0, _ := ret[0].([]*contractModel.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProjectsLastContract indicates an expected call of GetProjectsLastContract -func (mr *MockRepositoryMockRecorder) GetProjectsLastContract(c, size, offset interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsLastContract", reflect.TypeOf((*MockRepository)(nil).GetProjectsLastContract), c, size, offset) -} - // GetSameContracts mocks base method -func (m 
*MockRepository) GetSameContracts(contact contractModel.Contract, manager string, size, offset int64) (contractModel.SameResponse, error) { +func (m *MockRepository) GetSameContracts(contact model.Contract, manager string, size, offset int64) (model.SameResponse, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetSameContracts", contact, manager, size, offset) - ret0, _ := ret[0].(contractModel.SameResponse) + ret0, _ := ret[0].(model.SameResponse) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -126,10 +111,10 @@ func (mr *MockRepositoryMockRecorder) GetSameContracts(contact, manager, size, o } // GetSimilarContracts mocks base method -func (m *MockRepository) GetSimilarContracts(arg0 contractModel.Contract, arg1, arg2 int64) ([]contractModel.Similar, int, error) { +func (m *MockRepository) GetSimilarContracts(arg0 model.Contract, arg1, arg2 int64) ([]model.Similar, int, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetSimilarContracts", arg0, arg1, arg2) - ret0, _ := ret[0].([]contractModel.Similar) + ret0, _ := ret[0].([]model.Similar) ret1, _ := ret[1].(int) ret2, _ := ret[2].(error) return ret0, ret1, ret2 @@ -141,51 +126,159 @@ func (mr *MockRepositoryMockRecorder) GetSimilarContracts(arg0, arg1, arg2 inter return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSimilarContracts", reflect.TypeOf((*MockRepository)(nil).GetSimilarContracts), arg0, arg1, arg2) } -// GetByIDs mocks base method -func (m *MockRepository) GetByIDs(ids ...int64) ([]contractModel.Contract, error) { +// Stats mocks base method +func (m *MockRepository) Stats(c model.Contract) (model.Stats, error) { m.ctrl.T.Helper() - varargs := []interface{}{} - for _, a := range ids { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetByIDs", varargs...) - ret0, _ := ret[0].([]contractModel.Contract) + ret := m.ctrl.Call(m, "Stats", c) + ret0, _ := ret[0].(model.Stats) ret1, _ := ret[1].(error) return ret0, ret1 } -// GetByIDs indicates an expected call of GetByIDs -func (mr *MockRepositoryMockRecorder) GetByIDs(ids ...interface{}) *gomock.Call { +// Stats indicates an expected call of Stats +func (mr *MockRepositoryMockRecorder) Stats(c interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByIDs", reflect.TypeOf((*MockRepository)(nil).GetByIDs), ids...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockRepository)(nil).Stats), c) } -// Stats mocks base method -func (m *MockRepository) Stats(c contractModel.Contract) (contractModel.Stats, error) { +// Script mocks base method +func (m *MockRepository) Script(network types.Network, address, symLink string) (model.Script, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Stats", c) - ret0, _ := ret[0].(contractModel.Stats) + ret := m.ctrl.Call(m, "Script", network, address, symLink) + ret0, _ := ret[0].(model.Script) ret1, _ := ret[1].(error) return ret0, ret1 } -// Stats indicates an expected call of Stats -func (mr *MockRepositoryMockRecorder) Stats(c interface{}) *gomock.Call { +// Script indicates an expected call of Script +func (mr *MockRepositoryMockRecorder) Script(network, address, symLink interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Stats", reflect.TypeOf((*MockRepository)(nil).Stats), c) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Script", reflect.TypeOf((*MockRepository)(nil).Script), network, address, symLink) +} + +// ScriptPart mocks base method +func (m *MockRepository) ScriptPart(network types.Network, address, symLink, part string) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ScriptPart", network, address, symLink, part) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ScriptPart indicates an expected call of ScriptPart +func (mr *MockRepositoryMockRecorder) ScriptPart(network, address, symLink, part interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ScriptPart", reflect.TypeOf((*MockRepository)(nil).ScriptPart), network, address, symLink, part) +} + +// MockScriptRepository is a mock of ScriptRepository interface +type MockScriptRepository struct { + ctrl *gomock.Controller + recorder *MockScriptRepositoryMockRecorder +} + +// MockScriptRepositoryMockRecorder is the mock recorder for MockScriptRepository +type MockScriptRepositoryMockRecorder struct { + mock *MockScriptRepository +} + +// NewMockScriptRepository creates a new mock instance +func NewMockScriptRepository(ctrl *gomock.Controller) *MockScriptRepository { + mock := &MockScriptRepository{ctrl: ctrl} + mock.recorder = &MockScriptRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockScriptRepository) EXPECT() *MockScriptRepositoryMockRecorder { + return m.recorder +} + +// GetScripts mocks base method +func (m *MockScriptRepository) GetScripts(limit, offset int) ([]model.Script, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetScripts", limit, offset) + ret0, _ := ret[0].([]model.Script) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetScripts indicates an expected call of GetScripts +func (mr *MockScriptRepositoryMockRecorder) GetScripts(limit, offset interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetScripts", reflect.TypeOf((*MockScriptRepository)(nil).GetScripts), limit, offset) +} + +// ByHash mocks base method +func (m *MockScriptRepository) ByHash(hash string) (model.Script, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ByHash", hash) + ret0, _ := ret[0].(model.Script) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ByHash indicates an expected call of ByHash +func (mr 
*MockScriptRepositoryMockRecorder) ByHash(hash interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ByHash", reflect.TypeOf((*MockScriptRepository)(nil).ByHash), hash) +} + +// UpdateProjectID mocks base method +func (m *MockScriptRepository) UpdateProjectID(script []model.Script) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateProjectID", script) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateProjectID indicates an expected call of UpdateProjectID +func (mr *MockScriptRepositoryMockRecorder) UpdateProjectID(script interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProjectID", reflect.TypeOf((*MockScriptRepository)(nil).UpdateProjectID), script) +} + +// Code mocks base method +func (m *MockScriptRepository) Code(id int64) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Code", id) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Code indicates an expected call of Code +func (mr *MockScriptRepositoryMockRecorder) Code(id interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Code", reflect.TypeOf((*MockScriptRepository)(nil).Code), id) +} + +// Parameter mocks base method +func (m *MockScriptRepository) Parameter(id int64) ([]byte, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Parameter", id) + ret0, _ := ret[0].([]byte) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Parameter indicates an expected call of Parameter +func (mr *MockScriptRepositoryMockRecorder) Parameter(id interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Parameter", reflect.TypeOf((*MockScriptRepository)(nil).Parameter), id) } -// GetProjectIDByHash mocks base method -func (m *MockRepository) GetProjectIDByHash(hash string) (string, error) { +// Storage mocks base method +func (m *MockScriptRepository) Storage(id int64) ([]byte, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProjectIDByHash", hash) - ret0, _ := ret[0].(string) + ret := m.ctrl.Call(m, "Storage", id) + ret0, _ := ret[0].([]byte) ret1, _ := ret[1].(error) return ret0, ret1 } -// GetProjectIDByHash indicates an expected call of GetProjectIDByHash -func (mr *MockRepositoryMockRecorder) GetProjectIDByHash(hash interface{}) *gomock.Call { +// Storage indicates an expected call of Storage +func (mr *MockScriptRepositoryMockRecorder) Storage(id interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectIDByHash", reflect.TypeOf((*MockRepository)(nil).GetProjectIDByHash), hash) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Storage", reflect.TypeOf((*MockScriptRepository)(nil).Storage), id) } diff --git a/internal/models/mock/tzip/mock.go b/internal/models/mock/tzip/mock.go index 8ee88d0c1..de8a48893 100644 --- a/internal/models/mock/tzip/mock.go +++ b/internal/models/mock/tzip/mock.go @@ -1,12 +1,12 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: tzip/repository.go +// Source: internal/models/tzip/repository.go -// Package mock_tzip is a generated GoMock package. -package mock_tzip +// Package tzip is a generated GoMock package. 
+package tzip import ( types "github.com/baking-bad/bcdhub/internal/models/types" - tzip "github.com/baking-bad/bcdhub/internal/models/tzip" + model "github.com/baking-bad/bcdhub/internal/models/tzip" gomock "github.com/golang/mock/gomock" reflect "reflect" ) @@ -35,10 +35,10 @@ func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { } // Get mocks base method -func (m *MockRepository) Get(network types.Network, address string) (*tzip.TZIP, error) { +func (m *MockRepository) Get(network types.Network, address string) (*model.TZIP, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Get", network, address) - ret0, _ := ret[0].(*tzip.TZIP) + ret0, _ := ret[0].(*model.TZIP) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -50,10 +50,10 @@ func (mr *MockRepositoryMockRecorder) Get(network, address interface{}) *gomock. } // GetWithEvents mocks base method -func (m *MockRepository) GetWithEvents(updatedAt uint64) ([]tzip.TZIP, error) { +func (m *MockRepository) GetWithEvents(updatedAt uint64) ([]model.TZIP, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetWithEvents", updatedAt) - ret0, _ := ret[0].([]tzip.TZIP) + ret0, _ := ret[0].([]model.TZIP) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -65,10 +65,10 @@ func (mr *MockRepositoryMockRecorder) GetWithEvents(updatedAt interface{}) *gomo } // GetBySlug mocks base method -func (m *MockRepository) GetBySlug(slug string) (*tzip.TZIP, error) { +func (m *MockRepository) GetBySlug(slug string) (*model.TZIP, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetBySlug", slug) - ret0, _ := ret[0].(*tzip.TZIP) + ret0, _ := ret[0].(*model.TZIP) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -80,10 +80,10 @@ func (mr *MockRepositoryMockRecorder) GetBySlug(slug interface{}) *gomock.Call { } // GetAliases mocks base method -func (m *MockRepository) GetAliases(network types.Network) ([]tzip.TZIP, error) { +func (m *MockRepository) GetAliases(network types.Network) ([]model.TZIP, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "GetAliases", network) - ret0, _ := ret[0].([]tzip.TZIP) + ret0, _ := ret[0].([]model.TZIP) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -93,3 +93,18 @@ func (mr *MockRepositoryMockRecorder) GetAliases(network interface{}) *gomock.Ca mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliases", reflect.TypeOf((*MockRepository)(nil).GetAliases), network) } + +// Events mocks base method +func (m *MockRepository) Events(network types.Network, address string) (model.Events, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Events", network, address) + ret0, _ := ret[0].(model.Events) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Events indicates an expected call of Events +func (mr *MockRepositoryMockRecorder) Events(network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Events", reflect.TypeOf((*MockRepository)(nil).Events), network, address) +} diff --git a/internal/models/tzip/repository.go b/internal/models/tzip/repository.go index b9577fbab..7a1b1924b 100644 --- a/internal/models/tzip/repository.go +++ b/internal/models/tzip/repository.go @@ -8,4 +8,5 @@ type Repository interface { GetWithEvents(updatedAt uint64) ([]TZIP, error) GetBySlug(slug string) (*TZIP, error) GetAliases(network types.Network) ([]TZIP, error) + Events(network types.Network, address string) (Events, error) } diff --git a/internal/noderpc/fs.go b/internal/noderpc/fs.go new file mode 100644 index 
000000000..4d8b493b8 --- /dev/null +++ b/internal/noderpc/fs.go @@ -0,0 +1,46 @@ +package noderpc + +import ( + "os" + "path/filepath" +) + +// FS - +type FS struct { + *NodeRPC + + shareDir string + network string +} + +// NewFS - +func NewFS(uri, shareDir, network string) *FS { + return &FS{ + NewNodeRPC(uri), + shareDir, + network, + } +} + +func (fs *FS) get(filename string, output interface{}) error { + filePath := filepath.Join(fs.shareDir, "node_cache", fs.network, filename) + f, err := os.Open(filePath) + if err != nil { + return err + } + defer f.Close() + + return json.NewDecoder(f).Decode(output) +} + +// GetOPG - +func (fs *FS) GetOPG(block int64) (group []OperationGroup, err error) { + err = fs.get(filepath.Join(getBlockString(block), "operations.json"), &group) + return +} + +// GetHeader - +func (fs *FS) GetHeader(block int64) (header Header, err error) { + err = fs.get(filepath.Join(getBlockString(block), "header.json"), &header) + return +} diff --git a/internal/noderpc/interface.go b/internal/noderpc/interface.go index 0659d830e..d0653586e 100644 --- a/internal/noderpc/interface.go +++ b/internal/noderpc/interface.go @@ -1,29 +1,22 @@ package noderpc -import ( - "time" - - "github.com/baking-bad/bcdhub/internal/bcd/ast" -) - // INode - type INode interface { GetHead() (Header, error) GetHeader(int64) (Header, error) - GetLevel() (int64, error) - GetLevelTime(int) (time.Time, error) GetScriptJSON(string, int64) (Script, error) GetRawScript(address string, level int64) ([]byte, error) GetScriptStorageRaw(string, int64) ([]byte, error) GetContractBalance(string, int64) (int64, error) GetContractData(string, int64) (ContractData, error) GetOPG(block int64) ([]OperationGroup, error) + GetLightOPG(block int64) ([]LightOperationGroup, error) GetContractsByBlock(int64) ([]string, error) GetNetworkConstants(int64) (Constants, error) RunCode([]byte, []byte, []byte, string, string, string, string, string, int64, int64) (RunCodeResponse, error) RunOperation(string, string, string, string, int64, int64, int64, int64, int64, []byte) (OperationGroup, error) + RunOperationLight(string, string, string, string, int64, int64, int64, int64, int64, []byte) (LightOperationGroup, error) GetCounter(string) (int64, error) - GetCode(address string, level int64) (*ast.Script, error) GetBigMapType(ptr, level int64) (BigMap, error) GetBlockMetadata(level int64) (metadata Metadata, err error) } diff --git a/internal/noderpc/mock.go b/internal/noderpc/mock.go index fc7393c61..654a3cb7c 100644 --- a/internal/noderpc/mock.go +++ b/internal/noderpc/mock.go @@ -1,14 +1,12 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: interface.go +// Source: internal/noderpc/interface.go // Package noderpc is a generated GoMock package. 
package noderpc import ( - ast "github.com/baking-bad/bcdhub/internal/bcd/ast" gomock "github.com/golang/mock/gomock" reflect "reflect" - time "time" ) // MockINode is a mock of INode interface @@ -64,36 +62,6 @@ func (mr *MockINodeMockRecorder) GetHeader(arg0 interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHeader", reflect.TypeOf((*MockINode)(nil).GetHeader), arg0) } -// GetLevel mocks base method -func (m *MockINode) GetLevel() (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLevel") - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLevel indicates an expected call of GetLevel -func (mr *MockINodeMockRecorder) GetLevel() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLevel", reflect.TypeOf((*MockINode)(nil).GetLevel)) -} - -// GetLevelTime mocks base method -func (m *MockINode) GetLevelTime(arg0 int) (time.Time, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLevelTime", arg0) - ret0, _ := ret[0].(time.Time) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLevelTime indicates an expected call of GetLevelTime -func (mr *MockINodeMockRecorder) GetLevelTime(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLevelTime", reflect.TypeOf((*MockINode)(nil).GetLevelTime), arg0) -} - // GetScriptJSON mocks base method func (m *MockINode) GetScriptJSON(arg0 string, arg1 int64) (Script, error) { m.ctrl.T.Helper() @@ -184,6 +152,21 @@ func (mr *MockINodeMockRecorder) GetOPG(block interface{}) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOPG", reflect.TypeOf((*MockINode)(nil).GetOPG), block) } +// GetLightOPG mocks base method +func (m *MockINode) GetLightOPG(block int64) ([]LightOperationGroup, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLightOPG", block) + ret0, _ := ret[0].([]LightOperationGroup) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLightOPG indicates an expected call of GetLightOPG +func (mr *MockINodeMockRecorder) GetLightOPG(block interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLightOPG", reflect.TypeOf((*MockINode)(nil).GetLightOPG), block) +} + // GetContractsByBlock mocks base method func (m *MockINode) GetContractsByBlock(arg0 int64) ([]string, error) { m.ctrl.T.Helper() @@ -244,34 +227,34 @@ func (mr *MockINodeMockRecorder) RunOperation(arg0, arg1, arg2, arg3, arg4, arg5 return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RunOperation", reflect.TypeOf((*MockINode)(nil).RunOperation), arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) } -// GetCounter mocks base method -func (m *MockINode) GetCounter(arg0 string) (int64, error) { +// RunOperationLight mocks base method +func (m *MockINode) RunOperationLight(arg0, arg1, arg2, arg3 string, arg4, arg5, arg6, arg7, arg8 int64, arg9 []byte) (LightOperationGroup, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetCounter", arg0) - ret0, _ := ret[0].(int64) + ret := m.ctrl.Call(m, "RunOperationLight", arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) + ret0, _ := ret[0].(LightOperationGroup) ret1, _ := ret[1].(error) return ret0, ret1 } -// GetCounter indicates an expected call of GetCounter -func (mr *MockINodeMockRecorder) GetCounter(arg0 interface{}) *gomock.Call { +// RunOperationLight indicates an expected call of RunOperationLight +func (mr 
*MockINodeMockRecorder) RunOperationLight(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCounter", reflect.TypeOf((*MockINode)(nil).GetCounter), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RunOperationLight", reflect.TypeOf((*MockINode)(nil).RunOperationLight), arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9) } -// GetCode mocks base method -func (m *MockINode) GetCode(address string, level int64) (*ast.Script, error) { +// GetCounter mocks base method +func (m *MockINode) GetCounter(arg0 string) (int64, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetCode", address, level) - ret0, _ := ret[0].(*ast.Script) + ret := m.ctrl.Call(m, "GetCounter", arg0) + ret0, _ := ret[0].(int64) ret1, _ := ret[1].(error) return ret0, ret1 } -// GetCode indicates an expected call of GetCode -func (mr *MockINodeMockRecorder) GetCode(address, level interface{}) *gomock.Call { +// GetCounter indicates an expected call of GetCounter +func (mr *MockINodeMockRecorder) GetCounter(arg0 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCode", reflect.TypeOf((*MockINode)(nil).GetCode), address, level) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCounter", reflect.TypeOf((*MockINode)(nil).GetCounter), arg0) } // GetBigMapType mocks base method diff --git a/internal/noderpc/pool.go b/internal/noderpc/pool.go index fe759d91d..30d6cbecd 100644 --- a/internal/noderpc/pool.go +++ b/internal/noderpc/pool.go @@ -5,7 +5,6 @@ import ( "reflect" "time" - "github.com/baking-bad/bcdhub/internal/bcd/ast" "github.com/pkg/errors" ) @@ -138,15 +137,6 @@ func (p Pool) GetLevel() (int64, error) { return data.Int(), nil } -// GetLevelTime - get level time -func (p Pool) GetLevelTime(level int) (time.Time, error) { - data, err := p.call("GetLevelTime", level) - if err != nil { - return time.Now(), err - } - return data.Interface().(time.Time), nil -} - // GetScriptJSON - func (p Pool) GetScriptJSON(address string, level int64) (Script, error) { data, err := p.call("GetScriptJSON", address, level) @@ -192,6 +182,15 @@ func (p Pool) GetOPG(block int64) ([]OperationGroup, error) { return data.Interface().([]OperationGroup), nil } +// GetLightOPG - +func (p Pool) GetLightOPG(block int64) ([]LightOperationGroup, error) { + data, err := p.call("GetLightOPG", block) + if err != nil { + return nil, err + } + return data.Interface().([]LightOperationGroup), nil +} + // GetContractsByBlock - func (p Pool) GetContractsByBlock(block int64) ([]string, error) { data, err := p.call("GetContractsByBlock", block) @@ -228,6 +227,15 @@ func (p Pool) RunOperation(chainID, branch, source, destination string, fee, gas return data.Interface().(OperationGroup), nil } +// RunOperationLight - +func (p Pool) RunOperationLight(chainID, branch, source, destination string, fee, gasLimit, storageLimit, counter, amount int64, parameters []byte) (LightOperationGroup, error) { + data, err := p.call("RunOperationLight", chainID, branch, source, destination, fee, gasLimit, storageLimit, counter, amount, parameters) + if err != nil { + return LightOperationGroup{}, err + } + return data.Interface().(LightOperationGroup), nil +} + // GetCounter - func (p Pool) GetCounter(address string) (int64, error) { data, err := p.call("GetCounter", address) @@ -237,15 +245,6 @@ func (p Pool) GetCounter(address string) (int64, error) { 
return data.Int(), nil } -// GetCode - -func (p Pool) GetCode(address string, level int64) (*ast.Script, error) { - data, err := p.call("GetCode", address, level) - if err != nil { - return nil, err - } - return data.Interface().(*ast.Script), nil -} - // GetBigMapType - func (p Pool) GetBigMapType(ptr, level int64) (BigMap, error) { data, err := p.call("GetBigMapType", ptr, level) diff --git a/internal/noderpc/responses.go b/internal/noderpc/responses.go index de21b4794..872dc2892 100644 --- a/internal/noderpc/responses.go +++ b/internal/noderpc/responses.go @@ -112,6 +112,31 @@ func (op Operation) GetResult() *OperationResult { } } +// LightOperationGroup - +type LightOperationGroup struct { + Protocol string `json:"protocol"` + ChainID string `json:"chain_id"` + Hash string `json:"hash"` + Branch string `json:"branch"` + Signature string `json:"signature"` + Contents []LightOperation `json:"contents"` +} + +// LightOperation - +type LightOperation struct { + Raw stdJSON.RawMessage `json:"-"` + Kind string `json:"kind"` + Source string `json:"source"` + Destination *string `json:"destination,omitempty"` +} + +// UnmarshalJSON - +func (op *LightOperation) UnmarshalJSON(data []byte) error { + op.Raw = data + type buf LightOperation + return json.Unmarshal(data, (*buf)(op)) +} + // Script - type Script struct { Code *ast.Script `json:"code"` diff --git a/internal/noderpc/rpc.go b/internal/noderpc/rpc.go index 928f3cb53..8e19883c9 100644 --- a/internal/noderpc/rpc.go +++ b/internal/noderpc/rpc.go @@ -9,7 +9,6 @@ import ( "strconv" "time" - "github.com/baking-bad/bcdhub/internal/bcd/ast" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" jsoniter "github.com/json-iterator/go" @@ -195,17 +194,6 @@ func (rpc *NodeRPC) GetHeader(level int64) (header Header, err error) { return } -// GetLevelTime - get level time -func (rpc *NodeRPC) GetLevelTime(level int) (time.Time, error) { - var head struct { - Timestamp time.Time `json:"timestamp"` - } - if err := rpc.get(fmt.Sprintf("chains/main/blocks/%s/header", getBlockString(int64(level))), &head); err != nil { - return time.Now(), err - } - return head.Timestamp.UTC(), nil -} - // GetScriptJSON - func (rpc *NodeRPC) GetScriptJSON(address string, level int64) (script Script, err error) { err = rpc.get(fmt.Sprintf("chains/main/blocks/%s/context/contracts/%s/script", getBlockString(level), address), &script) @@ -248,6 +236,12 @@ func (rpc *NodeRPC) GetOPG(block int64) (group []OperationGroup, err error) { return } +// GetLightOPG - +func (rpc *NodeRPC) GetLightOPG(block int64) (group []LightOperationGroup, err error) { + err = rpc.get(fmt.Sprintf("chains/main/blocks/%s/operations/3", getBlockString(block)), &group) + return +} + // GetContractsByBlock - func (rpc *NodeRPC) GetContractsByBlock(block int64) ([]string, error) { if block != 1 { @@ -323,6 +317,33 @@ func (rpc *NodeRPC) RunOperation(chainID, branch, source, destination string, fe return } +// RunOperationLight - +func (rpc *NodeRPC) RunOperationLight(chainID, branch, source, destination string, fee, gasLimit, storageLimit, counter, amount int64, parameters []byte) (group LightOperationGroup, err error) { + request := runOperationRequest{ + ChainID: chainID, + Operation: runOperationItem{ + Branch: branch, + Signature: "sigUHx32f9wesZ1n2BWpixXz4AQaZggEtchaQNHYGRCoWNAXx45WGW2ua3apUUUAGMLPwAU41QoaFCzVSL61VaessLg4YbbP", // base58_encode(b'0' * 64, b'sig').decode() + Contents: []runOperationItemContent{ + { + Kind: 
"transaction", + Fee: fee, + Counter: counter, + GasLimit: gasLimit, + StorageLimit: storageLimit, + Source: source, + Destination: destination, + Amount: amount, + Parameters: parameters, + }, + }, + }, + } + + err = rpc.post("chains/main/blocks/head/helpers/scripts/run_operation", request, true, &group) + return +} + // GetCounter - func (rpc *NodeRPC) GetCounter(address string) (int64, error) { var counter string @@ -332,16 +353,6 @@ func (rpc *NodeRPC) GetCounter(address string) (int64, error) { return strconv.ParseInt(counter, 10, 64) } -// GetCode - -func (rpc *NodeRPC) GetCode(address string, level int64) (*ast.Script, error) { - contract, err := rpc.GetScriptJSON(address, level) - if err != nil { - return nil, err - } - - return contract.Code, nil -} - // GetBigMapType - func (rpc *NodeRPC) GetBigMapType(ptr, level int64) (bm BigMap, err error) { err = rpc.get(fmt.Sprintf("chains/main/blocks/%s/context/raw/json/big_maps/index/%d", getBlockString(level), ptr), &bm) diff --git a/internal/parsers/contract/contract.go b/internal/parsers/contract/contract.go index cce0aef4c..84c1096d4 100644 --- a/internal/parsers/contract/contract.go +++ b/internal/parsers/contract/contract.go @@ -2,8 +2,10 @@ package contract import ( "bytes" + "encoding/json" "fmt" + "github.com/baking-bad/bcdhub/internal/bcd" astContract "github.com/baking-bad/bcdhub/internal/bcd/contract" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/models/contract" @@ -15,31 +17,12 @@ import ( // Parser - type Parser struct { - scriptSaver ScriptSaver - ctx *config.Context + ctx *config.Context } // NewParser - -func NewParser(ctx *config.Context, opts ...ParserOption) *Parser { - parser := &Parser{ctx: ctx} - for i := range opts { - opts[i](parser) - } - - return parser -} - -// ParserOption - -type ParserOption func(p *Parser) - -// WithShareDir - -func WithShareDir(dir string) ParserOption { - return func(p *Parser) { - if dir == "" { - return - } - p.scriptSaver = NewFileScriptSaver(dir) - } +func NewParser(ctx *config.Context) *Parser { + return &Parser{ctx: ctx} } // Parse - @@ -77,24 +60,41 @@ func (p *Parser) computeMetrics(operation *operation.Operation, c *contract.Cont return errors.Wrap(err, "astContract.NewParser") } - constants, err := script.FindConstants() + contractScript, err := p.ctx.Scripts.ByHash(script.Hash) if err != nil { - return errors.Wrap(err, "script.FindConstants") - } - - if len(constants) > 0 { - globalConstants, err := p.ctx.GlobalConstants.All(c.Network, constants...) - if err != nil { + if !p.ctx.Storage.IsRecordNotFound(err) { return err } - c.Constants = globalConstants - p.replaceConstants(c, operation) + var s bcd.RawScript + if err := json.Unmarshal(script.CodeRaw, &s); err != nil { + return err + } + contractScript = contract.Script{ + Hash: script.Hash, + Code: s.Code, + Parameter: s.Parameter, + Storage: s.Storage, + Views: s.Views, + } - script, err = astContract.NewParser(operation.Script) + constants, err := script.FindConstants() if err != nil { - return errors.Wrap(err, "astContract.NewParser") + return errors.Wrap(err, "script.FindConstants") } + if len(constants) > 0 { + globalConstants, err := p.ctx.GlobalConstants.All(c.Network, constants...) 
+ if err != nil { + return err + } + contractScript.Constants = globalConstants + p.replaceConstants(&contractScript, operation) + + script, err = astContract.NewParser(operation.Script) + if err != nil { + return errors.Wrap(err, "astContract.NewParser") + } + } } if err := script.Parse(); err != nil { @@ -103,48 +103,53 @@ func (p *Parser) computeMetrics(operation *operation.Operation, c *contract.Cont operation.Script = script.CodeRaw operation.AST = script.Code - c.Hash = script.Hash - c.FailStrings = script.FailStrings.Values() - c.Annotations = script.Annotations.Values() - c.Tags = types.NewTags(script.Tags.Values()) - c.Hardcoded = script.HardcodedAddresses.Values() - c.FingerprintCode = script.Fingerprint.Code - c.FingerprintParameter = script.Fingerprint.Parameter - c.FingerprintStorage = script.Fingerprint.Storage + contractScript.FingerprintParameter = script.Fingerprint.Parameter + contractScript.FingerprintCode = script.Fingerprint.Code + contractScript.FingerprintStorage = script.Fingerprint.Storage + contractScript.FailStrings = script.FailStrings.Values() + contractScript.Annotations = script.Annotations.Values() + contractScript.Tags = types.NewTags(script.Tags.Values()) + contractScript.Hardcoded = script.HardcodedAddresses.Values() params, err := script.Code.Parameter.ToTypedAST() if err != nil { return err } - c.Entrypoints = params.GetEntrypoints() + contractScript.Entrypoints = params.GetEntrypoints() if script.IsUpgradable() { - c.Tags.Set(types.UpgradableTag) + contractScript.Tags.Set(types.UpgradableTag) } - projectID, err := p.ctx.CachedProjectIDByHash(c.Hash) + proto, err := p.ctx.Cache.ProtocolByID(operation.Network, operation.ProtocolID) if err != nil { return err } - c.ProjectID = types.NewNullString(&projectID) - proto, err := p.ctx.CachedProtocolByID(operation.Network, operation.ProtocolID) - if err != nil { - return err + if contractScript.ID > 0 { + switch proto.SymLink { + case bcd.SymLinkAlpha: + c.AlphaID = contractScript.ID + c.Alpha = contractScript + case bcd.SymLinkBabylon: + c.BabylonID = contractScript.ID + c.Babylon = contractScript + } + } else { + switch proto.SymLink { + case bcd.SymLinkAlpha: + c.Alpha = contractScript + case bcd.SymLinkBabylon: + c.Babylon = contractScript + } } - if p.scriptSaver != nil { - return p.scriptSaver.Save(operation.Script, ScriptSaveContext{ - Network: c.Network.String(), - Address: c.Address, - Hash: c.Hash, - SymLink: proto.SymLink, - }) - } + c.Tags = contractScript.Tags + return nil } -func (p *Parser) replaceConstants(c *contract.Contract, operation *operation.Operation) { +func (p *Parser) replaceConstants(c *contract.Script, operation *operation.Operation) { pattern := `{"prim":"constant","args":[{"string":"%s"}]}` for i := range c.Constants { operation.Script = bytes.ReplaceAll( diff --git a/internal/parsers/contract/mock_script_saver.go b/internal/parsers/contract/mock_script_saver.go deleted file mode 100644 index d6b05d7c3..000000000 --- a/internal/parsers/contract/mock_script_saver.go +++ /dev/null @@ -1,47 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: script_saver.go - -// Package mock_contract is a generated GoMock package. 
-package contract - -import ( - gomock "github.com/golang/mock/gomock" - reflect "reflect" -) - -// MockScriptSaver is a mock of ScriptSaver interface -type MockScriptSaver struct { - ctrl *gomock.Controller - recorder *MockScriptSaverMockRecorder -} - -// MockScriptSaverMockRecorder is the mock recorder for MockScriptSaver -type MockScriptSaverMockRecorder struct { - mock *MockScriptSaver -} - -// NewMockScriptSaver creates a new mock instance -func NewMockScriptSaver(ctrl *gomock.Controller) *MockScriptSaver { - mock := &MockScriptSaver{ctrl: ctrl} - mock.recorder = &MockScriptSaverMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockScriptSaver) EXPECT() *MockScriptSaverMockRecorder { - return m.recorder -} - -// Save mocks base method -func (m *MockScriptSaver) Save(code []byte, ctx ScriptSaveContext) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Save", code, ctx) - ret0, _ := ret[0].(error) - return ret0 -} - -// Save indicates an expected call of Save -func (mr *MockScriptSaverMockRecorder) Save(code, ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Save", reflect.TypeOf((*MockScriptSaver)(nil).Save), code, ctx) -} diff --git a/internal/parsers/contract/script_saver.go b/internal/parsers/contract/script_saver.go deleted file mode 100644 index ace0ca890..000000000 --- a/internal/parsers/contract/script_saver.go +++ /dev/null @@ -1,87 +0,0 @@ -package contract - -import ( - "fmt" - "os" - "path" - - "github.com/pkg/errors" -) - -const ( - symLinkPath = "%s/contracts/%s/%s_%s.json" - fullFilePath = "%s/contracts/scripts/%s.json" -) - -// ScriptSaver - -type ScriptSaver interface { - Save(code []byte, ctx ScriptSaveContext) error -} - -// FileScriptSaver - -type FileScriptSaver struct { - shareDir string -} - -// NewFileScriptSaver - -func NewFileScriptSaver(shareDir string) FileScriptSaver { - return FileScriptSaver{ - shareDir: shareDir, - } -} - -// ScriptSaveContext - -type ScriptSaveContext struct { - Network string - Address string - Hash string - SymLink string -} - -// Errors -var ( - ErrEmptyShareFolder = errors.New("FileScriptSaver: empty share folder") -) - -// Save - -func (ss FileScriptSaver) Save(code []byte, ctx ScriptSaveContext) error { - if ss.shareDir == "" { - return ErrEmptyShareFolder - } - - filePath := fmt.Sprintf(fullFilePath, ss.shareDir, ctx.Hash) - if _, err := os.Stat(filePath); os.IsNotExist(err) { - d := path.Dir(filePath) - if _, err := os.Stat(d); os.IsNotExist(err) { - if err := os.MkdirAll(d, os.ModePerm); err != nil { - return err - } - } - - f, err := os.Create(filePath) - if err != nil { - return err - } - defer f.Close() - - if _, err := f.Write(code); err != nil { - return err - } - } else if err != nil { - return err - } - - symLink := fmt.Sprintf(symLinkPath, ss.shareDir, ctx.Network, ctx.Address, ctx.SymLink) - if _, err := os.Stat(symLink); os.IsNotExist(err) { - d := path.Dir(symLink) - if _, err := os.Stat(d); os.IsNotExist(err) { - if err := os.MkdirAll(d, os.ModePerm); err != nil { - return err - } - } - if err := os.Symlink(filePath, symLink); err != nil && !os.IsExist(err) { - return err - } - } - return nil -} diff --git a/internal/parsers/migrations/implicit.go b/internal/parsers/migrations/implicit.go index d7917d37e..9d7de5965 100644 --- a/internal/parsers/migrations/implicit.go +++ b/internal/parsers/migrations/implicit.go @@ -30,7 +30,7 @@ func (p *ImplicitParser) 
Parse(metadata noderpc.Metadata, head noderpc.Header) ( return nil, nil } - protocol, err := p.ctx.CachedProtocolByHash(p.network, head.Protocol) + protocol, err := p.ctx.Cache.ProtocolByHash(p.network, head.Protocol) if err != nil { return nil, err } @@ -78,7 +78,7 @@ func (p *ImplicitParser) origination(implicit noderpc.ImplicitOperationsResult, } origination.Script = script - contractParser := contract.NewParser(p.ctx, contract.WithShareDir(p.ctx.SharePath)) + contractParser := contract.NewParser(p.ctx) contractResult, err := contractParser.Parse(&origination) if err != nil { return err diff --git a/internal/parsers/migrations/migration.go b/internal/parsers/migrations/migration.go index 7a9b74f59..abf124838 100644 --- a/internal/parsers/migrations/migration.go +++ b/internal/parsers/migrations/migration.go @@ -1,6 +1,7 @@ package migrations import ( + "bytes" "encoding/json" "time" @@ -13,23 +14,21 @@ import ( "github.com/baking-bad/bcdhub/internal/models/protocol" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/noderpc" - contractParser "github.com/baking-bad/bcdhub/internal/parsers/contract" "github.com/go-pg/pg/v10" + "github.com/pkg/errors" ) // MigrationParser - type MigrationParser struct { - storage models.GeneralRepository - bmdRepo bigmapdiff.Repository - scriptSaver contractParser.ScriptSaver + storage models.GeneralRepository + bmdRepo bigmapdiff.Repository } // NewMigrationParser - -func NewMigrationParser(storage models.GeneralRepository, bmdRepo bigmapdiff.Repository, filesDirectory string) *MigrationParser { +func NewMigrationParser(storage models.GeneralRepository, bmdRepo bigmapdiff.Repository) *MigrationParser { return &MigrationParser{ - storage: storage, - bmdRepo: bmdRepo, - scriptSaver: contractParser.NewFileScriptSaver(filesDirectory), + storage: storage, + bmdRepo: bmdRepo, } } @@ -46,22 +45,41 @@ func (p *MigrationParser) Parse(script noderpc.Script, old modelsContract.Contra return err } - newHash, err := contract.ComputeHash(codeBytes) + var buf bytes.Buffer + if err := json.Compact(&buf, codeBytes); err != nil { + return err + } + + newHash, err := contract.ComputeHash(buf.Bytes()) if err != nil { return err } - if err := p.scriptSaver.Save(codeBytes, contractParser.ScriptSaveContext{ - Hash: newHash, - Address: old.Address, - Network: old.Network.String(), - SymLink: next.SymLink, - }); err != nil { + var s bcd.RawScript + if err := json.Unmarshal(buf.Bytes(), &s); err != nil { return err } - if newHash == old.Hash { - return nil + contractScript := modelsContract.Script{ + Hash: newHash, + Code: s.Code, + Storage: s.Storage, + Parameter: s.Parameter, + Views: s.Views, + } + + if err := contractScript.Save(tx); err != nil { + return err + } + + switch next.SymLink { + case bcd.SymLinkAlpha: + case bcd.SymLinkBabylon: + if contractScript.ID == old.AlphaID { + return nil + } + default: + return errors.Errorf("unknown protocol symbolic link: %s", next.SymLink) } m := &migration.Migration{ diff --git a/internal/parsers/migrations/vesting.go b/internal/parsers/migrations/vesting.go index 9568cd19e..35beafdcd 100644 --- a/internal/parsers/migrations/vesting.go +++ b/internal/parsers/migrations/vesting.go @@ -12,21 +12,19 @@ import ( // VestingParser - type VestingParser struct { - ctx *config.Context - filesDirectory string + ctx *config.Context } // NewVestingParser - -func NewVestingParser(ctx *config.Context, filesDirectory string) *VestingParser { +func 
NewVestingParser(ctx *config.Context) *VestingParser { return &VestingParser{ - ctx: ctx, - filesDirectory: filesDirectory, + ctx: ctx, } } // Parse - func (p *VestingParser) Parse(data noderpc.ContractData, head noderpc.Header, network types.Network, address string) (*parsers.Result, error) { - proto, err := p.ctx.CachedProtocolByHash(network, head.Protocol) + proto, err := p.ctx.Cache.ProtocolByHash(network, head.Protocol) if err != nil { return nil, err } @@ -40,7 +38,7 @@ func (p *VestingParser) Parse(data noderpc.ContractData, head noderpc.Header, ne Kind: types.MigrationKindBootstrap, } - parser := contract.NewParser(p.ctx, contract.WithShareDir(p.filesDirectory)) + parser := contract.NewParser(p.ctx) contractModels, err := parser.Parse(&operation.Operation{ Network: network, ProtocolID: proto.ID, diff --git a/internal/parsers/operations/data/rpc/opg/onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC.json b/internal/parsers/operations/data/rpc/opg/onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC.json index a00e7205d..96f7d622c 100644 --- a/internal/parsers/operations/data/rpc/opg/onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC.json +++ b/internal/parsers/operations/data/rpc/opg/onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC.json @@ -1,141 +1 @@ -{ - "protocol": "PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP", - "chain_id": "NetXdQprcVkpaWU", - "hash": "onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC", - "branch": "BMMyRDBL2ESFkMAmYabnYVvtnL84wevYKbKT6JST9gNVuBYYWY2", - "contents": [ - { - "kind": "origination", - "source": "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", - "fee": "1555", - "counter": "983250", - "gas_limit": "12251", - "storage_limit": "351", - "managerPubkey": "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", - "balance": "0", - "spendable": false, - "delegatable": false, - "script": { - "code": [ - { - "prim": "parameter", - "args": [ - { - "prim": "pair", - "args": [ - { - "prim": "string" - }, - { - "prim": "nat" - } - ] - } - ] - }, - { - "prim": "storage", - "args": [ - { - "prim": "list", - "args": [ - { - "prim": "pair", - "args": [ - { - "prim": "string" - }, - { - "prim": "nat" - } - ] - } - ] - } - ] - }, - { - "prim": "code", - "args": [ - [ - [ - [ - { - "prim": "DUP" - }, - { - "prim": "CAR" - }, - { - "prim": "DIP", - "args": [ - [ - { - "prim": "CDR" - } - ] - ] - } - ] - ], - { - "prim": "CONS" - }, - { - "prim": "NIL", - "args": [ - { - "prim": "operation" - } - ] - }, - { - "prim": "PAIR" - } - ] - ] - } - ], - "storage": [] - }, - "metadata": { - "balance_updates": [ - { - "kind": "contract", - "contract": "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", - "change": "-1555" - }, - { - "kind": "freezer", - "category": "fees", - "delegate": "tz1NpWrAyDL9k2Lmnyxcgr9xuJakbBxdq7FB", - "level": 73, - "change": "1555" - } - ], - "operation_result": { - "status": "applied", - "balance_updates": [ - { - "kind": "contract", - "contract": "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", - "change": "-74000" - }, - { - "kind": "contract", - "contract": "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", - "change": "-257000" - } - ], - "originated_contracts": [ - "KT1AbjG7vtpV8osdoJXcMRck8eTwst8dWoz4" - ], - "consumed_gas": "12151", - "storage_size": "74", - "paid_storage_size_diff": "74" - } - } - } - ], - "signature": "sighHf7xmNQV3C7gRFVAKUeqcoRXoL5CWoZrEfxYKA8aGjN71DPFp1QxLKzcXikcChGZTr7LqyFS3vSvj5Jz11ALEj2fhrRN" -} \ No newline at end of file 
+{"protocol":"PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP","chain_id":"NetXdQprcVkpaWU","hash":"onv6Q1dNejAGEJeQzwRannWsDSGw85FuFdhLnBrY18TBcC9p8kC","branch":"BMMyRDBL2ESFkMAmYabnYVvtnL84wevYKbKT6JST9gNVuBYYWY2","contents":[{"kind":"origination","source":"tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM","fee":"1555","counter":"983250","gas_limit":"12251","storage_limit":"351","managerPubkey":"tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM","balance":"0","spendable":false,"delegatable":false,"script":{"code":[{"prim":"parameter","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]},{"prim":"storage","args":[{"prim":"list","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]}]},{"prim":"code","args":[[[[{"prim":"DUP"},{"prim":"CAR"},{"prim":"DIP","args":[[{"prim":"CDR"}]]}]],{"prim":"CONS"},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]}],"storage":[]},"metadata":{"balance_updates":[{"kind":"contract","contract":"tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM","change":"-1555"},{"kind":"freezer","category":"fees","delegate":"tz1NpWrAyDL9k2Lmnyxcgr9xuJakbBxdq7FB","level":73,"change":"1555"}],"operation_result":{"status":"applied","balance_updates":[{"kind":"contract","contract":"tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM","change":"-74000"},{"kind":"contract","contract":"tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM","change":"-257000"}],"originated_contracts":["KT1AbjG7vtpV8osdoJXcMRck8eTwst8dWoz4"],"consumed_gas":"12151","storage_size":"74","paid_storage_size_diff":"74"}}}],"signature":"sighHf7xmNQV3C7gRFVAKUeqcoRXoL5CWoZrEfxYKA8aGjN71DPFp1QxLKzcXikcChGZTr7LqyFS3vSvj5Jz11ALEj2fhrRN"} \ No newline at end of file diff --git a/internal/parsers/operations/operation_group.go b/internal/parsers/operations/operation_group.go index 5fdfc605d..08e42dbee 100644 --- a/internal/parsers/operations/operation_group.go +++ b/internal/parsers/operations/operation_group.go @@ -21,7 +21,7 @@ func NewGroup(params *ParseParams) Group { } // Parse - -func (opg Group) Parse(data noderpc.OperationGroup) (*parsers.Result, error) { +func (opg Group) Parse(data noderpc.LightOperationGroup) (*parsers.Result, error) { result := parsers.NewResult() opg.hash = data.Hash @@ -30,18 +30,37 @@ func (opg Group) Parse(data noderpc.OperationGroup) (*parsers.Result, error) { for idx, item := range data.Contents { opg.contentIdx = int64(idx) + if !opg.needParse(item) { + continue + } + + var operation noderpc.Operation + if err := json.Unmarshal(item.Raw, &operation); err != nil { + return nil, err + } + contentParser := NewContent(opg.ParseParams) - contentResult, err := contentParser.Parse(item) - if err != nil { + if err := contentParser.Parse(operation, result); err != nil { return nil, err } - result.Merge(contentResult) contentParser.clear() } return result, nil } +func (Group) needParse(item noderpc.LightOperation) bool { + var destination string + if item.Destination != nil { + destination = *item.Destination + } + prefixCondition := bcd.IsContract(item.Source) || bcd.IsContract(destination) + transactionCondition := item.Kind == consts.Transaction && prefixCondition + originationCondition := (item.Kind == consts.Origination || item.Kind == consts.OriginationNew) + registerGlobalConstantCondition := item.Kind == consts.RegisterGlobalConstant + return originationCondition || transactionCondition || registerGlobalConstantCondition +} + // Content - type Content struct { *ParseParams @@ -53,77 +72,49 @@ func NewContent(params *ParseParams) Content { } // Parse - -func (content Content) Parse(data noderpc.Operation) 
(*parsers.Result, error) { - if !content.needParse(data) { - return nil, nil - } - result := parsers.NewResult() - +func (content Content) Parse(data noderpc.Operation, result *parsers.Result) error { switch data.Kind { case consts.Origination, consts.OriginationNew: - originationResult, err := NewOrigination(content.ParseParams).Parse(data) - if err != nil { - return nil, err + if err := NewOrigination(content.ParseParams).Parse(data, result); err != nil { + return err } - result.Merge(originationResult) case consts.Transaction: - txResult, err := NewTransaction(content.ParseParams).Parse(data) - if err != nil { - return nil, err + if err := NewTransaction(content.ParseParams).Parse(data, result); err != nil { + return err } - result.Merge(txResult) case consts.RegisterGlobalConstant: - txResult, err := NewRegisterGlobalConstant(content.ParseParams).Parse(data) - if err != nil { - return nil, err + if err := NewRegisterGlobalConstant(content.ParseParams).Parse(data, result); err != nil { + return err } - result.Merge(txResult) default: - return nil, errors.Errorf("Invalid operation kind: %s", data.Kind) + return errors.Errorf("Invalid operation kind: %s", data.Kind) } - internalResult, err := content.parseInternal(data) - if err != nil { - return nil, err + if err := content.parseInternal(data, result); err != nil { + return err } - result.Merge(internalResult) - return result, nil + return nil } -func (content Content) needParse(item noderpc.Operation) bool { - var destination string - if item.Destination != nil { - destination = *item.Destination - } - prefixCondition := bcd.IsContract(item.Source) || bcd.IsContract(destination) - transactionCondition := item.Kind == consts.Transaction && prefixCondition - originationCondition := (item.Kind == consts.Origination || item.Kind == consts.OriginationNew) && item.Script != nil - registerGlobalConstantCondition := item.Kind == consts.RegisterGlobalConstant - return originationCondition || transactionCondition || registerGlobalConstantCondition -} - -func (content Content) parseInternal(data noderpc.Operation) (*parsers.Result, error) { +func (content Content) parseInternal(data noderpc.Operation, result *parsers.Result) error { if data.Metadata == nil { - return nil, nil + return nil } internals := data.Metadata.Internal if internals == nil { internals = data.Metadata.InternalOperations if internals == nil { - return nil, nil + return nil } } - result := parsers.NewResult() for i := range internals { - parsedModels, err := content.Parse(internals[i]) - if err != nil { - return nil, err + if err := content.Parse(internals[i], result); err != nil { + return err } - result.Merge(parsedModels) } - return result, nil + return nil } func (content *Content) clear() { diff --git a/internal/parsers/operations/operation_group_test.go b/internal/parsers/operations/operation_group_test.go index e3e90090f..3fc75a4ef 100644 --- a/internal/parsers/operations/operation_group_test.go +++ b/internal/parsers/operations/operation_group_test.go @@ -30,8 +30,9 @@ import ( "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers" - "github.com/baking-bad/bcdhub/internal/parsers/contract" + "github.com/go-pg/pg/v10" "github.com/golang/mock/gomock" + "github.com/microcosm-cc/bluemonday" "github.com/shopspring/decimal" ) @@ -62,6 +63,10 @@ func TestGroup_Parse(t *testing.T) { defer ctrlContractRepo.Finish() 
contractRepo := mock_contract.NewMockRepository(ctrlContractRepo) + ctrlScriptRepo := gomock.NewController(t) + defer ctrlScriptRepo.Finish() + scriptRepo := mock_contract.NewMockScriptRepository(ctrlScriptRepo) + ctrlTokenBalanceRepo := gomock.NewController(t) defer ctrlTokenBalanceRepo.Finish() tbRepo := mock_token_balance.NewMockRepository(ctrlTokenBalanceRepo) @@ -70,21 +75,18 @@ func TestGroup_Parse(t *testing.T) { defer ctrlRPC.Finish() rpc := noderpc.NewMockINode(ctrlRPC) - ctrlScriptSaver := gomock.NewController(t) - defer ctrlScriptSaver.Finish() - scriptSaver := contract.NewMockScriptSaver(ctrlScriptSaver) - - scriptSaver. - EXPECT(). - Save(gomock.Any(), gomock.Any()). - Return(nil).AnyTimes() - tzipRepo. EXPECT(). GetWithEvents(gomock.Any()). Return(make([]tzip.TZIP, 0), nil). AnyTimes() + tzipRepo. + EXPECT(). + Events(gomock.Any(), gomock.Any()). + Return(make(tzip.Events, 0), nil). + AnyTimes() + tzipRepo. EXPECT(). Get(gomock.Any(), gomock.Any()). @@ -99,8 +101,20 @@ func TestGroup_Parse(t *testing.T) { contractRepo. EXPECT(). - GetProjectIDByHash(gomock.Any()). - Return("", nil). + Script(gomock.Any(), gomock.Any(), gomock.Any()). + DoAndReturn(readTestScriptModel). + AnyTimes() + + contractRepo. + EXPECT(). + ScriptPart(gomock.Any(), gomock.Any(), gomock.Any(), gomock.Any()). + DoAndReturn(readTestScriptPart). + AnyTimes() + + scriptRepo. + EXPECT(). + ByHash(gomock.Any()). + Return(modelContract.Script{}, pg.ErrNoRows). AnyTimes() generalRepo. @@ -203,7 +217,7 @@ func TestGroup_Parse(t *testing.T) { Return(protocol.Protocol{ Hash: "PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP", Network: types.Mainnet, - SymLink: bcd.SymLinkBabylon, + SymLink: bcd.SymLinkAlpha, ID: 2, }, nil). AnyTimes() @@ -277,7 +291,7 @@ func TestGroup_Parse(t *testing.T) { Return(protocol.Protocol{ Hash: "PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP", Network: types.Mainnet, - SymLink: bcd.SymLinkBabylon, + SymLink: bcd.SymLinkAlpha, ID: 2, }, nil). 
AnyTimes() @@ -336,7 +350,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -360,8 +379,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -538,8 +561,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -740,8 +767,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Delphinet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -789,16 +820,25 @@ func TestGroup_Parse(t *testing.T) { }, Contracts: []*modelContract.Contract{ { - Network: types.Delphinet, - Level: 86142, - Timestamp: timestamp, - Hash: "97a40c7ff3bad5edb92c8e1dcfd4bfc778da8166a7632c1bcecbf8d8f9e4490b", - Entrypoints: []string{"decrement", "increment"}, - Address: "KT1NppzrgyLZD3aku7fssfhYPm5QqZwyabvR", + Network: types.Delphinet, + Level: 86142, + Timestamp: timestamp, + Address: "KT1NppzrgyLZD3aku7fssfhYPm5QqZwyabvR", Manager: types.NullString{ Str: "tz1SX7SPdx4ZJb6uP5Hh5XBVZhh9wTfFaud3", Valid: true, }, + Babylon: modelContract.Script{ + Entrypoints: []string{"decrement", "increment"}, + Annotations: []string{"%decrement", "%increment"}, + Hash: "97a40c7ff3bad5edb92c8e1dcfd4bfc778da8166a7632c1bcecbf8d8f9e4490b", + Code: []byte(`[[{"prim":"DUP"},{"prim":"CDR"},{"prim":"SWAP"},{"prim":"CAR"},{"prim":"IF_LEFT","args":[[{"prim":"SWAP"},{"prim":"SUB"}],[{"prim":"ADD"}]]},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]`), + Parameter: []byte(`[{"prim":"or","args":[{"prim":"int","annots":["%decrement"]},{"prim":"int","annots":["%increment"]}]}]`), + Storage: []byte(`[{"prim":"int"}]`), + FingerprintCode: []byte{33, 23, 76, 22, 46, 76, 75, 18, 61, 109, 66}, + FingerprintParameter: []byte{91, 91}, + FingerprintStorage: []byte{91}, + }, }, }, }, @@ -813,8 +853,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -859,17 +903,24 @@ func TestGroup_Parse(t *testing.T) { }, Contracts: []*modelContract.Contract{ { - Network: types.Mainnet, - Level: 301436, - Timestamp: timestamp, - 
Hash: "8fe2bee899e8700c88f620d06b4623fc6facddfce7157d56c1548108fefca7ca", - Tags: types.Tags(0), - Entrypoints: []string{"default"}, - Address: "KT1AbjG7vtpV8osdoJXcMRck8eTwst8dWoz4", + Network: types.Mainnet, + Level: 301436, + Timestamp: timestamp, + Address: "KT1AbjG7vtpV8osdoJXcMRck8eTwst8dWoz4", Manager: types.NullString{ Str: "tz1MXrEgDNnR8PDryN8sq4B2m9Pqcf57wBqM", Valid: true, }, + Alpha: modelContract.Script{ + Hash: "c4915a55dbe0a3dfc8feb77e46f3e32828f80730a506fab277d8d6c0d5e2f1ec", + Tags: types.Tags(0), + Entrypoints: []string{"default"}, + Code: []byte(`[[[[{"prim":"DUP"},{"prim":"CAR"},{"prim":"DIP","args":[[{"prim":"CDR"}]]}]],{"prim":"CONS"},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]`), + Parameter: []byte(`[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]`), + Storage: []byte(`[{"prim":"list","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]}]`), FingerprintCode: []byte{33, 22, 31, 23, 27, 61, 109, 66}, + FingerprintParameter: []byte{104, 98}, + FingerprintStorage: []byte{95, 104, 98}, + }, }, }, }, @@ -884,8 +935,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Edo2net: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -1018,19 +1073,28 @@ func TestGroup_Parse(t *testing.T) { }, Contracts: []*modelContract.Contract{ { - Network: types.Edo2net, - Level: 72207, - Timestamp: timestamp, - Hash: "b82a20d0647f5ec74ef2daf404cd365a894f6868da0cd623ed07c6b85977b8db", - Tags: types.LedgerTag | types.FA2Tag, - FailStrings: []string{"FA2_INSUFFICIENT_BALANCE"}, - Annotations: []string{"%token_address", "%drop_proposal", "%transfer_contract_tokens", "%permits_counter", "%remove_operator", "%mint", "%ledger", "%voters", "%owner", "%balance", "%transfer", "%from_", "%max_voting_period", "%not_in_migration", "%start_date", "%custom_entrypoints", "%proposal_check", "%accept_ownership", "%migrate", "%set_quorum_threshold", "%amount", "%proposals", "%min_voting_period", "%rejected_proposal_return_value", "%burn", "%flush", "%max_quorum_threshold", "%migratingTo", "%operators", "%proposer", "%call_FA2", "%argument", "%params", "%transfer_ownership", "%voting_period", "%request", "%confirm_migration", "%frozen_token", "%param", "%admin", "%migration_status", "%proposal_key_list_sort_by_date", "%requests", "%update_operators", "%add_operator", "%getVotePermitCounter", "%propose", "%vote", "%vote_amount", "%proposer_frozen_token", "%callCustom", "%txs", "%operator", "%quorum_threshold", "%to_", "%set_voting_period", "%callback", "%contract_address", "%downvotes", "%max_votes", "%balance_of", "%proposal_key", "%vote_type", "%signature", "%decision_lambda", "%token_id", "%permit", "%key", "%extra", "%pending_owner", "%upvotes", "%max_proposals", "%min_quorum_threshold", "%proposal_metadata", "%metadata", "%migratedTo"}, - Entrypoints: []string{"callCustom", "accept_ownership", "burn", "balance_of", "transfer", "update_operators", "confirm_migration", "drop_proposal", "flush", "getVotePermitCounter", "migrate", "mint", "propose", "set_quorum_threshold", "set_voting_period", "transfer_ownership", "vote", "transfer_contract_tokens"}, - Address: "KT1JgHoXtZPjVfG82BY3FSys2VJhKVZo2EJU", + Network: types.Edo2net, + Level: 72207, + Timestamp: timestamp, + Address: 
"KT1JgHoXtZPjVfG82BY3FSys2VJhKVZo2EJU", Manager: types.NullString{ Str: "KT1C2MfcjWb5R1ZDDxVULCsGuxrf5fEn5264", Valid: true, }, + Babylon: modelContract.Script{ + Hash: "b82a20d0647f5ec74ef2daf404cd365a894f6868da0cd623ed07c6b85977b8db", + Tags: types.LedgerTag | types.FA2Tag, + FailStrings: []string{"FA2_INSUFFICIENT_BALANCE"}, + Annotations: []string{"%token_address", "%drop_proposal", "%transfer_contract_tokens", "%permits_counter", "%remove_operator", "%mint", "%ledger", "%voters", "%owner", "%balance", "%transfer", "%from_", "%max_voting_period", "%not_in_migration", "%start_date", "%custom_entrypoints", "%proposal_check", "%accept_ownership", "%migrate", "%set_quorum_threshold", "%amount", "%proposals", "%min_voting_period", "%rejected_proposal_return_value", "%burn", "%flush", "%max_quorum_threshold", "%migratingTo", "%operators", "%proposer", "%call_FA2", "%argument", "%params", "%transfer_ownership", "%voting_period", "%request", "%confirm_migration", "%frozen_token", "%param", "%admin", "%migration_status", "%proposal_key_list_sort_by_date", "%requests", "%update_operators", "%add_operator", "%getVotePermitCounter", "%propose", "%vote", "%vote_amount", "%proposer_frozen_token", "%callCustom", "%txs", "%operator", "%quorum_threshold", "%to_", "%set_voting_period", "%callback", "%contract_address", "%downvotes", "%max_votes", "%balance_of", "%proposal_key", "%vote_type", "%signature", "%decision_lambda", "%token_id", "%permit", "%key", "%extra", "%pending_owner", "%upvotes", "%max_proposals", "%min_quorum_threshold", "%proposal_metadata", "%metadata", "%migratedTo"}, + Entrypoints: []string{"callCustom", "accept_ownership", "burn", "balance_of", "transfer", "update_operators", "confirm_migration", "drop_proposal", "flush", "getVotePermitCounter", "migrate", "mint", "propose", "set_quorum_threshold", "set_voting_period", "transfer_ownership", "vote", "transfer_contract_tokens"}, + Code: []byte(`[[{"prim":"PUSH","args":[{"prim":"string"},{"string":"FA2_INSUFFICIENT_BALANCE"}]},{"prim":"FAILWITH"}]]`), + Parameter: 
[]byte(`[{"prim":"or","args":[{"prim":"or","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%callCustom"]},{"prim":"or","args":[{"prim":"or","args":[{"prim":"or","args":[{"prim":"or","args":[{"prim":"unit","annots":["%accept_ownership"]},{"prim":"pair","args":[{"prim":"address","annots":["%from_"]},{"prim":"nat","annots":["%token_id"]},{"prim":"nat","annots":["%amount"]}],"annots":["%burn"]}]},{"prim":"or","args":[{"prim":"or","args":[{"prim":"or","args":[{"prim":"pair","args":[{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"nat","annots":["%token_id"]}]}],"annots":["%requests"]},{"prim":"contract","args":[{"prim":"list","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"nat","annots":["%token_id"]}],"annots":["%request"]},{"prim":"nat","annots":["%balance"]}]}]}],"annots":["%callback"]}],"annots":["%balance_of"]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address","annots":["%from_"]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address","annots":["%to_"]},{"prim":"nat","annots":["%token_id"]},{"prim":"nat","annots":["%amount"]}]}],"annots":["%txs"]}]}],"annots":["%transfer"]}]},{"prim":"list","args":[{"prim":"or","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]},{"prim":"nat","annots":["%token_id"]}],"annots":["%add_operator"]},{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]},{"prim":"nat","annots":["%token_id"]}],"annots":["%remove_operator"]}]}],"annots":["%update_operators"]}],"annots":["%call_FA2"]},{"prim":"unit","annots":["%confirm_migration"]}]}]},{"prim":"or","args":[{"prim":"or","args":[{"prim":"bytes","annots":["%drop_proposal"]},{"prim":"nat","annots":["%flush"]}]},{"prim":"or","args":[{"prim":"pair","args":[{"prim":"unit","annots":["%param"]},{"prim":"contract","args":[{"prim":"nat"}],"annots":["%callback"]}],"annots":["%getVotePermitCounter"]},{"prim":"address","annots":["%migrate"]}]}]}]},{"prim":"or","args":[{"prim":"or","args":[{"prim":"or","args":[{"prim":"pair","args":[{"prim":"address","annots":["%to_"]},{"prim":"nat","annots":["%token_id"]},{"prim":"nat","annots":["%amount"]}],"annots":["%mint"]},{"prim":"pair","args":[{"prim":"nat","annots":["%frozen_token"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%proposal_metadata"]}],"annots":["%propose"]}]},{"prim":"or","args":[{"prim":"nat","annots":["%set_quorum_threshold"]},{"prim":"nat","annots":["%set_voting_period"]}]}]},{"prim":"or","args":[{"prim":"address","annots":["%transfer_ownership"]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"bytes","annots":["%proposal_key"]},{"prim":"bool","annots":["%vote_type"]},{"prim":"nat","annots":["%vote_amount"]}],"annots":["%argument"]},{"prim":"option","args":[{"prim":"pair","args":[{"prim":"key","annots":["%key"]},{"prim":"signature","annots":["%signature"]}]}],"annots":["%permit"]}]}],"annots":["%vote"]}]}]}]}]},{"prim":"pair","args":[{"prim":"address","annots":["%contract_address"]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address","annots":["%from_"]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address","annots":["%to_"]},{"prim":"nat","annots":["%token_id"]},{"prim":"nat","annots":["%amount"]}]}],"annots":["%txs"]}]}],"annots":["%params"]}],"annots":["%transfer_contract_tokens"]}]}]}]`), + Storage: 
[]byte(`[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%admin"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%extra"]}]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]},{"prim":"big_map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"pair","args":[{"prim":"or","args":[{"prim":"unit","annots":["%not_in_migration"]},{"prim":"or","args":[{"prim":"address","annots":["%migratingTo"]},{"prim":"address","annots":["%migratedTo"]}]}],"annots":["%migration_status"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]}]},{"prim":"unit"}],"annots":["%operators"]}]},{"prim":"address","annots":["%pending_owner"]},{"prim":"nat","annots":["%permits_counter"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"set","args":[{"prim":"pair","args":[{"prim":"timestamp"},{"prim":"bytes"}]}],"annots":["%proposal_key_list_sort_by_date"]},{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]}],"annots":["%proposals"]}]},{"prim":"nat","annots":["%quorum_threshold"]},{"prim":"address","annots":["%token_address"]}]},{"prim":"nat","annots":["%voting_period"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%custom_entrypoints"]},{"prim":"lambda","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%admin"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%extra"]}]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]},{"prim":"big_map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"pair","args":[{"prim":"or","args":[{"prim":"unit","annots":["%not_in_migration"]},{"prim":"or","args":[{"prim":"address","annots":["%migratingTo"]},{"prim":"address","annots":["%migratedTo"]}]}],"annots":["%migration_status"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]}]},{"prim":"unit"}],"annots":["%operators"]}]},{"prim":"address","annots":["%pending_owner"]},{"prim":"nat","annots":["%permits_counter"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"set","args":[{"prim":"pair","ar
gs":[{"prim":"timestamp"},{"prim":"bytes"}]}],"annots":["%proposal_key_list_sort_by_date"]},{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]}],"annots":["%proposals"]}]},{"prim":"nat","annots":["%quorum_threshold"]},{"prim":"address","annots":["%token_address"]}]},{"prim":"nat","annots":["%voting_period"]}]},{"prim":"pair","args":[{"prim":"list","args":[{"prim":"operation"}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%admin"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%extra"]}]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]},{"prim":"big_map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"pair","args":[{"prim":"or","args":[{"prim":"unit","annots":["%not_in_migration"]},{"prim":"or","args":[{"prim":"address","annots":["%migratingTo"]},{"prim":"address","annots":["%migratedTo"]}]}],"annots":["%migration_status"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]}]},{"prim":"unit"}],"annots":["%operators"]}]},{"prim":"address","annots":["%pending_owner"]},{"prim":"nat","annots":["%permits_counter"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"set","args":[{"prim":"pair","args":[{"prim":"timestamp"},{"prim":"bytes"}]}],"annots":["%proposal_key_list_sort_by_date"]},{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]}],"annots":["%proposals"]}]},{"prim":"nat","annots":["%quorum_threshold"]},{"prim":"address","annots":["%token_address"]}]},{"prim":"nat","annots":["%voting_period"]}]}],"annots":["%decision_lambda"]}]},{"prim":"nat","annots":["%max_proposals"]},{"prim":"nat","annots":["%max_quorum_threshold"]}]},{"prim":"pair","args":[{"prim":"nat","annots":["%max_votes"]},{"prim":"nat","annots":["%max_voting_period"]}]},{"prim":"nat","annots":["%min_quorum_threshold"]},{"prim":"nat","annots":["%min_voting_period"]}]},{"prim":"lambda","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%frozen_token"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%proposal_metadata"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%admin"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%extra"]}]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]},{"prim":"big_map","args":[{
"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"pair","args":[{"prim":"or","args":[{"prim":"unit","annots":["%not_in_migration"]},{"prim":"or","args":[{"prim":"address","annots":["%migratingTo"]},{"prim":"address","annots":["%migratedTo"]}]}],"annots":["%migration_status"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]}]},{"prim":"unit"}],"annots":["%operators"]}]},{"prim":"address","annots":["%pending_owner"]},{"prim":"nat","annots":["%permits_counter"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"set","args":[{"prim":"pair","args":[{"prim":"timestamp"},{"prim":"bytes"}]}],"annots":["%proposal_key_list_sort_by_date"]},{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]}],"annots":["%proposals"]}]},{"prim":"nat","annots":["%quorum_threshold"]},{"prim":"address","annots":["%token_address"]}]},{"prim":"nat","annots":["%voting_period"]}]},{"prim":"bool"}],"annots":["%proposal_check"]},{"prim":"lambda","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","annots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"address","annots":["%admin"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%extra"]}]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]},{"prim":"big_map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"pair","args":[{"prim":"or","args":[{"prim":"unit","annots":["%not_in_migration"]},{"prim":"or","args":[{"prim":"address","annots":["%migratingTo"]},{"prim":"address","annots":["%migratedTo"]}]}],"annots":["%migration_status"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address","annots":["%owner"]},{"prim":"address","annots":["%operator"]}]},{"prim":"unit"}],"annots":["%operators"]}]},{"prim":"address","annots":["%pending_owner"]},{"prim":"nat","annots":["%permits_counter"]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"set","args":[{"prim":"pair","args":[{"prim":"timestamp"},{"prim":"bytes"}]}],"annots":["%proposal_key_list_sort_by_date"]},{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%downvotes"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]}]},{"prim":"address","annots":["%proposer"]},{"prim":"nat","annots":["%proposer_frozen_token"]}]},{"prim":"pair","args":[{"prim":"timestamp","annots":["%start_date"]},{"prim":"nat","an
nots":["%upvotes"]}]},{"prim":"list","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]}],"annots":["%voters"]}]}],"annots":["%proposals"]}]},{"prim":"nat","annots":["%quorum_threshold"]},{"prim":"address","annots":["%token_address"]}]},{"prim":"nat","annots":["%voting_period"]}]},{"prim":"nat"}],"annots":["%rejected_proposal_return_value"]}]}]}]`), + FingerprintCode: []byte{67, 104, 104, 39}, + FingerprintParameter: []byte{104, 105, 108, 110, 98, 98, 95, 110, 98, 90, 95, 110, 98, 98, 95, 110, 95, 110, 98, 98, 95, 110, 110, 98, 110, 110, 98, 108, 105, 98, 108, 90, 98, 110, 110, 98, 98, 98, 96, 104, 105, 98, 98, 110, 95, 105, 89, 98, 99, 92, 103, 110, 95, 110, 95, 110, 98, 98}, + FingerprintStorage: []byte{110, 96, 104, 105, 97, 110, 98, 98, 97, 104, 105, 108, 110, 110, 97, 110, 110, 108, 110, 98, 102, 107, 105, 97, 105, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 98, 110, 98, 96, 104, 105, 94, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 110, 96, 104, 105, 97, 110, 98, 98, 97, 104, 105, 108, 110, 110, 97, 110, 110, 108, 110, 98, 102, 107, 105, 97, 105, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 98, 110, 98, 95, 109, 110, 96, 104, 105, 97, 110, 98, 98, 97, 104, 105, 108, 110, 110, 97, 110, 110, 108, 110, 98, 102, 107, 105, 97, 105, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 98, 110, 98, 98, 98, 98, 98, 98, 98, 94, 98, 96, 104, 105, 110, 96, 104, 105, 97, 110, 98, 98, 97, 104, 105, 108, 110, 110, 97, 110, 110, 108, 110, 98, 102, 107, 105, 97, 105, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 98, 110, 98, 89, 94, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 110, 96, 104, 105, 97, 110, 98, 98, 97, 104, 105, 108, 110, 110, 97, 110, 110, 108, 110, 98, 102, 107, 105, 97, 105, 98, 96, 104, 105, 110, 98, 107, 98, 95, 110, 98, 98, 110, 98, 98}, + }, + Tags: types.LedgerTag | types.FA2Tag, }, }, }, @@ -1045,8 +1109,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -1159,8 +1227,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Mainnet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -1266,8 +1338,12 @@ func TestGroup_Parse(t *testing.T) { Protocols: protoRepo, TZIP: tzipRepo, TokenBalances: tbRepo, - Cache: cache.NewCache(), - SharePath: "./test", + Scripts: scriptRepo, + Cache: cache.NewCache( + map[types.Network]noderpc.INode{ + types.Hangzhounet: rpc, + }, blockRepo, contractRepo, protoRepo, tzipRepo, bluemonday.UGCPolicy(), + ), }, paramsOpts: []ParseParamsOption{ WithHead(noderpc.Header{ @@ -1311,6 +1387,7 @@ func TestGroup_Parse(t *testing.T) { }, }, } + for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { for address, level := range tt.storage { @@ -1326,7 +1403,7 @@ func TestGroup_Parse(t *testing.T) { AnyTimes() } - var op noderpc.OperationGroup + var op noderpc.LightOperationGroup if err := readJSONFile(tt.filename, &op); err != nil { t.Errorf(`readJSONFile("%s") = error %v`, tt.filename, err) return 
diff --git a/internal/parsers/operations/origination.go b/internal/parsers/operations/origination.go index b7a852cb0..6327fcebc 100644 --- a/internal/parsers/operations/origination.go +++ b/internal/parsers/operations/origination.go @@ -25,13 +25,13 @@ func NewOrigination(params *ParseParams) Origination { return Origination{params} } -// Parse - -func (p Origination) Parse(data noderpc.Operation) (*parsers.Result, error) { - result := parsers.NewResult() +var delegatorContract = []byte(`{"code":[{"prim":"parameter","args":[{"prim":"or","args":[{"prim":"lambda","args":[{"prim":"unit"},{"prim":"list","args":[{"prim":"operation"}]}],"annots":["%do"]},{"prim":"unit","annots":["%default"]}]}]},{"prim":"storage","args":[{"prim":"key_hash"}]},{"prim":"code","args":[[[[{"prim":"DUP"},{"prim":"CAR"},{"prim":"DIP","args":[[{"prim":"CDR"}]]}]],{"prim":"IF_LEFT","args":[[{"prim":"PUSH","args":[{"prim":"mutez"},{"int":"0"}]},{"prim":"AMOUNT"},[[{"prim":"COMPARE"},{"prim":"EQ"}],{"prim":"IF","args":[[],[[{"prim":"UNIT"},{"prim":"FAILWITH"}]]]}],[{"prim":"DIP","args":[[{"prim":"DUP"}]]},{"prim":"SWAP"}],{"prim":"IMPLICIT_ACCOUNT"},{"prim":"ADDRESS"},{"prim":"SENDER"},[[{"prim":"COMPARE"},{"prim":"EQ"}],{"prim":"IF","args":[[],[[{"prim":"UNIT"},{"prim":"FAILWITH"}]]]}],{"prim":"UNIT"},{"prim":"EXEC"},{"prim":"PAIR"}],[{"prim":"DROP"},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]}]]}],"storage":{"bytes":"0079943a60100e0394ac1c8f6ccfaeee71ec9c2d94"}}`) - proto, err := p.ctx.CachedProtocolByHash(p.network, p.head.Protocol) +// Parse - +func (p Origination) Parse(data noderpc.Operation, result *parsers.Result) error { + proto, err := p.ctx.Cache.ProtocolByHash(p.network, p.head.Protocol) if err != nil { - return nil, err + return err } origination := operation.Operation{ @@ -55,6 +55,10 @@ func (p Origination) Parse(data noderpc.Operation) (*parsers.Result, error) { Script: data.Script, } + if origination.Script == nil { + origination.Script = delegatorContract + } + p.fillInternal(&origination) parseOperationResult(data, &origination) @@ -65,13 +69,13 @@ func (p Origination) Parse(data noderpc.Operation) (*parsers.Result, error) { if origination.IsApplied() { if err := p.appliedHandler(data, &origination, result); err != nil { - return nil, err + return err } } result.Operations = append(result.Operations, &origination) - return result, nil + return nil } func (p Origination) appliedHandler(item noderpc.Operation, origination *operation.Operation, result *parsers.Result) error { @@ -136,25 +140,26 @@ func (p Origination) executeInitialStorageEvent(raw []byte, origination *operati if origination == nil || result == nil || origination.Tags.Has(types.LedgerTag) { return nil } - tzip, err := p.ctx.CachedContractMetadata(origination.Network, origination.Destination) + + contractEvents, err := p.ctx.Cache.Events(origination.Network, origination.Destination) if err != nil { if p.ctx.Storage.IsRecordNotFound(err) { return nil } return err } - if tzip == nil || len(tzip.Events) == 0 { + if len(contractEvents) == 0 { return nil } - for i := range tzip.Events { - for j := range tzip.Events[i].Implementations { - impl := tzip.Events[i].Implementations[j] + for i := range contractEvents { + for j := range contractEvents[i].Implementations { + impl := contractEvents[i].Implementations[j] if impl.MichelsonInitialStorageEvent == nil || impl.MichelsonInitialStorageEvent.Empty() { continue } - event, err := events.NewMichelsonInitialStorage(impl, tzip.Events[i].Name) + event, err := 
events.NewMichelsonInitialStorage(impl, contractEvents[i].Name) if err != nil { return err } @@ -197,10 +202,10 @@ func (p Origination) executeInitialStorageEvent(raw []byte, origination *operati for i := range balances { result.TokenBalances = append(result.TokenBalances, &tbModel.TokenBalance{ - Network: tzip.Network, + Network: origination.Network, Address: balances[i].Address, TokenID: balances[i].TokenID, - Contract: tzip.Address, + Contract: origination.Destination, Balance: balances[i].Value, }) diff --git a/internal/parsers/operations/params.go b/internal/parsers/operations/params.go index 8a8576cfe..1df45aef4 100644 --- a/internal/parsers/operations/params.go +++ b/internal/parsers/operations/params.go @@ -90,7 +90,7 @@ func NewParseParams(rpc noderpc.INode, ctx *config.Context, opts ...ParseParamsO transferParser, err := transfer.NewParser( rpc, - ctx.TZIP, ctx.Blocks, ctx.TokenBalances, ctx.SharePath, + ctx.TZIP, ctx.Blocks, ctx.TokenBalances, transfer.WithStackTrace(params.stackTrace), transfer.WithNetwork(params.network), transfer.WithChainID(params.head.ChainID), @@ -101,10 +101,7 @@ func NewParseParams(rpc noderpc.INode, ctx *config.Context, opts ...ParseParamsO } params.transferParser = transferParser - params.contractParser = contract.NewParser( - params.ctx, - contract.WithShareDir(ctx.SharePath), - ) + params.contractParser = contract.NewParser(params.ctx) storageParser, err := NewRichStorage(ctx.BigMapDiffs, rpc, params.head.Protocol) if err != nil { return nil, err diff --git a/internal/parsers/operations/register_global_constant.go b/internal/parsers/operations/register_global_constant.go index 47e5c564d..4a1de8f41 100644 --- a/internal/parsers/operations/register_global_constant.go +++ b/internal/parsers/operations/register_global_constant.go @@ -18,12 +18,10 @@ func NewRegisterGlobalConstant(params *ParseParams) RegisterGlobalConstant { } // Parse - -func (p RegisterGlobalConstant) Parse(data noderpc.Operation) (*parsers.Result, error) { - result := parsers.NewResult() - - proto, err := p.ctx.CachedProtocolByHash(p.network, p.head.Protocol) +func (p RegisterGlobalConstant) Parse(data noderpc.Operation, result *parsers.Result) error { + proto, err := p.ctx.Cache.ProtocolByHash(p.network, p.head.Protocol) if err != nil { - return nil, err + return err } registerGlobalConstant := operation.Operation{ @@ -48,9 +46,9 @@ func (p RegisterGlobalConstant) Parse(data noderpc.Operation) (*parsers.Result, result.Operations = append(result.Operations, ®isterGlobalConstant) if registerGlobalConstant.IsApplied() { if err != nil { - return nil, err + return err } result.GlobalConstants = append(result.GlobalConstants, NewGlobalConstant().Parse(data, registerGlobalConstant)) } - return result, nil + return nil } diff --git a/internal/parsers/operations/rich_storage.go b/internal/parsers/operations/rich_storage.go index 3421d5360..9f99007c9 100644 --- a/internal/parsers/operations/rich_storage.go +++ b/internal/parsers/operations/rich_storage.go @@ -40,11 +40,13 @@ func (p *RichStorage) Parse(data noderpc.Operation, operation *operation.Operati if err != nil { return nil, err } - storage, err := p.rpc.GetScriptStorageRaw(operation.Destination, operation.Level) - if err != nil { - return nil, err + if result != nil { + storage, err := p.rpc.GetScriptStorageRaw(operation.Destination, operation.Level) + if err != nil { + return nil, err + } + operation.DeffatedStorage = storage } - operation.DeffatedStorage = storage return result, nil default: return nil, nil diff --git 
a/internal/parsers/operations/rich_storage_test.go b/internal/parsers/operations/rich_storage_test.go index 9e3f82b8a..5ff07c7a2 100644 --- a/internal/parsers/operations/rich_storage_test.go +++ b/internal/parsers/operations/rich_storage_test.go @@ -4,8 +4,8 @@ import ( "testing" "time" + "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/bcd/ast" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/models/bigmapaction" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" mock_bmd "github.com/baking-bad/bcdhub/internal/models/mock/bigmapdiff" @@ -240,15 +240,21 @@ func TestRichStorage_Parse(t *testing.T) { t.Errorf(`readJSONFile("%s") = error %v`, tt.filename, err) return } - proto, ok := protocols[tt.operation.ProtocolID] if !ok { t.Errorf(`unknown protocol ID: %d`, tt.operation.ProtocolID) return } - script, err := fetch.Contract(tt.operation.Network, tt.operation.Destination, proto, "./test") + + symLink, err := bcd.GetProtoSymLink(proto) if err != nil { - t.Errorf(`readJSONFile("%s") = error %v`, tt.filename, err) + t.Error(err) + return + } + + script, err := readTestScript(tt.operation.Network, tt.operation.Destination, symLink) + if err != nil { + t.Errorf(`readTestScript= error %v`, err) return } tt.operation.Script = script diff --git a/internal/parsers/operations/tag.go b/internal/parsers/operations/tag.go index 6609eec52..7b978b72a 100644 --- a/internal/parsers/operations/tag.go +++ b/internal/parsers/operations/tag.go @@ -14,7 +14,7 @@ func setTags(ctx *config.Context, contract *contract.Contract, op *operation.Ope } if contract == nil { - c, err := ctx.CachedContract(op.Network, op.Destination) + c, err := ctx.Cache.Contract(op.Network, op.Destination) if err != nil { if ctx.Storage.IsRecordNotFound(err) { return nil diff --git a/internal/parsers/operations/test/contracts/scripts/8fe2bee899e8700c88f620d06b4623fc6facddfce7157d56c1548108fefca7ca.json b/internal/parsers/operations/test/contracts/scripts/8fe2bee899e8700c88f620d06b4623fc6facddfce7157d56c1548108fefca7ca.json deleted file mode 100644 index e4a1cc8f0..000000000 --- a/internal/parsers/operations/test/contracts/scripts/8fe2bee899e8700c88f620d06b4623fc6facddfce7157d56c1548108fefca7ca.json +++ /dev/null @@ -1,80 +0,0 @@ - [ - { - "prim": "parameter", - "args": [ - { - "prim": "pair", - "args": [ - { - "prim": "string" - }, - { - "prim": "nat" - } - ] - } - ] - }, - { - "prim": "storage", - "args": [ - { - "prim": "list", - "args": [ - { - "prim": "pair", - "args": [ - { - "prim": "string" - }, - { - "prim": "nat" - } - ] - } - ] - } - ] - }, - { - "prim": "code", - "args": [ - [ - [ - [ - { - "prim": "DUP" - }, - { - "prim": "CAR" - }, - { - "prim": "DIP", - "args": [ - [ - { - "prim": "CDR" - } - ] - ] - } - ] - ], - { - "prim": "CONS" - }, - { - "prim": "NIL", - "args": [ - { - "prim": "operation" - } - ] - }, - { - "prim": "PAIR" - } - ] - ] - } - ] \ No newline at end of file diff --git a/internal/parsers/operations/test/contracts/scripts/c4915a55dbe0a3dfc8feb77e46f3e32828f80730a506fab277d8d6c0d5e2f1ec.json b/internal/parsers/operations/test/contracts/scripts/c4915a55dbe0a3dfc8feb77e46f3e32828f80730a506fab277d8d6c0d5e2f1ec.json new file mode 100644 index 000000000..9fa6ef514 --- /dev/null +++ b/internal/parsers/operations/test/contracts/scripts/c4915a55dbe0a3dfc8feb77e46f3e32828f80730a506fab277d8d6c0d5e2f1ec.json @@ -0,0 +1 @@ 
+[{"prim":"parameter","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]},{"prim":"storage","args":[{"prim":"list","args":[{"prim":"pair","args":[{"prim":"string"},{"prim":"nat"}]}]}]},{"prim":"code","args":[[[[{"prim":"DUP"},{"prim":"CAR"},{"prim":"DIP","args":[[{"prim":"CDR"}]]}]],{"prim":"CONS"},{"prim":"NIL","args":[{"prim":"operation"}]},{"prim":"PAIR"}]]}] \ No newline at end of file diff --git a/internal/parsers/operations/test_common.go b/internal/parsers/operations/test_common.go index 68b870050..4e48dc164 100644 --- a/internal/parsers/operations/test_common.go +++ b/internal/parsers/operations/test_common.go @@ -1,11 +1,15 @@ package operations import ( + "bytes" "fmt" "io/ioutil" + "path/filepath" "testing" - "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/bcd" + "github.com/baking-bad/bcdhub/internal/bcd/consts" + astContract "github.com/baking-bad/bcdhub/internal/bcd/contract" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models/bigmapaction" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" @@ -14,6 +18,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/parsers" + "github.com/pkg/errors" "github.com/stretchr/testify/assert" ) @@ -29,6 +34,68 @@ func readJSONFile(name string, response interface{}) error { return json.Unmarshal(bytes, response) } +func readTestScript(network types.Network, address, symLink string) ([]byte, error) { + path := filepath.Join("./test/contracts", network.String(), fmt.Sprintf("%s_%s.json", address, symLink)) + return ioutil.ReadFile(path) +} + +func readTestScriptModel(network types.Network, address, symLink string) (contract.Script, error) { + data, err := readTestScript(network, address, bcd.SymLinkBabylon) + if err != nil { + return contract.Script{}, err + } + var buffer bytes.Buffer + buffer.WriteString(`{"code":`) + buffer.Write(data) + buffer.WriteString(`,"storage":{}}`) + script, err := astContract.NewParser(buffer.Bytes()) + if err != nil { + return contract.Script{}, errors.Wrap(err, "astContract.NewParser") + } + if err := script.Parse(); err != nil { + return contract.Script{}, err + } + var s bcd.RawScript + if err := json.Unmarshal(data, &s); err != nil { + return contract.Script{}, err + } + return contract.Script{ + Code: s.Code, + Parameter: s.Parameter, + Storage: s.Storage, + Hash: script.Hash, + FingerprintParameter: script.Fingerprint.Parameter, + FingerprintCode: script.Fingerprint.Code, + FingerprintStorage: script.Fingerprint.Storage, + FailStrings: script.FailStrings.Values(), + Annotations: script.Annotations.Values(), + Tags: types.NewTags(script.Tags.Values()), + Hardcoded: script.HardcodedAddresses.Values(), + }, nil +} + +//nolint +func readTestScriptPart(network types.Network, address, symLink, part string) ([]byte, error) { + data, err := readTestScript(network, address, bcd.SymLinkBabylon) + if err != nil { + return nil, err + } + var s bcd.RawScript + if err := json.Unmarshal(data, &s); err != nil { + return nil, err + } + + switch part { + case consts.CODE: + return s.Code, nil + case consts.PARAMETER: + return s.Parameter, nil + case consts.STORAGE: + return s.Storage, nil + } + return nil, nil +} + func readTestContractModel(network types.Network, address string) (contract.Contract, error) { var c 
contract.Contract bytes, err := ioutil.ReadFile(fmt.Sprintf("./data/models/contract/%s.json", address)) @@ -65,7 +132,7 @@ func compareParserResponse(t *testing.T, got, want *parsers.Result) bool { } for i := range got.Contracts { - if !compareContract(want.Contracts[i], got.Contracts[i]) { + if !compareContract(t, want.Contracts[i], got.Contracts[i]) { return false } } @@ -380,56 +447,70 @@ func compareBigMapAction(one, two *bigmapaction.BigMapAction) bool { return true } -func compareContract(one, two *contract.Contract) bool { - if one.Network != two.Network { - logger.Info().Msgf("Contract.Network: %s != %s", one.Network, two.Network) +func compareContract(t *testing.T, one, two *contract.Contract) bool { + if !assert.Equal(t, one.Network, two.Network) { return false } - if one.Address != two.Address { - logger.Info().Msgf("Contract.Address: %s != %s", one.Address, two.Address) + if !assert.Equal(t, one.Address, two.Address) { return false } - if one.Hash != two.Hash { - logger.Info().Msgf("Contract.Hash: %s != %s", one.Hash, two.Hash) + if !assert.Equal(t, one.Manager, two.Manager) { return false } - if one.Manager != two.Manager { - logger.Info().Msgf("Contract.Manager: %s != %s", one.Manager, two.Manager) + if !assert.Equal(t, one.Level, two.Level) { return false } - if one.Level != two.Level { - logger.Info().Msgf("Contract.Level: %d != %d", one.Level, two.Level) + if !assert.Equal(t, one.Timestamp, two.Timestamp) { return false } - if one.Timestamp != two.Timestamp { - logger.Info().Msgf("Contract.Timestamp: %s != %s", one.Timestamp, two.Timestamp) + if !assert.Equal(t, one.Tags, two.Tags) { return false } - if one.Tags != two.Tags { - logger.Info().Msgf("Contract.Tags: %d != %d", one.Tags, two.Tags) + if !compareScript(t, one.Alpha, two.Alpha) { + logger.Info().Msgf("Contract.Alpha: %v != %v", one.Alpha, two.Alpha) return false } - if !compareStringArray(one.Entrypoints, two.Entrypoints) { - logger.Info().Msgf("Contract.Entrypoints: %v != %v", one.Entrypoints, two.Entrypoints) + if !compareScript(t, one.Babylon, two.Babylon) { + logger.Info().Msgf("Contract.Babylon: %v != %v", one.Babylon, two.Babylon) return false } return true } -func compareInt64Ptr(one, two *int64) bool { - return (one != nil && two != nil && *one == *two) || (one == nil && two == nil) -} - -func compareStringArray(one, two []string) bool { - if len(one) != len(two) { +func compareScript(t *testing.T, one, two contract.Script) bool { + if !assert.Equal(t, one.Hash, two.Hash) { return false } - - for i := range one { - if !helpers.StringInArray(one[i], two) { - return false - } + if !assert.Equal(t, one.ProjectID, two.ProjectID) { + return false + } + if !assert.ElementsMatch(t, one.Entrypoints, two.Entrypoints) { + return false + } + if !assert.ElementsMatch(t, one.Annotations, two.Annotations) { + return false + } + if !assert.ElementsMatch(t, one.FailStrings, two.FailStrings) { + return false + } + if !assert.ElementsMatch(t, one.Hardcoded, two.Hardcoded) { + return false + } + if !assert.ElementsMatch(t, one.Code, two.Code) { + return false + } + if !assert.ElementsMatch(t, one.FingerprintParameter, two.FingerprintParameter) { + return false + } + if !assert.ElementsMatch(t, one.FingerprintCode, two.FingerprintCode) { + return false + } + if !assert.ElementsMatch(t, one.FingerprintStorage, two.FingerprintStorage) { + return false } - return true } + +func compareInt64Ptr(one, two *int64) bool { + return (one != nil && two != nil && *one == *two) || (one == nil && two == nil) +} diff --git 
a/internal/parsers/operations/transaction.go b/internal/parsers/operations/transaction.go index 6b68edfba..e8aa4ba7c 100644 --- a/internal/parsers/operations/transaction.go +++ b/internal/parsers/operations/transaction.go @@ -2,6 +2,7 @@ package operations import ( "github.com/baking-bad/bcdhub/internal/bcd" + "github.com/baking-bad/bcdhub/internal/bcd/ast" "github.com/baking-bad/bcdhub/internal/bcd/consts" "github.com/baking-bad/bcdhub/internal/bcd/tezerrors" "github.com/baking-bad/bcdhub/internal/bcd/types" @@ -30,12 +31,10 @@ func NewTransaction(params *ParseParams) Transaction { } // Parse - -func (p Transaction) Parse(data noderpc.Operation) (*parsers.Result, error) { - result := parsers.NewResult() - - proto, err := p.ctx.CachedProtocolByHash(p.network, p.head.Protocol) +func (p Transaction) Parse(data noderpc.Operation, result *parsers.Result) error { + proto, err := p.ctx.Cache.ProtocolByHash(p.network, p.head.Protocol) if err != nil { - return nil, err + return err } tx := operation.Operation{ @@ -67,36 +66,70 @@ func (p Transaction) Parse(data noderpc.Operation) (*parsers.Result, error) { result.Operations = append(result.Operations, &tx) - script, err := p.ctx.CachedScriptBytes(tx.Network, tx.Destination, proto.SymLink) + if !bcd.IsContract(tx.Destination) { + return nil + } + + for i := range tx.Errors { + if tx.Errors[i].Is("contract.non_existing_contract") { + return nil + } + } + + script, err := p.ctx.Cache.ScriptBytes(tx.Network, tx.Destination, proto.SymLink) if err != nil { - return nil, err + if !tx.Internal { + return nil + } + + for i := range result.Contracts { + if tx.Destination == result.Contracts[i].Address { + switch proto.SymLink { + case bcd.SymLinkAlpha: + script, err = result.Contracts[i].Alpha.Full() + if err != nil { + return err + } + case bcd.SymLinkBabylon: + script, err = result.Contracts[i].Babylon.Full() + if err != nil { + return err + } + default: + return errors.Errorf("unknown protocol symbolic link: %s", proto.SymLink) + } + } + } + if script == nil { + return err + } } tx.Script = script - tx.AST, err = p.ctx.CachedScript(tx.Network, tx.Destination, proto.SymLink) + tx.AST, err = ast.NewScriptWithoutCode(script) if err != nil { - return nil, err + return err } if err := setTags(p.ctx, nil, &tx); err != nil { - return nil, err + return err } if err := p.getEntrypoint(&tx); err != nil { - return nil, err + return err } p.stackTrace.Add(tx) if tx.IsApplied() { if err := p.appliedHandler(data, &tx, result); err != nil { - return nil, err + return err } } if !tezerrors.HasParametersError(tx.Errors) { if err := p.transferParser.Parse(tx.BigMapDiffs, p.head.Protocol, &tx); err != nil { if !errors.Is(err, noderpc.InvalidNodeResponse{}) { - return nil, err + return err } logger.Warning().Err(err).Msg("transferParser.Parse") } @@ -106,14 +139,14 @@ func (p Transaction) Parse(data noderpc.Operation) (*parsers.Result, error) { if tx.IsApplied() { ledgerResult, err := ledger.New(p.ctx.TokenBalances).Parse(&tx, p.stackTrace) if err != nil { - return nil, err + return err } if ledgerResult != nil { result.TokenBalances = append(result.TokenBalances, ledgerResult.TokenBalances...)
} } - return result, nil + return nil } func (p Transaction) fillInternal(tx *operation.Operation) { diff --git a/internal/parsers/result.go b/internal/parsers/result.go index af79a2c49..57651f292 100644 --- a/internal/parsers/result.go +++ b/internal/parsers/result.go @@ -16,7 +16,6 @@ import ( // Result - type Result struct { - // BigMapActions []*bigmapaction.BigMapAction BigMapState []*bigmapdiff.BigMapState Contracts []*contract.Contract Migrations []*migration.Migration @@ -28,7 +27,6 @@ type Result struct { // NewResult - func NewResult() *Result { return &Result{ - // BigMapActions: make([]*bigmapaction.BigMapAction, 0), BigMapState: make([]*bigmapdiff.BigMapState, 0), Contracts: make([]*contract.Contract, 0), Migrations: make([]*migration.Migration, 0), @@ -79,6 +77,24 @@ func (result *Result) Save(tx pg.DBI) error { } } if len(result.Contracts) > 0 { + for i, contract := range result.Contracts { + if contract.Alpha.Code != nil { + if err := contract.Alpha.Save(tx); err != nil { + return err + } + result.Contracts[i].AlphaID = contract.Alpha.ID + } + if contract.Babylon.Code != nil { + if contract.Alpha.Hash != contract.Babylon.Hash { + if err := contract.Babylon.Save(tx); err != nil { + return err + } + result.Contracts[i].BabylonID = contract.Babylon.ID + } else { + result.Contracts[i].BabylonID = contract.Alpha.ID + } + } + } if _, err := tx.Model(&result.Contracts).Returning("id").Insert(); err != nil { return err } diff --git a/internal/parsers/storage/alpha.go b/internal/parsers/storage/alpha.go index e11d21733..8fbccc3b4 100644 --- a/internal/parsers/storage/alpha.go +++ b/internal/parsers/storage/alpha.go @@ -30,6 +30,9 @@ func (a *Alpha) ParseTransaction(content noderpc.Operation, operation *operation // ParseOrigination - func (a *Alpha) ParseOrigination(content noderpc.Operation, operation *operation.Operation) (*parsers.Result, error) { + if content.Script == nil { + return nil, nil + } storage, err := operation.AST.StorageType() if err != nil { return nil, err diff --git a/internal/parsers/storage/big_map.go b/internal/parsers/storage/big_map.go index 8b6162651..2f740f3d5 100644 --- a/internal/parsers/storage/big_map.go +++ b/internal/parsers/storage/big_map.go @@ -1,8 +1,11 @@ package storage import ( + "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/bcd/ast" - "github.com/baking-bad/bcdhub/internal/fetch" + "github.com/baking-bad/bcdhub/internal/bcd/consts" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" @@ -14,16 +17,16 @@ var ( ) // GetBigMapPtr - -func GetBigMapPtr(rpc noderpc.INode, network types.Network, address, key, protocol, sharePath string, level int64) (int64, error) { - data, err := fetch.Contract(network, address, protocol, sharePath) +func GetBigMapPtr(repo models.GeneralRepository, contracts contract.Repository, rpc noderpc.INode, network types.Network, address, key, protocol string, level int64) (int64, error) { + symLink, err := bcd.GetProtoSymLink(protocol) if err != nil { return 0, err } - script, err := ast.NewScript(data) + storageTypeByte, err := contracts.ScriptPart(network, address, symLink, consts.STORAGE) if err != nil { return 0, err } - storage, err := script.StorageType() + storage, err := ast.NewTypedAstFromBytes(storageTypeByte) if err != nil { 
return 0, err } @@ -53,27 +56,23 @@ func GetBigMapPtr(rpc noderpc.INode, network types.Network, address, key, protoc } // FindByName - -func FindByName(network types.Network, address, key, protocol, sharePath string) *ast.BigMap { - data, err := fetch.Contract(network, address, protocol, sharePath) +func FindByName(repo models.GeneralRepository, contracts contract.Repository, network types.Network, address, key, protocol string) *ast.BigMap { + symLink, err := bcd.GetProtoSymLink(protocol) if err != nil { return nil } - - script, err := ast.NewScript(data) + storageTypeByte, err := contracts.ScriptPart(network, address, symLink, consts.STORAGE) if err != nil { return nil } - - storage, err := script.StorageType() + storage, err := ast.NewTypedAstFromBytes(storageTypeByte) if err != nil { return nil } - node := storage.FindByName(key, false) if node == nil { return nil } - if bm, ok := node.(*ast.BigMap); ok { return bm } diff --git a/internal/parsers/transfer/transfer.go b/internal/parsers/transfer/transfer.go index 3f0fa485e..a277226e9 100644 --- a/internal/parsers/transfer/transfer.go +++ b/internal/parsers/transfer/transfer.go @@ -31,7 +31,6 @@ type Parser struct { blocks block.Repository rpc noderpc.INode - shareDir string stackTrace *stacktrace.StackTrace network modelTypes.Network @@ -45,13 +44,12 @@ type Parser struct { var globalEvents *TokenEvents // NewParser - -func NewParser(rpc noderpc.INode, tzipRepo tzip.Repository, blocks block.Repository, tokenBalances tokenbalance.Repository, shareDir string, opts ...ParserOption) (*Parser, error) { +func NewParser(rpc noderpc.INode, tzipRepo tzip.Repository, blocks block.Repository, tokenBalances tokenbalance.Repository, opts ...ParserOption) (*Parser, error) { tp := &Parser{ rpc: rpc, tokenBalances: tokenBalances, tzipRepo: tzipRepo, blocks: blocks, - shareDir: shareDir, } for i := range opts { diff --git a/internal/parsers/tzip/parser.go b/internal/parsers/tzip/parser.go index 68d729f63..d81e7e65c 100644 --- a/internal/parsers/tzip/parser.go +++ b/internal/parsers/tzip/parser.go @@ -10,6 +10,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" tzipStorage "github.com/baking-bad/bcdhub/internal/parsers/tzip/storage" @@ -49,21 +50,23 @@ type ParseContext struct { // Parser - type Parser struct { - bigMapRepo bigmapdiff.Repository - blocksRepo block.Repository - storage models.GeneralRepository - rpc noderpc.INode + bigMapRepo bigmapdiff.Repository + blocksRepo block.Repository + contractRepo contract.Repository + storage models.GeneralRepository + rpc noderpc.INode cfg ParserConfig } // NewParser - -func NewParser(bigMapRepo bigmapdiff.Repository, blocksRepo block.Repository, storage models.GeneralRepository, rpc noderpc.INode, cfg ParserConfig) Parser { +func NewParser(bigMapRepo bigmapdiff.Repository, blocksRepo block.Repository, contractRepo contract.Repository, storage models.GeneralRepository, rpc noderpc.INode, cfg ParserConfig) Parser { return Parser{ - bigMapRepo: bigMapRepo, - blocksRepo: blocksRepo, - storage: storage, - rpc: rpc, + bigMapRepo: bigMapRepo, + blocksRepo: blocksRepo, + contractRepo: contractRepo, + storage: storage, + rpc: rpc, cfg: cfg, } @@ -77,16 +80,19 @@ func (p *Parser) Parse(ctx 
ParseContext) (*tzip.TZIP, error) { } data := new(bufTzip) - s := tzipStorage.NewFull(p.bigMapRepo, p.blocksRepo, p.storage, p.rpc, p.cfg.SharePath, p.cfg.IPFSGateways...) + s := tzipStorage.NewFull(p.bigMapRepo, p.contractRepo, p.blocksRepo, p.storage, p.rpc, p.cfg.IPFSGateways...) if err := s.Get(ctx.BigMapDiff.Network, ctx.BigMapDiff.Contract, decoded, ctx.BigMapDiff.Ptr, data); err != nil { switch { case errors.Is(err, tzipStorage.ErrHTTPRequest) || errors.Is(err, tzipStorage.ErrJSONDecoding) || errors.Is(err, tzipStorage.ErrUnknownStorageType): - logger.Warning().Fields(ctx.BigMapDiff.LogFields()).Str("kind", "contract_metadata").Err(err).Msg("") + logger.Warning().Fields(ctx.BigMapDiff.LogFields()).Str("kind", "contract_metadata").Err(err).Msg("tzip.Parser.Parse") return nil, nil case errors.Is(err, tzipStorage.ErrNoIPFSResponse): data.Description = fmt.Sprintf("Failed to fetch metadata %s", decoded) data.Name = consts.Unknown logger.Warning().Str("url", decoded).Str("kind", "contract_metadata").Err(err).Msg("") + case p.storage.IsRecordNotFound(err): + logger.Warning().Fields(ctx.BigMapDiff.LogFields()).Str("kind", "contract_metadata").Err(err).Msg("tzip.Parser.Parse") + return nil, nil default: return nil, err } diff --git a/internal/parsers/tzip/storage/storage.go b/internal/parsers/tzip/storage/storage.go index ef490ecc8..18099214f 100644 --- a/internal/parsers/tzip/storage/storage.go +++ b/internal/parsers/tzip/storage/storage.go @@ -7,6 +7,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" @@ -24,19 +25,19 @@ type Storage interface { // Full - type Full struct { - bmdRepo bigmapdiff.Repository - blockRepo block.Repository - storage models.GeneralRepository + bmdRepo bigmapdiff.Repository + contractRepo contract.Repository + blockRepo block.Repository + storage models.GeneralRepository - rpc noderpc.INode - sharePath string - ipfs []string + rpc noderpc.INode + ipfs []string } // NewFull - -func NewFull(bmdRepo bigmapdiff.Repository, blockRepo block.Repository, storage models.GeneralRepository, rpc noderpc.INode, sharePath string, ipfs ...string) *Full { +func NewFull(bmdRepo bigmapdiff.Repository, contractRepo contract.Repository, blockRepo block.Repository, storage models.GeneralRepository, rpc noderpc.INode, ipfs ...string) *Full { return &Full{ - bmdRepo, blockRepo, storage, rpc, sharePath, ipfs, + bmdRepo, contractRepo, blockRepo, storage, rpc, ipfs, } } @@ -59,7 +60,7 @@ func (f Full) Get(network types.Network, address, url string, ptr int64, output WithHashSha256(url), ) case strings.HasPrefix(url, PrefixTezosStorage): - store = NewTezosStorage(f.bmdRepo, f.blockRepo, f.storage, f.rpc, address, f.sharePath, network, ptr) + store = NewTezosStorage(f.bmdRepo, f.blockRepo, f.contractRepo, f.storage, f.rpc, address, network, ptr) default: return errors.Wrap(ErrUnknownStorageType, url) } diff --git a/internal/parsers/tzip/storage/tezos.go b/internal/parsers/tzip/storage/tezos.go index 62d5f5951..8bd0b2285 100644 --- a/internal/parsers/tzip/storage/tezos.go +++ b/internal/parsers/tzip/storage/tezos.go @@ -7,6 +7,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" 
"github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/storage" @@ -23,28 +24,28 @@ const ( // TezosStorage - type TezosStorage struct { - bigMapRepo bigmapdiff.Repository - blockRepo block.Repository - storage models.GeneralRepository - - rpc noderpc.INode - network types.Network - address string - ptr int64 - sharePath string + bigMapRepo bigmapdiff.Repository + blockRepo block.Repository + contractRepo contract.Repository + storage models.GeneralRepository + + rpc noderpc.INode + network types.Network + address string + ptr int64 } // NewTezosStorage - -func NewTezosStorage(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, storage models.GeneralRepository, rpc noderpc.INode, address, sharePath string, network types.Network, ptr int64) TezosStorage { +func NewTezosStorage(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, contractRepo contract.Repository, storage models.GeneralRepository, rpc noderpc.INode, address string, network types.Network, ptr int64) TezosStorage { return TezosStorage{ - bigMapRepo: bigMapRepo, - blockRepo: blockRepo, - storage: storage, - rpc: rpc, - address: address, - network: network, - ptr: ptr, - sharePath: sharePath, + bigMapRepo: bigMapRepo, + blockRepo: blockRepo, + contractRepo: contractRepo, + storage: storage, + rpc: rpc, + address: address, + network: network, + ptr: ptr, } } @@ -99,7 +100,7 @@ func (s *TezosStorage) fillFields(uri TezosStorageURI) error { return err } - bmPtr, err := storage.GetBigMapPtr(s.rpc, s.network, s.address, metadataAnnot, block.Protocol.Hash, s.sharePath, block.Level) + bmPtr, err := storage.GetBigMapPtr(s.storage, s.contractRepo, s.rpc, s.network, s.address, metadataAnnot, block.Protocol.Hash, block.Level) if err != nil { return err } diff --git a/internal/parsers/tzip/tokens/parser.go b/internal/parsers/tzip/tokens/parser.go index b8119ee89..476732224 100644 --- a/internal/parsers/tzip/tokens/parser.go +++ b/internal/parsers/tzip/tokens/parser.go @@ -9,6 +9,7 @@ import ( "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/domains" "github.com/baking-bad/bcdhub/internal/models/tokenmetadata" "github.com/baking-bad/bcdhub/internal/models/types" @@ -21,22 +22,23 @@ import ( // Parser - type Parser struct { - bmdRepo bigmapdiff.Repository - blocksRepo block.Repository - tmRepo tokenmetadata.Repository - storage models.GeneralRepository - - rpc noderpc.INode - sharePath string - network types.Network - ipfs []string + bmdRepo bigmapdiff.Repository + blocksRepo block.Repository + contractsRepo contract.Repository + tmRepo tokenmetadata.Repository + storage models.GeneralRepository + + rpc noderpc.INode + network types.Network + ipfs []string } // NewParser - -func NewParser(bmdRepo bigmapdiff.Repository, blocksRepo block.Repository, tmRepo tokenmetadata.Repository, storage models.GeneralRepository, rpc noderpc.INode, sharePath string, network types.Network, ipfs ...string) Parser { +func NewParser(bmdRepo bigmapdiff.Repository, blocksRepo 
block.Repository, contractsRepo contract.Repository, tmRepo tokenmetadata.Repository, storage models.GeneralRepository, rpc noderpc.INode, network types.Network, ipfs ...string) Parser { return Parser{ bmdRepo: bmdRepo, blocksRepo: blocksRepo, storage: storage, - rpc: rpc, sharePath: sharePath, network: network, ipfs: ipfs, tmRepo: tmRepo, + rpc: rpc, network: network, ipfs: ipfs, tmRepo: tmRepo, + contractsRepo: contractsRepo, } } @@ -79,14 +81,14 @@ func (t Parser) ParseBigMapDiff(bmd *domains.BigMapDiff, storageAST *ast.TypedAs } } case strings.HasPrefix(m.Link, "tezos-storage:"): - bmPtr, err := storage.GetBigMapPtr(t.rpc, t.network, bmd.Contract, "metadata", bmd.Protocol.Hash, t.sharePath, bmd.Level) + bmPtr, err := storage.GetBigMapPtr(t.storage, t.contractsRepo, t.rpc, t.network, bmd.Contract, "metadata", bmd.Protocol.Hash, bmd.Level) if err != nil { return nil, err } ptr = bmPtr } - s := tzipStorage.NewFull(t.bmdRepo, t.blocksRepo, t.storage, t.rpc, t.sharePath, t.ipfs...) + s := tzipStorage.NewFull(t.bmdRepo, t.contractsRepo, t.blocksRepo, t.storage, t.rpc, t.ipfs...) remoteMetadata := new(TokenMetadata) if err := s.Get(t.network, bmd.Contract, m.Link, ptr, remoteMetadata); err != nil { @@ -108,7 +110,7 @@ func (t Parser) ParseBigMapDiff(bmd *domains.BigMapDiff, storageAST *ast.TypedAs } func (t Parser) parse(address string, state block.Block) ([]tokenmetadata.TokenMetadata, error) { - ptr, err := storage.GetBigMapPtr(t.rpc, state.Network, address, TokenMetadataStorageKey, state.Protocol.Hash, t.sharePath, state.Level) + ptr, err := storage.GetBigMapPtr(t.storage, t.contractsRepo, t.rpc, state.Network, address, TokenMetadataStorageKey, state.Protocol.Hash, state.Level) if err != nil { return nil, err } @@ -142,7 +144,7 @@ func (t Parser) parse(address string, state block.Block) ([]tokenmetadata.TokenM result := make([]tokenmetadata.TokenMetadata, 0) for _, m := range metadata { if m.Link != "" { - s := tzipStorage.NewFull(t.bmdRepo, t.blocksRepo, t.storage, t.rpc, t.sharePath, t.ipfs...) + s := tzipStorage.NewFull(t.bmdRepo, t.contractsRepo, t.blocksRepo, t.storage, t.rpc, t.ipfs...) 
remoteMetadata := &TokenMetadata{} if err := s.Get(t.network, address, m.Link, ptr, remoteMetadata); err != nil { diff --git a/internal/parsers/tzip/tokens/parser_test.go b/internal/parsers/tzip/tokens/parser_test.go index d2abc825f..0567798de 100644 --- a/internal/parsers/tzip/tokens/parser_test.go +++ b/internal/parsers/tzip/tokens/parser_test.go @@ -1,11 +1,15 @@ package tokens import ( + "io/ioutil" "testing" "time" + "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/bcd/ast" + "github.com/baking-bad/bcdhub/internal/bcd/consts" "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/domains" "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/models/protocol" @@ -17,6 +21,7 @@ import ( mock_general "github.com/baking-bad/bcdhub/internal/models/mock" mock_bmd "github.com/baking-bad/bcdhub/internal/models/mock/bigmapdiff" mock_block "github.com/baking-bad/bcdhub/internal/models/mock/block" + mock_contract "github.com/baking-bad/bcdhub/internal/models/mock/contract" mock_token_metadata "github.com/baking-bad/bcdhub/internal/models/mock/tokenmetadata" "github.com/golang/mock/gomock" ) @@ -40,6 +45,10 @@ func TestParser_ParseBigMapDiff(t *testing.T) { defer ctrlBlockRepo.Finish() blocksRepo := mock_block.NewMockRepository(ctrlBlockRepo) + ctrlContractRepo := gomock.NewController(t) + defer ctrlContractRepo.Finish() + contractsRepo := mock_contract.NewMockRepository(ctrlContractRepo) + ctrlTokenMetadataRepo := gomock.NewController(t) defer ctrlTokenMetadataRepo.Finish() tmRepo := mock_token_metadata.NewMockRepository(ctrlTokenMetadataRepo) @@ -73,9 +82,17 @@ func TestParser_ParseBigMapDiff(t *testing.T) { LastUpdateTime: timestamp, }, nil).AnyTimes() + contractsRepo.EXPECT().ScriptPart( + types.Granadanet, + "KT1QaDvkDe1sLXGL9rqmDMtNCmvNyPfUTYWK", + bcd.SymLinkBabylon, + consts.STORAGE, + ).Return( + []byte(`[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"nat","annots":["%MAX_SUPPLY"]},{"prim":"mutez","annots":["%PURCHASE_PRICE_MUTEZ"]}]},{"prim":"pair","args":[{"prim":"address","annots":["%administrator"]},{"prim":"pair","args":[{"prim":"nat","annots":["%all_tokens"]},{"prim":"big_map","args":[{"prim":"pair","args":[{"prim":"address"},{"prim":"nat"}]},{"prim":"nat"}],"annots":["%ledger"]}]}]}]},{"prim":"pair","args":[{"prim":"pair","args":[{"prim":"big_map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%metadata"]},{"prim":"nat","annots":["%next_id"]}]},{"prim":"pair","args":[{"prim":"big_map","args":[{"prim":"bytes"},{"prim":"unit"}],"annots":["%operators"]},{"prim":"pair","args":[{"prim":"bool","annots":["%paused"]},{"prim":"big_map","args":[{"prim":"nat"},{"prim":"pair","args":[{"prim":"nat","annots":["%token_id"]},{"prim":"map","args":[{"prim":"string"},{"prim":"bytes"}],"annots":["%token_info"]}]}],"annots":["%token_metadata"]}]}]}]}]}]`), nil, + ).AnyTimes() + tests := []struct { name string - sharePath string network types.Network bmd *domains.BigMapDiff storageAST string @@ -83,9 +100,8 @@ func TestParser_ParseBigMapDiff(t *testing.T) { wantErr bool }{ { - name: "Token metadata in tezos storage", - sharePath: "./test", - network: types.Granadanet, + name: "Token metadata in tezos storage", + network: types.Granadanet, bmd: &domains.BigMapDiff{ 
BigMapDiff: &bigmapdiff.BigMapDiff{ Ptr: 66051, @@ -152,15 +168,26 @@ func TestParser_ParseBigMapDiff(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { parser := Parser{ - bmdRepo: bmdRepo, - blocksRepo: blocksRepo, - tmRepo: tmRepo, - storage: generalRepo, - rpc: rpc, - network: tt.network, - sharePath: tt.sharePath, + bmdRepo: bmdRepo, + blocksRepo: blocksRepo, + tmRepo: tmRepo, + contractsRepo: contractsRepo, + storage: generalRepo, + rpc: rpc, + network: tt.network, } + generalRepo. + EXPECT(). + GetByID(gomock.Any()). + DoAndReturn(func(output interface{}) error { + typ := output.(*contract.Script) + data, err := ioutil.ReadFile("./test/contracts/granadanet/KT1QaDvkDe1sLXGL9rqmDMtNCmvNyPfUTYWK_babylon.json") + typ.Code = data + return err + }). + AnyTimes() + storageAST, err := ast.NewTypedAstFromString(tt.storageAST) if err != nil { t.Errorf("NewTypedAstFromString() error = %v", err) diff --git a/internal/postgres/bigmapdiff/storage.go b/internal/postgres/bigmapdiff/storage.go index 69bdf15aa..5f8d997ba 100644 --- a/internal/postgres/bigmapdiff/storage.go +++ b/internal/postgres/bigmapdiff/storage.go @@ -114,7 +114,7 @@ func (storage *Storage) Previous(filters []bigmapdiff.BigMapDiff) (response []bi } // GetForOperation - -func (storage *Storage) GetForOperation(id int64) (response []*bigmapdiff.BigMapDiff, err error) { +func (storage *Storage) GetForOperation(id int64) (response []bigmapdiff.BigMapDiff, err error) { err = storage.DB.Model().Table(models.DocBigMapDiff). Where("operation_id = ?", id).Select(&response) return diff --git a/internal/postgres/block/storage.go b/internal/postgres/block/storage.go index 2f7d3b523..33fb8ed64 100644 --- a/internal/postgres/block/storage.go +++ b/internal/postgres/block/storage.go @@ -33,7 +33,7 @@ func (storage *Storage) Last(network types.Network) (block block.Block, err erro Where("block.network = ?", network). Order("id desc"). Limit(1). - Relation("Protocol.id"). + Relation("Protocol"). 
Select() if storage.IsRecordNotFound(err) { err = nil diff --git a/internal/postgres/contract/storage.go b/internal/postgres/contract/storage.go index f247bd5dc..0ed0b919d 100644 --- a/internal/postgres/contract/storage.go +++ b/internal/postgres/contract/storage.go @@ -1,13 +1,12 @@ package contract import ( - "encoding/hex" "math/rand" + "github.com/baking-bad/bcdhub/internal/bcd" "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/types" - "github.com/baking-bad/bcdhub/internal/postgres/consts" "github.com/baking-bad/bcdhub/internal/postgres/core" "github.com/go-pg/pg/v10" "github.com/go-pg/pg/v10/orm" @@ -26,9 +25,7 @@ func NewStorage(pg *core.Postgres) *Storage { // Get - func (storage *Storage) Get(network types.Network, address string) (response contract.Contract, err error) { - query := storage.DB.Model(&response) - core.NetworkAndAddress(network, address)(query) - err = query.First() + err = storage.DB.Model(&response).Where("network = ?", network).Where("address = ?", address).Limit(1).Select() return } @@ -80,47 +77,14 @@ func (storage *Storage) GetByAddresses(addresses []contract.Address) (response [ return } -// GetProjectsLastContract - -func (storage *Storage) GetProjectsLastContract(c contract.Contract, size, offset int64) (response []*contract.Contract, err error) { - if c.FingerprintCode == nil || c.FingerprintParameter == nil || c.FingerprintStorage == nil { - return nil, nil - } - - code := hex.EncodeToString(c.FingerprintCode) - params := hex.EncodeToString(c.FingerprintParameter) - s := hex.EncodeToString(c.FingerprintStorage) - - limit := storage.GetPageSize(size) - - query := storage.DB.Model().Table(models.DocContracts). - ColumnExpr("MAX(id) as id"). - Where("project_id is not null"). - Where("encode(fingerprint_code, 'hex') = ?", code). - Where("encode(fingerprint_parameter, 'hex') = ?", params). - Where("encode(fingerprint_storage, 'hex') = ?", s) - - if c.Manager.Valid { - query.WhereOr("manager = ?", c.Manager.String()) - } - - query.Group("project_id"). - Limit(limit). - Offset(int(offset)). - Order("id desc") - - err = storage.DB.Model().Table(models.DocContracts).Where("id IN (?)", query).Select(&response) - return -} - // GetSameContracts - func (storage *Storage) GetSameContracts(c contract.Contract, manager string, size, offset int64) (pcr contract.SameResponse, err error) { - if c.FingerprintCode == nil || c.FingerprintParameter == nil || c.FingerprintStorage == nil { - return pcr, errors.Wrap(consts.ErrInvalidFingerprint, c.Address) - } - limit := storage.GetPageSize(size) - query := storage.DB.Model().Table(models.DocContracts).Where("hash = ?", c.Hash).Where("address != ?", c.Address) + query := storage.DB.Model().Table(models.DocContracts). + Where("alpha_id = ?", c.AlphaID). + Where("babylon_id = ?", c.BabylonID). + Where("address != ?", c.Address) if manager != "" { query.Where("manager = ?", manager) } @@ -129,7 +93,10 @@ func (storage *Storage) GetSameContracts(c contract.Contract, manager string, si return } - countQuery := storage.DB.Model().Table(models.DocContracts).Where("hash = ?", c.Hash).Where("address != ?", c.Address) + countQuery := storage.DB.Model().Table(models.DocContracts). + Where("alpha_id = ?", c.AlphaID). + Where("babylon_id = ?", c.BabylonID). 
+ Where("address != ?", c.Address) if manager != "" { countQuery.Where("manager = ?", manager) } @@ -143,35 +110,33 @@ func (storage *Storage) GetSameContracts(c contract.Contract, manager string, si // GetSimilarContracts - func (storage *Storage) GetSimilarContracts(c contract.Contract, size, offset int64) ([]contract.Similar, int, error) { - if !c.ProjectID.Valid { - return nil, 0, nil + scriptID := c.AlphaID + if c.BabylonID > 0 { + scriptID = c.BabylonID } - limit := storage.GetPageSize(size) + scriptsQuery := storage.DB.Model(). + Table(models.DocScripts).Column("id"). + Where("project_id = ?", + storage.DB.Model().Table(models.DocScripts).Column("project_id").Where("id = ?", scriptID).Limit(1), + ) - subQuery := storage.DB.Model((*contract.Contract)(nil)). - ColumnExpr("MAX(id) as id"). - Where("project_id = ?", c.ProjectID). - Where("hash != ?", c.Hash). - Group("hash") + limit := storage.GetPageSize(size) var contracts []contract.Contract - if err := storage.DB.Model((*contract.Contract)(nil)). - Where("id IN (?)", subQuery). + if err := storage.DB.Model(&contracts). + Where("(alpha_id IN (?0)) OR (babylon_id IN (?0))", scriptsQuery). Order("last_action desc"). Limit(limit). Offset(int(offset)). - Select(&contracts); err != nil { + Select(); err != nil { return nil, 0, err } - var count int - if err := storage.DB.Model((*contract.Contract)(nil)). - ColumnExpr("count(distinct hash)"). - Where("project_id = ?", c.ProjectID). - Where("hash != ?", c.Hash). - Group("hash"). - Select(&count); err != nil { + count, err := storage.DB.Model((*contract.Contract)(nil)). + Where("(alpha_id IN (?0)) OR (babylon_id IN (?0))", scriptsQuery). + Count() + if err != nil { return nil, 0, err } @@ -210,11 +175,11 @@ func (storage *Storage) GetTokens(network types.Network, tokenInterface string, // Stats - func (storage *Storage) Stats(c contract.Contract) (stats contract.Stats, err error) { - if !c.ProjectID.Valid { - return - } sameCount, err := storage.DB.Model().Table(models.DocContracts). - Where("hash = ?", c.Hash). + WhereOrGroup(func(q *orm.Query) (*orm.Query, error) { + q.WhereOr("alpha_id = ?", c.AlphaID).WhereOr("babylon_id = ?", c.BabylonID) + return q, err + }). Where("address != ?", c.Address). Count() if err != nil { @@ -222,23 +187,124 @@ func (storage *Storage) Stats(c contract.Contract) (stats contract.Stats, err er } stats.SameCount = int64(sameCount) - if err = storage.DB.Model((*contract.Contract)(nil)). - ColumnExpr("count(distinct hash)"). - Where("project_id = ?", c.ProjectID). - Where("hash != ?", c.Hash). - Group("hash"). - Select(&stats.SimilarCount); err != nil { + scriptID := c.AlphaID + if c.BabylonID > 0 { + scriptID = c.BabylonID + } + scriptsQuery := storage.DB.Model(). + Table(models.DocScripts).Column("id"). + Where("project_id = ?", + storage.DB.Model().Table(models.DocScripts).Column("project_id").Where("id = ?", scriptID).Limit(1), + ) + + similarCount, err := storage.DB.Model((*contract.Contract)(nil)). + Where("(alpha_id IN (?0)) OR (babylon_id IN (?0))", scriptsQuery). 
+ Count() + if err != nil { return } - + stats.SimilarCount = int64(similarCount) return } // GetProjectIDByHash - func (storage *Storage) GetProjectIDByHash(hash string) (result string, err error) { - err = storage.DB.Model().Table(models.DocContracts).Column("project_id").Where("hash = ?", hash).Where("project_id is not null").Limit(1).Select(&result) + err = storage.DB.Model(&contract.Contract{}).Relation("Alpha.id").Relation("Babylon.id").Column("project_id").Where("babylon.hash = ?0 OR alpha.hash = ?0", hash).Where("project_id is not null").Limit(1).Select(&result) if errors.Is(err, pg.ErrNoRows) { return "", nil } return } + +// ByHash - +func (storage *Storage) ByHash(hash string) (result contract.Script, err error) { + err = storage.DB.Model(&result).Where("hash = ?", hash).First() + return +} + +// Script - +func (storage *Storage) Script(network types.Network, address string, symLink string) (contract.Script, error) { + var c contract.Contract + query := storage.DB.Model(&c). + Where("network = ?", network). + Where("address = ?", address) + switch symLink { + case bcd.SymLinkAlpha: + err := query.Relation("Alpha").Select() + return c.Alpha, err + case bcd.SymLinkBabylon: + err := query.Relation("Babylon").Select() + return c.Babylon, err + } + return c.Alpha, errors.Errorf("unknown protocol symbolic link: %s", symLink) +} + +// GetScripts - +func (storage *Storage) GetScripts(limit, offset int) (scripts []contract.Script, err error) { + err = storage.DB.Model(&scripts).Limit(limit).Offset(offset).Order("id asc").Select() + return +} + +// UpdateProjectID - +func (storage *Storage) UpdateProjectID(scripts []contract.Script) error { + _, err := storage.DB.Model(&scripts).Set("project_id = _data.project_id").WherePK().Update() + return err +} + +// Code - +func (storage *Storage) Code(id int64) ([]byte, error) { + var data []byte + err := storage.DB.Model((*contract.Script)(nil)).Where("id = ?", id).Column("code").Select(&data) + return data, err +} + +// Parameter - +func (storage *Storage) Parameter(id int64) ([]byte, error) { + var data []byte + err := storage.DB.Model((*contract.Script)(nil)).Where("id = ?", id).Column("parameter").Select(&data) + return data, err +} + +// Storage - +func (storage *Storage) Storage(id int64) ([]byte, error) { + var data []byte + err := storage.DB.Model((*contract.Script)(nil)).Where("id = ?", id).Column("storage").Select(&data) + return data, err +} + +// ScriptPart - +func (storage *Storage) ScriptPart(network types.Network, address string, symLink, part string) ([]byte, error) { + query := storage.DB.Model((*contract.Contract)(nil)). + Where("network = ?", network). 
+ Where("address = ?", address) + + switch symLink { + case "alpha": + switch part { + case "parameter": + query.Column("_").Relation("Alpha.parameter") + case "code": + query.Column("_").Relation("Alpha.code") + case "storage": + query.Column("_").Relation("Alpha.storage") + default: + return nil, errors.Errorf("unknown script part name: %s", part) + } + case "babylon": + switch part { + case "parameter": + query.Column("_").Relation("Babylon.parameter") + case "code": + query.Column("_").Relation("Babylon.code") + case "storage": + query.Column("_").Relation("Babylon.storage") + default: + return nil, errors.Errorf("unknown script part name: %s", part) + } + default: + return nil, errors.Errorf("unknown protocol symbolic link: %s", symLink) + } + var data []byte + err := query.Select(pg.Scan(&data)) + return data, err +} diff --git a/internal/postgres/tzip/storage.go b/internal/postgres/tzip/storage.go index 4cbfb4634..a7b029d98 100644 --- a/internal/postgres/tzip/storage.go +++ b/internal/postgres/tzip/storage.go @@ -1,7 +1,6 @@ package tzip import ( - "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/types" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/postgres/core" @@ -45,8 +44,7 @@ func (storage *Storage) GetAliases(network types.Network) (t []tzip.TZIP, err er // GetWithEvents - func (storage *Storage) GetWithEvents(updatedAt uint64) ([]tzip.TZIP, error) { - query := storage.DB.Model(). - Table(models.DocTZIP). + query := storage.DB.Model((*tzip.TZIP)(nil)). Where("events is not null AND jsonb_array_length(events) > 0") if updatedAt > 0 { @@ -59,3 +57,11 @@ func (storage *Storage) GetWithEvents(updatedAt uint64) ([]tzip.TZIP, error) { } return t, nil } + +// Events - +func (storage *Storage) Events(network types.Network, address string) (events tzip.Events, err error) { + err = storage.DB.Model((*tzip.TZIP)(nil)).Column("events"). + Where("network = ?", network).Where("address = ?", address). 
+ Limit(1).Select(&events) + return +} diff --git a/internal/search/contract.go b/internal/search/contract.go index 004ad1153..7dc9f7699 100644 --- a/internal/search/contract.go +++ b/internal/search/contract.go @@ -89,17 +89,22 @@ func (c *Contract) Prepare(model models.Model) { return } + script := cont.Alpha + if cont.BabylonID > 0 { + script = cont.Babylon + } + c.Address = cont.Address - c.Annotations = cont.Annotations + c.Annotations = script.Annotations c.Delegate = cont.Delegate.String() - c.Entrypoints = cont.Entrypoints - c.FailStrings = cont.FailStrings - c.Hardcoded = cont.Hardcoded - c.Hash = cont.Hash + c.Entrypoints = script.Entrypoints + c.FailStrings = script.FailStrings + c.Hardcoded = script.Hardcoded + c.Hash = script.Hash c.Level = cont.Level c.Manager = cont.Manager.String() c.Network = cont.Network.String() - c.ProjectID = cont.ProjectID.String() + c.ProjectID = script.ProjectID.String() c.Tags = cont.Tags.ToArray() c.Timestamp = cont.Timestamp.UTC() } diff --git a/scripts/api_tester/contract.go b/scripts/api_tester/contract.go index 5c5820c16..eb6d47b1b 100644 --- a/scripts/api_tester/contract.go +++ b/scripts/api_tester/contract.go @@ -122,11 +122,6 @@ func testContract(tasks chan contract.Contract, stop chan struct{}, counter *int if err := request(fmt.Sprintf("%s/entrypoints", prefix)); err != nil { logger.Err(err) } - for i := range contract.Entrypoints { - if err := request(fmt.Sprintf("%s/entrypoints/schema?entrypoint=%s", prefix, contract.Entrypoints[i])); err != nil { - logger.Err(err) - } - } atomic.AddInt64(counter, 1) } diff --git a/scripts/api_tester/main.go b/scripts/api_tester/main.go index 4af95e3df..ee8b1637b 100644 --- a/scripts/api_tester/main.go +++ b/scripts/api_tester/main.go @@ -24,7 +24,6 @@ func main() { ctx := config.NewContext( config.WithStorage(cfg.Storage, "api_tester", 0, cfg.Scripts.Connections.Open, cfg.Scripts.Connections.Idle), config.WithRPC(cfg.RPC), - config.WithShare(cfg.SharePath), config.WithSearch(cfg.Storage), config.WithLoadErrorDescriptions(), config.WithConfigCopy(cfg), diff --git a/scripts/bcdctl/main.go b/scripts/bcdctl/main.go index 841ec7754..42df375a9 100644 --- a/scripts/bcdctl/main.go +++ b/scripts/bcdctl/main.go @@ -34,7 +34,6 @@ func main() { config.WithStorage(cfg.Storage, "bcdctl", 0, cfg.Scripts.Connections.Open, cfg.Scripts.Connections.Idle), config.WithConfigCopy(cfg), config.WithRPC(cfg.RPC), - config.WithShare(cfg.SharePath), config.WithSearch(cfg.Storage), ) defer ctx.Close() diff --git a/scripts/migration/main.go b/scripts/migration/main.go index 665a51117..a93ef3d5b 100644 --- a/scripts/migration/main.go +++ b/scripts/migration/main.go @@ -26,7 +26,6 @@ var migrationsList = []migrations.Migration{ &migrations.TokenMetadataUnknown{}, &migrations.DefaultEntrypoint{}, &migrations.FixLostSearchContracts{}, - &migrations.NullableProjectID{}, } func main() { @@ -45,7 +44,6 @@ func main() { start := time.Now() ctx := config.NewContext( - config.WithShare(cfg.SharePath), config.WithStorage(cfg.Storage, "migrations", 0, cfg.Scripts.Connections.Open, cfg.Scripts.Connections.Idle), config.WithRPC(cfg.RPC), config.WithConfigCopy(cfg), diff --git a/scripts/migration/migrations/create_transfers.go b/scripts/migration/migrations/create_transfers.go index f90c848a4..e1f75b1b4 100644 --- a/scripts/migration/migrations/create_transfers.go +++ b/scripts/migration/migrations/create_transfers.go @@ -4,7 +4,6 @@ import ( "context" "github.com/baking-bad/bcdhub/internal/config" - 
"github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/operation" @@ -61,7 +60,6 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { } parser, err := transferParsers.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.TokenBalances, - ctx.SharePath, transferParsers.WithNetwork(operations[i].Network), transferParsers.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), transferParsers.WithoutViews(), @@ -69,11 +67,15 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { if err != nil { return err } - proto, err := ctx.CachedProtocolByID(operations[i].Network, operations[i].ProtocolID) + proto, err := ctx.Cache.ProtocolByID(operations[i].Network, operations[i].ProtocolID) if err != nil { return err } - operations[i].Script, err = fetch.ContractBySymLink(operations[i].Network, operations[i].Destination, proto.SymLink, ctx.SharePath) + script, err := ctx.Contracts.Script(operations[i].Network, operations[i].Destination, proto.SymLink) + if err != nil { + return err + } + operations[i].Script, err = script.Full() if err != nil { return err } diff --git a/scripts/migration/migrations/create_tzip.go b/scripts/migration/migrations/create_tzip.go index 6633fe68e..ba86a05c4 100644 --- a/scripts/migration/migrations/create_tzip.go +++ b/scripts/migration/migrations/create_tzip.go @@ -52,9 +52,8 @@ func (m *CreateTZIP) Do(ctx *config.Context) error { if err != nil { return err } - parser := tzipParsers.NewParser(ctx.BigMapDiffs, ctx.Blocks, ctx.Storage, rpc, tzipParsers.ParserConfig{ + parser := tzipParsers.NewParser(ctx.BigMapDiffs, ctx.Blocks, ctx.Contracts, ctx.Storage, rpc, tzipParsers.ParserConfig{ IPFSGateways: ctx.Config.IPFSGateways, - SharePath: ctx.SharePath, }) t, err := parser.Parse(tzipParsers.ParseContext{ diff --git a/scripts/migration/migrations/extended_storage_events.go b/scripts/migration/migrations/extended_storage_events.go index a2079ad1d..24816568e 100644 --- a/scripts/migration/migrations/extended_storage_events.go +++ b/scripts/migration/migrations/extended_storage_events.go @@ -6,9 +6,9 @@ import ( "github.com/baking-bad/bcdhub/internal/bcd/ast" "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/models/types" @@ -73,18 +73,20 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { continue } - script, err := fetch.ContractBySymLink(tzips[i].Network, tzips[i].Address, protocol.SymLink, ctx.SharePath) + script, err := ctx.Contracts.Script(tzips[i].Network, tzips[i].Address, protocol.SymLink) if err != nil { return err } for _, op := range operations { - op.Script = script - tree, err := ast.NewScriptWithoutCode(script) + op.Script, err = script.Full() + if err != nil { + return err + } + op.AST, err = ast.NewScriptWithoutCode(op.Script) if err != nil { return err } - op.AST = tree st := stacktrace.New() if err := st.Fill(ctx.Operations, op); err != nil { @@ -92,7 +94,6 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { } parser, err := 
transferParsers.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.TokenBalances, - ctx.SharePath, transferParsers.WithNetwork(tzips[i].Network), transferParsers.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), transferParsers.WithStackTrace(st), @@ -107,11 +108,17 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { return err } } - proto, err := ctx.CachedProtocolByID(operations[i].Network, operations[i].ProtocolID) + proto, err := ctx.Cache.ProtocolByID(operations[i].Network, operations[i].ProtocolID) if err != nil { return err } - if err := parser.Parse(bmd, proto.Hash, &op); err != nil { + + ptrsBmd := make([]*bigmapdiff.BigMapDiff, len(bmd)) + for i := range bmd { + ptrsBmd[i] = &bmd[i] + } + + if err := parser.Parse(ptrsBmd, proto.Hash, &op); err != nil { if errors.Is(err, noderpc.InvalidNodeResponse{}) { logger.Err(err) continue diff --git a/scripts/migration/migrations/lost_search_contracts.go b/scripts/migration/migrations/lost_search_contracts.go index 2b6e2ccb3..01c57df0a 100644 --- a/scripts/migration/migrations/lost_search_contracts.go +++ b/scripts/migration/migrations/lost_search_contracts.go @@ -65,8 +65,8 @@ func (m *FixLostSearchContracts) saveSearchModels(ctx *config.Context, contracts for i := range data { if typ, ok := data[i].(*search.Contract); ok { - typ.Alias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Address) - typ.DelegateAlias = ctx.CachedAlias(types.NewNetwork(typ.Network), typ.Delegate) + typ.Alias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Address) + typ.DelegateAlias = ctx.Cache.Alias(types.NewNetwork(typ.Network), typ.Delegate) } } diff --git a/scripts/migration/migrations/parameter_events.go b/scripts/migration/migrations/parameter_events.go index 639bfe2f1..9fed4ca58 100644 --- a/scripts/migration/migrations/parameter_events.go +++ b/scripts/migration/migrations/parameter_events.go @@ -6,7 +6,6 @@ import ( "github.com/baking-bad/bcdhub/internal/bcd/ast" "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/fetch" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/operation" @@ -71,7 +70,7 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { continue } - script, err := fetch.ContractBySymLink(tzips[i].Network, tzips[i].Address, protocol.SymLink, ctx.SharePath) + script, err := ctx.Contracts.Script(tzips[i].Network, tzips[i].Address, protocol.SymLink) if err != nil { return err } @@ -85,12 +84,14 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { if err := bar.Add(1); err != nil { return err } - op.Script = script - tree, err := ast.NewScriptWithoutCode(script) + op.Script, err = script.Full() + if err != nil { + return err + } + op.AST, err = ast.NewScriptWithoutCode(op.Script) if err != nil { return err } - op.AST = tree st := stacktrace.New() if err := st.Fill(ctx.Operations, op); err != nil { @@ -98,7 +99,6 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { } parser, err := transferParser.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.TokenBalances, - ctx.SharePath, transferParser.WithNetwork(tzips[i].Network), transferParser.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), transferParser.WithStackTrace(st), @@ -107,7 +107,7 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { return err } - proto, err := ctx.CachedProtocolByID(operations[i].Network, operations[i].ProtocolID) + proto, err := 
ctx.Cache.ProtocolByID(operations[i].Network, operations[i].ProtocolID) if err != nil { return err } } diff --git a/scripts/migration/migrations/project_id_as_nullable.go b/scripts/migration/migrations/project_id_as_nullable.go deleted file mode 100644 index 6324c786f..000000000 --- a/scripts/migration/migrations/project_id_as_nullable.go +++ /dev/null @@ -1,69 +0,0 @@ -package migrations - -import ( - "context" - "fmt" - - "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models/contract" - "github.com/go-pg/pg/v10" -) - -// NullableProjectID - -type NullableProjectID struct { - limit int -} - -// NewNullableFields - -func NewNullableProjectID(limit int) *NullableProjectID { - return &NullableProjectID{limit} -} - -// Key - -func (m *NullableProjectID) Key() string { - return "nullable_project_id" -} - -// Description - -func (m *NullableProjectID) Description() string { - return "set `nullable_project_id` field of `contract` model to nullable type" -} - -// Do - migrate function -func (m *NullableProjectID) Do(ctx *config.Context) error { - if m.limit == 0 { - m.limit = 10000 - } - return ctx.StorageDB.DB.RunInTransaction(context.Background(), m.migrateContracts) -} - -func (m *NullableProjectID) migrateContracts(tx *pg.Tx) error { - logger.Info().Msg("processing contracts...") - - var end bool - var offset int - for !end { - var contracts []contract.Contract - if err := tx.Model(&contract.Contract{}). - Where("project_id = ''"). - Limit(m.limit). - Offset(offset). - Select(&contracts); err != nil { - return err - } - - for i := range contracts { - contracts[i].ProjectID.Valid = false - if _, err := tx.Model(&contracts[i]).WherePK().Update(); err != nil { - return err - } - } - - offset += len(contracts) - end = len(contracts) < m.limit - fmt.Printf("processed %d", offset) - } - - return nil -} diff --git a/scripts/node_cache/main.go b/scripts/node_cache/main.go new file mode 100644 index 000000000..f59a3c9cb --- /dev/null +++ b/scripts/node_cache/main.go @@ -0,0 +1,137 @@ +package main + +import ( + "context" + "flag" + "net/http" + "os" + "path" + "path/filepath" + "strconv" + "sync" + "time" + + "github.com/baking-bad/bcdhub/internal/config" + "github.com/baking-bad/bcdhub/internal/logger" + "github.com/pkg/errors" + "github.com/schollz/progressbar/v3" +) + +func main() { + var network string + flag.StringVar(&network, "n", "mainnet", "network name") + + var startLevel int + flag.IntVar(&startLevel, "l", 1, "start level") + flag.Parse() + + cfg, err := config.LoadDefaultConfig() + if err != nil { + logger.Err(err) + return + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + workers := make(chan struct{}, 50) + var wg sync.WaitGroup + + dir := path.Join(cfg.SharePath, "node_cache", network) + + if _, err := os.Stat(dir); os.IsNotExist(err) { + if err := os.MkdirAll(dir, os.ModePerm); err != nil { + logger.Err(err) + return + } + } + + rpcConfig, ok := cfg.RPC[network] + if !ok { + logger.Error().Msgf("unknown RPC: %s", network) + return + } + bar := progressbar.NewOptions(1969278, progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) + for level := int64(startLevel); level < 1969278; level++ { + if err := bar.Add(1); err != nil { + logger.Err(err) + return + } + + sLevel := strconv.FormatInt(level, 10) + levelDir := path.Join(dir, sLevel) + + if _, err := os.Stat(levelDir);
os.IsNotExist(err) { + if err := os.MkdirAll(levelDir, os.ModePerm); err != nil { + logger.Err(err) + return + } + } else { + continue + } + + workers <- struct{}{} + wg.Add(1) + go cache(ctx, rpcConfig, levelDir, network, sLevel, workers, &wg) + } + + wg.Wait() +} + +func cache(ctx context.Context, rpcConfig config.RPCConfig, dir, network, level string, workers chan struct{}, wg *sync.WaitGroup) { + defer func() { + wg.Done() + <-workers + }() + + getHeader(ctx, rpcConfig, dir, network, level) + getOperations(ctx, rpcConfig, dir, network, level) +} + +func getHeader(ctx context.Context, rpcConfig config.RPCConfig, dir, network, level string) { + urlHeader := rpcConfig.URI + path.Join("/chains/main/blocks", level, "header") + fileHeader := filepath.Join(dir, "header.json") + ctxHeader, cancelHeader := context.WithTimeout(ctx, time.Second*time.Duration(rpcConfig.Timeout)) + defer cancelHeader() + if err := getAndSave(ctxHeader, urlHeader, fileHeader); err != nil { + logger.Error().Err(err).Str("network", network).Msg("get header") + return + } +} + +func getOperations(ctx context.Context, rpcConfig config.RPCConfig, dir, network, level string) { + urlOperations := rpcConfig.URI + path.Join("/chains/main/blocks", level, "operations/3") + fileOperations := filepath.Join(dir, "operations.json") + ctxOperations, cancelOperations := context.WithTimeout(ctx, time.Second*time.Duration(rpcConfig.Timeout)) + defer cancelOperations() + if err := getAndSave(ctxOperations, urlOperations, fileOperations); err != nil { + logger.Error().Err(err).Str("network", network).Msg("get operations") + return + } +} + +func getAndSave(ctx context.Context, url, filename string) error { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return errors.Errorf("makeGetRequest.NewRequest: %v", err) + } + + res, err := http.DefaultClient.Do(req) + if err != nil { + return errors.Errorf("http.DefaultClient.Do: %v", err) + } + defer res.Body.Close() + + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + _, err = f.ReadFrom(res.Body) + if err != nil { + return err + } + + return f.Sync() +}
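
A minimal usage sketch (not part of the change set above) of the database-backed script access that replaces the removed fetch.Contract/share-path lookups. It only relies on calls that appear in this diff (bcd.GetProtoSymLink, contract.Repository.ScriptPart, ast.NewTypedAstFromBytes); the helper name storageType and the assumption that a contract.Repository is already wired up (e.g. the postgres storage added in internal/postgres/contract/storage.go) are illustrative, not part of the change:

package example

import (
    "github.com/baking-bad/bcdhub/internal/bcd"
    "github.com/baking-bad/bcdhub/internal/bcd/ast"
    "github.com/baking-bad/bcdhub/internal/bcd/consts"
    "github.com/baking-bad/bcdhub/internal/models/contract"
    "github.com/baking-bad/bcdhub/internal/models/types"
)

// storageType is a hypothetical helper resolving a contract's storage type
// from the scripts table instead of the old share directory, mirroring the
// flow of storage.GetBigMapPtr in this diff.
func storageType(contracts contract.Repository, network types.Network, address, protocol string) (*ast.TypedAst, error) {
    // map the protocol hash to its symbolic link (alpha/babylon)
    symLink, err := bcd.GetProtoSymLink(protocol)
    if err != nil {
        return nil, err
    }
    // fetch only the storage section of the stored script
    raw, err := contracts.ScriptPart(network, address, symLink, consts.STORAGE)
    if err != nil {
        return nil, err
    }
    return ast.NewTypedAstFromBytes(raw)
}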