diff --git a/.github/workflows/tests_master.yml b/.github/workflows/tests_master.yml index 9f88500f8..3742e9577 100644 --- a/.github/workflows/tests_master.yml +++ b/.github/workflows/tests_master.yml @@ -12,7 +12,7 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v2.3.0 with: - version: v1.29 + version: v1.34 args: --timeout=2m test: runs-on: ubuntu-latest diff --git a/.github/workflows/tests_pr.yml b/.github/workflows/tests_pr.yml index 89adc6963..8fffef34d 100644 --- a/.github/workflows/tests_pr.yml +++ b/.github/workflows/tests_pr.yml @@ -13,7 +13,7 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v2.3.0 with: - version: v1.29 + version: v1.34 args: --timeout=2m test: runs-on: ubuntu-latest diff --git a/cmd/api/docs/docs.go b/cmd/api/docs/docs.go index 5224cb2c8..a59a0704c 100644 --- a/cmd/api/docs/docs.go +++ b/cmd/api/docs/docs.go @@ -123,7 +123,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/models.TZIP" + "$ref": "#/definitions/tzip.TZIP" } }, "204": { @@ -1769,7 +1769,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/models.TezosDomain" + "$ref": "#/definitions/tezosdomain.TezosDomain" } }, "204": { @@ -1996,7 +1996,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/search.Result" + "$ref": "#/definitions/models.Result" } }, "400": { @@ -2186,7 +2186,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/elastic.DAppStats" + "$ref": "#/definitions/operation.DAppStats" } }, "400": { @@ -2515,7 +2515,7 @@ var doc = `{ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/elastic.TransfersResponse" + "$ref": "#/definitions/transfer.Pageable" } }, "400": { @@ -2707,37 +2707,6 @@ var doc = `{ } } }, - "elastic.DAppStats": { - "type": "object", - "properties": { - "txs": { - "type": "integer" - }, - "users": { - "type": "integer" - }, - "volume": { - "type": "integer" - } - } - }, 
- "elastic.TransfersResponse": { - "type": "object", - "properties": { - "last_id": { - "type": "string" - }, - "total": { - "type": "integer" - }, - "transfers": { - "type": "array", - "items": { - "$ref": "#/definitions/models.Transfer" - } - } - } - }, "formatter.DiffResult": { "type": "object", "properties": { @@ -3126,7 +3095,7 @@ var doc = `{ "domains": { "type": "array", "items": { - "$ref": "#/definitions/models.TezosDomain" + "$ref": "#/definitions/tezosdomain.TezosDomain" } }, "total": { @@ -3663,6 +3632,9 @@ var doc = `{ }, "transfered": { "type": "number" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3699,6 +3671,9 @@ var doc = `{ }, "token_id": { "type": "integer" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3779,6 +3754,9 @@ var doc = `{ }, "token_id": { "type": "integer" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3880,83 +3858,129 @@ var doc = `{ "type": "object", "additionalProperties": true }, - "models.ReverseTezosDomain": { + "models.Group": { "type": "object", "properties": { - "expiration": { - "type": "string" + "count": { + "type": "integer" }, - "name": { - "type": "string" + "top": { + "type": "array", + "items": { + "$ref": "#/definitions/models.Top" + } } } }, - "models.TZIP": { + "models.Item": { "type": "object", "properties": { - "address": { - "type": "string" + "body": { + "type": "object" }, - "authors": { - "type": "array", - "items": { - "type": "string" - } + "group": { + "$ref": "#/definitions/models.Group" }, - "dapps": { - "type": "array", - "items": { - "$ref": "#/definitions/tzip.DApp" + "highlights": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } } }, - "description": { + "type": { "type": "string" }, - "domain": { - "$ref": "#/definitions/models.ReverseTezosDomain" + "value": { + "type": "string" + } + } + }, + "models.Result": { + "type": "object", + "properties": { + "count": { + "type": "integer" }, - "events": { + "items": 
{ "type": "array", "items": { - "$ref": "#/definitions/tzip.Event" + "$ref": "#/definitions/models.Item" } }, - "homepage": { + "time": { + "type": "integer" + } + } + }, + "models.Top": { + "type": "object", + "properties": { + "key": { "type": "string" }, - "interfaces": { + "network": { + "type": "string" + } + } + }, + "newmiguel.Node": { + "type": "object", + "properties": { + "children": { "type": "array", "items": { - "type": "string" + "$ref": "#/definitions/newmiguel.Node" } }, - "level": { - "type": "integer" + "diff_type": { + "type": "string" }, - "license": { - "$ref": "#/definitions/tzip.License" + "from": { + "type": "object" }, "name": { "type": "string" }, - "network": { + "prim": { "type": "string" }, - "slug": { + "type": { "type": "string" }, - "timestamp": { - "type": "string" + "value": { + "type": "object" + } + } + }, + "operation.DAppStats": { + "type": "object", + "properties": { + "txs": { + "type": "integer" }, - "tokens": { - "$ref": "#/definitions/tzip.TokenMetadataType" + "users": { + "type": "integer" }, - "version": { + "volume": { + "type": "integer" + } + } + }, + "tezosdomain.ReverseTezosDomain": { + "type": "object", + "properties": { + "expiration": { + "type": "string" + }, + "name": { "type": "string" } } }, - "models.TezosDomain": { + "tezosdomain.TezosDomain": { "type": "object", "properties": { "address": { @@ -3985,7 +4009,24 @@ var doc = `{ } } }, - "models.Transfer": { + "transfer.Pageable": { + "type": "object", + "properties": { + "last_id": { + "type": "string" + }, + "total": { + "type": "integer" + }, + "transfers": { + "type": "array", + "items": { + "$ref": "#/definitions/transfer.Transfer" + } + } + } + }, + "transfer.Transfer": { "type": "object", "properties": { "alias": { @@ -4047,103 +4088,6 @@ var doc = `{ } } }, - "newmiguel.Node": { - "type": "object", - "properties": { - "children": { - "type": "array", - "items": { - "$ref": "#/definitions/newmiguel.Node" - } - }, - "diff_type": { - "type": "string" - 
}, - "from": { - "type": "object" - }, - "name": { - "type": "string" - }, - "prim": { - "type": "string" - }, - "type": { - "type": "string" - }, - "value": { - "type": "object" - } - } - }, - "search.Group": { - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "top": { - "type": "array", - "items": { - "$ref": "#/definitions/search.Top" - } - } - } - }, - "search.Item": { - "type": "object", - "properties": { - "body": { - "type": "object" - }, - "group": { - "$ref": "#/definitions/search.Group" - }, - "highlights": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "type": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - "search.Result": { - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "items": { - "type": "array", - "items": { - "$ref": "#/definitions/search.Item" - } - }, - "time": { - "type": "integer" - } - } - }, - "search.Top": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "network": { - "type": "string" - } - } - }, "tzip.DApp": { "type": "object", "properties": { @@ -4377,7 +4321,72 @@ var doc = `{ } } }, - "tzip.TokenMetadata": { + "tzip.TZIP": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "authors": { + "type": "array", + "items": { + "type": "string" + } + }, + "dapps": { + "type": "array", + "items": { + "$ref": "#/definitions/tzip.DApp" + } + }, + "description": { + "type": "string" + }, + "domain": { + "$ref": "#/definitions/tezosdomain.ReverseTezosDomain" + }, + "events": { + "type": "array", + "items": { + "$ref": "#/definitions/tzip.Event" + } + }, + "homepage": { + "type": "string" + }, + "interfaces": { + "type": "array", + "items": { + "type": "string" + } + }, + "level": { + "type": "integer" + }, + "license": { + "$ref": "#/definitions/tzip.License" + }, + "name": { + "type": "string" + }, + "network": { + "type": "string" + }, + 
"slug": { + "type": "string" + }, + "timestamp": { + "type": "string" + }, + "tokens": { + "$ref": "#/definitions/tzip.TokenMetadataType" + }, + "version": { + "type": "string" + } + } + }, + "tzip.TokenMetadataEntity": { "type": "object", "properties": { "decimals": { @@ -4407,7 +4416,7 @@ var doc = `{ "static": { "type": "array", "items": { - "$ref": "#/definitions/tzip.TokenMetadata" + "$ref": "#/definitions/tzip.TokenMetadataEntity" } } } diff --git a/cmd/api/docs/swagger.json b/cmd/api/docs/swagger.json index c30cc803f..1d4d52837 100644 --- a/cmd/api/docs/swagger.json +++ b/cmd/api/docs/swagger.json @@ -106,7 +106,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/models.TZIP" + "$ref": "#/definitions/tzip.TZIP" } }, "204": { @@ -1752,7 +1752,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/models.TezosDomain" + "$ref": "#/definitions/tezosdomain.TezosDomain" } }, "204": { @@ -1979,7 +1979,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/search.Result" + "$ref": "#/definitions/models.Result" } }, "400": { @@ -2169,7 +2169,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/elastic.DAppStats" + "$ref": "#/definitions/operation.DAppStats" } }, "400": { @@ -2498,7 +2498,7 @@ "200": { "description": "OK", "schema": { - "$ref": "#/definitions/elastic.TransfersResponse" + "$ref": "#/definitions/transfer.Pageable" } }, "400": { @@ -2690,37 +2690,6 @@ } } }, - "elastic.DAppStats": { - "type": "object", - "properties": { - "txs": { - "type": "integer" - }, - "users": { - "type": "integer" - }, - "volume": { - "type": "integer" - } - } - }, - "elastic.TransfersResponse": { - "type": "object", - "properties": { - "last_id": { - "type": "string" - }, - "total": { - "type": "integer" - }, - "transfers": { - "type": "array", - "items": { - "$ref": "#/definitions/models.Transfer" - } - } - } - }, "formatter.DiffResult": { "type": "object", "properties": { @@ -3109,7 +3078,7 @@ "domains": { 
"type": "array", "items": { - "$ref": "#/definitions/models.TezosDomain" + "$ref": "#/definitions/tezosdomain.TezosDomain" } }, "total": { @@ -3646,6 +3615,9 @@ }, "transfered": { "type": "number" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3682,6 +3654,9 @@ }, "token_id": { "type": "integer" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3762,6 +3737,9 @@ }, "token_id": { "type": "integer" + }, + "volume_24_hours": { + "type": "number" } } }, @@ -3863,83 +3841,129 @@ "type": "object", "additionalProperties": true }, - "models.ReverseTezosDomain": { + "models.Group": { "type": "object", "properties": { - "expiration": { - "type": "string" + "count": { + "type": "integer" }, - "name": { - "type": "string" + "top": { + "type": "array", + "items": { + "$ref": "#/definitions/models.Top" + } } } }, - "models.TZIP": { + "models.Item": { "type": "object", "properties": { - "address": { - "type": "string" + "body": { + "type": "object" }, - "authors": { - "type": "array", - "items": { - "type": "string" - } + "group": { + "$ref": "#/definitions/models.Group" }, - "dapps": { - "type": "array", - "items": { - "$ref": "#/definitions/tzip.DApp" + "highlights": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } } }, - "description": { + "type": { "type": "string" }, - "domain": { - "$ref": "#/definitions/models.ReverseTezosDomain" + "value": { + "type": "string" + } + } + }, + "models.Result": { + "type": "object", + "properties": { + "count": { + "type": "integer" }, - "events": { + "items": { "type": "array", "items": { - "$ref": "#/definitions/tzip.Event" + "$ref": "#/definitions/models.Item" } }, - "homepage": { + "time": { + "type": "integer" + } + } + }, + "models.Top": { + "type": "object", + "properties": { + "key": { "type": "string" }, - "interfaces": { + "network": { + "type": "string" + } + } + }, + "newmiguel.Node": { + "type": "object", + "properties": { + "children": { "type": "array", 
"items": { - "type": "string" + "$ref": "#/definitions/newmiguel.Node" } }, - "level": { - "type": "integer" + "diff_type": { + "type": "string" }, - "license": { - "$ref": "#/definitions/tzip.License" + "from": { + "type": "object" }, "name": { "type": "string" }, - "network": { + "prim": { "type": "string" }, - "slug": { + "type": { "type": "string" }, - "timestamp": { - "type": "string" + "value": { + "type": "object" + } + } + }, + "operation.DAppStats": { + "type": "object", + "properties": { + "txs": { + "type": "integer" }, - "tokens": { - "$ref": "#/definitions/tzip.TokenMetadataType" + "users": { + "type": "integer" }, - "version": { + "volume": { + "type": "integer" + } + } + }, + "tezosdomain.ReverseTezosDomain": { + "type": "object", + "properties": { + "expiration": { + "type": "string" + }, + "name": { "type": "string" } } }, - "models.TezosDomain": { + "tezosdomain.TezosDomain": { "type": "object", "properties": { "address": { @@ -3968,7 +3992,24 @@ } } }, - "models.Transfer": { + "transfer.Pageable": { + "type": "object", + "properties": { + "last_id": { + "type": "string" + }, + "total": { + "type": "integer" + }, + "transfers": { + "type": "array", + "items": { + "$ref": "#/definitions/transfer.Transfer" + } + } + } + }, + "transfer.Transfer": { "type": "object", "properties": { "alias": { @@ -4030,103 +4071,6 @@ } } }, - "newmiguel.Node": { - "type": "object", - "properties": { - "children": { - "type": "array", - "items": { - "$ref": "#/definitions/newmiguel.Node" - } - }, - "diff_type": { - "type": "string" - }, - "from": { - "type": "object" - }, - "name": { - "type": "string" - }, - "prim": { - "type": "string" - }, - "type": { - "type": "string" - }, - "value": { - "type": "object" - } - } - }, - "search.Group": { - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "top": { - "type": "array", - "items": { - "$ref": "#/definitions/search.Top" - } - } - } - }, - "search.Item": { - "type": "object", - "properties": { - 
"body": { - "type": "object" - }, - "group": { - "$ref": "#/definitions/search.Group" - }, - "highlights": { - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "type": "string" - } - } - }, - "type": { - "type": "string" - }, - "value": { - "type": "string" - } - } - }, - "search.Result": { - "type": "object", - "properties": { - "count": { - "type": "integer" - }, - "items": { - "type": "array", - "items": { - "$ref": "#/definitions/search.Item" - } - }, - "time": { - "type": "integer" - } - } - }, - "search.Top": { - "type": "object", - "properties": { - "key": { - "type": "string" - }, - "network": { - "type": "string" - } - } - }, "tzip.DApp": { "type": "object", "properties": { @@ -4360,7 +4304,72 @@ } } }, - "tzip.TokenMetadata": { + "tzip.TZIP": { + "type": "object", + "properties": { + "address": { + "type": "string" + }, + "authors": { + "type": "array", + "items": { + "type": "string" + } + }, + "dapps": { + "type": "array", + "items": { + "$ref": "#/definitions/tzip.DApp" + } + }, + "description": { + "type": "string" + }, + "domain": { + "$ref": "#/definitions/tezosdomain.ReverseTezosDomain" + }, + "events": { + "type": "array", + "items": { + "$ref": "#/definitions/tzip.Event" + } + }, + "homepage": { + "type": "string" + }, + "interfaces": { + "type": "array", + "items": { + "type": "string" + } + }, + "level": { + "type": "integer" + }, + "license": { + "$ref": "#/definitions/tzip.License" + }, + "name": { + "type": "string" + }, + "network": { + "type": "string" + }, + "slug": { + "type": "string" + }, + "timestamp": { + "type": "string" + }, + "tokens": { + "$ref": "#/definitions/tzip.TokenMetadataType" + }, + "version": { + "type": "string" + } + } + }, + "tzip.TokenMetadataEntity": { "type": "object", "properties": { "decimals": { @@ -4390,7 +4399,7 @@ "static": { "type": "array", "items": { - "$ref": "#/definitions/tzip.TokenMetadata" + "$ref": "#/definitions/tzip.TokenMetadataEntity" } } } diff --git 
a/cmd/api/docs/swagger.yaml b/cmd/api/docs/swagger.yaml index db2dffdd4..54bb3d70b 100644 --- a/cmd/api/docs/swagger.yaml +++ b/cmd/api/docs/swagger.yaml @@ -29,26 +29,6 @@ definitions: value: type: string type: object - elastic.DAppStats: - properties: - txs: - type: integer - users: - type: integer - volume: - type: integer - type: object - elastic.TransfersResponse: - properties: - last_id: - type: string - total: - type: integer - transfers: - items: - $ref: '#/definitions/models.Transfer' - type: array - type: object formatter.DiffResult: properties: added: @@ -307,7 +287,7 @@ definitions: properties: domains: items: - $ref: '#/definitions/models.TezosDomain' + $ref: '#/definitions/tezosdomain.TezosDomain' type: array total: type: integer @@ -665,6 +645,8 @@ definitions: type: integer transfered: type: number + volume_24_hours: + type: number type: object handlers.TokenBalance: properties: @@ -689,6 +671,8 @@ definitions: type: string token_id: type: integer + volume_24_hours: + type: number type: object handlers.TokenContract: properties: @@ -742,6 +726,8 @@ definitions: type: string token_id: type: integer + volume_24_hours: + type: number type: object handlers.TokenMethodStats: properties: @@ -808,57 +794,86 @@ definitions: jsonschema.Schema: additionalProperties: true type: object - models.ReverseTezosDomain: + models.Group: properties: - expiration: + count: + type: integer + top: + items: + $ref: '#/definitions/models.Top' + type: array + type: object + models.Item: + properties: + body: + type: object + group: + $ref: '#/definitions/models.Group' + highlights: + additionalProperties: + items: + type: string + type: array + type: object + type: type: string - name: + value: type: string type: object - models.TZIP: + models.Result: properties: - address: - type: string - authors: - items: - type: string - type: array - dapps: + count: + type: integer + items: items: - $ref: '#/definitions/tzip.DApp' + $ref: '#/definitions/models.Item' type: array - 
description: + time: + type: integer + type: object + models.Top: + properties: + key: type: string - domain: - $ref: '#/definitions/models.ReverseTezosDomain' - events: - items: - $ref: '#/definitions/tzip.Event' - type: array - homepage: + network: type: string - interfaces: + type: object + newmiguel.Node: + properties: + children: items: - type: string + $ref: '#/definitions/newmiguel.Node' type: array - level: - type: integer - license: - $ref: '#/definitions/tzip.License' + diff_type: + type: string + from: + type: object name: type: string - network: + prim: type: string - slug: + type: type: string - timestamp: + value: + type: object + type: object + operation.DAppStats: + properties: + txs: + type: integer + users: + type: integer + volume: + type: integer + type: object + tezosdomain.ReverseTezosDomain: + properties: + expiration: type: string - tokens: - $ref: '#/definitions/tzip.TokenMetadataType' - version: + name: type: string type: object - models.TezosDomain: + tezosdomain.TezosDomain: properties: address: type: string @@ -877,7 +892,18 @@ definitions: timestamp: type: string type: object - models.Transfer: + transfer.Pageable: + properties: + last_id: + type: string + total: + type: integer + transfers: + items: + $ref: '#/definitions/transfer.Transfer' + type: array + type: object + transfer.Transfer: properties: alias: type: string @@ -918,69 +944,6 @@ definitions: token_id: type: integer type: object - newmiguel.Node: - properties: - children: - items: - $ref: '#/definitions/newmiguel.Node' - type: array - diff_type: - type: string - from: - type: object - name: - type: string - prim: - type: string - type: - type: string - value: - type: object - type: object - search.Group: - properties: - count: - type: integer - top: - items: - $ref: '#/definitions/search.Top' - type: array - type: object - search.Item: - properties: - body: - type: object - group: - $ref: '#/definitions/search.Group' - highlights: - additionalProperties: - items: - type: 
string - type: array - type: object - type: - type: string - value: - type: string - type: object - search.Result: - properties: - count: - type: integer - items: - items: - $ref: '#/definitions/search.Item' - type: array - time: - type: integer - type: object - search.Top: - properties: - key: - type: string - network: - type: string - type: object tzip.DApp: properties: agora_qa_post_id: @@ -1133,7 +1096,50 @@ definitions: type: type: string type: object - tzip.TokenMetadata: + tzip.TZIP: + properties: + address: + type: string + authors: + items: + type: string + type: array + dapps: + items: + $ref: '#/definitions/tzip.DApp' + type: array + description: + type: string + domain: + $ref: '#/definitions/tezosdomain.ReverseTezosDomain' + events: + items: + $ref: '#/definitions/tzip.Event' + type: array + homepage: + type: string + interfaces: + items: + type: string + type: array + level: + type: integer + license: + $ref: '#/definitions/tzip.License' + name: + type: string + network: + type: string + slug: + type: string + timestamp: + type: string + tokens: + $ref: '#/definitions/tzip.TokenMetadataType' + version: + type: string + type: object + tzip.TokenMetadataEntity: properties: decimals: type: integer @@ -1153,7 +1159,7 @@ definitions: properties: static: items: - $ref: '#/definitions/tzip.TokenMetadata' + $ref: '#/definitions/tzip.TokenMetadataEntity' type: array type: object info: @@ -1267,7 +1273,7 @@ paths: "200": description: OK schema: - $ref: '#/definitions/models.TZIP' + $ref: '#/definitions/tzip.TZIP' "204": description: No Content schema: @@ -2386,7 +2392,7 @@ paths: "200": description: OK schema: - $ref: '#/definitions/models.TezosDomain' + $ref: '#/definitions/tezosdomain.TezosDomain' "204": description: No Content schema: @@ -2541,7 +2547,7 @@ paths: "200": description: OK schema: - $ref: '#/definitions/search.Result' + $ref: '#/definitions/models.Result' "400": description: Bad Request schema: @@ -2670,7 +2676,7 @@ paths: "200": description: OK 
schema: - $ref: '#/definitions/elastic.DAppStats' + $ref: '#/definitions/operation.DAppStats' "400": description: Bad Request schema: @@ -2898,7 +2904,7 @@ paths: "200": description: OK schema: - $ref: '#/definitions/elastic.TransfersResponse' + $ref: '#/definitions/transfer.Pageable' "400": description: Bad Request schema: diff --git a/cmd/api/handlers/account.go b/cmd/api/handlers/account.go index f601c55ee..ca9b2df65 100644 --- a/cmd/api/handlers/account.go +++ b/cmd/api/handlers/account.go @@ -3,7 +3,7 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/gin-gonic/gin" ) @@ -22,25 +22,25 @@ import ( // @Router /account/{network}/{address} [get] func (ctx *Context) GetInfo(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - stats, err := ctx.ES.GetOperationsStats(req.Network, req.Address) - if handleError(c, err, 0) { + stats, err := ctx.Operations.GetStats(req.Network, req.Address) + if ctx.handleError(c, err, 0) { return } - block, err := ctx.ES.GetLastBlock(req.Network) - if handleError(c, err, 0) { + block, err := ctx.Blocks.Last(req.Network) + if ctx.handleError(c, err, 0) { return } rpc, err := ctx.GetRPC(req.Network) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } balance, err := rpc.GetContractBalance(req.Address, block.Level) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -52,10 +52,10 @@ func (ctx *Context) GetInfo(c *gin.Context) { LastAction: stats.LastAction, } - alias, err := ctx.ES.GetAlias(req.Network, req.Address) + alias, err := ctx.TZIP.GetAlias(req.Network, req.Address) if err != nil { - if !elastic.IsRecordNotFound(err) { - handleError(c, err, 0) + if !ctx.Storage.IsRecordNotFound(err) { + ctx.handleError(c, 
err, 0) return } } else { @@ -63,7 +63,7 @@ func (ctx *Context) GetInfo(c *gin.Context) { } tokenBalances, err := ctx.getAccountBalances(req.Network, req.Address) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } accountInfo.Tokens = tokenBalances @@ -72,14 +72,14 @@ func (ctx *Context) GetInfo(c *gin.Context) { } func (ctx *Context) getAccountBalances(network, address string) ([]TokenBalance, error) { - tokenBalances, err := ctx.ES.GetAccountBalances(network, address) + tokenBalances, err := ctx.TokenBalances.GetAccountBalances(network, address) if err != nil { return nil, err } result := make([]TokenBalance, 0) for _, balance := range tokenBalances { - token, err := ctx.ES.GetTokenMetadata(elastic.GetTokenMetadataContext{ + token, err := ctx.TZIP.GetTokenMetadata(tzip.GetTokenMetadataContext{ TokenID: balance.TokenID, Contract: balance.Contract, Network: network, diff --git a/cmd/api/handlers/alias.go b/cmd/api/handlers/alias.go index faeb97eb3..d5eaefbfd 100644 --- a/cmd/api/handlers/alias.go +++ b/cmd/api/handlers/alias.go @@ -21,16 +21,16 @@ import ( // @Router /slug/{slug} [get] func (ctx *Context) GetBySlug(c *gin.Context) { var req getBySlugRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - a, err := ctx.ES.GetBySlug(req.Slug) + a, err := ctx.TZIP.GetBySlug(req.Slug) if gorm.IsRecordNotFoundError(err) { - handleError(c, err, http.StatusBadRequest) + ctx.handleError(c, err, http.StatusBadRequest) return } - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } var alias Alias diff --git a/cmd/api/handlers/auth.go b/cmd/api/handlers/auth.go index 2adba9742..275db74aa 100644 --- a/cmd/api/handlers/auth.go +++ b/cmd/api/handlers/auth.go @@ -17,7 +17,7 @@ func (ctx *Context) AuthJWTRequired() gin.HandlerFunc { return func(c *gin.Context) { userID, err := ctx.getUserFromToken(c) - if handleError(c, err, 
http.StatusUnauthorized) { + if ctx.handleError(c, err, http.StatusUnauthorized) { return } diff --git a/cmd/api/handlers/bigmap.go b/cmd/api/handlers/bigmap.go index 0c3dd56cf..46e4400fa 100644 --- a/cmd/api/handlers/bigmap.go +++ b/cmd/api/handlers/bigmap.go @@ -8,8 +8,8 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" "github.com/baking-bad/bcdhub/internal/contractparser/stringer" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/gin-gonic/gin" "github.com/pkg/errors" "github.com/tidwall/gjson" @@ -31,16 +31,16 @@ import ( // @Router /bigmap/{network}/{ptr} [get] func (ctx *Context) GetBigMap(c *gin.Context) { var req getBigMapRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - bm, err := ctx.ES.GetBigMapKeys(elastic.GetBigMapKeysContext{ + bm, err := ctx.BigMapDiffs.Get(bigmapdiff.GetContext{ Ptr: &req.Ptr, Network: req.Network, Size: 10000, // TODO: >10k }) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -59,32 +59,29 @@ func (ctx *Context) GetBigMap(c *gin.Context) { } } - metadata, err := getStorageMetadata(ctx.ES, res.Address, res.Network) - if handleError(c, err, 0) { + metadata, err := ctx.getStorageMetadata(res.Address, res.Network) + if ctx.handleError(c, err, 0) { return } res.Typedef, err = docstring.GetTypedef(bm[0].BinPath, metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } } else { - actions, err := ctx.ES.GetBigMapHistory(req.Ptr, req.Network) - if handleError(c, err, 0) { + actions, err := ctx.BigMapActions.Get(req.Ptr, 
req.Network) + if ctx.handleError(c, err, 0) { return } - if len(actions) == 0 { - c.JSON(http.StatusNoContent, gin.H{}) - return + if len(actions) > 0 { + res.Address = actions[0].Address } - - res.Address = actions[0].Address } - alias, err := ctx.ES.GetAlias(req.Network, res.Address) + alias, err := ctx.TZIP.GetAlias(req.Network, res.Address) if err != nil { - if !elastic.IsRecordNotFound(err) { - handleError(c, err, 0) + if !ctx.Storage.IsRecordNotFound(err) { + ctx.handleError(c, err, 0) return } } else { @@ -110,12 +107,12 @@ func (ctx *Context) GetBigMap(c *gin.Context) { // @Router /bigmap/{network}/{ptr}/history [get] func (ctx *Context) GetBigMapHistory(c *gin.Context) { var req getBigMapRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - bm, err := ctx.ES.GetBigMapHistory(req.Ptr, req.Network) - if handleError(c, err, 0) { + bm, err := ctx.BigMapActions.Get(req.Ptr, req.Network) + if ctx.handleError(c, err, 0) { return } if bm == nil { @@ -145,16 +142,16 @@ func (ctx *Context) GetBigMapHistory(c *gin.Context) { // @Router /bigmap/{network}/{ptr}/keys [get] func (ctx *Context) GetBigMapKeys(c *gin.Context) { var req getBigMapRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var pageReq bigMapSearchRequest - if err := c.BindQuery(&pageReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&pageReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - bm, err := ctx.ES.GetBigMapKeys(elastic.GetBigMapKeysContext{ + bm, err := ctx.BigMapDiffs.Get(bigmapdiff.GetContext{ Ptr: &req.Ptr, Network: req.Network, Query: pageReq.Search, @@ -162,12 +159,12 @@ func (ctx *Context) GetBigMapKeys(c *gin.Context) { Offset: pageReq.Offset, Level: pageReq.Level, }) - if handleError(c, err, 0) { + if 
ctx.handleError(c, err, 0) { return } response, err := ctx.prepareBigMapKeys(bm) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -192,22 +189,22 @@ func (ctx *Context) GetBigMapKeys(c *gin.Context) { // @Router /bigmap/{network}/{ptr}/keys/{key_hash} [get] func (ctx *Context) GetBigMapByKeyHash(c *gin.Context) { var req getBigMapByKeyHashRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var pageReq pageableRequest - if err := c.BindQuery(&pageReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&pageReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - bm, total, err := ctx.ES.GetBigMapDiffsByPtrAndKeyHash(req.Ptr, req.Network, req.KeyHash, pageReq.Size, pageReq.Offset) - if handleError(c, err, 0) { + bm, total, err := ctx.BigMapDiffs.GetByPtrAndKeyHash(req.Ptr, req.Network, req.KeyHash, pageReq.Size, pageReq.Offset) + if ctx.handleError(c, err, 0) { return } response, err := ctx.prepareBigMapItem(bm, req.KeyHash) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -230,28 +227,28 @@ func (ctx *Context) GetBigMapByKeyHash(c *gin.Context) { // @Router /bigmap/{network}/{ptr}/count [get] func (ctx *Context) GetBigMapDiffCount(c *gin.Context) { var req getBigMapRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - count, err := ctx.ES.GetBigMapDiffsCount(req.Network, req.Ptr) + count, err := ctx.BigMapDiffs.Count(req.Network, req.Ptr) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusOK, CountResponse{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } c.JSON(http.StatusOK, CountResponse{count}) } -func (ctx *Context) prepareBigMapKeys(data []elastic.BigMapDiff) 
([]BigMapResponseItem, error) { +func (ctx *Context) prepareBigMapKeys(data []bigmapdiff.BigMapDiff) ([]BigMapResponseItem, error) { if len(data) == 0 { return []BigMapResponseItem{}, nil } - contractMetadata, err := meta.GetContractMetadata(ctx.ES, data[0].Address) + contractMetadata, err := meta.GetContractMetadata(ctx.Schema, data[0].Address) if err != nil { return nil, err } @@ -272,18 +269,18 @@ func (ctx *Context) prepareBigMapKeys(data []elastic.BigMapDiff) ([]BigMapRespon Value: value, Timestamp: data[i].Timestamp, }, - Count: data[i].Count, + // Count: data[i].Count, TODO: fill count } } return res, nil } -func (ctx *Context) prepareBigMapItem(data []elastic.BigMapDiff, keyHash string) (res BigMapDiffByKeyResponse, err error) { +func (ctx *Context) prepareBigMapItem(data []bigmapdiff.BigMapDiff, keyHash string) (res BigMapDiffByKeyResponse, err error) { if len(data) == 0 { return } - contractMetadata, err := meta.GetContractMetadata(ctx.ES, data[0].Address) + contractMetadata, err := meta.GetContractMetadata(ctx.Schema, data[0].Address) if err != nil { return } @@ -309,7 +306,7 @@ func (ctx *Context) prepareBigMapItem(data []elastic.BigMapDiff, keyHash string) return } -func prepareItem(item elastic.BigMapDiff, contractMetadata *meta.ContractMetadata) (interface{}, interface{}, string, error) { +func prepareItem(item bigmapdiff.BigMapDiff, contractMetadata *meta.ContractMetadata) (interface{}, interface{}, string, error) { var protoSymLink string protoSymLink, err := meta.GetProtoSymLink(item.Protocol) if err != nil { @@ -352,7 +349,7 @@ func prepareItem(item elastic.BigMapDiff, contractMetadata *meta.ContractMetadat return key, value, keyString, err } -func prepareBigMapHistory(arr []models.BigMapAction, ptr int64) BigMapHistoryResponse { +func prepareBigMapHistory(arr []bigmapaction.BigMapAction, ptr int64) BigMapHistoryResponse { if len(arr) == 0 { return BigMapHistoryResponse{} } diff --git a/cmd/api/handlers/code.go b/cmd/api/handlers/code.go index 
6e86c255b..94b7c940d 100644 --- a/cmd/api/handlers/code.go +++ b/cmd/api/handlers/code.go @@ -27,24 +27,24 @@ import ( // @Router /contract/{network}/{address}/code [get] func (ctx *Context) GetContractCode(c *gin.Context) { var req getContractCodeRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } if req.Protocol == "" { - state, err := ctx.ES.GetLastBlock(req.Network) - if handleError(c, err, 0) { + state, err := ctx.Blocks.Last(req.Network) + if ctx.handleError(c, err, 0) { return } req.Protocol = state.Protocol } code, err := ctx.getContractCodeJSON(req.Network, req.Address, req.Protocol, req.Level) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -55,7 +55,7 @@ func (ctx *Context) GetContractCode(c *gin.Context) { } resp, err := formatter.MichelineToMichelson(collapsed, false, formatter.DefLineSize) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -77,12 +77,12 @@ func (ctx *Context) GetContractCode(c *gin.Context) { // @Router /diff [post] func (ctx *Context) GetDiff(c *gin.Context) { var req CodeDiffRequest - if err := c.BindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } resp, err := ctx.getContractCodeDiff(req.Left, req.Right) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -115,7 +115,7 @@ func (ctx *Context) getContractCodeDiff(left, right CodeDiffLeg) (res CodeDiffRe if leg.Protocol == "" { protocol, ok := currentProtocols[leg.Network] if !ok { - state, err := ctx.ES.GetLastBlock(leg.Network) + state, err := ctx.Blocks.Last(leg.Network) if err != nil { return res, err } diff --git 
a/cmd/api/handlers/compilation_tasks.go b/cmd/api/handlers/compilation_tasks.go index c3469c894..551306150 100644 --- a/cmd/api/handlers/compilation_tasks.go +++ b/cmd/api/handlers/compilation_tasks.go @@ -21,17 +21,17 @@ func (ctx *Context) ListCompilationTasks(c *gin.Context) { } _, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } var ctReq compilationTasksRequest - if err := c.BindQuery(&ctReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&ctReq); ctx.handleError(c, err, http.StatusBadRequest) { return } tasks, err := ctx.DB.ListCompilationTasks(userID, ctReq.Limit, ctReq.Offset, ctReq.Kind) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/context.go b/cmd/api/handlers/context.go index e45ff33e4..d849cc210 100644 --- a/cmd/api/handlers/context.go +++ b/cmd/api/handlers/context.go @@ -24,7 +24,7 @@ func NewContext(cfg config.Config) (*Context, error) { } ctx := config.NewContext( - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithRPC(cfg.RPC), config.WithDatabase(cfg.DB), config.WithShare(cfg.SharePath), diff --git a/cmd/api/handlers/contract.go b/cmd/api/handlers/contract.go index 0f3db6354..899c8b344 100644 --- a/cmd/api/handlers/contract.go +++ b/cmd/api/handlers/contract.go @@ -3,9 +3,8 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/gin-gonic/gin" "github.com/jinzhu/gorm" ) @@ -25,16 +24,16 @@ import ( // @Router /contract/{network}/{address} [get] func (ctx *Context) GetContract(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); 
ctx.handleError(c, err, http.StatusBadRequest) { return } - contract := models.NewEmptyContract(req.Network, req.Address) - if err := ctx.ES.GetByID(&contract); handleError(c, err, 0) { + contract := contract.NewEmptyContract(req.Network, req.Address) + if err := ctx.Storage.GetByID(&contract); ctx.handleError(c, err, 0) { return } res, err := ctx.contractPostprocessing(contract, c) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, res) @@ -51,24 +50,24 @@ func (ctx *Context) GetContract(c *gin.Context) { // @Failure 500 {object} Error // @Router /pick_random [get] func (ctx *Context) GetRandomContract(c *gin.Context) { - var contract models.Contract + var contract contract.Contract for !helpers.StringInArray(contract.Network, ctx.Config.API.Networks) { - cntr, err := ctx.ES.GetContractRandom() - if handleError(c, err, 0) { + cntr, err := ctx.Contracts.GetRandom() + if ctx.handleError(c, err, 0) { return } contract = cntr } res, err := ctx.contractPostprocessing(contract, c) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, res) } -func (ctx *Context) contractPostprocessing(contract models.Contract, c *gin.Context) (Contract, error) { +func (ctx *Context) contractPostprocessing(contract contract.Contract, c *gin.Context) (Contract, error) { var res Contract res.FromModel(contract) @@ -87,9 +86,9 @@ func (ctx *Context) contractPostprocessing(contract models.Contract, c *gin.Cont return res, err } - if alias, err := ctx.ES.GetAlias(contract.Network, contract.Address); err == nil { + if alias, err := ctx.TZIP.GetAlias(contract.Network, contract.Address); err == nil { res.Slug = alias.Slug - } else if !elastic.IsRecordNotFound(err) { + } else if !ctx.Storage.IsRecordNotFound(err) { return res, err } diff --git a/cmd/api/handlers/dapp.go b/cmd/api/handlers/dapp.go index 57fbb8e40..13035fe90 100644 --- a/cmd/api/handlers/dapp.go +++ b/cmd/api/handlers/dapp.go @@ -4,28 +4,27 @@ 
import ( "net/http" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/gin-gonic/gin" ) // GetDAppList - func (ctx *Context) GetDAppList(c *gin.Context) { - dapps, err := ctx.ES.GetDApps() + dapps, err := ctx.TZIP.GetDApps() if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusOK, []interface{}{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } results := make([]DApp, len(dapps)) for i := range dapps { result, err := ctx.appendDAppInfo(&dapps[i], false) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } results[i] = result @@ -37,22 +36,22 @@ func (ctx *Context) GetDAppList(c *gin.Context) { // GetDApp - func (ctx *Context) GetDApp(c *gin.Context) { var req getDappRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - dapp, err := ctx.ES.GetDAppBySlug(req.Slug) + dapp, err := ctx.TZIP.GetDAppBySlug(req.Slug) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusOK, gin.H{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } response, err := ctx.appendDAppInfo(dapp, true) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -99,13 +98,13 @@ func (ctx *Context) appendDAppInfo(dapp *tzip.DApp, withDetails bool) (DApp, err result.DexTokens = make([]TokenMetadata, 0) for _, token := range dapp.DexTokens { - tokenMetadata, err := ctx.ES.GetTokenMetadata(elastic.GetTokenMetadataContext{ + tokenMetadata, err := ctx.TZIP.GetTokenMetadata(tzip.GetTokenMetadataContext{ Contract: 
token.Contract, Network: consts.Mainnet, TokenID: token.TokenID, }) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { continue } return result, err @@ -117,9 +116,9 @@ func (ctx *Context) appendDAppInfo(dapp *tzip.DApp, withDetails bool) (DApp, err entrypoints = append(entrypoints, c.DexVolumeEntrypoints...) } - vol, err := ctx.ES.GetToken24HoursVolume(consts.Mainnet, token.Contract, initiators, entrypoints, token.TokenID) + vol, err := ctx.Transfers.GetToken24HoursVolume(consts.Mainnet, token.Contract, initiators, entrypoints, token.TokenID) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { continue } return result, err @@ -137,8 +136,8 @@ func (ctx *Context) appendDAppInfo(dapp *tzip.DApp, withDetails bool) (DApp, err result.Contracts = make([]DAppContract, 0) for _, address := range dapp.Contracts { - contract := models.NewEmptyContract(consts.Mainnet, address.Address) - if err := ctx.ES.GetByID(&contract); err != nil { + contract := contract.NewEmptyContract(consts.Mainnet, address.Address) + if err := ctx.Storage.GetByID(&contract); err != nil { return result, err } result.Contracts = append(result.Contracts, DAppContract{ @@ -154,7 +153,7 @@ func (ctx *Context) appendDAppInfo(dapp *tzip.DApp, withDetails bool) (DApp, err } result.Tokens = append(result.Tokens, tokens...) 
- vol, err := ctx.ES.GetContract24HoursVolume(consts.Mainnet, address.Address, address.DexVolumeEntrypoints) + vol, err := ctx.Operations.GetContract24HoursVolume(consts.Mainnet, address.Address, address.DexVolumeEntrypoints) if err != nil { return result, err } diff --git a/cmd/api/handlers/deployment.go b/cmd/api/handlers/deployment.go index b60250269..634a88ea3 100644 --- a/cmd/api/handlers/deployment.go +++ b/cmd/api/handlers/deployment.go @@ -23,17 +23,17 @@ func (ctx *Context) ListDeployments(c *gin.Context) { } _, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } var ctReq compilationRequest - if err := c.BindQuery(&ctReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&ctReq); ctx.handleError(c, err, http.StatusBadRequest) { return } deployments, err := ctx.DB.ListDeployments(userID, ctReq.Limit, ctReq.Offset) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -49,12 +49,12 @@ func (ctx *Context) CreateDeployment(c *gin.Context) { } user, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } form, err := c.MultipartForm() - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { logger.Error(err) return } @@ -65,11 +65,11 @@ func (ctx *Context) CreateDeployment(c *gin.Context) { Status: compilation.StatusPending, } - if err = ctx.DB.CreateCompilationTask(&task); handleError(c, err, 0) { + if err = ctx.DB.CreateCompilationTask(&task); ctx.handleError(c, err, 0) { return } - if err = ctx.runDeployment(task.ID, form); handleError(c, err, 0) { + if err = ctx.runDeployment(task.ID, form); ctx.handleError(c, err, 0) { return } @@ -118,17 +118,17 @@ func (ctx *Context) FinalizeDeployment(c *gin.Context) { } var req deploymentRequest - if err := c.ShouldBindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } user, 
err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } task, err := ctx.DB.GetCompilationTask(req.TaskID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -145,7 +145,7 @@ func (ctx *Context) FinalizeDeployment(c *gin.Context) { } err = ctx.DB.CreateDeployment(&d) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/domains.go b/cmd/api/handlers/domains.go index c5b8d0ae1..ca1627732 100644 --- a/cmd/api/handlers/domains.go +++ b/cmd/api/handlers/domains.go @@ -3,8 +3,7 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" "github.com/gin-gonic/gin" "github.com/pkg/errors" ) @@ -25,17 +24,17 @@ import ( // @Router /domains/{network} [get] func (ctx *Context) TezosDomainsList(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var args pageableRequest - if err := c.BindQuery(&args); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&args); ctx.handleError(c, err, http.StatusBadRequest) { return } - domains, err := ctx.ES.ListDomains(req.Network, args.Size, args.Offset) - if handleError(c, err, 0) { + domains, err := ctx.TezosDomains.ListDomains(req.Network, args.Size, args.Offset) + if ctx.handleError(c, err, 0) { return } @@ -57,52 +56,52 @@ func (ctx *Context) TezosDomainsList(c *gin.Context) { // @Param address query string false "Address" minlength(36) maxlength(36) // @Accept json // @Produce json -// @Success 200 {object} models.TezosDomain +// @Success 200 {object} tezosdomain.TezosDomain // @Success 204 {object} gin.H // @Failure 400 {object} Error // @Failure 500 {object} Error // @Router 
/domains/{network}/resolve [get] func (ctx *Context) ResolveDomain(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var args resolveDomainRequest - if err := c.BindQuery(&args); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&args); ctx.handleError(c, err, http.StatusBadRequest) { return } switch { case args.Name != "": - td := models.TezosDomain{ + td := tezosdomain.TezosDomain{ Network: req.Network, Name: args.Name, } - if err := ctx.ES.GetByID(&td); err != nil { - if elastic.IsRecordNotFound(err) { + if err := ctx.Storage.GetByID(&td); err != nil { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusNoContent, gin.H{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } if td.Address == "" { - handleError(c, errors.Errorf("Unknown domain name"), http.StatusBadRequest) + ctx.handleError(c, errors.Errorf("Unknown domain name"), http.StatusBadRequest) return } c.JSON(http.StatusOK, td) case args.Address != "": - td, err := ctx.ES.ResolveDomainByAddress(req.Network, args.Address) + td, err := ctx.TezosDomains.ResolveDomainByAddress(req.Network, args.Address) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusNoContent, gin.H{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } c.JSON(http.StatusOK, td) default: - handleError(c, errors.Errorf("Invalid resolve request: %##v", args), http.StatusBadRequest) + ctx.handleError(c, errors.Errorf("Invalid resolve request: %##v", args), http.StatusBadRequest) } } diff --git a/cmd/api/handlers/entrypoints.go b/cmd/api/handlers/entrypoints.go index d942d666f..159cd226a 100644 --- a/cmd/api/handlers/entrypoints.go +++ b/cmd/api/handlers/entrypoints.go @@ -7,7 +7,6 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/docstring" 
"github.com/baking-bad/bcdhub/internal/contractparser/formatter" "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/jsonschema" "github.com/gin-gonic/gin" "github.com/tidwall/gjson" @@ -28,16 +27,16 @@ import ( // @Router /contract/{network}/{address}/entrypoints [get] func (ctx *Context) GetEntrypoints(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - metadata, err := getParameterMetadata(ctx.ES, req.Address, req.Network) - if handleError(c, err, 0) { + metadata, err := ctx.getParameterMetadata(req.Address, req.Network) + if ctx.handleError(c, err, 0) { return } entrypoints, err := docstring.GetEntrypoints(metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -45,7 +44,7 @@ func (ctx *Context) GetEntrypoints(c *gin.Context) { for i, entrypoint := range entrypoints { resp[i].EntrypointType = entrypoint resp[i].Schema, err = jsonschema.Create(entrypoint.BinPath, metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } } @@ -71,23 +70,23 @@ func (ctx *Context) GetEntrypoints(c *gin.Context) { // @Router /contract/{network}/{address}/entrypoints/data [post] func (ctx *Context) GetEntrypointData(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqData getEntrypointDataRequest - if err := c.BindJSON(&reqData); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&reqData); ctx.handleError(c, err, http.StatusBadRequest) { return } result, err := ctx.buildEntrypointMicheline(req.Network, req.Address, reqData.BinPath, reqData.Data, false) - if 
handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } if reqData.Format == "michelson" { value := result.Get("value") michelson, err := formatter.MichelineToMichelson(value, false, formatter.DefLineSize) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, michelson) @@ -114,22 +113,22 @@ func (ctx *Context) GetEntrypointData(c *gin.Context) { // @Router /contract/{network}/{address}/entrypoints/schema [get] func (ctx *Context) GetEntrypointSchema(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var esReq entrypointSchemaRequest - if err := c.BindQuery(&esReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&esReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - metadata, err := getParameterMetadata(ctx.ES, req.Address, req.Network) - if handleError(c, err, 0) { + metadata, err := ctx.getParameterMetadata(req.Address, req.Network) + if ctx.handleError(c, err, 0) { return } entrypoints, err := docstring.GetEntrypoints(metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -141,14 +140,14 @@ func (ctx *Context) GetEntrypointSchema(c *gin.Context) { schema.EntrypointType = entrypoint schema.Schema, err = jsonschema.Create(entrypoint.BinPath, metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } if esReq.FillType != "latest" { continue } - op, err := ctx.ES.GetOperations( + op, err := ctx.Operations.Get( map[string]interface{}{ "network": req.Network, "destination": req.Address, @@ -158,7 +157,7 @@ func (ctx *Context) GetEntrypointSchema(c *gin.Context) { 1, true, ) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } if len(op) != 1 { @@ -169,7 +168,7 @@ func (ctx *Context) GetEntrypointSchema(c 
*gin.Context) { parameters = parameters.Get("value") } schema.DefaultModel = make(jsonschema.DefaultModel) - if err := schema.DefaultModel.FillForEntrypoint(parameters, metadata, esReq.EntrypointName); handleError(c, err, 0) { + if err := schema.DefaultModel.FillForEntrypoint(parameters, metadata, esReq.EntrypointName); ctx.handleError(c, err, 0) { return } } @@ -178,7 +177,7 @@ func (ctx *Context) GetEntrypointSchema(c *gin.Context) { } func (ctx *Context) buildEntrypointMicheline(network, address, binPath string, data map[string]interface{}, needValidate bool) (gjson.Result, error) { - metadata, err := getParameterMetadata(ctx.ES, address, network) + metadata, err := ctx.getParameterMetadata(address, network) if err != nil { return gjson.Result{}, err } @@ -186,13 +185,13 @@ func (ctx *Context) buildEntrypointMicheline(network, address, binPath string, d return metadata.BuildEntrypointMicheline(binPath, data, needValidate) } -func getParameterMetadata(es elastic.IElastic, address, network string) (meta.Metadata, error) { - state, err := es.GetLastBlock(network) +func (ctx *Context) getParameterMetadata(address, network string) (meta.Metadata, error) { + state, err := ctx.Blocks.Last(network) if err != nil { return nil, err } - metadata, err := meta.GetMetadata(es, address, consts.PARAMETER, state.Protocol) + metadata, err := meta.GetMetadata(ctx.Schema, address, consts.PARAMETER, state.Protocol) if err != nil { return nil, err } @@ -200,13 +199,13 @@ func getParameterMetadata(es elastic.IElastic, address, network string) (meta.Me return metadata, nil } -func getStorageMetadata(es elastic.IElastic, address, network string) (meta.Metadata, error) { - state, err := es.GetLastBlock(network) +func (ctx *Context) getStorageMetadata(address, network string) (meta.Metadata, error) { + state, err := ctx.Blocks.Last(network) if err != nil { return nil, err } - metadata, err := meta.GetMetadata(es, address, consts.STORAGE, state.Protocol) + metadata, err := 
meta.GetMetadata(ctx.Schema, address, consts.STORAGE, state.Protocol) if err != nil { return nil, err } diff --git a/cmd/api/handlers/error.go b/cmd/api/handlers/error.go index 3dcc8a51c..d707969e3 100644 --- a/cmd/api/handlers/error.go +++ b/cmd/api/handlers/error.go @@ -4,12 +4,11 @@ import ( "errors" "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" sentrygin "github.com/getsentry/sentry-go/gin" "github.com/gin-gonic/gin" ) -func handleError(c *gin.Context, err error, code int) bool { +func (ctx *Context) handleError(c *gin.Context, err error, code int) bool { if err == nil { return false } @@ -18,7 +17,7 @@ func handleError(c *gin.Context, err error, code int) bool { case http.StatusUnauthorized: err = errors.New("Invalid authentication") case 0: - code = getErrorCode(err) + code = ctx.getErrorCode(err) if code == http.StatusInternalServerError { if hub := sentrygin.GetHubFromContext(c); hub != nil { @@ -31,8 +30,8 @@ func handleError(c *gin.Context, err error, code int) bool { return true } -func getErrorCode(err error) int { - if elastic.IsRecordNotFound(err) { +func (ctx *Context) getErrorCode(err error) int { + if ctx.Storage.IsRecordNotFound(err) { return http.StatusNotFound } return http.StatusInternalServerError diff --git a/cmd/api/handlers/events.go b/cmd/api/handlers/events.go index 18038251c..13bdd2718 100644 --- a/cmd/api/handlers/events.go +++ b/cmd/api/handlers/events.go @@ -8,9 +8,10 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/elastic/core" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" 
"github.com/gin-gonic/gin" "github.com/tidwall/gjson" ) @@ -24,17 +25,17 @@ func (ctx *Context) GetEvents(c *gin.Context) { } var pageReq pageableRequest - if err := c.BindQuery(&pageReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&pageReq); ctx.handleError(c, err, http.StatusBadRequest) { return } subscriptions, err := ctx.DB.ListSubscriptions(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } events, err := ctx.getEvents(subscriptions, pageReq.Size, pageReq.Offset) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -50,22 +51,22 @@ func (ctx *Context) GetMempoolEvents(c *gin.Context) { } subscriptions, err := ctx.DB.ListSubscriptions(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } events, err := ctx.getMempoolEvents(subscriptions) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, events) } -func (ctx *Context) getEvents(subscriptions []database.Subscription, size, offset int64) ([]elastic.Event, error) { - subs := make([]elastic.SubscriptionRequest, len(subscriptions)) +func (ctx *Context) getEvents(subscriptions []database.Subscription, size, offset int64) ([]models.Event, error) { + subs := make([]models.SubscriptionRequest, len(subscriptions)) for i := range subscriptions { - subs[i] = elastic.SubscriptionRequest{ + subs[i] = models.SubscriptionRequest{ Address: subscriptions[i].Address, Network: subscriptions[i].Network, Alias: subscriptions[i].Alias, @@ -80,20 +81,20 @@ func (ctx *Context) getEvents(subscriptions []database.Subscription, size, offse } if helpers.IsContract(subscriptions[i].Address) { - contract := models.NewEmptyContract(subscriptions[i].Network, subscriptions[i].Address) - if err := ctx.ES.GetByID(&contract); err != nil { - return []elastic.Event{}, err + contract := contract.NewEmptyContract(subscriptions[i].Network, subscriptions[i].Address) + if err := 
ctx.Storage.GetByID(&contract); err != nil { + return []models.Event{}, err } subs[i].Hash = contract.Hash subs[i].ProjectID = contract.ProjectID } } - return ctx.ES.GetEvents(subs, size, offset) + return ctx.Storage.GetEvents(subs, size, offset) } -func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]elastic.Event, error) { - events := make([]elastic.Event, 0) +func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]models.Event, error) { + events := make([]models.Event, 0) for _, sub := range subscriptions { if sub.WatchMask&WatchMempool == 0 { @@ -113,9 +114,9 @@ func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]e continue } - aliases, err := ctx.ES.GetAliasesMap(sub.Network) + aliases, err := ctx.TZIP.GetAliasesMap(sub.Network) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return nil, err } aliases = make(map[string]string) @@ -127,7 +128,7 @@ func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]e status = "pending" //nolint } - op := elastic.EventOperation{ + op := core.EventOperation{ Network: sub.Network, Hash: item.Body.Hash, Status: status, @@ -148,7 +149,7 @@ func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]e if helpers.IsContract(op.Destination) && item.Body.Protocol != "" { if params := gjson.ParseBytes(item.Body.Parameters); params.Exists() { - metadata, err := meta.GetMetadata(ctx.ES, op.Destination, consts.PARAMETER, item.Body.Protocol) + metadata, err := meta.GetMetadata(ctx.Schema, op.Destination, consts.PARAMETER, item.Body.Protocol) if err != nil { return events, err } @@ -162,8 +163,8 @@ func (ctx *Context) getMempoolEvents(subscriptions []database.Subscription) ([]e } } - event := elastic.Event{ - Type: elastic.EventTypeMempool, + event := models.Event{ + Type: models.EventTypeMempool, Address: sub.Address, Network: sub.Network, Alias: sub.Alias, diff --git 
a/cmd/api/handlers/fork.go b/cmd/api/handlers/fork.go index 0916e2274..8580c4dc1 100644 --- a/cmd/api/handlers/fork.go +++ b/cmd/api/handlers/fork.go @@ -12,7 +12,7 @@ import ( // ForkContract - func (ctx *Context) ForkContract(c *gin.Context) { var req forkRequest - if err := c.BindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } response, err := ctx.buildStorageDataFromForkRequest(req) @@ -21,7 +21,7 @@ func (ctx *Context) ForkContract(c *gin.Context) { if errors.As(err, &meta.ValidationError{}) || errors.As(err, &meta.RequiredError{}) { code = http.StatusBadRequest } - handleError(c, err, code) + ctx.handleError(c, err, code) return } c.JSON(http.StatusOK, response) @@ -48,7 +48,7 @@ func (ctx *Context) buildStorageDataFromForkRequest(req forkRequest) (gin.H, err if err != nil { return nil, err } - metadata, err = getStorageMetadata(ctx.ES, req.Address, req.Network) + metadata, err = ctx.getStorageMetadata(req.Address, req.Network) if err != nil { return nil, err } diff --git a/cmd/api/handlers/mempool.go b/cmd/api/handlers/mempool.go index 4761648eb..420b81ee6 100644 --- a/cmd/api/handlers/mempool.go +++ b/cmd/api/handlers/mempool.go @@ -8,7 +8,6 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/tzkt" "github.com/gin-gonic/gin" @@ -30,7 +29,7 @@ import ( // @Router /contract/{network}/{address}/mempool [get] func (ctx *Context) GetMempool(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { 
return } @@ -41,12 +40,12 @@ func (ctx *Context) GetMempool(c *gin.Context) { } res, err := api.GetMempool(req.Address) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } ret, err := ctx.prepareMempoolOperations(res, req.Address, req.Network) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -59,9 +58,9 @@ func (ctx *Context) prepareMempoolOperations(res []tzkt.MempoolOperation, addres return ret, nil } - aliases, err := ctx.ES.GetAliasesMap(network) + aliases, err := ctx.TZIP.GetAliasesMap(network) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return nil, err } aliases = make(map[string]string) @@ -108,7 +107,7 @@ func (ctx *Context) prepareMempoolOperations(res []tzkt.MempoolOperation, addres if helpers.IsContract(op.Destination) && op.Protocol != "" { params := gjson.ParseBytes(res[i].Body.Parameters) if params.Exists() { - metadata, err := meta.GetMetadata(ctx.ES, address, consts.PARAMETER, op.Protocol) + metadata, err := meta.GetMetadata(ctx.Schema, address, consts.PARAMETER, op.Protocol) if err != nil { return nil, err } diff --git a/cmd/api/handlers/metadata.go b/cmd/api/handlers/metadata.go index b709417ff..122871bd9 100644 --- a/cmd/api/handlers/metadata.go +++ b/cmd/api/handlers/metadata.go @@ -20,7 +20,7 @@ func (ctx *Context) UploadMetadata(c *gin.Context) { } body, err := ioutil.ReadAll(c.Request.Body) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -32,7 +32,7 @@ func (ctx *Context) UploadMetadata(c *gin.Context) { schemaLoader := gojsonschema.NewStringLoader(ctx.TzipSchema) documentLoader := gojsonschema.NewStringLoader(string(body)) result, err := gojsonschema.Validate(schemaLoader, documentLoader) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -42,7 +42,7 @@ func (ctx *Context) UploadMetadata(c *gin.Context) { } 
response, err := ctx.Pinata.PinJSONToIPFS(bytes.NewBuffer(body)) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -52,7 +52,7 @@ func (ctx *Context) UploadMetadata(c *gin.Context) { // ListMetadata - func (ctx *Context) ListMetadata(c *gin.Context) { list, err := ctx.Pinata.PinList() - if handleError(c, err, http.StatusInternalServerError) { + if ctx.handleError(c, err, http.StatusInternalServerError) { return } @@ -62,11 +62,11 @@ func (ctx *Context) ListMetadata(c *gin.Context) { // DeleteMetadata - func (ctx *Context) DeleteMetadata(c *gin.Context) { var req metadataRequest - if err := c.BindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - if err := ctx.Pinata.UnPin(req.Hash); handleError(c, err, http.StatusBadRequest) { + if err := ctx.Pinata.UnPin(req.Hash); ctx.handleError(c, err, http.StatusBadRequest) { return } diff --git a/cmd/api/handlers/migrations.go b/cmd/api/handlers/migrations.go index f679456a7..e6e048370 100644 --- a/cmd/api/handlers/migrations.go +++ b/cmd/api/handlers/migrations.go @@ -3,7 +3,7 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" "github.com/gin-gonic/gin" ) @@ -22,19 +22,19 @@ import ( // @Router /contract/{network}/{address}/migrations [get] func (ctx *Context) GetContractMigrations(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - migrations, err := ctx.ES.GetMigrations(req.Network, req.Address) - if handleError(c, err, 0) { + migrations, err := ctx.Migrations.Get(req.Network, req.Address) + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, prepareMigrations(migrations)) } -func 
prepareMigrations(data []models.Migration) []Migration { +func prepareMigrations(data []migration.Migration) []Migration { result := make([]Migration, len(data)) for i := range data { result[i] = Migration{ diff --git a/cmd/api/handlers/oauth.go b/cmd/api/handlers/oauth.go index 42ede788d..5e50a47e8 100644 --- a/cmd/api/handlers/oauth.go +++ b/cmd/api/handlers/oauth.go @@ -12,7 +12,7 @@ import ( // OauthLogin - func (ctx *Context) OauthLogin(c *gin.Context) { var params OauthParams - if err := c.BindUri(¶ms); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(¶ms); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -21,7 +21,7 @@ func (ctx *Context) OauthLogin(c *gin.Context) { if provider, ok := ctx.OAUTH.Providers[params.Provider]; ok { redirectURL = provider.AuthCodeURL(ctx.OAUTH.State) } else { - handleError(c, fmt.Errorf("invalid provider %v", params.Provider), http.StatusBadRequest) + ctx.handleError(c, fmt.Errorf("invalid provider %v", params.Provider), http.StatusBadRequest) return } @@ -31,17 +31,17 @@ func (ctx *Context) OauthLogin(c *gin.Context) { // OauthCallback - func (ctx *Context) OauthCallback(c *gin.Context) { var params OauthParams - if err := c.BindUri(¶ms); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(¶ms); ctx.handleError(c, err, http.StatusBadRequest) { return } var req OauthRequest - if err := c.ShouldBind(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBind(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } if req.State != ctx.OAUTH.State { - handleError(c, errors.Errorf("invalid oauth state"), http.StatusBadRequest) + ctx.handleError(c, errors.Errorf("invalid oauth state"), http.StatusBadRequest) return } @@ -51,20 +51,20 @@ func (ctx *Context) OauthCallback(c *gin.Context) { if provider, ok := ctx.OAUTH.Providers[params.Provider]; ok { user, err = provider.AuthUser(req.Code) } else { - handleError(c, fmt.Errorf("invalid provider %v", params.Provider), 
http.StatusBadRequest) + ctx.handleError(c, fmt.Errorf("invalid provider %v", params.Provider), http.StatusBadRequest) return } - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } - if err := ctx.DB.GetOrCreateUser(&user, user.Token); handleError(c, err, http.StatusBadRequest) { + if err := ctx.DB.GetOrCreateUser(&user, user.Token); ctx.handleError(c, err, http.StatusBadRequest) { return } jwt, err := ctx.OAUTH.MakeJWT(user.ID) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } diff --git a/cmd/api/handlers/operations.go b/cmd/api/handlers/operations.go index a921a741d..7e2b3888d 100644 --- a/cmd/api/handlers/operations.go +++ b/cmd/api/handlers/operations.go @@ -13,9 +13,9 @@ import ( formattererror "github.com/baking-bad/bcdhub/internal/contractparser/formatter_error" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/tzkt" "github.com/gin-gonic/gin" "github.com/pkg/errors" @@ -44,23 +44,23 @@ import ( // @Router /contract/{network}/{address}/operations [get] func (ctx *Context) GetContractOperations(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var filtersReq operationsRequest - if err := c.BindQuery(&filtersReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&filtersReq); ctx.handleError(c, err, 
http.StatusBadRequest) { return } filters := prepareFilters(filtersReq) - ops, err := ctx.ES.GetOperationsForContract(req.Network, req.Address, filtersReq.Size, filters) - if handleError(c, err, 0) { + ops, err := ctx.Operations.GetByContract(req.Network, req.Address, filtersReq.Size, filters) + if ctx.handleError(c, err, 0) { return } - resp, err := PrepareOperations(ctx.ES, ops.Operations, filtersReq.WithStorageDiff) - if handleError(c, err, 0) { + resp, err := ctx.PrepareOperations(ops.Operations, filtersReq.WithStorageDiff) + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, OperationResponse{ @@ -83,23 +83,23 @@ func (ctx *Context) GetContractOperations(c *gin.Context) { // @Router /opg/{hash} [get] func (ctx *Context) GetOperation(c *gin.Context) { var req OPGRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var queryReq opgRequest - if err := c.BindQuery(&queryReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&queryReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - op, err := ctx.ES.GetOperations( + op, err := ctx.Operations.Get( map[string]interface{}{ "hash": req.Hash, }, 0, true, ) - if !elastic.IsRecordNotFound(err) && handleError(c, err, 0) { + if !ctx.Storage.IsRecordNotFound(err) && ctx.handleError(c, err, 0) { return } @@ -117,8 +117,8 @@ func (ctx *Context) GetOperation(c *gin.Context) { return } - resp, err := PrepareOperations(ctx.ES, op, true) - if handleError(c, err, 0) { + resp, err := ctx.PrepareOperations(op, true) + if ctx.handleError(c, err, 0) { return } @@ -139,21 +139,21 @@ func (ctx *Context) GetOperation(c *gin.Context) { // @Router /operation/{id}/error_location [get] func (ctx *Context) GetOperationErrorLocation(c *gin.Context) { var req getOperationByIDRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); 
ctx.handleError(c, err, http.StatusBadRequest) { return } - operation := models.Operation{ID: req.ID} - if err := ctx.ES.GetByID(&operation); handleError(c, err, 0) { + operation := operation.Operation{ID: req.ID} + if err := ctx.Storage.GetByID(&operation); ctx.handleError(c, err, 0) { return } if !cerrors.HasScriptRejectedError(operation.Errors) { - handleError(c, errors.Errorf("No reject script error in operation"), http.StatusBadRequest) + ctx.handleError(c, errors.Errorf("No reject script error in operation"), http.StatusBadRequest) return } response, err := ctx.getErrorLocation(operation, 2) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, response) @@ -242,7 +242,7 @@ func formatErrors(errs []*cerrors.Error, op *Operation) error { return nil } -func prepareOperation(es elastic.IElastic, operation models.Operation, bmd []models.BigMapDiff, withStorageDiff bool) (Operation, error) { +func (ctx *Context) prepareOperation(operation operation.Operation, bmd []bigmapdiff.BigMapDiff, withStorageDiff bool) (Operation, error) { var op Operation op.FromModel(operation) @@ -254,19 +254,19 @@ func prepareOperation(es elastic.IElastic, operation models.Operation, bmd []mod return op, err } if withStorageDiff { - if operation.DeffatedStorage != "" && strings.HasPrefix(op.Destination, "KT") && op.Status == consts.Applied { - if err := setStorageDiff(es, op.Destination, operation.DeffatedStorage, &op, bmd); err != nil { + if operation.DeffatedStorage != "" && operation.IsCall() && operation.IsApplied() { + if err := ctx.setStorageDiff(op.Destination, operation.DeffatedStorage, &op, bmd); err != nil { return op, err } } } - if op.Kind != consts.Transaction { + if !operation.IsTransaction() { return op, nil } - if strings.HasPrefix(op.Destination, "KT") && !cerrors.HasParametersError(op.Errors) { - if err := setParameters(es, operation.Parameters, &op); err != nil { + if helpers.IsContract(op.Destination) && 
!cerrors.HasParametersError(op.Errors) { + if err := ctx.setParameters(operation.Parameters, &op); err != nil { return op, err } } @@ -275,20 +275,20 @@ func prepareOperation(es elastic.IElastic, operation models.Operation, bmd []mod } // PrepareOperations - -func PrepareOperations(es elastic.IElastic, ops []models.Operation, withStorageDiff bool) ([]Operation, error) { +func (ctx *Context) PrepareOperations(ops []operation.Operation, withStorageDiff bool) ([]Operation, error) { resp := make([]Operation, len(ops)) for i := 0; i < len(ops); i++ { - var bmd []models.BigMapDiff + var bmd []bigmapdiff.BigMapDiff var err error if withStorageDiff { - bmd, err = es.GetBigMapDiffsUniqueByOperationID(ops[i].ID) + bmd, err = ctx.BigMapDiffs.GetUniqueByOperationID(ops[i].ID) if err != nil { return nil, err } } - op, err := prepareOperation(es, ops[i], bmd, withStorageDiff) + op, err := ctx.prepareOperation(ops[i], bmd, withStorageDiff) if err != nil { return nil, err } @@ -297,8 +297,8 @@ func PrepareOperations(es elastic.IElastic, ops []models.Operation, withStorageD return resp, nil } -func setParameters(es elastic.IElastic, parameters string, op *Operation) error { - metadata, err := meta.GetMetadata(es, op.Destination, consts.PARAMETER, op.Protocol) +func (ctx *Context) setParameters(parameters string, op *Operation) error { + metadata, err := meta.GetMetadata(ctx.Schema, op.Destination, consts.PARAMETER, op.Protocol) if err != nil { return nil } @@ -314,12 +314,12 @@ func setParameters(es elastic.IElastic, parameters string, op *Operation) error return nil } -func setStorageDiff(es elastic.IElastic, address, storage string, op *Operation, bmd []models.BigMapDiff) error { - metadata, err := meta.GetContractMetadata(es, address) +func (ctx *Context) setStorageDiff(address, storage string, op *Operation, bmd []bigmapdiff.BigMapDiff) error { + metadata, err := meta.GetContractMetadata(ctx.Schema, address) if err != nil { return err } - storageDiff, err := getStorageDiff(es, 
bmd, address, storage, metadata, false, op) + storageDiff, err := ctx.getStorageDiff(bmd, address, storage, metadata, false, op) if err != nil { return err } @@ -327,21 +327,21 @@ func setStorageDiff(es elastic.IElastic, address, storage string, op *Operation, return nil } -func getStorageDiff(es elastic.IElastic, bmd []models.BigMapDiff, address, storage string, metadata *meta.ContractMetadata, isSimulating bool, op *Operation) (interface{}, error) { +func (ctx *Context) getStorageDiff(bmd []bigmapdiff.BigMapDiff, address, storage string, metadata *meta.ContractMetadata, isSimulating bool, op *Operation) (interface{}, error) { var prevStorage *newmiguel.Node var prevDeffatedStorage string - prev, err := es.GetLastOperation(address, op.Network, op.IndexedTime) + prev, err := ctx.Operations.Last(address, op.Network, op.IndexedTime) if err == nil { - prevBmd, err := getPrevBmd(es, bmd, op.IndexedTime, op.Destination) + prevBmd, err := ctx.getPrevBmd(bmd, op.IndexedTime, op.Destination) if err != nil { return nil, err } var exDeffatedStorage string - exOp, err := es.GetLastOperation(address, prev.Network, prev.IndexedTime) + exOp, err := ctx.Operations.Last(address, prev.Network, prev.IndexedTime) if err == nil { exDeffatedStorage = exOp.DeffatedStorage - } else if !elastic.IsRecordNotFound(err) { + } else if !ctx.Storage.IsRecordNotFound(err) { return nil, err } @@ -351,7 +351,7 @@ func getStorageDiff(es elastic.IElastic, bmd []models.BigMapDiff, address, stora } prevDeffatedStorage = prev.DeffatedStorage } else { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return nil, err } prevStorage = nil @@ -369,7 +369,7 @@ func getStorageDiff(es elastic.IElastic, bmd []models.BigMapDiff, address, stora return currentStorage, nil } -func getEnrichStorageMiguel(bmd []models.BigMapDiff, protocol, storage, prevStorage string, metadata *meta.ContractMetadata, isSimulating bool) (*newmiguel.Node, error) { +func getEnrichStorageMiguel(bmd 
[]bigmapdiff.BigMapDiff, protocol, storage, prevStorage string, metadata *meta.ContractMetadata, isSimulating bool) (*newmiguel.Node, error) { store, err := enrichStorage(storage, prevStorage, bmd, protocol, false, isSimulating) if err != nil { return nil, err @@ -381,7 +381,7 @@ func getEnrichStorageMiguel(bmd []models.BigMapDiff, protocol, storage, prevStor return newmiguel.MichelineToMiguel(store, storageMetadata) } -func enrichStorage(s, prevStorage string, bmd []models.BigMapDiff, protocol string, skipEmpty, isSimulating bool) (gjson.Result, error) { +func enrichStorage(s, prevStorage string, bmd []bigmapdiff.BigMapDiff, protocol string, skipEmpty, isSimulating bool) (gjson.Result, error) { if len(bmd) == 0 { return gjson.Parse(s), nil } @@ -394,11 +394,11 @@ func enrichStorage(s, prevStorage string, bmd []models.BigMapDiff, protocol stri return parser.Enrich(s, prevStorage, bmd, skipEmpty, true) } -func getPrevBmd(es elastic.IElastic, bmd []models.BigMapDiff, indexedTime int64, address string) ([]models.BigMapDiff, error) { +func (ctx *Context) getPrevBmd(bmd []bigmapdiff.BigMapDiff, indexedTime int64, address string) ([]bigmapdiff.BigMapDiff, error) { if len(bmd) == 0 { return nil, nil } - return es.GetBigMapDiffsPrevious(bmd, indexedTime, address) + return ctx.BigMapDiffs.Previous(bmd, indexedTime, address) } func (ctx *Context) prepareMempoolOperation(item tzkt.MempoolOperation, network string) *Operation { @@ -430,9 +430,9 @@ func (ctx *Context) prepareMempoolOperation(item tzkt.MempoolOperation, network RawMempool: string(item.Raw), } - aliases, err := ctx.ES.GetAliasesMap(network) + aliases, err := ctx.TZIP.GetAliasesMap(network) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return &op } } else { @@ -461,7 +461,7 @@ func (ctx *Context) prepareMempoolOperation(item tzkt.MempoolOperation, network } func (ctx *Context) buildOperationParameters(params gjson.Result, op *Operation) { - metadata, err := 
meta.GetMetadata(ctx.ES, op.Destination, consts.PARAMETER, op.Protocol) + metadata, err := meta.GetMetadata(ctx.Schema, op.Destination, consts.PARAMETER, op.Protocol) if err != nil { return } @@ -479,7 +479,7 @@ func (ctx *Context) buildOperationParameters(params gjson.Result, op *Operation) } } -func (ctx *Context) getErrorLocation(operation models.Operation, window int) (GetErrorLocationResponse, error) { +func (ctx *Context) getErrorLocation(operation operation.Operation, window int) (GetErrorLocationResponse, error) { rpc, err := ctx.GetRPC(operation.Network) if err != nil { return GetErrorLocationResponse{}, err diff --git a/cmd/api/handlers/project.go b/cmd/api/handlers/project.go index f334b15f4..f751ba331 100644 --- a/cmd/api/handlers/project.go +++ b/cmd/api/handlers/project.go @@ -3,8 +3,7 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/gin-gonic/gin" ) @@ -25,28 +24,28 @@ import ( // @Router /contract/{network}/{address}/same [get] func (ctx *Context) GetSameContracts(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var pageReq pageableRequest - if err := c.BindQuery(&pageReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&pageReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - contract := models.NewEmptyContract(req.Network, req.Address) - err := ctx.ES.GetByID(&contract) - if handleError(c, err, 0) { + contract := contract.NewEmptyContract(req.Network, req.Address) + err := ctx.Storage.GetByID(&contract) + if ctx.handleError(c, err, 0) { return } - sameContracts, err := ctx.ES.GetSameContracts(contract, pageReq.Size, pageReq.Offset) + sameContracts, err := 
ctx.Contracts.GetSameContracts(contract, pageReq.Size, pageReq.Offset) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusOK, []interface{}{}) return } - handleError(c, err, 0) + ctx.handleError(c, err, 0) return } @@ -73,23 +72,23 @@ func (ctx *Context) GetSameContracts(c *gin.Context) { // @Router /contract/{network}/{address}/similar [get] func (ctx *Context) GetSimilarContracts(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var pageReq pageableRequest - if err := c.BindQuery(&pageReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&pageReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - contract := models.NewEmptyContract(req.Network, req.Address) - err := ctx.ES.GetByID(&contract) - if handleError(c, err, 0) { + contract := contract.NewEmptyContract(req.Network, req.Address) + err := ctx.Storage.GetByID(&contract) + if ctx.handleError(c, err, 0) { return } - similar, total, err := ctx.ES.GetSimilarContracts(contract, pageReq.Size, pageReq.Offset) - if handleError(c, err, 0) { + similar, total, err := ctx.Contracts.GetSimilarContracts(contract, pageReq.Size, pageReq.Offset) + if ctx.handleError(c, err, 0) { return } @@ -102,7 +101,7 @@ func (ctx *Context) GetSimilarContracts(c *gin.Context) { CodeDiffLeg{Address: contract.Address, Network: contract.Network}, CodeDiffLeg{Address: similar[i].Address, Network: similar[i].Network}, ) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } response.Contracts[i].FromModel(similar[i], diff) diff --git a/cmd/api/handlers/repos.go b/cmd/api/handlers/repos.go index ac85ea2d8..a4c56c8fa 100644 --- a/cmd/api/handlers/repos.go +++ b/cmd/api/handlers/repos.go @@ -18,12 +18,12 @@ func (ctx *Context) ListPublicAccounts(c *gin.Context) { } user, err := 
ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } orgs, err := getPublicOrgs(user) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -57,7 +57,7 @@ func getPublicOrgs(user *database.User) ([]providers.Account, error) { // ListPublicRepos - func (ctx *Context) ListPublicRepos(c *gin.Context) { var req publicReposRequest - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -68,12 +68,12 @@ func (ctx *Context) ListPublicRepos(c *gin.Context) { } user, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } repos, err := getPublicRepos(req.Login, user) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -106,17 +106,17 @@ func (ctx *Context) ListPublicRefs(c *gin.Context) { } user, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } var req publicRefsRequest - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } refs, err := getPublicRefs(user, req.Owner, req.Repo) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/responses.go b/cmd/api/handlers/responses.go index 7f7953828..c0962377e 100644 --- a/cmd/api/handlers/responses.go +++ b/cmd/api/handlers/responses.go @@ -6,9 +6,14 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" "github.com/baking-bad/bcdhub/internal/contractparser/docstring" "github.com/baking-bad/bcdhub/internal/contractparser/formatter" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/jsonschema" - "github.com/baking-bad/bcdhub/internal/models" + 
"github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/tidwall/gjson" ) @@ -71,7 +76,7 @@ func (o *Operation) ParseJSON(raw gjson.Result) { } // FromModel - -func (o *Operation) FromModel(operation models.Operation) { +func (o *Operation) FromModel(operation operation.Operation) { o.ID = operation.ID o.Protocol = operation.Protocol o.Hash = operation.Hash @@ -102,12 +107,12 @@ func (o *Operation) FromModel(operation models.Operation) { } // ToModel - -func (o *Operation) ToModel() models.Operation { - var result *models.OperationResult +func (o *Operation) ToModel() operation.Operation { + var result *operation.Result if o.Result != nil { result = o.Result.ToModel() } - return models.Operation{ + return operation.Operation{ ID: o.ID, Protocol: o.Protocol, Hash: o.Hash, @@ -147,7 +152,7 @@ type OperationResult struct { } // FromModel - -func (r *OperationResult) FromModel(result *models.OperationResult) { +func (r *OperationResult) FromModel(result *operation.Result) { if result == nil || r == nil { return } @@ -158,12 +163,12 @@ func (r *OperationResult) FromModel(result *models.OperationResult) { } // ToModel - -func (r *OperationResult) ToModel() *models.OperationResult { +func (r *OperationResult) ToModel() *operation.Result { if r == nil { return nil } - return &models.OperationResult{ + return &operation.Result{ AllocatedDestinationContract: r.AllocatedDestinationContract, ConsumedGas: r.ConsumedGas, PaidStorageSizeDiff: r.PaidStorageSizeDiff, @@ -208,7 +213,7 @@ type Contract struct { } // FromModel - -func (c *Contract) FromModel(contract 
models.Contract) { +func (c *Contract) FromModel(contract contract.Contract) { c.Address = contract.Address c.Alias = contract.Alias c.Annotations = contract.Annotations @@ -434,7 +439,7 @@ type Alias struct { } // FromModel - -func (a *Alias) FromModel(alias *models.TZIP) { +func (a *Alias) FromModel(alias *tzip.TZIP) { a.Alias = alias.Name a.Address = alias.Address a.Network = alias.Network @@ -451,7 +456,7 @@ type Protocol struct { } // FromModel - -func (p *Protocol) FromModel(protocol models.Protocol) { +func (p *Protocol) FromModel(protocol protocol.Protocol) { p.Hash = protocol.Hash p.Network = protocol.Network p.StartLevel = protocol.StartLevel @@ -471,7 +476,7 @@ type Block struct { } // FromModel - -func (b *Block) FromModel(block models.Block) { +func (b *Block) FromModel(block block.Block) { b.Network = block.Network b.Hash = block.Hash b.Level = block.Level @@ -489,7 +494,7 @@ type LightContract struct { } // FromModel - -func (c *LightContract) FromModel(light elastic.LightContract) { +func (c *LightContract) FromModel(light contract.Light) { c.Address = light.Address c.Network = light.Network c.Deployed = light.Deployed @@ -510,7 +515,7 @@ type SimilarContract struct { } // FromModel - -func (c *SimilarContract) FromModel(similar elastic.SimilarContract, diff CodeDiffResponse) { +func (c *SimilarContract) FromModel(similar contract.Similar, diff CodeDiffResponse) { var contract Contract contract.FromModel(*similar.Contract) c.Contract = &contract @@ -527,7 +532,7 @@ type SameContractsResponse struct { } // FromModel - -func (c *SameContractsResponse) FromModel(same elastic.SameContractsResponse) { +func (c *SameContractsResponse) FromModel(same contract.SameResponse) { c.Count = same.Count c.Contracts = make([]Contract, len(same.Contracts)) @@ -559,7 +564,7 @@ type BigMapHistoryItem struct { // Transfer - type Transfer struct { - *models.Transfer + *transfer.Transfer Token *TokenMetadata `json:"token,omitempty"` } @@ -619,7 +624,7 @@ type Screenshot 
struct { // Token - type Token struct { TokenMetadata - elastic.TokenSupply + transfer.TokenSupply } // AccountInfo - @@ -654,7 +659,7 @@ type TokenMetadata struct { } // TokenMetadataFromElasticModel - -func TokenMetadataFromElasticModel(model elastic.TokenMetadata) (tm TokenMetadata) { +func TokenMetadataFromElasticModel(model tzip.TokenMetadata) (tm TokenMetadata) { tm.TokenID = model.TokenID tm.Symbol = model.Symbol tm.Name = model.Name @@ -669,8 +674,8 @@ func TokenMetadataFromElasticModel(model elastic.TokenMetadata) (tm TokenMetadat // DomainsResponse - type DomainsResponse struct { - Domains []models.TezosDomain `json:"domains"` - Total int64 `json:"total"` + Domains []tezosdomain.TezosDomain `json:"domains"` + Total int64 `json:"total"` } // CountResponse - diff --git a/cmd/api/handlers/run_code.go b/cmd/api/handlers/run_code.go index 7a79654d1..4713ef034 100644 --- a/cmd/api/handlers/run_code.go +++ b/cmd/api/handlers/run_code.go @@ -10,7 +10,8 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/operations" "github.com/gin-gonic/gin" @@ -21,41 +22,41 @@ import ( // RunOperation - func (ctx *Context) RunOperation(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqRunOp runOperationRequest - if err := c.BindJSON(&reqRunOp); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&reqRunOp); ctx.handleError(c, err, 
http.StatusBadRequest) { return } rpc, err := ctx.GetRPC(req.Network) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } - state, err := ctx.ES.GetLastBlock(req.Network) - if handleError(c, err, 0) { + state, err := ctx.Blocks.Last(req.Network) + if ctx.handleError(c, err, 0) { return } parameters, err := ctx.buildEntrypointMicheline(req.Network, req.Address, reqRunOp.BinPath, reqRunOp.Data, true) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } if !parameters.Get("entrypoint").Exists() || !parameters.Get("value").Exists() { - handleError(c, errors.Errorf("Error occured while building parameters: %s", parameters.String()), 0) + ctx.handleError(c, errors.Errorf("Error occured while building parameters: %s", parameters.String()), 0) return } counter, err := rpc.GetCounter(reqRunOp.Source) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } - protocol, err := ctx.ES.GetProtocol(req.Network, "", -1) - if handleError(c, err, 0) { + protocol, err := ctx.Protocols.GetProtocol(req.Network, "", -1) + if ctx.handleError(c, err, 0) { return } @@ -71,7 +72,7 @@ func (ctx *Context) RunOperation(c *gin.Context) { reqRunOp.Amount, parameters, ) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -86,7 +87,7 @@ func (ctx *Context) RunOperation(c *gin.Context) { parser := operations.NewGroup(operations.NewParseParams( rpc, - ctx.ES, + ctx.Storage, ctx.BigMapDiffs, ctx.Blocks, ctx.TZIP, ctx.Schema, ctx.TokenBalances, operations.WithConstants(protocol.Constants), operations.WithHead(header), operations.WithInterfaces(ctx.Interfaces), @@ -95,32 +96,32 @@ func (ctx *Context) RunOperation(c *gin.Context) { )) parsedModels, err := parser.Parse(response) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } - operations := make([]*models.Operation, 0) - diffs := make([]*models.BigMapDiff, 0) + 
operations := make([]*operation.Operation, 0) + diffs := make([]*bigmapdiff.BigMapDiff, 0) for i := range parsedModels { switch val := parsedModels[i].(type) { - case *models.Operation: + case *operation.Operation: operations = append(operations, val) - case *models.BigMapDiff: + case *bigmapdiff.BigMapDiff: diffs = append(diffs, val) } } resp := make([]Operation, len(operations)) for i := range operations { - bmd := make([]models.BigMapDiff, 0) + bmd := make([]bigmapdiff.BigMapDiff, 0) for j := range diffs { if diffs[j].OperationID == operations[i].ID { bmd = append(bmd, *diffs[j]) } } - op, err := prepareOperation(ctx.ES, *operations[i], bmd, true) - if handleError(c, err, 0) { + op, err := ctx.prepareOperation(*operations[i], bmd, true) + if ctx.handleError(c, err, 0) { return } resp[i] = op @@ -150,36 +151,36 @@ func (ctx *Context) RunOperation(c *gin.Context) { // @Router /contract/{network}/{address}/entrypoints/trace [post] func (ctx *Context) RunCode(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqRunCode runCodeRequest - if err := c.BindJSON(&reqRunCode); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&reqRunCode); ctx.handleError(c, err, http.StatusBadRequest) { return } rpc, err := ctx.GetRPC(req.Network) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } - state, err := ctx.ES.GetLastBlock(req.Network) - if handleError(c, err, 0) { + state, err := ctx.Blocks.Last(req.Network) + if ctx.handleError(c, err, 0) { return } script, err := contractparser.GetContract(rpc, req.Address, req.Network, state.Protocol, ctx.SharePath, 0) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } input, err := ctx.buildEntrypointMicheline(req.Network, req.Address, reqRunCode.BinPath, reqRunCode.Data, true) - if 
handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } if !input.Get("entrypoint").Exists() || !input.Get("value").Exists() { - handleError(c, errors.Errorf("Error during build parameters: %s", input.String()), 0) + ctx.handleError(c, errors.Errorf("Error during build parameters: %s", input.String()), 0) return } @@ -187,12 +188,12 @@ func (ctx *Context) RunCode(c *gin.Context) { value := input.Get("value") storage, err := rpc.GetScriptStorageJSON(req.Address, 0) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } response, err := rpc.RunCode(script.Get("code"), storage, value, state.ChainID, reqRunCode.Source, reqRunCode.Sender, entrypoint, reqRunCode.Amount, reqRunCode.GasLimit) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } main := Operation{ @@ -210,14 +211,14 @@ func (ctx *Context) RunCode(c *gin.Context) { Entrypoint: entrypoint, } - if err := setParameters(ctx.ES, input.Raw, &main); handleError(c, err, 0) { + if err := ctx.setParameters(input.Raw, &main); ctx.handleError(c, err, 0) { return } - if err := ctx.setSimulateStorageDiff(response, &main); handleError(c, err, 0) { + if err := ctx.setSimulateStorageDiff(response, &main); ctx.handleError(c, err, 0) { return } operations, err := ctx.parseRunCodeResponse(response, &main) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -262,7 +263,7 @@ func (ctx *Context) parseAppliedRunCode(response gjson.Result, main *Operation) op.Protocol = main.Protocol op.Level = main.Level op.Internal = true - if err := setParameters(ctx.ES, item.Get("parameters").Raw, &op); err != nil { + if err := ctx.setParameters(item.Get("parameters").Raw, &op); err != nil { return nil, err } if err := ctx.setSimulateStorageDiff(item, &op); err != nil { @@ -273,14 +274,14 @@ func (ctx *Context) parseAppliedRunCode(response gjson.Result, main *Operation) return operations, nil } -func (ctx *Context) 
parseBigMapDiffs(response gjson.Result, metadata meta.Metadata, operation *Operation) ([]models.BigMapDiff, error) { +func (ctx *Context) parseBigMapDiffs(response gjson.Result, metadata meta.Metadata, operation *Operation) ([]bigmapdiff.BigMapDiff, error) { rpc, err := ctx.GetRPC(operation.Network) if err != nil { return nil, err } model := operation.ToModel() - parser := storage.NewSimulate(rpc, ctx.ES) + parser := storage.NewSimulate(rpc, ctx.BigMapDiffs) rs := storage.RichStorage{Empty: true} switch operation.Kind { @@ -295,9 +296,9 @@ func (ctx *Context) parseBigMapDiffs(response gjson.Result, metadata meta.Metada if rs.Empty { return nil, nil } - bmd := make([]models.BigMapDiff, len(rs.Models)) + bmd := make([]bigmapdiff.BigMapDiff, len(rs.Models)) for i := range rs.Models { - if val, ok := rs.Models[i].(*models.BigMapDiff); ok { + if val, ok := rs.Models[i].(*bigmapdiff.BigMapDiff); ok { bmd[i] = *val } } @@ -309,7 +310,7 @@ func (ctx *Context) setSimulateStorageDiff(response gjson.Result, main *Operatio if storage == "" || !strings.HasPrefix(main.Destination, "KT") || main.Status != "applied" { return nil } - metadata, err := meta.GetContractMetadata(ctx.ES, main.Destination) + metadata, err := meta.GetContractMetadata(ctx.Schema, main.Destination) if err != nil { return err } @@ -321,7 +322,7 @@ func (ctx *Context) setSimulateStorageDiff(response gjson.Result, main *Operatio if err != nil { return err } - storageDiff, err := getStorageDiff(ctx.ES, bmd, main.Destination, storage, metadata, true, main) + storageDiff, err := ctx.getStorageDiff(bmd, main.Destination, storage, metadata, true, main) if err != nil { return err } diff --git a/cmd/api/handlers/search.go b/cmd/api/handlers/search.go index 5bd534e01..b898dfee0 100644 --- a/cmd/api/handlers/search.go +++ b/cmd/api/handlers/search.go @@ -7,9 +7,8 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/stringer" 
"github.com/baking-bad/bcdhub/internal/contractparser/unpack/domaintypes" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/elastic/search" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/gin-gonic/gin" ) @@ -29,13 +28,13 @@ import ( // @Param l query string false "Comma-separated list of languages for searching. Values: smartpy, liquidity, ligo, lorentz, michelson" // @Accept json // @Produce json -// @Success 200 {object} search.Result +// @Success 200 {object} models.Result // @Failure 400 {object} Error // @Failure 500 {object} Error // @Router /search [get] func (ctx *Context) Search(c *gin.Context) { var req searchRequest - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -45,12 +44,12 @@ func (ctx *Context) Search(c *gin.Context) { } filters := getSearchFilters(req) - result, err := ctx.ES.SearchByText(req.Text, int64(req.Offset), fields, filters, req.Grouping != 0) - if handleError(c, err, 0) { + result, err := ctx.Storage.SearchByText(req.Text, int64(req.Offset), fields, filters, req.Grouping != 0) + if ctx.handleError(c, err, 0) { return } result, err = postProcessing(result) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -91,13 +90,13 @@ func getSearchFilters(req searchRequest) map[string]interface{} { return filters } -func postProcessing(result search.Result) (search.Result, error) { +func postProcessing(result models.Result) (models.Result, error) { for i := range result.Items { - if result.Items[i].Type != elastic.DocBigMapDiff { + if result.Items[i].Type != models.DocBigMapDiff { continue } - bmd := result.Items[i].Body.(models.BigMapDiff) + bmd := result.Items[i].Body.(bigmapdiff.BigMapDiff) key, err := stringer.StringifyInterface(bmd.Key) if err 
!= nil { return result, err @@ -118,15 +117,15 @@ func postProcessing(result search.Result) (search.Result, error) { return result, nil } -func (ctx *Context) searchInMempool(q string) (search.Item, error) { +func (ctx *Context) searchInMempool(q string) (models.Item, error) { if _, err := domaintypes.DecodeOpgHash(q); err != nil { - return search.Item{}, err + return models.Item{}, err } operation := ctx.getOperationFromMempool(q) - return search.Item{ - Type: elastic.DocOperations, + return models.Item{ + Type: models.DocOperations, Value: operation.Hash, Body: operation, Highlights: map[string][]string{ diff --git a/cmd/api/handlers/stats.go b/cmd/api/handlers/stats.go index db3e3a331..2d54ef579 100644 --- a/cmd/api/handlers/stats.go +++ b/cmd/api/handlers/stats.go @@ -5,9 +5,9 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/protocol" "github.com/gin-gonic/gin" "github.com/pkg/errors" ) @@ -23,8 +23,8 @@ import ( // @Failure 500 {object} Error // @Router /stats [get] func (ctx *Context) GetStats(c *gin.Context) { - stats, err := ctx.ES.GetLastBlocks() - if handleError(c, err, 0) { + stats, err := ctx.Blocks.LastByNetworks() + if ctx.handleError(c, err, 0) { return } blocks := make([]Block, 0) @@ -53,20 +53,20 @@ func (ctx *Context) GetStats(c *gin.Context) { // @Router /stats/{network} [get] func (ctx *Context) GetNetworkStats(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var stats NetworkStats - counts, err := ctx.ES.GetNetworkCountStats(req.Network) - if handleError(c, err, 0) { + counts, err := 
ctx.Storage.GetNetworkCountStats(req.Network) + if ctx.handleError(c, err, 0) { return } - stats.ContractsCount = counts[elastic.DocContracts] - stats.OperationsCount = counts[elastic.DocOperations] + stats.ContractsCount = counts[models.DocContracts] + stats.OperationsCount = counts[models.DocOperations] - var protocols []models.Protocol - if err := ctx.ES.GetByNetworkWithSort(req.Network, "start_level", "desc", &protocols); handleError(c, err, 0) { + var protocols []protocol.Protocol + if err := ctx.Storage.GetByNetworkWithSort(req.Network, "start_level", "desc", &protocols); ctx.handleError(c, err, 0) { return } ps := make([]Protocol, len(protocols)) @@ -75,8 +75,8 @@ func (ctx *Context) GetNetworkStats(c *gin.Context) { } stats.Protocols = ps - languages, err := ctx.ES.GetLanguagesForNetwork(req.Network) - if handleError(c, err, 0) { + languages, err := ctx.Storage.GetLanguagesForNetwork(req.Network) + if ctx.handleError(c, err, 0) { return } stats.Languages = languages @@ -101,12 +101,12 @@ func (ctx *Context) GetNetworkStats(c *gin.Context) { // @Router /stats/{network}/series [get] func (ctx *Context) GetSeries(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqArgs getSeriesRequest - if err := c.BindQuery(&reqArgs); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&reqArgs); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -116,119 +116,119 @@ func (ctx *Context) GetSeries(c *gin.Context) { } options, err := ctx.getHistogramOptions(reqArgs.Name, req.Network, addresses...) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } - series, err := ctx.ES.GetDateHistogram(reqArgs.Period, options...) - if handleError(c, err, 0) { + series, err := ctx.Storage.GetDateHistogram(reqArgs.Period, options...) 
+ if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, series) } -func (ctx *Context) getHistogramOptions(name, network string, addresses ...string) ([]elastic.HistogramOption, error) { - filters := []elastic.HistogramFilter{ +func (ctx *Context) getHistogramOptions(name, network string, addresses ...string) ([]models.HistogramOption, error) { + filters := []models.HistogramFilter{ { Field: "network", Value: network, - Kind: elastic.HistogramFilterKindMatch, + Kind: models.HistogramFilterKindMatch, }, } switch name { case "contract": - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocContracts), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocContracts), + models.WithHistogramFilters(filters), }, nil case "operation": - filters = append(filters, elastic.HistogramFilter{ + filters = append(filters, models.HistogramFilter{ Field: "entrypoint", Value: "", - Kind: elastic.HistogramFilterKindExists, + Kind: models.HistogramFilterKindExists, }) - filters = append(filters, elastic.HistogramFilter{ + filters = append(filters, models.HistogramFilter{ Field: "status", Value: consts.Applied, - Kind: elastic.HistogramFilterKindMatch, + Kind: models.HistogramFilterKindMatch, }) if len(addresses) > 0 { - filters = append(filters, elastic.HistogramFilter{ - Kind: elastic.HistogramFilterKindAddresses, + filters = append(filters, models.HistogramFilter{ + Kind: models.HistogramFilterKindAddresses, Value: addresses, Field: "destination", }) } - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocOperations), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocOperations), + models.WithHistogramFilters(filters), }, nil case "paid_storage_size_diff": if len(addresses) > 0 { - filters = append(filters, elastic.HistogramFilter{ - Kind: elastic.HistogramFilterKindAddresses, + filters = 
append(filters, models.HistogramFilter{ + Kind: models.HistogramFilterKindAddresses, Value: addresses, Field: "destination", }) } - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocOperations), - elastic.WithHistogramFunction("sum", "result.paid_storage_size_diff"), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocOperations), + models.WithHistogramFunction("sum", "result.paid_storage_size_diff"), + models.WithHistogramFilters(filters), }, nil case "consumed_gas": if len(addresses) > 0 { - filters = append(filters, elastic.HistogramFilter{ - Kind: elastic.HistogramFilterKindAddresses, + filters = append(filters, models.HistogramFilter{ + Kind: models.HistogramFilterKindAddresses, Value: addresses, Field: "destination", }) } - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocOperations), - elastic.WithHistogramFunction("sum", "result.consumed_gas"), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocOperations), + models.WithHistogramFunction("sum", "result.consumed_gas"), + models.WithHistogramFilters(filters), }, nil case "users": if len(addresses) > 0 { - filters = append(filters, elastic.HistogramFilter{ - Kind: elastic.HistogramFilterKindAddresses, + filters = append(filters, models.HistogramFilter{ + Kind: models.HistogramFilterKindAddresses, Value: addresses, Field: "destination", }) } - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocOperations), - elastic.WithHistogramFunction("cardinality", "initiator.keyword"), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocOperations), + models.WithHistogramFunction("cardinality", "initiator.keyword"), + models.WithHistogramFilters(filters), }, nil case "volume": if len(addresses) > 0 { - filters = append(filters, elastic.HistogramFilter{ - 
Kind: elastic.HistogramFilterKindAddresses, + filters = append(filters, models.HistogramFilter{ + Kind: models.HistogramFilterKindAddresses, Value: addresses, Field: "destination", }) } - return []elastic.HistogramOption{ - elastic.WithHistogramIndices(elastic.DocOperations), - elastic.WithHistogramFunction("sum", "amount"), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices(models.DocOperations), + models.WithHistogramFunction("sum", "amount"), + models.WithHistogramFilters(filters), }, nil case "token_volume": - return []elastic.HistogramOption{ - elastic.WithHistogramIndices("transfer"), - elastic.WithHistogramFunction("sum", "amount"), - elastic.WithHistogramFilters(filters), + return []models.HistogramOption{ + models.WithHistogramIndices("transfer"), + models.WithHistogramFunction("sum", "amount"), + models.WithHistogramFilters(filters), }, nil default: return nil, errors.Errorf("Unknown series name: %s", name) @@ -245,26 +245,26 @@ func (ctx *Context) getHistogramOptions(name, network string, addresses ...strin // @Param period query string true "One of periods" Enums(all, year, month, week, day) // @Accept json // @Produce json -// @Success 200 {object} elastic.DAppStats +// @Success 200 {object} operation.DAppStats // @Failure 400 {object} Error // @Failure 500 {object} Error // @Router /stats/{network}/contracts [get] func (ctx *Context) GetContractsStats(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqStats GetTokenStatsRequest - if err := c.BindQuery(&reqStats); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&reqStats); ctx.handleError(c, err, http.StatusBadRequest) { return } addresses := reqStats.Addresses() if len(addresses) == 0 { - handleError(c, errors.Errorf("Empty address list"), http.StatusBadRequest) + 
ctx.handleError(c, errors.Errorf("Empty address list"), http.StatusBadRequest) return } - stats, err := ctx.ES.GetDAppStats(req.Network, addresses, reqStats.Period) - if handleError(c, err, 0) { + stats, err := ctx.Operations.GetDAppStats(req.Network, addresses, reqStats.Period) + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/storage.go b/cmd/api/handlers/storage.go index db5e80054..2d601bb4a 100644 --- a/cmd/api/handlers/storage.go +++ b/cmd/api/handlers/storage.go @@ -9,7 +9,7 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" "github.com/baking-bad/bcdhub/internal/jsonschema" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/gin-gonic/gin" "github.com/tidwall/gjson" ) @@ -30,34 +30,41 @@ import ( // @Router /contract/{network}/{address}/storage [get] func (ctx *Context) GetContractStorage(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var sReq storageRequest - if err := c.BindQuery(&sReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&sReq); ctx.handleError(c, err, http.StatusBadRequest) { return } rpc, err := ctx.GetRPC(req.Network) - if handleError(c, err, http.StatusBadRequest) { + if ctx.handleError(c, err, http.StatusBadRequest) { return } + if sReq.Level == 0 { + block, err := ctx.Blocks.Last(req.Network) + if ctx.handleError(c, err, 0) { + return + } + sReq.Level = int(block.Level) + } deffatedStorage, err := rpc.GetScriptStorageJSON(req.Address, int64(sReq.Level)) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } header, err := rpc.GetHeader(int64(sReq.Level)) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 
0) { return } - metadata, err := meta.GetMetadata(ctx.ES, req.Address, consts.STORAGE, header.Protocol) - if handleError(c, err, 0) { + metadata, err := meta.GetMetadata(ctx.Schema, req.Address, consts.STORAGE, header.Protocol) + if ctx.handleError(c, err, 0) { return } resp, err := newmiguel.MichelineToMiguel(deffatedStorage, metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -81,11 +88,11 @@ func (ctx *Context) GetContractStorage(c *gin.Context) { // @Router /contract/{network}/{address}/storage/raw [get] func (ctx *Context) GetContractStorageRaw(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var sReq storageRequest - if err := c.BindQuery(&sReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&sReq); ctx.handleError(c, err, http.StatusBadRequest) { return } filters := map[string]interface{}{ @@ -96,8 +103,8 @@ func (ctx *Context) GetContractStorageRaw(c *gin.Context) { filters["level"] = sReq.Level } - ops, err := ctx.ES.GetOperations(filters, 1, true) - if handleError(c, err, 0) { + ops, err := ctx.Operations.Get(filters, 1, true) + if ctx.handleError(c, err, 0) { return } if len(ops) == 0 { @@ -107,7 +114,7 @@ func (ctx *Context) GetContractStorageRaw(c *gin.Context) { s := gjson.Parse(ops[0].DeffatedStorage) resp, err := formatter.MichelineToMichelson(s, false, formatter.DefLineSize) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -131,11 +138,11 @@ func (ctx *Context) GetContractStorageRaw(c *gin.Context) { // @Router /contract/{network}/{address}/storage/rich [get] func (ctx *Context) GetContractStorageRich(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var sReq 
storageRequest - if err := c.BindQuery(&sReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&sReq); ctx.handleError(c, err, http.StatusBadRequest) { return } filters := map[string]interface{}{ @@ -146,8 +153,8 @@ func (ctx *Context) GetContractStorageRich(c *gin.Context) { filters["level"] = sReq.Level } - ops, err := ctx.ES.GetOperations(filters, 2, true) - if handleError(c, err, 0) { + ops, err := ctx.Operations.Get(filters, 2, true) + if ctx.handleError(c, err, 0) { return } if len(ops) == 0 { @@ -155,18 +162,18 @@ func (ctx *Context) GetContractStorageRich(c *gin.Context) { return } - prev := models.Operation{} + prev := operation.Operation{} if len(ops) > 1 { prev = ops[1] } - bmd, err := ctx.ES.GetBigMapDiffsForAddress(req.Address) - if handleError(c, err, 0) { + bmd, err := ctx.BigMapDiffs.GetForAddress(req.Address) + if ctx.handleError(c, err, 0) { return } resp, err := enrichStorage(ops[0].DeffatedStorage, prev.DeffatedStorage, bmd, ops[0].Protocol, true, false) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -189,44 +196,44 @@ func (ctx *Context) GetContractStorageRich(c *gin.Context) { // @Router /contract/{network}/{address}/storage/schema [get] func (ctx *Context) GetContractStorageSchema(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var ssReq storageSchemaRequest - if err := c.BindQuery(&ssReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&ssReq); ctx.handleError(c, err, http.StatusBadRequest) { return } - metadata, err := getStorageMetadata(ctx.ES, req.Address, req.Network) - if handleError(c, err, 0) { + metadata, err := ctx.getStorageMetadata(req.Address, req.Network) + if ctx.handleError(c, err, 0) { return } schema := new(EntrypointSchema) data, err := docstring.GetStorage(metadata) - if handleError(c, err, 0) { + 
if ctx.handleError(c, err, 0) { return } if len(data) > 0 { schema.EntrypointType = data[0] } schema.Schema, err = jsonschema.Create("0", metadata) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } if ssReq.FillType == "current" { rpc, err := ctx.GetRPC(req.Network) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } storage, err := rpc.GetScriptStorageJSON(req.Address, 0) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } schema.DefaultModel = make(jsonschema.DefaultModel) - if err := schema.DefaultModel.Fill(storage, metadata); handleError(c, err, 0) { + if err := schema.DefaultModel.Fill(storage, metadata); ctx.handleError(c, err, 0) { return } } diff --git a/cmd/api/handlers/subscriptions.go b/cmd/api/handlers/subscriptions.go index 423c2c859..9d4e7171b 100644 --- a/cmd/api/handlers/subscriptions.go +++ b/cmd/api/handlers/subscriptions.go @@ -18,7 +18,7 @@ func (ctx *Context) ListSubscriptions(c *gin.Context) { } subscriptions, err := ctx.DB.ListSubscriptions(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -28,11 +28,11 @@ func (ctx *Context) ListSubscriptions(c *gin.Context) { // CreateSubscription - func (ctx *Context) CreateSubscription(c *gin.Context) { var sub subRequest - if err := c.ShouldBindJSON(&sub); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBindJSON(&sub); ctx.handleError(c, err, http.StatusBadRequest) { return } if sub.SentryEnabled && sub.SentryDSN == "" { - handleError(c, fmt.Errorf("You have to set `Sentry DSN` when sentry notifications is enabled"), http.StatusBadRequest) + ctx.handleError(c, fmt.Errorf("You have to set `Sentry DSN` when sentry notifications is enabled"), http.StatusBadRequest) return } @@ -51,7 +51,7 @@ func (ctx *Context) CreateSubscription(c *gin.Context) { SentryDSN: sub.SentryDSN, } - if err := ctx.DB.UpsertSubscription(&subscription); handleError(c, err, 0) { + if err := 
ctx.DB.UpsertSubscription(&subscription); ctx.handleError(c, err, 0) { return } @@ -61,7 +61,7 @@ func (ctx *Context) CreateSubscription(c *gin.Context) { // DeleteSubscription - func (ctx *Context) DeleteSubscription(c *gin.Context) { var sub subRequest - if err := c.ShouldBindJSON(&sub); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBindJSON(&sub); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -77,7 +77,7 @@ func (ctx *Context) DeleteSubscription(c *gin.Context) { Network: sub.Network, } - if err := ctx.DB.DeleteSubscription(&subscription); handleError(c, err, 0) { + if err := ctx.DB.DeleteSubscription(&subscription); ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/swagger.go b/cmd/api/handlers/swagger.go index 64d434a48..859966b64 100644 --- a/cmd/api/handlers/swagger.go +++ b/cmd/api/handlers/swagger.go @@ -8,13 +8,13 @@ import ( // GetSwaggerDoc - func (ctx *Context) GetSwaggerDoc(c *gin.Context) { doc, err := swag.ReadDoc() - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.Header("Content-Type", "application/json") _, err = c.Writer.Write([]byte(doc)) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } } diff --git a/cmd/api/handlers/tokens.go b/cmd/api/handlers/tokens.go index 57f111907..d5c3cba32 100644 --- a/cmd/api/handlers/tokens.go +++ b/cmd/api/handlers/tokens.go @@ -5,8 +5,9 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/gin-gonic/gin" "github.com/pkg/errors" ) @@ -27,24 +28,24 @@ import ( // @Router /tokens/{network} [get] func (ctx *Context) GetFA(c *gin.Context) { var req 
getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var cursorReq pageableRequest - if err := c.BindQuery(&cursorReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&cursorReq); ctx.handleError(c, err, http.StatusBadRequest) { return } if cursorReq.Size == 0 { cursorReq.Size = 20 } - contracts, total, err := ctx.ES.GetTokens(req.Network, "", cursorReq.Offset, cursorReq.Size) - if handleError(c, err, 0) { + contracts, total, err := ctx.Contracts.GetTokens(req.Network, "", cursorReq.Offset, cursorReq.Size) + if ctx.handleError(c, err, 0) { return } tokens, err := ctx.contractToTokens(contracts, req.Network, "") - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } tokens.Total = total @@ -69,24 +70,24 @@ func (ctx *Context) GetFA(c *gin.Context) { // @Router /tokens/{network}/version/{faversion} [get] func (ctx *Context) GetFAByVersion(c *gin.Context) { var req getTokensByVersion - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var cursorReq pageableRequest - if err := c.BindQuery(&cursorReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&cursorReq); ctx.handleError(c, err, http.StatusBadRequest) { return } if cursorReq.Size == 0 { cursorReq.Size = 20 } - contracts, total, err := ctx.ES.GetTokens(req.Network, req.Version, cursorReq.Offset, cursorReq.Size) - if handleError(c, err, 0) { + contracts, total, err := ctx.Contracts.GetTokens(req.Network, req.Version, cursorReq.Offset, cursorReq.Size) + if ctx.handleError(c, err, 0) { return } tokens, err := ctx.contractToTokens(contracts, req.Network, req.Version) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } tokens.Total = total @@ -109,18 +110,18 @@ func (ctx *Context) GetFAByVersion(c *gin.Context) { 
// @Param token_id query integer false "Token ID" mininum(0) // @Accept json // @Produce json -// @Success 200 {object} elastic.TransfersResponse +// @Success 200 {object} transfer.Pageable // @Failure 400 {object} Error // @Failure 500 {object} Error // @Router /tokens/{network}/transfers/{address} [get] func (ctx *Context) GetFA12OperationsForAddress(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var ctxReq getTransfersRequest - if err := c.BindQuery(&ctxReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&ctxReq); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -134,7 +135,7 @@ func (ctx *Context) GetFA12OperationsForAddress(c *gin.Context) { tokenID = *ctxReq.TokenID } - transfers, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + transfers, err := ctx.Transfers.Get(transfer.GetContext{ Network: req.Network, Address: req.Address, Contracts: contracts, @@ -145,7 +146,7 @@ func (ctx *Context) GetFA12OperationsForAddress(c *gin.Context) { Size: ctxReq.Size, TokenID: tokenID, }) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -170,29 +171,29 @@ func (ctx *Context) GetFA12OperationsForAddress(c *gin.Context) { // @Router /tokens/{network}/series [get] func (ctx *Context) GetTokenVolumeSeries(c *gin.Context) { var req getByNetwork - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var args getTokenSeriesRequest - if err := c.BindQuery(&args); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&args); ctx.handleError(c, err, http.StatusBadRequest) { return } - dapp, err := ctx.ES.GetDAppBySlug(args.Slug) - if handleError(c, err, 0) { + dapp, err := ctx.TZIP.GetDAppBySlug(args.Slug) + if ctx.handleError(c, err, 0) { 
return } - series, err := ctx.ES.GetTokenVolumeSeries(req.Network, args.Period, []string{args.Contract}, dapp.Contracts, args.TokenID) - if handleError(c, err, 0) { + series, err := ctx.Transfers.GetTokenVolumeSeries(req.Network, args.Period, []string{args.Contract}, dapp.Contracts, args.TokenID) + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, series) } -func (ctx *Context) contractToTokens(contracts []models.Contract, network, version string) (PageableTokenContracts, error) { +func (ctx *Context) contractToTokens(contracts []contract.Contract, network, version string) (PageableTokenContracts, error) { tokens := make([]TokenContract, len(contracts)) addresses := make([]string, len(contracts)) for i := range contracts { @@ -234,7 +235,7 @@ func (ctx *Context) contractToTokens(contracts []models.Contract, network, versi methods[i] = interfaceVersion.Entrypoints[i].Name } - stats, err := ctx.ES.GetTokensStats(network, addresses, methods) + stats, err := ctx.Operations.GetTokensStats(network, addresses, methods) if err != nil { return PageableTokenContracts{}, err } @@ -283,31 +284,31 @@ func (ctx *Context) contractToTokens(contracts []models.Contract, network, versi // @Router /contract/{network}/{address}/tokens [get] func (ctx *Context) GetContractTokens(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } tokens, err := ctx.getTokens(req.Network, req.Address) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, tokens) } func (ctx *Context) getTokens(network, address string) ([]Token, error) { - metadata, err := ctx.ES.GetTokenMetadata(elastic.GetTokenMetadataContext{ + metadata, err := ctx.TZIP.GetTokenMetadata(tzip.GetTokenMetadataContext{ Contract: address, Network: network, TokenID: -1, }) if err != nil { - if elastic.IsRecordNotFound(err) { + if 
ctx.Storage.IsRecordNotFound(err) { return []Token{}, nil } return nil, err } tokens := make([]Token, 0) for _, token := range metadata { - supply, err := ctx.ES.GetTokenSupply(network, address, token.TokenID) + supply, err := ctx.Transfers.GetTokenSupply(network, address, token.TokenID) if err != nil { return nil, err } @@ -335,16 +336,16 @@ func (ctx *Context) getTokens(network, address string) ([]Token, error) { // @Router /contract/{network}/{address}/tokens/holders [get] func (ctx *Context) GetTokenHolders(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } var reqArgs getTokenHolders - if err := c.BindQuery(&reqArgs); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&reqArgs); ctx.handleError(c, err, http.StatusBadRequest) { return } - balances, err := ctx.ES.GetHolders(req.Network, req.Address, *reqArgs.TokenID) - if handleError(c, err, 0) { + balances, err := ctx.TokenBalances.GetHolders(req.Network, req.Address, *reqArgs.TokenID) + if ctx.handleError(c, err, 0) { return } result := make(map[string]int64) diff --git a/cmd/api/handlers/transfers.go b/cmd/api/handlers/transfers.go index 9f1f608ea..4d0f120d4 100644 --- a/cmd/api/handlers/transfers.go +++ b/cmd/api/handlers/transfers.go @@ -3,7 +3,8 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/gin-gonic/gin" ) @@ -23,11 +24,11 @@ import ( // @Router /{network}/{address}/transfers [get] func (ctx *Context) GetContractTransfers(c *gin.Context) { var contractRequest getContractRequest - if err := c.BindUri(&contractRequest); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&contractRequest); ctx.handleError(c, err, 
http.StatusBadRequest) { return } var req getContractTransfers - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -36,18 +37,18 @@ func (ctx *Context) GetContractTransfers(c *gin.Context) { tokenID = int64(*req.TokenID) } - transfers, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + transfers, err := ctx.Transfers.Get(transfer.GetContext{ Network: contractRequest.Network, Contracts: []string{contractRequest.Address}, Size: req.Size, Offset: req.Offset, TokenID: tokenID, }) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } response, err := ctx.transfersPostprocessing(transfers) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, response) @@ -59,16 +60,16 @@ type tokenKey struct { TokenID int64 } -func (ctx *Context) transfersPostprocessing(transfers elastic.TransfersResponse) (response TransferResponse, err error) { +func (ctx *Context) transfersPostprocessing(transfers transfer.Pageable) (response TransferResponse, err error) { response.Total = transfers.Total response.Transfers = make([]Transfer, len(transfers.Transfers)) mapTokens := make(map[tokenKey]*TokenMetadata) - tokens, err := ctx.ES.GetTokenMetadata(elastic.GetTokenMetadataContext{ + tokens, err := ctx.TZIP.GetTokenMetadata(tzip.GetTokenMetadataContext{ TokenID: -1, }) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return } } else { diff --git a/cmd/api/handlers/tzip.go b/cmd/api/handlers/tzip.go index 74342bdcd..2ca0fe7b6 100644 --- a/cmd/api/handlers/tzip.go +++ b/cmd/api/handlers/tzip.go @@ -3,7 +3,6 @@ package handlers import ( "net/http" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/gin-gonic/gin" ) @@ -16,22 +15,22 @@ import ( // @Param address path string true "KT or tz address" minlength(36) maxlength(36) // @Accept json // 
@Produce json -// @Success 200 {object} models.TZIP +// @Success 200 {object} tzip.TZIP // @Success 204 {object} gin.H // @Failure 400 {object} Error // @Failure 500 {object} Error // @Router /account/{network}/{address}/metadata [get] func (ctx *Context) GetMetadata(c *gin.Context) { var req getContractRequest - if err := c.BindUri(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindUri(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } - tzip, err := ctx.ES.GetTZIP(req.Network, req.Address) + tzip, err := ctx.TZIP.Get(req.Network, req.Address) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { c.JSON(http.StatusNoContent, gin.H{}) } else { - handleError(c, err, 0) + ctx.handleError(c, err, 0) } return } diff --git a/cmd/api/handlers/user.go b/cmd/api/handlers/user.go index 72035fd43..061067004 100644 --- a/cmd/api/handlers/user.go +++ b/cmd/api/handlers/user.go @@ -16,32 +16,32 @@ func (ctx *Context) GetUserProfile(c *gin.Context) { } user, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } subscriptions, err := ctx.DB.ListSubscriptions(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } count, err := ctx.DB.GetUserCompletedAssesments(user.ID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } compilationTasks, err := ctx.DB.CountCompilationTasks(user.ID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } verifications, err := ctx.DB.CountVerifications(user.ID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } deployments, err := ctx.DB.CountDeployments(user.ID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -70,7 +70,7 @@ func (ctx *Context) UserMarkAllRead(c *gin.Context) { } var req markReadRequest - if err := c.ShouldBindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBindJSON(&req); 
ctx.handleError(c, err, http.StatusBadRequest) { return } @@ -80,7 +80,7 @@ func (ctx *Context) UserMarkAllRead(c *gin.Context) { } err := ctx.DB.UpdateUserMarkReadAt(userID, req.Timestamp) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/verification.go b/cmd/api/handlers/verification.go index f4baa6c30..9aa2a131e 100644 --- a/cmd/api/handlers/verification.go +++ b/cmd/api/handlers/verification.go @@ -23,17 +23,17 @@ func (ctx *Context) ListVerifications(c *gin.Context) { } _, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } var ctReq compilationRequest - if err := c.BindQuery(&ctReq); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&ctReq); ctx.handleError(c, err, http.StatusBadRequest) { return } verifications, err := ctx.DB.ListVerifications(userID, ctReq.Limit, ctReq.Offset) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -49,17 +49,17 @@ func (ctx *Context) CreateVerification(c *gin.Context) { } var req verificationRequest - if err := c.ShouldBindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.ShouldBindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } user, err := ctx.DB.GetUser(userID) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } provider, err := providers.NewPublic(user.Provider) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } @@ -75,7 +75,7 @@ func (ctx *Context) CreateVerification(c *gin.Context) { } err = ctx.DB.CreateCompilationTask(&task) - if handleError(c, err, 0) { + if ctx.handleError(c, err, 0) { return } diff --git a/cmd/api/handlers/vote.go b/cmd/api/handlers/vote.go index 405f14d84..d8b14cce4 100644 --- a/cmd/api/handlers/vote.go +++ b/cmd/api/handlers/vote.go @@ -4,7 +4,7 @@ import ( "net/http" "github.com/baking-bad/bcdhub/internal/database" - 
"github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/gin-gonic/gin" "github.com/jinzhu/gorm" ) @@ -12,18 +12,18 @@ import ( // Vote - func (ctx *Context) Vote(c *gin.Context) { var req voteRequest - if err := c.BindJSON(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindJSON(&req); ctx.handleError(c, err, http.StatusBadRequest) { _ = c.AbortWithError(http.StatusBadRequest, err) return } - a := models.NewEmptyContract(req.SourceNetwork, req.SourceAddress) - if err := ctx.ES.GetByID(&a); handleError(c, err, 0) { + a := contract.NewEmptyContract(req.SourceNetwork, req.SourceAddress) + if err := ctx.Storage.GetByID(&a); ctx.handleError(c, err, 0) { return } - b := models.NewEmptyContract(req.DestinationNetwork, req.DestinationAddress) - if err := ctx.ES.GetByID(&b); handleError(c, err, 0) { + b := contract.NewEmptyContract(req.DestinationNetwork, req.DestinationAddress) + if err := ctx.Storage.GetByID(&b); ctx.handleError(c, err, 0) { return } @@ -35,7 +35,7 @@ func (ctx *Context) Vote(c *gin.Context) { UserID: CurrentUserID(c), Assessment: req.Vote, } - if err := ctx.DB.CreateOrUpdateAssessment(&assessment); handleError(c, err, 0) { + if err := ctx.DB.CreateOrUpdateAssessment(&assessment); ctx.handleError(c, err, 0) { return } c.JSON(http.StatusOK, "") @@ -44,14 +44,14 @@ func (ctx *Context) Vote(c *gin.Context) { // GetTasks - func (ctx *Context) GetTasks(c *gin.Context) { var req pageableRequest - if err := c.BindQuery(&req); handleError(c, err, http.StatusBadRequest) { + if err := c.BindQuery(&req); ctx.handleError(c, err, http.StatusBadRequest) { return } userID := CurrentUserID(c) assesments, err := ctx.DB.GetAssessmentsWithValue(userID, database.AssessmentUndefined, uint(req.Size)) if err != nil { - if !gorm.IsRecordNotFoundError(err) && handleError(c, err, 0) { + if !gorm.IsRecordNotFoundError(err) && ctx.handleError(c, err, 0) { return } 
assesments = make([]database.Assessments, 0) @@ -63,8 +63,8 @@ func (ctx *Context) GetTasks(c *gin.Context) { // GenerateTasks - func (ctx *Context) GenerateTasks(c *gin.Context) { userID := CurrentUserID(c) - tasks, err := ctx.ES.GetDiffTasks() - if handleError(c, err, 0) { + tasks, err := ctx.Contracts.GetDiffTasks() + if ctx.handleError(c, err, 0) { return } assesments := make([]database.Assessments, 0) @@ -77,7 +77,7 @@ func (ctx *Context) GenerateTasks(c *gin.Context) { UserID: userID, Assessment: database.AssessmentUndefined, } - if err := ctx.DB.CreateAssessment(&a); handleError(c, err, 0) { + if err := ctx.DB.CreateAssessment(&a); ctx.handleError(c, err, 0) { return } if a.Assessment == database.AssessmentUndefined { diff --git a/cmd/api/main.go b/cmd/api/main.go index 28127cf50..47a60f071 100644 --- a/cmd/api/main.go +++ b/cmd/api/main.go @@ -58,7 +58,7 @@ func newApp() *app { } api := &app{ - Hub: ws.DefaultHub(cfg.Elastic.URI, cfg.Elastic.Timeout, ctx.MQ), + Hub: ws.DefaultHub(ctx), Context: ctx, } diff --git a/cmd/api/seed/seed.go b/cmd/api/seed/seed.go index 1bb35df8a..3a320c07f 100644 --- a/cmd/api/seed/seed.go +++ b/cmd/api/seed/seed.go @@ -4,7 +4,6 @@ import ( "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/tzip" ) @@ -40,9 +39,9 @@ func Run(ctx *handlers.Context, seed config.SeedConfig) error { } // 3. 
seed aliases - aliasModels := make([]elastic.Model, 0) + aliasModels := make([]models.Model, 0) for _, a := range seed.Aliases { - aliasModels = append(aliasModels, &models.TZIP{ + aliasModels = append(aliasModels, &tzip.TZIP{ TZIP16: tzip.TZIP16{ Name: a.Alias, }, @@ -50,7 +49,7 @@ func Run(ctx *handlers.Context, seed config.SeedConfig) error { Address: a.Address, }) } - if err := ctx.ES.BulkInsert(aliasModels); err != nil { + if err := ctx.Bulk.Insert(aliasModels); err != nil { return err } diff --git a/cmd/api/ws/channels/channel.go b/cmd/api/ws/channels/channel.go index 29ae8d5e8..f9bac1e66 100644 --- a/cmd/api/ws/channels/channel.go +++ b/cmd/api/ws/channels/channel.go @@ -1,8 +1,8 @@ package channels import ( + "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/cmd/api/ws/datasources" - "github.com/baking-bad/bcdhub/internal/elastic" ) // Channel - @@ -18,7 +18,7 @@ type Channel interface { type DefaultChannel struct { sources []datasources.DataSource - es elastic.IElastic + ctx *handlers.Context } // NewDefaultChannel - diff --git a/cmd/api/ws/channels/operations.go b/cmd/api/ws/channels/operations.go index 9b9f0336e..54beb7de9 100644 --- a/cmd/api/ws/channels/operations.go +++ b/cmd/api/ws/channels/operations.go @@ -4,11 +4,9 @@ import ( "fmt" "sync" - "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/cmd/api/ws/datasources" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/pkg/errors" ) @@ -97,8 +95,8 @@ func (c *OperationsChannel) listen(source datasources.DataSource) { } func (c *OperationsChannel) createMessage(data datasources.Data) error { - op := models.Operation{ID: string(data.Body.([]byte))} - if err := c.es.GetByID(&op); err != nil { + op := 
operation.Operation{ID: string(data.Body.([]byte))} + if err := c.ctx.Storage.GetByID(&op); err != nil { return errors.Errorf("[OperationsChannel.createMessage] Find operation error: %s", err) } if op.Network != c.Network { @@ -110,18 +108,18 @@ func (c *OperationsChannel) createMessage(data datasources.Data) error { if _, ok := c.hashes[op.Hash]; ok { return nil } - operations, err := c.es.GetOperations( + operations, err := c.ctx.Operations.Get( map[string]interface{}{ "hash": op.Hash, }, 0, true, ) - if err != nil && !elastic.IsRecordNotFound(err) { + if err != nil && !c.ctx.Storage.IsRecordNotFound(err) { return err } - response, err := handlers.PrepareOperations(c.es, operations, true) + response, err := c.ctx.PrepareOperations(operations, true) if err != nil { return err } diff --git a/cmd/api/ws/channels/option.go b/cmd/api/ws/channels/option.go index 18f222ab0..a998cd096 100644 --- a/cmd/api/ws/channels/option.go +++ b/cmd/api/ws/channels/option.go @@ -1,8 +1,8 @@ package channels import ( + "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/cmd/api/ws/datasources" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/pkg/errors" ) @@ -22,10 +22,10 @@ func WithSource(sources []datasources.DataSource, typ string) ChannelOption { } } -// WithElasticSearch - -func WithElasticSearch(es elastic.IElastic) ChannelOption { +// WithContext - +func WithContext(ctx *handlers.Context) ChannelOption { return func(c *DefaultChannel) { - c.es = es + c.ctx = ctx } } diff --git a/cmd/api/ws/channels/stats.go b/cmd/api/ws/channels/stats.go index e5af258ea..9d852d0f0 100644 --- a/cmd/api/ws/channels/stats.go +++ b/cmd/api/ws/channels/stats.go @@ -103,20 +103,20 @@ func (c *StatsChannel) listen(source datasources.DataSource) { } func (c *StatsChannel) createMessage() error { - states, err := c.es.GetLastBlocks() + states, err := 
c.ctx.Blocks.LastByNetworks() if err != nil { return err } - callCounts, err := c.es.GetCallsCountByNetwork() + callCounts, err := c.ctx.Storage.GetCallsCountByNetwork() if err != nil { return err } - contractStats, err := c.es.GetContractStatsByNetwork() + contractStats, err := c.ctx.Storage.GetContractStatsByNetwork() if err != nil { return err } - faCount, err := c.es.GetFACountByNetwork() + faCount, err := c.ctx.Storage.GetFACountByNetwork() if err != nil { return err } @@ -141,7 +141,6 @@ func (c *StatsChannel) createMessage() error { if ok { body[i].Total = stats.Total body[i].TotalBalance = stats.Balance - body[i].TotalWithdrawn = stats.TotalWithdrawn body[i].UniqueContracts = stats.SameCount } } diff --git a/cmd/api/ws/hub.go b/cmd/api/ws/hub.go index 012363646..7fbcc472c 100644 --- a/cmd/api/ws/hub.go +++ b/cmd/api/ws/hub.go @@ -4,10 +4,9 @@ import ( "fmt" "sync" + "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/cmd/api/ws/channels" "github.com/baking-bad/bcdhub/cmd/api/ws/datasources" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/mq" "github.com/pkg/errors" "github.com/valyala/fastjson" ) @@ -18,7 +17,7 @@ type Hub struct { clients sync.Map public sync.Map - elastic elastic.IElastic + ctx *handlers.Context stop chan struct{} wg sync.WaitGroup @@ -40,16 +39,15 @@ func NewHub(opts ...HubOption) *Hub { } // DefaultHub - -func DefaultHub(connectionElastic []string, timeoutElastic int, messageQueue mq.Mediator) *Hub { - es := elastic.WaitNew(connectionElastic, timeoutElastic) +func DefaultHub(ctx *handlers.Context) *Hub { hub := NewHub( - WithRabbitSource(messageQueue), - WithElastic(es), + WithRabbitSource(ctx.MQ), + WithContext(ctx), ) hub.AddPublicChannel(channels.NewStatsChannel( channels.WithSource(hub.sources, datasources.RabbitType), - channels.WithElasticSearch(es), + channels.WithContext(ctx), )) return hub } @@ -154,7 
+152,7 @@ func createDynamicChannels(c *Client, channelName string, data *fastjson.Value) } return channels.NewOperationsChannel(address, network, channels.WithSource(c.hub.sources, datasources.RabbitType), - channels.WithElasticSearch(c.hub.elastic), + channels.WithContext(c.hub.ctx), ), nil default: return nil, errors.Errorf("Unknown channel: %s", channelName) diff --git a/cmd/api/ws/options.go b/cmd/api/ws/options.go index 12f9a8fa5..bd69b8872 100644 --- a/cmd/api/ws/options.go +++ b/cmd/api/ws/options.go @@ -1,8 +1,8 @@ package ws import ( + "github.com/baking-bad/bcdhub/cmd/api/handlers" "github.com/baking-bad/bcdhub/cmd/api/ws/datasources" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/mq" ) @@ -29,9 +29,9 @@ func WithRabbitSource(messageQueue mq.Mediator) HubOption { } } -// WithElastic - -func WithElastic(es elastic.IElastic) HubOption { +// WithContext - +func WithContext(ctx *handlers.Context) HubOption { return func(h *Hub) { - h.elastic = es + h.ctx = ctx } } diff --git a/cmd/compiler/main.go b/cmd/compiler/main.go index 243076fa9..2efeb5b86 100644 --- a/cmd/compiler/main.go +++ b/cmd/compiler/main.go @@ -13,10 +13,11 @@ import ( "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/mq" ) @@ -42,7 +43,7 @@ func main() { config.WithRPC(cfg.RPC), config.WithDatabase(cfg.DB), config.WithRabbit(cfg.RabbitMQ, 
cfg.Compiler.ProjectName, cfg.Compiler.MQ), - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithAWS(cfg.Compiler.AWS), ), } @@ -52,9 +53,9 @@ func main() { signals := make(chan os.Signal, 1) signal.Notify(signals, os.Interrupt, syscall.SIGTERM, syscall.SIGINT) - protocol, err := context.ES.GetProtocol(consts.Mainnet, "", -1) + protocol, err := context.Protocols.GetProtocol(consts.Mainnet, "", -1) if err != nil { - log.Fatal(err) + log.Panic(err) } tickerTime := protocol.Constants.TimeBetweenBlocks @@ -94,14 +95,14 @@ func (ctx *Context) setDeployment() error { } for i, d := range deployments { - ops, err := ctx.ES.GetOperations( + ops, err := ctx.Operations.Get( map[string]interface{}{"hash": d.OperationHash}, 0, true, ) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { continue } @@ -120,7 +121,7 @@ func (ctx *Context) setDeployment() error { return nil } -func (ctx *Context) processDeployment(deployment *database.Deployment, operation *models.Operation) error { +func (ctx *Context) processDeployment(deployment *database.Deployment, operation *operation.Operation) error { deployment.Address = operation.Destination deployment.Network = operation.Network @@ -154,11 +155,11 @@ func (ctx *Context) processDeployment(deployment *database.Deployment, operation return fmt.Errorf("CreateVerification error %w", err) } - contract := models.NewEmptyContract(task.Network, task.Address) + contract := contract.NewEmptyContract(task.Network, task.Address) contract.Verified = true contract.VerificationSource = sourcePath - return ctx.ES.UpdateFields(elastic.DocContracts, contract.GetID(), contract, "Verified", "VerificationSource") + return ctx.Storage.UpdateFields(models.DocContracts, contract.GetID(), contract, "Verified", "VerificationSource") } func (ctx *Context) handleMessage(data mq.Data) error { diff --git a/cmd/compiler/verification.go b/cmd/compiler/verification.go index 52753c9f6..f5f487db8 
100644 --- a/cmd/compiler/verification.go +++ b/cmd/compiler/verification.go @@ -5,10 +5,10 @@ import ( "github.com/baking-bad/bcdhub/internal/compiler/compilation" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/providers" "github.com/jinzhu/gorm/dialects/postgres" "github.com/tidwall/gjson" @@ -59,11 +59,11 @@ func (ctx *Context) verification(ct compilation.Task) error { return err } - contract := models.NewEmptyContract(task.Network, task.Address) + contract := contract.NewEmptyContract(task.Network, task.Address) contract.Verified = true contract.VerificationSource = sourcePath - return ctx.ES.UpdateFields(elastic.DocContracts, contract.GetID(), contract, "Verified", "VerificationSource") + return ctx.Storage.UpdateFields(models.DocContracts, contract.GetID(), contract, "Verified", "VerificationSource") } func (ctx *Context) verify(ct compilation.Task) (*database.CompilationTask, error) { diff --git a/cmd/indexer/indexer/boost.go b/cmd/indexer/indexer/boost.go index c794b47a4..bce90c89a 100644 --- a/cmd/indexer/indexer/boost.go +++ b/cmd/indexer/indexer/boost.go @@ -10,11 +10,38 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" + elasticBalanceUpdate "github.com/baking-bad/bcdhub/internal/elastic/balanceupdate" + elasticBigMapAction "github.com/baking-bad/bcdhub/internal/elastic/bigmapaction" + elasticBigMapDiff 
"github.com/baking-bad/bcdhub/internal/elastic/bigmapdiff" + elasticBlock "github.com/baking-bad/bcdhub/internal/elastic/block" + "github.com/baking-bad/bcdhub/internal/elastic/bulk" + elasticContract "github.com/baking-bad/bcdhub/internal/elastic/contract" + "github.com/baking-bad/bcdhub/internal/elastic/core" + elasticMigration "github.com/baking-bad/bcdhub/internal/elastic/migration" + elasticOperation "github.com/baking-bad/bcdhub/internal/elastic/operation" + elasticProtocol "github.com/baking-bad/bcdhub/internal/elastic/protocol" + elasticSchema "github.com/baking-bad/bcdhub/internal/elastic/schema" + elasticTezosDomain "github.com/baking-bad/bcdhub/internal/elastic/tezosdomain" + elasticTokenBalance "github.com/baking-bad/bcdhub/internal/elastic/tokenbalance" + elasticTransfer "github.com/baking-bad/bcdhub/internal/elastic/transfer" + elasticTZIP "github.com/baking-bad/bcdhub/internal/elastic/tzip" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/index" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + 
"github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/mq" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers" @@ -29,13 +56,28 @@ var errSameLevel = errors.New("Same level") // BoostIndexer - type BoostIndexer struct { + Storage models.GeneralRepository + Bulk models.BulkRepository + BalanceUpdates balanceupdate.Repository + BigMapActions bigmapaction.Repository + BigMapDiffs bigmapdiff.Repository + Blocks block.Repository + Contracts contract.Repository + Migrations migration.Repository + Operations operation.Repository + Protocols protocol.Repository + Schema schema.Repository + TezosDomains tezosdomain.Repository + TokenBalances tokenbalance.Repository + Transfers transfer.Repository + TZIP tzip.Repository + rpc noderpc.INode - es elastic.IElastic externalIndexer index.Indexer interfaces map[string]kinds.ContractKind messageQueue mq.Mediator - state models.Block - currentProtocol models.Protocol + state block.Block + currentProtocol protocol.Protocol cfg config.Config updateTicker *time.Ticker @@ -48,8 +90,8 @@ type BoostIndexer struct { func (bi *BoostIndexer) fetchExternalProtocols() error { logger.WithNetwork(bi.Network).Info("Fetching external protocols") - var existingProtocols []models.Protocol - if err := bi.es.GetByNetworkWithSort(bi.Network, "start_level", "desc", &existingProtocols); err != nil { + var existingProtocols []protocol.Protocol + if err := bi.Storage.GetByNetworkWithSort(bi.Network, "start_level", "desc", &existingProtocols); err != nil { return err } @@ -63,7 +105,7 @@ func (bi *BoostIndexer) fetchExternalProtocols() error { return err } - protocols := make([]elastic.Model, 0) + protocols := make([]models.Model, 0) for i := range extProtocols { if _, ok := exists[extProtocols[i].Hash]; ok { continue @@ -77,7 +119,7 @@ func (bi *BoostIndexer) 
fetchExternalProtocols() error { alias = extProtocols[i].Hash[:8] } - newProtocol := &models.Protocol{ + newProtocol := &protocol.Protocol{ ID: helpers.GenerateID(), Hash: extProtocols[i].Hash, Alias: alias, @@ -87,7 +129,7 @@ func (bi *BoostIndexer) fetchExternalProtocols() error { Network: bi.Network, } - protocolConstants := models.Constants{} + protocolConstants := protocol.Constants{} if newProtocol.StartLevel != newProtocol.EndLevel || newProtocol.EndLevel != 0 { constants, err := bi.rpc.GetNetworkConstants(extProtocols[i].StartLevel) if err != nil { @@ -104,13 +146,14 @@ func (bi *BoostIndexer) fetchExternalProtocols() error { logger.WithNetwork(bi.Network).Infof("Fetched %s", alias) } - return bi.es.BulkInsert(protocols) + return bi.Bulk.Insert(protocols) } // NewBoostIndexer - func NewBoostIndexer(cfg config.Config, network string, opts ...BoostIndexerOption) (*BoostIndexer, error) { logger.WithNetwork(network).Info("Creating indexer object...") - es := elastic.WaitNew(cfg.Elastic.URI, cfg.Elastic.Timeout) + es := core.WaitNew(cfg.Storage.URI, cfg.Storage.Timeout) + rpcProvider, ok := cfg.RPC[network] if !ok { return nil, errors.Errorf("Unknown network %s", network) @@ -128,13 +171,27 @@ func NewBoostIndexer(cfg config.Config, network string, opts ...BoostIndexerOpti } bi := &BoostIndexer{ - Network: network, - rpc: rpc, - es: es, - messageQueue: messageQueue, - stop: make(chan struct{}), - interfaces: interfaces, - cfg: cfg, + Storage: es, + Bulk: bulk.NewStorage(es), + BalanceUpdates: elasticBalanceUpdate.NewStorage(es), + BigMapActions: elasticBigMapAction.NewStorage(es), + BigMapDiffs: elasticBigMapDiff.NewStorage(es), + Blocks: elasticBlock.NewStorage(es), + Contracts: elasticContract.NewStorage(es), + Migrations: elasticMigration.NewStorage(es), + Operations: elasticOperation.NewStorage(es), + Protocols: elasticProtocol.NewStorage(es), + Schema: elasticSchema.NewStorage(es), + TezosDomains: elasticTezosDomain.NewStorage(es), + TokenBalances: 
elasticTokenBalance.NewStorage(es), + Transfers: elasticTransfer.NewStorage(es), + TZIP: elasticTZIP.NewStorage(es), + Network: network, + rpc: rpc, + messageQueue: messageQueue, + stop: make(chan struct{}), + interfaces: interfaces, + cfg: cfg, } for _, opt := range opts { @@ -146,7 +203,7 @@ func NewBoostIndexer(cfg config.Config, network string, opts ...BoostIndexerOpti } func (bi *BoostIndexer) init() error { - if err := bi.es.CreateIndexes(); err != nil { + if err := bi.Storage.CreateIndexes(); err != nil { return err } @@ -156,14 +213,14 @@ func (bi *BoostIndexer) init() error { } } - currentState, err := bi.es.GetLastBlock(bi.Network) + currentState, err := bi.Blocks.Last(bi.Network) if err != nil { return err } bi.state = currentState logger.WithNetwork(bi.Network).Infof("Current indexer state: %d", currentState.Level) - currentProtocol, err := bi.es.GetProtocol(bi.Network, "", currentState.Level) + currentProtocol, err := bi.Protocols.GetProtocol(bi.Network, "", currentState.Level) if err != nil { header, err := bi.rpc.GetHeader(helpers.MaxInt64(1, currentState.Level)) if err != nil { @@ -311,14 +368,14 @@ func (bi *BoostIndexer) Rollback() error { return err } - manager := rollback.NewManager(bi.es, bi.messageQueue, bi.rpc, bi.cfg.SharePath) + manager := rollback.NewManager(bi.Storage, bi.Bulk, bi.Contracts, bi.Operations, bi.Transfers, bi.TokenBalances, bi.Protocols, bi.messageQueue, bi.rpc, bi.cfg.SharePath) if err := manager.Rollback(bi.state, lastLevel); err != nil { return err } helpers.CatchErrorSentry(errors.Errorf("[%s] Rollback from %d to %d", bi.Network, bi.state.Level, lastLevel)) - newState, err := bi.es.GetLastBlock(bi.Network) + newState, err := bi.Blocks.Last(bi.Network) if err != nil { return err } @@ -338,7 +395,7 @@ func (bi *BoostIndexer) getLastRollbackBlock() (int64, error) { return 0, err } - block, err := bi.es.GetBlock(bi.Network, level) + block, err := bi.Blocks.Get(bi.Network, level) if err != nil { return 0, err } @@ -431,8 
+488,8 @@ func (bi *BoostIndexer) process() error { return errSameLevel } -func (bi *BoostIndexer) createBlock(head noderpc.Header) *models.Block { - newBlock := models.Block{ +func (bi *BoostIndexer) createBlock(head noderpc.Header) *block.Block { + newBlock := block.Block{ ID: helpers.GenerateID(), Network: bi.Network, Hash: head.Hash, @@ -447,9 +504,9 @@ func (bi *BoostIndexer) createBlock(head noderpc.Header) *models.Block { return &newBlock } -func (bi *BoostIndexer) saveModels(items []elastic.Model) error { +func (bi *BoostIndexer) saveModels(items []models.Model) error { logger.WithNetwork(bi.Network).Debugf("Found %d new models", len(items)) - if err := bi.es.BulkInsert(items); err != nil { + if err := bi.Bulk.Insert(items); err != nil { return err } @@ -461,7 +518,7 @@ func (bi *BoostIndexer) saveModels(items []elastic.Model) error { return nil } -func (bi *BoostIndexer) getDataFromBlock(network string, head noderpc.Header) ([]elastic.Model, error) { +func (bi *BoostIndexer) getDataFromBlock(network string, head noderpc.Header) ([]models.Model, error) { if head.Level <= 1 { return nil, nil } @@ -470,11 +527,11 @@ func (bi *BoostIndexer) getDataFromBlock(network string, head noderpc.Header) ([ return nil, err } - parsedModels := make([]elastic.Model, 0) + parsedModels := make([]models.Model, 0) for _, opg := range data.Array() { parser := operations.NewGroup(operations.NewParseParams( bi.rpc, - bi.es, + bi.Storage, bi.BigMapDiffs, bi.Blocks, bi.TZIP, bi.Schema, bi.TokenBalances, operations.WithConstants(bi.currentProtocol.Constants), operations.WithHead(head), operations.WithIPFSGateways(bi.cfg.IPFSGateways), @@ -492,9 +549,9 @@ func (bi *BoostIndexer) getDataFromBlock(network string, head noderpc.Header) ([ return parsedModels, nil } -func (bi *BoostIndexer) migrate(head noderpc.Header) ([]elastic.Model, error) { - updates := make([]elastic.Model, 0) - newModels := make([]elastic.Model, 0) +func (bi *BoostIndexer) migrate(head noderpc.Header) 
([]models.Model, error) { + updates := make([]models.Model, 0) + newModels := make([]models.Model, 0) if bi.currentProtocol.EndLevel == 0 && head.Level > 1 { logger.WithNetwork(bi.Network).Infof("Finalizing the previous protocol: %s", bi.currentProtocol.Alias) @@ -502,7 +559,7 @@ func (bi *BoostIndexer) migrate(head noderpc.Header) ([]elastic.Model, error) { updates = append(updates, &bi.currentProtocol) } - newProtocol, err := bi.es.GetProtocol(bi.Network, head.Protocol, head.Level) + newProtocol, err := bi.Protocols.GetProtocol(bi.Network, head.Protocol, head.Level) if err != nil { logger.Warning("%s", err) newProtocol, err = createProtocol(bi.Network, head.Protocol, head.Level) @@ -539,7 +596,7 @@ func (bi *BoostIndexer) migrate(head noderpc.Header) ([]elastic.Model, error) { bi.currentProtocol = newProtocol newModels = append(newModels, &newProtocol) - if err := bi.es.BulkUpdate(updates); err != nil { + if err := bi.Bulk.Update(updates); err != nil { return nil, err } @@ -548,7 +605,7 @@ func (bi *BoostIndexer) migrate(head noderpc.Header) ([]elastic.Model, error) { return newModels, nil } -func createProtocol(network, hash string, level int64) (protocol models.Protocol, err error) { +func createProtocol(network, hash string, level int64) (protocol protocol.Protocol, err error) { logger.WithNetwork(network).Infof("Creating new protocol %s starting at %d", hash, level) protocol.SymLink, err = meta.GetProtoSymLink(hash) if err != nil { @@ -563,9 +620,9 @@ func createProtocol(network, hash string, level int64) (protocol models.Protocol return } -func (bi *BoostIndexer) standartMigration(newProtocol models.Protocol, head noderpc.Header) ([]elastic.Model, []elastic.Model, error) { +func (bi *BoostIndexer) standartMigration(newProtocol protocol.Protocol, head noderpc.Header) ([]models.Model, []models.Model, error) { logger.WithNetwork(bi.Network).Info("Try to find migrations...") - contracts, err := bi.es.GetContracts(map[string]interface{}{ + contracts, err := 
bi.Contracts.GetMany(map[string]interface{}{ "network": bi.Network, }) if err != nil { @@ -573,9 +630,9 @@ func (bi *BoostIndexer) standartMigration(newProtocol models.Protocol, head node } logger.WithNetwork(bi.Network).Infof("Now %d contracts are indexed", len(contracts)) - p := parsers.NewMigrationParser(bi.es, bi.cfg.SharePath) - newModels := make([]elastic.Model, 0) - newUpdates := make([]elastic.Model, 0) + p := parsers.NewMigrationParser(bi.Storage, bi.BigMapDiffs, bi.cfg.SharePath) + newModels := make([]models.Model, 0) + newUpdates := make([]models.Model, 0) for i := range contracts { logger.WithNetwork(bi.Network).Infof("Migrate %s...", contracts[i].Address) script, err := bi.rpc.GetScriptJSON(contracts[i].Address, newProtocol.StartLevel) @@ -598,7 +655,7 @@ func (bi *BoostIndexer) standartMigration(newProtocol models.Protocol, head node return newModels, newUpdates, nil } -func (bi *BoostIndexer) vestingMigration(head noderpc.Header) ([]elastic.Model, error) { +func (bi *BoostIndexer) vestingMigration(head noderpc.Header) ([]models.Model, error) { addresses, err := bi.rpc.GetContractsByBlock(head.Level) if err != nil { return nil, err @@ -606,7 +663,7 @@ func (bi *BoostIndexer) vestingMigration(head noderpc.Header) ([]elastic.Model, p := parsers.NewVestingParser(bi.cfg.SharePath, bi.interfaces) - parsedModels := make([]elastic.Model, 0) + parsedModels := make([]models.Model, 0) for _, address := range addresses { if !strings.HasPrefix(address, "KT") { continue diff --git a/cmd/metrics/bigmapdiff.go b/cmd/metrics/bigmapdiff.go index 69754dab4..2387e64b8 100644 --- a/cmd/metrics/bigmapdiff.go +++ b/cmd/metrics/bigmapdiff.go @@ -3,10 +3,10 @@ package main import ( "sync" - "github.com/baking-bad/bcdhub/internal/elastic" contractHandlers "github.com/baking-bad/bcdhub/internal/handlers" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + 
"github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/pkg/errors" ) @@ -16,20 +16,20 @@ var bigMapDiffHandlersInit = sync.Once{} func getBigMapDiff(ids []string) error { bigMapDiffHandlersInit.Do(initHandlers) - bmd := make([]models.BigMapDiff, 0) - if err := ctx.ES.GetByIDs(&bmd, ids...); err != nil { + bmd := make([]bigmapdiff.BigMapDiff, 0) + if err := ctx.Storage.GetByIDs(&bmd, ids...); err != nil { return errors.Errorf("[getBigMapDiff] Find big map diff error for IDs %v: %s", ids, err) } r := result{ - Updated: make([]elastic.Model, 0), + Updated: make([]models.Model, 0), } for i := range bmd { if err := parseBigMapDiff(bmd[i], &r); err != nil { return errors.Errorf("[getBigMapDiff] Compute error message: %s", err) } } - if err := ctx.ES.BulkUpdate(r.Updated); err != nil { + if err := ctx.Bulk.Update(r.Updated); err != nil { return err } return nil @@ -37,20 +37,20 @@ func getBigMapDiff(ids []string) error { func initHandlers() { bigMapDiffHandlers = append(bigMapDiffHandlers, - contractHandlers.NewTZIP(ctx.ES, ctx.RPC, ctx.Config.IPFSGateways), + contractHandlers.NewTZIP(ctx.BigMapDiffs, ctx.Blocks, ctx.Schema, ctx.Storage, ctx.Bulk, ctx.RPC, ctx.Config.IPFSGateways), ) bigMapDiffHandlers = append(bigMapDiffHandlers, - contractHandlers.NewTezosDomains(ctx.ES, ctx.Domains), + contractHandlers.NewTezosDomains(ctx.Storage, ctx.Schema, ctx.Domains), ) } type result struct { - Updated []elastic.Model + Updated []models.Model } //nolint -func parseBigMapDiff(bmd models.BigMapDiff, r *result) error { - h := metrics.New(ctx.ES, ctx.DB) +func parseBigMapDiff(bmd bigmapdiff.BigMapDiff, r *result) error { + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) if err := h.SetBigMapDiffsStrings(&bmd); err != nil { return err diff --git a/cmd/metrics/contract.go b/cmd/metrics/contract.go index 1efa6f39d..a5960edb7 
100644 --- a/cmd/metrics/contract.go +++ b/cmd/metrics/contract.go @@ -5,12 +5,12 @@ import ( "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) func getContract(ids []string) error { - contracts := make([]models.Contract, 0) - if err := ctx.ES.GetByIDs(&contracts, ids...); err != nil { + contracts := make([]contract.Contract, 0) + if err := ctx.Storage.GetByIDs(&contracts, ids...); err != nil { return errors.Errorf("[getContract] Find contracts error for IDs %v: %s", ids, err) } @@ -21,11 +21,11 @@ func getContract(ids []string) error { } logger.Info("Metrics of %d contracts are computed", len(contracts)) - return ctx.ES.BulkUpdateField(contracts, "Alias", "Verified", "VerificationSource") + return ctx.Bulk.UpdateField(contracts, "Alias", "Verified", "VerificationSource") } -func parseContract(contract *models.Contract) error { - h := metrics.New(ctx.ES, ctx.DB) +func parseContract(contract *contract.Contract) error { + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) if _, err := h.SetContractAlias(contract); err != nil { return err diff --git a/cmd/metrics/main.go b/cmd/metrics/main.go index 68c9172c5..29b57009a 100644 --- a/cmd/metrics/main.go +++ b/cmd/metrics/main.go @@ -77,7 +77,7 @@ func main() { } ctx = config.NewContext( - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithRPC(cfg.RPC), config.WithDatabase(cfg.DB), config.WithRabbit(cfg.RabbitMQ, cfg.Metrics.ProjectName, cfg.Metrics.MQ), diff --git a/cmd/metrics/migrations.go b/cmd/metrics/migrations.go index 1985b3a76..fbfb3cc63 100644 --- a/cmd/metrics/migrations.go +++ b/cmd/metrics/migrations.go @@ -2,13 +2,13 @@ package main import ( 
"github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" "github.com/pkg/errors" ) func getMigrations(ids []string) error { - migrations := make([]models.Migration, 0) - if err := ctx.ES.GetByIDs(&migrations, ids...); err != nil { + migrations := make([]migration.Migration, 0) + if err := ctx.Storage.GetByIDs(&migrations, ids...); err != nil { return errors.Errorf("[getMigrations] Find migration error for IDs %v: %s", ids, err) } @@ -22,6 +22,6 @@ func getMigrations(ids []string) error { return nil } -func parseMigration(migration models.Migration) error { - return ctx.ES.UpdateContractMigrationsCount(migration.Address, migration.Network) +func parseMigration(migration migration.Migration) error { + return ctx.Contracts.UpdateMigrationsCount(migration.Address, migration.Network) } diff --git a/cmd/metrics/operations.go b/cmd/metrics/operations.go index edfece6dd..a04b17b0f 100644 --- a/cmd/metrics/operations.go +++ b/cmd/metrics/operations.go @@ -6,20 +6,21 @@ import ( "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/pkg/errors" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" ) func getOperation(ids []string) error { - operations := make([]models.Operation, 0) - if err := ctx.ES.GetByIDs(&operations, ids...); err != nil { + operations := make([]operation.Operation, 0) + if err := ctx.Storage.GetByIDs(&operations, ids...); err != nil { return errors.Errorf("[getOperation] Find operation error for IDs %v: %s", ids, err) } - h := metrics.New(ctx.ES, ctx.DB) - updated := make([]elastic.Model, 0) + h := 
metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) + updated := make([]models.Model, 0) for i := range operations { if err := parseOperation(h, operations[i]); err != nil { return errors.Errorf("[getOperation] Compute error message: %s", err) @@ -28,7 +29,7 @@ func getOperation(ids []string) error { updated = append(updated, &operations[i]) } - if err := ctx.ES.BulkUpdate(updated); err != nil { + if err := ctx.Bulk.Update(updated); err != nil { return err } @@ -37,7 +38,7 @@ func getOperation(ids []string) error { return getOperationsContracts(h, operations) } -func parseOperation(h *metrics.Handler, operation models.Operation) error { +func parseOperation(h *metrics.Handler, operation operation.Operation) error { if _, err := h.SetOperationAliases(&operation); err != nil { return err } @@ -65,12 +66,12 @@ func (s *stats) isZero() bool { return s.Count == 0 && s.LastAction.IsZero() } -func getOperationsContracts(h *metrics.Handler, operations []models.Operation) error { - addresses := make([]elastic.Address, 0) - addressesMap := make(map[elastic.Address]*stats) +func getOperationsContracts(h *metrics.Handler, operations []operation.Operation) error { + addresses := make([]contract.Address, 0) + addressesMap := make(map[contract.Address]*stats) for i := range operations { if helpers.IsContract(operations[i].Destination) { - dest := elastic.Address{ + dest := contract.Address{ Address: operations[i].Destination, Network: operations[i].Network, } @@ -81,7 +82,7 @@ func getOperationsContracts(h *metrics.Handler, operations []models.Operation) e addressesMap[dest].update(operations[i].Timestamp) } if helpers.IsContract(operations[i].Source) { - src := elastic.Address{ + src := contract.Address{ Address: operations[i].Source, Network: operations[i].Network, } @@ -93,15 +94,15 @@ func getOperationsContracts(h *metrics.Handler, operations 
[]models.Operation) e } } - contracts, err := ctx.ES.GetContractsByAddresses(addresses) + contracts, err := ctx.Contracts.GetByAddresses(addresses) if err != nil { return err } - updated := make([]models.Contract, 0) - contractsMap := make(map[elastic.Address]models.Contract) + updated := make([]contract.Contract, 0) + contractsMap := make(map[contract.Address]contract.Contract) for i := range contracts { - addr := elastic.Address{ + addr := contract.Address{ Address: contracts[i].Address, Network: contracts[i].Network, } @@ -115,7 +116,7 @@ func getOperationsContracts(h *metrics.Handler, operations []models.Operation) e } } - if err := ctx.ES.BulkUpdateField(updated, "TxCount", "LastAction"); err != nil { + if err := ctx.Bulk.UpdateField(updated, "TxCount", "LastAction"); err != nil { return err } @@ -123,7 +124,7 @@ func getOperationsContracts(h *metrics.Handler, operations []models.Operation) e if !operations[i].IsTransaction() || !operations[i].IsCall() { continue } - addr := elastic.Address{ + addr := contract.Address{ Address: operations[i].Destination, Network: operations[i].Network, } diff --git a/cmd/metrics/projects.go b/cmd/metrics/projects.go index 86cff0496..0fe8d78dc 100644 --- a/cmd/metrics/projects.go +++ b/cmd/metrics/projects.go @@ -3,15 +3,15 @@ package main import ( "github.com/pkg/errors" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) func getProject(ids []string) error { - contracts := make([]models.Contract, 0) - if err := ctx.ES.GetByIDs(&contracts, ids...); err != nil { + contracts := make([]contract.Contract, 0) + if err := ctx.Storage.GetByIDs(&contracts, ids...); err != nil { return errors.Errorf("[getContract] Find contracts error for IDs %v: %s", ids, err) } @@ -24,13 +24,13 @@ func 
getProject(ids []string) error { return nil } -func parseProject(contract models.Contract) error { - h := metrics.New(ctx.ES, ctx.DB) +func parseProject(contract contract.Contract) error { + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) if contract.ProjectID == "" { if err := h.SetContractProjectID(&contract); err != nil { return errors.Errorf("[parseContract] Error during set contract projectID: %s", err) } } - return ctx.ES.UpdateFields(elastic.DocContracts, contract.GetID(), contract, "ProjectID") + return ctx.Storage.UpdateFields(models.DocContracts, contract.GetID(), contract, "ProjectID") } diff --git a/cmd/metrics/recalc.go b/cmd/metrics/recalc.go index 10e886b85..f30a1de0c 100644 --- a/cmd/metrics/recalc.go +++ b/cmd/metrics/recalc.go @@ -3,13 +3,13 @@ package main import ( "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/pkg/errors" ) func recalculateAll(ids []string) error { - contracts := make([]models.Contract, 0) - if err := ctx.ES.GetByIDs(&contracts, ids...); err != nil { + contracts := make([]contract.Contract, 0) + if err := ctx.Storage.GetByIDs(&contracts, ids...); err != nil { return errors.Errorf("[recalculateAll] Find contracts error for IDs %v: %s", ids, err) } @@ -23,8 +23,8 @@ func recalculateAll(ids []string) error { return nil } -func recalc(contract models.Contract) error { - h := metrics.New(ctx.ES, ctx.DB) +func recalc(contract contract.Contract) error { + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) if _, err := h.SetContractAlias(&contract); err != nil { return err @@ 
-40,5 +40,5 @@ func recalc(contract models.Contract) error { return errors.Errorf("[recalc] Compute contract stats error message: %s", err) } - return ctx.ES.UpdateDoc(&contract) + return ctx.Storage.UpdateDoc(&contract) } diff --git a/cmd/metrics/transfers.go b/cmd/metrics/transfers.go index c56781984..233204857 100644 --- a/cmd/metrics/transfers.go +++ b/cmd/metrics/transfers.go @@ -1,16 +1,16 @@ package main import ( - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/pkg/errors" ) func getTransfer(ids []string) error { - transfers := make([]models.Transfer, 0) - if err := ctx.ES.GetByIDs(&transfers, ids...); err != nil { + transfers := make([]transfer.Transfer, 0) + if err := ctx.Storage.GetByIDs(&transfers, ids...); err != nil { return errors.Errorf("[getTransfer] Find transfer error for IDs %v: %s", ids, err) } @@ -23,12 +23,12 @@ func getTransfer(ids []string) error { return nil } -func parseTransfer(transfer models.Transfer) error { - h := metrics.New(ctx.ES, ctx.DB) +func parseTransfer(transfer transfer.Transfer) error { + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) if flag, err := h.SetTransferAliases(&transfer); flag { - if err := ctx.ES.UpdateFields( - elastic.DocTransfers, transfer.ID, + if err := ctx.Storage.UpdateFields( + models.DocTransfers, transfer.ID, transfer, "FromAlias", "ToAlias", "Alias", "InitiatorAlias", ); err != nil { diff --git a/configs/development.yml b/configs/development.yml index cfc3a3042..20e9e79a6 100644 --- a/configs/development.yml +++ b/configs/development.yml @@ -26,7 +26,7 @@ tzkt: base_uri: https://carthage.tzkt.io/ timeout: 20 
-elastic: +storage: uri: - http://127.0.0.1:9200 timeout: 10 diff --git a/configs/production.yml b/configs/production.yml index a8b6dbafc..c7e542dca 100644 --- a/configs/production.yml +++ b/configs/production.yml @@ -26,7 +26,7 @@ tzkt: base_uri: https://carthage.tzkt.io/ timeout: 20 -elastic: +storage: uri: - http://elastic:9200 - http://elastic:9200 diff --git a/configs/sandbox.yml b/configs/sandbox.yml index 19d8df440..9252b26e8 100644 --- a/configs/sandbox.yml +++ b/configs/sandbox.yml @@ -3,7 +3,7 @@ rpc: uri: ${SANDBOX_NODE_URI} timeout: 10 -elastic: +storage: uri: - http://elastic:9200 timeout: 10 diff --git a/configs/you.yml b/configs/you.yml index 0dba9a105..c357fdbf4 100644 --- a/configs/you.yml +++ b/configs/you.yml @@ -26,7 +26,7 @@ tzkt: base_uri: https://carthage.tzkt.io/ timeout: 20 -elastic: +storage: uri: - http://elastic:9200 timeout: 10 diff --git a/go.mod b/go.mod index 164098dd2..906ad0287 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 github.com/aws/aws-sdk-go v1.30.10 github.com/btcsuite/btcutil v1.0.1 + github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/elastic/go-elasticsearch/v8 v8.0.0-20191218082911-5398a82b748f github.com/fatih/color v1.9.0 @@ -20,6 +21,7 @@ require ( github.com/google/uuid v1.1.1 github.com/gorilla/websocket v1.4.2 github.com/hashicorp/go-retryablehttp v0.6.6 // indirect + github.com/iancoleman/orderedmap v0.1.0 // indirect github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89 github.com/jinzhu/gorm v1.9.11 github.com/json-iterator/go v1.1.10 @@ -28,6 +30,8 @@ require ( github.com/nats-io/nats-server/v2 v2.1.9 // indirect github.com/nats-io/nats.go v1.10.0 github.com/pkg/errors v0.9.1 + github.com/restream/reindexer 
v3.0.0+incompatible + github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/schollz/progressbar/v3 v3.1.1 github.com/sergi/go-diff v1.1.0 github.com/sirupsen/logrus v1.4.2 @@ -44,7 +48,7 @@ require ( golang.org/x/net v0.0.0-20201224014010-6772e930b67b // indirect golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e // indirect - golang.org/x/tools v0.0.0-20201226215659-b1c90890d22a // indirect + golang.org/x/tools v0.0.0-20210107193943-4ed967dd8eff // indirect google.golang.org/appengine v1.6.6 // indirect google.golang.org/protobuf v1.25.0 // indirect gopkg.in/go-playground/validator.v9 v9.31.0 diff --git a/go.sum b/go.sum index 9adeacb6e..3b3e86ff0 100644 --- a/go.sum +++ b/go.sum @@ -51,6 +51,8 @@ github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -147,6 +149,7 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= 
github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -190,6 +193,8 @@ github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCO github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/iancoleman/orderedmap v0.1.0 h1:2orAxZBJsvimgEBmMWfXaFlzSG2fbQil5qzP3F6cCkg= +github.com/iancoleman/orderedmap v0.1.0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= @@ -326,10 +331,14 @@ github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= 
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/restream/reindexer v3.0.0+incompatible h1:cSQitup80L5JfgW9fImsi0ZPx9woYn7VeXPS+IlJ/tY= +github.com/restream/reindexer v3.0.0+incompatible/go.mod h1:1zcuRS92j/mekSQJgL8s8ZHVFrBL3IAuVPmOoIJUGvw= github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/schollz/progressbar/v3 v3.1.1 h1:IsKbXcalNftS2i4KUTkf/LTcMIrm1RUoefVaJQaizYw= github.com/schollz/progressbar/v3 v3.1.1/go.mod h1:d+PD64vPuv+GL2EhUpvV579FR91WWhRHEnImqGsIBU4= @@ -405,22 +414,18 @@ github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FB github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -go.opencensus.io v0.20.1 h1:pMEjRZ1M4ebWGikflH7nQpV6+Zr88KBMA2XJD3sbijw= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto 
v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c h1:Vj5n4GlwjmQteupaxJ9+0FNOmBrHfq7vN4btdGoDZgI= golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d h1:2+ZP7EfsZV7Vvmx3TIqSlSzATMkTAKqM14YGFPoSKjI= golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4 h1:c2HOrn5iMezYjSlGPncknSEr/8x5LELb/ilJbXi9DEA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -440,13 +445,10 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c h1:uOCk1iQW6Vc18bnC13MfzScl+wdKBmM9Y9kU7Z83/lw= golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -454,14 +456,12 @@ golang.org/x/net v0.0.0-20201224014010-6772e930b67b h1:iFwSg7t5GZmB/Q5TjiEAsdoLD golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421 h1:Wo7BWFiOk0QRFMLYMqJGFMd9CgUAcGx7V+qEg/h5IBI= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d h1:TzXSXBo42m9gQenoE3b9BGiEpg5IG2JkU5FkPIawgtw= golang.org/x/oauth2 
v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -478,25 +478,19 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a h1:aYOabOQFp6Vj6W1F80affTUvO9UxmJRx8K0gsfABByQ= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68 h1:nxC68pudNYkKU6jWhgrqdreuFiOQWj1Fs7T3VrH4Pjw= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2 h1:z99zHgr7hKfrUcX/KsoJk5FJfjTceCKIp96+biqP4To= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0 h1:/5xXl8Y5W96D+TtHSlonuFqGHIWVuyCkGJLwGh9JJFs= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e h1:EHBhcS0mlXEAVwNyO2dLfjToGsyY4j24pTs2ScHnX7s= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -508,7 +502,6 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384 h1:TFlARGu6Czu1z7q93HTxcP1P+/ZFC/IKythI5RzrnRg= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod 
h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -516,28 +509,25 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20201120155355-20be4ac4bd6e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201226215659-b1c90890d22a h1:pdfjQ7VswBeGam3EpuEJ4e8EAb7JgaubV570LO/SIQM= golang.org/x/tools v0.0.0-20201226215659-b1c90890d22a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210107193943-4ed967dd8eff h1:6EkB024TP1fu6cmQqeCNw685zYDVt5g8N1BXh755SQM= +golang.org/x/tools v0.0.0-20210107193943-4ed967dd8eff/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.3.1 h1:oJra/lMfmtm13/rgY/8i3MzjFWYXvQIAKjQ3HqofMk8= google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= 
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107 h1:xtNn7qFlagY2mQNFHMSRPjT2RkOV4OXM7P5TVy9xATo= google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= -google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= @@ -547,14 +537,12 @@ google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQ google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf 
v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= @@ -562,30 +550,23 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= -gopkg.in/go-playground/validator.v8 v8.18.2 h1:lFB4DoMU6B626w8ny76MV7VX6W2VHct2GVOI3xgiMrQ= gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= -gopkg.in/go-playground/validator.v9 v9.29.1 h1:SvGtYmN60a5CVKTOzMSyfzWDeZRxRuGvRQyEAKbw1xc= gopkg.in/go-playground/validator.v9 v9.29.1/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= gopkg.in/go-playground/validator.v9 v9.31.0 h1:bmXmP2RSNtFES+bn4uYuHT7iJFJv7Vj+an+ZQdDaD1M= gopkg.in/go-playground/validator.v9 v9.31.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod 
h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ= gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a h1:/8zB6iBfHCl1qAnEAWwGPNrUvapuy6CPla1VM0k8hQw= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/internal/classification/metrics/array_metric.go b/internal/classification/metrics/array_metric.go index 1c8e1278f..934fbe74c 100644 --- a/internal/classification/metrics/array_metric.go +++ 
b/internal/classification/metrics/array_metric.go @@ -5,7 +5,7 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/pkg/errors" ) @@ -22,7 +22,7 @@ func NewArray(field string) *Array { } // Compute - -func (m *Array) Compute(a, b models.Contract) Feature { +func (m *Array) Compute(a, b contract.Contract) Feature { f := Feature{ Name: strings.ToLower(m.Field), } @@ -66,7 +66,7 @@ func (m *Array) Compute(a, b models.Contract) Feature { return f } -func (m *Array) getContractFieldArray(c models.Contract) ([]interface{}, error) { +func (m *Array) getContractFieldArray(c contract.Contract) ([]interface{}, error) { r := reflect.ValueOf(c) f := reflect.Indirect(r).FieldByName(m.Field) diff --git a/internal/classification/metrics/bool_metric.go b/internal/classification/metrics/bool_metric.go index 30324ef44..e92d876fe 100644 --- a/internal/classification/metrics/bool_metric.go +++ b/internal/classification/metrics/bool_metric.go @@ -4,7 +4,7 @@ import ( "reflect" "strings" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) // Bool - @@ -20,7 +20,7 @@ func NewBool(field string) *Bool { } // Compute - -func (m *Bool) Compute(a, b models.Contract) Feature { +func (m *Bool) Compute(a, b contract.Contract) Feature { f := Feature{ Name: strings.ToLower(m.Field), } @@ -33,7 +33,7 @@ func (m *Bool) Compute(a, b models.Contract) Feature { return f } -func (m *Bool) getContractField(c models.Contract) interface{} { +func (m *Bool) getContractField(c contract.Contract) interface{} { r := reflect.ValueOf(c) return reflect.Indirect(r).FieldByName(m.Field).Interface() } diff --git a/internal/classification/metrics/fingerprint.go b/internal/classification/metrics/fingerprint.go index 8ec8789fb..feb0d5279 100644 --- 
a/internal/classification/metrics/fingerprint.go +++ b/internal/classification/metrics/fingerprint.go @@ -6,7 +6,7 @@ import ( "math" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) // Fingerprint - @@ -22,7 +22,7 @@ func NewFingerprint(section string) *Fingerprint { } // Compute - -func (m *Fingerprint) Compute(a, b models.Contract) Feature { +func (m *Fingerprint) Compute(a, b contract.Contract) Feature { f := Feature{ Name: fmt.Sprintf("fingerprint_%s", m.Section), } @@ -135,7 +135,7 @@ func NewFingerprintLength(section string) *FingerprintLength { } // Compute - -func (m *FingerprintLength) Compute(a, b models.Contract) Feature { +func (m *FingerprintLength) Compute(a, b contract.Contract) Feature { f := Feature{ Name: fmt.Sprintf("fingerprint_length_%s", m.Section), } diff --git a/internal/classification/metrics/interface.go b/internal/classification/metrics/interface.go index 18646c3f5..930b8f467 100644 --- a/internal/classification/metrics/interface.go +++ b/internal/classification/metrics/interface.go @@ -1,10 +1,10 @@ package metrics -import "github.com/baking-bad/bcdhub/internal/models" +import "github.com/baking-bad/bcdhub/internal/models/contract" // Metric - type Metric interface { - Compute(a, b models.Contract) Feature + Compute(a, b contract.Contract) Feature } // Feature - diff --git a/internal/classification/metrics/manager.go b/internal/classification/metrics/manager.go index 9e9b06fa9..de053e287 100644 --- a/internal/classification/metrics/manager.go +++ b/internal/classification/metrics/manager.go @@ -1,6 +1,6 @@ package metrics -import "github.com/baking-bad/bcdhub/internal/models" +import "github.com/baking-bad/bcdhub/internal/models/contract" // Manager - type Manager struct{} @@ -11,7 +11,7 @@ func NewManager() *Manager { } // Compute - -func (m *Manager) 
Compute(a, b models.Contract) Feature { +func (m *Manager) Compute(a, b contract.Contract) Feature { f := Feature{ Name: "manager", } diff --git a/internal/classification/metrics/manager_test.go b/internal/classification/metrics/manager_test.go index a8cdc10cf..20c4e8ad7 100644 --- a/internal/classification/metrics/manager_test.go +++ b/internal/classification/metrics/manager_test.go @@ -4,13 +4,13 @@ import ( "reflect" "testing" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) func TestManager_Compute(t *testing.T) { type args struct { - a models.Contract - b models.Contract + a contract.Contract + b contract.Contract } tests := []struct { name string @@ -20,11 +20,11 @@ func TestManager_Compute(t *testing.T) { { name: "Case 1", args: args{ - a: models.Contract{ + a: contract.Contract{ Address: "test", Network: "network", }, - b: models.Contract{ + b: contract.Contract{ Address: "test", Network: "network", }, @@ -36,11 +36,11 @@ func TestManager_Compute(t *testing.T) { }, { name: "Case 2", args: args{ - a: models.Contract{ + a: contract.Contract{ Address: "other", Network: "network", }, - b: models.Contract{ + b: contract.Contract{ Address: "test", Network: "network", }, @@ -52,11 +52,11 @@ func TestManager_Compute(t *testing.T) { }, { name: "Case 3", args: args{ - a: models.Contract{ + a: contract.Contract{ Address: "test", Network: "other", }, - b: models.Contract{ + b: contract.Contract{ Address: "test", Network: "network", }, @@ -68,11 +68,11 @@ func TestManager_Compute(t *testing.T) { }, { name: "Case 4", args: args{ - a: models.Contract{ + a: contract.Contract{ Address: "other", Network: "other", }, - b: models.Contract{ + b: contract.Contract{ Address: "test", Network: "network", }, diff --git a/internal/config/config.go b/internal/config/config.go index 58c2fcf48..224bd68bf 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -21,7 +21,7 @@ const ( type Config 
struct { RPC map[string]RPCConfig `yaml:"rpc"` TzKT map[string]TzKTConfig `yaml:"tzkt"` - Elastic ElasticSearchConfig `yaml:"elastic"` + Storage StorageConfig `yaml:"storage"` RabbitMQ RabbitConfig `yaml:"rabbitmq"` DB DatabaseConfig `yaml:"db"` OAuth OAuthConfig `yaml:"oauth"` @@ -90,8 +90,8 @@ type TzKTConfig struct { Timeout int `yaml:"timeout"` } -// ElasticSearchConfig - -type ElasticSearchConfig struct { +// StorageConfig - +type StorageConfig struct { URI []string `yaml:"uri"` Timeout int `yaml:"timeout"` } diff --git a/internal/config/context.go b/internal/config/context.go index 4805cfa4f..b2b98fd93 100644 --- a/internal/config/context.go +++ b/internal/config/context.go @@ -4,7 +4,20 @@ import ( "github.com/baking-bad/bcdhub/internal/aws" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/mq" "github.com/baking-bad/bcdhub/internal/noderpc" 
"github.com/baking-bad/bcdhub/internal/pinata" @@ -15,7 +28,6 @@ import ( // Context - type Context struct { DB database.DB - ES elastic.IElastic MQ mq.Mediator AWS *aws.Client RPC map[string]noderpc.INode @@ -28,6 +40,22 @@ type Context struct { Interfaces map[string]kinds.ContractKind Domains map[string]string + + Storage models.GeneralRepository + Bulk models.BulkRepository + BalanceUpdates balanceupdate.Repository + BigMapActions bigmapaction.Repository + BigMapDiffs bigmapdiff.Repository + Blocks block.Repository + Contracts contract.Repository + Migrations migration.Repository + Operations operation.Repository + Protocols protocol.Repository + Schema schema.Repository + TezosDomains tezosdomain.Repository + TokenBalances tokenbalance.Repository + Transfers transfer.Repository + TZIP tzip.Repository } // NewContext - diff --git a/internal/config/options.go b/internal/config/options.go index f90ebb691..04090531b 100644 --- a/internal/config/options.go +++ b/internal/config/options.go @@ -3,13 +3,45 @@ package config import ( "io/ioutil" "log" + "strings" "time" "github.com/baking-bad/bcdhub/internal/aws" "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/elastic/balanceupdate" + "github.com/baking-bad/bcdhub/internal/elastic/bigmapaction" + "github.com/baking-bad/bcdhub/internal/elastic/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/elastic/block" + "github.com/baking-bad/bcdhub/internal/elastic/bulk" + "github.com/baking-bad/bcdhub/internal/elastic/contract" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/elastic/migration" + 
"github.com/baking-bad/bcdhub/internal/elastic/operation" + "github.com/baking-bad/bcdhub/internal/elastic/protocol" + "github.com/baking-bad/bcdhub/internal/elastic/schema" + "github.com/baking-bad/bcdhub/internal/elastic/tezosdomain" + "github.com/baking-bad/bcdhub/internal/elastic/tokenbalance" + "github.com/baking-bad/bcdhub/internal/elastic/transfer" + "github.com/baking-bad/bcdhub/internal/elastic/tzip" + + reindexerBU "github.com/baking-bad/bcdhub/internal/reindexer/balanceupdate" + reindexerBMA "github.com/baking-bad/bcdhub/internal/reindexer/bigmapaction" + reindexerBMD "github.com/baking-bad/bcdhub/internal/reindexer/bigmapdiff" + reindexerBlock "github.com/baking-bad/bcdhub/internal/reindexer/block" + reindexerBulk "github.com/baking-bad/bcdhub/internal/reindexer/bulk" + reindexerContract "github.com/baking-bad/bcdhub/internal/reindexer/contract" + reindexerCore "github.com/baking-bad/bcdhub/internal/reindexer/core" + reindexerMigration "github.com/baking-bad/bcdhub/internal/reindexer/migration" + reindexerOperation "github.com/baking-bad/bcdhub/internal/reindexer/operation" + reindexerProtocol "github.com/baking-bad/bcdhub/internal/reindexer/protocol" + reindexerSchema "github.com/baking-bad/bcdhub/internal/reindexer/schema" + reindexerTD "github.com/baking-bad/bcdhub/internal/reindexer/tezosdomain" + reindexerTB "github.com/baking-bad/bcdhub/internal/reindexer/tokenbalance" + reindexerTransfer "github.com/baking-bad/bcdhub/internal/reindexer/transfer" + reindexertzip "github.com/baking-bad/bcdhub/internal/reindexer/tzip" + "github.com/baking-bad/bcdhub/internal/mq" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/pinata" @@ -36,10 +68,56 @@ func WithRPC(rpcConfig map[string]RPCConfig) ContextOption { } } -// 
WithElasticSearch - -func WithElasticSearch(esConfig ElasticSearchConfig) ContextOption { +// WithStorage - +func WithStorage(cfg StorageConfig) ContextOption { return func(ctx *Context) { - ctx.ES = elastic.WaitNew(esConfig.URI, esConfig.Timeout) + if len(cfg.URI) == 0 { + panic("Please set connection strings to storage in config") + } + if strings.HasPrefix(cfg.URI[0], "builtin://") { + storage, err := reindexerCore.New(cfg.URI[0]) + if err != nil { + panic(err) + } + + ctx.Storage = storage + ctx.Bulk = reindexerBulk.NewStorage(storage) + ctx.BalanceUpdates = reindexerBU.NewStorage(storage) + ctx.BigMapActions = reindexerBMA.NewStorage(storage) + ctx.BigMapDiffs = reindexerBMD.NewStorage(storage) + ctx.Blocks = reindexerBlock.NewStorage(storage) + ctx.Contracts = reindexerContract.NewStorage(storage) + ctx.Migrations = reindexerMigration.NewStorage(storage) + ctx.Operations = reindexerOperation.NewStorage(storage) + ctx.Protocols = reindexerProtocol.NewStorage(storage) + ctx.Schema = reindexerSchema.NewStorage(storage) + ctx.TezosDomains = reindexerTD.NewStorage(storage) + ctx.TokenBalances = reindexerTB.NewStorage(storage) + ctx.Transfers = reindexerTransfer.NewStorage(storage) + ctx.TZIP = reindexertzip.NewStorage(storage) + + if err := ctx.Storage.CreateIndexes(); err != nil { + panic(err) + } + } else { + es := core.WaitNew(cfg.URI, cfg.Timeout) + + ctx.Storage = es + ctx.Bulk = bulk.NewStorage(es) + ctx.BalanceUpdates = balanceupdate.NewStorage(es) + ctx.BigMapActions = bigmapaction.NewStorage(es) + ctx.BigMapDiffs = bigmapdiff.NewStorage(es) + ctx.Blocks = block.NewStorage(es) + ctx.Contracts = contract.NewStorage(es) + ctx.Migrations = migration.NewStorage(es) + ctx.Operations = operation.NewStorage(es) + ctx.Protocols = protocol.NewStorage(es) + ctx.Schema = schema.NewStorage(es) + ctx.TezosDomains = tezosdomain.NewStorage(es) + ctx.TokenBalances = tokenbalance.NewStorage(es) + ctx.Transfers = transfer.NewStorage(es) + ctx.TZIP = tzip.NewStorage(es) + } 
} } diff --git a/internal/contractparser/meta/data.go b/internal/contractparser/meta/data.go index 0e41d00a1..fb2299273 100644 --- a/internal/contractparser/meta/data.go +++ b/internal/contractparser/meta/data.go @@ -7,9 +7,8 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/node" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/pkg/errors" "github.com/tidwall/gjson" ) @@ -373,13 +372,13 @@ func getNodeType(n internalNode, metadata Metadata) (string, []string) { } // GetContractMetadata - -func GetContractMetadata(es elastic.IGeneral, address string) (*ContractMetadata, error) { +func GetContractMetadata(schemaRepo schema.Repository, address string) (*ContractMetadata, error) { if address == "" { return nil, errors.Errorf("[GetContractMetadata] Empty address") } - data := models.Metadata{ID: address} - if err := es.GetByID(&data); err != nil { + data, err := schemaRepo.Get(address) + if err != nil { return nil, err } @@ -387,7 +386,7 @@ func GetContractMetadata(es elastic.IGeneral, address string) (*ContractMetadata } // GetContractMetadataFromModel - -func GetContractMetadataFromModel(metadata models.Metadata) (*ContractMetadata, error) { +func GetContractMetadataFromModel(metadata schema.Schema) (*ContractMetadata, error) { contractMetadata := ContractMetadata{ Parameter: map[string]Metadata{}, Storage: map[string]Metadata{}, @@ -412,13 +411,13 @@ func GetContractMetadataFromModel(metadata models.Metadata) (*ContractMetadata, } // GetMetadata - -func GetMetadata(es elastic.IElastic, address, part, protocol string) (Metadata, error) { +func GetMetadata(schemaRepo schema.Repository, address, part, protocol string) (Metadata, error) { if address == "" { return 
nil, errors.Errorf("[GetMetadata] Empty address") } - data := models.Metadata{ID: address} - if err := es.GetByID(&data); err != nil { + data, err := schemaRepo.Get(address) + if err != nil { return nil, err } diff --git a/internal/contractparser/meta/data_test.go b/internal/contractparser/meta/data_test.go index d4f6a67eb..11b12b271 100644 --- a/internal/contractparser/meta/data_test.go +++ b/internal/contractparser/meta/data_test.go @@ -7,7 +7,7 @@ import ( "log" "testing" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/tidwall/gjson" ) @@ -95,7 +95,7 @@ func TestContractMetadata_IsUpgradable(t *testing.T) { } symLink := "test" - metadata := models.Metadata{ + metadata := schema.Schema{ Parameter: map[string]string{ symLink: string(paramFile), }, diff --git a/internal/contractparser/newmiguel/enum.go b/internal/contractparser/newmiguel/enum.go index 0d460b1dd..019e49f39 100644 --- a/internal/contractparser/newmiguel/enum.go +++ b/internal/contractparser/newmiguel/enum.go @@ -47,7 +47,7 @@ func (l *enumDecoder) Decode(data gjson.Result, path string, nm *meta.NodeMetada case tail == "": node.Value = consts.UNIT default: - bin := strings.Replace(tail, "/", "", -1) + bin := strings.ReplaceAll(tail, "/", "") i, err := strconv.ParseInt(bin, 2, 64) if err != nil { return nil, err diff --git a/internal/contractparser/parser.go b/internal/contractparser/parser.go index 822b8d8e5..5b0d3df75 100644 --- a/internal/contractparser/parser.go +++ b/internal/contractparser/parser.go @@ -5,7 +5,7 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/node" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" "github.com/baking-bad/bcdhub/internal/noderpc" 
"github.com/pkg/errors" "github.com/tidwall/gjson" @@ -53,9 +53,9 @@ func (p *parser) parse(v gjson.Result) error { } // MakeStorageParser - -func MakeStorageParser(rpc noderpc.INode, es elastic.IBigMapDiff, protocol string, isSimulating bool) (storage.Parser, error) { +func MakeStorageParser(rpc noderpc.INode, repo bigmapdiff.Repository, protocol string, isSimulating bool) (storage.Parser, error) { if isSimulating { - return storage.NewSimulate(rpc, es), nil + return storage.NewSimulate(rpc, repo), nil } protoSymLink, err := meta.GetProtoSymLink(protocol) @@ -65,7 +65,7 @@ func MakeStorageParser(rpc noderpc.INode, es elastic.IBigMapDiff, protocol strin switch protoSymLink { case consts.MetadataBabylon: - return storage.NewBabylon(rpc, es), nil + return storage.NewBabylon(rpc, repo), nil case consts.MetadataAlpha: return storage.NewAlpha(), nil default: diff --git a/internal/contractparser/storage/alpha.go b/internal/contractparser/storage/alpha.go index 12c8b3f9f..968c4a84a 100644 --- a/internal/contractparser/storage/alpha.go +++ b/internal/contractparser/storage/alpha.go @@ -8,9 +8,10 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" "github.com/baking-bad/bcdhub/internal/contractparser/storage/hash" "github.com/baking-bad/bcdhub/internal/contractparser/stringer" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" "github.com/tidwall/sjson" ) @@ -24,7 +25,7 @@ func NewAlpha() *Alpha { } // ParseTransaction - -func (a *Alpha) ParseTransaction(content gjson.Result, _ meta.Metadata, operation models.Operation) (RichStorage, error) { +func (a *Alpha) ParseTransaction(content gjson.Result, _ meta.Metadata, 
operation operation.Operation) (RichStorage, error) { address := content.Get("destination").String() result, err := getResult(content) @@ -39,7 +40,7 @@ func (a *Alpha) ParseTransaction(content gjson.Result, _ meta.Metadata, operatio } // ParseOrigination - -func (a *Alpha) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) { +func (a *Alpha) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) { result, err := getResult(content) if err != nil { return RichStorage{Empty: true}, err @@ -47,17 +48,17 @@ func (a *Alpha) ParseOrigination(content gjson.Result, metadata meta.Metadata, o address := result.Get("originated_contracts.0").String() storage := content.Get("script.storage") - var bmd []elastic.Model + var bmd []models.Model if bmMeta, ok := metadata["0/0"]; ok && bmMeta.Type == consts.BIGMAP { bigMapData := storage.Get("args.0") - bmd = make([]elastic.Model, 0) + bmd = make([]models.Model, 0) for _, item := range bigMapData.Array() { keyHash, err := hash.Key(item.Get("args.0")) if err != nil { return RichStorage{Empty: true}, err } - bmd = append(bmd, &models.BigMapDiff{ + bmd = append(bmd, &bigmapdiff.BigMapDiff{ ID: helpers.GenerateID(), BinPath: "0/0", Key: item.Get("args.0").Value(), @@ -89,7 +90,7 @@ func (a *Alpha) ParseOrigination(content gjson.Result, metadata meta.Metadata, o } // Enrich - -func (a *Alpha) Enrich(storage, sPrevStorage string, bmd []models.BigMapDiff, skipEmpty, unpack bool) (gjson.Result, error) { +func (a *Alpha) Enrich(storage, sPrevStorage string, bmd []bigmapdiff.BigMapDiff, skipEmpty, unpack bool) (gjson.Result, error) { if len(bmd) == 0 { return gjson.Parse(storage), nil } @@ -134,10 +135,10 @@ func (a *Alpha) Enrich(storage, sPrevStorage string, bmd []models.BigMapDiff, sk return gjson.Parse(value), nil } -func (a *Alpha) getBigMapDiff(result gjson.Result, address string, operation models.Operation) 
[]elastic.Model { - bmd := make([]elastic.Model, 0) +func (a *Alpha) getBigMapDiff(result gjson.Result, address string, operation operation.Operation) []models.Model { + bmd := make([]models.Model, 0) for _, item := range result.Get("big_map_diff").Array() { - bmd = append(bmd, &models.BigMapDiff{ + bmd = append(bmd, &bigmapdiff.BigMapDiff{ ID: helpers.GenerateID(), BinPath: "0/0", Key: item.Get("key").Value(), diff --git a/internal/contractparser/storage/babylon.go b/internal/contractparser/storage/babylon.go index ca1da93fd..f9bc69892 100644 --- a/internal/contractparser/storage/babylon.go +++ b/internal/contractparser/storage/babylon.go @@ -8,9 +8,11 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/newmiguel" "github.com/baking-bad/bcdhub/internal/contractparser/stringer" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" "github.com/tidwall/gjson" @@ -35,32 +37,32 @@ func (tpd *temporaryPointerData) updateSourcePointer(sourcePtr int64) { // Babylon - type Babylon struct { - rpc noderpc.INode - es elastic.IBigMapDiff + rpc noderpc.INode + repo bigmapdiff.Repository - updates map[int64][]elastic.Model + updates map[int64][]models.Model temporaryPointers map[int64]*temporaryPointerData } // NewBabylon - -func NewBabylon(rpc noderpc.INode, es elastic.IBigMapDiff) *Babylon { +func NewBabylon(rpc noderpc.INode, repo bigmapdiff.Repository) *Babylon { return &Babylon{ - rpc: rpc, - es: es, + rpc: rpc, + repo: repo, - updates: make(map[int64][]elastic.Model), 
+ updates: make(map[int64][]models.Model), temporaryPointers: make(map[int64]*temporaryPointerData), } } // ParseTransaction - -func (b *Babylon) ParseTransaction(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) { +func (b *Babylon) ParseTransaction(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) { address := content.Get("destination").String() result, err := getResult(content) if err != nil { return RichStorage{Empty: true}, err } - var modelUpdates []elastic.Model + var modelUpdates []models.Model if result.Get("big_map_diff.#").Int() > 0 { ptrMap, err := FindBigMapPointers(metadata, result.Get("storage")) if err != nil { @@ -78,7 +80,7 @@ func (b *Babylon) ParseTransaction(content gjson.Result, metadata meta.Metadata, } // ParseOrigination - -func (b *Babylon) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) { +func (b *Babylon) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) { result, err := getResult(content) if err != nil { return RichStorage{Empty: true}, err @@ -87,7 +89,7 @@ func (b *Babylon) ParseOrigination(content gjson.Result, metadata meta.Metadata, address := result.Get("originated_contracts.0").String() storage := content.Get("script.storage") - var bm []elastic.Model + var bm []models.Model if result.Get("big_map_diff.#").Int() > 0 { ptrToBin, err := FindBigMapPointers(metadata, storage) if err != nil || len(ptrToBin) == 0 { @@ -115,7 +117,7 @@ func (b *Babylon) ParseOrigination(content gjson.Result, metadata meta.Metadata, } // Enrich - -func (b *Babylon) Enrich(sStorage, sPrevStorage string, bmd []models.BigMapDiff, skipEmpty, unpack bool) (gjson.Result, error) { +func (b *Babylon) Enrich(sStorage, sPrevStorage string, bmd []bigmapdiff.BigMapDiff, skipEmpty, unpack bool) (gjson.Result, error) { if len(bmd) == 0 { return 
gjson.Parse(sStorage), nil } @@ -193,10 +195,10 @@ func (b *Babylon) Enrich(sStorage, sPrevStorage string, bmd []models.BigMapDiff, return storage, nil } -func (b *Babylon) handleBigMapDiff(result gjson.Result, ptrMap map[int64]string, address string, operation models.Operation) ([]elastic.Model, error) { - storageModels := make([]elastic.Model, 0) +func (b *Babylon) handleBigMapDiff(result gjson.Result, ptrMap map[int64]string, address string, op operation.Operation) ([]models.Model, error) { + storageModels := make([]models.Model, 0) - handlers := map[string]func(gjson.Result, map[int64]string, string, models.Operation) ([]elastic.Model, error){ + handlers := map[string]func(gjson.Result, map[int64]string, string, operation.Operation) ([]models.Model, error){ "update": b.handleBigMapDiffUpdate, "copy": b.handleBigMapDiffCopy, "remove": b.handleBigMapDiffRemove, @@ -209,7 +211,7 @@ func (b *Babylon) handleBigMapDiff(result gjson.Result, ptrMap map[int64]string, if !ok { continue } - data, err := handler(item, ptrMap, address, operation) + data, err := handler(item, ptrMap, address, op) if err != nil { return nil, err } @@ -220,10 +222,10 @@ func (b *Babylon) handleBigMapDiff(result gjson.Result, ptrMap map[int64]string, return storageModels, nil } -func (b *Babylon) handleBigMapDiffUpdate(item gjson.Result, ptrMap map[int64]string, address string, operation models.Operation) ([]elastic.Model, error) { +func (b *Babylon) handleBigMapDiffUpdate(item gjson.Result, ptrMap map[int64]string, address string, operation operation.Operation) ([]models.Model, error) { ptr := item.Get("big_map").Int() - bmd := &models.BigMapDiff{ + bmd := &bigmapdiff.BigMapDiff{ ID: helpers.GenerateID(), Ptr: ptr, Key: item.Get("key").Value(), @@ -257,16 +259,16 @@ func (b *Babylon) handleBigMapDiffUpdate(item gjson.Result, ptrMap map[int64]str b.addToUpdates(bmd, ptr) if ptr > -1 { - return []elastic.Model{bmd}, nil + return []models.Model{bmd}, nil } return nil, nil } -func (b *Babylon) 
handleBigMapDiffCopy(item gjson.Result, ptrMap map[int64]string, address string, operation models.Operation) ([]elastic.Model, error) { +func (b *Babylon) handleBigMapDiffCopy(item gjson.Result, ptrMap map[int64]string, address string, operation operation.Operation) ([]models.Model, error) { sourcePtr := item.Get("source_big_map").Int() destinationPtr := item.Get("destination_big_map").Int() - newUpdates := make([]elastic.Model, 0) + newUpdates := make([]models.Model, 0) if destinationPtr > -1 { var srcPtr int64 @@ -314,17 +316,17 @@ func (b *Babylon) handleBigMapDiffCopy(item gjson.Result, ptrMap map[int64]strin return newUpdates, nil } -func (b *Babylon) handleBigMapDiffRemove(item gjson.Result, _ map[int64]string, address string, operation models.Operation) ([]elastic.Model, error) { +func (b *Babylon) handleBigMapDiffRemove(item gjson.Result, _ map[int64]string, address string, operation operation.Operation) ([]models.Model, error) { ptr := item.Get("big_map").Int() if ptr < 0 { delete(b.updates, ptr) return nil, nil } - bmd, err := b.es.GetBigMapDiffsByPtr(address, operation.Network, ptr) + bmd, err := b.repo.GetByPtr(address, operation.Network, ptr) if err != nil { return nil, err } - newUpdates := make([]elastic.Model, len(bmd)) + newUpdates := make([]models.Model, len(bmd)) for i := range bmd { bmd[i].ID = helpers.GenerateID() bmd[i].OperationID = operation.ID @@ -340,14 +342,14 @@ func (b *Babylon) handleBigMapDiffRemove(item gjson.Result, _ map[int64]string, return newUpdates, nil } -func (b *Babylon) handleBigMapDiffAlloc(item gjson.Result, _ map[int64]string, address string, operation models.Operation) ([]elastic.Model, error) { +func (b *Babylon) handleBigMapDiffAlloc(item gjson.Result, _ map[int64]string, address string, operation operation.Operation) ([]models.Model, error) { ptr := item.Get("big_map").Int() - b.updates[ptr] = []elastic.Model{} + b.updates[ptr] = []models.Model{} b.temporaryPointers[ptr] = &temporaryPointerData{ sourcePtr: 
defaultPointer, } - var models []elastic.Model + var models []models.Model if ptr > -1 { models = append( models, @@ -358,22 +360,22 @@ func (b *Babylon) handleBigMapDiffAlloc(item gjson.Result, _ map[int64]string, a return models, nil } -func (b *Babylon) getDiffsFromUpdates(ptr int64) ([]models.BigMapDiff, error) { +func (b *Babylon) getDiffsFromUpdates(ptr int64) ([]bigmapdiff.BigMapDiff, error) { updates, ok := b.updates[ptr] if !ok { return nil, errors.Errorf("[handleBigMapDiffCopy] Unknown temporary pointer: %d %v", ptr, b.updates) } - bmd := make([]models.BigMapDiff, 0) + bmd := make([]bigmapdiff.BigMapDiff, 0) for i := range updates { - if item, ok := updates[i].(*models.BigMapDiff); ok { + if item, ok := updates[i].(*bigmapdiff.BigMapDiff); ok { bmd = append(bmd, *item) } } return bmd, nil } -func (b *Babylon) createBigMapDiffAction(action, address string, srcPtr, dstPtr *int64, operation models.Operation) *models.BigMapAction { - entity := &models.BigMapAction{ +func (b *Babylon) createBigMapDiffAction(action, address string, srcPtr, dstPtr *int64, operation operation.Operation) *bigmapaction.BigMapAction { + entity := &bigmapaction.BigMapAction{ ID: helpers.GenerateID(), Action: action, OperationID: operation.ID, @@ -395,9 +397,9 @@ func (b *Babylon) createBigMapDiffAction(action, address string, srcPtr, dstPtr return entity } -func (b *Babylon) addToUpdates(newModel elastic.Model, ptr int64) { +func (b *Babylon) addToUpdates(newModel models.Model, ptr int64) { if _, ok := b.updates[ptr]; !ok { - b.updates[ptr] = []elastic.Model{newModel} + b.updates[ptr] = []models.Model{newModel} } else { b.updates[ptr] = append(b.updates[ptr], newModel) } @@ -433,9 +435,9 @@ func (b *Babylon) updateTemporaryPointers(src, dst int64, ptrMap map[int64]strin return nil } -func (b *Babylon) getCopyBigMapDiff(src int64, address, network string) (bmd []models.BigMapDiff, err error) { +func (b *Babylon) getCopyBigMapDiff(src int64, address, network string) (bmd 
[]bigmapdiff.BigMapDiff, err error) { if src > -1 { - bmd, err = b.es.GetBigMapDiffsByPtr(address, network, src) + bmd, err = b.repo.GetByPtr(address, network, src) if err != nil { return nil, err } diff --git a/internal/contractparser/storage/rich_storage.go b/internal/contractparser/storage/rich_storage.go index e62b0ea51..fecceda01 100644 --- a/internal/contractparser/storage/rich_storage.go +++ b/internal/contractparser/storage/rich_storage.go @@ -2,21 +2,22 @@ package storage import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) // RichStorage - type RichStorage struct { DeffatedStorage string - Models []elastic.Model + Models []models.Model Empty bool } // Parser - type Parser interface { - ParseTransaction(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) - ParseOrigination(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) - Enrich(string, string, []models.BigMapDiff, bool, bool) (gjson.Result, error) + ParseTransaction(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) + ParseOrigination(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) + Enrich(string, string, []bigmapdiff.BigMapDiff, bool, bool) (gjson.Result, error) } diff --git a/internal/contractparser/storage/simulate.go b/internal/contractparser/storage/simulate.go index 40251b278..065425ed6 100644 --- a/internal/contractparser/storage/simulate.go +++ b/internal/contractparser/storage/simulate.go @@ -2,8 +2,9 @@ package storage import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" - 
"github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/tidwall/gjson" ) @@ -14,16 +15,16 @@ type Simulate struct { } // NewSimulate - -func NewSimulate(rpc noderpc.INode, es elastic.IBigMapDiff) *Simulate { +func NewSimulate(rpc noderpc.INode, repo bigmapdiff.Repository) *Simulate { return &Simulate{ - Babylon: NewBabylon(rpc, es), + Babylon: NewBabylon(rpc, repo), } } // ParseTransaction - -func (b *Simulate) ParseTransaction(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) { +func (b *Simulate) ParseTransaction(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) { storage := content.Get("storage") - var bm []elastic.Model + var bm []models.Model if content.Get("big_map_diff.#").Int() > 0 { ptrMap, err := FindBigMapPointers(metadata, storage) if err != nil { @@ -41,10 +42,10 @@ func (b *Simulate) ParseTransaction(content gjson.Result, metadata meta.Metadata } // ParseOrigination - -func (b *Simulate) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation models.Operation) (RichStorage, error) { +func (b *Simulate) ParseOrigination(content gjson.Result, metadata meta.Metadata, operation operation.Operation) (RichStorage, error) { storage := operation.Script.Get("storage") - var bm []elastic.Model + var bm []models.Model if content.Get("big_map_diff.#").Int() > 0 { ptrMap, err := FindBigMapPointers(metadata, storage) if err != nil { diff --git a/internal/elastic/balance_update.go b/internal/elastic/balance_update.go deleted file mode 100644 index fb8c2b419..000000000 --- a/internal/elastic/balance_update.go +++ /dev/null @@ -1,29 +0,0 @@ -package elastic - -type getBalanceResponse struct { - Agg 
struct { - Balance floatValue `json:"balance"` - } `json:"aggregations"` -} - -// GetBalance - -func (e *Elastic) GetBalance(network, address string) (int64, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("contract", address), - ), - ), - ).Add( - aggs( - aggItem{"balance", sum("change")}, - ), - ).Zero() - - var response getBalanceResponse - if err := e.query([]string{DocBalanceUpdates}, query, &response); err != nil { - return 0, err - } - return int64(response.Agg.Balance.Value), nil -} diff --git a/internal/elastic/balanceupdate/storage.go b/internal/elastic/balanceupdate/storage.go new file mode 100644 index 000000000..17cb1bf1f --- /dev/null +++ b/internal/elastic/balanceupdate/storage.go @@ -0,0 +1,47 @@ +package balanceupdate + +import ( + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +type getBalanceResponse struct { + Agg struct { + Balance core.FloatValue `json:"balance"` + } `json:"aggregations"` +} + +// GetBalance - +func (storage *Storage) GetBalance(network, address string) (int64, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("contract", address), + ), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: "balance", + Body: core.Sum("change"), + }, + ), + ).Zero() + + var response getBalanceResponse + if err := storage.es.Query([]string{models.DocBalanceUpdates}, query, &response); err != nil { + return 0, err + } + return int64(response.Agg.Balance.Value), nil +} diff --git a/internal/elastic/big_map.go b/internal/elastic/big_map.go deleted file mode 100644 index c284dce35..000000000 --- a/internal/elastic/big_map.go +++ /dev/null @@ -1,480 +0,0 @@ -package elastic - -import ( - "fmt" - - 
"github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -type getBigMapDiffsWithKeysResponse struct { - Agg struct { - Keys struct { - Buckets []struct { - DocCount int64 `json:"doc_count"` - TopKey struct { - Hits HitsArray `json:"hits"` - } `json:"top_key"` - } `json:"buckets"` - } `json:"keys"` - } `json:"aggregations"` -} - -// GetBigMapDiffsUniqueByOperationID - -func (e *Elastic) GetBigMapDiffsUniqueByOperationID(operationID string) ([]models.BigMapDiff, error) { - query := newQuery(). - Query( - boolQ( - filter( - matchPhrase("operation_id", operationID), - ), - ), - ). - Add( - aggs( - aggItem{ - "keys", - composite( - maxQuerySize, - aggItem{ - "ptr", termsAgg("ptr", 0), - }, - aggItem{ - "key_hash", termsAgg("key_hash.keyword", 0), - }, - ).Extend( - aggs( - aggItem{ - "top_key", topHits(1, "indexed_time", "desc"), - }, - ), - ), - }, - ), - ).Zero() - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } - arr := response.Agg.Keys.Buckets - diffs := make([]models.BigMapDiff, len(arr)) - for i := range arr { - if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &diffs[i]); err != nil { - return nil, err - } - diffs[i].ID = arr[i].TopKey.Hits.Hits[0].ID - } - return diffs, nil -} - -// GetBigMapDiffsPrevious - -func (e *Elastic) GetBigMapDiffsPrevious(filters []models.BigMapDiff, indexedTime int64, address string) ([]models.BigMapDiff, error) { - shouldData := make([]qItem, len(filters)) - for i := range filters { - shouldData[i] = boolQ(filter( - matchPhrase("key_hash", filters[i].KeyHash), - matchPhrase("bin_path", filters[i].BinPath), - )) - } - b := boolQ( - should(shouldData...), - filter( - matchPhrase("address", address), - rangeQ("indexed_time", qItem{"lt": indexedTime}), - ), - minimumShouldMatch(1), - ) - - query := newQuery().Query(b). 
- Add( - aggs( - aggItem{ - "keys", qItem{ - "terms": qItem{ - "field": "key_hash.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "top_key": topHits(1, "indexed_time", "desc"), - }, - }, - }, - ), - ). - Sort("indexed_time", "desc").Zero() - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } - - arr := response.Agg.Keys.Buckets - diffs := make([]models.BigMapDiff, 0) - for i := range arr { - var b models.BigMapDiff - if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &b); err != nil { - return nil, err - } - if b.Value != "" { - b.ID = arr[i].TopKey.Hits.Hits[0].ID - diffs = append(diffs, b) - } - } - return diffs, nil -} - -// GetBigMapDiffsForAddress - -func (e *Elastic) GetBigMapDiffsForAddress(address string) ([]models.BigMapDiff, error) { - query := newQuery().Query( - boolQ( - must( - matchPhrase("address", address), - ), - ), - ).Add( - aggs( - aggItem{ - "keys", qItem{ - "terms": qItem{ - "field": "key_hash.keyword", - "size": maxQuerySize, // TODO: arbitrary number of keys - }, - "aggs": qItem{ - "top_key": topHits(1, "indexed_time", "desc"), - }, - }, - }, - ), - ).Zero() - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } - arr := response.Agg.Keys.Buckets - diffs := make([]models.BigMapDiff, len(arr)) - for i := range arr { - if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &diffs[i]); err != nil { - return nil, err - } - diffs[i].ID = arr[i].TopKey.Hits.Hits[0].ID - } - return diffs, nil -} - -// GetBigMapKeysContext - -type GetBigMapKeysContext struct { - Network string - Ptr *int64 - Query string - Size int64 - Offset int64 - Level *int64 - - to int64 -} - -func (ctx *GetBigMapKeysContext) build() base { - filters := make([]qItem, 0) - - if ctx.Ptr != nil { - filters = append(filters, term("ptr", *ctx.Ptr)) - } - if ctx.Network != "" { 
- filters = append(filters, matchQ("network", ctx.Network)) - } - - if ctx.Query != "" { - filters = append(filters, queryString(fmt.Sprintf("*%s*", ctx.Query), []string{"key", "key_hash", "key_strings", "bin_path", "value", "value_strings"})) - } - - if ctx.Size == 0 { - ctx.Size = defaultSize - } - - if ctx.Level != nil { - filters = append(filters, NewLessThanEqRange(*ctx.Level).build()) - } - - ctx.to = ctx.Size + ctx.Offset - b := boolQ( - must(filters...), - ) - return newQuery().Query(b).Add( - aggs(aggItem{ - "keys", qItem{ - "terms": qItem{ - "field": "key_hash.keyword", - "size": ctx.to, - "order": qItem{ - "bucketsSort": "desc", - }, - }, - "aggs": qItem{ - "top_key": topHits(1, "indexed_time", "desc"), - "bucketsSort": max("indexed_time"), - }, - }, - }), - ).Sort("indexed_time", "desc").Zero() -} - -// GetBigMapKeys - -func (e *Elastic) GetBigMapKeys(ctx GetBigMapKeysContext) ([]BigMapDiff, error) { - if *ctx.Ptr < 0 { - return nil, errors.Errorf("Invalid pointer value: %d", *ctx.Ptr) - } - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, ctx.build(), &response); err != nil { - return nil, err - } - - arr := response.Agg.Keys.Buckets - if int64(len(arr)) < ctx.Offset { - return nil, nil - } - - if int64(len(arr)) < ctx.to { - ctx.to = int64(len(arr)) - } - - arr = arr[ctx.Offset:ctx.to] - result := make([]BigMapDiff, len(arr)) - for i := range arr { - var b models.BigMapDiff - if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &b); err != nil { - return nil, err - } - b.ID = arr[i].TopKey.Hits.Hits[0].ID - result[i].FromModel(&b) - result[i].Count = arr[i].DocCount - } - return result, nil -} - -// GetBigMapDiffsByPtrAndKeyHash - -func (e *Elastic) GetBigMapDiffsByPtrAndKeyHash(ptr int64, network, keyHash string, size, offset int64) ([]BigMapDiff, int64, error) { - if ptr < 0 { - return nil, 0, errors.Errorf("Invalid pointer value: %d", ptr) - } - mustQuery := must( - matchPhrase("network", network), - 
matchPhrase("key_hash", keyHash), - term("ptr", ptr), - ) - b := boolQ(mustQuery) - - if size == 0 { - size = defaultSize - } - - query := newQuery().Query(b).Sort("level", "desc").Size(size).From(offset) - - var response SearchResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, 0, err - } - - result := make([]BigMapDiff, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - var b models.BigMapDiff - if err := json.Unmarshal(response.Hits.Hits[i].Source, &b); err != nil { - return nil, 0, err - } - b.ID = response.Hits.Hits[i].ID - result[i].FromModel(&b) - } - - return result, response.Hits.Total.Value, nil -} - -// GetBigMapDiffsByOperationID - -func (e *Elastic) GetBigMapDiffsByOperationID(operationID string) ([]*models.BigMapDiff, error) { - query := newQuery(). - Query( - boolQ( - must( - matchPhrase("operation_id", operationID), - ), - ), - ).All() - - var response SearchResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } - result := make([]*models.BigMapDiff, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &result[i]); err != nil { - return nil, err - } - result[i].ID = response.Hits.Hits[i].ID - } - return result, nil -} - -// GetBigMapDiffsByPtr - -func (e *Elastic) GetBigMapDiffsByPtr(address, network string, ptr int64) ([]models.BigMapDiff, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("address", address), - term("ptr", ptr), - ), - ), - ).Add( - aggs(aggItem{ - "keys", qItem{ - "terms": qItem{ - "field": "key_hash.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "top_key": topHits(1, "indexed_time", "desc"), - }, - }, - }), - ).Sort("indexed_time", "desc").Zero() - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } 
- bmd := make([]models.BigMapDiff, len(response.Agg.Keys.Buckets)) - for i := range response.Agg.Keys.Buckets { - if err := json.Unmarshal(response.Agg.Keys.Buckets[i].TopKey.Hits.Hits[0].Source, &bmd[i]); err != nil { - return nil, err - } - } - return bmd, nil -} - -// GetBigMapsForAddress - -func (e *Elastic) GetBigMapsForAddress(network, address string) (response []models.BigMapDiff, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("address", address), - ), - ), - ).Sort("indexed_time", "desc") - - err = e.getAllByQuery(query, &response) - return -} - -// GetBigMapHistory - -func (e *Elastic) GetBigMapHistory(ptr int64, network string) (response []models.BigMapAction, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - should( - term("source_ptr", ptr), - term("destination_ptr", ptr), - ), - minimumShouldMatch(1), - ), - ).Sort("indexed_time", "desc") - - err = e.getAllByQuery(query, &response) - return -} - -// GetBigMapKey - -func (e *Elastic) GetBigMapKey(network, keyHash string, ptr int64) (data BigMapDiff, err error) { - if ptr < 0 { - err = errors.Errorf("Invalid pointer value: %d", ptr) - return - } - mustQuery := must( - matchPhrase("network", network), - matchPhrase("key_hash", keyHash), - term("ptr", ptr), - ) - b := boolQ(mustQuery) - - query := newQuery().Query(b).Sort("level", "desc").One() - - var response SearchResponse - if err = e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return - } - - if response.Hits.Total.Value == 0 { - return data, NewRecordNotFoundError(DocBigMapDiff, "", query) - } - err = json.Unmarshal(response.Hits.Hits[0].Source, &data) - return -} - -// GetBigMapValuesByKey - -func (e *Elastic) GetBigMapValuesByKey(keyHash string) ([]BigMapDiff, error) { - mustQuery := must( - matchPhrase("key_hash", keyHash), - ) - b := boolQ(mustQuery) - - query := newQuery().Query(b).Add( - aggs( - aggItem{ - "keys", qItem{ 
- "terms": qItem{ - "script": qItem{ - "source": "doc['network.keyword'].value + doc['address.keyword'].value + String.format('%d', new def[] {doc['ptr'].value})", - }, - }, - "aggs": qItem{ - "top_key": topHits(1, "indexed_time", "desc"), - }, - }, - }, - ), - ).Zero() - - var response getBigMapDiffsWithKeysResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return nil, err - } - - bmd := make([]BigMapDiff, len(response.Agg.Keys.Buckets)) - for i, item := range response.Agg.Keys.Buckets { - if err := json.Unmarshal(item.TopKey.Hits.Hits[0].Source, &bmd[i]); err != nil { - return nil, err - } - } - return bmd, nil -} - -type getBigMapDiffsCountResponse struct { - Agg struct { - Count intValue `json:"count"` - } `json:"aggregations"` -} - -// GetBigMapDiffsCount - -func (e *Elastic) GetBigMapDiffsCount(network string, ptr int64) (int64, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - term("ptr", ptr), - ), - ), - ).Add( - aggs(aggItem{ - "count", cardinality("key_hash.keyword"), - }), - ).Zero() - - var response getBigMapDiffsCountResponse - if err := e.query([]string{DocBigMapDiff}, query, &response); err != nil { - return 0, err - } - return response.Agg.Count.Value, nil -} diff --git a/internal/elastic/bigmapaction/storage.go b/internal/elastic/bigmapaction/storage.go new file mode 100644 index 000000000..69ed24ceb --- /dev/null +++ b/internal/elastic/bigmapaction/storage.go @@ -0,0 +1,35 @@ +package bigmapaction + +import ( + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// Get - +func (storage *Storage) Get(ptr int64, network string) (response []bigmapaction.BigMapAction, err error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + 
core.Match("network", network), + ), + core.Should( + core.Term("source_ptr", ptr), + core.Term("destination_ptr", ptr), + ), + core.MinimumShouldMatch(1), + ), + ).Sort("indexed_time", "desc") + + err = storage.es.GetAllByQuery(query, &response) + return +} diff --git a/internal/elastic/bigmapdiff/data.go b/internal/elastic/bigmapdiff/data.go new file mode 100644 index 000000000..74f53868f --- /dev/null +++ b/internal/elastic/bigmapdiff/data.go @@ -0,0 +1,74 @@ +package bigmapdiff + +import ( + "fmt" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" +) + +type getBigMapDiffsWithKeysResponse struct { + Agg struct { + Keys struct { + Buckets []struct { + DocCount int64 `json:"doc_count"` + TopKey struct { + Hits core.HitsArray `json:"hits"` + } `json:"top_key"` + } `json:"buckets"` + } `json:"keys"` + } `json:"aggregations"` +} + +type getBigMapDiffsCountResponse struct { + Agg struct { + Count core.IntValue `json:"count"` + } `json:"aggregations"` +} + +func buildGetContext(ctx *bigmapdiff.GetContext) core.Base { + filters := make([]core.Item, 0) + + if ctx.Ptr != nil { + filters = append(filters, core.Term("ptr", *ctx.Ptr)) + } + if ctx.Network != "" { + filters = append(filters, core.Match("network", ctx.Network)) + } + + if ctx.Query != "" { + filters = append(filters, core.QueryString(fmt.Sprintf("*%s*", ctx.Query), []string{"key", "key_hash", "key_strings", "bin_path", "value", "value_strings"})) + } + + if ctx.Size == 0 { + ctx.Size = consts.DefaultSize + } + + if ctx.Level != nil { + filters = append(filters, core.BuildComparator(core.NewLessThanEqRange(*ctx.Level))) + } + + ctx.To = ctx.Size + ctx.Offset + b := core.Bool( + core.Must(filters...), + ) + return core.NewQuery().Query(b).Add( + core.Aggs(core.AggItem{ + Name: "keys", + Body: core.Item{ + "terms": core.Item{ + "field": "key_hash.keyword", + "size": 
ctx.To, + "order": core.Item{ + "bucketsSort": "desc", + }, + }, + "aggs": core.Item{ + "top_key": core.TopHits(1, "indexed_time", "desc"), + "bucketsSort": core.Max("indexed_time"), + }, + }, + }), + ).Sort("indexed_time", "desc").Zero() +} diff --git a/internal/elastic/bigmapdiff/storage.go b/internal/elastic/bigmapdiff/storage.go new file mode 100644 index 000000000..d3afe8ae9 --- /dev/null +++ b/internal/elastic/bigmapdiff/storage.go @@ -0,0 +1,403 @@ +package bigmapdiff + +import ( + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// CurrentByKey - +func (storage *Storage) CurrentByKey(network, keyHash string, ptr int64) (data bigmapdiff.BigMapDiff, err error) { + if ptr < 0 { + err = errors.Errorf("Invalid pointer value: %d", ptr) + return + } + mustQuery := core.Must( + core.MatchPhrase("network", network), + core.MatchPhrase("key_hash", keyHash), + core.Term("ptr", ptr), + ) + b := core.Bool(mustQuery) + + query := core.NewQuery().Query(b).Sort("level", "desc").One() + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return + } + + if response.Hits.Total.Value == 0 { + return data, core.NewRecordNotFoundError(models.DocBigMapDiff, "") + } + err = json.Unmarshal(response.Hits.Hits[0].Source, &data) + return +} + +// GetForAddress - +func (storage *Storage) GetForAddress(address string) ([]bigmapdiff.BigMapDiff, error) { + query := core.NewQuery().Query( + core.Bool( + core.Must( + core.MatchPhrase("address", address), + ), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: 
"keys", + Body: core.Item{ + "terms": core.Item{ + "field": "key_hash.keyword", + "size": core.MaxQuerySize, // TODO: arbitrary number of keys + }, + "aggs": core.Item{ + "top_key": core.TopHits(1, "indexed_time", "desc"), + }, + }, + }, + ), + ).Zero() + + var response getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + arr := response.Agg.Keys.Buckets + diffs := make([]bigmapdiff.BigMapDiff, len(arr)) + for i := range arr { + if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &diffs[i]); err != nil { + return nil, err + } + diffs[i].ID = arr[i].TopKey.Hits.Hits[0].ID + } + return diffs, nil +} + +// GetByAddress - +func (storage *Storage) GetByAddress(network, address string) (response []bigmapdiff.BigMapDiff, err error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("address", address), + ), + ), + ).Sort("indexed_time", "desc") + + err = storage.es.GetAllByQuery(query, &response) + return +} + +// GetValuesByKey - +func (storage *Storage) GetValuesByKey(keyHash string) ([]bigmapdiff.BigMapDiff, error) { + mustQuery := core.Must( + core.MatchPhrase("key_hash", keyHash), + ) + b := core.Bool(mustQuery) + + query := core.NewQuery().Query(b).Add( + core.Aggs( + core.AggItem{ + Name: "keys", + Body: core.Item{ + "terms": core.Item{ + "script": core.Item{ + "source": "doc['network.keyword'].value + doc['address.keyword'].value + String.format('%d', new def[] {doc['ptr'].value})", + }, + }, + "aggs": core.Item{ + "top_key": core.TopHits(1, "indexed_time", "desc"), + }, + }, + }, + ), + ).Zero() + + var response getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + + bmd := make([]bigmapdiff.BigMapDiff, len(response.Agg.Keys.Buckets)) + for i, item := range response.Agg.Keys.Buckets { + if err := 
json.Unmarshal(item.TopKey.Hits.Hits[0].Source, &bmd[i]); err != nil { + return nil, err + } + } + return bmd, nil +} + +// Count - +func (storage *Storage) Count(network string, ptr int64) (int64, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Term("ptr", ptr), + ), + ), + ).Add( + core.Aggs(core.AggItem{ + Name: "count", + Body: core.Cardinality("key_hash.keyword"), + }), + ).Zero() + + var response getBigMapDiffsCountResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return 0, err + } + return response.Agg.Count.Value, nil +} + +// Previous - +func (storage *Storage) Previous(filters []bigmapdiff.BigMapDiff, indexedTime int64, address string) ([]bigmapdiff.BigMapDiff, error) { + shouldData := make([]core.Item, len(filters)) + for i := range filters { + shouldData[i] = core.Bool(core.Filter( + core.MatchPhrase("key_hash", filters[i].KeyHash), + core.MatchPhrase("bin_path", filters[i].BinPath), + )) + } + b := core.Bool( + core.Should(shouldData...), + core.Filter( + core.MatchPhrase("address", address), + core.Range("indexed_time", core.Item{"lt": indexedTime}), + ), + core.MinimumShouldMatch(1), + ) + + query := core.NewQuery().Query(b). + Add( + core.Aggs( + core.AggItem{ + Name: "keys", + Body: core.Item{ + "terms": core.Item{ + "field": "key_hash.keyword", + "size": core.MaxQuerySize, + }, + "aggs": core.Item{ + "top_key": core.TopHits(1, "indexed_time", "desc"), + }, + }, + }, + ), + ). 
+ Sort("indexed_time", "desc").Zero() + + var response getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + + arr := response.Agg.Keys.Buckets + diffs := make([]bigmapdiff.BigMapDiff, 0) + for i := range arr { + var b bigmapdiff.BigMapDiff + if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &b); err != nil { + return nil, err + } + if b.Value != "" { + b.ID = arr[i].TopKey.Hits.Hits[0].ID + diffs = append(diffs, b) + } + } + return diffs, nil +} + +// GetUniqueByOperationID - +func (storage *Storage) GetUniqueByOperationID(operationID string) ([]bigmapdiff.BigMapDiff, error) { + query := core.NewQuery(). + Query( + core.Bool( + core.Filter( + core.MatchPhrase("operation_id", operationID), + ), + ), + ). + Add( + core.Aggs( + core.AggItem{ + Name: "keys", + Body: core.Composite( + core.MaxQuerySize, + core.AggItem{ + Name: "ptr", + Body: core.TermsAgg("ptr", 0), + }, + core.AggItem{ + Name: "key_hash", + Body: core.TermsAgg("key_hash.keyword", 0), + }, + ).Extend( + core.Aggs( + core.AggItem{ + Name: "top_key", + Body: core.TopHits(1, "indexed_time", "desc"), + }, + ), + ), + }, + ), + ).Zero() + + var response getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + arr := response.Agg.Keys.Buckets + diffs := make([]bigmapdiff.BigMapDiff, len(arr)) + for i := range arr { + if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &diffs[i]); err != nil { + return nil, err + } + diffs[i].ID = arr[i].TopKey.Hits.Hits[0].ID + } + return diffs, nil +} + +// GetByPtrAndKeyHash - +func (storage *Storage) GetByPtrAndKeyHash(ptr int64, network, keyHash string, size, offset int64) ([]bigmapdiff.BigMapDiff, int64, error) { + if ptr < 0 { + return nil, 0, errors.Errorf("Invalid pointer value: %d", ptr) + } + mustQuery := core.Must( + core.MatchPhrase("network", network), + 
core.MatchPhrase("key_hash", keyHash), + core.Term("ptr", ptr), + ) + b := core.Bool(mustQuery) + + if size == 0 { + size = consts.DefaultSize + } + + query := core.NewQuery().Query(b).Sort("level", "desc").Size(size).From(offset) + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, 0, err + } + + result := make([]bigmapdiff.BigMapDiff, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &result[i]); err != nil { + return nil, 0, err + } + result[i].ID = response.Hits.Hits[i].ID + } + + return result, response.Hits.Total.Value, nil +} + +// GetByOperationID - +func (storage *Storage) GetByOperationID(operationID string) ([]*bigmapdiff.BigMapDiff, error) { + query := core.NewQuery(). + Query( + core.Bool( + core.Must( + core.MatchPhrase("operation_id", operationID), + ), + ), + ).All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + result := make([]*bigmapdiff.BigMapDiff, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &result[i]); err != nil { + return nil, err + } + result[i].ID = response.Hits.Hits[i].ID + } + return result, nil +} + +// GetByPtr - +func (storage *Storage) GetByPtr(address, network string, ptr int64) ([]bigmapdiff.BigMapDiff, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("address", address), + core.Term("ptr", ptr), + ), + ), + ).Add( + core.Aggs(core.AggItem{ + Name: "keys", + Body: core.Item{ + "terms": core.Item{ + "field": "key_hash.keyword", + "size": core.MaxQuerySize, + }, + "aggs": core.Item{ + "top_key": core.TopHits(1, "indexed_time", "desc"), + }, + }, + }), + ).Sort("indexed_time", "desc").Zero() + + var response 
getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + bmd := make([]bigmapdiff.BigMapDiff, len(response.Agg.Keys.Buckets)) + for i := range response.Agg.Keys.Buckets { + if err := json.Unmarshal(response.Agg.Keys.Buckets[i].TopKey.Hits.Hits[0].Source, &bmd[i]); err != nil { + return nil, err + } + } + return bmd, nil +} + +// Get - +func (storage *Storage) Get(ctx bigmapdiff.GetContext) ([]bigmapdiff.BigMapDiff, error) { + if *ctx.Ptr < 0 { + return nil, errors.Errorf("Invalid pointer value: %d", *ctx.Ptr) + } + + query := buildGetContext(&ctx) + + var response getBigMapDiffsWithKeysResponse + if err := storage.es.Query([]string{models.DocBigMapDiff}, query, &response); err != nil { + return nil, err + } + + arr := response.Agg.Keys.Buckets + if int64(len(arr)) < ctx.Offset { + return nil, nil + } + + if int64(len(arr)) < ctx.To { + ctx.To = int64(len(arr)) + } + + arr = arr[ctx.Offset:ctx.To] + result := make([]bigmapdiff.BigMapDiff, len(arr)) + for i := range arr { + if err := json.Unmarshal(arr[i].TopKey.Hits.Hits[0].Source, &result[i]); err != nil { + return nil, err + } + result[i].ID = arr[i].TopKey.Hits.Hits[0].ID + } + return result, nil +} diff --git a/internal/elastic/block/data.go b/internal/elastic/block/data.go new file mode 100644 index 000000000..19a5d050d --- /dev/null +++ b/internal/elastic/block/data.go @@ -0,0 +1,15 @@ +package block + +import "github.com/baking-bad/bcdhub/internal/elastic/core" + +type getLastBlocksResponse struct { + Agg struct { + ByNetwork struct { + Buckets []struct { + Last struct { + Hits core.HitsArray `json:"hits"` + } `json:"last"` + } `json:"buckets"` + } `json:"by_network"` + } `json:"aggregations"` +} diff --git a/internal/elastic/block/storage.go b/internal/elastic/block/storage.go new file mode 100644 index 000000000..de563a53a --- /dev/null +++ b/internal/elastic/block/storage.go @@ -0,0 +1,134 @@ +package block + 
+import ( + "encoding/json" + "strings" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/block" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// Get - +func (storage *Storage) Get(network string, level int64) (block block.Block, err error) { + block.Network = network + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Term("level", level), + ), + ), + ).One() + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocBlocks}, query, &response); err != nil { + return + } + + if response.Hits.Total.Value == 0 { + return block, core.NewRecordNotFoundError(models.DocBlocks, "") + } + + err = json.Unmarshal(response.Hits.Hits[0].Source, &block) + return +} + +// Last - returns current indexer state for network +func (storage *Storage) Last(network string) (block block.Block, err error) { + block.Network = network + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + ), + ).Sort("level", "desc").One() + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocBlocks}, query, &response); err != nil { + if strings.Contains(err.Error(), consts.IndexNotFoundError) { + return block, nil + } + return + } + + if response.Hits.Total.Value == 0 { + return block, nil + } + err = json.Unmarshal(response.Hits.Hits[0].Source, &block) + return +} + +// LastByNetworks - return last block for all networks +func (storage *Storage) LastByNetworks() ([]block.Block, error) { + query := core.NewQuery().Add( + core.Aggs( + core.AggItem{ + Name: "by_network", + Body: core.Item{ + "terms": core.Item{ + "field": "network.keyword", + "size": 
core.MaxQuerySize, + }, + "aggs": core.Item{ + "last": core.TopHits(1, "level", "desc"), + }, + }, + }, + ), + ).Zero() + + var response getLastBlocksResponse + if err := storage.es.Query([]string{models.DocBlocks}, query, &response); err != nil { + return nil, err + } + + buckets := response.Agg.ByNetwork.Buckets + blocks := make([]block.Block, len(buckets)) + for i := range buckets { + var block block.Block + if err := json.Unmarshal(buckets[i].Last.Hits.Hits[0].Source, &block); err != nil { + return nil, err + } + blocks[i] = block + } + return blocks, nil +} + +// GetNetworkAlias - +func (storage *Storage) GetNetworkAlias(chainID string) (string, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("chain_id", chainID), + ), + ), + ).One() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocBlocks}, query, &response); err != nil { + return "", err + } + + if response.Hits.Total.Value == 0 { + return "", core.NewRecordNotFoundError(models.DocBlocks, "") + } + + var block block.Block + err := json.Unmarshal(response.Hits.Hits[0].Source, &block) + return block.Network, err +} diff --git a/internal/elastic/blocks.go b/internal/elastic/blocks.go deleted file mode 100644 index c8d44733f..000000000 --- a/internal/elastic/blocks.go +++ /dev/null @@ -1,131 +0,0 @@ -package elastic - -import ( - "strings" - - "github.com/baking-bad/bcdhub/internal/models" -) - -// GetBlock - -func (e *Elastic) GetBlock(network string, level int64) (block models.Block, err error) { - block.Network = network - - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - term("level", level), - ), - ), - ).One() - - var response SearchResponse - if err = e.query([]string{DocBlocks}, query, &response); err != nil { - return - } - - if response.Hits.Total.Value == 0 { - return block, NewRecordNotFoundError(DocBlocks, "", query) - } - - err = json.Unmarshal(response.Hits.Hits[0].Source, &block) - return 
-} - -// GetLastBlock - returns current indexer state for network -func (e *Elastic) GetLastBlock(network string) (block models.Block, err error) { - block.Network = network - - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - ), - ).Sort("level", "desc").One() - - var response SearchResponse - if err = e.query([]string{DocBlocks}, query, &response); err != nil { - if strings.Contains(err.Error(), IndexNotFoundError) { - return block, nil - } - return - } - - if response.Hits.Total.Value == 0 { - return block, nil - } - err = json.Unmarshal(response.Hits.Hits[0].Source, &block) - return -} - -type getLastBlocksResponse struct { - Agg struct { - ByNetwork struct { - Buckets []struct { - Last struct { - Hits HitsArray `json:"hits"` - } `json:"last"` - } `json:"buckets"` - } `json:"by_network"` - } `json:"aggregations"` -} - -// GetLastBlocks - return last block for all networks -func (e *Elastic) GetLastBlocks() ([]models.Block, error) { - query := newQuery().Add( - aggs( - aggItem{ - "by_network", qItem{ - "terms": qItem{ - "field": "network.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "last": topHits(1, "level", "desc"), - }, - }, - }, - ), - ).Zero() - - var response getLastBlocksResponse - if err := e.query([]string{DocBlocks}, query, &response); err != nil { - return nil, err - } - - buckets := response.Agg.ByNetwork.Buckets - blocks := make([]models.Block, len(buckets)) - for i := range buckets { - var block models.Block - if err := json.Unmarshal(buckets[i].Last.Hits.Hits[0].Source, &block); err != nil { - return nil, err - } - blocks[i] = block - } - return blocks, nil -} - -// GetNetworkAlias - -func (e *Elastic) GetNetworkAlias(chainID string) (string, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("chain_id", chainID), - ), - ), - ).One() - - var response SearchResponse - if err := e.query([]string{DocBlocks}, query, &response); err != nil { - return "", err - } - - if 
response.Hits.Total.Value == 0 { - return "", NewRecordNotFoundError(DocBlocks, "", query) - } - - var block models.Block - err := json.Unmarshal(response.Hits.Hits[0].Source, &block) - return block.Network, err -} diff --git a/internal/elastic/bulk.go b/internal/elastic/bulk/storage.go similarity index 62% rename from internal/elastic/bulk.go rename to internal/elastic/bulk/storage.go index e4a1de9b0..4e88d22f1 100644 --- a/internal/elastic/bulk.go +++ b/internal/elastic/bulk/storage.go @@ -1,34 +1,48 @@ -package elastic +package bulk import ( "bytes" "context" + "encoding/json" stdJSON "encoding/json" "fmt" + "strings" + "github.com/baking-bad/bcdhub/internal/elastic/core" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/elastic/go-elasticsearch/v8/esapi" ) -func (e *Elastic) bulk(buf *bytes.Buffer) error { +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +func (storage *Storage) bulk(buf *bytes.Buffer) error { req := esapi.BulkRequest{ Body: bytes.NewReader(buf.Bytes()), Refresh: "true", } - res, err := req.Do(context.Background(), e) + res, err := req.Do(context.Background(), storage.es) if err != nil { return err } defer res.Body.Close() - var response BulkResponse - err = e.getResponse(res, &response) + var response core.BulkResponse + err = storage.es.GetResponse(res, &response) return err } -// BulkInsert - -func (e *Elastic) BulkInsert(items []Model) error { +// Insert - +func (storage *Storage) Insert(items []models.Model) error { if len(items) == 0 { return nil } @@ -56,7 +70,7 @@ func (e *Elastic) BulkInsert(items []Model) error { } if (i%1000 == 0 && i > 0) || i == len(items)-1 { - if err := e.bulk(bulk); err != nil { + if err := storage.bulk(bulk); err != nil { return err } bulk.Reset() @@ -65,8 +79,8 @@ func (e *Elastic) BulkInsert(items []Model) error 
{ return nil } -// BulkUpdate - -func (e *Elastic) BulkUpdate(updates []Model) error { +// Update - +func (storage *Storage) Update(updates []models.Model) error { if len(updates) == 0 { return nil } @@ -78,7 +92,7 @@ func (e *Elastic) BulkUpdate(updates []Model) error { if err := bulk.WriteByte('\n'); err != nil { return err } - data, err := json.Marshal(map[string]Model{ + data, err := json.Marshal(map[string]models.Model{ "doc": updates[i], }) if err != nil { @@ -92,7 +106,7 @@ func (e *Elastic) BulkUpdate(updates []Model) error { } if (i%1000 == 0 && i > 0) || i == len(updates)-1 { - if err := e.bulk(bulk); err != nil { + if err := storage.bulk(bulk); err != nil { return err } bulk.Reset() @@ -101,8 +115,8 @@ func (e *Elastic) BulkUpdate(updates []Model) error { return nil } -// BulkDelete - -func (e *Elastic) BulkDelete(updates []Model) error { +// Delete - +func (storage *Storage) Delete(updates []models.Model) error { if len(updates) == 0 { return nil } @@ -113,7 +127,7 @@ func (e *Elastic) BulkDelete(updates []Model) error { bulk.Write(meta) if (i%1000 == 0 && i > 0) || i == len(updates)-1 { - if err := e.bulk(bulk); err != nil { + if err := storage.bulk(bulk); err != nil { return err } bulk.Reset() @@ -122,19 +136,36 @@ func (e *Elastic) BulkDelete(updates []Model) error { return nil } -// BulkRemoveField - -func (e *Elastic) BulkRemoveField(script string, where []Model) error { +// RemoveField - +func (storage *Storage) RemoveField(field string, where []models.Model) error { if len(where) == 0 { return nil } + var sb strings.Builder + if _, err := sb.WriteString("ctx._source."); err != nil { + return err + } + + idx := strings.LastIndex(field, ".") + if idx > -1 { + if _, err := sb.WriteString(field[:idx]); err != nil { + return err + } + if _, err := sb.WriteString(fmt.Sprintf(`.remove('%s')`, field[idx+1:])); err != nil { + return err + } + } else if _, err := sb.WriteString(fmt.Sprintf(`remove('%s')`, field)); err != nil { + return err + } + bulk := 
bytes.NewBuffer([]byte{}) for i := range where { - meta := fmt.Sprintf(`{ "update": { "_id": "%s", "_index": "%s"}}%s{"script" : "%s"}%s`, where[i].GetID(), where[i].GetIndex(), "\n", script, "\n") + meta := fmt.Sprintf(`{ "update": { "_id": "%s", "_index": "%s"}}%s{"script" : "%s"}%s`, where[i].GetID(), where[i].GetIndex(), "\n", sb.String(), "\n") bulk.Grow(len(meta)) bulk.WriteString(meta) if (i%1000 == 0 && i > 0) || i == len(where)-1 { - if err := e.bulk(bulk); err != nil { + if err := storage.bulk(bulk); err != nil { return err } bulk.Reset() @@ -143,14 +174,14 @@ func (e *Elastic) BulkRemoveField(script string, where []Model) error { return nil } -// BulkUpdateField - -func (e *Elastic) BulkUpdateField(where []models.Contract, fields ...string) error { +// UpdateField - +func (storage *Storage) UpdateField(where []contract.Contract, fields ...string) error { if len(where) == 0 { return nil } bulk := bytes.NewBuffer([]byte{}) for i := range where { - updated, err := e.buildFieldsForModel(where[i], fields...) + updated, err := storage.es.BuildFieldsForModel(where[i], fields...) 
if err != nil { return err } @@ -159,7 +190,7 @@ func (e *Elastic) BulkUpdateField(where []models.Contract, fields ...string) err bulk.WriteString(meta) if (i%1000 == 0 && i > 0) || i == len(where)-1 { - if err := e.bulk(bulk); err != nil { + if err := storage.bulk(bulk); err != nil { return err } bulk.Reset() diff --git a/internal/elastic/consts.go b/internal/elastic/consts.go deleted file mode 100644 index 65e215fdc..000000000 --- a/internal/elastic/consts.go +++ /dev/null @@ -1,28 +0,0 @@ -package elastic - -// Document names -const ( - DocContracts = "contract" - DocBlocks = "block" - DocBalanceUpdates = "balance_update" - DocOperations = "operation" - DocBigMapDiff = "bigmapdiff" - DocBigMapActions = "bigmapaction" - DocMetadata = "metadata" - DocMigrations = "migration" - DocProtocol = "protocol" - DocTransfers = "transfer" - DocTZIP = "tzip" - DocTokenBalances = "token_balance" - DocTezosDomains = "tezos_domain" -) - -// Index names -const ( - IndexName = "bcd" -) - -// Errors -const ( - IndexNotFoundError = "index_not_found_exception" -) diff --git a/internal/elastic/consts/consts.go b/internal/elastic/consts/consts.go new file mode 100644 index 000000000..5afc80b55 --- /dev/null +++ b/internal/elastic/consts/consts.go @@ -0,0 +1,12 @@ +package consts + +// Errors +const ( + IndexNotFoundError = "index_not_found_exception" +) + +// default +const ( + DefaultSize = 10 + DefaultScrollSize = 1000 +) diff --git a/internal/elastic/contract/data.go b/internal/elastic/contract/data.go new file mode 100644 index 000000000..2c6a6d741 --- /dev/null +++ b/internal/elastic/contract/data.go @@ -0,0 +1,37 @@ +package contract + +import "github.com/baking-bad/bcdhub/internal/elastic/core" + +type getDiffTasksResponse struct { + Agg struct { + Projects struct { + Buckets []struct { + core.Bucket + Last struct { + Hits core.HitsArray `json:"hits"` + } `json:"last"` + ByHash struct { + Buckets []struct { + core.Bucket + Last struct { + Hits core.HitsArray 
`json:"hits"` + } `json:"last"` + } `json:"buckets"` + } `json:"by_hash"` + } `json:"buckets"` + } `json:"by_project"` + } `json:"aggregations"` +} + +type getProjectsResponse struct { + Agg struct { + Projects struct { + Buckets []struct { + core.Bucket + Last struct { + Hits core.HitsArray `json:"hits"` + } `json:"last"` + } `json:"buckets"` + } `json:"projects"` + } `json:"aggregations"` +} diff --git a/internal/elastic/contract/storage.go b/internal/elastic/contract/storage.go new file mode 100644 index 000000000..cec0254f0 --- /dev/null +++ b/internal/elastic/contract/storage.go @@ -0,0 +1,445 @@ +package contract + +import ( + "encoding/json" + "math/rand" + "time" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +func (storage *Storage) getContract(q core.Base) (c contract.Contract, err error) { + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocContracts}, q, &response); err != nil { + return + } + if response.Hits.Total.Value == 0 { + return c, core.NewRecordNotFoundError(models.DocContracts, "") + } + err = json.Unmarshal(response.Hits.Hits[0].Source, &c) + return +} + +func (storage *Storage) getContracts(query core.Base) ([]contract.Contract, error) { + contracts := make([]contract.Contract, 0) + if err := storage.es.GetAllByQuery(query, &contracts); err != nil { + return nil, err + } + + return contracts, nil +} + +// Get - +func (storage *Storage) Get(by map[string]interface{}) (contract.Contract, error) { + query := core.FiltersToQuery(by).One() + return storage.getContract(query) +} + +// GetMany - +func (storage *Storage) 
GetMany(by map[string]interface{}) ([]contract.Contract, error) { + query := core.FiltersToQuery(by) + return storage.getContracts(query) +} + +// GetRandom - +func (storage *Storage) GetRandom() (contract.Contract, error) { + random := core.Item{ + "function_score": core.Item{ + "functions": []core.Item{ + { + "random_score": core.Item{ + "seed": time.Now().UnixNano(), + }, + }, + }, + }, + } + + txRange := core.Range("tx_count", core.Item{ + "gte": 2, + }) + b := core.Bool(core.Must(txRange, random)) + query := core.NewQuery().Query(b).One() + return storage.getContract(query) +} + +// IsFA - +func (storage *Storage) IsFA(network, address string) (bool, error) { + query := core.NewQuery().Query( + core.Bool( + core.Must( + core.MatchPhrase("network", network), + core.MatchPhrase("address", address), + ), + core.Filter( + core.Item{ + "terms": core.Item{ + "tags": []string{"fa12", "fa1"}, + }, + }, + ), + ), + ) + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response, "address"); err != nil { + return false, err + } + return response.Hits.Total.Value == 1, nil +} + +// UpdateMigrationsCount - +func (storage *Storage) UpdateMigrationsCount(address, network string) error { + // TODO: update via ID and script + contract := contract.NewEmptyContract(network, address) + if err := storage.es.GetByID(&contract); err != nil { + return err + } + contract.MigrationsCount++ + return storage.es.UpdateDoc(&contract) +} + +// GetAddressesByNetworkAndLevel - +func (storage *Storage) GetAddressesByNetworkAndLevel(network string, maxLevel int64) ([]string, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Range("level", core.Item{ + "gt": maxLevel, + }), + ), + ), + ).All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response, "address"); err != nil { + return nil, err + } + + addresses := make([]string, 
len(response.Hits.Hits)) + for i := range response.Hits.Hits { + var c struct { + Address string `json:"address"` + } + if err := json.Unmarshal(response.Hits.Hits[i].Source, &c); err != nil { + return nil, err + } + addresses[i] = c.Address + } + + return addresses, nil +} + +// GetIDsByAddresses - +func (storage *Storage) GetIDsByAddresses(addresses []string, network string) ([]string, error) { + shouldItems := make([]core.Item, len(addresses)) + for i := range addresses { + shouldItems[i] = core.MatchPhrase("address", addresses[i]) + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + core.Should(shouldItems...), + core.MinimumShouldMatch(1), + ), + ).Add(core.Item{ + "_source": false, + }) + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response, "address"); err != nil { + return nil, err + } + ids := make([]string, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + ids[i] = response.Hits.Hits[i].ID + } + return ids, nil +} + +// GetByAddresses - +func (storage *Storage) GetByAddresses(addresses []contract.Address) ([]contract.Contract, error) { + items := make([]core.Item, len(addresses)) + for i := range addresses { + items[i] = core.Bool( + core.Filter( + core.MatchPhrase("address", addresses[i].Address), + core.Match("network", addresses[i].Network), + ), + ) + } + + query := core.NewQuery().Query( + core.Bool( + core.Should(items...), + core.MinimumShouldMatch(1), + ), + ) + contracts := make([]contract.Contract, 0) + err := storage.es.GetAllByQuery(query, &contracts) + return contracts, err +} + +// GetProjectsLastContract - +func (storage *Storage) GetProjectsLastContract() ([]contract.Contract, error) { + query := core.NewQuery().Add( + core.Aggs( + core.AggItem{ + Name: "projects", + Body: core.Item{ + "terms": core.Item{ + "field": "project_id.keyword", + "size": core.MaxQuerySize, + }, + "aggs": core.Item{ + "last": 
core.TopHits(1, "timestamp", "desc"), + }, + }, + }, + ), + ).Sort("timestamp", "desc").Zero() + + var response getProjectsResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response); err != nil { + return nil, err + } + + if len(response.Agg.Projects.Buckets) == 0 { + return nil, core.NewRecordNotFoundError(models.DocContracts, "") + } + + contracts := make([]contract.Contract, len(response.Agg.Projects.Buckets)) + for i := range response.Agg.Projects.Buckets { + if err := json.Unmarshal(response.Agg.Projects.Buckets[i].Last.Hits.Hits[0].Source, &contracts[i]); err != nil { + return nil, err + } + } + return contracts, nil +} + +// GetSameContracts - +func (storage *Storage) GetSameContracts(c contract.Contract, size, offset int64) (pcr contract.SameResponse, err error) { + if c.Fingerprint == nil { + return pcr, errors.Errorf("Invalid contract data") + } + + if size == 0 { + size = consts.DefaultSize + } else if size+offset > core.MaxQuerySize { + size = core.MaxQuerySize - offset + } + + q := core.NewQuery().Query( + core.Bool( + core.Filter( + core.MatchPhrase("hash", c.Hash), + ), + core.MustNot( + core.MatchPhrase("address", c.Address), + ), + ), + ).Sort("last_action", "desc").Size(size).From(offset) + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocContracts}, q, &response); err != nil { + return + } + + if len(response.Hits.Hits) == 0 { + return pcr, core.NewRecordNotFoundError(models.DocContracts, "") + } + + contracts := make([]contract.Contract, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err = json.Unmarshal(response.Hits.Hits[i].Source, &contracts[i]); err != nil { + return + } + } + pcr.Contracts = contracts + pcr.Count = response.Hits.Total.Value + return +} + +// GetSimilarContracts - +func (storage *Storage) GetSimilarContracts(c contract.Contract, size, offset int64) (pcr []contract.Similar, total int, err error) { + if c.Fingerprint == nil { + return + } + + if 
size == 0 { + size = consts.DefaultSize + } else if size+offset > core.MaxQuerySize { + size = core.MaxQuerySize - offset + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.MatchPhrase("project_id", c.ProjectID), + ), + core.MustNot( + core.Match("hash.keyword", c.Hash), + ), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: "projects", + Body: core.Item{ + "terms": core.Item{ + "field": "hash.keyword", + "size": size + offset, + "order": core.Item{ + "bucketsSort": "desc", + }, + }, + "aggs": core.Item{ + "last": core.TopHits(1, "last_action", "desc"), + "bucketsSort": core.Max("last_action"), + }, + }, + }, + ), + ).Zero() + + var response getProjectsResponse + if err = storage.es.Query([]string{models.DocContracts}, query, &response); err != nil { + return + } + + total = len(response.Agg.Projects.Buckets) + if len(response.Agg.Projects.Buckets) == 0 { + return + } + + contracts := make([]contract.Similar, 0) + arr := response.Agg.Projects.Buckets[offset:] + for _, item := range arr { + var cntr contract.Contract + if err = json.Unmarshal(item.Last.Hits.Hits[0].Source, &cntr); err != nil { + return + } + + contracts = append(contracts, contract.Similar{ + Contract: &cntr, + Count: item.DocCount, + }) + } + return contracts, total, nil +} + +// GetDiffTasks - +func (storage *Storage) GetDiffTasks() ([]contract.DiffTask, error) { + query := core.NewQuery().Add( + core.Aggs( + core.AggItem{ + Name: "by_project", + Body: core.Item{ + "terms": core.Item{ + "field": "project_id.keyword", + "size": core.MaxQuerySize, + }, + "aggs": core.Item{ + "by_hash": core.Item{ + "terms": core.Item{ + "field": "hash.keyword", + "size": core.MaxQuerySize, + }, + "aggs": core.Item{ + "last": core.TopHits(1, "last_action", "desc"), + }, + }, + }, + }, + }, + ), + ).Zero() + + var response getDiffTasksResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response); err != nil { + return nil, err + } + + tasks := make([]contract.DiffTask, 
0) + for _, bucket := range response.Agg.Projects.Buckets { + if len(bucket.ByHash.Buckets) < 2 { + continue + } + + similar := bucket.ByHash.Buckets + for i := 0; i < len(similar)-1; i++ { + var current contract.Contract + if err := json.Unmarshal(similar[i].Last.Hits.Hits[0].Source, ¤t); err != nil { + return nil, err + } + for j := i + 1; j < len(similar); j++ { + var next contract.Contract + if err := json.Unmarshal(similar[j].Last.Hits.Hits[0].Source, &next); err != nil { + return nil, err + } + + tasks = append(tasks, contract.DiffTask{ + Network1: current.Network, + Address1: current.Address, + Network2: next.Network, + Address2: next.Address, + }) + } + } + } + + rand.Seed(time.Now().Unix()) + rand.Shuffle(len(tasks), func(i, j int) { tasks[i], tasks[j] = tasks[j], tasks[i] }) + return tasks, nil +} + +// GetTokens - +func (storage *Storage) GetTokens(network, tokenInterface string, offset, size int64) ([]contract.Contract, int64, error) { + tags := []string{"fa12", "fa1", "fa2"} + if tokenInterface == "fa12" || tokenInterface == "fa1" || tokenInterface == "fa2" { + tags = []string{tokenInterface} + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.In("tags", tags), + ), + ), + ).Sort("timestamp", "desc").Size(size) + + if offset != 0 { + query = query.From(offset) + } + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocContracts}, query, &response); err != nil { + return nil, 0, err + } + + contracts := make([]contract.Contract, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &contracts[i]); err != nil { + return nil, 0, err + } + } + return contracts, response.Hits.Total.Value, nil +} diff --git a/internal/elastic/contracts.go b/internal/elastic/contracts.go deleted file mode 100644 index cdd11aec6..000000000 --- a/internal/elastic/contracts.go +++ /dev/null @@ -1,391 +0,0 @@ -package elastic - 
-import ( - "time" - - "github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -func filtersToQuery(by map[string]interface{}) base { - matches := make([]qItem, 0) - for k, v := range by { - matches = append(matches, matchPhrase(k, v)) - } - return newQuery().Query( - boolQ( - must(matches...), - ), - ) -} - -func (e *Elastic) getContract(q base) (c models.Contract, err error) { - var response SearchResponse - if err = e.query([]string{DocContracts}, q, &response); err != nil { - return - } - if response.Hits.Total.Value == 0 { - return c, NewRecordNotFoundError(DocContracts, "", q) - } - err = json.Unmarshal(response.Hits.Hits[0].Source, &c) - return -} - -func (e *Elastic) getContracts(query base) ([]models.Contract, error) { - contracts := make([]models.Contract, 0) - if err := e.getAllByQuery(query, &contracts); err != nil { - return nil, err - } - - return contracts, nil -} - -// GetContract - -func (e *Elastic) GetContract(by map[string]interface{}) (models.Contract, error) { - query := filtersToQuery(by).One() - return e.getContract(query) -} - -// GetContracts - -func (e *Elastic) GetContracts(by map[string]interface{}) ([]models.Contract, error) { - query := filtersToQuery(by) - return e.getContracts(query) -} - -// GetContractRandom - -func (e *Elastic) GetContractRandom() (models.Contract, error) { - random := qItem{ - "function_score": qItem{ - "functions": []qItem{ - { - "random_score": qItem{ - "seed": time.Now().UnixNano(), - }, - }, - }, - }, - } - - txRange := rangeQ("tx_count", qItem{ - "gte": 2, - }) - b := boolQ(must(txRange, random)) - query := newQuery().Query(b).One() - return e.getContract(query) -} - -// IsFAContract - -func (e *Elastic) IsFAContract(network, address string) (bool, error) { - query := newQuery().Query( - boolQ( - must( - matchPhrase("network", network), - matchPhrase("address", address), - ), - filter( - qItem{ - "terms": qItem{ - "tags": []string{"fa12", "fa1"}, - }, - }, - ), - ), - ) 
- var response SearchResponse - if err := e.query([]string{DocContracts}, query, &response, "address"); err != nil { - return false, err - } - return response.Hits.Total.Value == 1, nil -} - -// UpdateContractMigrationsCount - -func (e *Elastic) UpdateContractMigrationsCount(address, network string) error { - // TODO: update via ID and script - contract := models.NewEmptyContract(network, address) - if err := e.GetByID(&contract); err != nil { - return err - } - contract.MigrationsCount++ - return e.UpdateDoc(&contract) -} - -// GetContractAddressesByNetworkAndLevel - -func (e *Elastic) GetContractAddressesByNetworkAndLevel(network string, maxLevel int64) ([]string, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - rangeQ("level", qItem{ - "gt": maxLevel, - }), - ), - ), - ).All() - - var response SearchResponse - if err := e.query([]string{DocContracts}, query, &response, "address"); err != nil { - return nil, err - } - - addresses := make([]string, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - var c struct { - Address string `json:"address"` - } - if err := json.Unmarshal(response.Hits.Hits[i].Source, &c); err != nil { - return nil, err - } - addresses[i] = c.Address - } - - return addresses, nil -} - -// GetContractsIDByAddress - -func (e *Elastic) GetContractsIDByAddress(addresses []string, network string) ([]string, error) { - shouldItems := make([]qItem, len(addresses)) - for i := range addresses { - shouldItems[i] = matchPhrase("address", addresses[i]) - } - - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - should(shouldItems...), - minimumShouldMatch(1), - ), - ).Add(qItem{ - "_source": false, - }) - - var response SearchResponse - if err := e.query([]string{DocContracts}, query, &response, "address"); err != nil { - return nil, err - } - ids := make([]string, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - ids[i] = response.Hits.Hits[i].ID - } - 
return ids, nil -} - -type recalcContractStatsResponse struct { - Aggs struct { - TxCount struct { - Value int64 `json:"value"` - } `json:"tx_count"` - Balance struct { - Value int64 `json:"value"` - } `json:"balance"` - LastAction struct { - Value int64 `json:"value"` - } `json:"last_action"` - TotalWithdrawn struct { - Value int64 `json:"value"` - } `json:"total_withdrawn"` - } `json:"aggregations"` -} - -// RecalcContractStats - -func (e *Elastic) RecalcContractStats(network, address string) (stats ContractStats, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - should( - matchPhrase("source", address), - matchPhrase("destination", address), - ), - minimumShouldMatch(1), - ), - ).Add( - qItem{ - "aggs": qItem{ - "tx_count": count("indexed_time"), - "last_action": max("timestamp"), - "balance": qItem{ - "scripted_metric": qItem{ - "init_script": "state.operations = []", - "map_script": "if (doc['status.keyword'].value == 'applied' && doc['amount'].size() != 0) {state.operations.add(doc['destination.keyword'].value == params.address ? 
doc['amount'].value : -1L * doc['amount'].value)}", - "combine_script": "double balance = 0; for (amount in state.operations) { balance += amount } return balance", - "reduce_script": "double balance = 0; for (a in states) { balance += a } return balance", - "params": qItem{ - "address": address, - }, - }, - }, - "total_withdrawn": qItem{ - "scripted_metric": qItem{ - "init_script": "state.operations = []", - "map_script": "if (doc['status.keyword'].value == 'applied' && doc['amount'].size() != 0 && doc['source.keyword'].value == params.address) {state.operations.add(doc['amount'].value)}", - "combine_script": "double balance = 0; for (amount in state.operations) { balance += amount } return balance", - "reduce_script": "double balance = 0; for (a in states) { balance += a } return balance", - "params": qItem{ - "address": address, - }, - }, - }, - }, - }, - ).Zero() - var response recalcContractStatsResponse - if err = e.query([]string{DocOperations}, query, &response); err != nil { - return - } - - stats.LastAction = time.Unix(0, response.Aggs.LastAction.Value*1000000).UTC() - stats.Balance = response.Aggs.Balance.Value - stats.TotalWithdrawn = response.Aggs.TotalWithdrawn.Value - stats.TxCount = response.Aggs.TxCount.Value - return -} - -type getContractMigrationStatsResponse struct { - Agg struct { - MigrationsCount struct { - Value int64 `json:"value"` - } `json:"migrations_count"` - } `json:"aggregations"` -} - -// GetContractMigrationStats - -func (e *Elastic) GetContractMigrationStats(network, address string) (stats ContractMigrationsStats, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - should( - matchPhrase("source", address), - matchPhrase("destination", address), - ), - minimumShouldMatch(1), - ), - ).Add( - aggs( - aggItem{ - "migrations_count", count("indexed_time"), - }, - ), - ).Zero() - - var response getContractMigrationStatsResponse - if err = e.query([]string{DocMigrations}, query, &response); 
err != nil { - return - } - - stats.MigrationsCount = response.Agg.MigrationsCount.Value - return -} - -type getDAppStatsResponse struct { - Aggs struct { - Users struct { - Value float64 `json:"value"` - } `json:"users"` - Calls struct { - Value float64 `json:"value"` - } `json:"calls"` - Volume struct { - Value float64 `json:"value"` - } `json:"volume"` - } `json:"aggregations"` -} - -// GetDAppStats - -func (e *Elastic) GetDAppStats(network string, addresses []string, period string) (stats DAppStats, err error) { - addressMatches := make([]qItem, len(addresses)) - for i := range addresses { - addressMatches[i] = matchPhrase("destination", addresses[i]) - } - - matches := []qItem{ - matchQ("network", network), - exists("entrypoint"), - boolQ( - should(addressMatches...), - minimumShouldMatch(1), - ), - matchQ("status", "applied"), - } - r, err := periodToRange(period) - if err != nil { - return - } - if r != nil { - matches = append(matches, r) - } - - query := newQuery().Query( - boolQ( - filter(matches...), - ), - ).Add( - aggs( - aggItem{"users", cardinality("source.keyword")}, - aggItem{"calls", count("indexed_time")}, - aggItem{"volume", sum("amount")}, - ), - ).Zero() - - var response getDAppStatsResponse - if err = e.query([]string{DocOperations}, query, &response); err != nil { - return - } - - stats.Calls = int64(response.Aggs.Calls.Value) - stats.Users = int64(response.Aggs.Users.Value) - stats.Volume = int64(response.Aggs.Volume.Value) - return -} - -func periodToRange(period string) (qItem, error) { - var str string - switch period { - case "year": - str = "now-1y/d" - case "month": - str = "now-1M/d" - case "week": - str = "now-1w/d" - case "day": - str = "now-1d/d" - case "all": - return nil, nil - default: - return nil, errors.Errorf("Unknown period value: %s", period) - } - return qItem{ - "range": qItem{ - "timestamp": qItem{ - "gte": str, - }, - }, - }, nil -} - -// GetContractsByAddresses - -func (e *Elastic) GetContractsByAddresses(addresses 
[]Address) ([]models.Contract, error) { - items := make([]qItem, len(addresses)) - for i := range addresses { - items[i] = boolQ( - filter( - matchPhrase("address", addresses[i].Address), - matchQ("network", addresses[i].Network), - ), - ) - } - - query := newQuery().Query( - boolQ( - should(items...), - minimumShouldMatch(1), - ), - ) - contracts := make([]models.Contract, 0) - err := e.getAllByQuery(query, &contracts) - return contracts, err -} diff --git a/internal/elastic/core/comparator.go b/internal/elastic/core/comparator.go new file mode 100644 index 000000000..f03a8c67e --- /dev/null +++ b/internal/elastic/core/comparator.go @@ -0,0 +1,30 @@ +package core + +import "github.com/baking-bad/bcdhub/internal/models/tzip" + +// BuildComparator - +func BuildComparator(rng tzip.Comparator) Item { + return Range("level", Item{ + rng.Comparator: rng.Value, + }) +} + +// NewGreaterThanRange - +func NewGreaterThanRange(value int64) tzip.Comparator { + return tzip.NewRange("gt", value) +} + +// NewGreaterThanEqRange - +func NewGreaterThanEqRange(value int64) tzip.Comparator { + return tzip.NewRange("gte", value) +} + +// NewLessThanRange - +func NewLessThanRange(value int64) tzip.Comparator { + return tzip.NewRange("lt", value) +} + +// NewLessThanEqRange - +func NewLessThanEqRange(value int64) tzip.Comparator { + return tzip.NewRange("lte", value) +} diff --git a/internal/elastic/core/data.go b/internal/elastic/core/data.go new file mode 100644 index 000000000..321325ba9 --- /dev/null +++ b/internal/elastic/core/data.go @@ -0,0 +1,165 @@ +package core + +import ( + stdJSON "encoding/json" + "time" + + "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" + "github.com/baking-bad/bcdhub/internal/models/operation" +) + +// TestConnectionResponse - +type TestConnectionResponse struct { + Version struct { + Number string `json:"number"` + } `json:"version"` +} + +// Bucket - +type Bucket struct { + Key string `json:"key"` + DocCount int64 
`json:"doc_count"` +} + +// IntValue - +type IntValue struct { + Value int64 `json:"value"` +} + +// FloatValue - +type FloatValue struct { + Value float64 `json:"value"` +} + +// SQLResponse - +type SQLResponse struct { + Rows [][]interface{} `json:"rows"` +} + +// Hit - +type Hit struct { + ID string `json:"_id"` + Index string `json:"_index"` + Source stdJSON.RawMessage `json:"_source"` + Score float64 `json:"_score"` + Type string `json:"_type"` + Highlight map[string][]string `json:"highlight,omitempty"` +} + +// HitsArray - +type HitsArray struct { + Total struct { + Value int64 `json:"value"` + Relation string `json:"relation"` + } `json:"total"` + Hits []Hit `json:"hits"` +} + +// SearchResponse - +type SearchResponse struct { + ScrollID string `json:"_scroll_id,omitempty"` + Took int `json:"took,omitempty"` + TimedOut *bool `json:"timed_out,omitempty"` + Hits *HitsArray `json:"hits,omitempty"` +} + +// GetResponse - +type GetResponse struct { + Index string `json:"_index"` + Type string `json:"_type"` + ID string `json:"_id"` + Found bool `json:"found"` + Source stdJSON.RawMessage `json:"_source"` +} + +// BulkResponse - +type BulkResponse struct { + Took int64 `json:"took"` + Errors bool `json:"errors"` +} + +// Header - +type Header struct { + Took int64 `json:"took"` + TimedOut bool `json:"timed_out"` +} + +// DeleteByQueryResponse - +type DeleteByQueryResponse struct { + Header + Total int64 `json:"total"` + Deleted int64 `json:"deleted"` + VersionConflicts int64 `json:"version_conflicts"` +} + +// EventOperation - +type EventOperation struct { + Network string `json:"network"` + Hash string `json:"hash"` + Internal bool `json:"internal"` + Status string `json:"status"` + Timestamp time.Time `json:"timestamp"` + Kind string `json:"kind"` + Fee int64 `json:"fee,omitempty"` + Amount int64 `json:"amount,omitempty"` + Entrypoint string `json:"entrypoint,omitempty"` + Source string `json:"source"` + SourceAlias string `json:"source_alias,omitempty"` + 
Destination string `json:"destination,omitempty"` + DestinationAlias string `json:"destination_alias,omitempty"` + Delegate string `json:"delegate,omitempty"` + DelegateAlias string `json:"delegate_alias,omitempty"` + + Result *operation.Result `json:"result,omitempty"` + Errors []*cerrors.Error `json:"errors,omitempty"` + Burned int64 `json:"burned,omitempty"` +} + +// EventMigration - +type EventMigration struct { + Network string `json:"network"` + Protocol string `json:"protocol"` + PrevProtocol string `json:"prev_protocol,omitempty"` + Hash string `json:"hash,omitempty"` + Timestamp time.Time `json:"timestamp"` + Level int64 `json:"level"` + Address string `json:"address"` + Kind string `json:"kind"` +} + +// EventContract - +type EventContract struct { + Network string `json:"network"` + Address string `json:"address"` + Hash string `json:"hash"` + ProjectID string `json:"project_id"` + Timestamp time.Time `json:"timestamp"` +} + +// searchByTextResponse - +type searchByTextResponse struct { + Took int64 `json:"took"` + Hits HitsArray `json:"hits"` + Agg struct { + Projects struct { + Buckets []struct { + Bucket + Last struct { + Hits HitsArray `json:"hits"` + } `json:"last"` + } `json:"buckets"` + } `json:"projects"` + } `json:"aggregations"` +} + +type getDateHistogramResponse struct { + Agg struct { + Hist struct { + Buckets []struct { + Key int64 `json:"key"` + DocCount int64 `json:"doc_count"` + Result FloatValue `json:"result,omitempty"` + } `json:"buckets"` + } `json:"hist"` + } `json:"aggregations"` +} diff --git a/internal/elastic/elastic.go b/internal/elastic/core/elastic.go similarity index 83% rename from internal/elastic/elastic.go rename to internal/elastic/core/elastic.go index bc0543025..c3c511d57 100644 --- a/internal/elastic/elastic.go +++ b/internal/elastic/core/elastic.go @@ -1,9 +1,8 @@ -package elastic +package core import ( "bytes" "context" - "fmt" "io/ioutil" "log" @@ -12,6 +11,7 @@ import ( "time" 
"github.com/baking-bad/bcdhub/internal/logger" + "github.com/baking-bad/bcdhub/internal/models" "github.com/elastic/go-elasticsearch/v8" "github.com/elastic/go-elasticsearch/v8/esapi" jsoniter "github.com/json-iterator/go" @@ -35,13 +35,7 @@ func New(addresses []string) (*Elastic, error) { return nil, err } e := &Elastic{es} - info, err := e.TestConnection() - if err != nil { - return nil, err - } - logger.Info("Elasticsearch Server: %s", info.Version.Number) - - return e, nil + return e, e.TestConnection() } // WaitNew - @@ -64,7 +58,8 @@ func (e *Elastic) GetAPI() *esapi.API { return e.API } -func (e *Elastic) getResponse(resp *esapi.Response, result interface{}) error { +// GetResponse - +func (e *Elastic) GetResponse(resp *esapi.Response, result interface{}) error { if resp.IsError() { if resp.StatusCode == 404 { return NewRecordNotFoundErrorFromResponse(resp) @@ -95,7 +90,8 @@ func (e *Elastic) getTextResponse(resp *esapi.Response) (string, error) { return string(b), nil } -func (e *Elastic) query(indices []string, query map[string]interface{}, response interface{}, source ...string) (err error) { +// Query - +func (e *Elastic) Query(indices []string, query map[string]interface{}, response interface{}, source ...string) (err error) { var buf bytes.Buffer if err = json.NewEncoder(&buf).Encode(query); err != nil { return @@ -120,11 +116,12 @@ func (e *Elastic) query(indices []string, query map[string]interface{}, response defer resp.Body.Close() - return e.getResponse(resp, response) + return e.GetResponse(resp, response) } -func (e *Elastic) executeSQL(sqlString string, response interface{}) (err error) { - query := qItem{ +// ExecuteSQL - +func (e *Elastic) ExecuteSQL(sqlString string, response interface{}) (err error) { + query := Item{ "query": sqlString, } @@ -143,18 +140,18 @@ func (e *Elastic) executeSQL(sqlString string, response interface{}) (err error) } defer resp.Body.Close() - return e.getResponse(resp, 
response) + return e.GetResponse(resp, response) } // TestConnection - -func (e *Elastic) TestConnection() (result TestConnectionResponse, err error) { +func (e *Elastic) TestConnection() (err error) { res, err := e.Info() if err != nil { return } - err = e.getResponse(res, &result) - return + var result TestConnectionResponse + return e.GetResponse(res, &result) } func (e *Elastic) createIndexIfNotExists(index string) error { @@ -195,21 +192,7 @@ func (e *Elastic) createIndexIfNotExists(index string) error { // CreateIndexes - func (e *Elastic) CreateIndexes() error { - for _, index := range []string{ - DocContracts, - DocBlocks, - DocBalanceUpdates, - DocOperations, - DocBigMapDiff, - DocBigMapActions, - DocMetadata, - DocMigrations, - DocProtocol, - DocTransfers, - DocTZIP, - DocTokenBalances, - DocTezosDomains, - } { + for _, index := range models.AllDocuments() { if err := e.createIndexIfNotExists(index); err != nil { return err } @@ -247,7 +230,7 @@ func (e *Elastic) updateByQuery(indices []string, query map[string]interface{}, defer resp.Body.Close() var v interface{} - return e.getResponse(resp, &v) + return e.GetResponse(resp, &v) } func (e *Elastic) deleteByQuery(indices []string, query map[string]interface{}) (result *DeleteByQueryResponse, err error) { @@ -274,17 +257,17 @@ func (e *Elastic) deleteByQuery(indices []string, query map[string]interface{}) defer resp.Body.Close() - err = e.getResponse(resp, &result) + err = e.GetResponse(resp, &result) return } // DeleteByLevelAndNetwork - func (e *Elastic) DeleteByLevelAndNetwork(indices []string, network string, maxLevel int64) error { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - rangeQ("level", qItem{"gt": maxLevel}), + query := NewQuery().Query( + Bool( + Filter( + Match("network", network), + Range("level", Item{"gt": maxLevel}), ), ), ) @@ -341,16 +324,16 @@ func (e *Elastic) ReloadSecureSettings() error { // DeleteByContract - // TODO - delete context func (e 
*Elastic) DeleteByContract(indices []string, network, address string) error { - filters := make([]qItem, 0) + filters := make([]Item, 0) if network != "" { - filters = append(filters, matchQ("network", network)) + filters = append(filters, Match("network", network)) } if address != "" { - filters = append(filters, matchPhrase("contract", address)) + filters = append(filters, MatchPhrase("contract", address)) } - query := newQuery().Query( - boolQ( - filter(filters...), + query := NewQuery().Query( + Bool( + Filter(filters...), ), ) end := false diff --git a/internal/elastic/errors.go b/internal/elastic/core/errors.go similarity index 67% rename from internal/elastic/errors.go rename to internal/elastic/core/errors.go index 3ef907388..6abb717be 100644 --- a/internal/elastic/errors.go +++ b/internal/elastic/core/errors.go @@ -1,4 +1,4 @@ -package elastic +package core import ( "strings" @@ -7,7 +7,7 @@ import ( ) // IsRecordNotFound - -func IsRecordNotFound(err error) bool { +func (e *Elastic) IsRecordNotFound(err error) bool { _, ok := err.(*RecordNotFoundError) return ok } @@ -16,17 +16,16 @@ func IsRecordNotFound(err error) bool { type RecordNotFoundError struct { index string id string - query base } // NewRecordNotFoundError - -func NewRecordNotFoundError(index, id string, query base) *RecordNotFoundError { - return &RecordNotFoundError{index, id, query} +func NewRecordNotFoundError(index, id string) *RecordNotFoundError { + return &RecordNotFoundError{index, id} } // NewRecordNotFoundErrorFromResponse - func NewRecordNotFoundErrorFromResponse(resp *esapi.Response) *RecordNotFoundError { - return &RecordNotFoundError{resp.String(), "", nil} + return &RecordNotFoundError{resp.String(), ""} } // Error - @@ -43,10 +42,5 @@ func (e *RecordNotFoundError) Error() string { builder.WriteString(e.id) builder.WriteString(" ") } - if e.query != nil { - builder.WriteString("query=") - b, _ := json.MarshalIndent(e.query, "", " ") - builder.Write(b) - } return 
builder.String() } diff --git a/internal/elastic/core/events.go b/internal/elastic/core/events.go new file mode 100644 index 000000000..9403ab2bb --- /dev/null +++ b/internal/elastic/core/events.go @@ -0,0 +1,289 @@ +package core + +import ( + "strings" + + constants "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/pkg/errors" +) + +// GetEvents - +func (e *Elastic) GetEvents(subscriptions []models.SubscriptionRequest, size, offset int64) ([]models.Event, error) { + if len(subscriptions) == 0 { + return []models.Event{}, nil + } + + if size == 0 || size > 50 { // TODO: ??? + size = consts.DefaultSize + } + + shouldItems := make([]Item, 0) + indicesMap := make(map[string]struct{}) + + for i := range subscriptions { + items := getEventsQuery(subscriptions[i], indicesMap) + shouldItems = append(shouldItems, items...) 
+ } + + indices := make([]string, 0) + for ind := range indicesMap { + indices = append(indices, ind) + } + if len(indices) == 0 { + return []models.Event{}, nil + } + + return e.getEvents(subscriptions, shouldItems, indices, size, offset) +} + +func (e *Elastic) getEvents(subscriptions []models.SubscriptionRequest, shouldItems []Item, indices []string, size, offset int64) ([]models.Event, error) { + query := NewQuery() + if len(shouldItems) != 0 { + query.Query( + Bool( + Should(shouldItems...), + MinimumShouldMatch(1), + ), + ) + } + query.Sort("timestamp", "desc").Size(size).From(offset) + + var response SearchResponse + if err := e.Query(indices, query, &response); err != nil { + return nil, err + } + + hits := response.Hits.Hits + events := make([]models.Event, len(hits)) + for i := range hits { + event, err := parseEvent(subscriptions, hits[i]) + if err != nil { + return nil, err + } + events[i] = event + } + + return events, nil +} + +func (m *EventMigration) makeEvent(subscriptions []models.SubscriptionRequest) (models.Event, error) { + res := models.Event{ + Type: models.EventTypeMigration, + Address: m.Address, + Network: m.Network, + Body: m, + } + for i := range subscriptions { + if m.Network == subscriptions[i].Network && m.Address == subscriptions[i].Address { + res.Alias = subscriptions[i].Alias + return res, nil + } + } + return models.Event{}, errors.Errorf("Couldn't find a matching subscription for %v", m) +} + +func (o *EventOperation) makeEvent(subscriptions []models.SubscriptionRequest) (models.Event, error) { + res := models.Event{ + Network: o.Network, + Body: o, + } + for i := range subscriptions { + if o.Network != subscriptions[i].Network { + continue + } + if o.Source != subscriptions[i].Address && o.Destination != subscriptions[i].Address { + continue + } + + res.Address = subscriptions[i].Address + res.Alias = subscriptions[i].Alias + + switch { + case o.Status != "applied": + res.Type = models.EventTypeError + case o.Source == 
subscriptions[i].Address && o.Kind == "origination": + res.Type = models.EventTypeDeploy + case o.Source == subscriptions[i].Address && o.Kind == "transaction": + res.Type = models.EventTypeCall + case o.Destination == subscriptions[i].Address && o.Kind == "transaction": + res.Type = models.EventTypeInvoke + } + + return res, nil + } + return models.Event{}, errors.Errorf("Couldn't find a matching subscription for %v", o) +} + +func (c *EventContract) makeEvent(subscriptions []models.SubscriptionRequest) (models.Event, error) { + res := models.Event{ + Body: c, + } + for i := range subscriptions { + if c.Hash == subscriptions[i].Hash || c.ProjectID == subscriptions[i].ProjectID { + res.Network = subscriptions[i].Network + res.Address = subscriptions[i].Address + res.Alias = subscriptions[i].Alias + + if c.Hash == subscriptions[i].Hash { + res.Type = models.EventTypeSame + } else { + res.Type = models.EventTypeSimilar + } + return res, nil + } + } + return models.Event{}, errors.Errorf("Couldn't find a matching subscription for %v", c) +} + +func parseEvent(subscriptions []models.SubscriptionRequest, hit Hit) (models.Event, error) { + switch hit.Index { + case models.DocOperations: + var event EventOperation + if err := json.Unmarshal(hit.Source, &event); err != nil { + return models.Event{}, err + } + return event.makeEvent(subscriptions) + case models.DocMigrations: + var event EventMigration + if err := json.Unmarshal(hit.Source, &event); err != nil { + return models.Event{}, err + } + return event.makeEvent(subscriptions) + case models.DocContracts: + var event EventContract + if err := json.Unmarshal(hit.Source, &event); err != nil { + return models.Event{}, err + } + return event.makeEvent(subscriptions) + default: + return models.Event{}, errors.Errorf("[parseEvent] Invalid reponse type: %s", hit.Index) + } +} + +func getEventsQuery(subscription models.SubscriptionRequest, indices map[string]struct{}) []Item { + shouldItems := make([]Item, 0) + + if item := 
getEventsWatchCalls(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocOperations] = struct{}{} + } + if item := getEventsWatchErrors(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocOperations] = struct{}{} + } + if item := getEventsWatchDeployments(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocOperations] = struct{}{} + } + + if helpers.IsContract(subscription.Address) { + if item := getEventsWatchMigrations(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocMigrations] = struct{}{} + } + if item := getSubscriptionWithSame(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocContracts] = struct{}{} + } + if item := getSubscriptionWithSimilar(subscription); item != nil { + shouldItems = append(shouldItems, item) + indices[models.DocContracts] = struct{}{} + } + } + + return shouldItems +} + +func getEventsWatchMigrations(subscription models.SubscriptionRequest) Item { + if !subscription.WithMigrations { + return nil + } + + return Bool( + Filter( + In("kind.keyword", []string{constants.MigrationBootstrap, constants.MigrationLambda, constants.MigrationUpdate}), + Term("network.keyword", subscription.Network), + Term("address.keyword", subscription.Address), + ), + ) +} + +func getEventsWatchDeployments(subscription models.SubscriptionRequest) Item { + if !subscription.WithDeployments { + return nil + } + + return Bool( + Filter( + Term("kind.keyword", "origination"), + Term("network.keyword", subscription.Network), + Term("source.keyword", subscription.Address), + ), + ) +} + +func getEventsWatchCalls(subscription models.SubscriptionRequest) Item { + if !subscription.WithCalls { + return nil + } + + addressKeyword := "destination.keyword" + if strings.HasPrefix(subscription.Address, "tz") { + addressKeyword = "source.keyword" + } + + return Bool( + Filter( + 
Term("kind.keyword", "transaction"), + Term("status.keyword", "applied"), + Term("network.keyword", subscription.Network), + Term(addressKeyword, subscription.Address), + ), + ) +} + +func getEventsWatchErrors(subscription models.SubscriptionRequest) Item { + if !subscription.WithErrors { + return nil + } + + addressKeyword := "destination.keyword" + if strings.HasPrefix(subscription.Address, "tz") { + addressKeyword = "source.keyword" + } + + return Bool( + Filter( + Term("network.keyword", subscription.Network), + Term(addressKeyword, subscription.Address), + ), + MustNot( + Term("status.keyword", "applied"), + ), + ) +} + +func getSubscriptionWithSame(subscription models.SubscriptionRequest) Item { + if !subscription.WithSame { + return nil + } + + return Bool( + Filter(Term("hash.keyword", subscription.Hash)), + MustNot(Term("address.keyword", subscription.Address)), + ) +} + +func getSubscriptionWithSimilar(subscription models.SubscriptionRequest) Item { + if !subscription.WithSimilar { + return nil + } + return Bool( + Filter(Term("project_id.keyword", subscription.ProjectID)), + MustNot(Term("hash.keyword", subscription.Hash)), + MustNot(Term("address.keyword", subscription.Address)), + ) +} diff --git a/internal/elastic/get.go b/internal/elastic/core/get.go similarity index 52% rename from internal/elastic/get.go rename to internal/elastic/core/get.go index e45dd5492..1bc29c768 100644 --- a/internal/elastic/get.go +++ b/internal/elastic/core/get.go @@ -1,13 +1,14 @@ -package elastic +package core import ( "context" + "github.com/baking-bad/bcdhub/internal/models" "github.com/elastic/go-elasticsearch/v8/esapi" ) // GetByID - -func (e *Elastic) GetByID(ret Model) error { +func (e *Elastic) GetByID(ret models.Model) error { req := esapi.GetRequest{ Index: ret.GetIndex(), DocumentID: ret.GetID(), @@ -19,60 +20,61 @@ func (e *Elastic) GetByID(ret Model) error { defer resp.Body.Close() var response GetResponse - if err := e.getResponse(resp, 
&response); err != nil { + if err := e.GetResponse(resp, &response); err != nil { return err } if !response.Found { - return NewRecordNotFoundError(ret.GetIndex(), ret.GetID(), nil) + return NewRecordNotFoundError(ret.GetIndex(), ret.GetID()) } return json.Unmarshal(response.Source, ret) } // GetByIDs - func (e *Elastic) GetByIDs(output interface{}, ids ...string) error { - query := newQuery().Query( - qItem{ - "ids": qItem{ + query := NewQuery().Query( + Item{ + "ids": Item{ "values": ids, }, }, ) - return e.getAllByQuery(query, output) + return e.GetAllByQuery(query, output) } // GetAll - func (e *Elastic) GetAll(output interface{}) error { - return e.getAllByQuery(newQuery(), output) + return e.GetAllByQuery(NewQuery(), output) } // GetByNetwork - func (e *Elastic) GetByNetwork(network string, output interface{}) error { - query := newQuery().Query( - boolQ( - must( - matchPhrase("network", network), + query := NewQuery().Query( + Bool( + Must( + MatchPhrase("network", network), ), ), ).Sort("level", "asc") - return e.getAllByQuery(query, output) + return e.GetAllByQuery(query, output) } // GetByNetworkWithSort - func (e *Elastic) GetByNetworkWithSort(network, sortField, sortOrder string, output interface{}) error { - query := newQuery().Query( - boolQ( - must( - matchPhrase("network", network), + query := NewQuery().Query( + Bool( + Must( + MatchPhrase("network", network), ), ), ).Sort(sortField, sortOrder) - return e.getAllByQuery(query, output) + return e.GetAllByQuery(query, output) } -func (e *Elastic) getAllByQuery(query base, output interface{}) error { - ctx := newScrollContext(e, query, 0, defaultScrollSize) - return ctx.get(output) +// GetAllByQuery - +func (e *Elastic) GetAllByQuery(query Base, output interface{}) error { + ctx := NewScrollContext(e, query, 0, defaultScrollSize) + return ctx.Get(output) } type getCountAggResponse struct { @@ -83,9 +85,10 @@ type getCountAggResponse struct { } `json:"aggregations"` } -func (e *Elastic) 
getCountAgg(index []string, query base) (map[string]int64, error) { +// GetCountAgg - +func (e *Elastic) GetCountAgg(index []string, query Base) (map[string]int64, error) { var response getCountAggResponse - if err := e.query(index, query, &response); err != nil { + if err := e.Query(index, query, &response); err != nil { return nil, err } @@ -95,3 +98,16 @@ func (e *Elastic) getCountAgg(index []string, query base) (map[string]int64, err } return counts, nil } + +// FiltersToQuery - +func FiltersToQuery(by map[string]interface{}) Base { + matches := make([]Item, 0) + for k, v := range by { + matches = append(matches, MatchPhrase(k, v)) + } + return NewQuery().Query( + Bool( + Must(matches...), + ), + ) +} diff --git a/internal/elastic/core/histogram.go b/internal/elastic/core/histogram.go new file mode 100644 index 000000000..145e62a8f --- /dev/null +++ b/internal/elastic/core/histogram.go @@ -0,0 +1,110 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +func buildHistogramContext(ctx models.HistogramContext) Base { + hist := Item{ + "date_histogram": Item{ + "field": "timestamp", + "calendar_interval": ctx.Period, + }, + } + + if ctx.HasFunction() { + hist.Extend(Aggs( + AggItem{ + "result", Item{ + ctx.Function.Name: Item{ + "field": ctx.Function.Field, + }, + }, + }, + )) + } + + matches := make([]Item, 0) + for _, fltr := range ctx.Filters { + switch fltr.Kind { + case models.HistogramFilterKindExists: + matches = append(matches, Exists(fltr.Field)) + case models.HistogramFilterKindMatch: + matches = append(matches, Match(fltr.Field, fltr.Value)) + case models.HistogramFilterKindIn: + if arr, ok := fltr.Value.([]string); ok { + matches = append(matches, In(fltr.Field, arr)) + } + case models.HistogramFilterKindAddresses: + if value, ok := fltr.Value.([]string); ok { + addresses := make([]Item, len(value)) + for i := range value { + addresses[i] = 
MatchPhrase(fltr.Field, value[i]) + } + matches = append(matches, Bool( + Should(addresses...), + MinimumShouldMatch(1), + )) + } + case models.HistogramFilterDexEnrtypoints: + if value, ok := fltr.Value.([]tzip.DAppContract); ok { + entrypoints := make([]Item, 0) + for i := range value { + for j := range value[i].DexVolumeEntrypoints { + entrypoints = append(entrypoints, Bool( + Filter( + MatchPhrase("initiator", value[i].Address), + Match("parent", value[i].DexVolumeEntrypoints[j]), + ), + )) + } + } + matches = append(matches, Bool( + Should(entrypoints...), + MinimumShouldMatch(1), + )) + } + } + } + + return NewQuery().Query( + Bool( + Filter( + matches..., + ), + ), + ).Add( + Aggs(AggItem{Name: "hist", Body: hist}), + ).Zero() +} + +// GetDateHistogram - +func (e *Elastic) GetDateHistogram(period string, opts ...models.HistogramOption) ([][]int64, error) { + ctx := models.HistogramContext{ + Period: period, + } + for _, opt := range opts { + opt(&ctx) + } + + var response getDateHistogramResponse + if err := e.Query(ctx.Indices, buildHistogramContext(ctx), &response); err != nil { + return nil, err + } + + histogram := make([][]int64, 0) + for _, bucket := range response.Agg.Hist.Buckets { + val := bucket.DocCount + if ctx.HasFunction() { + val = int64(bucket.Result.Value) + } + + item := []int64{ + bucket.Key, + val, + } + histogram = append(histogram, item) + } + return histogram, nil +} diff --git a/internal/elastic/core/query_builder.go b/internal/elastic/core/query_builder.go new file mode 100644 index 000000000..a3130aad0 --- /dev/null +++ b/internal/elastic/core/query_builder.go @@ -0,0 +1,411 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/helpers" +) + +// sizes +const ( + MaxQuerySize = 10000 + MinQuerySize = 0 +) + +// Item - +type Item map[string]interface{} + +// List - +type List []interface{} + +// Bool - +func Bool(items ...Item) Item { + bq := Item{} + q := Item{} + for i := range items { + for k, v := range 
items[i] { + if helpers.StringInArray(k, []string{"must", "should", "filter", "must_not", "minimum_should_match"}) { + q[k] = v + } + } + } + bq["bool"] = q + return bq +} + +// MinimumShouldMatch - +func MinimumShouldMatch(value int) Item { + return Item{ + "minimum_should_match": value, + } +} + +// Exists - +func Exists(field string) Item { + return Item{ + "exists": Item{ + "field": field, + }, + } +} + +// Must - +func Must(items ...Item) Item { + return Item{ + "must": items, + } +} + +// MustNot - +func MustNot(items ...Item) Item { + return Item{ + "must_not": items, + } +} + +// Should - +func Should(items ...Item) Item { + return Item{ + "should": items, + } +} + +// Filter - +func Filter(items ...Item) Item { + return Item{ + "filter": items, + } +} + +// Range - +func Range(field string, orders ...Item) Item { + q := Item{} + for i := range orders { + for k, v := range orders[i] { + if helpers.StringInArray(k, []string{"lt", "gt", "lte", "gte"}) { + q[k] = v + } + } + } + return Item{ + "range": Item{ + field: q, + }, + } +} + +// MatchPhrase - +func MatchPhrase(key string, value interface{}) Item { + return Item{ + "match_phrase": Item{ + key: value, + }, + } +} + +// Match - +func Match(key string, value interface{}) Item { + return Item{ + "match": Item{ + key: value, + }, + } +} + +// Term - +func Term(key string, value interface{}) Item { + return Item{ + "term": Item{ + key: value, + }, + } +} + +// In - +func In(key string, value []string) Item { + return Item{ + "terms": Item{ + key: value, + }, + } +} + +// AggItem - +type AggItem struct { + Name string + Body Item +} + +// Aggs - +func Aggs(items ...AggItem) Item { + body := Item{} + for i := range items { + body[items[i].Name] = items[i].Body + } + return Item{ + "aggs": body, + } +} + +// Cardinality - +func Cardinality(field string) Item { + return Item{ + "cardinality": Item{ + "field": field, + }, + } +} + +// Avg - +func Avg(field string) Item { + return Item{ + "avg": Item{ + "field": 
field, + }, + } +} + +// TermsAgg - +func TermsAgg(field string, size int64) Item { + t := Item{ + "field": field, + } + if size > 0 { + t["size"] = size + } + return Item{ + "terms": t, + } +} + +// Composite - +func Composite(size int64, items ...AggItem) Item { + body := make([]Item, 0) + for i := range items { + body = append(body, Item{ + items[i].Name: items[i].Body, + }) + } + return Item{ + "composite": Item{ + "sources": body, + "size": size, + }, + } +} + +// TopHits - +func TopHits(size int, sortField, order string) Item { + return Item{ + "top_hits": Item{ + "size": size, + "sort": Sort(sortField, order), + }, + } +} + +// Sort - +func Sort(field, order string) Item { + return Item{ + field: Item{ + "order": order, + }, + } +} + +// Max - +func Max(field string) Item { + return Item{ + "max": Item{ + "field": field, + }, + } +} + +// Min - +func Min(field string) Item { + return Item{ + "min": Item{ + "field": field, + }, + } +} + +// Sum - +func Sum(field string) Item { + return Item{ + "sum": Item{ + "field": field, + }, + } +} + +// Count - +func Count(field string) Item { + return Item{ + "value_count": Item{ + "field": field, + }, + } +} + +// MaxBucket - +func MaxBucket(bucketsPath string) Item { + return Item{ + "max_bucket": Item{ + "buckets_path": bucketsPath, + }, + } +} + +// MinBucket - +func MinBucket(bucketsPath string) Item { + return Item{ + "min_bucket": Item{ + "buckets_path": bucketsPath, + }, + } +} + +// QueryString - +func QueryString(text string, fields []string) Item { + queryS := Item{ + "query": text, + } + if len(fields) > 0 { + queryS["fields"] = fields + } + return Item{ + "query_string": queryS, + } +} + +// Append - +func (q Item) Append(key string, value interface{}) Item { + q[key] = value + return q +} + +// Extend - +func (q Item) Extend(item Item) Item { + for k, v := range item { + q[k] = v + } + return q +} + +// Get - +func (q Item) Get(name string) Item { + if val, ok := q[name]; ok { + if typ, ok := val.(Item); 
ok { + return typ + } + return nil + } + return nil +} + +// Base - +type Base Item + +// NewQuery - +func NewQuery() Base { + return Base{} +} + +// Size - +func (q Base) Size(size int64) Base { + if size != 0 { + q["size"] = size + } + return q +} + +// All - +func (q Base) All() Base { + q["size"] = MaxQuerySize + return q +} + +// One - +func (q Base) One() Base { + q["size"] = 1 + return q +} + +// Zero - +func (q Base) Zero() Base { + q["size"] = MinQuerySize + return q +} + +// From - +func (q Base) From(from int64) Base { + if from != 0 { + q["from"] = from + } + return q +} + +// Query - +func (q Base) Query(item Item) Base { + q["query"] = item + return q +} + +// Sort - +func (q Base) Sort(key, order string) Base { + q["sort"] = Item{ + key: Item{ + "order": order, + }, + } + return q +} + +// SearchAfter - +func (q Base) SearchAfter(value []interface{}) Base { + q["search_after"] = value + return q +} + +// Add - +func (q Base) Add(items ...Item) Base { + for _, item := range items { + for k, v := range item { + q[k] = v + } + } + return q +} + +// Source - +func (q Base) Source(items ...Item) Base { + qi := Item{} + for i := range items { + for k, v := range items[i] { + if helpers.StringInArray(k, []string{"excludes", "includes"}) { + qi[k] = v + } + } + } + q["_source"] = qi + return q +} + +// Highlights - +func (q Base) Highlights(highlights Item) Base { + q["highlight"] = Item{ + "fields": highlights, + } + return q +} + +// Get - +func (q Base) Get(name string) Item { + if val, ok := q[name]; ok { + if typ, ok := val.(Item); ok { + return typ + } + return nil + } + return nil +} diff --git a/internal/elastic/scroll.go b/internal/elastic/core/scroll.go similarity index 83% rename from internal/elastic/scroll.go rename to internal/elastic/core/scroll.go index 9ff873b25..2ad19a4f2 100644 --- a/internal/elastic/scroll.go +++ b/internal/elastic/core/scroll.go @@ -1,4 +1,4 @@ -package elastic +package core import ( "bytes" @@ -6,14 +6,16 @@ import ( 
"reflect" "time" + "github.com/baking-bad/bcdhub/internal/models" "github.com/elastic/go-elasticsearch/v8/esapi" "github.com/pkg/errors" ) const defaultScrollSize = 1000 -type scrollContext struct { - Query base +// ScrollContext - +type ScrollContext struct { + Query Base Size int64 ChunkSize int64 @@ -21,11 +23,12 @@ type scrollContext struct { scrollIds map[string]struct{} } -func newScrollContext(e *Elastic, query base, size, chunkSize int64) *scrollContext { +// NewScrollContext - +func NewScrollContext(e *Elastic, query Base, size, chunkSize int64) *ScrollContext { if chunkSize == 0 { chunkSize = defaultScrollSize } - return &scrollContext{ + return &ScrollContext{ e: e, scrollIds: make(map[string]struct{}), @@ -35,7 +38,7 @@ func newScrollContext(e *Elastic, query base, size, chunkSize int64) *scrollCont } } -func (ctx *scrollContext) createScroll(index string, query map[string]interface{}) (response SearchResponse, err error) { +func (ctx *ScrollContext) createScroll(index string, query map[string]interface{}) (response SearchResponse, err error) { var buf bytes.Buffer if err = json.NewEncoder(&buf).Encode(query); err != nil { return @@ -57,22 +60,22 @@ func (ctx *scrollContext) createScroll(index string, query map[string]interface{ } defer resp.Body.Close() - err = ctx.e.getResponse(resp, &response) + err = ctx.e.GetResponse(resp, &response) return } -func (ctx *scrollContext) queryScroll(scrollID string) (response SearchResponse, err error) { +func (ctx *ScrollContext) queryScroll(scrollID string) (response SearchResponse, err error) { resp, err := ctx.e.Scroll(ctx.e.Scroll.WithScrollID(scrollID), ctx.e.Scroll.WithScroll(time.Minute)) if err != nil { return } defer resp.Body.Close() - err = ctx.e.getResponse(resp, &response) + err = ctx.e.GetResponse(resp, &response) return } -func (ctx *scrollContext) removeScroll(scrollIDs []string) error { +func (ctx *ScrollContext) removeScroll(scrollIDs []string) error { if len(scrollIDs) == 0 
{ return nil } @@ -86,7 +89,8 @@ func (ctx *scrollContext) removeScroll(scrollIDs []string) error { return nil } -func (ctx *scrollContext) get(output interface{}) error { +// Get - +func (ctx *ScrollContext) Get(output interface{}) error { typ, err := getElementType(output) if err != nil { return err @@ -134,7 +138,7 @@ func (ctx *scrollContext) get(output interface{}) error { return ctx.clear() } -func (ctx *scrollContext) clear() error { +func (ctx *ScrollContext) clear() error { ctx.Query = nil ctx.Size = 0 ctx.ChunkSize = 0 @@ -160,7 +164,7 @@ func getElementType(output interface{}) (reflect.Type, error) { func getIndex(typ reflect.Type) (string, error) { newItem := reflect.New(typ) - interfaceType := reflect.TypeOf((*Model)(nil)).Elem() + interfaceType := reflect.TypeOf((*models.Model)(nil)).Elem() if !newItem.Type().Implements(interfaceType) { return "", errors.Errorf("Implements: 'output' is not implemented `Model` interface") diff --git a/internal/elastic/search.go b/internal/elastic/core/search.go similarity index 68% rename from internal/elastic/search.go rename to internal/elastic/core/search.go index 78475f1cf..f9b909639 100644 --- a/internal/elastic/search.go +++ b/internal/elastic/core/search.go @@ -1,11 +1,12 @@ -package elastic +package core import ( "fmt" "regexp" "strings" - "github.com/baking-bad/bcdhub/internal/elastic/search" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/search" "github.com/pkg/errors" ) @@ -13,26 +14,9 @@ const ( defaultSize = 10 ) -var ptrRegEx = regexp.MustCompile(`^ptr:\d+$`) var sanitizeRegEx = regexp.MustCompile(`[\:]`) -type searchContext struct { - Text string - Indices []string - Fields []string - Highlights qItem - Offset int64 -} - -func newSearchContext() searchContext { - return searchContext{ - Fields: make([]string, 0), - Indices: make([]string, 0), - Highlights: make(qItem), - } -} - -func getFields(searchString string, filters 
map[string]interface{}, fields []string) ([]string, []string, qItem, error) { +func getFields(searchString string, filters map[string]interface{}, fields []string) ([]string, []string, Item, error) { var indices []string if val, ok := filters["indices"]; ok { indices = val.([]string) @@ -45,10 +29,10 @@ func getFields(searchString string, filters map[string]interface{}, fields []str } f := make([]string, 0) - h := make(qItem) + h := make(Item) for _, score := range scores.Scores { s := strings.Split(score, "^") - h[s[0]] = qItem{} + h[s[0]] = Item{} f = append(f, s[0]) } return f, scores.Indices, h, nil @@ -112,36 +96,20 @@ func prepareSearchFilters(filters map[string]interface{}) (string, error) { return builder.String(), nil } -// searchByTextResponse - -type searchByTextResponse struct { - Took int64 `json:"took"` - Hits HitsArray `json:"hits"` - Agg struct { - Projects struct { - Buckets []struct { - Bucket - Last struct { - Hits HitsArray `json:"hits"` - } `json:"last"` - } `json:"buckets"` - } `json:"projects"` - } `json:"aggregations"` -} - // SearchByText - -func (e *Elastic) SearchByText(text string, offset int64, fields []string, filters map[string]interface{}, group bool) (search.Result, error) { +func (e *Elastic) SearchByText(text string, offset int64, fields []string, filters map[string]interface{}, group bool) (models.Result, error) { if text == "" { - return search.Result{}, errors.Errorf("Empty search string. Please query something") + return models.Result{}, errors.Errorf("Empty search string. 
Please query something") } ctx, err := prepare(text, filters, fields) if err != nil { - return search.Result{}, err + return models.Result{}, err } ctx.Offset = offset - query := newQuery().Query( - queryString(ctx.Text, ctx.Fields), + query := NewQuery().Query( + QueryString(ctx.Text, ctx.Fields), ) if group { @@ -149,29 +117,29 @@ func (e *Elastic) SearchByText(text string, offset int64, fields []string, filte } var response searchByTextResponse - if err := e.query(ctx.Indices, query, &response); err != nil { - return search.Result{}, err + if err := e.Query(ctx.Indices, query, &response); err != nil { + return models.Result{}, err } - var items []search.Item + var items []models.Item if group { items, err = parseSearchGroupingResponse(response, offset) } else { items, err = parseSearchResponse(response) } if err != nil { - return search.Result{}, nil + return models.Result{}, nil } - return search.Result{ + return models.Result{ Items: items, Time: response.Took, Count: response.Hits.Total.Value, }, nil } -func parseSearchResponse(response searchByTextResponse) ([]search.Item, error) { - items := make([]search.Item, 0) +func parseSearchResponse(response searchByTextResponse) ([]models.Item, error) { + items := make([]models.Item, 0) arr := response.Hits.Hits for i := range arr { val, err := search.Parse(arr[i].Index, arr[i].Highlight, arr[i].Source) @@ -183,31 +151,31 @@ func parseSearchResponse(response searchByTextResponse) ([]search.Item, error) { } switch t := val.(type) { - case search.Item: + case models.Item: items = append(items, t) - case []search.Item: + case []models.Item: items = append(items, t...) 
} } return items, nil } -func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([]search.Item, error) { +func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([]models.Item, error) { if len(response.Agg.Projects.Buckets) == 0 { - return make([]search.Item, 0), nil + return make([]models.Item, 0), nil } arr := response.Agg.Projects.Buckets lArr := int64(len(arr)) - items := make([]search.Item, 0) + items := make([]models.Item, 0) if offset > lArr { return items, nil } arr = arr[offset:] for i := range arr { - searchItem := search.Item{} + searchItem := models.Item{} if arr[i].DocCount > 1 { - searchItem.Group = search.NewGroup(arr[i].DocCount) + searchItem.Group = models.NewGroup(arr[i].DocCount) } for j, item := range arr[i].Last.Hits.Hits { @@ -219,19 +187,19 @@ func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([ continue } switch t := val.(type) { - case search.Item: + case models.Item: if j == 0 { searchItem.Type = t.Type searchItem.Body = t.Body searchItem.Value = t.Value searchItem.Highlights = item.Highlight } else { - searchItem.Group.Top = append(searchItem.Group.Top, search.Top{ + searchItem.Group.Top = append(searchItem.Group.Top, models.Top{ Key: t.Value, Network: t.Network, }) } - case []search.Item: + case []models.Item: if j == 0 { if len(t) > 0 { searchItem.Type = t[0].Type @@ -241,7 +209,7 @@ func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([ } if len(t) > 1 { for k := range t[1:] { - searchItem.Group.Top = append(searchItem.Group.Top, search.Top{ + searchItem.Group.Top = append(searchItem.Group.Top, models.Top{ Key: t[k].Value, Network: t[k].Network, }) @@ -249,7 +217,7 @@ func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([ } } else { for k := range t { - searchItem.Group.Top = append(searchItem.Group.Top, search.Top{ + searchItem.Group.Top = append(searchItem.Group.Top, models.Top{ Key: t[k].Value, Network: 
t[k].Network, }) @@ -262,12 +230,12 @@ func parseSearchGroupingResponse(response searchByTextResponse, offset int64) ([ return items, nil } -func prepare(search string, filters map[string]interface{}, fields []string) (searchContext, error) { - ctx := newSearchContext() +func prepare(searchString string, filters map[string]interface{}, fields []string) (search.Context, error) { + ctx := search.NewContext() - if ptrRegEx.MatchString(search) { - ctx.Text = strings.TrimPrefix(search, "ptr:") - ctx.Indices = []string{DocBigMapDiff} + if search.IsPtrSearch(searchString) { + ctx.Text = strings.TrimPrefix(searchString, "ptr:") + ctx.Indices = []string{models.DocBigMapDiff} ctx.Fields = []string{"ptr"} } else { internalFields, usingIndices, highlights, err := getFields(ctx.Text, filters, fields) @@ -277,7 +245,7 @@ func prepare(search string, filters map[string]interface{}, fields []string) (se ctx.Indices = usingIndices ctx.Highlights = highlights ctx.Fields = internalFields - ctx.Text = fmt.Sprintf("%s*", search) + ctx.Text = fmt.Sprintf("%s*", searchString) } filterString, err := prepareSearchFilters(filters) @@ -291,27 +259,27 @@ func prepare(search string, filters map[string]interface{}, fields []string) (se return ctx, nil } -func grouping(ctx searchContext, query base) base { - topHits := qItem{ - "top_hits": qItem{ +func grouping(ctx search.Context, query Base) Base { + topHits := Item{ + "top_hits": Item{ "size": 1, - "sort": qList{ - sort("_score", "desc"), - qItem{"last_action": qItem{"order": "desc", "unmapped_type": "long"}}, - sort("timestamp", "desc"), + "sort": List{ + Sort("_score", "desc"), + Item{"last_action": Item{"order": "desc", "unmapped_type": "long"}}, + Sort("timestamp", "desc"), }, - "highlight": qItem{ + "highlight": Item{ "fields": ctx.Highlights, }, }, } query.Add( - aggs( - aggItem{ - "projects", - qItem{ - "terms": qItem{ + Aggs( + AggItem{ + Name: "projects", + Body: Item{ + "terms": Item{ "script": ` if (doc['_index'].value == "contract") 
{ return doc['fingerprint.parameter'].value + '|' + doc['fingerprint.storage'].value + '|' + doc['fingerprint.code'].value @@ -325,20 +293,20 @@ func grouping(ctx searchContext, query base) base { return doc['name.keyword'].value + '|' + doc['network.keyword'].value }`, "size": defaultSize + ctx.Offset, - "order": qList{ - qItem{"bucket_score": "desc"}, - qItem{"bucket_time": "desc"}, + "order": List{ + Item{"bucket_score": "desc"}, + Item{"bucket_time": "desc"}, }, }, - "aggs": qItem{ + "aggs": Item{ "last": topHits, - "bucket_score": qItem{ - "max": qItem{ + "bucket_score": Item{ + "max": Item{ "script": "_score", }, }, - "bucket_time": qItem{ - "max": qItem{ + "bucket_time": Item{ + "max": Item{ "script": "if (doc.containsKey('last_action')) {return doc['last_action'].value} else {return doc['timestamp']}", }, }, diff --git a/internal/elastic/snapshots.go b/internal/elastic/core/snapshots.go similarity index 94% rename from internal/elastic/snapshots.go rename to internal/elastic/core/snapshots.go index fd569738a..9a3b2c6fb 100644 --- a/internal/elastic/snapshots.go +++ b/internal/elastic/core/snapshots.go @@ -1,4 +1,4 @@ -package elastic +package core import ( "bytes" @@ -9,6 +9,7 @@ import ( stdJSON "encoding/json" + "github.com/baking-bad/bcdhub/internal/models" "github.com/elastic/go-elasticsearch/v8/esapi" "github.com/pkg/errors" ) @@ -50,7 +51,7 @@ func (e *Elastic) CreateAWSRepository(name, awsBucketName, awsRegion string) err } // ListRepositories - -func (e *Elastic) ListRepositories() ([]Repository, error) { +func (e *Elastic) ListRepositories() ([]models.Repository, error) { options := []func(*esapi.CatRepositoriesRequest){ e.Cat.Repositories.WithContext(context.Background()), e.Cat.Repositories.WithFormat("JSON"), @@ -64,8 +65,8 @@ func (e *Elastic) ListRepositories() ([]Repository, error) { defer resp.Body.Close() - var response []Repository - if err := e.getResponse(resp, &response); err != nil { + var response 
[]models.Repository + if err := e.GetResponse(resp, &response); err != nil { return nil, err } return response, nil @@ -215,7 +216,7 @@ func (e *Elastic) GetAllPolicies() ([]string, error) { defer resp.Body.Close() response := make(map[string]interface{}) - if err := e.getResponse(resp, &response); err != nil { + if err := e.GetResponse(resp, &response); err != nil { return nil, err } policyIDs := make([]string, 0) @@ -241,7 +242,7 @@ func (e *Elastic) GetMappings(indices []string) (map[string]string, error) { defer resp.Body.Close() response := make(map[string]stdJSON.RawMessage) - if err = e.getResponse(resp, &response); err != nil { + if err = e.GetResponse(resp, &response); err != nil { return nil, err } diff --git a/internal/elastic/core/stats.go b/internal/elastic/core/stats.go new file mode 100644 index 000000000..0095c510e --- /dev/null +++ b/internal/elastic/core/stats.go @@ -0,0 +1,142 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/models" +) + +// GetNetworkCountStats - +func (e *Elastic) GetNetworkCountStats(network string) (map[string]int64, error) { + query := NewQuery().Query( + Bool( + Filter( + Match("network", network), + ), + Should( + Exists("entrypoint"), + Exists("fingerprint"), + ), + MinimumShouldMatch(1), + ), + ).Add( + Aggs( + AggItem{ + "body", + TermsAgg("_index", MaxQuerySize), + }, + ), + ).Zero() + + return e.GetCountAgg([]string{models.DocContracts, models.DocOperations}, query) +} + +// GetCallsCountByNetwork - +func (e *Elastic) GetCallsCountByNetwork() (map[string]int64, error) { + query := NewQuery().Query(Exists("entrypoint")).Add( + Aggs( + AggItem{ + "body", Item{ + "terms": Item{ + "field": "network.keyword", + }, + }, + }, + ), + ).Zero() + + return e.GetCountAgg([]string{models.DocOperations}, query) +} + +type getContractStatsByNetworkStats struct { + Agg struct { + Network struct { + Buckets []struct { + Bucket + Same IntValue `json:"same"` + Balance FloatValue `json:"balance"` + } 
`json:"buckets"` + } `json:"network"` + } `json:"aggregations"` +} + +// GetContractStatsByNetwork - +func (e *Elastic) GetContractStatsByNetwork() (map[string]models.ContractCountStats, error) { + query := NewQuery().Add( + Aggs( + AggItem{ + "network", Item{ + "terms": Item{ + "field": "network.keyword", + }, + "aggs": Item{ + "same": Item{ + "cardinality": Item{ + "script": "doc['fingerprint.parameter'].value + '|' + doc['fingerprint.storage'].value + '|' + doc['fingerprint.code'].value", + }, + }, + "balance": Sum("balance"), + }, + }, + }, + ), + ).Zero() + + var response getContractStatsByNetworkStats + if err := e.Query([]string{models.DocContracts}, query, &response); err != nil { + return nil, err + } + + counts := make(map[string]models.ContractCountStats) + for _, item := range response.Agg.Network.Buckets { + counts[item.Key] = models.ContractCountStats{ + Total: item.DocCount, + SameCount: item.Same.Value, + Balance: int64(item.Balance.Value), + } + } + return counts, nil +} + +// GetFACountByNetwork - +func (e *Elastic) GetFACountByNetwork() (map[string]int64, error) { + query := NewQuery().Query( + In("tags", []string{ + "fa1", + "fa12", + }), + ).Add( + Aggs( + AggItem{ + "body", Item{ + "terms": Item{ + "field": "network.keyword", + }, + }, + }, + ), + ).Zero() + + return e.GetCountAgg([]string{models.DocContracts}, query) +} + +// GetLanguagesForNetwork - +func (e *Elastic) GetLanguagesForNetwork(network string) (map[string]int64, error) { + query := NewQuery().Query( + Bool( + Filter( + Match("network", network), + ), + ), + ).Add( + Aggs( + AggItem{ + "body", Item{ + "terms": Item{ + "field": "language.keyword", + }, + }, + }, + ), + ).Zero() + + return e.GetCountAgg([]string{models.DocContracts}, query) +} diff --git a/internal/elastic/update.go b/internal/elastic/core/update.go similarity index 82% rename from internal/elastic/update.go rename to internal/elastic/core/update.go index 1c86810fe..2437a277b 100644 --- a/internal/elastic/update.go 
+++ b/internal/elastic/core/update.go @@ -1,4 +1,4 @@ -package elastic +package core import ( "bytes" @@ -6,11 +6,12 @@ import ( "reflect" "strings" + "github.com/baking-bad/bcdhub/internal/models" "github.com/elastic/go-elasticsearch/v8/esapi" ) // UpdateDoc - updates document -func (e *Elastic) UpdateDoc(model Model) error { +func (e *Elastic) UpdateDoc(model models.Model) error { b, err := json.Marshal(model) if err != nil { return err @@ -28,10 +29,11 @@ func (e *Elastic) UpdateDoc(model Model) error { } defer res.Body.Close() - return e.getResponse(res, nil) + return e.GetResponse(res, nil) } -func (e *Elastic) buildFieldsForModel(data interface{}, fields ...string) ([]byte, error) { +// BuildFieldsForModel - +func (e *Elastic) BuildFieldsForModel(data interface{}, fields ...string) ([]byte, error) { t := reflect.TypeOf(data) val := reflect.ValueOf(data) if val.Kind() == reflect.Ptr { @@ -63,7 +65,7 @@ func (e *Elastic) buildFieldsForModel(data interface{}, fields ...string) ([]byt // UpdateFields - func (e *Elastic) UpdateFields(index, id string, data interface{}, fields ...string) error { - updated, err := e.buildFieldsForModel(data, fields...) + updated, err := e.BuildFieldsForModel(data, fields...) 
if err != nil { return err } @@ -81,5 +83,5 @@ func (e *Elastic) UpdateFields(index, id string, data interface{}, fields ...str } defer res.Body.Close() - return e.getResponse(res, nil) + return e.GetResponse(res, nil) } diff --git a/internal/elastic/data.go b/internal/elastic/data.go deleted file mode 100644 index 97c3c01ed..000000000 --- a/internal/elastic/data.go +++ /dev/null @@ -1,204 +0,0 @@ -package elastic - -import ( - "time" - - "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" - "github.com/baking-bad/bcdhub/internal/models" -) - -// LightContract - -type LightContract struct { - Address string `json:"address"` - Network string `json:"network"` - Deployed time.Time `json:"deploy_time"` -} - -// PageableOperations - -type PageableOperations struct { - Operations []models.Operation `json:"operations"` - LastID string `json:"last_id"` -} - -// SameContractsResponse - -type SameContractsResponse struct { - Count int64 `json:"count"` - Contracts []models.Contract `json:"contracts"` -} - -// SimilarContract - -type SimilarContract struct { - *models.Contract - Count int64 `json:"count"` -} - -// BigMapDiff - -type BigMapDiff struct { - Ptr int64 `json:"ptr,omitempty"` - BinPath string `json:"bin_path"` - Key interface{} `json:"key"` - KeyHash string `json:"key_hash"` - Value string `json:"value"` - OperationID string `json:"operation_id"` - Level int64 `json:"level"` - Address string `json:"address"` - Network string `json:"network"` - Timestamp time.Time `json:"timestamp"` - Protocol string `json:"protocol"` - - Count int64 `json:"count"` -} - -// FromModel - -func (b *BigMapDiff) FromModel(bmd *models.BigMapDiff) { - b.Ptr = bmd.Ptr - b.BinPath = bmd.BinPath - b.KeyHash = bmd.KeyHash - b.Value = bmd.Value - b.OperationID = bmd.OperationID - b.Level = bmd.Level - b.Address = bmd.Address - b.Network = bmd.Network - b.Timestamp = bmd.Timestamp - b.Protocol = bmd.Protocol - b.Key = bmd.Key -} - -// ContractStats - -type ContractStats 
struct { - TxCount int64 `json:"tx_count"` - LastAction time.Time `json:"last_action"` - Balance int64 `json:"balance"` - TotalWithdrawn int64 `json:"total_withdrawn"` -} - -// ContractMigrationsStats - -type ContractMigrationsStats struct { - MigrationsCount int64 `json:"migrations_count"` -} - -// DiffTask - -type DiffTask struct { - Network1 string - Address1 string - Network2 string - Address2 string -} - -// ContractCountStats - -type ContractCountStats struct { - Total int64 - SameCount int64 - TotalWithdrawn int64 - Balance int64 -} - -// SubscriptionRequest - -type SubscriptionRequest struct { - Address string - Network string - Alias string - Hash string - ProjectID string - WithSame bool - WithSimilar bool - WithMempool bool - WithMigrations bool - WithErrors bool - WithCalls bool - WithDeployments bool -} - -// EventContract - -type EventContract struct { - Network string `json:"network"` - Address string `json:"address"` - Hash string `json:"hash"` - ProjectID string `json:"project_id"` - Timestamp time.Time `json:"timestamp"` -} - -// EventType - -const ( - EventTypeError = "error" - EventTypeMigration = "migration" - EventTypeCall = "call" - EventTypeInvoke = "invoke" - EventTypeDeploy = "deploy" - EventTypeSame = "same" - EventTypeSimilar = "similar" - EventTypeMempool = "mempool" -) - -// Event - -type Event struct { - Type string `json:"type"` - Address string `json:"address"` - Network string `json:"network"` - Alias string `json:"alias"` - Body interface{} `json:"body,omitempty"` -} - -// EventOperation - -type EventOperation struct { - Network string `json:"network"` - Hash string `json:"hash"` - Internal bool `json:"internal"` - Status string `json:"status"` - Timestamp time.Time `json:"timestamp"` - Kind string `json:"kind"` - Fee int64 `json:"fee,omitempty"` - Amount int64 `json:"amount,omitempty"` - Entrypoint string `json:"entrypoint,omitempty"` - Source string `json:"source"` - SourceAlias string `json:"source_alias,omitempty"` - 
Destination string `json:"destination,omitempty"` - DestinationAlias string `json:"destination_alias,omitempty"` - Delegate string `json:"delegate,omitempty"` - DelegateAlias string `json:"delegate_alias,omitempty"` - - Result *models.OperationResult `json:"result,omitempty"` - Errors []*cerrors.Error `json:"errors,omitempty"` - Burned int64 `json:"burned,omitempty"` -} - -// EventMigration - -type EventMigration struct { - Network string `json:"network"` - Protocol string `json:"protocol"` - PrevProtocol string `json:"prev_protocol,omitempty"` - Hash string `json:"hash,omitempty"` - Timestamp time.Time `json:"timestamp"` - Level int64 `json:"level"` - Address string `json:"address"` - Kind string `json:"kind"` -} - -// TokenMethodUsageStats - -type TokenMethodUsageStats struct { - Count int64 - ConsumedGas int64 -} - -// TokenUsageStats - -type TokenUsageStats map[string]TokenMethodUsageStats - -// DAppStats - -type DAppStats struct { - Users int64 `json:"users"` - Calls int64 `json:"txs"` - Volume int64 `json:"volume"` -} - -// TransfersResponse - -type TransfersResponse struct { - Transfers []models.Transfer `json:"transfers"` - Total int64 `json:"total"` - LastID string `json:"last_id"` -} - -// Address - -type Address struct { - Address string - Network string -} diff --git a/internal/elastic/domains.go b/internal/elastic/domains.go deleted file mode 100644 index 6b9575de7..000000000 --- a/internal/elastic/domains.go +++ /dev/null @@ -1,69 +0,0 @@ -package elastic - -import ( - "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -// ListDomains - -func (e *Elastic) ListDomains(network string, size, offset int64) (DomainsResponse, error) { - if size > defaultScrollSize { - size = defaultScrollSize - } - - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - ), - ).Size(size).From(offset).Sort("timestamp", "desc") - - var response 
SearchResponse - if err := e.query([]string{DocTezosDomains}, query, &response); err != nil { - return DomainsResponse{}, err - } - if response.Hits.Total.Value == 0 { - return DomainsResponse{}, nil - } - - domains := make([]models.TezosDomain, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &domains[i]); err != nil { - return DomainsResponse{}, err - } - } - result := DomainsResponse{ - Domains: domains, - Total: response.Hits.Total.Value, - } - return result, nil -} - -// ResolveDomainByAddress - -func (e *Elastic) ResolveDomainByAddress(network string, address string) (*models.TezosDomain, error) { - if !helpers.IsAddress(address) { - return nil, errors.Errorf("Invalid address: %s", address) - } - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("address", address), - ), - ), - ).One() - - var response SearchResponse - if err := e.query([]string{DocTezosDomains}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTezosDomains, "", query) - } - - var td models.TezosDomain - err := json.Unmarshal(response.Hits.Hits[0].Source, &td) - return &td, err -} diff --git a/internal/elastic/events.go b/internal/elastic/events.go deleted file mode 100644 index 17a3c4e3f..000000000 --- a/internal/elastic/events.go +++ /dev/null @@ -1,286 +0,0 @@ -package elastic - -import ( - "strings" - - "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/pkg/errors" -) - -// GetEvents - -func (e *Elastic) GetEvents(subscriptions []SubscriptionRequest, size, offset int64) ([]Event, error) { - if len(subscriptions) == 0 { - return []Event{}, nil - } - - if size == 0 || size > 50 { // TODO: ??? 
- size = defaultSize - } - - shouldItems := make([]qItem, 0) - indicesMap := make(map[string]struct{}) - - for i := range subscriptions { - items := getEventsQuery(subscriptions[i], indicesMap) - shouldItems = append(shouldItems, items...) - } - - indices := make([]string, 0) - for ind := range indicesMap { - indices = append(indices, ind) - } - if len(indices) == 0 { - return []Event{}, nil - } - - return e.getEvents(subscriptions, shouldItems, indices, size, offset) -} - -func (e *Elastic) getEvents(subscriptions []SubscriptionRequest, shouldItems []qItem, indices []string, size, offset int64) ([]Event, error) { - query := newQuery() - if len(shouldItems) != 0 { - query.Query( - boolQ( - should(shouldItems...), - minimumShouldMatch(1), - ), - ) - } - query.Sort("timestamp", "desc").Size(size).From(offset) - - var response SearchResponse - if err := e.query(indices, query, &response); err != nil { - return nil, err - } - - hits := response.Hits.Hits - events := make([]Event, len(hits)) - for i := range hits { - event, err := parseEvent(subscriptions, hits[i]) - if err != nil { - return nil, err - } - events[i] = event - } - - return events, nil -} - -func (m *EventMigration) makeEvent(subscriptions []SubscriptionRequest) (Event, error) { - res := Event{ - Type: EventTypeMigration, - Address: m.Address, - Network: m.Network, - Body: m, - } - for i := range subscriptions { - if m.Network == subscriptions[i].Network && m.Address == subscriptions[i].Address { - res.Alias = subscriptions[i].Alias - return res, nil - } - } - return Event{}, errors.Errorf("Couldn't find a matching subscription for %v", m) -} - -func (o *EventOperation) makeEvent(subscriptions []SubscriptionRequest) (Event, error) { - res := Event{ - Network: o.Network, - Body: o, - } - for i := range subscriptions { - if o.Network != subscriptions[i].Network { - continue - } - if o.Source != subscriptions[i].Address && o.Destination != subscriptions[i].Address { - continue - } - - res.Address = 
subscriptions[i].Address - res.Alias = subscriptions[i].Alias - - switch { - case o.Status != "applied": - res.Type = EventTypeError - case o.Source == subscriptions[i].Address && o.Kind == "origination": - res.Type = EventTypeDeploy - case o.Source == subscriptions[i].Address && o.Kind == "transaction": - res.Type = EventTypeCall - case o.Destination == subscriptions[i].Address && o.Kind == "transaction": - res.Type = EventTypeInvoke - } - - return res, nil - } - return Event{}, errors.Errorf("Couldn't find a matching subscription for %v", o) -} - -func (c *EventContract) makeEvent(subscriptions []SubscriptionRequest) (Event, error) { - res := Event{ - Body: c, - } - for i := range subscriptions { - if c.Hash == subscriptions[i].Hash || c.ProjectID == subscriptions[i].ProjectID { - res.Network = subscriptions[i].Network - res.Address = subscriptions[i].Address - res.Alias = subscriptions[i].Alias - - if c.Hash == subscriptions[i].Hash { - res.Type = EventTypeSame - } else { - res.Type = EventTypeSimilar - } - return res, nil - } - } - return Event{}, errors.Errorf("Couldn't find a matching subscription for %v", c) -} - -func parseEvent(subscriptions []SubscriptionRequest, hit Hit) (Event, error) { - switch hit.Index { - case DocOperations: - var event EventOperation - if err := json.Unmarshal(hit.Source, &event); err != nil { - return Event{}, err - } - return event.makeEvent(subscriptions) - case DocMigrations: - var event EventMigration - if err := json.Unmarshal(hit.Source, &event); err != nil { - return Event{}, err - } - return event.makeEvent(subscriptions) - case DocContracts: - var event EventContract - if err := json.Unmarshal(hit.Source, &event); err != nil { - return Event{}, err - } - return event.makeEvent(subscriptions) - default: - return Event{}, errors.Errorf("[parseEvent] Invalid reponse type: %s", hit.Index) - } -} - -func getEventsQuery(subscription SubscriptionRequest, indices map[string]struct{}) []qItem { - shouldItems := make([]qItem, 0) - 
- if item := getEventsWatchCalls(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocOperations] = struct{}{} - } - if item := getEventsWatchErrors(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocOperations] = struct{}{} - } - if item := getEventsWatchDeployments(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocOperations] = struct{}{} - } - - if strings.HasPrefix(subscription.Address, "KT") { - if item := getEventsWatchMigrations(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocMigrations] = struct{}{} - } - if item := getSubscriptionWithSame(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocContracts] = struct{}{} - } - if item := getSubscriptionWithSimilar(subscription); item != nil { - shouldItems = append(shouldItems, item) - indices[DocContracts] = struct{}{} - } - } - - return shouldItems -} - -func getEventsWatchMigrations(subscription SubscriptionRequest) qItem { - if !subscription.WithMigrations { - return nil - } - - return boolQ( - filter( - in("kind.keyword", []string{consts.MigrationBootstrap, consts.MigrationLambda, consts.MigrationUpdate}), - term("network.keyword", subscription.Network), - term("address.keyword", subscription.Address), - ), - ) -} - -func getEventsWatchDeployments(subscription SubscriptionRequest) qItem { - if !subscription.WithDeployments { - return nil - } - - return boolQ( - filter( - term("kind.keyword", "origination"), - term("network.keyword", subscription.Network), - term("source.keyword", subscription.Address), - ), - ) -} - -func getEventsWatchCalls(subscription SubscriptionRequest) qItem { - if !subscription.WithCalls { - return nil - } - - addressKeyword := "destination.keyword" - if strings.HasPrefix(subscription.Address, "tz") { - addressKeyword = "source.keyword" - } - - return boolQ( - filter( - term("kind.keyword", "transaction"), - 
term("status.keyword", "applied"), - term("network.keyword", subscription.Network), - term(addressKeyword, subscription.Address), - ), - ) -} - -func getEventsWatchErrors(subscription SubscriptionRequest) qItem { - if !subscription.WithErrors { - return nil - } - - addressKeyword := "destination.keyword" - if strings.HasPrefix(subscription.Address, "tz") { - addressKeyword = "source.keyword" - } - - return boolQ( - filter( - term("network.keyword", subscription.Network), - term(addressKeyword, subscription.Address), - ), - notMust( - term("status.keyword", "applied"), - ), - ) -} - -func getSubscriptionWithSame(subscription SubscriptionRequest) qItem { - if !subscription.WithSame { - return nil - } - - return boolQ( - filter(term("hash.keyword", subscription.Hash)), - notMust(term("address.keyword", subscription.Address)), - ) -} - -func getSubscriptionWithSimilar(subscription SubscriptionRequest) qItem { - if !subscription.WithSimilar { - return nil - } - return boolQ( - filter(term("project_id.keyword", subscription.ProjectID)), - notMust(term("hash.keyword", subscription.Hash)), - notMust(term("address.keyword", subscription.Address)), - ) -} diff --git a/internal/elastic/histogram.go b/internal/elastic/histogram.go deleted file mode 100644 index 15bb31b3c..000000000 --- a/internal/elastic/histogram.go +++ /dev/null @@ -1,176 +0,0 @@ -package elastic - -import "github.com/baking-bad/bcdhub/internal/models/tzip" - -// Histogram filter kinds -const ( - HistogramFilterKindExists = "exists" - HistogramFilterKindMatch = "match" - HistogramFilterKindIn = "in" - HistogramFilterKindAddresses = "address" - HistogramFilterDexEnrtypoints = "dex_entrypoints" -) - -type histogramContext struct { - Indices []string - Period string - Function struct { - Name string - Field string - } - Filters []HistogramFilter -} - -// HistogramFilter - -type HistogramFilter struct { - Field string - Value interface{} - Kind string -} - -func (ctx histogramContext) hasFunction() bool 
{ - return ctx.Function.Name != "" && ctx.Function.Field != "" -} - -func (ctx histogramContext) build() base { - hist := qItem{ - "date_histogram": qItem{ - "field": "timestamp", - "calendar_interval": ctx.Period, - }, - } - - if ctx.hasFunction() { - hist.Extend(aggs( - aggItem{ - "result", qItem{ - ctx.Function.Name: qItem{ - "field": ctx.Function.Field, - }, - }, - }, - )) - } - - matches := make([]qItem, 0) - for _, fltr := range ctx.Filters { - switch fltr.Kind { - case HistogramFilterKindExists: - matches = append(matches, exists(fltr.Field)) - case HistogramFilterKindMatch: - matches = append(matches, matchQ(fltr.Field, fltr.Value)) - case HistogramFilterKindIn: - if arr, ok := fltr.Value.([]string); ok { - matches = append(matches, in(fltr.Field, arr)) - } - case HistogramFilterKindAddresses: - if value, ok := fltr.Value.([]string); ok { - addresses := make([]qItem, len(value)) - for i := range value { - addresses[i] = matchPhrase(fltr.Field, value[i]) - } - matches = append(matches, boolQ( - should(addresses...), - minimumShouldMatch(1), - )) - } - case HistogramFilterDexEnrtypoints: - if value, ok := fltr.Value.([]tzip.DAppContract); ok { - entrypoints := make([]qItem, 0) - for i := range value { - for j := range value[i].DexVolumeEntrypoints { - entrypoints = append(entrypoints, boolQ( - filter( - matchPhrase("initiator", value[i].Address), - matchQ("parent", value[i].DexVolumeEntrypoints[j]), - ), - )) - } - } - matches = append(matches, boolQ( - should(entrypoints...), - minimumShouldMatch(1), - )) - } - } - } - - return newQuery().Query( - boolQ( - filter( - matches..., - ), - ), - ).Add( - aggs(aggItem{"hist", hist}), - ).Zero() -} - -// HistogramOption - -type HistogramOption func(*histogramContext) - -// WithHistogramIndices - -func WithHistogramIndices(indices ...string) HistogramOption { - return func(h *histogramContext) { - h.Indices = indices - } -} - -// WithHistogramFunction - -func WithHistogramFunction(function, field string) 
HistogramOption { - return func(h *histogramContext) { - h.Function = struct { - Name string - Field string - }{function, field} - } -} - -// WithHistogramFilters - -func WithHistogramFilters(filters []HistogramFilter) HistogramOption { - return func(h *histogramContext) { - h.Filters = filters - } -} - -type getDateHistogramResponse struct { - Agg struct { - Hist struct { - Buckets []struct { - Key int64 `json:"key"` - DocCount int64 `json:"doc_count"` - Result floatValue `json:"result,omitempty"` - } `json:"buckets"` - } `json:"hist"` - } `json:"aggregations"` -} - -// GetDateHistogram - -func (e *Elastic) GetDateHistogram(period string, opts ...HistogramOption) ([][]int64, error) { - ctx := histogramContext{ - Period: period, - } - for _, opt := range opts { - opt(&ctx) - } - - var response getDateHistogramResponse - if err := e.query(ctx.Indices, ctx.build(), &response); err != nil { - return nil, err - } - - histogram := make([][]int64, 0) - for _, bucket := range response.Agg.Hist.Buckets { - val := bucket.DocCount - if ctx.hasFunction() { - val = int64(bucket.Result.Value) - } - - item := []int64{ - bucket.Key, - val, - } - histogram = append(histogram, item) - } - return histogram, nil -} diff --git a/internal/elastic/interface.go b/internal/elastic/interface.go deleted file mode 100644 index 2f12fe71e..000000000 --- a/internal/elastic/interface.go +++ /dev/null @@ -1,212 +0,0 @@ -package elastic - -import ( - "io" - - "github.com/baking-bad/bcdhub/internal/elastic/search" - "github.com/baking-bad/bcdhub/internal/models" - "github.com/baking-bad/bcdhub/internal/models/tzip" - "github.com/baking-bad/bcdhub/internal/mq" -) - -// Model - -type Model interface { - mq.IMessage - - GetID() string - GetIndex() string -} - -// IGeneral - -type IGeneral interface { - CreateIndexes() error - DeleteIndices(indices []string) error - DeleteByLevelAndNetwork([]string, string, int64) error - DeleteByContract(indices []string, network, 
address string) error - GetAll(interface{}) error - GetByID(Model) error - GetByIDs(output interface{}, ids ...string) error - GetByNetwork(string, interface{}) error - GetByNetworkWithSort(string, string, string, interface{}) error - UpdateDoc(model Model) (err error) - UpdateFields(string, string, interface{}, ...string) error -} - -// IBalanceUpdate - -type IBalanceUpdate interface { - GetBalance(network, address string) (int64, error) -} - -// IBigMap - -type IBigMap interface { - GetBigMapKey(network, keyHash string, ptr int64) (BigMapDiff, error) - GetBigMapKeys(ctx GetBigMapKeysContext) ([]BigMapDiff, error) - GetBigMapsForAddress(string, string) ([]models.BigMapDiff, error) - GetBigMapHistory(int64, string) ([]models.BigMapAction, error) - GetBigMapValuesByKey(string) ([]BigMapDiff, error) - GetBigMapDiffsCount(network string, ptr int64) (int64, error) -} - -// IBigMapDiff - -type IBigMapDiff interface { - GetBigMapDiffsForAddress(string) ([]models.BigMapDiff, error) - GetBigMapDiffsPrevious([]models.BigMapDiff, int64, string) ([]models.BigMapDiff, error) - GetBigMapDiffsUniqueByOperationID(string) ([]models.BigMapDiff, error) - GetBigMapDiffsByPtrAndKeyHash(int64, string, string, int64, int64) ([]BigMapDiff, int64, error) - GetBigMapDiffsByOperationID(string) ([]*models.BigMapDiff, error) - GetBigMapDiffsByPtr(string, string, int64) ([]models.BigMapDiff, error) -} - -// IBlock - -type IBlock interface { - GetBlock(string, int64) (models.Block, error) - GetLastBlock(string) (models.Block, error) - GetLastBlocks() ([]models.Block, error) - GetNetworkAlias(chainID string) (string, error) -} - -// IBulk - -type IBulk interface { - BulkInsert([]Model) error - BulkUpdate([]Model) error - BulkDelete([]Model) error - BulkRemoveField(string, []Model) error - BulkUpdateField(where []models.Contract, fields ...string) error -} - -// IContract - -type IContract interface { - GetContract(map[string]interface{}) (models.Contract, error) - GetContractRandom() 
(models.Contract, error) - GetContractMigrationStats(string, string) (ContractMigrationsStats, error) - GetContractAddressesByNetworkAndLevel(string, int64) ([]string, error) - GetContracts(map[string]interface{}) ([]models.Contract, error) - GetContractsIDByAddress([]string, string) ([]string, error) - GetAffectedContracts(string, int64, int64) ([]string, error) - IsFAContract(string, string) (bool, error) - RecalcContractStats(string, string) (ContractStats, error) - UpdateContractMigrationsCount(string, string) error - GetDAppStats(string, []string, string) (DAppStats, error) - GetContractsByAddresses(addresses []Address) ([]models.Contract, error) -} - -// IDomains - -type IDomains interface { - ListDomains(network string, size, offset int64) (DomainsResponse, error) - ResolveDomainByAddress(network string, address string) (*models.TezosDomain, error) -} - -// IEvents - -type IEvents interface { - GetEvents([]SubscriptionRequest, int64, int64) ([]Event, error) -} - -// IMigrations - -type IMigrations interface { - GetMigrations(string, string) ([]models.Migration, error) -} - -// IOperations - -type IOperations interface { - GetOperationsForContract(string, string, uint64, map[string]interface{}) (PageableOperations, error) - GetLastOperation(string, string, int64) (models.Operation, error) - GetOperationsStats(network, address string) (OperationsStats, error) - - // GetOperations - get operation by `filter`. `Size` - if 0 - return all, else certain `size` operations. 
- // `Sort` - sort by time and content index by desc - GetOperations(filter map[string]interface{}, size int64, sort bool) ([]models.Operation, error) - GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) -} - -// IProjects - -type IProjects interface { - GetProjectsLastContract() ([]models.Contract, error) - GetSameContracts(models.Contract, int64, int64) (SameContractsResponse, error) - GetSimilarContracts(models.Contract, int64, int64) ([]SimilarContract, int, error) - GetDiffTasks() ([]DiffTask, error) -} - -// IProtocol - -type IProtocol interface { - GetProtocol(string, string, int64) (models.Protocol, error) - GetSymLinks(string, int64) (map[string]struct{}, error) -} - -// ISearch - -type ISearch interface { - SearchByText(string, int64, []string, map[string]interface{}, bool) (search.Result, error) -} - -// ISnapshot - -type ISnapshot interface { - CreateAWSRepository(string, string, string) error - ListRepositories() ([]Repository, error) - CreateSnapshots(string, string, []string) error - RestoreSnapshots(string, string, []string) error - ListSnapshots(string) (string, error) - SetSnapshotPolicy(string, string, string, string, int64) error - GetAllPolicies() ([]string, error) - GetMappings([]string) (map[string]string, error) - CreateMapping(string, io.Reader) error - ReloadSecureSettings() error -} - -// IStats - -type IStats interface { - GetNetworkCountStats(string) (map[string]int64, error) - GetDateHistogram(period string, opts ...HistogramOption) ([][]int64, error) - GetCallsCountByNetwork() (map[string]int64, error) - GetContractStatsByNetwork() (map[string]ContractCountStats, error) - GetFACountByNetwork() (map[string]int64, error) - GetLanguagesForNetwork(network string) (map[string]int64, error) -} - -// ITokens - -type ITokens interface { - GetTokens(string, string, int64, int64) ([]models.Contract, int64, error) - GetTokensStats(string, []string, []string) (map[string]TokenUsageStats, error) - 
GetTokenVolumeSeries(string, string, []string, []tzip.DAppContract, uint) ([][]int64, error) - GetBalances(string, string, int64, ...TokenBalance) (map[TokenBalance]int64, error) - GetAccountBalances(string, string) ([]models.TokenBalance, error) - GetTokenSupply(network, address string, tokenID int64) (result TokenSupply, err error) - GetTransfers(ctx GetTransfersContext) (TransfersResponse, error) - GetAllTransfers(network string, level int64) ([]models.Transfer, error) - UpdateTokenBalances(updates []*models.TokenBalance) error - GetHolders(network, contract string, tokenID int64) ([]models.TokenBalance, error) - GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) -} - -// ITZIP - -type ITZIP interface { - GetTZIP(network, address string) (models.TZIP, error) - GetTZIPWithEvents() ([]models.TZIP, error) - GetTokenMetadata(ctx GetTokenMetadataContext) ([]TokenMetadata, error) - GetDApps() ([]tzip.DApp, error) - GetDAppBySlug(slug string) (*tzip.DApp, error) - GetBySlug(slug string) (*models.TZIP, error) - GetAliases(network string) ([]models.TZIP, error) - GetAliasesMap(network string) (map[string]string, error) - GetAlias(network, address string) (*models.TZIP, error) -} - -// IElastic - -type IElastic interface { - IGeneral - IBalanceUpdate - IBigMap - IBigMapDiff - IBlock - IBulk - IContract - IDomains - IEvents - IMigrations - IOperations - IProjects - IProtocol - ISearch - ISnapshot - IStats - ITokens - ITZIP -} diff --git a/internal/elastic/migration/data.go b/internal/elastic/migration/data.go new file mode 100644 index 000000000..2173108d6 --- /dev/null +++ b/internal/elastic/migration/data.go @@ -0,0 +1,9 @@ +package migration + +import "github.com/baking-bad/bcdhub/internal/elastic/core" + +type getContractMigrationCountResponse struct { + Agg struct { + MigrationsCount core.IntValue `json:"migrations_count"` + } `json:"aggregations"` +} diff --git 
a/internal/elastic/migration/storage.go b/internal/elastic/migration/storage.go new file mode 100644 index 000000000..68a1fda8c --- /dev/null +++ b/internal/elastic/migration/storage.go @@ -0,0 +1,71 @@ +package migration + +import ( + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// Get - +func (storage *Storage) Get(network, address string) ([]migration.Migration, error) { + query := core.NewQuery().Query( + core.Bool( + core.Must( + core.MatchPhrase("network", network), + core.MatchPhrase("address", address), + ), + ), + ).Sort("level", "desc").All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocMigrations}, query, &response); err != nil { + return nil, err + } + + migrations := make([]migration.Migration, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &migrations[i]); err != nil { + return nil, err + } + } + return migrations, nil +} + +// Count - +func (storage *Storage) Count(network, address string) (int64, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + core.Should( + core.MatchPhrase("source", address), + core.MatchPhrase("destination", address), + ), + core.MinimumShouldMatch(1), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: "migrations_count", + Body: core.Count("indexed_time"), + }, + ), + ).Zero() + + var response getContractMigrationCountResponse + err := storage.es.Query([]string{models.DocMigrations}, query, &response) + return response.Agg.MigrationsCount.Value, err +} diff --git a/internal/elastic/migrations.go b/internal/elastic/migrations.go deleted file 
mode 100644 index 4969bc036..000000000 --- a/internal/elastic/migrations.go +++ /dev/null @@ -1,30 +0,0 @@ -package elastic - -import ( - "github.com/baking-bad/bcdhub/internal/models" -) - -// GetMigrations - -func (e *Elastic) GetMigrations(network, address string) ([]models.Migration, error) { - query := newQuery().Query( - boolQ( - must( - matchPhrase("network", network), - matchPhrase("address", address), - ), - ), - ).Sort("level", "desc").All() - - var response SearchResponse - if err := e.query([]string{DocMigrations}, query, &response); err != nil { - return nil, err - } - - migrations := make([]models.Migration, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &migrations[i]); err != nil { - return nil, err - } - } - return migrations, nil -} diff --git a/internal/elastic/mock/mock.go b/internal/elastic/mock/mock.go deleted file mode 100644 index 4de6541ba..000000000 --- a/internal/elastic/mock/mock.go +++ /dev/null @@ -1,3450 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: interface.go - -// Package mock_elastic is a generated GoMock package. 
-package mock_elastic - -import ( - elastic "github.com/baking-bad/bcdhub/internal/elastic" - search "github.com/baking-bad/bcdhub/internal/elastic/search" - models "github.com/baking-bad/bcdhub/internal/models" - tzip "github.com/baking-bad/bcdhub/internal/models/tzip" - gomock "github.com/golang/mock/gomock" - io "io" - reflect "reflect" -) - -// MockModel is a mock of Model interface -type MockModel struct { - ctrl *gomock.Controller - recorder *MockModelMockRecorder -} - -// MockModelMockRecorder is the mock recorder for MockModel -type MockModelMockRecorder struct { - mock *MockModel -} - -// NewMockModel creates a new mock instance -func NewMockModel(ctrl *gomock.Controller) *MockModel { - mock := &MockModel{ctrl: ctrl} - mock.recorder = &MockModelMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockModel) EXPECT() *MockModelMockRecorder { - return m.recorder -} - -// GetQueues mocks base method -func (m *MockModel) GetQueues() []string { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetQueues") - ret0, _ := ret[0].([]string) - return ret0 -} - -// GetQueues indicates an expected call of GetQueues -func (mr *MockModelMockRecorder) GetQueues() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetQueues", reflect.TypeOf((*MockModel)(nil).GetQueues)) -} - -// MarshalToQueue mocks base method -func (m *MockModel) MarshalToQueue() ([]byte, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "MarshalToQueue") - ret0, _ := ret[0].([]byte) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// MarshalToQueue indicates an expected call of MarshalToQueue -func (mr *MockModelMockRecorder) MarshalToQueue() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MarshalToQueue", reflect.TypeOf((*MockModel)(nil).MarshalToQueue)) -} - -// GetID mocks base method -func 
(m *MockModel) GetID() string { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetID") - ret0, _ := ret[0].(string) - return ret0 -} - -// GetID indicates an expected call of GetID -func (mr *MockModelMockRecorder) GetID() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetID", reflect.TypeOf((*MockModel)(nil).GetID)) -} - -// GetIndex mocks base method -func (m *MockModel) GetIndex() string { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetIndex") - ret0, _ := ret[0].(string) - return ret0 -} - -// GetIndex indicates an expected call of GetIndex -func (mr *MockModelMockRecorder) GetIndex() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIndex", reflect.TypeOf((*MockModel)(nil).GetIndex)) -} - -// MockIGeneral is a mock of IGeneral interface -type MockIGeneral struct { - ctrl *gomock.Controller - recorder *MockIGeneralMockRecorder -} - -// MockIGeneralMockRecorder is the mock recorder for MockIGeneral -type MockIGeneralMockRecorder struct { - mock *MockIGeneral -} - -// NewMockIGeneral creates a new mock instance -func NewMockIGeneral(ctrl *gomock.Controller) *MockIGeneral { - mock := &MockIGeneral{ctrl: ctrl} - mock.recorder = &MockIGeneralMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIGeneral) EXPECT() *MockIGeneralMockRecorder { - return m.recorder -} - -// CreateIndexes mocks base method -func (m *MockIGeneral) CreateIndexes() error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateIndexes") - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateIndexes indicates an expected call of CreateIndexes -func (mr *MockIGeneralMockRecorder) CreateIndexes() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateIndexes", reflect.TypeOf((*MockIGeneral)(nil).CreateIndexes)) -} - -// DeleteIndices mocks base method -func (m *MockIGeneral) 
DeleteIndices(indices []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteIndices", indices) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteIndices indicates an expected call of DeleteIndices -func (mr *MockIGeneralMockRecorder) DeleteIndices(indices interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteIndices", reflect.TypeOf((*MockIGeneral)(nil).DeleteIndices), indices) -} - -// DeleteByLevelAndNetwork mocks base method -func (m *MockIGeneral) DeleteByLevelAndNetwork(arg0 []string, arg1 string, arg2 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteByLevelAndNetwork", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteByLevelAndNetwork indicates an expected call of DeleteByLevelAndNetwork -func (mr *MockIGeneralMockRecorder) DeleteByLevelAndNetwork(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByLevelAndNetwork", reflect.TypeOf((*MockIGeneral)(nil).DeleteByLevelAndNetwork), arg0, arg1, arg2) -} - -// DeleteByContract mocks base method -func (m *MockIGeneral) DeleteByContract(indices []string, network, address string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteByContract", indices, network, address) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteByContract indicates an expected call of DeleteByContract -func (mr *MockIGeneralMockRecorder) DeleteByContract(indices, network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByContract", reflect.TypeOf((*MockIGeneral)(nil).DeleteByContract), indices, network, address) -} - -// GetAll mocks base method -func (m *MockIGeneral) GetAll(arg0 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAll", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetAll indicates an expected call of 
GetAll -func (mr *MockIGeneralMockRecorder) GetAll(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAll", reflect.TypeOf((*MockIGeneral)(nil).GetAll), arg0) -} - -// GetByID mocks base method -func (m *MockIGeneral) GetByID(arg0 elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByID", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByID indicates an expected call of GetByID -func (mr *MockIGeneralMockRecorder) GetByID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByID", reflect.TypeOf((*MockIGeneral)(nil).GetByID), arg0) -} - -// GetByIDs mocks base method -func (m *MockIGeneral) GetByIDs(output interface{}, ids ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{output} - for _, a := range ids { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetByIDs", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByIDs indicates an expected call of GetByIDs -func (mr *MockIGeneralMockRecorder) GetByIDs(output interface{}, ids ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{output}, ids...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByIDs", reflect.TypeOf((*MockIGeneral)(nil).GetByIDs), varargs...) 
-} - -// GetByNetwork mocks base method -func (m *MockIGeneral) GetByNetwork(arg0 string, arg1 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByNetwork", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByNetwork indicates an expected call of GetByNetwork -func (mr *MockIGeneralMockRecorder) GetByNetwork(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetwork", reflect.TypeOf((*MockIGeneral)(nil).GetByNetwork), arg0, arg1) -} - -// GetByNetworkWithSort mocks base method -func (m *MockIGeneral) GetByNetworkWithSort(arg0, arg1, arg2 string, arg3 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByNetworkWithSort", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByNetworkWithSort indicates an expected call of GetByNetworkWithSort -func (mr *MockIGeneralMockRecorder) GetByNetworkWithSort(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetworkWithSort", reflect.TypeOf((*MockIGeneral)(nil).GetByNetworkWithSort), arg0, arg1, arg2, arg3) -} - -// UpdateDoc mocks base method -func (m *MockIGeneral) UpdateDoc(model elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateDoc", model) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateDoc indicates an expected call of UpdateDoc -func (mr *MockIGeneralMockRecorder) UpdateDoc(model interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDoc", reflect.TypeOf((*MockIGeneral)(nil).UpdateDoc), model) -} - -// UpdateFields mocks base method -func (m *MockIGeneral) UpdateFields(arg0, arg1 string, arg2 interface{}, arg3 ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1, arg2} - for _, a := range arg3 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, 
"UpdateFields", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateFields indicates an expected call of UpdateFields -func (mr *MockIGeneralMockRecorder) UpdateFields(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1, arg2}, arg3...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateFields", reflect.TypeOf((*MockIGeneral)(nil).UpdateFields), varargs...) -} - -// MockIBalanceUpdate is a mock of IBalanceUpdate interface -type MockIBalanceUpdate struct { - ctrl *gomock.Controller - recorder *MockIBalanceUpdateMockRecorder -} - -// MockIBalanceUpdateMockRecorder is the mock recorder for MockIBalanceUpdate -type MockIBalanceUpdateMockRecorder struct { - mock *MockIBalanceUpdate -} - -// NewMockIBalanceUpdate creates a new mock instance -func NewMockIBalanceUpdate(ctrl *gomock.Controller) *MockIBalanceUpdate { - mock := &MockIBalanceUpdate{ctrl: ctrl} - mock.recorder = &MockIBalanceUpdateMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIBalanceUpdate) EXPECT() *MockIBalanceUpdateMockRecorder { - return m.recorder -} - -// GetBalance mocks base method -func (m *MockIBalanceUpdate) GetBalance(network, address string) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBalance", network, address) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBalance indicates an expected call of GetBalance -func (mr *MockIBalanceUpdateMockRecorder) GetBalance(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalance", reflect.TypeOf((*MockIBalanceUpdate)(nil).GetBalance), network, address) -} - -// MockIBigMap is a mock of IBigMap interface -type MockIBigMap struct { - ctrl *gomock.Controller - recorder *MockIBigMapMockRecorder -} - -// MockIBigMapMockRecorder is 
the mock recorder for MockIBigMap -type MockIBigMapMockRecorder struct { - mock *MockIBigMap -} - -// NewMockIBigMap creates a new mock instance -func NewMockIBigMap(ctrl *gomock.Controller) *MockIBigMap { - mock := &MockIBigMap{ctrl: ctrl} - mock.recorder = &MockIBigMapMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIBigMap) EXPECT() *MockIBigMapMockRecorder { - return m.recorder -} - -// GetBigMapKey mocks base method -func (m *MockIBigMap) GetBigMapKey(network, keyHash string, ptr int64) (elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapKey", network, keyHash, ptr) - ret0, _ := ret[0].(elastic.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapKey indicates an expected call of GetBigMapKey -func (mr *MockIBigMapMockRecorder) GetBigMapKey(network, keyHash, ptr interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapKey", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapKey), network, keyHash, ptr) -} - -// GetBigMapKeys mocks base method -func (m *MockIBigMap) GetBigMapKeys(ctx elastic.GetBigMapKeysContext) ([]elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapKeys", ctx) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapKeys indicates an expected call of GetBigMapKeys -func (mr *MockIBigMapMockRecorder) GetBigMapKeys(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapKeys", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapKeys), ctx) -} - -// GetBigMapsForAddress mocks base method -func (m *MockIBigMap) GetBigMapsForAddress(arg0, arg1 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapsForAddress", arg0, arg1) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := 
ret[1].(error) - return ret0, ret1 -} - -// GetBigMapsForAddress indicates an expected call of GetBigMapsForAddress -func (mr *MockIBigMapMockRecorder) GetBigMapsForAddress(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapsForAddress", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapsForAddress), arg0, arg1) -} - -// GetBigMapHistory mocks base method -func (m *MockIBigMap) GetBigMapHistory(arg0 int64, arg1 string) ([]models.BigMapAction, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapHistory", arg0, arg1) - ret0, _ := ret[0].([]models.BigMapAction) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapHistory indicates an expected call of GetBigMapHistory -func (mr *MockIBigMapMockRecorder) GetBigMapHistory(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapHistory", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapHistory), arg0, arg1) -} - -// GetBigMapValuesByKey mocks base method -func (m *MockIBigMap) GetBigMapValuesByKey(arg0 string) ([]elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapValuesByKey", arg0) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapValuesByKey indicates an expected call of GetBigMapValuesByKey -func (mr *MockIBigMapMockRecorder) GetBigMapValuesByKey(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapValuesByKey", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapValuesByKey), arg0) -} - -// GetBigMapDiffsCount mocks base method -func (m *MockIBigMap) GetBigMapDiffsCount(network string, ptr int64) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsCount", network, ptr) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsCount 
indicates an expected call of GetBigMapDiffsCount -func (mr *MockIBigMapMockRecorder) GetBigMapDiffsCount(network, ptr interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsCount", reflect.TypeOf((*MockIBigMap)(nil).GetBigMapDiffsCount), network, ptr) -} - -// MockIBigMapDiff is a mock of IBigMapDiff interface -type MockIBigMapDiff struct { - ctrl *gomock.Controller - recorder *MockIBigMapDiffMockRecorder -} - -// MockIBigMapDiffMockRecorder is the mock recorder for MockIBigMapDiff -type MockIBigMapDiffMockRecorder struct { - mock *MockIBigMapDiff -} - -// NewMockIBigMapDiff creates a new mock instance -func NewMockIBigMapDiff(ctrl *gomock.Controller) *MockIBigMapDiff { - mock := &MockIBigMapDiff{ctrl: ctrl} - mock.recorder = &MockIBigMapDiffMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIBigMapDiff) EXPECT() *MockIBigMapDiffMockRecorder { - return m.recorder -} - -// GetBigMapDiffsForAddress mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsForAddress(arg0 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsForAddress", arg0) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsForAddress indicates an expected call of GetBigMapDiffsForAddress -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsForAddress(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsForAddress", reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsForAddress), arg0) -} - -// GetBigMapDiffsPrevious mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsPrevious(arg0 []models.BigMapDiff, arg1 int64, arg2 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsPrevious", arg0, arg1, arg2) - ret0, _ := 
ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsPrevious indicates an expected call of GetBigMapDiffsPrevious -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsPrevious(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsPrevious", reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsPrevious), arg0, arg1, arg2) -} - -// GetBigMapDiffsUniqueByOperationID mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsUniqueByOperationID(arg0 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsUniqueByOperationID", arg0) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsUniqueByOperationID indicates an expected call of GetBigMapDiffsUniqueByOperationID -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsUniqueByOperationID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsUniqueByOperationID", reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsUniqueByOperationID), arg0) -} - -// GetBigMapDiffsByPtrAndKeyHash mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsByPtrAndKeyHash(arg0 int64, arg1, arg2 string, arg3, arg4 int64) ([]elastic.BigMapDiff, int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByPtrAndKeyHash", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(int64) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// GetBigMapDiffsByPtrAndKeyHash indicates an expected call of GetBigMapDiffsByPtrAndKeyHash -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsByPtrAndKeyHash(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByPtrAndKeyHash", 
reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsByPtrAndKeyHash), arg0, arg1, arg2, arg3, arg4) -} - -// GetBigMapDiffsByOperationID mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsByOperationID(arg0 string) ([]*models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByOperationID", arg0) - ret0, _ := ret[0].([]*models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsByOperationID indicates an expected call of GetBigMapDiffsByOperationID -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsByOperationID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByOperationID", reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsByOperationID), arg0) -} - -// GetBigMapDiffsByPtr mocks base method -func (m *MockIBigMapDiff) GetBigMapDiffsByPtr(arg0, arg1 string, arg2 int64) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByPtr", arg0, arg1, arg2) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsByPtr indicates an expected call of GetBigMapDiffsByPtr -func (mr *MockIBigMapDiffMockRecorder) GetBigMapDiffsByPtr(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByPtr", reflect.TypeOf((*MockIBigMapDiff)(nil).GetBigMapDiffsByPtr), arg0, arg1, arg2) -} - -// MockIBlock is a mock of IBlock interface -type MockIBlock struct { - ctrl *gomock.Controller - recorder *MockIBlockMockRecorder -} - -// MockIBlockMockRecorder is the mock recorder for MockIBlock -type MockIBlockMockRecorder struct { - mock *MockIBlock -} - -// NewMockIBlock creates a new mock instance -func NewMockIBlock(ctrl *gomock.Controller) *MockIBlock { - mock := &MockIBlock{ctrl: ctrl} - mock.recorder = &MockIBlockMockRecorder{mock} - return mock -} - -// EXPECT 
returns an object that allows the caller to indicate expected use -func (m *MockIBlock) EXPECT() *MockIBlockMockRecorder { - return m.recorder -} - -// GetBlock mocks base method -func (m *MockIBlock) GetBlock(arg0 string, arg1 int64) (models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBlock", arg0, arg1) - ret0, _ := ret[0].(models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBlock indicates an expected call of GetBlock -func (mr *MockIBlockMockRecorder) GetBlock(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBlock", reflect.TypeOf((*MockIBlock)(nil).GetBlock), arg0, arg1) -} - -// GetLastBlock mocks base method -func (m *MockIBlock) GetLastBlock(arg0 string) (models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastBlock", arg0) - ret0, _ := ret[0].(models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastBlock indicates an expected call of GetLastBlock -func (mr *MockIBlockMockRecorder) GetLastBlock(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastBlock", reflect.TypeOf((*MockIBlock)(nil).GetLastBlock), arg0) -} - -// GetLastBlocks mocks base method -func (m *MockIBlock) GetLastBlocks() ([]models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastBlocks") - ret0, _ := ret[0].([]models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastBlocks indicates an expected call of GetLastBlocks -func (mr *MockIBlockMockRecorder) GetLastBlocks() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastBlocks", reflect.TypeOf((*MockIBlock)(nil).GetLastBlocks)) -} - -// GetNetworkAlias mocks base method -func (m *MockIBlock) GetNetworkAlias(chainID string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetNetworkAlias", chainID) - ret0, _ := 
ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetNetworkAlias indicates an expected call of GetNetworkAlias -func (mr *MockIBlockMockRecorder) GetNetworkAlias(chainID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkAlias", reflect.TypeOf((*MockIBlock)(nil).GetNetworkAlias), chainID) -} - -// MockIBulk is a mock of IBulk interface -type MockIBulk struct { - ctrl *gomock.Controller - recorder *MockIBulkMockRecorder -} - -// MockIBulkMockRecorder is the mock recorder for MockIBulk -type MockIBulkMockRecorder struct { - mock *MockIBulk -} - -// NewMockIBulk creates a new mock instance -func NewMockIBulk(ctrl *gomock.Controller) *MockIBulk { - mock := &MockIBulk{ctrl: ctrl} - mock.recorder = &MockIBulkMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIBulk) EXPECT() *MockIBulkMockRecorder { - return m.recorder -} - -// BulkInsert mocks base method -func (m *MockIBulk) BulkInsert(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkInsert", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkInsert indicates an expected call of BulkInsert -func (mr *MockIBulkMockRecorder) BulkInsert(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkInsert", reflect.TypeOf((*MockIBulk)(nil).BulkInsert), arg0) -} - -// BulkUpdate mocks base method -func (m *MockIBulk) BulkUpdate(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkUpdate", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkUpdate indicates an expected call of BulkUpdate -func (mr *MockIBulkMockRecorder) BulkUpdate(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkUpdate", reflect.TypeOf((*MockIBulk)(nil).BulkUpdate), arg0) -} - -// 
BulkDelete mocks base method -func (m *MockIBulk) BulkDelete(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkDelete", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkDelete indicates an expected call of BulkDelete -func (mr *MockIBulkMockRecorder) BulkDelete(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkDelete", reflect.TypeOf((*MockIBulk)(nil).BulkDelete), arg0) -} - -// BulkRemoveField mocks base method -func (m *MockIBulk) BulkRemoveField(arg0 string, arg1 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkRemoveField", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkRemoveField indicates an expected call of BulkRemoveField -func (mr *MockIBulkMockRecorder) BulkRemoveField(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkRemoveField", reflect.TypeOf((*MockIBulk)(nil).BulkRemoveField), arg0, arg1) -} - -// BulkUpdateField mocks base method -func (m *MockIBulk) BulkUpdateField(where []models.Contract, fields ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{where} - for _, a := range fields { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "BulkUpdateField", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkUpdateField indicates an expected call of BulkUpdateField -func (mr *MockIBulkMockRecorder) BulkUpdateField(where interface{}, fields ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{where}, fields...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkUpdateField", reflect.TypeOf((*MockIBulk)(nil).BulkUpdateField), varargs...) 
-} - -// MockIContract is a mock of IContract interface -type MockIContract struct { - ctrl *gomock.Controller - recorder *MockIContractMockRecorder -} - -// MockIContractMockRecorder is the mock recorder for MockIContract -type MockIContractMockRecorder struct { - mock *MockIContract -} - -// NewMockIContract creates a new mock instance -func NewMockIContract(ctrl *gomock.Controller) *MockIContract { - mock := &MockIContract{ctrl: ctrl} - mock.recorder = &MockIContractMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIContract) EXPECT() *MockIContractMockRecorder { - return m.recorder -} - -// GetContract mocks base method -func (m *MockIContract) GetContract(arg0 map[string]interface{}) (models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContract", arg0) - ret0, _ := ret[0].(models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContract indicates an expected call of GetContract -func (mr *MockIContractMockRecorder) GetContract(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContract", reflect.TypeOf((*MockIContract)(nil).GetContract), arg0) -} - -// GetContractRandom mocks base method -func (m *MockIContract) GetContractRandom() (models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractRandom") - ret0, _ := ret[0].(models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractRandom indicates an expected call of GetContractRandom -func (mr *MockIContractMockRecorder) GetContractRandom() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractRandom", reflect.TypeOf((*MockIContract)(nil).GetContractRandom)) -} - -// GetContractMigrationStats mocks base method -func (m *MockIContract) GetContractMigrationStats(arg0, arg1 string) (elastic.ContractMigrationsStats, error) { - 
m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractMigrationStats", arg0, arg1) - ret0, _ := ret[0].(elastic.ContractMigrationsStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractMigrationStats indicates an expected call of GetContractMigrationStats -func (mr *MockIContractMockRecorder) GetContractMigrationStats(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractMigrationStats", reflect.TypeOf((*MockIContract)(nil).GetContractMigrationStats), arg0, arg1) -} - -// GetContractAddressesByNetworkAndLevel mocks base method -func (m *MockIContract) GetContractAddressesByNetworkAndLevel(arg0 string, arg1 int64) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractAddressesByNetworkAndLevel", arg0, arg1) - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractAddressesByNetworkAndLevel indicates an expected call of GetContractAddressesByNetworkAndLevel -func (mr *MockIContractMockRecorder) GetContractAddressesByNetworkAndLevel(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractAddressesByNetworkAndLevel", reflect.TypeOf((*MockIContract)(nil).GetContractAddressesByNetworkAndLevel), arg0, arg1) -} - -// GetContracts mocks base method -func (m *MockIContract) GetContracts(arg0 map[string]interface{}) ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContracts", arg0) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContracts indicates an expected call of GetContracts -func (mr *MockIContractMockRecorder) GetContracts(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContracts", reflect.TypeOf((*MockIContract)(nil).GetContracts), arg0) -} - -// GetContractsIDByAddress mocks base 
method -func (m *MockIContract) GetContractsIDByAddress(arg0 []string, arg1 string) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractsIDByAddress", arg0, arg1) - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractsIDByAddress indicates an expected call of GetContractsIDByAddress -func (mr *MockIContractMockRecorder) GetContractsIDByAddress(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractsIDByAddress", reflect.TypeOf((*MockIContract)(nil).GetContractsIDByAddress), arg0, arg1) -} - -// GetAffectedContracts mocks base method -func (m *MockIContract) GetAffectedContracts(arg0 string, arg1, arg2 int64) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAffectedContracts", arg0, arg1, arg2) - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAffectedContracts indicates an expected call of GetAffectedContracts -func (mr *MockIContractMockRecorder) GetAffectedContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAffectedContracts", reflect.TypeOf((*MockIContract)(nil).GetAffectedContracts), arg0, arg1, arg2) -} - -// IsFAContract mocks base method -func (m *MockIContract) IsFAContract(arg0, arg1 string) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "IsFAContract", arg0, arg1) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// IsFAContract indicates an expected call of IsFAContract -func (mr *MockIContractMockRecorder) IsFAContract(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsFAContract", reflect.TypeOf((*MockIContract)(nil).IsFAContract), arg0, arg1) -} - -// RecalcContractStats mocks base method -func (m *MockIContract) RecalcContractStats(arg0, arg1 
string) (elastic.ContractStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RecalcContractStats", arg0, arg1) - ret0, _ := ret[0].(elastic.ContractStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// RecalcContractStats indicates an expected call of RecalcContractStats -func (mr *MockIContractMockRecorder) RecalcContractStats(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecalcContractStats", reflect.TypeOf((*MockIContract)(nil).RecalcContractStats), arg0, arg1) -} - -// UpdateContractMigrationsCount mocks base method -func (m *MockIContract) UpdateContractMigrationsCount(arg0, arg1 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateContractMigrationsCount", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateContractMigrationsCount indicates an expected call of UpdateContractMigrationsCount -func (mr *MockIContractMockRecorder) UpdateContractMigrationsCount(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateContractMigrationsCount", reflect.TypeOf((*MockIContract)(nil).UpdateContractMigrationsCount), arg0, arg1) -} - -// GetDAppStats mocks base method -func (m *MockIContract) GetDAppStats(arg0 string, arg1 []string, arg2 string) (elastic.DAppStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDAppStats", arg0, arg1, arg2) - ret0, _ := ret[0].(elastic.DAppStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDAppStats indicates an expected call of GetDAppStats -func (mr *MockIContractMockRecorder) GetDAppStats(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppStats", reflect.TypeOf((*MockIContract)(nil).GetDAppStats), arg0, arg1, arg2) -} - -// GetContractsByAddresses mocks base method -func (m *MockIContract) GetContractsByAddresses(addresses 
[]elastic.Address) ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractsByAddresses", addresses) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractsByAddresses indicates an expected call of GetContractsByAddresses -func (mr *MockIContractMockRecorder) GetContractsByAddresses(addresses interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractsByAddresses", reflect.TypeOf((*MockIContract)(nil).GetContractsByAddresses), addresses) -} - -// MockIDomains is a mock of IDomains interface -type MockIDomains struct { - ctrl *gomock.Controller - recorder *MockIDomainsMockRecorder -} - -// MockIDomainsMockRecorder is the mock recorder for MockIDomains -type MockIDomainsMockRecorder struct { - mock *MockIDomains -} - -// NewMockIDomains creates a new mock instance -func NewMockIDomains(ctrl *gomock.Controller) *MockIDomains { - mock := &MockIDomains{ctrl: ctrl} - mock.recorder = &MockIDomainsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIDomains) EXPECT() *MockIDomainsMockRecorder { - return m.recorder -} - -// ListDomains mocks base method -func (m *MockIDomains) ListDomains(network string, size, offset int64) (elastic.DomainsResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListDomains", network, size, offset) - ret0, _ := ret[0].(elastic.DomainsResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListDomains indicates an expected call of ListDomains -func (mr *MockIDomainsMockRecorder) ListDomains(network, size, offset interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListDomains", reflect.TypeOf((*MockIDomains)(nil).ListDomains), network, size, offset) -} - -// ResolveDomainByAddress mocks base method -func (m *MockIDomains) 
ResolveDomainByAddress(network, address string) (*models.TezosDomain, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ResolveDomainByAddress", network, address) - ret0, _ := ret[0].(*models.TezosDomain) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ResolveDomainByAddress indicates an expected call of ResolveDomainByAddress -func (mr *MockIDomainsMockRecorder) ResolveDomainByAddress(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ResolveDomainByAddress", reflect.TypeOf((*MockIDomains)(nil).ResolveDomainByAddress), network, address) -} - -// MockIEvents is a mock of IEvents interface -type MockIEvents struct { - ctrl *gomock.Controller - recorder *MockIEventsMockRecorder -} - -// MockIEventsMockRecorder is the mock recorder for MockIEvents -type MockIEventsMockRecorder struct { - mock *MockIEvents -} - -// NewMockIEvents creates a new mock instance -func NewMockIEvents(ctrl *gomock.Controller) *MockIEvents { - mock := &MockIEvents{ctrl: ctrl} - mock.recorder = &MockIEventsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIEvents) EXPECT() *MockIEventsMockRecorder { - return m.recorder -} - -// GetEvents mocks base method -func (m *MockIEvents) GetEvents(arg0 []elastic.SubscriptionRequest, arg1, arg2 int64) ([]elastic.Event, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetEvents", arg0, arg1, arg2) - ret0, _ := ret[0].([]elastic.Event) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetEvents indicates an expected call of GetEvents -func (mr *MockIEventsMockRecorder) GetEvents(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEvents", reflect.TypeOf((*MockIEvents)(nil).GetEvents), arg0, arg1, arg2) -} - -// MockIMigrations is a mock of IMigrations interface -type MockIMigrations struct { - ctrl 
*gomock.Controller - recorder *MockIMigrationsMockRecorder -} - -// MockIMigrationsMockRecorder is the mock recorder for MockIMigrations -type MockIMigrationsMockRecorder struct { - mock *MockIMigrations -} - -// NewMockIMigrations creates a new mock instance -func NewMockIMigrations(ctrl *gomock.Controller) *MockIMigrations { - mock := &MockIMigrations{ctrl: ctrl} - mock.recorder = &MockIMigrationsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIMigrations) EXPECT() *MockIMigrationsMockRecorder { - return m.recorder -} - -// GetMigrations mocks base method -func (m *MockIMigrations) GetMigrations(arg0, arg1 string) ([]models.Migration, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMigrations", arg0, arg1) - ret0, _ := ret[0].([]models.Migration) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetMigrations indicates an expected call of GetMigrations -func (mr *MockIMigrationsMockRecorder) GetMigrations(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrations", reflect.TypeOf((*MockIMigrations)(nil).GetMigrations), arg0, arg1) -} - -// MockIOperations is a mock of IOperations interface -type MockIOperations struct { - ctrl *gomock.Controller - recorder *MockIOperationsMockRecorder -} - -// MockIOperationsMockRecorder is the mock recorder for MockIOperations -type MockIOperationsMockRecorder struct { - mock *MockIOperations -} - -// NewMockIOperations creates a new mock instance -func NewMockIOperations(ctrl *gomock.Controller) *MockIOperations { - mock := &MockIOperations{ctrl: ctrl} - mock.recorder = &MockIOperationsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIOperations) EXPECT() *MockIOperationsMockRecorder { - return m.recorder -} - -// GetOperationsForContract mocks base method -func (m 
*MockIOperations) GetOperationsForContract(arg0, arg1 string, arg2 uint64, arg3 map[string]interface{}) (elastic.PageableOperations, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperationsForContract", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(elastic.PageableOperations) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperationsForContract indicates an expected call of GetOperationsForContract -func (mr *MockIOperationsMockRecorder) GetOperationsForContract(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperationsForContract", reflect.TypeOf((*MockIOperations)(nil).GetOperationsForContract), arg0, arg1, arg2, arg3) -} - -// GetLastOperation mocks base method -func (m *MockIOperations) GetLastOperation(arg0, arg1 string, arg2 int64) (models.Operation, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastOperation", arg0, arg1, arg2) - ret0, _ := ret[0].(models.Operation) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastOperation indicates an expected call of GetLastOperation -func (mr *MockIOperationsMockRecorder) GetLastOperation(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastOperation", reflect.TypeOf((*MockIOperations)(nil).GetLastOperation), arg0, arg1, arg2) -} - -// GetOperationsStats mocks base method -func (m *MockIOperations) GetOperationsStats(network, address string) (elastic.OperationsStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperationsStats", network, address) - ret0, _ := ret[0].(elastic.OperationsStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperationsStats indicates an expected call of GetOperationsStats -func (mr *MockIOperationsMockRecorder) GetOperationsStats(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperationsStats", reflect.TypeOf((*MockIOperations)(nil).GetOperationsStats), network, address) -} - -// GetOperations mocks base method -func (m *MockIOperations) GetOperations(filter map[string]interface{}, size int64, sort bool) ([]models.Operation, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperations", filter, size, sort) - ret0, _ := ret[0].([]models.Operation) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperations indicates an expected call of GetOperations -func (mr *MockIOperationsMockRecorder) GetOperations(filter, size, sort interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperations", reflect.TypeOf((*MockIOperations)(nil).GetOperations), filter, size, sort) -} - -// GetContract24HoursVolume mocks base method -func (m *MockIOperations) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContract24HoursVolume", network, address, entrypoints) - ret0, _ := ret[0].(float64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContract24HoursVolume indicates an expected call of GetContract24HoursVolume -func (mr *MockIOperationsMockRecorder) GetContract24HoursVolume(network, address, entrypoints interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContract24HoursVolume", reflect.TypeOf((*MockIOperations)(nil).GetContract24HoursVolume), network, address, entrypoints) -} - -// MockIProjects is a mock of IProjects interface -type MockIProjects struct { - ctrl *gomock.Controller - recorder *MockIProjectsMockRecorder -} - -// MockIProjectsMockRecorder is the mock recorder for MockIProjects -type MockIProjectsMockRecorder struct { - mock *MockIProjects -} - -// NewMockIProjects creates a new mock instance -func NewMockIProjects(ctrl *gomock.Controller) *MockIProjects { 
- mock := &MockIProjects{ctrl: ctrl} - mock.recorder = &MockIProjectsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIProjects) EXPECT() *MockIProjectsMockRecorder { - return m.recorder -} - -// GetProjectsLastContract mocks base method -func (m *MockIProjects) GetProjectsLastContract() ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProjectsLastContract") - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProjectsLastContract indicates an expected call of GetProjectsLastContract -func (mr *MockIProjectsMockRecorder) GetProjectsLastContract() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsLastContract", reflect.TypeOf((*MockIProjects)(nil).GetProjectsLastContract)) -} - -// GetSameContracts mocks base method -func (m *MockIProjects) GetSameContracts(arg0 models.Contract, arg1, arg2 int64) (elastic.SameContractsResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSameContracts", arg0, arg1, arg2) - ret0, _ := ret[0].(elastic.SameContractsResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetSameContracts indicates an expected call of GetSameContracts -func (mr *MockIProjectsMockRecorder) GetSameContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSameContracts", reflect.TypeOf((*MockIProjects)(nil).GetSameContracts), arg0, arg1, arg2) -} - -// GetSimilarContracts mocks base method -func (m *MockIProjects) GetSimilarContracts(arg0 models.Contract, arg1, arg2 int64) ([]elastic.SimilarContract, int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSimilarContracts", arg0, arg1, arg2) - ret0, _ := ret[0].([]elastic.SimilarContract) - ret1, _ := ret[1].(int) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// 
GetSimilarContracts indicates an expected call of GetSimilarContracts -func (mr *MockIProjectsMockRecorder) GetSimilarContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSimilarContracts", reflect.TypeOf((*MockIProjects)(nil).GetSimilarContracts), arg0, arg1, arg2) -} - -// GetDiffTasks mocks base method -func (m *MockIProjects) GetDiffTasks() ([]elastic.DiffTask, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDiffTasks") - ret0, _ := ret[0].([]elastic.DiffTask) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDiffTasks indicates an expected call of GetDiffTasks -func (mr *MockIProjectsMockRecorder) GetDiffTasks() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDiffTasks", reflect.TypeOf((*MockIProjects)(nil).GetDiffTasks)) -} - -// MockIProtocol is a mock of IProtocol interface -type MockIProtocol struct { - ctrl *gomock.Controller - recorder *MockIProtocolMockRecorder -} - -// MockIProtocolMockRecorder is the mock recorder for MockIProtocol -type MockIProtocolMockRecorder struct { - mock *MockIProtocol -} - -// NewMockIProtocol creates a new mock instance -func NewMockIProtocol(ctrl *gomock.Controller) *MockIProtocol { - mock := &MockIProtocol{ctrl: ctrl} - mock.recorder = &MockIProtocolMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIProtocol) EXPECT() *MockIProtocolMockRecorder { - return m.recorder -} - -// GetProtocol mocks base method -func (m *MockIProtocol) GetProtocol(arg0, arg1 string, arg2 int64) (models.Protocol, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProtocol", arg0, arg1, arg2) - ret0, _ := ret[0].(models.Protocol) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProtocol indicates an expected call of GetProtocol -func (mr *MockIProtocolMockRecorder) GetProtocol(arg0, arg1, arg2 
interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProtocol", reflect.TypeOf((*MockIProtocol)(nil).GetProtocol), arg0, arg1, arg2) -} - -// GetSymLinks mocks base method -func (m *MockIProtocol) GetSymLinks(arg0 string, arg1 int64) (map[string]struct{}, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSymLinks", arg0, arg1) - ret0, _ := ret[0].(map[string]struct{}) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetSymLinks indicates an expected call of GetSymLinks -func (mr *MockIProtocolMockRecorder) GetSymLinks(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSymLinks", reflect.TypeOf((*MockIProtocol)(nil).GetSymLinks), arg0, arg1) -} - -// MockISearch is a mock of ISearch interface -type MockISearch struct { - ctrl *gomock.Controller - recorder *MockISearchMockRecorder -} - -// MockISearchMockRecorder is the mock recorder for MockISearch -type MockISearchMockRecorder struct { - mock *MockISearch -} - -// NewMockISearch creates a new mock instance -func NewMockISearch(ctrl *gomock.Controller) *MockISearch { - mock := &MockISearch{ctrl: ctrl} - mock.recorder = &MockISearchMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockISearch) EXPECT() *MockISearchMockRecorder { - return m.recorder -} - -// SearchByText mocks base method -func (m *MockISearch) SearchByText(arg0 string, arg1 int64, arg2 []string, arg3 map[string]interface{}, arg4 bool) (search.Result, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SearchByText", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(search.Result) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// SearchByText indicates an expected call of SearchByText -func (mr *MockISearchMockRecorder) SearchByText(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - 
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SearchByText", reflect.TypeOf((*MockISearch)(nil).SearchByText), arg0, arg1, arg2, arg3, arg4) -} - -// MockISnapshot is a mock of ISnapshot interface -type MockISnapshot struct { - ctrl *gomock.Controller - recorder *MockISnapshotMockRecorder -} - -// MockISnapshotMockRecorder is the mock recorder for MockISnapshot -type MockISnapshotMockRecorder struct { - mock *MockISnapshot -} - -// NewMockISnapshot creates a new mock instance -func NewMockISnapshot(ctrl *gomock.Controller) *MockISnapshot { - mock := &MockISnapshot{ctrl: ctrl} - mock.recorder = &MockISnapshotMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockISnapshot) EXPECT() *MockISnapshotMockRecorder { - return m.recorder -} - -// CreateAWSRepository mocks base method -func (m *MockISnapshot) CreateAWSRepository(arg0, arg1, arg2 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateAWSRepository", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateAWSRepository indicates an expected call of CreateAWSRepository -func (mr *MockISnapshotMockRecorder) CreateAWSRepository(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAWSRepository", reflect.TypeOf((*MockISnapshot)(nil).CreateAWSRepository), arg0, arg1, arg2) -} - -// ListRepositories mocks base method -func (m *MockISnapshot) ListRepositories() ([]elastic.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRepositories") - ret0, _ := ret[0].([]elastic.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListRepositories indicates an expected call of ListRepositories -func (mr *MockISnapshotMockRecorder) ListRepositories() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRepositories", 
reflect.TypeOf((*MockISnapshot)(nil).ListRepositories)) -} - -// CreateSnapshots mocks base method -func (m *MockISnapshot) CreateSnapshots(arg0, arg1 string, arg2 []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateSnapshots", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateSnapshots indicates an expected call of CreateSnapshots -func (mr *MockISnapshotMockRecorder) CreateSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateSnapshots", reflect.TypeOf((*MockISnapshot)(nil).CreateSnapshots), arg0, arg1, arg2) -} - -// RestoreSnapshots mocks base method -func (m *MockISnapshot) RestoreSnapshots(arg0, arg1 string, arg2 []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RestoreSnapshots", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// RestoreSnapshots indicates an expected call of RestoreSnapshots -func (mr *MockISnapshotMockRecorder) RestoreSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RestoreSnapshots", reflect.TypeOf((*MockISnapshot)(nil).RestoreSnapshots), arg0, arg1, arg2) -} - -// ListSnapshots mocks base method -func (m *MockISnapshot) ListSnapshots(arg0 string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListSnapshots", arg0) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListSnapshots indicates an expected call of ListSnapshots -func (mr *MockISnapshotMockRecorder) ListSnapshots(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListSnapshots", reflect.TypeOf((*MockISnapshot)(nil).ListSnapshots), arg0) -} - -// SetSnapshotPolicy mocks base method -func (m *MockISnapshot) SetSnapshotPolicy(arg0, arg1, arg2, arg3 string, arg4 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, 
"SetSnapshotPolicy", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(error) - return ret0 -} - -// SetSnapshotPolicy indicates an expected call of SetSnapshotPolicy -func (mr *MockISnapshotMockRecorder) SetSnapshotPolicy(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSnapshotPolicy", reflect.TypeOf((*MockISnapshot)(nil).SetSnapshotPolicy), arg0, arg1, arg2, arg3, arg4) -} - -// GetAllPolicies mocks base method -func (m *MockISnapshot) GetAllPolicies() ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAllPolicies") - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAllPolicies indicates an expected call of GetAllPolicies -func (mr *MockISnapshotMockRecorder) GetAllPolicies() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllPolicies", reflect.TypeOf((*MockISnapshot)(nil).GetAllPolicies)) -} - -// GetMappings mocks base method -func (m *MockISnapshot) GetMappings(arg0 []string) (map[string]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMappings", arg0) - ret0, _ := ret[0].(map[string]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetMappings indicates an expected call of GetMappings -func (mr *MockISnapshotMockRecorder) GetMappings(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMappings", reflect.TypeOf((*MockISnapshot)(nil).GetMappings), arg0) -} - -// CreateMapping mocks base method -func (m *MockISnapshot) CreateMapping(arg0 string, arg1 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateMapping", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateMapping indicates an expected call of CreateMapping -func (mr *MockISnapshotMockRecorder) CreateMapping(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - 
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateMapping", reflect.TypeOf((*MockISnapshot)(nil).CreateMapping), arg0, arg1) -} - -// ReloadSecureSettings mocks base method -func (m *MockISnapshot) ReloadSecureSettings() error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ReloadSecureSettings") - ret0, _ := ret[0].(error) - return ret0 -} - -// ReloadSecureSettings indicates an expected call of ReloadSecureSettings -func (mr *MockISnapshotMockRecorder) ReloadSecureSettings() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReloadSecureSettings", reflect.TypeOf((*MockISnapshot)(nil).ReloadSecureSettings)) -} - -// MockIStats is a mock of IStats interface -type MockIStats struct { - ctrl *gomock.Controller - recorder *MockIStatsMockRecorder -} - -// MockIStatsMockRecorder is the mock recorder for MockIStats -type MockIStatsMockRecorder struct { - mock *MockIStats -} - -// NewMockIStats creates a new mock instance -func NewMockIStats(ctrl *gomock.Controller) *MockIStats { - mock := &MockIStats{ctrl: ctrl} - mock.recorder = &MockIStatsMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIStats) EXPECT() *MockIStatsMockRecorder { - return m.recorder -} - -// GetNetworkCountStats mocks base method -func (m *MockIStats) GetNetworkCountStats(arg0 string) (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetNetworkCountStats", arg0) - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetNetworkCountStats indicates an expected call of GetNetworkCountStats -func (mr *MockIStatsMockRecorder) GetNetworkCountStats(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkCountStats", reflect.TypeOf((*MockIStats)(nil).GetNetworkCountStats), arg0) -} - -// GetDateHistogram mocks base method -func (m *MockIStats) 
GetDateHistogram(period string, opts ...elastic.HistogramOption) ([][]int64, error) { - m.ctrl.T.Helper() - varargs := []interface{}{period} - for _, a := range opts { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetDateHistogram", varargs...) - ret0, _ := ret[0].([][]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDateHistogram indicates an expected call of GetDateHistogram -func (mr *MockIStatsMockRecorder) GetDateHistogram(period interface{}, opts ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{period}, opts...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDateHistogram", reflect.TypeOf((*MockIStats)(nil).GetDateHistogram), varargs...) -} - -// GetCallsCountByNetwork mocks base method -func (m *MockIStats) GetCallsCountByNetwork() (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetCallsCountByNetwork") - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetCallsCountByNetwork indicates an expected call of GetCallsCountByNetwork -func (mr *MockIStatsMockRecorder) GetCallsCountByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCallsCountByNetwork", reflect.TypeOf((*MockIStats)(nil).GetCallsCountByNetwork)) -} - -// GetContractStatsByNetwork mocks base method -func (m *MockIStats) GetContractStatsByNetwork() (map[string]elastic.ContractCountStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractStatsByNetwork") - ret0, _ := ret[0].(map[string]elastic.ContractCountStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractStatsByNetwork indicates an expected call of GetContractStatsByNetwork -func (mr *MockIStatsMockRecorder) GetContractStatsByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractStatsByNetwork", 
reflect.TypeOf((*MockIStats)(nil).GetContractStatsByNetwork)) -} - -// GetFACountByNetwork mocks base method -func (m *MockIStats) GetFACountByNetwork() (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetFACountByNetwork") - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetFACountByNetwork indicates an expected call of GetFACountByNetwork -func (mr *MockIStatsMockRecorder) GetFACountByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFACountByNetwork", reflect.TypeOf((*MockIStats)(nil).GetFACountByNetwork)) -} - -// GetLanguagesForNetwork mocks base method -func (m *MockIStats) GetLanguagesForNetwork(network string) (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLanguagesForNetwork", network) - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLanguagesForNetwork indicates an expected call of GetLanguagesForNetwork -func (mr *MockIStatsMockRecorder) GetLanguagesForNetwork(network interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLanguagesForNetwork", reflect.TypeOf((*MockIStats)(nil).GetLanguagesForNetwork), network) -} - -// MockITokens is a mock of ITokens interface -type MockITokens struct { - ctrl *gomock.Controller - recorder *MockITokensMockRecorder -} - -// MockITokensMockRecorder is the mock recorder for MockITokens -type MockITokensMockRecorder struct { - mock *MockITokens -} - -// NewMockITokens creates a new mock instance -func NewMockITokens(ctrl *gomock.Controller) *MockITokens { - mock := &MockITokens{ctrl: ctrl} - mock.recorder = &MockITokensMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockITokens) EXPECT() *MockITokensMockRecorder { - return m.recorder -} - -// GetTokens mocks base method -func (m 
*MockITokens) GetTokens(arg0, arg1 string, arg2, arg3 int64) ([]models.Contract, int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokens", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(int64) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// GetTokens indicates an expected call of GetTokens -func (mr *MockITokensMockRecorder) GetTokens(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokens", reflect.TypeOf((*MockITokens)(nil).GetTokens), arg0, arg1, arg2, arg3) -} - -// GetTokensStats mocks base method -func (m *MockITokens) GetTokensStats(arg0 string, arg1, arg2 []string) (map[string]elastic.TokenUsageStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokensStats", arg0, arg1, arg2) - ret0, _ := ret[0].(map[string]elastic.TokenUsageStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokensStats indicates an expected call of GetTokensStats -func (mr *MockITokensMockRecorder) GetTokensStats(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokensStats", reflect.TypeOf((*MockITokens)(nil).GetTokensStats), arg0, arg1, arg2) -} - -// GetTokenVolumeSeries mocks base method -func (m *MockITokens) GetTokenVolumeSeries(arg0, arg1 string, arg2 []string, arg3 []tzip.DAppContract, arg4 uint) ([][]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenVolumeSeries", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([][]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenVolumeSeries indicates an expected call of GetTokenVolumeSeries -func (mr *MockITokensMockRecorder) GetTokenVolumeSeries(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenVolumeSeries", 
reflect.TypeOf((*MockITokens)(nil).GetTokenVolumeSeries), arg0, arg1, arg2, arg3, arg4) -} - -// GetBalances mocks base method -func (m *MockITokens) GetBalances(arg0, arg1 string, arg2 int64, arg3 ...elastic.TokenBalance) (map[elastic.TokenBalance]int64, error) { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1, arg2} - for _, a := range arg3 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetBalances", varargs...) - ret0, _ := ret[0].(map[elastic.TokenBalance]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBalances indicates an expected call of GetBalances -func (mr *MockITokensMockRecorder) GetBalances(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1, arg2}, arg3...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalances", reflect.TypeOf((*MockITokens)(nil).GetBalances), varargs...) -} - -// GetAccountBalances mocks base method -func (m *MockITokens) GetAccountBalances(arg0, arg1 string) ([]models.TokenBalance, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountBalances", arg0, arg1) - ret0, _ := ret[0].([]models.TokenBalance) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountBalances indicates an expected call of GetAccountBalances -func (mr *MockITokensMockRecorder) GetAccountBalances(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountBalances", reflect.TypeOf((*MockITokens)(nil).GetAccountBalances), arg0, arg1) -} - -// GetTokenSupply mocks base method -func (m *MockITokens) GetTokenSupply(network, address string, tokenID int64) (elastic.TokenSupply, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenSupply", network, address, tokenID) - ret0, _ := ret[0].(elastic.TokenSupply) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenSupply indicates an expected call of GetTokenSupply -func 
(mr *MockITokensMockRecorder) GetTokenSupply(network, address, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenSupply", reflect.TypeOf((*MockITokens)(nil).GetTokenSupply), network, address, tokenID) -} - -// GetTransfers mocks base method -func (m *MockITokens) GetTransfers(ctx elastic.GetTransfersContext) (elastic.TransfersResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransfers", ctx) - ret0, _ := ret[0].(elastic.TransfersResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTransfers indicates an expected call of GetTransfers -func (mr *MockITokensMockRecorder) GetTransfers(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransfers", reflect.TypeOf((*MockITokens)(nil).GetTransfers), ctx) -} - -// GetAllTransfers mocks base method -func (m *MockITokens) GetAllTransfers(network string, level int64) ([]models.Transfer, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAllTransfers", network, level) - ret0, _ := ret[0].([]models.Transfer) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAllTransfers indicates an expected call of GetAllTransfers -func (mr *MockITokensMockRecorder) GetAllTransfers(network, level interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllTransfers", reflect.TypeOf((*MockITokens)(nil).GetAllTransfers), network, level) -} - -// UpdateTokenBalances mocks base method -func (m *MockITokens) UpdateTokenBalances(updates []*models.TokenBalance) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateTokenBalances", updates) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateTokenBalances indicates an expected call of UpdateTokenBalances -func (mr *MockITokensMockRecorder) UpdateTokenBalances(updates interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTokenBalances", reflect.TypeOf((*MockITokens)(nil).UpdateTokenBalances), updates) -} - -// GetHolders mocks base method -func (m *MockITokens) GetHolders(network, contract string, tokenID int64) ([]models.TokenBalance, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetHolders", network, contract, tokenID) - ret0, _ := ret[0].([]models.TokenBalance) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetHolders indicates an expected call of GetHolders -func (mr *MockITokensMockRecorder) GetHolders(network, contract, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHolders", reflect.TypeOf((*MockITokens)(nil).GetHolders), network, contract, tokenID) -} - -// GetToken24HoursVolume mocks base method -func (m *MockITokens) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetToken24HoursVolume", network, contract, initiators, entrypoints, tokenID) - ret0, _ := ret[0].(float64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetToken24HoursVolume indicates an expected call of GetToken24HoursVolume -func (mr *MockITokensMockRecorder) GetToken24HoursVolume(network, contract, initiators, entrypoints, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetToken24HoursVolume", reflect.TypeOf((*MockITokens)(nil).GetToken24HoursVolume), network, contract, initiators, entrypoints, tokenID) -} - -// MockITZIP is a mock of ITZIP interface -type MockITZIP struct { - ctrl *gomock.Controller - recorder *MockITZIPMockRecorder -} - -// MockITZIPMockRecorder is the mock recorder for MockITZIP -type MockITZIPMockRecorder struct { - mock *MockITZIP -} - -// NewMockITZIP creates a new mock instance -func NewMockITZIP(ctrl *gomock.Controller) *MockITZIP { - mock := 
&MockITZIP{ctrl: ctrl} - mock.recorder = &MockITZIPMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockITZIP) EXPECT() *MockITZIPMockRecorder { - return m.recorder -} - -// GetTZIP mocks base method -func (m *MockITZIP) GetTZIP(network, address string) (models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTZIP", network, address) - ret0, _ := ret[0].(models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTZIP indicates an expected call of GetTZIP -func (mr *MockITZIPMockRecorder) GetTZIP(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTZIP", reflect.TypeOf((*MockITZIP)(nil).GetTZIP), network, address) -} - -// GetTZIPWithEvents mocks base method -func (m *MockITZIP) GetTZIPWithEvents() ([]models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTZIPWithEvents") - ret0, _ := ret[0].([]models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTZIPWithEvents indicates an expected call of GetTZIPWithEvents -func (mr *MockITZIPMockRecorder) GetTZIPWithEvents() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTZIPWithEvents", reflect.TypeOf((*MockITZIP)(nil).GetTZIPWithEvents)) -} - -// GetTokenMetadata mocks base method -func (m *MockITZIP) GetTokenMetadata(ctx elastic.GetTokenMetadataContext) ([]elastic.TokenMetadata, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenMetadata", ctx) - ret0, _ := ret[0].([]elastic.TokenMetadata) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenMetadata indicates an expected call of GetTokenMetadata -func (mr *MockITZIPMockRecorder) GetTokenMetadata(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenMetadata", reflect.TypeOf((*MockITZIP)(nil).GetTokenMetadata), ctx) -} 
- -// GetDApps mocks base method -func (m *MockITZIP) GetDApps() ([]tzip.DApp, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDApps") - ret0, _ := ret[0].([]tzip.DApp) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDApps indicates an expected call of GetDApps -func (mr *MockITZIPMockRecorder) GetDApps() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDApps", reflect.TypeOf((*MockITZIP)(nil).GetDApps)) -} - -// GetDAppBySlug mocks base method -func (m *MockITZIP) GetDAppBySlug(slug string) (*tzip.DApp, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDAppBySlug", slug) - ret0, _ := ret[0].(*tzip.DApp) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDAppBySlug indicates an expected call of GetDAppBySlug -func (mr *MockITZIPMockRecorder) GetDAppBySlug(slug interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppBySlug", reflect.TypeOf((*MockITZIP)(nil).GetDAppBySlug), slug) -} - -// GetBySlug mocks base method -func (m *MockITZIP) GetBySlug(slug string) (*models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBySlug", slug) - ret0, _ := ret[0].(*models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBySlug indicates an expected call of GetBySlug -func (mr *MockITZIPMockRecorder) GetBySlug(slug interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBySlug", reflect.TypeOf((*MockITZIP)(nil).GetBySlug), slug) -} - -// GetAliases mocks base method -func (m *MockITZIP) GetAliases(network string) ([]models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAliases", network) - ret0, _ := ret[0].([]models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAliases indicates an expected call of GetAliases -func (mr *MockITZIPMockRecorder) GetAliases(network interface{}) *gomock.Call { - 
mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliases", reflect.TypeOf((*MockITZIP)(nil).GetAliases), network) -} - -// GetAliasesMap mocks base method -func (m *MockITZIP) GetAliasesMap(network string) (map[string]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAliasesMap", network) - ret0, _ := ret[0].(map[string]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAliasesMap indicates an expected call of GetAliasesMap -func (mr *MockITZIPMockRecorder) GetAliasesMap(network interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliasesMap", reflect.TypeOf((*MockITZIP)(nil).GetAliasesMap), network) -} - -// GetAlias mocks base method -func (m *MockITZIP) GetAlias(network, address string) (*models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAlias", network, address) - ret0, _ := ret[0].(*models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAlias indicates an expected call of GetAlias -func (mr *MockITZIPMockRecorder) GetAlias(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAlias", reflect.TypeOf((*MockITZIP)(nil).GetAlias), network, address) -} - -// MockIElastic is a mock of IElastic interface -type MockIElastic struct { - ctrl *gomock.Controller - recorder *MockIElasticMockRecorder -} - -// MockIElasticMockRecorder is the mock recorder for MockIElastic -type MockIElasticMockRecorder struct { - mock *MockIElastic -} - -// NewMockIElastic creates a new mock instance -func NewMockIElastic(ctrl *gomock.Controller) *MockIElastic { - mock := &MockIElastic{ctrl: ctrl} - mock.recorder = &MockIElasticMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use -func (m *MockIElastic) EXPECT() *MockIElasticMockRecorder { - return m.recorder -} - -// CreateIndexes mocks base 
method -func (m *MockIElastic) CreateIndexes() error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateIndexes") - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateIndexes indicates an expected call of CreateIndexes -func (mr *MockIElasticMockRecorder) CreateIndexes() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateIndexes", reflect.TypeOf((*MockIElastic)(nil).CreateIndexes)) -} - -// DeleteIndices mocks base method -func (m *MockIElastic) DeleteIndices(indices []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteIndices", indices) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteIndices indicates an expected call of DeleteIndices -func (mr *MockIElasticMockRecorder) DeleteIndices(indices interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteIndices", reflect.TypeOf((*MockIElastic)(nil).DeleteIndices), indices) -} - -// DeleteByLevelAndNetwork mocks base method -func (m *MockIElastic) DeleteByLevelAndNetwork(arg0 []string, arg1 string, arg2 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteByLevelAndNetwork", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteByLevelAndNetwork indicates an expected call of DeleteByLevelAndNetwork -func (mr *MockIElasticMockRecorder) DeleteByLevelAndNetwork(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByLevelAndNetwork", reflect.TypeOf((*MockIElastic)(nil).DeleteByLevelAndNetwork), arg0, arg1, arg2) -} - -// DeleteByContract mocks base method -func (m *MockIElastic) DeleteByContract(indices []string, network, address string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "DeleteByContract", indices, network, address) - ret0, _ := ret[0].(error) - return ret0 -} - -// DeleteByContract indicates an expected call of DeleteByContract -func (mr 
*MockIElasticMockRecorder) DeleteByContract(indices, network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByContract", reflect.TypeOf((*MockIElastic)(nil).DeleteByContract), indices, network, address) -} - -// GetAll mocks base method -func (m *MockIElastic) GetAll(arg0 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAll", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetAll indicates an expected call of GetAll -func (mr *MockIElasticMockRecorder) GetAll(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAll", reflect.TypeOf((*MockIElastic)(nil).GetAll), arg0) -} - -// GetByID mocks base method -func (m *MockIElastic) GetByID(arg0 elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByID", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByID indicates an expected call of GetByID -func (mr *MockIElasticMockRecorder) GetByID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByID", reflect.TypeOf((*MockIElastic)(nil).GetByID), arg0) -} - -// GetByIDs mocks base method -func (m *MockIElastic) GetByIDs(output interface{}, ids ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{output} - for _, a := range ids { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetByIDs", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByIDs indicates an expected call of GetByIDs -func (mr *MockIElasticMockRecorder) GetByIDs(output interface{}, ids ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{output}, ids...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByIDs", reflect.TypeOf((*MockIElastic)(nil).GetByIDs), varargs...) 
-} - -// GetByNetwork mocks base method -func (m *MockIElastic) GetByNetwork(arg0 string, arg1 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByNetwork", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByNetwork indicates an expected call of GetByNetwork -func (mr *MockIElasticMockRecorder) GetByNetwork(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetwork", reflect.TypeOf((*MockIElastic)(nil).GetByNetwork), arg0, arg1) -} - -// GetByNetworkWithSort mocks base method -func (m *MockIElastic) GetByNetworkWithSort(arg0, arg1, arg2 string, arg3 interface{}) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetByNetworkWithSort", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(error) - return ret0 -} - -// GetByNetworkWithSort indicates an expected call of GetByNetworkWithSort -func (mr *MockIElasticMockRecorder) GetByNetworkWithSort(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetworkWithSort", reflect.TypeOf((*MockIElastic)(nil).GetByNetworkWithSort), arg0, arg1, arg2, arg3) -} - -// UpdateDoc mocks base method -func (m *MockIElastic) UpdateDoc(model elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateDoc", model) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateDoc indicates an expected call of UpdateDoc -func (mr *MockIElasticMockRecorder) UpdateDoc(model interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDoc", reflect.TypeOf((*MockIElastic)(nil).UpdateDoc), model) -} - -// UpdateFields mocks base method -func (m *MockIElastic) UpdateFields(arg0, arg1 string, arg2 interface{}, arg3 ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1, arg2} - for _, a := range arg3 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, 
"UpdateFields", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateFields indicates an expected call of UpdateFields -func (mr *MockIElasticMockRecorder) UpdateFields(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1, arg2}, arg3...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateFields", reflect.TypeOf((*MockIElastic)(nil).UpdateFields), varargs...) -} - -// GetBalance mocks base method -func (m *MockIElastic) GetBalance(network, address string) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBalance", network, address) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBalance indicates an expected call of GetBalance -func (mr *MockIElasticMockRecorder) GetBalance(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalance", reflect.TypeOf((*MockIElastic)(nil).GetBalance), network, address) -} - -// GetBigMapKey mocks base method -func (m *MockIElastic) GetBigMapKey(network, keyHash string, ptr int64) (elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapKey", network, keyHash, ptr) - ret0, _ := ret[0].(elastic.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapKey indicates an expected call of GetBigMapKey -func (mr *MockIElasticMockRecorder) GetBigMapKey(network, keyHash, ptr interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapKey", reflect.TypeOf((*MockIElastic)(nil).GetBigMapKey), network, keyHash, ptr) -} - -// GetBigMapKeys mocks base method -func (m *MockIElastic) GetBigMapKeys(ctx elastic.GetBigMapKeysContext) ([]elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapKeys", ctx) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(error) - 
return ret0, ret1 -} - -// GetBigMapKeys indicates an expected call of GetBigMapKeys -func (mr *MockIElasticMockRecorder) GetBigMapKeys(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapKeys", reflect.TypeOf((*MockIElastic)(nil).GetBigMapKeys), ctx) -} - -// GetBigMapsForAddress mocks base method -func (m *MockIElastic) GetBigMapsForAddress(arg0, arg1 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapsForAddress", arg0, arg1) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapsForAddress indicates an expected call of GetBigMapsForAddress -func (mr *MockIElasticMockRecorder) GetBigMapsForAddress(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapsForAddress", reflect.TypeOf((*MockIElastic)(nil).GetBigMapsForAddress), arg0, arg1) -} - -// GetBigMapHistory mocks base method -func (m *MockIElastic) GetBigMapHistory(arg0 int64, arg1 string) ([]models.BigMapAction, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapHistory", arg0, arg1) - ret0, _ := ret[0].([]models.BigMapAction) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapHistory indicates an expected call of GetBigMapHistory -func (mr *MockIElasticMockRecorder) GetBigMapHistory(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapHistory", reflect.TypeOf((*MockIElastic)(nil).GetBigMapHistory), arg0, arg1) -} - -// GetBigMapValuesByKey mocks base method -func (m *MockIElastic) GetBigMapValuesByKey(arg0 string) ([]elastic.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapValuesByKey", arg0) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapValuesByKey indicates an expected call 
of GetBigMapValuesByKey -func (mr *MockIElasticMockRecorder) GetBigMapValuesByKey(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapValuesByKey", reflect.TypeOf((*MockIElastic)(nil).GetBigMapValuesByKey), arg0) -} - -// GetBigMapDiffsCount mocks base method -func (m *MockIElastic) GetBigMapDiffsCount(network string, ptr int64) (int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsCount", network, ptr) - ret0, _ := ret[0].(int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsCount indicates an expected call of GetBigMapDiffsCount -func (mr *MockIElasticMockRecorder) GetBigMapDiffsCount(network, ptr interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsCount", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsCount), network, ptr) -} - -// GetBigMapDiffsForAddress mocks base method -func (m *MockIElastic) GetBigMapDiffsForAddress(arg0 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsForAddress", arg0) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsForAddress indicates an expected call of GetBigMapDiffsForAddress -func (mr *MockIElasticMockRecorder) GetBigMapDiffsForAddress(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsForAddress", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsForAddress), arg0) -} - -// GetBigMapDiffsPrevious mocks base method -func (m *MockIElastic) GetBigMapDiffsPrevious(arg0 []models.BigMapDiff, arg1 int64, arg2 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsPrevious", arg0, arg1, arg2) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsPrevious 
indicates an expected call of GetBigMapDiffsPrevious -func (mr *MockIElasticMockRecorder) GetBigMapDiffsPrevious(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsPrevious", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsPrevious), arg0, arg1, arg2) -} - -// GetBigMapDiffsUniqueByOperationID mocks base method -func (m *MockIElastic) GetBigMapDiffsUniqueByOperationID(arg0 string) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsUniqueByOperationID", arg0) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsUniqueByOperationID indicates an expected call of GetBigMapDiffsUniqueByOperationID -func (mr *MockIElasticMockRecorder) GetBigMapDiffsUniqueByOperationID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsUniqueByOperationID", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsUniqueByOperationID), arg0) -} - -// GetBigMapDiffsByPtrAndKeyHash mocks base method -func (m *MockIElastic) GetBigMapDiffsByPtrAndKeyHash(arg0 int64, arg1, arg2 string, arg3, arg4 int64) ([]elastic.BigMapDiff, int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByPtrAndKeyHash", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([]elastic.BigMapDiff) - ret1, _ := ret[1].(int64) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// GetBigMapDiffsByPtrAndKeyHash indicates an expected call of GetBigMapDiffsByPtrAndKeyHash -func (mr *MockIElasticMockRecorder) GetBigMapDiffsByPtrAndKeyHash(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByPtrAndKeyHash", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsByPtrAndKeyHash), arg0, arg1, arg2, arg3, arg4) -} - -// GetBigMapDiffsByOperationID 
mocks base method -func (m *MockIElastic) GetBigMapDiffsByOperationID(arg0 string) ([]*models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByOperationID", arg0) - ret0, _ := ret[0].([]*models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsByOperationID indicates an expected call of GetBigMapDiffsByOperationID -func (mr *MockIElasticMockRecorder) GetBigMapDiffsByOperationID(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByOperationID", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsByOperationID), arg0) -} - -// GetBigMapDiffsByPtr mocks base method -func (m *MockIElastic) GetBigMapDiffsByPtr(arg0, arg1 string, arg2 int64) ([]models.BigMapDiff, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBigMapDiffsByPtr", arg0, arg1, arg2) - ret0, _ := ret[0].([]models.BigMapDiff) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBigMapDiffsByPtr indicates an expected call of GetBigMapDiffsByPtr -func (mr *MockIElasticMockRecorder) GetBigMapDiffsByPtr(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBigMapDiffsByPtr", reflect.TypeOf((*MockIElastic)(nil).GetBigMapDiffsByPtr), arg0, arg1, arg2) -} - -// GetBlock mocks base method -func (m *MockIElastic) GetBlock(arg0 string, arg1 int64) (models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBlock", arg0, arg1) - ret0, _ := ret[0].(models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBlock indicates an expected call of GetBlock -func (mr *MockIElasticMockRecorder) GetBlock(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBlock", reflect.TypeOf((*MockIElastic)(nil).GetBlock), arg0, arg1) -} - -// GetLastBlock mocks base method -func (m *MockIElastic) 
GetLastBlock(arg0 string) (models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastBlock", arg0) - ret0, _ := ret[0].(models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastBlock indicates an expected call of GetLastBlock -func (mr *MockIElasticMockRecorder) GetLastBlock(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastBlock", reflect.TypeOf((*MockIElastic)(nil).GetLastBlock), arg0) -} - -// GetLastBlocks mocks base method -func (m *MockIElastic) GetLastBlocks() ([]models.Block, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastBlocks") - ret0, _ := ret[0].([]models.Block) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastBlocks indicates an expected call of GetLastBlocks -func (mr *MockIElasticMockRecorder) GetLastBlocks() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastBlocks", reflect.TypeOf((*MockIElastic)(nil).GetLastBlocks)) -} - -// GetNetworkAlias mocks base method -func (m *MockIElastic) GetNetworkAlias(chainID string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetNetworkAlias", chainID) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetNetworkAlias indicates an expected call of GetNetworkAlias -func (mr *MockIElasticMockRecorder) GetNetworkAlias(chainID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkAlias", reflect.TypeOf((*MockIElastic)(nil).GetNetworkAlias), chainID) -} - -// BulkInsert mocks base method -func (m *MockIElastic) BulkInsert(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkInsert", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkInsert indicates an expected call of BulkInsert -func (mr *MockIElasticMockRecorder) BulkInsert(arg0 interface{}) *gomock.Call { - 
mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkInsert", reflect.TypeOf((*MockIElastic)(nil).BulkInsert), arg0) -} - -// BulkUpdate mocks base method -func (m *MockIElastic) BulkUpdate(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkUpdate", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkUpdate indicates an expected call of BulkUpdate -func (mr *MockIElasticMockRecorder) BulkUpdate(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkUpdate", reflect.TypeOf((*MockIElastic)(nil).BulkUpdate), arg0) -} - -// BulkDelete mocks base method -func (m *MockIElastic) BulkDelete(arg0 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkDelete", arg0) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkDelete indicates an expected call of BulkDelete -func (mr *MockIElasticMockRecorder) BulkDelete(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkDelete", reflect.TypeOf((*MockIElastic)(nil).BulkDelete), arg0) -} - -// BulkRemoveField mocks base method -func (m *MockIElastic) BulkRemoveField(arg0 string, arg1 []elastic.Model) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "BulkRemoveField", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkRemoveField indicates an expected call of BulkRemoveField -func (mr *MockIElasticMockRecorder) BulkRemoveField(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkRemoveField", reflect.TypeOf((*MockIElastic)(nil).BulkRemoveField), arg0, arg1) -} - -// BulkUpdateField mocks base method -func (m *MockIElastic) BulkUpdateField(where []models.Contract, fields ...string) error { - m.ctrl.T.Helper() - varargs := []interface{}{where} - for _, a := range fields { - varargs = append(varargs, a) - } - ret := 
m.ctrl.Call(m, "BulkUpdateField", varargs...) - ret0, _ := ret[0].(error) - return ret0 -} - -// BulkUpdateField indicates an expected call of BulkUpdateField -func (mr *MockIElasticMockRecorder) BulkUpdateField(where interface{}, fields ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{where}, fields...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "BulkUpdateField", reflect.TypeOf((*MockIElastic)(nil).BulkUpdateField), varargs...) -} - -// GetContract mocks base method -func (m *MockIElastic) GetContract(arg0 map[string]interface{}) (models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContract", arg0) - ret0, _ := ret[0].(models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContract indicates an expected call of GetContract -func (mr *MockIElasticMockRecorder) GetContract(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContract", reflect.TypeOf((*MockIElastic)(nil).GetContract), arg0) -} - -// GetContractRandom mocks base method -func (m *MockIElastic) GetContractRandom() (models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractRandom") - ret0, _ := ret[0].(models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractRandom indicates an expected call of GetContractRandom -func (mr *MockIElasticMockRecorder) GetContractRandom() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractRandom", reflect.TypeOf((*MockIElastic)(nil).GetContractRandom)) -} - -// GetContractMigrationStats mocks base method -func (m *MockIElastic) GetContractMigrationStats(arg0, arg1 string) (elastic.ContractMigrationsStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractMigrationStats", arg0, arg1) - ret0, _ := ret[0].(elastic.ContractMigrationsStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - 
-// GetContractMigrationStats indicates an expected call of GetContractMigrationStats -func (mr *MockIElasticMockRecorder) GetContractMigrationStats(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractMigrationStats", reflect.TypeOf((*MockIElastic)(nil).GetContractMigrationStats), arg0, arg1) -} - -// GetContractAddressesByNetworkAndLevel mocks base method -func (m *MockIElastic) GetContractAddressesByNetworkAndLevel(arg0 string, arg1 int64) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractAddressesByNetworkAndLevel", arg0, arg1) - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractAddressesByNetworkAndLevel indicates an expected call of GetContractAddressesByNetworkAndLevel -func (mr *MockIElasticMockRecorder) GetContractAddressesByNetworkAndLevel(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractAddressesByNetworkAndLevel", reflect.TypeOf((*MockIElastic)(nil).GetContractAddressesByNetworkAndLevel), arg0, arg1) -} - -// GetContracts mocks base method -func (m *MockIElastic) GetContracts(arg0 map[string]interface{}) ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContracts", arg0) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContracts indicates an expected call of GetContracts -func (mr *MockIElasticMockRecorder) GetContracts(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContracts", reflect.TypeOf((*MockIElastic)(nil).GetContracts), arg0) -} - -// GetContractsIDByAddress mocks base method -func (m *MockIElastic) GetContractsIDByAddress(arg0 []string, arg1 string) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractsIDByAddress", arg0, arg1) - ret0, _ := 
ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractsIDByAddress indicates an expected call of GetContractsIDByAddress -func (mr *MockIElasticMockRecorder) GetContractsIDByAddress(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractsIDByAddress", reflect.TypeOf((*MockIElastic)(nil).GetContractsIDByAddress), arg0, arg1) -} - -// GetAffectedContracts mocks base method -func (m *MockIElastic) GetAffectedContracts(arg0 string, arg1, arg2 int64) ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAffectedContracts", arg0, arg1, arg2) - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAffectedContracts indicates an expected call of GetAffectedContracts -func (mr *MockIElasticMockRecorder) GetAffectedContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAffectedContracts", reflect.TypeOf((*MockIElastic)(nil).GetAffectedContracts), arg0, arg1, arg2) -} - -// IsFAContract mocks base method -func (m *MockIElastic) IsFAContract(arg0, arg1 string) (bool, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "IsFAContract", arg0, arg1) - ret0, _ := ret[0].(bool) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// IsFAContract indicates an expected call of IsFAContract -func (mr *MockIElasticMockRecorder) IsFAContract(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsFAContract", reflect.TypeOf((*MockIElastic)(nil).IsFAContract), arg0, arg1) -} - -// RecalcContractStats mocks base method -func (m *MockIElastic) RecalcContractStats(arg0, arg1 string) (elastic.ContractStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RecalcContractStats", arg0, arg1) - ret0, _ := ret[0].(elastic.ContractStats) - ret1, _ := ret[1].(error) - return ret0, ret1 
-} - -// RecalcContractStats indicates an expected call of RecalcContractStats -func (mr *MockIElasticMockRecorder) RecalcContractStats(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecalcContractStats", reflect.TypeOf((*MockIElastic)(nil).RecalcContractStats), arg0, arg1) -} - -// UpdateContractMigrationsCount mocks base method -func (m *MockIElastic) UpdateContractMigrationsCount(arg0, arg1 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateContractMigrationsCount", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateContractMigrationsCount indicates an expected call of UpdateContractMigrationsCount -func (mr *MockIElasticMockRecorder) UpdateContractMigrationsCount(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateContractMigrationsCount", reflect.TypeOf((*MockIElastic)(nil).UpdateContractMigrationsCount), arg0, arg1) -} - -// GetDAppStats mocks base method -func (m *MockIElastic) GetDAppStats(arg0 string, arg1 []string, arg2 string) (elastic.DAppStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDAppStats", arg0, arg1, arg2) - ret0, _ := ret[0].(elastic.DAppStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDAppStats indicates an expected call of GetDAppStats -func (mr *MockIElasticMockRecorder) GetDAppStats(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppStats", reflect.TypeOf((*MockIElastic)(nil).GetDAppStats), arg0, arg1, arg2) -} - -// GetContractsByAddresses mocks base method -func (m *MockIElastic) GetContractsByAddresses(addresses []elastic.Address) ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractsByAddresses", addresses) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// 
GetContractsByAddresses indicates an expected call of GetContractsByAddresses -func (mr *MockIElasticMockRecorder) GetContractsByAddresses(addresses interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractsByAddresses", reflect.TypeOf((*MockIElastic)(nil).GetContractsByAddresses), addresses) -} - -// ListDomains mocks base method -func (m *MockIElastic) ListDomains(network string, size, offset int64) (elastic.DomainsResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListDomains", network, size, offset) - ret0, _ := ret[0].(elastic.DomainsResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListDomains indicates an expected call of ListDomains -func (mr *MockIElasticMockRecorder) ListDomains(network, size, offset interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListDomains", reflect.TypeOf((*MockIElastic)(nil).ListDomains), network, size, offset) -} - -// ResolveDomainByAddress mocks base method -func (m *MockIElastic) ResolveDomainByAddress(network, address string) (*models.TezosDomain, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ResolveDomainByAddress", network, address) - ret0, _ := ret[0].(*models.TezosDomain) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ResolveDomainByAddress indicates an expected call of ResolveDomainByAddress -func (mr *MockIElasticMockRecorder) ResolveDomainByAddress(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ResolveDomainByAddress", reflect.TypeOf((*MockIElastic)(nil).ResolveDomainByAddress), network, address) -} - -// GetEvents mocks base method -func (m *MockIElastic) GetEvents(arg0 []elastic.SubscriptionRequest, arg1, arg2 int64) ([]elastic.Event, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetEvents", arg0, arg1, arg2) - ret0, _ := ret[0].([]elastic.Event) - ret1, _ := 
ret[1].(error) - return ret0, ret1 -} - -// GetEvents indicates an expected call of GetEvents -func (mr *MockIElasticMockRecorder) GetEvents(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEvents", reflect.TypeOf((*MockIElastic)(nil).GetEvents), arg0, arg1, arg2) -} - -// GetMigrations mocks base method -func (m *MockIElastic) GetMigrations(arg0, arg1 string) ([]models.Migration, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMigrations", arg0, arg1) - ret0, _ := ret[0].([]models.Migration) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetMigrations indicates an expected call of GetMigrations -func (mr *MockIElasticMockRecorder) GetMigrations(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrations", reflect.TypeOf((*MockIElastic)(nil).GetMigrations), arg0, arg1) -} - -// GetOperationsForContract mocks base method -func (m *MockIElastic) GetOperationsForContract(arg0, arg1 string, arg2 uint64, arg3 map[string]interface{}) (elastic.PageableOperations, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperationsForContract", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].(elastic.PageableOperations) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperationsForContract indicates an expected call of GetOperationsForContract -func (mr *MockIElasticMockRecorder) GetOperationsForContract(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperationsForContract", reflect.TypeOf((*MockIElastic)(nil).GetOperationsForContract), arg0, arg1, arg2, arg3) -} - -// GetLastOperation mocks base method -func (m *MockIElastic) GetLastOperation(arg0, arg1 string, arg2 int64) (models.Operation, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLastOperation", arg0, arg1, arg2) - ret0, _ := 
ret[0].(models.Operation) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLastOperation indicates an expected call of GetLastOperation -func (mr *MockIElasticMockRecorder) GetLastOperation(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLastOperation", reflect.TypeOf((*MockIElastic)(nil).GetLastOperation), arg0, arg1, arg2) -} - -// GetOperationsStats mocks base method -func (m *MockIElastic) GetOperationsStats(network, address string) (elastic.OperationsStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperationsStats", network, address) - ret0, _ := ret[0].(elastic.OperationsStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperationsStats indicates an expected call of GetOperationsStats -func (mr *MockIElasticMockRecorder) GetOperationsStats(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperationsStats", reflect.TypeOf((*MockIElastic)(nil).GetOperationsStats), network, address) -} - -// GetOperations mocks base method -func (m *MockIElastic) GetOperations(filter map[string]interface{}, size int64, sort bool) ([]models.Operation, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetOperations", filter, size, sort) - ret0, _ := ret[0].([]models.Operation) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetOperations indicates an expected call of GetOperations -func (mr *MockIElasticMockRecorder) GetOperations(filter, size, sort interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetOperations", reflect.TypeOf((*MockIElastic)(nil).GetOperations), filter, size, sort) -} - -// GetContract24HoursVolume mocks base method -func (m *MockIElastic) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, 
"GetContract24HoursVolume", network, address, entrypoints) - ret0, _ := ret[0].(float64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContract24HoursVolume indicates an expected call of GetContract24HoursVolume -func (mr *MockIElasticMockRecorder) GetContract24HoursVolume(network, address, entrypoints interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContract24HoursVolume", reflect.TypeOf((*MockIElastic)(nil).GetContract24HoursVolume), network, address, entrypoints) -} - -// GetProjectsLastContract mocks base method -func (m *MockIElastic) GetProjectsLastContract() ([]models.Contract, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProjectsLastContract") - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProjectsLastContract indicates an expected call of GetProjectsLastContract -func (mr *MockIElasticMockRecorder) GetProjectsLastContract() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsLastContract", reflect.TypeOf((*MockIElastic)(nil).GetProjectsLastContract)) -} - -// GetSameContracts mocks base method -func (m *MockIElastic) GetSameContracts(arg0 models.Contract, arg1, arg2 int64) (elastic.SameContractsResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSameContracts", arg0, arg1, arg2) - ret0, _ := ret[0].(elastic.SameContractsResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetSameContracts indicates an expected call of GetSameContracts -func (mr *MockIElasticMockRecorder) GetSameContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSameContracts", reflect.TypeOf((*MockIElastic)(nil).GetSameContracts), arg0, arg1, arg2) -} - -// GetSimilarContracts mocks base method -func (m *MockIElastic) GetSimilarContracts(arg0 models.Contract, arg1, arg2 
int64) ([]elastic.SimilarContract, int, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSimilarContracts", arg0, arg1, arg2) - ret0, _ := ret[0].([]elastic.SimilarContract) - ret1, _ := ret[1].(int) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// GetSimilarContracts indicates an expected call of GetSimilarContracts -func (mr *MockIElasticMockRecorder) GetSimilarContracts(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSimilarContracts", reflect.TypeOf((*MockIElastic)(nil).GetSimilarContracts), arg0, arg1, arg2) -} - -// GetDiffTasks mocks base method -func (m *MockIElastic) GetDiffTasks() ([]elastic.DiffTask, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDiffTasks") - ret0, _ := ret[0].([]elastic.DiffTask) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDiffTasks indicates an expected call of GetDiffTasks -func (mr *MockIElasticMockRecorder) GetDiffTasks() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDiffTasks", reflect.TypeOf((*MockIElastic)(nil).GetDiffTasks)) -} - -// GetProtocol mocks base method -func (m *MockIElastic) GetProtocol(arg0, arg1 string, arg2 int64) (models.Protocol, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetProtocol", arg0, arg1, arg2) - ret0, _ := ret[0].(models.Protocol) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetProtocol indicates an expected call of GetProtocol -func (mr *MockIElasticMockRecorder) GetProtocol(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProtocol", reflect.TypeOf((*MockIElastic)(nil).GetProtocol), arg0, arg1, arg2) -} - -// GetSymLinks mocks base method -func (m *MockIElastic) GetSymLinks(arg0 string, arg1 int64) (map[string]struct{}, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetSymLinks", arg0, arg1) - ret0, _ := 
ret[0].(map[string]struct{}) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetSymLinks indicates an expected call of GetSymLinks -func (mr *MockIElasticMockRecorder) GetSymLinks(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSymLinks", reflect.TypeOf((*MockIElastic)(nil).GetSymLinks), arg0, arg1) -} - -// SearchByText mocks base method -func (m *MockIElastic) SearchByText(arg0 string, arg1 int64, arg2 []string, arg3 map[string]interface{}, arg4 bool) (search.Result, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SearchByText", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(search.Result) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// SearchByText indicates an expected call of SearchByText -func (mr *MockIElasticMockRecorder) SearchByText(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SearchByText", reflect.TypeOf((*MockIElastic)(nil).SearchByText), arg0, arg1, arg2, arg3, arg4) -} - -// CreateAWSRepository mocks base method -func (m *MockIElastic) CreateAWSRepository(arg0, arg1, arg2 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateAWSRepository", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateAWSRepository indicates an expected call of CreateAWSRepository -func (mr *MockIElasticMockRecorder) CreateAWSRepository(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAWSRepository", reflect.TypeOf((*MockIElastic)(nil).CreateAWSRepository), arg0, arg1, arg2) -} - -// ListRepositories mocks base method -func (m *MockIElastic) ListRepositories() ([]elastic.Repository, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListRepositories") - ret0, _ := ret[0].([]elastic.Repository) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// 
ListRepositories indicates an expected call of ListRepositories -func (mr *MockIElasticMockRecorder) ListRepositories() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRepositories", reflect.TypeOf((*MockIElastic)(nil).ListRepositories)) -} - -// CreateSnapshots mocks base method -func (m *MockIElastic) CreateSnapshots(arg0, arg1 string, arg2 []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateSnapshots", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateSnapshots indicates an expected call of CreateSnapshots -func (mr *MockIElasticMockRecorder) CreateSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateSnapshots", reflect.TypeOf((*MockIElastic)(nil).CreateSnapshots), arg0, arg1, arg2) -} - -// RestoreSnapshots mocks base method -func (m *MockIElastic) RestoreSnapshots(arg0, arg1 string, arg2 []string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "RestoreSnapshots", arg0, arg1, arg2) - ret0, _ := ret[0].(error) - return ret0 -} - -// RestoreSnapshots indicates an expected call of RestoreSnapshots -func (mr *MockIElasticMockRecorder) RestoreSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RestoreSnapshots", reflect.TypeOf((*MockIElastic)(nil).RestoreSnapshots), arg0, arg1, arg2) -} - -// ListSnapshots mocks base method -func (m *MockIElastic) ListSnapshots(arg0 string) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ListSnapshots", arg0) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// ListSnapshots indicates an expected call of ListSnapshots -func (mr *MockIElasticMockRecorder) ListSnapshots(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListSnapshots", 
reflect.TypeOf((*MockIElastic)(nil).ListSnapshots), arg0) -} - -// SetSnapshotPolicy mocks base method -func (m *MockIElastic) SetSnapshotPolicy(arg0, arg1, arg2, arg3 string, arg4 int64) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "SetSnapshotPolicy", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].(error) - return ret0 -} - -// SetSnapshotPolicy indicates an expected call of SetSnapshotPolicy -func (mr *MockIElasticMockRecorder) SetSnapshotPolicy(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSnapshotPolicy", reflect.TypeOf((*MockIElastic)(nil).SetSnapshotPolicy), arg0, arg1, arg2, arg3, arg4) -} - -// GetAllPolicies mocks base method -func (m *MockIElastic) GetAllPolicies() ([]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAllPolicies") - ret0, _ := ret[0].([]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAllPolicies indicates an expected call of GetAllPolicies -func (mr *MockIElasticMockRecorder) GetAllPolicies() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllPolicies", reflect.TypeOf((*MockIElastic)(nil).GetAllPolicies)) -} - -// GetMappings mocks base method -func (m *MockIElastic) GetMappings(arg0 []string) (map[string]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetMappings", arg0) - ret0, _ := ret[0].(map[string]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetMappings indicates an expected call of GetMappings -func (mr *MockIElasticMockRecorder) GetMappings(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMappings", reflect.TypeOf((*MockIElastic)(nil).GetMappings), arg0) -} - -// CreateMapping mocks base method -func (m *MockIElastic) CreateMapping(arg0 string, arg1 io.Reader) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CreateMapping", arg0, 
arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// CreateMapping indicates an expected call of CreateMapping -func (mr *MockIElasticMockRecorder) CreateMapping(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateMapping", reflect.TypeOf((*MockIElastic)(nil).CreateMapping), arg0, arg1) -} - -// ReloadSecureSettings mocks base method -func (m *MockIElastic) ReloadSecureSettings() error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "ReloadSecureSettings") - ret0, _ := ret[0].(error) - return ret0 -} - -// ReloadSecureSettings indicates an expected call of ReloadSecureSettings -func (mr *MockIElasticMockRecorder) ReloadSecureSettings() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReloadSecureSettings", reflect.TypeOf((*MockIElastic)(nil).ReloadSecureSettings)) -} - -// GetNetworkCountStats mocks base method -func (m *MockIElastic) GetNetworkCountStats(arg0 string) (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetNetworkCountStats", arg0) - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetNetworkCountStats indicates an expected call of GetNetworkCountStats -func (mr *MockIElasticMockRecorder) GetNetworkCountStats(arg0 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkCountStats", reflect.TypeOf((*MockIElastic)(nil).GetNetworkCountStats), arg0) -} - -// GetDateHistogram mocks base method -func (m *MockIElastic) GetDateHistogram(period string, opts ...elastic.HistogramOption) ([][]int64, error) { - m.ctrl.T.Helper() - varargs := []interface{}{period} - for _, a := range opts { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetDateHistogram", varargs...) 
- ret0, _ := ret[0].([][]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDateHistogram indicates an expected call of GetDateHistogram -func (mr *MockIElasticMockRecorder) GetDateHistogram(period interface{}, opts ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{period}, opts...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDateHistogram", reflect.TypeOf((*MockIElastic)(nil).GetDateHistogram), varargs...) -} - -// GetCallsCountByNetwork mocks base method -func (m *MockIElastic) GetCallsCountByNetwork() (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetCallsCountByNetwork") - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetCallsCountByNetwork indicates an expected call of GetCallsCountByNetwork -func (mr *MockIElasticMockRecorder) GetCallsCountByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCallsCountByNetwork", reflect.TypeOf((*MockIElastic)(nil).GetCallsCountByNetwork)) -} - -// GetContractStatsByNetwork mocks base method -func (m *MockIElastic) GetContractStatsByNetwork() (map[string]elastic.ContractCountStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetContractStatsByNetwork") - ret0, _ := ret[0].(map[string]elastic.ContractCountStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetContractStatsByNetwork indicates an expected call of GetContractStatsByNetwork -func (mr *MockIElasticMockRecorder) GetContractStatsByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractStatsByNetwork", reflect.TypeOf((*MockIElastic)(nil).GetContractStatsByNetwork)) -} - -// GetFACountByNetwork mocks base method -func (m *MockIElastic) GetFACountByNetwork() (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetFACountByNetwork") - ret0, _ := 
ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetFACountByNetwork indicates an expected call of GetFACountByNetwork -func (mr *MockIElasticMockRecorder) GetFACountByNetwork() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFACountByNetwork", reflect.TypeOf((*MockIElastic)(nil).GetFACountByNetwork)) -} - -// GetLanguagesForNetwork mocks base method -func (m *MockIElastic) GetLanguagesForNetwork(network string) (map[string]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetLanguagesForNetwork", network) - ret0, _ := ret[0].(map[string]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetLanguagesForNetwork indicates an expected call of GetLanguagesForNetwork -func (mr *MockIElasticMockRecorder) GetLanguagesForNetwork(network interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLanguagesForNetwork", reflect.TypeOf((*MockIElastic)(nil).GetLanguagesForNetwork), network) -} - -// GetTokens mocks base method -func (m *MockIElastic) GetTokens(arg0, arg1 string, arg2, arg3 int64) ([]models.Contract, int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokens", arg0, arg1, arg2, arg3) - ret0, _ := ret[0].([]models.Contract) - ret1, _ := ret[1].(int64) - ret2, _ := ret[2].(error) - return ret0, ret1, ret2 -} - -// GetTokens indicates an expected call of GetTokens -func (mr *MockIElasticMockRecorder) GetTokens(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokens", reflect.TypeOf((*MockIElastic)(nil).GetTokens), arg0, arg1, arg2, arg3) -} - -// GetTokensStats mocks base method -func (m *MockIElastic) GetTokensStats(arg0 string, arg1, arg2 []string) (map[string]elastic.TokenUsageStats, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokensStats", arg0, arg1, arg2) - ret0, _ := 
ret[0].(map[string]elastic.TokenUsageStats) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokensStats indicates an expected call of GetTokensStats -func (mr *MockIElasticMockRecorder) GetTokensStats(arg0, arg1, arg2 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokensStats", reflect.TypeOf((*MockIElastic)(nil).GetTokensStats), arg0, arg1, arg2) -} - -// GetTokenVolumeSeries mocks base method -func (m *MockIElastic) GetTokenVolumeSeries(arg0, arg1 string, arg2 []string, arg3 []tzip.DAppContract, arg4 uint) ([][]int64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenVolumeSeries", arg0, arg1, arg2, arg3, arg4) - ret0, _ := ret[0].([][]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenVolumeSeries indicates an expected call of GetTokenVolumeSeries -func (mr *MockIElasticMockRecorder) GetTokenVolumeSeries(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenVolumeSeries", reflect.TypeOf((*MockIElastic)(nil).GetTokenVolumeSeries), arg0, arg1, arg2, arg3, arg4) -} - -// GetBalances mocks base method -func (m *MockIElastic) GetBalances(arg0, arg1 string, arg2 int64, arg3 ...elastic.TokenBalance) (map[elastic.TokenBalance]int64, error) { - m.ctrl.T.Helper() - varargs := []interface{}{arg0, arg1, arg2} - for _, a := range arg3 { - varargs = append(varargs, a) - } - ret := m.ctrl.Call(m, "GetBalances", varargs...) - ret0, _ := ret[0].(map[elastic.TokenBalance]int64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBalances indicates an expected call of GetBalances -func (mr *MockIElasticMockRecorder) GetBalances(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - varargs := append([]interface{}{arg0, arg1, arg2}, arg3...) 
- return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalances", reflect.TypeOf((*MockIElastic)(nil).GetBalances), varargs...) -} - -// GetAccountBalances mocks base method -func (m *MockIElastic) GetAccountBalances(arg0, arg1 string) ([]models.TokenBalance, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAccountBalances", arg0, arg1) - ret0, _ := ret[0].([]models.TokenBalance) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAccountBalances indicates an expected call of GetAccountBalances -func (mr *MockIElasticMockRecorder) GetAccountBalances(arg0, arg1 interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountBalances", reflect.TypeOf((*MockIElastic)(nil).GetAccountBalances), arg0, arg1) -} - -// GetTokenSupply mocks base method -func (m *MockIElastic) GetTokenSupply(network, address string, tokenID int64) (elastic.TokenSupply, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenSupply", network, address, tokenID) - ret0, _ := ret[0].(elastic.TokenSupply) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenSupply indicates an expected call of GetTokenSupply -func (mr *MockIElasticMockRecorder) GetTokenSupply(network, address, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenSupply", reflect.TypeOf((*MockIElastic)(nil).GetTokenSupply), network, address, tokenID) -} - -// GetTransfers mocks base method -func (m *MockIElastic) GetTransfers(ctx elastic.GetTransfersContext) (elastic.TransfersResponse, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTransfers", ctx) - ret0, _ := ret[0].(elastic.TransfersResponse) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTransfers indicates an expected call of GetTransfers -func (mr *MockIElasticMockRecorder) GetTransfers(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTransfers", reflect.TypeOf((*MockIElastic)(nil).GetTransfers), ctx) -} - -// GetAllTransfers mocks base method -func (m *MockIElastic) GetAllTransfers(network string, level int64) ([]models.Transfer, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAllTransfers", network, level) - ret0, _ := ret[0].([]models.Transfer) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAllTransfers indicates an expected call of GetAllTransfers -func (mr *MockIElasticMockRecorder) GetAllTransfers(network, level interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllTransfers", reflect.TypeOf((*MockIElastic)(nil).GetAllTransfers), network, level) -} - -// UpdateTokenBalances mocks base method -func (m *MockIElastic) UpdateTokenBalances(updates []*models.TokenBalance) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpdateTokenBalances", updates) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpdateTokenBalances indicates an expected call of UpdateTokenBalances -func (mr *MockIElasticMockRecorder) UpdateTokenBalances(updates interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTokenBalances", reflect.TypeOf((*MockIElastic)(nil).UpdateTokenBalances), updates) -} - -// GetHolders mocks base method -func (m *MockIElastic) GetHolders(network, contract string, tokenID int64) ([]models.TokenBalance, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetHolders", network, contract, tokenID) - ret0, _ := ret[0].([]models.TokenBalance) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetHolders indicates an expected call of GetHolders -func (mr *MockIElasticMockRecorder) GetHolders(network, contract, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHolders", reflect.TypeOf((*MockIElastic)(nil).GetHolders), 
network, contract, tokenID) -} - -// GetToken24HoursVolume mocks base method -func (m *MockIElastic) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetToken24HoursVolume", network, contract, initiators, entrypoints, tokenID) - ret0, _ := ret[0].(float64) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetToken24HoursVolume indicates an expected call of GetToken24HoursVolume -func (mr *MockIElasticMockRecorder) GetToken24HoursVolume(network, contract, initiators, entrypoints, tokenID interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetToken24HoursVolume", reflect.TypeOf((*MockIElastic)(nil).GetToken24HoursVolume), network, contract, initiators, entrypoints, tokenID) -} - -// GetTZIP mocks base method -func (m *MockIElastic) GetTZIP(network, address string) (models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTZIP", network, address) - ret0, _ := ret[0].(models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTZIP indicates an expected call of GetTZIP -func (mr *MockIElasticMockRecorder) GetTZIP(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTZIP", reflect.TypeOf((*MockIElastic)(nil).GetTZIP), network, address) -} - -// GetTZIPWithEvents mocks base method -func (m *MockIElastic) GetTZIPWithEvents() ([]models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTZIPWithEvents") - ret0, _ := ret[0].([]models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTZIPWithEvents indicates an expected call of GetTZIPWithEvents -func (mr *MockIElasticMockRecorder) GetTZIPWithEvents() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTZIPWithEvents", reflect.TypeOf((*MockIElastic)(nil).GetTZIPWithEvents)) 
-} - -// GetTokenMetadata mocks base method -func (m *MockIElastic) GetTokenMetadata(ctx elastic.GetTokenMetadataContext) ([]elastic.TokenMetadata, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetTokenMetadata", ctx) - ret0, _ := ret[0].([]elastic.TokenMetadata) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetTokenMetadata indicates an expected call of GetTokenMetadata -func (mr *MockIElasticMockRecorder) GetTokenMetadata(ctx interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenMetadata", reflect.TypeOf((*MockIElastic)(nil).GetTokenMetadata), ctx) -} - -// GetDApps mocks base method -func (m *MockIElastic) GetDApps() ([]tzip.DApp, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDApps") - ret0, _ := ret[0].([]tzip.DApp) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDApps indicates an expected call of GetDApps -func (mr *MockIElasticMockRecorder) GetDApps() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDApps", reflect.TypeOf((*MockIElastic)(nil).GetDApps)) -} - -// GetDAppBySlug mocks base method -func (m *MockIElastic) GetDAppBySlug(slug string) (*tzip.DApp, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetDAppBySlug", slug) - ret0, _ := ret[0].(*tzip.DApp) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetDAppBySlug indicates an expected call of GetDAppBySlug -func (mr *MockIElasticMockRecorder) GetDAppBySlug(slug interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppBySlug", reflect.TypeOf((*MockIElastic)(nil).GetDAppBySlug), slug) -} - -// GetBySlug mocks base method -func (m *MockIElastic) GetBySlug(slug string) (*models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetBySlug", slug) - ret0, _ := ret[0].(*models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetBySlug indicates an expected 
call of GetBySlug -func (mr *MockIElasticMockRecorder) GetBySlug(slug interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBySlug", reflect.TypeOf((*MockIElastic)(nil).GetBySlug), slug) -} - -// GetAliases mocks base method -func (m *MockIElastic) GetAliases(network string) ([]models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAliases", network) - ret0, _ := ret[0].([]models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAliases indicates an expected call of GetAliases -func (mr *MockIElasticMockRecorder) GetAliases(network interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliases", reflect.TypeOf((*MockIElastic)(nil).GetAliases), network) -} - -// GetAliasesMap mocks base method -func (m *MockIElastic) GetAliasesMap(network string) (map[string]string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAliasesMap", network) - ret0, _ := ret[0].(map[string]string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAliasesMap indicates an expected call of GetAliasesMap -func (mr *MockIElasticMockRecorder) GetAliasesMap(network interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliasesMap", reflect.TypeOf((*MockIElastic)(nil).GetAliasesMap), network) -} - -// GetAlias mocks base method -func (m *MockIElastic) GetAlias(network, address string) (*models.TZIP, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetAlias", network, address) - ret0, _ := ret[0].(*models.TZIP) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetAlias indicates an expected call of GetAlias -func (mr *MockIElasticMockRecorder) GetAlias(network, address interface{}) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAlias", reflect.TypeOf((*MockIElastic)(nil).GetAlias), network, address) -} diff 
--git a/internal/elastic/operation/data.go b/internal/elastic/operation/data.go new file mode 100644 index 000000000..51b3fb4aa --- /dev/null +++ b/internal/elastic/operation/data.go @@ -0,0 +1,75 @@ +package operation + +import ( + "time" + + "github.com/baking-bad/bcdhub/internal/elastic/core" +) + +type getOperationsStatsResponse struct { + Aggs struct { + OPG struct { + Value int64 `json:"value"` + } `json:"opg"` + LastAction struct { + Value time.Time `json:"value_as_string"` + } `json:"last_action"` + } `json:"aggregations"` +} + +type getByContract struct { + Hist core.HitsArray `json:"hits"` + Agg struct { + LastID core.FloatValue `json:"last_id"` + } `json:"aggregations"` +} + +type aggVolumeSumResponse struct { + Aggs struct { + Result struct { + Value float64 `json:"value"` + } `json:"volume"` + } +} + +type getTokensStatsResponse struct { + Aggs struct { + Body struct { + Buckets []struct { + DocCount int64 `json:"doc_count"` + Key struct { + Destination string `json:"destination"` + Entrypoint string `json:"entrypoint"` + } `json:"key"` + AVG core.FloatValue `json:"average_consumed_gas"` + } `json:"buckets"` + } `json:"body"` + } `json:"aggregations"` +} + +type operationAddresses struct { + Source string `json:"source"` + Destination string `json:"destination"` +} + +type opgForContract struct { + hash string + counter int64 +} + +type recalcContractStatsResponse struct { + Aggs struct { + TxCount core.IntValue `json:"tx_count"` + Balance core.IntValue `json:"balance"` + LastAction core.IntValue `json:"last_action"` + TotalWithdrawn core.IntValue `json:"total_withdrawn"` + } `json:"aggregations"` +} + +type getDAppStatsResponse struct { + Aggs struct { + Users core.FloatValue `json:"users"` + Calls core.FloatValue `json:"calls"` + Volume core.FloatValue `json:"volume"` + } `json:"aggregations"` +} diff --git a/internal/elastic/operation/storage.go b/internal/elastic/operation/storage.go new file mode 100644 index 000000000..863fe3193 --- 
/dev/null +++ b/internal/elastic/operation/storage.go @@ -0,0 +1,500 @@ +package operation + +import ( + "encoding/json" + "fmt" + "time" + + constants "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +func (storage *Storage) getContractOPG(address, network string, size uint64, filters map[string]interface{}) ([]opgForContract, error) { + if size == 0 || size > core.MaxQuerySize { + size = consts.DefaultSize + } + + filtersString, err := prepareOperationFilters(filters) + if err != nil { + return nil, err + } + + sqlString := fmt.Sprintf(`SELECT hash, counter + FROM operation + WHERE (source = '%s' OR destination = '%s') AND network = '%s' %s + GROUP BY hash, counter, level + ORDER BY level DESC + LIMIT %d`, address, address, network, filtersString, size) + + var response core.SQLResponse + if err := storage.es.ExecuteSQL(sqlString, &response); err != nil { + return nil, err + } + + resp := make([]opgForContract, 0) + for i := range response.Rows { + resp = append(resp, opgForContract{ + hash: response.Rows[i][0].(string), + counter: int64(response.Rows[i][1].(float64)), + }) + } + + return resp, nil +} + +func prepareOperationFilters(filters map[string]interface{}) (s string, err error) { + for k, v := range filters { + if v != "" { + s += " AND " + switch k { + case "from": + s += fmt.Sprintf("timestamp >= %d", v) + case "to": + s += fmt.Sprintf("timestamp <= %d", v) + case "entrypoints": + s += fmt.Sprintf("entrypoint IN (%s)", v) + case "last_id": + 
s += fmt.Sprintf("indexed_time < %s", v) + case "status": + s += fmt.Sprintf("status IN (%s)", v) + default: + return "", errors.Errorf("Unknown operation filter: %s %v", k, v) + } + } + } + return +} + +// GetByContract - +func (storage *Storage) GetByContract(network, address string, size uint64, filters map[string]interface{}) (po operation.Pageable, err error) { + opg, err := storage.getContractOPG(address, network, size, filters) + if err != nil { + return + } + + s := make([]core.Item, len(opg)) + for i := range opg { + s[i] = core.Bool(core.Filter( + core.Match("hash", opg[i].hash), + core.Term("counter", opg[i].counter), + )) + } + b := core.Bool( + core.Should(s...), + core.Filter( + core.Match("network", network), + ), + core.MinimumShouldMatch(1), + ) + query := core.NewQuery(). + Query(b). + Add( + core.Aggs(core.AggItem{Name: "last_id", Body: core.Min("indexed_time")}), + ). + Add(core.Item{ + "sort": core.Item{ + "_script": core.Item{ + "type": "number", + "script": core.Item{ + "lang": "painless", + "source": "doc['level'].value * 10000000000L + (doc['counter'].value) * 1000L + (doc['internal'].value ? (998L - doc['nonce'].value) : 999L)", + }, + "order": "desc", + }, + }, + }).All() + + var response getByContract + if err = storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return + } + + ops := make([]operation.Operation, len(response.Hist.Hits)) + for i := range response.Hist.Hits { + if err = json.Unmarshal(response.Hist.Hits[i].Source, &ops[i]); err != nil { + return + } + ops[i].ID = response.Hist.Hits[i].ID + } + + po.Operations = ops + po.LastID = fmt.Sprintf("%.0f", response.Agg.LastID.Value) + return +} + +// Last - +func (storage *Storage) Last(network, address string, indexedTime int64) (op operation.Operation, err error) { + query := core.NewQuery(). 
+ Query( + core.Bool( + core.Must( + core.MatchPhrase("destination", address), + core.MatchPhrase("network", network), + ), + core.Filter( + core.Range("indexed_time", core.Item{"lt": indexedTime}), + core.Term("status", "applied"), + ), + core.MustNot( + core.Term("deffated_storage", ""), + ), + ), + ).Sort("indexed_time", "desc").One() + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return + } + + if response.Hits.Total.Value == 0 { + return op, core.NewRecordNotFoundError(models.DocOperations, "") + } + err = json.Unmarshal(response.Hits.Hits[0].Source, &op) + op.ID = response.Hits.Hits[0].ID + return +} + +// Get - +func (storage *Storage) Get(filters map[string]interface{}, size int64, sort bool) ([]operation.Operation, error) { + operations := make([]operation.Operation, 0) + + query := core.FiltersToQuery(filters) + + if sort { + query.Add(core.Item{ + "sort": core.Item{ + "_script": core.Item{ + "type": "number", + "script": core.Item{ + "lang": "painless", + "source": "doc['level'].value * 10000000000L + (doc['counter'].value) * 1000L + (doc['internal'].value ? 
(998L - doc['nonce'].value) : 999L)", + }, + "order": "desc", + }, + }, + }) + } + + scrollSize := size + if consts.DefaultScrollSize < scrollSize || scrollSize == 0 { + scrollSize = consts.DefaultScrollSize + } + + ctx := core.NewScrollContext(storage.es, query, size, scrollSize) + err := ctx.Get(&operations) + return operations, err +} + +// GetStats - +func (storage *Storage) GetStats(network, address string) (stats operation.Stats, err error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Bool( + core.Should( + core.MatchPhrase("source", address), + core.MatchPhrase("destination", address), + ), + core.MinimumShouldMatch(1), + ), + ), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: "opg", Body: core.Count("hash.keyword"), + }, + core.AggItem{ + Name: "last_action", Body: core.Max("timestamp"), + }, + ), + ).Zero() + + var response getOperationsStatsResponse + if err = storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return + } + + stats.Count = response.Aggs.OPG.Value + stats.LastAction = response.Aggs.LastAction.Value + return +} + +// GetContract24HoursVolume - +func (storage *Storage) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Bool( + core.Should( + core.MatchPhrase("destination", address), + core.MatchPhrase("source", address), + ), + core.MinimumShouldMatch(1), + ), + core.Term("network", network), + core.Term("status", constants.Applied), + core.Range("timestamp", core.Item{ + "lte": "now", + "gt": "now-24h", + }), + core.In("entrypoint.keyword", entrypoints), + ), + ), + ).Add( + core.Aggs( + core.AggItem{Name: "volume", Body: core.Sum("amount")}, + ), + ).Zero() + + var response aggVolumeSumResponse + if err := storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return 0, err + } + + return 
response.Aggs.Result.Value, nil +} + +// GetTokensStats - +func (storage *Storage) GetTokensStats(network string, addresses, entrypoints []string) (map[string]operation.TokenUsageStats, error) { + addressFilters := make([]core.Item, len(addresses)) + for i := range addresses { + addressFilters[i] = core.MatchPhrase("destination", addresses[i]) + } + + entrypointFilters := make([]core.Item, len(entrypoints)) + for i := range entrypoints { + entrypointFilters[i] = core.MatchPhrase("entrypoint", entrypoints[i]) + } + + query := core.NewQuery().Query( + core.Bool( + core.Must( + core.Match("network", network), + core.Bool( + core.Should(addressFilters...), + core.MinimumShouldMatch(1), + ), + core.Bool( + core.Should(entrypointFilters...), + core.MinimumShouldMatch(1), + ), + ), + ), + ).Add( + core.Aggs( + core.AggItem{ + Name: "body", + Body: core.Composite( + core.MaxQuerySize, + core.AggItem{ + Name: "destination", Body: core.TermsAgg("destination.keyword", 0), + }, + core.AggItem{ + Name: "entrypoint", Body: core.TermsAgg("entrypoint.keyword", 0), + }, + ).Extend( + core.Aggs( + core.AggItem{ + Name: "average_consumed_gas", Body: core.Avg("result.consumed_gas"), + }, + ), + ), + }, + ), + ).Zero() + + var response getTokensStatsResponse + if err := storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return nil, err + } + + usageStats := make(map[string]operation.TokenUsageStats) + for _, bucket := range response.Aggs.Body.Buckets { + usage := operation.TokenMethodUsageStats{ + Count: bucket.DocCount, + ConsumedGas: int64(bucket.AVG.Value), + } + + if _, ok := usageStats[bucket.Key.Destination]; !ok { + usageStats[bucket.Key.Destination] = make(operation.TokenUsageStats) + } + usageStats[bucket.Key.Destination][bucket.Key.Entrypoint] = usage + } + + return usageStats, nil +} + +// GetParticipatingContracts - +func (storage *Storage) GetParticipatingContracts(network string, fromLevel, toLevel int64) ([]string, error) { + query := 
core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Range("level", core.Item{ + "lte": fromLevel, + "gt": toLevel, + }), + ), + ), + ) + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return nil, err + } + + if response.Hits.Total.Value == 0 { + return nil, nil + } + + exists := make(map[string]struct{}) + addresses := make([]string, 0) + for i := range response.Hits.Hits { + var op operationAddresses + if err := json.Unmarshal(response.Hits.Hits[i].Source, &op); err != nil { + return nil, err + } + if _, ok := exists[op.Source]; !ok && helpers.IsContract(op.Source) { + addresses = append(addresses, op.Source) + exists[op.Source] = struct{}{} + } + if _, ok := exists[op.Destination]; !ok && helpers.IsContract(op.Destination) { + addresses = append(addresses, op.Destination) + exists[op.Destination] = struct{}{} + } + } + + return addresses, nil +} + +// RecalcStats - +func (storage *Storage) RecalcStats(network, address string) (stats operation.ContractStats, err error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + core.Should( + core.MatchPhrase("source", address), + core.MatchPhrase("destination", address), + ), + core.MinimumShouldMatch(1), + ), + ).Add( + core.Item{ + "aggs": core.Item{ + "tx_count": core.Count("indexed_time"), + "last_action": core.Max("timestamp"), + "balance": core.Item{ + "scripted_metric": core.Item{ + "init_script": "state.operations = []", + "map_script": "if (doc['status.keyword'].value == 'applied' && doc['amount'].size() != 0) {state.operations.add(doc['destination.keyword'].value == params.address ? 
doc['amount'].value : -1L * doc['amount'].value)}", + "combine_script": "double balance = 0; for (amount in state.operations) { balance += amount } return balance", + "reduce_script": "double balance = 0; for (a in states) { balance += a } return balance", + "params": core.Item{ + "address": address, + }, + }, + }, + }, + }, + ).Zero() + var response recalcContractStatsResponse + if err = storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return + } + + stats.LastAction = time.Unix(0, response.Aggs.LastAction.Value*1000000).UTC() + stats.Balance = response.Aggs.Balance.Value + stats.TxCount = response.Aggs.TxCount.Value + return +} + +// GetDAppStats - +func (storage *Storage) GetDAppStats(network string, addresses []string, period string) (stats operation.DAppStats, err error) { + addressMatches := make([]core.Item, len(addresses)) + for i := range addresses { + addressMatches[i] = core.MatchPhrase("destination", addresses[i]) + } + + matches := []core.Item{ + core.Match("network", network), + core.Exists("entrypoint"), + core.Bool( + core.Should(addressMatches...), + core.MinimumShouldMatch(1), + ), + core.Match("status", "applied"), + } + r, err := periodToRange(period) + if err != nil { + return + } + if r != nil { + matches = append(matches, r) + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter(matches...), + ), + ).Add( + core.Aggs( + core.AggItem{Name: "users", Body: core.Cardinality("source.keyword")}, + core.AggItem{Name: "calls", Body: core.Count("indexed_time")}, + core.AggItem{Name: "volume", Body: core.Sum("amount")}, + ), + ).Zero() + + var response getDAppStatsResponse + if err = storage.es.Query([]string{models.DocOperations}, query, &response); err != nil { + return + } + + stats.Calls = int64(response.Aggs.Calls.Value) + stats.Users = int64(response.Aggs.Users.Value) + stats.Volume = int64(response.Aggs.Volume.Value) + return +} + +func periodToRange(period string) (core.Item, error) { + var str 
string + switch period { + case "year": + str = "now-1y/d" + case "month": + str = "now-1M/d" + case "week": + str = "now-1w/d" + case "day": + str = "now-1d/d" + case "all": + return nil, nil + default: + return nil, errors.Errorf("Unknown period value: %s", period) + } + return core.Item{ + "range": core.Item{ + "timestamp": core.Item{ + "gte": str, + }, + }, + }, nil +} diff --git a/internal/elastic/operations.go b/internal/elastic/operations.go deleted file mode 100644 index 72da61351..000000000 --- a/internal/elastic/operations.go +++ /dev/null @@ -1,335 +0,0 @@ -package elastic - -import ( - "fmt" - "time" - - "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -type opgForContract struct { - hash string - counter int64 -} - -func (e *Elastic) getContractOPG(address, network string, size uint64, filters map[string]interface{}) ([]opgForContract, error) { - if size == 0 || size > maxQuerySize { - size = defaultSize - } - - filtersString, err := prepareOperationFilters(filters) - if err != nil { - return nil, err - } - - sqlString := fmt.Sprintf(`SELECT hash, counter - FROM operation - WHERE (source = '%s' OR destination = '%s') AND network = '%s' %s - GROUP BY hash, counter, level - ORDER BY level DESC - LIMIT %d`, address, address, network, filtersString, size) - - var response sqlResponse - if err := e.executeSQL(sqlString, &response); err != nil { - return nil, err - } - - resp := make([]opgForContract, 0) - for i := range response.Rows { - resp = append(resp, opgForContract{ - hash: response.Rows[i][0].(string), - counter: int64(response.Rows[i][1].(float64)), - }) - } - - return resp, nil -} - -func prepareOperationFilters(filters map[string]interface{}) (s string, err error) { - for k, v := range filters { - if v != "" { - s += " AND " - switch k { - case "from": - s += 
fmt.Sprintf("timestamp >= %d", v) - case "to": - s += fmt.Sprintf("timestamp <= %d", v) - case "entrypoints": - s += fmt.Sprintf("entrypoint IN (%s)", v) - case "last_id": - s += fmt.Sprintf("indexed_time < %s", v) - case "status": - s += fmt.Sprintf("status IN (%s)", v) - default: - return "", errors.Errorf("Unknown operation filter: %s %v", k, v) - } - } - } - return -} - -type getOperationsForContractResponse struct { - Hist HitsArray `json:"hits"` - Agg struct { - LastID floatValue `json:"last_id"` - } `json:"aggregations"` -} - -// GetOperationsForContract - -func (e *Elastic) GetOperationsForContract(network, address string, size uint64, filters map[string]interface{}) (po PageableOperations, err error) { - opg, err := e.getContractOPG(address, network, size, filters) - if err != nil { - return - } - - s := make([]qItem, len(opg)) - for i := range opg { - s[i] = boolQ(filter( - matchQ("hash", opg[i].hash), - term("counter", opg[i].counter), - )) - } - b := boolQ( - should(s...), - filter( - matchQ("network", network), - ), - minimumShouldMatch(1), - ) - query := newQuery(). - Query(b). - Add( - aggs(aggItem{"last_id", min("indexed_time")}), - ). - Add(qItem{ - "sort": qItem{ - "_script": qItem{ - "type": "number", - "script": qItem{ - "lang": "painless", - "source": "doc['level'].value * 10000000000L + (doc['counter'].value) * 1000L + (doc['internal'].value ? 
(998L - doc['nonce'].value) : 999L)", - }, - "order": "desc", - }, - }, - }).All() - - var response getOperationsForContractResponse - if err = e.query([]string{DocOperations}, query, &response); err != nil { - return - } - - ops := make([]models.Operation, len(response.Hist.Hits)) - for i := range response.Hist.Hits { - if err = json.Unmarshal(response.Hist.Hits[i].Source, &ops[i]); err != nil { - return - } - ops[i].ID = response.Hist.Hits[i].ID - } - - po.Operations = ops - po.LastID = fmt.Sprintf("%.0f", response.Agg.LastID.Value) - return -} - -// GetLastOperation - -func (e *Elastic) GetLastOperation(address, network string, indexedTime int64) (op models.Operation, err error) { - query := newQuery(). - Query( - boolQ( - must( - matchPhrase("destination", address), - matchPhrase("network", network), - ), - filter( - rangeQ("indexed_time", qItem{"lt": indexedTime}), - term("status", "applied"), - ), - notMust( - term("deffated_storage", ""), - ), - ), - ).Sort("indexed_time", "desc").One() - - var response SearchResponse - if err = e.query([]string{DocOperations}, query, &response); err != nil { - return - } - - if response.Hits.Total.Value == 0 { - return op, NewRecordNotFoundError(DocOperations, "", query) - } - err = json.Unmarshal(response.Hits.Hits[0].Source, &op) - op.ID = response.Hits.Hits[0].ID - return -} - -type operationAddresses struct { - Source string `json:"source"` - Destination string `json:"destination"` -} - -// GetAffectedContracts - -func (e *Elastic) GetAffectedContracts(network string, fromLevel, toLevel int64) ([]string, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - rangeQ("level", qItem{ - "lte": fromLevel, - "gt": toLevel, - }), - ), - ), - ) - - var response SearchResponse - if err := e.query([]string{DocOperations}, query, &response); err != nil { - return nil, err - } - - if response.Hits.Total.Value == 0 { - return nil, nil - } - - exists := make(map[string]struct{}) - addresses := 
make([]string, 0) - for i := range response.Hits.Hits { - var op operationAddresses - if err := json.Unmarshal(response.Hits.Hits[i].Source, &op); err != nil { - return nil, err - } - if _, ok := exists[op.Source]; !ok && helpers.IsContract(op.Source) { - addresses = append(addresses, op.Source) - exists[op.Source] = struct{}{} - } - if _, ok := exists[op.Destination]; !ok && helpers.IsContract(op.Destination) { - addresses = append(addresses, op.Destination) - exists[op.Destination] = struct{}{} - } - } - - return addresses, nil -} - -// GetOperations - -func (e *Elastic) GetOperations(filters map[string]interface{}, size int64, sort bool) ([]models.Operation, error) { - operations := make([]models.Operation, 0) - - query := filtersToQuery(filters) - - if sort { - query.Add(qItem{ - "sort": qItem{ - "_script": qItem{ - "type": "number", - "script": qItem{ - "lang": "painless", - "source": "doc['level'].value * 10000000000L + (doc['counter'].value) * 1000L + (doc['internal'].value ? (998L - doc['nonce'].value) : 999L)", - }, - "order": "desc", - }, - }, - }) - } - - scrollSize := size - if defaultScrollSize < scrollSize || scrollSize == 0 { - scrollSize = defaultScrollSize - } - - ctx := newScrollContext(e, query, size, scrollSize) - err := ctx.get(&operations) - return operations, err -} - -// GetContract24HoursVolume - -func (e *Elastic) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { - query := newQuery().Query( - boolQ( - filter( - boolQ( - should( - matchPhrase("destination", address), - matchPhrase("source", address), - ), - minimumShouldMatch(1), - ), - term("network", network), - term("status", consts.Applied), - rangeQ("timestamp", qItem{ - "lte": "now", - "gt": "now-24h", - }), - in("entrypoint.keyword", entrypoints), - ), - ), - ).Add( - aggs( - aggItem{"volume", sum("amount")}, - ), - ).Zero() - - var response aggVolumeSumResponse - if err := e.query([]string{DocOperations}, query, &response); err != nil { - 
return 0, err - } - - return response.Aggs.Result.Value, nil -} - -// OperationsStats - -type OperationsStats struct { - Count int64 - LastAction time.Time -} - -type getOperationsStatsResponse struct { - Aggs struct { - OPG struct { - Value int64 `json:"value"` - } `json:"opg"` - LastAction struct { - Value time.Time `json:"value_as_string"` - } `json:"last_action"` - } `json:"aggregations"` -} - -// GetOperationsStats - -func (e Elastic) GetOperationsStats(network, address string) (stats OperationsStats, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - boolQ( - should( - matchPhrase("source", address), - matchPhrase("destination", address), - ), - minimumShouldMatch(1), - ), - ), - ), - ).Add( - aggs( - aggItem{ - "opg", count("hash.keyword"), - }, - aggItem{ - "last_action", max("timestamp"), - }, - ), - ).Zero() - - var response getOperationsStatsResponse - if err = e.query([]string{DocOperations}, query, &response); err != nil { - return - } - - stats.Count = response.Aggs.OPG.Value - stats.LastAction = response.Aggs.LastAction.Value - return -} diff --git a/internal/elastic/projects.go b/internal/elastic/projects.go deleted file mode 100644 index a1c3d9348..000000000 --- a/internal/elastic/projects.go +++ /dev/null @@ -1,255 +0,0 @@ -package elastic - -import ( - "math/rand" - "time" - - "github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -type getProjectsResponse struct { - Agg struct { - Projects struct { - Buckets []struct { - Bucket - Last struct { - Hits HitsArray `json:"hits"` - } `json:"last"` - } `json:"buckets"` - } `json:"projects"` - } `json:"aggregations"` -} - -// GetProjectsLastContract - -func (e *Elastic) GetProjectsLastContract() ([]models.Contract, error) { - query := newQuery().Add( - aggs( - aggItem{ - "projects", qItem{ - "terms": qItem{ - "field": "project_id.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "last": topHits(1, "timestamp", 
"desc"), - }, - }, - }, - ), - ).Sort("timestamp", "desc").Zero() - - var response getProjectsResponse - if err := e.query([]string{DocContracts}, query, &response); err != nil { - return nil, err - } - - if len(response.Agg.Projects.Buckets) == 0 { - return nil, NewRecordNotFoundError(DocContracts, "", query) - } - - contracts := make([]models.Contract, len(response.Agg.Projects.Buckets)) - for i := range response.Agg.Projects.Buckets { - if err := json.Unmarshal(response.Agg.Projects.Buckets[i].Last.Hits.Hits[0].Source, &contracts[i]); err != nil { - return nil, err - } - } - return contracts, nil -} - -// GetSameContracts - -func (e *Elastic) GetSameContracts(c models.Contract, size, offset int64) (pcr SameContractsResponse, err error) { - if c.Fingerprint == nil { - return pcr, errors.Errorf("Invalid contract data") - } - - if size == 0 { - size = defaultSize - } else if size+offset > maxQuerySize { - size = maxQuerySize - offset - } - - q := newQuery().Query( - boolQ( - filter( - matchPhrase("hash", c.Hash), - ), - notMust( - matchPhrase("address", c.Address), - ), - ), - ).Sort("last_action", "desc").Size(size).From(offset) - - var response SearchResponse - if err = e.query([]string{DocContracts}, q, &response); err != nil { - return - } - - if len(response.Hits.Hits) == 0 { - return pcr, NewRecordNotFoundError(DocContracts, "", q) - } - - contracts := make([]models.Contract, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err = json.Unmarshal(response.Hits.Hits[i].Source, &contracts[i]); err != nil { - return - } - } - pcr.Contracts = contracts - pcr.Count = response.Hits.Total.Value - return -} - -// GetSimilarContracts - -func (e *Elastic) GetSimilarContracts(c models.Contract, size, offset int64) (pcr []SimilarContract, total int, err error) { - if c.Fingerprint == nil { - return - } - - if size == 0 { - size = defaultSize - } else if size+offset > maxQuerySize { - size = maxQuerySize - offset - } - - query := newQuery().Query( - 
boolQ( - filter( - matchPhrase("project_id", c.ProjectID), - ), - notMust( - matchQ("hash.keyword", c.Hash), - ), - ), - ).Add( - aggs( - aggItem{ - "projects", - qItem{ - "terms": qItem{ - "field": "hash.keyword", - "size": size + offset, - "order": qItem{ - "bucketsSort": "desc", - }, - }, - "aggs": qItem{ - "last": topHits(1, "last_action", "desc"), - "bucketsSort": max("last_action"), - }, - }, - }, - ), - ).Zero() - - var response getProjectsResponse - if err = e.query([]string{DocContracts}, query, &response); err != nil { - return - } - - total = len(response.Agg.Projects.Buckets) - if len(response.Agg.Projects.Buckets) == 0 { - return - } - - contracts := make([]SimilarContract, 0) - arr := response.Agg.Projects.Buckets[offset:] - for _, item := range arr { - var contract models.Contract - if err = json.Unmarshal(item.Last.Hits.Hits[0].Source, &contract); err != nil { - return - } - - similar := SimilarContract{ - Contract: &contract, - Count: item.DocCount, - } - contracts = append(contracts, similar) - } - return contracts, total, nil -} - -type getDiffTasksResponse struct { - Agg struct { - Projects struct { - Buckets []struct { - Bucket - Last struct { - Hits HitsArray `json:"hits"` - } `json:"last"` - ByHash struct { - Buckets []struct { - Bucket - Last struct { - Hits HitsArray `json:"hits"` - } `json:"last"` - } `json:"buckets"` - } `json:"by_hash"` - } `json:"buckets"` - } `json:"by_project"` - } `json:"aggregations"` -} - -// GetDiffTasks - -func (e *Elastic) GetDiffTasks() ([]DiffTask, error) { - query := newQuery().Add( - aggs( - aggItem{ - "by_project", qItem{ - "terms": qItem{ - "field": "project_id.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "by_hash": qItem{ - "terms": qItem{ - "field": "hash.keyword", - "size": maxQuerySize, - }, - "aggs": qItem{ - "last": topHits(1, "last_action", "desc"), - }, - }, - }, - }, - }, - ), - ).Zero() - - var response getDiffTasksResponse - if err := e.query([]string{DocContracts}, query, 
&response); err != nil { - return nil, err - } - - tasks := make([]DiffTask, 0) - for _, bucket := range response.Agg.Projects.Buckets { - if len(bucket.ByHash.Buckets) < 2 { - continue - } - - similar := bucket.ByHash.Buckets - for i := 0; i < len(similar)-1; i++ { - var current models.Contract - if err := json.Unmarshal(similar[i].Last.Hits.Hits[0].Source, ¤t); err != nil { - return nil, err - } - for j := i + 1; j < len(similar); j++ { - var next models.Contract - if err := json.Unmarshal(similar[j].Last.Hits.Hits[0].Source, &next); err != nil { - return nil, err - } - - tasks = append(tasks, DiffTask{ - Network1: current.Network, - Address1: current.Address, - Network2: next.Network, - Address2: next.Address, - }) - } - } - } - - rand.Seed(time.Now().Unix()) - rand.Shuffle(len(tasks), func(i, j int) { tasks[i], tasks[j] = tasks[j], tasks[i] }) - return tasks, nil -} diff --git a/internal/elastic/protocol.go b/internal/elastic/protocol.go deleted file mode 100644 index 9e80f630a..000000000 --- a/internal/elastic/protocol.go +++ /dev/null @@ -1,70 +0,0 @@ -package elastic - -import ( - "github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" -) - -// GetProtocol - returns current protocol for `network` and `level` (`hash` is optional, leave empty string for default) -func (e *Elastic) GetProtocol(network, hash string, level int64) (p models.Protocol, err error) { - filters := []qItem{ - matchQ("network", network), - } - if level > -1 { - filters = append(filters, - rangeQ("start_level", qItem{ - "lte": level, - }), - ) - } - if hash != "" { - filters = append(filters, - matchQ("hash", hash), - ) - } - - query := newQuery().Query( - boolQ( - filter(filters...), - ), - ).Sort("start_level", "desc").One() - - var response SearchResponse - if err = e.query([]string{DocProtocol}, query, &response); err != nil { - return - } - if response.Hits.Total.Value == 0 { - err = errors.Errorf("Couldn't find a protocol for %s (hash = %s) at level 
%d", network, hash, level) - return - } - err = json.Unmarshal(response.Hits.Hits[0].Source, &p) - return -} - -// GetSymLinks - returns list of symlinks in `network` after `level` -func (e *Elastic) GetSymLinks(network string, level int64) (map[string]struct{}, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - rangeQ("start_level", qItem{ - "gt": level, - }), - ), - ), - ).Sort("start_level", "desc").All() - var response SearchResponse - if err := e.query([]string{DocProtocol}, query, &response); err != nil { - return nil, err - } - - symMap := make(map[string]struct{}) - for _, hit := range response.Hits.Hits { - var p models.Protocol - if err := json.Unmarshal(hit.Source, &p); err != nil { - return nil, err - } - symMap[p.SymLink] = struct{}{} - } - return symMap, nil -} diff --git a/internal/elastic/protocol/storage.go b/internal/elastic/protocol/storage.go new file mode 100644 index 000000000..e4b0bf458 --- /dev/null +++ b/internal/elastic/protocol/storage.go @@ -0,0 +1,84 @@ +package protocol + +import ( + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// GetProtocol - returns current protocol for `network` and `level` (`hash` is optional, leave empty string for default) +func (storage *Storage) GetProtocol(network, hash string, level int64) (p protocol.Protocol, err error) { + filters := []core.Item{ + core.Match("network", network), + } + if level > -1 { + filters = append(filters, + core.Range("start_level", core.Item{ + "lte": level, + }), + ) + } + if hash != "" { + filters = append(filters, + core.Match("hash", hash), + ) + } + + query := core.NewQuery().Query( + core.Bool( 
+ core.Filter(filters...), + ), + ).Sort("start_level", "desc").One() + + var response core.SearchResponse + if err = storage.es.Query([]string{models.DocProtocol}, query, &response); err != nil { + return + } + if response.Hits.Total.Value == 0 { + err = errors.Errorf("Couldn't find a protocol for %s (hash = %s) at level %d", network, hash, level) + return + } + err = json.Unmarshal(response.Hits.Hits[0].Source, &p) + return +} + +// GetSymLinks - returns list of symlinks in `network` after `level` +func (storage *Storage) GetSymLinks(network string, level int64) (map[string]struct{}, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Range("start_level", core.Item{ + "gt": level, + }), + ), + ), + ).Sort("start_level", "desc").All() + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocProtocol}, query, &response); err != nil { + return nil, err + } + + symMap := make(map[string]struct{}) + for _, hit := range response.Hits.Hits { + var p protocol.Protocol + if err := json.Unmarshal(hit.Source, &p); err != nil { + return nil, err + } + symMap[p.SymLink] = struct{}{} + } + return symMap, nil +} diff --git a/internal/elastic/query_builder.go b/internal/elastic/query_builder.go deleted file mode 100644 index 523b415af..000000000 --- a/internal/elastic/query_builder.go +++ /dev/null @@ -1,367 +0,0 @@ -package elastic - -import ( - "github.com/baking-bad/bcdhub/internal/helpers" -) - -const ( - maxQuerySize = 10000 - minQuerySize = 0 -) - -type qItem map[string]interface{} -type qList []interface{} - -func boolQ(items ...qItem) qItem { - bq := qItem{} - q := qItem{} - for i := range items { - for k, v := range items[i] { - if helpers.StringInArray(k, []string{"must", "should", "filter", "must_not", "minimum_should_match"}) { - q[k] = v - } - } - } - bq["bool"] = q - return bq -} - -//nolint -func minimumShouldMatch(value int) qItem { - return qItem{ - 
"minimum_should_match": value, - } -} - -func exists(field string) qItem { - return qItem{ - "exists": qItem{ - "field": field, - }, - } -} - -func must(items ...qItem) qItem { - return qItem{ - "must": items, - } -} - -func notMust(items ...qItem) qItem { - return qItem{ - "must_not": items, - } -} - -func should(items ...qItem) qItem { - return qItem{ - "should": items, - } -} - -func filter(items ...qItem) qItem { - return qItem{ - "filter": items, - } -} - -func rangeQ(field string, orders ...qItem) qItem { - q := qItem{} - for i := range orders { - for k, v := range orders[i] { - if helpers.StringInArray(k, []string{"lt", "gt", "lte", "gte"}) { - q[k] = v - } - } - } - return qItem{ - "range": qItem{ - field: q, - }, - } -} - -func matchPhrase(key string, value interface{}) qItem { - return qItem{ - "match_phrase": qItem{ - key: value, - }, - } -} - -func matchQ(key string, value interface{}) qItem { - return qItem{ - "match": qItem{ - key: value, - }, - } -} - -func term(key string, value interface{}) qItem { - return qItem{ - "term": qItem{ - key: value, - }, - } -} - -func in(key string, value []string) qItem { - return qItem{ - "terms": qItem{ - key: value, - }, - } -} - -type aggItem struct { - name string - body qItem -} - -func aggs(items ...aggItem) qItem { - body := qItem{} - for i := range items { - body[items[i].name] = items[i].body - } - return qItem{ - "aggs": body, - } -} - -func cardinality(field string) qItem { - return qItem{ - "cardinality": qItem{ - "field": field, - }, - } -} - -func avg(field string) qItem { - return qItem{ - "avg": qItem{ - "field": field, - }, - } -} - -func termsAgg(field string, size int64) qItem { - t := qItem{ - "field": field, - } - if size > 0 { - t["size"] = size - } - return qItem{ - "terms": t, - } -} - -func composite(size int64, items ...aggItem) qItem { - body := make([]qItem, 0) - for i := range items { - body = append(body, qItem{ - items[i].name: items[i].body, - }) - } - return qItem{ - "composite": 
qItem{ - "sources": body, - "size": size, - }, - } -} - -//nolint -func topHits(size int, sortField, order string) qItem { - return qItem{ - "top_hits": qItem{ - "size": size, - "sort": sort(sortField, order), - }, - } -} - -func sort(field, order string) qItem { - return qItem{ - field: qItem{ - "order": order, - }, - } -} - -func max(field string) qItem { - return qItem{ - "max": qItem{ - "field": field, - }, - } -} - -func min(field string) qItem { - return qItem{ - "min": qItem{ - "field": field, - }, - } -} - -func sum(field string) qItem { - return qItem{ - "sum": qItem{ - "field": field, - }, - } -} - -func count(field string) qItem { - return qItem{ - "value_count": qItem{ - "field": field, - }, - } -} - -// nolint -func maxBucket(bucketsPath string) qItem { - return qItem{ - "max_bucket": qItem{ - "buckets_path": bucketsPath, - }, - } -} - -// nolint -func minBucket(bucketsPath string) qItem { - return qItem{ - "min_bucket": qItem{ - "buckets_path": bucketsPath, - }, - } -} - -func queryString(text string, fields []string) qItem { - queryS := qItem{ - "query": text, - } - if len(fields) > 0 { - queryS["fields"] = fields - } - return qItem{ - "query_string": queryS, - } -} - -func (q qItem) Append(key string, value interface{}) qItem { - q[key] = value - return q -} - -func (q qItem) Extend(item qItem) qItem { - for k, v := range item { - q[k] = v - } - return q -} - -func (q qItem) Get(name string) qItem { - if val, ok := q[name]; ok { - if typ, ok := val.(qItem); ok { - return typ - } - return nil - } - return nil -} - -type base qItem - -func newQuery() base { - return base{} -} - -func (q base) Size(size int64) base { - if size != 0 { - q["size"] = size - } - return q -} - -func (q base) All() base { - q["size"] = maxQuerySize - return q -} - -func (q base) One() base { - q["size"] = 1 - return q -} - -func (q base) Zero() base { - q["size"] = minQuerySize - return q -} - -func (q base) From(from int64) base { - if from != 0 { - q["from"] = from - } - 
return q -} - -func (q base) Query(item qItem) base { - q["query"] = item - return q -} - -func (q base) Sort(key, order string) base { - q["sort"] = qItem{ - key: qItem{ - "order": order, - }, - } - return q -} - -func (q base) SearchAfter(value []interface{}) base { - q["search_after"] = value - return q -} - -func (q base) Add(items ...qItem) base { - for _, item := range items { - for k, v := range item { - q[k] = v - } - } - return q -} - -func (q base) Source(items ...qItem) base { - qi := qItem{} - for i := range items { - for k, v := range items[i] { - if helpers.StringInArray(k, []string{"excludes", "includes"}) { - qi[k] = v - } - } - } - q["_source"] = qi - return q -} - -func (q base) Highlights(highlights qItem) base { - q["highlight"] = qItem{ - "fields": highlights, - } - return q -} - -func (q base) Get(name string) qItem { - if val, ok := q[name]; ok { - if typ, ok := val.(qItem); ok { - return typ - } - return nil - } - return nil -} diff --git a/internal/elastic/range.go b/internal/elastic/range.go deleted file mode 100644 index a0cc4898e..000000000 --- a/internal/elastic/range.go +++ /dev/null @@ -1,45 +0,0 @@ -package elastic - -// Range - -type Range struct { - Comparator string - Value int64 -} - -// NewRange - -func NewRange(cmp string, value int64) Range { - return Range{ - Comparator: cmp, - Value: value, - } -} - -func (rng Range) build() qItem { - return rangeQ("level", qItem{ - rng.Comparator: rng.Value, - }) -} - -func (rng Range) isFilled() bool { - return rng.Comparator != "" && rng.Value > 0 -} - -// NewGreaterThanRange - -func NewGreaterThanRange(value int64) Range { - return NewRange("gt", value) -} - -// NewGreaterThanEqRange - -func NewGreaterThanEqRange(value int64) Range { - return NewRange("gte", value) -} - -// NewLessThanRange - -func NewLessThanRange(value int64) Range { - return NewRange("lt", value) -} - -// NewLessThanEqRange - -func NewLessThanEqRange(value int64) Range { - return NewRange("lte", value) -} diff --git 
a/internal/elastic/responses.go b/internal/elastic/responses.go deleted file mode 100644 index a81b78ff8..000000000 --- a/internal/elastic/responses.go +++ /dev/null @@ -1,106 +0,0 @@ -package elastic - -import ( - stdJSON "encoding/json" - "fmt" - - "github.com/baking-bad/bcdhub/internal/models" -) - -// Header - -type Header struct { - Took int64 `json:"took"` - TimedOut bool `json:"timed_out"` -} - -// SearchResponse - -type SearchResponse struct { - ScrollID string `json:"_scroll_id,omitempty"` - Took int `json:"took,omitempty"` - TimedOut *bool `json:"timed_out,omitempty"` - Hits *HitsArray `json:"hits,omitempty"` -} - -// HitsArray - -type HitsArray struct { - Total struct { - Value int64 `json:"value"` - Relation string `json:"relation"` - } `json:"total"` - Hits []Hit `json:"hits"` -} - -// Hit - -type Hit struct { - ID string `json:"_id"` - Index string `json:"_index"` - Source stdJSON.RawMessage `json:"_source"` - Score float64 `json:"_score"` - Type string `json:"_type"` - Highlight map[string][]string `json:"highlight,omitempty"` -} - -// DeleteByQueryResponse - -type DeleteByQueryResponse struct { - Header - Total int64 `json:"total"` - Deleted int64 `json:"deleted"` - VersionConflicts int64 `json:"version_conflicts"` -} - -// TestConnectionResponse - -type TestConnectionResponse struct { - Version struct { - Number string `json:"number"` - } `json:"version"` -} - -// GetResponse - -type GetResponse struct { - Index string `json:"_index"` - Type string `json:"_type"` - ID string `json:"_id"` - Found bool `json:"found"` - Source stdJSON.RawMessage `json:"_source"` -} - -// BulkResponse - -type BulkResponse struct { - Took int64 `json:"took"` - Errors bool `json:"errors"` -} - -// Repository - -type Repository struct { - ID string `json:"id"` - Type string `json:"type"` -} - -// String - -func (repo Repository) String() string { - return fmt.Sprintf("%s (type: %s)", repo.ID, repo.Type) -} - -// Bucket - -type Bucket struct { - Key string 
`json:"key"` - DocCount int64 `json:"doc_count"` -} - -type intValue struct { - Value int64 `json:"value"` -} - -type floatValue struct { - Value float64 `json:"value"` -} - -type sqlResponse struct { - Rows [][]interface{} `json:"rows"` -} - -// DomainsResponse - -type DomainsResponse struct { - Domains []models.TezosDomain `json:"domains"` - Total int64 `json:"total"` -} diff --git a/internal/elastic/schema/storage.go b/internal/elastic/schema/storage.go new file mode 100644 index 000000000..48fcc048b --- /dev/null +++ b/internal/elastic/schema/storage.go @@ -0,0 +1,23 @@ +package schema + +import ( + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models/schema" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// Get - +func (storage *Storage) Get(address string) (schema.Schema, error) { + data := schema.Schema{ID: address} + err := storage.es.GetByID(&data) + return data, err +} diff --git a/internal/elastic/stats.go b/internal/elastic/stats.go deleted file mode 100644 index 77a87c7c1..000000000 --- a/internal/elastic/stats.go +++ /dev/null @@ -1,141 +0,0 @@ -package elastic - -// GetNetworkCountStats - -func (e *Elastic) GetNetworkCountStats(network string) (map[string]int64, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - should( - exists("entrypoint"), - exists("fingerprint"), - ), - minimumShouldMatch(1), - ), - ).Add( - aggs( - aggItem{ - "body", - termsAgg("_index", maxQuerySize), - }, - ), - ).Zero() - - return e.getCountAgg([]string{DocContracts, DocOperations}, query) -} - -// GetCallsCountByNetwork - -func (e *Elastic) GetCallsCountByNetwork() (map[string]int64, error) { - query := newQuery().Query(exists("entrypoint")).Add( - aggs( - aggItem{ - "body", qItem{ - "terms": qItem{ - "field": "network.keyword", - }, - }, - }, - ), - ).Zero() - 
- return e.getCountAgg([]string{DocOperations}, query) -} - -type getContractStatsByNetworkStats struct { - Agg struct { - Network struct { - Buckets []struct { - Bucket - Same intValue `json:"same"` - Balance floatValue `json:"balance"` - TotalWithdrawn floatValue `json:"total_withdrawn"` - } `json:"buckets"` - } `json:"network"` - } `json:"aggregations"` -} - -// GetContractStatsByNetwork - -func (e *Elastic) GetContractStatsByNetwork() (map[string]ContractCountStats, error) { - query := newQuery().Add( - aggs( - aggItem{ - "network", qItem{ - "terms": qItem{ - "field": "network.keyword", - }, - "aggs": qItem{ - "same": qItem{ - "cardinality": qItem{ - "script": "doc['fingerprint.parameter'].value + '|' + doc['fingerprint.storage'].value + '|' + doc['fingerprint.code'].value", - }, - }, - "balance": sum("balance"), - "total_withdrawn": sum("total_withdrawn"), - }, - }, - }, - ), - ).Zero() - - var response getContractStatsByNetworkStats - if err := e.query([]string{DocContracts}, query, &response); err != nil { - return nil, err - } - - counts := make(map[string]ContractCountStats) - for _, item := range response.Agg.Network.Buckets { - counts[item.Key] = ContractCountStats{ - Total: item.DocCount, - SameCount: item.Same.Value, - Balance: int64(item.Balance.Value), - TotalWithdrawn: int64(item.TotalWithdrawn.Value), - } - } - return counts, nil -} - -// GetFACountByNetwork - -func (e *Elastic) GetFACountByNetwork() (map[string]int64, error) { - query := newQuery().Query( - in("tags", []string{ - "fa1", - "fa12", - }), - ).Add( - aggs( - aggItem{ - "body", qItem{ - "terms": qItem{ - "field": "network.keyword", - }, - }, - }, - ), - ).Zero() - - return e.getCountAgg([]string{DocContracts}, query) -} - -// GetLanguagesForNetwork - -func (e *Elastic) GetLanguagesForNetwork(network string) (map[string]int64, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - ), - ).Add( - aggs( - aggItem{ - "body", qItem{ - "terms": qItem{ - 
"field": "language.keyword", - }, - }, - }, - ), - ).Zero() - - return e.getCountAgg([]string{DocContracts}, query) -} diff --git a/internal/elastic/tezosdomain/storage.go b/internal/elastic/tezosdomain/storage.go new file mode 100644 index 000000000..b10649fc3 --- /dev/null +++ b/internal/elastic/tezosdomain/storage.go @@ -0,0 +1,83 @@ +package tezosdomain + +import ( + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// ListDomains - +func (storage *Storage) ListDomains(network string, size, offset int64) (tezosdomain.DomainsResponse, error) { + if size > consts.DefaultScrollSize { + size = consts.DefaultScrollSize + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + ), + ).Size(size).From(offset).Sort("timestamp", "desc") + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTezosDomains}, query, &response); err != nil { + return tezosdomain.DomainsResponse{}, err + } + if response.Hits.Total.Value == 0 { + return tezosdomain.DomainsResponse{}, nil + } + + domains := make([]tezosdomain.TezosDomain, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &domains[i]); err != nil { + return tezosdomain.DomainsResponse{}, err + } + } + return tezosdomain.DomainsResponse{ + Domains: domains, + Total: response.Hits.Total.Value, + }, nil +} + +// ResolveDomainByAddress - +func (storage *Storage) ResolveDomainByAddress(network string, address string) 
(*tezosdomain.TezosDomain, error) { + if !helpers.IsAddress(address) { + return nil, errors.Errorf("Invalid address: %s", address) + } + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("address", address), + ), + ), + ).One() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTezosDomains}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTezosDomains, "") + } + + var td tezosdomain.TezosDomain + err := json.Unmarshal(response.Hits.Hits[0].Source, &td) + return &td, err +} diff --git a/internal/elastic/token_balance.go b/internal/elastic/token_balance.go deleted file mode 100644 index 893095c21..000000000 --- a/internal/elastic/token_balance.go +++ /dev/null @@ -1,94 +0,0 @@ -package elastic - -import ( - "bytes" - "context" - "fmt" - - "github.com/baking-bad/bcdhub/internal/models" - "github.com/elastic/go-elasticsearch/v8/esapi" -) - -const scriptUpdateBalance = `{"source": "ctx._source.balance = ctx._source.balance + (long)params.delta", "lang": "painless", "params": { "delta": %d }}` - -// UpdateTokenBalances - -func (e *Elastic) UpdateTokenBalances(updates []*models.TokenBalance) error { - if len(updates) == 0 { - return nil - } - bulk := bytes.NewBuffer([]byte{}) - for i := range updates { - bulk.WriteString(fmt.Sprintf(`{ "update": { "_id": "%s"}}`, updates[i].GetID())) - bulk.WriteByte('\n') - - script := fmt.Sprintf(scriptUpdateBalance, updates[i].Balance) - - upsert, err := json.Marshal(updates[i]) - if err != nil { - return err - } - - bulk.WriteString(fmt.Sprintf(`{ "script": %s, "upsert": %s }`, script, string(upsert))) - bulk.WriteByte('\n') - if (i%1000 == 0 && i > 0) || i == len(updates)-1 { - if err := e.bulkUpsertBalances(bulk); err != nil { - return err - } - bulk.Reset() - } - } - return nil -} - -func (e *Elastic) bulkUpsertBalances(buf 
*bytes.Buffer) error { - req := esapi.BulkRequest{ - Body: bytes.NewReader(buf.Bytes()), - Refresh: "true", - Index: DocTokenBalances, - } - - res, err := req.Do(context.Background(), e) - if err != nil { - return err - } - defer res.Body.Close() - - var response BulkResponse - return e.getResponse(res, &response) -} - -// GetHolders - -func (e *Elastic) GetHolders(network, contract string, tokenID int64) ([]models.TokenBalance, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("contract", contract), - term("token_id", tokenID), - ), - notMust( - term("balance", 0), - ), - ), - ).All() - - balances := make([]models.TokenBalance, 0) - err := e.getAllByQuery(query, &balances) - return balances, err -} - -// GetAccountBalances - -func (e *Elastic) GetAccountBalances(network, address string) ([]models.TokenBalance, error) { - query := newQuery().Query( - boolQ( - filter( - matchPhrase("address", address), - matchQ("network", network), - ), - ), - ).All() - - tokenBalances := make([]models.TokenBalance, 0) - err := e.getAllByQuery(query, &tokenBalances) - return tokenBalances, err -} diff --git a/internal/elastic/tokenbalance/storage.go b/internal/elastic/tokenbalance/storage.go new file mode 100644 index 000000000..b8c078150 --- /dev/null +++ b/internal/elastic/tokenbalance/storage.go @@ -0,0 +1,107 @@ +package tokenbalance + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/elastic/go-elasticsearch/v8/esapi" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +const scriptUpdateBalance = `{"source": "ctx._source.balance = ctx._source.balance + (long)params.delta", "lang": "painless", "params": { 
"delta": %d }}` + +// Update - +func (storage *Storage) Update(updates []*tokenbalance.TokenBalance) error { + if len(updates) == 0 { + return nil + } + bulk := bytes.NewBuffer([]byte{}) + for i := range updates { + bulk.WriteString(fmt.Sprintf(`{ "update": { "_id": "%s"}}`, updates[i].GetID())) + bulk.WriteByte('\n') + + script := fmt.Sprintf(scriptUpdateBalance, updates[i].Balance) + + upsert, err := json.Marshal(updates[i]) + if err != nil { + return err + } + + bulk.WriteString(fmt.Sprintf(`{ "script": %s, "upsert": %s }`, script, string(upsert))) + bulk.WriteByte('\n') + if (i%1000 == 0 && i > 0) || i == len(updates)-1 { + if err := storage.bulkUpsertBalances(bulk); err != nil { + return err + } + bulk.Reset() + } + } + return nil +} + +func (storage *Storage) bulkUpsertBalances(buf *bytes.Buffer) error { + req := esapi.BulkRequest{ + Body: bytes.NewReader(buf.Bytes()), + Refresh: "true", + Index: models.DocTokenBalances, + } + + res, err := req.Do(context.Background(), storage.es) + if err != nil { + return err + } + defer res.Body.Close() + + var response core.BulkResponse + return storage.es.GetResponse(res, &response) +} + +// GetHolders - +func (storage *Storage) GetHolders(network, contract string, tokenID int64) ([]tokenbalance.TokenBalance, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("contract", contract), + core.Term("token_id", tokenID), + ), + core.MustNot( + core.Term("balance", 0), + ), + ), + ).All() + + balances := make([]tokenbalance.TokenBalance, 0) + err := storage.es.GetAllByQuery(query, &balances) + return balances, err +} + +// GetAccountBalances - +func (storage *Storage) GetAccountBalances(network, address string) ([]tokenbalance.TokenBalance, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.MatchPhrase("address", address), + core.Match("network", network), + ), + ), + ).All() + + tokenBalances := make([]tokenbalance.TokenBalance, 
0) + err := storage.es.GetAllByQuery(query, &tokenBalances) + return tokenBalances, err +} diff --git a/internal/elastic/tokens.go b/internal/elastic/tokens.go deleted file mode 100644 index 3c5dd5d1b..000000000 --- a/internal/elastic/tokens.go +++ /dev/null @@ -1,477 +0,0 @@ -package elastic - -import ( - "strconv" - "strings" - - "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/models" - "github.com/baking-bad/bcdhub/internal/models/tzip" - "github.com/pkg/errors" -) - -// GetTokens - -func (e *Elastic) GetTokens(network, tokenInterface string, offset, size int64) ([]models.Contract, int64, error) { - tags := []string{"fa12", "fa1", "fa2"} - if tokenInterface == "fa12" || tokenInterface == "fa1" || tokenInterface == "fa2" { - tags = []string{tokenInterface} - } - - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - in("tags", tags), - ), - ), - ).Sort("timestamp", "desc").Size(size) - - if offset != 0 { - query = query.From(offset) - } - - var response SearchResponse - if err := e.query([]string{DocContracts}, query, &response); err != nil { - return nil, 0, err - } - - contracts := make([]models.Contract, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &contracts[i]); err != nil { - return nil, 0, err - } - } - return contracts, response.Hits.Total.Value, nil -} - -type getTokensStatsResponse struct { - Aggs struct { - Body struct { - Buckets []struct { - DocCount int64 `json:"doc_count"` - Key struct { - Destination string `json:"destination"` - Entrypoint string `json:"entrypoint"` - } `json:"key"` - AVG floatValue `json:"average_consumed_gas"` - } `json:"buckets"` - } `json:"body"` - } `json:"aggregations"` -} - -// GetTokensStats - -func (e *Elastic) GetTokensStats(network string, addresses, entrypoints []string) (map[string]TokenUsageStats, error) { - addressFilters := 
make([]qItem, len(addresses)) - for i := range addresses { - addressFilters[i] = matchPhrase("destination", addresses[i]) - } - - entrypointFilters := make([]qItem, len(entrypoints)) - for i := range entrypoints { - entrypointFilters[i] = matchPhrase("entrypoint", entrypoints[i]) - } - - query := newQuery().Query( - boolQ( - must( - matchQ("network", network), - boolQ( - should(addressFilters...), - minimumShouldMatch(1), - ), - boolQ( - should(entrypointFilters...), - minimumShouldMatch(1), - ), - ), - ), - ).Add( - aggs( - aggItem{ - "body", - composite( - maxQuerySize, - aggItem{ - "destination", termsAgg("destination.keyword", 0), - }, - aggItem{ - "entrypoint", termsAgg("entrypoint.keyword", 0), - }, - ).Extend( - aggs( - aggItem{ - "average_consumed_gas", avg("result.consumed_gas"), - }, - ), - ), - }, - ), - ).Zero() - - var response getTokensStatsResponse - if err := e.query([]string{DocOperations}, query, &response); err != nil { - return nil, err - } - - usageStats := make(map[string]TokenUsageStats) - for _, bucket := range response.Aggs.Body.Buckets { - usage := TokenMethodUsageStats{ - Count: bucket.DocCount, - ConsumedGas: int64(bucket.AVG.Value), - } - - if _, ok := usageStats[bucket.Key.Destination]; !ok { - usageStats[bucket.Key.Destination] = make(TokenUsageStats) - } - usageStats[bucket.Key.Destination][bucket.Key.Entrypoint] = usage - } - - return usageStats, nil -} - -type getTokenVolumeSeriesResponse struct { - Agg struct { - Hist struct { - Buckets []struct { - Key int64 `json:"key"` - Result floatValue `json:"result"` - } `json:"buckets"` - } `json:"hist"` - } `json:"aggregations"` -} - -// GetTokenVolumeSeries - -func (e *Elastic) GetTokenVolumeSeries(network, period string, contracts []string, entrypoints []tzip.DAppContract, tokenID uint) ([][]int64, error) { - hist := qItem{ - "date_histogram": qItem{ - "field": "timestamp", - "calendar_interval": period, - }, - } - - hist.Append("aggs", qItem{ - "result": qItem{ - "sum": qItem{ - 
"field": "amount", - }, - }, - }) - - matches := []qItem{ - matchQ("network", network), - matchQ("status", "applied"), - term("token_id", tokenID), - } - if len(contracts) > 0 { - addresses := make([]qItem, len(contracts)) - for i := range contracts { - addresses[i] = matchPhrase("contract", contracts[i]) - } - matches = append(matches, boolQ( - should(addresses...), - minimumShouldMatch(1), - )) - } - - if len(entrypoints) > 0 { - addresses := make([]qItem, 0) - for i := range entrypoints { - for j := range entrypoints[i].DexVolumeEntrypoints { - addresses = append(addresses, boolQ( - filter( - matchPhrase("initiator", entrypoints[i].Address), - matchQ("parent", entrypoints[i].DexVolumeEntrypoints[j]), - ), - )) - } - } - matches = append(matches, boolQ( - should(addresses...), - minimumShouldMatch(1), - )) - } - - query := newQuery().Query( - boolQ( - filter( - matches..., - ), - ), - ).Add( - aggs(aggItem{"hist", hist}), - ).Zero() - - var response getTokenVolumeSeriesResponse - if err := e.query([]string{DocTransfers}, query, &response); err != nil { - return nil, err - } - - histogram := make([][]int64, len(response.Agg.Hist.Buckets)) - for i := range response.Agg.Hist.Buckets { - item := []int64{ - response.Agg.Hist.Buckets[i].Key, - int64(response.Agg.Hist.Buckets[i].Result.Value), - } - histogram[i] = item - } - return histogram, nil -} - -// TokenBalance - -type TokenBalance struct { - Address string - TokenID int64 -} - -// GetBalances - -func (e *Elastic) GetBalances(network, contract string, level int64, addresses ...TokenBalance) (map[TokenBalance]int64, error) { - filters := []qItem{ - matchQ("network", network), - } - - if contract != "" { - filters = append(filters, matchPhrase("contract", contract)) - } - - if level > 0 { - filters = append(filters, rangeQ("level", qItem{ - "lt": level, - })) - } - - b := boolQ( - filter(filters...), - ) - - if len(addresses) > 0 { - addressFilters := make([]qItem, 0) - - for _, a := range addresses { - 
addressFilters = append(addressFilters, boolQ( - filter( - matchPhrase("from", a.Address), - term("token_id", a.TokenID), - ), - )) - } - - b.Get("bool").Extend( - should(addressFilters...), - ) - b.Get("bool").Extend(minimumShouldMatch(1)) - } - - query := newQuery().Query(b).Add( - qItem{ - "aggs": qItem{ - "balances": qItem{ - "scripted_metric": qItem{ - "init_script": "state.balances = [:]", - "map_script": ` - if (!state.balances.containsKey(doc['from.keyword'].value)) { - state.balances[doc['from.keyword'].value + '_' + doc['token_id'].value] = doc['amount'].value; - } else { - state.balances[doc['from.keyword'].value + '_' + doc['token_id'].value] = state.balances[doc['from.keyword'].value + '_' + doc['token_id'].value] - doc['amount'].value; - } - - if (!state.balances.containsKey(doc['to.keyword'].value)) { - state.balances[doc['to.keyword'].value + '_' + doc['token_id'].value] = doc['amount'].value; - } else { - state.balances[doc['to.keyword'].value + '_' + doc['token_id'].value] = state.balances[doc['to.keyword'].value + '_' + doc['token_id'].value] + doc['amount'].value; - } - `, - "combine_script": ` - Map balances = [:]; - for (entry in state.balances.entrySet()) { - if (!balances.containsKey(entry.getKey())) { - balances[entry.getKey()] = entry.getValue(); - } else { - balances[entry.getKey()] = balances[entry.getKey()] + entry.getValue(); - } - } - return balances; - `, - "reduce_script": ` - Map balances = [:]; - for (state in states) { - for (entry in state.entrySet()) { - if (!balances.containsKey(entry.getKey())) { - balances[entry.getKey()] = entry.getValue(); - } else { - balances[entry.getKey()] = balances[entry.getKey()] + entry.getValue(); - } - } - } - return balances; - `, - }, - }, - }, - }, - ).Zero() - var response getAccountBalancesResponse - if err := e.query([]string{DocTransfers}, query, &response); err != nil { - return nil, err - } - - balances := make(map[TokenBalance]int64) - for key, balance := range 
response.Agg.Balances.Value { - parts := strings.Split(key, "_") - if len(parts) != 2 { - return nil, errors.Errorf("Invalid addressToken key split size: %d", len(parts)) - } - tokenID, err := strconv.ParseInt(parts[1], 10, 64) - if err != nil { - return nil, err - } - balances[TokenBalance{ - Address: parts[0], - TokenID: tokenID, - }] = int64(balance) - } - return balances, nil -} - -type getAccountBalancesResponse struct { - Agg struct { - Balances struct { - Value map[string]float64 `json:"value"` - } `json:"balances"` - } `json:"aggregations"` -} - -// TokenSupply - -type TokenSupply struct { - Supply float64 `json:"supply"` - Transfered float64 `json:"transfered"` -} - -type getTokenSupplyResponse struct { - Aggs struct { - Result struct { - Value struct { - Supply float64 `json:"supply"` - Transfered float64 `json:"transfered"` - } `json:"value"` - } `json:"result"` - } `json:"aggregations"` -} - -// GetTokenSupply - -func (e *Elastic) GetTokenSupply(network, address string, tokenID int64) (result TokenSupply, err error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("contract", address), - term("token_id", tokenID), - matchQ("status", "applied"), - ), - ), - ).Add( - qItem{ - "aggs": qItem{ - "result": qItem{ - "scripted_metric": qItem{ - "init_script": `state.result = ["supply":0, "transfered":0]`, - "map_script": ` - if (doc['from.keyword'].value == "") { - state.result["supply"] = state.result["supply"] + doc["amount"].value; - } else if (doc['to.keyword'].value == "") { - state.result["supply"] = state.result["supply"] - doc["amount"].value; - } else { - state.result["transfered"] = state.result["transfered"] + doc["amount"].value; - }`, - "combine_script": `return state.result`, - "reduce_script": ` - Map result = ["supply":0, "transfered":0]; - for (state in states) { - result["transfered"] = result["transfered"] + state["transfered"]; - result["supply"] = result["supply"] + state["supply"]; - } - return 
result; - `, - }, - }, - }, - }, - ).Zero() - - var response getTokenSupplyResponse - if err = e.query([]string{DocTransfers}, query, &response); err != nil { - return - } - - result.Supply = response.Aggs.Result.Value.Supply - result.Transfered = response.Aggs.Result.Value.Transfered - return -} - -// CreateTokenBalanceUpdates - -func CreateTokenBalanceUpdates(tokensStorage ITokens, transfers []*models.Transfer) error { - exists := make(map[string]*models.TokenBalance) - updates := make([]*models.TokenBalance, 0) - for i := range transfers { - idFrom := transfers[i].GetFromTokenBalanceID() - if idFrom != "" { - if update, ok := exists[idFrom]; ok { - update.Balance -= int64(transfers[i].Amount) - } else { - upd := transfers[i].MakeTokenBalanceUpdate(true, false) - updates = append(updates, upd) - exists[idFrom] = upd - } - } - idTo := transfers[i].GetToTokenBalanceID() - if idTo != "" { - if update, ok := exists[idTo]; ok { - update.Balance += int64(transfers[i].Amount) - } else { - upd := transfers[i].MakeTokenBalanceUpdate(false, false) - updates = append(updates, upd) - exists[idTo] = upd - } - } - } - - return tokensStorage.UpdateTokenBalances(updates) -} - -type aggVolumeSumResponse struct { - Aggs struct { - Result struct { - Value float64 `json:"value"` - } `json:"volume"` - } `json:"aggregations"` -} - -// GetToken24HoursVolume - returns token volume for last 24 hours -func (e *Elastic) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { - query := newQuery().Query( - boolQ( - filter( - term("contract.keyword", contract), - term("network", network), - term("status", consts.Applied), - term("token_id", tokenID), - rangeQ("timestamp", qItem{ - "lte": "now", - "gt": "now-24h", - }), - in("parent.keyword", entrypoints), - in("initiator.keyword", initiators), - ), - ), - ).Add( - aggs( - aggItem{"volume", sum("amount")}, - ), - ).Zero() - - var response aggVolumeSumResponse - if err := 
e.query([]string{DocTransfers}, query, &response); err != nil { - return 0, err - } - - return response.Aggs.Result.Value, nil -} diff --git a/internal/elastic/transfer/context.go b/internal/elastic/transfer/context.go new file mode 100644 index 000000000..9b2c43aff --- /dev/null +++ b/internal/elastic/transfer/context.go @@ -0,0 +1,164 @@ +package transfer + +import ( + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models/transfer" +) + +func buildGetContext(ctx transfer.GetContext) core.Base { + query := core.NewQuery() + filters := make([]core.Item, 0) + + if f := filterNetwork(ctx); f != nil { + filters = append(filters, f) + } + if f := filterAddress(ctx); f != nil { + filters = append(filters, f) + } + if f := filterTime(ctx); f != nil { + filters = append(filters, f) + } + if f := filterCursor(ctx); f != nil { + filters = append(filters, f) + } + if f := filterContracts(ctx); f != nil { + filters = append(filters, f) + } + if f := filterTokenID(ctx); f != nil { + filters = append(filters, f) + } + if f := filterHash(ctx); f != nil { + filters = append(filters, f) + } + if f := filterCounter(ctx); f != nil { + filters = append(filters, f) + } + if f := filterNonce(ctx); f != nil { + filters = append(filters, f) + } + + query.Query( + core.Bool( + core.Filter( + filters..., + ), + ), + ) + appendSort(ctx, query) + appendOffset(ctx, query) + appendSize(ctx, query) + return query +} + +func filterNetwork(ctx transfer.GetContext) core.Item { + if ctx.Network != "" { + return core.Match("network", ctx.Network) + } + return nil +} + +func filterHash(ctx transfer.GetContext) core.Item { + if ctx.Hash != "" { + return core.MatchPhrase("hash", ctx.Hash) + } + return nil +} + +func filterAddress(ctx transfer.GetContext) core.Item { + if ctx.Address == "" { + return nil + } + + return core.Bool( + core.Should( + core.MatchPhrase("from", ctx.Address), 
+ core.MatchPhrase("to", ctx.Address), + ), + core.MinimumShouldMatch(1), + ) +} + +func filterTokenID(ctx transfer.GetContext) core.Item { + if ctx.TokenID >= 0 { + return core.Term("token_id", ctx.TokenID) + } + return nil +} + +func filterTime(ctx transfer.GetContext) core.Item { + ts := core.Item{} + if ctx.Start > 0 { + ts["gte"] = ctx.Start + } + if ctx.End > 0 { + ts["lt"] = ctx.End + } + if len(ts) > 0 { + return core.Range("timestamp", ts) + } + return nil +} + +func filterCursor(ctx transfer.GetContext) core.Item { + if ctx.LastID != "" { + eq := "lt" + if ctx.SortOrder == "asc" { + eq = "gt" + } + return core.Range("indexed_time", core.Item{eq: ctx.LastID}) + } + return nil +} + +func filterContracts(ctx transfer.GetContext) core.Item { + if len(ctx.Contracts) == 0 { + return nil + } + + shouldItems := make([]core.Item, len(ctx.Contracts)) + for i := range ctx.Contracts { + shouldItems[i] = core.MatchPhrase("contract", ctx.Contracts[i]) + } + + return core.Bool( + core.Should(shouldItems...), + core.MinimumShouldMatch(1), + ) +} + +func filterCounter(ctx transfer.GetContext) core.Item { + if ctx.Counter != nil { + return core.Term("counter", *ctx.Counter) + } + return nil +} + +func filterNonce(ctx transfer.GetContext) core.Item { + if ctx.Nonce != nil { + return core.Term("nonce", *ctx.Nonce) + } + return nil +} + +func appendSize(ctx transfer.GetContext, query core.Base) { + if ctx.Size > 0 && ctx.Size <= maxTransfersSize { + query.Size(ctx.Size) + } else { + query.Size(maxTransfersSize) + } +} + +func appendOffset(ctx transfer.GetContext, query core.Base) { + if ctx.Offset > 0 && ctx.Offset <= maxTransfersSize { + query.From(ctx.Offset) + } +} + +func appendSort(ctx transfer.GetContext, query core.Base) { + if helpers.StringInArray(ctx.SortOrder, []string{"desc", "asc"}) { + query.Sort("timestamp", ctx.SortOrder) + } else { + query.Sort("timestamp", "desc") + } +} diff --git a/internal/elastic/transfer/data.go b/internal/elastic/transfer/data.go new 
file mode 100644 index 000000000..38544e696 --- /dev/null +++ b/internal/elastic/transfer/data.go @@ -0,0 +1,33 @@ +package transfer + +import "github.com/baking-bad/bcdhub/internal/elastic/core" + +type getTokenSupplyResponse struct { + Aggs struct { + Result struct { + Value struct { + Supply float64 `json:"supply"` + Transfered float64 `json:"transfered"` + } `json:"value"` + } `json:"result"` + } `json:"aggregations"` +} + +type aggVolumeSumResponse struct { + Aggs struct { + Result struct { + Value float64 `json:"value"` + } `json:"volume"` + } +} + +type getTokenVolumeSeriesResponse struct { + Agg struct { + Hist struct { + Buckets []struct { + Key int64 `json:"key"` + Result core.FloatValue `json:"result"` + } `json:"buckets"` + } `json:"hist"` + } `json:"aggregations"` +} diff --git a/internal/elastic/transfer/storage.go b/internal/elastic/transfer/storage.go new file mode 100644 index 000000000..435fe3e0b --- /dev/null +++ b/internal/elastic/transfer/storage.go @@ -0,0 +1,223 @@ +package transfer + +import ( + "encoding/json" + "fmt" + + "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +const ( + maxTransfersSize = 10000 +) + +// Get - +func (storage *Storage) Get(ctx transfer.GetContext) (po transfer.Pageable, err error) { + query := buildGetContext(ctx) + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTransfers}, query, &response); err != nil { + return po, err + } + + hits := response.Hits.Hits + transfers := make([]transfer.Transfer, len(hits)) + for i := range hits { + if err := 
json.Unmarshal(hits[i].Source, &transfers[i]); err != nil { + return po, err + } + } + po.Transfers = transfers + po.Total = response.Hits.Total.Value + if len(transfers) > 0 { + po.LastID = fmt.Sprintf("%d", transfers[len(transfers)-1].IndexedTime) + } + return po, nil +} + +// GetAll - +func (storage *Storage) GetAll(network string, level int64) ([]transfer.Transfer, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Range("level", core.Item{"gt": level}), + ), + ), + ) + + transfers := make([]transfer.Transfer, 0) + err := storage.es.GetAllByQuery(query, &transfers) + return transfers, err +} + +// GetTokenSupply - +func (storage *Storage) GetTokenSupply(network, address string, tokenID int64) (result transfer.TokenSupply, err error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("contract", address), + core.Term("token_id", tokenID), + core.Match("status", "applied"), + ), + ), + ).Add( + core.Item{ + "aggs": core.Item{ + "result": core.Item{ + "scripted_metric": core.Item{ + "init_script": `state.result = ["supply":0, "transfered":0]`, + "map_script": ` + if (doc['from.keyword'].value == "") { + state.result["supply"] = state.result["supply"] + doc["amount"].value; + } else if (doc['to.keyword'].value == "") { + state.result["supply"] = state.result["supply"] - doc["amount"].value; + } else { + state.result["transfered"] = state.result["transfered"] + doc["amount"].value; + }`, + "combine_script": `return state.result`, + "reduce_script": ` + Map result = ["supply":0, "transfered":0]; + for (state in states) { + result["transfered"] = result["transfered"] + state["transfered"]; + result["supply"] = result["supply"] + state["supply"]; + } + return result; + `, + }, + }, + }, + }, + ).Zero() + + var response getTokenSupplyResponse + if err = storage.es.Query([]string{models.DocTransfers}, query, &response); err != nil { + return + } 
+ + result.Supply = response.Aggs.Result.Value.Supply + result.Transfered = response.Aggs.Result.Value.Transfered + return +} + +// GetToken24HoursVolume - returns token volume for last 24 hours +func (storage *Storage) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Term("contract.keyword", contract), + core.Term("network", network), + core.Term("status", consts.Applied), + core.Term("token_id", tokenID), + core.Range("timestamp", core.Item{ + "lte": "now", + "gt": "now-24h", + }), + core.In("parent.keyword", entrypoints), + core.In("initiator.keyword", initiators), + ), + ), + ).Add( + core.Aggs( + core.AggItem{Name: "volume", Body: core.Sum("amount")}, + ), + ).Zero() + + var response aggVolumeSumResponse + if err := storage.es.Query([]string{models.DocTransfers}, query, &response); err != nil { + return 0, err + } + + return response.Aggs.Result.Value, nil +} + +// GetTokenVolumeSeries - +func (storage *Storage) GetTokenVolumeSeries(network, period string, contracts []string, entrypoints []tzip.DAppContract, tokenID uint) ([][]int64, error) { + hist := core.Item{ + "date_histogram": core.Item{ + "field": "timestamp", + "calendar_interval": period, + }, + } + + hist.Append("aggs", core.Item{ + "result": core.Item{ + "sum": core.Item{ + "field": "amount", + }, + }, + }) + + matches := []core.Item{ + core.Match("network", network), + core.Match("status", "applied"), + core.Term("token_id", tokenID), + } + if len(contracts) > 0 { + addresses := make([]core.Item, len(contracts)) + for i := range contracts { + addresses[i] = core.MatchPhrase("contract", contracts[i]) + } + matches = append(matches, core.Bool( + core.Should(addresses...), + core.MinimumShouldMatch(1), + )) + } + + if len(entrypoints) > 0 { + addresses := make([]core.Item, 0) + for i := range entrypoints { + for j := range entrypoints[i].DexVolumeEntrypoints { + 
addresses = append(addresses, core.Bool( + core.Filter( + core.MatchPhrase("initiator", entrypoints[i].Address), + core.Match("parent", entrypoints[i].DexVolumeEntrypoints[j]), + ), + )) + } + } + matches = append(matches, core.Bool( + core.Should(addresses...), + core.MinimumShouldMatch(1), + )) + } + + query := core.NewQuery().Query( + core.Bool( + core.Filter( + matches..., + ), + ), + ).Add( + core.Aggs(core.AggItem{Name: "hist", Body: hist}), + ).Zero() + + var response getTokenVolumeSeriesResponse + if err := storage.es.Query([]string{models.DocTransfers}, query, &response); err != nil { + return nil, err + } + + histogram := make([][]int64, len(response.Agg.Hist.Buckets)) + for i := range response.Agg.Hist.Buckets { + item := []int64{ + response.Agg.Hist.Buckets[i].Key, + int64(response.Agg.Hist.Buckets[i].Result.Value), + } + histogram[i] = item + } + return histogram, nil +} diff --git a/internal/elastic/transfers.go b/internal/elastic/transfers.go deleted file mode 100644 index 1fc5cab52..000000000 --- a/internal/elastic/transfers.go +++ /dev/null @@ -1,207 +0,0 @@ -package elastic - -import ( - "fmt" - - "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" -) - -const ( - maxTransfersSize = 10000 -) - -// GetTransfersContext - -type GetTransfersContext struct { - Contracts []string - Network string - Address string - Hash string - Start uint - End uint - SortOrder string - LastID string - Size int64 - Offset int64 - TokenID int64 - Nonce *int64 - Counter *int64 - - query base - filters []qItem -} - -func (ctx *GetTransfersContext) buildQuery() base { - ctx.query = newQuery() - ctx.filters = make([]qItem, 0) - - ctx.filterNetwork() - ctx.filterAddress() - ctx.filterTime() - ctx.filterCursor() - ctx.filterContracts() - ctx.filterTokenID() - ctx.filterCounter() - ctx.filterNonce() - ctx.filterHash() - - ctx.query.Query( - boolQ( - filter( - ctx.filters..., - ), - ), - ) - ctx.appendSort() - 
ctx.appendOffset() - ctx.appendSize() - return ctx.query -} - -func (ctx *GetTransfersContext) filterNetwork() { - if ctx.Network != "" { - ctx.filters = append(ctx.filters, matchQ("network", ctx.Network)) - } -} - -func (ctx *GetTransfersContext) filterHash() { - if ctx.Hash != "" { - ctx.filters = append(ctx.filters, matchPhrase("hash", ctx.Hash)) - } -} - -func (ctx *GetTransfersContext) filterAddress() { - if ctx.Address == "" { - return - } - - ctx.filters = append(ctx.filters, boolQ( - should( - matchPhrase("from", ctx.Address), - matchPhrase("to", ctx.Address), - ), - minimumShouldMatch(1), - )) -} - -func (ctx *GetTransfersContext) filterTokenID() { - if ctx.TokenID >= 0 { - ctx.filters = append(ctx.filters, term("token_id", ctx.TokenID)) - } -} - -func (ctx *GetTransfersContext) filterCounter() { - if ctx.Counter != nil { - ctx.filters = append(ctx.filters, term("counter", *ctx.Counter)) - } -} - -func (ctx *GetTransfersContext) filterNonce() { - if ctx.Nonce != nil { - ctx.filters = append(ctx.filters, term("nonce", *ctx.Nonce)) - } -} - -func (ctx *GetTransfersContext) filterTime() { - ts := qItem{} - if ctx.Start > 0 { - ts["gte"] = ctx.Start - } - if ctx.End > 0 { - ts["lt"] = ctx.End - } - if len(ts) > 0 { - ctx.filters = append(ctx.filters, rangeQ("timestamp", ts)) - } -} - -func (ctx *GetTransfersContext) filterCursor() { - if ctx.LastID != "" { - eq := "lt" - if ctx.SortOrder == "asc" { - eq = "gt" - } - ctx.filters = append(ctx.filters, rangeQ("indexed_time", qItem{eq: ctx.LastID})) - } -} - -func (ctx *GetTransfersContext) filterContracts() { - if len(ctx.Contracts) == 0 { - return - } - - shouldItems := make([]qItem, len(ctx.Contracts)) - for i := range ctx.Contracts { - shouldItems[i] = matchPhrase("contract", ctx.Contracts[i]) - } - - ctx.filters = append(ctx.filters, boolQ( - should(shouldItems...), - minimumShouldMatch(1), - )) -} - -func (ctx *GetTransfersContext) appendSize() { - if ctx.Size > 0 && ctx.Size <= maxTransfersSize { - 
ctx.query.Size(ctx.Size) - } else { - ctx.query.Size(maxTransfersSize) - } -} - -func (ctx *GetTransfersContext) appendOffset() { - if ctx.Offset > 0 && ctx.Offset <= maxTransfersSize { - ctx.query.From(ctx.Offset) - } -} - -func (ctx *GetTransfersContext) appendSort() { - if helpers.StringInArray(ctx.SortOrder, []string{"desc", "asc"}) { - ctx.query.Sort("timestamp", ctx.SortOrder) - } else { - ctx.query.Sort("timestamp", "desc") - } -} - -// GetTransfers - -func (e *Elastic) GetTransfers(ctx GetTransfersContext) (TransfersResponse, error) { - query := ctx.buildQuery() - - po := TransfersResponse{} - - var response SearchResponse - if err := e.query([]string{DocTransfers}, query, &response); err != nil { - return po, err - } - - hits := response.Hits.Hits - transfers := make([]models.Transfer, len(hits)) - for i := range hits { - if err := json.Unmarshal(hits[i].Source, &transfers[i]); err != nil { - return po, err - } - transfers[i].ID = hits[i].ID - } - po.Transfers = transfers - po.Total = response.Hits.Total.Value - if len(transfers) > 0 { - po.LastID = fmt.Sprintf("%d", transfers[len(transfers)-1].IndexedTime) - } - return po, nil -} - -// GetAllTransfers - -func (e *Elastic) GetAllTransfers(network string, level int64) ([]models.Transfer, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - rangeQ("level", qItem{"gt": level}), - ), - ), - ) - - transfers := make([]models.Transfer, 0) - err := e.getAllByQuery(query, &transfers) - return transfers, err -} diff --git a/internal/elastic/tzip.go b/internal/elastic/tzip.go deleted file mode 100644 index eaa8b0674..000000000 --- a/internal/elastic/tzip.go +++ /dev/null @@ -1,280 +0,0 @@ -package elastic - -import ( - "github.com/baking-bad/bcdhub/internal/models" - "github.com/baking-bad/bcdhub/internal/models/tzip" -) - -// GetTokenMetadataContext - -type GetTokenMetadataContext struct { - Contract string - Network string - TokenID int64 - Level Range -} - -func 
(ctx GetTokenMetadataContext) buildQuery() base { - filters := make([]qItem, 0) - - if ctx.Contract != "" { - filters = append(filters, matchPhrase("address", ctx.Contract)) - } - if ctx.Network != "" { - filters = append(filters, matchQ("network", ctx.Network)) - } - if ctx.Level.isFilled() { - filters = append(filters, ctx.Level.build()) - } - if ctx.TokenID != -1 { - filters = append(filters, term( - "tokens.static.token_id", ctx.TokenID, - )) - } - return newQuery().Query( - boolQ( - filter(filters...), - ), - ).All() -} - -// TokenMetadata - -type TokenMetadata struct { - Address string - Network string - Level int64 - Symbol string - Name string - TokenID int64 - Decimals *int64 - RegistryAddress string - Extras map[string]interface{} -} - -// GetTokenMetadata - -func (e *Elastic) GetTokenMetadata(ctx GetTokenMetadataContext) (tokens []TokenMetadata, err error) { - tzips := make([]models.TZIP, 0) - query := ctx.buildQuery() - if err = e.getAllByQuery(query, &tzips); err != nil { - return - } - if len(tzips) == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - tokens = make([]TokenMetadata, 0) - for _, tzip := range tzips { - if tzip.Tokens == nil { - continue - } - - for i := range tzip.Tokens.Static { - tokens = append(tokens, TokenMetadata{ - Address: tzip.Address, - Network: tzip.Network, - Level: tzip.Level, - RegistryAddress: tzip.Tokens.Static[i].RegistryAddress, - Symbol: tzip.Tokens.Static[i].Symbol, - Name: tzip.Tokens.Static[i].Name, - Decimals: tzip.Tokens.Static[i].Decimals, - TokenID: tzip.Tokens.Static[i].TokenID, - Extras: tzip.Tokens.Static[i].Extras, - }) - } - } - return -} - -// GetTZIP - -func (e *Elastic) GetTZIP(network, address string) (t models.TZIP, err error) { - t.Address = address - t.Network = network - err = e.GetByID(&t) - return -} - -// GetDApps - -func (e *Elastic) GetDApps() ([]tzip.DApp, error) { - query := newQuery().Query( - boolQ( - filter( - exists("dapps"), - ), - ), - ).Sort("dapps.order", 
"asc").All() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response, "dapps"); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - tokens := make([]tzip.DApp, 0) - for _, hit := range response.Hits.Hits { - var model models.TZIP - if err := json.Unmarshal(hit.Source, &model); err != nil { - return nil, err - } - tokens = append(tokens, model.DApps...) - } - - return tokens, nil -} - -// GetDAppBySlug - -func (e *Elastic) GetDAppBySlug(slug string) (*tzip.DApp, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("dapps.slug", slug), - ), - ), - ).One() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response, "dapps"); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - var model models.TZIP - if err := json.Unmarshal(response.Hits.Hits[0].Source, &model); err != nil { - return nil, err - } - return &model.DApps[0], nil -} - -// GetBySlug - -func (e *Elastic) GetBySlug(slug string) (*models.TZIP, error) { - query := newQuery().Query( - boolQ( - filter( - term("slug.keyword", slug), - ), - ), - ).One() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - var data models.TZIP - err := json.Unmarshal(response.Hits.Hits[0].Source, &data) - return &data, err -} - -// GetAliasesMap - -func (e *Elastic) GetAliasesMap(network string) (map[string]string, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - ), - ), - ).All() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, 
NewRecordNotFoundError(DocTZIP, "", query) - } - - aliases := make(map[string]string) - for _, hit := range response.Hits.Hits { - var data models.TZIP - if err := json.Unmarshal(hit.Source, &data); err != nil { - return nil, err - } - aliases[data.Address] = data.Name - } - - return aliases, nil -} - -// GetAliases - -func (e *Elastic) GetAliases(network string) ([]models.TZIP, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - exists("name"), - ), - ), - ).All() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - aliases := make([]models.TZIP, len(response.Hits.Hits)) - for i := range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &aliases[i]); err != nil { - return nil, err - } - } - return aliases, nil -} - -// GetAlias - -func (e *Elastic) GetAlias(network, address string) (*models.TZIP, error) { - query := newQuery().Query( - boolQ( - filter( - matchQ("network", network), - matchPhrase("address", address), - ), - ), - ).One() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - var data models.TZIP - err := json.Unmarshal(response.Hits.Hits[0].Source, &data) - return &data, err -} - -// GetTZIPWithEvents - -func (e *Elastic) GetTZIPWithEvents() ([]models.TZIP, error) { - query := newQuery().Query( - boolQ( - filter( - exists("events"), - ), - ), - ).All() - - var response SearchResponse - if err := e.query([]string{DocTZIP}, query, &response); err != nil { - return nil, err - } - if response.Hits.Total.Value == 0 { - return nil, NewRecordNotFoundError(DocTZIP, "", query) - } - - tokens := make([]models.TZIP, len(response.Hits.Hits)) - for i := 
range response.Hits.Hits { - if err := json.Unmarshal(response.Hits.Hits[i].Source, &tokens[i]); err != nil { - return nil, err - } - } - return tokens, nil -} diff --git a/internal/elastic/tzip/context.go b/internal/elastic/tzip/context.go new file mode 100644 index 000000000..30de2dec5 --- /dev/null +++ b/internal/elastic/tzip/context.go @@ -0,0 +1,30 @@ +package tzip + +import ( + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +func buildGetTokenMetadataContext(ctx tzip.GetTokenMetadataContext) interface{} { + filters := make([]core.Item, 0) + + if ctx.Contract != "" { + filters = append(filters, core.MatchPhrase("address", ctx.Contract)) + } + if ctx.Network != "" { + filters = append(filters, core.Match("network", ctx.Network)) + } + if ctx.Level.IsFilled() { + filters = append(filters, core.BuildComparator(ctx.Level)) + } + if ctx.TokenID != -1 { + filters = append(filters, core.Term( + "tokens.static.token_id", ctx.TokenID, + )) + } + return core.NewQuery().Query( + core.Bool( + core.Filter(filters...), + ), + ).All() +} diff --git a/internal/elastic/tzip/storage.go b/internal/elastic/tzip/storage.go new file mode 100644 index 000000000..548acc860 --- /dev/null +++ b/internal/elastic/tzip/storage.go @@ -0,0 +1,248 @@ +package tzip + +import ( + "encoding/json" + + "github.com/baking-bad/bcdhub/internal/elastic/core" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +// Storage - +type Storage struct { + es *core.Elastic +} + +// NewStorage - +func NewStorage(es *core.Elastic) *Storage { + return &Storage{es} +} + +// GetTokenMetadata - +func (storage *Storage) GetTokenMetadata(ctx tzip.GetTokenMetadataContext) (tokens []tzip.TokenMetadata, err error) { + tzips := make([]tzip.TZIP, 0) + query := buildGetTokenMetadataContext(ctx) + if err = storage.es.GetAllByQuery(query.(core.Base), &tzips); err != 
nil { + return + } + if len(tzips) == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + tokens = make([]tzip.TokenMetadata, 0) + for k := range tzips { + if tzips[k].Tokens == nil { + continue + } + + for i := range tzips[k].Tokens.Static { + tokens = append(tokens, tzip.TokenMetadata{ + Address: tzips[k].Address, + Network: tzips[k].Network, + Level: tzips[k].Level, + RegistryAddress: tzips[k].Tokens.Static[i].RegistryAddress, + Symbol: tzips[k].Tokens.Static[i].Symbol, + Name: tzips[k].Tokens.Static[i].Name, + Decimals: tzips[k].Tokens.Static[i].Decimals, + TokenID: tzips[k].Tokens.Static[i].TokenID, + Extras: tzips[k].Tokens.Static[i].Extras, + }) + } + } + return +} + +// Get - +func (storage *Storage) Get(network, address string) (t tzip.TZIP, err error) { + t.Address = address + t.Network = network + err = storage.es.GetByID(&t) + return +} + +// GetDApps - +func (storage *Storage) GetDApps() ([]tzip.DApp, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Exists("dapps"), + ), + ), + ).Sort("dapps.order", "asc").All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response, "dapps"); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + tokens := make([]tzip.DApp, 0) + for _, hit := range response.Hits.Hits { + var model tzip.TZIP + if err := json.Unmarshal(hit.Source, &model); err != nil { + return nil, err + } + tokens = append(tokens, model.DApps...) 
+ } + + return tokens, nil +} + +// GetDAppBySlug - +func (storage *Storage) GetDAppBySlug(slug string) (*tzip.DApp, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("dapps.slug", slug), + ), + ), + ).One() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response, "dapps"); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + var model tzip.TZIP + if err := json.Unmarshal(response.Hits.Hits[0].Source, &model); err != nil { + return nil, err + } + return &model.DApps[0], nil +} + +// GetBySlug - +func (storage *Storage) GetBySlug(slug string) (*tzip.TZIP, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Term("slug.keyword", slug), + ), + ), + ).One() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + var data tzip.TZIP + err := json.Unmarshal(response.Hits.Hits[0].Source, &data) + return &data, err +} + +// GetAliasesMap - +func (storage *Storage) GetAliasesMap(network string) (map[string]string, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + ), + ), + ).All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + aliases := make(map[string]string) + for _, hit := range response.Hits.Hits { + var data tzip.TZIP + if err := json.Unmarshal(hit.Source, &data); err != nil { + return nil, err + } + aliases[data.Address] = data.Name + } + + return aliases, nil +} + +// GetAliases - +func (storage *Storage) 
GetAliases(network string) ([]tzip.TZIP, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.Exists("name"), + ), + ), + ).All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + aliases := make([]tzip.TZIP, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &aliases[i]); err != nil { + return nil, err + } + } + return aliases, nil +} + +// GetAlias - +func (storage *Storage) GetAlias(network, address string) (*tzip.TZIP, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Match("network", network), + core.MatchPhrase("address", address), + ), + ), + ).One() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + var data tzip.TZIP + err := json.Unmarshal(response.Hits.Hits[0].Source, &data) + return &data, err +} + +// GetWithEvents - +func (storage *Storage) GetWithEvents() ([]tzip.TZIP, error) { + query := core.NewQuery().Query( + core.Bool( + core.Filter( + core.Exists("events"), + ), + ), + ).All() + + var response core.SearchResponse + if err := storage.es.Query([]string{models.DocTZIP}, query, &response); err != nil { + return nil, err + } + if response.Hits.Total.Value == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + tokens := make([]tzip.TZIP, len(response.Hits.Hits)) + for i := range response.Hits.Hits { + if err := json.Unmarshal(response.Hits.Hits[i].Source, &tokens[i]); err != nil { + return nil, err + } + } + return tokens, nil +} diff --git 
a/internal/events/michelson_extended_storage.go b/internal/events/michelson_extended_storage.go index 291dbface..55aae8d54 100644 --- a/internal/events/michelson_extended_storage.go +++ b/internal/events/michelson_extended_storage.go @@ -5,9 +5,9 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/tidwall/gjson" ) @@ -22,12 +22,12 @@ type MichelsonExtendedStorage struct { protocol string operationID string contract string - es elastic.IElastic - bmd []models.BigMapDiff + repo schema.Repository + bmd []bigmapdiff.BigMapDiff } // NewMichelsonExtendedStorage - -func NewMichelsonExtendedStorage(impl tzip.EventImplementation, name, protocol, operationID, contract string, es elastic.IElastic, bmd []models.BigMapDiff) (*MichelsonExtendedStorage, error) { +func NewMichelsonExtendedStorage(impl tzip.EventImplementation, name, protocol, operationID, contract string, repo schema.Repository, bmd []bigmapdiff.BigMapDiff) (*MichelsonExtendedStorage, error) { parser, err := GetParser(name, impl.MichelsonExtendedStorageEvent.ReturnType) if err != nil { return nil, err @@ -43,7 +43,7 @@ func NewMichelsonExtendedStorage(impl tzip.EventImplementation, name, protocol, parser: parser, protocol: protocol, operationID: operationID, - es: es, + repo: repo, bmd: bmd, contract: contract, }, nil @@ -68,7 +68,7 @@ func (mes *MichelsonExtendedStorage) Normalize(value string) gjson.Result { return gjson.Parse(value) } - metadata, err := 
meta.GetMetadata(mes.es, mes.contract, consts.STORAGE, mes.protocol) + metadata, err := meta.GetMetadata(mes.repo, mes.contract, consts.STORAGE, mes.protocol) if err != nil { logger.Error(err) return gjson.Parse(value) diff --git a/internal/handlers/interface.go b/internal/handlers/interface.go index af08d5cf8..855c5d0dc 100644 --- a/internal/handlers/interface.go +++ b/internal/handlers/interface.go @@ -1,8 +1,8 @@ package handlers -import "github.com/baking-bad/bcdhub/internal/elastic" +import "github.com/baking-bad/bcdhub/internal/models" // Handler - type Handler interface { - Do(model elastic.Model) (bool, error) + Do(model models.Model) (bool, error) } diff --git a/internal/handlers/tezos_domains.go b/internal/handlers/tezos_domains.go index d896b0e04..2582a5794 100644 --- a/internal/handlers/tezos_domains.go +++ b/internal/handlers/tezos_domains.go @@ -6,8 +6,11 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/unpack" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" "github.com/pkg/errors" "github.com/tidwall/gjson" ) @@ -19,28 +22,29 @@ const ( // TezosDomain - type TezosDomain struct { - es elastic.IElastic + storage models.GeneralRepository + schemaRepo schema.Repository - contracts map[elastic.Address]struct{} - metadata map[elastic.Address]meta.Metadata + contracts map[contract.Address]struct{} + metadata map[contract.Address]meta.Metadata } // NewTezosDomains - -func NewTezosDomains(es elastic.IElastic, contracts map[string]string) 
*TezosDomain { - addresses := make(map[elastic.Address]struct{}) +func NewTezosDomains(storage models.GeneralRepository, schemaRepo schema.Repository, contracts map[string]string) *TezosDomain { + addresses := make(map[contract.Address]struct{}) for k, v := range contracts { - addresses[elastic.Address{ + addresses[contract.Address{ Network: k, Address: v, }] = struct{}{} } return &TezosDomain{ - es, addresses, make(map[elastic.Address]meta.Metadata), + storage, schemaRepo, addresses, make(map[contract.Address]meta.Metadata), } } // Do - -func (td *TezosDomain) Do(model elastic.Model) (bool, error) { +func (td *TezosDomain) Do(model models.Model) (bool, error) { bmd, handler := td.getBigMapDiff(model) if bmd == nil { return false, nil @@ -54,15 +58,15 @@ func (td *TezosDomain) Do(model elastic.Model) (bool, error) { return false, nil } -func (td *TezosDomain) getBigMapDiff(model elastic.Model) (*models.BigMapDiff, string) { +func (td *TezosDomain) getBigMapDiff(model models.Model) (*bigmapdiff.BigMapDiff, string) { if len(td.contracts) == 0 { return nil, "" } - bmd, ok := model.(*models.BigMapDiff) + bmd, ok := model.(*bigmapdiff.BigMapDiff) if !ok { return nil, "" } - address := elastic.Address{ + address := contract.Address{ Address: bmd.Address, Network: bmd.Network, } @@ -86,12 +90,12 @@ func (td *TezosDomain) getBigMapDiff(model elastic.Model) (*models.BigMapDiff, s return nil, "" } -func (td *TezosDomain) getMetadata(address elastic.Address, protocol string) (meta.Metadata, error) { +func (td *TezosDomain) getMetadata(address contract.Address, protocol string) (meta.Metadata, error) { metadata, ok := td.metadata[address] if ok { return metadata, nil } - metadata, err := meta.GetMetadata(td.es, address.Address, consts.STORAGE, protocol) + metadata, err := meta.GetMetadata(td.schemaRepo, address.Address, consts.STORAGE, protocol) if err != nil { return metadata, err } @@ -99,7 +103,7 @@ func (td *TezosDomain) getMetadata(address elastic.Address, protocol 
string) (me return metadata, nil } -func (td *TezosDomain) updateRecordsTZIP(bmd *models.BigMapDiff) error { +func (td *TezosDomain) updateRecordsTZIP(bmd *bigmapdiff.BigMapDiff) error { if len(bmd.KeyStrings) == 0 || len(bmd.ValueStrings) == 0 { return errors.Errorf("Invalid tezos domains big map diff: %s", bmd.GetID()) } @@ -107,7 +111,7 @@ func (td *TezosDomain) updateRecordsTZIP(bmd *models.BigMapDiff) error { if err != nil { return err } - tezosDomain := models.TezosDomain{ + tezosDomain := tezosdomain.TezosDomain{ Network: bmd.Network, Name: bmd.KeyStrings[0], Level: bmd.Level, @@ -116,21 +120,21 @@ func (td *TezosDomain) updateRecordsTZIP(bmd *models.BigMapDiff) error { if address != nil { tezosDomain.Address = *address } - return td.es.UpdateFields(elastic.DocTezosDomains, tezosDomain.GetID(), tezosDomain, "Name", "Address", "Network", "Level", "Timestamp") + return td.storage.UpdateFields(models.DocTezosDomains, tezosDomain.GetID(), tezosDomain, "Name", "Address", "Network", "Level", "Timestamp") } -func (td *TezosDomain) updateExpirationDate(bmd *models.BigMapDiff) error { +func (td *TezosDomain) updateExpirationDate(bmd *bigmapdiff.BigMapDiff) error { if len(bmd.KeyStrings) == 0 { return errors.Errorf("Invalid tezos domains big map diff: %s", bmd.GetID()) } ts := gjson.Parse(bmd.Value).Get("int").Int() date := time.Unix(ts, 0).UTC() - tezosDomain := models.TezosDomain{ + tezosDomain := tezosdomain.TezosDomain{ Name: bmd.KeyStrings[0], Network: bmd.Network, Expiration: date, } - return td.es.UpdateFields(elastic.DocTezosDomains, tezosDomain.GetID(), tezosDomain, "Expiration") + return td.storage.UpdateFields(models.DocTezosDomains, tezosDomain.GetID(), tezosDomain, "Expiration") } func (td *TezosDomain) getAddress(value string) (*string, error) { diff --git a/internal/handlers/tzip.go b/internal/handlers/tzip.go index 955ebe0c6..11d79aaca 100644 --- a/internal/handlers/tzip.go +++ b/internal/handlers/tzip.go @@ -1,9 +1,11 @@ package handlers import ( - 
"github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/tzip" "github.com/pkg/errors" @@ -11,26 +13,26 @@ import ( // TZIP - type TZIP struct { - es elastic.IElastic + bulk models.BulkRepository parsers map[string]tzip.Parser } // NewTZIP - -func NewTZIP(es elastic.IElastic, rpcs map[string]noderpc.INode, ipfs []string) *TZIP { +func NewTZIP(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, schemaRepo schema.Repository, storage models.GeneralRepository, bulk models.BulkRepository, rpcs map[string]noderpc.INode, ipfs []string) *TZIP { parsers := make(map[string]tzip.Parser) for network, rpc := range rpcs { - parsers[network] = tzip.NewParser(es, rpc, tzip.ParserConfig{ + parsers[network] = tzip.NewParser(bigMapRepo, blockRepo, schemaRepo, storage, rpc, tzip.ParserConfig{ IPFSGateways: ipfs, }) } return &TZIP{ - es, parsers, + bulk, parsers, } } // Do - -func (t *TZIP) Do(model elastic.Model) (bool, error) { - bmd, ok := model.(*models.BigMapDiff) +func (t *TZIP) Do(model models.Model) (bool, error) { + bmd, ok := model.(*bigmapdiff.BigMapDiff) if !ok { return false, nil } @@ -40,7 +42,7 @@ func (t *TZIP) Do(model elastic.Model) (bool, error) { return true, t.handle(bmd) } -func (t *TZIP) handle(bmd *models.BigMapDiff) error { +func (t *TZIP) handle(bmd *bigmapdiff.BigMapDiff) error { tzipParser, ok := t.parsers[bmd.Network] if !ok { return errors.Errorf("Unknown network for tzip parser: %s", bmd.Network) @@ -58,5 +60,5 @@ func (t *TZIP) handle(bmd *models.BigMapDiff) error { } logger.With(bmd).Info("Big map diff with TZIP is processed") - 
return t.es.BulkInsert([]elastic.Model{model}) + return t.bulk.Insert([]models.Model{model}) } diff --git a/internal/metrics/bigmapdiff.go b/internal/metrics/bigmapdiff.go index 5a023c851..af5fb7050 100644 --- a/internal/metrics/bigmapdiff.go +++ b/internal/metrics/bigmapdiff.go @@ -4,11 +4,11 @@ import ( "encoding/json" "github.com/baking-bad/bcdhub/internal/contractparser/stringer" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" ) // SetBigMapDiffsStrings - -func (h *Handler) SetBigMapDiffsStrings(bmd *models.BigMapDiff) error { +func (h *Handler) SetBigMapDiffsStrings(bmd *bigmapdiff.BigMapDiff) error { keyBytes, err := json.Marshal(bmd.Key) if err != nil { return err diff --git a/internal/metrics/contract.go b/internal/metrics/contract.go index dd749f2c8..ffbf41b64 100644 --- a/internal/metrics/contract.go +++ b/internal/metrics/contract.go @@ -1,25 +1,24 @@ package metrics import ( - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/baking-bad/bcdhub/internal/classification/functions" clmetrics "github.com/baking-bad/bcdhub/internal/classification/metrics" - "github.com/baking-bad/bcdhub/internal/models" ) // SetContractAlias - -func (h *Handler) SetContractAlias(c *models.Contract) (bool, error) { +func (h *Handler) SetContractAlias(c *contract.Contract) (bool, error) { var changed bool if c.Alias != "" && ((c.Delegate != "" && c.DelegateAlias != "") || c.Delegate == "") { return false, nil } - aliases, err := h.ES.GetAliasesMap(c.Network) + aliases, err := h.TZIP.GetAliasesMap(c.Network) if err != nil { - if elastic.IsRecordNotFound(err) { + if h.Storage.IsRecordNotFound(err) { err = nil } return changed, err @@ -39,20 +38,20 @@ func (h *Handler) SetContractAlias(c *models.Contract) (bool, error) { } // 
UpdateContractStats - -func (h *Handler) UpdateContractStats(c *models.Contract) error { - migrationsStats, err := h.ES.GetContractMigrationStats(c.Network, c.Address) +func (h *Handler) UpdateContractStats(c *contract.Contract) error { + count, err := h.Migrations.Count(c.Network, c.Address) if err != nil { return err } - c.MigrationsCount = migrationsStats.MigrationsCount + c.MigrationsCount = count return nil } // SetContractProjectID - -func (h *Handler) SetContractProjectID(c *models.Contract) error { - buckets, err := h.ES.GetProjectsLastContract() +func (h *Handler) SetContractProjectID(c *contract.Contract) error { + buckets, err := h.Contracts.GetProjectsLastContract() if err != nil { - if elastic.IsRecordNotFound(err) { + if h.Storage.IsRecordNotFound(err) { c.ProjectID = helpers.GenerateID() return nil } @@ -64,7 +63,7 @@ func (h *Handler) SetContractProjectID(c *models.Contract) error { return nil } -func getContractProjectID(c models.Contract, buckets []models.Contract) string { +func getContractProjectID(c contract.Contract, buckets []contract.Contract) string { for i := len(buckets) - 1; i > -1; i-- { if compare(c, buckets[i]) { return buckets[i].ProjectID @@ -89,7 +88,7 @@ var model = []clmetrics.Metric{ clmetrics.NewFingerprint("code"), } -func compare(a, b models.Contract) bool { +func compare(a, b contract.Contract) bool { features := make([]float64, len(model)) for i := range model { diff --git a/internal/metrics/fingerprint.go b/internal/metrics/fingerprint.go index d115da2ec..ea221d337 100644 --- a/internal/metrics/fingerprint.go +++ b/internal/metrics/fingerprint.go @@ -8,13 +8,13 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/macros" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/pkg/errors" 
"github.com/tidwall/gjson" ) // SetFingerprint - -func SetFingerprint(script gjson.Result, contract *models.Contract) error { +func SetFingerprint(script gjson.Result, contract *contract.Contract) error { fgpt, err := GetFingerprint(script) if err != nil { return err @@ -24,12 +24,12 @@ func SetFingerprint(script gjson.Result, contract *models.Contract) error { } // GetFingerprint - -func GetFingerprint(script gjson.Result) (*models.Fingerprint, error) { +func GetFingerprint(script gjson.Result) (*contract.Fingerprint, error) { colapsed, err := macros.Collapse(script, macros.GetAllFamilies()) if err != nil { return nil, err } - fgpt := models.Fingerprint{} + fgpt := contract.Fingerprint{} code := colapsed.Get(`code.#(prim="code")`) codeFgpt, err := fingerprint(code, true) if err != nil { diff --git a/internal/metrics/metrics.go b/internal/metrics/metrics.go index a025018d7..f1aab8136 100644 --- a/internal/metrics/metrics.go +++ b/internal/metrics/metrics.go @@ -2,16 +2,49 @@ package metrics import ( "github.com/baking-bad/bcdhub/internal/database" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // Handler - type Handler struct { - ES elastic.IElastic + Contracts contract.Repository + BigMapDiffs bigmapdiff.Repository + Blocks block.Repository + Protocol protocol.Repository + Operations 
operation.Repository + Migrations migration.Repository + Schema schema.Repository + TokenBalances tokenbalance.Repository + TZIP tzip.Repository + Storage models.GeneralRepository + Bulk models.BulkRepository + DB database.DB } // New - -func New(es elastic.IElastic, db database.DB) *Handler { - return &Handler{es, db} +func New( + contracts contract.Repository, + bmdRepo bigmapdiff.Repository, + blocksRepo block.Repository, + protocolRepo protocol.Repository, + operations operation.Repository, + schemaRepo schema.Repository, + tbRepo tokenbalance.Repository, + tzipRepo tzip.Repository, + migrationRepo migration.Repository, + storage models.GeneralRepository, + bulk models.BulkRepository, + db database.DB, +) *Handler { + return &Handler{contracts, bmdRepo, blocksRepo, protocolRepo, operations, migrationRepo, schemaRepo, tbRepo, tzipRepo, storage, bulk, db} } diff --git a/internal/metrics/operation.go b/internal/metrics/operation.go index 6fda74098..7724054dc 100644 --- a/internal/metrics/operation.go +++ b/internal/metrics/operation.go @@ -6,19 +6,18 @@ import ( "time" "github.com/baking-bad/bcdhub/internal/contractparser/stringer" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/getsentry/sentry-go" ) // SetOperationAliases - -func (h *Handler) SetOperationAliases(op *models.Operation) (bool, error) { +func (h *Handler) SetOperationAliases(op *operation.Operation) (bool, error) { var changed bool - aliases, err := h.ES.GetAliasesMap(op.Network) + aliases, err := h.TZIP.GetAliasesMap(op.Network) if err != nil { - if elastic.IsRecordNotFound(err) { - err = nil + if h.Storage.IsRecordNotFound(err) { + return changed, nil } return changed, err } @@ -42,18 +41,18 @@ func (h *Handler) SetOperationAliases(op *models.Operation) (bool, error) { } // SetOperationStrings - -func (h *Handler) SetOperationStrings(op 
*models.Operation) { +func (h *Handler) SetOperationStrings(op *operation.Operation) { op.ParameterStrings = stringer.Get(op.Parameters) op.StorageStrings = stringer.Get(op.DeffatedStorage) } // SendSentryNotifications - -func (h *Handler) SendSentryNotifications(operation models.Operation) error { - if operation.Status != "failed" { +func (h *Handler) SendSentryNotifications(op operation.Operation) error { + if op.Status != "failed" { return nil } - subscriptions, err := h.DB.GetSubscriptions(operation.Destination, operation.Network) + subscriptions, err := h.DB.GetSubscriptions(op.Destination, op.Network) if err != nil { return err } @@ -64,44 +63,44 @@ func (h *Handler) SendSentryNotifications(operation models.Operation) error { defer sentry.Flush(2 * time.Second) for _, subscription := range subscriptions { - initSentry(operation.Network, subscription.SentryDSN) + initSentry(op.Network, subscription.SentryDSN) hub := sentry.CurrentHub().Clone() tags := map[string]string{ - "hash": operation.Hash, - "source": operation.Source, - "address": operation.Destination, - "kind": operation.Kind, - "block": fmt.Sprintf("%d", operation.Level), + "hash": op.Hash, + "source": op.Source, + "address": op.Destination, + "kind": op.Kind, + "block": fmt.Sprintf("%d", op.Level), "os.name": "tezos", } - if operation.Entrypoint != "" { - tags["entrypoint"] = operation.Entrypoint + if op.Entrypoint != "" { + tags["entrypoint"] = op.Entrypoint } exceptions := make([]sentry.Exception, 0) var message string - for i := range operation.Errors { - if err := operation.Errors[i].Format(); err != nil { + for i := range op.Errors { + if err := op.Errors[i].Format(); err != nil { return err } if i == 0 { - message = operation.Errors[i].GetTitle() + message = op.Errors[i].GetTitle() } exceptions = append(exceptions, sentry.Exception{ - Value: operation.Errors[i].String(), - Type: operation.Errors[i].GetTitle(), + Value: op.Errors[i].String(), + Type: op.Errors[i].GetTitle(), }) } 
hub.Client().Transport.SendEvent(&sentry.Event{ Tags: tags, - Timestamp: operation.Timestamp.Unix(), + Timestamp: op.Timestamp.Unix(), Level: sentry.LevelError, - Environment: operation.Network, + Environment: op.Network, Message: message, Exception: exceptions, Sdk: sentry.SdkInfo{ diff --git a/internal/metrics/token_metadata.go b/internal/metrics/token_metadata.go index e6bc75080..1306e0265 100644 --- a/internal/metrics/token_metadata.go +++ b/internal/metrics/token_metadata.go @@ -4,30 +4,34 @@ import ( "fmt" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" - "github.com/baking-bad/bcdhub/internal/parsers/transfer" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" "github.com/baking-bad/bcdhub/internal/parsers/tzip/tokens" "github.com/pkg/errors" ) // CreateTokenMetadata - -func (h *Handler) CreateTokenMetadata(rpc noderpc.INode, sharePath string, c *models.Contract) error { +func (h *Handler) CreateTokenMetadata(rpc noderpc.INode, sharePath string, c *contract.Contract) error { if !helpers.StringInArray(consts.FA2Tag, c.Tags) { return nil } - parser := tokens.NewTokenMetadataParser(h.ES, rpc, sharePath, c.Network) + parser := tokens.NewTokenMetadataParser(h.BigMapDiffs, h.Blocks, h.Protocol, h.Schema, h.Storage, rpc, sharePath, 
c.Network) metadata, err := parser.Parse(c.Address, c.Level) if err != nil { return err } - result := make([]elastic.Model, 0) + result := make([]models.Model, 0) for i := range metadata { tzip := metadata[i].ToModel(c.Address, c.Network) logger.With(tzip).Info("Token metadata is found") @@ -42,11 +46,11 @@ func (h *Handler) CreateTokenMetadata(rpc noderpc.INode, sharePath string, c *mo } } - return h.ES.BulkInsert(result) + return h.Bulk.Insert(result) } // FixTokenMetadata - -func (h *Handler) FixTokenMetadata(rpc noderpc.INode, sharePath string, contract *models.Contract, operation *models.Operation) error { +func (h *Handler) FixTokenMetadata(rpc noderpc.INode, sharePath string, contract *contract.Contract, operation *operation.Operation) error { if !operation.IsTransaction() || !operation.IsApplied() || !operation.IsCall() { return nil } @@ -55,21 +59,21 @@ func (h *Handler) FixTokenMetadata(rpc noderpc.INode, sharePath string, contract return nil } - tokenMetadatas, err := h.ES.GetTokenMetadata(elastic.GetTokenMetadataContext{ + tokenMetadatas, err := h.TZIP.GetTokenMetadata(tzip.GetTokenMetadataContext{ Contract: operation.Destination, Network: operation.Network, TokenID: -1, }) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !h.Storage.IsRecordNotFound(err) { return err } return nil } - result := make([]elastic.Model, 0) + result := make([]models.Model, 0) for _, tokenMetadata := range tokenMetadatas { - parser := tokens.NewTokenMetadataParser(h.ES, rpc, sharePath, operation.Network) + parser := tokens.NewTokenMetadataParser(h.BigMapDiffs, h.Blocks, h.Protocol, h.Schema, h.Storage, rpc, sharePath, operation.Network) metadata, err := parser.ParseWithRegistry(tokenMetadata.RegistryAddress, operation.Level) if err != nil { return err @@ -87,18 +91,18 @@ func (h *Handler) FixTokenMetadata(rpc noderpc.INode, sharePath string, contract return nil } - return h.ES.BulkUpdate(result) + return h.Bulk.Update(result) } // ExecuteInitialStorageEvent - -func 
(h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *models.TZIP) ([]*models.Transfer, error) { - ops, err := h.ES.GetOperations(map[string]interface{}{ +func (h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *tzip.TZIP) ([]*transfer.Transfer, error) { + ops, err := h.Operations.Get(map[string]interface{}{ "destination": tzip.Address, "network": tzip.Network, "kind": consts.Origination, }, 1, false) if err != nil { - if elastic.IsRecordNotFound(err) { + if h.Storage.IsRecordNotFound(err) { return nil, nil } return nil, err @@ -109,19 +113,19 @@ func (h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *models.TZI origination := ops[0] - protocol, err := h.ES.GetProtocol(tzip.Network, origination.Protocol, origination.Level) + protocol, err := h.Protocol.GetProtocol(tzip.Network, origination.Protocol, origination.Level) if err != nil { return nil, err } - state, err := h.ES.GetLastBlock(tzip.Network) + state, err := h.Blocks.Last(tzip.Network) if err != nil { return nil, err } - data := make([]*models.Transfer, 0) + data := make([]*transfer.Transfer, 0) - balanceUpdates := make([]*models.TokenBalance, 0) + balanceUpdates := make([]*tokenbalance.TokenBalance, 0) for i := range tzip.Events { for j := range tzip.Events[i].Implementations { if !tzip.Events[i].Implementations[j].MichelsonInitialStorageEvent.Empty() { @@ -154,7 +158,7 @@ func (h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *models.TZI return nil, err } - res, err := transfer.NewDefaultBalanceParser().Parse(balances, origination) + res, err := transferParsers.NewDefaultBalanceParser().Parse(balances, origination) if err != nil { return nil, err } @@ -162,7 +166,7 @@ func (h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *models.TZI data = append(data, res...) 
for i := range balances { - balanceUpdates = append(balanceUpdates, &models.TokenBalance{ + balanceUpdates = append(balanceUpdates, &tokenbalance.TokenBalance{ Network: tzip.Network, Address: balances[i].Address, TokenID: balances[i].TokenID, @@ -174,5 +178,5 @@ func (h *Handler) ExecuteInitialStorageEvent(rpc noderpc.INode, tzip *models.TZI } } - return data, h.ES.UpdateTokenBalances(balanceUpdates) + return data, h.TokenBalances.Update(balanceUpdates) } diff --git a/internal/metrics/transfer.go b/internal/metrics/transfer.go index 66f27bdba..312bff98b 100644 --- a/internal/metrics/transfer.go +++ b/internal/metrics/transfer.go @@ -1,17 +1,14 @@ package metrics -import ( - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" -) +import "github.com/baking-bad/bcdhub/internal/models/transfer" // SetTransferAliases - -func (h *Handler) SetTransferAliases(transfer *models.Transfer) (bool, error) { +func (h *Handler) SetTransferAliases(transfer *transfer.Transfer) (bool, error) { var changed bool - aliases, err := h.ES.GetAliasesMap(transfer.Network) + aliases, err := h.TZIP.GetAliasesMap(transfer.Network) if err != nil { - if elastic.IsRecordNotFound(err) { + if h.Storage.IsRecordNotFound(err) { err = nil } return changed, err diff --git a/internal/models/balance_update.go b/internal/models/balanceupdate/model.go similarity index 97% rename from internal/models/balance_update.go rename to internal/models/balanceupdate/model.go index 8ebf2bd29..061cad1d9 100644 --- a/internal/models/balance_update.go +++ b/internal/models/balanceupdate/model.go @@ -1,4 +1,4 @@ -package models +package balanceupdate import ( "github.com/sirupsen/logrus" diff --git a/internal/models/balanceupdate/repository.go b/internal/models/balanceupdate/repository.go new file mode 100644 index 000000000..2388b3b85 --- /dev/null +++ b/internal/models/balanceupdate/repository.go @@ -0,0 +1,6 @@ +package balanceupdate + +// 
Repository - +type Repository interface { + GetBalance(network, address string) (int64, error) +} diff --git a/internal/models/big_map_action.go b/internal/models/bigmapaction/model.go similarity index 97% rename from internal/models/big_map_action.go rename to internal/models/bigmapaction/model.go index ca6fa1850..5d07ca5f7 100644 --- a/internal/models/big_map_action.go +++ b/internal/models/bigmapaction/model.go @@ -1,4 +1,4 @@ -package models +package bigmapaction import ( "time" diff --git a/internal/models/bigmapaction/repository.go b/internal/models/bigmapaction/repository.go new file mode 100644 index 000000000..8b1cb7165 --- /dev/null +++ b/internal/models/bigmapaction/repository.go @@ -0,0 +1,6 @@ +package bigmapaction + +// Repository - +type Repository interface { + Get(ptr int64, network string) ([]BigMapAction, error) +} diff --git a/internal/models/bigmapdiff/context.go b/internal/models/bigmapdiff/context.go new file mode 100644 index 000000000..edf2900b7 --- /dev/null +++ b/internal/models/bigmapdiff/context.go @@ -0,0 +1,13 @@ +package bigmapdiff + +// GetContext - +type GetContext struct { + Network string + Ptr *int64 + Query string + Size int64 + Offset int64 + Level *int64 + + To int64 +} diff --git a/internal/models/big_map_diff.go b/internal/models/bigmapdiff/model.go similarity index 98% rename from internal/models/big_map_diff.go rename to internal/models/bigmapdiff/model.go index d943ce165..785108765 100644 --- a/internal/models/big_map_diff.go +++ b/internal/models/bigmapdiff/model.go @@ -1,4 +1,4 @@ -package models +package bigmapdiff import ( "time" diff --git a/internal/models/bigmapdiff/repository.go b/internal/models/bigmapdiff/repository.go new file mode 100644 index 000000000..7ef989203 --- /dev/null +++ b/internal/models/bigmapdiff/repository.go @@ -0,0 +1,16 @@ +package bigmapdiff + +// Repository - +type Repository interface { + Get(ctx GetContext) ([]BigMapDiff, error) + GetByAddress(string, string) ([]BigMapDiff, error) + 
GetByOperationID(string) ([]*BigMapDiff, error) + GetByPtr(string, string, int64) ([]BigMapDiff, error) + GetByPtrAndKeyHash(int64, string, string, int64, int64) ([]BigMapDiff, int64, error) + GetForAddress(string) ([]BigMapDiff, error) + GetValuesByKey(string) ([]BigMapDiff, error) + GetUniqueByOperationID(string) ([]BigMapDiff, error) + Count(network string, ptr int64) (int64, error) + CurrentByKey(network, keyHash string, ptr int64) (BigMapDiff, error) + Previous([]BigMapDiff, int64, string) ([]BigMapDiff, error) +} diff --git a/internal/models/block.go b/internal/models/block/model.go similarity index 98% rename from internal/models/block.go rename to internal/models/block/model.go index 94f9eea2e..33410835b 100644 --- a/internal/models/block.go +++ b/internal/models/block/model.go @@ -1,4 +1,4 @@ -package models +package block import ( "time" diff --git a/internal/models/block/repository.go b/internal/models/block/repository.go new file mode 100644 index 000000000..0fcc21472 --- /dev/null +++ b/internal/models/block/repository.go @@ -0,0 +1,9 @@ +package block + +// Repository - +type Repository interface { + Get(string, int64) (Block, error) + Last(string) (Block, error) + LastByNetworks() ([]Block, error) + GetNetworkAlias(chainID string) (string, error) +} diff --git a/internal/models/consts.go b/internal/models/consts.go new file mode 100644 index 000000000..64fc0267f --- /dev/null +++ b/internal/models/consts.go @@ -0,0 +1,70 @@ +package models + +import ( + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" + 
"github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +// Document names +const ( + DocContracts = "contract" + DocBlocks = "block" + DocBalanceUpdates = "balance_update" + DocOperations = "operation" + DocBigMapDiff = "bigmapdiff" + DocBigMapActions = "bigmapaction" + DocSchema = "schema" + DocMigrations = "migration" + DocProtocol = "protocol" + DocTransfers = "transfer" + DocTZIP = "tzip" + DocTokenBalances = "token_balance" + DocTezosDomains = "tezos_domain" +) + +// AllDocuments - returns all document names +func AllDocuments() []string { + return []string{ + DocBalanceUpdates, + DocBigMapActions, + DocBigMapDiff, + DocBlocks, + DocContracts, + DocMigrations, + DocOperations, + DocProtocol, + DocSchema, + DocTZIP, + DocTezosDomains, + DocTokenBalances, + DocTransfers, + } +} + +// AllModels - +func AllModels() []Model { + return []Model{ + &balanceupdate.BalanceUpdate{}, + &bigmapaction.BigMapAction{}, + &bigmapdiff.BigMapDiff{}, + &block.Block{}, + &contract.Contract{}, + &migration.Migration{}, + &operation.Operation{}, + &protocol.Protocol{}, + &transfer.Transfer{}, + &tzip.TZIP{}, + &tokenbalance.TokenBalance{}, + &tezosdomain.TezosDomain{}, + } +} diff --git a/internal/models/contract/data.go b/internal/models/contract/data.go new file mode 100644 index 000000000..52b09d1b4 --- /dev/null +++ b/internal/models/contract/data.go @@ -0,0 +1,27 @@ +package contract + +// SameResponse - +type SameResponse struct { + Count int64 `json:"count"` + Contracts []Contract `json:"contracts"` +} + +// Similar - +type Similar struct { + *Contract + Count int64 `json:"count"` +} + +// DiffTask - +type DiffTask struct { + Network1 string + Address1 string + Network2 string + Address2 string +} + 
+// Address - +type Address struct { + Address string + Network string +} diff --git a/internal/models/contract.go b/internal/models/contract/model.go similarity index 94% rename from internal/models/contract.go rename to internal/models/contract/model.go index 9a2a4295d..540b2227c 100644 --- a/internal/models/contract.go +++ b/internal/models/contract/model.go @@ -1,4 +1,4 @@ -package models +package contract import ( "fmt" @@ -103,3 +103,10 @@ func (f *Fingerprint) ParseElasticJSON(hit gjson.Result) { f.Parameter = hit.Get("parameter").String() f.Storage = hit.Get("storage").String() } + +// Light - +type Light struct { + Address string `json:"address"` + Network string `json:"network"` + Deployed time.Time `json:"deploy_time"` +} diff --git a/internal/models/contract/repository.go b/internal/models/contract/repository.go new file mode 100644 index 000000000..54c8b72c7 --- /dev/null +++ b/internal/models/contract/repository.go @@ -0,0 +1,18 @@ +package contract + +// Repository - +type Repository interface { + Get(map[string]interface{}) (Contract, error) + GetMany(map[string]interface{}) ([]Contract, error) + GetRandom() (Contract, error) + GetAddressesByNetworkAndLevel(string, int64) ([]string, error) + GetIDsByAddresses([]string, string) ([]string, error) + IsFA(string, string) (bool, error) + UpdateMigrationsCount(string, string) error + GetByAddresses(addresses []Address) ([]Contract, error) + GetTokens(string, string, int64, int64) ([]Contract, int64, error) + GetProjectsLastContract() ([]Contract, error) + GetSameContracts(Contract, int64, int64) (SameResponse, error) + GetSimilarContracts(Contract, int64, int64) ([]Similar, int, error) + GetDiffTasks() ([]DiffTask, error) +} diff --git a/internal/models/data.go b/internal/models/data.go new file mode 100644 index 000000000..e3b7f5684 --- /dev/null +++ b/internal/models/data.go @@ -0,0 +1,96 @@ +package models + +import "fmt" + +// SubscriptionRequest - +type SubscriptionRequest struct { + Address string + 
Network string + Alias string + Hash string + ProjectID string + WithSame bool + WithSimilar bool + WithMempool bool + WithMigrations bool + WithErrors bool + WithCalls bool + WithDeployments bool +} + +// EventType - +const ( + EventTypeError = "error" + EventTypeMigration = "migration" + EventTypeCall = "call" + EventTypeInvoke = "invoke" + EventTypeDeploy = "deploy" + EventTypeSame = "same" + EventTypeSimilar = "similar" + EventTypeMempool = "mempool" +) + +// Event - +type Event struct { + Type string `json:"type"` + Address string `json:"address"` + Network string `json:"network"` + Alias string `json:"alias"` + Body interface{} `json:"body,omitempty"` +} + +// Repository - +type Repository struct { + ID string `json:"id"` + Type string `json:"type"` +} + +// String - +func (repo Repository) String() string { + return fmt.Sprintf("%s (type: %s)", repo.ID, repo.Type) +} + +// ContractCountStats - +type ContractCountStats struct { + Total int64 + SameCount int64 + Balance int64 +} + +// Result - +type Result struct { + Count int64 `json:"count"` + Time int64 `json:"time"` + Items []Item `json:"items"` +} + +// Item - +type Item struct { + Type string `json:"type"` + Value string `json:"value"` + Group *Group `json:"group,omitempty"` + Body interface{} `json:"body"` + Highlights map[string][]string `json:"highlights,omitempty"` + + Network string `json:"-"` +} + +// Group - +type Group struct { + Count int64 `json:"count"` + Top []Top `json:"top"` +} + +// NewGroup - +func NewGroup(docCount int64) *Group { + return &Group{ + Count: docCount, + Top: make([]Top, 0), + } +} + +// Top - +type Top struct { + Network string `json:"network"` + Key string `json:"key"` +} diff --git a/internal/models/histogram.go b/internal/models/histogram.go new file mode 100644 index 000000000..f7bda5da4 --- /dev/null +++ b/internal/models/histogram.go @@ -0,0 +1,60 @@ +package models + +// Histogram filter kinds +const ( + HistogramFilterKindExists = "exists" + 
HistogramFilterKindMatch = "match" + HistogramFilterKindIn = "in" + HistogramFilterKindAddresses = "address" + HistogramFilterDexEnrtypoints = "dex_entrypoints" +) + +// HistogramContext - +type HistogramContext struct { + Indices []string + Period string + Function struct { + Name string + Field string + } + Filters []HistogramFilter +} + +// HasFunction - +func (ctx HistogramContext) HasFunction() bool { + return ctx.Function.Name != "" && ctx.Function.Field != "" +} + +// HistogramFilter - +type HistogramFilter struct { + Field string + Value interface{} + Kind string +} + +// HistogramOption - +type HistogramOption func(*HistogramContext) + +// WithHistogramIndices - +func WithHistogramIndices(indices ...string) HistogramOption { + return func(h *HistogramContext) { + h.Indices = indices + } +} + +// WithHistogramFunction - +func WithHistogramFunction(function, field string) HistogramOption { + return func(h *HistogramContext) { + h.Function = struct { + Name string + Field string + }{function, field} + } +} + +// WithHistogramFilters - +func WithHistogramFilters(filters []HistogramFilter) HistogramOption { + return func(h *HistogramContext) { + h.Filters = filters + } +} diff --git a/internal/models/interface.go b/internal/models/interface.go new file mode 100644 index 000000000..32c454755 --- /dev/null +++ b/internal/models/interface.go @@ -0,0 +1,50 @@ +package models + +import ( + "io" + + "github.com/baking-bad/bcdhub/internal/models/contract" +) + +// GeneralRepository - +type GeneralRepository interface { + CreateIndexes() error + DeleteIndices(indices []string) error + DeleteByLevelAndNetwork([]string, string, int64) error + DeleteByContract(indices []string, network, address string) error + GetAll(interface{}) error + GetByID(Model) error + GetByIDs(output interface{}, ids ...string) error + GetByNetwork(string, interface{}) error + GetByNetworkWithSort(string, string, string, interface{}) error + UpdateDoc(model Model) (err error) + 
UpdateFields(string, string, interface{}, ...string) error + GetEvents([]SubscriptionRequest, int64, int64) ([]Event, error) + SearchByText(string, int64, []string, map[string]interface{}, bool) (Result, error) + CreateAWSRepository(string, string, string) error + ListRepositories() ([]Repository, error) + CreateSnapshots(string, string, []string) error + RestoreSnapshots(string, string, []string) error + ListSnapshots(string) (string, error) + SetSnapshotPolicy(string, string, string, string, int64) error + GetAllPolicies() ([]string, error) + GetMappings([]string) (map[string]string, error) + CreateMapping(string, io.Reader) error + ReloadSecureSettings() error + GetNetworkCountStats(string) (map[string]int64, error) + GetDateHistogram(period string, opts ...HistogramOption) ([][]int64, error) + GetCallsCountByNetwork() (map[string]int64, error) + GetContractStatsByNetwork() (map[string]ContractCountStats, error) + GetFACountByNetwork() (map[string]int64, error) + GetLanguagesForNetwork(network string) (map[string]int64, error) + IsRecordNotFound(err error) bool +} + +// BulkRepository - +type BulkRepository interface { + Insert([]Model) error + Update([]Model) error + Delete([]Model) error + RemoveField(string, []Model) error + UpdateField(where []contract.Contract, fields ...string) error +} diff --git a/internal/models/migration.go b/internal/models/migration/model.go similarity index 98% rename from internal/models/migration.go rename to internal/models/migration/model.go index 615d1bc5e..468dd90a8 100644 --- a/internal/models/migration.go +++ b/internal/models/migration/model.go @@ -1,4 +1,4 @@ -package models +package migration import ( "time" diff --git a/internal/models/migration/repository.go b/internal/models/migration/repository.go new file mode 100644 index 000000000..bf73dd0d7 --- /dev/null +++ b/internal/models/migration/repository.go @@ -0,0 +1,7 @@ +package migration + +// Repository - +type Repository interface { + Get(string, string) 
([]Migration, error) + Count(string, string) (int64, error) +} diff --git a/internal/models/mock/balanceupdate/mock.go b/internal/models/mock/balanceupdate/mock.go new file mode 100644 index 000000000..46f6ede48 --- /dev/null +++ b/internal/models/mock/balanceupdate/mock.go @@ -0,0 +1,48 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: balanceupdate/repository.go + +// Package mock_balanceupdate is a generated GoMock package. +package mock_balanceupdate + +import ( + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// GetBalance mocks base method +func (m *MockRepository) GetBalance(network, address string) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetBalance", network, address) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetBalance indicates an expected call of GetBalance +func (mr *MockRepositoryMockRecorder) GetBalance(network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBalance", reflect.TypeOf((*MockRepository)(nil).GetBalance), network, address) +} diff --git a/internal/models/mock/bigmapaction/mock.go b/internal/models/mock/bigmapaction/mock.go new file mode 100644 index 000000000..d3a574063 --- /dev/null +++ 
b/internal/models/mock/bigmapaction/mock.go @@ -0,0 +1,49 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: bigmapaction/repository.go + +// Package mock_bigmapaction is a generated GoMock package. +package mock_bigmapaction + +import ( + bigmapaction "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(ptr int64, network string) ([]bigmapaction.BigMapAction, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", ptr, network) + ret0, _ := ret[0].([]bigmapaction.BigMapAction) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(ptr, network interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), ptr, network) +} diff --git a/internal/models/mock/bigmapdiff/mock.go b/internal/models/mock/bigmapdiff/mock.go new file mode 100644 index 000000000..91b54c43c --- /dev/null +++ b/internal/models/mock/bigmapdiff/mock.go @@ -0,0 +1,200 @@ +// Code generated by MockGen. DO NOT EDIT. 
+// Source: bigmapdiff/repository.go + +// Package mock_bigmapdiff is a generated GoMock package. +package mock_bigmapdiff + +import ( + bigmapdiff "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(ctx bigmapdiff.GetContext) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", ctx) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), ctx) +} + +// GetByAddress mocks base method +func (m *MockRepository) GetByAddress(arg0, arg1 string) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByAddress", arg0, arg1) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByAddress indicates an expected call of GetByAddress +func (mr *MockRepositoryMockRecorder) GetByAddress(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByAddress", reflect.TypeOf((*MockRepository)(nil).GetByAddress), arg0, arg1) +} + +// GetByOperationID mocks base method +func (m *MockRepository) GetByOperationID(arg0 string) ([]*bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByOperationID", arg0) + ret0, _ := ret[0].([]*bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByOperationID indicates an expected call of GetByOperationID +func (mr *MockRepositoryMockRecorder) GetByOperationID(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByOperationID", reflect.TypeOf((*MockRepository)(nil).GetByOperationID), arg0) +} + +// GetByPtr mocks base method +func (m *MockRepository) GetByPtr(arg0, arg1 string, arg2 int64) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByPtr", arg0, arg1, arg2) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByPtr indicates an expected call of GetByPtr +func (mr *MockRepositoryMockRecorder) GetByPtr(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByPtr", reflect.TypeOf((*MockRepository)(nil).GetByPtr), arg0, arg1, arg2) +} + +// GetByPtrAndKeyHash mocks base method +func (m *MockRepository) GetByPtrAndKeyHash(arg0 int64, arg1, arg2 string, arg3, arg4 int64) ([]bigmapdiff.BigMapDiff, int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByPtrAndKeyHash", arg0, arg1, arg2, arg3, arg4) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(int64) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GetByPtrAndKeyHash indicates an expected call of GetByPtrAndKeyHash +func (mr *MockRepositoryMockRecorder) GetByPtrAndKeyHash(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + 
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByPtrAndKeyHash", reflect.TypeOf((*MockRepository)(nil).GetByPtrAndKeyHash), arg0, arg1, arg2, arg3, arg4) +} + +// GetForAddress mocks base method +func (m *MockRepository) GetForAddress(arg0 string) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetForAddress", arg0) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetForAddress indicates an expected call of GetForAddress +func (mr *MockRepositoryMockRecorder) GetForAddress(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetForAddress", reflect.TypeOf((*MockRepository)(nil).GetForAddress), arg0) +} + +// GetValuesByKey mocks base method +func (m *MockRepository) GetValuesByKey(arg0 string) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetValuesByKey", arg0) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetValuesByKey indicates an expected call of GetValuesByKey +func (mr *MockRepositoryMockRecorder) GetValuesByKey(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetValuesByKey", reflect.TypeOf((*MockRepository)(nil).GetValuesByKey), arg0) +} + +// GetUniqueByOperationID mocks base method +func (m *MockRepository) GetUniqueByOperationID(arg0 string) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUniqueByOperationID", arg0) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUniqueByOperationID indicates an expected call of GetUniqueByOperationID +func (mr *MockRepositoryMockRecorder) GetUniqueByOperationID(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUniqueByOperationID", 
reflect.TypeOf((*MockRepository)(nil).GetUniqueByOperationID), arg0) +} + +// Count mocks base method +func (m *MockRepository) Count(network string, ptr int64) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Count", network, ptr) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Count indicates an expected call of Count +func (mr *MockRepositoryMockRecorder) Count(network, ptr interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Count", reflect.TypeOf((*MockRepository)(nil).Count), network, ptr) +} + +// CurrentByKey mocks base method +func (m *MockRepository) CurrentByKey(network, keyHash string, ptr int64) (bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CurrentByKey", network, keyHash, ptr) + ret0, _ := ret[0].(bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CurrentByKey indicates an expected call of CurrentByKey +func (mr *MockRepositoryMockRecorder) CurrentByKey(network, keyHash, ptr interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CurrentByKey", reflect.TypeOf((*MockRepository)(nil).CurrentByKey), network, keyHash, ptr) +} + +// Previous mocks base method +func (m *MockRepository) Previous(arg0 []bigmapdiff.BigMapDiff, arg1 int64, arg2 string) ([]bigmapdiff.BigMapDiff, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Previous", arg0, arg1, arg2) + ret0, _ := ret[0].([]bigmapdiff.BigMapDiff) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Previous indicates an expected call of Previous +func (mr *MockRepositoryMockRecorder) Previous(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Previous", reflect.TypeOf((*MockRepository)(nil).Previous), arg0, arg1, arg2) +} diff --git a/internal/models/mock/block/mock.go 
b/internal/models/mock/block/mock.go new file mode 100644 index 000000000..70a0b8f11 --- /dev/null +++ b/internal/models/mock/block/mock.go @@ -0,0 +1,94 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: block/repository.go + +// Package mock_block is a generated GoMock package. +package mock_block + +import ( + block "github.com/baking-bad/bcdhub/internal/models/block" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(arg0 string, arg1 int64) (block.Block, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", arg0, arg1) + ret0, _ := ret[0].(block.Block) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), arg0, arg1) +} + +// Last mocks base method +func (m *MockRepository) Last(arg0 string) (block.Block, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Last", arg0) + ret0, _ := ret[0].(block.Block) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Last indicates an expected call of Last +func (mr 
*MockRepositoryMockRecorder) Last(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Last", reflect.TypeOf((*MockRepository)(nil).Last), arg0) +} + +// LastByNetworks mocks base method +func (m *MockRepository) LastByNetworks() ([]block.Block, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "LastByNetworks") + ret0, _ := ret[0].([]block.Block) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// LastByNetworks indicates an expected call of LastByNetworks +func (mr *MockRepositoryMockRecorder) LastByNetworks() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "LastByNetworks", reflect.TypeOf((*MockRepository)(nil).LastByNetworks)) +} + +// GetNetworkAlias mocks base method +func (m *MockRepository) GetNetworkAlias(chainID string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNetworkAlias", chainID) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNetworkAlias indicates an expected call of GetNetworkAlias +func (mr *MockRepositoryMockRecorder) GetNetworkAlias(chainID interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkAlias", reflect.TypeOf((*MockRepository)(nil).GetNetworkAlias), chainID) +} diff --git a/internal/models/mock/contract/mock.go b/internal/models/mock/contract/mock.go new file mode 100644 index 000000000..c4c2391dc --- /dev/null +++ b/internal/models/mock/contract/mock.go @@ -0,0 +1,260 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: contract/repository.go + +// Package mock_contract is a generated GoMock package. 
+package mock_contract + +import ( + contract "github.com/baking-bad/bcdhub/internal/models/contract" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(arg0 map[string]interface{}) (contract.Contract, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", arg0) + ret0, _ := ret[0].(contract.Contract) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), arg0) +} + +// GetMany mocks base method +func (m *MockRepository) GetMany(arg0 map[string]interface{}) ([]contract.Contract, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMany", arg0) + ret0, _ := ret[0].([]contract.Contract) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMany indicates an expected call of GetMany +func (mr *MockRepositoryMockRecorder) GetMany(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMany", reflect.TypeOf((*MockRepository)(nil).GetMany), arg0) +} + +// GetRandom mocks base method +func (m 
*MockRepository) GetRandom() (contract.Contract, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetRandom") + ret0, _ := ret[0].(contract.Contract) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetRandom indicates an expected call of GetRandom +func (mr *MockRepositoryMockRecorder) GetRandom() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetRandom", reflect.TypeOf((*MockRepository)(nil).GetRandom)) +} + +// GetMigrationsCount mocks base method +func (m *MockRepository) GetMigrationsCount(arg0, arg1 string) (int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrationsCount", arg0, arg1) + ret0, _ := ret[0].(int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrationsCount indicates an expected call of GetMigrationsCount +func (mr *MockRepositoryMockRecorder) GetMigrationsCount(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrationsCount", reflect.TypeOf((*MockRepository)(nil).GetMigrationsCount), arg0, arg1) +} + +// GetAddressesByNetworkAndLevel mocks base method +func (m *MockRepository) GetAddressesByNetworkAndLevel(arg0 string, arg1 int64) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAddressesByNetworkAndLevel", arg0, arg1) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAddressesByNetworkAndLevel indicates an expected call of GetAddressesByNetworkAndLevel +func (mr *MockRepositoryMockRecorder) GetAddressesByNetworkAndLevel(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAddressesByNetworkAndLevel", reflect.TypeOf((*MockRepository)(nil).GetAddressesByNetworkAndLevel), arg0, arg1) +} + +// GetIDsByAddresses mocks base method +func (m *MockRepository) GetIDsByAddresses(arg0 []string, arg1 string) ([]string, error) { + m.ctrl.T.Helper() + ret := 
m.ctrl.Call(m, "GetIDsByAddresses", arg0, arg1) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetIDsByAddresses indicates an expected call of GetIDsByAddresses +func (mr *MockRepositoryMockRecorder) GetIDsByAddresses(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetIDsByAddresses", reflect.TypeOf((*MockRepository)(nil).GetIDsByAddresses), arg0, arg1) +} + +// GetByLevels mocks base method +func (m *MockRepository) GetByLevels(arg0 string, arg1, arg2 int64) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByLevels", arg0, arg1, arg2) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByLevels indicates an expected call of GetByLevels +func (mr *MockRepositoryMockRecorder) GetByLevels(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByLevels", reflect.TypeOf((*MockRepository)(nil).GetByLevels), arg0, arg1, arg2) +} + +// IsFA mocks base method +func (m *MockRepository) IsFA(arg0, arg1 string) (bool, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsFA", arg0, arg1) + ret0, _ := ret[0].(bool) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// IsFA indicates an expected call of IsFA +func (mr *MockRepositoryMockRecorder) IsFA(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsFA", reflect.TypeOf((*MockRepository)(nil).IsFA), arg0, arg1) +} + +// UpdateMigrationsCount mocks base method +func (m *MockRepository) UpdateMigrationsCount(arg0, arg1 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateMigrationsCount", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateMigrationsCount indicates an expected call of UpdateMigrationsCount +func (mr *MockRepositoryMockRecorder) UpdateMigrationsCount(arg0, arg1 
interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateMigrationsCount", reflect.TypeOf((*MockRepository)(nil).UpdateMigrationsCount), arg0, arg1) +} + +// GetByAddresses mocks base method +func (m *MockRepository) GetByAddresses(addresses []contract.Address) ([]contract.Contract, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByAddresses", addresses) + ret0, _ := ret[0].([]contract.Contract) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByAddresses indicates an expected call of GetByAddresses +func (mr *MockRepositoryMockRecorder) GetByAddresses(addresses interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByAddresses", reflect.TypeOf((*MockRepository)(nil).GetByAddresses), addresses) +} + +// GetTokens mocks base method +func (m *MockRepository) GetTokens(arg0, arg1 string, arg2, arg3 int64) ([]contract.Contract, int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokens", arg0, arg1, arg2, arg3) + ret0, _ := ret[0].([]contract.Contract) + ret1, _ := ret[1].(int64) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GetTokens indicates an expected call of GetTokens +func (mr *MockRepositoryMockRecorder) GetTokens(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokens", reflect.TypeOf((*MockRepository)(nil).GetTokens), arg0, arg1, arg2, arg3) +} + +// GetProjectsLastContract mocks base method +func (m *MockRepository) GetProjectsLastContract() ([]contract.Contract, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetProjectsLastContract") + ret0, _ := ret[0].([]contract.Contract) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProjectsLastContract indicates an expected call of GetProjectsLastContract +func (mr *MockRepositoryMockRecorder) GetProjectsLastContract() *gomock.Call { + 
mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProjectsLastContract", reflect.TypeOf((*MockRepository)(nil).GetProjectsLastContract)) +} + +// GetSameContracts mocks base method +func (m *MockRepository) GetSameContracts(arg0 contract.Contract, arg1, arg2 int64) (contract.SameResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSameContracts", arg0, arg1, arg2) + ret0, _ := ret[0].(contract.SameResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetSameContracts indicates an expected call of GetSameContracts +func (mr *MockRepositoryMockRecorder) GetSameContracts(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSameContracts", reflect.TypeOf((*MockRepository)(nil).GetSameContracts), arg0, arg1, arg2) +} + +// GetSimilarContracts mocks base method +func (m *MockRepository) GetSimilarContracts(arg0 contract.Contract, arg1, arg2 int64) ([]contract.Similar, int, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSimilarContracts", arg0, arg1, arg2) + ret0, _ := ret[0].([]contract.Similar) + ret1, _ := ret[1].(int) + ret2, _ := ret[2].(error) + return ret0, ret1, ret2 +} + +// GetSimilarContracts indicates an expected call of GetSimilarContracts +func (mr *MockRepositoryMockRecorder) GetSimilarContracts(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSimilarContracts", reflect.TypeOf((*MockRepository)(nil).GetSimilarContracts), arg0, arg1, arg2) +} + +// GetDiffTasks mocks base method +func (m *MockRepository) GetDiffTasks() ([]contract.DiffTask, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDiffTasks") + ret0, _ := ret[0].([]contract.DiffTask) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDiffTasks indicates an expected call of GetDiffTasks +func (mr *MockRepositoryMockRecorder) GetDiffTasks() *gomock.Call { + 
mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDiffTasks", reflect.TypeOf((*MockRepository)(nil).GetDiffTasks)) +} diff --git a/internal/models/mock/general.go b/internal/models/mock/general.go new file mode 100644 index 000000000..5464f2180 --- /dev/null +++ b/internal/models/mock/general.go @@ -0,0 +1,581 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: interface.go + +// Package mock is a generated GoMock package. +package mock + +import ( + models "github.com/baking-bad/bcdhub/internal/models" + contract "github.com/baking-bad/bcdhub/internal/models/contract" + gomock "github.com/golang/mock/gomock" + io "io" + reflect "reflect" +) + +// MockGeneralRepository is a mock of GeneralRepository interface +type MockGeneralRepository struct { + ctrl *gomock.Controller + recorder *MockGeneralRepositoryMockRecorder +} + +// MockGeneralRepositoryMockRecorder is the mock recorder for MockGeneralRepository +type MockGeneralRepositoryMockRecorder struct { + mock *MockGeneralRepository +} + +// NewMockGeneralRepository creates a new mock instance +func NewMockGeneralRepository(ctrl *gomock.Controller) *MockGeneralRepository { + mock := &MockGeneralRepository{ctrl: ctrl} + mock.recorder = &MockGeneralRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockGeneralRepository) EXPECT() *MockGeneralRepositoryMockRecorder { + return m.recorder +} + +// CreateIndexes mocks base method +func (m *MockGeneralRepository) CreateIndexes() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateIndexes") + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateIndexes indicates an expected call of CreateIndexes +func (mr *MockGeneralRepositoryMockRecorder) CreateIndexes() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateIndexes", 
reflect.TypeOf((*MockGeneralRepository)(nil).CreateIndexes)) +} + +// DeleteIndices mocks base method +func (m *MockGeneralRepository) DeleteIndices(indices []string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteIndices", indices) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteIndices indicates an expected call of DeleteIndices +func (mr *MockGeneralRepositoryMockRecorder) DeleteIndices(indices interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteIndices", reflect.TypeOf((*MockGeneralRepository)(nil).DeleteIndices), indices) +} + +// DeleteByLevelAndNetwork mocks base method +func (m *MockGeneralRepository) DeleteByLevelAndNetwork(arg0 []string, arg1 string, arg2 int64) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteByLevelAndNetwork", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteByLevelAndNetwork indicates an expected call of DeleteByLevelAndNetwork +func (mr *MockGeneralRepositoryMockRecorder) DeleteByLevelAndNetwork(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByLevelAndNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).DeleteByLevelAndNetwork), arg0, arg1, arg2) +} + +// DeleteByContract mocks base method +func (m *MockGeneralRepository) DeleteByContract(indices []string, network, address string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteByContract", indices, network, address) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteByContract indicates an expected call of DeleteByContract +func (mr *MockGeneralRepositoryMockRecorder) DeleteByContract(indices, network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteByContract", reflect.TypeOf((*MockGeneralRepository)(nil).DeleteByContract), indices, network, address) +} + +// GetAll mocks base 
method +func (m *MockGeneralRepository) GetAll(arg0 interface{}) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAll", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetAll indicates an expected call of GetAll +func (mr *MockGeneralRepositoryMockRecorder) GetAll(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAll", reflect.TypeOf((*MockGeneralRepository)(nil).GetAll), arg0) +} + +// GetByID mocks base method +func (m *MockGeneralRepository) GetByID(arg0 models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByID", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetByID indicates an expected call of GetByID +func (mr *MockGeneralRepositoryMockRecorder) GetByID(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByID", reflect.TypeOf((*MockGeneralRepository)(nil).GetByID), arg0) +} + +// GetByIDs mocks base method +func (m *MockGeneralRepository) GetByIDs(output interface{}, ids ...string) error { + m.ctrl.T.Helper() + varargs := []interface{}{output} + for _, a := range ids { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetByIDs", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetByIDs indicates an expected call of GetByIDs +func (mr *MockGeneralRepositoryMockRecorder) GetByIDs(output interface{}, ids ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{output}, ids...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByIDs", reflect.TypeOf((*MockGeneralRepository)(nil).GetByIDs), varargs...) 
+} + +// GetByNetwork mocks base method +func (m *MockGeneralRepository) GetByNetwork(arg0 string, arg1 interface{}) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByNetwork", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetByNetwork indicates an expected call of GetByNetwork +func (mr *MockGeneralRepositoryMockRecorder) GetByNetwork(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).GetByNetwork), arg0, arg1) +} + +// GetByNetworkWithSort mocks base method +func (m *MockGeneralRepository) GetByNetworkWithSort(arg0, arg1, arg2 string, arg3 interface{}) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByNetworkWithSort", arg0, arg1, arg2, arg3) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetByNetworkWithSort indicates an expected call of GetByNetworkWithSort +func (mr *MockGeneralRepositoryMockRecorder) GetByNetworkWithSort(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByNetworkWithSort", reflect.TypeOf((*MockGeneralRepository)(nil).GetByNetworkWithSort), arg0, arg1, arg2, arg3) +} + +// UpdateDoc mocks base method +func (m *MockGeneralRepository) UpdateDoc(model models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateDoc", model) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateDoc indicates an expected call of UpdateDoc +func (mr *MockGeneralRepositoryMockRecorder) UpdateDoc(model interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateDoc", reflect.TypeOf((*MockGeneralRepository)(nil).UpdateDoc), model) +} + +// UpdateFields mocks base method +func (m *MockGeneralRepository) UpdateFields(arg0, arg1 string, arg2 interface{}, arg3 ...string) error { + m.ctrl.T.Helper() + varargs := []interface{}{arg0, arg1, arg2} + for _, 
a := range arg3 { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateFields", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateFields indicates an expected call of UpdateFields +func (mr *MockGeneralRepositoryMockRecorder) UpdateFields(arg0, arg1, arg2 interface{}, arg3 ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{arg0, arg1, arg2}, arg3...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateFields", reflect.TypeOf((*MockGeneralRepository)(nil).UpdateFields), varargs...) +} + +// GetEvents mocks base method +func (m *MockGeneralRepository) GetEvents(arg0 []models.SubscriptionRequest, arg1, arg2 int64) ([]models.Event, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetEvents", arg0, arg1, arg2) + ret0, _ := ret[0].([]models.Event) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetEvents indicates an expected call of GetEvents +func (mr *MockGeneralRepositoryMockRecorder) GetEvents(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetEvents", reflect.TypeOf((*MockGeneralRepository)(nil).GetEvents), arg0, arg1, arg2) +} + +// SearchByText mocks base method +func (m *MockGeneralRepository) SearchByText(arg0 string, arg1 int64, arg2 []string, arg3 map[string]interface{}, arg4 bool) (models.Result, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SearchByText", arg0, arg1, arg2, arg3, arg4) + ret0, _ := ret[0].(models.Result) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SearchByText indicates an expected call of SearchByText +func (mr *MockGeneralRepositoryMockRecorder) SearchByText(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SearchByText", reflect.TypeOf((*MockGeneralRepository)(nil).SearchByText), arg0, arg1, arg2, arg3, arg4) +} + +// CreateAWSRepository mocks base method 
+func (m *MockGeneralRepository) CreateAWSRepository(arg0, arg1, arg2 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateAWSRepository", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateAWSRepository indicates an expected call of CreateAWSRepository +func (mr *MockGeneralRepositoryMockRecorder) CreateAWSRepository(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateAWSRepository", reflect.TypeOf((*MockGeneralRepository)(nil).CreateAWSRepository), arg0, arg1, arg2) +} + +// ListRepositories mocks base method +func (m *MockGeneralRepository) ListRepositories() ([]models.Repository, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListRepositories") + ret0, _ := ret[0].([]models.Repository) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListRepositories indicates an expected call of ListRepositories +func (mr *MockGeneralRepositoryMockRecorder) ListRepositories() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListRepositories", reflect.TypeOf((*MockGeneralRepository)(nil).ListRepositories)) +} + +// CreateSnapshots mocks base method +func (m *MockGeneralRepository) CreateSnapshots(arg0, arg1 string, arg2 []string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateSnapshots", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateSnapshots indicates an expected call of CreateSnapshots +func (mr *MockGeneralRepositoryMockRecorder) CreateSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateSnapshots", reflect.TypeOf((*MockGeneralRepository)(nil).CreateSnapshots), arg0, arg1, arg2) +} + +// RestoreSnapshots mocks base method +func (m *MockGeneralRepository) RestoreSnapshots(arg0, arg1 string, arg2 []string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, 
"RestoreSnapshots", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 +} + +// RestoreSnapshots indicates an expected call of RestoreSnapshots +func (mr *MockGeneralRepositoryMockRecorder) RestoreSnapshots(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RestoreSnapshots", reflect.TypeOf((*MockGeneralRepository)(nil).RestoreSnapshots), arg0, arg1, arg2) +} + +// ListSnapshots mocks base method +func (m *MockGeneralRepository) ListSnapshots(arg0 string) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListSnapshots", arg0) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListSnapshots indicates an expected call of ListSnapshots +func (mr *MockGeneralRepositoryMockRecorder) ListSnapshots(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListSnapshots", reflect.TypeOf((*MockGeneralRepository)(nil).ListSnapshots), arg0) +} + +// SetSnapshotPolicy mocks base method +func (m *MockGeneralRepository) SetSnapshotPolicy(arg0, arg1, arg2, arg3 string, arg4 int64) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SetSnapshotPolicy", arg0, arg1, arg2, arg3, arg4) + ret0, _ := ret[0].(error) + return ret0 +} + +// SetSnapshotPolicy indicates an expected call of SetSnapshotPolicy +func (mr *MockGeneralRepositoryMockRecorder) SetSnapshotPolicy(arg0, arg1, arg2, arg3, arg4 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetSnapshotPolicy", reflect.TypeOf((*MockGeneralRepository)(nil).SetSnapshotPolicy), arg0, arg1, arg2, arg3, arg4) +} + +// GetAllPolicies mocks base method +func (m *MockGeneralRepository) GetAllPolicies() ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAllPolicies") + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAllPolicies indicates 
an expected call of GetAllPolicies +func (mr *MockGeneralRepositoryMockRecorder) GetAllPolicies() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllPolicies", reflect.TypeOf((*MockGeneralRepository)(nil).GetAllPolicies)) +} + +// GetMappings mocks base method +func (m *MockGeneralRepository) GetMappings(arg0 []string) (map[string]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMappings", arg0) + ret0, _ := ret[0].(map[string]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMappings indicates an expected call of GetMappings +func (mr *MockGeneralRepositoryMockRecorder) GetMappings(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMappings", reflect.TypeOf((*MockGeneralRepository)(nil).GetMappings), arg0) +} + +// CreateMapping mocks base method +func (m *MockGeneralRepository) CreateMapping(arg0 string, arg1 io.Reader) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CreateMapping", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// CreateMapping indicates an expected call of CreateMapping +func (mr *MockGeneralRepositoryMockRecorder) CreateMapping(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateMapping", reflect.TypeOf((*MockGeneralRepository)(nil).CreateMapping), arg0, arg1) +} + +// ReloadSecureSettings mocks base method +func (m *MockGeneralRepository) ReloadSecureSettings() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ReloadSecureSettings") + ret0, _ := ret[0].(error) + return ret0 +} + +// ReloadSecureSettings indicates an expected call of ReloadSecureSettings +func (mr *MockGeneralRepositoryMockRecorder) ReloadSecureSettings() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReloadSecureSettings", 
reflect.TypeOf((*MockGeneralRepository)(nil).ReloadSecureSettings)) +} + +// GetNetworkCountStats mocks base method +func (m *MockGeneralRepository) GetNetworkCountStats(arg0 string) (map[string]int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNetworkCountStats", arg0) + ret0, _ := ret[0].(map[string]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNetworkCountStats indicates an expected call of GetNetworkCountStats +func (mr *MockGeneralRepositoryMockRecorder) GetNetworkCountStats(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNetworkCountStats", reflect.TypeOf((*MockGeneralRepository)(nil).GetNetworkCountStats), arg0) +} + +// GetDateHistogram mocks base method +func (m *MockGeneralRepository) GetDateHistogram(period string, opts ...models.HistogramOption) ([][]int64, error) { + m.ctrl.T.Helper() + varargs := []interface{}{period} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetDateHistogram", varargs...) + ret0, _ := ret[0].([][]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDateHistogram indicates an expected call of GetDateHistogram +func (mr *MockGeneralRepositoryMockRecorder) GetDateHistogram(period interface{}, opts ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{period}, opts...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDateHistogram", reflect.TypeOf((*MockGeneralRepository)(nil).GetDateHistogram), varargs...) 
+} + +// GetCallsCountByNetwork mocks base method +func (m *MockGeneralRepository) GetCallsCountByNetwork() (map[string]int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetCallsCountByNetwork") + ret0, _ := ret[0].(map[string]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetCallsCountByNetwork indicates an expected call of GetCallsCountByNetwork +func (mr *MockGeneralRepositoryMockRecorder) GetCallsCountByNetwork() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetCallsCountByNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).GetCallsCountByNetwork)) +} + +// GetContractStatsByNetwork mocks base method +func (m *MockGeneralRepository) GetContractStatsByNetwork() (map[string]models.ContractCountStats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetContractStatsByNetwork") + ret0, _ := ret[0].(map[string]models.ContractCountStats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetContractStatsByNetwork indicates an expected call of GetContractStatsByNetwork +func (mr *MockGeneralRepositoryMockRecorder) GetContractStatsByNetwork() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContractStatsByNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).GetContractStatsByNetwork)) +} + +// GetFACountByNetwork mocks base method +func (m *MockGeneralRepository) GetFACountByNetwork() (map[string]int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetFACountByNetwork") + ret0, _ := ret[0].(map[string]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetFACountByNetwork indicates an expected call of GetFACountByNetwork +func (mr *MockGeneralRepositoryMockRecorder) GetFACountByNetwork() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetFACountByNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).GetFACountByNetwork)) +} + +// GetLanguagesForNetwork 
mocks base method +func (m *MockGeneralRepository) GetLanguagesForNetwork(network string) (map[string]int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetLanguagesForNetwork", network) + ret0, _ := ret[0].(map[string]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetLanguagesForNetwork indicates an expected call of GetLanguagesForNetwork +func (mr *MockGeneralRepositoryMockRecorder) GetLanguagesForNetwork(network interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLanguagesForNetwork", reflect.TypeOf((*MockGeneralRepository)(nil).GetLanguagesForNetwork), network) +} + +// IsRecordNotFound mocks base method +func (m *MockGeneralRepository) IsRecordNotFound(err error) bool { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "IsRecordNotFound", err) + ret0, _ := ret[0].(bool) + return ret0 +} + +// IsRecordNotFound indicates an expected call of IsRecordNotFound +func (mr *MockGeneralRepositoryMockRecorder) IsRecordNotFound(err interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "IsRecordNotFound", reflect.TypeOf((*MockGeneralRepository)(nil).IsRecordNotFound), err) +} + +// MockBulkRepository is a mock of BulkRepository interface +type MockBulkRepository struct { + ctrl *gomock.Controller + recorder *MockBulkRepositoryMockRecorder +} + +// MockBulkRepositoryMockRecorder is the mock recorder for MockBulkRepository +type MockBulkRepositoryMockRecorder struct { + mock *MockBulkRepository +} + +// NewMockBulkRepository creates a new mock instance +func NewMockBulkRepository(ctrl *gomock.Controller) *MockBulkRepository { + mock := &MockBulkRepository{ctrl: ctrl} + mock.recorder = &MockBulkRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockBulkRepository) EXPECT() *MockBulkRepositoryMockRecorder { + return m.recorder +} + +// Insert mocks base 
method +func (m *MockBulkRepository) Insert(arg0 []models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Insert", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Insert indicates an expected call of Insert +func (mr *MockBulkRepositoryMockRecorder) Insert(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Insert", reflect.TypeOf((*MockBulkRepository)(nil).Insert), arg0) +} + +// Update mocks base method +func (m *MockBulkRepository) Update(arg0 []models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Update", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Update indicates an expected call of Update +func (mr *MockBulkRepositoryMockRecorder) Update(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockBulkRepository)(nil).Update), arg0) +} + +// Delete mocks base method +func (m *MockBulkRepository) Delete(arg0 []models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Delete", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Delete indicates an expected call of Delete +func (mr *MockBulkRepositoryMockRecorder) Delete(arg0 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockBulkRepository)(nil).Delete), arg0) +} + +// RemoveField mocks base method +func (m *MockBulkRepository) RemoveField(arg0 string, arg1 []models.Model) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RemoveField", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// RemoveField indicates an expected call of RemoveField +func (mr *MockBulkRepositoryMockRecorder) RemoveField(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemoveField", reflect.TypeOf((*MockBulkRepository)(nil).RemoveField), arg0, arg1) +} + 
+// UpdateField mocks base method +func (m *MockBulkRepository) UpdateField(where []contract.Contract, fields ...string) error { + m.ctrl.T.Helper() + varargs := []interface{}{where} + for _, a := range fields { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "UpdateField", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateField indicates an expected call of UpdateField +func (mr *MockBulkRepositoryMockRecorder) UpdateField(where interface{}, fields ...interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]interface{}{where}, fields...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateField", reflect.TypeOf((*MockBulkRepository)(nil).UpdateField), varargs...) +} diff --git a/internal/models/mock/migration/mock.go b/internal/models/mock/migration/mock.go new file mode 100644 index 000000000..ae96d420f --- /dev/null +++ b/internal/models/mock/migration/mock.go @@ -0,0 +1,49 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: migration/repository.go + +// Package mock_migration is a generated GoMock package. 
+package mock_migration + +import ( + migration "github.com/baking-bad/bcdhub/internal/models/migration" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// GetMigrations mocks base method +func (m *MockRepository) GetMigrations(arg0, arg1 string) ([]migration.Migration, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetMigrations", arg0, arg1) + ret0, _ := ret[0].([]migration.Migration) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetMigrations indicates an expected call of GetMigrations +func (mr *MockRepositoryMockRecorder) GetMigrations(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetMigrations", reflect.TypeOf((*MockRepository)(nil).GetMigrations), arg0, arg1) +} diff --git a/internal/models/mock/operation/mock.go b/internal/models/mock/operation/mock.go new file mode 100644 index 000000000..5774cf21f --- /dev/null +++ b/internal/models/mock/operation/mock.go @@ -0,0 +1,169 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: operation/repository.go + +// Package mock_operation is a generated GoMock package. 
+package mock_operation + +import ( + operation "github.com/baking-bad/bcdhub/internal/models/operation" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// GetByContract mocks base method +func (m *MockRepository) GetByContract(network, address string, size uint64, filters map[string]interface{}) (operation.Pageable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetByContract", network, address, size, filters) + ret0, _ := ret[0].(operation.Pageable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetByContract indicates an expected call of GetByContract +func (mr *MockRepositoryMockRecorder) GetByContract(network, address, size, filters interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetByContract", reflect.TypeOf((*MockRepository)(nil).GetByContract), network, address, size, filters) +} + +// GetStats mocks base method +func (m *MockRepository) GetStats(network, address string) (operation.Stats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetStats", network, address) + ret0, _ := ret[0].(operation.Stats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetStats indicates an expected call of GetStats +func (mr *MockRepositoryMockRecorder) GetStats(network, 
address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetStats", reflect.TypeOf((*MockRepository)(nil).GetStats), network, address) +} + +// Last mocks base method +func (m *MockRepository) Last(network, address string, indexedTime int64) (operation.Operation, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Last", network, address, indexedTime) + ret0, _ := ret[0].(operation.Operation) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Last indicates an expected call of Last +func (mr *MockRepositoryMockRecorder) Last(network, address, indexedTime interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Last", reflect.TypeOf((*MockRepository)(nil).Last), network, address, indexedTime) +} + +// Get mocks base method +func (m *MockRepository) Get(filter map[string]interface{}, size int64, sort bool) ([]operation.Operation, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", filter, size, sort) + ret0, _ := ret[0].([]operation.Operation) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(filter, size, sort interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), filter, size, sort) +} + +// GetContract24HoursVolume mocks base method +func (m *MockRepository) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetContract24HoursVolume", network, address, entrypoints) + ret0, _ := ret[0].(float64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetContract24HoursVolume indicates an expected call of GetContract24HoursVolume +func (mr *MockRepositoryMockRecorder) GetContract24HoursVolume(network, address, entrypoints interface{}) *gomock.Call { + 
mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetContract24HoursVolume", reflect.TypeOf((*MockRepository)(nil).GetContract24HoursVolume), network, address, entrypoints) +} + +// GetTokensStats mocks base method +func (m *MockRepository) GetTokensStats(network string, addresses, entrypoints []string) (map[string]operation.TokenUsageStats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokensStats", network, addresses, entrypoints) + ret0, _ := ret[0].(map[string]operation.TokenUsageStats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTokensStats indicates an expected call of GetTokensStats +func (mr *MockRepositoryMockRecorder) GetTokensStats(network, addresses, entrypoints interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokensStats", reflect.TypeOf((*MockRepository)(nil).GetTokensStats), network, addresses, entrypoints) +} + +// GetParticipatingContracts mocks base method +func (m *MockRepository) GetParticipatingContracts(network string, fromLevel, toLevel int64) ([]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetParticipatingContracts", network, fromLevel, toLevel) + ret0, _ := ret[0].([]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetParticipatingContracts indicates an expected call of GetParticipatingContracts +func (mr *MockRepositoryMockRecorder) GetParticipatingContracts(network, fromLevel, toLevel interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetParticipatingContracts", reflect.TypeOf((*MockRepository)(nil).GetParticipatingContracts), network, fromLevel, toLevel) +} + +// RecalcStats mocks base method +func (m *MockRepository) RecalcStats(network, address string) (operation.ContractStats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "RecalcStats", network, address) + ret0, _ := ret[0].(operation.ContractStats) + ret1, _ := 
ret[1].(error) + return ret0, ret1 +} + +// RecalcStats indicates an expected call of RecalcStats +func (mr *MockRepositoryMockRecorder) RecalcStats(network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RecalcStats", reflect.TypeOf((*MockRepository)(nil).RecalcStats), network, address) +} + +// GetDAppStats mocks base method +func (m *MockRepository) GetDAppStats(arg0 string, arg1 []string, arg2 string) (operation.DAppStats, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDAppStats", arg0, arg1, arg2) + ret0, _ := ret[0].(operation.DAppStats) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDAppStats indicates an expected call of GetDAppStats +func (mr *MockRepositoryMockRecorder) GetDAppStats(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppStats", reflect.TypeOf((*MockRepository)(nil).GetDAppStats), arg0, arg1, arg2) +} diff --git a/internal/models/mock/protocol/mock.go b/internal/models/mock/protocol/mock.go new file mode 100644 index 000000000..ef4db2a45 --- /dev/null +++ b/internal/models/mock/protocol/mock.go @@ -0,0 +1,64 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: protocol/repository.go + +// Package mock_protocol is a generated GoMock package. 
+package mock_protocol + +import ( + protocol "github.com/baking-bad/bcdhub/internal/models/protocol" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// GetProtocol mocks base method +func (m *MockRepository) GetProtocol(arg0, arg1 string, arg2 int64) (protocol.Protocol, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetProtocol", arg0, arg1, arg2) + ret0, _ := ret[0].(protocol.Protocol) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProtocol indicates an expected call of GetProtocol +func (mr *MockRepositoryMockRecorder) GetProtocol(arg0, arg1, arg2 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProtocol", reflect.TypeOf((*MockRepository)(nil).GetProtocol), arg0, arg1, arg2) +} + +// GetSymLinks mocks base method +func (m *MockRepository) GetSymLinks(arg0 string, arg1 int64) (map[string]struct{}, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetSymLinks", arg0, arg1) + ret0, _ := ret[0].(map[string]struct{}) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetSymLinks indicates an expected call of GetSymLinks +func (mr *MockRepositoryMockRecorder) GetSymLinks(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetSymLinks", reflect.TypeOf((*MockRepository)(nil).GetSymLinks), arg0, arg1) +} diff --git a/internal/models/mock/schema/mock.go b/internal/models/mock/schema/mock.go new file mode 100644 index 000000000..510b51ad5 --- /dev/null +++ b/internal/models/mock/schema/mock.go @@ -0,0 +1,49 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: schema/repository.go + +// Package mock_schema is a generated GoMock package. +package mock_schema + +import ( + schema "github.com/baking-bad/bcdhub/internal/models/schema" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(address string) (schema.Schema, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", address) + ret0, _ := ret[0].(schema.Schema) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), address) +} diff --git a/internal/models/mock/tezosdomain/mock.go b/internal/models/mock/tezosdomain/mock.go new file mode 100644 index 000000000..c83761b96 --- 
/dev/null +++ b/internal/models/mock/tezosdomain/mock.go @@ -0,0 +1,64 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: tezosdomain/repository.go + +// Package mock_tezosdomain is a generated GoMock package. +package mock_tezosdomain + +import ( + tezosdomain "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// ListDomains mocks base method +func (m *MockRepository) ListDomains(network string, size, offset int64) (tezosdomain.DomainsResponse, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ListDomains", network, size, offset) + ret0, _ := ret[0].(tezosdomain.DomainsResponse) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ListDomains indicates an expected call of ListDomains +func (mr *MockRepositoryMockRecorder) ListDomains(network, size, offset interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ListDomains", reflect.TypeOf((*MockRepository)(nil).ListDomains), network, size, offset) +} + +// ResolveDomainByAddress mocks base method +func (m *MockRepository) ResolveDomainByAddress(network, address string) (*tezosdomain.TezosDomain, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "ResolveDomainByAddress", network, 
address) + ret0, _ := ret[0].(*tezosdomain.TezosDomain) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ResolveDomainByAddress indicates an expected call of ResolveDomainByAddress +func (mr *MockRepositoryMockRecorder) ResolveDomainByAddress(network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ResolveDomainByAddress", reflect.TypeOf((*MockRepository)(nil).ResolveDomainByAddress), network, address) +} diff --git a/internal/models/mock/tokenbalance/mock.go b/internal/models/mock/tokenbalance/mock.go new file mode 100644 index 000000000..f106ed62e --- /dev/null +++ b/internal/models/mock/tokenbalance/mock.go @@ -0,0 +1,78 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: tokenbalance/repository.go + +// Package mock_tokenbalance is a generated GoMock package. +package mock_tokenbalance + +import ( + tokenbalance "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// GetAccountBalances mocks base method +func (m *MockRepository) GetAccountBalances(arg0, arg1 string) ([]tokenbalance.TokenBalance, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAccountBalances", arg0, arg1) + ret0, _ := 
ret[0].([]tokenbalance.TokenBalance) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAccountBalances indicates an expected call of GetAccountBalances +func (mr *MockRepositoryMockRecorder) GetAccountBalances(arg0, arg1 interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAccountBalances", reflect.TypeOf((*MockRepository)(nil).GetAccountBalances), arg0, arg1) +} + +// Update mocks base method +func (m *MockRepository) Update(updates []*tokenbalance.TokenBalance) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Update", updates) + ret0, _ := ret[0].(error) + return ret0 +} + +// Update indicates an expected call of Update +func (mr *MockRepositoryMockRecorder) Update(updates interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*MockRepository)(nil).Update), updates) +} + +// GetHolders mocks base method +func (m *MockRepository) GetHolders(network, contract string, tokenID int64) ([]tokenbalance.TokenBalance, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetHolders", network, contract, tokenID) + ret0, _ := ret[0].([]tokenbalance.TokenBalance) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetHolders indicates an expected call of GetHolders +func (mr *MockRepositoryMockRecorder) GetHolders(network, contract, tokenID interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHolders", reflect.TypeOf((*MockRepository)(nil).GetHolders), network, contract, tokenID) +} diff --git a/internal/models/mock/transfer/mock.go b/internal/models/mock/transfer/mock.go new file mode 100644 index 000000000..eabae4dcd --- /dev/null +++ b/internal/models/mock/transfer/mock.go @@ -0,0 +1,110 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: transfer/repository.go + +// Package mock_transfer is a generated GoMock package. 
+package mock_transfer + +import ( + transfer "github.com/baking-bad/bcdhub/internal/models/transfer" + tzip "github.com/baking-bad/bcdhub/internal/models/tzip" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(ctx transfer.GetContext) (transfer.Pageable, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", ctx) + ret0, _ := ret[0].(transfer.Pageable) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), ctx) +} + +// GetAll mocks base method +func (m *MockRepository) GetAll(network string, level int64) ([]transfer.Transfer, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAll", network, level) + ret0, _ := ret[0].([]transfer.Transfer) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAll indicates an expected call of GetAll +func (mr *MockRepositoryMockRecorder) GetAll(network, level interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAll", 
reflect.TypeOf((*MockRepository)(nil).GetAll), network, level) +} + +// GetTokenSupply mocks base method +func (m *MockRepository) GetTokenSupply(network, address string, tokenID int64) (transfer.TokenSupply, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokenSupply", network, address, tokenID) + ret0, _ := ret[0].(transfer.TokenSupply) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTokenSupply indicates an expected call of GetTokenSupply +func (mr *MockRepositoryMockRecorder) GetTokenSupply(network, address, tokenID interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenSupply", reflect.TypeOf((*MockRepository)(nil).GetTokenSupply), network, address, tokenID) +} + +// GetToken24HoursVolume mocks base method +func (m *MockRepository) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetToken24HoursVolume", network, contract, initiators, entrypoints, tokenID) + ret0, _ := ret[0].(float64) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetToken24HoursVolume indicates an expected call of GetToken24HoursVolume +func (mr *MockRepositoryMockRecorder) GetToken24HoursVolume(network, contract, initiators, entrypoints, tokenID interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetToken24HoursVolume", reflect.TypeOf((*MockRepository)(nil).GetToken24HoursVolume), network, contract, initiators, entrypoints, tokenID) +} + +// GetTokenVolumeSeries mocks base method +func (m *MockRepository) GetTokenVolumeSeries(network, period string, contracts []string, entrypoints []tzip.DAppContract, tokenID uint) ([][]int64, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokenVolumeSeries", network, period, contracts, entrypoints, tokenID) + ret0, _ := ret[0].([][]int64) + ret1, _ := ret[1].(error) + return ret0, ret1 
+} + +// GetTokenVolumeSeries indicates an expected call of GetTokenVolumeSeries +func (mr *MockRepositoryMockRecorder) GetTokenVolumeSeries(network, period, contracts, entrypoints, tokenID interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenVolumeSeries", reflect.TypeOf((*MockRepository)(nil).GetTokenVolumeSeries), network, period, contracts, entrypoints, tokenID) +} diff --git a/internal/models/mock/tzip/mock.go b/internal/models/mock/tzip/mock.go new file mode 100644 index 000000000..fdf999789 --- /dev/null +++ b/internal/models/mock/tzip/mock.go @@ -0,0 +1,169 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: tzip/repository.go + +// Package mock_tzip is a generated GoMock package. +package mock_tzip + +import ( + tzip "github.com/baking-bad/bcdhub/internal/models/tzip" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockRepository is a mock of Repository interface +type MockRepository struct { + ctrl *gomock.Controller + recorder *MockRepositoryMockRecorder +} + +// MockRepositoryMockRecorder is the mock recorder for MockRepository +type MockRepositoryMockRecorder struct { + mock *MockRepository +} + +// NewMockRepository creates a new mock instance +func NewMockRepository(ctrl *gomock.Controller) *MockRepository { + mock := &MockRepository{ctrl: ctrl} + mock.recorder = &MockRepositoryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockRepository) EXPECT() *MockRepositoryMockRecorder { + return m.recorder +} + +// Get mocks base method +func (m *MockRepository) Get(network, address string) (tzip.TZIP, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Get", network, address) + ret0, _ := ret[0].(tzip.TZIP) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Get indicates an expected call of Get +func (mr *MockRepositoryMockRecorder) Get(network, address 
interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockRepository)(nil).Get), network, address) +} + +// GetWithEvents mocks base method +func (m *MockRepository) GetWithEvents() ([]tzip.TZIP, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetWithEvents") + ret0, _ := ret[0].([]tzip.TZIP) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetWithEvents indicates an expected call of GetWithEvents +func (mr *MockRepositoryMockRecorder) GetWithEvents() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWithEvents", reflect.TypeOf((*MockRepository)(nil).GetWithEvents)) +} + +// GetTokenMetadata mocks base method +func (m *MockRepository) GetTokenMetadata(ctx tzip.GetTokenMetadataContext) ([]tzip.TokenMetadata, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTokenMetadata", ctx) + ret0, _ := ret[0].([]tzip.TokenMetadata) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTokenMetadata indicates an expected call of GetTokenMetadata +func (mr *MockRepositoryMockRecorder) GetTokenMetadata(ctx interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTokenMetadata", reflect.TypeOf((*MockRepository)(nil).GetTokenMetadata), ctx) +} + +// GetDApps mocks base method +func (m *MockRepository) GetDApps() ([]tzip.DApp, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetDApps") + ret0, _ := ret[0].([]tzip.DApp) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDApps indicates an expected call of GetDApps +func (mr *MockRepositoryMockRecorder) GetDApps() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDApps", reflect.TypeOf((*MockRepository)(nil).GetDApps)) +} + +// GetDAppBySlug mocks base method +func (m *MockRepository) GetDAppBySlug(slug string) (*tzip.DApp, error) { + m.ctrl.T.Helper() + ret := 
m.ctrl.Call(m, "GetDAppBySlug", slug) + ret0, _ := ret[0].(*tzip.DApp) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetDAppBySlug indicates an expected call of GetDAppBySlug +func (mr *MockRepositoryMockRecorder) GetDAppBySlug(slug interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetDAppBySlug", reflect.TypeOf((*MockRepository)(nil).GetDAppBySlug), slug) +} + +// GetBySlug mocks base method +func (m *MockRepository) GetBySlug(slug string) (*tzip.TZIP, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetBySlug", slug) + ret0, _ := ret[0].(*tzip.TZIP) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetBySlug indicates an expected call of GetBySlug +func (mr *MockRepositoryMockRecorder) GetBySlug(slug interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetBySlug", reflect.TypeOf((*MockRepository)(nil).GetBySlug), slug) +} + +// GetAliases mocks base method +func (m *MockRepository) GetAliases(network string) ([]tzip.TZIP, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAliases", network) + ret0, _ := ret[0].([]tzip.TZIP) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAliases indicates an expected call of GetAliases +func (mr *MockRepositoryMockRecorder) GetAliases(network interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliases", reflect.TypeOf((*MockRepository)(nil).GetAliases), network) +} + +// GetAliasesMap mocks base method +func (m *MockRepository) GetAliasesMap(network string) (map[string]string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAliasesMap", network) + ret0, _ := ret[0].(map[string]string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAliasesMap indicates an expected call of GetAliasesMap +func (mr *MockRepositoryMockRecorder) GetAliasesMap(network interface{}) *gomock.Call { + 
mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAliasesMap", reflect.TypeOf((*MockRepository)(nil).GetAliasesMap), network) +} + +// GetAlias mocks base method +func (m *MockRepository) GetAlias(network, address string) (*tzip.TZIP, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAlias", network, address) + ret0, _ := ret[0].(*tzip.TZIP) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAlias indicates an expected call of GetAlias +func (mr *MockRepositoryMockRecorder) GetAlias(network, address interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAlias", reflect.TypeOf((*MockRepository)(nil).GetAlias), network, address) +} diff --git a/internal/models/model.go b/internal/models/model.go new file mode 100644 index 000000000..eded4409f --- /dev/null +++ b/internal/models/model.go @@ -0,0 +1,11 @@ +package models + +import "github.com/baking-bad/bcdhub/internal/mq" + +// Model - +type Model interface { + mq.IMessage + + GetID() string + GetIndex() string +} diff --git a/internal/models/operation/data.go b/internal/models/operation/data.go new file mode 100644 index 000000000..f5de6abc2 --- /dev/null +++ b/internal/models/operation/data.go @@ -0,0 +1,17 @@ +package operation + +import "time" + +// ContractStats - +type ContractStats struct { + TxCount int64 `json:"tx_count"` + LastAction time.Time `json:"last_action"` + Balance int64 `json:"balance"` +} + +// DAppStats - +type DAppStats struct { + Users int64 `json:"users"` + Calls int64 `json:"txs"` + Volume int64 `json:"volume"` +} diff --git a/internal/models/operation.go b/internal/models/operation/model.go similarity index 86% rename from internal/models/operation.go rename to internal/models/operation/model.go index eec55ddf0..7ba6f8b12 100644 --- a/internal/models/operation.go +++ b/internal/models/operation/model.go @@ -1,4 +1,4 @@ -package models +package operation import ( "time" @@ -6,6 
+6,7 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models/protocol" "github.com/sirupsen/logrus" "github.com/tidwall/gjson" ) @@ -44,7 +45,7 @@ type Operation struct { SourceAlias string `json:"source_alias,omitempty"` DestinationAlias string `json:"destination_alias,omitempty"` - Result *OperationResult `json:"result,omitempty"` + Result *Result `json:"result,omitempty"` Errors []*cerrors.Error `json:"errors,omitempty"` Burned int64 `json:"burned,omitempty"` AllocatedDestinationContractBurned int64 `json:"allocated_destination_contract_burned,omitempty"` @@ -89,12 +90,12 @@ func (o *Operation) LogFields() logrus.Fields { } // SetAllocationBurn - -func (o *Operation) SetAllocationBurn(constants Constants) { +func (o *Operation) SetAllocationBurn(constants protocol.Constants) { o.AllocatedDestinationContractBurned = 257 * constants.CostPerByte } // SetBurned - -func (o *Operation) SetBurned(constants Constants) { +func (o *Operation) SetBurned(constants protocol.Constants) { if o.Status != consts.Applied { return } @@ -137,8 +138,8 @@ func (o *Operation) IsCall() bool { return helpers.IsContract(o.Destination) } -// OperationResult - -type OperationResult struct { +// Result - +type Result struct { Status string `json:"-"` ConsumedGas int64 `json:"consumed_gas,omitempty"` StorageSize int64 `json:"storage_size,omitempty"` @@ -147,3 +148,24 @@ type OperationResult struct { Originated string `json:"-"` Errors []*cerrors.Error `json:"-"` } + +// Stats - +type Stats struct { + Count int64 + LastAction time.Time +} + +// Pageable - +type Pageable struct { + Operations []Operation `json:"operations"` + LastID string `json:"last_id"` +} + +// TokenMethodUsageStats - +type TokenMethodUsageStats struct { + Count int64 + ConsumedGas int64 +} + +// 
TokenUsageStats - +type TokenUsageStats map[string]TokenMethodUsageStats diff --git a/internal/models/operation/repository.go b/internal/models/operation/repository.go new file mode 100644 index 000000000..23d37969c --- /dev/null +++ b/internal/models/operation/repository.go @@ -0,0 +1,20 @@ +package operation + +// Repository - +type Repository interface { + GetByContract(network string, address string, size uint64, filters map[string]interface{}) (Pageable, error) + GetStats(network, address string) (Stats, error) + // Last - returns last operation. TODO: change network and address. + Last(network string, address string, indexedTime int64) (Operation, error) + + // GetOperations - get operation by `filter`. `Size` - if 0 - return all, else certain `size` operations. + // `Sort` - sort by time and content index by desc + Get(filter map[string]interface{}, size int64, sort bool) ([]Operation, error) + + GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) + GetTokensStats(network string, addresses, entrypoints []string) (map[string]TokenUsageStats, error) + + GetParticipatingContracts(network string, fromLevel int64, toLevel int64) ([]string, error) + RecalcStats(network, address string) (ContractStats, error) + GetDAppStats(string, []string, string) (DAppStats, error) +} diff --git a/internal/models/protocol.go b/internal/models/protocol/model.go similarity index 98% rename from internal/models/protocol.go rename to internal/models/protocol/model.go index de3bd8462..aca5f78e9 100644 --- a/internal/models/protocol.go +++ b/internal/models/protocol/model.go @@ -1,4 +1,4 @@ -package models +package protocol // Protocol - type Protocol struct { diff --git a/internal/models/protocol/repository.go b/internal/models/protocol/repository.go new file mode 100644 index 000000000..ed8817cc8 --- /dev/null +++ b/internal/models/protocol/repository.go @@ -0,0 +1,7 @@ +package protocol + +// Repository - +type Repository interface { + 
GetProtocol(string, string, int64) (Protocol, error) + GetSymLinks(string, int64) (map[string]struct{}, error) +} diff --git a/internal/models/metadata.go b/internal/models/schema/model.go similarity index 50% rename from internal/models/metadata.go rename to internal/models/schema/model.go index 47f565435..2b4c1b676 100644 --- a/internal/models/metadata.go +++ b/internal/models/schema/model.go @@ -1,28 +1,28 @@ -package models +package schema -// Metadata - -type Metadata struct { +// Schema - +type Schema struct { ID string `json:"-"` Parameter map[string]string `json:"parameter"` Storage map[string]string `json:"storage"` } // GetID - -func (m *Metadata) GetID() string { +func (m *Schema) GetID() string { return m.ID } // GetIndex - -func (m *Metadata) GetIndex() string { - return "metadata" +func (m *Schema) GetIndex() string { + return "schema" } // GetQueues - -func (m *Metadata) GetQueues() []string { +func (m *Schema) GetQueues() []string { return nil } // MarshalToQueue - -func (m *Metadata) MarshalToQueue() ([]byte, error) { +func (m *Schema) MarshalToQueue() ([]byte, error) { return nil, nil } diff --git a/internal/models/schema/repository.go b/internal/models/schema/repository.go new file mode 100644 index 000000000..6d8cb73aa --- /dev/null +++ b/internal/models/schema/repository.go @@ -0,0 +1,6 @@ +package schema + +// Repository - +type Repository interface { + Get(address string) (Schema, error) +} diff --git a/internal/models/tezos_domain.go b/internal/models/tezosdomain/model.go similarity index 85% rename from internal/models/tezos_domain.go rename to internal/models/tezosdomain/model.go index 992dabba5..111a94d05 100644 --- a/internal/models/tezos_domain.go +++ b/internal/models/tezosdomain/model.go @@ -1,4 +1,4 @@ -package models +package tezosdomain import ( "fmt" @@ -41,3 +41,9 @@ type ReverseTezosDomain struct { Name string `json:"name"` Expiration time.Time `json:"expiration"` } + +// DomainsResponse - +type DomainsResponse struct { + 
Domains []TezosDomain `json:"domains"` + Total int64 `json:"total"` +} diff --git a/internal/models/tezosdomain/repository.go b/internal/models/tezosdomain/repository.go new file mode 100644 index 000000000..cdae1d586 --- /dev/null +++ b/internal/models/tezosdomain/repository.go @@ -0,0 +1,7 @@ +package tezosdomain + +// Repository - +type Repository interface { + ListDomains(network string, size, offset int64) (DomainsResponse, error) + ResolveDomainByAddress(network string, address string) (*TezosDomain, error) +} diff --git a/internal/models/token_balance.go b/internal/models/tokenbalance/model.go similarity index 97% rename from internal/models/token_balance.go rename to internal/models/tokenbalance/model.go index 556368b52..296ac804b 100644 --- a/internal/models/token_balance.go +++ b/internal/models/tokenbalance/model.go @@ -1,4 +1,4 @@ -package models +package tokenbalance import ( "fmt" diff --git a/internal/models/tokenbalance/repository.go b/internal/models/tokenbalance/repository.go new file mode 100644 index 000000000..77e30acb1 --- /dev/null +++ b/internal/models/tokenbalance/repository.go @@ -0,0 +1,8 @@ +package tokenbalance + +// Repository - +type Repository interface { + GetAccountBalances(string, string) ([]TokenBalance, error) + Update(updates []*TokenBalance) error + GetHolders(network, contract string, tokenID int64) ([]TokenBalance, error) +} diff --git a/internal/models/transfer/context.go b/internal/models/transfer/context.go new file mode 100644 index 000000000..78debd0a8 --- /dev/null +++ b/internal/models/transfer/context.go @@ -0,0 +1,18 @@ +package transfer + +// GetContext - +type GetContext struct { + Contracts []string + Network string + Address string + Hash string + Start uint + End uint + SortOrder string + LastID string + Size int64 + Offset int64 + TokenID int64 + Nonce *int64 + Counter *int64 +} diff --git a/internal/models/transfer.go b/internal/models/transfer/model.go similarity index 82% rename from 
internal/models/transfer.go rename to internal/models/transfer/model.go index 0697e498c..974f92828 100644 --- a/internal/models/transfer.go +++ b/internal/models/transfer/model.go @@ -1,10 +1,12 @@ -package models +package transfer import ( "fmt" "time" "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" "github.com/sirupsen/logrus" ) @@ -64,7 +66,7 @@ func (t *Transfer) LogFields() logrus.Fields { } // EmptyTransfer - -func EmptyTransfer(o Operation) *Transfer { +func EmptyTransfer(o operation.Operation) *Transfer { return &Transfer{ ID: helpers.GenerateID(), IndexedTime: o.IndexedTime, @@ -97,8 +99,8 @@ func (t *Transfer) GetToTokenBalanceID() string { } // MakeTokenBalanceUpdate - -func (t *Transfer) MakeTokenBalanceUpdate(from, rollback bool) *TokenBalance { - tb := &TokenBalance{ +func (t *Transfer) MakeTokenBalanceUpdate(from, rollback bool) *tokenbalance.TokenBalance { + tb := &tokenbalance.TokenBalance{ Network: t.Network, Contract: t.Contract, TokenID: t.TokenID, @@ -119,3 +121,22 @@ func (t *Transfer) MakeTokenBalanceUpdate(from, rollback bool) *TokenBalance { } return tb } + +// TokenBalance - +type TokenBalance struct { + Address string + TokenID int64 +} + +// TokenSupply - +type TokenSupply struct { + Supply float64 `json:"supply"` + Transfered float64 `json:"transfered"` +} + +// Pageable - +type Pageable struct { + Transfers []Transfer `json:"transfers"` + Total int64 `json:"total"` + LastID string `json:"last_id"` +} diff --git a/internal/models/transfer/repository.go b/internal/models/transfer/repository.go new file mode 100644 index 000000000..45d3e5488 --- /dev/null +++ b/internal/models/transfer/repository.go @@ -0,0 +1,12 @@ +package transfer + +import "github.com/baking-bad/bcdhub/internal/models/tzip" + +// Repository - +type Repository interface { + Get(ctx GetContext) (Pageable, error) 
+ GetAll(network string, level int64) ([]Transfer, error) + GetTokenSupply(network, address string, tokenID int64) (result TokenSupply, err error) + GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) + GetTokenVolumeSeries(network, period string, contracts []string, entrypoints []tzip.DAppContract, tokenID uint) ([][]int64, error) +} diff --git a/internal/models/tzip/context.go b/internal/models/tzip/context.go new file mode 100644 index 000000000..e93542b65 --- /dev/null +++ b/internal/models/tzip/context.go @@ -0,0 +1,28 @@ +package tzip + +// GetTokenMetadataContext - +type GetTokenMetadataContext struct { + Contract string + Network string + TokenID int64 + Level Comparator +} + +// Comparator - +type Comparator struct { + Comparator string + Value int64 +} + +// NewRange - +func NewRange(cmp string, value int64) Comparator { + return Comparator{ + Comparator: cmp, + Value: value, + } +} + +// IsFilled - +func (rng Comparator) IsFilled() bool { + return rng.Comparator != "" && rng.Value > 0 +} diff --git a/internal/models/tzip.go b/internal/models/tzip/model.go similarity index 51% rename from internal/models/tzip.go rename to internal/models/tzip/model.go index 9e2cb7776..7ada7e06d 100644 --- a/internal/models/tzip.go +++ b/internal/models/tzip/model.go @@ -1,26 +1,26 @@ -package models +package tzip import ( "fmt" "time" - "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" "github.com/sirupsen/logrus" ) // TZIP - type TZIP struct { - Level int64 `json:"level,omitempty"` - Timestamp time.Time `json:"timestamp,omitempty"` - Address string `json:"address"` - Network string `json:"network"` - Slug string `json:"slug,omitempty"` - Domain *ReverseTezosDomain `json:"domain,omitempty"` + Level int64 `json:"level,omitempty"` + Timestamp time.Time `json:"timestamp,omitempty"` + Address string `json:"address"` + Network 
string `json:"network"` + Slug string `json:"slug,omitempty"` + Domain *tezosdomain.ReverseTezosDomain `json:"domain,omitempty"` - tzip.TZIP12 - tzip.TZIP16 - tzip.TZIP20 - tzip.DAppsTZIP + TZIP12 + TZIP16 + TZIP20 + DAppsTZIP } // HasToken - @@ -61,3 +61,16 @@ func (t *TZIP) LogFields() logrus.Fields { "level": t.Level, } } + +// TokenMetadata - +type TokenMetadata struct { + Address string + Network string + Level int64 + Symbol string + Name string + TokenID int64 + Decimals *int64 + RegistryAddress string + Extras map[string]interface{} +} diff --git a/internal/models/tzip/repository.go b/internal/models/tzip/repository.go new file mode 100644 index 000000000..e1a324f61 --- /dev/null +++ b/internal/models/tzip/repository.go @@ -0,0 +1,14 @@ +package tzip + +// Repository - +type Repository interface { + Get(network, address string) (TZIP, error) + GetWithEvents() ([]TZIP, error) + GetTokenMetadata(ctx GetTokenMetadataContext) ([]TokenMetadata, error) + GetDApps() ([]DApp, error) + GetDAppBySlug(slug string) (*DApp, error) + GetBySlug(slug string) (*TZIP, error) + GetAliases(network string) ([]TZIP, error) + GetAliasesMap(network string) (map[string]string, error) + GetAlias(network, address string) (*TZIP, error) +} diff --git a/internal/models/tzip/tzip12.go b/internal/models/tzip/tzip12.go index c914d5f6c..c754f3d7b 100644 --- a/internal/models/tzip/tzip12.go +++ b/internal/models/tzip/tzip12.go @@ -11,12 +11,12 @@ type TZIP12 struct { // TokenMetadataType - type TokenMetadataType struct { - Static []TokenMetadata `json:"static,omitempty"` + Static []TokenMetadataEntity `json:"static,omitempty"` // Dynamic []TokenMetadata `json:"dynamic,omitempty"` } -// TokenMetadata - -type TokenMetadata struct { +// TokenMetadataEntity - +type TokenMetadataEntity struct { RegistryAddress string `json:"registry_address"` TokenID int64 `json:"token_id"` Symbol string `json:"symbol"` @@ -26,7 +26,7 @@ type TokenMetadata struct { } // Compare - full compare objects -func (tm 
TokenMetadata) Compare(other TokenMetadata) bool { +func (tm TokenMetadataEntity) Compare(other TokenMetadataEntity) bool { return tm.RegistryAddress == other.RegistryAddress && tm.Symbol == other.Symbol && tm.Name == other.Name && diff --git a/internal/parsers/contract/contract.go b/internal/parsers/contract/contract.go index f7ee98e84..cc177040f 100644 --- a/internal/parsers/contract/contract.go +++ b/internal/parsers/contract/contract.go @@ -5,10 +5,11 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/pkg/errors" ) @@ -51,13 +52,13 @@ func WithShareDirContractParser(dir string) ParserOption { } // Parse - -func (p *Parser) Parse(operation models.Operation) ([]elastic.Model, error) { +func (p *Parser) Parse(operation operation.Operation) ([]models.Model, error) { if !helpers.StringInArray(operation.Kind, []string{ consts.Origination, consts.OriginationNew, consts.Migration, }) { return nil, errors.Errorf("Invalid operation kind in computeContractMetrics: %s", operation.Kind) } - contract := models.Contract{ + contract := contract.Contract{ Network: operation.Network, Level: operation.Level, Timestamp: operation.Timestamp, @@ -76,16 +77,16 @@ func (p *Parser) Parse(operation models.Operation) ([]elastic.Model, error) { return nil, err } - metadata, err := NewMetadataParser(protoSymLink).Parse(operation.Script, contract.Address) + schema, err := NewSchemaParser(protoSymLink).Parse(operation.Script, contract.Address) if err != 
nil { return nil, err } - contractMetadata, err := meta.GetContractMetadataFromModel(metadata) + contractMetadata, err := meta.GetContractMetadataFromModel(schema) if err != nil { return nil, err } - p.metadata[metadata.ID] = contractMetadata + p.metadata[schema.ID] = contractMetadata if contractMetadata.IsUpgradable(protoSymLink) { contract.Tags = append(contract.Tags, consts.UpgradableTag) @@ -95,7 +96,7 @@ func (p *Parser) Parse(operation models.Operation) ([]elastic.Model, error) { return nil, err } - return []elastic.Model{&metadata, &contract}, nil + return []models.Model{&schema, &contract}, nil } // GetContractMetadata - @@ -107,7 +108,7 @@ func (p *Parser) GetContractMetadata(address string) (*meta.ContractMetadata, er return metadata, nil } -func (p *Parser) computeMetrics(operation models.Operation, protoSymLink string, contract *models.Contract) error { +func (p *Parser) computeMetrics(operation operation.Operation, protoSymLink string, contract *contract.Contract) error { script, err := contractparser.New(operation.Script) if err != nil { return errors.Errorf("contractparser.New: %v", err) @@ -139,7 +140,7 @@ func (p *Parser) computeMetrics(operation models.Operation, protoSymLink string, return nil } -func setEntrypoints(metadata *meta.ContractMetadata, symLink string, contract *models.Contract) error { +func setEntrypoints(metadata *meta.ContractMetadata, symLink string, contract *contract.Contract) error { entrypoints, err := metadata.Parameter[symLink].GetEntrypoints() if err != nil { return err diff --git a/internal/parsers/contract/metadata.go b/internal/parsers/contract/metadata.go deleted file mode 100644 index 5626dc30a..000000000 --- a/internal/parsers/contract/metadata.go +++ /dev/null @@ -1,79 +0,0 @@ -package contract - -import ( - "encoding/json" - "fmt" - - "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/contractparser/meta" - 
"github.com/baking-bad/bcdhub/internal/models" - "github.com/pkg/errors" - "github.com/tidwall/gjson" -) - -// MetadataParser - -type MetadataParser struct { - symLink string -} - -// NewMetadataParser - -func NewMetadataParser(symLink string) MetadataParser { - return MetadataParser{symLink} -} - -// Parse - -func (p MetadataParser) Parse(script gjson.Result, address string) (m models.Metadata, err error) { - m.ID = address - m.Storage, err = p.getMetadata(script, consts.STORAGE, address) - if err != nil { - return - } - m.Parameter, err = p.getMetadata(script, consts.PARAMETER, address) - if err != nil { - return - } - return -} - -func (p MetadataParser) getMetadata(script gjson.Result, tag, address string) (map[string]string, error) { - res := make(map[string]string) - metadata, err := p.createMetadataSection(script, tag, address) - if err != nil { - return nil, err - } - res[p.symLink] = metadata - return res, nil -} - -// UpdateMetadata - -func (p MetadataParser) UpdateMetadata(script gjson.Result, address string, metadata *models.Metadata) error { - storage, err := p.createMetadataSection(script, consts.STORAGE, address) - if err != nil { - return err - } - parameter, err := p.createMetadataSection(script, consts.PARAMETER, address) - if err != nil { - return err - } - metadata.Storage[p.symLink] = storage - metadata.Parameter[p.symLink] = parameter - - return nil -} - -func (p MetadataParser) createMetadataSection(script gjson.Result, tag, address string) (string, error) { - args := script.Get(fmt.Sprintf("code.#(prim==\"%s\").args", tag)) - if args.Exists() { - metadata, err := meta.ParseMetadata(args) - if err != nil { - return "", nil - } - - b, err := json.Marshal(metadata) - if err != nil { - return "", err - } - return string(b), nil - } - return "", errors.Errorf("[createMetadata] Unknown tag '%s' contract %s", tag, address) -} diff --git a/internal/parsers/contract/schema.go b/internal/parsers/contract/schema.go new file mode 
100644 index 000000000..944b72c23 --- /dev/null +++ b/internal/parsers/contract/schema.go @@ -0,0 +1,79 @@ +package contract + +import ( + "encoding/json" + "fmt" + + "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/contractparser/meta" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/pkg/errors" + "github.com/tidwall/gjson" +) + +// SchemaParser - +type SchemaParser struct { + symLink string +} + +// NewSchemaParser - +func NewSchemaParser(symLink string) SchemaParser { + return SchemaParser{symLink} +} + +// Parse - +func (p SchemaParser) Parse(script gjson.Result, address string) (s schema.Schema, err error) { + s.ID = address + s.Storage, err = p.getSchema(script, consts.STORAGE, address) + if err != nil { + return + } + s.Parameter, err = p.getSchema(script, consts.PARAMETER, address) + if err != nil { + return + } + return +} + +func (p SchemaParser) getSchema(script gjson.Result, tag, address string) (map[string]string, error) { + res := make(map[string]string) + schema, err := p.createSchemaSection(script, tag, address) + if err != nil { + return nil, err + } + res[p.symLink] = schema + return res, nil +} + +// Update - +func (p SchemaParser) Update(script gjson.Result, address string, s *schema.Schema) error { + storage, err := p.createSchemaSection(script, consts.STORAGE, address) + if err != nil { + return err + } + parameter, err := p.createSchemaSection(script, consts.PARAMETER, address) + if err != nil { + return err + } + s.Storage[p.symLink] = storage + s.Parameter[p.symLink] = parameter + + return nil +} + +func (p SchemaParser) createSchemaSection(script gjson.Result, tag, address string) (string, error) { + args := script.Get(fmt.Sprintf("code.#(prim==\"%s\").args", tag)) + if args.Exists() { + schema, err := meta.ParseMetadata(args) + if err != nil { + return "", nil + } + + b, err := json.Marshal(schema) + if err != nil { + return 
"", err + } + return string(b), nil + } + return "", errors.Errorf("[createSchemaSection] Unknown tag '%s' contract %s", tag, address) +} diff --git a/internal/parsers/migration.go b/internal/parsers/migration.go index 1dd895bc1..5096e7b52 100644 --- a/internal/parsers/migration.go +++ b/internal/parsers/migration.go @@ -8,9 +8,13 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + modelsContract "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/baking-bad/bcdhub/internal/parsers/contract" "github.com/pkg/errors" "github.com/tidwall/gjson" @@ -18,32 +22,34 @@ import ( // MigrationParser - type MigrationParser struct { - es elastic.IElastic + storage models.GeneralRepository + bmdRepo bigmapdiff.Repository filesDirectory string } // NewMigrationParser - -func NewMigrationParser(es elastic.IElastic, filesDirectory string) *MigrationParser { +func NewMigrationParser(storage models.GeneralRepository, bmdRepo bigmapdiff.Repository, filesDirectory string) *MigrationParser { return &MigrationParser{ - es: es, + storage: storage, + bmdRepo: bmdRepo, filesDirectory: filesDirectory, } } // Parse - -func (p *MigrationParser) Parse(script gjson.Result, old models.Contract, previous, next models.Protocol, timestamp time.Time) ([]elastic.Model, []elastic.Model, error) { - metadata := models.Metadata{ID: old.Address} - if err := 
p.es.GetByID(&metadata); err != nil { +func (p *MigrationParser) Parse(script gjson.Result, old modelsContract.Contract, previous, next protocol.Protocol, timestamp time.Time) ([]models.Model, []models.Model, error) { + s := schema.Schema{ID: old.Address} + if err := p.storage.GetByID(&s); err != nil { return nil, nil, err } - if err := contract.NewMetadataParser(next.SymLink).UpdateMetadata(script, old.Address, &metadata); err != nil { + if err := contract.NewSchemaParser(next.SymLink).Update(script, old.Address, &s); err != nil { return nil, nil, err } - var updates []elastic.Model + var updates []models.Model if previous.SymLink == "alpha" { - newUpdates, err := p.getUpdates(script, old, next, metadata) + newUpdates, err := p.getUpdates(script, old, next, s) if err != nil { return nil, nil, err } @@ -55,10 +61,10 @@ func (p *MigrationParser) Parse(script gjson.Result, old models.Contract, previo return nil, nil, err } if newHash == old.Hash { - return []elastic.Model{&metadata}, updates, nil + return []models.Model{&s}, updates, nil } - migration := models.Migration{ + migration := migration.Migration{ ID: helpers.GenerateID(), IndexedTime: time.Now().UnixNano() / 1000, @@ -71,11 +77,11 @@ func (p *MigrationParser) Parse(script gjson.Result, old models.Contract, previo Kind: consts.MigrationUpdate, } - return []elastic.Model{&metadata, &migration}, updates, nil + return []models.Model{&s, &migration}, updates, nil } -func (p *MigrationParser) getUpdates(script gjson.Result, contract models.Contract, protocol models.Protocol, metadata models.Metadata) ([]elastic.Model, error) { - stringMetadata, ok := metadata.Storage[protocol.SymLink] +func (p *MigrationParser) getUpdates(script gjson.Result, contract modelsContract.Contract, protocol protocol.Protocol, s schema.Schema) ([]models.Model, error) { + stringMetadata, ok := s.Storage[protocol.SymLink] if !ok { return nil, errors.Errorf("[MigrationParser.getUpdates] Unknown metadata sym link: %s", protocol.SymLink) } 
@@ -100,7 +106,7 @@ func (p *MigrationParser) getUpdates(script gjson.Result, contract models.Contra newPtr = p } - bmd, err := p.es.GetBigMapsForAddress(contract.Network, contract.Address) + bmd, err := p.bmdRepo.GetByAddress(contract.Network, contract.Address) if err != nil { return nil, err } @@ -108,7 +114,7 @@ func (p *MigrationParser) getUpdates(script gjson.Result, contract models.Contra return nil, nil } - updates := make([]elastic.Model, len(bmd)) + updates := make([]models.Model, len(bmd)) for i := range bmd { bmd[i].BinPath = newPath bmd[i].Ptr = newPtr diff --git a/internal/parsers/operations/balance_update.go b/internal/parsers/operations/balance_update.go index 6e73e68c3..1cf41cfbb 100644 --- a/internal/parsers/operations/balance_update.go +++ b/internal/parsers/operations/balance_update.go @@ -4,36 +4,37 @@ import ( "fmt" "github.com/baking-bad/bcdhub/internal/helpers" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) // BalanceUpdate - type BalanceUpdate struct { - operation models.Operation + operation operation.Operation root string } // NewBalanceUpdate - -func NewBalanceUpdate(root string, operation models.Operation) BalanceUpdate { +func NewBalanceUpdate(root string, operation operation.Operation) BalanceUpdate { return BalanceUpdate{operation, root} } // Parse - -func (b BalanceUpdate) Parse(data gjson.Result) []*models.BalanceUpdate { +func (b BalanceUpdate) Parse(data gjson.Result) []*balanceupdate.BalanceUpdate { if b.root != "" { b.root = fmt.Sprintf("%s.", b.root) } filter := fmt.Sprintf(`%sbalance_updates.#(kind="contract")#`, b.root) contracts := data.Get(filter).Array() - bu := make([]*models.BalanceUpdate, 0) + bu := make([]*balanceupdate.BalanceUpdate, 0) for i := range contracts { address := contracts[i].Get("contract").String() if 
!helpers.IsContract(address) { continue } - bu = append(bu, &models.BalanceUpdate{ + bu = append(bu, &balanceupdate.BalanceUpdate{ ID: helpers.GenerateID(), Change: contracts[i].Get("change").Int(), Network: b.operation.Network, diff --git a/internal/parsers/operations/balance_update_test.go b/internal/parsers/operations/balance_update_test.go index d97adeee3..441e22ac4 100644 --- a/internal/parsers/operations/balance_update_test.go +++ b/internal/parsers/operations/balance_update_test.go @@ -3,11 +3,12 @@ package operations import ( "testing" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/operation" ) func TestBalanceUpdate_Parse(t *testing.T) { - operation := models.Operation{ + operation := operation.Operation{ Network: "test", Level: 100, Hash: "hash", @@ -18,13 +19,13 @@ func TestBalanceUpdate_Parse(t *testing.T) { name string root string fileName string - want []*models.BalanceUpdate + want []*balanceupdate.BalanceUpdate }{ { name: "test 1", root: "", fileName: "./data/balance_update/test1.json", - want: []*models.BalanceUpdate{ + want: []*balanceupdate.BalanceUpdate{ { Contract: "KT1A946hDgLGfFudWU7hzfnTdZK8TZyLRHeT", Change: -2655, @@ -40,7 +41,7 @@ func TestBalanceUpdate_Parse(t *testing.T) { name: "test 2", root: "operation_result", fileName: "./data/balance_update/test2.json", - want: []*models.BalanceUpdate{ + want: []*balanceupdate.BalanceUpdate{ { Contract: "KT1A946hDgLGfFudWU7hzfnTdZK8TZyLRHeT", Change: -29075891, diff --git a/internal/parsers/operations/common.go b/internal/parsers/operations/common.go index 4405ccf7c..003632d95 100644 --- a/internal/parsers/operations/common.go +++ b/internal/parsers/operations/common.go @@ -1,17 +1,18 @@ package operations import ( - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + 
"github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) // Metadata - type Metadata struct { - Result models.OperationResult - BalanceUpdates []*models.BalanceUpdate + Result operation.Result + BalanceUpdates []*balanceupdate.BalanceUpdate } -func parseMetadata(item gjson.Result, operation models.Operation) *Metadata { +func parseMetadata(item gjson.Result, operation operation.Operation) *Metadata { path := "metadata.operation_result" if !item.Get(path).Exists() { path = "result" diff --git a/internal/parsers/operations/common_test.go b/internal/parsers/operations/common_test.go index decb19dea..97126a43e 100644 --- a/internal/parsers/operations/common_test.go +++ b/internal/parsers/operations/common_test.go @@ -4,11 +4,12 @@ import ( "reflect" "testing" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/operation" ) func Test_parseMetadata(t *testing.T) { - operation := models.Operation{ + op := operation.Operation{ Network: "test", Level: 100, Hash: "hash", @@ -25,7 +26,7 @@ func Test_parseMetadata(t *testing.T) { name: "test 1", fileName: "./data/operation_metadata/test1.json", want: &Metadata{ - BalanceUpdates: []*models.BalanceUpdate{ + BalanceUpdates: []*balanceupdate.BalanceUpdate{ { Contract: "KT1PDAELuX7CypUHinUgFgGFskKs7ytwh5Vw", Change: 6410, @@ -36,7 +37,7 @@ func Test_parseMetadata(t *testing.T) { Nonce: nil, }, }, - Result: models.OperationResult{ + Result: operation.Result{ Status: "applied", ConsumedGas: 10207, }, @@ -45,8 +46,8 @@ func Test_parseMetadata(t *testing.T) { name: "test 2", fileName: "./data/operation_metadata/test2.json", want: &Metadata{ - BalanceUpdates: []*models.BalanceUpdate{}, - Result: models.OperationResult{ + BalanceUpdates: []*balanceupdate.BalanceUpdate{}, + Result: operation.Result{ Status: "backtracked", ConsumedGas: 96591, StorageSize: 196, @@ 
-61,7 +62,7 @@ func Test_parseMetadata(t *testing.T) { t.Errorf(`readJSONFile("%s") = error %v`, tt.fileName, err) return } - got := parseMetadata(data, operation) + got := parseMetadata(data, op) if !reflect.DeepEqual(got.Result, tt.want.Result) { t.Errorf("parseMetadata() Result = %v, want %v", got.Result, tt.want.Result) return diff --git a/internal/parsers/operations/migration.go b/internal/parsers/operations/migration.go index d7267ec45..ea396b336 100644 --- a/internal/parsers/operations/migration.go +++ b/internal/parsers/operations/migration.go @@ -7,22 +7,23 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) // Migration - type Migration struct { - operation *models.Operation + operation *operation.Operation } // NewMigration - -func NewMigration(operation *models.Operation) Migration { +func NewMigration(operation *operation.Operation) Migration { return Migration{operation} } // Parse - -func (m Migration) Parse(data gjson.Result) *models.Migration { +func (m Migration) Parse(data gjson.Result) *migration.Migration { path := "metadata.operation_result.big_map_diff" if !data.Get(path).Exists() { path = "result.big_map_diff" @@ -38,7 +39,7 @@ func (m Migration) Parse(data gjson.Result) *models.Migration { value := bmd.Get("value") if contractparser.HasLambda(value) { logger.Info("[%s] Migration detected: %s", m.operation.Network, m.operation.Destination) - return &models.Migration{ + return &migration.Migration{ ID: helpers.GenerateID(), IndexedTime: time.Now().UnixNano() / 1000, diff --git a/internal/parsers/operations/migration_test.go b/internal/parsers/operations/migration_test.go index 
def4a2b60..d1dac72e8 100644 --- a/internal/parsers/operations/migration_test.go +++ b/internal/parsers/operations/migration_test.go @@ -5,7 +5,8 @@ import ( "time" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" ) func TestMigration_Parse(t *testing.T) { @@ -13,13 +14,13 @@ func TestMigration_Parse(t *testing.T) { tests := []struct { name string - operation *models.Operation + operation *operation.Operation fileName string - want *models.Migration + want *migration.Migration }{ { name: "test 1", - operation: &models.Operation{ + operation: &operation.Operation{ Network: "mainnet", Level: 123, Protocol: "protocol", @@ -31,7 +32,7 @@ func TestMigration_Parse(t *testing.T) { want: nil, }, { name: "test 2", - operation: &models.Operation{ + operation: &operation.Operation{ Network: "mainnet", Level: 123, Protocol: "protocol", @@ -40,7 +41,7 @@ func TestMigration_Parse(t *testing.T) { Hash: "hash", }, fileName: "./data/migration/test2.json", - want: &models.Migration{ + want: &migration.Migration{ Network: "mainnet", Level: 123, Protocol: "protocol", diff --git a/internal/parsers/operations/operation_group.go b/internal/parsers/operations/operation_group.go index 527adda4f..d5a6d47b8 100644 --- a/internal/parsers/operations/operation_group.go +++ b/internal/parsers/operations/operation_group.go @@ -4,8 +4,8 @@ import ( "github.com/pkg/errors" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" "github.com/tidwall/gjson" ) @@ -20,8 +20,8 @@ func NewGroup(params *ParseParams) Group { } // Parse - -func (opg Group) Parse(data gjson.Result) ([]elastic.Model, 
error) { - parsedModels := make([]elastic.Model, 0) +func (opg Group) Parse(data gjson.Result) ([]models.Model, error) { + parsedModels := make([]models.Model, 0) opg.hash = data.Get("hash").String() helpers.SetTagSentry("hash", opg.hash) @@ -52,12 +52,12 @@ func NewContent(params *ParseParams) Content { } // Parse - -func (content Content) Parse(data gjson.Result) ([]elastic.Model, error) { +func (content Content) Parse(data gjson.Result) ([]models.Model, error) { if !content.needParse(data) { return nil, nil } - models := make([]elastic.Model, 0) + models := make([]models.Model, 0) kind := data.Get("kind").String() switch kind { @@ -96,7 +96,7 @@ func (content Content) needParse(item gjson.Result) bool { return originationCondition || transactionCondition } -func (content Content) parseInternal(data gjson.Result) ([]elastic.Model, error) { +func (content Content) parseInternal(data gjson.Result) ([]models.Model, error) { path := "metadata.internal_operation_results" if !data.Get(path).Exists() { path = "metadata.internal_operations" @@ -105,7 +105,7 @@ func (content Content) parseInternal(data gjson.Result) ([]elastic.Model, error) } } - internalModels := make([]elastic.Model, 0) + internalModels := make([]models.Model, 0) for _, internal := range data.Get(path).Array() { parsedModels, err := content.Parse(internal) if err != nil { diff --git a/internal/parsers/operations/operation_group_test.go b/internal/parsers/operations/operation_group_test.go index 3ca5e1c35..37b56664c 100644 --- a/internal/parsers/operations/operation_group_test.go +++ b/internal/parsers/operations/operation_group_test.go @@ -5,9 +5,23 @@ import ( "testing" "time" - "github.com/baking-bad/bcdhub/internal/elastic" - mock_elastic "github.com/baking-bad/bcdhub/internal/elastic/mock" + "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" 
+ "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + modelContract "github.com/baking-bad/bcdhub/internal/models/contract" + mock_general "github.com/baking-bad/bcdhub/internal/models/mock" + mock_bmd "github.com/baking-bad/bcdhub/internal/models/mock/bigmapdiff" + mock_block "github.com/baking-bad/bcdhub/internal/models/mock/block" + mock_schema "github.com/baking-bad/bcdhub/internal/models/mock/schema" + mock_token_balance "github.com/baking-bad/bcdhub/internal/models/mock/tokenbalance" + mock_tzip "github.com/baking-bad/bcdhub/internal/models/mock/tzip" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + modelSchema "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/contract" "github.com/golang/mock/gomock" @@ -17,9 +31,29 @@ import ( func TestGroup_Parse(t *testing.T) { timestamp := time.Now() - ctrlES := gomock.NewController(t) - defer ctrlES.Finish() - es := mock_elastic.NewMockIElastic(ctrlES) + ctrlStorage := gomock.NewController(t) + defer ctrlStorage.Finish() + generalRepo := mock_general.NewMockGeneralRepository(ctrlStorage) + + ctrlBmdRepo := gomock.NewController(t) + defer ctrlBmdRepo.Finish() + bmdRepo := mock_bmd.NewMockRepository(ctrlBmdRepo) + + ctrlBlockRepo := gomock.NewController(t) + defer ctrlBlockRepo.Finish() + blockRepo := mock_block.NewMockRepository(ctrlBlockRepo) + + ctrlTzipRepo := gomock.NewController(t) + defer ctrlTzipRepo.Finish() + tzipRepo := mock_tzip.NewMockRepository(ctrlTzipRepo) + + ctrlSchemaRepo := gomock.NewController(t) + defer ctrlSchemaRepo.Finish() + 
schemaRepo := mock_schema.NewMockRepository(ctrlSchemaRepo) + + ctrlTokenBalanceRepo := gomock.NewController(t) + defer ctrlTokenBalanceRepo.Finish() + tbRepo := mock_token_balance.NewMockRepository(ctrlTokenBalanceRepo) ctrlRPC := gomock.NewController(t) defer ctrlRPC.Finish() @@ -34,43 +68,98 @@ func TestGroup_Parse(t *testing.T) { Save(gomock.Any(), gomock.Any()). Return(nil).AnyTimes() - es. + tzipRepo. EXPECT(). - GetTZIPWithEvents(). - Return(make([]models.TZIP, 0), nil). + GetWithEvents(). + Return(make([]tzip.TZIP, 0), nil). AnyTimes() - es. + + tbRepo. EXPECT(). - UpdateTokenBalances(gomock.Any()). + Update(gomock.Any()). Return(nil). AnyTimes() + generalRepo. + EXPECT(). + GetByID(gomock.AssignableToTypeOf(&modelContract.Contract{})). + DoAndReturn(readTestContractModel). + AnyTimes() + + bmdRepo. + EXPECT(). + GetByPtr( + gomock.Eq("KT1HBy1L43tiLe5MVJZ5RoxGy53Kx8kMgyoU"), + gomock.Eq("carthagenet"), + gomock.Eq(int64(2416))). + Return([]bigmapdiff.BigMapDiff{ + { + Ptr: 2416, + BinPath: "0/0/0/1/0", + Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, + KeyHash: "exprtfKNhZ1G8vMscchFjt1G1qww2P93VTLHMuhyThVYygZLdnRev2", + KeyStrings: []string{"tz1XrCvviH8CqoHMSKpKuznLArEa1yR9U7ep"}, + Value: `{"prim":"Pair","args":[[],{"int":"6000"}]}`, + ValueStrings: []string{}, + Level: 386026, + Address: "KT1HBy1L43tiLe5MVJZ5RoxGy53Kx8kMgyoU", + Network: "carthagenet", + Timestamp: timestamp, + Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", + }, + }, nil). + AnyTimes() + + bmdRepo. + EXPECT(). + GetByPtr( + gomock.Eq("KT1Dc6A6jTY9sG4UvqKciqbJNAGtXqb4n7vZ"), + gomock.Eq("carthagenet"), + gomock.Eq(int64(2417))). 
+ Return([]bigmapdiff.BigMapDiff{ + { + Ptr: 2417, + BinPath: "0/0/0/1/0", + Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, + KeyHash: "exprtfKNhZ1G8vMscchFjt1G1qww2P93VTLHMuhyThVYygZLdnRev2", + KeyStrings: []string{"tz1XrCvviH8CqoHMSKpKuznLArEa1yR9U7ep"}, + Value: "", + ValueStrings: []string{}, + Level: 386026, + Address: "KT1Dc6A6jTY9sG4UvqKciqbJNAGtXqb4n7vZ", + Network: "carthagenet", + Timestamp: timestamp, + Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", + }, + }, nil). + AnyTimes() + tests := []struct { name string ParseParams *ParseParams filename string address string level int64 - want []elastic.Model + want []models.Model wantErr bool }{ { name: "opToHHcqFhRTQWJv2oTGAtywucj9KM1nDnk5eHsEETYJyvJLsa5", - ParseParams: NewParseParams(rpc, es), + ParseParams: NewParseParams(rpc, generalRepo, bmdRepo, blockRepo, tzipRepo, schemaRepo, tbRepo), filename: "./data/rpc/opg/opToHHcqFhRTQWJv2oTGAtywucj9KM1nDnk5eHsEETYJyvJLsa5.json", - want: []elastic.Model{}, + want: []models.Model{}, }, { name: "opPUPCpQu6pP38z9TkgFfwLiqVBFGSWQCH8Z2PUL3jrpxqJH5gt", ParseParams: NewParseParams( - rpc, es, + rpc, generalRepo, bmdRepo, blockRepo, tzipRepo, schemaRepo, tbRepo, WithHead(noderpc.Header{ Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Level: 1151495, ChainID: "test", }), - WithNetwork("mainnet"), - WithConstants(models.Constants{ + WithNetwork(consts.Mainnet), + WithConstants(protocol.Constants{ CostPerByte: 1000, HardGasLimitPerOperation: 1040000, HardStorageLimitPerOperation: 60000, @@ -80,15 +169,15 @@ func TestGroup_Parse(t *testing.T) { address: "KT1Ap287P1NzsnToSJdA4aqSNjPomRaHBZSr", level: 1151495, filename: "./data/rpc/opg/opPUPCpQu6pP38z9TkgFfwLiqVBFGSWQCH8Z2PUL3jrpxqJH5gt.json", - want: []elastic.Model{ - &models.Operation{ + want: []models.Model{ + &operation.Operation{ ContentIndex: 0, - Network: "mainnet", + Network: consts.Mainnet, Protocol: 
"PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Hash: "opPUPCpQu6pP38z9TkgFfwLiqVBFGSWQCH8Z2PUL3jrpxqJH5gt", Internal: false, Nonce: nil, - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 1151495, Kind: "transaction", @@ -106,7 +195,7 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: []string{}, Tags: []string{}, }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 32, BinPath: "0/0", Key: map[string]interface{}{"bytes": "80729e85e284dff3a30bb24a58b37ccdf474bbbe7794aad439ba034f48d66af3"}, @@ -116,19 +205,19 @@ func TestGroup_Parse(t *testing.T) { OperationID: "f79b897e69e64aa9b6d7f0199fed08f9", Level: 1151495, Address: "KT1Ap287P1NzsnToSJdA4aqSNjPomRaHBZSr", - Network: "mainnet", + Network: consts.Mainnet, IndexedTime: 1602764979843131, Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.Operation{ + &operation.Operation{ ContentIndex: 0, - Network: "mainnet", + Network: consts.Mainnet, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Hash: "opPUPCpQu6pP38z9TkgFfwLiqVBFGSWQCH8Z2PUL3jrpxqJH5gt", Internal: true, Nonce: setInt64(0), - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 1151495, Kind: "transaction", @@ -144,7 +233,7 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: nil, Tags: []string{"fa12"}, }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 31, BinPath: "0/0", Key: map[string]interface{}{"bytes": "05010000000b746f74616c537570706c79"}, @@ -155,12 +244,12 @@ func TestGroup_Parse(t *testing.T) { OperationID: "55baa67b04044639932a1bef22a2d0bc", Level: 1151495, Address: "KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn", - Network: "mainnet", + Network: consts.Mainnet, IndexedTime: 1602764979845825, Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 31, BinPath: "0/0", Key: map[string]interface{}{"bytes": 
"05070701000000066c65646765720a000000160000c2473c617946ce7b9f6843f193401203851cb2ec"}, @@ -170,12 +259,12 @@ func TestGroup_Parse(t *testing.T) { ValueStrings: nil, OperationID: "55baa67b04044639932a1bef22a2d0bc", Level: 1151495, Address: "KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn", - Network: "mainnet", + Network: consts.Mainnet, IndexedTime: 1602764979845832, Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 31, BinPath: "0/0", Key: map[string]interface{}{"bytes": "05070701000000066c65646765720a00000016011871cfab6dafee00330602b4342b6500c874c93b00"}, @@ -186,17 +275,17 @@ func TestGroup_Parse(t *testing.T) { OperationID: "55baa67b04044639932a1bef22a2d0bc", Level: 1151495, Address: "KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn", - Network: "mainnet", + Network: consts.Mainnet, IndexedTime: 1602764979845839, Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.Transfer{ - Network: "mainnet", + &transfer.Transfer{ + Network: consts.Mainnet, Contract: "KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn", Initiator: "KT1Ap287P1NzsnToSJdA4aqSNjPomRaHBZSr", Hash: "opPUPCpQu6pP38z9TkgFfwLiqVBFGSWQCH8Z2PUL3jrpxqJH5gt", - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 1151495, From: "KT1Ap287P1NzsnToSJdA4aqSNjPomRaHBZSr", @@ -210,7 +299,7 @@ func TestGroup_Parse(t *testing.T) { }, { name: "onzUDQhwunz2yqzfEsoURXEBz9p7Gk8DgY4QBva52Z4b3AJCZjt", ParseParams: NewParseParams( - rpc, es, + rpc, generalRepo, bmdRepo, blockRepo, tzipRepo, schemaRepo, tbRepo, WithHead(noderpc.Header{ Timestamp: timestamp, Protocol: "PsDELPH1Kxsxt8f9eWbxQeRxkjfbxoqM52jvs5Y5fBxWWh4ifpo", @@ -218,7 +307,7 @@ func TestGroup_Parse(t *testing.T) { ChainID: "test", }), WithNetwork("delphinet"), - WithConstants(models.Constants{ + WithConstants(protocol.Constants{ CostPerByte: 250, HardGasLimitPerOperation: 1040000, HardStorageLimitPerOperation: 60000, @@ -229,14 
+318,14 @@ func TestGroup_Parse(t *testing.T) { address: "KT1NppzrgyLZD3aku7fssfhYPm5QqZwyabvR", level: 86142, filename: "./data/rpc/opg/onzUDQhwunz2yqzfEsoURXEBz9p7Gk8DgY4QBva52Z4b3AJCZjt.json", - want: []elastic.Model{ - &models.Operation{ + want: []models.Model{ + &operation.Operation{ ContentIndex: 0, Network: "delphinet", Protocol: "PsDELPH1Kxsxt8f9eWbxQeRxkjfbxoqM52jvs5Y5fBxWWh4ifpo", Hash: "onzUDQhwunz2yqzfEsoURXEBz9p7Gk8DgY4QBva52Z4b3AJCZjt", Internal: false, - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 86142, Kind: "origination", @@ -255,12 +344,12 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: nil, Tags: nil, }, - &models.Metadata{ + &schema.Schema{ ID: "KT1NppzrgyLZD3aku7fssfhYPm5QqZwyabvR", Parameter: map[string]string{"babylon": "{\"0\":{\"prim\":\"or\",\"args\":[\"0/0\",\"0/1\"],\"type\":\"namedunion\"},\"0/0\":{\"fieldname\":\"decrement\",\"prim\":\"int\",\"type\":\"int\",\"name\":\"decrement\"},\"0/1\":{\"fieldname\":\"increment\",\"prim\":\"int\",\"type\":\"int\",\"name\":\"increment\"}}"}, Storage: map[string]string{"babylon": "{\"0\":{\"prim\":\"int\",\"type\":\"int\"}}"}, }, - &models.Contract{ + &modelContract.Contract{ Network: "delphinet", Level: 86142, Timestamp: timestamp, @@ -278,7 +367,7 @@ func TestGroup_Parse(t *testing.T) { }, { name: "opQMNBmME834t76enxSBqhJcPqwV2R2BP2pTKv438bHaxRZen6x", ParseParams: NewParseParams( - rpc, es, + rpc, generalRepo, bmdRepo, blockRepo, tzipRepo, schemaRepo, tbRepo, WithHead(noderpc.Header{ Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", @@ -286,7 +375,7 @@ func TestGroup_Parse(t *testing.T) { ChainID: "test", }), WithNetwork("carthagenet"), - WithConstants(models.Constants{ + WithConstants(protocol.Constants{ CostPerByte: 1000, HardGasLimitPerOperation: 1040000, HardStorageLimitPerOperation: 60000, @@ -296,14 +385,14 @@ func TestGroup_Parse(t *testing.T) { address: "KT1Dc6A6jTY9sG4UvqKciqbJNAGtXqb4n7vZ", level: 386026, 
filename: "./data/rpc/opg/opQMNBmME834t76enxSBqhJcPqwV2R2BP2pTKv438bHaxRZen6x.json", - want: []elastic.Model{ - &models.Operation{ + want: []models.Model{ + &operation.Operation{ ContentIndex: 0, Network: "carthagenet", Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Hash: "opQMNBmME834t76enxSBqhJcPqwV2R2BP2pTKv438bHaxRZen6x", Internal: false, - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 386026, Kind: "transaction", @@ -322,14 +411,14 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: []string{}, Tags: []string{}, }, - &models.Operation{ + &operation.Operation{ ContentIndex: 0, Network: "carthagenet", Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Hash: "opQMNBmME834t76enxSBqhJcPqwV2R2BP2pTKv438bHaxRZen6x", Internal: true, Nonce: setInt64(0), - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 386026, Kind: "transaction", @@ -350,7 +439,7 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: []string{}, Tags: []string{}, }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 2416, BinPath: "0/0", Key: map[string]interface{}{"bytes": "000086b7990605548cb13db091c7a68a46a7aef3d0a2"}, @@ -364,14 +453,14 @@ func TestGroup_Parse(t *testing.T) { Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.Operation{ + &operation.Operation{ ContentIndex: 0, Network: "carthagenet", Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", Hash: "opQMNBmME834t76enxSBqhJcPqwV2R2BP2pTKv438bHaxRZen6x", Internal: true, Nonce: setInt64(1), - Status: "applied", + Status: consts.Applied, Timestamp: timestamp, Level: 386026, Kind: "transaction", @@ -392,7 +481,7 @@ func TestGroup_Parse(t *testing.T) { StorageStrings: []string{}, Tags: []string{}, }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 2417, BinPath: "0/0/0/1/0", Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, @@ -406,7 +495,7 @@ func 
TestGroup_Parse(t *testing.T) { Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.BigMapAction{ + &bigmapaction.BigMapAction{ Action: "remove", SourcePtr: setInt64(2417), DestinationPtr: nil, @@ -415,7 +504,7 @@ func TestGroup_Parse(t *testing.T) { Network: "carthagenet", Timestamp: timestamp, }, - &models.BigMapAction{ + &bigmapaction.BigMapAction{ Action: "copy", SourcePtr: setInt64(2416), DestinationPtr: setInt64(2418), @@ -424,7 +513,7 @@ func TestGroup_Parse(t *testing.T) { Network: "carthagenet", Timestamp: timestamp, }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 2418, BinPath: "0/0/0/1/0", Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, @@ -438,7 +527,7 @@ func TestGroup_Parse(t *testing.T) { Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", }, - &models.BigMapDiff{ + &bigmapdiff.BigMapDiff{ Ptr: 2418, BinPath: "0/0/0/1/0", Key: map[string]interface{}{"bytes": "000086b7990605548cb13db091c7a68a46a7aef3d0a2"}, @@ -457,77 +546,23 @@ func TestGroup_Parse(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - metadata := &models.Metadata{ID: tt.address} - es. + schemaRepo. EXPECT(). - GetByID(gomock.AssignableToTypeOf(metadata)). + Get(gomock.Any()). DoAndReturn( - func(val *models.Metadata) error { - buf, err := readTestMetadataModel(val.GetID()) + func(address string) (modelSchema.Schema, error) { + val := modelSchema.Schema{ID: address} + buf, err := readTestMetadataModel(address) if err != nil { - return err + return val, err } val.Parameter = buf.Parameter val.Storage = buf.Storage - return nil + return val, nil }, ). AnyTimes() - es. - EXPECT(). - GetByID(gomock.AssignableToTypeOf(&models.Contract{})). - DoAndReturn(readTestContractModel). - AnyTimes() - - es. - EXPECT(). 
- GetBigMapDiffsByPtr( - gomock.Eq("KT1HBy1L43tiLe5MVJZ5RoxGy53Kx8kMgyoU"), - gomock.Eq("carthagenet"), - gomock.Eq(int64(2416))). - Return([]models.BigMapDiff{ - { - Ptr: 2416, - BinPath: "0/0/0/1/0", - Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, - KeyHash: "exprtfKNhZ1G8vMscchFjt1G1qww2P93VTLHMuhyThVYygZLdnRev2", - KeyStrings: []string{"tz1XrCvviH8CqoHMSKpKuznLArEa1yR9U7ep"}, - Value: `{"prim":"Pair","args":[[],{"int":"6000"}]}`, - ValueStrings: []string{}, - Level: 386026, - Address: "KT1HBy1L43tiLe5MVJZ5RoxGy53Kx8kMgyoU", - Network: "carthagenet", - Timestamp: timestamp, - Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", - }, - }, nil). - AnyTimes() - - es. - EXPECT(). - GetBigMapDiffsByPtr( - gomock.Eq("KT1Dc6A6jTY9sG4UvqKciqbJNAGtXqb4n7vZ"), - gomock.Eq("carthagenet"), - gomock.Eq(int64(2417))). - Return([]models.BigMapDiff{ - { - Ptr: 2417, - BinPath: "0/0/0/1/0", - Key: map[string]interface{}{"bytes": "000085ef0c18b31983603d978a152de4cd61803db881"}, - KeyHash: "exprtfKNhZ1G8vMscchFjt1G1qww2P93VTLHMuhyThVYygZLdnRev2", - KeyStrings: []string{"tz1XrCvviH8CqoHMSKpKuznLArEa1yR9U7ep"}, - Value: "", - ValueStrings: []string{}, - Level: 386026, - Address: "KT1Dc6A6jTY9sG4UvqKciqbJNAGtXqb4n7vZ", - Network: "carthagenet", - Timestamp: timestamp, - Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", - }, - }, nil). - AnyTimes() - rpc. EXPECT(). GetScriptStorageJSON(tt.address, tt.level). 
diff --git a/internal/parsers/operations/origination.go b/internal/parsers/operations/origination.go index d46fb2c4b..59bc5c1bc 100644 --- a/internal/parsers/operations/origination.go +++ b/internal/parsers/operations/origination.go @@ -3,9 +3,9 @@ package operations import ( "time" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) @@ -20,8 +20,8 @@ func NewOrigination(params *ParseParams) Origination { } // Parse - -func (p Origination) Parse(data gjson.Result) ([]elastic.Model, error) { - origination := models.Operation{ +func (p Origination) Parse(data gjson.Result) ([]models.Model, error) { + origination := operation.Operation{ ID: helpers.GenerateID(), Network: p.network, Hash: p.hash, @@ -60,7 +60,7 @@ func (p Origination) Parse(data gjson.Result) ([]elastic.Model, error) { origination.SetBurned(p.constants) - originationModels := []elastic.Model{&origination} + originationModels := []models.Model{&origination} for i := range operationMetadata.BalanceUpdates { originationModels = append(originationModels, operationMetadata.BalanceUpdates[i]) @@ -78,12 +78,12 @@ func (p Origination) Parse(data gjson.Result) ([]elastic.Model, error) { return originationModels, nil } -func (p Origination) appliedHandler(item gjson.Result, origination *models.Operation) ([]elastic.Model, error) { +func (p Origination) appliedHandler(item gjson.Result, origination *operation.Operation) ([]models.Model, error) { if !helpers.IsContract(origination.Destination) || !origination.IsApplied() { return nil, nil } - models := make([]elastic.Model, 0) + models := make([]models.Model, 0) contractModels, err := p.contractParser.Parse(*origination) if err != nil { @@ -111,7 +111,7 @@ func (p Origination) appliedHandler(item gjson.Result, origination *models.Opera return 
models, nil } -func (p Origination) fillInternal(tx *models.Operation) { +func (p Origination) fillInternal(tx *operation.Operation) { if p.main == nil { return } diff --git a/internal/parsers/operations/params.go b/internal/parsers/operations/params.go index c0b523607..701cb755c 100644 --- a/internal/parsers/operations/params.go +++ b/internal/parsers/operations/params.go @@ -4,9 +4,15 @@ import ( "sync" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/contract" "github.com/baking-bad/bcdhub/internal/parsers/stacktrace" @@ -15,12 +21,16 @@ import ( // ParseParams - type ParseParams struct { + Storage models.GeneralRepository + BigMapDiffs bigmapdiff.Repository + Schema schema.Repository + TokenBalances tokenbalance.Repository + rpc noderpc.INode - es elastic.IElastic shareDir string interfaces map[string]kinds.ContractKind - constants models.Constants + constants protocol.Constants contractParser *contract.Parser transferParser *transfer.Parser @@ -35,7 +45,7 @@ type ParseParams struct { hash string head noderpc.Header contentIdx int64 - main *models.Operation + main *operation.Operation once *sync.Once } @@ -51,7 +61,7 @@ func WithIPFSGateways(ipfs []string) ParseParamsOption { } // WithConstants - -func 
WithConstants(constants models.Constants) ParseParamsOption { +func WithConstants(constants protocol.Constants) ParseParamsOption { return func(dp *ParseParams) { dp.constants = constants } @@ -100,26 +110,30 @@ func WithContentIndex(index int64) ParseParamsOption { } // WithMainOperation - -func WithMainOperation(main *models.Operation) ParseParamsOption { +func WithMainOperation(main *operation.Operation) ParseParamsOption { return func(dp *ParseParams) { dp.main = main } } // NewParseParams - -func NewParseParams(rpc noderpc.INode, es elastic.IElastic, opts ...ParseParamsOption) *ParseParams { +func NewParseParams(rpc noderpc.INode, storage models.GeneralRepository, bmdRepo bigmapdiff.Repository, blockRepo block.Repository, tzipRepo tzip.Repository, schemaRepo schema.Repository, tbRepo tokenbalance.Repository, opts ...ParseParamsOption) *ParseParams { params := &ParseParams{ - es: es, - rpc: rpc, - once: &sync.Once{}, - stackTrace: stacktrace.New(), + Storage: storage, + BigMapDiffs: bmdRepo, + Schema: schemaRepo, + TokenBalances: tbRepo, + rpc: rpc, + once: &sync.Once{}, + stackTrace: stacktrace.New(), } for i := range opts { opts[i](params) } transferParser, err := transfer.NewParser( - params.rpc, params.es, + params.rpc, + tzipRepo, blockRepo, schemaRepo, storage, transfer.WithStackTrace(params.stackTrace), transfer.WithNetwork(params.network), transfer.WithChainID(params.head.ChainID), @@ -134,7 +148,7 @@ func NewParseParams(rpc noderpc.INode, es elastic.IElastic, opts ...ParseParamsO params.interfaces, contract.WithShareDirContractParser(params.shareDir), ) - storageParser, err := NewRichStorage(es, rpc, params.head.Protocol) + storageParser, err := NewRichStorage(bmdRepo, rpc, params.head.Protocol) if err != nil { logger.Error(err) } diff --git a/internal/parsers/operations/parser.go b/internal/parsers/operations/parser.go index 3337c487d..e3bdb2b85 100644 --- a/internal/parsers/operations/parser.go +++ b/internal/parsers/operations/parser.go @@ -1,11 
+1,11 @@ package operations import ( - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models" "github.com/tidwall/gjson" ) // Parser - type Parser interface { - Parse(data gjson.Result) ([]elastic.Model, error) + Parse(data gjson.Result) ([]models.Model, error) } diff --git a/internal/parsers/operations/result.go b/internal/parsers/operations/result.go index d39b2cf3b..784197832 100644 --- a/internal/parsers/operations/result.go +++ b/internal/parsers/operations/result.go @@ -5,7 +5,7 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/tidwall/gjson" ) @@ -20,11 +20,11 @@ func NewResult(root string) Result { } // Parse - -func (r Result) Parse(data gjson.Result) models.OperationResult { +func (r Result) Parse(data gjson.Result) operation.Result { if r.root != "" { r.root = fmt.Sprintf("%s.", r.root) } - result := models.OperationResult{ + result := operation.Result{ Status: data.Get(r.root + "status").String(), ConsumedGas: data.Get(r.root + "consumed_gas").Int(), StorageSize: data.Get(r.root + "storage_size").Int(), diff --git a/internal/parsers/operations/result_test.go b/internal/parsers/operations/result_test.go index 76accdb1f..a732bbf34 100644 --- a/internal/parsers/operations/result_test.go +++ b/internal/parsers/operations/result_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" ) func TestResult_Parse(t *testing.T) { @@ -12,13 +12,13 @@ func TestResult_Parse(t *testing.T) { name string root string fileName string - want models.OperationResult + want operation.Result }{ { name: "test 1", root: "", fileName: 
"./data/result/test1.json", - want: models.OperationResult{ + want: operation.Result{ Status: "applied", ConsumedGas: 10207, }, @@ -26,7 +26,7 @@ func TestResult_Parse(t *testing.T) { name: "test 2", root: "operation_result", fileName: "./data/result/test2.json", - want: models.OperationResult{ + want: operation.Result{ Status: "applied", ConsumedGas: 10207, }, diff --git a/internal/parsers/operations/rich_storage.go b/internal/parsers/operations/rich_storage.go index 40dfa1e6a..6f319a1f8 100644 --- a/internal/parsers/operations/rich_storage.go +++ b/internal/parsers/operations/rich_storage.go @@ -5,8 +5,8 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/elastic" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" "github.com/tidwall/gjson" @@ -14,27 +14,27 @@ import ( // RichStorage - type RichStorage struct { - es elastic.IElastic - rpc noderpc.INode + repo bigmapdiff.Repository + rpc noderpc.INode parser storage.Parser } // NewRichStorage - -func NewRichStorage(es elastic.IElastic, rpc noderpc.INode, protocol string) (*RichStorage, error) { - storageParser, err := contractparser.MakeStorageParser(rpc, es, protocol, false) +func NewRichStorage(repo bigmapdiff.Repository, rpc noderpc.INode, protocol string) (*RichStorage, error) { + storageParser, err := contractparser.MakeStorageParser(rpc, repo, protocol, false) if err != nil { return nil, err } return &RichStorage{ - es: es, + repo: repo, rpc: rpc, parser: storageParser, }, nil } // Parse - -func (p *RichStorage) Parse(data gjson.Result, metadata 
*meta.ContractMetadata, operation *models.Operation) (storage.RichStorage, error) { +func (p *RichStorage) Parse(data gjson.Result, metadata *meta.ContractMetadata, operation *operation.Operation) (storage.RichStorage, error) { protoSymLink, err := meta.GetProtoSymLink(operation.Protocol) if err != nil { return storage.RichStorage{Empty: true}, err diff --git a/internal/parsers/operations/rich_storage_test.go b/internal/parsers/operations/rich_storage_test.go index dc7f50966..0c9c36bdf 100644 --- a/internal/parsers/operations/rich_storage_test.go +++ b/internal/parsers/operations/rich_storage_test.go @@ -5,9 +5,11 @@ import ( "time" "github.com/baking-bad/bcdhub/internal/contractparser/storage" - "github.com/baking-bad/bcdhub/internal/elastic" - mock_elastic "github.com/baking-bad/bcdhub/internal/elastic/mock" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + mock_bmd "github.com/baking-bad/bcdhub/internal/models/mock/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" @@ -16,9 +18,9 @@ import ( func TestRichStorage_Parse(t *testing.T) { timestamp := time.Now() - ctrlES := gomock.NewController(t) - defer ctrlES.Finish() - es := mock_elastic.NewMockIElastic(ctrlES) + ctrlBmdRepo := gomock.NewController(t) + defer ctrlBmdRepo.Finish() + bmdRepo := mock_bmd.NewMockRepository(ctrlBmdRepo) ctrlRPC := gomock.NewController(t) defer ctrlRPC.Finish() @@ -26,7 +28,7 @@ func TestRichStorage_Parse(t *testing.T) { tests := []struct { name string - operation *models.Operation + operation *operation.Operation filename string sourcePtr int64 want storage.RichStorage @@ -34,7 +36,7 @@ func TestRichStorage_Parse(t *testing.T) { }{ { name: "test 
1", - operation: &models.Operation{ + operation: &operation.Operation{ ID: "operation_id", Level: 1151463, Destination: "KT1PWx2mnDueood7fEmfbBDKx1D9BAnnXitn", @@ -45,8 +47,8 @@ func TestRichStorage_Parse(t *testing.T) { }, filename: "./data/rich_storage/test1.json", want: storage.RichStorage{ - Models: []elastic.Model{ - &models.BigMapDiff{ + Models: []models.Model{ + &bigmapdiff.BigMapDiff{ Ptr: 31, KeyHash: "exprunzteC5uyXRHbKnqJd3hUMGTWE9Gv5EtovDZHnuqu6SaGViV3N", Key: map[string]interface{}{ @@ -60,7 +62,7 @@ func TestRichStorage_Parse(t *testing.T) { Network: "mainnet", Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", - }, &models.BigMapDiff{ + }, &bigmapdiff.BigMapDiff{ Ptr: 31, KeyHash: "exprtzVE8dHF7nePZxF6PSRf3yhfecTEKavyCZpndJGN2hz6PzQkFi", Key: map[string]interface{}{ @@ -74,7 +76,7 @@ func TestRichStorage_Parse(t *testing.T) { Network: "mainnet", Timestamp: timestamp, Protocol: "PsCARTHAGazKbHtnKfLzQg3kms52kSRpgnDY982a9oYsSXRLQEb", - }, &models.BigMapDiff{ + }, &bigmapdiff.BigMapDiff{ Ptr: 31, KeyHash: "expruyvqmgBYpF54i1c4p6r3oVV7FmW7ZH8EyjSjahKoQEfWPmcjGg", Key: map[string]interface{}{ @@ -93,7 +95,7 @@ func TestRichStorage_Parse(t *testing.T) { }, }, { name: "test 2", - operation: &models.Operation{ + operation: &operation.Operation{ ID: "operation_id", Level: 359942, Destination: "KT1Xk1XJD2M8GYFUXRN12oMvDAysECDWwGdS", @@ -105,8 +107,8 @@ func TestRichStorage_Parse(t *testing.T) { sourcePtr: 1055, filename: "./data/rich_storage/test2.json", want: storage.RichStorage{ - Models: []elastic.Model{ - &models.BigMapAction{ + Models: []models.Model{ + &bigmapaction.BigMapAction{ Action: "copy", SourcePtr: setInt64(1055), DestinationPtr: setInt64(1509), @@ -135,10 +137,10 @@ func TestRichStorage_Parse(t *testing.T) { DoAndReturn(readStorage). AnyTimes() - es. + bmdRepo. EXPECT(). - GetBigMapDiffsByPtr(tt.operation.Destination, tt.operation.Network, tt.sourcePtr). - Return([]models.BigMapDiff{}, nil). 
+ GetByPtr(tt.operation.Destination, tt.operation.Network, tt.sourcePtr). + Return([]bigmapdiff.BigMapDiff{}, nil). AnyTimes() metadata, err := readTestMetadata(tt.operation.Destination) @@ -152,7 +154,7 @@ func TestRichStorage_Parse(t *testing.T) { return } - parser, err := NewRichStorage(es, rpc, tt.operation.Protocol) + parser, err := NewRichStorage(bmdRepo, rpc, tt.operation.Protocol) if err != nil { t.Errorf(`NewRichStorage = error %v`, err) return diff --git a/internal/parsers/operations/test_common.go b/internal/parsers/operations/test_common.go index 9bf8d7bb9..435eb4629 100644 --- a/internal/parsers/operations/test_common.go +++ b/internal/parsers/operations/test_common.go @@ -9,9 +9,15 @@ import ( "testing" "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/stretchr/testify/assert" "github.com/tidwall/gjson" ) @@ -34,17 +40,17 @@ func readTestMetadata(address string) (*meta.ContractMetadata, error) { return &metadata, err } -func readTestMetadataModel(address string) (*models.Metadata, error) { +func readTestMetadataModel(address string) (*schema.Schema, error) { bytes, err := ioutil.ReadFile(fmt.Sprintf("./data/models/metadata/%s.json", address)) if err != nil { return nil, err } - var metadata models.Metadata + var metadata schema.Schema err = json.Unmarshal(bytes, &metadata) 
return &metadata, err } -func readTestContractModel(contract *models.Contract) error { +func readTestContractModel(contract *contract.Contract) error { bytes, err := ioutil.ReadFile(fmt.Sprintf("./data/models/contract/%s.json", contract.Address)) if err != nil { return err @@ -57,15 +63,15 @@ func readStorage(address string, level int64) (gjson.Result, error) { return readJSONFile(storageFile) } -func compareParserResponse(t *testing.T, got, want []elastic.Model) bool { +func compareParserResponse(t *testing.T, got, want []models.Model) bool { if len(got) != len(want) { log.Printf("len(got) != len(want): %d != %d", len(got), len(want)) return false } for i := range got { switch one := got[i].(type) { - case *models.Transfer: - two, ok := want[i].(*models.Transfer) + case *transfer.Transfer: + two, ok := want[i].(*transfer.Transfer) if !ok { log.Printf("Differrrent types: %T != %T", one, two) return false @@ -73,8 +79,8 @@ func compareParserResponse(t *testing.T, got, want []elastic.Model) bool { if !compareTransfers(one, two) { return false } - case *models.Operation: - two, ok := want[i].(*models.Operation) + case *operation.Operation: + two, ok := want[i].(*operation.Operation) if !ok { log.Printf("Differrrent types: %T != %T", one, two) return false @@ -82,8 +88,8 @@ func compareParserResponse(t *testing.T, got, want []elastic.Model) bool { if !compareOperations(t, one, two) { return false } - case *models.BigMapDiff: - two, ok := want[i].(*models.BigMapDiff) + case *bigmapdiff.BigMapDiff: + two, ok := want[i].(*bigmapdiff.BigMapDiff) if !ok { log.Printf("Differrrent types: %T != %T", one, two) return false @@ -91,32 +97,32 @@ func compareParserResponse(t *testing.T, got, want []elastic.Model) bool { if !compareBigMapDiff(t, one, two) { return false } - case *models.BigMapAction: - two, ok := want[i].(*models.BigMapAction) + case *bigmapaction.BigMapAction: + two, ok := want[i].(*bigmapaction.BigMapAction) if !ok { return false } if !compareBigMapAction(one, 
two) { return false } - case *models.Contract: - two, ok := want[i].(*models.Contract) + case *contract.Contract: + two, ok := want[i].(*contract.Contract) if !ok { return false } if !compareContract(one, two) { return false } - case *models.Metadata: - two, ok := want[i].(*models.Metadata) + case *schema.Schema: + two, ok := want[i].(*schema.Schema) if !ok { return false } if !compareMetadata(t, one, two) { return false } - case *models.BalanceUpdate: - two, ok := want[i].(*models.BalanceUpdate) + case *balanceupdate.BalanceUpdate: + two, ok := want[i].(*balanceupdate.BalanceUpdate) if !ok { return false } @@ -132,7 +138,7 @@ func compareParserResponse(t *testing.T, got, want []elastic.Model) bool { return true } -func compareTransfers(one, two *models.Transfer) bool { +func compareTransfers(one, two *transfer.Transfer) bool { if one.Network != two.Network { log.Printf("Network: %s != %s", one.Network, two.Network) return false @@ -204,7 +210,7 @@ func compareTransfers(one, two *models.Transfer) bool { return true } -func compareOperations(t *testing.T, one, two *models.Operation) bool { +func compareOperations(t *testing.T, one, two *operation.Operation) bool { if one.Internal != two.Internal { log.Printf("Internal: %v != %v", one.Internal, two.Internal) return false @@ -328,7 +334,7 @@ func compareOperations(t *testing.T, one, two *models.Operation) bool { return true } -func compareBigMapDiff(t *testing.T, one, two *models.BigMapDiff) bool { +func compareBigMapDiff(t *testing.T, one, two *bigmapdiff.BigMapDiff) bool { if one.Address != two.Address { log.Printf("BigMapDiff.Address: %s != %s", one.Address, two.Address) return false @@ -368,7 +374,7 @@ func compareBigMapDiff(t *testing.T, one, two *models.BigMapDiff) bool { return true } -func compareBigMapAction(one, two *models.BigMapAction) bool { +func compareBigMapAction(one, two *bigmapaction.BigMapAction) bool { if one.Action != two.Action { log.Printf("Action: %s != %s", one.Action, two.Action) return 
false @@ -400,7 +406,7 @@ func compareBigMapAction(one, two *models.BigMapAction) bool { return true } -func compareContract(one, two *models.Contract) bool { +func compareContract(one, two *contract.Contract) bool { if one.Network != two.Network { log.Printf("Contract.Network: %s != %s", one.Network, two.Network) return false @@ -452,7 +458,7 @@ func compareContract(one, two *models.Contract) bool { return true } -func compareBalanceUpdates(a, b *models.BalanceUpdate) bool { +func compareBalanceUpdates(a, b *balanceupdate.BalanceUpdate) bool { if a.Change != b.Change { log.Printf("BalanceUpdate.Change: %d != %d", a.Change, b.Change) return false @@ -484,7 +490,7 @@ func compareBalanceUpdates(a, b *models.BalanceUpdate) bool { return true } -func compareMetadata(t *testing.T, one, two *models.Metadata) bool { +func compareMetadata(t *testing.T, one, two *schema.Schema) bool { if one.ID != two.ID { log.Printf("Metadata.ID: %s != %s", one.ID, two.ID) return false diff --git a/internal/parsers/operations/transaction.go b/internal/parsers/operations/transaction.go index 2e465899e..f07305c82 100644 --- a/internal/parsers/operations/transaction.go +++ b/internal/parsers/operations/transaction.go @@ -6,11 +6,13 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/meta" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" "github.com/pkg/errors" "github.com/tidwall/gjson" ) @@ -26,8 +28,8 @@ func NewTransaction(params *ParseParams) 
Transaction { } // Parse - -func (p Transaction) Parse(data gjson.Result) ([]elastic.Model, error) { - tx := models.Operation{ +func (p Transaction) Parse(data gjson.Result) ([]models.Model, error) { + tx := operation.Operation{ ID: helpers.GenerateID(), Network: p.network, Hash: p.hash, @@ -64,7 +66,7 @@ func (p Transaction) Parse(data gjson.Result) ([]elastic.Model, error) { tx.Errors = tx.Result.Errors tx.SetBurned(p.constants) - txModels := []elastic.Model{&tx} + txModels := []models.Model{&tx} if tx.IsApplied() { for i := range txMetadata.BalanceUpdates { @@ -95,14 +97,14 @@ func (p Transaction) Parse(data gjson.Result) ([]elastic.Model, error) { txModels = append(txModels, transfers[i]) } - if err := elastic.CreateTokenBalanceUpdates(p.es, transfers); err != nil { + if err := transferParsers.UpdateTokenBalances(p.TokenBalances, transfers); err != nil { return nil, err } return txModels, nil } -func (p Transaction) fillInternal(tx *models.Operation) { +func (p Transaction) fillInternal(tx *operation.Operation) { if p.main == nil { p.main = tx return @@ -116,12 +118,12 @@ func (p Transaction) fillInternal(tx *models.Operation) { tx.Initiator = p.main.Source } -func (p Transaction) appliedHandler(item gjson.Result, op *models.Operation) ([]elastic.Model, error) { +func (p Transaction) appliedHandler(item gjson.Result, op *operation.Operation) ([]models.Model, error) { if !helpers.IsContract(op.Destination) || !op.IsApplied() { return nil, nil } - metadata, err := meta.GetContractMetadata(p.es, op.Destination) + metadata, err := meta.GetContractMetadata(p.Schema, op.Destination) if err != nil { if strings.Contains(err.Error(), "404 Not Found") { return nil, nil @@ -129,7 +131,7 @@ func (p Transaction) appliedHandler(item gjson.Result, op *models.Operation) ([] return nil, err } - resultModels := make([]elastic.Model, 0) + resultModels := make([]models.Model, 0) rs, err := p.storageParser.Parse(item, metadata, op) if err != nil { @@ -154,7 +156,7 @@ func (p 
Transaction) appliedHandler(item gjson.Result, op *models.Operation) ([] return resultModels, p.getEntrypoint(item, metadata, op) } -func (p Transaction) getEntrypoint(item gjson.Result, metadata *meta.ContractMetadata, op *models.Operation) error { +func (p Transaction) getEntrypoint(item gjson.Result, metadata *meta.ContractMetadata, op *operation.Operation) error { m, err := metadata.Get(consts.PARAMETER, op.Protocol) if err != nil { return err @@ -174,14 +176,14 @@ func (p Transaction) getEntrypoint(item gjson.Result, metadata *meta.ContractMet return nil } -func (p Transaction) tagTransaction(tx *models.Operation) error { +func (p Transaction) tagTransaction(tx *operation.Operation) error { if !helpers.IsContract(tx.Destination) { return nil } - contract := models.NewEmptyContract(tx.Network, tx.Destination) - if err := p.es.GetByID(&contract); err != nil { - if elastic.IsRecordNotFound(err) { + contract := contract.NewEmptyContract(tx.Network, tx.Destination) + if err := p.Storage.GetByID(&contract); err != nil { + if p.Storage.IsRecordNotFound(err) { return nil } return err diff --git a/internal/parsers/stacktrace/stacktrace.go b/internal/parsers/stacktrace/stacktrace.go index 06387da88..c2bcfcc94 100644 --- a/internal/parsers/stacktrace/stacktrace.go +++ b/internal/parsers/stacktrace/stacktrace.go @@ -6,7 +6,7 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" ) // Item - @@ -22,7 +22,7 @@ type Item struct { } // NewItem - -func NewItem(operation models.Operation, parentID int64) *Item { +func NewItem(operation operation.Operation, parentID int64) *Item { return &Item{ ParentID: parentID, Entrypoint: operation.Entrypoint, @@ -54,7 +54,7 @@ func (sti *Item) AddChild(child *Item) { } // IsNext - -func (sti *Item) IsNext(operation models.Operation) bool { +func (sti *Item) IsNext(operation operation.Operation) 
bool { if !sti.gtNonce(operation.Nonce) { return false } @@ -94,7 +94,7 @@ func New() *StackTrace { } // Get - -func (st *StackTrace) Get(operation models.Operation) *Item { +func (st *StackTrace) Get(operation operation.Operation) *Item { id := computeID(operation.ContentIndex, operation.Nonce) result, ok := st.tree[id] if !ok { @@ -113,7 +113,7 @@ func (st *StackTrace) GetByID(id int64) *Item { } // Add - -func (st *StackTrace) Add(operation models.Operation) { +func (st *StackTrace) Add(operation operation.Operation) { var parent *Item for i := len(st.order) - 1; i >= 0; i-- { if st.order[i].IsNext(operation) { diff --git a/internal/parsers/transfer/balance_parser.go b/internal/parsers/transfer/balance_parser.go index 45815c0b5..f758ea42a 100644 --- a/internal/parsers/transfer/balance_parser.go +++ b/internal/parsers/transfer/balance_parser.go @@ -2,12 +2,13 @@ package transfer import ( "github.com/baking-bad/bcdhub/internal/events" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/transfer" ) // BalanceParser - type BalanceParser interface { - Parse(balances []events.TokenBalance) ([]*models.Transfer, error) + Parse(balances []events.TokenBalance) ([]*transfer.Transfer, error) } // DefaultBalanceParser - @@ -19,10 +20,10 @@ func NewDefaultBalanceParser() *DefaultBalanceParser { } // Parse - -func (parser *DefaultBalanceParser) Parse(balances []events.TokenBalance, operation models.Operation) ([]*models.Transfer, error) { - transfers := make([]*models.Transfer, 0) +func (parser *DefaultBalanceParser) Parse(balances []events.TokenBalance, operation operation.Operation) ([]*transfer.Transfer, error) { + transfers := make([]*transfer.Transfer, 0) for _, balance := range balances { - transfer := models.EmptyTransfer(operation) + transfer := transfer.EmptyTransfer(operation) if balance.Value > 0 { transfer.To = balance.Address } 
else { diff --git a/internal/parsers/transfer/token_balance.go b/internal/parsers/transfer/token_balance.go new file mode 100644 index 000000000..a553c1911 --- /dev/null +++ b/internal/parsers/transfer/token_balance.go @@ -0,0 +1,36 @@ +package transfer + +import ( + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/transfer" +) + +// UpdateTokenBalances - +func UpdateTokenBalances(repo tokenbalance.Repository, transfers []*transfer.Transfer) error { + exists := make(map[string]*tokenbalance.TokenBalance) + updates := make([]*tokenbalance.TokenBalance, 0) + for i := range transfers { + idFrom := transfers[i].GetFromTokenBalanceID() + if idFrom != "" { + if update, ok := exists[idFrom]; ok { + update.Balance -= int64(transfers[i].Amount) + } else { + upd := transfers[i].MakeTokenBalanceUpdate(true, false) + updates = append(updates, upd) + exists[idFrom] = upd + } + } + idTo := transfers[i].GetToTokenBalanceID() + if idTo != "" { + if update, ok := exists[idTo]; ok { + update.Balance += int64(transfers[i].Amount) + } else { + upd := transfers[i].MakeTokenBalanceUpdate(false, false) + updates = append(updates, upd) + exists[idTo] = upd + } + } + } + + return repo.Update(updates) +} diff --git a/internal/parsers/transfer/token_view.go b/internal/parsers/transfer/token_view.go index 371c1196c..fa36a2c12 100644 --- a/internal/parsers/transfer/token_view.go +++ b/internal/parsers/transfer/token_view.go @@ -1,9 +1,9 @@ package transfer import ( - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/models/tzip" ) @@ -19,11 +19,11 @@ type ImplementationKey struct { type TokenEvents map[ImplementationKey]tzip.EventImplementation // NewTokenEvents - -func 
NewTokenEvents(tzipStorage elastic.ITZIP) (TokenEvents, error) { +func NewTokenEvents(repo tzip.Repository, storage models.GeneralRepository) (TokenEvents, error) { views := make(TokenEvents) - tokens, err := tzipStorage.GetTZIPWithEvents() + tokens, err := repo.GetWithEvents() if err != nil { - if elastic.IsRecordNotFound(err) { + if storage.IsRecordNotFound(err) { return views, nil } return nil, err @@ -62,11 +62,11 @@ func NewTokenEvents(tzipStorage elastic.ITZIP) (TokenEvents, error) { } // NewInitialStorageEvents - -func NewInitialStorageEvents(es elastic.IElastic) (TokenEvents, error) { +func NewInitialStorageEvents(repo tzip.Repository, storage models.GeneralRepository) (TokenEvents, error) { views := make(TokenEvents) - tokens, err := es.GetTZIPWithEvents() + tokens, err := repo.GetWithEvents() if err != nil { - if elastic.IsRecordNotFound(err) { + if storage.IsRecordNotFound(err) { return views, nil } return nil, err @@ -94,7 +94,7 @@ func NewInitialStorageEvents(es elastic.IElastic) (TokenEvents, error) { } // GetByOperation - -func (tokenEvents TokenEvents) GetByOperation(operation models.Operation) (tzip.EventImplementation, string, bool) { +func (tokenEvents TokenEvents) GetByOperation(operation operation.Operation) (tzip.EventImplementation, string, bool) { if event, ok := tokenEvents[ImplementationKey{ Address: operation.Destination, Network: operation.Network, diff --git a/internal/parsers/transfer/transfer.go b/internal/parsers/transfer/transfer.go index ffe917bbb..ab8f57078 100644 --- a/internal/parsers/transfer/transfer.go +++ b/internal/parsers/transfer/transfer.go @@ -2,9 +2,13 @@ package transfer import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + 
"github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/baking-bad/bcdhub/internal/parsers/stacktrace" @@ -13,8 +17,9 @@ import ( // Parser - type Parser struct { + Schema schema.Repository + rpc noderpc.INode - es elastic.IElastic events TokenEvents stackTrace *stacktrace.StackTrace @@ -26,10 +31,10 @@ type Parser struct { } // NewParser - -func NewParser(rpc noderpc.INode, es elastic.IElastic, opts ...ParserOption) (*Parser, error) { +func NewParser(rpc noderpc.INode, tzipRepo tzip.Repository, blocks block.Repository, schemaRepo schema.Repository, storage models.GeneralRepository, opts ...ParserOption) (*Parser, error) { tp := &Parser{ - rpc: rpc, - es: es, + rpc: rpc, + Schema: schemaRepo, } for i := range opts { @@ -41,7 +46,7 @@ func NewParser(rpc noderpc.INode, es elastic.IElastic, opts ...ParserOption) (*P } if !tp.withoutViews { - tokenEvents, err := NewTokenEvents(es) + tokenEvents, err := NewTokenEvents(tzipRepo, storage) if err != nil { return nil, err } @@ -51,7 +56,7 @@ func NewParser(rpc noderpc.INode, es elastic.IElastic, opts ...ParserOption) (*P } if tp.network != "" && tp.chainID == "" { - state, err := es.GetLastBlock(tp.network) + state, err := blocks.Last(tp.network) if err != nil { return nil, err } @@ -61,7 +66,7 @@ func NewParser(rpc noderpc.INode, es elastic.IElastic, opts ...ParserOption) (*P } // Parse - -func (p *Parser) Parse(operation models.Operation, operationModels []elastic.Model) ([]*models.Transfer, error) { +func (p *Parser) Parse(operation operation.Operation, operationModels []models.Model) ([]*transfer.Transfer, error) { if impl, name, ok := p.events.GetByOperation(operation); ok { return 
p.executeEvents(impl, name, operation, operationModels) } else if operation.Entrypoint == consts.TransferEntrypoint { @@ -78,7 +83,7 @@ func (p *Parser) Parse(operation models.Operation, operationModels []elastic.Mod return nil, nil } -func (p *Parser) executeEvents(impl tzip.EventImplementation, name string, operation models.Operation, operationModels []elastic.Model) ([]*models.Transfer, error) { +func (p *Parser) executeEvents(impl tzip.EventImplementation, name string, operation operation.Operation, operationModels []models.Model) ([]*transfer.Transfer, error) { if operation.Kind != consts.Transaction { return nil, nil } @@ -103,13 +108,13 @@ func (p *Parser) executeEvents(impl tzip.EventImplementation, name string, opera case impl.MichelsonExtendedStorageEvent.Is(operation.Entrypoint): ctx.Parameters = operation.DeffatedStorage ctx.Entrypoint = consts.DefaultEntrypoint - bmd := make([]models.BigMapDiff, 0) + bmd := make([]bigmapdiff.BigMapDiff, 0) for i := range operationModels { - if model, ok := operationModels[i].(*models.BigMapDiff); ok && model.OperationID == operation.ID { + if model, ok := operationModels[i].(*bigmapdiff.BigMapDiff); ok && model.OperationID == operation.ID { bmd = append(bmd, *model) } } - event, err = events.NewMichelsonExtendedStorage(impl, name, operation.Protocol, operation.GetID(), operation.Destination, p.es, bmd) + event, err = events.NewMichelsonExtendedStorage(impl, name, operation.Protocol, operation.GetID(), operation.Destination, p.Schema, bmd) default: return nil, nil } @@ -135,8 +140,8 @@ func (p *Parser) executeEvents(impl tzip.EventImplementation, name string, opera return transfers, err } -func (p *Parser) makeFA12Transfers(operation models.Operation, parameters gjson.Result) ([]*models.Transfer, error) { - transfer := models.EmptyTransfer(operation) +func (p *Parser) makeFA12Transfers(operation operation.Operation, parameters gjson.Result) ([]*transfer.Transfer, error) { + t := transfer.EmptyTransfer(operation) 
fromAddr, err := getAddress(parameters.Get("args.0")) if err != nil { return nil, err @@ -145,17 +150,17 @@ func (p *Parser) makeFA12Transfers(operation models.Operation, parameters gjson. if err != nil { return nil, err } - transfer.From = fromAddr - transfer.To = toAddr - transfer.Amount = parameters.Get("args.1.args.1.int").Float() + t.From = fromAddr + t.To = toAddr + t.Amount = parameters.Get("args.1.args.1.int").Float() - p.setParentEntrypoint(operation, transfer) + p.setParentEntrypoint(operation, t) - return []*models.Transfer{transfer}, nil + return []*transfer.Transfer{t}, nil } -func (p *Parser) makeFA2Transfers(operation models.Operation, parameters gjson.Result) ([]*models.Transfer, error) { - transfers := make([]*models.Transfer, 0) +func (p *Parser) makeFA2Transfers(operation operation.Operation, parameters gjson.Result) ([]*transfer.Transfer, error) { + transfers := make([]*transfer.Transfer, 0) for _, from := range parameters.Array() { fromAddr, err := getAddress(from.Get("args.0")) if err != nil { @@ -166,7 +171,7 @@ func (p *Parser) makeFA2Transfers(operation models.Operation, parameters gjson.R if err != nil { return nil, err } - transfer := models.EmptyTransfer(operation) + transfer := transfer.EmptyTransfer(operation) transfer.From = fromAddr transfer.To = toAddr transfer.Amount = to.Get("args.1.args.1.int").Float() @@ -180,7 +185,7 @@ func (p *Parser) makeFA2Transfers(operation models.Operation, parameters gjson.R return transfers, nil } -func (p Parser) setParentEntrypoint(operation models.Operation, transfer *models.Transfer) { +func (p Parser) setParentEntrypoint(operation operation.Operation, transfer *transfer.Transfer) { if p.stackTrace.Empty() { return } diff --git a/internal/parsers/tzip/parser.go b/internal/parsers/tzip/parser.go index 89d843a80..90730cd53 100644 --- a/internal/parsers/tzip/parser.go +++ b/internal/parsers/tzip/parser.go @@ -3,8 +3,11 @@ package tzip import ( "strings" - 
"github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" tzipStorage "github.com/baking-bad/bcdhub/internal/parsers/tzip/storage" "github.com/pkg/errors" @@ -17,30 +20,36 @@ const ( // ParseContext - type ParseContext struct { - BigMapDiff models.BigMapDiff + BigMapDiff bigmapdiff.BigMapDiff Hash string } // Parser - type Parser struct { - es elastic.IElastic - rpc noderpc.INode + bigMapRepo bigmapdiff.Repository + blockRepo block.Repository + schemaRepo schema.Repository + storage models.GeneralRepository + rpc noderpc.INode cfg ParserConfig } // NewParser - -func NewParser(es elastic.IElastic, rpc noderpc.INode, cfg ParserConfig) Parser { +func NewParser(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, schemaRepo schema.Repository, storage models.GeneralRepository, rpc noderpc.INode, cfg ParserConfig) Parser { return Parser{ - es: es, - rpc: rpc, + bigMapRepo: bigMapRepo, + blockRepo: blockRepo, + schemaRepo: schemaRepo, + storage: storage, + rpc: rpc, cfg: cfg, } } // Parse - -func (p *Parser) Parse(ctx ParseContext) (*models.TZIP, error) { +func (p *Parser) Parse(ctx ParseContext) (*tzip.TZIP, error) { decoded := tzipStorage.DecodeValue(ctx.BigMapDiff.Value) if decoded == "" { return nil, nil @@ -49,7 +58,7 @@ func (p *Parser) Parse(ctx ParseContext) (*models.TZIP, error) { return p.getFromStorage(ctx, decoded) } -func (p Parser) getFromStorage(ctx ParseContext, url string) (*models.TZIP, error) { +func (p Parser) getFromStorage(ctx ParseContext, url string) (*tzip.TZIP, error) { var store tzipStorage.Storage switch { case strings.HasPrefix(url, tzipStorage.PrefixHTTPS), 
strings.HasPrefix(url, tzipStorage.PrefixHTTP): @@ -67,8 +76,8 @@ func (p Parser) getFromStorage(ctx ParseContext, url string) (*models.TZIP, erro tzipStorage.WithHashSha256(ctx.Hash), ) case strings.HasPrefix(url, tzipStorage.PrefixTezosStorage): - store = tzipStorage.NewTezosStorage(p.es, p.rpc, ctx.BigMapDiff.Address, ctx.BigMapDiff.Network, ctx.BigMapDiff.Ptr) + store = tzipStorage.NewTezosStorage(p.bigMapRepo, p.blockRepo, p.schemaRepo, p.storage, p.rpc, ctx.BigMapDiff.Address, ctx.BigMapDiff.Network, ctx.BigMapDiff.Ptr) default: return nil, errors.Wrap(ErrUnknownStorageType, url) } val, err := store.Get(url) diff --git a/internal/parsers/tzip/repository/item.go b/internal/parsers/tzip/repository/item.go index 19787a5c7..9f1443fe1 100644 --- a/internal/parsers/tzip/repository/item.go +++ b/internal/parsers/tzip/repository/item.go @@ -4,7 +4,7 @@ import ( "encoding/json" "time" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // Item - @@ -15,12 +15,12 @@ type Item struct { } // ToModel - -func (item Item) ToModel() (*models.TZIP, error) { +func (item Item) ToModel() (*tzip.TZIP, error) { t, err := time.Parse("2006 01 02 15 04", "2018 06 30 00 00") if err != nil { return nil, err } - model := models.TZIP{ + model := tzip.TZIP{ Network: item.Network, Address: item.Address, Timestamp: t.UTC(), diff --git a/internal/parsers/tzip/storage/http.go b/internal/parsers/tzip/storage/http.go index 5255b8fc9..e6f83abe2 100644 --- a/internal/parsers/tzip/storage/http.go +++ b/internal/parsers/tzip/storage/http.go @@ -5,7 +5,7 @@ import ( "net/http" "time" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // HTTP Storage prefixes @@ -45,7 +45,7 @@ func NewHTTPStorage(opts ...HTTPStorageOption) HTTPStorage { } // Get - -func (s HTTPStorage) Get(value string) (*models.TZIP, error) { +func (s HTTPStorage) Get(value string) (*tzip.TZIP,
error) { client := http.Client{ Timeout: s.timeout, } @@ -60,7 +60,7 @@ func (s HTTPStorage) Get(value string) (*models.TZIP, error) { } defer resp.Body.Close() - var data models.TZIP + var data tzip.TZIP err = json.NewDecoder(resp.Body).Decode(&data) return &data, err } diff --git a/internal/parsers/tzip/storage/ipfs.go b/internal/parsers/tzip/storage/ipfs.go index 73966bac3..7f06cb9ef 100644 --- a/internal/parsers/tzip/storage/ipfs.go +++ b/internal/parsers/tzip/storage/ipfs.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // IPFS storage prefix @@ -45,7 +45,7 @@ func NewIPFSStorage(gateways []string, opts ...IPFSStorageOption) IPFSStorage { } // Get - -func (s IPFSStorage) Get(value string) (data *models.TZIP, err error) { +func (s IPFSStorage) Get(value string) (data *tzip.TZIP, err error) { if len(s.gateways) == 0 { return nil, ErrEmptyIPFSGatewayList } diff --git a/internal/parsers/tzip/storage/sha256.go b/internal/parsers/tzip/storage/sha256.go index 5bb6072c8..3ec405423 100644 --- a/internal/parsers/tzip/storage/sha256.go +++ b/internal/parsers/tzip/storage/sha256.go @@ -3,7 +3,7 @@ package storage import ( "time" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // Sha256 storage prefix @@ -49,7 +49,7 @@ func NewSha256Storage(opts ...Sha256StorageOption) Sha256Storage { } // Get - -func (s Sha256Storage) Get(value string) (*models.TZIP, error) { +func (s Sha256Storage) Get(value string) (*tzip.TZIP, error) { var uri Sha256URI if err := uri.Parse(value); err != nil { return nil, err diff --git a/internal/parsers/tzip/storage/storage.go b/internal/parsers/tzip/storage/storage.go index 32fc2cf92..96467d2ff 100644 --- a/internal/parsers/tzip/storage/storage.go +++ b/internal/parsers/tzip/storage/storage.go @@ -1,8 +1,8 @@ package storage -import 
"github.com/baking-bad/bcdhub/internal/models" +import "github.com/baking-bad/bcdhub/internal/models/tzip" // Storage - type Storage interface { - Get(value string) (*models.TZIP, error) + Get(value string) (*tzip.TZIP, error) } diff --git a/internal/parsers/tzip/storage/tezos.go b/internal/parsers/tzip/storage/tezos.go index 65d6bea83..ad17ad23f 100644 --- a/internal/parsers/tzip/storage/tezos.go +++ b/internal/parsers/tzip/storage/tezos.go @@ -8,8 +8,11 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" "github.com/baking-bad/bcdhub/internal/contractparser/storage/hash" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" "github.com/tidwall/gjson" @@ -26,7 +29,11 @@ const ( // TezosStorage - type TezosStorage struct { - es elastic.IElastic + bigMapRepo bigmapdiff.Repository + blockRepo block.Repository + schemaRepo schema.Repository + storage models.GeneralRepository + rpc noderpc.INode network string address string @@ -34,25 +41,31 @@ type TezosStorage struct { } // NewTezosStorage - -func NewTezosStorage(es elastic.IElastic, rpc noderpc.INode, address, network string, ptr int64) TezosStorage { +func NewTezosStorage(bigMapRepo bigmapdiff.Repository, blockRepo block.Repository, schemaRepo schema.Repository, storage models.GeneralRepository, rpc noderpc.INode, address, network string, ptr int64) TezosStorage { return TezosStorage{ - es: es, - rpc: rpc, - address: address, - network: network, - ptr: ptr, + bigMapRepo: bigMapRepo, + 
blockRepo: blockRepo, + schemaRepo: schemaRepo, + storage: storage, + rpc: rpc, + address: address, + network: network, + ptr: ptr, } } // Get - -func (s TezosStorage) Get(value string) (*models.TZIP, error) { +func (s TezosStorage) Get(value string) (*tzip.TZIP, error) { var uri TezosStorageURI if err := uri.Parse(value); err != nil { return nil, err } - if err := uri.networkByChainID(s.es); err != nil { - return nil, err + if err := uri.networkByChainID(s.blockRepo); err != nil { + if !s.storage.IsRecordNotFound(err) { + return nil, err + } + return nil, nil } if err := s.fillFields(uri); err != nil { @@ -64,9 +77,9 @@ func (s TezosStorage) Get(value string) (*models.TZIP, error) { return nil, err } - bmd, err := s.es.GetBigMapKey(s.network, key, s.ptr) + bmd, err := s.bigMapRepo.CurrentByKey(s.network, key, s.ptr) if err != nil { - if elastic.IsRecordNotFound(err) { + if s.storage.IsRecordNotFound(err) { return nil, nil } return nil, err @@ -74,7 +87,7 @@ func (s TezosStorage) Get(value string) (*models.TZIP, error) { decoded := DecodeValue(bmd.Value) - var data models.TZIP + var data tzip.TZIP err = json.Unmarshal([]byte(decoded), &data) return &data, err } @@ -86,12 +99,12 @@ func (s *TezosStorage) fillFields(uri TezosStorageURI) error { if uri.Address != "" && uri.Address != s.address { s.address = uri.Address - block, err := s.es.GetLastBlock(s.network) + block, err := s.blockRepo.Last(s.network) if err != nil { return err } - bmPtr, err := FindBigMapPointer(s.es, s.rpc, s.address, s.network, block.Protocol) + bmPtr, err := FindBigMapPointer(s.schemaRepo, s.rpc, s.address, s.network, block.Protocol) if err != nil { return err } @@ -103,8 +116,8 @@ func (s *TezosStorage) fillFields(uri TezosStorageURI) error { } // FindBigMapPointer - -func FindBigMapPointer(es elastic.IElastic, rpc noderpc.INode, address, network, protocol string) (int64, error) { - metadata, err := meta.GetMetadata(es, address, consts.STORAGE, protocol) +func FindBigMapPointer(schemaRepo 
schema.Repository, rpc noderpc.INode, address, network, protocol string) (int64, error) { + metadata, err := meta.GetMetadata(schemaRepo, address, consts.STORAGE, protocol) if err != nil { return -1, err } diff --git a/internal/parsers/tzip/storage/uri.go b/internal/parsers/tzip/storage/uri.go index 39fb34e4a..f4f76a886 100644 --- a/internal/parsers/tzip/storage/uri.go +++ b/internal/parsers/tzip/storage/uri.go @@ -4,8 +4,8 @@ import ( "net/url" "strings" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models/block" "github.com/pkg/errors" ) @@ -56,17 +56,14 @@ func (uri *TezosStorageURI) parseHost(host string) { } } -func (uri *TezosStorageURI) networkByChainID(es elastic.IElastic) error { +func (uri *TezosStorageURI) networkByChainID(blockRepo block.Repository) error { if uri.Network == "" { return nil } - network, err := es.GetNetworkAlias(uri.Network) + network, err := blockRepo.GetNetworkAlias(uri.Network) if err != nil { - if !elastic.IsRecordNotFound(err) { - return err - } - return nil + return err } uri.Network = network return nil diff --git a/internal/parsers/tzip/tokens/metadata.go b/internal/parsers/tzip/tokens/metadata.go index 317ddbef7..f1b06c94d 100644 --- a/internal/parsers/tzip/tokens/metadata.go +++ b/internal/parsers/tzip/tokens/metadata.go @@ -3,7 +3,6 @@ package tokens import ( "time" - "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/tzip" ) @@ -20,15 +19,15 @@ type Metadata struct { } // ToModel - -func (m Metadata) ToModel(address, network string) *models.TZIP { - return &models.TZIP{ +func (m Metadata) ToModel(address, network string) *tzip.TZIP { + return &tzip.TZIP{ Address: address, Network: network, Level: m.Level, Timestamp: m.Timestamp, TZIP12: tzip.TZIP12{ Tokens: &tzip.TokenMetadataType{ - Static: []tzip.TokenMetadata{ + Static: 
[]tzip.TokenMetadataEntity{ { Symbol: m.Symbol, Name: m.Name, diff --git a/internal/parsers/tzip/tokens/parser.go b/internal/parsers/tzip/tokens/parser.go index 0ad9bda9f..8ebb1a0b6 100644 --- a/internal/parsers/tzip/tokens/parser.go +++ b/internal/parsers/tzip/tokens/parser.go @@ -7,16 +7,24 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/meta" "github.com/baking-bad/bcdhub/internal/contractparser/storage" "github.com/baking-bad/bcdhub/internal/contractparser/unpack" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/tidwall/gjson" ) // TokenMetadataParser - type TokenMetadataParser struct { - es elastic.IElastic + bmdRepo bigmapdiff.Repository + blocksRepo block.Repository + protocolRepo protocol.Repository + schemaRepo schema.Repository + storage models.GeneralRepository + rpc noderpc.INode sharePath string network string @@ -25,9 +33,10 @@ type TokenMetadataParser struct { } // NewTokenMetadataParser - -func NewTokenMetadataParser(es elastic.IElastic, rpc noderpc.INode, sharePath, network string) TokenMetadataParser { +func NewTokenMetadataParser(bmdRepo bigmapdiff.Repository, blocksRepo block.Repository, protocolRepo protocol.Repository, schemaRepo schema.Repository, storage models.GeneralRepository, rpc noderpc.INode, sharePath, network string) TokenMetadataParser { return TokenMetadataParser{ - es: es, rpc: rpc, sharePath: sharePath, network: network, + bmdRepo: bmdRepo, blocksRepo: blocksRepo, storage: storage, protocolRepo: protocolRepo, schemaRepo: schemaRepo, + rpc: 
rpc, sharePath: sharePath, network: network, sources: map[string]string{ "carthagenet": "tz1grSQDByRpnVs7sPtaprNZRp531ZKz6Jmm", "mainnet": "tz2FCNBrERXtaTtNX6iimR1UJ5JSDxvdHM93", @@ -58,13 +67,13 @@ func (t TokenMetadataParser) ParseWithRegistry(registry string, level int64) ([] return t.parse(registry, state) } -func (t TokenMetadataParser) parse(registry string, state models.Block) ([]Metadata, error) { +func (t TokenMetadataParser) parse(registry string, state block.Block) ([]Metadata, error) { ptr, err := t.getBigMapPtr(registry, state) if err != nil { return nil, err } - bmd, err := t.es.GetBigMapKeys(elastic.GetBigMapKeysContext{ + bmd, err := t.bmdRepo.Get(bigmapdiff.GetContext{ Ptr: &ptr, Network: t.network, Size: 1000, @@ -87,14 +96,14 @@ func (t TokenMetadataParser) parse(registry string, state models.Block) ([]Metad return metadata, nil } -func (t TokenMetadataParser) getState(level int64) (models.Block, error) { +func (t TokenMetadataParser) getState(level int64) (block.Block, error) { if level > 0 { - return t.es.GetBlock(t.network, level) + return t.blocksRepo.Get(t.network, level) } - return t.es.GetLastBlock(t.network) + return t.blocksRepo.Last(t.network) } -func (t TokenMetadataParser) getTokenMetadataRegistry(address string, state models.Block) (string, error) { +func (t TokenMetadataParser) getTokenMetadataRegistry(address string, state block.Block) (string, error) { metadata, err := t.hasTokenMetadataRegistry(address, state.Protocol) if err != nil { return "", err @@ -107,9 +116,9 @@ func (t TokenMetadataParser) getTokenMetadataRegistry(address string, state mode return "", ErrUnknownNetwork } - result, err := t.es.SearchByText("view_address", 0, nil, map[string]interface{}{ + result, err := t.storage.SearchByText("view_address", 0, nil, map[string]interface{}{ "networks": []string{t.network}, - "indices": []string{elastic.DocContracts}, + "indices": []string{models.DocContracts}, }, false) if err != nil { return "", err @@ -123,7 +132,7 @@ 
func (t TokenMetadataParser) getTokenMetadataRegistry(address string, state mode return "", err } - protocol, err := t.es.GetProtocol(t.network, "", state.Level) + protocol, err := t.protocolRepo.GetProtocol(t.network, "", state.Level) if err != nil { return "", err } @@ -171,7 +180,7 @@ func (t TokenMetadataParser) parseRegistryAddress(response gjson.Result) (string } func (t TokenMetadataParser) hasTokenMetadataRegistry(address, protocol string) (meta.Metadata, error) { - metadata, err := meta.GetMetadata(t.es, address, consts.PARAMETER, protocol) + metadata, err := meta.GetMetadata(t.schemaRepo, address, consts.PARAMETER, protocol) if err != nil { return nil, err } @@ -184,8 +193,8 @@ func (t TokenMetadataParser) hasTokenMetadataRegistry(address, protocol string) return nil, nil } -func (t TokenMetadataParser) getBigMapPtr(address string, state models.Block) (int64, error) { - registryStorageMetadata, err := meta.GetMetadata(t.es, address, consts.STORAGE, state.Protocol) +func (t TokenMetadataParser) getBigMapPtr(address string, state block.Block) (int64, error) { + registryStorageMetadata, err := meta.GetMetadata(t.schemaRepo, address, consts.STORAGE, state.Protocol) if err != nil { return 0, err } diff --git a/internal/parsers/vesting.go b/internal/parsers/vesting.go index f8ffcc527..35d1adb5c 100644 --- a/internal/parsers/vesting.go +++ b/internal/parsers/vesting.go @@ -5,9 +5,11 @@ import ( "github.com/baking-bad/bcdhub/internal/contractparser/consts" "github.com/baking-bad/bcdhub/internal/contractparser/kinds" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/balanceupdate" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/models/operation" "github.com/baking-bad/bcdhub/internal/noderpc" 
"github.com/baking-bad/bcdhub/internal/parsers/contract" "github.com/tidwall/gjson" @@ -28,8 +30,8 @@ func NewVestingParser(filesDirectory string, interfaces map[string]kinds.Contrac } // Parse - -func (p *VestingParser) Parse(data gjson.Result, head noderpc.Header, network, address string) ([]elastic.Model, error) { - migration := &models.Migration{ +func (p *VestingParser) Parse(data gjson.Result, head noderpc.Header, network, address string) ([]models.Model, error) { + migration := &migration.Migration{ ID: helpers.GenerateID(), IndexedTime: time.Now().UnixNano() / 1000, @@ -40,10 +42,10 @@ func (p *VestingParser) Parse(data gjson.Result, head noderpc.Header, network, a Timestamp: head.Timestamp, Kind: consts.MigrationBootstrap, } - parsedModels := []elastic.Model{migration} + parsedModels := []models.Model{migration} script := data.Get("script") - op := models.Operation{ + op := operation.Operation{ ID: helpers.GenerateID(), Network: network, Protocol: head.Protocol, @@ -69,7 +71,7 @@ func (p *VestingParser) Parse(data gjson.Result, head noderpc.Header, network, a parsedModels = append(parsedModels, contractModels...) 
} - parsedModels = append(parsedModels, &models.BalanceUpdate{ + parsedModels = append(parsedModels, &balanceupdate.BalanceUpdate{ ID: helpers.GenerateID(), Change: op.Amount, Network: op.Network, diff --git a/internal/reindexer/balanceupdate/storage.go b/internal/reindexer/balanceupdate/storage.go new file mode 100644 index 000000000..575b81bf9 --- /dev/null +++ b/internal/reindexer/balanceupdate/storage.go @@ -0,0 +1,32 @@ +package balanceupdate + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// GetBalance - +func (storage *Storage) GetBalance(network, address string) (int64, error) { + query := storage.db.Query(models.DocBalanceUpdates). + WhereString("network", reindexer.EQ, network). + WhereString("contract", reindexer.EQ, address) + query.AggregateSum("change") + + it := query.Exec() + if it.Error() != nil { + return 0, it.Error() + } + agg := it.AggResults()[0] + return int64(agg.Value), nil +} diff --git a/internal/reindexer/bigmapaction/storage.go b/internal/reindexer/bigmapaction/storage.go new file mode 100644 index 000000000..1bfd57963 --- /dev/null +++ b/internal/reindexer/bigmapaction/storage.go @@ -0,0 +1,45 @@ +package bigmapaction + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapaction" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// Get - +func (storage *Storage) Get(ptr int64, network string) ([]bigmapaction.BigMapAction, error) { + query := 
storage.db.Query(models.DocBigMapActions). + WhereString("network", reindexer.EQ, network). + OpenBracket(). + WhereInt64("source_ptr", reindexer.EQ, ptr). + Or(). + WhereInt64("destination_ptr", reindexer.EQ, ptr). + CloseBracket(). + Sort("indexed_time", true) + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + result := make([]bigmapaction.BigMapAction, 0) + for it.Next() { + action := it.Object().(*bigmapaction.BigMapAction) + result = append(result, *action) + } + + return result, nil +} diff --git a/internal/reindexer/bigmapdiff/data.go b/internal/reindexer/bigmapdiff/data.go new file mode 100644 index 000000000..366c724f5 --- /dev/null +++ b/internal/reindexer/bigmapdiff/data.go @@ -0,0 +1,53 @@ +package bigmapdiff + +import ( + "fmt" + + "github.com/baking-bad/bcdhub/internal/elastic/consts" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/restream/reindexer" +) + +func buildGetContext(ctx bigmapdiff.GetContext, query *reindexer.Query) { + if ctx.Ptr != nil { + query = query.WhereInt64("ptr", reindexer.EQ, *ctx.Ptr) + } + if ctx.Network != "" { + query = query.Match("network", ctx.Network) + } + + if ctx.Query != "" { + query = query.OpenBracket(). + Where("key", reindexer.LIKE, fmt.Sprintf("%%%s%%", ctx.Query)). + Where("key_hash", reindexer.LIKE, fmt.Sprintf("%%%s%%", ctx.Query)). + Where("key_strings", reindexer.LIKE, fmt.Sprintf("%%%s%%", ctx.Query)). 
+ CloseBracket() + } + + if ctx.Level != nil { + query = query.WhereInt64("level", reindexer.LE, *ctx.Level) + } + + if ctx.Size == 0 { + ctx.Size = consts.DefaultSize + } + + query.Offset(int(ctx.Offset)).Limit(int(ctx.Size)).Sort("indexed_time", true) +} + +// core.Aggs(core.AggItem{ +// Name: "keys", +// Body: core.Item{ +// "terms": core.Item{ +// "field": "key_hash.keyword", +// "size": ctx.To, +// "order": core.Item{ +// "bucketsSort": "desc", +// }, +// }, +// "aggs": core.Item{ +// "top_key": core.TopHits(1, "indexed_time", "desc"), +// "bucketsSort": core.Max("indexed_time"), +// }, +// }, +// }), diff --git a/internal/reindexer/bigmapdiff/storage.go b/internal/reindexer/bigmapdiff/storage.go new file mode 100644 index 000000000..d109a70b6 --- /dev/null +++ b/internal/reindexer/bigmapdiff/storage.go @@ -0,0 +1,207 @@ +package bigmapdiff + +import ( + "fmt" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/pkg/errors" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// CurrentByKey - +func (storage *Storage) CurrentByKey(network, keyHash string, ptr int64) (data bigmapdiff.BigMapDiff, err error) { + if ptr < 0 { + err = errors.Errorf("Invalid pointer value: %d", ptr) + return + } + + query := storage.db.Query(models.DocBigMapDiff). + Match("network", network). + Match("key_hash", keyHash). + WhereInt64("ptr", reindexer.EQ, ptr). + Sort("level", true) + + err = storage.db.GetOne(query, &data) + return +} + +// GetForAddress - +func (storage *Storage) GetForAddress(address string) ([]bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("address", address). 
+ Sort("indexed_time", true) + + return storage.getTop(query, func(bmd bigmapdiff.BigMapDiff) string { + return bmd.KeyHash + }) +} + +// GetByAddress - +func (storage *Storage) GetByAddress(network, address string) (response []bigmapdiff.BigMapDiff, err error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("network", network). + Match("address", address). + Sort("indexed_time", true) + + err = storage.db.GetAllByQuery(query, &response) + return +} + +// GetValuesByKey - +func (storage *Storage) GetValuesByKey(keyHash string) ([]bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("key_hash", keyHash). + Sort("indexed_time", true) + + return storage.getTop(query, func(bmd bigmapdiff.BigMapDiff) string { + return fmt.Sprintf("%s_%s_%d", bmd.Network, bmd.Address, bmd.Ptr) + }) +} + +// Count - +func (storage *Storage) Count(network string, ptr int64) (int64, error) { + query := storage.db.Query(models.DocBigMapDiff). + Distinct("key_hash"). + Match("network", network). + WhereInt64("ptr", reindexer.EQ, ptr) + + return storage.db.Count(query) +} + +// Previous - +func (storage *Storage) Previous(filters []bigmapdiff.BigMapDiff, indexedTime int64, address string) ([]bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("address", address). + WhereInt64("indexed_time", reindexer.LT, indexedTime) + + if len(filters) > 0 { + query = query.OpenBracket() + for i := range filters { + query = query.OpenBracket(). + Match("key_hash", filters[i].KeyHash). + Match("bin_path", filters[i].BinPath). 
+ CloseBracket() + if len(filters)-1 > i { + query = query.Or() + } + } + query = query.CloseBracket() + } + query = query.Sort("indexed_time", true) + + return storage.getTop(query, func(bmd bigmapdiff.BigMapDiff) string { + return fmt.Sprintf("%s_%s", bmd.KeyHash, bmd.BinPath) + }) +} + +// GetUniqueByOperationID - +func (storage *Storage) GetUniqueByOperationID(operationID string) ([]bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("operation_id", operationID). + Sort("indexed_time", true) + + return storage.getTop(query, func(bmd bigmapdiff.BigMapDiff) string { + return fmt.Sprintf("%d_%s", bmd.Ptr, bmd.KeyHash) + }) +} + +// GetByPtrAndKeyHash - +func (storage *Storage) GetByPtrAndKeyHash(ptr int64, network, keyHash string, size, offset int64) ([]bigmapdiff.BigMapDiff, int64, error) { + if ptr < 0 { + return nil, 0, errors.Errorf("Invalid pointer value: %d", ptr) + } + if size == 0 { + size = core.DefaultSize + } + + query := storage.db.Query(models.DocBigMapDiff). + Match("network", network). + Match("key_hash", keyHash). + WhereInt64("ptr", reindexer.EQ, ptr). + Limit(int(size)). + Offset(int(offset)). + Sort("level", true) + + var total int + result := make([]bigmapdiff.BigMapDiff, 0) + total, err := storage.db.GetAllByQueryWithTotal(query, &result) + + return result, int64(total), err +} + +// GetByOperationID - +func (storage *Storage) GetByOperationID(operationID string) ([]*bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("operation_id", operationID) + + result := make([]*bigmapdiff.BigMapDiff, 0) + err := storage.db.GetAllByQuery(query, &result) + return result, err +} + +// GetByPtr - +// TODO: check +func (storage *Storage) GetByPtr(address, network string, ptr int64) ([]bigmapdiff.BigMapDiff, error) { + query := storage.db.Query(models.DocBigMapDiff). + Match("network", network). + Match("address", address). 
+ WhereInt64("ptr", reindexer.EQ, ptr) + + keyHash, err := storage.db.GetUnique("key_hash", query) + if err != nil { + return nil, err + } + + secondQuery := storage.db.Query(models.DocBigMapDiff). + Match("key_hash", keyHash...). + Match("network", network). + Match("address", address). + WhereInt64("ptr", reindexer.EQ, ptr). + Sort("indexed_time", true) + + response := make([]bigmapdiff.BigMapDiff, 0) + err = storage.db.GetAllByQuery(secondQuery, &response) + return response, err +} + +// Get - +func (storage *Storage) Get(ctx bigmapdiff.GetContext) (response []bigmapdiff.BigMapDiff, err error) { + if *ctx.Ptr < 0 { + return nil, errors.Errorf("Invalid pointer value: %d", *ctx.Ptr) + } + query := storage.db.Query(models.DocBigMapDiff) + buildGetContext(ctx, query) + err = storage.db.GetAllByQuery(query, &response) + return +} + +func (storage *Storage) getTop(query *reindexer.Query, idFunc func(bigmapdiff.BigMapDiff) string) ([]bigmapdiff.BigMapDiff, error) { + all := make([]bigmapdiff.BigMapDiff, 0) + if err := storage.db.GetAllByQuery(query, &all); err != nil { + return nil, err + } + + response := make([]bigmapdiff.BigMapDiff, 0) + found := make(map[string]struct{}) + for i := range all { + id := idFunc(all[i]) + if _, ok := found[id]; ok { + continue + } + found[id] = struct{}{} + response = append(response, all[i]) + } + return response, nil +} diff --git a/internal/reindexer/block/storage.go b/internal/reindexer/block/storage.go new file mode 100644 index 000000000..7bc915a71 --- /dev/null +++ b/internal/reindexer/block/storage.go @@ -0,0 +1,71 @@ +package block + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/block" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// 
Get - +func (storage *Storage) Get(network string, level int64) (block block.Block, err error) { + query := storage.db.Query(models.DocBlocks). + WhereString("network", reindexer.EQ, network). + WhereInt64("level", reindexer.EQ, level) + + err = storage.db.GetOne(query, &block) + return +} + +// Last - returns current indexer state for network +func (storage *Storage) Last(network string) (block block.Block, err error) { + query := storage.db.Query(models.DocBlocks). + WhereString("network", reindexer.EQ, network). + Sort("level", true) + + err = storage.db.GetOne(query, &block) + return +} + +// LastByNetworks - return last block for all networks +func (storage *Storage) LastByNetworks() ([]block.Block, error) { + network, err := storage.db.GetUnique("network", storage.db.Query(models.DocBlocks)) + if err != nil { + return nil, err + } + + response := make([]block.Block, 0) + for i := range network { + blockQuery := storage.db.Query(models.DocBlocks). + Match("network", network[i]). + Sort("level", true). + Limit(1) + + var b block.Block + if err := storage.db.GetOne(blockQuery, &b); err != nil { + return nil, err + } + response = append(response, b) + } + return response, nil +} + +// GetNetworkAlias - +func (storage *Storage) GetNetworkAlias(chainID string) (string, error) { + query := storage.db.Query(models.DocBlocks). 
+ WhereString("chain_id", reindexer.EQ, chainID) + + var block block.Block + err := storage.db.GetOne(query, &block) + return block.Network, err +} diff --git a/internal/reindexer/bulk/storage.go b/internal/reindexer/bulk/storage.go new file mode 100644 index 000000000..c57551e8f --- /dev/null +++ b/internal/reindexer/bulk/storage.go @@ -0,0 +1,106 @@ +package bulk + +import ( + "reflect" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/reindexer/core" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// Insert - +func (storage *Storage) Insert(items []models.Model) error { + if len(items) == 0 { + return nil + } + + for i := range items { + if _, err := storage.db.Insert(items[i].GetIndex(), items[i]); err != nil { + return err + } + } + return nil +} + +// Update - +func (storage *Storage) Update(updates []models.Model) error { + if len(updates) == 0 { + return nil + } + for i := range updates { + if _, err := storage.db.Update(updates[i].GetIndex(), updates[i]); err != nil { + return err + } + } + return nil +} + +// Delete - +func (storage *Storage) Delete(updates []models.Model) error { + if len(updates) == 0 { + return nil + } + for i := range updates { + if err := storage.db.Delete(updates[i].GetIndex(), updates[i]); err != nil { + return err + } + } + return nil +} + +// RemoveField - +func (storage *Storage) RemoveField(field string, where []models.Model) error { + if len(where) == 0 { + return nil + } + for i := range where { + it := storage.db.Query(where[i].GetIndex()).Match("id", where[i].GetID()).Drop(field).Update() + defer it.Close() + + if it.Error() != nil { + return it.Error() + } + } + return nil +} + +// UpdateField - +func (storage *Storage) UpdateField(where []contract.Contract, fields ...string) error { + if 
len(where) == 0 { + return nil + } + tx, err := storage.db.BeginTx(models.DocContracts) + if err != nil { + return err + } + for i := range where { + query := tx.Query().Match("id", where[i].GetID()) + for j := range fields { + value := storage.getFieldValue(where[i], fields[j]) + query = query.Set(fields[j], value) + } + it := query.Update() + defer it.Close() + + if it.Error() != nil { + return it.Error() + } + } + return tx.Commit() +} + +func (storage *Storage) getFieldValue(c contract.Contract, field string) interface{} { + r := reflect.ValueOf(c) + f := reflect.Indirect(r).FieldByName(field) + return f.Interface() +} diff --git a/internal/reindexer/contract/storage.go b/internal/reindexer/contract/storage.go new file mode 100644 index 000000000..daab922c6 --- /dev/null +++ b/internal/reindexer/contract/storage.go @@ -0,0 +1,294 @@ +package contract + +import ( + "math/rand" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/pkg/errors" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// Get - +func (storage *Storage) Get(by map[string]interface{}) (c contract.Contract, err error) { + query := storage.db.Query(models.DocContracts) + + for field, value := range by { + query = query.Where(field, reindexer.EQ, value) + } + + err = storage.db.GetOne(query, &c) + return +} + +// GetMany - +func (storage *Storage) GetMany(by map[string]interface{}) (contracts []contract.Contract, err error) { + query := storage.db.Query(models.DocContracts) + + for field, value := range by { + query = query.Where(field, reindexer.EQ, value) + } + + err = storage.db.GetAllByQuery(query, &contracts) + return +} + +// GetRandom - +func (storage *Storage) GetRandom() (c 
contract.Contract, err error) { + query := storage.db.Query(models.DocContracts).WhereInt("tx_count", reindexer.GE, 2) + count, err := storage.db.Count(query) + if err != nil { + return c, err + } + + idx := rand.Intn(int(count)) + secondQuery := storage.db.Query(models.DocContracts).WhereInt("tx_count", reindexer.GE, 2).Limit(1).Offset(idx) + err = storage.db.GetOne(secondQuery, &c) + return +} + +// IsFA - +func (storage *Storage) IsFA(network, address string) (bool, error) { + query := storage.db.Query(models.DocContracts). + Match("network", network). + Match("address", address). + Match("tags", "fa12", "fa1") + + it := query.Exec() + if it.Error() != nil { + return false, it.Error() + } + + return it.TotalCount() == 1, nil +} + +// UpdateMigrationsCount - increments migrations_count on the contract document (must target the contracts namespace, not operations) +func (storage *Storage) UpdateMigrationsCount(address, network string) error { + contract := contract.NewEmptyContract(network, address) + it := storage.db.Query(models.DocContracts). + Where("id", reindexer.EQ, contract.GetID()). + Set("migrations_count", "migrations_count + 1"). + Update() + defer it.Close() + + return it.Error() +} + +// GetAddressesByNetworkAndLevel - +func (storage *Storage) GetAddressesByNetworkAndLevel(network string, maxLevel int64) ([]string, error) { + query := storage.db.Query(models.DocContracts). + Select("address"). + Match("network", network). + WhereInt64("level", reindexer.GT, maxLevel) + + addresses := make([]string, 0) + err := storage.db.GetAllByQuery(query, &addresses) + return addresses, err +} + +// GetIDsByAddresses - +func (storage *Storage) GetIDsByAddresses(addresses []string, network string) ([]string, error) { + if len(addresses) == 0 { + return nil, nil + } + + query := storage.db.Query(models.DocContracts). + Select("id"). + Match("network", network).
+ OpenBracket() + + for i := range addresses { + query = query.Match("address", addresses[i]) + if i < len(addresses)-1 { + query = query.Or() + } + } + query = query.CloseBracket() + + ids := make([]string, 0) + err := storage.db.GetAllByQuery(query, &ids) + return ids, err +} + +// GetByAddresses - +func (storage *Storage) GetByAddresses(addresses []contract.Address) (contracts []contract.Contract, err error) { + if len(addresses) == 0 { + return + } + query := storage.db.Query(models.DocContracts) + for i := range addresses { + query = query.OpenBracket(). + Match("address", addresses[i].Address). + Match("network", addresses[i].Network). + CloseBracket() + if i < len(addresses)-1 { + query = query.Or() + } + } + + err = storage.db.GetAllByQuery(query, &contracts) + return +} + +// GetProjectsLastContract - +func (storage *Storage) GetProjectsLastContract() ([]contract.Contract, error) { + query := storage.db.Query(models.DocContracts).Sort("timestamp", true) + return storage.topContracts(query, func(c contract.Contract) string { + return c.ProjectID + }) +} + +// GetSameContracts - +func (storage *Storage) GetSameContracts(c contract.Contract, size, offset int64) (pcr contract.SameResponse, err error) { + if c.Fingerprint == nil { + return pcr, errors.Errorf("Invalid contract data") + } + + if size == 0 { + size = core.DefaultSize + } + + query := storage.db.Query(models.DocContracts). + Match("hash", c.Hash). + Match("address", c.Address). + Limit(int(size)). + Offset(int(offset)). 
+ Sort("last_action", true) + + contracts := make([]contract.Contract, 0) + total, err := storage.db.GetAllByQueryWithTotal(query, &contracts) + if err != nil { + return + } + + pcr.Contracts = contracts + pcr.Count = int64(total) + return +} + +// GetSimilarContracts - +func (storage *Storage) GetSimilarContracts(c contract.Contract, size, offset int64) ([]contract.Similar, int, error) { + if c.Fingerprint == nil { + return nil, 0, nil + } + + if size == 0 { + size = core.DefaultSize + } + + query := storage.db.Query(models.DocContracts). + Distinct("hash"). + Select("hash"). + Match("project_id", c.ProjectID). + Not(). + Match("hash", c.Hash).ReqTotal() + + query.AggregateFacet("hash").Limit(int(size)).Offset(int(offset)) + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return nil, 0, it.Error() + } + + count := make(map[string]int) + hash := make([]string, 0) + agg := it.AggResults()[1] + for _, bucket := range agg.Facets { + hash = append(hash, bucket.Values[0]) + count[bucket.Values[0]] = bucket.Count + } + + sit := storage.db.Query(models.DocContracts).Match("hash", hash...).Exec() + defer it.Close() + + if sit.Error() != nil { + return nil, 0, sit.Error() + } + + found := make(map[string]struct{}) + contracts := make([]contract.Similar, 0) + + for sit.Next() { + var c contract.Contract + sit.NextObj(&c) + if _, ok := found[c.Hash]; ok { + continue + } + found[c.Hash] = struct{}{} + contracts = append(contracts, contract.Similar{ + Contract: &c, + Count: int64(count[c.Hash]), + }) + + } + + total := len(it.AggResults()[0].Distincts) + return contracts, total, nil +} + +// GetDiffTasks - +func (storage *Storage) GetDiffTasks() ([]contract.DiffTask, error) { + return nil, nil +} + +// GetTokens - +func (storage *Storage) GetTokens(network, tokenInterface string, offset, size int64) ([]contract.Contract, int64, error) { + tags := []string{"fa12", "fa1", "fa2"} + if tokenInterface == "fa12" || tokenInterface == "fa1" || tokenInterface == 
"fa2" { + tags = []string{tokenInterface} + } + + query := storage.db.Query(models.DocContracts). + Match("network", network). + Match("tags", tags...). + Sort("timestamp", true) + + if size > 0 { + query = query.Limit(int(size)) + } + + if offset > 0 { + query = query.Offset(int(offset)) + } + + contracts := make([]contract.Contract, 0) + total, err := storage.db.GetAllByQueryWithTotal(query, &contracts) + if err != nil { + return nil, 0, err + } + + return contracts, int64(total), nil +} + +func (storage *Storage) topContracts(query *reindexer.Query, idFunc func(c contract.Contract) string) ([]contract.Contract, error) { + all := make([]contract.Contract, 0) + if err := storage.db.GetAllByQuery(query, &all); err != nil { + return nil, err + } + + response := make([]contract.Contract, 0) + found := make(map[string]struct{}) + for i := range all { + id := idFunc(all[i]) + if _, ok := found[id]; ok { + continue + } + found[id] = struct{}{} + response = append(response, all[i]) + } + return response, nil + +} diff --git a/internal/reindexer/core/comparator.go b/internal/reindexer/core/comparator.go new file mode 100644 index 000000000..6f44bd234 --- /dev/null +++ b/internal/reindexer/core/comparator.go @@ -0,0 +1,22 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/restream/reindexer" +) + +// SetComaparator - +func SetComaparator(field string, cmp tzip.Comparator, query *reindexer.Query) { + switch cmp.Comparator { + case "gt": + query.WhereInt64(field, reindexer.GT, cmp.Value) + case "gte": + query.WhereInt64(field, reindexer.GE, cmp.Value) + case "lt": + query.WhereInt64(field, reindexer.LT, cmp.Value) + case "lte": + query.WhereInt64(field, reindexer.LE, cmp.Value) + case "eq": + query.WhereInt64(field, reindexer.EQ, cmp.Value) + } +} diff --git a/internal/reindexer/core/data.go b/internal/reindexer/core/data.go new file mode 100644 index 000000000..05d55b77b --- /dev/null +++ 
b/internal/reindexer/core/data.go @@ -0,0 +1,52 @@ +package core + +import ( + "time" + + "github.com/baking-bad/bcdhub/internal/contractparser/cerrors" + "github.com/baking-bad/bcdhub/internal/models/operation" +) + +// EventOperation - +type EventOperation struct { + Network string `json:"network"` + Hash string `json:"hash"` + Internal bool `json:"internal"` + Status string `json:"status"` + Timestamp time.Time `json:"timestamp"` + Kind string `json:"kind"` + Fee int64 `json:"fee,omitempty"` + Amount int64 `json:"amount,omitempty"` + Entrypoint string `json:"entrypoint,omitempty"` + Source string `json:"source"` + SourceAlias string `json:"source_alias,omitempty"` + Destination string `json:"destination,omitempty"` + DestinationAlias string `json:"destination_alias,omitempty"` + Delegate string `json:"delegate,omitempty"` + DelegateAlias string `json:"delegate_alias,omitempty"` + + Result *operation.Result `json:"result,omitempty"` + Errors []*cerrors.Error `json:"errors,omitempty"` + Burned int64 `json:"burned,omitempty"` +} + +// EventMigration - +type EventMigration struct { + Network string `json:"network"` + Protocol string `json:"protocol"` + PrevProtocol string `json:"prev_protocol,omitempty"` + Hash string `json:"hash,omitempty"` + Timestamp time.Time `json:"timestamp"` + Level int64 `json:"level"` + Address string `json:"address"` + Kind string `json:"kind"` +} + +// EventContract - +type EventContract struct { + Network string `json:"network"` + Address string `json:"address"` + Hash string `json:"hash"` + ProjectID string `json:"project_id"` + Timestamp time.Time `json:"timestamp"` +} diff --git a/internal/reindexer/core/errors.go b/internal/reindexer/core/errors.go new file mode 100644 index 000000000..723c13bc1 --- /dev/null +++ b/internal/reindexer/core/errors.go @@ -0,0 +1,51 @@ +package core + +import ( + "strings" + + "github.com/elastic/go-elasticsearch/v8/esapi" + "github.com/pkg/errors" +) + +// default errors 
+var ( + ErrQueryPointerIsNil = errors.New("Query pointer is nil") +) + +// IsRecordNotFound - +func (r *Reindexer) IsRecordNotFound(err error) bool { + return false +} + +// RecordNotFoundError - +type RecordNotFoundError struct { + index string + id string +} + +// NewRecordNotFoundError - +func NewRecordNotFoundError(index, id string) *RecordNotFoundError { + return &RecordNotFoundError{index, id} +} + +// NewRecordNotFoundErrorFromResponse - +func NewRecordNotFoundErrorFromResponse(resp *esapi.Response) *RecordNotFoundError { + return &RecordNotFoundError{resp.String(), ""} +} + +// Error - +func (e *RecordNotFoundError) Error() string { + var builder strings.Builder + builder.WriteString("Record is not found: ") + if e.index != "" { + builder.WriteString("index=") + builder.WriteString(e.index) + builder.WriteString(" ") + } + if e.id != "" { + builder.WriteString("id=") + builder.WriteString(e.id) + builder.WriteString(" ") + } + return builder.String() +} diff --git a/internal/reindexer/core/events.go b/internal/reindexer/core/events.go new file mode 100644 index 000000000..733753046 --- /dev/null +++ b/internal/reindexer/core/events.go @@ -0,0 +1,243 @@ +package core + +import ( + "strings" + + constants "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/restream/reindexer" +) + +// GetEvents - +func (r *Reindexer) GetEvents(subscriptions []models.SubscriptionRequest, size, offset int64) ([]models.Event, error) { + if len(subscriptions) == 0 { + return []models.Event{}, nil + } + + if size == 0 { + size = DefaultSize + } + + events := make([]models.Event, 0) + contractEvents, err := r.getContractEvents(subscriptions, size, offset) + if err != nil { + return nil, err + } + events = append(events, contractEvents...) 
+ + operationEvents, err := r.getOperationEvents(subscriptions, size, offset) + if err != nil { + return nil, err + } + events = append(events, operationEvents...) + + migrationEvents, err := r.getMigrationEvents(subscriptions, size, offset) + if err != nil { + return nil, err + } + events = append(events, migrationEvents...) + return events, nil +} + +func (r *Reindexer) getContractEvents(subscriptions []models.SubscriptionRequest, size, offset int64) ([]models.Event, error) { + tx, err := r.BeginTx(models.DocContracts) + if err != nil { + return nil, err + } + + events := make([]models.Event, 0) + for _, subscription := range subscriptions { + if !subscription.WithSame && !subscription.WithSimilar { + continue + } + + query := tx.Query() + getSubscriptionWithSame(subscription, query) + getSubscriptionWithSimilar(subscription, query) + it := query.Limit(int(size)).Offset(int(offset)).Exec() + defer it.Close() + + if it.Error() != nil { + if err := tx.Rollback(); err != nil { + return nil, err + } + return nil, it.Error() + } + + for it.Next() { + var event EventContract + it.NextObj(&event) + res := models.Event{ + Body: event, + Network: subscription.Network, + Address: subscription.Address, + Alias: subscription.Alias, + } + + if event.Hash == subscription.Hash { + res.Type = models.EventTypeSame + } else { + res.Type = models.EventTypeSimilar + } + events = append(events, res) + } + } + + return events, tx.Commit() +} + +func (r *Reindexer) getOperationEvents(subscriptions []models.SubscriptionRequest, size, offset int64) ([]models.Event, error) { + tx, err := r.BeginTx(models.DocOperations) + if err != nil { + return nil, err + } + + events := make([]models.Event, 0) + for _, subscription := range subscriptions { + if !subscription.WithCalls && !subscription.WithErrors && !subscription.WithDeployments { + continue + } + query := tx.Query() + getEventsWatchCalls(subscription, query) + getEventsWatchErrors(subscription, query) + 
getEventsWatchDeployments(subscription, query) + it := query.Limit(int(size)).Offset(int(offset)).Exec() + defer it.Close() + + if it.Error() != nil { + if err := tx.Rollback(); err != nil { + return nil, err + } + return nil, it.Error() + } + + for it.Next() { + var event EventOperation + it.NextObj(&event) + res := models.Event{ + Body: event, + Network: subscription.Network, + Address: subscription.Address, + } + + switch { + case event.Status != constants.Applied: + res.Type = models.EventTypeError + case event.Source == subscription.Address && event.Kind == constants.Origination: + res.Type = models.EventTypeDeploy + case event.Source == subscription.Address && event.Kind == constants.Transaction: + res.Type = models.EventTypeCall + case event.Destination == subscription.Address && event.Kind == constants.Transaction: + res.Type = models.EventTypeInvoke + } + events = append(events, res) + } + } + return events, tx.Commit() +} + +func (r *Reindexer) getMigrationEvents(subscriptions []models.SubscriptionRequest, size, offset int64) ([]models.Event, error) { + tx, err := r.BeginTx(models.DocMigrations) + if err != nil { + return nil, err + } + events := make([]models.Event, 0) + for _, subscription := range subscriptions { + if !subscription.WithMigrations { + continue + } + it := tx.Query(). + Match("kind", constants.MigrationBootstrap, constants.MigrationLambda, constants.MigrationUpdate). + Match("network", subscription.Network). + Match("address", subscription.Address). 
+ Limit(int(size)).Offset(int(offset)).Exec() + defer it.Close() + + if it.Error() != nil { + if err := tx.Rollback(); err != nil { + return nil, err + } + return nil, it.Error() + } + + for it.Next() { + var event EventOperation + it.NextObj(&event) + events = append(events, models.Event{ + Body: event, + Network: subscription.Network, + Address: subscription.Address, + Type: models.EventTypeMigration, + Alias: subscription.Alias, + }) + } + } + return events, tx.Commit() +} + +func getEventsWatchDeployments(subscription models.SubscriptionRequest, query *reindexer.Query) { + if !subscription.WithDeployments { + return + } + + query. + Match("kind", constants.Origination). + Match("network", subscription.Network). + Match("source", subscription.Address) +} + +func getEventsWatchCalls(subscription models.SubscriptionRequest, query *reindexer.Query) { + if !subscription.WithCalls { + return + } + + addressKeyword := "destination" + if strings.HasPrefix(subscription.Address, "tz") { + addressKeyword = "source" + } + query. + Match("kind", constants.Transaction). + Match("network", subscription.Network). + Match(addressKeyword, subscription.Address). + Match("status", constants.Applied) +} + +func getEventsWatchErrors(subscription models.SubscriptionRequest, query *reindexer.Query) { + if !subscription.WithErrors { + return + } + + addressKeyword := "destination" + if strings.HasPrefix(subscription.Address, "tz") { + addressKeyword = "source" + } + + query. + Match("network", subscription.Network). + Match(addressKeyword, subscription.Address). + Match("status", constants.Applied) +} + +func getSubscriptionWithSame(subscription models.SubscriptionRequest, query *reindexer.Query) { + if !subscription.WithSame { + return + } + + query. + Match("hash", subscription.Hash). + Not(). 
+ Match("address", subscription.Address) +} + +func getSubscriptionWithSimilar(subscription models.SubscriptionRequest, query *reindexer.Query) { + if !subscription.WithSimilar { + return + } + + query. + Match("project_id", subscription.ProjectID). + Not(). + Match("hash", subscription.Hash). + Not(). + Match("address", subscription.Address) +} diff --git a/internal/reindexer/core/get.go b/internal/reindexer/core/get.go new file mode 100644 index 000000000..ec492a0fa --- /dev/null +++ b/internal/reindexer/core/get.go @@ -0,0 +1,186 @@ +package core + +import ( + "reflect" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/pkg/errors" + "github.com/restream/reindexer" +) + +// Sizes - +const ( + DefaultSize = 10 +) + +// Count - +func (r *Reindexer) Count(query *reindexer.Query) (int64, error) { + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return 0, it.Error() + } + count := it.TotalCount() + return int64(count), nil +} + +// GetOne - +func (r *Reindexer) GetOne(query *reindexer.Query, output interface{}) error { + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return it.Error() + } + it.NextObj(output) + return nil +} + +// GetByID - +func (r *Reindexer) GetByID(ret models.Model) error { + query := r.Query(ret.GetIndex()).WhereString("id", reindexer.EQ, ret.GetID()) + return r.GetOne(query, ret) +} + +// GetByIDs - +func (r *Reindexer) GetByIDs(output interface{}, ids ...string) error { + index, err := getIndex(output) + if err != nil { + return err + } + query := r.Query(index). + WhereString("id", reindexer.EQ, ids...) 
+ + return r.GetAllByQuery(query, output) +} + +// GetAll - +func (r *Reindexer) GetAll(output interface{}) error { + index, err := getIndex(output) + if err != nil { + return err + } + return r.GetAllByQuery(r.Query(index), output) +} + +// GetByNetwork - +func (r *Reindexer) GetByNetwork(network string, output interface{}) error { + index, err := getIndex(output) + if err != nil { + return err + } + query := r.Query(index). + WhereString("network", reindexer.EQ, network). + Sort("level", false) + + return r.GetAllByQuery(query, output) +} + +// GetByNetworkWithSort - +func (r *Reindexer) GetByNetworkWithSort(network, sortField, sortOrder string, output interface{}) error { + index, err := getIndex(output) + if err != nil { + return err + } + query := r.Query(index). + WhereString("network", reindexer.EQ, network). + Sort(sortField, sortOrder == "desc") + + return r.GetAllByQuery(query, output) +} + +// GetAllByQuery - +func (r *Reindexer) GetAllByQuery(query *reindexer.Query, output interface{}) error { + if query == nil { + return ErrQueryPointerIsNil + } + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return it.Error() + } + + if it.Count() == 0 { + return nil + } + + return parse(it, output) +} + +// GetAllByQueryWithTotal - +func (r *Reindexer) GetAllByQueryWithTotal(query *reindexer.Query, output interface{}) (int, error) { + if query == nil { + return 0, ErrQueryPointerIsNil + } + + it := query.ReqTotal().Exec() + defer it.Close() + + if it.Error() != nil { + return 0, it.Error() + } + + if it.Count() == 0 { + return 0, nil + } + + return it.TotalCount(), parse(it, output) +} + +func getElementType(output interface{}) (reflect.Type, error) { + arr := reflect.TypeOf(output) + if arr.Kind() != reflect.Ptr { + return arr.Elem(), errors.Errorf("Invalid `output` type: %s", arr.Kind()) + } + arr = arr.Elem() + if arr.Kind() == reflect.Slice { + return arr.Elem(), nil + } + return arr, nil +} + +func getIndex(output interface{}) (string, 
error) { + typ, err := getElementType(output) + if err != nil { + return "", err + } + newItem := reflect.New(typ) + interfaceType := reflect.TypeOf((*models.Model)(nil)).Elem() + + if !newItem.Type().Implements(interfaceType) { + return "", errors.Errorf("Implements: 'output' is not implemented `Model` interface") + } + + getIndex := newItem.MethodByName("GetIndex") + if !getIndex.IsValid() { + return "", errors.Errorf("getIndex: 'output' is not implemented `Model` interface") + } + getIndexResult := getIndex.Call(nil) + if len(getIndexResult) != 1 { + return "", errors.Errorf("Something went wrong during call GetIndex") + } + return getIndexResult[0].Interface().(string), nil +} + +func parse(it *reindexer.Iterator, output interface{}) error { + typ, err := getElementType(output) + if err != nil { + return err + } + el := reflect.ValueOf(output).Elem() + + for it.Next() { + obj := reflect.New(typ).Interface() + it.NextObj(obj) + val := reflect.ValueOf(obj).Elem() + if el.Kind() == reflect.Slice { + el.Set(reflect.Append(el, val)) + } else { + el.Set(val) + } + } + return nil +} diff --git a/internal/reindexer/core/histogram.go b/internal/reindexer/core/histogram.go new file mode 100644 index 000000000..62377537c --- /dev/null +++ b/internal/reindexer/core/histogram.go @@ -0,0 +1,10 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/models" +) + +// GetDateHistogram - +func (r *Reindexer) GetDateHistogram(period string, opts ...models.HistogramOption) ([][]int64, error) { + return make([][]int64, 0), nil +} diff --git a/internal/reindexer/core/reindexer.go b/internal/reindexer/core/reindexer.go new file mode 100644 index 000000000..99ef7942a --- /dev/null +++ b/internal/reindexer/core/reindexer.go @@ -0,0 +1,73 @@ +package core + +import ( + "fmt" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/restream/reindexer" +) + +// Reindexer - +type Reindexer struct { + *reindexer.Reindexer +} + +// New - +func 
New(uri string) (*Reindexer, error) { + db := reindexer.NewReindex(uri) + return &Reindexer{db}, nil +} + +// CreateIndexes - +func (r *Reindexer) CreateIndexes() error { + for _, index := range models.AllModels() { + if err := r.OpenNamespace(index.GetIndex(), reindexer.DefaultNamespaceOptions(), index); err != nil { + return err + } + } + return nil +} + +// DeleteByLevelAndNetwork - +func (r *Reindexer) DeleteByLevelAndNetwork(indices []string, network string, maxLevel int64) error { + for i := range indices { + val := r.ExecSQL(fmt.Sprintf("DELETE FROM %s WHERE network = '%s' AND level > %d", indices[i], network, maxLevel)) + if val.Error() != nil { + return val.Error() + } + } + return nil +} + +// DeleteIndices - +func (r *Reindexer) DeleteIndices(indices []string) error { + for i := range indices { + if err := r.DropNamespace(indices[i]); err != nil { + return err + } + } + return nil +} + +// DeleteByContract - +func (r *Reindexer) DeleteByContract(indices []string, network, address string) error { + for i := range indices { + val := r.ExecSQL(fmt.Sprintf("DELETE FROM %s WHERE network = '%s' AND contract = '%s'", indices[i], network, address)) + if val.Error() != nil { + return val.Error() + } + } + return nil +} + +// GetUnique - +func (r *Reindexer) GetUnique(field string, query *reindexer.Query) ([]string, error) { + it := query.Distinct(field).Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + return it.AggResults()[0].Distincts, nil +} diff --git a/internal/reindexer/core/search.go b/internal/reindexer/core/search.go new file mode 100644 index 000000000..97f7598d8 --- /dev/null +++ b/internal/reindexer/core/search.go @@ -0,0 +1,168 @@ +package core + +import ( + "fmt" + "strings" + "time" + + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/contract" + 
"github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/search" + "github.com/pkg/errors" + "github.com/restream/reindexer" +) + +// SearchByText - +func (r *Reindexer) SearchByText(text string, offset int64, fields []string, filters map[string]interface{}, group bool) (models.Result, error) { + result := models.Result{} + if text == "" { + return result, errors.Errorf("Empty search string. Please query something") + } + + query, err := r.prepareSearchQuery(text, filters, fields, offset) + if err != nil { + return result, err + } + + start := time.Now() + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return result, err + } + items, err := parseSearchResponse(it) + if err != nil { + return result, err + } + result.Time = time.Since(start).Milliseconds() + result.Count = int64(it.TotalCount()) + result.Items = items + + return result, nil +} + +func (r *Reindexer) prepareSearchQuery(searchString string, filters map[string]interface{}, fields []string, offset int64) (*reindexer.Query, error) { + ctx := search.NewContext() + + if search.IsPtrSearch(searchString) { + ctx.Text = strings.TrimPrefix(searchString, "ptr:") + ctx.Indices = []string{models.DocBigMapDiff} + ctx.Fields = []string{"ptr"} + } else { + info, err := getFields(ctx.Text, filters, fields) + if err != nil { + return nil, err + } + ctx.Indices = info.Indices + ctx.Fields = info.Scores + ctx.Text = fmt.Sprintf("%s*", searchString) + } + ctx.Offset = offset + + return r.buildSeacrhQuery(ctx, filters) +} + +func (r *Reindexer) buildSeacrhQuery(ctx search.Context, filters map[string]interface{}) (*reindexer.Query, error) { + var query *reindexer.Query + for i := range ctx.Indices { + subQuery := r.Query(ctx.Indices[i]) + for _, field := range ctx.Fields { + subQuery = 
subQuery.Match(field, ctx.Text) + } + if err := prepareFilters(filters, subQuery); err != nil { + return nil, err + } + + subQuery = subQuery.Offset(int(ctx.Offset)).Functions("text.highlight(,)") + + if query == nil { + query = subQuery + } else { + query.Merge(subQuery) + } + } + + query = query.ReqTotal() + + return query, nil +} + +func getFields(searchString string, filters map[string]interface{}, fields []string) (search.ScoreInfo, error) { + var indices []string + if val, ok := filters["indices"]; ok { + indices = val.([]string) + delete(filters, "indices") + } + + return search.GetScores(searchString, fields, indices...) +} + +func prepareFilters(filters map[string]interface{}, query *reindexer.Query) error { + for field, value := range filters { + switch field { + case "from": + query = query.Where("timestamp", reindexer.GT, value) + case "to": + query = query.Where("timestamp", reindexer.LT, value) + case "networks": + networks, ok := value.([]string) + if !ok { + return errors.Errorf("Invalid type for 'network' filter (wait []string): %T", value) + } + query = query.Match("network", networks...) + case "languages": + languages, ok := value.([]string) + if !ok { + return errors.Errorf("Invalid type for 'network' filter (wait []string): %T", value) + } + query = query.Match("language", languages...) 
+ default: + return errors.Errorf("Unknown search filter: %s", field) + } + } + return nil +} + +func parseSearchResponse(it *reindexer.Iterator) ([]models.Item, error) { + items := make([]models.Item, 0) + for it.Next() { + searchItem := models.Item{} + + switch elem := it.Object().(type) { + case contract.Contract: + searchItem.Type = models.DocContracts + searchItem.Value = elem.Address + searchItem.Body = elem + searchItem.Network = elem.Network + case operation.Operation: + searchItem.Type = models.DocOperations + searchItem.Value = elem.Hash + searchItem.Body = elem + searchItem.Network = elem.Network + case bigmapdiff.BigMapDiff: + searchItem.Type = models.DocBigMapDiff + searchItem.Value = elem.KeyHash + searchItem.Body = elem + searchItem.Network = elem.Network + case tezosdomain.TezosDomain: + searchItem.Type = models.DocTezosDomains + searchItem.Value = elem.Address + searchItem.Body = elem + searchItem.Network = elem.Network + case tzip.TZIP: + searchItem.Value = elem.Address + searchItem.Network = elem.Network + searchItem.Type = search.MetadataSearchType + default: + return nil, errors.Errorf("Unknown search type") + } + + items = append(items, searchItem) + } + return items, nil +} diff --git a/internal/reindexer/core/snapshots.go b/internal/reindexer/core/snapshots.go new file mode 100644 index 000000000..ae813dfbb --- /dev/null +++ b/internal/reindexer/core/snapshots.go @@ -0,0 +1,57 @@ +package core + +import ( + "io" + + "github.com/baking-bad/bcdhub/internal/models" +) + +// CreateAWSRepository - +func (r *Reindexer) CreateAWSRepository(name, awsBucketName, awsRegion string) error { + return nil +} + +// ListRepositories - +func (r *Reindexer) ListRepositories() ([]models.Repository, error) { + return nil, nil +} + +// CreateSnapshots - +func (r *Reindexer) CreateSnapshots(repository, snapshot string, indices []string) error { + return nil +} + +// RestoreSnapshots - +func (r *Reindexer) RestoreSnapshots(repository, snapshot string, 
indices []string) error { + return nil +} + +// ListSnapshots - +func (r *Reindexer) ListSnapshots(repository string) (string, error) { + return "", nil +} + +// SetSnapshotPolicy - +func (r *Reindexer) SetSnapshotPolicy(policyID, cronSchedule, name, repository string, expireAfterInDays int64) error { + return nil +} + +// GetAllPolicies - +func (r *Reindexer) GetAllPolicies() ([]string, error) { + return nil, nil +} + +// GetMappings - +func (r *Reindexer) GetMappings(indices []string) (map[string]string, error) { + return nil, nil +} + +// CreateMapping - +func (r *Reindexer) CreateMapping(index string, reader io.Reader) error { + return nil +} + +// ReloadSecureSettings - +func (r *Reindexer) ReloadSecureSettings() error { + return nil +} diff --git a/internal/reindexer/core/stats.go b/internal/reindexer/core/stats.go new file mode 100644 index 000000000..fa9922622 --- /dev/null +++ b/internal/reindexer/core/stats.go @@ -0,0 +1,101 @@ +package core + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/restream/reindexer" +) + +func countByField(field string, query *reindexer.Query) (map[string]int64, error) { + query.AggregateFacet(field) + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + aggRes := it.AggResults()[0] + + response := make(map[string]int64) + for i := range aggRes.Facets { + response[aggRes.Facets[i].Values[0]] = int64(aggRes.Facets[i].Count) + } + return response, nil +} + +// GetNetworkCountStats - +func (r *Reindexer) GetNetworkCountStats(network string) (map[string]int64, error) { + res := make(map[string]int64) + for _, index := range []string{models.DocContracts, models.DocOperations} { + query := r.Query(index).Match("network", network) + + count, err := r.Count(query) + if err != nil { + return nil, err + } + res[index] = count + } + return res, nil +} + +// GetCallsCountByNetwork - +func (r *Reindexer) GetCallsCountByNetwork() (map[string]int64, error) 
{ + query := r.Query(models.DocContracts). + WhereString("entrypoint", reindexer.EMPTY, "") + + return countByField("network", query) +} + +// GetContractStatsByNetwork - +// TODO: to do =) +func (r *Reindexer) GetContractStatsByNetwork() (map[string]models.ContractCountStats, error) { + // query := NewQuery().Add( + // Aggs( + // AggItem{ + // "network", Item{ + // "terms": Item{ + // "field": "network.keyword", + // }, + // "core.Aggs": Item{ + // "same": Item{ + // "cardinality": Item{ + // "script": "doc['fingerprint.parameter'].value + '|' + doc['fingerprint.storage'].value + '|' + doc['fingerprint.code'].value", + // }, + // }, + // "balance": Sum("balance"), + // }, + // }, + // }, + // ), + // ).Zero() + + // var response getContractStatsByNetworkStats + // if err := e.Query([]string{models.DocContracts}, query, &response); err != nil { + // return nil, err + // } + + // counts := make(map[string]models.ContractCountStats) + // for _, item := range response.Agg.Network.Buckets { + // counts[item.Key] = models.ContractCountStats{ + // Total: item.DocCount, + // SameCount: item.Same.Value, + // Balance: int64(item.Balance.Value), + // TotalWithdrawn: int64(item.TotalWithdrawn.Value), + // } + // } + // return counts, nil + return nil, nil +} + +// GetFACountByNetwork - +func (r *Reindexer) GetFACountByNetwork() (map[string]int64, error) { + query := r.Query(models.DocContracts).Match("tags", "fa1", "fa12") + return countByField("network", query) +} + +// GetLanguagesForNetwork - +func (r *Reindexer) GetLanguagesForNetwork(network string) (map[string]int64, error) { + query := r.Query(models.DocContracts).Match("network", network) + return countByField("language", query) +} diff --git a/internal/reindexer/core/update.go b/internal/reindexer/core/update.go new file mode 100644 index 000000000..98aa44cef --- /dev/null +++ b/internal/reindexer/core/update.go @@ -0,0 +1,31 @@ +package core + +import ( + "reflect" + + 
"github.com/baking-bad/bcdhub/internal/models" +) + +// UpdateDoc - updates document +func (r *Reindexer) UpdateDoc(model models.Model) error { + _, err := r.Update(model.GetIndex(), model) + return err +} + +// UpdateFields - +func (r *Reindexer) UpdateFields(index, id string, data interface{}, fields ...string) error { + query := r.Query(index).Match("id", id) + for j := range fields { + value := r.getFieldValue(data, fields[j]) + query = query.Set(fields[j], value) + } + it := query.Update() + defer it.Close() + return it.Error() +} + +func (r *Reindexer) getFieldValue(data interface{}, field string) interface{} { + val := reflect.ValueOf(data) + f := reflect.Indirect(val).FieldByName(field) + return f.Interface() +} diff --git a/internal/reindexer/migration/storage.go b/internal/reindexer/migration/storage.go new file mode 100644 index 000000000..9c382dbbd --- /dev/null +++ b/internal/reindexer/migration/storage.go @@ -0,0 +1,41 @@ +package migration + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/migration" + "github.com/baking-bad/bcdhub/internal/reindexer/core" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// Get - +func (storage *Storage) Get(network, address string) (migrations []migration.Migration, err error) { + query := storage.db.Query(models.DocMigrations). + Match("network", network). + Match("address", address). + Sort("level", true) + + err = storage.db.GetAllByQuery(query, &migrations) + return +} + +// Count - +func (storage *Storage) Count(network, address string) (int64, error) { + query := storage.db.Query(models.DocMigrations). + Match("network", network). + OpenBracket(). + Match("source", address). + Or(). + Match("destination", address). 
+ CloseBracket() + + return storage.db.Count(query) +} diff --git a/internal/reindexer/operation/data.go b/internal/reindexer/operation/data.go new file mode 100644 index 000000000..2371872b4 --- /dev/null +++ b/internal/reindexer/operation/data.go @@ -0,0 +1,6 @@ +package operation + +type opgForContract struct { + Hash string `reindex:"hash"` + Counter int64 `reindex:"counter"` +} diff --git a/internal/reindexer/operation/storage.go b/internal/reindexer/operation/storage.go new file mode 100644 index 000000000..8e2729fd6 --- /dev/null +++ b/internal/reindexer/operation/storage.go @@ -0,0 +1,357 @@ +package operation + +import ( + "fmt" + "strings" + "time" + + "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/pkg/errors" + "github.com/restream/reindexer" +) + +const ( + sortString = "level * 10000000000 + counter * 1000 + internal ? (998 - nonce) : 999" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +func (storage *Storage) getContractOPG(address, network string, size uint64, filters map[string]interface{}) ([]opgForContract, error) { + if size == 0 { + size = core.DefaultSize + } + + query := storage.db.Query(models.DocOperations). + Distinct("hash").Distinct("counter").Distinct("level"). + OpenBracket(). + Match("source", address). + Or(). + Match("destination", address). + CloseBracket(). 
+ Match("network", network) + + if err := prepareOperationFilters(filters, query); err != nil { + return nil, err + } + it := query.Limit(int(size)).Sort("level", true).Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + resp := make([]opgForContract, 0) + for it.Next() { + var obj opgForContract + it.NextObj(&obj) + resp = append(resp, obj) + } + + return resp, nil +} + +func prepareOperationFilters(filters map[string]interface{}, query *reindexer.Query) error { + for k, v := range filters { + if v == "" || v == nil { + continue + } + switch k { + case "from": + query = query.Where("timestamp", reindexer.GE, v) + case "to": + query = query.Where("timestamp", reindexer.LE, v) + case "entrypoints": + query = query.Where("entrypoint", reindexer.EQ, v) + case "last_id": + query = query.Where("timestamp", reindexer.LT, v) + case "status": + query = query.Where("status", reindexer.EQ, v) + default: + return errors.Errorf("Unknown operation filter: %s %v", k, v) + } + + } + return nil +} + +// GetByContract - +func (storage *Storage) GetByContract(network, address string, size uint64, filters map[string]interface{}) (po operation.Pageable, err error) { + opg, err := storage.getContractOPG(address, network, size, filters) + if err != nil { + return + } + if len(opg) == 0 { + return + } + + query := storage.db.Query(models.DocOperations). + Match("network", network). 
+ OpenBracket() + + for i := range opg { + query = query.OpenBracket().Match("hash", opg[i].Hash).WhereInt64("counter", reindexer.EQ, opg[i].Counter).CloseBracket() + if len(opg)-1 > i { + query = query.Or() + } + } + query = query.CloseBracket().Sort(sortString, true) + query.AggregateMin("indexed_time") + + it := query.Exec() + defer it.Close() + + if err = it.Error(); err != nil { + return + } + + po.Operations = make([]operation.Operation, it.Count()) + for i := 0; i < it.Count(); i++ { + it.NextObj(&po.Operations[i]) + } + po.LastID = fmt.Sprintf("%.0f", it.AggResults()[0].Value) + return +} + +// Last - +func (storage *Storage) Last(network, address string, indexedTime int64) (op operation.Operation, err error) { + query := storage.db.Query(models.DocOperations). + Match("destination", address). + Match("network", network). + Match("status", consts.Applied). + WhereString("deffated_storage", reindexer.EMPTY, ""). + WhereInt64("indexed_time", reindexer.LT, indexedTime). + Sort("indexed_time", true) + + err = storage.db.GetOne(query, &op) + return +} + +// Get - +func (storage *Storage) Get(filters map[string]interface{}, size int64, sort bool) (operations []operation.Operation, err error) { + query := storage.db.Query(models.DocOperations) + for field, value := range filters { + query = query.Where(field, reindexer.EQ, value) + } + + if sort { + query = query.Sort(sortString, true) + } + + if size > 0 { + query = query.Limit(int(size)) + } + + err = storage.db.GetAllByQuery(query, &operations) + return +} + +// GetStats - +func (storage *Storage) GetStats(network, address string) (stats operation.Stats, err error) { + query := storage.db.Query(models.DocOperations). + Distinct("hash"). + Match("network", network). + OpenBracket(). + Match("source", address). + Or(). + Match("destination", address). + CloseBracket(). 
+ ReqTotal() + + query.AggregateMax("timestamp") + + it := query.Exec() + defer it.Close() + + if err = it.Error(); err != nil { + return + } + + stats.Count = int64(it.TotalCount()) + stats.LastAction = time.Unix(int64(it.AggResults()[0].Value), 0) // TODO: is the date valid? check parsing` + return +} + +// GetTokensStats - +func (storage *Storage) GetTokensStats(network string, addresses, entrypoints []string) (map[string]operation.TokenUsageStats, error) { + query := storage.db.Query(models.DocOperations) + + if len(addresses) > 0 { + query = query.Match("destination", addresses...) + } + if len(entrypoints) > 0 { + query = query.Match("entrypoint", entrypoints...) + } + + operations := make([]operation.Operation, 0) + if err := storage.db.GetAllByQuery(query, &operations); err != nil { + return nil, err + } + + all := make(map[string][]int64) + for i := range operations { + id := fmt.Sprintf("%s|%s", operations[i].Destination, operations[i].Entrypoint) + if _, ok := all[id]; !ok { + all[id] = make([]int64, 0) + } + all[id] = append(all[id], operations[i].Result.ConsumedGas) + } + + usageStats := make(map[string]operation.TokenUsageStats) + for id, arr := range all { + parts := strings.Split(id, "|") + var total float64 + for _, value := range arr { + total += float64(value) + } + avg := int64(total / float64(len(arr))) + address := parts[0] + entrypoint := parts[1] + + usage := operation.TokenMethodUsageStats{ + ConsumedGas: avg, + Count: int64(len(arr)), + } + if _, ok := usageStats[address]; !ok { + usageStats[address] = make(operation.TokenUsageStats) + } + usageStats[address][entrypoint] = usage + } + return usageStats, nil +} + +// GetParticipatingContracts - +func (storage *Storage) GetParticipatingContracts(network string, fromLevel, toLevel int64) ([]string, error) { + it := storage.db.Query(models.DocOperations). + Select("destination", "source"). + Match("network", network). + WhereInt64("level", reindexer.LE, fromLevel). 
+ WhereInt64("level", reindexer.GT, toLevel).Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + exists := make(map[string]struct{}) + addresses := make([]string, 0) + + type response struct { + Source string `reindex:"source"` + Destination string `reindex:"destination"` + } + for it.Next() { + var item response + it.NextObj(&item) + if _, ok := exists[item.Destination]; helpers.IsContract(item.Destination) && !ok { + exists[item.Destination] = struct{}{} + addresses = append(addresses, item.Destination) + } + if _, ok := exists[item.Source]; helpers.IsContract(item.Source) && !ok { + exists[item.Source] = struct{}{} + addresses = append(addresses, item.Source) + } + } + + return addresses, nil +} + +// RecalcStats - +func (storage *Storage) RecalcStats(network, address string) (stats operation.ContractStats, err error) { + query := storage.db.Query(models.DocOperations). + Match("network", network). + Match("status", consts.Applied). + OpenBracket(). + Match("source", address). + Or(). + Match("destination", address). + CloseBracket(). + ReqTotal() + + query.AggregateMax("timestamp") + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return stats, it.Error() + } + + stats.TxCount = int64(it.TotalCount()) + stats.LastAction = time.Unix(0, int64(it.AggResults()[0].Value)*1000000).UTC() + + for it.Next() { + var op operation.Operation + it.NextObj(&op) + if op.Source == address { + stats.Balance -= op.Amount + } else { + stats.Balance += op.Amount + } + } + return +} + +// GetDAppStats - +func (storage *Storage) GetDAppStats(network string, addresses []string, period string) (stats operation.DAppStats, err error) { + query := storage.db.Query(models.DocOperations). + Distinct("source"). + Match("network", network). + Match("status", consts.Applied). + Match("destination", addresses...). + Not(). 
+ Match("entrypoint", "") + + if err = periodToRange(period, query); err != nil { + return + } + query = query.ReqTotal() + query.AggregateSum("amount") + + it := query.Exec() + defer it.Close() + + if it.Error() != nil { + return stats, it.Error() + } + + stats.Calls = int64(it.TotalCount()) + stats.Users = int64(len(it.AggResults()[0].Distincts)) + stats.Volume = int64(it.AggResults()[1].Value) + return +} + +func periodToRange(period string, query *reindexer.Query) error { + now := time.Now() + switch period { + case "year": + query.WhereInt64("timestamp", reindexer.GT, now.AddDate(-1, 0, 0).Unix()) + case "month": + query.WhereInt64("timestamp", reindexer.GT, now.AddDate(0, -1, 0).Unix()) + case "week": + query.WhereInt64("timestamp", reindexer.GT, now.AddDate(0, 0, -7).Unix()) + case "day": + query.WhereInt64("timestamp", reindexer.GT, now.AddDate(0, 0, -1).Unix()) + case "all": + return nil + default: + return errors.Errorf("Unknown period value: %s", period) + } + return nil +} + +// GetContract24HoursVolume - +func (storage *Storage) GetContract24HoursVolume(network, address string, entrypoints []string) (float64, error) { + return 0, nil +} diff --git a/internal/reindexer/protocol/storage.go b/internal/reindexer/protocol/storage.go new file mode 100644 index 000000000..ab1f24423 --- /dev/null +++ b/internal/reindexer/protocol/storage.go @@ -0,0 +1,62 @@ +package protocol + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// GetProtocol - returns current protocol for `network` and `level` (`hash` is optional, leave empty string for default) +func (storage *Storage) GetProtocol(network, hash string, level int64) 
(p protocol.Protocol, err error) { + query := storage.db.Query(models.DocProtocol). + Match("network", network) + + if level > -1 { + query = query.WhereInt64("start_level", reindexer.LE, level) + } + if hash != "" { + query = query.Match("hash", hash) + } + query = query.Sort("start_level", true) + + err = storage.db.GetOne(query, &p) + return +} + +// GetSymLinks - returns list of symlinks in `network` after `level` +func (storage *Storage) GetSymLinks(network string, level int64) (map[string]struct{}, error) { + it := storage.db.Query(models.DocProtocol). + Select("sym_link"). + Match("network", network). + WhereInt64("start_level", reindexer.GT, level). + Sort("start_level", true).Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + + symMap := make(map[string]struct{}) + + // field must be exported: reindexer fills it via reflection + type link struct { + SymLink string `reindex:"sym_link"` + } + for it.Next() { + var sl link + it.NextObj(&sl) + symMap[sl.SymLink] = struct{}{} + } + + return symMap, nil +} diff --git a/internal/reindexer/schema/storage.go b/internal/reindexer/schema/storage.go new file mode 100644 index 000000000..a30d5a3db --- /dev/null +++ b/internal/reindexer/schema/storage.go @@ -0,0 +1,23 @@ +package schema + +import ( + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/reindexer/core" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// Get - +func (storage *Storage) Get(address string) (schema.Schema, error) { + data := schema.Schema{ID: address} + err := storage.db.GetByID(&data) + return data, err +} diff --git a/internal/reindexer/tezosdomain/storage.go b/internal/reindexer/tezosdomain/storage.go new file mode 100644 index 000000000..e64086965 --- /dev/null +++ b/internal/reindexer/tezosdomain/storage.go @@ -0,0 +1,58 @@ +package tezosdomain + +import ( + 
"github.com/baking-bad/bcdhub/internal/helpers" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/pkg/errors" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// ListDomains - +func (storage *Storage) ListDomains(network string, size, offset int64) (tezosdomain.DomainsResponse, error) { + if size > core.DefaultSize { + size = core.DefaultSize + } + + query := storage.db.Query(models.DocTezosDomains). + Match("network", network). + Sort("timestamp", true). + Limit(int(size)). + Offset(int(offset)) + + domains := make([]tezosdomain.TezosDomain, 0) + total, err := storage.db.GetAllByQueryWithTotal(query, &domains) + if err != nil { + return tezosdomain.DomainsResponse{}, nil + } + + return tezosdomain.DomainsResponse{ + Domains: domains, + Total: int64(total), + }, nil +} + +// ResolveDomainByAddress - +func (storage *Storage) ResolveDomainByAddress(network string, address string) (*tezosdomain.TezosDomain, error) { + if !helpers.IsAddress(address) { + return nil, errors.Errorf("Invalid address: %s", address) + } + + query := storage.db.Query(models.DocTezosDomains). + Match("network", network). 
+ Match("address", address) + + var td tezosdomain.TezosDomain + err := storage.db.GetOne(query, &td) + return &td, err +} diff --git a/internal/reindexer/tokenbalance/storage.go b/internal/reindexer/tokenbalance/storage.go new file mode 100644 index 000000000..f7a2acedf --- /dev/null +++ b/internal/reindexer/tokenbalance/storage.go @@ -0,0 +1,94 @@ +package tokenbalance + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// const scriptUpdateBalance = `{"source": "ctx._source.balance = ctx._source.balance + (long)params.delta", "lang": "painless", "params": { "delta": %d }}` + +// Update - +func (storage *Storage) Update(updates []*tokenbalance.TokenBalance) error { + if len(updates) == 0 { + return nil + } + + tx, err := storage.db.BeginTx(models.DocTokenBalances) + for err != nil { + return err + } + + // bulk := bytes.NewBuffer([]byte{}) + // for i := range updates { + // bulk.WriteString(fmt.Sprintf(`{ "update": { "_id": "%s"}}`, updates[i].GetID())) + // bulk.WriteByte('\n') + + // script := fmt.Sprintf(scriptUpdateBalance, updates[i].Balance) + + // upsert, err := json.Marshal(updates[i]) + // if err != nil { + // return err + // } + + // bulk.WriteString(fmt.Sprintf(`{ "script": %s, "upsert": %s }`, script, string(upsert))) + // bulk.WriteByte('\n') + // if (i%1000 == 0 && i > 0) || i == len(updates)-1 { + // if err := storage.bulkUpsertBalances(bulk); err != nil { + // return err + // } + // bulk.Reset() + // } + // } + return tx.Commit() +} + +// func (storage *Storage) bulkUpsertBalances(buf *bytes.Buffer) error { +// req := esapi.BulkRequest{ +// Body: bytes.NewReader(buf.Bytes()), +// Refresh: "true", 
+// Index: models.DocTokenBalances, +// } + +// res, err := req.Do(context.Background(), storage.es) +// if err != nil { +// return err +// } +// defer res.Body.Close() + +// var response core.BulkResponse +// return storage.es.GetResponse(res, &response) +// } + +// GetHolders - +func (storage *Storage) GetHolders(network, contract string, tokenID int64) (balances []tokenbalance.TokenBalance, err error) { + query := storage.db.Query(models.DocTokenBalances). + Match("network", network). + Match("contract", contract). + WhereInt64("token_id", reindexer.EQ, tokenID). + WhereInt64("balance", reindexer.GT, 0) + + err = storage.db.GetAllByQuery(query, &balances) + return +} + +// GetAccountBalances - +func (storage *Storage) GetAccountBalances(network, address string) (tokenBalances []tokenbalance.TokenBalance, err error) { + query := storage.db.Query(models.DocTokenBalances). + Match("network", network). + Match("address", address) + + err = storage.db.GetAllByQuery(query, &tokenBalances) + return +} diff --git a/internal/reindexer/transfer/context.go b/internal/reindexer/transfer/context.go new file mode 100644 index 000000000..db7977fd7 --- /dev/null +++ b/internal/reindexer/transfer/context.go @@ -0,0 +1,107 @@ +package transfer + +import ( + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/restream/reindexer" +) + +func buildGetContext(ctx transfer.GetContext, query *reindexer.Query) { + buildGetContextWithoutLimits(ctx, query) + + appendSort(ctx, query) + appendOffset(ctx, query) + appendLimit(ctx, query) +} + +func buildGetContextWithoutLimits(ctx transfer.GetContext, query *reindexer.Query) { + filterNetwork(ctx, query) + filterAddress(ctx, query) + filterTime(ctx, query) + filterCursor(ctx, query) + filterContracts(ctx, query) + filterTokenID(ctx, query) + filterHash(ctx, query) +} + +func filterNetwork(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Network != "" { + query.Match("network", ctx.Network) + } +} + 
+func filterHash(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Hash != "" { + query.Match("hash", ctx.Hash) + } +} + +func filterAddress(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Address == "" { + return + } + query.OpenBracket(). + Match("from", ctx.Address). + Or(). + Match("to", ctx.Address). + CloseBracket() +} + +func filterTokenID(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.TokenID >= 0 { + query.WhereInt64("token_id", reindexer.EQ, ctx.TokenID) + } +} + +func filterTime(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Start > 0 { + query.WhereInt64("timestamp", reindexer.GE, int64(ctx.Start)) + } + if ctx.End > 0 { + query.WhereInt64("timestamp", reindexer.LT, int64(ctx.End)) + } +} + +func filterCursor(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.LastID != "" { + condition := reindexer.LT + if ctx.SortOrder == "asc" { + condition = reindexer.GT + } + query.Where("indexed_time", condition, ctx.LastID) + } +} + +func filterContracts(ctx transfer.GetContext, query *reindexer.Query) { + if len(ctx.Contracts) == 0 { + return + } + + query.OpenBracket() + + for i := range ctx.Contracts { + query.Match("contract", ctx.Contracts[i]) + if len(ctx.Contracts)-1 > i { + query.Or() + } + } + + query.CloseBracket() +} + +func appendLimit(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Size > 0 && ctx.Size <= maxTransfersSize { + query.Limit(int(ctx.Size)) + } else { + query.Limit(maxTransfersSize) + } +} + +func appendOffset(ctx transfer.GetContext, query *reindexer.Query) { + if ctx.Offset > 0 && ctx.Offset <= maxTransfersSize { + query.Offset(int(ctx.Offset)) + } +} + +func appendSort(ctx transfer.GetContext, query *reindexer.Query) { + query.Sort("timestamp", ctx.SortOrder == "desc") +} diff --git a/internal/reindexer/transfer/storage.go b/internal/reindexer/transfer/storage.go new file mode 100644 index 000000000..bcdbacdfb --- /dev/null +++ b/internal/reindexer/transfer/storage.go 
@@ -0,0 +1,96 @@ +package transfer + +import ( + "fmt" + + "github.com/baking-bad/bcdhub/internal/contractparser/consts" + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/transfer" + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +const ( + maxTransfersSize = 10000 +) + +// Get - +func (storage *Storage) Get(ctx transfer.GetContext) (po transfer.Pageable, err error) { + query := storage.db.Query(models.DocTransfers) + buildGetContext(ctx, query) + + transfers := make([]transfer.Transfer, 0) + total, err := storage.db.GetAllByQueryWithTotal(query, &transfers) + if err != nil { + return + } + + po.Transfers = transfers + po.Total = int64(total) + + if len(transfers) > 0 { + po.LastID = fmt.Sprintf("%d", transfers[len(transfers)-1].IndexedTime) + } + return +} + +// GetAll - +func (storage *Storage) GetAll(network string, level int64) (transfers []transfer.Transfer, err error) { + query := storage.db.Query(models.DocTransfers). + Match("network", network). + WhereInt64("level", reindexer.GT, level) + + err = storage.db.GetAllByQuery(query, &transfers) + return +} + +// GetTokenSupply - +func (storage *Storage) GetTokenSupply(network, address string, tokenID int64) (result transfer.TokenSupply, err error) { + it := storage.db.Query(models.DocTransfers). + Match("network", network). + Match("contract", address). + Match("status", consts.Applied). + WhereInt64("token_id", reindexer.EQ, tokenID). 
+ Exec() + defer it.Close() + + if it.Error() != nil { + return result, it.Error() + } + + for it.Next() { + var t transfer.Transfer + it.NextObj(&t) + + switch { + case t.From == "": + result.Supply += t.Amount + case t.To == "": + result.Supply -= t.Amount + default: + result.Transfered += t.Amount + } + } + return +} + +// GetTokenVolumeSeries - +func (storage *Storage) GetTokenVolumeSeries(network, period string, contracts []string, entrypoints []tzip.DAppContract, tokenID uint) ([][]int64, error) { + return nil, nil +} + +// GetToken24HoursVolume - +func (storage *Storage) GetToken24HoursVolume(network, contract string, initiators, entrypoints []string, tokenID int64) (float64, error) { + return 0, nil +} diff --git a/internal/reindexer/tzip/context.go b/internal/reindexer/tzip/context.go new file mode 100644 index 000000000..92647ea99 --- /dev/null +++ b/internal/reindexer/tzip/context.go @@ -0,0 +1,22 @@ +package tzip + +import ( + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" +) + +func buildGetTokenMetadataContext(ctx tzip.GetTokenMetadataContext, query *reindexer.Query) { + if ctx.Contract != "" { + query.Match("address", ctx.Contract) + } + if ctx.Network != "" { + query.Match("network", ctx.Network) + } + if ctx.Level.IsFilled() { + core.SetComaparator("level", ctx.Level, query) + } + if ctx.TokenID != -1 { + query.WhereInt64("tokens.static.token_id", reindexer.EQ, ctx.TokenID) + } +} diff --git a/internal/reindexer/tzip/storage.go b/internal/reindexer/tzip/storage.go new file mode 100644 index 000000000..89e5c1953 --- /dev/null +++ b/internal/reindexer/tzip/storage.go @@ -0,0 +1,148 @@ +package tzip + +import ( + "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" + "github.com/baking-bad/bcdhub/internal/reindexer/core" + "github.com/restream/reindexer" 
+) + +// Storage - +type Storage struct { + db *core.Reindexer +} + +// NewStorage - +func NewStorage(db *core.Reindexer) *Storage { + return &Storage{db} +} + +// GetTokenMetadata - +func (storage *Storage) GetTokenMetadata(ctx tzip.GetTokenMetadataContext) (tokens []tzip.TokenMetadata, err error) { + tzips := make([]tzip.TZIP, 0) + + query := storage.db.Query(models.DocTZIP) + buildGetTokenMetadataContext(ctx, query) + if err = storage.db.GetAllByQuery(query, &tzips); err != nil { + return + } + if len(tzips) == 0 { + return nil, core.NewRecordNotFoundError(models.DocTZIP, "") + } + + tokens = make([]tzip.TokenMetadata, 0) + for k := range tzips { + if tzips[k].Tokens == nil { + continue + } + + for i := range tzips[k].Tokens.Static { + tokens = append(tokens, tzip.TokenMetadata{ + Address: tzips[k].Address, + Network: tzips[k].Network, + Level: tzips[k].Level, + RegistryAddress: tzips[k].Tokens.Static[i].RegistryAddress, + Symbol: tzips[k].Tokens.Static[i].Symbol, + Name: tzips[k].Tokens.Static[i].Name, + Decimals: tzips[k].Tokens.Static[i].Decimals, + TokenID: tzips[k].Tokens.Static[i].TokenID, + Extras: tzips[k].Tokens.Static[i].Extras, + }) + } + } + return +} + +// Get - +func (storage *Storage) Get(network, address string) (t tzip.TZIP, err error) { + t.Address = address + t.Network = network + err = storage.db.GetByID(&t) + return +} + +// GetDApps - +func (storage *Storage) GetDApps() (tokens []tzip.DApp, err error) { + query := storage.db.Query(models.DocTZIP). + Not(). + Where("dapps", reindexer.EMPTY, 0). 
+ Sort("dapps.order", false) + err = storage.db.GetAllByQuery(query, &tokens) + return +} + +// GetDAppBySlug - +func (storage *Storage) GetDAppBySlug(slug string) (*tzip.DApp, error) { + model, err := storage.GetBySlug(slug) + if err != nil { + return nil, err + } + return &model.DApps[0], err +} + +// GetBySlug - +func (storage *Storage) GetBySlug(slug string) (*tzip.TZIP, error) { + query := storage.db.Query(models.DocTZIP).Match("dapps.slug", slug) + + var model tzip.TZIP + err := storage.db.GetOne(query, &model) + return &model, err +} + +// GetAliasesMap - +func (storage *Storage) GetAliasesMap(network string) (map[string]string, error) { + it := storage.db.Query(models.DocTZIP). + Select("address", "name"). + Match("network", network). + Not(). + Match("name", "").Exec() + defer it.Close() + + if it.Error() != nil { + return nil, it.Error() + } + aliases := make(map[string]string) + + type res struct { + Address string `reindex:"address"` + Name string `reindex:"name"` + } + for it.Next() { + var r res + it.NextObj(&r) + aliases[r.Address] = r.Name + } + + return aliases, nil +} + +// GetAliases - +func (storage *Storage) GetAliases(network string) (aliases []tzip.TZIP, err error) { + query := storage.db.Query(models.DocTZIP). + Match("network", network). + Not(). + Match("name", "") + + err = storage.db.GetAllByQuery(query, &aliases) + return +} + +// GetAlias - +func (storage *Storage) GetAlias(network, address string) (*tzip.TZIP, error) { + query := storage.db.Query(models.DocTZIP). + Match("network", network). + Match("address", address) + + var data tzip.TZIP + err := storage.db.GetOne(query, &data) + return &data, err +} + +// GetWithEvents - +func (storage *Storage) GetWithEvents() (tzips []tzip.TZIP, err error) { + query := storage.db.Query(models.DocTZIP). + Not(). 
+ Where("events", reindexer.EMPTY, 0) + err = storage.db.GetAllByQuery(query, &tzips) + return +} diff --git a/internal/rollback/remove.go b/internal/rollback/remove.go index 5f0716ff6..a7c92b50b 100644 --- a/internal/rollback/remove.go +++ b/internal/rollback/remove.go @@ -2,26 +2,27 @@ package rollback import ( "github.com/baking-bad/bcdhub/internal/contractparser" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/schema" ) // Remove - -func Remove(es elastic.IElastic, network, appDir string) error { - if err := removeContracts(es, network, appDir); err != nil { +func Remove(storage models.GeneralRepository, contractsRepo contract.Repository, bulk models.BulkRepository, network, appDir string) error { + if err := removeContracts(storage, contractsRepo, bulk, network, appDir); err != nil { return err } - return removeOthers(es, network) + return removeOthers(storage, network) } -func removeOthers(es elastic.IElastic, network string) error { +func removeOthers(storage models.GeneralRepository, network string) error { logger.Info("Deleting general data...") - return es.DeleteByLevelAndNetwork([]string{elastic.DocBigMapDiff, elastic.DocBigMapActions, elastic.DocMigrations, elastic.DocOperations, elastic.DocTransfers, elastic.DocBlocks, elastic.DocProtocol}, network, -1) + return storage.DeleteByLevelAndNetwork([]string{models.DocBigMapDiff, models.DocBigMapActions, models.DocMigrations, models.DocOperations, models.DocTransfers, models.DocBlocks, models.DocProtocol}, network, -1) } -func removeContracts(es elastic.IElastic, network, appDir string) error { - contracts, err := es.GetContracts(map[string]interface{}{ +func removeContracts(storage models.GeneralRepository, contractsRepo contract.Repository, bulk 
models.BulkRepository, network, appDir string) error { + contracts, err := contractsRepo.GetMany(map[string]interface{}{ "network": network, }) if err != nil { @@ -33,26 +34,26 @@ func removeContracts(es elastic.IElastic, network, appDir string) error { addresses[i] = contracts[i].Address } - if err := removeNetworkMetadata(es, network, addresses, appDir); err != nil { + if err := removeNetworkMetadata(bulk, network, addresses, appDir); err != nil { return err } logger.Info("Deleting contracts...") - return es.DeleteByLevelAndNetwork([]string{elastic.DocContracts}, network, -1) + return storage.DeleteByLevelAndNetwork([]string{models.DocContracts}, network, -1) } -func removeNetworkMetadata(e elastic.IElastic, network string, addresses []string, appDir string) error { - bulkDeleteMetadata := make([]elastic.Model, len(addresses)) +func removeNetworkMetadata(bulk models.BulkRepository, network string, addresses []string, appDir string) error { + bulkDeleteMetadata := make([]models.Model, len(addresses)) logger.Info("%d contracts will be removed", len(addresses)) for i := range addresses { - bulkDeleteMetadata[i] = &models.Metadata{ + bulkDeleteMetadata[i] = &schema.Schema{ ID: addresses[i], } } logger.Info("Removing metadata...") if len(bulkDeleteMetadata) > 0 { - if err := e.BulkDelete(bulkDeleteMetadata); err != nil { + if err := bulk.Delete(bulkDeleteMetadata); err != nil { return err } } diff --git a/internal/rollback/rollback.go b/internal/rollback/rollback.go index 62a5332c2..d155db975 100644 --- a/internal/rollback/rollback.go +++ b/internal/rollback/rollback.go @@ -5,10 +5,16 @@ import ( "time" "github.com/baking-bad/bcdhub/internal/contractparser" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/block" + 
"github.com/baking-bad/bcdhub/internal/models/contract" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/protocol" + "github.com/baking-bad/bcdhub/internal/models/schema" + "github.com/baking-bad/bcdhub/internal/models/tokenbalance" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/mq" "github.com/baking-bad/bcdhub/internal/noderpc" "github.com/pkg/errors" @@ -17,21 +23,27 @@ import ( // Manager - type Manager struct { - e elastic.IElastic - messageQueue mq.IMessagePublisher - rpc noderpc.INode - sharePath string + storage models.GeneralRepository + bulk models.BulkRepository + contractsRepo contract.Repository + operationRepo operation.Repository + transfersRepo transfer.Repository + tbRepo tokenbalance.Repository + protocolsRepo protocol.Repository + messageQueue mq.IMessagePublisher + rpc noderpc.INode + sharePath string } // NewManager - -func NewManager(e elastic.IElastic, messageQueue mq.IMessagePublisher, rpc noderpc.INode, sharePath string) Manager { +func NewManager(storage models.GeneralRepository, bulk models.BulkRepository, contractsRepo contract.Repository, operationRepo operation.Repository, transfersRepo transfer.Repository, tbRepo tokenbalance.Repository, protocolsRepo protocol.Repository, messageQueue mq.IMessagePublisher, rpc noderpc.INode, sharePath string) Manager { return Manager{ - e, messageQueue, rpc, sharePath, + storage, bulk, contractsRepo, operationRepo, transfersRepo, tbRepo, protocolsRepo, messageQueue, rpc, sharePath, } } // Rollback - rollback indexer state to level -func (rm Manager) Rollback(fromState models.Block, toLevel int64) error { +func (rm Manager) Rollback(fromState block.Block, toLevel int64) error { if toLevel >= fromState.Level { return errors.Errorf("To level must be less than from level: %d >= %d", toLevel, fromState.Level) } @@ -66,7 +78,7 @@ 
func (rm Manager) Rollback(fromState models.Block, toLevel int64) error { } func (rm Manager) rollbackTokenBalances(network string, toLevel int64) error { - transfers, err := rm.e.GetAllTransfers(network, toLevel) + transfers, err := rm.transfersRepo.GetAll(network, toLevel) if err != nil { return err } @@ -74,8 +86,8 @@ func (rm Manager) rollbackTokenBalances(network string, toLevel int64) error { return nil } - exists := make(map[string]*models.TokenBalance) - updates := make([]*models.TokenBalance, 0) + exists := make(map[string]*tokenbalance.TokenBalance) + updates := make([]*tokenbalance.TokenBalance, 0) for i := range transfers { if id := transfers[i].GetFromTokenBalanceID(); id != "" { @@ -99,20 +111,20 @@ func (rm Manager) rollbackTokenBalances(network string, toLevel int64) error { } } - return rm.e.UpdateTokenBalances(updates) + return rm.tbRepo.Update(updates) } func (rm Manager) rollbackBlocks(network string, toLevel int64) error { logger.Info("Deleting blocks...") - return rm.e.DeleteByLevelAndNetwork([]string{elastic.DocBlocks}, network, toLevel) + return rm.storage.DeleteByLevelAndNetwork([]string{models.DocBlocks}, network, toLevel) } func (rm Manager) rollbackOperations(network string, toLevel int64) error { logger.Info("Deleting operations, migrations, transfers and big map diffs...") - return rm.e.DeleteByLevelAndNetwork([]string{elastic.DocBigMapDiff, elastic.DocBigMapActions, elastic.DocTZIP, elastic.DocMigrations, elastic.DocOperations, elastic.DocTransfers}, network, toLevel) + return rm.storage.DeleteByLevelAndNetwork([]string{models.DocBigMapDiff, models.DocBigMapActions, models.DocTZIP, models.DocMigrations, models.DocOperations, models.DocTransfers}, network, toLevel) } -func (rm Manager) rollbackContracts(fromState models.Block, toLevel int64) error { +func (rm Manager) rollbackContracts(fromState block.Block, toLevel int64) error { if err := rm.removeMetadata(fromState, toLevel); err != nil { return err } @@ -124,33 +136,33 @@ func (rm 
Manager) rollbackContracts(fromState models.Block, toLevel int64) error if toLevel == 0 { toLevel = -1 } - return rm.e.DeleteByLevelAndNetwork([]string{elastic.DocContracts}, fromState.Network, toLevel) + return rm.storage.DeleteByLevelAndNetwork([]string{models.DocContracts}, fromState.Network, toLevel) } func (rm Manager) getAffectedContracts(network string, fromLevel, toLevel int64) ([]string, error) { - addresses, err := rm.e.GetAffectedContracts(network, fromLevel, toLevel) + addresses, err := rm.operationRepo.GetParticipatingContracts(network, fromLevel, toLevel) if err != nil { return nil, err } - return rm.e.GetContractsIDByAddress(addresses, network) + return rm.contractsRepo.GetIDsByAddresses(addresses, network) } -func (rm Manager) getProtocolByLevel(protocols []models.Protocol, level int64) (models.Protocol, error) { +func (rm Manager) getProtocolByLevel(protocols []protocol.Protocol, level int64) (protocol.Protocol, error) { for _, p := range protocols { if p.StartLevel <= level { return p, nil } } if len(protocols) == 0 { - return models.Protocol{}, errors.Errorf("Can't find protocol for level %d", level) + return protocol.Protocol{}, errors.Errorf("Can't find protocol for level %d", level) } return protocols[0], nil } -func (rm Manager) removeMetadata(fromState models.Block, toLevel int64) error { +func (rm Manager) removeMetadata(fromState block.Block, toLevel int64) error { logger.Info("Preparing metadata for removing...") - addresses, err := rm.e.GetContractAddressesByNetworkAndLevel(fromState.Network, toLevel) + addresses, err := rm.contractsRepo.GetAddressesByNetworkAndLevel(fromState.Network, toLevel) if err != nil { return err } @@ -159,13 +171,13 @@ func (rm Manager) removeMetadata(fromState models.Block, toLevel int64) error { } func (rm Manager) removeContractsMetadata(network string, addresses []string, protocol string) error { - bulkDeleteMetadata := make([]elastic.Model, 0) + bulkDeleteMetadata := make([]models.Model, 0) logger.Info("%d 
contracts will be removed", len(addresses)) bar := progressbar.NewOptions(len(addresses), progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) for _, address := range addresses { bar.Add(1) //nolint - bulkDeleteMetadata = append(bulkDeleteMetadata, &models.Metadata{ + bulkDeleteMetadata = append(bulkDeleteMetadata, &schema.Schema{ ID: address, }) @@ -176,7 +188,7 @@ func (rm Manager) removeContractsMetadata(network string, addresses []string, pr logger.Info("Removing metadata...") if len(bulkDeleteMetadata) > 0 { - if err := rm.e.BulkDelete(bulkDeleteMetadata); err != nil { + if err := rm.bulk.Delete(bulkDeleteMetadata); err != nil { return err } } @@ -185,8 +197,8 @@ func (rm Manager) removeContractsMetadata(network string, addresses []string, pr func (rm Manager) updateMetadata(network string, fromLevel, toLevel int64) error { logger.Info("Preparing metadata for updating...") - var protocols []models.Protocol - if err := rm.e.GetByNetworkWithSort(network, "start_level", "desc", &protocols); err != nil { + var protocols []protocol.Protocol + if err := rm.storage.GetByNetworkWithSort(network, "start_level", "desc", &protocols); err != nil { return err } rollbackProtocol, err := rm.getProtocolByLevel(protocols, toLevel) @@ -204,7 +216,7 @@ func (rm Manager) updateMetadata(network string, fromLevel, toLevel int64) error } logger.Info("Rollback to %s from %s", rollbackProtocol.Hash, currentProtocol.Hash) - deadSymLinks, err := rm.e.GetSymLinks(network, toLevel) + deadSymLinks, err := rm.protocolsRepo.GetSymLinks(network, toLevel) if err != nil { return err } @@ -212,13 +224,13 @@ func (rm Manager) updateMetadata(network string, fromLevel, toLevel int64) error delete(deadSymLinks, rollbackProtocol.SymLink) logger.Info("Getting all metadata...") - var metadata []models.Metadata - if err := rm.e.GetAll(&metadata); err != nil { + var metadata []schema.Schema + if err := rm.storage.GetAll(&metadata); err != nil { return 
err } logger.Info("Found %d metadata, will remove %v", len(metadata), deadSymLinks) - bulkUpdateMetadata := make([]elastic.Model, len(metadata)) + bulkUpdateMetadata := make([]models.Model, len(metadata)) for i := range metadata { bulkUpdateMetadata[i] = &metadata[i] } @@ -228,12 +240,10 @@ func (rm Manager) updateMetadata(network string, fromLevel, toLevel int64) error for i := 0; i < len(bulkUpdateMetadata); i += 1000 { start := i * 1000 end := helpers.MinInt((i+1)*1000, len(bulkUpdateMetadata)) - parameterScript := fmt.Sprintf("ctx._source.parameter.remove('%s')", symLink) - if err := rm.e.BulkRemoveField(parameterScript, bulkUpdateMetadata[start:end]); err != nil { + if err := rm.bulk.RemoveField(fmt.Sprintf("parameter.%s", symLink), bulkUpdateMetadata[start:end]); err != nil { return err } - storageScript := fmt.Sprintf("ctx._source.storage.remove('%s')", symLink) - if err := rm.e.BulkRemoveField(storageScript, bulkUpdateMetadata[start:end]); err != nil { + if err := rm.bulk.RemoveField(fmt.Sprintf("storage.%s", symLink), bulkUpdateMetadata[start:end]); err != nil { return err } } diff --git a/internal/elastic/search/bigmap.go b/internal/search/bigmap.go similarity index 84% rename from internal/elastic/search/bigmap.go rename to internal/search/bigmap.go index 63736f341..6d5c5782d 100644 --- a/internal/elastic/search/bigmap.go +++ b/internal/search/bigmap.go @@ -1,9 +1,8 @@ package search import ( - "encoding/json" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" ) // BigMap - @@ -11,7 +10,7 @@ type BigMap struct{} // GetIndex - func (b BigMap) GetIndex() string { - return "bigmapdiff" + return models.DocBigMapDiff } // GetScores - @@ -36,11 +35,11 @@ func (b BigMap) GetFields() []string { // Parse - func (b BigMap) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var bmd models.BigMapDiff + var bmd bigmapdiff.BigMapDiff if err := json.Unmarshal(data, &bmd); 
err != nil { return nil, err } - return Item{ + return models.Item{ Type: b.GetIndex(), Value: bmd.KeyHash, Body: bmd, diff --git a/internal/search/context.go b/internal/search/context.go new file mode 100644 index 000000000..e3b12e0ec --- /dev/null +++ b/internal/search/context.go @@ -0,0 +1,19 @@ +package search + +// Context - +type Context struct { + Text string + Indices []string + Fields []string + Highlights map[string]interface{} + Offset int64 +} + +// NewContext - +func NewContext() Context { + return Context{ + Fields: make([]string, 0), + Indices: make([]string, 0), + Highlights: make(map[string]interface{}), + } +} diff --git a/internal/elastic/search/contract.go b/internal/search/contract.go similarity index 90% rename from internal/elastic/search/contract.go rename to internal/search/contract.go index ed096b300..c8f8e8688 100644 --- a/internal/elastic/search/contract.go +++ b/internal/search/contract.go @@ -1,10 +1,9 @@ package search import ( - "encoding/json" - "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" ) // Contract - @@ -12,7 +11,7 @@ type Contract struct{} // GetIndex - func (c Contract) GetIndex() string { - return "contract" + return models.DocContracts } // GetScores - @@ -63,11 +62,11 @@ func (c Contract) GetFields() []string { // Parse - func (c Contract) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var contract models.Contract + var contract contract.Contract if err := json.Unmarshal(data, &contract); err != nil { return nil, err } - return Item{ + return models.Item{ Type: c.GetIndex(), Value: contract.Address, Body: contract, diff --git a/internal/elastic/search/domain.go b/internal/search/domain.go similarity index 82% rename from internal/elastic/search/domain.go rename to internal/search/domain.go index deba0f7ed..ea5ac3bfc 100644 --- a/internal/elastic/search/domain.go +++ 
b/internal/search/domain.go @@ -1,9 +1,8 @@ package search import ( - "encoding/json" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tezosdomain" ) // Domain - @@ -11,7 +10,7 @@ type Domain struct{} // GetIndex - func (d Domain) GetIndex() string { - return "tezos_domain" + return models.DocTezosDomains } // GetScores - @@ -32,11 +31,11 @@ func (d Domain) GetFields() []string { // Parse - func (d Domain) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var domain models.TezosDomain + var domain tezosdomain.TezosDomain if err := json.Unmarshal(data, &domain); err != nil { return nil, err } - return Item{ + return models.Item{ Type: d.GetIndex(), Value: domain.Address, Body: domain, diff --git a/internal/search/functions.go b/internal/search/functions.go new file mode 100644 index 000000000..2b946f216 --- /dev/null +++ b/internal/search/functions.go @@ -0,0 +1,10 @@ +package search + +import "regexp" + +var ptrRegEx = regexp.MustCompile(`^ptr:\d+$`) + +// IsPtrSearch - check searchString on `ptr:%d` pattern +func IsPtrSearch(searchString string) bool { + return ptrRegEx.MatchString(searchString) +} diff --git a/internal/elastic/search/general.go b/internal/search/general.go similarity index 81% rename from internal/elastic/search/general.go rename to internal/search/general.go index 4926fc3a3..1c440b4af 100644 --- a/internal/elastic/search/general.go +++ b/internal/search/general.go @@ -4,9 +4,12 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/helpers" + jsoniter "github.com/json-iterator/go" "github.com/pkg/errors" ) +var json = jsoniter.ConfigCompatibleWithStandardLibrary + // Scorable - type Scorable interface { GetFields() []string @@ -153,41 +156,3 @@ func Parse(index string, highlight map[string][]string, data []byte) (interface{ return nil, errors.Errorf("Unknown index: %s", index) } } - -// Result - -type Result struct { - Count 
int64 `json:"count"` - Time int64 `json:"time"` - Items []Item `json:"items"` -} - -// Item - -type Item struct { - Type string `json:"type"` - Value string `json:"value"` - Group *Group `json:"group,omitempty"` - Body interface{} `json:"body"` - Highlights map[string][]string `json:"highlights,omitempty"` - - Network string `json:"-"` -} - -// Group - -type Group struct { - Count int64 `json:"count"` - Top []Top `json:"top"` -} - -// NewGroup - -func NewGroup(docCount int64) *Group { - return &Group{ - Count: docCount, - Top: make([]Top, 0), - } -} - -// Top - -type Top struct { - Network string `json:"network"` - Key string `json:"key"` -} diff --git a/internal/elastic/search/operation.go b/internal/search/operation.go similarity index 87% rename from internal/elastic/search/operation.go rename to internal/search/operation.go index 10e565afa..c4c899330 100644 --- a/internal/elastic/search/operation.go +++ b/internal/search/operation.go @@ -1,9 +1,8 @@ package search import ( - "encoding/json" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" ) // Operation - @@ -11,7 +10,7 @@ type Operation struct{} // GetIndex - func (o Operation) GetIndex() string { - return "operation" + return models.DocOperations } // GetScores - @@ -44,11 +43,11 @@ func (o Operation) GetFields() []string { // Parse - func (o Operation) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var operation models.Operation + var operation operation.Operation if err := json.Unmarshal(data, &operation); err != nil { return nil, err } - return Item{ + return models.Item{ Type: o.GetIndex(), Value: operation.Hash, Body: operation, diff --git a/internal/elastic/search/token.go b/internal/search/token.go similarity index 90% rename from internal/elastic/search/token.go rename to internal/search/token.go index 97b4f1f61..eb9fa40d5 100644 --- a/internal/elastic/search/token.go +++ b/internal/search/token.go 
@@ -1,10 +1,10 @@ package search import ( - "encoding/json" "time" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" ) // TokenResponse - @@ -25,7 +25,7 @@ type Token struct{} // GetIndex - func (t Token) GetIndex() string { - return "tzip" + return models.DocTZIP } // GetScores - @@ -48,16 +48,16 @@ func (t Token) GetFields() []string { // Parse - func (t Token) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var token models.TZIP + var token tzip.TZIP if err := json.Unmarshal(data, &token); err != nil { return nil, err } if token.Tokens == nil { return nil, nil } - items := make([]Item, len(token.Tokens.Static)) + items := make([]models.Item, len(token.Tokens.Static)) for i := range token.Tokens.Static { - items[i] = Item{ + items[i] = models.Item{ Type: t.GetIndex(), Value: token.Address, Body: TokenResponse{ diff --git a/internal/elastic/search/metadata.go b/internal/search/tzip.go similarity index 63% rename from internal/elastic/search/metadata.go rename to internal/search/tzip.go index 1b62786dd..7ba5bb480 100644 --- a/internal/elastic/search/metadata.go +++ b/internal/search/tzip.go @@ -1,9 +1,13 @@ package search import ( - "encoding/json" - "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/tzip" +) + +// SearchTypes +const ( + MetadataSearchType = "metadata" ) // Metadata - @@ -11,7 +15,7 @@ type Metadata struct{} // GetIndex - func (m Metadata) GetIndex() string { - return "tzip" + return models.DocTZIP } // GetScores - @@ -36,15 +40,15 @@ func (m Metadata) GetFields() []string { // Parse - func (m Metadata) Parse(highlight map[string][]string, data []byte) (interface{}, error) { - var token models.TZIP - if err := json.Unmarshal(data, &token); err != nil { + var metadata tzip.TZIP + if err := json.Unmarshal(data, &metadata); err != nil { return nil, err } - return Item{ - Type: "metadata", - Value: 
token.Address, - Body: token, + return models.Item{ + Type: MetadataSearchType, + Value: metadata.Address, + Body: metadata, Highlights: highlight, - Network: token.Network, + Network: metadata.Network, }, nil } diff --git a/scripts/esctl/main.go b/scripts/esctl/main.go index faf434806..8d453bf11 100644 --- a/scripts/esctl/main.go +++ b/scripts/esctl/main.go @@ -30,7 +30,7 @@ func main() { } ctx = config.NewContext( - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithRabbit(cfg.RabbitMQ, "", cfg.Scripts.MQ), config.WithConfigCopy(cfg), config.WithRPC(cfg.RPC), @@ -107,7 +107,7 @@ func yes() bool { if err != nil { panic(err) } - return strings.Replace(text, "\n", "", -1) == "yes" + return strings.ReplaceAll(text, "\n", "") == "yes" } func askQuestion(question string) (string, error) { @@ -118,5 +118,5 @@ func askQuestion(question string) (string, error) { if err != nil { return "", err } - return strings.Replace(text, "\n", "", -1), nil + return strings.ReplaceAll(text, "\n", ""), nil } diff --git a/scripts/esctl/remove.go b/scripts/esctl/remove.go index b1adc9962..97da1497c 100644 --- a/scripts/esctl/remove.go +++ b/scripts/esctl/remove.go @@ -2,6 +2,7 @@ package main import ( "github.com/baking-bad/bcdhub/internal/logger" + "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/rollback" ) @@ -13,7 +14,7 @@ var removeCmd removeCommand // Execute func (x *removeCommand) Execute(_ []string) error { - state, err := ctx.ES.GetLastBlock(x.Network) + state, err := ctx.Blocks.Last(x.Network) if err != nil { panic(err) } @@ -24,7 +25,7 @@ func (x *removeCommand) Execute(_ []string) error { return nil } - if err = rollback.Remove(ctx.ES, x.Network, ctx.Config.SharePath); err != nil { + if err = rollback.Remove(ctx.Storage, ctx.Contracts, ctx.Bulk, x.Network, ctx.Config.SharePath); err != nil { return err } @@ -40,5 +41,5 @@ var deleteIndicesCmd deleteIndicesCommand // Execute func (x 
*deleteIndicesCommand) Execute(_ []string) error { - return ctx.ES.DeleteIndices(mappingNames) + return ctx.Storage.DeleteIndices(models.AllDocuments()) } diff --git a/scripts/esctl/repository.go b/scripts/esctl/repository.go index d9cb309aa..c77bc33a1 100644 --- a/scripts/esctl/repository.go +++ b/scripts/esctl/repository.go @@ -11,5 +11,5 @@ func (x *createRepoCommand) Execute(_ []string) error { return err } - return ctx.ES.CreateAWSRepository(name, creds.BucketName, creds.Region) + return ctx.Storage.CreateAWSRepository(name, creds.BucketName, creds.Region) } diff --git a/scripts/esctl/rollback.go b/scripts/esctl/rollback.go index ef726c3b2..9577767e7 100644 --- a/scripts/esctl/rollback.go +++ b/scripts/esctl/rollback.go @@ -14,7 +14,7 @@ var rollbackCmd rollbackCommand // Execute func (x *rollbackCommand) Execute(_ []string) error { - state, err := ctx.ES.GetLastBlock(x.Network) + state, err := ctx.Blocks.Last(x.Network) if err != nil { panic(err) } @@ -30,7 +30,7 @@ func (x *rollbackCommand) Execute(_ []string) error { panic(err) } - manager := rollback.NewManager(ctx.ES, ctx.MQ, rpc, ctx.SharePath) + manager := rollback.NewManager(ctx.Storage, ctx.Bulk, ctx.Contracts, ctx.Operations, ctx.Transfers, ctx.TokenBalances, ctx.Protocols, ctx.MQ, rpc, ctx.SharePath) if err = manager.Rollback(state, x.Level); err != nil { return err } diff --git a/scripts/esctl/snapshot.go b/scripts/esctl/snapshot.go index 0134a4079..9a9eb66b8 100644 --- a/scripts/esctl/snapshot.go +++ b/scripts/esctl/snapshot.go @@ -12,36 +12,20 @@ import ( "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3/s3manager" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models" "github.com/pkg/errors" ) -var mappingNames = []string{ - elastic.DocBalanceUpdates, - elastic.DocBigMapActions, - elastic.DocBigMapDiff, - elastic.DocBlocks, - 
elastic.DocContracts, - elastic.DocMetadata, - elastic.DocMigrations, - elastic.DocOperations, - elastic.DocProtocol, - elastic.DocTokenBalances, - elastic.DocTransfers, - elastic.DocTZIP, - elastic.DocTezosDomains, -} - type snapshotCommand struct{} var snapshotCmd snapshotCommand // Execute func (x *snapshotCommand) Execute(_ []string) error { - if err := uploadMappings(ctx.ES, creds); err != nil { + if err := uploadMappings(ctx.Storage, creds); err != nil { return err } - if err := listRepositories(ctx.ES); err != nil { + if err := listRepositories(ctx.Storage); err != nil { return err } name, err := askQuestion("Please, enter target repository name:") @@ -49,7 +33,7 @@ func (x *snapshotCommand) Execute(_ []string) error { return err } snapshotName := fmt.Sprintf("snapshot_%s", strings.ToLower(time.Now().UTC().Format(time.RFC3339))) - return ctx.ES.CreateSnapshots(name, snapshotName, mappingNames) + return ctx.Storage.CreateSnapshots(name, snapshotName, models.AllDocuments()) } type restoreCommand struct{} @@ -58,7 +42,7 @@ var restoreCmd restoreCommand // Execute func (x *restoreCommand) Execute(_ []string) error { - if err := listRepositories(ctx.ES); err != nil { + if err := listRepositories(ctx.Storage); err != nil { return err } name, err := askQuestion("Please, enter target repository name:") @@ -66,14 +50,14 @@ func (x *restoreCommand) Execute(_ []string) error { return err } - if err := listSnapshots(ctx.ES, name); err != nil { + if err := listSnapshots(ctx.Storage, name); err != nil { return err } snapshotName, err := askQuestion("Please, enter target snapshot name:") if err != nil { return err } - return ctx.ES.RestoreSnapshots(name, snapshotName, mappingNames) + return ctx.Storage.RestoreSnapshots(name, snapshotName, models.AllDocuments()) } type setPolicyCommand struct{} @@ -82,7 +66,7 @@ var setPolicyCmd setPolicyCommand // Execute func (x *setPolicyCommand) Execute(_ []string) error { - if err := listPolicies(ctx.ES); err != nil { + if err := 
listPolicies(ctx.Storage); err != nil { return err } policyID, err := askQuestion("Please, enter target new or existing policy ID:") @@ -105,7 +89,7 @@ func (x *setPolicyCommand) Execute(_ []string) error { if err != nil { return err } - return ctx.ES.SetSnapshotPolicy(policyID, schedule, policyID, repository, iExpiredAfter) + return ctx.Storage.SetSnapshotPolicy(policyID, schedule, policyID, repository, iExpiredAfter) } type reloadSecureSettingsCommand struct{} @@ -114,11 +98,11 @@ var reloadSecureSettingsCmd reloadSecureSettingsCommand // Execute func (x *reloadSecureSettingsCommand) Execute(_ []string) error { - return ctx.ES.ReloadSecureSettings() + return ctx.Storage.ReloadSecureSettings() } -func listPolicies(es elastic.IElastic) error { - policies, err := es.GetAllPolicies() +func listPolicies(storage models.GeneralRepository) error { + policies, err := storage.GetAllPolicies() if err != nil { return err } @@ -133,8 +117,8 @@ func listPolicies(es elastic.IElastic) error { return nil } -func listRepositories(es elastic.IElastic) error { - listRepos, err := es.ListRepositories() +func listRepositories(storage models.GeneralRepository) error { + listRepos, err := storage.ListRepositories() if err != nil { return err } @@ -149,8 +133,8 @@ func listRepositories(es elastic.IElastic) error { return nil } -func listSnapshots(es elastic.IElastic, repository string) error { - listSnaps, err := es.ListSnapshots(repository) +func listSnapshots(storage models.GeneralRepository, repository string) error { + listSnaps, err := storage.ListSnapshots(repository) if err != nil { return err } @@ -160,8 +144,8 @@ func listSnapshots(es elastic.IElastic, repository string) error { return nil } -func uploadMappings(es elastic.IElastic, creds awsData) error { - mappings, err := es.GetMappings(mappingNames) +func uploadMappings(storage models.GeneralRepository, creds awsData) error { + mappings, err := storage.GetMappings(models.AllDocuments()) if err != nil { return err } @@ -192,7 
+176,7 @@ func uploadMappings(es elastic.IElastic, creds awsData) error { } // nolint -func restoreMappings(es elastic.IElastic, creds awsData) error { +func restoreMappings(storage models.GeneralRepository, creds awsData) error { sess, err := session.NewSession(&aws.Config{ Region: aws.String(creds.Region), Credentials: credentials.NewEnvCredentials(), @@ -202,7 +186,7 @@ func restoreMappings(es elastic.IElastic, creds awsData) error { } downloader := s3manager.NewDownloader(sess) - for _, key := range mappingNames { + for _, key := range models.AllDocuments() { fileName := fmt.Sprintf("mappings/%s.json", key) buf := aws.NewWriteAtBuffer([]byte{}) @@ -214,7 +198,7 @@ func restoreMappings(es elastic.IElastic, creds awsData) error { } data := bytes.NewReader(buf.Bytes()) - if err := es.CreateMapping(key, data); err != nil { + if err := storage.CreateMapping(key, data); err != nil { return err } } diff --git a/scripts/migration/main.go b/scripts/migration/main.go index 784dea41a..2ac1b0ccd 100644 --- a/scripts/migration/main.go +++ b/scripts/migration/main.go @@ -52,7 +52,7 @@ func main() { start := time.Now() ctx := config.NewContext( - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithDatabase(cfg.DB), config.WithRPC(cfg.RPC), config.WithConfigCopy(cfg), @@ -64,7 +64,7 @@ func main() { logger.Info("Starting %v migration...", migration.Key()) if err := migration.Do(ctx); err != nil { - log.Fatal(err) + log.Panic(err) } logger.Info("%s migration done. 
Spent: %v", migration.Key(), time.Since(start)) diff --git a/scripts/migration/migrations/common.go b/scripts/migration/migrations/common.go index c454ff4d2..3850d528a 100644 --- a/scripts/migration/migrations/common.go +++ b/scripts/migration/migrations/common.go @@ -15,5 +15,5 @@ func ask(question string) (string, error) { if err != nil { return "", err } - return strings.Replace(text, "\n", "", -1), nil + return strings.ReplaceAll(text, "\n", ""), nil } diff --git a/scripts/migration/migrations/create_transfers.go b/scripts/migration/migrations/create_transfers.go index 9a514f76b..60b61a07d 100644 --- a/scripts/migration/migrations/create_transfers.go +++ b/scripts/migration/migrations/create_transfers.go @@ -2,11 +2,12 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" - "github.com/baking-bad/bcdhub/internal/parsers/transfer" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/transfer" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" "github.com/schollz/progressbar/v3" ) @@ -33,7 +34,7 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { return err } - h := metrics.New(ctx.ES, ctx.DB) + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) operations, err := m.getOperations(ctx) if err != nil { @@ -41,8 +42,8 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { } logger.Info("Found %d operations with transfer entrypoint", len(operations)) - result := make([]elastic.Model, 0) - newTransfers := make([]*models.Transfer, 0) + result := 
make([]models.Model, 0) + newTransfers := make([]*transfer.Transfer, 0) bar := progressbar.NewOptions(len(operations), progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) for i := range operations { if err := bar.Add(1); err != nil { @@ -53,15 +54,15 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { return err } - protocol, err := ctx.ES.GetProtocol(operations[i].Network, "", -1) + protocol, err := ctx.Protocols.GetProtocol(operations[i].Network, "", -1) if err != nil { return err } - parser, err := transfer.NewParser(rpc, ctx.ES, - transfer.WithNetwork(operations[i].Network), - transfer.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), - transfer.WithoutViews(), + parser, err := transferParsers.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.Schema, ctx.Storage, + transferParsers.WithNetwork(operations[i].Network), + transferParsers.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), + transferParsers.WithoutViews(), ) if err != nil { return err @@ -81,14 +82,14 @@ func (m *CreateTransfersTags) Do(ctx *config.Context) error { } } - if err := ctx.ES.BulkInsert(result); err != nil { - logger.Errorf("ctx.ES.BulkUpdate error: %v", err) + if err := ctx.Bulk.Insert(result); err != nil { + logger.Errorf("ctx.Bulk.Insert error: %v", err) return err } logger.Info("Done. 
%d transfers were saved", len(result)) - return elastic.CreateTokenBalanceUpdates(ctx.ES, newTransfers) + return transferParsers.UpdateTokenBalances(ctx.TokenBalances, newTransfers) } func (m *CreateTransfersTags) deleteTransfers(ctx *config.Context) (err error) { @@ -102,10 +103,10 @@ func (m *CreateTransfersTags) deleteTransfers(ctx *config.Context) (err error) { } } - return ctx.ES.DeleteByContract([]string{elastic.DocTransfers}, m.Network, m.Address) + return ctx.Storage.DeleteByContract([]string{models.DocTransfers}, m.Network, m.Address) } -func (m *CreateTransfersTags) getOperations(ctx *config.Context) ([]models.Operation, error) { +func (m *CreateTransfersTags) getOperations(ctx *config.Context) ([]operation.Operation, error) { filters := map[string]interface{}{} if m.Network != "" { filters["network"] = m.Network @@ -117,5 +118,5 @@ func (m *CreateTransfersTags) getOperations(ctx *config.Context) ([]models.Opera } else { filters["entrypoint"] = "transfer" } - return ctx.ES.GetOperations(filters, 0, false) + return ctx.Operations.Get(filters, 0, false) } diff --git a/scripts/migration/migrations/create_tzip.go b/scripts/migration/migrations/create_tzip.go index 55fd93984..e65f9728f 100644 --- a/scripts/migration/migrations/create_tzip.go +++ b/scripts/migration/migrations/create_tzip.go @@ -2,10 +2,11 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" - "github.com/baking-bad/bcdhub/internal/parsers/tzip" + "github.com/baking-bad/bcdhub/internal/models/bigmapdiff" + "github.com/baking-bad/bcdhub/internal/models/tzip" + tzipParsers "github.com/baking-bad/bcdhub/internal/parsers/tzip" "github.com/schollz/progressbar/v3" ) @@ -24,26 +25,26 @@ func (m *CreateTZIP) Description() string { // Do - migrate function func (m 
*CreateTZIP) Do(ctx *config.Context) error { - bmd, err := ctx.ES.GetBigMapValuesByKey(tzip.EmptyStringKey) + bmd, err := ctx.BigMapDiffs.GetValuesByKey(tzipParsers.EmptyStringKey) if err != nil { return err } logger.Info("Found %d big maps with empty key", len(bmd)) - data := make([]elastic.Model, 0) + data := make([]models.Model, 0) bar := progressbar.NewOptions(len(bmd), progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) for i := range bmd { if err := bar.Add(1); err != nil { return err } - check := models.TZIP{ + check := tzip.TZIP{ Address: bmd[i].Address, Network: bmd[i].Network, } - if err := ctx.ES.GetByID(&check); err != nil { - if !elastic.IsRecordNotFound(err) { + if err := ctx.Storage.GetByID(&check); err != nil { + if !ctx.Storage.IsRecordNotFound(err) { return err } } else { @@ -54,12 +55,12 @@ func (m *CreateTZIP) Do(ctx *config.Context) error { if err != nil { return err } - parser := tzip.NewParser(ctx.ES, rpc, tzip.ParserConfig{ + parser := tzipParsers.NewParser(ctx.BigMapDiffs, ctx.Blocks, ctx.Schema, ctx.Storage, rpc, tzipParsers.ParserConfig{ IPFSGateways: ctx.Config.IPFSGateways, }) - t, err := parser.Parse(tzip.ParseContext{ - BigMapDiff: models.BigMapDiff{ + t, err := parser.Parse(tzipParsers.ParseContext{ + BigMapDiff: bigmapdiff.BigMapDiff{ Address: bmd[i].Address, Network: bmd[i].Network, Ptr: bmd[i].Ptr, @@ -77,5 +78,5 @@ func (m *CreateTZIP) Do(ctx *config.Context) error { } } - return ctx.ES.BulkInsert(data) + return ctx.Bulk.Insert(data) } diff --git a/scripts/migration/migrations/extended_storage_events.go b/scripts/migration/migrations/extended_storage_events.go index 0ca23bcfc..4915fccd1 100644 --- a/scripts/migration/migrations/extended_storage_events.go +++ b/scripts/migration/migrations/extended_storage_events.go @@ -5,12 +5,13 @@ import ( "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - 
"github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/models/tzip" - "github.com/baking-bad/bcdhub/internal/parsers/transfer" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" ) // ExtendedStorageEvents - @@ -31,7 +32,7 @@ func (m *ExtendedStorageEvents) Description() string { // Do - migrate function func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { m.contracts = make(map[string]string) - tzips, err := ctx.ES.GetTZIPWithEvents() + tzips, err := ctx.TZIP.GetWithEvents() if err != nil { return err } @@ -39,9 +40,9 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { logger.Info("Found %d tzips", len(tzips)) logger.Info("Execution events...") - inserted := make([]elastic.Model, 0) - deleted := make([]elastic.Model, 0) - newTransfers := make([]*models.Transfer, 0) + inserted := make([]models.Model, 0) + deleted := make([]models.Model, 0) + newTransfers := make([]*transfer.Transfer, 0) for i := range tzips { for _, event := range tzips[i].Events { for _, impl := range event.Implementations { @@ -50,7 +51,7 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { } logger.Info("%s...", tzips[i].Address) - protocol, err := ctx.ES.GetProtocol(tzips[i].Network, "", -1) + protocol, err := ctx.Protocols.GetProtocol(tzips[i].Network, "", -1) if err != nil { return err } @@ -59,9 +60,9 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { return err } - parser, err := transfer.NewParser(rpc, ctx.ES, - transfer.WithNetwork(tzips[i].Network), - transfer.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), + parser, err := 
transferParsers.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.Schema, ctx.Storage, + transferParsers.WithNetwork(tzips[i].Network), + transferParsers.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), ) if err != nil { return err @@ -77,13 +78,13 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { } for _, op := range operations { - bmd, err := ctx.ES.GetBigMapDiffsByOperationID(op.ID) + bmd, err := ctx.BigMapDiffs.GetByOperationID(op.ID) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return err } } - opModels := make([]elastic.Model, len(bmd)) + opModels := make([]models.Model, len(bmd)) for j := range bmd { opModels[j] = bmd[j] } @@ -96,7 +97,7 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { return err } for _, t := range transfers { - old, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + old, err := ctx.Transfers.Get(transfer.GetContext{ Hash: t.Hash, Network: t.Network, Counter: &t.Counter, @@ -119,22 +120,22 @@ func (m *ExtendedStorageEvents) Do(ctx *config.Context) error { } } logger.Info("Delete %d transfers", len(deleted)) - if err := ctx.ES.BulkDelete(deleted); err != nil { + if err := ctx.Bulk.Delete(deleted); err != nil { return err } logger.Info("Found %d transfers", len(inserted)) - if err := ctx.ES.BulkInsert(inserted); err != nil { + if err := ctx.Bulk.Insert(inserted); err != nil { return err } - return elastic.CreateTokenBalanceUpdates(ctx.ES, newTransfers) + return transferParsers.UpdateTokenBalances(ctx.TokenBalances, newTransfers) } -func (m *ExtendedStorageEvents) getOperations(ctx *config.Context, tzip models.TZIP, impl tzip.EventImplementation) ([]models.Operation, error) { - operations := make([]models.Operation, 0) +func (m *ExtendedStorageEvents) getOperations(ctx *config.Context, tzip tzip.TZIP, impl tzip.EventImplementation) ([]operation.Operation, error) { + operations := make([]operation.Operation, 0) for i := range 
impl.MichelsonExtendedStorageEvent.Entrypoints { - ops, err := ctx.ES.GetOperations(map[string]interface{}{ + ops, err := ctx.Operations.Get(map[string]interface{}{ "network": tzip.Network, "destination": tzip.Address, "kind": consts.Transaction, diff --git a/scripts/migration/migrations/fill_tzip.go b/scripts/migration/migrations/fill_tzip.go index 1d6e4701d..759d3522f 100644 --- a/scripts/migration/migrations/fill_tzip.go +++ b/scripts/migration/migrations/fill_tzip.go @@ -4,7 +4,7 @@ import ( "errors" "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" + "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/parsers/tzip/repository" ) @@ -37,7 +37,7 @@ func (m *FillTZIP) Do(ctx *config.Context) error { return err } - blocks, err := ctx.ES.GetLastBlocks() + blocks, err := ctx.Blocks.LastByNetworks() if err != nil { return err } @@ -47,7 +47,7 @@ func (m *FillTZIP) Do(ctx *config.Context) error { networks[blocks[i].Network] = struct{}{} } - data := make([]elastic.Model, 0) + data := make([]models.Model, 0) if network == "" { items, err := fs.GetAll() if err != nil { @@ -82,5 +82,5 @@ func (m *FillTZIP) Do(ctx *config.Context) error { } data = append(data, model) } - return ctx.ES.BulkInsert(data) + return ctx.Bulk.Insert(data) } diff --git a/scripts/migration/migrations/get_aliases.go b/scripts/migration/migrations/get_aliases.go index cea0b38eb..41b563ee9 100644 --- a/scripts/migration/migrations/get_aliases.go +++ b/scripts/migration/migrations/get_aliases.go @@ -6,7 +6,6 @@ import ( "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" @@ -45,14 +44,14 @@ func (m 
*GetAliases) Do(ctx *config.Context) error { logger.Info("Got %d aliases from tzkt api", len(aliases)) logger.Info("Saving aliases to elastic...") - newModels := make([]elastic.Model, 0) + newModels := make([]models.Model, 0) bar := progressbar.NewOptions(len(aliases), progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) for address, alias := range aliases { if err := bar.Add(1); err != nil { return err } - item := models.TZIP{ + item := tzip.TZIP{ Network: consts.Mainnet, Address: address, Slug: helpers.Slug(alias), @@ -61,14 +60,14 @@ func (m *GetAliases) Do(ctx *config.Context) error { }, } - if err := ctx.ES.GetByID(&item); err == nil { + if err := ctx.Storage.GetByID(&item); err == nil { item.Name = alias item.Slug = helpers.Slug(alias) - } else if !elastic.IsRecordNotFound(err) { + } else if !ctx.Storage.IsRecordNotFound(err) { log.Println(err) return err } newModels = append(newModels, &item) } - return ctx.ES.BulkInsert(newModels) + return ctx.Bulk.Insert(newModels) } diff --git a/scripts/migration/migrations/initial_storage_events.go b/scripts/migration/migrations/initial_storage_events.go index 568083057..7d6087e78 100644 --- a/scripts/migration/migrations/initial_storage_events.go +++ b/scripts/migration/migrations/initial_storage_events.go @@ -2,10 +2,11 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/transfer" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" ) // InitialStorageEvents - @@ -26,17 +27,17 @@ func (m *InitialStorageEvents) Description() string { // Do - migrate function func (m *InitialStorageEvents) Do(ctx *config.Context) error { 
m.contracts = make(map[string]string) - tzips, err := ctx.ES.GetTZIPWithEvents() + tzips, err := ctx.TZIP.GetWithEvents() if err != nil { return err } logger.Info("Found %d tzips", len(tzips)) - h := metrics.New(ctx.ES, ctx.DB) + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) logger.Info("Execution events...") - newTransfers := make([]*models.Transfer, 0) + newTransfers := make([]*transfer.Transfer, 0) for i := range tzips { logger.Info("%s...", tzips[i].Address) @@ -49,13 +50,13 @@ func (m *InitialStorageEvents) Do(ctx *config.Context) error { return err } for i := range transfers { - found, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + found, err := ctx.Transfers.Get(transfer.GetContext{ Hash: transfers[i].Hash, Network: transfers[i].Network, TokenID: -1, }) if err != nil { - if !elastic.IsRecordNotFound(err) { + if !ctx.Storage.IsRecordNotFound(err) { return err } } @@ -68,7 +69,7 @@ func (m *InitialStorageEvents) Do(ctx *config.Context) error { } } - updated := make([]elastic.Model, 0) + updated := make([]models.Model, 0) if len(newTransfers) == 0 { return nil } @@ -76,10 +77,10 @@ func (m *InitialStorageEvents) Do(ctx *config.Context) error { updated = append(updated, newTransfers[i]) } logger.Info("Found %d transfers", len(updated)) - if err := ctx.ES.BulkInsert(updated); err != nil { + if err := ctx.Bulk.Insert(updated); err != nil { return err } - return elastic.CreateTokenBalanceUpdates(ctx.ES, newTransfers) + return transferParsers.UpdateTokenBalances(ctx.TokenBalances, newTransfers) } // AffectedContracts - diff --git a/scripts/migration/migrations/parameter_events.go b/scripts/migration/migrations/parameter_events.go index 646d00658..090fb90e0 100644 --- a/scripts/migration/migrations/parameter_events.go +++ b/scripts/migration/migrations/parameter_events.go @@ -5,12 +5,13 @@ import ( 
"github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/events" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/operation" + "github.com/baking-bad/bcdhub/internal/models/transfer" "github.com/baking-bad/bcdhub/internal/models/tzip" - "github.com/baking-bad/bcdhub/internal/parsers/transfer" + transferParser "github.com/baking-bad/bcdhub/internal/parsers/transfer" ) // ParameterEvents - @@ -31,7 +32,7 @@ func (m *ParameterEvents) Description() string { // Do - migrate function func (m *ParameterEvents) Do(ctx *config.Context) error { m.contracts = make(map[string]string) - tzips, err := ctx.ES.GetTZIPWithEvents() + tzips, err := ctx.TZIP.GetWithEvents() if err != nil { return err } @@ -39,9 +40,9 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { logger.Info("Found %d tzips", len(tzips)) logger.Info("Execution events...") - inserted := make([]elastic.Model, 0) - deleted := make([]elastic.Model, 0) - newTransfers := make([]*models.Transfer, 0) + inserted := make([]models.Model, 0) + deleted := make([]models.Model, 0) + newTransfers := make([]*transfer.Transfer, 0) for i := range tzips { for _, event := range tzips[i].Events { for _, impl := range event.Implementations { @@ -50,7 +51,7 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { } logger.Info("%s...", tzips[i].Address) - protocol, err := ctx.ES.GetProtocol(tzips[i].Network, "", -1) + protocol, err := ctx.Protocols.GetProtocol(tzips[i].Network, "", -1) if err != nil { return err } @@ -59,9 +60,9 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { return err } - parser, err := transfer.NewParser(rpc, ctx.ES, - transfer.WithNetwork(tzips[i].Network), - 
transfer.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), + parser, err := transferParser.NewParser(rpc, ctx.TZIP, ctx.Blocks, ctx.Schema, ctx.Storage, + transferParser.WithNetwork(tzips[i].Network), + transferParser.WithGasLimit(protocol.Constants.HardGasLimitPerOperation), ) if err != nil { return err @@ -87,7 +88,7 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { } for _, t := range transfers { - old, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + old, err := ctx.Transfers.Get(transfer.GetContext{ Hash: t.Hash, Network: t.Network, Counter: &t.Counter, @@ -110,22 +111,22 @@ func (m *ParameterEvents) Do(ctx *config.Context) error { } } logger.Info("Delete %d transfers", len(deleted)) - if err := ctx.ES.BulkDelete(deleted); err != nil { + if err := ctx.Bulk.Delete(deleted); err != nil { return err } logger.Info("Found %d transfers", len(inserted)) - if err := ctx.ES.BulkInsert(inserted); err != nil { + if err := ctx.Bulk.Insert(inserted); err != nil { return err } - return elastic.CreateTokenBalanceUpdates(ctx.ES, newTransfers) + return transferParser.UpdateTokenBalances(ctx.TokenBalances, newTransfers) } -func (m *ParameterEvents) getOperations(ctx *config.Context, tzip models.TZIP, impl tzip.EventImplementation) ([]models.Operation, error) { - operations := make([]models.Operation, 0) +func (m *ParameterEvents) getOperations(ctx *config.Context, tzip tzip.TZIP, impl tzip.EventImplementation) ([]operation.Operation, error) { + operations := make([]operation.Operation, 0) for i := range impl.MichelsonParameterEvent.Entrypoints { - ops, err := ctx.ES.GetOperations(map[string]interface{}{ + ops, err := ctx.Operations.Get(map[string]interface{}{ "network": tzip.Network, "destination": tzip.Address, "kind": consts.Transaction, diff --git a/scripts/migration/migrations/recalc_contract_metrics.go b/scripts/migration/migrations/recalc_contract_metrics.go index 675e12ab5..2a52ae2ac 100644 --- 
a/scripts/migration/migrations/recalc_contract_metrics.go +++ b/scripts/migration/migrations/recalc_contract_metrics.go @@ -5,9 +5,9 @@ import ( "time" "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" + "github.com/baking-bad/bcdhub/internal/models" "github.com/schollz/progressbar/v3" ) @@ -28,10 +28,10 @@ func (m *RecalcContractMetrics) Description() string { func (m *RecalcContractMetrics) Do(ctx *config.Context) error { logger.Info("Start RecalcContractMetrics migration...") start := time.Now() - h := metrics.New(ctx.ES, ctx.DB) + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) for _, network := range ctx.Config.Scripts.Networks { - contracts, err := ctx.ES.GetContracts(map[string]interface{}{ + contracts, err := ctx.Contracts.GetMany(map[string]interface{}{ "network": network, }) if err != nil { @@ -51,11 +51,11 @@ func (m *RecalcContractMetrics) Do(ctx *config.Context) error { } if (i%1000 == 0 || i == len(contracts)-1) && i > 0 { - updates := make([]elastic.Model, len(contracts[lastIdx:i])) + updates := make([]models.Model, len(contracts[lastIdx:i])) for j := range contracts[lastIdx:i] { updates[j] = &contracts[lastIdx:i][j] } - if err := ctx.ES.BulkUpdate(updates); err != nil { + if err := ctx.Bulk.Update(updates); err != nil { return err } lastIdx = i diff --git a/scripts/migration/migrations/set_aliases.go b/scripts/migration/migrations/set_aliases.go index fa37fd140..5c0c07407 100644 --- a/scripts/migration/migrations/set_aliases.go +++ b/scripts/migration/migrations/set_aliases.go @@ -2,9 +2,9 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" 
"github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/metrics" + "github.com/baking-bad/bcdhub/internal/models" ) // SetAliases - migration that set aliases for operations, contracts and transfers @@ -22,15 +22,15 @@ func (m *SetAliases) Description() string { // Do - migrate function func (m *SetAliases) Do(ctx *config.Context) error { - h := metrics.New(ctx.ES, ctx.DB) + h := metrics.New(ctx.Contracts, ctx.BigMapDiffs, ctx.Blocks, ctx.Protocols, ctx.Operations, ctx.Schema, ctx.TokenBalances, ctx.TZIP, ctx.Migrations, ctx.Storage, ctx.Bulk, ctx.DB) - updatedModels := make([]elastic.Model, 0) + updatedModels := make([]models.Model, 0) for i := range ctx.Config.Scripts.Networks { logger.Info("Receiving aliases for %s...", ctx.Config.Scripts.Networks[i]) - aliases, err := ctx.ES.GetAliasesMap(ctx.Config.Scripts.Networks[i]) + aliases, err := ctx.TZIP.GetAliasesMap(ctx.Config.Scripts.Networks[i]) if err != nil { - if elastic.IsRecordNotFound(err) { + if ctx.Storage.IsRecordNotFound(err) { continue } return err @@ -45,7 +45,7 @@ func (m *SetAliases) Do(ctx *config.Context) error { "network": ctx.Config.Scripts.Networks[i], } - operations, err := ctx.ES.GetOperations(networkFilter, 0, false) + operations, err := ctx.Operations.Get(networkFilter, 0, false) if err != nil { return err } @@ -59,7 +59,7 @@ func (m *SetAliases) Do(ctx *config.Context) error { } } - contracts, err := ctx.ES.GetContracts(networkFilter) + contracts, err := ctx.Contracts.GetMany(networkFilter) if err != nil { return err } @@ -74,7 +74,7 @@ func (m *SetAliases) Do(ctx *config.Context) error { } } - transfers, err := ctx.ES.GetAllTransfers(ctx.Config.Scripts.Networks[i], 0) + transfers, err := ctx.Transfers.GetAll(ctx.Config.Scripts.Networks[i], 0) if err != nil { return err } @@ -92,5 +92,5 @@ func (m *SetAliases) Do(ctx *config.Context) error { logger.Info("Updating %d models...", len(updatedModels)) - return ctx.ES.BulkUpdate(updatedModels) 
+ return ctx.Bulk.Update(updatedModels) } diff --git a/scripts/migration/migrations/set_operation_tags.go b/scripts/migration/migrations/set_operation_tags.go index 0368994c6..a30fc7ed5 100644 --- a/scripts/migration/migrations/set_operation_tags.go +++ b/scripts/migration/migrations/set_operation_tags.go @@ -3,10 +3,10 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/contractparser/consts" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/contract" "github.com/schollz/progressbar/v3" ) @@ -25,13 +25,13 @@ func (m *SetOperationTags) Description() string { // Do - migrate function func (m *SetOperationTags) Do(ctx *config.Context) error { - operations, err := ctx.ES.GetOperations(nil, 0, false) + operations, err := ctx.Operations.Get(nil, 0, false) if err != nil { return err } logger.Info("Found %d operations", len(operations)) - result := make([]elastic.Model, 0) + result := make([]models.Model, 0) bar := progressbar.NewOptions(len(operations), progressbar.OptionSetPredictTime(false), progressbar.OptionClearOnFinish(), progressbar.OptionShowCount()) @@ -42,9 +42,9 @@ func (m *SetOperationTags) Do(ctx *config.Context) error { } if _, ok := tags[operations[i].Destination]; !ok { - contract := models.NewEmptyContract(operations[i].Network, operations[i].Destination) - if err := ctx.ES.GetByID(&contract); err != nil { - if elastic.IsRecordNotFound(err) { + contract := contract.NewEmptyContract(operations[i].Network, operations[i].Destination) + if err := ctx.Storage.GetByID(&contract); err != nil { + if ctx.Storage.IsRecordNotFound(err) { continue } return err @@ -63,8 +63,8 @@ func (m *SetOperationTags) Do(ctx *config.Context) error { result = 
append(result, &operations[i]) } - if err := ctx.ES.BulkUpdate(result); err != nil { - logger.Errorf("ctx.ES.BulkUpdate error: %v", err) + if err := ctx.Bulk.Update(result); err != nil { + logger.Errorf("ctx.Bulk.Update error: %v", err) return err } diff --git a/scripts/migration/migrations/set_protocol_constants.go b/scripts/migration/migrations/set_protocol_constants.go index 76c52826e..c29e105fb 100644 --- a/scripts/migration/migrations/set_protocol_constants.go +++ b/scripts/migration/migrations/set_protocol_constants.go @@ -2,9 +2,9 @@ package migrations import ( "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/protocol" ) // SetProtocolConstants - migration that set constants for protocol @@ -22,15 +22,15 @@ func (m *SetProtocolConstants) Description() string { // Do - migrate function func (m *SetProtocolConstants) Do(ctx *config.Context) error { - protocols := make([]models.Protocol, 0) - if err := ctx.ES.GetAll(&protocols); err != nil { + protocols := make([]protocol.Protocol, 0) + if err := ctx.Storage.GetAll(&protocols); err != nil { return err } - updatedModels := make([]elastic.Model, 0) + updatedModels := make([]models.Model, 0) for i := range protocols { if protocols[i].StartLevel == protocols[i].EndLevel && protocols[i].EndLevel == 0 { - protocols[i].Constants = models.Constants{} + protocols[i].Constants = protocol.Constants{} updatedModels = append(updatedModels, &protocols[i]) continue } @@ -47,7 +47,7 @@ func (m *SetProtocolConstants) Do(ctx *config.Context) error { if err != nil { return err } - protocols[i].Constants = models.Constants{ + protocols[i].Constants = protocol.Constants{ CostPerByte: constants.CostPerByte, HardGasLimitPerOperation: constants.HardGasLimitPerOperation, HardStorageLimitPerOperation: 
constants.HardStorageLimitPerOperation, @@ -57,5 +57,5 @@ func (m *SetProtocolConstants) Do(ctx *config.Context) error { logger.Info("%##v", protocols[i]) updatedModels = append(updatedModels, &protocols[i]) } - return ctx.ES.BulkUpdate(updatedModels) + return ctx.Bulk.Update(updatedModels) } diff --git a/scripts/migration/migrations/token_balance_recalc.go b/scripts/migration/migrations/token_balance_recalc.go index 84b9ea68a..9464ac898 100644 --- a/scripts/migration/migrations/token_balance_recalc.go +++ b/scripts/migration/migrations/token_balance_recalc.go @@ -4,10 +4,11 @@ import ( "strings" "github.com/baking-bad/bcdhub/internal/config" - "github.com/baking-bad/bcdhub/internal/elastic" "github.com/baking-bad/bcdhub/internal/helpers" "github.com/baking-bad/bcdhub/internal/logger" "github.com/baking-bad/bcdhub/internal/models" + "github.com/baking-bad/bcdhub/internal/models/transfer" + transferParsers "github.com/baking-bad/bcdhub/internal/parsers/transfer" "github.com/pkg/errors" ) @@ -53,16 +54,16 @@ func (m *TokenBalanceRecalc) Recalc(ctx *config.Context, network, address string } logger.Info("Removing token balance entities....") - if err := ctx.ES.DeleteByContract([]string{elastic.DocTokenBalances}, network, address); err != nil { + if err := ctx.Storage.DeleteByContract([]string{models.DocTokenBalances}, network, address); err != nil { return err } logger.Info("Receiving transfers....") - updates := make([]*models.Transfer, 0) + updates := make([]*transfer.Transfer, 0) var lastID string for { - transfers, err := ctx.ES.GetTransfers(elastic.GetTransfersContext{ + transfers, err := ctx.Transfers.Get(transfer.GetContext{ Network: network, Contracts: []string{address}, LastID: lastID, @@ -81,7 +82,7 @@ func (m *TokenBalanceRecalc) Recalc(ctx *config.Context, network, address string } logger.Info("Saving...") - return elastic.CreateTokenBalanceUpdates(ctx.ES, updates) + return 
transferParsers.UpdateTokenBalances(ctx.TokenBalances, updates) } // DoBatch - diff --git a/scripts/nginx/main.go b/scripts/nginx/main.go index cc96b7f72..313351896 100644 --- a/scripts/nginx/main.go +++ b/scripts/nginx/main.go @@ -16,17 +16,17 @@ func main() { } ctx := config.NewContext( - config.WithElasticSearch(cfg.Elastic), + config.WithStorage(cfg.Storage), config.WithConfigCopy(cfg), ) defer ctx.Close() - dapps, err := ctx.ES.GetDApps() + dapps, err := ctx.TZIP.GetDApps() if err != nil { logger.Fatal(err) } - aliases, err := ctx.ES.GetAliases(consts.Mainnet) + aliases, err := ctx.TZIP.GetAliases(consts.Mainnet) if err != nil { logger.Fatal(err) } diff --git a/scripts/nginx/nginx.go b/scripts/nginx/nginx.go index 7511f7c8e..bb22ce148 100644 --- a/scripts/nginx/nginx.go +++ b/scripts/nginx/nginx.go @@ -8,7 +8,6 @@ import ( "text/template" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/tzip" ) @@ -57,7 +56,7 @@ const locationTemplate = ` sub_filter_once on; }` -func makeNginxConfig(dapps []tzip.DApp, aliases []models.TZIP, filepath, baseURL string) error { +func makeNginxConfig(dapps []tzip.DApp, aliases []tzip.TZIP, filepath, baseURL string) error { var locations strings.Builder tmpl := template.Must(template.New("").Parse(locationTemplate)) diff --git a/scripts/nginx/sitemap.go b/scripts/nginx/sitemap.go index 9998507b9..55189b35b 100644 --- a/scripts/nginx/sitemap.go +++ b/scripts/nginx/sitemap.go @@ -5,12 +5,11 @@ import ( "github.com/baking-bad/bcdhub/internal/config" "github.com/baking-bad/bcdhub/internal/logger" - "github.com/baking-bad/bcdhub/internal/models" "github.com/baking-bad/bcdhub/internal/models/tzip" "github.com/baking-bad/bcdhub/scripts/nginx/pkg/sitemap" ) -func makeSitemap(dapps []tzip.DApp, aliases []models.TZIP, filepath string, cfg config.Config) error { +func makeSitemap(dapps 
[]tzip.DApp, aliases []tzip.TZIP, filepath string, cfg config.Config) error { s := sitemap.New() s.AddLocation(cfg.BaseURL)