diff --git a/.eslintrc.js b/.eslintrc.js
index c2d3446..d0c24e9 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -10,6 +10,7 @@ module.exports = {
   rules: {
     // Other rules...
     "@typescript-eslint/no-var-requires": "off",
+    'no-constant-condition': 'off',
     "@typescript-eslint/no-explicit-any": "warn",
     '@typescript-eslint/no-unused-vars': [
       'error', // or 'off' to disable entirely
diff --git a/README.md b/README.md
index 3763866..7b44107 100644
--- a/README.md
+++ b/README.md
@@ -210,6 +210,12 @@ To deploy the script, cd into the frontend folder and run:
 cd frontend && offckb deploy --network
 ```
 
+Pass the `--type-id` option if you want the deployed scripts to be upgradable:
+
+```sh
+cd frontend && offckb deploy --type-id --network
+```
+
 Once the deployment is done, you can use the following command to check the deployed scripts:
 
 ```sh
diff --git a/package.json b/package.json
index d0edee3..93f04a1 100644
--- a/package.json
+++ b/package.json
@@ -59,7 +59,7 @@
     "typescript": "^5.3.3"
   },
   "dependencies": {
-    "@ckb-ccc/core": "^0.0.11-alpha.3",
+    "@ckb-ccc/core": "^0.0.16-alpha.3",
     "@ckb-lumos/lumos": "0.23.0",
     "@iarna/toml": "^2.2.5",
     "@inquirer/prompts": "^4.1.0",
diff --git a/src/cli.ts b/src/cli.ts
index bbe415d..0e252d9 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -101,6 +101,7 @@ program
   .description('Deploy contracts to different networks, only supports devnet and testnet')
   .option('--network ', 'Specify the network to deploy to', 'devnet')
   .option('--target ', 'Specify the relative bin target folder to deploy to')
+  .option('-t, --type-id', 'Use an upgradable type id to deploy the script')
   .option('--privkey ', 'Specify the private key to deploy scripts')
   .action((options: DeployOptions) => deploy(options));
 
diff --git a/src/cmd/deploy.ts b/src/cmd/deploy.ts
index 5de8412..d129665 100644
--- a/src/cmd/deploy.ts
+++ b/src/cmd/deploy.ts
@@ -1,39 +1,32 @@
-import { commons, hd, helpers } from '@ckb-lumos/lumos';
-import fs from 'fs';
 import { NetworkOption, Network } from '../util/type';
 import path from 'path';
-import { Account, CKB } from '../util/ckb';
 import { deployerAccount } from '../cfg/account';
-import { listBinaryFilesInFolder, readFileToUint8Array, isAbsolutePath } from '../util/fs';
+import { listBinaryFilesInFolder, isAbsolutePath } from '../util/fs';
 import { validateNetworkOpt, validateExecDappEnvironment } from '../util/validator';
-import { DeploymentOptions, generateDeploymentToml } from '../deploy/toml';
-import { DeploymentRecipe, generateDeploymentRecipeJsonFile } from '../deploy/migration';
-import { ckbHash, computeScriptHash } from '@ckb-lumos/lumos/utils';
-import { genMyScriptsJsonFile } from '../scripts/gen';
-import { OffCKBConfigFile } from '../template/offckb-config';
+import { deployBinaries, getToDeployBinsPath, recordDeployResult } from '../deploy';
+import { CKB } from '../sdk/ckb';
 
 export interface DeployOptions extends NetworkOption {
   target: string | null | undefined;
   privkey?: string | null;
+  typeId?: boolean;
 }
 
-export async function deploy(opt: DeployOptions = { network: Network.devnet, target: null }) {
+export async function deploy(opt: DeployOptions = { network: Network.devnet, typeId: false, target: null }) {
   const network = opt.network as Network;
   validateNetworkOpt(network);
 
-  const ckb = new CKB(network);
+  const ckb = new CKB({ network });
 
   // we use deployerAccount to deploy contract by default
   const privateKey = opt.privkey || deployerAccount.privkey;
-  const lumosConfig = ckb.getLumosConfig();
-  const from = CKB.generateAccountFromPrivateKey(privateKey, lumosConfig);
-
+  const enableTypeId = opt.typeId ?? false;
   const targetFolder = opt.target;
   if (targetFolder) {
     const binFolder = isAbsolutePath(targetFolder) ? targetFolder : path.resolve(process.cwd(), targetFolder);
     const bins = listBinaryFilesInFolder(binFolder);
     const binPaths = bins.map((bin) => path.resolve(binFolder, bin));
-    const results = await deployBinaries(binPaths, from, ckb);
+    const results = await deployBinaries(binPaths, privateKey, enableTypeId, ckb);
     // record the deployed contract infos
     recordDeployResult(results, network, false); // we don't update my-scripts.json since we don't know where the file is
@@ -49,115 +42,8 @@ export async function deploy(opt: DeployOptions = { network: Network.devnet, tar
   // read contract bin folder
   const bins = getToDeployBinsPath();
-  const results = await deployBinaries(bins, from, ckb);
+  const results = await deployBinaries(bins, privateKey, enableTypeId, ckb);
   // record the deployed contract infos
   recordDeployResult(results, network);
 }
-
-function getToDeployBinsPath() {
-  const userOffCKBConfigPath = path.resolve(process.cwd(), 'offckb.config.ts');
-  const fileContent = fs.readFileSync(userOffCKBConfigPath, 'utf-8');
-  const match = fileContent.match(/contractBinFolder:\s*['"]([^'"]+)['"]/);
-  if (match && match[1]) {
-    const contractBinFolderValue = match[1];
-    const binFolderPath = isAbsolutePath(contractBinFolderValue)
-      ? contractBinFolderValue
-      : path.resolve(process.cwd(), contractBinFolderValue);
-    const bins = listBinaryFilesInFolder(binFolderPath);
-    return bins.map((bin) => path.resolve(binFolderPath, bin));
-  } else {
-    console.log('contractBinFolder value not found in offckb.config.ts');
-    return [];
-  }
-}
-
-type DeployBinaryReturnType = ReturnType<typeof deployBinary>;
-type UnwrapPromise<T> = T extends Promise<infer U> ? U : T;
-type DeployedInterfaceType = UnwrapPromise<DeployBinaryReturnType>;
-
-async function recordDeployResult(results: DeployedInterfaceType[], network: Network, updateMyScriptsJsonFile = true) {
-  if (results.length === 0) {
-    return;
-  }
-  for (const result of results) {
-    generateDeploymentToml(result.deploymentOptions, network);
-    generateDeploymentRecipeJsonFile(result.deploymentOptions.name, result.deploymentRecipe, network);
-  }
-
-  // update my-scripts.json
-  if (updateMyScriptsJsonFile) {
-    const userOffCKBConfigPath = path.resolve(process.cwd(), 'offckb.config.ts');
-    const folder = OffCKBConfigFile.readContractInfoFolder(userOffCKBConfigPath);
-    if (folder) {
-      const myScriptsFilePath = path.resolve(folder, 'my-scripts.json');
-      genMyScriptsJsonFile(myScriptsFilePath);
-    }
-  }
-
-  console.log('done.');
-}
-
-async function deployBinaries(binPaths: string[], from: Account, ckb: CKB) {
-  if (binPaths.length === 0) {
-    console.log('No binary to deploy.');
-  }
-  const results: DeployedInterfaceType[] = [];
-  for (const bin of binPaths) {
-    const result = await deployBinary(bin, from, ckb);
-    results.push(result);
-  }
-  return results;
-}
-
-async function deployBinary(
-  binPath: string,
-  from: Account,
-  ckb: CKB,
-): Promise<{
-  deploymentRecipe: DeploymentRecipe;
-  deploymentOptions: DeploymentOptions;
-}> {
-  const bin = await readFileToUint8Array(binPath);
-  const contractName = path.basename(binPath);
-  const result = await commons.deploy.generateDeployWithTypeIdTx({
-    cellProvider: ckb.indexer,
-    fromInfo: from.address,
-    scriptBinary: bin,
-    config: ckb.getLumosConfig(),
-  });
-
-  // send deploy tx
-  let txSkeleton = result.txSkeleton;
-  txSkeleton = commons.common.prepareSigningEntries(txSkeleton);
-  const message = txSkeleton.get('signingEntries').get(0)!.message;
-  const Sig = hd.key.signRecoverable(message!, from.privKey);
-  const tx = helpers.sealTransaction(txSkeleton, [Sig]);
-  const res = await ckb.rpc.sendTransaction(tx, 'passthrough');
-  console.log(`contract ${contractName} deployed, tx hash:`, res);
-  console.log('wait 4 blocks..');
-  await ckb.indexer.waitForSync(-4); // why negative 4? a bug in ckb-lumos
-
-  // todo: handle multiple cell recipes?
-  return {
-    deploymentOptions: {
-      name: contractName,
-      binFilePath: binPath,
-      enableTypeId: true,
-      lockScript: tx.outputs[+result.scriptConfig.INDEX].lock,
-    },
-    deploymentRecipe: {
-      cellRecipes: [
-        {
-          name: contractName,
-          txHash: result.scriptConfig.TX_HASH,
-          index: result.scriptConfig.INDEX,
-          occupiedCapacity: '0x' + BigInt(tx.outputsData[+result.scriptConfig.INDEX].slice(2).length / 2).toString(16),
-          dataHash: ckbHash(tx.outputsData[+result.scriptConfig.INDEX]),
-          typeId: computeScriptHash(result.typeId),
-        },
-      ],
-      depGroupRecipes: [],
-    },
-  };
-}
diff --git a/src/deploy/index.ts b/src/deploy/index.ts
new file mode 100644
index 0000000..e9cd45c
--- /dev/null
+++ b/src/deploy/index.ts
@@ -0,0 +1,130 @@
+import { DeploymentOptions, generateDeploymentToml } from '../deploy/toml';
+import { DeploymentRecipe, generateDeploymentMigrationFile, Migration } from '../deploy/migration';
+import { ckbHash, computeScriptHash } from '@ckb-lumos/lumos/utils';
+import { genMyScriptsJsonFile } from '../scripts/gen';
+import { OffCKBConfigFile } from '../template/offckb-config';
+import { listBinaryFilesInFolder, readFileToUint8Array, isAbsolutePath } from '../util/fs';
+import path from 'path';
+import fs from 'fs';
+import { HexString } from '@ckb-lumos/lumos';
+import { Network } from '../util/type';
+import { CKB } from '../sdk/ckb';
+
+export type DeployBinaryReturnType = ReturnType<typeof deployBinary>;
+export type UnwrapPromise<T> = T extends Promise<infer U> ? U : T;
+export type DeployedInterfaceType = UnwrapPromise<DeployBinaryReturnType>;
+
+export function getToDeployBinsPath() {
+  const userOffCKBConfigPath = path.resolve(process.cwd(), 'offckb.config.ts');
+  const fileContent = fs.readFileSync(userOffCKBConfigPath, 'utf-8');
+  const match = fileContent.match(/contractBinFolder:\s*['"]([^'"]+)['"]/);
+  if (match && match[1]) {
+    const contractBinFolderValue = match[1];
+    const binFolderPath = isAbsolutePath(contractBinFolderValue)
+      ? contractBinFolderValue
+      : path.resolve(process.cwd(), contractBinFolderValue);
+    const bins = listBinaryFilesInFolder(binFolderPath);
+    return bins.map((bin) => path.resolve(binFolderPath, bin));
+  } else {
+    console.log('contractBinFolder value not found in offckb.config.ts');
+    return [];
+  }
+}
+
+export async function recordDeployResult(
+  results: DeployedInterfaceType[],
+  network: Network,
+  isUpdateMyScriptsJsonFile = true,
+) {
+  if (results.length === 0) {
+    return;
+  }
+  for (const result of results) {
+    generateDeploymentToml(result.deploymentOptions, network);
+    generateDeploymentMigrationFile(result.deploymentOptions.name, result.deploymentRecipe, network);
+  }
+
+  // update my-scripts.json
+  if (isUpdateMyScriptsJsonFile) {
+    const userOffCKBConfigPath = path.resolve(process.cwd(), 'offckb.config.ts');
+    const folder = OffCKBConfigFile.readContractInfoFolder(userOffCKBConfigPath);
+    if (folder) {
+      const myScriptsFilePath = path.resolve(folder, 'my-scripts.json');
+      genMyScriptsJsonFile(myScriptsFilePath);
+    }
+  }
+
+  console.log('done.');
+}
+
+export async function deployBinaries(binPaths: string[], privateKey: HexString, enableTypeId: boolean, ckb: CKB) {
+  if (binPaths.length === 0) {
+    console.log('No binary to deploy.');
+  }
+  const results: DeployedInterfaceType[] = [];
+  for (const bin of binPaths) {
+    const result = await deployBinary(bin, privateKey, enableTypeId, ckb);
+    results.push(result);
+  }
+  return results;
+}
+
+export async function deployBinary(
+  binPath: string,
+  privateKey: HexString,
+  enableTypeId: boolean,
+  ckb: CKB,
+): Promise<{
+  deploymentRecipe: DeploymentRecipe;
+  deploymentOptions: DeploymentOptions;
+}> {
+  const bin = await readFileToUint8Array(binPath);
+  const contractName = path.basename(binPath);
+
+  const result = !enableTypeId
+    ? await ckb.deployScript(bin, privateKey)
+    : Migration.isDeployedWithTypeId(contractName, ckb.network)
+      ? await ckb.upgradeTypeIdScript(contractName, bin, privateKey)
+      : await ckb.deployNewTypeIDScript(bin, privateKey);
+
+  console.log(`contract ${contractName} deployed, tx hash:`, result.txHash);
+  console.log('wait for tx confirmed on-chain...');
+  await ckb.waitForTxConfirm(result.txHash);
+  console.log('tx committed.');
+
+  const txHash = result.txHash;
+  const typeIdScript = result.typeId;
+  const index = result.scriptOutputCellIndex;
+  const tx = result.tx;
+  const dataByteLen = BigInt(tx.outputsData[+index].slice(2).length / 2);
+  const dataShannonLen = dataByteLen * BigInt('100000000');
+  const occupiedCapacity = '0x' + dataShannonLen.toString(16);
+
+  if (enableTypeId && typeIdScript == null) {
+    throw new Error('type id script is null while enableTypeId is true.');
+  }
+  const typeIdScriptHash = enableTypeId ? computeScriptHash(typeIdScript!) : undefined;
+
+  // todo: handle multiple cell recipes?
+  return {
+    deploymentOptions: {
+      name: contractName,
+      binFilePath: binPath,
+      enableTypeId: enableTypeId,
+      lockScript: tx.outputs[+index].lock,
+    },
+    deploymentRecipe: {
+      cellRecipes: [
+        {
+          name: contractName,
+          txHash,
+          index: '0x' + index.toString(16),
+          occupiedCapacity,
+          dataHash: ckbHash(tx.outputsData[+index]),
+          typeId: typeIdScriptHash,
+        },
+      ],
+      depGroupRecipes: [],
+    },
+  };
+}
diff --git a/src/deploy/migration.ts b/src/deploy/migration.ts
index 801b281..b90f1f1 100644
--- a/src/deploy/migration.ts
+++ b/src/deploy/migration.ts
@@ -26,25 +26,53 @@ export interface DeploymentRecipe {
   depGroupRecipes: DepGroupRecipe[];
 }
 
-export interface DeploymentRecipeJson {
+export interface MigrationJson {
   cell_recipes: {
     name: string;
     tx_hash: string;
-    index: HexNumber;
-    occupied_capacity: HexNumber;
+    index: number;
+    occupied_capacity: number; // CKB blocksize limit is 500k, so it should be impossible to have a cell occupied data larger than Number.MAX_SAFE_INTEGER which is 9007,1992,5474,0991
     data_hash: string;
     type_id?: string;
   }[];
   dep_group_recipes: {
     name: string;
     tx_hash: string;
-    index: HexNumber;
+    index: number;
     data_hash: string;
-    occupied_capacity: HexNumber;
+    occupied_capacity: number; // CKB blocksize limit is 500k, so it should be impossible to have a cell occupied data larger than Number.MAX_SAFE_INTEGER which is 9007,1992,5474,0991
   }[];
 }
 
-export function generateDeploymentRecipeJsonFile(
+export class Migration {
+  static find(scriptName: string, network: Network = Network.devnet) {
+    const filePath = getMigrationFolderPath(scriptName, network);
+    const migrationFile = getNewestMigrationFile(filePath);
+    if (migrationFile == null) return null;
+
+    return readDeploymentMigrationFile(migrationFile);
+  }
+
+  static isDeployed(scriptName: string, network: Network = Network.devnet) {
+    const deploymentReceipt = Migration.find(scriptName, network);
+    if (deploymentReceipt == null) return false;
+
+    return true;
+  }
+
+  static isDeployedWithTypeId(scriptName: string, network: Network = Network.devnet) {
+    const isDeployed = this.isDeployed(scriptName, network);
+    if (isDeployed === false) return false;
+
+    const deploymentReceipt = Migration.find(scriptName, network)!;
+    const typeId = deploymentReceipt.cellRecipes[0].typeId;
+    if (typeId == null) return false;
+
+    return true;
+  }
+}
+
+export function generateDeploymentMigrationFile(
   name: string,
   deploymentRecipe: DeploymentRecipe,
   network = Network.devnet,
@@ -62,9 +90,9 @@ export function generateDeploymentRecipeJsonFile(
   }
 }
 
-export function readDeploymentRecipeJsonFile(filePath: string): DeploymentRecipe {
+export function readDeploymentMigrationFile(filePath: string): DeploymentRecipe {
   const jsonString = fs.readFileSync(filePath, 'utf-8');
-  const data: DeploymentRecipeJson = JSON.parse(jsonString);
+  const data: MigrationJson = JSON.parse(jsonString);
   return deploymentRecipeFromJson(data);
 }
 
@@ -86,7 +114,10 @@ export function getMigrationFolderPath(scriptName: string, network: Network) {
   return path.resolve(contractsPath, `${scriptName}/migrations`);
 }
 
-export function getNewestMigrationFile(folderPath: string): string | undefined {
+export function getNewestMigrationFile(folderPath: string): string | null {
+  if (!fs.existsSync(folderPath) || !fs.lstatSync(folderPath).isDirectory()) {
+    return null;
+  }
   const files = fs
     .readdirSync(folderPath)
     .filter((file) => file.endsWith('.json')) // Ensure only JSON files are considered
@@ -98,41 +129,53 @@ export function getNewestMigrationFile(folderPath: string): string | undefined {
   });
 
   // Return the full path of the newest file (last in sorted array) or undefined if no files
-  return files.length > 0 ? path.join(folderPath, files[files.length - 1]) : undefined;
+  return files.length > 0 ? path.join(folderPath, files[files.length - 1]) : null;
 }
 
-export function deploymentRecipeToJson(recipe: DeploymentRecipe): DeploymentRecipeJson {
+export function deploymentRecipeToJson(recipe: DeploymentRecipe): MigrationJson {
   return {
     cell_recipes: recipe.cellRecipes.map((val) => {
+      if (BigInt(val.occupiedCapacity) > BigInt(Number.MAX_SAFE_INTEGER)) {
+        // CKB blocksize limit is 500k, so it should be impossible to have a cell occupied data larger than Number.MAX_SAFE_INTEGER which is 9007,1992,5474,0991
+        console.error(
+          `invalid occupiedCapacity: ${val.occupiedCapacity}, the cell_recipes json might be incorrect for cell outpoint ${val.txHash}:${+val.index}`,
+        );
+      }
       return {
         name: val.name,
         tx_hash: val.txHash,
-        index: val.index,
-        occupied_capacity: val.occupiedCapacity,
+        index: +val.index,
+        occupied_capacity: +BigInt(val.occupiedCapacity).toString(10),
        data_hash: val.dataHash,
        type_id: val.typeId,
      };
    }),
    dep_group_recipes: recipe.depGroupRecipes.map((val) => {
+      if (BigInt(val.occupiedCapacity) > BigInt(Number.MAX_SAFE_INTEGER)) {
+        // CKB blocksize limit is 500k, so it should be impossible to have a cell occupied data larger than Number.MAX_SAFE_INTEGER which is 9007,1992,5474,0991
+        console.error(
+          `invalid occupiedCapacity: ${val.occupiedCapacity}, the dep_group_recipes json might be incorrect for cell outpoint ${val.txHash}:${+val.index}`,
+        );
+      }
      return {
        name: val.name,
        tx_hash: val.txHash,
-        index: val.index,
+        index: +val.index,
        data_hash: val.dataHash,
-        occupied_capacity: val.occupiedCapacity,
+        occupied_capacity: +BigInt(val.occupiedCapacity).toString(10),
      };
    }),
  };
 }
 
-export function deploymentRecipeFromJson(json: DeploymentRecipeJson): DeploymentRecipe {
+export function deploymentRecipeFromJson(json: MigrationJson): DeploymentRecipe {
   return {
     cellRecipes: json.cell_recipes.map((val) => {
       return {
         name: val.name,
         txHash: val.tx_hash,
-        index: val.index,
-        occupiedCapacity: val.occupied_capacity,
+        index: '0x' + val.index.toString(16),
+        occupiedCapacity: '0x' + val.occupied_capacity.toString(16),
         dataHash: val.data_hash,
         typeId: val.type_id,
       };
@@ -141,9 +184,9 @@ export function deploymentRecipeFromJson(json: DeploymentRecipeJson): Deployment
       return {
         name: val.name,
         txHash: val.tx_hash,
-        index: val.index,
+        index: '0x' + val.index.toString(16),
         dataHash: val.data_hash,
-        occupiedCapacity: val.occupied_capacity,
+        occupiedCapacity: '0x' + val.occupied_capacity.toString(16),
       };
     }),
   };
diff --git a/src/deploy/toml.ts b/src/deploy/toml.ts
index d8ad108..8971801 100644
--- a/src/deploy/toml.ts
+++ b/src/deploy/toml.ts
@@ -1,6 +1,6 @@
 import { Script } from '@ckb-lumos/lumos';
 import fs from 'fs';
-import toml from '@iarna/toml';
+import toml, { JsonMap } from '@iarna/toml';
 import { Network } from '../util/type';
 import { dirname } from 'path';
 import { getContractsPath } from './util';
@@ -16,7 +16,7 @@ export interface DeploymentOptions {
 export interface DeploymentToml {
   cells: {
     name: string;
-    enable_type_id: 'true' | 'false';
+    enable_type_id: boolean;
     location: {
       file: string;
     };
@@ -29,11 +29,11 @@ export interface DeploymentToml {
 }
 
 export function generateDeploymentToml(options: DeploymentOptions, network: Network) {
-  const data = {
+  const data: DeploymentToml = {
     cells: [
       {
         name: options.name,
-        enable_type_id: options.enableTypeId ? 'true' : 'false',
+        enable_type_id: options.enableTypeId,
         location: {
           file: options.binFilePath,
         },
@@ -46,7 +46,7 @@ export function generateDeploymentToml(options: DeploymentOptions, network: Netw
     },
   };
 
-  const tomlString = toml.stringify(data);
+  const tomlString = toml.stringify(data as unknown as JsonMap);
   const outputFilePath: string = `${getContractsPath(network)}/${options.name}/deployment.toml`;
   if (outputFilePath) {
     if (!fs.existsSync(dirname(outputFilePath))) {
@@ -65,7 +65,7 @@ export function readDeploymentToml(scriptName: string, network: Network) {
     cells: [
       {
         name: data.cells[0].name as string,
-        enableTypeId: data.cells[0].enable_type_id === 'true' ? true : false,
+        enableTypeId: data.cells[0].enable_type_id,
         location: {
           file: data.cells[0].location.file as string,
         },
diff --git a/src/scripts/private.ts b/src/scripts/private.ts
new file mode 100644
index 0000000..63621b3
--- /dev/null
+++ b/src/scripts/private.ts
@@ -0,0 +1,19 @@
+import { ccc, KnownScript } from '@ckb-ccc/core';
+//todo: extract getSystemScriptsFromListHashes/toCCCKnownScripts from cmd folder
+import { getSystemScriptsFromListHashes, toCCCKnownScripts } from '../cmd/system-scripts';
+
+export function buildCCCDevnetKnownScripts() {
+  const devnetSystemScripts = getSystemScriptsFromListHashes();
+  if (devnetSystemScripts == null) {
+    throw new Error('can not getSystemScriptsFromListHashes in devnet');
+  }
+  const devnetKnownScripts:
+    | Record<
+        KnownScript,
+        Pick<ccc.Script, 'codeHash' | 'hashType'> & {
+          cellDeps: ccc.CellDepInfoLike[];
+        }
+      >
+    | undefined = toCCCKnownScripts(devnetSystemScripts);
+  return devnetKnownScripts;
+}
diff --git a/src/scripts/util.ts b/src/scripts/util.ts
index 2f3b6eb..1836f0f 100644
--- a/src/scripts/util.ts
+++ b/src/scripts/util.ts
@@ -1,6 +1,6 @@
 import * as fs from 'fs';
 import { getContractsPath } from '../deploy/util';
-import { getMigrationFolderPath, getNewestMigrationFile, readDeploymentRecipeJsonFile } from '../deploy/migration';
+import { getMigrationFolderPath, getNewestMigrationFile, readDeploymentMigrationFile } from '../deploy/migration';
 import { MyScriptsRecord } from '../scripts/type';
 import { getSubfolders } from '../util/fs';
 import { Network } from '../util/type';
@@ -22,7 +22,7 @@ export function readUserDeployedScriptsInfo(network: Network) {
       if (newestFilePath) {
         try {
           // Read the file content
-          const recipe = readDeploymentRecipeJsonFile(newestFilePath);
+          const recipe = readDeploymentMigrationFile(newestFilePath);
           // todo: handle multiple cell recipes?
          const firstCell = recipe.cellRecipes[0];
          const isDepCode = recipe.depGroupRecipes.length > 0;
diff --git a/src/sdk/ckb.ts b/src/sdk/ckb.ts
new file mode 100644
index 0000000..56f1464
--- /dev/null
+++ b/src/sdk/ckb.ts
@@ -0,0 +1,186 @@
+// this is a rewrite for util/ckb.ts
+// to replace lumos with ccc
+
+import { ccc, ClientPublicMainnet, ClientPublicTestnet, OutPointLike, Script } from '@ckb-ccc/core';
+import { Network } from '../util/type';
+import { isValidNetworkString } from '../util/validator';
+import { networks } from './network';
+import { buildCCCDevnetKnownScripts } from '../scripts/private';
+import { HexString } from '@ckb-lumos/lumos';
+import { Migration } from '../deploy/migration';
+
+export class CKBProps {
+  network?: Network;
+  isEnableProxyRpc?: boolean;
+}
+
+export interface DeploymentResult {
+  txHash: HexString;
+  tx: ccc.Transaction;
+  scriptOutputCellIndex: number; // output cell index number of the deployed script
+  isTypeId: boolean;
+  typeId?: Script;
+}
+
+export class CKB {
+  public network: Network;
+  private client: ClientPublicTestnet | ClientPublicMainnet;
+
+  constructor({ network = Network.devnet, isEnableProxyRpc = false }: CKBProps) {
+    if (!isValidNetworkString(network)) {
+      throw new Error('invalid network option');
+    }
+
+    this.network = network;
+
+    if (isEnableProxyRpc === true) {
+      this.client =
+        network === 'mainnet'
+          ? new ccc.ClientPublicMainnet({ url: networks.mainnet.proxy_rpc_url })
+          : network === 'testnet'
+            ? new ccc.ClientPublicTestnet({ url: networks.testnet.proxy_rpc_url })
+            : new ccc.ClientPublicTestnet({
+                url: networks.devnet.proxy_rpc_url,
+                scripts: buildCCCDevnetKnownScripts(),
+              });
+    } else {
+      this.client =
+        network === 'mainnet'
+          ? new ccc.ClientPublicMainnet()
+          : network === 'testnet'
+            ? new ccc.ClientPublicTestnet()
+            : new ccc.ClientPublicTestnet({
+                url: networks.devnet.rpc_url,
+                scripts: buildCCCDevnetKnownScripts(),
+              });
+    }
+  }
+
+  private buildSigner(privateKey: HexString) {
+    const signer = new ccc.SignerCkbPrivateKey(this.client, privateKey);
+    return signer;
+  }
+
+  async waitForTxConfirm(txHash: HexString, timeout: number = 60000) {
+    const query = async () => {
+      const res = await this.client.getTransactionNoCache(txHash);
+      if (res && res.status === 'committed') {
+        return true;
+      } else {
+        return false;
+      }
+    };
+    return waitFor(query, timeout, 5000);
+  }
+
+  async deployScript(scriptBinBytes: Uint8Array, privateKey: string): Promise<DeploymentResult> {
+    const signer = this.buildSigner(privateKey);
+    const signerSecp256k1Address = await signer.getAddressObjSecp256k1();
+    const tx = ccc.Transaction.from({
+      outputs: [
+        {
+          lock: signerSecp256k1Address.script,
+        },
+      ],
+      outputsData: [scriptBinBytes],
+    });
+    await tx.completeInputsByCapacity(signer);
+    await tx.completeFeeBy(signer, 1000);
+    const txHash = await signer.sendTransaction(tx);
+    return { txHash, tx, scriptOutputCellIndex: 0, isTypeId: false };
+  }
+
+  async deployNewTypeIDScript(scriptBinBytes: Uint8Array, privateKey: string): Promise<DeploymentResult> {
+    const signer = this.buildSigner(privateKey);
+    const signerSecp256k1Address = await signer.getAddressObjSecp256k1();
+    const typeIdTx = ccc.Transaction.from({
+      outputs: [
+        {
+          lock: signerSecp256k1Address.script,
+          type: await ccc.Script.fromKnownScript(this.client, ccc.KnownScript.TypeId, '00'.repeat(32)),
+        },
+      ],
+      outputsData: [scriptBinBytes],
+    });
+    await typeIdTx.completeInputsByCapacity(signer);
+    if (!typeIdTx.outputs[0].type) {
+      throw new Error('Unexpected disappeared output');
+    }
+    typeIdTx.outputs[0].type.args = ccc.hashTypeId(typeIdTx.inputs[0], 0);
+    await typeIdTx.completeFeeBy(signer, 1000);
+    const txHash = await signer.sendTransaction(typeIdTx);
+    return { txHash, tx: typeIdTx, scriptOutputCellIndex: 0, isTypeId: true, typeId: typeIdTx.outputs[0].type };
+  }
+
+  async upgradeTypeIdScript(
+    scriptName: string,
+    newScriptBinBytes: Uint8Array,
+    privateKey: HexString,
+  ): Promise<DeploymentResult> {
+    const deploymentReceipt = Migration.find(scriptName, this.network);
+    if (deploymentReceipt == null) throw new Error("no migration file, can't be updated.");
+    const outpoint: OutPointLike = {
+      txHash: deploymentReceipt.cellRecipes[0].txHash,
+      index: deploymentReceipt.cellRecipes[0].index,
+    };
+    const typeId = deploymentReceipt.cellRecipes[0].typeId;
+    if (typeId == null) throw new Error("type id in migration file is null, can't be updated.");
+
+    const cell = await this.client.getCell(outpoint);
+    if (cell == null) {
+      throw new Error('type id cell not found!');
+    }
+
+    const typeIdArgs = cell.cellOutput.type?.args;
+    if (typeIdArgs == null) {
+      throw new Error("type id args is null, can't be updated");
+    }
+    const typeIdFromLiveCell = ccc.Script.from(cell.cellOutput.type!).hash();
+
+    if (typeId !== typeIdFromLiveCell) {
+      throw new Error(
+        `type id not matched! migration file type id: ${typeId}, live cell type id: ${typeIdFromLiveCell}`,
+      );
+    }
+
+    const cellInput = ccc.CellInput.from({ previousOutput: cell.outPoint, since: 0 });
+    const signer = this.buildSigner(privateKey);
+    const signerSecp256k1Address = await signer.getAddressObjSecp256k1();
+    const typeIdTx = ccc.Transaction.from({
+      inputs: [cellInput],
+      outputs: [
+        {
+          lock: signerSecp256k1Address.script,
+          type: await ccc.Script.fromKnownScript(this.client, ccc.KnownScript.TypeId, '00'.repeat(32)),
+        },
+      ],
+      outputsData: [newScriptBinBytes],
+    });
+    await typeIdTx.completeInputsByCapacity(signer);
+    if (!typeIdTx.outputs[0].type) {
+      throw new Error('Unexpected disappeared output');
+    }
+    typeIdTx.outputs[0].type.args = typeIdArgs as `0x{string}`;
+    await typeIdTx.completeFeeBy(signer, 1000);
+    const txHash = await signer.sendTransaction(typeIdTx);
+    return { txHash, tx: typeIdTx, scriptOutputCellIndex: 0, isTypeId: true, typeId: typeIdTx.outputs[0].type };
+  }
+}
+
+async function waitFor(query: () => Promise<boolean>, timeout: number, interval: number): Promise<void> {
+  const startTime = Date.now();
+
+  while (true) {
+    if (Date.now() - startTime > timeout) {
+      throw new Error('Operation timed out');
+    }
+
+    try {
+      const result = await query();
+      if (result) break;
+    } catch (error: unknown) {
+      console.debug((error as Error).message);
+    }
+    await new Promise((resolve) => setTimeout(resolve, interval));
+  }
+}
diff --git a/src/sdk/network.ts b/src/sdk/network.ts
new file mode 100644
index 0000000..e28e668
--- /dev/null
+++ b/src/sdk/network.ts
@@ -0,0 +1,21 @@
+import { readSettings } from '../cfg/setting';
+
+const config = readSettings();
+
+export const networks = {
+  devnet: {
+    addr_prefix: 'ckt',
+    rpc_url: config.devnet.rpcUrl,
+    proxy_rpc_url: `http://127.0.0.1:${config.rpc.proxyPort}`,
+  },
+  testnet: {
+    addr_prefix: 'ckt',
+    rpc_url: config.testnet.rpcUrl,
+    proxy_rpc_url: `http://127.0.0.1:${config.rpc.proxyPort}`,
+  },
+  mainnet: {
+    addr_prefix: 'ckb',
+    rpc_url: config.mainnet.rpcUrl,
+    proxy_rpc_url: `http://127.0.0.1:${config.rpc.proxyPort}`,
+  },
+};
diff --git a/yarn.lock b/yarn.lock
index 5b9f34c..d43fda2 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -12,12 +12,11 @@
   resolved "https://registry.yarnpkg.com/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz#63430d04bd8c5e74f8d7d049338f1cd9d4f02069"
   integrity sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==
 
-"@ckb-ccc/core@^0.0.11-alpha.3":
-  version "0.0.11-alpha.3"
-  resolved "https://registry.yarnpkg.com/@ckb-ccc/core/-/core-0.0.11-alpha.3.tgz#586f22714cd27b92e5c9272a3d3c7a0e2a704d88"
-  integrity sha512-2+K6D7rdEFhDSqaiw4Crbe75p0urPOQ8hePJl/FTqGhengOOcBqp4G14+NXPHhS/S58r+V0kmg3zrRIRXBKPGQ==
+"@ckb-ccc/core@^0.0.16-alpha.3":
+  version "0.0.16-alpha.3"
+  resolved "https://registry.yarnpkg.com/@ckb-ccc/core/-/core-0.0.16-alpha.3.tgz#5b15cbe4c459cec73959cfd124583f402fed7844"
+  integrity sha512-/tvZtEtRutZSqJPRJvfVPwj4X0f9XggD1+cTq+L3b2hLf99rqO0Y+9mdv9sGHUQsMLbngigVGEI1dY5HkLldEQ==
   dependencies:
-    "@ckb-lumos/helpers" "^0.22.2"
     "@joyid/ckb" "^1.0.1"
     "@noble/ciphers" "^0.5.3"
     "@noble/curves" "^1.4.2"
@@ -25,24 +24,11 @@
     abort-controller "^3.0.0"
     bech32 "^2.0.0"
     bitcoinjs-message "^2.2.0"
-    blake2b "^2.1.4"
     buffer "^6.0.3"
     cross-fetch "^4.0.0"
     ethers "^6.13.1"
-
-"@ckb-lumos/base@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/base/-/base-0.22.2.tgz#337dac666bf3b20eb78b3545fbeb3249292e7bf7"
-  integrity sha512-nosUCSa5rTV2IzxbEpqzrvUeQNXB66mgA0h40+QEdnE/gV/s4ke83AScrTAxWkErJy1G/sToIHCc2kWwO95DfQ==
-  dependencies:
-    "@ckb-lumos/bi" "0.22.2"
-    "@ckb-lumos/codec" "0.22.2"
-    "@ckb-lumos/toolkit" "0.22.2"
-    "@types/blake2b" "^2.1.0"
-    "@types/lodash.isequal" "^4.5.5"
-    blake2b "^2.1.3"
-    js-xxhash "^1.0.4"
-    lodash.isequal "^4.5.0"
+    isomorphic-ws "^5.0.0"
+    ws "^8.18.0"
 
 "@ckb-lumos/base@0.23.0":
   version "0.23.0"
@@ -58,13 +44,6 @@
     js-xxhash "^1.0.4"
     lodash.isequal "^4.5.0"
 
-"@ckb-lumos/bi@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/bi/-/bi-0.22.2.tgz#af3f3b9afdaf363bc33a168ec4fefc5579029093"
-  integrity sha512-F+dLC/tE+xdtNuGgJxlDqbgX/f8azg1tvIFTR5mu7Vhz08nkFgnA+Z+yC0t/I3fDwwH4p/SlGP/yducrsfVTqw==
-  dependencies:
-    jsbi "^4.1.0"
-
 "@ckb-lumos/bi@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/bi/-/bi-0.23.0.tgz#8439d712b823234b858bffff2636ffc21d98199f"
@@ -85,13 +64,6 @@
     cross-fetch "^3.1.5"
     events "^3.3.0"
 
-"@ckb-lumos/codec@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/codec/-/codec-0.22.2.tgz#038afd6e7b5dcb072b19973ce4d64a110b66dfa3"
-  integrity sha512-P5SyhT2qkJwCwcHF3yMLInE0z3wWHDkqJNbSM2Q9oyu0+9kjMQfexNia3T+atBl2M7ELFzN5WvttojYr6DrCwQ==
-  dependencies:
-    "@ckb-lumos/bi" "0.22.2"
-
 "@ckb-lumos/codec@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/codec/-/codec-0.23.0.tgz#abc8c2da95931859d3347608af4b461d27c5daff"
@@ -115,18 +87,6 @@
     bs58 "^5.0.0"
     immutable "^4.3.0"
 
-"@ckb-lumos/config-manager@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/config-manager/-/config-manager-0.22.2.tgz#03f04dedf413a5bcfefa98e71f11dd4da7779fba"
-  integrity sha512-LJ4p80VrCHh178Ks4wW1rEyHC/JWtZxrFiwHinA9aG6aOm2Z9hbZO0/ZKoS5pLfW0gxP2+ZHA3oMVt0UJhlTKA==
-  dependencies:
-    "@ckb-lumos/base" "0.22.2"
-    "@ckb-lumos/bi" "0.22.2"
-    "@ckb-lumos/codec" "0.22.2"
-    "@ckb-lumos/rpc" "0.22.2"
-    "@types/deep-freeze-strict" "^1.1.0"
-    deep-freeze-strict "^1.1.1"
-
 "@ckb-lumos/config-manager@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/config-manager/-/config-manager-0.23.0.tgz#93a607f7857c9a24fae86735e29f1ca40ee34fb5"
@@ -165,19 +125,6 @@
     bech32 "^2.0.0"
     immutable "^4.3.0"
 
-"@ckb-lumos/helpers@^0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/helpers/-/helpers-0.22.2.tgz#4f7365b8196cac4a690fae754dfa81f52503925b"
-  integrity sha512-6ztXwxsaCuoHjkbclAnfAv9BYl02t+/XxNtl3Et4Sl09xIp9HJ9/vyJouC9JjdQdnfkv1zVGN9tLiKwc6QudaQ==
-  dependencies:
-    "@ckb-lumos/base" "0.22.2"
-    "@ckb-lumos/bi" "0.22.2"
-    "@ckb-lumos/codec" "0.22.2"
-    "@ckb-lumos/config-manager" "0.22.2"
-    "@ckb-lumos/toolkit" "0.22.2"
-    bech32 "^2.0.0"
-    immutable "^4.3.0"
-
 "@ckb-lumos/light-client@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/light-client/-/light-client-0.23.0.tgz#093f28d4183971f1212241271e7b23bad9909689"
@@ -207,16 +154,6 @@
     "@ckb-lumos/toolkit" "0.23.0"
     "@ckb-lumos/transaction-manager" "0.23.0"
 
-"@ckb-lumos/rpc@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/rpc/-/rpc-0.22.2.tgz#5c8030ae2d5bf1fa4b7e118216a74ec7129e09ef"
-  integrity sha512-c2SX0ooDJO3dV2JOTTQtKZs0k+dHst+NHfbYJ6mYWApcZWx2nG4bQR3CQFMIKnd5CKYP/r2JuaonDkcRH9vmzw==
-  dependencies:
-    "@ckb-lumos/base" "0.22.2"
-    "@ckb-lumos/bi" "0.22.2"
-    abort-controller "^3.0.0"
-    cross-fetch "^3.1.5"
-
 "@ckb-lumos/rpc@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/rpc/-/rpc-0.23.0.tgz#e17bd281e62e17957db80459e6be25d0c64bbb59"
@@ -227,13 +164,6 @@
     abort-controller "^3.0.0"
     cross-fetch "^3.1.5"
 
-"@ckb-lumos/toolkit@0.22.2":
-  version "0.22.2"
-  resolved "https://registry.yarnpkg.com/@ckb-lumos/toolkit/-/toolkit-0.22.2.tgz#54087a8947ae4de7cd0e91d955f273accca37d30"
-  integrity sha512-HmKz2dGQeaW2XDqkvjJfLv50VQWGKbthg2RDfIxGsZyjveluRROTyuHP1akypy4pqF8TApGLsXci2MaHCRau+w==
-  dependencies:
-    "@ckb-lumos/bi" "0.22.2"
-
 "@ckb-lumos/toolkit@0.23.0":
   version "0.23.0"
   resolved "https://registry.yarnpkg.com/@ckb-lumos/toolkit/-/toolkit-0.23.0.tgz#4f5d25ea4292bd77dfc191506984ffc2c38078f7"
@@ -940,7 +870,7 @@ blake2b-wasm@^2.4.0:
     b4a "^1.0.1"
     nanoassert "^2.0.0"
 
-blake2b@^2.1.3, blake2b@^2.1.4:
+blake2b@^2.1.3:
   version "2.1.4"
   resolved "https://registry.npmjs.org/blake2b/-/blake2b-2.1.4.tgz"
  integrity sha512-AyBuuJNI64gIvwx13qiICz6H6hpmjvYS5DGkG6jbXMOT8Z3WUJ3V1X0FlhIoT1b/5JtHE3ki+xjtMvu1nn+t9A==
@@ -1827,6 +1757,11 @@ isexe@^2.0.0:
   resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"
   integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
 
+isomorphic-ws@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/isomorphic-ws/-/isomorphic-ws-5.0.0.tgz#e5529148912ecb9b451b46ed44d53dae1ce04bbf"
+  integrity sha512-muId7Zzn9ywDsyXgTIafTry2sV3nySZeUDe6YedVd1Hvuuep5AsIlqK+XefWpYTyJG5e503F2xIuT2lcU6rCSw==
+
 js-xxhash@^1.0.4:
   version "1.0.4"
   resolved "https://registry.npmjs.org/js-xxhash/-/js-xxhash-1.0.4.tgz"
@@ -2718,6 +2653,11 @@ ws@8.17.1:
   resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b"
   integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==
 
+ws@^8.18.0:
+  version "8.18.0"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc"
+  integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==
+
 xtend@^4.0.0:
   version "4.0.2"
   resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz"