diff --git a/src/modules/api/graphql/resolvers.ts b/src/modules/api/graphql/resolvers.ts index 3fb0ee1..ee53179 100644 --- a/src/modules/api/graphql/resolvers.ts +++ b/src/modules/api/graphql/resolvers.ts @@ -3,7 +3,9 @@ import { appContainer } from '../index' import { GraphQLError } from 'graphql'; import * as IPFS from 'kubo-rpc-client' import sift from 'sift' +import DAGCbor from 'ipld-dag-cbor' import { TransactionDbStatus, TransactionDbType } from '../../../types'; +import { verifyTx } from '../../../services/new/utils'; export const DebugResolvers = { peers: async (_, args) => { @@ -399,5 +401,14 @@ export const Resolvers = { tx_id: root.toString() } } + }, + submitTransactionV1: async (_, args) => { + console.log(args.payload) + const buf = Buffer.from(args.payload, 'base64') + + const decodedBuf = DAGCbor.util.deserialize(buf) + console.log(decodedBuf) + console.log(await verifyTx(decodedBuf, appContainer.self.identity)) + await appContainer.self.newService.transactionPool.broadcastRawTx(decodedBuf) } } diff --git a/src/modules/api/graphql/schema.ts b/src/modules/api/graphql/schema.ts index 34946f0..78e22c1 100644 --- a/src/modules/api/graphql/schema.ts +++ b/src/modules/api/graphql/schema.ts @@ -139,6 +139,7 @@ export const schema = ` findLedgerTXs(byContractId: String, byToFrom: String): FindtransactionResult submitTransaction(payload: String): TransactionSubmitResult + submitTransactionV1(payload: String): TransactionSubmitResult localNodeInfo: LocalNodeInfo witnessNodes: [WitnessNode] nextWitnessSlot(local: Boolean): JSON diff --git a/src/scripts/new/execute-contract.ts b/src/scripts/new/execute-contract.ts new file mode 100644 index 0000000..814bb51 --- /dev/null +++ b/src/scripts/new/execute-contract.ts @@ -0,0 +1,38 @@ +import { PrivateKey } from "@hiveio/dhive" +import { HiveClient } from "../../utils" +import { CoreService } from "../../services" + +const contractId = 'vs41q9c3ygy36jwdd06qe4wgmhxm8m3dxvapy69ckdgn6tp6esuusvd7tupu7smvypyg' + +void 
(async () => { + const core = new CoreService({ + prefix: 'manual tx core', + printMetadata: true, + level: 'debug', + mode: 'lite' + }) + + await core.start() + + const broadcast = await HiveClient.broadcast.json({ + + required_auths: [], + required_posting_auths: [process.env.HIVE_ACCOUNT], + id: "vsc.tx", + json: JSON.stringify({ + net_id: core.config.get('network.id'), + __v: '0.1', + __t: 'native', + data: { + op: 'call_contract', + action: 'testJSON', + contract_id: contractId, + payload: { + testData: "hello-world" + } + } + }) + }, PrivateKey.fromString(process.env.HIVE_ACCOUNT_POSTING)) + console.log(broadcast) + process.exit() +})() \ No newline at end of file diff --git a/src/services/chainBridge.ts b/src/services/chainBridge.ts index d4be83d..a57e300 100644 --- a/src/services/chainBridge.ts +++ b/src/services/chainBridge.ts @@ -886,7 +886,9 @@ export class ChainBridge { }) } } catch(ex) { - console.log(ex) + if(!ex.message.includes('Unexpected end of JSON input')) { + console.log(ex) + } } } if (op_id === "custom_json") { diff --git a/src/services/new/chainBridgeV2.ts b/src/services/new/chainBridgeV2.ts index 8aec658..5f890e5 100644 --- a/src/services/new/chainBridgeV2.ts +++ b/src/services/new/chainBridgeV2.ts @@ -3,9 +3,11 @@ import { Collection, Db } from "mongodb"; import DeepEqual from 'deep-equal' import PQueue from "p-queue"; import { NewCoreService } from "."; -import { HiveAccountAuthority } from "./types"; +import { BlockHeader, HiveAccountAuthority } from "./types"; import networks from "../networks"; import { createMongoDBClient, fastStream, sleep } from "../../utils"; +import { BlsCircuit } from './utils/crypto/bls-did'; +import BitSet from 'bitset'; @@ -32,6 +34,7 @@ export class ChainBridgeV2 { witnessHistoryDb: Collection consensusDb: Collection consensusDataDb: Collection + blockHeaders: Collection pinQueue: PQueue; self: NewCoreService; @@ -57,14 +60,16 @@ export class ChainBridgeV2 { if (op === "account_update") { transactions.push({ 
operations: tx.operations, - transaction_id: tx.transaction_id + transaction_id: tx.transaction_id, + index: block.transactions.indexOf(tx) }) } else if (op === 'custom_json') { try { if (opPayload.id.startsWith('vsc.')) { transactions.push({ operations: tx.operations, - transaction_id: tx.transaction_id + transaction_id: tx.transaction_id, + index: block.transactions.indexOf(tx) }) } } catch { @@ -321,14 +326,51 @@ export class ChainBridgeV2 { // }, { // upsert: true // }) - if(json.block_hash) { - this.pinQueue.add(async() => { - // console.log(json.block_hash) - await this.self.ipfs.pin.add(IPFS.CID.parse(json.block_hash), { - recursive: false + + if(opPayload.id === "vsc.propose_block" && json.net_id === this.self.config.get('network.id')) { + //Initial checks passed + const slotHeight = Number(blk.key); + const witnessSet = (await this.getWitnessesAtBlock(slotHeight)).map(e => { + return e.keys.find(key => { + console.log(key) + return key.t === "consensus" }) - await this.self.ipfs.pin.rm(IPFS.CID.parse(json.block_hash)) + }).filter(e => !!e).map(e => e.key) + const witnessSchedule = await this.self.witness.roundCheck(slotHeight) + + //Check witnessSlot validity prior to validation + const witnessSlot = witnessSchedule.find(e => { + return e.bn === slotHeight && e.account === opPayload.required_auths[0] }) + + if(witnessSlot) { + const signedBlock = { + ...json.signed_block, + block: IPFS.CID.parse(json.signed_block.block) + } + + const circuit = BlsCircuit.deserialize(signedBlock, witnessSet) + + let pubKeys = [] + for(let pub of circuit.aggPubKeys) { + pubKeys.push(pub) + } + + if(circuit.verifyPubkeys(pubKeys)) { + + } + + + this.pinQueue.add(async() => { + // console.log(json.block_hash) + await this.self.ipfs.pin.add(IPFS.CID.parse(json.block_hash), { + recursive: false + }) + await this.self.ipfs.pin.rm(IPFS.CID.parse(json.block_hash)) + }) + } + + } } } catch (ex) { @@ -497,7 +539,7 @@ export class ChainBridgeV2 { } catch { } - const startBlock = 
(await this.streamState.findOne({ id: "last_hb" }) || {} as any).val || networks['testnet/d12e6110-9c8c-4498-88f8-67ddf90d451c'].genesisDay + const startBlock = (await this.streamState.findOne({ id: "last_hb" }) || {} as any).val || networks[this.self.config.get('network.id')].genesisDay console.log('start block is', startBlock) this.stream = await fastStream.create({ startBlock diff --git a/src/services/new/contractEngineV2.ts b/src/services/new/contractEngineV2.ts new file mode 100644 index 0000000..9017332 --- /dev/null +++ b/src/services/new/contractEngineV2.ts @@ -0,0 +1,150 @@ +import { Collection } from "mongodb"; +import { NewCoreService } from "."; +import { AddrRecord } from "./types"; +import { encodePayload } from 'dag-jose-utils' +import { bech32 } from "bech32"; +import { VmContainer } from "./vm/utils"; + + +enum ContractErrors { + success = 0, + invalid_action = -1, + runtime_error = -2 +} + +export class ContractEngineV2 { + self: NewCoreService; + addrsDb: Collection; + contractDb: Collection<{ + id: string + code: string + name: string + description:string + creator: string + }>; + + constructor(self: NewCoreService) { + this.self = self; + + + this.blockTick = this.blockTick.bind(this) + } + + async blockTick([opPayload, tx]) { + console.log('opPayload, tx', opPayload, tx) + for(let index in tx.operations) { + const [opName, op] = tx.operations[index] + const json = JSON.parse(op.json) + + console.log('OPPAYLOAD DATA INSERT', op, opName) + if(op.id === "vsc.create_contract") { + const contractIdHash = (await encodePayload({ + ref_id: tx.transaction_id, + index + })).cid + + const bech32Addr = bech32.encode('vs4', bech32.toWords(contractIdHash.bytes)); + + console.log('smart contract addr', bech32Addr) + await this.contractDb.findOneAndUpdate({ + id: bech32Addr + }, { + $set: { + code: json.code, + name: json.name, + description: json.description, + creator: opPayload.required_auths[0], + state_merkle: (await this.self.ipfs.object.new({ template: 
'unixfs-dir' })).toString(), + ref_id: tx.transaction_id + } + }, { + upsert: true + }) + } + } + } + + async init() { + this.addrsDb = this.self.db.collection('addrs') + this.contractDb = this.self.db.collection('contracts') + this.self.chainBridge.registerTickHandle('contract-engine', this.blockTick, { + type: 'tx' + }) + } + + + async createContractOutput(args: { + txs: any + contract_id: string + }) { + const contractInfo = await this.contractDb.findOne({ + id: args.contract_id + }) + if(!contractInfo) { + throw new Error('Contract not registered with node or does not exist') + } + + + if(args.txs.length === 0) { + return null; + } + + const vm = new VmContainer({ + state_merkle: (contractInfo as any).state_merkle, + cid: contractInfo.code, + contract_id: args.contract_id + }) + + await vm.init() + await vm.onReady() + + const txResults = [] + + for(let tx of args.txs) { + const result = await vm.call({ + action: tx.data.action, + payload: JSON.stringify(tx.data.payload) + }) + + + let ret + let code + let msg + if(result.ret) { + const parsedResult: { + msg?: string + code: number + ret?: string + } = JSON.parse((result as any).ret); + ret = parsedResult.ret, + code = parsedResult.code + msg = parsedResult.msg + } + console.log('parsed result', result) + txResults.push({ + ret: ret, + code: code || result.errorType, + logs: (result as any).logs, + //Dont store gas usage if 0 + ...(result.IOGas > 0 ? 
{gas: result.IOGas} : {}) + }) + } + const state_merkle = await vm.finishAndCleanup() + console.log('finishing and cleaning up') + + const returnObj = { + input_map: args.txs.map(e => e.id), + state_merkle, + results: txResults + } + + console.log('returnObj', returnObj) + + return returnObj + } + + async start() { + + } + +} \ No newline at end of file diff --git a/src/services/new/index.ts b/src/services/new/index.ts index 9ec3f70..1a65fe3 100644 --- a/src/services/new/index.ts +++ b/src/services/new/index.ts @@ -3,7 +3,7 @@ import { Ed25519Provider } from "key-did-provider-ed25519"; import { DID } from "dids"; import KeyResolver from 'key-did-resolver' import winston from 'winston'; -import { Db } from 'mongodb'; +import { Collection, Db } from 'mongodb'; import { Config } from "../nodeConfig"; import { ChainBridgeV2 } from "./chainBridgeV2"; import { NodeIdentity } from "./nodeIdentity"; @@ -14,6 +14,8 @@ import { getLogger } from '../../logger'; import { createMongoDBClient } from '../../utils'; import { TransactionPoolV2 } from './transactionPool'; import { P2PService } from './p2pService'; +import { AddrRecord } from './types'; +import { ContractEngineV2 } from './contractEngineV2'; export class NewCoreService { config: Config; @@ -28,6 +30,8 @@ export class NewCoreService { transactionPool: TransactionPoolV2; p2pService: P2PService identity: DID; + addrsDb: Collection; + contractEngine: ContractEngineV2; constructor() { this.config = new Config(Config.getConfigDir()) @@ -42,6 +46,7 @@ export class NewCoreService { this.witness = new WitnessServiceV2(this) this.p2pService = new P2PService(this) this.transactionPool = new TransactionPoolV2(this) + this.contractEngine = new ContractEngineV2(this) } async init(oldService) { @@ -61,8 +66,9 @@ export class NewCoreService { await this.p2pService.start() await this.witness.init(); await this.transactionPool.init() - - + await this.contractEngine.init() + + this.addrsDb = this.db.collection('addrs') } async start() 
{ diff --git a/src/services/new/p2pService.ts index e502907..b52a247 100644 --- a/src/services/new/p2pService.ts +++ b/src/services/new/p2pService.ts @@ -27,9 +27,9 @@ import { createJwsMultsign, verifyMultiJWS } from '../../utils'; enum PUBSUB_CHANNELS { - multicast = '/vsc/multicast', - routesAnnounce = '/vsc/multicast', - memoryPool = '/vsc/memorypool' + multicast = '/vsc/multicast/v1', + routesAnnounce = '/vsc/multicast/v1', + memoryPool = '/vsc/memorypool/v1' } enum MESSAGE_TYPES { diff --git a/src/services/new/transactionPool.ts index c74c9c3..249fba8 100644 --- a/src/services/new/transactionPool.ts +++ b/src/services/new/transactionPool.ts @@ -2,8 +2,27 @@ import { Collection } from "mongodb"; import { NewCoreService } from "."; import { MessageHandleOpts } from "./p2pService"; import { SignatureType, TransactionDbRecordV2, TransactionDbStatus, TransactionDbType } from "./types"; -import { HiveClient } from "../../utils"; +import { HiveClient, unwrapDagJws } from "../../utils"; import { PrivateKey } from "@hiveio/dhive"; +import { encodePayload } from 'dag-jose-utils' +import { CID } from "kubo-rpc-client/dist/src"; +import { verifyTx } from "./utils"; + +interface TxAnnounceMsg { + //ref_tx = only CID of TX useful for TXs under 2kb + //direct_tx entire TX data + type: "ref_tx" | 'direct_tx' + payload: any | CID +} + +const CONSTANTS = { + //Maximum size of directly broadcasting tx over pubsub. + //Instead of only CID + //For large TXs CID ref should be used instead to prevent flooding network + //TXs over pubsub direct are faster and incur less receive latency. + //Delivery is guaranteed. 
+ max_broadcast_size: 8_000 +} export class TransactionPoolV2 { self: NewCoreService; @@ -16,6 +35,93 @@ async onTxAnnounce({message}: MessageHandleOpts) { console.log(message) + let payload; + let txId; + if(message.type === 'ref_tx') { + txId = message.id + payload = (await this.self.ipfs.dag.get(txId)).value + } else if(message.type === 'direct_tx') { + payload = Buffer.from(message.data, 'base64'); + txId = (await this.self.ipfs.dag.put(payload, { + onlyHash: true + })).toString() + } else { + return; + } + + const alreadyExistingTx = await this.txDb.findOne({ + id: txId.toString() + }) + let auths = [] + if(!alreadyExistingTx) { + const {content, auths: authsOut} = await unwrapDagJws(payload, this.self.ipfs, this.self.identity) + + console.log(content) + + await this.txDb.findOneAndUpdate({ + id: txId + }, { + + }, { + upsert: true + }) + } + } + + async broadcastRawTx(txData) { + //Intercept final size data + const {linkedBlock, cid} = await encodePayload(txData) + //Added to IPFS irrespective of broadcast method. 
+ await this.self.ipfs.block.put(linkedBlock) + + console.log('txData', txData) + + const validData = await verifyTx(txData, this.self.identity) + console.log('validDid', validData, txData) + + const txRecord = await this.txDb.findOne({ + id: cid.toString() + }) + if(!txRecord) { + await this.txDb.findOneAndUpdate({ + id: cid.toString() + }, { + $set: { + status: TransactionDbStatus.unconfirmed, + required_auths: [], + headers: { + lock_block: null + }, + data: txData.tx.payload, + result: null, + first_seen: new Date(), + accessible: true, + local: true, + src: 'vsc' + } + }, { + upsert: true + }) + } + + if(linkedBlock.length > CONSTANTS.max_broadcast_size) { + //Over broadcast limit + await this.self.p2pService.memoryPoolChannel.call('announce_tx', { + payload: { + type: 'ref_tx', + id: cid.toString() + }, + mode: 'basic' + }) + } else { + await this.self.p2pService.memoryPoolChannel.call('announce_tx', { + payload: { + type: 'direct_tx', + data: Buffer.from(linkedBlock).toString('base64') + }, + mode: 'basic' + }) + } } /** @@ -84,7 +190,7 @@ export class TransactionPoolV2 { async tickHandle(inData) { const [tx, fullTx] = inData // console.log('picked up TX', tx, fullTx) - if(tx.id === 'vsc.announce_tx') { + if(tx.id === 'vsc.announce_tx' || tx.id === 'vsc.tx') { const json = JSON.parse(tx.json) if(json.net_id !== this.self.config.get('network.id')) { @@ -111,12 +217,14 @@ export class TransactionPoolV2 { required_auths, headers: { anchored_height: fullTx.block_height, - lock_block: fullTx.block_height + 120 + lock_block: fullTx.block_height + 120, + index: fullTx.index, + contract_id: json.data.contract_id }, data: { - op: json.op, - payload: json.payload, - action: json.action, + op: json.data.op, + payload: json.data.payload, + action: json.data.action, }, result: null, local: false, diff --git a/src/services/new/types.ts b/src/services/new/types.ts index 3b0ea20..872b37c 100644 --- a/src/services/new/types.ts +++ b/src/services/new/types.ts @@ -193,10 
+193,31 @@ export interface TransactionContainerV2 { headers: { payer?: string lock_block?: string + required_auths: Array<{ + type: 'consensus' | 'active' | 'posting', + value: string + }> } tx: { op: string payload: any // cid of ContractInput, ContractOutput or ContractUpdate and so on.. type: TransactionDbType } +} + +export interface AddrRecord { + id: string + headers: any + controllers?:Array + type: 'vs1' | 'vs2' | 'vs3' | 'vs4' +} + + +export interface BlockHeader { + hive_ref_block: number + hive_ref_tx: string + hive_ref_date: Date + height: number + proposer: string + id: string } \ No newline at end of file diff --git a/src/services/new/utils/crypto/bls-did.ts b/src/services/new/utils/crypto/bls-did.ts index ff3342a..ceee548 100644 --- a/src/services/new/utils/crypto/bls-did.ts +++ b/src/services/new/utils/crypto/bls-did.ts @@ -224,6 +224,25 @@ export class BlsCircuit { circuitHex: bitset.toString(16), } } + + static deserialize(signedPayload, keyset: Array) { + const signatures = signedPayload.signatures + delete signedPayload.signatures + + const bs = BitSet.fromHexString(signatures[0].circuitHex) + + const pubKeys = new Map(); + for(let keyIdx in keyset) { + if(bs.get(Number(keyIdx)) === 1) { + pubKeys.set(keyset[keyIdx], true) + } + } + + let circuit = new BlsCircuit(signedPayload); + circuit.aggPubKeys = pubKeys + + return circuit; + } } void (async () => { diff --git a/src/services/new/vm/compiler.ts b/src/services/new/vm/compiler.ts new file mode 100644 index 0000000..fa4f33e --- /dev/null +++ b/src/services/new/vm/compiler.ts @@ -0,0 +1,54 @@ +import fs from 'fs/promises' +import asc from "assemblyscript/dist/asc"; + +interface CompileResult { + binary: Uint8Array | null + err: string | null +} + +export async function compileAS(args: { + scriptPath: string +}): Promise { + const {scriptPath} = args; + + var stdout = asc.createMemoryStream(); + const compileResult = await asc.main([ + 'input.ts', + // "-b", + "-o", + "--optimize", + "--Osize", 
+ "--exportRuntime", + '--runPasses', + "asyncify" + ], { + stdout: stdout, + readFile: async (filename: string, baseDir: string) => { + // console.log(filename, baseDir) + try { + if(filename === 'input.ts') { + return (await fs.readFile(scriptPath)).toString() + } + return (await fs.readFile(filename)).toString() + } catch { + return null + } + } + }); + + if(compileResult.error) { + console.log(compileResult.error) + console.log(compileResult.stderr.toString()) + return { + err: compileResult.stderr.toString(), + binary: null + } + } + + const binary = stdout.toBuffer() + + return { + binary, + err: null + } +} \ No newline at end of file diff --git a/src/services/new/vm/script.tsa b/src/services/new/vm/script.tsa index 5fb23f5..31a40fa 100644 --- a/src/services/new/vm/script.tsa +++ b/src/services/new/vm/script.tsa @@ -1,20 +1,37 @@ //@ts-nocheck import { JSON } from 'assemblyscript-json/assembly' + +import {db, console, TxOutput} from '@vsc.eco/sdk/assembly' // import {JSON} from 'json-as' // import { sdk } from '@vsc.eco/sdk' -declare function consoleLog(arg0: String): void -declare function logNumber(arg0: i32): void -declare function logBool(arg0: bool): void -declare function logUint8Array(arg0: Uint8Array): void -declare function api(): string -declare namespace db { - function setObject(key: String, val: String): i32 - function getObject(key: String): String +declare namespace System { + function getEnv(str: string): string + function call(str: string): string +} + +class InvalidInputError extends Error { + constructor(msg: string) { + super(msg); + + // Set the prototype explicitly. 
+ Object.setPrototypeOf(this, InvalidInputError.prototype); + } +} + + +function testError(msg: string): void { + const json = new JSON.Obj() + json.set('msg', msg) + json.set('__t', 'invalid_input') + const error = new Error(json.stringify()) + throw error } +// function assertEqual + class ObjType { callCount: i32 } @@ -22,52 +39,127 @@ const obj:ObjType = { callCount: 0 } -@serializable -class Paramters { - to: string - from: string -} +@external('env', 'seed') +declare function seed(): i64; + + export function testJSON(payload: string):string { - // consoleLog(payload) - // const jsonPayload:Paramters = JSON.parse(payload) - // consoleLog(jsonPayload.to) - // if(jsonPayload.isObj === true) { - - // } let jsonObj: JSON.Obj = (JSON.parse(payload)); - consoleLog(jsonObj.stringify()) - consoleLog(jsonObj.keys[0]) + console.log(jsonObj.stringify()) + console.log(jsonObj.keys[0]) jsonObj.keys.forEach((e) => { - consoleLog(e) + console.log(e) }) + + console.log(`to value: ${jsonObj.getString('to')!} ${jsonObj.getString('to')! 
== "test1"}`) + assert(jsonObj.getString('to')!, "test2") + console.log(`assert code: ${assert(jsonObj.getString('to')!._str, "test2")}`) + if(jsonObj.getString('to')!.valueOf() === "test1") { + console.log('I should throw error') + testError('I should break here') + } + // state.setObject('key-1', jsonObj.stringify()) + // const val = state.getObject('key-2') + + // console.log(`test val` + val) - obj.callCount = obj.callCount + 1 + return `Count: ${obj.callCount}` } -export function test(call: Uint8Array): i32 { - // logNumber(call) - logUint8Array(call) - if (call === 'set') { - db.setObject('name', 'space') - consoleLog('testing') +class T_TOKEN_CONFIG { + + decimals: i64 + mint_authority: string +} + +const TOKEN_CONFIG: T_TOKEN_CONFIG = { + decimals: 3, + mint_authority: '' +} + +class transferPayload { + to: string + from: string + amount: i64 +} + +export function transfer(payload: string): string { + let jsonObj: JSON.Obj = (JSON.parse(payload)); + const transferPayload: transferPayload = { + to: jsonObj.getString('to')!._str, + from: '', + amount: 33 } - return 4 + + + return new TxOutput().exitCode(0).done() } -export function testString(a: string): void { - consoleLog(a + 'world') - api() - .split('.') - .map((e: string): string => { - consoleLog(e) - consoleLog((parseInt(e) * 5).toString()) - logBool(parseInt(e) * 5 === 810) - return (parseInt(e) * 5).toString() - }) - // let bytes = new Uint8Array(this.len); - // memory.copy(bytes.dataStart, a) - // return bytes + + +class MintPayload { + to: string + amount: i64 +} + +class MintVal { + val: i64 +} + +export function mint(payload: string): string { + let jsonObj: JSON.Obj = (JSON.parse(payload)); + const mintPayload:MintPayload = { + amount: jsonObj.getInteger('amount')!.valueOf(), + to: jsonObj.getString('to')!.valueOf() + } + db.setObject(mintPayload.to, JSON.from({ + val: mintPayload.amount + }).stringify()) + + return new TxOutput().exitCode(0).msg("MINT_SUCCESS").done() +} + +class BurnPayload { 
+ address: string + amount: i64 } + +export function burn(payload: string): string { + let jsonObj: JSON.Obj = (JSON.parse(payload)); + const out = new TxOutput(); + const amount = jsonObj.getInteger('amount'); + + + if(!amount) { + return out.exitCode(-1).msg('Invalid Input').done() + } + + if(!amount.isInteger) { + return out.exitCode(-1).msg('Invalid data').done() + } + + const burnPayload:BurnPayload = { + amount: amount._num, + address: jsonObj.getString('address')!._str + } + const val = JSON.parse(db.getObject(`balances/${burnPayload.address}`)) + + const balance = val.getInteger('val') + + if(!balance) { + return out.exitCode(-1).msg('Invalid data').done() + } + + if(!balance.isInteger) { + return out.exitCode(-1).msg('Invalid data').done() + } + + if(balance._num < burnPayload.amount) { + return out.exitCode(-1).msg('In sufficient balance').done() + } + + return new TxOutput().done() +} \ No newline at end of file diff --git a/src/services/new/vm/utils.ts index 2601d6e..5194d87 100644 --- a/src/services/new/vm/utils.ts +++ b/src/services/new/vm/utils.ts @@ -206,6 +206,29 @@ enum CallResultError { TIMEOUT = 'timeout' } +export enum ContractErrorType { + //If transaction attempts to call invalid runtime function. + INVALID_ACTION = -1, + //Input data does not meet validation requirements + INVALID_INPUT = -2, + //Any arbitrary exception occurred within the smart contract + RUNTIME_EXCEPTION = -3, + //Code error if WASM imports or attempts to use unavailable bindings. + RUNTIME_SETUP = -4, + //Unknown runtime error occurs. + RUNTIME_UNKNOWN = -5, + //If overall VM becomes frozen a timeout is issued. + TIMEOUT = -6, + //If contract returns non-JSON or another non-accepted format + INVALID_RETURN = -7, + + + //Reserved for future use. + //If contract overuses gas or TX does not have enough gas. 
+ GAS_EXHAUSTED = -20 + +} + interface VmCallResult { code: number result: string @@ -255,7 +278,15 @@ export class VmContainer { }) } }, 1) - const executeStop = await new Promise((resolve, reject) => { + const executeStop = await new Promise<{ + type: string + ret: string | null + logs: Array + error: any + errorType: ContractErrorType + reqId: string + IOGas: number + } | null>((resolve, reject) => { this.events.once('execute-stop', (result0) => { resolve(result0) clearInterval(timeoutPid) @@ -271,18 +302,32 @@ export class VmContainer { } async finish() { - this.child.send({ - type: 'finish' - }) - const result = await new Promise((resolve, reject) => { - this.events.once('result', (result0) => { - resolve(result0) + if(this.child.connected) { + this.child.send({ + type: 'finish' }) - this.events.once('timeout', (resultErr) => { - resolve(resultErr) + const result = await new Promise<{ + stateMerkle: string + }>((resolve, reject) => { + this.events.once('finish-result', (result0) => { + console.log('finish-result', this.child.connected) + resolve(result0) + }) + this.events.once('timeout', (resultErr) => { + resolve(resultErr) + }) }) - }) - return result; + return result; + } else { + return null; + } + } + + async finishAndCleanup() { + const val = await this.finish() + this.cleanup() + + return val.stateMerkle; } async init() { @@ -296,7 +341,7 @@ export class VmContainer { cid: this.opts.cid, contract_id: this.opts.contract_id } as any, - silent: true, + // silent: true, detached: false }); this.child = child; @@ -305,8 +350,8 @@ export class VmContainer { this.ready = true this.events.emit('ready') } - if(message.type === 'result') { - this.events.emit('result', message) + if(message.type === 'finish-result') { + this.events.emit('finish-result', message) } if(message.type === 'execute-stop') { this.events.emit('execute-stop', message) diff --git a/src/services/new/vm/vm-runner.test.ts b/src/services/new/vm/vm-runner.test.ts index 085fe60..ab221fe 100644 
--- a/src/services/new/vm/vm-runner.test.ts +++ b/src/services/new/vm/vm-runner.test.ts @@ -7,6 +7,7 @@ import * as IPFS from 'kubo-rpc-client' import {fork} from 'child_process' import Crypto from 'crypto' import { VmContainer } from './utils'; +import { sleep } from '../../../utils'; const ipfs = IPFS.create({url: 'http://127.0.0.1:5001'}) @@ -51,35 +52,39 @@ void (async () => { return; } - const binary = stdout.toBuffer() - - - const cid = await ipfs.block.put(binary) - console.log(cid) - - const vmContainer = new VmContainer({ - contract_id: 'vs41q9c3yg8estwk8q9yjrsu2hk6chgk5aelwlf8uj3amqfgywge8w3cul438q9tx556', - cid: cid.toString() - }) - - await vmContainer.init() - await vmContainer.onReady() - - - for(let x = 0; x < 1; x++) { - const result = await vmContainer.call({ - action: 'testJSON', - payload: JSON.stringify({ - to: "test1", - from: 'test2', - - }) + try { + const binary = stdout.toBuffer() + + + const cid = await ipfs.block.put(binary) + console.log(cid) + + const vmContainer = new VmContainer({ + contract_id: 'vs41q9c3yg8estwk8q9yjrsu2hk6chgk5aelwlf8uj3amqfgywge8w3cul438q9tx556', + cid: cid.toString() }) - console.log(result) - } - + + await vmContainer.init() + await vmContainer.onReady() + + + for(let x = 0; x < 1; x++) { + const result = await vmContainer.call({ + action: 'testJSON', + payload: JSON.stringify({ + to: "test1", + from: 'test2', + }) + }) + console.log(result) + } + + await vmContainer.finish() + } catch(ex) { + console.log(ex) + } - await vmContainer.finish() + process.exit(0) let reqId; let startTime diff --git a/src/services/new/vm/vm-runner.ts b/src/services/new/vm/vm-runner.ts index 8717dad..73ce38d 100644 --- a/src/services/new/vm/vm-runner.ts +++ b/src/services/new/vm/vm-runner.ts @@ -1,7 +1,7 @@ import * as IPFS from 'kubo-rpc-client' import { addLink } from '../../../ipfs-utils/add-link' import { removeLink } from '../../../ipfs-utils/rm-link' -import { instantiate } from './utils' +import { ContractErrorType, instantiate 
} from './utils' const CID = IPFS.CID const ipfs = IPFS.create({ url: 'http://127.0.0.1:5001' }) @@ -219,6 +219,7 @@ export class WasmRunner { } void (async () => { + console.log('init') const contract_id = process.env.contract_id @@ -240,87 +241,147 @@ void (async () => { }) let IOGas = 0; - + let error; const logs = [] - const insta = await instantiate(module, { - env: { - memory, - }, - input: { - 'console.log': (keyPtr) => { - const logMsg = insta.exports.__getString(keyPtr) - logs.push(logMsg) - IOGas = IOGas + logMsg.length - }, - 'console.logNumber': (val) => { - logs.push(val) - }, - 'console.logBool': (val) => { - logs.push(Boolean(val)) - }, - 'state.setObject': (keyPtr, valPtr) => { - const key = insta.exports.__getString(keyPtr) - const val = insta.exports.__getString(valPtr) - - IOGas = IOGas + key.length + val.length - - - wasmRunner.stateCache.set(key, val) - return 1 - }, - 'state.getObject': async (keyPtr) => { - const key = insta.exports.__getString(keyPtr) - let value; - if(wasmRunner.stateCache.has(key)) { - value = wasmRunner.stateCache.get(key) - } else { - value = await stateAccess.client.pull(key) - wasmRunner.stateCache.set(key, value) - } - - const val = JSON.stringify(value) - - IOGas = IOGas + val.length; // Total serialized length of gas - - - return val - }, - }, - } as any) - + + try { + const insta = await instantiate(module, { + env: { + memory, + abort(msg, file, line, colm) { + console.log(insta.exports.__getString(msg), 'LN1', insta.exports.__getString(file), line, colm) + console.log('error happened'); + error = { + msg: insta.exports.__getString(msg), + file: insta.exports.__getString(file), + line, + colm + } + }, + //Prevent AS loader from allowing any non-deterministic data in. 
+ seed: () => { + return 0; + }, + }, + //Same here + Date: {}, + Math: {}, + sdk: { + 'console.log': (keyPtr) => { + const logMsg = (insta as any).exports.__getString(keyPtr) + logs.push(logMsg) + IOGas = IOGas + logMsg.length + }, + 'console.logNumber': (val) => { + logs.push(val) + }, + 'console.logBool': (val) => { + logs.push(Boolean(val)) + }, + 'db.setObject': (keyPtr, valPtr) => { + const key = (insta as any).exports.__getString(keyPtr) + const val = (insta as any).exports.__getString(valPtr) + + IOGas = IOGas + key.length + val.length + + + wasmRunner.stateCache.set(key, val) + return 1 + }, + 'db.getObject': async (keyPtr) => { + const key = (insta as any).exports.__getString(keyPtr) + let value; + if(wasmRunner.stateCache.has(key)) { + value = wasmRunner.stateCache.get(key) + } else { + value = await stateAccess.client.pull(key) + wasmRunner.stateCache.set(key, value) + } + + const val = JSON.stringify(value) + + IOGas = IOGas + val.length; // Total serialized length of gas + + + return val + }, + }, + } as any) + + if(!insta.instance.exports[message.action]) { + process.send({ + type: 'execute-stop', + ret: null, + errorType: ContractErrorType.INVALID_ACTION, + logs, + reqId: message.reqId, + IOGas: 0, + }) + return; + } + let ptr; + try { + ptr = await (insta.instance.exports[message.action] as any)( + (insta as any).exports.__newString(message.payload), + ) - console.log('message from parent:', message, new Date(), new Date().getTime()) - const ptr = await (insta.instance.exports[message.action] as any)( - insta.exports.__newString(message.payload), - ) - const str = insta.exports.__getString(ptr) - process.send({ - type: 'execute-stop', - result: str, - logs, - reqId: message.reqId, - IOGas, - }) - - console.log('message to parent:', message, new Date(), new Date().getTime()) - console.log('logs', logs, 'result', str) - // process.send({ - // type: 'result', - // result: str, - // reqId: message.reqId, - // }) + const str = (insta as 
any).exports.__getString(ptr) + process.send({ + type: 'execute-stop', + ret: str, + logs, + reqId: message.reqId, + IOGas, + }) + } catch (ex) { + if(ex.name === "RuntimeError" && ex.message === "unreachable") { + console.log(`RuntimeError: unreachable ${JSON.stringify(error)}`, error) + process.send({ + type: 'execute-stop', + ret: null, + error: error, + errorType: ContractErrorType.RUNTIME_EXCEPTION, + logs, + reqId: message.reqId, + IOGas, + }); + } else { + process.send({ + type: 'execute-stop', + ret: null, + errorType: ContractErrorType.RUNTIME_UNKNOWN, + logs, + reqId: message.reqId, + IOGas, + }); + } + } + + } catch (ex) { + console.log(ex) + process.send({ + type: 'execute-stop', + ret: null, + logs, + errorType: ContractErrorType.RUNTIME_SETUP, + reqId: message.reqId, + IOGas, + }) + } } + + //Finalization when VM is done if(message.type === "finish") { for(let [key, value] of wasmRunner.stateCache.entries()) { await stateAccess.client.update(key, JSON.parse(value)) } - console.log(stateAccess.finish()) + console.log('sending result') process.send({ - type: 'result', - result: stateAccess.finish().stateMerkle.toString(), + type: 'finish-result', + stateMerkle: stateAccess.finish().stateMerkle.toString(), }) } }) diff --git a/src/services/new/vm/wasm.ts b/src/services/new/vm/wasm.ts deleted file mode 100644 index faae594..0000000 --- a/src/services/new/vm/wasm.ts +++ /dev/null @@ -1,123 +0,0 @@ -import asc from "assemblyscript/dist/asc"; -import loader from '@assemblyscript/loader' -import Axios from 'axios' -import fs from 'fs/promises' -import base64 from 'base-64' -import { sleep } from "../../../utils"; -import { instantiate } from "./utils"; - - - - -void (async () => { - // loader.instantiate - // const data = await asc.compileString({ - // ['input.ts']: (await fs.readFile("./src/services/new/vm/script.ts")).toString(), - // ['~lib/assemblyscript-json/assembly.ts']: (await 
fs.readFile('node_modules/assemblyscript-json/assembly/index.ts')).toString(), - // ['~lib/assemblyscript-json/decoder.ts']: (await fs.readFile('node_modules/assemblyscript-json/assembly/decoder.ts')).toString(), - // ['~lib/assemblyscript-json/encoder.ts']: (await fs.readFile('node_modules/assemblyscript-json/assembly/encoder.ts')).toString(), - // ['~lib/assemblyscript-json/JSON.ts']: (await fs.readFile('node_modules/assemblyscript-json/assembly/JSON.ts')).toString(), - // ['~lib/assemblyscript-json/util.ts']: (await fs.readFile('node_modules/assemblyscript-json/assembly/util/index.ts')).toString(), - // ['~lib/assemblyscript-json/util/index.ts']: (await fs.readFile('node_modules/assemblyscript-json/assembly/util/index.ts')).toString(), - // }, { - // // textFile - // // runtime: "esm" - // bindings: ['esm'], - // runPasses: ['asyncify'], - // lib: ['assemblyscript-json'], - // path: ['node_modules'] - // }) - - var stdout = asc.createMemoryStream(); - const compileResult = await asc.main([ - 'input.ts', - // "-b", - "-o", - '--runPasses', - "asyncify" - ], { - stdout: stdout, - readFile: async (filename: string, baseDir: string) => { - console.log(filename, baseDir) - try { - if(filename === 'input.ts') { - return (await fs.readFile("./src/services/new/vm/script.ts")).toString() - } - return (await fs.readFile(filename)).toString() - } catch { - return null - } - } - }); - - // console.log(compileResult) - // console.log(compileResult.stderr.toString()) - const binary = stdout.toBuffer() - console.log(stdout.toString()) - console.log(compileResult) - // await fs.writeFile('debug.wat', stdout.toBuffer()) - console.log(compileResult.stderr.toString()) - // // console.log(data.text) - // await fs.writeFile('debug2.wat', Buffer.from(data.text)) - console.log('total Size', binary.length) - if(compileResult.error) { - return; - } - // await sleep(15_000) - if(binary) { - let dataStore = new Map() - const insta = await instantiate(binary, { - input: { - consoleLog: (d) 
=> { - - console.log('d', insta.exports.__getString(d)) - // return 44 - }, - logNumber: (d) => { - console.log('logNumber', d) - }, - logBool: (d) => { - console.log('logBool', d) - }, - base64: () => { - return base64 - }, - logUint8Array: (d) => { - console.log('logUint8Array', d) - }, - "db.setObject": (keyPtr, valPtr) => { - const key = insta.exports.__getString(keyPtr) - const val = insta.exports.__getString(valPtr) - - console.log('setObject', key, val) - dataStore.set(key, val) - return 1; - }, - "db.getObject": (key) => { - console.log('getObject', key) - return "hello" - }, - api: async () => { - const { data } = await Axios.get('http://ipinfo.io/json') - console.log(data) - - return insta.exports.__newString(data.ip) - } - } - } as any) - const promise = (insta.instance.exports as any).testJSON() - - console.log('past promise') - void (async () => { - for ( ; ;) { - console.log('JS is still running') - await sleep(1000) - } - })() - console.log(insta.exports.__getString((await promise))) - - // console.log(await (insta.instance.exports as any).testString(insta.exports.__newString("hellos"))) - // console.log( (insta.instance.exports as any).test('hell')) - // console.log(dataStore) - } -})() diff --git a/src/services/new/witness/index.ts b/src/services/new/witness/index.ts index 12043f9..16dcbe5 100644 --- a/src/services/new/witness/index.ts +++ b/src/services/new/witness/index.ts @@ -6,18 +6,50 @@ import { NewCoreService } from ".."; import { HiveClient } from "../../../utils"; import { CID } from 'kubo-rpc-client'; import { BlsCircuit, BlsDID } from '../utils/crypto/bls-did'; +import { Collection } from 'mongodb'; +import { BlockHeader } from '../types'; +import { PrivateKey } from '@hiveio/dhive'; export class BlockContainer { - constructor() { + rawData: any; + ref_start: number; + ref_end: number + constructor(rawData) { + this.rawData = rawData + } + async toHeader() { + + const block = await encodePayload(this.rawData) + + return { + __t: 
"vsc-bh", + __v: '0.1', + headers: { + //Find previous block here + prevB: '', + //block range + br: [this.ref_start, this.ref_end] + }, + merkle_root: this.rawData.merkle_root, + block: block.cid + } } - static fromObject(): BlockContainer { - return new BlockContainer() + static fromObject(rawData): BlockContainer { + return new BlockContainer(rawData) } } +function simpleMerkleTree(tree: string[]) { + const leaves = tree.map(x => SHA256(x)) + const merkleTree = new MerkleTree(leaves, SHA256) + console.log(merkleTree.getRoot().length) + const root = merkleTree.getRoot().toString('base64url') + return root +} + export class WitnessServiceV2 { self: NewCoreService; @@ -28,6 +60,9 @@ export class WitnessServiceV2 { } //Precomputed list of blocks _candidateBlocks: Record + candidateApprovalsDb: Collection + //VSC block headres ref + blockHeaders: Collection constructor(self: NewCoreService) { this.self = self; @@ -101,19 +136,28 @@ export class WitnessServiceV2 { } - async createBlock() { + async createBlock(args: { + block_height: number + start_height: number + }): Promise { + const {block_height, start_height} = args; const transactions = await this.self.transactionPool.txDb.find({ - // $or: [ - // { - // 'headers.lock_block': { - // $lt: block_height - // } - // }, { - // 'headers.lock_block': { - // $exists: false - // } - // } - // ] + 'headers.anchored_height': { + $lte: block_height, + $gte: start_height + }, + $or: [ + { + //Make sure transactions are locked in the future + 'headers.lock_block': { + $gt: block_height + } + }, { + 'headers.lock_block': { + $exists: false + } + } + ] }).toArray() const offchainTxs = transactions.filter(e => { @@ -124,55 +168,120 @@ export class WitnessServiceV2 { return e.src === 'hive' }) + const totalTxIDs = [ + ...transactions.map(e => e.id) + ] + + // const root = simpleMerkleTree(txIds) - let hiveMerkleProof + let hiveMerkleProof = { + id: null, + data: null, + chain: 'hive', + type: "anchor_proof" + } 
if(onchainTxs.length > 0) { const txIds = onchainTxs.map(e => e.id); - const leaves =txIds.map(x => SHA256(x)) - const tree = new MerkleTree(leaves, SHA256) - const root = tree.getRoot().toString('hex') - console.log(root) - const proof = tree.getProof(SHA256(txIds[0])) - console.log(proof) - console.log('onchainTxs', onchainTxs.map(e => e.id)) - hiveMerkleProof = root; - } else { - hiveMerkleProof = '0'.repeat(64) + const root = simpleMerkleTree(txIds) + // console.log(root) + // const proof = tree.getProof(SHA256(txIds[0])) + // console.log(proof) + // console.log('onchainTxs', onchainTxs.map(e => e.id)) + hiveMerkleProof.id = await this.self.ipfs.dag.put({ + txs: txIds + }) + hiveMerkleProof.data = root; } + const contractIds = await this.self.transactionPool.txDb.distinct('headers.contract_id', { + $or: [ + { + 'headers.lock_block': { + $gt: block_height + } + }, { + 'headers.lock_block': { + $exists: false + } + } + ] + }) + + const contractTxs = onchainTxs.filter(e => { + return e.data.op === "call_contract" + }) + + console.log(contractTxs) + console.log('contractIds', contractIds) + let contractOutputs = [] + for(let contractId of contractIds) { + const output = await this.self.contractEngine.createContractOutput({ + txs: contractTxs, + contract_id: contractId + }) + + //Store unsigned outputs for now. + const outputCid = await this.self.ipfs.dag.put(output) + + contractOutputs.push({ + id: outputCid, + type: "contract_output" + }) + } + + const txList = [ + ...contractOutputs, + ...(hiveMerkleProof.id ? 
[hiveMerkleProof] : []) + ] + + const merkleRoot = simpleMerkleTree(txList.map(e => e.id)) + const blockFull = { __t: 'vsc-block', __v: '0.1', - // required_auths: [ - // { - // type: 'con' - // value: process.env.HIVE_ACCOUNT - // } - // ] - txs: [ - { - id: hiveMerkleProof, - type: "anchor_proof" - } - ], - contract_index: { - 'null': [] - }, - merkle_root: null + txs: txList, + // contract_index: { + // 'null': [] + // }, + merkle_root: !merkleRoot && null } - return blockFull + console.log('blockFull', blockFull) + const blockContainer = new BlockContainer(blockFull); + blockContainer.ref_start = block_height + blockContainer.ref_end = block_height + 20 + return blockContainer } async proposeBlock(block_height: number) { - const blockFull = await this.createBlock() + const lastHeader = await this.blockHeaders.findOne({ + + }, { + sort: { + hive_ref_block: -1 + } + }) + + + //If no other header is available. Use genesis day as range + const start_height = lastHeader ? lastHeader.hive_ref_block : networks[this.self.config.get('network.id')].genesisDay + + const blockFull = await this.createBlock({ + block_height, + start_height: start_height + }) + + const blockHeader = await blockFull.toHeader() + console.log('PROPOSING BLOCKFULL', blockFull) const sigPacked = await this.self.consensusKey.signObject(blockFull) console.log('sigPacked', sigPacked) - const encodedPayload = await encodePayload(blockFull) + const encodedPayload = await encodePayload(blockHeader) + + console.log('proposing block over p2p channels', blockHeader) const {drain} = await this.self.p2pService.memoryPoolChannel.call('propose_block', { payload: { block_height, @@ -181,17 +290,31 @@ export class WitnessServiceV2 { mode: 'stream', streamTimeout: 12_000 }) - const circuit = new BlsCircuit(blockFull) + const keys = await this.self.chainBridge.getWitnessesAtBlock(block_height) + const circuit = new BlsCircuit(blockHeader) + const keysMap = keys.map(e => { + return e.keys.find(key => { + 
console.log(key) + return key.t === "consensus" + }) + }).filter(e => !!e).map(e => e.key); + console.log('keysMap', keysMap) + + let voteMajority = 0.67 for await(let sigMsg of drain) { - console.log('sigMsg', sigMsg) const pub = JSON.parse(Buffer.from(sigMsg.payload.p, 'base64url').toString()).pub + console.log('INCOMING PUB SIG', pub) + //Prevent rogue key attacks + if(!keysMap.includes(pub)) { + continue; + } const sig = sigMsg.payload.s const verifiedSig = await circuit.verifySig({ sig, pub, }); // 'verified sig', - console.log(verifiedSig) + console.log(verifiedSig) if(verifiedSig) { console.log({ sig, @@ -201,14 +324,35 @@ export class WitnessServiceV2 { sig, did: pub, }) - const keys = await this.self.chainBridge.getWitnessesAtBlock(block_height) - console.log(keys.map(e => { - console.log(e) - return e.keys.find(e => e.t === 'consensus')?.key - }).filter(e => !!e)) + + + //Vote majority is over threshold. + if(circuit.aggPubKeys.size / keysMap.length > voteMajority ) { + //Stop filling circuit if over majority. 
Saving on unneeded extra bitvectors + break; + } console.log('result', result) + console.log('aggregated DID', circuit.did.id) } - console.log('aggregated DID', circuit.did.id) + } + + + if(circuit.aggPubKeys.size / keysMap.length > voteMajority){ + const signedBlock = { + ...blockHeader, + block: blockHeader.block.toString(), + signature: circuit.serialize(keysMap) + } + await HiveClient.broadcast.json({ + id: 'vsc.propose_block.ignoretest', + required_auths: [process.env.HIVE_ACCOUNT], + required_posting_auths: [], + json: JSON.stringify({ + net_id: this.self.config.get('network.id'), + signed_block: signedBlock + }) + }, PrivateKey.fromString(process.env.HIVE_ACCOUNT_ACTIVE)) + } } @@ -223,12 +367,27 @@ export class WitnessServiceV2 { const scheduleSlot = schedule.find(e => e.bn === block_height) if(!!scheduleSlot && this.self.chainBridge.stream.blockLag < 3) { + const lastHeader = await this.blockHeaders.findOne({ + + }, { + sort: { + hive_ref_block: -1 + } + }) + + + //If no other header is available. Use genesis day as range + const start_height = lastHeader ? 
lastHeader.hive_ref_block : networks[this.self.config.get('network.id')].genesisDay + + const blockFull = await this.createBlock({ + start_height, + block_height: block_height, + }) + this._candidateBlocks[block_height] = await blockFull.toHeader() + console.log('SAVING CANDIDATE BLOCK', this._candidateBlocks[block_height]) if(scheduleSlot.account === process.env.HIVE_ACCOUNT) { console.log('I can actually produce a block!') await this.proposeBlock(block_height) - } else { - const blockFull = await this.createBlock() - this._candidateBlocks[block_height] = blockFull } } } @@ -251,27 +410,30 @@ export class WitnessServiceV2 { const cadBlock = this._candidateBlocks[message.block_height] + console.log('VERIFYING block over p2p channels', cadBlock, message.block_height, message) if(cadBlock) { const signData = await this.self.consensusKey.signRaw((await encodePayload(cadBlock)).cid.bytes) console.log(signData) drain.push(signData) } - const cid = CID.parse(message.hash) - const signData = await this.self.consensusKey.signRaw(cid.bytes) - console.log(signData) + // const cid = CID.parse(message.hash) + // const signData = await this.self.consensusKey.signRaw(cid.bytes) + // console.log(signData) - drain.push(signData) + // drain.push(signData) } async init() { - this.self.chainBridge.registerTickHandle('witness.blockTick', this.blockTick, { - type: 'block' - }) + this.blockHeaders = this.self.db.collection('block_headers') - this.self.p2pService.memoryPoolChannel.register('propose_block', this.handleProposeBlockMsg, { - loopbackOk: true - }) + this.self.chainBridge.registerTickHandle('witness.blockTick', this.blockTick, { + type: 'block' + }) + + this.self.p2pService.memoryPoolChannel.register('propose_block', this.handleProposeBlockMsg, { + loopbackOk: true + }) } async start() { diff --git a/src/transactions/scriptDeployContract.ts b/src/transactions/scriptDeployContract.ts index 76838e2..61b0ce5 100644 --- a/src/transactions/scriptDeployContract.ts +++ 
b/src/transactions/scriptDeployContract.ts @@ -7,13 +7,14 @@ import { CID } from 'kubo-rpc-client' import { TransactionPoolService } from '../services/transactionPool' import { createHAMT } from 'hamt-sharding' import crypto from 'crypto' +import { compileAS } from '../services/new/vm/compiler' void (async () => { const setup: { identity; config; ipfsClient; logger } = await init() console.log(new Date()) - const contractLocation = './contracts' + const contractLocation = './contracts/new' const directory = (await fs.readdir(contractLocation)).filter(function (e) { return e.endsWith('manifest.json') @@ -21,9 +22,10 @@ void (async () => { console.log(directory) for (let contractFile of directory) { try { - const path = Path.join(contractLocation, [contractFile.split('.')[0], 'js'].join('.')) + const path = Path.join(contractLocation, [contractFile.split('.')[0], 'tsa'].join('.')) await fs.stat(path) + console.log(Path.join(contractLocation,contractFile)) const manifestData = JSON.parse((await fs.readFile(Path.join(contractLocation,contractFile))).toString()) console.log(manifestData) @@ -40,19 +42,51 @@ void (async () => { // setup.logger.error('not able to load contract file:\n', err); process.exit(0) } + + const result = await compileAS({ + scriptPath: path + }) + console.log(result) + if(result.err) { + console.log(`ERROR on compiling to WASM - ${result.err}`) + process.exit(0) + } + + if(result.binary.length > 240_000) { + console.log(`ERROR compiled result must be smaller than 240KB. 
Total size: ${result.binary.length}`) + process.exit(0) + } + const cid = await setup.ipfsClient.block.put(result.binary) + + console.log(cid) + + const broadcastResult = await HiveClient.broadcast.json({ + required_auths: [process.env.HIVE_ACCOUNT], + required_posting_auths: [], + id: 'vsc.create_contract', + json: JSON.stringify({ + __v: '0.1', + net_id: setup.config.get('network.id'), + name: manifestData.name, + code: cid.toString(), + description: manifestData.description + }) + }, PrivateKey.fromString(process.env.HIVE_ACCOUNT_ACTIVE)) + + console.log(broadcastResult) - const confirmation = await TransactionPoolService.createContract({ - name: manifestData.name, - code: code, - description: manifestData.description - }, - setup); - console.log(confirmation) - manifestData.deployedAt = new Date().toISOString() - manifestData.deployedId = confirmation.id - console.log(manifestData) - console.log(contractFile) - await fs.writeFile(Path.join(contractLocation, contractFile), JSON.stringify(manifestData)) + // const confirmation = await TransactionPoolService.createContract({ + // name: manifestData.name, + // code: code, + // description: manifestData.description + // }, + // setup); + // console.log(confirmation) + // manifestData.deployedAt = new Date().toISOString() + // manifestData.deployedId = confirmation.id + // console.log(manifestData) + // console.log(contractFile) + // await fs.writeFile(Path.join(contractLocation, contractFile), JSON.stringify(manifestData, null, 2)) } catch(ex) { console.log(ex) }