From f6e0ade3bed40171015ca8a1d4a833c2506fbac9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Jun 2021 04:14:07 +0000 Subject: [PATCH 001/175] Bump color-string from 1.5.4 to 1.5.5 Bumps [color-string](https://github.com/Qix-/color-string) from 1.5.4 to 1.5.5. - [Release notes](https://github.com/Qix-/color-string/releases) - [Changelog](https://github.com/Qix-/color-string/blob/master/CHANGELOG.md) - [Commits](https://github.com/Qix-/color-string/compare/1.5.4...1.5.5) --- updated-dependencies: - dependency-name: color-string dependency-type: indirect ... Signed-off-by: dependabot[bot] --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index 4e9d555ea..6547a36fd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -977,9 +977,9 @@ color-name@^1.0.0, color-name@~1.1.4: integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== color-string@^1.5.2: - version "1.5.4" - resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6" - integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw== + version "1.5.5" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.5.tgz#65474a8f0e7439625f3d27a6a19d89fc45223014" + integrity sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg== dependencies: color-name "^1.0.0" simple-swizzle "^0.2.2" From b5631dea1eac93d5a88d06e9dfcf8c205c461a8d Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 22:35:33 +0900 Subject: [PATCH 002/175] Move test paths under /apps. --- integration/afan_dapp.test.js | 63 +- integration/blockchain.test.js | 125 ++- integration/node.test.js | 917 ++++++++++++---------- integration/sharding.test.js | 369 ++++----- unittest/data/rules_for_testing.json | 17 +- unittest/db.test.js | 1090 +++++++++++++------------- unittest/functions.test.js | 12 +- unittest/test-util.js | 31 +- unittest/transaction.test.js | 2 +- 9 files changed, 1416 insertions(+), 1210 deletions(-) diff --git a/integration/afan_dapp.test.js b/integration/afan_dapp.test.js index ae028cbc8..fb41f7f6d 100644 --- a/integration/afan_dapp.test.js +++ b/integration/afan_dapp.test.js @@ -7,7 +7,7 @@ const syncRequest = require('sync-request'); const AfanClient = require('../afan_client'); const { CHAINS_DIR } = require('../common/constants'); const CommonUtil = require('../common/common-util'); -const { waitUntilTxFinalized, parseOrLog } = require('../unittest/test-util'); +const { waitUntilTxFinalized, parseOrLog, setUpApp } = require('../unittest/test-util'); const PROJECT_ROOT = require('path').dirname(__filename) + '/../'; const TRACKER_SERVER = PROJECT_ROOT + 'tracker-server/index.js'; const APP_SERVER = PROJECT_ROOT + 'client/index.js'; @@ -62,34 +62,43 @@ async function setUp() { 'GET', server3 + '/get_address').body.toString('utf-8')).result; const server4Addr = parseOrLog(syncRequest( 'GET', server4 + '/get_address').body.toString('utf-8')).result; - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - json: { - ref: `/staking/afan/${server1Addr}/0/stake/${Date.now()}/value`, - value: 1 - } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.log(`setUp(): Failed to 
check finalization of app staking tx.`); - } + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { + // json: { + // ref: `/staking/afan/${server1Addr}/0/stake/${Date.now()}/value`, + // value: 1 + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.log(`setUp(): Failed to check finalization of app staking tx.`); + // } - const createAppRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - json: { - ref: `/manage_app/afan/create/${Date.now()}`, - value: { - admin: { - [server1Addr]: true, - [server2Addr]: true, - [server3Addr]: true, - [server4Addr]: true, - } - } + // const createAppRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { + // json: { + // ref: `/manage_app/afan/create/${Date.now()}`, + // value: { + // admin: { + // [server1Addr]: true, + // [server2Addr]: true, + // [server3Addr]: true, + // [server4Addr]: true, + // } + // } + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); + // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { + // console.log(`setUp(): Failed to check finalization of create app tx.`) + // } + + await setUpApp('afan', serverList, { + admin: { + [server1Addr]: true, + [server2Addr]: true, + [server3Addr]: true, + [server4Addr]: true, } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - console.log(`setUp(): Failed to check finalization of create app tx.`) - } + }); } async function cleanUp() { diff --git a/integration/blockchain.test.js b/integration/blockchain.test.js index 203800233..e4c8340f6 100644 --- a/integration/blockchain.test.js +++ b/integration/blockchain.test.js @@ -25,7 +25,8 @@ const { waitUntilTxFinalized, waitForNewBlocks, waitUntilNodeSyncs, - parseOrLog + parseOrLog, + setUpApp } = require('../unittest/test-util'); const ENV_VARIABLES = [ @@ -80,23 +81,23 @@ const LAST_BLOCK_NUMBER_ENDPOINT = '/last_block_number' // Data options RANDOM_OPERATION = [ - ['set_value', {ref: 'test/comeonnnnnnn', value: 'testme'}], - ['set_value', {ref: 'test/comeonnnnnnn', value: 'no meeeee'}], - ['set_value', {ref: 'test/comeon/nnnnnn', value: 'through'}], - ['set_value', {ref: 'test/comeonnnnnnn/new', value: {'new': 'path'}}], - ['set_value', {ref: 'test/builed/some/deep', value: {'place': {'next': 1, 'level': 'down'}}}], - ['set_value', {ref: 'test/b/u/i/l/e/d/hel', value: 'very nested'}], - ['set_value', {ref: 'test/b/u/i/l/e/d/hel', value: {1: 2, 3: 4, 5: 6}}], - ['set_value', {ref: 'test/new/final/path', value: {'more': {'now': 12, 'hellloooo': 123}}}], - ['inc_value', {ref: 'test/balance/user1', value: 10}], - ['inc_value', {ref: 'test/balance/user1', value: 20}], - ['inc_value', {ref: 'test/balance/user2', value: 1}], - ['inc_value', {ref: 'test/balance/user2', value: 1}], - ['dec_value', {ref: 'test/balance/user1', value: 10000}], - ['dec_value', {ref: 'test/balance/user1', value: 10000}], - ['dec_value', {ref: 'test/balance/user2', value: 100002}], - ['set_rule', {ref: 'test/test_rule/', value: { ".write": "some rule config"}}], - ['set_function', {ref: 'test/test_function/', value: { + ['set_value', {ref: '/apps/test/comeonnnnnnn', value: 'testme'}], + ['set_value', {ref: 
'/apps/test/comeonnnnnnn', value: 'no meeeee'}], + ['set_value', {ref: '/apps/test/comeon/nnnnnn', value: 'through'}], + ['set_value', {ref: '/apps/test/comeonnnnnnn/new', value: {'new': 'path'}}], + ['set_value', {ref: '/apps/test/builed/some/deep', value: {'place': {'next': 1, 'level': 'down'}}}], + ['set_value', {ref: '/apps/test/b/u/i/l/e/d/hel', value: 'very nested'}], + ['set_value', {ref: '/apps/test/b/u/i/l/e/d/hel', value: {1: 2, 3: 4, 5: 6}}], + ['set_value', {ref: '/apps/test/new/final/path', value: {'more': {'now': 12, 'hellloooo': 123}}}], + ['inc_value', {ref: '/apps/test/balance/user1', value: 10}], + ['inc_value', {ref: '/apps/test/balance/user1', value: 20}], + ['inc_value', {ref: '/apps/test/balance/user2', value: 1}], + ['inc_value', {ref: '/apps/test/balance/user2', value: 1}], + ['dec_value', {ref: '/apps/test/balance/user1', value: 10000}], + ['dec_value', {ref: '/apps/test/balance/user1', value: 10000}], + ['dec_value', {ref: '/apps/test/balance/user2', value: 100002}], + ['set_rule', {ref: '/apps/test/test_rule/', value: { ".write": "some rule config"}}], + ['set_function', {ref: '/apps/test/test_function/', value: { ".function": { "fid": { "function_type": "REST", @@ -106,7 +107,7 @@ RANDOM_OPERATION = [ }, } }}], - ['set_owner', {ref: 'test/test_owner/', value: { + ['set_owner', {ref: '/apps/test/test_owner/', value: { ".owner": { "owners": { "*": { @@ -118,28 +119,28 @@ RANDOM_OPERATION = [ } } }}], - ['set', {op_list: [{ref: 'test/increase/first/level', value: 10}, - {ref: 'test/increase/first/level2', value: 20}]}], - ['set', {op_list: [{ref: 'test/increase/second/level/deeper', value: 20}, - {ref: 'test/increase/second/level/deeper', value: 1000}]}], - ['set', {op_list: [{ref: 'test/increase', value: 1}]}], - ['set', {op_list: [{ref: 'test/new', value: 1}]}], - ['set', {op_list: [{ref: 'test/increase', value: 10000}]}], - ['set', {op_list: [{ref: 'test/b/u', value: 10000}]}], - ['set', {op_list: [{ref: 'test/builed/some/deep/place/next', value: 100002}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/comeonnnnnnn', + ['set', {op_list: [{ref: '/apps/test/increase/first/level', value: 10}, + {ref: '/apps/test/increase/first/level2', value: 20}]}], + ['set', {op_list: [{ref: '/apps/test/increase/second/level/deeper', value: 20}, + {ref: '/apps/test/increase/second/level/deeper', value: 1000}]}], + ['set', {op_list: [{ref: '/apps/test/increase', value: 1}]}], + ['set', {op_list: [{ref: '/apps/test/new', value: 1}]}], + ['set', {op_list: [{ref: '/apps/test/increase', value: 10000}]}], + ['set', {op_list: [{ref: '/apps/test/b/u', value: 10000}]}], + ['set', {op_list: [{ref: '/apps/test/builed/some/deep/place/next', value: 100002}]}], + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/comeonnnnnnn', value: 'no meeeee'}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/comeon/nnnnnn', + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/comeon/nnnnnn', value: 'through'}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/comeonnnnnnn/new', + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/comeonnnnnnn/new', value: {'new': 'path'}}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/builed/some/deep', + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/builed/some/deep', value: {'place': {'next': 1, 'level': 'down'}}}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/b/u/i/l/e/d/hel', + 
['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/b/u/i/l/e/d/hel', value: {'range': 1, 'another': 2}}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/b/u/i/l/e/d/hel', + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/b/u/i/l/e/d/hel', value: 'very nested'}}]}], - ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: 'test/b/u/i/l/e/d/hel', + ['batch', {tx_list: [{operation: {type: 'SET_VALUE', ref: '/apps/test/b/u/i/l/e/d/hel', value: {1: 2, 3: 4, 5: 6}}}]}], ]; @@ -259,7 +260,53 @@ describe('Blockchain Cluster', () => { resolve(); }); })); - return Promise.all(promises); + await Promise.all(promises); + + const server1Addr = parseOrLog(syncRequest( + 'GET', server1 + '/get_address').body.toString('utf-8')).result; + const server2Addr = parseOrLog(syncRequest( + 'GET', server2 + '/get_address').body.toString('utf-8')).result; + const server3Addr = parseOrLog(syncRequest( + 'GET', server3 + '/get_address').body.toString('utf-8')).result; + const server4Addr = parseOrLog(syncRequest( + 'GET', server4 + '/get_address').body.toString('utf-8')).result; + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { + // json: { + // ref: `/staking/test/${server1Addr}/0/stake/${Date.now()}/value`, + // value: 1 + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.log(`setUp(): Failed to check finalization of app staking tx.`); + // } + + // const createAppRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { + // json: { + // ref: `/manage_app/test/create/${Date.now()}`, + // value: { + // admin: { + // [server1Addr]: true, + // [server2Addr]: true, + // [server3Addr]: true, + // [server4Addr]: true, + // } + // } + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); + // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { + // console.log(`setUp(): Failed to check finalization of create app tx.`) + // } + + await setUpApp('test', serverList, { + admin: { + [server1Addr]: true, + [server2Addr]: true, + [server3Addr]: true, + [server4Addr]: true, + } + }); }); after(() => { @@ -502,9 +549,9 @@ describe('Blockchain Cluster', () => { for (let i = 1; i < serverList.length; i++) { await sendTransactions(sentOperations); await waitForNewBlocks(serverList[i]); - body1 = parseOrLog(syncRequest('GET', server1 + GET_VALUE_ENDPOINT + '?ref=test') + body1 = parseOrLog(syncRequest('GET', server1 + GET_VALUE_ENDPOINT + '?ref=/apps/test') .body.toString('utf-8')); - body2 = parseOrLog(syncRequest('GET', serverList[i] + GET_VALUE_ENDPOINT + '?ref=test') + body2 = parseOrLog(syncRequest('GET', serverList[i] + GET_VALUE_ENDPOINT + '?ref=/apps/test') .body.toString('utf-8')); assert.deepEqual(body1.result, body2.result); } @@ -574,7 +621,7 @@ describe('Blockchain Cluster', () => { const txHash = parseOrLog(syncRequest('POST', server2 + '/' + 'set_value', { json: { - ref: '/test/nonce_test', + ref: '/apps/test/nonce_test', value: 'testing...' 
} }).body.toString('utf-8')).result.tx_hash; diff --git a/integration/node.test.js b/integration/node.test.js index 9f7c9eeeb..59a3d4e09 100644 --- a/integration/node.test.js +++ b/integration/node.test.js @@ -23,7 +23,7 @@ const { MICRO_AIN, } = require('../common/constants'); const CommonUtil = require('../common/common-util'); -const { waitUntilTxFinalized, parseOrLog } = require('../unittest/test-util'); +const { waitUntilTxFinalized, parseOrLog, setUpApp } = require('../unittest/test-util'); const ENV_VARIABLES = [ { @@ -80,12 +80,12 @@ async function setUp() { op_list: [ { type: 'SET_VALUE', - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: 100 }, { type: 'SET_VALUE', - ref: 'test/test_state_info/some/path', + ref: '/apps/test/test_state_info/some/path', value: { label1: { label11: 'value11', @@ -96,14 +96,14 @@ async function setUp() { }, { type: 'SET_RULE', - ref: '/test/test_rule/some/path', + ref: '/apps/test/test_rule/some/path', value: { ".write": "auth.addr === 'abcd'" } }, { type: 'SET_FUNCTION', - ref: '/test/test_function/some/path', + ref: '/apps/test/test_function/some/path', value: { ".function": { "fid": { @@ -117,7 +117,7 @@ async function setUp() { }, { type: 'SET_OWNER', - ref: '/test/test_owner/some/path', + ref: '/apps/test/test_owner/some/path', value: { ".owner": { "owners": { @@ -147,22 +147,22 @@ async function cleanUp() { op_list: [ { type: 'SET_VALUE', - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: null }, { type: 'SET_RULE', - ref: '/test/test_rule/some/path', + ref: '/apps/test/test_rule/some/path', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function/some/path', + ref: '/apps/test/test_function/some/path', value: null }, { type: 'SET_OWNER', - ref: '/test/test_owner/some/path', + ref: '/apps/test/test_owner/some/path', value: null }, ], @@ -191,6 +191,24 @@ describe('Blockchain Node', () => { await CommonUtil.sleep(2000); server4_proc = startServer(APP_SERVER, 'server4', ENV_VARIABLES[3], true); await CommonUtil.sleep(2000); + + + const server1Addr = parseOrLog(syncRequest( + 'GET', server1 + '/get_address').body.toString('utf-8')).result; + const server2Addr = parseOrLog(syncRequest( + 'GET', server2 + '/get_address').body.toString('utf-8')).result; + const server3Addr = parseOrLog(syncRequest( + 'GET', server3 + '/get_address').body.toString('utf-8')).result; + const server4Addr = parseOrLog(syncRequest( + 'GET', server4 + '/get_address').body.toString('utf-8')).result; + await setUpApp('test', serverList, { + admin: { + [server1Addr]: true, + [server2Addr]: true, + [server3Addr]: true, + [server4Addr]: true, + } + }); }); after(() => { @@ -215,7 +233,7 @@ describe('Blockchain Node', () => { describe('/get_value', () => { it('get_value', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: 100}); }) @@ -224,7 +242,7 @@ describe('Blockchain Node', () => { describe('/get_function', () => { it('get_function', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=/test/test_function/some/path') + 'GET', server1 + '/get_function?ref=/apps/test/test_function/some/path') .body.toString('utf-8')); assert.deepEqual(body, { code: 0, @@ -245,7 +263,7 @@ describe('Blockchain Node', () => { describe('/get_rule', () => { it('get_rule', () => { const body = 
parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=/test/test_rule/some/path') + 'GET', server1 + '/get_rule?ref=/apps/test/test_rule/some/path') .body.toString('utf-8')); assert.deepEqual(body, { code: 0, @@ -259,7 +277,7 @@ describe('Blockchain Node', () => { describe('/get_owner', () => { it('get_owner', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=/test/test_owner/some/path') + 'GET', server1 + '/get_owner?ref=/apps/test/test_owner/some/path') .body.toString('utf-8')); assert.deepEqual(body, { code: 0, @@ -281,13 +299,13 @@ describe('Blockchain Node', () => { describe('/match_function', () => { it('match_function', () => { - const ref = "/test/test_function/some/path"; + const ref = "/apps/test/test_function/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_function?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + "target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -299,7 +317,7 @@ describe('Blockchain Node', () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [] }}); @@ -308,18 +326,18 @@ describe('Blockchain Node', () => { describe('/match_rule', () => { it('match_rule', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_rule?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [] }}); @@ -328,12 +346,12 @@ describe('Blockchain Node', () => { describe('/match_owner', () => { it('match_owner', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_owner?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -346,7 +364,7 @@ describe('Blockchain Node', () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }}); }) @@ -354,7 +372,7 @@ describe('Blockchain Node', () => { describe('/eval_rule', () => { it('eval_rule returning true', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -364,7 +382,7 @@ describe('Blockchain Node', () => { }) it('eval_rule returning false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "efgh"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -376,7 +394,7 @@ describe('Blockchain Node', () => { describe('/eval_owner', 
() => { it('eval_owner', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const request = { ref, permission, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -395,29 +413,29 @@ describe('Blockchain Node', () => { op_list: [ { type: "GET_VALUE", - ref: "/test/test_value/some/path", + ref: "/apps/test/test_value/some/path", }, { type: 'GET_FUNCTION', - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", }, { type: 'GET_RULE', - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", }, { type: 'GET_OWNER', - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", }, { type: 'EVAL_RULE', - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", value: "value", address: "abcd" }, { type: 'EVAL_OWNER', - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", permission: "write_owner", address: "abcd" } @@ -490,7 +508,7 @@ describe('Blockchain Node', () => { describe('/get_state_info', () => { it('get_state_info', () => { const infoBody = parseOrLog(syncRequest( - 'GET', server1 + `/get_state_info?ref=/values/test/test_state_info/some/path`) + 'GET', server1 + `/get_state_info?ref=/values/apps/test/test_state_info/some/path`) .body.toString('utf-8')); assert.deepEqual(infoBody, { code: 0, result: { tree_height: 2, tree_size: 5 }}); }); @@ -503,7 +521,7 @@ describe('Blockchain Node', () => { return jsonRpcClient.request('ain_get', { protoVer: CURRENT_PROTOCOL_VERSION, type: 'GET_VALUE', - ref: "/test/test_value/some/path" + ref: "/apps/test/test_value/some/path" }) .then(res => { expect(res.result.result).to.equal(expected); @@ -513,14 +531,14 @@ describe('Blockchain Node', () => { describe('ain_matchFunction', () => { it('returns correct value', () => { - const ref = "/test/test_function/some/path"; + const ref = "/apps/test/test_function/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchFunction', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + "target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -532,7 +550,7 @@ describe('Blockchain Node', () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [] }); @@ -542,19 +560,19 @@ describe('Blockchain Node', () => { describe('ain_matchRule', () => { it('returns correct value', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchRule', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [] }); @@ -564,13 +582,13 @@ describe('Blockchain Node', () => { 
describe('ain_matchOwner', () => { it('returns correct value', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchOwner', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -583,7 +601,7 @@ describe('Blockchain Node', () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }); }) @@ -592,7 +610,7 @@ describe('Blockchain Node', () => { describe('ain_evalRule', () => { it('returns true', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -603,7 +621,7 @@ describe('Blockchain Node', () => { }) it('returns false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "efgh"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -616,7 +634,7 @@ describe('Blockchain Node', () => { describe('ain_evalOwner', () => { it('returns correct value', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const request = { ref, permission, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -659,7 +677,7 @@ describe('Blockchain Node', () => { describe('ain_getStateInfo', () => { it('returns correct value', () => { - const ref = '/values/test/test_state_info/some/path'; + const ref = '/values/apps/test/test_state_info/some/path'; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_getStateInfo', request) .then(res => { @@ -730,12 +748,12 @@ describe('Blockchain Node', () => { it('set_value', async () => { // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, 100); const request = { - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: "some value" }; const body = parseOrLog(syncRequest( @@ -749,14 +767,14 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, "some value"); }) it('set_value with timestamp', async () => { const request = { - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: "some value with timestamp", timestamp: Date.now(), }; @@ -771,14 +789,14 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, "some value with timestamp"); }) it('set_value with nonce unordered (-1)', async () => { const request = { - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: "some value with nonce unordered", nonce: -1, }; @@ -793,7 +811,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, "some value with nonce unordered"); }) @@ -802,7 +820,7 @@ describe('Blockchain Node', () => { const nonce = parseOrLog( syncRequest('GET', server1 + '/get_nonce').body.toString('utf-8')).result; const request = { - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: "some value with numbered nonce", nonce, }; @@ -817,7 +835,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, "some value with numbered nonce"); }) @@ -825,16 +843,16 @@ describe('Blockchain Node', () => { it('set_value with failing operation', () => { // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); - const request = {ref: 'some/wrong/path', value: "some other value"}; + const request = {ref: '/apps/some/wrong/path', value: "some other value"}; const body = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 103, - "error_message": "No .write permission on: some/wrong/path", + "error_message": "No .write permission on: /apps/some/wrong/path", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -846,7 +864,7 @@ describe('Blockchain Node', () => { // Confirm that the original value is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -856,11 +874,11 @@ describe('Blockchain Node', () => { it('inc_value', async () => { // Check the original value. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path2') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path2') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); - const request = {ref: "test/test_value/some/path2", value: 10}; + const request = {ref: "/apps/test/test_value/some/path2", value: 10}; const body = parseOrLog(syncRequest('POST', server1 + '/inc_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -872,7 +890,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path2') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path2') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, 10); }) @@ -880,16 +898,16 @@ describe('Blockchain Node', () => { it('inc_value with a failing operation', () => { // Check the original value. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path2') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path2') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); - const request = {ref: "some/wrong/path2", value: 10}; + const request = {ref: "/apps/some/wrong/path2", value: 10}; const body = parseOrLog(syncRequest('POST', server1 + '/inc_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 103, - "error_message": "No .write permission on: some/wrong/path2", + "error_message": "No .write permission on: /apps/some/wrong/path2", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -901,7 +919,7 @@ describe('Blockchain Node', () => { // Confirm that the original value is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path2') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path2') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -911,11 +929,11 @@ describe('Blockchain Node', () => { it('dec_value', async () => { // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path3') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path3') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); - const request = {ref: "test/test_value/some/path3", value: 10}; + const request = {ref: "/apps/test/test_value/some/path3", value: 10}; const body = parseOrLog(syncRequest('POST', server1 + '/dec_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -927,7 +945,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path3') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path3') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, -10); }) @@ -935,16 +953,16 @@ describe('Blockchain Node', () => { it('dec_value with a failing operation', () => { // Check the original value. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path3') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path3') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); - const request = {ref: "some/wrong/path3", value: 10}; + const request = {ref: "/apps/some/wrong/path3", value: 10}; const body = parseOrLog(syncRequest('POST', server1 + '/dec_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 103, - "error_message": "No .write permission on: some/wrong/path3", + "error_message": "No .write permission on: /apps/some/wrong/path3", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -956,7 +974,7 @@ describe('Blockchain Node', () => { // Confirm that the original value is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=some/wrong/path3') + 'GET', server1 + '/get_value?ref=/apps/some/wrong/path3') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -966,7 +984,7 @@ describe('Blockchain Node', () => { it('set_function', async () => { // Check the original function. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=test/test_function/some/path') + 'GET', server1 + '/get_function?ref=/apps/test/test_function/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, { ".function": { @@ -980,7 +998,7 @@ describe('Blockchain Node', () => { }); const request = { - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", value: { ".function": { "fid": { @@ -1004,7 +1022,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=test/test_function/some/path') + 'GET', server1 + '/get_function?ref=/apps/test/test_function/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, { ".function": { @@ -1021,12 +1039,12 @@ describe('Blockchain Node', () => { it('set_function with a failing operation', () => { // Check the original function. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=some/wrong/path') + 'GET', server1 + '/get_function?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); const request = { - ref: "/some/wrong/path", + ref: "/apps/some/wrong/path", value: { ".function": { "fid": { @@ -1043,7 +1061,7 @@ describe('Blockchain Node', () => { .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 404, - "error_message": "No write_function permission on: /some/wrong/path", + "error_message": "No write_function permission on: /apps/some/wrong/path", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -1055,7 +1073,7 @@ describe('Blockchain Node', () => { // Confirm that the original function is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=some/wrong/path') + 'GET', server1 + '/get_function?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -1065,14 +1083,14 @@ describe('Blockchain Node', () => { it('set_rule', async () => { // Check the original rule. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=test/test_rule/some/path') + 'GET', server1 + '/get_rule?ref=/apps/test/test_rule/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, { ".write": "auth.addr === 'abcd'" }); const request = { - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", value: { ".write": "some other rule config" } @@ -1088,7 +1106,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=test/test_rule/some/path') + 'GET', server1 + '/get_rule?ref=/apps/test/test_rule/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, { ".write": "some other rule config" @@ -1098,12 +1116,12 @@ describe('Blockchain Node', () => { it('set_rule with a failing operation', () => { // Check the original rule. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=some/wrong/path') + 'GET', server1 + '/get_rule?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); const request = { - ref: "/some/wrong/path", + ref: "/apps/some/wrong/path", value: { ".write": "some other rule config" } @@ -1112,7 +1130,7 @@ describe('Blockchain Node', () => { .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 503, - "error_message": "No write_rule permission on: /some/wrong/path", + "error_message": "No write_rule permission on: /apps/some/wrong/path", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -1124,7 +1142,7 @@ describe('Blockchain Node', () => { // Confirm that the original rule is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=some/wrong/path') + 'GET', server1 + '/get_rule?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -1134,7 +1152,7 @@ describe('Blockchain Node', () => { it('set_owner', async () => { // Check the original owner. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=test/test_owner/some/path') + 'GET', server1 + '/get_owner?ref=/apps/test/test_owner/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, { ".owner": { @@ -1150,7 +1168,7 @@ describe('Blockchain Node', () => { }); const request = { - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", value: { ".owner": { "owners": { @@ -1175,7 +1193,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=test/test_owner/some/path') + 'GET', server1 + '/get_owner?ref=/apps/test/test_owner/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, { ".owner": { @@ -1194,12 +1212,12 @@ describe('Blockchain Node', () => { it('set_owner with a failing operation', () => { // Check the original owner. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=some/wrong/path') + 'GET', server1 + '/get_owner?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); const request = { - ref: "/some/wrong/path", + ref: "/apps/some/wrong/path", value: { ".owner": { "owners": { @@ -1217,7 +1235,7 @@ describe('Blockchain Node', () => { .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result'), { "code": 603, - "error_message": "No write_owner or branch_owner permission on: /some/wrong/path", + "error_message": "No write_owner or branch_owner permission on: /apps/some/wrong/path", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -1229,7 +1247,7 @@ describe('Blockchain Node', () => { // Confirm that the original owner is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=some/wrong/path') + 'GET', server1 + '/get_owner?ref=/apps/some/wrong/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -1239,7 +1257,7 @@ describe('Blockchain Node', () => { it('set with successful operations', async () => { // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some100/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some100/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); @@ -1247,22 +1265,22 @@ describe('Blockchain Node', () => { op_list: [ { // Default type: SET_VALUE - ref: "test/test_value/some100/path", + ref: "/apps/test/test_value/some100/path", value: "some other100 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some100/path1", + ref: "/apps/test/test_value/some100/path1", value: 10 }, { type: 'DEC_VALUE', - ref: "test/test_value/some100/path2", + ref: "/apps/test/test_value/some100/path2", value: 10 }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other100/path", + ref: "/apps/test/test_function/other100/path", value: { ".function": { "fid": { @@ -1276,14 +1294,14 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other100/path", + ref: "/apps/test/test_rule/other100/path", value: { ".write": "some other100 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other100/path", + ref: "/apps/test/test_owner/other100/path", value: { ".owner": { "owners": { @@ -1329,8 +1347,10 @@ describe('Blockchain Node', () => { }, ], "gas_amount_total": { - "service": 6, - "app": {} + "service": 0, + "app": { + "test": 6 + } }, "gas_cost_total": 0 }); @@ -1342,7 +1362,7 @@ describe('Blockchain Node', () => { console.error(`Failed to check finalization of tx.`); } const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some100/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some100/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, 'some other100 value'); }) @@ -1350,7 +1370,7 @@ describe('Blockchain Node', () => { it('set with a failing operation', () => { // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some101/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some101/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); @@ -1358,27 +1378,27 @@ describe('Blockchain Node', () => { op_list: [ { // Default type: SET_VALUE - ref: "test/test_value/some101/path", + ref: "/apps/test/test_value/some101/path", value: "some other101 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some101/path2", + ref: "/apps/test/test_value/some101/path2", value: 10 }, { type: 'DEC_VALUE', - ref: "test/test_value/some101/path3", + ref: "/apps/test/test_value/some101/path3", value: 10 }, { type: 'SET_VALUE', - ref: "some/wrong/path", + ref: "/apps/some/wrong/path", value: "some other101 value", }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other101/path", + ref: "/apps/test/test_function/other101/path", value: { ".function": { "fid": { @@ -1392,14 +1412,14 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other101/path", + ref: "/apps/test/test_rule/other101/path", value: { ".write": "some other101 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other101/path", + ref: "/apps/test/test_owner/other101/path", value: { ".owner": "some other101 owner config" } @@ -1424,13 +1444,15 @@ describe('Blockchain Node', () => { }, { "code": 103, - "error_message": "No .write permission on: some/wrong/path", + "error_message": "No .write permission on: /apps/some/wrong/path", "gas_amount": 0 } ], "gas_amount_total": { - "app": {}, - "service": 3 + "app": { + "test": 3 + }, + "service": 0 }, "gas_cost_total": 0 }); @@ -1438,7 +1460,7 @@ describe('Blockchain Node', () => { // Confirm that the original value is not altered. const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some101/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some101/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, null); }) @@ -1450,11 +1472,11 @@ describe('Blockchain Node', () => { 'GET', server1 + '/get_address').body.toString('utf-8')).result; // Check the original value. 
const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some200/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some200/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); const resultBefore2 = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some201/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some201/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore2, null); @@ -1465,7 +1487,7 @@ describe('Blockchain Node', () => { { operation: { // Default type: SET_VALUE - ref: "test/test_value/some200/path", + ref: "/apps/test/test_value/some200/path", value: "some other200 value", }, timestamp: Date.now(), @@ -1474,7 +1496,7 @@ describe('Blockchain Node', () => { { operation: { type: 'INC_VALUE', - ref: "test/test_value/some200/path2", + ref: "/apps/test/test_value/some200/path2", value: 10 }, timestamp: Date.now(), @@ -1483,7 +1505,7 @@ describe('Blockchain Node', () => { { operation: { type: 'DEC_VALUE', - ref: "test/test_value/some200/path3", + ref: "/apps/test/test_value/some200/path3", value: 10 }, timestamp: Date.now(), @@ -1492,7 +1514,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_FUNCTION', - ref: "/test/test_function/other200/path", + ref: "/apps/test/test_function/other200/path", value: { ".function": { "fid": { @@ -1510,7 +1532,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_RULE', - ref: "/test/test_rule/other200/path", + ref: "/apps/test/test_rule/other200/path", value: { ".write": "some other200 rule config" } @@ -1521,7 +1543,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_OWNER', - ref: "/test/test_owner/other200/path", + ref: "/apps/test/test_owner/other200/path", value: { ".owner": { "owners": { @@ -1544,22 +1566,22 @@ describe('Blockchain Node', () => { op_list: [ { type: "SET_VALUE", - ref: "test/test_value/some201/path", + ref: "/apps/test/test_value/some201/path", value: "some other201 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some201/path2", + ref: "/apps/test/test_value/some201/path2", value: 5 }, { type: 'DEC_VALUE', - ref: "test/test_value/some201/path3", + ref: "/apps/test/test_value/some201/path3", value: 5 }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other201/path", + ref: "/apps/test/test_function/other201/path", value: { ".function": { "fid": { @@ -1573,14 +1595,14 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other201/path", + ref: "/apps/test/test_rule/other201/path", value: { ".write": "some other201 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other201/path", + ref: "/apps/test/test_owner/other201/path", value: { ".owner": { "owners": { @@ -1615,8 +1637,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0, }, @@ -1627,8 +1651,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1639,8 +1665,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1651,8 +1679,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + 
"app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1663,8 +1693,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1675,8 +1707,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1711,8 +1745,10 @@ describe('Blockchain Node', () => { } ], "gas_amount_total": { - "app": {}, - "service": 6 + "app": { + "test": 6 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1724,11 +1760,11 @@ describe('Blockchain Node', () => { // Confirm that the value is set properly. await CommonUtil.sleep(6); const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some200/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some200/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, 'some other200 value'); const resultAfter2 = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some201/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some201/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter2, 'some other201 value'); }); @@ -1738,11 +1774,11 @@ describe('Blockchain Node', () => { 'GET', server1 + '/get_address').body.toString('utf-8')).result; // Check the original values. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some202/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some202/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, null); const resultBefore2 = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some203/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some203/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore2, null); const nonce = parseOrLog(syncRequest( @@ -1753,7 +1789,7 @@ describe('Blockchain Node', () => { { operation: { // Default type: SET_VALUE - ref: "test/test_value/some202/path", + ref: "/apps/test/test_value/some202/path", value: "some other202 value", }, timestamp: Date.now(), @@ -1762,7 +1798,7 @@ describe('Blockchain Node', () => { { operation: { type: 'INC_VALUE', - ref: "test/test_value/some202/path2", + ref: "/apps/test/test_value/some202/path2", value: 10 }, timestamp: Date.now(), @@ -1771,7 +1807,7 @@ describe('Blockchain Node', () => { { operation: { type: 'DEC_VALUE', - ref: "test/test_value/some202/path3", + ref: "/apps/test/test_value/some202/path3", value: 10 }, timestamp: Date.now(), @@ -1780,7 +1816,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_VALUE', - ref: "some/wrong/path", + ref: "/apps/some/wrong/path", value: "some other202 value", }, timestamp: Date.now(), @@ -1789,7 +1825,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_FUNCTION', - ref: "/test/test_function/other202/path", + ref: "/apps/test/test_function/other202/path", value: { ".function": { "fid": { @@ -1807,7 +1843,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_RULE', - ref: "/test/test_rule/other202/path", + ref: "/apps/test/test_rule/other202/path", value: { ".write": "some other202 rule config" } @@ -1818,7 +1854,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_OWNER', - ref: "/test/test_owner/other202/path", + ref: "/apps/test/test_owner/other202/path", value: { 
".owner": { "owners": { @@ -1841,22 +1877,22 @@ describe('Blockchain Node', () => { op_list: [ { type: "SET_VALUE", - ref: "test/test_value/some203/path", + ref: "/apps/test/test_value/some203/path", value: "some other203 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some203/path2", + ref: "/apps/test/test_value/some203/path2", value: 5 }, { type: 'DEC_VALUE', - ref: "test/test_value/some203/path3", + ref: "/apps/test/test_value/some203/path3", value: 5 }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other203/path", + ref: "/apps/test/test_function/other203/path", value: { ".function": { "fid": { @@ -1870,14 +1906,14 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other203/path", + ref: "/apps/test/test_rule/other203/path", value: { ".write": "some other203 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other203/path", + ref: "/apps/test/test_owner/other203/path", value: { ".owner": { "owners": { @@ -1912,8 +1948,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1924,8 +1962,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1936,8 +1976,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1946,7 +1988,7 @@ describe('Blockchain Node', () => { { "result": { "code": 103, - "error_message": "No .write permission on: some/wrong/path", + "error_message": "No .write permission on: /apps/some/wrong/path", "gas_amount": 0, "gas_amount_total": { "app": {}, @@ -1961,8 +2003,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1973,8 +2017,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -1985,8 +2031,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0 }, @@ -2021,8 +2069,10 @@ describe('Blockchain Node', () => { } ], "gas_amount_total": { - "app": {}, - "service": 6 + "app": { + "test": 6 + }, + "service": 0 }, "gas_cost_total": 0, }, @@ -2034,11 +2084,11 @@ describe('Blockchain Node', () => { // Confirm that the value is set properly. 
await CommonUtil.sleep(6); const resultAfter = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some202/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some202/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter, 'some other202 value'); const resultAfter2 = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some203/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some203/path') .body.toString('utf-8')).result; assert.deepEqual(resultAfter2, 'some other203 value'); }) @@ -2051,13 +2101,52 @@ describe('Blockchain Node', () => { public_key: "eb8c8577e8be18a83829c5c8a2ec2a754ef0a190e5a01139e9a24aae8f56842dfaf708da56d0f395bbfef08633237398dec96343f62ce217130d9738a76adfdf" }; + before(async () => { + const currentRule = parseOrLog(syncRequest('GET', server1 + '/get_rule?ref=/apps/test') + .body.toString('utf-8')).result[".write"]; + const newOwners = parseOrLog(syncRequest('GET', server1 + '/get_owner?ref=/apps/test') + .body.toString('utf-8')).result[".owner"]; + const newRule = `${currentRule} || auth.addr === '${account.address}'`; + newOwners["owners"][account.address] = { + "branch_owner": true, + "write_owner": true, + "write_rule": true, + "write_function": true + }; + const res = parseOrLog(syncRequest('POST', server1 + '/set', {json: { + op_list: [ + { + type: 'SET_RULE', + ref: '/apps/test', + value: { + ".write": newRule + } + }, + { + type: 'SET_OWNER', + ref: '/apps/test', + value: { + ".owner": newOwners + } + } + ], + timestamp: Date.now(), + nonce: -1 + }}) + .body.toString('utf-8')).result; + assert.deepEqual(CommonUtil.isFailedTx(_.get(res, 'result')), false); + if (!(await waitUntilTxFinalized(serverList, _.get(res, 'tx_hash')))) { + console.error(`Failed to check finalization of tx.`); + } + }) + it('accepts a transaction with nonce unordered (-1)', () => { const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -2078,8 +2167,10 @@ describe('Blockchain Node', () => { code: 0, gas_amount: 1, gas_amount_total: { - service: 1, - app: {} + service: 0, + app: { + test: 1 + } }, gas_cost_total: 0 }, @@ -2102,7 +2193,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value 2', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce, // numbered nonce @@ -2124,8 +2215,10 @@ describe('Blockchain Node', () => { code: 0, gas_amount: 1, gas_amount_total: { - service: 1, - app: {} + service: 0, + app: { + test: 1 + } }, gas_cost_total: 0, }, @@ -2143,7 +2236,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: longText, - ref: `test/test_long_text` + ref: `/apps/test/test_long_text` }, timestamp: Date.now(), nonce: -1 @@ -2171,7 +2264,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -2199,7 +2292,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), // missing nonce @@ -2238,7 +2331,7 @@ describe('Blockchain Node', () => { txBodyBefore = { operation: { // Default type: SET_VALUE 
- ref: "test/test_value/some400/path", + ref: "/apps/test/test_value/some400/path", value: "some other300 value", }, timestamp: Date.now(), @@ -2250,7 +2343,7 @@ describe('Blockchain Node', () => { txBodyAfter = { operation: { type: 'INC_VALUE', - ref: "test/test_value/some400/path2", + ref: "/apps/test/test_value/some400/path2", value: 10 }, timestamp: Date.now(), @@ -2266,7 +2359,7 @@ describe('Blockchain Node', () => { { operation: { // Default type: SET_VALUE - ref: "test/test_value/some300/path", + ref: "/apps/test/test_value/some300/path", value: "some other300 value", }, timestamp: Date.now(), @@ -2275,7 +2368,7 @@ describe('Blockchain Node', () => { { operation: { type: 'INC_VALUE', - ref: "test/test_value/some300/path2", + ref: "/apps/test/test_value/some300/path2", value: 10 }, timestamp: Date.now(), @@ -2284,7 +2377,7 @@ describe('Blockchain Node', () => { { operation: { type: 'DEC_VALUE', - ref: "test/test_value/some300/path3", + ref: "/apps/test/test_value/some300/path3", value: 10 }, timestamp: Date.now(), @@ -2293,7 +2386,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_FUNCTION', - ref: "/test/test_function/other300/path", + ref: "/apps/test/test_function/other300/path", value: { ".function": { "fid": { @@ -2311,7 +2404,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_RULE', - ref: "/test/test_rule/other300/path", + ref: "/apps/test/test_rule/other300/path", value: { ".write": "some other300 rule config" } @@ -2322,7 +2415,7 @@ describe('Blockchain Node', () => { { operation: { type: 'SET_OWNER', - ref: "/test/test_owner/other300/path", + ref: "/apps/test/test_owner/other300/path", value: { ".owner": { "owners": { @@ -2345,22 +2438,22 @@ describe('Blockchain Node', () => { op_list: [ { type: "SET_VALUE", - ref: "test/test_value/some301/path", + ref: "/apps/test/test_value/some301/path", value: "some other301 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some301/path2", + ref: "/apps/test/test_value/some301/path2", value: 5 }, { type: 'DEC_VALUE', - ref: "test/test_value/some301/path3", + ref: "/apps/test/test_value/some301/path3", value: 5 }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other301/path", + ref: "/apps/test/test_function/other301/path", value: { ".function": { "fid": { @@ -2374,14 +2467,14 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other301/path", + ref: "/apps/test/test_rule/other301/path", value: { ".write": "some other301 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other301/path", + ref: "/apps/test/test_owner/other301/path", value: { ".owner": { "owners": { @@ -2428,7 +2521,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -2459,7 +2552,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, nonce: -1 }; @@ -2494,7 +2587,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, nonce: -1 }; @@ -2533,7 +2626,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, nonce: -1 }; @@ -2606,7 +2699,7 @@ describe('Blockchain 
Node', () => { const txBody = { operation: { type: 'SET_VALUE', - ref: `test/test_long_text`, + ref: `/apps/test/test_long_text`, value: longText }, timestamp: Date.now(), @@ -2649,7 +2742,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -2687,7 +2780,7 @@ describe('Blockchain Node', () => { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), // missing nonce @@ -2724,14 +2817,14 @@ describe('Blockchain Node', () => { }) describe('Function triggering', () => { - const setFunctionWithOwnerOnlyPath = '/test/test_function_triggering/owner_only'; - const saveLastTxAllowedPath = '/test/test_function_triggering/allowed_path_with_fid'; - const saveLastTxNotAllowedPath = '/test/test_function_triggering/not_allowed_path_with_fid'; - const saveLastTxAllowedPathWithFids = '/test/test_function_triggering/allowed_path_with_fids'; - const saveLastTxNotAllowedPathWithFids = '/test/test_function_triggering/not_allowed_path_with_fids'; - const setOwnerConfigAllowedPath = '/test/test_function_triggering/set_owner_allowed_path_with_fid'; - const setOwnerConfigNotAllowedPath = '/test/test_function_triggering/set_owner_not_allowed_path_with_fid'; - const triggerRestFunctionPath = '/test/test_function_triggering/rest_function_path'; + const setFunctionWithOwnerOnlyPath = '/apps/test/test_function_triggering/owner_only'; + const saveLastTxAllowedPath = '/apps/test/test_function_triggering/allowed_path_with_fid'; + const saveLastTxNotAllowedPath = '/apps/test/test_function_triggering/not_allowed_path_with_fid'; + const saveLastTxAllowedPathWithFids = '/apps/test/test_function_triggering/allowed_path_with_fids'; + const saveLastTxNotAllowedPathWithFids = '/apps/test/test_function_triggering/not_allowed_path_with_fids'; + const setOwnerConfigAllowedPath = '/apps/test/test_function_triggering/set_owner_allowed_path_with_fid'; + const setOwnerConfigNotAllowedPath = '/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid'; + const triggerRestFunctionPath = '/apps/test/test_function_triggering/rest_function_path'; let transferFrom; // = server1 let transferTo; // = server2 @@ -2790,7 +2883,7 @@ describe('Blockchain Node', () => { op_list: [ { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fid/value', value: { ".function": { "_saveLastTx": { @@ -2802,21 +2895,21 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fid/value', value: { ".write": true, } }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fid/.last_tx/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fid/.last_tx/value', value: { ".write": "auth.fid === '_saveLastTx'", } }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/not_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fid/value', value: { ".function": { "_saveLastTx": { @@ -2828,21 +2921,21 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fid/value', + ref: 
'/apps/test/test_function_triggering/not_allowed_path_with_fid/value', value: { ".write": true, } }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value', value: { ".write": "auth.fid === 'some function id'", } }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fids/value', value: { ".function": { "_saveLastTx": { @@ -2854,21 +2947,21 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fids/value', value: { ".write": true, } }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fids/.last_tx/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fids/.last_tx/value', value: { ".write": "util.includes(auth.fids, '_saveLastTx')", } }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/not_allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fids/value', value: { ".function": { "_saveLastTx": { @@ -2880,21 +2973,21 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fids/value', value: { ".write": true, } }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value', value: { ".write": "util.includes(auth.fids, 'some function id')", } }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/set_owner_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/set_owner_allowed_path_with_fid/value', value: { ".function": { "_setOwnerConfig": { @@ -2906,7 +2999,7 @@ describe('Blockchain Node', () => { }, { type: 'SET_OWNER', - ref: '/test/test_function_triggering/set_owner_allowed_path_with_fid', + ref: '/apps/test/test_function_triggering/set_owner_allowed_path_with_fid', value: { ".owner": { "owners": { @@ -2928,7 +3021,7 @@ describe('Blockchain Node', () => { }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value', value: { ".function": { "_setOwnerConfig": { @@ -2940,7 +3033,7 @@ describe('Blockchain Node', () => { }, { type: 'SET_OWNER', - ref: '/test/test_function_triggering/set_owner_not_allowed_path_with_fid', + ref: '/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid', value: { ".owner": { "owners": { @@ -2962,7 +3055,7 @@ describe('Blockchain Node', () => { }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/rest_function_path', + ref: '/apps/test/test_function_triggering/rest_function_path', value: { ".function": { "0x11111": { @@ -2976,7 +3069,7 @@ describe('Blockchain Node', () => { }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/rest_function_path', + ref: '/apps/test/test_function_triggering/rest_function_path', value: { ".write": true, } @@ -2997,72 +3090,72 @@ describe('Blockchain Node', () => { op_list: [ { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/allowed_path_with_fid/value', + ref: 
'/apps/test/test_function_triggering/allowed_path_with_fid/value', value: null }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fid/value', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/not_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fid/value', value: null }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fid/value', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fids/value', value: null }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/allowed_path_with_fids/value', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/not_allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fids/value', value: null }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/not_allowed_path_with_fids/value', + ref: '/apps/test/test_function_triggering/not_allowed_path_with_fids/value', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/set_owner_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/set_owner_allowed_path_with_fid/value', value: null }, { type: 'SET_OWNER', - ref: '/test/test_function_triggering/set_owner_allowed_path_with_fid', + ref: '/apps/test/test_function_triggering/set_owner_allowed_path_with_fid', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value', + ref: '/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value', value: null }, { type: 'SET_OWNER', - ref: '/test/test_function_triggering/set_owner_not_allowed_path_with_fid', + ref: '/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function_triggering/rest_function_path', + ref: '/apps/test/test_function_triggering/rest_function_path', value: null, }, { type: 'SET_RULE', - ref: '/test/test_function_triggering/rest_function_path', + ref: '/apps/test/test_function_triggering/rest_function_path', value: null, }, ], @@ -3166,10 +3259,10 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value", + "path": "/apps/test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value", "result": { "code": 103, - "error_message": "No .write permission on: /test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value", + "error_message": "No .write permission on: /apps/test/test_function_triggering/not_allowed_path_with_fid/.last_tx/value", "gas_amount": 0, } } @@ -3178,8 +3271,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3206,7 +3301,7 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/allowed_path_with_fid/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path_with_fid/.last_tx/value", "result": { "code": 0, 
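+                // Gas for this function-triggered write is charged to the 'test' app
+                // (gas_amount_total.app.test below); the service portion stays 0.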
"gas_amount": 1, @@ -3217,8 +3312,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 2 + "app": { + "test": 2 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3250,10 +3347,10 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value", + "path": "/apps/test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value", "result": { "code": 103, - "error_message": "No .write permission on: /test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value", + "error_message": "No .write permission on: /apps/test/test_function_triggering/not_allowed_path_with_fids/.last_tx/value", "gas_amount": 0, } } @@ -3262,8 +3359,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3290,7 +3389,7 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/allowed_path_with_fids/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path_with_fids/.last_tx/value", "result": { "code": 0, "gas_amount": 1, @@ -3301,8 +3400,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 2 + "app": { + "test": 2 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3334,10 +3435,10 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value", + "path": "/apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value", "result": { "code": 603, - "error_message": "No write_owner or branch_owner permission on: /test/test_function_triggering/set_owner_not_allowed_path_with_fid/value", + "error_message": "No write_owner or branch_owner permission on: /apps/test/test_function_triggering/set_owner_not_allowed_path_with_fid/value", "gas_amount": 0, } } @@ -3346,8 +3447,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 1 + "app": { + "test": 1 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3374,7 +3477,7 @@ describe('Blockchain Node', () => { "gas_amount": 0, "op_results": [ { - "path": "/test/test_function_triggering/set_owner_allowed_path_with_fid/value", + "path": "/apps/test/test_function_triggering/set_owner_allowed_path_with_fid/value", "result": { "code": 0, "gas_amount": 1, @@ -3385,8 +3488,10 @@ describe('Blockchain Node', () => { }, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 2 + "app": { + "test": 2 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -3406,8 +3511,8 @@ describe('Blockchain Node', () => { describe('Function execution', () => { describe('/set_value', () => { it("when successful with function triggering", async () => { - const valuePath = '/test/test_function_triggering/allowed_path1/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path1/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path1/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path1/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -3477,8 +3582,8 @@ describe('Blockchain Node', () => { }); it("when failed with function triggering", async () => { - const valuePath = 
'/test/test_function_triggering/allowed_path2/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path2/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path2/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path2/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000 + 1; let valueBefore = null; @@ -3553,8 +3658,8 @@ describe('Blockchain Node', () => { describe('/set', () => { it("when successful with function triggering", async () => { - const valuePath = '/test/test_function_triggering/allowed_path101/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path101/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path101/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path101/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -3615,7 +3720,7 @@ describe('Blockchain Node', () => { }, { // Default type: SET_VALUE - ref: "test/nested/far/down101", + ref: "/apps/test/nested/far/down101", value: { "new": 12345 }, @@ -3635,8 +3740,8 @@ describe('Blockchain Node', () => { }); it("when failed with function triggering", async () => { - const valuePath = '/test/test_function_triggering/allowed_path102/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path102/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path102/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path102/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000 + 1; let valueBefore = null; @@ -3701,7 +3806,7 @@ describe('Blockchain Node', () => { }, { // Default type: SET_VALUE - ref: "test/nested/far/down102", + ref: "/apps/test/nested/far/down102", value: { "new": 12345 }, @@ -3837,25 +3942,26 @@ describe('Blockchain Node', () => { describe('Gas fee', () => { before(async () => { - const appStakingPath = - `/staking/test_service_gas_fee/${serviceAdmin}/0/stake/${Date.now()}/value`; - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - ref: appStakingPath, - value: 1 - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.error(`Failed to check finalization of tx.`); - } - const manageAppPath = '/manage_app/test_service_gas_fee/create/1'; - const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - ref: manageAppPath, - value: { - admin: { [serviceAdmin]: true }, - }, - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - console.error(`Failed to check finalization of tx.`); - } + await setUpApp('test_service_gas_fee', serverList, { admin: { [serviceAdmin]: true } }); + // const appStakingPath = + // `/staking/test_service_gas_fee/${serviceAdmin}/0/stake/${Date.now()}/value`; + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { + // ref: appStakingPath, + // value: 1 + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.error(`Failed to check finalization of tx.`); + // } + // const manageAppPath = '/manage_app/test_service_gas_fee/create/1'; + // const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { + // ref: manageAppPath, + // value: { + // admin: { 
[serviceAdmin]: true }, + // }, + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { + // console.error(`Failed to check finalization of tx.`); + // } }); it("native function (_transfer) with individual account registration", () => { @@ -4137,8 +4243,10 @@ describe('Blockchain Node', () => { "code": 0, "gas_amount": 1, "gas_amount_total": { - "app": {}, - "service": 11 + "app": { + "test": 11 + }, + "service": 0 }, "gas_cost_total": 0, }); @@ -4370,28 +4478,34 @@ describe('Blockchain Node', () => { describe('Staking: _stake, _unstake', () => { before(async () => { - const appStakingPath = `/staking/test_service_staking/${serviceAdmin}/0/stake/${Date.now()}/value` - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - ref: appStakingPath, - value: 1 - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.error(`Failed to check finalization of tx.`); - } - const manageAppPath = '/manage_app/test_service_staking/create/1' - const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - ref: manageAppPath, - value: { - admin: { [serviceAdmin]: true }, - service: { - staking: { lockup_duration: 1000 } - } + // const appStakingPath = `/staking/test_service_staking/${serviceAdmin}/0/stake/${Date.now()}/value` + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { + // ref: appStakingPath, + // value: 1 + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.error(`Failed to check finalization of tx.`); + // } + // const manageAppPath = '/manage_app/test_service_staking/create/1' + // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { + // ref: manageAppPath, + // value: { + // admin: { [serviceAdmin]: true }, + // service: { + // staking: { lockup_duration: 1000 } + // } + // } + // }}).body.toString('utf-8')); + // expect(body.code).to.equals(0); + // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { + // console.error(`Failed to check finalization of tx.`); + // } + await setUpApp('test_service_staking', serverList, { + admin: { [serviceAdmin]: true }, + service: { + staking: { lockup_duration: 1000 } } - }}).body.toString('utf-8')); - expect(body.code).to.equals(0); - if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - console.error(`Failed to check finalization of tx.`); - } + }); }) describe('Stake', () => { @@ -4798,25 +4912,27 @@ describe('Blockchain Node', () => { describe('Payments: _pay, _claim', () => { before(async () => { - const appStakingPath = `/staking/test_service_payment/${serviceAdmin}/0/stake/${Date.now()}/value` - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - ref: appStakingPath, - value: 1 - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.error(`Failed to check finalization of tx.`); - } - const manageAppPath = '/manage_app/test_service_payment/create/1' - const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - ref: manageAppPath, - value: { - admin: { [serviceAdmin]: true }, - }, - }}).body.toString('utf-8')); - expect(body.code).to.equals(0); - if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - console.error(`Failed to check 
finalization of tx.`); - } + // const appStakingPath = `/staking/test_service_payment/${serviceAdmin}/0/stake/${Date.now()}/value` + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { + // ref: appStakingPath, + // value: 1 + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.error(`Failed to check finalization of tx.`); + // } + // const manageAppPath = '/manage_app/test_service_payment/create/1' + // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { + // ref: manageAppPath, + // value: { + // admin: { [serviceAdmin]: true }, + // }, + // }}).body.toString('utf-8')); + // expect(body.code).to.equals(0); + // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { + // console.error(`Failed to check finalization of tx.`); + // } + + await setUpApp('test_service_payment', serverList, { admin: { [serviceAdmin]: true } }); }); it('payments: non-app admin cannot write pay records', () => { @@ -5212,25 +5328,26 @@ describe('Blockchain Node', () => { describe('Escrow: _hold, _release', () => { before(async () => { - const appStakingPath = `/staking/test_service_escrow/${serviceAdmin}/0/stake/${Date.now()}/value` - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - ref: appStakingPath, - value: 1 - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.error(`Failed to check finalization of tx.`); - } - const manageAppPath = '/manage_app/test_service_escrow/create/1' - const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - ref: manageAppPath, - value: { - admin: { [serviceAdmin]: true }, - }, - }}).body.toString('utf-8')); - expect(body.code).to.equals(0); - if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - console.error(`Failed to check finalization of tx.`); - } + // const appStakingPath = `/staking/test_service_escrow/${serviceAdmin}/0/stake/${Date.now()}/value` + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { + // ref: appStakingPath, + // value: 1 + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.error(`Failed to check finalization of tx.`); + // } + // const manageAppPath = '/manage_app/test_service_escrow/create/1' + // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { + // ref: manageAppPath, + // value: { + // admin: { [serviceAdmin]: true }, + // }, + // }}).body.toString('utf-8')); + // expect(body.code).to.equals(0); + // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { + // console.error(`Failed to check finalization of tx.`); + // } + await setUpApp('test_service_escrow', serverList, { admin: { [serviceAdmin]: true } }); }); describe('Escrow: individual -> individual', () => { @@ -5841,43 +5958,47 @@ describe('Blockchain Node', () => { userBalancePathA = `/get_value?ref=/accounts/${billingUserA}/balance`; userBalancePathB = `/get_value?ref=/accounts/${billingUserB}/balance`; - const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - ref: `/staking/test_billing/${serviceAdmin}/0/stake/${Date.now()}/value`, - value: 1 - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.error(`Failed to 
check finalization of app staking tx.`); - } - - const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - ref: '/manage_app/test_billing/create/0', - value: { - admin: { + // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { + // ref: `/staking/test_billing/${serviceAdmin}/0/stake/${Date.now()}/value`, + // value: 1 + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + // console.error(`Failed to check finalization of app staking tx.`); + // } + + // const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { + // ref: '/manage_app/test_billing/create/0', + // value: { + // admin: , + // billing: + // }, + // nonce: -1, + // timestamp: Date.now(), + // }}).body.toString('utf-8')).result; + // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { + // console.error(`Failed to check finalization of create app tx.`); + // } + + const adminConfig = { + [serviceAdmin]: true, + [billingUserA]: true, + [billingUserB]: true + }; + const billingConfig = { + A: { + users: { [serviceAdmin]: true, - [billingUserA]: true, - [billingUserB]: true - }, - billing: { - A: { - users: { - [serviceAdmin]: true, - [billingUserA]: true - } - }, - B: { - users: { - [serviceAdmin]: true, - [billingUserB]: true - } - } + [billingUserA]: true } }, - nonce: -1, - timestamp: Date.now(), - }}).body.toString('utf-8')).result; - if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - console.error(`Failed to check finalization of create app tx.`); - } + B: { + users: { + [serviceAdmin]: true, + [billingUserB]: true + } + } + }; + await setUpApp('test_billing', serverList, { admin: adminConfig, billing: billingConfig }); const server4Addr = parseOrLog(syncRequest('GET', server4 + '/get_address').body.toString('utf-8')).result; diff --git a/integration/sharding.test.js b/integration/sharding.test.js index 279052182..a48f777a0 100644 --- a/integration/sharding.test.js +++ b/integration/sharding.test.js @@ -32,6 +32,7 @@ const { waitForNewBlocks, waitUntilNodeSyncs, waitUntilTxFinalized, + setUpApp, } = require('../unittest/test-util'); const ENV_VARIABLES = [ @@ -86,6 +87,7 @@ const server2 = 'http://localhost:' + String(9091 + Number(ENV_VARIABLES[3].ACCO const server3 = 'http://localhost:' + String(9091 + Number(ENV_VARIABLES[4].ACCOUNT_INDEX)) const server4 = 'http://localhost:' + String(9091 + Number(ENV_VARIABLES[5].ACCOUNT_INDEX)) const shardServerList = [ server1, server2, server3, server4 ]; +const account = ainUtil.createAccount(); function startServer(application, serverName, envVars, stdioInherit = false) { const options = { @@ -116,24 +118,24 @@ async function waitUntilShardReporterStarts() { } async function setUp() { - let res = parseOrLog(syncRequest('POST', server2 + '/set', { + const res = parseOrLog(syncRequest('POST', server2 + '/set', { json: { op_list: [ { type: 'SET_VALUE', - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: 100 }, { type: 'SET_RULE', - ref: '/test/test_rule/some/path', + ref: '/apps/test/test_rule/some/path', value: { ".write": "auth.addr === 'abcd'" } }, { type: 'SET_FUNCTION', - ref: '/test/test_function/some/path', + ref: '/apps/test/test_function/some/path', value: { ".function": { "fid": { @@ -147,7 +149,7 @@ async function setUp() { }, { type: 'SET_OWNER', - ref: '/test/test_owner/some/path', + ref: '/apps/test/test_owner/some/path', value: { ".owner": { 
"owners": { @@ -176,22 +178,22 @@ async function cleanUp() { op_list: [ { type: 'SET_VALUE', - ref: 'test/test_value/some/path', + ref: '/apps/test/test_value/some/path', value: null }, { type: 'SET_RULE', - ref: '/test/test_rule/some/path', + ref: '/apps/test/test_rule/some/path', value: null }, { type: 'SET_FUNCTION', - ref: '/test/test_function/some/path', + ref: '/apps/test/test_function/some/path', value: null }, { type: 'SET_OWNER', - ref: '/test/test_owner/some/path', + ref: '/apps/test/test_owner/some/path', value: null }, ], @@ -247,28 +249,29 @@ describe('Sharding', async () => { ).result; await waitUntilTxFinalized(parentServerList, shardReportRes.tx_hash); // Create app at the parent chain for the shard - const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - json: { - ref: `/staking/afan/${parentServerAddr}/0/stake/${Date.now()}/value`, - value: 1 - } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { - console.log(`Failed to check finalization of app staking tx.`); - } - const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - json: { - ref: `/manage_app/afan/create/${Date.now()}`, - value: { - admin: { [shardOwnerAddr]: true } - } - } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { - console.log(`Failed to check finalization of create app tx.`); - } + // const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { + // json: { + // ref: `/staking/afan/${parentServerAddr}/0/stake/${Date.now()}/value`, + // value: 1 + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); + // if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { + // console.log(`Failed to check finalization of app staking tx.`); + // } + // const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { + // json: { + // ref: `/manage_app/afan/create/${Date.now()}`, + // value: { + // admin: { [shardOwnerAddr]: true } + // } + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); + // if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { + // console.log(`Failed to check finalization of create app tx.`); + // } + await setUpApp('afan', parentServerList, { admin: { [shardOwnerAddr]: true } }); tracker_proc = startServer(TRACKER_SERVER, 'tracker server', ENV_VARIABLES[1], true); await CommonUtil.sleep(2000); @@ -502,6 +505,7 @@ describe('Sharding', async () => { describe('API Tests', () => { describe('Get API', () => { before(async () => { + await setUpApp('test', shardServerList, { admin: { [account.address]: true } }); await setUp(); }) @@ -512,7 +516,7 @@ describe('Sharding', async () => { describe('/get_value', () => { it('/get_value with is_global = false', () => { const body = parseOrLog( - syncRequest('GET', server1 + '/get_value?ref=/test/test_value/some/path') + syncRequest('GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, 100); @@ -520,7 +524,7 @@ describe('Sharding', async () => { it('/get_value with is_global = 
false (explicit)', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=/test/test_value/some/path&is_global=false') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path&is_global=false') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, 100); @@ -528,7 +532,7 @@ describe('Sharding', async () => { it('/get_value with is_global = true', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=/apps/afan/test/test_value/some/path&is_global=true') + 'GET', server1 + '/get_value?ref=/apps/afan/apps/test/test_value/some/path&is_global=true') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, 100); @@ -538,7 +542,7 @@ describe('Sharding', async () => { describe('/get_function', () => { it('/get_function with is_global = false', () => { const body = parseOrLog( - syncRequest('GET', server1 + '/get_function?ref=/test/test_function/some/path') + syncRequest('GET', server1 + '/get_function?ref=/apps/test/test_function/some/path') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { @@ -555,7 +559,7 @@ describe('Sharding', async () => { it('/get_function with is_global = true', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_function?ref=/apps/afan/test/test_function/some/path&is_global=true') + 'GET', server1 + '/get_function?ref=/apps/afan/apps/test/test_function/some/path&is_global=true') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { @@ -574,7 +578,7 @@ describe('Sharding', async () => { describe('/get_rule', () => { it('/get_rule with is_global = false', () => { const body = parseOrLog( - syncRequest('GET', server1 + '/get_rule?ref=/test/test_rule/some/path') + syncRequest('GET', server1 + '/get_rule?ref=/apps/test/test_rule/some/path') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { '.write': 'auth.addr === \'abcd\'' }); @@ -582,7 +586,7 @@ describe('Sharding', async () => { it('/get_rule with is_global = true', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_rule?ref=/apps/afan/test/test_rule/some/path&is_global=true') + 'GET', server1 + '/get_rule?ref=/apps/afan/apps/test/test_rule/some/path&is_global=true') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { '.write': 'auth.addr === \'abcd\'' }); @@ -592,7 +596,7 @@ describe('Sharding', async () => { describe('/get_owner', () => { it('/get_owner with is_global = false', () => { const body = parseOrLog( - syncRequest('GET', server1 + '/get_owner?ref=/test/test_owner/some/path') + syncRequest('GET', server1 + '/get_owner?ref=/apps/test/test_owner/some/path') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { @@ -611,7 +615,7 @@ describe('Sharding', async () => { it('/get_owner with is_global = true', () => { const body = parseOrLog(syncRequest( - 'GET', server1 + '/get_owner?ref=/apps/afan/test/test_owner/some/path&is_global=true') + 'GET', server1 + '/get_owner?ref=/apps/afan/apps/test/test_owner/some/path&is_global=true') .body.toString('utf-8')); assert.deepEqual(body.code, 0); assert.deepEqual(body.result, { @@ -631,13 +635,13 @@ describe('Sharding', async () => { describe('/match_function', () => { it('/match_function with is_global = false', () => { - const ref = "/test/test_function/some/path"; + const ref = "/apps/test/test_function/some/path"; const body = 
parseOrLog(syncRequest('GET', `${server1}/match_function?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + "target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -649,21 +653,21 @@ describe('Sharding', async () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [] }}); }) it('/match_function with is_global = true', () => { - const ref = "/apps/afan/test/test_function/some/path"; + const ref = "/apps/afan/apps/test/test_function/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_function?ref=${ref}&is_global=true`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/apps/afan/test/test_function/some/path", - "ref_path": "/apps/afan/test/test_function/some/path", + "target_path": "/apps/afan/apps/test/test_function/some/path", + "ref_path": "/apps/afan/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -675,7 +679,7 @@ describe('Sharding', async () => { "service_name": "https://ainetwork.ai" } }, - "path": "/apps/afan/test/test_function/some/path" + "path": "/apps/afan/apps/test/test_function/some/path" }, "subtree_configs": [] }}); @@ -684,37 +688,37 @@ describe('Sharding', async () => { describe('/match_rule', () => { it('/match_rule with is_global = false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_rule?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [] }}); }) it('/match_rule with is_global = true', () => { - const ref = "/apps/afan/test/test_rule/some/path"; + const ref = "/apps/afan/apps/test/test_rule/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_rule?ref=${ref}&is_global=true`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/apps/afan/test/test_rule/some/path", - "ref_path": "/apps/afan/test/test_rule/some/path", + "target_path": "/apps/afan/apps/test/test_rule/some/path", + "ref_path": "/apps/afan/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/apps/afan/test/test_rule/some/path" + "path": "/apps/afan/apps/test/test_rule/some/path" }, "subtree_configs": [] }}); @@ -723,12 +727,12 @@ describe('Sharding', async () => { describe('/match_owner', () => { it('/match_owner with is_global = false', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_owner?ref=${ref}`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, 
"matched_config": { "config": { @@ -741,19 +745,19 @@ describe('Sharding', async () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }}); }) it('/match_owner with is_global = true', () => { - const ref = "/apps/afan/test/test_owner/some/path"; + const ref = "/apps/afan/apps/test/test_owner/some/path"; const body = parseOrLog(syncRequest('GET', `${server1}/match_owner?ref=${ref}&is_global=true`) .body.toString('utf-8')); assert.deepEqual(body, {code: 0, result: { "matched_path": { - "target_path": "/apps/afan/test/test_owner/some/path" + "target_path": "/apps/afan/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -766,7 +770,7 @@ describe('Sharding', async () => { } } }, - "path": "/apps/afan/test/test_owner/some/path" + "path": "/apps/afan/apps/test/test_owner/some/path" } }}); }) @@ -774,7 +778,7 @@ describe('Sharding', async () => { describe('/eval_rule', () => { it('/eval_rule with is_global = false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -784,7 +788,7 @@ describe('Sharding', async () => { }) it('/eval_rule with is_global = true', () => { - const ref = "/apps/afan/test/test_rule/some/path"; + const ref = "/apps/afan/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const is_global = true; @@ -797,7 +801,7 @@ describe('Sharding', async () => { describe('/eval_owner', () => { it('/eval_owner with is_global = false', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const request = { ref, permission, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -810,7 +814,7 @@ describe('Sharding', async () => { }) it('/eval_owner with is_global = true', () => { - const ref = "/apps/afan/test/test_owner/some/path"; + const ref = "/apps/afan/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const is_global = true; @@ -831,29 +835,29 @@ describe('Sharding', async () => { op_list: [ { type: "GET_VALUE", - ref: "/test/test_value/some/path", + ref: "/apps/test/test_value/some/path", }, { type: 'GET_FUNCTION', - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", }, { type: 'GET_RULE', - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", }, { type: 'GET_OWNER', - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", }, { type: 'EVAL_RULE', - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", value: "value", address: "abcd" }, { type: 'EVAL_OWNER', - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", permission: "write_owner", address: "abcd" } @@ -901,34 +905,34 @@ describe('Sharding', async () => { op_list: [ { type: "GET_VALUE", - ref: "/apps/afan/test/test_value/some/path", + ref: "/apps/afan/apps/test/test_value/some/path", is_global: true, }, { type: 'GET_FUNCTION', - ref: "/apps/afan/test/test_function/some/path", + ref: "/apps/afan/apps/test/test_function/some/path", is_global: true, }, { type: 'GET_RULE', - ref: "/apps/afan/test/test_rule/some/path", + ref: "/apps/afan/apps/test/test_rule/some/path", is_global: true, }, { type: 'GET_OWNER', - ref: "/apps/afan/test/test_owner/some/path", + ref: "/apps/afan/apps/test/test_owner/some/path", 
is_global: true, }, { type: 'EVAL_RULE', - ref: "/apps/afan/test/test_rule/some/path", + ref: "/apps/afan/apps/test/test_rule/some/path", value: "value", address: "abcd", is_global: true, }, { type: 'EVAL_OWNER', - ref: "/apps/afan/test/test_owner/some/path", + ref: "/apps/afan/apps/test/test_owner/some/path", permission: "write_owner", address: "abcd", is_global: true, @@ -980,7 +984,7 @@ describe('Sharding', async () => { return jsonRpcClient.request('ain_get', { protoVer: CURRENT_PROTOCOL_VERSION, type: 'GET_VALUE', - ref: "/test/test_value/some/path" + ref: "/apps/test/test_value/some/path" }) .then(res => { expect(res.result.result).to.equal(expected); @@ -993,7 +997,7 @@ describe('Sharding', async () => { return jsonRpcClient.request('ain_get', { protoVer: CURRENT_PROTOCOL_VERSION, type: 'GET_VALUE', - ref: "/test/test_value/some/path", + ref: "/apps/test/test_value/some/path", is_global: false, }) .then(res => { @@ -1007,7 +1011,7 @@ describe('Sharding', async () => { return jsonRpcClient.request('ain_get', { protoVer: CURRENT_PROTOCOL_VERSION, type: 'GET_VALUE', - ref: "/apps/afan/test/test_value/some/path", + ref: "/apps/afan/apps/test/test_value/some/path", is_global: true, }) .then(res => { @@ -1018,14 +1022,14 @@ describe('Sharding', async () => { describe('ain_matchFunction', () => { it('ain_matchFunction with is_global = false', () => { - const ref = "/test/test_function/some/path"; + const ref = "/apps/test/test_function/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchFunction', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + "target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -1037,7 +1041,7 @@ describe('Sharding', async () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [] }); @@ -1045,14 +1049,14 @@ describe('Sharding', async () => { }) it('ain_matchFunction with is_global = true', () => { - const ref = "/apps/afan/test/test_function/some/path"; + const ref = "/apps/afan/apps/test/test_function/some/path"; const request = { ref, is_global: true, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchFunction', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/apps/afan/test/test_function/some/path", - "ref_path": "/apps/afan/test/test_function/some/path", + "target_path": "/apps/afan/apps/test/test_function/some/path", + "ref_path": "/apps/afan/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -1064,7 +1068,7 @@ describe('Sharding', async () => { "service_name": "https://ainetwork.ai" } }, - "path": "/apps/afan/test/test_function/some/path" + "path": "/apps/afan/apps/test/test_function/some/path" }, "subtree_configs": [] }); @@ -1074,19 +1078,19 @@ describe('Sharding', async () => { describe('ain_matchRule', () => { it('ain_matchRule with is_global = false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchRule', request) .then(res => { 
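+            // The local (is_global = false) ref matches the rule directly; the is_global = true
+            // case below reaches the same rule via the shard's sharding path prefix (/apps/afan).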
assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [] }); @@ -1094,19 +1098,19 @@ describe('Sharding', async () => { }) it('ain_matchRule with is_global = true', () => { - const ref = "/apps/afan/test/test_rule/some/path"; + const ref = "/apps/afan/apps/test/test_rule/some/path"; const request = { ref, is_global: true, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchRule', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/apps/afan/test/test_rule/some/path", - "ref_path": "/apps/afan/test/test_rule/some/path", + "target_path": "/apps/afan/apps/test/test_rule/some/path", + "ref_path": "/apps/afan/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/apps/afan/test/test_rule/some/path" + "path": "/apps/afan/apps/test/test_rule/some/path" }, "subtree_configs": [] }); @@ -1116,13 +1120,13 @@ describe('Sharding', async () => { describe('ain_matchOwner', () => { it('ain_matchOwner with is_global = false', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const request = { ref, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchOwner', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -1135,20 +1139,20 @@ describe('Sharding', async () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }); }) }) it('ain_matchOwner with is_global = true', () => { - const ref = "/apps/afan/test/test_owner/some/path"; + const ref = "/apps/afan/apps/test/test_owner/some/path"; const request = { ref, is_global: true, protoVer: CURRENT_PROTOCOL_VERSION }; return jayson.client.http(server1 + '/json-rpc').request('ain_matchOwner', request) .then(res => { assert.deepEqual(res.result.result, { "matched_path": { - "target_path": "/apps/afan/test/test_owner/some/path" + "target_path": "/apps/afan/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -1161,7 +1165,7 @@ describe('Sharding', async () => { } } }, - "path": "/apps/afan/test/test_owner/some/path" + "path": "/apps/afan/apps/test/test_owner/some/path" } }); }) @@ -1170,7 +1174,7 @@ describe('Sharding', async () => { describe('ain_evalRule', () => { it('ain_evalRule with is_global = false', () => { - const ref = "/test/test_rule/some/path"; + const ref = "/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const request = { ref, value, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -1181,7 +1185,7 @@ describe('Sharding', async () => { }) it('ain_evalRule with is_global = true', () => { - const ref = "/apps/afan/test/test_rule/some/path"; + const ref = "/apps/afan/apps/test/test_rule/some/path"; const value = "value"; const address = "abcd"; const request = @@ -1195,7 +1199,7 @@ describe('Sharding', async () => { describe('ain_evalOwner', () => { it('ain_evalOwner with is_global = 
false', () => { - const ref = "/test/test_owner/some/path"; + const ref = "/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const request = { ref, permission, address, protoVer: CURRENT_PROTOCOL_VERSION }; @@ -1206,7 +1210,7 @@ describe('Sharding', async () => { }) it('ain_evalOwner with is_global = true', () => { - const ref = "/apps/afan/test/test_owner/some/path"; + const ref = "/apps/afan/apps/test/test_owner/some/path"; const address = "abcd"; const permission = "write_owner"; const request = @@ -1232,11 +1236,11 @@ describe('Sharding', async () => { it('/set_value with is_global = false', () => { // Check the original value. const resultBefore = parseOrLog(syncRequest( - 'GET', server1 + '/get_value?ref=test/test_value/some/path') + 'GET', server1 + '/get_value?ref=/apps/test/test_value/some/path') .body.toString('utf-8')).result; assert.deepEqual(resultBefore, 100); - const request = {ref: 'test/test_value/some/path', value: "some value", nonce: -1}; + const request = {ref: '/apps/test/test_value/some/path', value: "some value", nonce: -1}; const body = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -1244,7 +1248,7 @@ describe('Sharding', async () => { }) it('/set_value with is_global = false (explicit)', () => { - const request = {ref: 'test/test_value/some/path', value: "some value", is_global: false, nonce: -1}; + const request = {ref: '/apps/test/test_value/some/path', value: "some value", is_global: false, nonce: -1}; const body = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -1253,7 +1257,7 @@ describe('Sharding', async () => { it('/set_value with is_global = true', () => { const request = { - ref: 'apps/afan/test/test_value/some/path', value: "some value", is_global: true, nonce: -1 + ref: 'apps/afan/apps/test/test_value/some/path', value: "some value", is_global: true, nonce: -1 }; const body = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: request}) .body.toString('utf-8')); @@ -1264,7 +1268,7 @@ describe('Sharding', async () => { describe('/inc_value', () => { it('/inc_value with is_global = false', () => { - const request = {ref: 'test/test_value/some/path', value: 10, nonce: -1}; + const request = {ref: '/apps/test/test_value/some/path', value: 10, nonce: -1}; const body = parseOrLog(syncRequest('POST', server1 + '/inc_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -1273,7 +1277,7 @@ describe('Sharding', async () => { it('/inc_value with is_global = true', () => { const request = { - ref: 'apps/afan/test/test_value/some/path', value: 10, is_global: true, nonce: -1 + ref: 'apps/afan/apps/test/test_value/some/path', value: 10, is_global: true, nonce: -1 }; const body = parseOrLog(syncRequest('POST', server1 + '/inc_value', {json: request}) .body.toString('utf-8')); @@ -1284,7 +1288,7 @@ describe('Sharding', async () => { describe('/dec_value', () => { it('/dec_value with is_global = false', () => { - const request = {ref: 'test/test_value/some/path', value: 10, nonce: -1}; + const request = {ref: '/apps/test/test_value/some/path', value: 10, nonce: -1}; const body = parseOrLog(syncRequest('POST', server1 + '/dec_value', {json: request}) .body.toString('utf-8')); assert.deepEqual(_.get(body, 'result.result.code'), 0); @@ -1293,7 +1297,7 
@@ describe('Sharding', async () => { it('/dec_value with is_global = true', () => { const request = { - ref: 'apps/afan/test/test_value/some/path', value: 10, is_global: true, nonce: -1 + ref: 'apps/afan/apps/test/test_value/some/path', value: 10, is_global: true, nonce: -1 }; const body = parseOrLog(syncRequest('POST', server1 + '/dec_value', {json: request}) .body.toString('utf-8')); @@ -1305,7 +1309,7 @@ describe('Sharding', async () => { describe('/set_function', () => { it('/set_function with is_global = false', () => { const request = { - ref: "test/test_function/other/path", + ref: "/apps/test/test_function/other/path", value: { ".function": { "fid": { @@ -1326,7 +1330,7 @@ describe('Sharding', async () => { it('/set_function with is_global = true', () => { const request = { - ref: "apps/afan/test/test_function/other/path", + ref: "apps/afan/apps/test/test_function/other/path", value: { ".function": { "fid": { @@ -1350,7 +1354,7 @@ describe('Sharding', async () => { describe('/set_rule', () => { it('/set_rule with is_global = false', () => { const request = { - ref: "test/test_rule/other/path", + ref: "/apps/test/test_rule/other/path", value: { ".write": "some other rule config" }, @@ -1364,7 +1368,7 @@ describe('Sharding', async () => { it('/set_rule with is_global = true', () => { const request = { - ref: "apps/afan/test/test_rule/other/path", + ref: "apps/afan/apps/test/test_rule/other/path", value: { ".write": "some other rule config" }, @@ -1381,7 +1385,7 @@ describe('Sharding', async () => { describe('/set_owner', () => { it('/set_owner with is_global = false', () => { const request = { - ref: "test/test_owner/other/path", + ref: "/apps/test/test_owner/other/path", value: { ".owner": { "owners": { @@ -1404,7 +1408,7 @@ describe('Sharding', async () => { it('/set_owner with is_global = true', () => { const request = { - ref: "apps/afan/test/test_owner/other2/path", + ref: "apps/afan/apps/test/test_owner/other2/path", value: { ".owner": { "owners": { @@ -1433,22 +1437,22 @@ describe('Sharding', async () => { op_list: [ { // Default type: SET_VALUE - ref: "test/test_value/other3/path", + ref: "/apps/test/test_value/other3/path", value: "some other3 value", }, { type: 'INC_VALUE', - ref: "test/test_value/some/path", + ref: "/apps/test/test_value/some/path", value: 10 }, { type: 'DEC_VALUE', - ref: "test/test_value/some/path2", + ref: "/apps/test/test_value/some/path2", value: 10 }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other3/path", + ref: "/apps/test/test_function/other3/path", value: { ".function": { "fid": { @@ -1462,14 +1466,14 @@ describe('Sharding', async () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other3/path", + ref: "/apps/test/test_rule/other3/path", value: { ".write": "some other3 rule config" } }, { type: 'SET_OWNER', - ref: "/test/test_owner/other3/path", + ref: "/apps/test/test_owner/other3/path", value: { ".owner": { "owners": { @@ -1516,8 +1520,10 @@ describe('Sharding', async () => { }, ], "gas_amount_total": { - "app": {}, - "service": 6 + "app": { + "test": 6 + }, + "service": 0 }, "gas_cost_total": 0 }); @@ -1529,25 +1535,25 @@ describe('Sharding', async () => { op_list: [ { // Default type: SET_VALUE - ref: "test/test_value/other4/path", + ref: "/apps/test/test_value/other4/path", value: "some other4 value", is_global: true, }, { type: 'INC_VALUE', - ref: "test/test_value/some/path", + ref: "/apps/test/test_value/some/path", value: 10, is_global: true, }, { type: 'DEC_VALUE', - ref: "test/test_value/some/path4", + ref: 
"/apps/test/test_value/some/path4", value: 10, is_global: true, }, { type: 'SET_FUNCTION', - ref: "/test/test_function/other4/path", + ref: "/apps/test/test_function/other4/path", value: { ".function": { "fid": { @@ -1562,7 +1568,7 @@ describe('Sharding', async () => { }, { type: 'SET_RULE', - ref: "/test/test_rule/other4/path", + ref: "/apps/test/test_rule/other4/path", value: { ".write": "some other4 rule config" }, @@ -1570,7 +1576,7 @@ describe('Sharding', async () => { }, { type: 'SET_OWNER', - ref: "/test/test_owner/other4/path", + ref: "/apps/test/test_owner/other4/path", value: { ".owner": { "owners": { @@ -1629,13 +1635,12 @@ describe('Sharding', async () => { describe('ain_sendSignedTransaction', () => { it('ain_sendSignedTransaction with is_global = false', () => { - const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -1654,8 +1659,10 @@ describe('Sharding', async () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1, + }, + service: 0 }, gas_cost_total: 0 }, @@ -1666,13 +1673,13 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransaction with is_global = false (explicit)', () => { - const account = ainUtil.createAccount(); + // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path`, + ref: `/apps/test/test_value/some/path`, is_global: false, }, timestamp: Date.now(), @@ -1692,8 +1699,10 @@ describe('Sharding', async () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1 + }, + service: 0 }, gas_cost_total: 0 }, @@ -1704,13 +1713,13 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransaction with is_global = true', () => { - const account = ainUtil.createAccount(); + // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `apps/afan/test/test_value/some/path`, + ref: `apps/afan/apps/test/test_value/some/path`, is_global: true, }, timestamp: Date.now(), @@ -1746,13 +1755,13 @@ describe('Sharding', async () => { describe('ain_sendSignedTransactionBatch', () => { it('ain_sendSignedTransactionBatch with is_global = false', () => { - const account = ainUtil.createAccount(); + // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path` + ref: `/apps/test/test_value/some/path` }, timestamp: Date.now(), nonce: -1 @@ -1779,8 +1788,10 @@ describe('Sharding', async () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1 + }, + service: 0 }, gas_cost_total: 0 }, @@ -1792,13 +1803,13 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransactionBatch with is_global = false (explicit)', () => { - const account = ainUtil.createAccount(); + // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `test/test_value/some/path`, + ref: 
`/apps/test/test_value/some/path`, is_global: false, }, timestamp: Date.now(), @@ -1829,8 +1840,10 @@ describe('Sharding', async () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1 + }, + service: 0 }, gas_cost_total: 0 }, @@ -1842,13 +1855,13 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransactionBatch with is_global = true', () => { - const account = ainUtil.createAccount(); + // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { type: 'SET_VALUE', value: 'some other value', - ref: `apps/afan/test/test_value/some/path`, + ref: `apps/afan/apps/test/test_value/some/path`, is_global: true, }, timestamp: Date.now(), @@ -1921,28 +1934,30 @@ describe('Sharding', async () => { describe('_updateLatestShardReport', () => { before(async () => { const { shard_owner, shard_reporter, sharding_path } = shardingConfig; - const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - json: { - ref: `/staking/a_dapp/${shard_owner}/0/stake/${Date.now()}/value`, - value: 1 - } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { - console.log(`Failed to check finalization of app staking tx.`) - } - const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - json: { - ref: `/manage_app/a_dapp/create/${Date.now()}`, - value: { - admin: { [shard_owner]: true } - } - } - }).body.toString('utf-8')).result; - assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { - console.log(`Failed to check finalization of create app tx.`) - } + // const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { + // json: { + // ref: `/staking/a_dapp/${shard_owner}/0/stake/${Date.now()}/value`, + // value: 1 + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); + // if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { + // console.log(`Failed to check finalization of app staking tx.`) + // } + // const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { + // json: { + // ref: `/manage_app/a_dapp/create/${Date.now()}`, + // value: { + // admin: { [shard_owner]: true } + // } + // } + // }).body.toString('utf-8')).result; + // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); + // if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { + // console.log(`Failed to check finalization of create app tx.`) + // } + await setUpApp('a_dapp', parentServerList, { admin: { [shard_owner]: true } }); + const res = parseOrLog(syncRequest('POST', parentServer + '/set', { json: { op_list: [ diff --git a/unittest/data/rules_for_testing.json b/unittest/data/rules_for_testing.json index 7af9c6774..f4b066a2d 100644 --- a/unittest/data/rules_for_testing.json +++ b/unittest/data/rules_for_testing.json @@ -6,21 +6,6 @@ "comcom": { ".write": true }, - "consensus": { - "number": { - "$number": { - ".write": "newData === null && !!getValue('/consensus/number/' + (Number($number) + 1000))", - "propose": { - ".write": "newData !== null && util.isDict(newData) && newData.proposer === auth.addr && Number($number) === newData.number && 
(newData.number === 1 || getValue('/consensus/number/' + (newData.number - 1) + '/propose/validators/' + auth.addr) > 0)" - }, - "vote": { - "$user_addr": { - ".write": "auth.addr === $user_addr && util.isDict(newData) && util.isString(newData.block_hash) && util.isNumber(newData.stake) && newData.stake > 0 && getValue('/service_accounts/staking/consensus/' + $user_addr + '|0/value') >= newData.stake" - } - } - } - } - }, "billing_keys": { "update_billing": { ".write": true @@ -42,7 +27,7 @@ ".write": "data !== null" }, "balance_info": { - ".write": "getValue('test/billing_keys/update_billing/' + $uid) !== null" + ".write": "getValue('/apps/test/billing_keys/update_billing/' + $uid) !== null" }, "next_counter": { ".write": "typeof newData === 'number' && newData === data + 1" diff --git a/unittest/db.test.js b/unittest/db.test.js index 7e6602df3..eca753cee 100644 --- a/unittest/db.test.js +++ b/unittest/db.test.js @@ -153,7 +153,7 @@ describe("DB operations", () => { } } }; - result = node.db.setValue("test", dbValues); + result = node.db.setValue("/apps/test", dbValues); assert.deepEqual(result.code, 0); dbFuncs = { @@ -192,7 +192,7 @@ describe("DB operations", () => { }, } }; - result = node.db.setFunction("test/test_function", dbFuncs); + result = node.db.setFunction("/apps/test/test_function", dbFuncs); assert.deepEqual(result.code, 0); dbRules = { @@ -210,7 +210,7 @@ describe("DB operations", () => { } } }; - result = node.db.setRule("test/test_rule", dbRules); + result = node.db.setRule("/apps/test/test_rule", dbRules); assert.deepEqual(result.code, 0); dbOwners = { @@ -255,7 +255,7 @@ describe("DB operations", () => { } } }; - result = node.db.setOwner("test/test_owner", dbOwners); + result = node.db.setOwner("/apps/test/test_owner", dbOwners); assert.deepEqual(result.code, 0); }); @@ -266,11 +266,11 @@ describe("DB operations", () => { describe("Read operations", () => { describe("getValue()", () => { it("when retrieving high value near top of database", () => { - assert.deepEqual(node.db.getValue("test"), dbValues) + assert.deepEqual(node.db.getValue("/apps/test"), dbValues) }) it('when retrieving value near top of database with is_shallow', () => { - assert.deepEqual(node.db.getValue('test', true, false), { + assert.deepEqual(node.db.getValue('/apps/test', true, false), { 'ai': true, 'increment': true, 'decrement': true, @@ -280,30 +280,30 @@ describe("DB operations", () => { }); it("when retrieving shallow nested value", () => { - assert.deepEqual(node.db.getValue("test/ai/comcom"), dbValues["ai"]["comcom"]) + assert.deepEqual(node.db.getValue("/apps/test/ai/comcom"), dbValues["ai"]["comcom"]) }) it("when retrieving deeply nested value", () => { - assert.deepEqual(node.db.getValue("test/nested/far/down"), dbValues["nested"]["far"]["down"]) + assert.deepEqual(node.db.getValue("/apps/test/nested/far/down"), dbValues["nested"]["far"]["down"]) }) it("by failing when value is not present", () => { - expect(node.db.getValue("test/nested/far/down/to/nowhere")).to.equal(null) + expect(node.db.getValue("/apps/test/nested/far/down/to/nowhere")).to.equal(null) }) it("by failing when value is not present with is_shallow", () => { - expect(node.db.getValue("test/nested/far/down/to/nowhere", true, false)).to.equal(null) + expect(node.db.getValue("/apps/test/nested/far/down/to/nowhere", true, false)).to.equal(null) }) }) describe("getFunction()", () => { it("when retrieving non-existing function config", () => { - 
expect(node.db.getFunction("/test/test_function/other/function/path")).to.equal(null); - expect(node.db.getFunction("/test/test_function/some/other_path")).to.equal(null); + expect(node.db.getFunction("/apps/test/test_function/other/function/path")).to.equal(null); + expect(node.db.getFunction("/apps/test/test_function/some/other_path")).to.equal(null); }) it("when retrieving existing function config", () => { - assert.deepEqual(node.db.getFunction("/test/test_function/some/path"), { + assert.deepEqual(node.db.getFunction("/apps/test/test_function/some/path"), { ".function": { "fid": { "event_listener": "https://events.ainetwork.ai/trigger", @@ -328,7 +328,7 @@ describe("DB operations", () => { }) it("when retrieving existing function config with is_shallow", () => { - assert.deepEqual(node.db.getFunction('test/test_function', true, false), { + assert.deepEqual(node.db.getFunction('/apps/test/test_function', true, false), { some: true, }); }) @@ -341,7 +341,7 @@ describe("DB operations", () => { }) it("when retrieving existing rule config", () => { - assert.deepEqual(node.db.getRule("/test/test_rule/some/path"), { + assert.deepEqual(node.db.getRule("/apps/test/test_rule/some/path"), { ".write": "auth.addr === 'abcd'", "deeper": { "path": { @@ -352,7 +352,7 @@ describe("DB operations", () => { }) it('when retrieving existing rule config with is_shallow', () => { - assert.deepEqual(node.db.getRule('test/test_rule', true, false), { + assert.deepEqual(node.db.getRule('/apps/test/test_rule', true, false), { some: true, }); }); @@ -360,11 +360,11 @@ describe("DB operations", () => { describe("getOwner()", () => { it("when retrieving non-existing owner config", () => { - expect(node.db.getOwner("/test/test_owner/other/owner/path")).to.equal(null) + expect(node.db.getOwner("/apps/test/test_owner/other/owner/path")).to.equal(null) }) it("when retrieving existing owner config", () => { - assert.deepEqual(node.db.getOwner("/test/test_owner/some/path"), { + assert.deepEqual(node.db.getOwner("/apps/test/test_owner/some/path"), { ".owner": { "owners": { "*": { @@ -405,7 +405,7 @@ describe("DB operations", () => { }) it("when retrieving existing owner config with is_shallow", () => { - assert.deepEqual(node.db.getOwner("test/test_owner", true, false), { + assert.deepEqual(node.db.getOwner("/apps/test/test_owner", true, false), { some: true, }) }) @@ -413,10 +413,10 @@ describe("DB operations", () => { describe("matchFunction()", () => { it("when matching existing variable path function", () => { - assert.deepEqual(node.db.matchFunction("/test/test_function/some/var_path"), { + assert.deepEqual(node.db.matchFunction("/apps/test/test_function/some/var_path"), { "matched_path": { - "target_path": "/test/test_function/some/$var_path", - "ref_path": "/test/test_function/some/var_path", + "target_path": "/apps/test/test_function/some/$var_path", + "ref_path": "/apps/test/test_function/some/var_path", "path_vars": { "$var_path": "var_path" }, @@ -430,17 +430,17 @@ describe("DB operations", () => { "service_name": "https://ainetwork.ai", }, }, - "path": "/test/test_function/some/$var_path" + "path": "/apps/test/test_function/some/$var_path" }, "subtree_configs": [] }); }) it("when matching existing non-variable path function", () => { - assert.deepEqual(node.db.matchFunction("/test/test_function/some/path"), { + assert.deepEqual(node.db.matchFunction("/apps/test/test_function/some/path"), { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + 
"target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -452,7 +452,7 @@ describe("DB operations", () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [ { @@ -468,10 +468,10 @@ describe("DB operations", () => { } ] }); - assert.deepEqual(node.db.matchFunction("/test/test_function/some/path/deeper/path"), { + assert.deepEqual(node.db.matchFunction("/apps/test/test_function/some/path/deeper/path"), { "matched_path": { - "target_path": "/test/test_function/some/path/deeper/path", - "ref_path": "/test/test_function/some/path/deeper/path", + "target_path": "/apps/test/test_function/some/path/deeper/path", + "ref_path": "/apps/test/test_function/some/path/deeper/path", "path_vars": {}, }, "matched_config": { @@ -483,22 +483,22 @@ describe("DB operations", () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path/deeper/path" + "path": "/apps/test/test_function/some/path/deeper/path" }, "subtree_configs": [] }); }) it("when NOT matching existing closest non-variable path function", () => { - assert.deepEqual(node.db.matchFunction("/test/test_function/some/path/deeper"), { + assert.deepEqual(node.db.matchFunction("/apps/test/test_function/some/path/deeper"), { "matched_path": { - "target_path": "/test/test_function/some/path/deeper", - "ref_path": "/test/test_function/some/path/deeper", + "target_path": "/apps/test/test_function/some/path/deeper", + "ref_path": "/apps/test/test_function/some/path/deeper", "path_vars": {}, }, "matched_config": { "config": null, - "path": "/test/test_function/some/path/deeper" + "path": "/apps/test/test_function/some/path/deeper" }, "subtree_configs": [ { @@ -519,32 +519,32 @@ describe("DB operations", () => { describe("matchRule()", () => { it("when matching existing variable path rule", () => { - assert.deepEqual(node.db.matchRule("/test/test_rule/some/var_path"), { + assert.deepEqual(node.db.matchRule("/apps/test/test_rule/some/var_path"), { "matched_path": { - "target_path": "/test/test_rule/some/$var_path", - "ref_path": "/test/test_rule/some/var_path", + "target_path": "/apps/test/test_rule/some/$var_path", + "ref_path": "/apps/test/test_rule/some/var_path", "path_vars": { "$var_path": "var_path" }, }, "matched_config": { "config": "auth.addr !== 'abcd'", - "path": "/test/test_rule/some/$var_path" + "path": "/apps/test/test_rule/some/$var_path" }, "subtree_configs": [] }); }) it("when matching existing non-variable path rule", () => { - assert.deepEqual(node.db.matchRule("/test/test_rule/some/path"), { + assert.deepEqual(node.db.matchRule("/apps/test/test_rule/some/path"), { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [ { @@ -553,30 +553,30 @@ describe("DB operations", () => { } ] }); - assert.deepEqual(node.db.matchRule("/test/test_rule/some/path/deeper/path"), { + assert.deepEqual(node.db.matchRule("/apps/test/test_rule/some/path/deeper/path"), { "matched_path": { - "target_path": "/test/test_rule/some/path/deeper/path", - "ref_path": "/test/test_rule/some/path/deeper/path", + "target_path": 
"/apps/test/test_rule/some/path/deeper/path", + "ref_path": "/apps/test/test_rule/some/path/deeper/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'ijkl'", - "path": "/test/test_rule/some/path/deeper/path" + "path": "/apps/test/test_rule/some/path/deeper/path" }, "subtree_configs": [] }); }) it("when matching existing closest non-variable path rule", () => { - assert.deepEqual(node.db.matchRule("/test/test_rule/some/path/deeper"), { + assert.deepEqual(node.db.matchRule("/apps/test/test_rule/some/path/deeper"), { "matched_path": { - "target_path": "/test/test_rule/some/path/deeper", - "ref_path": "/test/test_rule/some/path/deeper", + "target_path": "/apps/test/test_rule/some/path/deeper", + "ref_path": "/apps/test/test_rule/some/path/deeper", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [ { @@ -590,9 +590,9 @@ describe("DB operations", () => { describe("matchOwner()", () => { it("when matching existing owner with matching address", () => { - assert.deepEqual(node.db.matchOwner("/test/test_owner/some/path", 'write_owner', 'abcd'), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_owner/some/path", 'write_owner', 'abcd'), { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -611,12 +611,12 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }); - assert.deepEqual(node.db.matchOwner("/test/test_owner/some/path/deeper/path", 'write_owner', 'ijkl'), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_owner/some/path/deeper/path", 'write_owner', 'ijkl'), { "matched_path": { - "target_path": "/test/test_owner/some/path/deeper/path" + "target_path": "/apps/test/test_owner/some/path/deeper/path" }, "matched_config": { "config": { @@ -635,15 +635,15 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path/deeper/path" + "path": "/apps/test/test_owner/some/path/deeper/path" } }); }) it("when matching existing owner without matching address", () => { - assert.deepEqual(node.db.matchOwner("/test/test_owner/some/path", 'write_owner', 'other'), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_owner/some/path", 'write_owner', 'other'), { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -662,12 +662,12 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }); - assert.deepEqual(node.db.matchOwner("/test/test_owner/some/path/deeper/path", 'write_owner', 'other'), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_owner/some/path/deeper/path", 'write_owner', 'other'), { "matched_path": { - "target_path": "/test/test_owner/some/path/deeper/path" + "target_path": "/apps/test/test_owner/some/path/deeper/path" }, "matched_config": { "config": { @@ -686,15 +686,15 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path/deeper/path" + "path": "/apps/test/test_owner/some/path/deeper/path" } }); }) it("when matching closest owner", () => { - assert.deepEqual(node.db.matchOwner("/test/test_owner/some/path/deeper", 'write_owner', 'abcd'), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_owner/some/path/deeper", 'write_owner', 
'abcd'), { "matched_path": { - "target_path": "/test/test_owner/some/path/deeper" + "target_path": "/apps/test/test_owner/some/path/deeper" }, "matched_config": { "config": { @@ -713,7 +713,7 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }); }) @@ -722,32 +722,32 @@ describe("DB operations", () => { describe("evalRule()", () => { it("when evaluating existing variable path rule", () => { expect(node.db.evalRule( - "/test/test_rule/some/var_path", 'value', { addr: 'abcd' }, Date.now())) + "/apps/test/test_rule/some/var_path", 'value', { addr: 'abcd' }, Date.now())) .to.equal(false); expect(node.db.evalRule( - "/test/test_rule/some/var_path", 'value', { addr: 'other' }, Date.now())) + "/apps/test/test_rule/some/var_path", 'value', { addr: 'other' }, Date.now())) .to.equal(true); }) it("when evaluating existing non-variable path rule", () => { - expect(node.db.evalRule("/test/test_rule/some/path", 'value', { addr: 'abcd' }, Date.now())) + expect(node.db.evalRule("/apps/test/test_rule/some/path", 'value', { addr: 'abcd' }, Date.now())) .to.equal(true); - expect(node.db.evalRule("/test/test_rule/some/path", 'value', { addr: 'other' }, Date.now())) + expect(node.db.evalRule("/apps/test/test_rule/some/path", 'value', { addr: 'other' }, Date.now())) .to.equal(false); expect(node.db.evalRule( - "/test/test_rule/some/path/deeper/path", 'value', { addr: 'ijkl' }, Date.now())) + "/apps/test/test_rule/some/path/deeper/path", 'value', { addr: 'ijkl' }, Date.now())) .to.equal(true); expect(node.db.evalRule( - "/test/test_rule/some/path/deeper/path", 'value', { addr: 'other' }, Date.now())) + "/apps/test/test_rule/some/path/deeper/path", 'value', { addr: 'other' }, Date.now())) .to.equal(false); }) it("when evaluating existing closest rule", () => { expect(node.db.evalRule( - "/test/test_rule/some/path/deeper", 'value', { addr: 'abcd' }, Date.now())) + "/apps/test/test_rule/some/path/deeper", 'value', { addr: 'abcd' }, Date.now())) .to.equal(true); expect(node.db.evalRule( - "/test/test_rule/some/path/deeper", 'value', { addr: 'other' }, Date.now())) + "/apps/test/test_rule/some/path/deeper", 'value', { addr: 'other' }, Date.now())) .to.equal(false); }) }) @@ -755,48 +755,48 @@ describe("DB operations", () => { describe("evalOwner()", () => { it("when evaluating existing owner with matching address", () => { expect(node.db.evalOwner( - "/test/test_owner/some/path", 'write_owner', + "/apps/test/test_owner/some/path", 'write_owner', { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' })) .to.equal(true); - expect(node.db.evalOwner("/test/test_owner/some/path", 'write_rule', { addr: '' })) + expect(node.db.evalOwner("/apps/test/test_owner/some/path", 'write_rule', { addr: '' })) .to.equal(false); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper/path", 'write_owner', + "/apps/test/test_owner/some/path/deeper/path", 'write_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper/path", 'write_rule', + "/apps/test/test_owner/some/path/deeper/path", 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false); }) it("when evaluating existing owner without matching address", () => { - expect(node.db.evalOwner("/test/test_owner/some/path", 'write_owner', { addr: 'other' })) + expect(node.db.evalOwner("/apps/test/test_owner/some/path", 'write_owner', { addr: 'other' })) .to.equal(false); - 
expect(node.db.evalOwner("/test/test_owner/some/path", 'write_rule', { addr: 'other' })) + expect(node.db.evalOwner("/apps/test/test_owner/some/path", 'write_rule', { addr: 'other' })) .to.equal(true); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper/path", 'write_owner', { addr: 'other' })) + "/apps/test/test_owner/some/path/deeper/path", 'write_owner', { addr: 'other' })) .to.equal(false); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper/path", 'write_rule', { addr: 'other' })) + "/apps/test/test_owner/some/path/deeper/path", 'write_rule', { addr: 'other' })) .to.equal(true); }) it("when evaluating closest owner", () => { expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper", 'write_owner', + "/apps/test/test_owner/some/path/deeper", 'write_owner', { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' })) .to.equal(true); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper", 'write_rule', + "/apps/test/test_owner/some/path/deeper", 'write_rule', { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' })) .to.equal(false); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper", 'write_owner', { addr: 'other' })) + "/apps/test/test_owner/some/path/deeper", 'write_owner', { addr: 'other' })) .to.equal(false); expect(node.db.evalOwner( - "/test/test_owner/some/path/deeper", 'write_rule', { addr: 'other' })) + "/apps/test/test_owner/some/path/deeper", 'write_rule', { addr: 'other' })) .to.equal(true); }) }) @@ -806,42 +806,42 @@ describe("DB operations", () => { assert.deepEqual(node.db.get([ { // Default type: GET_VALUE - ref: "/value/other/path", + ref: "/apps/value/other/path", }, { type: "GET_RULE", - ref: "/rule/other/path", + ref: "/apps/rule/other/path", }, { type: "GET_FUNCTION", - ref: "/function/other/path", + ref: "/apps/function/other/path", }, { type: "GET_OWNER", - ref: "/owner/other/path", + ref: "/apps/owner/other/path", }, { type: "MATCH_FUNCTION", - ref: "/test/test_function/some/path/deeper", + ref: "/apps/test/test_function/some/path/deeper", }, { type: "MATCH_RULE", - ref: "/test/test_rule/some/path/deeper", + ref: "/apps/test/test_rule/some/path/deeper", }, { type: "MATCH_OWNER", - ref: "/test/test_owner/some/path/deeper", + ref: "/apps/test/test_owner/some/path/deeper", }, { type: "EVAL_RULE", - ref: "/rule/other/path", + ref: "/apps/rule/other/path", value: "value", address: "abcd", timestamp: Date.now(), }, { type: "EVAL_OWNER", - ref: "/owner/other/path", + ref: "/apps/owner/other/path", permission: "write_rule", address: "abcd", timestamp: Date.now(), @@ -853,13 +853,13 @@ describe("DB operations", () => { null, { "matched_path": { - "target_path": "/test/test_function/some/path/deeper", - "ref_path": "/test/test_function/some/path/deeper", + "target_path": "/apps/test/test_function/some/path/deeper", + "ref_path": "/apps/test/test_function/some/path/deeper", "path_vars": {}, }, "matched_config": { "config": null, - "path": "/test/test_function/some/path/deeper" + "path": "/apps/test/test_function/some/path/deeper" }, "subtree_configs": [ { @@ -877,13 +877,13 @@ describe("DB operations", () => { }, { "matched_path": { - "target_path": "/test/test_rule/some/path/deeper", - "ref_path": "/test/test_rule/some/path/deeper", + "target_path": "/apps/test/test_rule/some/path/deeper", + "ref_path": "/apps/test/test_rule/some/path/deeper", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [ { @@ 
-894,7 +894,7 @@ describe("DB operations", () => { }, { "matched_path": { - "target_path": "/test/test_owner/some/path/deeper" + "target_path": "/apps/test/test_owner/some/path/deeper" }, "matched_config": { "config": { @@ -913,7 +913,7 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }, false, @@ -925,42 +925,42 @@ describe("DB operations", () => { assert.deepEqual(node.db.get([ { // Default type: GET_VALUE - ref: "/test/nested/far/down", + ref: "/apps/test/nested/far/down", }, { type: "GET_RULE", - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", }, { type: "GET_FUNCTION", - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", }, { type: "GET_OWNER", - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", }, { type: "MATCH_FUNCTION", - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", }, { type: "MATCH_RULE", - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", }, { type: "MATCH_OWNER", - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", }, { type: "EVAL_RULE", - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", value: "value", address: "abcd", timestamp: Date.now(), }, { type: "EVAL_OWNER", - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", permission: "write_owner", address: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1", timestamp: Date.now(), @@ -1037,8 +1037,8 @@ describe("DB operations", () => { }, { "matched_path": { - "target_path": "/test/test_function/some/path", - "ref_path": "/test/test_function/some/path", + "target_path": "/apps/test/test_function/some/path", + "ref_path": "/apps/test/test_function/some/path", "path_vars": {}, }, "matched_config": { @@ -1050,7 +1050,7 @@ describe("DB operations", () => { "service_name": "https://ainetwork.ai" } }, - "path": "/test/test_function/some/path" + "path": "/apps/test/test_function/some/path" }, "subtree_configs": [ { @@ -1068,13 +1068,13 @@ describe("DB operations", () => { }, { "matched_path": { - "target_path": "/test/test_rule/some/path", - "ref_path": "/test/test_rule/some/path", + "target_path": "/apps/test/test_rule/some/path", + "ref_path": "/apps/test/test_rule/some/path", "path_vars": {}, }, "matched_config": { "config": "auth.addr === 'abcd'", - "path": "/test/test_rule/some/path" + "path": "/apps/test/test_rule/some/path" }, "subtree_configs": [ { @@ -1085,7 +1085,7 @@ describe("DB operations", () => { }, { "matched_path": { - "target_path": "/test/test_owner/some/path" + "target_path": "/apps/test/test_owner/some/path" }, "matched_config": { "config": { @@ -1104,7 +1104,7 @@ describe("DB operations", () => { } } }, - "path": "/test/test_owner/some/path" + "path": "/apps/test/test_owner/some/path" } }, true, @@ -1118,204 +1118,204 @@ describe("DB operations", () => { describe("setValue()", () => { it("when overwriting nested value", () => { const newValue = {"new": 12345} - expect(node.db.setValue("test/nested/far/down", newValue).code).to.equal(0) - assert.deepEqual(node.db.getValue("test/nested/far/down"), newValue) + expect(node.db.setValue("/apps/test/nested/far/down", newValue).code).to.equal(0) + assert.deepEqual(node.db.getValue("/apps/test/nested/far/down"), newValue) }) it("when creating new path in database", () => { const newValue = 12345 - expect(node.db.setValue("test/new/unchartered/nested/path", 
newValue).code).to.equal(0) - expect(node.db.getValue("test/new/unchartered/nested/path")).to.equal(newValue) + expect(node.db.setValue("/apps/test/new/unchartered/nested/path", newValue).code).to.equal(0) + expect(node.db.getValue("/apps/test/new/unchartered/nested/path")).to.equal(newValue) }) it("when writing invalid object", () => { - assert.deepEqual(node.db.setValue("test/unchartered/nested/path2", {array: []}), { + assert.deepEqual(node.db.setValue("/apps/test/unchartered/nested/path2", {array: []}), { "code": 101, "error_message": "Invalid object for states: /array", "gas_amount": 0 }); - expect(node.db.getValue("test/unchartered/nested/path2")).to.equal(null) + expect(node.db.getValue("/apps/test/unchartered/nested/path2")).to.equal(null) - assert.deepEqual(node.db.setValue("test/unchartered/nested/path2", {'.': 'x'}), { + assert.deepEqual(node.db.setValue("/apps/test/unchartered/nested/path2", {'.': 'x'}), { "code": 101, "error_message": "Invalid object for states: /.", "gas_amount": 0 }); - expect(node.db.getValue("test/unchartered/nested/path2")).to.equal(null) + expect(node.db.getValue("/apps/test/unchartered/nested/path2")).to.equal(null) - assert.deepEqual(node.db.setValue("test/unchartered/nested/path2", {'$': 'x'}), { + assert.deepEqual(node.db.setValue("/apps/test/unchartered/nested/path2", {'$': 'x'}), { "code": 101, "error_message": "Invalid object for states: /$", "gas_amount": 0 }); - expect(node.db.getValue("test/unchartered/nested/path2")).to.equal(null) + expect(node.db.getValue("/apps/test/unchartered/nested/path2")).to.equal(null) - assert.deepEqual(node.db.setValue("test/unchartered/nested/path2", {'*a': 'x'}), { + assert.deepEqual(node.db.setValue("/apps/test/unchartered/nested/path2", {'*a': 'x'}), { "code": 101, "error_message": "Invalid object for states: /*a", "gas_amount": 0 }); - expect(node.db.getValue("test/unchartered/nested/path2")).to.equal(null) + expect(node.db.getValue("/apps/test/unchartered/nested/path2")).to.equal(null) - assert.deepEqual(node.db.setValue("test/unchartered/nested/path2", {'a*': 'x'}), { + assert.deepEqual(node.db.setValue("/apps/test/unchartered/nested/path2", {'a*': 'x'}), { "code": 101, "error_message": "Invalid object for states: /a*", "gas_amount": 0 }); - expect(node.db.getValue("test/unchartered/nested/path2")).to.equal(null) + expect(node.db.getValue("/apps/test/unchartered/nested/path2")).to.equal(null) }) it("when writing with invalid path", () => { - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/.", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/.", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/.", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/.", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/$", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/$", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/$", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/$", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/a*", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/a*", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/a*", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/a*", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/*a", 12345), 
{ + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/*a", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/*a", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/*a", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/#", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/#", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/#", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/#", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/{", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/{", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/{", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/{", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/}", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/}", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/}", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/}", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/[", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/[", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/[", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/[", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/]", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/]", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/]", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/]", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/\x00", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/\x00", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/\x00", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/\x00", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/\x1F", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/\x1F", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/\x1F", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/\x1F", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/new/unchartered/nested/\x7F", 12345), { + assert.deepEqual(node.db.setValue("/apps/test/new/unchartered/nested/\x7F", 12345), { "code": 102, - "error_message": "Invalid path: /test/new/unchartered/nested/\x7F", + "error_message": "Invalid path: /apps/test/new/unchartered/nested/\x7F", "gas_amount": 0 }); }) it("when writing with non-writable path with sharding", () => { - assert.deepEqual(node.db.setValue("test/shards/enabled_shard", 20), { + assert.deepEqual(node.db.setValue("/apps/test/shards/enabled_shard", 20), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/shards/enabled_shard", "gas_amount": 0 }); - assert.deepEqual(node.db.setValue("test/shards/enabled_shard/path", 20), { + 
assert.deepEqual(node.db.setValue("/apps/test/shards/enabled_shard/path", 20), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/shards/enabled_shard", "gas_amount": 0 }); }) it("when writing with writable path with sharding", () => { - expect(node.db.setValue("test/shards/disabled_shard", 20).code).to.equal(0); - expect(node.db.getValue("test/shards/disabled_shard")).to.equal(20) - expect(node.db.setValue("test/shards/disabled_shard/path", 20).code).to.equal(0); - expect(node.db.getValue("test/shards/disabled_shard/path")).to.equal(20) + expect(node.db.setValue("/apps/test/shards/disabled_shard", 20).code).to.equal(0); + expect(node.db.getValue("/apps/test/shards/disabled_shard")).to.equal(20) + expect(node.db.setValue("/apps/test/shards/disabled_shard/path", 20).code).to.equal(0); + expect(node.db.getValue("/apps/test/shards/disabled_shard/path")).to.equal(20) }) }) describe("incValue()", () => { it("when increasing value successfully", () => { - expect(node.db.incValue("test/increment/value", 10).code).to.equal(0) - expect(node.db.getValue("test/increment/value")).to.equal(30) + expect(node.db.incValue("/apps/test/increment/value", 10).code).to.equal(0) + expect(node.db.getValue("/apps/test/increment/value")).to.equal(30) }) it("returning error code and leaving value unchanged if delta is not numerical", () => { - expect(node.db.incValue("test/increment/value", '10').code).to.equal(201) - expect(node.db.getValue("test/increment/value")).to.equal(20) + expect(node.db.incValue("/apps/test/increment/value", '10').code).to.equal(201) + expect(node.db.getValue("/apps/test/increment/value")).to.equal(20) }) it("returning error code and leaving value unchanged if path is not numerical", () => { - expect(node.db.incValue("test/ai/foo", 10).code).to.equal(201) - expect(node.db.getValue("test/ai/foo")).to.equal("bar") + expect(node.db.incValue("/apps/test/ai/foo", 10).code).to.equal(201) + expect(node.db.getValue("/apps/test/ai/foo")).to.equal("bar") }) it("creating and increasing given path from 0 if not currently in database", () => { - node.db.incValue("test/completely/new/path/test", 100); - expect(node.db.getValue("test/completely/new/path/test")).to.equal(100) + node.db.incValue("/apps/test/completely/new/path/test", 100); + expect(node.db.getValue("/apps/test/completely/new/path/test")).to.equal(100) }) it("returning error code with non-writable path with sharding", () => { - assert.deepEqual(node.db.incValue("test/shards/enabled_shard/path", 5), { + assert.deepEqual(node.db.incValue("/apps/test/shards/enabled_shard/path", 5), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/shards/enabled_shard", "gas_amount": 0 }); }) it("when increasing with writable path with sharding", () => { - expect(node.db.incValue("test/shards/disabled_shard/path", 5).code).to.equal(0); - expect(node.db.getValue("test/shards/disabled_shard/path")).to.equal(15) + expect(node.db.incValue("/apps/test/shards/disabled_shard/path", 5).code).to.equal(0); + expect(node.db.getValue("/apps/test/shards/disabled_shard/path")).to.equal(15) }) }) describe("decValue()", () => { it("when decreasing value successfully", () => { - expect(node.db.decValue("test/decrement/value", 10).code).to.equal(0) - expect(node.db.getValue("test/decrement/value")).to.equal(10) + 
expect(node.db.decValue("/apps/test/decrement/value", 10).code).to.equal(0) + expect(node.db.getValue("/apps/test/decrement/value")).to.equal(10) }) it("returning error code and leaving value unchanged if delta is not numerical", () => { - expect(node.db.decValue("test/decrement/value", '10').code).to.equal(301) - expect(node.db.getValue("test/decrement/value")).to.equal(20) + expect(node.db.decValue("/apps/test/decrement/value", '10').code).to.equal(301) + expect(node.db.getValue("/apps/test/decrement/value")).to.equal(20) }) it("returning error code and leaving value unchanged if path is not numerical", () => { - expect(node.db.decValue("test/ai/foo", 10).code).to.equal(301) - expect(node.db.getValue("test/ai/foo")).to.equal("bar") + expect(node.db.decValue("/apps/test/ai/foo", 10).code).to.equal(301) + expect(node.db.getValue("/apps/test/ai/foo")).to.equal("bar") }) it("creating and decreasing given path from 0 if not currently in database", () => { - node.db.decValue("test/completely/new/path/test", 100); - expect(node.db.getValue("test/completely/new/path/test")).to.equal(-100) + node.db.decValue("/apps/test/completely/new/path/test", 100); + expect(node.db.getValue("/apps/test/completely/new/path/test")).to.equal(-100) }) it("returning error code with non-writable path with sharding", () => { - assert.deepEqual(node.db.decValue("test/shards/enabled_shard/path", 5), { + assert.deepEqual(node.db.decValue("/apps/test/shards/enabled_shard/path", 5), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/shards/enabled_shard", "gas_amount": 0 }); }) it("when increasing with writable path with sharding", () => { - expect(node.db.decValue("test/shards/disabled_shard/path", 5).code).to.equal(0); - expect(node.db.getValue("test/shards/disabled_shard/path")).to.equal(5) + expect(node.db.decValue("/apps/test/shards/disabled_shard/path", 5).code).to.equal(0); + expect(node.db.getValue("/apps/test/shards/disabled_shard/path")).to.equal(5) }) }) @@ -1331,9 +1331,9 @@ describe("DB operations", () => { } } }; - expect(node.db.setFunction("/test/test_function/some/path", functionConfig).code) + expect(node.db.setFunction("/apps/test/test_function/some/path", functionConfig).code) .to.equal(0); - assert.deepEqual(node.db.getFunction("/test/test_function/some/path"), { + assert.deepEqual(node.db.getFunction("/apps/test/test_function/some/path"), { ".function": { "fid": { "event_listener": "https://events.ainetwork.ai/trigger2", // modified @@ -1368,42 +1368,42 @@ describe("DB operations", () => { } } }; - expect(node.db.setFunction("/test/test_function/some/$variable/path", functionConfig).code) + expect(node.db.setFunction("/apps/test/test_function/some/$variable/path", functionConfig).code) .to.equal(0); assert.deepEqual( - node.db.getFunction("/test/test_function/some/$variable/path"), functionConfig) + node.db.getFunction("/apps/test/test_function/some/$variable/path"), functionConfig) }) it("when writing invalid object", () => { - assert.deepEqual(node.db.setFunction("/test/test_function/some/path2", {array: []}), { + assert.deepEqual(node.db.setFunction("/apps/test/test_function/some/path2", {array: []}), { "code": 401, "error_message": "Invalid object for states: /array", "gas_amount": 0 }); - expect(node.db.getFunction("test/new2/unchartered/nested/path2")).to.equal(null) + expect(node.db.getFunction("/apps/test/new2/unchartered/nested/path2")).to.equal(null) - 
assert.deepEqual(node.db.setFunction("/test/test_function/some/path2", {'.': 'x'}), { + assert.deepEqual(node.db.setFunction("/apps/test/test_function/some/path2", {'.': 'x'}), { "code": 401, "error_message": "Invalid object for states: /.", "gas_amount": 0 }); - expect(node.db.getFunction("test/new2/unchartered/nested/path2")).to.equal(null) + expect(node.db.getFunction("/apps/test/new2/unchartered/nested/path2")).to.equal(null) }) it("when writing invalid function tree", () => { - const functionTreeBefore = node.db.getOwner("/test/test_function/some/path"); + const functionTreeBefore = node.db.getOwner("/apps/test/test_function/some/path"); assert.deepEqual(node.db.setFunction( - "/test/test_function/some/path", { ".function": null }), { + "/apps/test/test_function/some/path", { ".function": null }), { "code": 405, "error_message": "Invalid function tree: /.function", "gas_amount": 0 }); - assert.deepEqual(node.db.getOwner("/test/test_function/some/path"), functionTreeBefore); + assert.deepEqual(node.db.getOwner("/apps/test/test_function/some/path"), functionTreeBefore); }) it("when writing with invalid path", () => { assert.deepEqual(node.db.setFunction( - "/test/test_function/some/path/.", { + "/apps/test/test_function/some/path/.", { ".function": { "fid": { "event_listener": "https://events.ainetwork.ai/trigger2", @@ -1414,7 +1414,7 @@ describe("DB operations", () => { } }), { "code": 402, - "error_message": "Invalid path: /test/test_function/some/path/.", + "error_message": "Invalid path: /apps/test/test_function/some/path/.", "gas_amount": 0 }); }) @@ -1423,49 +1423,49 @@ describe("DB operations", () => { describe("setRule()", () => { it("when overwriting existing rule config with simple path", () => { const ruleConfig = {".write": "other rule config"}; - expect(node.db.setRule("/test/test_rule/some/path", ruleConfig).code).to.equal(0); - assert.deepEqual(node.db.getRule("/test/test_rule/some/path"), ruleConfig) + expect(node.db.setRule("/apps/test/test_rule/some/path", ruleConfig).code).to.equal(0); + assert.deepEqual(node.db.getRule("/apps/test/test_rule/some/path"), ruleConfig) }) it("when writing with variable path", () => { const ruleConfig = {".write": "other rule config"}; - expect(node.db.setRule("/test/test_rule/some/$variable/path", ruleConfig).code) + expect(node.db.setRule("/apps/test/test_rule/some/$variable/path", ruleConfig).code) .to.equal(0) - assert.deepEqual(node.db.getRule("/test/test_rule/some/$variable/path"), ruleConfig) + assert.deepEqual(node.db.getRule("/apps/test/test_rule/some/$variable/path"), ruleConfig) }) it("when writing invalid object", () => { - assert.deepEqual(node.db.setRule("/test/test_rule/some/path2", {array: []}), { + assert.deepEqual(node.db.setRule("/apps/test/test_rule/some/path2", {array: []}), { "code": 501, "error_message": "Invalid object for states: /array", "gas_amount": 0 }); - expect(node.db.getRule("/test/test_rule/some/path2")).to.equal(null) + expect(node.db.getRule("/apps/test/test_rule/some/path2")).to.equal(null) - assert.deepEqual(node.db.setRule("/test/test_rule/some/path2", {'.': 'x'}), { + assert.deepEqual(node.db.setRule("/apps/test/test_rule/some/path2", {'.': 'x'}), { "code": 501, "error_message": "Invalid object for states: /.", "gas_amount": 0 }); - expect(node.db.getRule("/test/test_rule/some/path2")).to.equal(null) + expect(node.db.getRule("/apps/test/test_rule/some/path2")).to.equal(null) }) it("when writing invalid rule tree", () => { - const ruleTreeBefore = node.db.getRule("/test/test_rule/some/path"); + const 
ruleTreeBefore = node.db.getRule("/apps/test/test_rule/some/path"); assert.deepEqual(node.db.setRule( - "/test/test_rule/some/path", { ".write": null }), { + "/apps/test/test_rule/some/path", { ".write": null }), { "code": 504, "error_message": "Invalid rule tree: /.write", "gas_amount": 0 }); - assert.deepEqual(node.db.getRule("/test/test_rule/some/path"), ruleTreeBefore); + assert.deepEqual(node.db.getRule("/apps/test/test_rule/some/path"), ruleTreeBefore); }) it("when writing with invalid path", () => { - assert.deepEqual(node.db.setRule("/test/test_rule/some/path/.", + assert.deepEqual(node.db.setRule("/apps/test/test_rule/some/path/.", { ".write": "some rule config" } ), { "code": 502, - "error_message": "Invalid path: /test/test_rule/some/path/.", + "error_message": "Invalid path: /apps/test/test_rule/some/path/.", "gas_amount": 0 }); }) @@ -1498,42 +1498,42 @@ describe("DB operations", () => { } }; assert.deepEqual(node.db.setOwner( - "/test/test_owner/some/path", ownerTree, + "/apps/test/test_owner/some/path", ownerTree, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }), { "code": 0, "gas_amount": 1 }); - assert.deepEqual(node.db.getOwner("/test/test_owner/some/path"), ownerTree) + assert.deepEqual(node.db.getOwner("/apps/test/test_owner/some/path"), ownerTree) }) it("when writing invalid object", () => { - assert.deepEqual(node.db.setOwner("/test/test_owner/some/path2", {array: []}), { + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/some/path2", {array: []}), { "code": 601, "error_message": "Invalid object for states: /array", "gas_amount": 0 }); - expect(node.db.getOwner("/test/test_owner/some/path2")).to.equal(null) + expect(node.db.getOwner("/apps/test/test_owner/some/path2")).to.equal(null) - assert.deepEqual(node.db.setOwner("/test/test_owner/some/path2", {'.': 'x'}), { + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/some/path2", {'.': 'x'}), { "code": 601, "error_message": "Invalid object for states: /.", "gas_amount": 0 }); - expect(node.db.getOwner("/test/test_owner/some/path2")).to.equal(null) + expect(node.db.getOwner("/apps/test/test_owner/some/path2")).to.equal(null) }) it("when writing invalid owner tree", () => { - const ownerTreeBefore = node.db.getOwner("/test/test_owner/some/path"); - assert.deepEqual(node.db.setOwner("/test/test_owner/some/path", { + const ownerTreeBefore = node.db.getOwner("/apps/test/test_owner/some/path"); + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/some/path", { ".owner": "invalid owners config" }, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }), { "code": 604, "error_message": "Invalid owner tree: /.owner", "gas_amount": 0 }); - assert.deepEqual(node.db.getOwner("/test/test_owner/some/path"), ownerTreeBefore); + assert.deepEqual(node.db.getOwner("/apps/test/test_owner/some/path"), ownerTreeBefore); - assert.deepEqual(node.db.setOwner("/test/test_owner/some/path", { + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/some/path", { ".owner": { "owners": "invalid owners config" } @@ -1542,11 +1542,11 @@ describe("DB operations", () => { "error_message": "Invalid owner tree: /.owner/owners", "gas_amount": 0 }); - assert.deepEqual(node.db.getOwner("/test/test_owner/some/path"), ownerTreeBefore); + assert.deepEqual(node.db.getOwner("/apps/test/test_owner/some/path"), ownerTreeBefore); }) it("when writing with invalid path", () => { - assert.deepEqual(node.db.setOwner("/test/test_owner/some/path/.", { + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/some/path/.", { ".owner": { 
"owners": { "*": { @@ -1559,7 +1559,7 @@ describe("DB operations", () => { } }, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }), { "code": 602, - "error_message": "Invalid path: /test/test_owner/some/path/.", + "error_message": "Invalid path: /apps/test/test_owner/some/path/.", "gas_amount": 0 }); }) @@ -1569,7 +1569,7 @@ describe("DB operations", () => { it("when successful", () => { assert.deepEqual(node.db.executeSingleSetOperation({ // Default type: SET_VALUE - ref: "test/nested/far/down", + ref: "/apps/test/nested/far/down", value: { "new": 12345 } @@ -1577,25 +1577,25 @@ describe("DB operations", () => { "code": 0, "gas_amount": 1 }); - assert.deepEqual(node.db.getValue("test/nested/far/down"), { "new": 12345 }) + assert.deepEqual(node.db.getValue("/apps/test/nested/far/down"), { "new": 12345 }) }) it("returning error code and leaving value unchanged when it fails", () => { assert.deepEqual(node.db.executeSingleSetOperation({ type: "INC_VALUE", - ref: "test/ai/foo", + ref: "/apps/test/ai/foo", value: 10 }), { "code": 201, "error_message": "Not a number type: bar or 10", "gas_amount": 0 }) - expect(node.db.getValue("test/ai/foo")).to.equal("bar") + expect(node.db.getValue("/apps/test/ai/foo")).to.equal("bar") }) it("when successful with function triggering", () => { - const valuePath = '/test/test_function_triggering/allowed_path/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -1661,13 +1661,13 @@ describe("DB operations", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1, @@ -1694,8 +1694,8 @@ describe("DB operations", () => { }) it("when failed with function triggering", () => { - const valuePath = '/test/test_function_triggering/allowed_path/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -1761,16 +1761,16 @@ describe("DB operations", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 103, - "error_message": "No .write permission on: /test/test_function_triggering/allowed_path/.last_tx/value", + "error_message": "No .write permission on: /apps/test/test_function_triggering/allowed_path/.last_tx/value", "gas_amount": 0 } } @@ -1800,24 +1800,24 @@ describe("DB operations", () => { assert.deepEqual(node.db.executeMultiSetOperation([ { // Default type: 
SET_VALUE - ref: "test/nested/far/down", + ref: "/apps/test/nested/far/down", value: { "new": 12345 } }, { type: "INC_VALUE", - ref: "test/increment/value", + ref: "/apps/test/increment/value", value: 10 }, { type: "DEC_VALUE", - ref: "test/decrement/value", + ref: "/apps/test/decrement/value", value: 10 }, { type: "SET_FUNCTION", - ref: "/test/test_function/some/path", + ref: "/apps/test/test_function/some/path", value: { ".function": { "fid": { @@ -1831,14 +1831,14 @@ describe("DB operations", () => { }, { type: "SET_RULE", - ref: "/test/test_rule/some/path", + ref: "/apps/test/test_rule/some/path", value: { ".write": "other rule config" } }, { type: "SET_OWNER", - ref: "/test/test_owner/some/path", + ref: "/apps/test/test_owner/some/path", value: { ".owner": { "owners": { @@ -1892,10 +1892,10 @@ describe("DB operations", () => { } ] }); - assert.deepEqual(node.db.getValue("test/nested/far/down"), { "new": 12345 }) - expect(node.db.getValue("test/increment/value")).to.equal(30) - expect(node.db.getValue("test/decrement/value")).to.equal(10) - assert.deepEqual(node.db.getFunction("/test/test_function/some/path"), { + assert.deepEqual(node.db.getValue("/apps/test/nested/far/down"), { "new": 12345 }) + expect(node.db.getValue("/apps/test/increment/value")).to.equal(30) + expect(node.db.getValue("/apps/test/decrement/value")).to.equal(10) + assert.deepEqual(node.db.getFunction("/apps/test/test_function/some/path"), { ".function": { "fid": { "event_listener": "https://events.ainetwork.ai/trigger2", // modified @@ -1918,9 +1918,9 @@ describe("DB operations", () => { } }); assert.deepEqual( - node.db.getRule("/test/test_rule/some/path"), { ".write": "other rule config" }); + node.db.getRule("/apps/test/test_rule/some/path"), { ".write": "other rule config" }); assert.deepEqual( - node.db.getOwner("/test/test_owner/some/path"), { + node.db.getOwner("/apps/test/test_owner/some/path"), { ".owner": { "owners": { "*": { @@ -1950,19 +1950,19 @@ describe("DB operations", () => { assert.deepEqual(node.db.executeMultiSetOperation([ { type: "SET_VALUE", - ref: "test/nested/far/down", + ref: "/apps/test/nested/far/down", value: { "new": 12345 } }, { type: "INC_VALUE", - ref: "test/ai/foo", + ref: "/apps/test/ai/foo", value: 10 }, { type: "DEC_VALUE", - ref: "test/decrement/value", + ref: "/apps/test/decrement/value", value: 10 }, ]), { @@ -1978,12 +1978,12 @@ describe("DB operations", () => { } ] }) - expect(node.db.getValue("test/ai/foo")).to.equal("bar") + expect(node.db.getValue("/apps/test/ai/foo")).to.equal("bar") }) it("when successful with function triggering", () => { - const valuePath = '/test/test_function_triggering/allowed_path/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -2040,7 +2040,7 @@ describe("DB operations", () => { }, { // Default type: SET_VALUE - ref: "test/nested/far/down", + ref: "/apps/test/nested/far/down", value: { "new": 12345 } @@ -2063,13 +2063,13 @@ describe("DB operations", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": 
"/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -2101,8 +2101,8 @@ describe("DB operations", () => { }) it("when failed with function triggering", () => { - const valuePath = '/test/test_function_triggering/allowed_path/value'; - const functionResultPath = '/test/test_function_triggering/allowed_path/.last_tx/value'; + const valuePath = '/apps/test/test_function_triggering/allowed_path/value'; + const functionResultPath = '/apps/test/test_function_triggering/allowed_path/.last_tx/value'; const value = 'some value'; const timestamp = 1234567890000; @@ -2159,7 +2159,7 @@ describe("DB operations", () => { }, { // Default type: SET_VALUE - ref: "test/nested/far/down", + ref: "/apps/test/nested/far/down", value: { "new": 12345 } @@ -2182,16 +2182,16 @@ describe("DB operations", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 103, - "error_message": "No .write permission on: /test/test_function_triggering/allowed_path/.last_tx/value", + "error_message": "No .write permission on: /apps/test/test_function_triggering/allowed_path/.last_tx/value", "gas_amount": 0, } } @@ -2233,7 +2233,7 @@ describe("DB operations", () => { txBody = { operation: { type: 'SET_VALUE', - ref: '/test/some/path/for/tx', + ref: '/apps/test/some/path/for/tx', value: 'some value', }, gas_price: 1000000, @@ -2256,10 +2256,12 @@ describe("DB operations", () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1 + }, + service: 0 }, - gas_cost_total: 1, + gas_cost_total: 0 }); // extra.executed_at is updated with a non-null value. 
expect(executableTx.extra.executed_at).to.not.equal(null); @@ -2268,7 +2270,7 @@ describe("DB operations", () => { it("returns error code for object transaction", () => { assert.deepEqual(node.db.executeTransaction(objectTx, node.bc.lastBlockNumber() + 1), { code: 21, - error_message: "[executeTransaction] Not executable transaction: {\"tx_body\":{\"operation\":{\"type\":\"SET_VALUE\",\"ref\":\"/test/some/path/for/tx\",\"value\":\"some value\"},\"gas_price\":1000000,\"nonce\":-1,\"timestamp\":1568798344000},\"signature\":\"0xd0c7aee750ef0437ac8efe6c8c8b304d760f3271c36c4ea96d11f3446c9d772124a165aedc7bd6483dd4b318da7729867863f81714c250bf460ec39d0467624a26c47189b3e20eb5d2d698cf00bb11f729833b73282925b759df9e652f0a33dd1c\",\"hash\":\"0xd0c7aee750ef0437ac8efe6c8c8b304d760f3271c36c4ea96d11f3446c9d7721\",\"address\":\"0x00ADEc28B6a845a085e03591bE7550dd68673C1C\"}", + error_message: "[executeTransaction] Not executable transaction: {\"tx_body\":{\"operation\":{\"type\":\"SET_VALUE\",\"ref\":\"/apps/test/some/path/for/tx\",\"value\":\"some value\"},\"gas_price\":1000000,\"nonce\":-1,\"timestamp\":1568798344000},\"signature\":\"0x8b07b9ba72d969396c460faee6959b1b89b61fa049e116fd81686224fffd4fb19a879b401552e0c07bc211a7ba7cbb99e2ac32922e6f3c118caf183c2b351a7527ad5322e51d5f8405b2751255ca012008894a7e2e7673232fba1490a9fb35671b\",\"hash\":\"0x8b07b9ba72d969396c460faee6959b1b89b61fa049e116fd81686224fffd4fb1\",\"address\":\"0x00ADEc28B6a845a085e03591bE7550dd68673C1C\"}", gas_amount: 0 }); assert.deepEqual(objectTx.extra, undefined); @@ -2278,7 +2280,7 @@ describe("DB operations", () => { const maxHeightTxBody = { operation: { type: 'SET_VALUE', - ref: '/test/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20', + ref: '/apps/test/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20', value: 'some value', }, gas_price: 0, @@ -2290,8 +2292,10 @@ describe("DB operations", () => { code: 0, gas_amount: 1, gas_amount_total: { - app: {}, - service: 1 + app: { + test: 1 + }, + service: 0 }, gas_cost_total: 0, }); @@ -2299,7 +2303,7 @@ describe("DB operations", () => { const overHeightTxBody = { operation: { type: 'SET_VALUE', - ref: '/test/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21', + ref: '/apps/test/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21', value: 'some value', }, gas_price: 0, @@ -2325,7 +2329,7 @@ describe("DB operations", () => { const overSizeTxBody = { operation: { type: 'SET_VALUE', - ref: '/test/tree', + ref: '/apps/test/tree', value: overSizeTree, }, gas_price: 1, @@ -2335,7 +2339,7 @@ describe("DB operations", () => { const overSizeTx = Transaction.fromTxBody(overSizeTxBody, node.account.private_key); assert.deepEqual(node.db.executeTransaction(overSizeTx, node.bc.lastBlockNumber() + 1), { code: 24, - error_message: "Out of tree size limit (1001532 > 1000000)", + error_message: "Out of tree size limit (1001508 > 1000000)", gas_amount: 0, }); }) @@ -2358,7 +2362,7 @@ describe("DB operations", () => { "terminal_2": null, } }; - const valueResult = node.db.setValue("/test/empty_values/node_0", emptyValues); + const valueResult = node.db.setValue("/apps/test/empty_values/node_0", emptyValues); assert.deepEqual(valueResult.code, 0); emptyRules = { @@ -2377,7 +2381,7 @@ describe("DB operations", () => { } } }; - const ruleResult = node.db.setRule("/test/empty_rules/node_0", emptyRules); + const ruleResult = node.db.setRule("/apps/test/empty_rules/node_0", emptyRules); assert.deepEqual(ruleResult.code, 0); emptyOwners = { @@ -2414,25 +2418,25 @@ describe("DB operations", () => { } } }; - const ownerResult = 
node.db.setOwner("/test/empty_owners/node_0", emptyOwners); + const ownerResult = node.db.setOwner("/apps/test/empty_owners/node_0", emptyOwners); assert.deepEqual(ownerResult.code, 0); }); afterEach(() => { - const valueResult = node.db.setValue("/test/empty_values/node_0", null); + const valueResult = node.db.setValue("/apps/test/empty_values/node_0", null); assert.deepEqual(valueResult.code, 0); - const ruleResult = node.db.setRule("/test/empty_rules/node_0", null); + const ruleResult = node.db.setRule("/apps/test/empty_rules/node_0", null); assert.deepEqual(ruleResult.code, 0); - const ownerResult = node.db.setRule("/test/empty_owners/node_0", null); + const ownerResult = node.db.setRule("/apps/test/empty_owners/node_0", null); assert.deepEqual(ownerResult.code, 0); }); it("when setValue() with non-empty value", () => { expect(node.db.setValue( - "/test/empty_values/node_0/node_1a/node_2/node_3", "another value").code).to.equal(0); - assert.deepEqual(node.db.getValue("/test/empty_values/node_0"), { + "/apps/test/empty_values/node_0/node_1a/node_2/node_3", "another value").code).to.equal(0); + assert.deepEqual(node.db.getValue("/apps/test/empty_values/node_0"), { "terminal_1a": null, "terminal_1b": null, "terminal_1c": "", @@ -2450,8 +2454,8 @@ describe("DB operations", () => { it("when setValue() with 'null' value", () => { expect(node.db.setValue( - "/test/empty_values/node_0/node_1a/node_2/node_3", null).code).to.equal(0); - assert.deepEqual(node.db.getValue("/test/empty_values/node_0"), { + "/apps/test/empty_values/node_0/node_1a/node_2/node_3", null).code).to.equal(0); + assert.deepEqual(node.db.getValue("/apps/test/empty_values/node_0"), { "terminal_1c": "", "node_1b": { "terminal_2": null, @@ -2461,10 +2465,10 @@ describe("DB operations", () => { it("when setRule() with non-empty rule", () => { expect(node.db.setRule( - "/test/empty_rules/node_0/node_1a/node_2a/node_3a", { + "/apps/test/empty_rules/node_0/node_1a/node_2a/node_3a", { ".write": "some other rule" }).code).to.equal(0) - assert.deepEqual(node.db.getRule("/test/empty_rules/node_0"), { + assert.deepEqual(node.db.getRule("/apps/test/empty_rules/node_0"), { "node_1a": { "node_2a": { "node_3a": { @@ -2484,8 +2488,8 @@ describe("DB operations", () => { it("when setRule() with 'null' rule", () => { expect(node.db.setRule( - "/test/empty_rules/node_0/node_1a/node_2a/node_3a", null).code).to.equal(0); - assert.deepEqual(node.db.getRule("/test/empty_rules/node_0"), { + "/apps/test/empty_rules/node_0/node_1a/node_2a/node_3a", null).code).to.equal(0); + assert.deepEqual(node.db.getRule("/apps/test/empty_rules/node_0"), { "node_1b": { "node_2b": { "node_3b": { @@ -2498,7 +2502,7 @@ describe("DB operations", () => { it("when setOwner() with non-empty owner", () => { expect(node.db.setOwner( - "/test/empty_owners/node_0/node_1a/node_2a/node_3a", { + "/apps/test/empty_owners/node_0/node_1a/node_2a/node_3a", { ".owner": { "owners": { "*": { @@ -2510,7 +2514,7 @@ describe("DB operations", () => { } } }).code).to.equal(0) - assert.deepEqual(node.db.getOwner("/test/empty_owners/node_0"), { + assert.deepEqual(node.db.getOwner("/apps/test/empty_owners/node_0"), { "node_1a": { "node_2a": { "node_3a": { @@ -2548,8 +2552,8 @@ describe("DB operations", () => { it("when setOwner() with 'null' owner", () => { expect(node.db.setOwner( - "/test/empty_owners/node_0/node_1a/node_2a/node_3a", null).code).to.equal(0); - assert.deepEqual(node.db.getOwner("/test/empty_owners/node_0"), { + "/apps/test/empty_owners/node_0/node_1a/node_2a/node_3a", 
null).code).to.equal(0); + assert.deepEqual(node.db.getOwner("/apps/test/empty_owners/node_0"), { "node_1b": { "node_2b": { "node_3b": { @@ -2612,9 +2616,9 @@ describe("DB rule config", () => { dbValues["second_users"][node2.account.address][node2.account.address] = "i can write"; dbValues["second_users"][node1.account.address]["something_else"] = "i can write"; - result = node1.db.setValue("test", dbValues); + result = node1.db.setValue("/apps/test", dbValues); assert.deepEqual(result.code, 0); - result = node2.db.setValue("test", dbValues); + result = node2.db.setValue("/apps/test", dbValues); assert.deepEqual(result.code, 0); }) @@ -2623,34 +2627,34 @@ describe("DB rule config", () => { }); it("only allows certain users to write certain info if balance is greater than 0", () => { - expect(node2.db.evalRule(`test/users/${node2.account.address}/balance`, 0, null, null)) + expect(node2.db.evalRule(`/apps/test/users/${node2.account.address}/balance`, 0, null, null)) .to.equal(true) - expect(node2.db.evalRule(`test/users/${node2.account.address}/balance`, -1, null, null)) + expect(node2.db.evalRule(`/apps/test/users/${node2.account.address}/balance`, -1, null, null)) .to.equal(false) - expect(node1.db.evalRule(`test/users/${node1.account.address}/balance`, 1, null, null)) + expect(node1.db.evalRule(`/apps/test/users/${node1.account.address}/balance`, 1, null, null)) .to.equal(true) }) it("only allows certain users to write certain info if data exists", () => { - expect(node1.db.evalRule(`test/users/${node1.account.address}/info`, "something", null, null)) + expect(node1.db.evalRule(`/apps/test/users/${node1.account.address}/info`, "something", null, null)) .to.equal(true) expect(node2.db.evalRule( - `test/users/${node2.account.address}/info`, "something else", null, null)) + `/apps/test/users/${node2.account.address}/info`, "something else", null, null)) .to.equal(false) expect(node2.db.evalRule( - `test/users/${node2.account.address}/new_info`, "something", + `/apps/test/users/${node2.account.address}/new_info`, "something", { addr: node2.account.address }, null)) .to.equal(true) }) it("apply the closest ancestor's rule config if not exists", () => { expect(node1.db.evalRule( - `test/users/${node1.account.address}/child/grandson`, "something", + `/apps/test/users/${node1.account.address}/child/grandson`, "something", { addr: node1.account.address }, null)) .to.equal(true) expect(node2.db.evalRule( - `test/users/${node2.account.address}/child/grandson`, "something", + `/apps/test/users/${node2.account.address}/child/grandson`, "something", { addr: node1.account.address }, null)) .to.equal(false) @@ -2658,34 +2662,34 @@ describe("DB rule config", () => { it("only allows certain users to write certain info if data at other locations exists", () => { expect(node2.db.evalRule( - `test/users/${node2.account.address}/balance_info`, "something", null, null)) + `/apps/test/users/${node2.account.address}/balance_info`, "something", null, null)) .to.equal(true) expect(node1.db.evalRule( - `test/users/${node1.account.address}/balance_info`, "something", null, null)) + `/apps/test/users/${node1.account.address}/balance_info`, "something", null, null)) .to.equal(false) }) it("validates old data and new data together", () => { - expect(node1.db.evalRule(`test/users/${node1.account.address}/next_counter`, 11, null, null)) + expect(node1.db.evalRule(`/apps/test/users/${node1.account.address}/next_counter`, 11, null, null)) .to.equal(true) - 
expect(node1.db.evalRule(`test/users/${node1.account.address}/next_counter`, 12, null, null)) + expect(node1.db.evalRule(`/apps/test/users/${node1.account.address}/next_counter`, 12, null, null)) .to.equal(false) }) it("can handle nested path variables", () => { expect(node2.db.evalRule( - `test/second_users/${node2.account.address}/${node2.account.address}`, "some value", null, + `/apps/test/second_users/${node2.account.address}/${node2.account.address}`, "some value", null, null)) .to.equal(true) expect(node1.db.evalRule( - `test/second_users/${node1.account.address}/next_counter`, "some other value", null, null)) + `/apps/test/second_users/${node1.account.address}/next_counter`, "some other value", null, null)) .to.equal(false) }) it("duplicated path variables", () => { - expect(node1.db.evalRule('test/no_dup_key/aaa/bbb', "some value", null, null)) + expect(node1.db.evalRule('/apps/test/no_dup_key/aaa/bbb', "some value", null, null)) .to.equal(true) - expect(node1.db.evalRule('test/dup_key/aaa/bbb', "some value", null, null)) + expect(node1.db.evalRule('/apps/test/dup_key/aaa/bbb', "some value", null, null)) .to.equal(true) }) }) @@ -2698,7 +2702,7 @@ describe("DB owner config", () => { node = new BlockchainNode(); setNodeForTesting(node); - assert.deepEqual(node.db.setOwner("test/test_owner/mixed/true/true/true", + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/mixed/true/true/true", { ".owner": { "owners": { @@ -2724,7 +2728,7 @@ describe("DB owner config", () => { } } ).code, 0); - assert.deepEqual(node.db.setOwner("test/test_owner/mixed/false/true/true", + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/mixed/false/true/true", { ".owner": { "owners": { @@ -2750,7 +2754,7 @@ describe("DB owner config", () => { } } ).code, 0); - assert.deepEqual(node.db.setOwner("test/test_owner/mixed/true/false/true", + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/mixed/true/false/true", { ".owner": { "owners": { @@ -2776,7 +2780,7 @@ describe("DB owner config", () => { } } ).code, 0); - assert.deepEqual(node.db.setOwner("test/test_owner/mixed/true/true/false", + assert.deepEqual(node.db.setOwner("/apps/test/test_owner/mixed/true/true/false", { ".owner": { "owners": { @@ -2811,114 +2815,114 @@ describe("DB owner config", () => { // Known user it("branch_owner permission for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/true/true/branch', 'branch_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/false/true/true/branch', 'branch_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/false/true/branch', 'branch_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/true/false/branch', 'branch_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) }) it("write_owner permission for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true', 'write_owner', + '/apps/test/test_owner/mixed/true/true/true', 'write_owner', { addr: 
'0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true', 'write_owner', + '/apps/test/test_owner/mixed/false/true/true', 'write_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true', 'write_owner', + '/apps/test/test_owner/mixed/true/false/true', 'write_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false', 'write_owner', + '/apps/test/test_owner/mixed/true/true/false', 'write_owner', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) }) it("write_rule permission for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true', 'write_rule', + '/apps/test/test_owner/mixed/true/true/true', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true', 'write_rule', + '/apps/test/test_owner/mixed/false/true/true', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true', 'write_rule', + '/apps/test/test_owner/mixed/true/false/true', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false', 'write_rule', + '/apps/test/test_owner/mixed/true/true/false', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) }) it("write_rule permission on deeper path for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true/deeper_path', 'write_rule', + '/apps/test/test_owner/mixed/true/true/true/deeper_path', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true/deeper_path', 'write_rule', + '/apps/test/test_owner/mixed/false/true/true/deeper_path', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true/deeper_path', 'write_rule', + '/apps/test/test_owner/mixed/true/false/true/deeper_path', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false/deeper_path', 'write_rule', + '/apps/test/test_owner/mixed/true/true/false/deeper_path', 'write_rule', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) }) it("write_function permission for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true', 'write_function', + '/apps/test/test_owner/mixed/true/true/true', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true', 'write_function', + '/apps/test/test_owner/mixed/false/true/true', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true', 'write_function', + '/apps/test/test_owner/mixed/true/false/true', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - 
'/test/test_owner/mixed/true/true/false', 'write_function', + '/apps/test/test_owner/mixed/true/true/false', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) }) it("write_function permission on deeper path for known user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/true/true/deeper_path', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/false/true/true/deeper_path', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/false/true/deeper_path', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/true/false/deeper_path', 'write_function', { addr: '0x08Aed7AF9354435c38d52143EE50ac839D20696b' })) .to.equal(false) }) @@ -2926,94 +2930,94 @@ describe("DB owner config", () => { // Unknown user it("branch_owner permission for unknown user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/true/true/branch', 'branch_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/false/true/true/branch', 'branch_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(true) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/false/true/branch', 'branch_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false/branch', 'branch_owner', + '/apps/test/test_owner/mixed/true/true/false/branch', 'branch_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) }) it("write_owner permission for unknown user with mixed config", () => { - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/true', 'write_owner', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/true', 'write_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/false/true/true', 'write_owner', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/false/true/true', 'write_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/false/true', 'write_owner', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/false/true', 'write_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(true) - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/false', 'write_owner', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/false', 'write_owner', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) }) it("write_rule permission for unknown user with mixed config", () => { - 
expect(node.db.evalOwner('/test/test_owner/mixed/true/true/true', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/true', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/false/true/true', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/false/true/true', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/false/true', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/false/true', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/false', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/false', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(true) }) it("write_rule permission on deeper path for unknown user with mixed config", () => { - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/true/deeper_path', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/true/deeper_path', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/false/true/true/deeper_path', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/false/true/true/deeper_path', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/false/true/deeper_path', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/false/true/deeper_path', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/false/deeper_path', 'write_rule', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/false/deeper_path', 'write_rule', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(true) }) it("write_function permission for unknown user with mixed config", () => { - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/true', 'write_function', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/true', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })).to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/false/true/true', 'write_function', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/false/true/true', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })).to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/false/true', 'write_function', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/false/true', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })).to.equal(false) - expect(node.db.evalOwner('/test/test_owner/mixed/true/true/false', 'write_function', + expect(node.db.evalOwner('/apps/test/test_owner/mixed/true/true/false', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })).to.equal(true) }) it("write_function permission on deeper path for unknown user with mixed config", () => { expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/true/true/deeper_path', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) 
.to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/false/true/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/false/true/true/deeper_path', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/false/true/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/false/true/deeper_path', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(false) expect(node.db.evalOwner( - '/test/test_owner/mixed/true/true/false/deeper_path', 'write_function', + '/apps/test/test_owner/mixed/true/true/false/deeper_path', 'write_function', { addr: '0x07A43138CC760C85A5B1F115aa60eADEaa0bf417' })) .to.equal(true) }) @@ -3055,7 +3059,7 @@ describe("DB sharding config", () => { } } }; - result = node.db.setValue("test/test_sharding", dbValues); + result = node.db.setValue("/apps/test/test_sharding", dbValues); assert.deepEqual(result.code, 0); dbFuncs = { @@ -3084,7 +3088,7 @@ describe("DB sharding config", () => { } } }; - result = node.db.setFunction("test/test_sharding", dbFuncs); + result = node.db.setFunction("/apps/test/test_sharding", dbFuncs); assert.deepEqual(result.code, 0); dbRules = { @@ -3100,7 +3104,7 @@ describe("DB sharding config", () => { } } }; - result = node.db.setRule("test/test_sharding", dbRules); + result = node.db.setRule("/apps/test/test_sharding", dbRules); assert.deepEqual(result.code, 0); dbOwners = { @@ -3139,7 +3143,7 @@ describe("DB sharding config", () => { } } }; - result = node.db.setOwner("test/test_sharding", dbOwners); + result = node.db.setOwner("/apps/test/test_sharding", dbOwners); assert.deepEqual(result.code, 0); }) @@ -3169,182 +3173,182 @@ describe("DB sharding config", () => { const decDelta = 3; it("getValue with isGlobal = false", () => { - expect(node.db.getValue("test/test_sharding/some/path/to/value")).to.equal(value); - expect(node.db.getValue("apps/test_sharding/afan/test/some/path/to/value")).to.equal(null); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/value")).to.equal(value); + expect(node.db.getValue("/apps/test_sharding/afan/test/some/path/to/value")).to.equal(null); }) it("getValue with isGlobal = true", () => { - expect(node.db.getValue("test/test_sharding/some/path/to/value", false, true)).to.equal(null); - expect(node.db.getValue("apps/afan/test/test_sharding/some/path/to/value", false, true)) + expect(node.db.getValue("/apps/test/apps/test/test_sharding/some/path/to/value", false, true)).to.equal(null); + expect(node.db.getValue("/apps/afan/apps/test/test_sharding/some/path/to/value", false, true)) .to.equal(value); }) it("getValue with isGlobal = true and non-existing path", () => { - expect(node.db.getValue("some/non-existing/path", false, true)).to.equal(null); + expect(node.db.getValue("/apps/some/non-existing/path", false, true)).to.equal(null); }) it("setValue with isGlobal = false", () => { expect(node.db.setValue( - "test/test_sharding/some/path/to/value", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/test/test_sharding/some/path/to/value", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/some/path/to/value")).to.equal(newValue); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/value")).to.equal(newValue); }) it("setValue with isGlobal = true", () => { expect(node.db.setValue( 
- "apps/afan/test/test_sharding/some/path/to/value", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/some/path/to/value", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/some/path/to/value")).to.equal(newValue); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/value")).to.equal(newValue); }) it("setValue with isGlobal = true and non-existing path", () => { expect(node.db.setValue( - "some/non-existing/path", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/some/non-existing/path", newValue, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); }) it("setValue with isGlobal = false and non-writable path with sharding", () => { - assert.deepEqual(node.db.setValue("test/test_sharding/shards/enabled_shard/path", 20), { + assert.deepEqual(node.db.setValue("/apps/test/test_sharding/shards/enabled_shard/path", 20), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/test_sharding/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/test_sharding/shards/enabled_shard", "gas_amount": 0 }); }) it("setValue with isGlobal = true and non-writable path with sharding", () => { expect(node.db.setValue( - "apps/afan/test/test_sharding/shards/enabled_shard/path", 20, '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1', null, null, + "/apps/afan/apps/test/test_sharding/shards/enabled_shard/path", 20, '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1', null, null, true).code) .to.equal(0); - expect(node.db.getValue("apps/afan/test/test_sharding/shards/enabled_shard/path", false, true)) + expect(node.db.getValue("/apps/afan/apps/test/test_sharding/shards/enabled_shard/path", false, true)) .to.equal(10); // value unchanged }) it("setValue with isGlobal = false and writable path with sharding", () => { - expect(node.db.setValue("test/test_sharding/shards/disabled_shard/path", 20).code) + expect(node.db.setValue("/apps/test/test_sharding/shards/disabled_shard/path", 20).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/shards/disabled_shard/path")).to.equal(20); + expect(node.db.getValue("/apps/test/test_sharding/shards/disabled_shard/path")).to.equal(20); }) it("setValue with isGlobal = true and writable path with sharding", () => { expect(node.db.setValue( - "apps/afan/test/test_sharding/shards/disabled_shard/path", 20, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "apps/afan/apps/test/test_sharding/shards/disabled_shard/path", 20, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("apps/afan/test/test_sharding/shards/disabled_shard/path", false, true)) + expect(node.db.getValue("apps/afan/apps/test/test_sharding/shards/disabled_shard/path", false, true)) .to.equal(20); // value changed }) it("incValue with isGlobal = false", () => { expect(node.db.incValue( - "test/test_sharding/some/path/to/number", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/test/test_sharding/some/path/to/number", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}).code) .to.equal(0); - 
expect(node.db.getValue("test/test_sharding/some/path/to/number")).to.equal(10 + incDelta); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/number")).to.equal(10 + incDelta); }) it("incValue with isGlobal = true", () => { expect(node.db.incValue( - "apps/afan/test/test_sharding/some/path/to/number", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/some/path/to/number", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/some/path/to/number")).to.equal(10 + incDelta); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/number")).to.equal(10 + incDelta); }) it("incValue with isGlobal = true and non-existing path", () => { expect(node.db.incValue( - "some/non-existing/path", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) + "/apps/some/non-existing/path", incDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) .to.equal(0); }) it("setValue with isGlobal = false and non-writable path with sharding", () => { - assert.deepEqual(node.db.incValue("test/test_sharding/shards/enabled_shard/path", 5), { + assert.deepEqual(node.db.incValue("/apps/test/test_sharding/shards/enabled_shard/path", 5), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/test_sharding/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/test_sharding/shards/enabled_shard", "gas_amount": 0 }); }) it("setValue with isGlobal = true and non-writable path with sharding", () => { expect(node.db.incValue( - "apps/afan/test/test_sharding/shards/enabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/shards/enabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) .to.equal(0); - expect(node.db.getValue("apps/afan/test/test_sharding/shards/enabled_shard/path", false, true)) + expect(node.db.getValue("apps/afan/apps/test/test_sharding/shards/enabled_shard/path", false, true)) .to.equal(10); // value unchanged }) it("setValue with isGlobal = false and writable path with sharding", () => { - expect(node.db.incValue("test/test_sharding/shards/disabled_shard/path", 5).code).to.equal(0); - expect(node.db.getValue("test/test_sharding/shards/disabled_shard/path")) + expect(node.db.incValue("/apps/test/test_sharding/shards/disabled_shard/path", 5).code).to.equal(0); + expect(node.db.getValue("/apps/test/test_sharding/shards/disabled_shard/path")) .to.equal(15); // value changed }) it("setValue with isGlobal = true and writable path with sharding", () => { expect(node.db.incValue( - "apps/afan/test/test_sharding/shards/disabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/shards/disabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("apps/afan/test/test_sharding/shards/disabled_shard/path", false, true)) + expect(node.db.getValue("/apps/afan/apps/test/test_sharding/shards/disabled_shard/path", false, true)) .to.equal(15); // value changed }) it("decValue with isGlobal = false", () => { expect(node.db.decValue( - "test/test_sharding/some/path/to/number", decDelta, { addr: 
'0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/test/test_sharding/some/path/to/number", decDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/some/path/to/number")).to.equal(10 - decDelta); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/number")).to.equal(10 - decDelta); }) it("decValue with isGlobal = true", () => { expect(node.db.decValue( - "apps/afan/test/test_sharding/some/path/to/number", decDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/some/path/to/number", decDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/some/path/to/number")).to.equal(10 - decDelta); + expect(node.db.getValue("/apps/test/test_sharding/some/path/to/number")).to.equal(10 - decDelta); }) it("decValue with isGlobal = true and non-existing path", () => { expect(node.db.decValue( - "some/non-existing/path", decDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) + "/apps/some/non-existing/path", decDelta, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) .to.equal(0); }) it("setValue with isGlobal = false and non-writable path with sharding", () => { - assert.deepEqual(node.db.decValue("test/test_sharding/shards/enabled_shard/path", 5), { + assert.deepEqual(node.db.decValue("/apps/test/test_sharding/shards/enabled_shard/path", 5), { "code": 104, - "error_message": "Non-writable path with shard config: /values/test/test_sharding/shards/enabled_shard", + "error_message": "Non-writable path with shard config: /values/apps/test/test_sharding/shards/enabled_shard", "gas_amount": 0 }); }) it("setValue with isGlobal = true and non-writable path with sharding", () => { expect(node.db.decValue( - "apps/afan/test/test_sharding/shards/enabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/shards/enabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, null, true).code) .to.equal(0); expect(node.db.getValue( - "apps/afan/test/test_sharding/shards/enabled_shard/path", false, true)) + "/apps/afan/apps/test/test_sharding/shards/enabled_shard/path", false, true)) .to.equal(10); // value unchanged }) it("setValue with isGlobal = false and writable path with sharding", () => { - expect(node.db.decValue("test/test_sharding/shards/disabled_shard/path", 5).code) + expect(node.db.decValue("/apps/test/test_sharding/shards/disabled_shard/path", 5).code) .to.equal(0); - expect(node.db.getValue("test/test_sharding/shards/disabled_shard/path")) + expect(node.db.getValue("/apps/test/test_sharding/shards/disabled_shard/path")) .to.equal(5); // value changed }) it("setValue with isGlobal = true and writable path with sharding", () => { expect(node.db.decValue( - "apps/afan/test/test_sharding/shards/disabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/shards/disabled_shard/path", 5, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, null, { extra: { executed_at: 1234567890000 }}, true).code) .to.equal(0); - expect(node.db.getValue("apps/afan/test/test_sharding/shards/disabled_shard/path", false, true)) + 
expect(node.db.getValue("/apps/afan/apps/test/test_sharding/shards/disabled_shard/path", false, true)) .to.equal(5); // value changed }) @@ -3403,47 +3407,47 @@ describe("DB sharding config", () => { }; it("getFunction with isGlobal = false", () => { - assert.deepEqual(node.db.getFunction("test/test_sharding/some/path/to"), func); + assert.deepEqual(node.db.getFunction("/apps/test/test_sharding/some/path/to"), func); expect(node.db.getFunction("apps/afan/test/test_sharding/some/path/to")).to.equal(null); }) it("getFunction with isGlobal = true", () => { - expect(node.db.getFunction("test/test_sharding/some/path/to", false, true)).to.equal(null); + expect(node.db.getFunction("/apps/test/test_sharding/some/path/to", false, true)).to.equal(null); assert.deepEqual( - node.db.getFunction("apps/afan/test/test_sharding/some/path/to", false, true), func); + node.db.getFunction("/apps/afan/apps/test/test_sharding/some/path/to", false, true), func); }) it("getFunction with isGlobal = true and non-existing path", () => { - expect(node.db.getFunction("some/non-existing/path", false, true)).to.equal(null); + expect(node.db.getFunction("/apps/some/non-existing/path", false, true)).to.equal(null); }) it("setFunction with isGlobal = false", () => { expect(node.db.setFunction( - "test/test_sharding/some/path/to", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }).code) + "/apps/test/test_sharding/some/path/to", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }).code) .to.equal(0); - assert.deepEqual(node.db.getFunction("test/test_sharding/some/path/to"), newFunc); + assert.deepEqual(node.db.getFunction("/apps/test/test_sharding/some/path/to"), newFunc); }) it("setFunction with isGlobal = true", () => { expect(node.db.setFunction( - "apps/afan/test/test_sharding/some/path/to", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, + "/apps/afan/apps/test/test_sharding/some/path/to", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) .to.equal(0); assert.deepEqual( - node.db.getFunction("apps/afan/test/test_sharding/some/path/to", false, true), newFunc); + node.db.getFunction("/apps/afan/apps/test/test_sharding/some/path/to", false, true), newFunc); }) it("setFunction with isGlobal = true and non-existing path", () => { expect(node.db.setFunction( - "some/non-existing/path", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) + "/apps/some/non-existing/path", funcChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) .to.equal(0); }) it("matchFunction with isGlobal = false", () => { - assert.deepEqual(node.db.matchFunction("/test/test_sharding/some/path/to"), { + assert.deepEqual(node.db.matchFunction("/apps/test/test_sharding/some/path/to"), { "matched_path": { - "target_path": "/test/test_sharding/some/path/to", - "ref_path": "/test/test_sharding/some/path/to", + "target_path": "/apps/test/test_sharding/some/path/to", + "ref_path": "/apps/test/test_sharding/some/path/to", "path_vars": {}, }, "matched_config": { @@ -3455,7 +3459,7 @@ describe("DB sharding config", () => { "service_name": "https://ainetwork.ai", } }, - "path": "/test/test_sharding/some/path/to" + "path": "/apps/test/test_sharding/some/path/to" }, "subtree_configs": [ { @@ -3474,10 +3478,10 @@ describe("DB sharding config", () => { }) it("matchFunction with isGlobal = true", () => { - assert.deepEqual(node.db.matchFunction("/apps/afan/test/test_sharding/some/path/to", true), { + 
assert.deepEqual(node.db.matchFunction("/apps/afan/apps/test/test_sharding/some/path/to", true), { "matched_path": { - "target_path": "/apps/afan/test/test_sharding/some/path/to", - "ref_path": "/apps/afan/test/test_sharding/some/path/to", + "target_path": "/apps/afan/apps/test/test_sharding/some/path/to", + "ref_path": "/apps/afan/apps/test/test_sharding/some/path/to", "path_vars": {}, }, "matched_config": { @@ -3489,7 +3493,7 @@ describe("DB sharding config", () => { "service_name": "https://ainetwork.ai", } }, - "path": "/apps/afan/test/test_sharding/some/path/to" + "path": "/apps/afan/apps/test/test_sharding/some/path/to" }, "subtree_configs": [ { @@ -3508,7 +3512,7 @@ describe("DB sharding config", () => { }) it("matchFunction with isGlobal = true and non-existing path", () => { - expect(node.db.matchFunction("some/non-existing/path", true)).to.equal(null); + expect(node.db.matchFunction("/apps/some/non-existing/path", true)).to.equal(null); }) }) @@ -3523,50 +3527,50 @@ describe("DB sharding config", () => { const newValue = "that"; it("getRule with isGlobal = false", () => { - assert.deepEqual(node.db.getRule("test/test_sharding/some/path/to"), rule); - expect(node.db.getRule("apps/afan/test/test_sharding/some/path/to")).to.equal(null); + assert.deepEqual(node.db.getRule("/apps/test/test_sharding/some/path/to"), rule); + expect(node.db.getRule("/apps/afan/apps/test/test_sharding/some/path/to")).to.equal(null); }) it("getRule with isGlobal = true", () => { - expect(node.db.getRule("test/test_sharding/some/path/to", false, true)).to.equal(null); + expect(node.db.getRule("/apps/test/test_sharding/some/path/to", false, true)).to.equal(null); assert.deepEqual( - node.db.getRule("apps/afan/test/test_sharding/some/path/to", false, true), rule); + node.db.getRule("/apps/afan/apps/test/test_sharding/some/path/to", false, true), rule); }) it("getRule with isGlobal = true and non-existing path", () => { - expect(node.db.getRule("some/non-existing/path", false, true)).to.equal(null); + expect(node.db.getRule("/apps/some/non-existing/path", false, true)).to.equal(null); }) it("setRule with isGlobal = false", () => { expect(node.db.setRule( - "test/test_sharding/some/path/to", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }).code) + "/apps/test/test_sharding/some/path/to", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }).code) .to.equal(0); - assert.deepEqual(node.db.getRule("test/test_sharding/some/path/to"), newRule); + assert.deepEqual(node.db.getRule("/apps/test/test_sharding/some/path/to"), newRule); }) it("setRule with isGlobal = true", () => { expect(node.db.setRule( - "apps/afan/test/test_sharding/some/path/to", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) + "/apps/afan/apps/test/test_sharding/some/path/to", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) .to.equal(0); assert.deepEqual( - node.db.getRule("apps/afan/test/test_sharding/some/path/to", false, true), newRule); + node.db.getRule("/apps/afan/apps/test/test_sharding/some/path/to", false, true), newRule); }) it("setRule with isGlobal = true and non-existing path", () => { - expect(node.db.setRule("some/non-existing/path", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) + expect(node.db.setRule("/apps/some/non-existing/path", newRule, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) .to.equal(0); }) it("matchRule with isGlobal = false", () => { - 
assert.deepEqual(node.db.matchRule("/test/test_sharding/some/path/to"), { + assert.deepEqual(node.db.matchRule("/apps/test/test_sharding/some/path/to"), { "matched_path": { - "target_path": "/test/test_sharding/some/path/to", - "ref_path": "/test/test_sharding/some/path/to", + "target_path": "/apps/test/test_sharding/some/path/to", + "ref_path": "/apps/test/test_sharding/some/path/to", "path_vars": {}, }, "matched_config": { "config": "auth.addr === '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1'", - "path": "/test/test_sharding/some/path/to" + "path": "/apps/test/test_sharding/some/path/to" }, "subtree_configs": [ { @@ -3578,15 +3582,15 @@ describe("DB sharding config", () => { }) it("matchRule with isGlobal = true", () => { - assert.deepEqual(node.db.matchRule("/apps/afan/test/test_sharding/some/path/to", true), { + assert.deepEqual(node.db.matchRule("/apps/afan/apps/test/test_sharding/some/path/to", true), { "matched_path": { - "target_path": "/apps/afan/test/test_sharding/some/path/to", - "ref_path": "/apps/afan/test/test_sharding/some/path/to", + "target_path": "/apps/afan/apps/test/test_sharding/some/path/to", + "ref_path": "/apps/afan/apps/test/test_sharding/some/path/to", "path_vars": {}, }, "matched_config": { "config": "auth.addr === '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1'", - "path": "/apps/afan/test/test_sharding/some/path/to" + "path": "/apps/afan/apps/test/test_sharding/some/path/to" }, "subtree_configs": [ { @@ -3598,24 +3602,24 @@ describe("DB sharding config", () => { }) it("matchRule with isGlobal = true and non-existing path", () => { - expect(node.db.matchRule("some/non-existing/path", true)).to.equal(null); + expect(node.db.matchRule("/apps/some/non-existing/path", true)).to.equal(null); }) it("evalRule with isGlobal = false", () => { - expect(node.db.evalRule("/test/test_sharding/some/path/to", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" })) + expect(node.db.evalRule("/apps/test/test_sharding/some/path/to", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" })) .to.equal(true); }) it("evalRule with isGlobal = true", () => { expect(node.db.evalRule( - "/apps/afan/test/test_sharding/some/path/to", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, + "/apps/afan/apps/test/test_sharding/some/path/to", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, null, true)) .to.equal(true); }) it("evalRule with isGlobal = true and non-existing path", () => { expect(node.db.evalRule( - "/some/non-existing/path", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, null, true)) + "/apps/some/non-existing/path", newValue, { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, null, true)) .to.equal(null); }) }) @@ -3684,47 +3688,47 @@ describe("DB sharding config", () => { }; it("getOwner with isGlobal = false", () => { - assert.deepEqual(node.db.getOwner("test/test_sharding/some/path/to"), owner); - expect(node.db.getOwner("apps/afan/test/test_sharding/some/path/to")).to.equal(null); + assert.deepEqual(node.db.getOwner("/apps/test/test_sharding/some/path/to"), owner); + expect(node.db.getOwner("/apps/afan/apps/test/test_sharding/some/path/to")).to.equal(null); }) it("getOwner with isGlobal = true", () => { - expect(node.db.getOwner("test/test_sharding/some/path/to", false, true)).to.equal(null); + expect(node.db.getOwner("/apps/test/test_sharding/some/path/to", false, true)).to.equal(null); assert.deepEqual( - node.db.getOwner("apps/afan/test/test_sharding/some/path/to", false, true), owner); + 
node.db.getOwner("apps/afan/apps/test/test_sharding/some/path/to", false, true), owner); }) it("getOwner with isGlobal = true and non-existing path", () => { - expect(node.db.getOwner("some/non-existing/path", false, true)).to.equal(null); + expect(node.db.getOwner("/apps/some/non-existing/path", false, true)).to.equal(null); }) it("setOwner with isGlobal = false", () => { expect(node.db.setOwner( - "test/test_sharding/some/path/to", ownerChange, + "/apps/test/test_sharding/some/path/to", ownerChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }).code) .to.equal(0); - assert.deepEqual(node.db.getOwner("test/test_sharding/some/path/to"), newOwner); + assert.deepEqual(node.db.getOwner("/apps/test/test_sharding/some/path/to"), newOwner); }) it("setOwner with isGlobal = true", () => { expect(node.db.setOwner( - "apps/afan/test/test_sharding/some/path/to", ownerChange, + "/apps/afan/apps/test/test_sharding/some/path/to", ownerChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code) .to.equal(0); assert.deepEqual( - node.db.getOwner("apps/afan/test/test_sharding/some/path/to", false, true), newOwner); + node.db.getOwner("/apps/afan/apps/test/test_sharding/some/path/to", false, true), newOwner); }) it("setOwner with isGlobal = true and non-existing path", () => { expect(node.db.setOwner( - "some/non-existing/path", ownerChange, + "/apps/some/non-existing/path", ownerChange, { addr: '0x09A0d53FDf1c36A131938eb379b98910e55EEfe1' }, true).code).to.equal(0); }) it("matchOwner with isGlobal = false", () => { - assert.deepEqual(node.db.matchOwner("/test/test_sharding/some/path/to"), { + assert.deepEqual(node.db.matchOwner("/apps/test/test_sharding/some/path/to"), { "matched_path": { - "target_path": "/test/test_sharding/some/path/to", + "target_path": "/apps/test/test_sharding/some/path/to", }, "matched_config": { "config": { @@ -3743,15 +3747,15 @@ describe("DB sharding config", () => { } } }, - "path": "/test/test_sharding/some/path/to" + "path": "/apps/test/test_sharding/some/path/to" } }); }) it("matchOwner with isGlobal = true", () => { - assert.deepEqual(node.db.matchOwner("/apps/afan/test/test_sharding/some/path/to", true), { + assert.deepEqual(node.db.matchOwner("/apps/afan/apps/test/test_sharding/some/path/to", true), { "matched_path": { - "target_path": "/apps/afan/test/test_sharding/some/path/to", + "target_path": "/apps/afan/apps/test/test_sharding/some/path/to", }, "matched_config": { "config": { @@ -3770,30 +3774,30 @@ describe("DB sharding config", () => { } } }, - "path": "/apps/afan/test/test_sharding/some/path/to" + "path": "/apps/afan/apps/test/test_sharding/some/path/to" } }); }) it("matchOwner with isGlobal = true and non-existing path", () => { - expect(node.db.matchOwner("some/non-existing/path", true)).to.equal(null); + expect(node.db.matchOwner("/apps/some/non-existing/path", true)).to.equal(null); }) it("evalOwner with isGlobal = false", () => { expect(node.db.evalOwner( - "/test/test_sharding/some/path/to", "write_rule", + "/apps/test/test_sharding/some/path/to", "write_rule", { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" })).to.equal(true); }) it("evalOwner with isGlobal = true", () => { expect(node.db.evalOwner( - "/apps/afan/test/test_sharding/some/path/to", "write_rule", + "/apps/afan/apps/test/test_sharding/some/path/to", "write_rule", { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, true)).to.equal(true); }) it("evalOwner with isGlobal = true and non-existing path", () => { expect(node.db.evalOwner( - "/some/non-existing/path", 
"write_rule", + "/apps/some/non-existing/path", "write_rule", { addr: "0x09A0d53FDf1c36A131938eb379b98910e55EEfe1" }, true)).to.equal(null); }) }) @@ -3829,7 +3833,7 @@ describe("Proof hash", () => { child3: false } }; - result = node.db.setValue("test", valuesObject); + result = node.db.setValue("/apps/test", valuesObject); assert.deepEqual(result.code, 0); }); @@ -3839,10 +3843,10 @@ describe("Proof hash", () => { describe("Check proof for setValue(), setOwner(), setRule(), and setFunction()", () => { it("checks proof hash of under $root_path/test", () => { - const valuesNode = node.db.getRefForReading(['values', 'test']); - const ownersNode = node.db.getRefForReading(['owners', 'test']); - const rulesNode = node.db.getRefForReading(['rules', 'test']); - const functionNode = node.db.getRefForReading(['functions', 'test']); + const valuesNode = node.db.getRefForReading(['values', 'apps', 'test']); + const ownersNode = node.db.getRefForReading(['owners', 'apps', 'test']); + const rulesNode = node.db.getRefForReading(['rules', 'apps', 'test']); + const functionNode = node.db.getRefForReading(['functions', 'apps', 'test']); expect(valuesNode.getProofHash()).to.equal(valuesNode.buildProofHash()); expect(ownersNode.getProofHash()).to.equal(ownersNode.buildProofHash()); expect(rulesNode.getProofHash()).to.equal(rulesNode.buildProofHash()); @@ -3887,14 +3891,14 @@ describe("Proof hash", () => { }, } }; - node.db.setValue("test/level0/level1/level2", { aaa: 'bbb' }); - node.db.setOwner("test/empty_owners/.owner/owners/*/write_function", false); - node.db.setRule("test/test_rules", nestedRules); - node.db.setFunction("test/test_functions", dbFuncs); - const valuesNode = node.db.getRefForReading(['values', 'test']); - const ownersNode = node.db.getRefForReading(['owners', 'test']); - const rulesNode = node.db.getRefForReading(['rules', 'test']); - const functionNode = node.db.getRefForReading(['functions', 'test']); + node.db.setValue("/apps/test/level0/level1/level2", { aaa: 'bbb' }); + node.db.setOwner("/apps/test/empty_owners/.owner/owners/*/write_function", false); + node.db.setRule("/apps/test/test_rules", nestedRules); + node.db.setFunction("/apps/test/test_functions", dbFuncs); + const valuesNode = node.db.getRefForReading(['values', 'apps', 'test']); + const ownersNode = node.db.getRefForReading(['owners', 'apps', 'test']); + const rulesNode = node.db.getRefForReading(['rules', 'apps', 'test']); + const functionNode = node.db.getRefForReading(['functions', 'apps', 'test']); expect(valuesNode.getProofHash()).to.equal(valuesNode.buildProofHash()); expect(ownersNode.getProofHash()).to.equal(ownersNode.buildProofHash()); expect(rulesNode.getProofHash()).to.equal(rulesNode.buildProofHash()); @@ -3905,7 +3909,7 @@ describe("Proof hash", () => { describe("State proof (getStateProof)", () => { it("tests proof with a null case", () => { const rootNode = node.db.stateRoot; - assert.deepEqual(null, node.db.getStateProof('/test/test')); + assert.deepEqual(null, node.db.getStateProof('/apps/test/test')); }); it("tests proof with owners, rules, values and functions", () => { @@ -3941,10 +3945,10 @@ describe("Proof hash", () => { { [label]: { [ProofProperties.PROOF_HASH]: functionNode.getChild(label).getProofHash() } }); }); assert.deepEqual(rootProof, node.db.getStateProof('/')); - assert.deepEqual(ownersProof, node.db.getStateProof('/owners/test')); - assert.deepEqual(rulesProof, node.db.getStateProof('/rules/test')); - assert.deepEqual(valuesProof, node.db.getStateProof('/values/test')); - 
assert.deepEqual(functionsProof, node.db.getStateProof('/functions/test')); + assert.deepEqual(ownersProof, node.db.getStateProof('/owners/apps')); + assert.deepEqual(rulesProof, node.db.getStateProof('/rules/apps')); + assert.deepEqual(valuesProof, node.db.getStateProof('/values/apps')); + assert.deepEqual(functionsProof, node.db.getStateProof('/functions/apps')); }); }); }); @@ -3973,7 +3977,7 @@ describe("State info (getStateInfo)", () => { label22: 'value12', } }; - result = node.db.setValue("test", valuesObject); + result = node.db.setValue("/apps/test", valuesObject); assert.deepEqual(result.code, 0); }); @@ -3983,7 +3987,7 @@ describe("State info (getStateInfo)", () => { describe("No tree structure change", () => { it("replace node values", () => { - result = node.db.setValue('test/label1/label12', { // Only value updates + result = node.db.setValue('/apps/test/label1/label12', { // Only value updates label121: 'new_value121', label122: 'new_value122' }); @@ -3991,66 +3995,66 @@ describe("State info (getStateInfo)", () => { // Existing paths. assert.deepEqual( - node.db.getStateInfo('/values/test/label1'), { tree_height: 2, tree_size: 5 }); + node.db.getStateInfo('/values/apps/test/label1'), { tree_height: 2, tree_size: 5 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label1/label11'), { tree_height: 0, tree_size: 1 }); + node.db.getStateInfo('/values/apps/test/label1/label11'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label1/label12'), { tree_height: 1, tree_size: 3 }); + node.db.getStateInfo('/values/apps/test/label1/label12'), { tree_height: 1, tree_size: 3 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label1/label12/label121'), + node.db.getStateInfo('/values/apps/test/label1/label12/label121'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label1/label12/label122'), + node.db.getStateInfo('/values/apps/test/label1/label12/label122'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2'), { tree_height: 1, tree_size: 3 }); + node.db.getStateInfo('/values/apps/test/label2'), { tree_height: 1, tree_size: 3 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label21'), { tree_height: 0, tree_size: 1 }); + node.db.getStateInfo('/values/apps/test/label2/label21'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label22'), { tree_height: 0, tree_size: 1 }); + node.db.getStateInfo('/values/apps/test/label2/label22'), { tree_height: 0, tree_size: 1 }); // Non-existing paths. 
- assert.deepEqual(node.db.getStateInfo('/values/test/non-existing/path'), null); + assert.deepEqual(node.db.getStateInfo('/values/apps/test/non-existing/path'), null); }); }); describe("Tree reduction", () => { it("remove state nodes", () => { - result = node.db.setValue("test/label1/label12", null); // Reduce tree + result = node.db.setValue("/apps/test/label1/label12", null); // Reduce tree assert.deepEqual(result.code, 0); assert.deepEqual( - node.db.getStateInfo('/values/test/label1'), { tree_height: 1, tree_size: 2 }); + node.db.getStateInfo('/values/apps/test/label1'), { tree_height: 1, tree_size: 2 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label1/label11'), { tree_height: 0, tree_size: 1 }); - assert.deepEqual(node.db.getStateInfo('/values/test/label1/label12'), null); + node.db.getStateInfo('/values/apps/test/label1/label11'), { tree_height: 0, tree_size: 1 }); + assert.deepEqual(node.db.getStateInfo('/values/apps/test/label1/label12'), null); assert.deepEqual( - node.db.getStateInfo('/values/test/label2'), { tree_height: 1, tree_size: 3 }); + node.db.getStateInfo('/values/apps/test/label2'), { tree_height: 1, tree_size: 3 }); }); }); describe("Tree expansion", () => { it("add state nodes", () => { - result = node.db.setValue('test/label2/label21', { // Expand tree + result = node.db.setValue('/apps/test/label2/label21', { // Expand tree label211: 'value211', label212: 'value212' }); assert.deepEqual(result.code, 0); assert.deepEqual( - node.db.getStateInfo('/values/test/label1'), { tree_height: 2, tree_size: 5 }); + node.db.getStateInfo('/values/apps/test/label1'), { tree_height: 2, tree_size: 5 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2'), { tree_height: 2, tree_size: 5 }); + node.db.getStateInfo('/values/apps/test/label2'), { tree_height: 2, tree_size: 5 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label21'), { tree_height: 1, tree_size: 3 }); + node.db.getStateInfo('/values/apps/test/label2/label21'), { tree_height: 1, tree_size: 3 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label21/label211'), + node.db.getStateInfo('/values/apps/test/label2/label21/label211'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label21/label212'), + node.db.getStateInfo('/values/apps/test/label2/label21/label212'), { tree_height: 0, tree_size: 1 }); assert.deepEqual( - node.db.getStateInfo('/values/test/label2/label22'), { tree_height: 0, tree_size: 1 }); + node.db.getStateInfo('/values/apps/test/label2/label22'), { tree_height: 0, tree_size: 1 }); }); }); }); @@ -4087,7 +4091,7 @@ describe("State version handling", () => { } } }; - result = node.db.setValue("test", dbValues); + result = node.db.setValue("/apps/test", dbValues); assert.deepEqual(result.code, 0); }); @@ -4098,15 +4102,15 @@ describe("State version handling", () => { describe("getRefForReading()", () => { it("the nodes on the path are not affected", () => { expect(node.db.deleteBackupStateVersion()).to.equal(true); - const child2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const child2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const child21 = child2.getChild('child_21'); const child212 = child21.getChild('child_212'); - expect(node.db.getRefForReading(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForReading(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) 
.to.not.equal(null); // The nodes on the path are not affected. - const newChild2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const newChild2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const newChild21 = newChild2.getChild('child_21'); const newChild212 = newChild21.getChild('child_212'); expect(newChild2).to.equal(child2); // Not cloned @@ -4118,18 +4122,18 @@ describe("State version handling", () => { describe("getRefForWriting()", () => { it("the nodes of single access path are not cloned", () => { // First referencing to make the number of access paths = 1. - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); - const child2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const child2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const child21 = child2.getChild('child_21'); const child212 = child21.getChild('child_212'); // Second referencing. - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); // The nodes on the path are not cloned. - const newChild2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const newChild2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const newChild21 = newChild2.getChild('child_21'); const newChild212 = newChild21.getChild('child_212'); expect(newChild2).to.equal(child2); // Not cloned @@ -4141,15 +4145,15 @@ describe("State version handling", () => { // Make the number of roots = 2. const otherRoot = node.stateManager.cloneVersion(node.db.stateVersion, 'new version'); expect(otherRoot).to.not.equal(null); - const child2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const child2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const child21 = child2.getChild('child_21'); const child212 = child21.getChild('child_212'); - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); // The nodes on the path are cloned. - const newChild2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const newChild2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const newChild21 = newChild2.getChild('child_21'); const newChild212 = newChild21.getChild('child_212'); expect(newChild2).to.not.equal(child2); // Cloned. @@ -4158,17 +4162,17 @@ describe("State version handling", () => { }); it("the nodes of multiple access paths are cloned - multiple parents case 1", () => { - const child2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const child2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const child21 = child2.getChild('child_21'); const child212 = child21.getChild('child_212'); // Make child21's number of parents = 2. 
const clonedChild2 = child2.clone('new version'); - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); // Only the nodes of multiple paths are cloned. - const newChild2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const newChild2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const newChild21 = newChild2.getChild('child_21'); const newChild212 = newChild21.getChild('child_212'); expect(newChild2).to.equal(child2); // Not cloned. @@ -4177,17 +4181,17 @@ describe("State version handling", () => { }); it("the nodes of multiple access paths are cloned - multiple parents case 2", () => { - const child2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const child2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const child21 = child2.getChild('child_21'); const child212 = child21.getChild('child_212'); // Make child212's number of parents = 2. const clonedChild21 = child21.clone('new version'); - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); // Only the nodes of multiple paths are cloned. - const newChild2 = node.db.stateRoot.getChild('values').getChild('test').getChild('child_2'); + const newChild2 = node.db.stateRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const newChild21 = newChild2.getChild('child_21'); const newChild212 = newChild21.getChild('child_212'); expect(newChild2).to.equal(child2); // Not cloned. @@ -4198,15 +4202,15 @@ describe("State version handling", () => { it("the on other ref paths are not affected", () => { const otherRoot = node.stateManager.cloneVersion(node.db.stateVersion, 'new version'); expect(otherRoot).to.not.equal(null); - const beforeOtherChild2 = otherRoot.getChild('values').getChild('test').getChild('child_2'); + const beforeOtherChild2 = otherRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const beforeOtherChild21 = beforeOtherChild2.getChild('child_21'); const beforeOtherChild212 = beforeOtherChild21.getChild('child_212'); - expect(node.db.getRefForWriting(['values', 'test', 'child_2', 'child_21', 'child_212'])) + expect(node.db.getRefForWriting(['values', 'apps', 'test', 'child_2', 'child_21', 'child_212'])) .to.not.equal(null); // The nodes on the path from other roots are not affected. - const afterOtherChild2 = otherRoot.getChild('values').getChild('test').getChild('child_2'); + const afterOtherChild2 = otherRoot.getChild('values').getChild('apps').getChild('test').getChild('child_2'); const afterOtherChild21 = afterOtherChild2.getChild('child_21'); const afterOtherChild212 = afterOtherChild21.getChild('child_212'); expect(afterOtherChild2).to.equal(beforeOtherChild2); // Not cloned @@ -4214,26 +4218,26 @@ describe("State version handling", () => { expect(afterOtherChild212).to.equal(beforeOtherChild212); // Not cloned // The state values of other roots are not affected. 
- assert.deepEqual(otherRoot.getChild('values').getChild('test').toJsObject(), dbValues); + assert.deepEqual(otherRoot.getChild('values').getChild('apps').getChild('test').toJsObject(), dbValues); }); }); describe("backupDb() / restoreDb()", () => { it("backuped states are restored", () => { - assert.deepEqual(node.db.getValue('test'), dbValues); + assert.deepEqual(node.db.getValue('/apps/test'), dbValues); assert.deepEqual(node.db.backupDb(), true); expect(node.db.backupStateVersion).to.not.equal(null); expect(node.db.backupStateRoot).to.not.equal(null); - assert.deepEqual(node.db.getValue('test'), dbValues); + assert.deepEqual(node.db.getValue('/apps/test'), dbValues); assert.deepEqual( - node.db.setValue('/test/child_2/child_21', { 'new_child': 'new_value' }).code, 0); - assert.deepEqual(node.db.getValue('/test/child_2/child_21'), { 'new_child': 'new_value' }); + node.db.setValue('/apps/test/child_2/child_21', { 'new_child': 'new_value' }).code, 0); + assert.deepEqual(node.db.getValue('/apps/test/child_2/child_21'), { 'new_child': 'new_value' }); assert.deepEqual(node.db.restoreDb(), true); expect(node.db.backupStateVersion).to.equal(null); expect(node.db.backupStateRoot).to.equal(null); - assert.deepEqual(node.db.getValue('test'), dbValues); + assert.deepEqual(node.db.getValue('/apps/test'), dbValues); }); }); }); diff --git a/unittest/functions.test.js b/unittest/functions.test.js index 9a0c753a1..e4e578bc8 100644 --- a/unittest/functions.test.js +++ b/unittest/functions.test.js @@ -33,11 +33,11 @@ describe("Functions", () => { }); describe("Function triggering", () => { - const refPathRest = "/test/test_function/some/path/rest"; - const refPathRestMulti = "/test/test_function/some/path/rest_multi"; - const refPathRestWithoutListener = "/test/test_function/some/path/rest_without_listener"; - const refPathRestNotWhitelisted = "/test/test_function/some/path/rest_not_whitelisted"; - const refPathNull = "/test/test_function/some/path/null"; + const refPathRest = "/apps/test/test_function/some/path/rest"; + const refPathRestMulti = "/apps/test/test_function/some/path/rest_multi"; + const refPathRestWithoutListener = "/apps/test/test_function/some/path/rest_without_listener"; + const refPathRestNotWhitelisted = "/apps/test/test_function/some/path/rest_not_whitelisted"; + const refPathNull = "/apps/test/test_function/some/path/null"; let requestBody1 = null, requestBody2 = null; before(() => { @@ -349,7 +349,7 @@ describe("Functions", () => { }) describe("Gas fee", () => { - const refPathRest = "/test/test_function/some/path/rest"; + const refPathRest = "/apps/test/test_function/some/path/rest"; const refPathTransfer = "/transfer/0x09A0d53FDf1c36A131938eb379b98910e55EEfe1/0x107Ab4369070716cEA7f0d34359fa6a99F54951F/0/value"; diff --git a/unittest/test-util.js b/unittest/test-util.js index d16877314..b3b99977d 100644 --- a/unittest/test-util.js +++ b/unittest/test-util.js @@ -24,21 +24,21 @@ function setNodeForTesting( throw Error('Missing owners file: ' + ownersFile); } const owners = readConfigFile(ownersFile); - node.db.setOwnersForTesting("test", owners); + node.db.setOwnersForTesting("/apps/test", owners); const rulesFile = path.resolve(__dirname, './data/rules_for_testing.json'); if (!fs.existsSync(rulesFile)) { throw Error('Missing rules file: ' + rulesFile); } const rules = readConfigFile(rulesFile); - node.db.setRulesForTesting("test", rules); + node.db.setRulesForTesting("/apps/test", rules); const functionsFile = path.resolve(__dirname, './data/functions_for_testing.json'); if 
(!fs.existsSync(functionsFile)) { throw Error('Missing functions file: ' + functionsFile); } const functions = JSON.parse(fs.readFileSync(functionsFile)); - node.db.setFunctionsForTesting("test", functions); + node.db.setFunctionsForTesting("/apps/test", functions); } if (!skipShardingConfig) { const shardingFile = path.resolve(__dirname, './data/sharding_for_testing.json'); @@ -123,6 +123,30 @@ function parseOrLog(resp) { return parsed; } +async function setUpApp(appName, serverList, appConfig) { + const signingAddr = parseOrLog(syncRequest( + 'GET', serverList[0] + '/get_address').body.toString('utf-8')).result; + const appStakingRes = parseOrLog(syncRequest('POST', serverList[0] + '/set_value', { + json: { + ref: `/staking/${appName}/${signingAddr}/0/stake/${Date.now()}/value`, + value: 1 + } + }).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { + console.log(`setUpTestApp(): Failed to check finalization of app staking tx.`); + } + + const createAppRes = parseOrLog(syncRequest('POST', serverList[0] + '/set_value', { + json: { + ref: `/manage_app/${appName}/create/${Date.now()}`, + value: appConfig + } + }).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { + console.log(`setUpTestApp(): Failed to check finalization of create app tx.`) + } +} + module.exports = { readConfigFile, setNodeForTesting, @@ -132,4 +156,5 @@ module.exports = { waitForNewBlocks, waitUntilNodeSyncs, parseOrLog, + setUpApp, }; diff --git a/unittest/transaction.test.js b/unittest/transaction.test.js index 3332f82d9..e5fa7873c 100644 --- a/unittest/transaction.test.js +++ b/unittest/transaction.test.js @@ -83,7 +83,7 @@ describe('Transaction', () => { txBodyForNode = { operation: { type: 'SET_VALUE', - ref: 'test/comcom', + ref: '/apps/test/comcom', value: 'val' }, gas_price: 1 From 67e2983ed0ba84f4f4a2d2adc35cc0e95d5c91ec Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 22:35:54 +0900 Subject: [PATCH 003/175] Remove test from SERVICE_TYPES. --- common/constants.js | 1 - 1 file changed, 1 deletion(-) diff --git a/common/constants.js b/common/constants.js index 874595f2c..ffe89cd31 100644 --- a/common/constants.js +++ b/common/constants.js @@ -469,7 +469,6 @@ const SERVICE_TYPES = [ PredefinedDbPaths.SHARDING, PredefinedDbPaths.STAKING, PredefinedDbPaths.TRANSFER, - 'test', // NOTE(platfowner): A temporary solution for tests. ]; function isServiceType(type) { From 351182cc77aadc606bb763ddd47f31086fbd17d8 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 22:42:12 +0900 Subject: [PATCH 004/175] Remove unused code. 
--- integration/afan_dapp.test.js | 28 --------- integration/blockchain.test.js | 28 --------- integration/node.test.js | 102 --------------------------------- integration/sharding.test.js | 46 +-------------- 4 files changed, 1 insertion(+), 203 deletions(-) diff --git a/integration/afan_dapp.test.js b/integration/afan_dapp.test.js index fb41f7f6d..b04b735a1 100644 --- a/integration/afan_dapp.test.js +++ b/integration/afan_dapp.test.js @@ -62,34 +62,6 @@ async function setUp() { 'GET', server3 + '/get_address').body.toString('utf-8')).result; const server4Addr = parseOrLog(syncRequest( 'GET', server4 + '/get_address').body.toString('utf-8')).result; - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - // json: { - // ref: `/staking/afan/${server1Addr}/0/stake/${Date.now()}/value`, - // value: 1 - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.log(`setUp(): Failed to check finalization of app staking tx.`); - // } - - // const createAppRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - // json: { - // ref: `/manage_app/afan/create/${Date.now()}`, - // value: { - // admin: { - // [server1Addr]: true, - // [server2Addr]: true, - // [server3Addr]: true, - // [server4Addr]: true, - // } - // } - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - // console.log(`setUp(): Failed to check finalization of create app tx.`) - // } await setUpApp('afan', serverList, { admin: { diff --git a/integration/blockchain.test.js b/integration/blockchain.test.js index e4c8340f6..92d9b48e9 100644 --- a/integration/blockchain.test.js +++ b/integration/blockchain.test.js @@ -270,34 +270,6 @@ describe('Blockchain Cluster', () => { 'GET', server3 + '/get_address').body.toString('utf-8')).result; const server4Addr = parseOrLog(syncRequest( 'GET', server4 + '/get_address').body.toString('utf-8')).result; - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - // json: { - // ref: `/staking/test/${server1Addr}/0/stake/${Date.now()}/value`, - // value: 1 - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.log(`setUp(): Failed to check finalization of app staking tx.`); - // } - - // const createAppRes = parseOrLog(syncRequest('POST', server1 + '/set_value', { - // json: { - // ref: `/manage_app/test/create/${Date.now()}`, - // value: { - // admin: { - // [server1Addr]: true, - // [server2Addr]: true, - // [server3Addr]: true, - // [server4Addr]: true, - // } - // } - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - // console.log(`setUp(): Failed to check finalization of create app tx.`) - // } await setUpApp('test', serverList, { admin: { diff --git a/integration/node.test.js b/integration/node.test.js index 59a3d4e09..f9c8e2061 100644 --- a/integration/node.test.js +++ b/integration/node.test.js @@ -3943,25 +3943,6 @@ describe('Blockchain Node', () => { describe('Gas fee', () => { before(async () 
=> { await setUpApp('test_service_gas_fee', serverList, { admin: { [serviceAdmin]: true } }); - // const appStakingPath = - // `/staking/test_service_gas_fee/${serviceAdmin}/0/stake/${Date.now()}/value`; - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - // ref: appStakingPath, - // value: 1 - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.error(`Failed to check finalization of tx.`); - // } - // const manageAppPath = '/manage_app/test_service_gas_fee/create/1'; - // const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - // ref: manageAppPath, - // value: { - // admin: { [serviceAdmin]: true }, - // }, - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - // console.error(`Failed to check finalization of tx.`); - // } }); it("native function (_transfer) with individual account registration", () => { @@ -4478,28 +4459,6 @@ describe('Blockchain Node', () => { describe('Staking: _stake, _unstake', () => { before(async () => { - // const appStakingPath = `/staking/test_service_staking/${serviceAdmin}/0/stake/${Date.now()}/value` - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - // ref: appStakingPath, - // value: 1 - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.error(`Failed to check finalization of tx.`); - // } - // const manageAppPath = '/manage_app/test_service_staking/create/1' - // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - // ref: manageAppPath, - // value: { - // admin: { [serviceAdmin]: true }, - // service: { - // staking: { lockup_duration: 1000 } - // } - // } - // }}).body.toString('utf-8')); - // expect(body.code).to.equals(0); - // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - // console.error(`Failed to check finalization of tx.`); - // } await setUpApp('test_service_staking', serverList, { admin: { [serviceAdmin]: true }, service: { @@ -4912,26 +4871,6 @@ describe('Blockchain Node', () => { describe('Payments: _pay, _claim', () => { before(async () => { - // const appStakingPath = `/staking/test_service_payment/${serviceAdmin}/0/stake/${Date.now()}/value` - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - // ref: appStakingPath, - // value: 1 - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.error(`Failed to check finalization of tx.`); - // } - // const manageAppPath = '/manage_app/test_service_payment/create/1' - // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - // ref: manageAppPath, - // value: { - // admin: { [serviceAdmin]: true }, - // }, - // }}).body.toString('utf-8')); - // expect(body.code).to.equals(0); - // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - // console.error(`Failed to check finalization of tx.`); - // } - await setUpApp('test_service_payment', serverList, { admin: { [serviceAdmin]: true } }); }); @@ -5328,25 +5267,6 @@ describe('Blockchain Node', () => { describe('Escrow: _hold, _release', () => { before(async () => { - // const appStakingPath = `/staking/test_service_escrow/${serviceAdmin}/0/stake/${Date.now()}/value` - // const appStakingRes = 
parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - // ref: appStakingPath, - // value: 1 - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.error(`Failed to check finalization of tx.`); - // } - // const manageAppPath = '/manage_app/test_service_escrow/create/1' - // const body = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - // ref: manageAppPath, - // value: { - // admin: { [serviceAdmin]: true }, - // }, - // }}).body.toString('utf-8')); - // expect(body.code).to.equals(0); - // if (!(await waitUntilTxFinalized(serverList, _.get(body, 'result.tx_hash')))) { - // console.error(`Failed to check finalization of tx.`); - // } await setUpApp('test_service_escrow', serverList, { admin: { [serviceAdmin]: true } }); }); @@ -5957,28 +5877,6 @@ describe('Blockchain Node', () => { parseOrLog(syncRequest('GET', server3 + '/get_address').body.toString('utf-8')).result; userBalancePathA = `/get_value?ref=/accounts/${billingUserA}/balance`; userBalancePathB = `/get_value?ref=/accounts/${billingUserB}/balance`; - - // const appStakingRes = parseOrLog(syncRequest('POST', server1 + '/set_value', {json: { - // ref: `/staking/test_billing/${serviceAdmin}/0/stake/${Date.now()}/value`, - // value: 1 - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - // console.error(`Failed to check finalization of app staking tx.`); - // } - - // const createAppRes = parseOrLog(syncRequest('POST', server2 + '/set_value', {json: { - // ref: '/manage_app/test_billing/create/0', - // value: { - // admin: , - // billing: - // }, - // nonce: -1, - // timestamp: Date.now(), - // }}).body.toString('utf-8')).result; - // if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - // console.error(`Failed to check finalization of create app tx.`); - // } - const adminConfig = { [serviceAdmin]: true, [billingUserA]: true, diff --git a/integration/sharding.test.js b/integration/sharding.test.js index a48f777a0..133132ad6 100644 --- a/integration/sharding.test.js +++ b/integration/sharding.test.js @@ -249,28 +249,6 @@ describe('Sharding', async () => { ).result; await waitUntilTxFinalized(parentServerList, shardReportRes.tx_hash); // Create app at the parent chain for the shard - // const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - // json: { - // ref: `/staking/afan/${parentServerAddr}/0/stake/${Date.now()}/value`, - // value: 1 - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - // if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { - // console.log(`Failed to check finalization of app staking tx.`); - // } - // const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - // json: { - // ref: `/manage_app/afan/create/${Date.now()}`, - // value: { - // admin: { [shardOwnerAddr]: true } - // } - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - // if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { - // console.log(`Failed to check finalization of create app tx.`); - // } await setUpApp('afan', parentServerList, { admin: { [shardOwnerAddr]: true } }); tracker_proc = startServer(TRACKER_SERVER, 'tracker server', ENV_VARIABLES[1], true); @@ -1933,29 +1911,7 @@ 
describe('Sharding', async () => { describe('_updateLatestShardReport', () => { before(async () => { - const { shard_owner, shard_reporter, sharding_path } = shardingConfig; - // const appStakingRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - // json: { - // ref: `/staking/a_dapp/${shard_owner}/0/stake/${Date.now()}/value`, - // value: 1 - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(appStakingRes, 'result')), false); - // if (!(await waitUntilTxFinalized(parentServerList, appStakingRes.tx_hash))) { - // console.log(`Failed to check finalization of app staking tx.`) - // } - // const createAppRes = parseOrLog(syncRequest('POST', parentServer + '/set_value', { - // json: { - // ref: `/manage_app/a_dapp/create/${Date.now()}`, - // value: { - // admin: { [shard_owner]: true } - // } - // } - // }).body.toString('utf-8')).result; - // assert.deepEqual(CommonUtil.isFailedTx(_.get(createAppRes, 'result')), false); - // if (!(await waitUntilTxFinalized(parentServerList, createAppRes.tx_hash))) { - // console.log(`Failed to check finalization of create app tx.`) - // } + const { shard_owner, sharding_path } = shardingConfig; await setUpApp('a_dapp', parentServerList, { admin: { [shard_owner]: true } }); const res = parseOrLog(syncRequest('POST', parentServer + '/set', { From 4b05baa640bb625bc26c61ad6a6e363f3ad23950 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 22:56:51 +0900 Subject: [PATCH 005/175] Fix chain util test. --- unittest/chain-util.test.js | 76 ++++++++++++++++++------------------- 1 file changed, 38 insertions(+), 38 deletions(-) diff --git a/unittest/chain-util.test.js b/unittest/chain-util.test.js index 653adc8ac..7dbe042f2 100644 --- a/unittest/chain-util.test.js +++ b/unittest/chain-util.test.js @@ -391,13 +391,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -426,13 +426,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -462,13 +462,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 201, // A sub-operation failed "error_message": "Not a number type: bar or 10", @@ -498,13 +498,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": 
"/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -593,13 +593,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -640,13 +640,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -688,13 +688,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 201, // A sub-operation failed. 
"error_message": "Not a number type: bar or 10", @@ -736,13 +736,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -814,20 +814,20 @@ describe("CommonUtil", () => { }) describe("getTotalGasAmount", () => { - const op = { ref: '/test', value: null, type: 'SET_VALUE' }; + const serviceOp = { ref: '/transfer/test', value: null, type: 'SET_VALUE' }; const appOp = { ref: '/apps/test', value: null, type: 'SET_VALUE' }; it("when abnormal input", () => { const emptyVal = { app: {}, service: 0 }; - assert.deepEqual(CommonUtil.getTotalGasAmount(op, null), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, undefined), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, {}), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, { gas: 'gas' }), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, { gas: {} }), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, true), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, 'result'), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, 0), emptyVal); - assert.deepEqual(CommonUtil.getTotalGasAmount(op, 1), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, null), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, undefined), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, {}), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, { gas: 'gas' }), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, { gas: {} }), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, true), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, 'result'), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, 0), emptyVal); + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, 1), emptyVal); }) it("when single operation result input (service)", () => { @@ -836,13 +836,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/transfer/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/transfer/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -865,7 +865,7 @@ describe("CommonUtil", () => { "code": 0, "gas_amount": 30 }; - assert.deepEqual(CommonUtil.getTotalGasAmount(op, result), { + assert.deepEqual(CommonUtil.getTotalGasAmount(serviceOp, result), { app: {}, service: 62 }); @@ -926,7 +926,7 @@ describe("CommonUtil", () => { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/transfer/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -958,7 +958,7 @@ describe("CommonUtil", () => { }) it("when multiple operation result input (service)", () => { - const setTxOp = { type: 'SET', op_list: [{...op}, {...op}] }; + const setTxOp = 
{ type: 'SET', op_list: [{...serviceOp}, {...serviceOp}] }; const result = { "result_list": [ { @@ -966,13 +966,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/transfer/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/transfer/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -1060,7 +1060,7 @@ describe("CommonUtil", () => { }) it("when multiple operation result input (service & app)", () => { - const setTxOp = { type: 'SET', op_list: [{...appOp}, {...op}] }; + const setTxOp = { type: 'SET', op_list: [{...appOp}, {...serviceOp}] }; const result = { "result_list": [ { @@ -1068,13 +1068,13 @@ describe("CommonUtil", () => { "_saveLastTx": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "func_results": { "_eraseValue": { "op_results": [ { - "path": "/test/test_function_triggering/allowed_path/.last_tx/value", + "path": "/apps/test/test_function_triggering/allowed_path/.last_tx/value", "result": { "code": 0, "gas_amount": 1 @@ -1105,9 +1105,9 @@ describe("CommonUtil", () => { }; assert.deepEqual(CommonUtil.getTotalGasAmount(setTxOp, result), { app: { - test: 50 + test: 62 }, - service: 13 + service: 1 }); }) }) From 105fe61db98e5cf2db35fb446a915cf256b357e8 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 22:57:36 +0900 Subject: [PATCH 006/175] Rename chain-util.test.js -> common-util.test.js. --- unittest/{chain-util.test.js => common-util.test.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename unittest/{chain-util.test.js => common-util.test.js} (100%) diff --git a/unittest/chain-util.test.js b/unittest/common-util.test.js similarity index 100% rename from unittest/chain-util.test.js rename to unittest/common-util.test.js From 6ee8340796aa29191350c91868aa038ebe83613a Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 5 Jul 2021 23:40:46 +0900 Subject: [PATCH 007/175] Add sharding server nodes to test app admin. 
--- integration/sharding.test.js | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/integration/sharding.test.js b/integration/sharding.test.js index 133132ad6..a6d8c38b7 100644 --- a/integration/sharding.test.js +++ b/integration/sharding.test.js @@ -483,7 +483,21 @@ describe('Sharding', async () => { describe('API Tests', () => { describe('Get API', () => { before(async () => { - await setUpApp('test', shardServerList, { admin: { [account.address]: true } }); + const server1Addr = parseOrLog(syncRequest( + 'GET', server1 + '/get_address').body.toString('utf-8')).result; + const server2Addr = parseOrLog(syncRequest( + 'GET', server2 + '/get_address').body.toString('utf-8')).result; + const server3Addr = parseOrLog(syncRequest( + 'GET', server3 + '/get_address').body.toString('utf-8')).result; + const server4Addr = parseOrLog(syncRequest( + 'GET', server4 + '/get_address').body.toString('utf-8')).result; + await setUpApp('test', shardServerList, { admin: { + [account.address]: true, + [server1Addr]: true, + [server2Addr]: true, + [server3Addr]: true, + [server4Addr]: true + } }); await setUp(); }) From 08a5c0a3945e02e38ecb27574ebe383c2facfd92 Mon Sep 17 00:00:00 2001 From: liayoo Date: Tue, 6 Jul 2021 10:54:07 +0900 Subject: [PATCH 008/175] Use head commit for build_and_test --- .github/workflows/github-actions.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/github-actions.yml b/.github/workflows/github-actions.yml index 06d6273b7..b3f0ad464 100644 --- a/.github/workflows/github-actions.yml +++ b/.github/workflows/github-actions.yml @@ -24,6 +24,7 @@ jobs: with: node-version: '12.x' registry-url: 'https://registry.npmjs.org' + ref: ${{ github.event.pull_request.head.sha }} - name: yarn install run: yarn install - name: run unittest From b1084f2a1450d121d8636ae1974b40406d8a230e Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 6 Jul 2021 11:02:13 +0900 Subject: [PATCH 009/175] Fix github-actions.yml --- .github/workflows/github-actions.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/github-actions.yml b/.github/workflows/github-actions.yml index b3f0ad464..40969ecd8 100644 --- a/.github/workflows/github-actions.yml +++ b/.github/workflows/github-actions.yml @@ -19,12 +19,13 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.head.sha }} # Setup node environment for testing - uses: actions/setup-node@v2 with: node-version: '12.x' registry-url: 'https://registry.npmjs.org' - ref: ${{ github.event.pull_request.head.sha }} - name: yarn install run: yarn install - name: run unittest From 101776dd907f4a45f213657ac5f3579c2073cb1c Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 6 Jul 2021 11:03:04 +0900 Subject: [PATCH 010/175] Remove commented out code. 
--- integration/sharding.test.js | 5 ----- 1 file changed, 5 deletions(-) diff --git a/integration/sharding.test.js b/integration/sharding.test.js index a6d8c38b7..9e6ec15c7 100644 --- a/integration/sharding.test.js +++ b/integration/sharding.test.js @@ -1665,7 +1665,6 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransaction with is_global = false (explicit)', () => { - // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { @@ -1705,7 +1704,6 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransaction with is_global = true', () => { - // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { @@ -1747,7 +1745,6 @@ describe('Sharding', async () => { describe('ain_sendSignedTransactionBatch', () => { it('ain_sendSignedTransactionBatch with is_global = false', () => { - // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { @@ -1795,7 +1792,6 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransactionBatch with is_global = false (explicit)', () => { - // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { @@ -1847,7 +1843,6 @@ describe('Sharding', async () => { }) it('ain_sendSignedTransactionBatch with is_global = true', () => { - // const account = ainUtil.createAccount(); const client = jayson.client.http(server1 + '/json-rpc'); const txBody = { operation: { From d4c45ed5b0ce8370f4b43d06509b268ae923290d Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 6 Jul 2021 11:28:10 +0900 Subject: [PATCH 011/175] Update log messages. --- unittest/test-util.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/unittest/test-util.js b/unittest/test-util.js index b3b99977d..76ed66e42 100644 --- a/unittest/test-util.js +++ b/unittest/test-util.js @@ -133,7 +133,7 @@ async function setUpApp(appName, serverList, appConfig) { } }).body.toString('utf-8')).result; if (!(await waitUntilTxFinalized(serverList, appStakingRes.tx_hash))) { - console.log(`setUpTestApp(): Failed to check finalization of app staking tx.`); + console.log(`setUpApp(): Failed to check finalization of app staking tx.`); } const createAppRes = parseOrLog(syncRequest('POST', serverList[0] + '/set_value', { @@ -143,7 +143,7 @@ async function setUpApp(appName, serverList, appConfig) { } }).body.toString('utf-8')).result; if (!(await waitUntilTxFinalized(serverList, createAppRes.tx_hash))) { - console.log(`setUpTestApp(): Failed to check finalization of create app tx.`) + console.log(`setUpApp(): Failed to check finalization of create app tx.`) } } From 1896b39fab1c4dfbbfaa0016a9c34800f5447433 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 28 Jun 2021 14:09:59 +0900 Subject: [PATCH 012/175] Tweak deploy script messages. --- deploy_blockchain_gcp.sh | 5 ++++- setup_blockchain_ubuntu.sh | 2 +- setup_monitoring_ubuntu.sh | 2 +- start_node_gcp.sh | 4 +++- start_node_incremental_gcp.sh | 4 ++-- start_tracker_gcp.sh | 4 ++-- 6 files changed, 13 insertions(+), 8 deletions(-) diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index f798bfdf7..c0c0230b1 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -26,6 +26,9 @@ echo "GCP_USER=$GCP_USER" NUM_SHARDS=$3 echo "NUM_SHARDS=$NUM_SHARDS" +OPTIONS="$4" +echo "OPTIONS=$OPTIONS" + # Get confirmation. 
echo read -p "Do you want to proceed? >> (y/N) " -n 1 -r @@ -94,7 +97,7 @@ printf "\nDeploying files to ${NODE_4_TARGET_ADDR}..." gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [ "$4" == "--setup" ]; then +if [ $OPTIONS == "--setup" ]; then printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" diff --git a/setup_blockchain_ubuntu.sh b/setup_blockchain_ubuntu.sh index 86ca40795..ef34718e8 100644 --- a/setup_blockchain_ubuntu.sh +++ b/setup_blockchain_ubuntu.sh @@ -1,6 +1,6 @@ #!/bin/sh -echo 'Installing nodejs..' +echo 'Installing NodeJS..' sudo apt update curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - sudo apt install -y nodejs diff --git a/setup_monitoring_ubuntu.sh b/setup_monitoring_ubuntu.sh index 951094c52..edb4b7090 100644 --- a/setup_monitoring_ubuntu.sh +++ b/setup_monitoring_ubuntu.sh @@ -1,6 +1,6 @@ #!/bin/sh -echo 'Installing nodejs..' +echo 'Installing NodeJS..' sudo apt update curl -sL https://deb.nodesource.com/setup_12.x | sudo -E bash - sudo apt install -y nodejs diff --git a/start_node_gcp.sh b/start_node_gcp.sh index 725f80dd7..56755e57d 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -89,5 +89,7 @@ export LIGHTWEIGHT=false export STAKE=100000 export BLOCKCHAIN_DATA_DIR="/home/ain_blockchain_data" -echo 'Starting up Node server..' +echo 'Starting up Blockchain Node server..' nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & + +echo "Blockchain Node server is now up!" diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 97a9b0ab7..751c4885c 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -122,7 +122,7 @@ cd $NEW_DIR_PATH npm install # 6. Start a new node process -printf "Starting up Node server.." +printf "Starting up Blockchain Node server.." nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & # 7. Wait until the new node catches up @@ -143,7 +143,7 @@ do printf "\nconsensusStatus = ${consensusStatus}" printf "\nlastBlockNumber = ${lastBlockNumber}" if [ "$consensusStatus" == "RUNNING" ]; then - printf "\nNode is synced & running!" + printf "\nBlockchain Node server is synced & running!" printf "Time it took to sync in seconds: $SECONDS\n\n" break fi diff --git a/start_tracker_gcp.sh b/start_tracker_gcp.sh index 2937880c0..263c3baaa 100644 --- a/start_tracker_gcp.sh +++ b/start_tracker_gcp.sh @@ -2,6 +2,6 @@ export CONSOLE_LOG=false -echo 'Starting up Tracker server..' +echo 'Starting up Blockchain Tracker server..' nohup node --async-stack-traces tracker-server/index.js >/dev/null 2>error_logs.txt & -echo "Tracker server is now up!" +echo "Blockchain Tracker server is now up!" From 769be1a32ee48daaa7c91560b0ba90c29cdf2918 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 28 Jun 2021 14:10:24 +0900 Subject: [PATCH 013/175] Remove a completed todo. --- db/index.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/db/index.js b/db/index.js index 2a81a518e..6fd9e18ef 100644 --- a/db/index.js +++ b/db/index.js @@ -641,8 +641,6 @@ class DB { } // TODO(platfowner): Define error code explicitly. 
- // TODO(platfowner): Consider making set operation and native function run tightly bound, i.e., - // revert the former if the latter fails. // TODO(platfowner): Apply .shard (isWritablePathWithSharding()) to setFunction(), setRule(), // and setOwner() as well. setValue(valuePath, value, auth, timestamp, transaction, isGlobal) { From 74dffe42adee9a25c68f87b606ce45f51d3a95d1 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Tue, 6 Jul 2021 18:17:27 +0900 Subject: [PATCH 014/175] Tweak deploy script options. --- deploy_blockchain_gcp.sh | 6 +++--- deploy_blockchain_incremental_gcp.sh | 9 ++++++--- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index c0c0230b1..964c7680b 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -133,7 +133,7 @@ if [ "$NUM_SHARDS" -gt 0 ]; then echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [ "$4" == "--setup" ]; then + if [ $OPTIONS == "--setup" ]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -152,8 +152,8 @@ if [ "$NUM_SHARDS" -gt 0 ]; then printf "\nDeploying files to ${SHARD_NODE_2_TARGET_ADDR}..." gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE - # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [ "$4" == "--setup" ]; then + # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) + if [ $OPTIONS == "--setup" ]; then printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 55e4b8e5b..6e1b81a5f 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -26,6 +26,9 @@ echo "GCP_USER=$GCP_USER" NUM_SHARDS=$3 echo "NUM_SHARDS=$NUM_SHARDS" +OPTIONS="$4" +echo "OPTIONS=$OPTIONS" + # Get confirmation. echo read -p "Do you want to proceed? >> (y/N) " -n 1 -r @@ -69,7 +72,7 @@ printf "\nCopying files to ${NODE_4_TARGET_ADDR}..." gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [ "$4" == "--setup" ]; then +if [ $OPTIONS == "--setup" ]; then printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" gcloud compute ssh $TRACKER_TARGET_ADDR --command ". 
setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" @@ -106,7 +109,7 @@ if [ "$NUM_SHARDS" -gt 0 ]; then echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [ "$4" == "--setup" ]; then + if [ $OPTIONS == "--setup" ]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -126,7 +129,7 @@ if [ "$NUM_SHARDS" -gt 0 ]; then gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [ "$4" == "--setup" ]; then + if [ $OPTIONS == "--setup" ]; then printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" From af8376d8338b02c8f7dd7adc64f6d51bb013d4cb Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Tue, 6 Jul 2021 20:44:33 +0900 Subject: [PATCH 015/175] Tweak execution order of start_node_incremental_gcp.sh. --- start_node_incremental_gcp.sh | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 751c4885c..aa26051b5 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -7,6 +7,8 @@ if [ "$#" -lt 2 ]; then fi # 1. Configure env vars (GENESIS_CONFIGS_DIR, TRACKER_WS_ADDR, ACCOUNT_INDEX, ...) +printf "\n\n#### [Step 1] Configure env vars.. ####\n" + export GENESIS_CONFIGS_DIR=genesis-configs/testnet if [ "$1" = 'spring' ]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 @@ -103,29 +105,40 @@ NEW_DIR_PATH="../ain-blockchain-$date" echo "NEW_DIR_PATH=$NEW_DIR_PATH" # 2. Get currently used directory +printf "\n\n#### [Step 2] Get currently used directory.. ####\n" + OLD_DIR_PATH=$(find ../ain-blockchain* -maxdepth 0 -type d) echo "OLD_DIR_PATH=$OLD_DIR_PATH" -# 3. Kill old node & remove old directory (but keep the chain data) -sudo killall node -sudo rm -rf ../ain-blockchain* +# 3. Create a new directory +printf "\n\n#### [Step 3] Create a new directory.. ####\n" -# 4. Create a new directory sudo mkdir $NEW_DIR_PATH sudo chmod 777 $NEW_DIR_PATH mv * $NEW_DIR_PATH sudo mkdir -p $BLOCKCHAIN_DATA_DIR sudo chmod 777 $BLOCKCHAIN_DATA_DIR -# 5. Install dependencies +# 4. Install dependencies +printf "\n\n#### [Step 4] Install dependencies.. ####\n" + cd $NEW_DIR_PATH npm install +# 5. Kill old node process & remove old directory (but keep the chain data) +printf "\n\n#### [Step 5] Kill old node process & remove old directory.. ####\n" + +sudo killall node +sudo rm -rf $OLD_DIR_PATH + # 6. Start a new node process -printf "Starting up Blockchain Node server.." +printf "\n\n#### [Step 6] Start a new node process.. ####\n" + nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & -# 7. Wait until the new node catches up +# 7. Wait until the new node process catches up +printf "\n\n#### [Step 7] Wait until the new node process catches up.. ####\n" + SECONDS=0 loopCount=0 From 9752084b8b59c4dd2d41638ad8ca37be9d333bf6 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Tue, 6 Jul 2021 23:55:37 +0900 Subject: [PATCH 016/175] Add canary run mode in deploy script. 
Use double bracket if comparison. Tweak logging messages. --- deploy_blockchain_gcp.sh | 16 +-- deploy_blockchain_incremental_gcp.sh | 185 ++++++++++++++++----------- deploy_monitoring_gcp.sh | 6 +- reset_blockchain_gcp.sh | 10 +- setup_monitoring_gcp.sh | 4 +- start_node_gcp.sh | 56 ++++---- start_node_incremental_gcp.sh | 82 ++++++------ 7 files changed, 200 insertions(+), 159 deletions(-) diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index 964c7680b..f45944da3 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -1,14 +1,14 @@ #!/bin/sh -if [ "$#" -lt 3 ]; then +if [[ "$#" -lt 3 ]]; then echo "Usage: sh deploy_blockchain_gcp.sh [dev|staging|spring|summer] <# of Shards> [--setup]" echo "Example: sh deploy_blockchain_gcp.sh dev lia 0 --setup" exit fi -if [ "$1" = 'spring' ] || [ "$1" = 'summer' ] || [ "$1" = 'dev' ] || [ "$1" = 'staging' ]; then +if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then SEASON="$1" - if [ "$1" = 'spring' ] || [ "$1" = 'summer' ]; then + if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" @@ -64,7 +64,7 @@ gcloud compute ssh $NODE_2_TARGET_ADDR --command "sudo killall node" --project $ gcloud compute ssh $NODE_3_TARGET_ADDR --command "sudo killall node" --project $PROJECT_ID --zone $NODE_3_ZONE gcloud compute ssh $NODE_4_TARGET_ADDR --command "sudo killall node" --project $PROJECT_ID --zone $NODE_4_ZONE -if [ "$NUM_SHARDS" -gt 0 ]; then +if [[ "$NUM_SHARDS" -gt 0 ]]; then for i in $(seq $NUM_SHARDS) do echo "shard #$i" @@ -97,7 +97,7 @@ printf "\nDeploying files to ${NODE_4_TARGET_ADDR}..." gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [ $OPTIONS == "--setup" ]; then +if [[ $OPTIONS == "--setup" ]]; then printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" @@ -126,14 +126,14 @@ gcloud compute ssh $NODE_3_TARGET_ADDR --command ". setup_node_gcp.sh && . start printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" gcloud compute ssh $NODE_4_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE -if [ "$NUM_SHARDS" -gt 0 ]; then +if [[ "$NUM_SHARDS" -gt 0 ]]; then printf "\nDeploying shard blockchains..." for i in $(seq $NUM_SHARDS) do echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [ $OPTIONS == "--setup" ]; then + if [[ $OPTIONS == "--setup" ]]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -153,7 +153,7 @@ if [ "$NUM_SHARDS" -gt 0 ]; then gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [ $OPTIONS == "--setup" ]; then + if [[ $OPTIONS == "--setup" ]]; then printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". 
setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 6e1b81a5f..3a33cfb23 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -1,14 +1,14 @@ #!/bin/sh -if [ "$#" -lt 3 ]; then - echo "Usage: sh deploy_blockchain_incremental_gcp.sh [dev|staging|spring|summer] <# of Shards> [--setup]" - echo "Example: sh deploy_blockchain_incremental_gcp.sh dev lia 0 --setup" +if [[ "$#" -lt 4 ]]; then + echo "Usage: sh deploy_blockchain_incremental_gcp.sh [dev|staging|spring|summer] <# of Shards> [canary|full] [--setup]" + echo "Example: sh deploy_blockchain_incremental_gcp.sh dev lia 0 canary --setup" exit fi -if [ "$1" = 'spring' ] || [ "$1" = 'summer' ] || [ "$1" = 'dev' ] || [ "$1" = 'staging' ]; then +if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then SEASON="$1" - if [ "$1" = 'spring' ] || [ "$1" = 'summer' ]; then + if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" @@ -26,7 +26,15 @@ echo "GCP_USER=$GCP_USER" NUM_SHARDS=$3 echo "NUM_SHARDS=$NUM_SHARDS" -OPTIONS="$4" +if [[ "$4" == 'canary' ]] || [[ "$4" == 'full' ]]; then + RUN_MODE="$4" +else + echo "Invalid run mode argument: $4" + exit +fi +echo "RUN_MODE=$RUN_MODE" + +OPTIONS="$5" echo "OPTIONS=$OPTIONS" # Get confirmation. @@ -34,9 +42,8 @@ echo read -p "Do you want to proceed? >> (y/N) " -n 1 -r echo echo -if [[ ! $REPLY =~ ^[Yy]$ ]] -then - [[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + [[ "$0" == "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell fi FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" @@ -57,59 +64,74 @@ NODE_3_ZONE="us-central1-a" NODE_4_ZONE="europe-west4-a" # 1. Copy files to gcp -printf "\nDeploying parent blockchain..." -printf "\nCopying files to ${TRACKER_TARGET_ADDR}..." -gcloud compute scp --recurse $FILES_FOR_TRACKER ${TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE -printf "\nCopying files to ${NODE_0_TARGET_ADDR}..." -gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE -printf "\nCopying files to ${NODE_1_TARGET_ADDR}..." -gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE -printf "\nCopying files to ${NODE_2_TARGET_ADDR}..." -gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE -printf "\nCopying files to ${NODE_3_TARGET_ADDR}..." -gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_3_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_3_ZONE -printf "\nCopying files to ${NODE_4_TARGET_ADDR}..." 
-gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE +printf "\nDeploying parent blockchain...\n" +if [[ $RUN_MODE == "canary" ]]; then + printf "\nCopying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE +else + printf "\nCopying files to parent tracker (${TRACKER_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_TRACKER ${TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\nCopying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\nCopying files to parent node 1 (${NODE_1_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\nCopying files to parent node 2 (${NODE_2_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE + printf "\nCopying files to parent node 3 (${NODE_3_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_3_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_3_ZONE + printf "\nCopying files to parent node 4 (${NODE_4_TARGET_ADDR})...\n\n" + gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE +fi # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [ $OPTIONS == "--setup" ]; then - printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" - gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Setting up parent node 1 #\n##########################\n\n" - gcloud compute ssh $NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Setting up parent node 2 #\n##########################\n\n" - gcloud compute ssh $NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE - printf "\n\n##########################\n# Setting up parent node 3 #\n##########################\n\n" - gcloud compute ssh $NODE_3_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_3_ZONE - printf "\n\n##########################\n# Setting up parent node 4 #\n##########################\n\n" - gcloud compute ssh $NODE_4_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_4_ZONE +if [[ $OPTIONS == "--setup" ]]; then + if [[ $RUN_MODE == "canary" ]]; then + printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE + else + printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" + gcloud compute ssh $TRACKER_TARGET_ADDR --command ". 
setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\n\n##########################\n# Setting up parent node 1 #\n##########################\n\n" + gcloud compute ssh $NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\n\n##########################\n# Setting up parent node 2 #\n##########################\n\n" + gcloud compute ssh $NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE + printf "\n\n##########################\n# Setting up parent node 3 #\n##########################\n\n" + gcloud compute ssh $NODE_3_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_3_ZONE + printf "\n\n##########################\n# Setting up parent node 4 #\n##########################\n\n" + gcloud compute ssh $NODE_4_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_4_ZONE + fi fi # 2. Set up parent chain -printf "\n\n############################\n# Running parent tracker #\n############################\n\n" -gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE -printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" -gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE -printf "\n\n#########################\n# Running parent node 1 #\n#########################\n\n" -gcloud compute ssh $NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 1" --project $PROJECT_ID --zone $NODE_1_ZONE -printf "\n\n#########################\n# Running parent node 2 #\n#########################\n\n" -gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 2" --project $PROJECT_ID --zone $NODE_2_ZONE -printf "\n\n#########################\n# Running parent node 3 #\n#########################\n\n" -gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 3" --project $PROJECT_ID --zone $NODE_3_ZONE -printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" -gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE +if [[ $RUN_MODE == "canary" ]]; then + printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE +else + printf "\n\n############################\n# Running parent tracker #\n############################\n\n" + gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". 
start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\n\n#########################\n# Running parent node 1 #\n#########################\n\n" + gcloud compute ssh $NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 1" --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\n\n#########################\n# Running parent node 2 #\n#########################\n\n" + gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 2" --project $PROJECT_ID --zone $NODE_2_ZONE + printf "\n\n#########################\n# Running parent node 3 #\n#########################\n\n" + gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 3" --project $PROJECT_ID --zone $NODE_3_ZONE + printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" + gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE +fi # 3. Shards -if [ "$NUM_SHARDS" -gt 0 ]; then +if [[ "$NUM_SHARDS" -gt 0 ]]; then printf "\nDeploying shard blockchains..." for i in $(seq $NUM_SHARDS) do echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [ $OPTIONS == "--setup" ]; then + if [[ $OPTIONS == "--setup" ]]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -119,35 +141,50 @@ if [ "$NUM_SHARDS" -gt 0 ]; then SHARD_NODE_2_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-2-singapore" # deploy files to GCP instances - printf "\nDeploying files to ${SHARD_TRACKER_TARGET_ADDR}..." - gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nDeploying files to ${SHARD_NODE_0_TARGET_ADDR}..." - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nDeploying files to ${SHARD_NODE_1_TARGET_ADDR}..." - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nDeploying files to ${SHARD_NODE_2_TARGET_ADDR}..." - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE + if [[ $RUN_MODE == "canary" ]]; then + printf "\nDeploying files to shard_$i node 0 ${SHARD_NODE_0_TARGET_ADDR}..." + gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE + else + printf "\nDeploying files to shard_$i tracker ${SHARD_TRACKER_TARGET_ADDR}..." + gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\nDeploying files to shard_$i node 0 ${SHARD_NODE_0_TARGET_ADDR}..." + gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\nDeploying files to shard_$i node 1 ${SHARD_NODE_1_TARGET_ADDR}..." + gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\nDeploying files to shard_$i node 2 ${SHARD_NODE_2_TARGET_ADDR}..." 
+ gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE + fi # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [ $OPTIONS == "--setup" ]; then - printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" - gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE + if [[ $OPTIONS == "--setup" ]]; then + if [[ $RUN_MODE == "canary" ]]; then + printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE + else + printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" + gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\n\n##########################\n# Setting up shard_$i node 1 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\n\n##########################\n# Setting up shard_$i node 2 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE + fi fi # ssh into each instance, install packages and start up the server - printf "\n\n###########################\n# Running shard_$i tracker #\n###########################\n\n" - gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Running shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 1" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Running shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". 
start_node_incremental_gcp.sh $SEASON $i 2" --project $PROJECT_ID --zone $NODE_2_ZONE + if [[ $RUN_MODE == "canary" ]]; then + printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE + else + printf "\n\n###########################\n# Running shard_$i tracker #\n###########################\n\n" + gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\n\n##########################\n# Running shard_$i node 1 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 1" --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\n\n##########################\n# Running shard_$i node 2 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 2" --project $PROJECT_ID --zone $NODE_2_ZONE + fi done fi diff --git a/deploy_monitoring_gcp.sh b/deploy_monitoring_gcp.sh index 8f68f001f..0a12d2fc1 100644 --- a/deploy_monitoring_gcp.sh +++ b/deploy_monitoring_gcp.sh @@ -1,14 +1,14 @@ #!/bin/sh -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Usage: sh deploy_monitoring_gcp.sh [dev|staging|spring|summer] " echo "Example: sh deploy_monitoring_gcp.sh dev seo" exit fi -if [ "$1" = 'spring' ] || [ "$1" = 'summer' ] || [ "$1" = 'dev' ] || [ "$1" = 'staging' ]; then +if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then SEASON="$1" - if [ "$1" = 'spring' ] || [ "$1" = 'summer' ]; then + if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" diff --git a/reset_blockchain_gcp.sh b/reset_blockchain_gcp.sh index 39580aa81..d021ce95b 100644 --- a/reset_blockchain_gcp.sh +++ b/reset_blockchain_gcp.sh @@ -1,13 +1,13 @@ #!/bin/sh -if [ "$#" -lt 3 ]; then +if [[ "$#" -lt 3 ]]; then echo "Usage: sh reset_blockchain_gcp.sh dev lia 0" exit fi -if [ "$1" = 'spring' ] || [ "$1" = 'summer' ] || [ "$1" = 'dev' ] || [ "$1" = 'staging' ]; then +if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then SEASON="$1" - if [ "$1" = 'spring' ] || [ "$1" = 'summer' ]; then + if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" @@ -45,7 +45,7 @@ gcloud compute ssh $NODE_3_TARGET_ADDR --command "killall node" --project $PROJE gcloud compute ssh $NODE_4_TARGET_ADDR --command "killall node" --project $PROJECT_ID --zone $NODE_4_ZONE printf "\nStopping shard blockchains..." 
-if [ "$3" -gt 0 ]; then +if [[ "$3" -gt 0 ]]; then for i in $(seq $3) do echo "shard #$i" @@ -79,7 +79,7 @@ gcloud compute ssh $NODE_4_TARGET_ADDR --command "cd ../ain-blockchain && sudo r sleep 10 -if [ "$3" -gt 0 ]; then +if [[ "$3" -gt 0 ]]; then for i in $(seq $3) do echo "shard #$i" diff --git a/setup_monitoring_gcp.sh b/setup_monitoring_gcp.sh index 43cea5ab1..27cec41cf 100644 --- a/setup_monitoring_gcp.sh +++ b/setup_monitoring_gcp.sh @@ -1,12 +1,12 @@ #!/bin/sh -if [ "$#" -lt 1 ]; then +if [[ "$#" -lt 1 ]]; then echo "Usage: sh setup_monitoring_gcp.sh [dev|staging|spring|summer]" echo "Example: sh setup_monitoring_gcp.sh dev" exit fi -if [ "$1" != 'spring' ] && [ "$1" != 'summer' ] && [ "$1" != 'dev' ] && [ "$1" != 'staging' ]; then +if [[ "$1" != 'spring' ]] && [[ "$1" != 'summer' ]] && [[ "$1" != 'dev' ]] && [[ "$1" != 'staging' ]]; then echo "Invalid season argument: $1" exit fi diff --git a/start_node_gcp.sh b/start_node_gcp.sh index 56755e57d..5a436e2a2 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -1,64 +1,64 @@ #!/bin/sh -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Usage: sh start_node_gcp.sh [dev|staging|spring|summer] " echo "Example: sh start_node_gcp.sh spring 0 0" exit fi export GENESIS_CONFIGS_DIR=genesis-configs/testnet -if [ "$1" = 'spring' ]; then +if [[ "$1" == 'spring' ]]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 -elif [ "$1" = 'summer' ]; then +elif [[ "$1" == 'summer' ]]; then export TRACKER_WS_ADDR=ws://35.194.172.106:5000 -elif [ "$1" = 'staging' ]; then +elif [[ "$1" == 'staging' ]]; then export TRACKER_WS_ADDR=ws://35.221.150.73:5000 -elif [ "$1" = 'dev' ]; then - if [ "$2" -gt 0 ]; then +elif [[ "$1" == 'dev' ]]; then + if [[ "$2" -gt 0 ]]; then export GENESIS_CONFIGS_DIR=genesis-configs/sim-shard fi - if [ "$2" = 0 ]; then + if [[ "$2" = 0 ]]; then export TRACKER_WS_ADDR=ws://34.80.184.73:5000 # dev-tracker-ip - elif [ "$2" = 1 ]; then + elif [[ "$2" = 1 ]]; then export TRACKER_WS_ADDR=ws://35.187.153.22:5000 # dev-shard-1-tracker-ip - elif [ "$2" = 2 ]; then + elif [[ "$2" = 2 ]]; then export TRACKER_WS_ADDR=ws://34.80.203.104:5000 # dev-shard-2-tracker-ip - elif [ "$2" = 3 ]; then + elif [[ "$2" = 3 ]]; then export TRACKER_WS_ADDR=ws://35.189.174.17:5000 # dev-shard-3-tracker-ip - elif [ "$2" = 4 ]; then + elif [[ "$2" = 4 ]]; then export TRACKER_WS_ADDR=ws://35.221.164.158:5000 # dev-shard-4-tracker-ip - elif [ "$2" = 5 ]; then + elif [[ "$2" = 5 ]]; then export TRACKER_WS_ADDR=ws://35.234.46.65:5000 # dev-shard-5-tracker-ip - elif [ "$2" = 6 ]; then + elif [[ "$2" = 6 ]]; then export TRACKER_WS_ADDR=ws://35.221.210.171:5000 # dev-shard-6-tracker-ip - elif [ "$2" = 7 ]; then + elif [[ "$2" = 7 ]]; then export TRACKER_WS_ADDR=ws://34.80.222.121:5000 # dev-shard-7-tracker-ip - elif [ "$2" = 8 ]; then + elif [[ "$2" = 8 ]]; then export TRACKER_WS_ADDR=ws://35.221.200.95:5000 # dev-shard-8-tracker-ip - elif [ "$2" = 9 ]; then + elif [[ "$2" = 9 ]]; then export TRACKER_WS_ADDR=ws://34.80.216.199:5000 # dev-shard-9-tracker-ip - elif [ "$2" = 10 ]; then + elif [[ "$2" = 10 ]]; then export TRACKER_WS_ADDR=ws://34.80.161.85:5000 # dev-shard-10-tracker-ip - elif [ "$2" = 11 ]; then + elif [[ "$2" = 11 ]]; then export TRACKER_WS_ADDR=ws://35.194.239.169:5000 # dev-shard-11-tracker-ip - elif [ "$2" = 12 ]; then + elif [[ "$2" = 12 ]]; then export TRACKER_WS_ADDR=ws://35.185.156.22:5000 # dev-shard-12-tracker-ip - elif [ "$2" = 13 ]; then + elif [[ "$2" = 13 ]]; then export TRACKER_WS_ADDR=ws://35.229.247.143:5000 # 
dev-shard-13-tracker-ip - elif [ "$2" = 14 ]; then + elif [[ "$2" = 14 ]]; then export TRACKER_WS_ADDR=ws://35.229.226.47:5000 # dev-shard-14-tracker-ip - elif [ "$2" = 15 ]; then + elif [[ "$2" = 15 ]]; then export TRACKER_WS_ADDR=ws://35.234.61.23:5000 # dev-shard-15-tracker-ip - elif [ "$2" = 16 ]; then + elif [[ "$2" = 16 ]]; then export TRACKER_WS_ADDR=ws://34.80.66.41:5000 # dev-shard-16-tracker-ip - elif [ "$2" = 17 ]; then + elif [[ "$2" = 17 ]]; then export TRACKER_WS_ADDR=ws://35.229.143.18:5000 # dev-shard-17-tracker-ip - elif [ "$2" = 18 ]; then + elif [[ "$2" = 18 ]]; then export TRACKER_WS_ADDR=ws://35.234.58.137:5000 # dev-shard-18-tracker-ip - elif [ "$2" = 19 ]; then + elif [[ "$2" = 19 ]]; then export TRACKER_WS_ADDR=ws://34.80.249.104:5000 # dev-shard-19-tracker-ip - elif [ "$2" = 20 ]; then + elif [[ "$2" = 20 ]]; then export TRACKER_WS_ADDR=ws://35.201.248.92:5000 # dev-shard-20-tracker-ip else echo "Invalid shard ID argument: $2" @@ -69,7 +69,7 @@ else exit fi -if [ "$3" -lt 0 ] || [ "$3" -gt 4 ]; then +if [[ "$3" -lt 0 ]] || [[ "$3" -gt 4 ]]; then echo "Invalid account_index argument: $2" exit fi diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index aa26051b5..25c58beaf 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -1,69 +1,69 @@ #!/bin/sh -if [ "$#" -lt 2 ]; then +if [[ "$#" -lt 2 ]]; then echo "Usage: sh start_node_incremental_gcp.sh [dev|staging|spring|summer] " echo "Example: sh start_node_incremental_gcp.sh spring 0 0" exit fi # 1. Configure env vars (GENESIS_CONFIGS_DIR, TRACKER_WS_ADDR, ACCOUNT_INDEX, ...) -printf "\n\n#### [Step 1] Configure env vars.. ####\n" +printf "\n\n#### [Step 1] Configure env vars ####\n\n" export GENESIS_CONFIGS_DIR=genesis-configs/testnet -if [ "$1" = 'spring' ]; then +if [[ "$1" == 'spring' ]]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 -elif [ "$1" = 'summer' ]; then +elif [[ "$1" == 'summer' ]]; then export TRACKER_WS_ADDR=ws://35.194.172.106:5000 -elif [ "$1" = 'staging' ]; then +elif [[ "$1" == 'staging' ]]; then export TRACKER_WS_ADDR=ws://35.221.150.73:5000 -elif [ "$1" = 'dev' ]; then - if [ "$2" = 0 ]; then +elif [[ "$1" == 'dev' ]]; then + if [[ "$2" = 0 ]]; then export TRACKER_WS_ADDR=ws://34.80.184.73:5000 # dev-tracker-ip - elif [ "$2" = 1 ]; then + elif [[ "$2" = 1 ]]; then export TRACKER_WS_ADDR=ws://35.187.153.22:5000 # dev-shard-1-tracker-ip - elif [ "$2" = 2 ]; then + elif [[ "$2" = 2 ]]; then export TRACKER_WS_ADDR=ws://34.80.203.104:5000 # dev-shard-2-tracker-ip - elif [ "$2" = 3 ]; then + elif [[ "$2" = 3 ]]; then export TRACKER_WS_ADDR=ws://35.189.174.17:5000 # dev-shard-3-tracker-ip - elif [ "$2" = 4 ]; then + elif [[ "$2" = 4 ]]; then export TRACKER_WS_ADDR=ws://35.221.164.158:5000 # dev-shard-4-tracker-ip - elif [ "$2" = 5 ]; then + elif [[ "$2" = 5 ]]; then export TRACKER_WS_ADDR=ws://35.234.46.65:5000 # dev-shard-5-tracker-ip - elif [ "$2" = 6 ]; then + elif [[ "$2" = 6 ]]; then export TRACKER_WS_ADDR=ws://35.221.210.171:5000 # dev-shard-6-tracker-ip - elif [ "$2" = 7 ]; then + elif [[ "$2" = 7 ]]; then export TRACKER_WS_ADDR=ws://34.80.222.121:5000 # dev-shard-7-tracker-ip - elif [ "$2" = 8 ]; then + elif [[ "$2" = 8 ]]; then export TRACKER_WS_ADDR=ws://35.221.200.95:5000 # dev-shard-8-tracker-ip - elif [ "$2" = 9 ]; then + elif [[ "$2" = 9 ]]; then export TRACKER_WS_ADDR=ws://34.80.216.199:5000 # dev-shard-9-tracker-ip - elif [ "$2" = 10 ]; then + elif [[ "$2" = 10 ]]; then export TRACKER_WS_ADDR=ws://34.80.161.85:5000 # 
dev-shard-10-tracker-ip - elif [ "$2" = 11 ]; then + elif [[ "$2" = 11 ]]; then export TRACKER_WS_ADDR=ws://35.194.239.169:5000 # dev-shard-11-tracker-ip - elif [ "$2" = 12 ]; then + elif [[ "$2" = 12 ]]; then export TRACKER_WS_ADDR=ws://35.185.156.22:5000 # dev-shard-12-tracker-ip - elif [ "$2" = 13 ]; then + elif [[ "$2" = 13 ]]; then export TRACKER_WS_ADDR=ws://35.229.247.143:5000 # dev-shard-13-tracker-ip - elif [ "$2" = 14 ]; then + elif [[ "$2" = 14 ]]; then export TRACKER_WS_ADDR=ws://35.229.226.47:5000 # dev-shard-14-tracker-ip - elif [ "$2" = 15 ]; then + elif [[ "$2" = 15 ]]; then export TRACKER_WS_ADDR=ws://35.234.61.23:5000 # dev-shard-15-tracker-ip - elif [ "$2" = 16 ]; then + elif [[ "$2" = 16 ]]; then export TRACKER_WS_ADDR=ws://34.80.66.41:5000 # dev-shard-16-tracker-ip - elif [ "$2" = 17 ]; then + elif [[ "$2" = 17 ]]; then export TRACKER_WS_ADDR=ws://35.229.143.18:5000 # dev-shard-17-tracker-ip - elif [ "$2" = 18 ]; then + elif [[ "$2" = 18 ]]; then export TRACKER_WS_ADDR=ws://35.234.58.137:5000 # dev-shard-18-tracker-ip - elif [ "$2" = 19 ]; then + elif [[ "$2" = 19 ]]; then export TRACKER_WS_ADDR=ws://34.80.249.104:5000 # dev-shard-19-tracker-ip - elif [ "$2" = 20 ]; then + elif [[ "$2" = 20 ]]; then export TRACKER_WS_ADDR=ws://35.201.248.92:5000 # dev-shard-20-tracker-ip else echo "Invalid shard ID argument: $2" exit fi - if [ "$2" -gt 0 ]; then + if [[ "$2" -gt 0 ]]; then # Create a genesis_params.json export GENESIS_CONFIGS_DIR="genesis-configs/shard_$2" mkdir -p "./$GENESIS_CONFIGS_DIR" @@ -79,7 +79,7 @@ else exit fi -if [ "$3" -lt 0 ] || [ "$3" -gt 4 ]; then +if [[ "$3" -lt 0 ]] || [[ "$3" -gt 4 ]]; then echo "Invalid account_index argument: $2" exit fi @@ -105,13 +105,13 @@ NEW_DIR_PATH="../ain-blockchain-$date" echo "NEW_DIR_PATH=$NEW_DIR_PATH" # 2. Get currently used directory -printf "\n\n#### [Step 2] Get currently used directory.. ####\n" +printf "\n\n#### [Step 2] Get currently used directory ####\n\n" OLD_DIR_PATH=$(find ../ain-blockchain* -maxdepth 0 -type d) echo "OLD_DIR_PATH=$OLD_DIR_PATH" # 3. Create a new directory -printf "\n\n#### [Step 3] Create a new directory.. ####\n" +printf "\n\n#### [Step 3] Create a new directory ####\n\n" sudo mkdir $NEW_DIR_PATH sudo chmod 777 $NEW_DIR_PATH @@ -120,24 +120,23 @@ sudo mkdir -p $BLOCKCHAIN_DATA_DIR sudo chmod 777 $BLOCKCHAIN_DATA_DIR # 4. Install dependencies -printf "\n\n#### [Step 4] Install dependencies.. ####\n" +printf "\n\n#### [Step 4] Install dependencies ####\n\n" cd $NEW_DIR_PATH npm install -# 5. Kill old node process & remove old directory (but keep the chain data) -printf "\n\n#### [Step 5] Kill old node process & remove old directory.. ####\n" +# 5. Kill old node process +printf "\n\n#### [Step 5] Kill old node process ####\n\n" sudo killall node -sudo rm -rf $OLD_DIR_PATH # 6. Start a new node process -printf "\n\n#### [Step 6] Start a new node process.. ####\n" +printf "\n\n#### [Step 6] Start a new node process ####\n\n" nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & # 7. Wait until the new node process catches up -printf "\n\n#### [Step 7] Wait until the new node process catches up.. 
####\n" +printf "\n\n#### [Step 7] Wait until the new node process catches up ####\n\n" SECONDS=0 loopCount=0 @@ -155,8 +154,8 @@ do lastBlockNumber=$(curl -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') printf "\nconsensusStatus = ${consensusStatus}" printf "\nlastBlockNumber = ${lastBlockNumber}" - if [ "$consensusStatus" == "RUNNING" ]; then - printf "\nBlockchain Node server is synced & running!" + if [[ "$consensusStatus" == "RUNNING" ]]; then + printf "\nBlockchain Node server is synced & running!\n" printf "Time it took to sync in seconds: $SECONDS\n\n" break fi @@ -164,3 +163,8 @@ do printf "\nLoop count: ${loopCount}\n" sleep 30 done + +# 8. Remove old directory keeping the chain data +printf "\n\n#### [Step 8] Remove old directory keeping the chain data ####\n\n" + +sudo rm -rf $OLD_DIR_PATH From 025bc62ef03b1cad498898645f6126d473742dc0 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 00:27:34 +0900 Subject: [PATCH 017/175] Use just single operator (=) string equality operator. --- deploy_blockchain_gcp.sh | 10 +++++----- deploy_blockchain_incremental_gcp.sh | 26 +++++++++++++------------- deploy_monitoring_gcp.sh | 4 ++-- reset_blockchain_gcp.sh | 4 ++-- start_node_incremental_gcp.sh | 10 +++++----- 5 files changed, 27 insertions(+), 27 deletions(-) diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index f45944da3..1cf63adab 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -6,9 +6,9 @@ if [[ "$#" -lt 3 ]]; then exit fi -if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then +if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]] || [[ "$1" = 'dev' ]] || [[ "$1" = 'staging' ]]; then SEASON="$1" - if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then + if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" @@ -97,7 +97,7 @@ printf "\nDeploying files to ${NODE_4_TARGET_ADDR}..." gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [[ $OPTIONS == "--setup" ]]; then +if [[ $OPTIONS = "--setup" ]]; then printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" @@ -133,7 +133,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [[ $OPTIONS == "--setup" ]]; then + if [[ $OPTIONS = "--setup" ]]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -153,7 +153,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [[ $OPTIONS == "--setup" ]]; then + if [[ $OPTIONS = "--setup" ]]; then printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". 
setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 3a33cfb23..f297b7063 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -6,9 +6,9 @@ if [[ "$#" -lt 4 ]]; then exit fi -if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then +if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]] || [[ "$1" = 'dev' ]] || [[ "$1" = 'staging' ]]; then SEASON="$1" - if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then + if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" @@ -26,7 +26,7 @@ echo "GCP_USER=$GCP_USER" NUM_SHARDS=$3 echo "NUM_SHARDS=$NUM_SHARDS" -if [[ "$4" == 'canary' ]] || [[ "$4" == 'full' ]]; then +if [[ "$4" = 'canary' ]] || [[ "$4" = 'full' ]]; then RUN_MODE="$4" else echo "Invalid run mode argument: $4" @@ -43,7 +43,7 @@ read -p "Do you want to proceed? >> (y/N) " -n 1 -r echo echo if [[ ! $REPLY =~ ^[Yy]$ ]]; then - [[ "$0" == "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell + [[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell fi FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" @@ -65,7 +65,7 @@ NODE_4_ZONE="europe-west4-a" # 1. Copy files to gcp printf "\nDeploying parent blockchain...\n" -if [[ $RUN_MODE == "canary" ]]; then +if [[ $RUN_MODE = "canary" ]]; then printf "\nCopying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE else @@ -84,8 +84,8 @@ else fi # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [[ $OPTIONS == "--setup" ]]; then - if [[ $RUN_MODE == "canary" ]]; then +if [[ $OPTIONS = "--setup" ]]; then + if [[ $RUN_MODE = "canary" ]]; then printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE else @@ -105,7 +105,7 @@ if [[ $OPTIONS == "--setup" ]]; then fi # 2. Set up parent chain -if [[ $RUN_MODE == "canary" ]]; then +if [[ $RUN_MODE = "canary" ]]; then printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE else @@ -131,7 +131,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then echo "shard #$i" # generate genesis config files in ./blockchain/shard_$i - if [[ $OPTIONS == "--setup" ]]; then + if [[ $OPTIONS = "--setup" ]]; then node ./tools/generateShardGenesisFiles.js $SEASON 10 $i fi @@ -141,7 +141,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then SHARD_NODE_2_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-2-singapore" # deploy files to GCP instances - if [[ $RUN_MODE == "canary" ]]; then + if [[ $RUN_MODE = "canary" ]]; then printf "\nDeploying files to shard_$i node 0 ${SHARD_NODE_0_TARGET_ADDR}..." 
gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE else @@ -156,8 +156,8 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then fi # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [[ $OPTIONS == "--setup" ]]; then - if [[ $RUN_MODE == "canary" ]]; then + if [[ $OPTIONS = "--setup" ]]; then + if [[ $RUN_MODE = "canary" ]]; then printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE else @@ -173,7 +173,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then fi # ssh into each instance, install packages and start up the server - if [[ $RUN_MODE == "canary" ]]; then + if [[ $RUN_MODE = "canary" ]]; then printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE else diff --git a/deploy_monitoring_gcp.sh b/deploy_monitoring_gcp.sh index 0a12d2fc1..8365f27c8 100644 --- a/deploy_monitoring_gcp.sh +++ b/deploy_monitoring_gcp.sh @@ -6,9 +6,9 @@ if [[ "$#" -lt 2 ]]; then exit fi -if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then +if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]] || [[ "$1" = 'dev' ]] || [[ "$1" = 'staging' ]]; then SEASON="$1" - if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then + if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" diff --git a/reset_blockchain_gcp.sh b/reset_blockchain_gcp.sh index d021ce95b..54e60c6bc 100644 --- a/reset_blockchain_gcp.sh +++ b/reset_blockchain_gcp.sh @@ -5,9 +5,9 @@ if [[ "$#" -lt 3 ]]; then exit fi -if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]] || [[ "$1" == 'dev' ]] || [[ "$1" == 'staging' ]]; then +if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]] || [[ "$1" = 'dev' ]] || [[ "$1" = 'staging' ]]; then SEASON="$1" - if [[ "$1" == 'spring' ]] || [[ "$1" == 'summer' ]]; then + if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]]; then PROJECT_ID="testnet-prod-ground" else PROJECT_ID="testnet-$1-ground" diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 25c58beaf..b5657d305 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -10,13 +10,13 @@ fi printf "\n\n#### [Step 1] Configure env vars ####\n\n" export GENESIS_CONFIGS_DIR=genesis-configs/testnet -if [[ "$1" == 'spring' ]]; then +if [[ "$1" = 'spring' ]]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 -elif [[ "$1" == 'summer' ]]; then +elif [[ "$1" = 'summer' ]]; then export TRACKER_WS_ADDR=ws://35.194.172.106:5000 -elif [[ "$1" == 'staging' ]]; then +elif [[ "$1" = 'staging' ]]; then export TRACKER_WS_ADDR=ws://35.221.150.73:5000 -elif [[ "$1" == 'dev' ]]; then +elif [[ "$1" = 'dev' ]]; then if [[ "$2" = 0 ]]; then export TRACKER_WS_ADDR=ws://34.80.184.73:5000 # dev-tracker-ip elif [[ "$2" = 1 ]]; then @@ -154,7 +154,7 @@ do lastBlockNumber=$(curl -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') printf "\nconsensusStatus = ${consensusStatus}" printf "\nlastBlockNumber = ${lastBlockNumber}" - if [[ "$consensusStatus" == "RUNNING" ]]; then + if [[ 
"$consensusStatus" = "RUNNING" ]]; then printf "\nBlockchain Node server is synced & running!\n" printf "Time it took to sync in seconds: $SECONDS\n\n" break From 89c05def9f989aaee61d44a1c925b8c3b2d303df Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 13:00:46 +0900 Subject: [PATCH 018/175] Increase nodejs memory limit to 4GB (--max-old-space-size=4000). Set timeout of curl jobs. --- start_node_incremental_gcp.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index b5657d305..6e1c39583 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -133,7 +133,7 @@ sudo killall node # 6. Start a new node process printf "\n\n#### [Step 6] Start a new node process ####\n\n" -nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & +nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt & # 7. Wait until the new node process catches up printf "\n\n#### [Step 7] Wait until the new node process catches up ####\n\n" @@ -150,8 +150,8 @@ EOF while : do - consensusStatus=$(curl -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'net_consensusStatus')" "http://localhost:8080/json-rpc" | jq -r '.result.result.state') - lastBlockNumber=$(curl -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') + consensusStatus=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'net_consensusStatus')" "http://localhost:8080/json-rpc" | jq -r '.result.result.state') + lastBlockNumber=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') printf "\nconsensusStatus = ${consensusStatus}" printf "\nlastBlockNumber = ${lastBlockNumber}" if [[ "$consensusStatus" = "RUNNING" ]]; then @@ -161,7 +161,7 @@ do fi ((loopCount++)) printf "\nLoop count: ${loopCount}\n" - sleep 30 + sleep 20 done # 8. Remove old directory keeping the chain data From 2f26d15cf52fa18c025e5d243d7e4de6fed7c023 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 15:38:00 +0900 Subject: [PATCH 019/175] Print out commands in deploy scripts. --- deploy_blockchain_gcp.sh | 22 +++++++------- deploy_blockchain_incremental_gcp.sh | 10 +++---- start_node_gcp.sh | 9 ++++-- start_node_incremental_gcp.sh | 44 +++++++++++++++++----------- 4 files changed, 49 insertions(+), 36 deletions(-) diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index 1cf63adab..02a55eb59 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -82,18 +82,18 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then fi # deploy files to GCP instances -printf "\nDeploying parent blockchain..." -printf "\nDeploying files to ${TRACKER_TARGET_ADDR}..." +printf "\nDeploying parent blockchain...\n\n" +printf "\nDeploying files to parent tracker (${TRACKER_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_TRACKER ${TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE -printf "\nDeploying files to ${NODE_0_TARGET_ADDR}..." +printf "\nDeploying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE -printf "\nDeploying files to ${NODE_1_TARGET_ADDR}..." 
+printf "\nDeploying files to parent node 1 (${NODE_1_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE -printf "\nDeploying files to ${NODE_2_TARGET_ADDR}..." +printf "\nDeploying files to parent node 2 (${NODE_2_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE -printf "\nDeploying files to ${NODE_3_TARGET_ADDR}..." +printf "\nDeploying files to parent node 3 (${NODE_3_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_3_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_3_ZONE -printf "\nDeploying files to ${NODE_4_TARGET_ADDR}..." +printf "\nDeploying files to parent node 4 (${NODE_4_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) @@ -143,13 +143,13 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then SHARD_NODE_2_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-2-singapore" # deploy files to GCP instances - printf "\nDeploying files to ${SHARD_TRACKER_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i tracker (${SHARD_TRACKER_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nDeploying files to ${SHARD_NODE_0_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nDeploying files to ${SHARD_NODE_1_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 1 (${SHARD_NODE_1_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nDeploying files to ${SHARD_NODE_2_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 2 (${SHARD_NODE_2_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index f297b7063..81d493dad 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -142,16 +142,16 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then # deploy files to GCP instances if [[ $RUN_MODE = "canary" ]]; then - printf "\nDeploying files to shard_$i node 0 ${SHARD_NODE_0_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})..." gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE else - printf "\nDeploying files to shard_$i tracker ${SHARD_TRACKER_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i tracker (${SHARD_TRACKER_TARGET_ADDR})..." gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nDeploying files to shard_$i node 0 ${SHARD_NODE_0_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})..." 
gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nDeploying files to shard_$i node 1 ${SHARD_NODE_1_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 1 (${SHARD_NODE_1_TARGET_ADDR})..." gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nDeploying files to shard_$i node 2 ${SHARD_NODE_2_TARGET_ADDR}..." + printf "\nDeploying files to shard_$i node 2 (${SHARD_NODE_2_TARGET_ADDR})..." gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE fi diff --git a/start_node_gcp.sh b/start_node_gcp.sh index 5a436e2a2..97565eee6 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -89,7 +89,10 @@ export LIGHTWEIGHT=false export STAKE=100000 export BLOCKCHAIN_DATA_DIR="/home/ain_blockchain_data" -echo 'Starting up Blockchain Node server..' -nohup node --async-stack-traces client/index.js >/dev/null 2>error_logs.txt & +printf "\nStarting up Blockchain Node server..\n\n" +START_CMD='nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &' +printf "START_CMD=$START_CMD\n" +eval $START_CMD -echo "Blockchain Node server is now up!" + +printf "\nBlockchain Node server is now up!\n\n" diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 6e1c39583..08e549b37 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -7,7 +7,7 @@ if [[ "$#" -lt 2 ]]; then fi # 1. Configure env vars (GENESIS_CONFIGS_DIR, TRACKER_WS_ADDR, ACCOUNT_INDEX, ...) -printf "\n\n#### [Step 1] Configure env vars ####\n\n" +printf "\n#### [Step 1] Configure env vars ####\n\n" export GENESIS_CONFIGS_DIR=genesis-configs/testnet if [[ "$1" = 'spring' ]]; then @@ -79,17 +79,19 @@ else exit fi +echo "TRACKER_WS_ADDR=$TRACKER_WS_ADDR" +echo "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR" + if [[ "$3" -lt 0 ]] || [[ "$3" -gt 4 ]]; then echo "Invalid account_index argument: $2" exit fi -echo "TRACKER_WS_ADDR=$TRACKER_WS_ADDR" -echo "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR" - export ACCOUNT_INDEX="$3" echo "ACCOUNT_INDEX=$ACCOUNT_INDEX" +#export SYNC_MODE="fast" + export DEBUG=false export CONSOLE_LOG=false export ENABLE_DEV_SET_CLIENT_API=false @@ -105,38 +107,46 @@ NEW_DIR_PATH="../ain-blockchain-$date" echo "NEW_DIR_PATH=$NEW_DIR_PATH" # 2. Get currently used directory -printf "\n\n#### [Step 2] Get currently used directory ####\n\n" +printf "\n#### [Step 2] Get currently used directory ####\n\n" OLD_DIR_PATH=$(find ../ain-blockchain* -maxdepth 0 -type d) echo "OLD_DIR_PATH=$OLD_DIR_PATH" # 3. Create a new directory -printf "\n\n#### [Step 3] Create a new directory ####\n\n" +printf "\n#### [Step 3] Create a new directory ####\n\n" + +MKDIR_CMD="sudo mkdir $NEW_DIR_PATH" +echo "MKDIR_CMD=$MKDIR_CMD" +eval $MKDIR_CMD -sudo mkdir $NEW_DIR_PATH sudo chmod 777 $NEW_DIR_PATH mv * $NEW_DIR_PATH sudo mkdir -p $BLOCKCHAIN_DATA_DIR sudo chmod 777 $BLOCKCHAIN_DATA_DIR # 4. Install dependencies -printf "\n\n#### [Step 4] Install dependencies ####\n\n" +printf "\n#### [Step 4] Install dependencies ####\n\n" cd $NEW_DIR_PATH npm install # 5. Kill old node process -printf "\n\n#### [Step 5] Kill old node process ####\n\n" +printf "\n#### [Step 5] Kill old node process ####\n\n" -sudo killall node +KILL_CMD='sudo killall node' +printf "KILL_CMD=$KILL_CMD\n\n" +eval $KILL_CMD # 6. 
Start a new node process -printf "\n\n#### [Step 6] Start a new node process ####\n\n" +sleep 20 +printf "\n#### [Step 6] Start a new node process ####\n\n" -nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt & +START_CMD='nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &' +printf "START_CMD=$START_CMD\n" +eval $START_CMD # 7. Wait until the new node process catches up -printf "\n\n#### [Step 7] Wait until the new node process catches up ####\n\n" +printf "\n#### [Step 7] Wait until the new node process catches up ####\n\n" SECONDS=0 loopCount=0 @@ -151,12 +161,12 @@ EOF while : do consensusStatus=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'net_consensusStatus')" "http://localhost:8080/json-rpc" | jq -r '.result.result.state') + printf "\nconsensusStatus = ${consensusStatus}\n" lastBlockNumber=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') - printf "\nconsensusStatus = ${consensusStatus}" - printf "\nlastBlockNumber = ${lastBlockNumber}" + printf "\nlastBlockNumber = ${lastBlockNumber}\n" if [[ "$consensusStatus" = "RUNNING" ]]; then printf "\nBlockchain Node server is synced & running!\n" - printf "Time it took to sync in seconds: $SECONDS\n\n" + printf "\nTime it took to sync in seconds: $SECONDS\n" break fi ((loopCount++)) @@ -165,6 +175,6 @@ do done # 8. Remove old directory keeping the chain data -printf "\n\n#### [Step 8] Remove old directory keeping the chain data ####\n\n" +printf "\n#### [Step 8] Remove old directory keeping the chain data ####\n\n" sudo rm -rf $OLD_DIR_PATH From a2f3c7c41a4ec6816a9e7793a6b5114b6ee484a5 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 16:31:35 +0900 Subject: [PATCH 020/175] Add sync mode option to deploy script. 
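The new [fast|full] sync mode argument is forwarded to start_node_incremental_gcp.sh as its fourth argument when each node is started. Example invocation, as given in the updated usage message (argument values are illustrative):

    sh deploy_blockchain_incremental_gcp.sh dev lia 0 fast canary --setup
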
--- deploy_blockchain_incremental_gcp.sh | 75 +++++++++++++++------------- start_node_gcp.sh | 8 +-- start_node_incremental_gcp.sh | 48 ++++++++++-------- 3 files changed, 73 insertions(+), 58 deletions(-) diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 81d493dad..08ea185fb 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -1,8 +1,8 @@ #!/bin/sh -if [[ "$#" -lt 4 ]]; then - echo "Usage: sh deploy_blockchain_incremental_gcp.sh [dev|staging|spring|summer] <# of Shards> [canary|full] [--setup]" - echo "Example: sh deploy_blockchain_incremental_gcp.sh dev lia 0 canary --setup" +if [[ "$#" -lt 5 ]] || [[ "$#" -gt 6 ]]; then + printf "Usage: sh deploy_blockchain_incremental_gcp.sh [dev|staging|spring|summer] <# of Shards> [fast|full] [canary|full] [--setup]\n" + printf "Example: sh deploy_blockchain_incremental_gcp.sh dev lia 0 fast canary --setup\n" exit fi @@ -14,34 +14,41 @@ if [[ "$1" = 'spring' ]] || [[ "$1" = 'summer' ]] || [[ "$1" = 'dev' ]] || [[ "$ PROJECT_ID="testnet-$1-ground" fi else - echo "Invalid project/season argument: $1" + printf "Invalid argument: $1\n" exit fi -echo "SEASON=$SEASON" -echo "PROJECT_ID=$PROJECT_ID" +printf "SEASON=$SEASON\n" +printf "PROJECT_ID=$PROJECT_ID\n" GCP_USER="$2" -echo "GCP_USER=$GCP_USER" +printf "GCP_USER=$GCP_USER\n" NUM_SHARDS=$3 -echo "NUM_SHARDS=$NUM_SHARDS" +printf "NUM_SHARDS=$NUM_SHARDS\n" -if [[ "$4" = 'canary' ]] || [[ "$4" = 'full' ]]; then - RUN_MODE="$4" +if [[ "$4" = 'fast' ]] || [[ "$4" = 'full' ]]; then + SYNC_MODE="$4" else - echo "Invalid run mode argument: $4" + printf "Invalid argument: $4\n" exit fi -echo "RUN_MODE=$RUN_MODE" +printf "SYNC_MODE=$SYNC_MODE\n" -OPTIONS="$5" -echo "OPTIONS=$OPTIONS" +if [[ "$5" = 'canary' ]] || [[ "$5" = 'full' ]]; then + RUN_MODE="$5" +else + printf "Invalid argument: $5\n" + exit +fi +printf "RUN_MODE=$RUN_MODE\n" + +OPTIONS="$6" +printf "OPTIONS=$OPTIONS\n" # Get confirmation. -echo +printf "\n" read -p "Do you want to proceed? >> (y/N) " -n 1 -r -echo -echo +printf "\n\n" if [[ ! $REPLY =~ ^[Yy]$ ]]; then [[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell fi @@ -107,28 +114,28 @@ fi # 2. Set up parent chain if [[ $RUN_MODE = "canary" ]]; then printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE else printf "\n\n############################\n# Running parent tracker #\n############################\n\n" gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE + gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE printf "\n\n#########################\n# Running parent node 1 #\n#########################\n\n" - gcloud compute ssh $NODE_1_TARGET_ADDR --command ". 
start_node_incremental_gcp.sh $SEASON 0 1" --project $PROJECT_ID --zone $NODE_1_ZONE + gcloud compute ssh $NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 1 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_1_ZONE printf "\n\n#########################\n# Running parent node 2 #\n#########################\n\n" - gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 2" --project $PROJECT_ID --zone $NODE_2_ZONE + gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 2 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_2_ZONE printf "\n\n#########################\n# Running parent node 3 #\n#########################\n\n" - gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 3" --project $PROJECT_ID --zone $NODE_3_ZONE + gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 3 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_3_ZONE printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" - gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE + gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 4 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_4_ZONE fi # 3. Shards if [[ "$NUM_SHARDS" -gt 0 ]]; then - printf "\nDeploying shard blockchains..." + printf "\nDeploying shard blockchains...\n\n" for i in $(seq $NUM_SHARDS) do - echo "shard #$i" + printf "\nShard #$i\n\n" # generate genesis config files in ./blockchain/shard_$i if [[ $OPTIONS = "--setup" ]]; then @@ -142,16 +149,16 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then # deploy files to GCP instances if [[ $RUN_MODE = "canary" ]]; then - printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})..." + printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE else - printf "\nDeploying files to shard_$i tracker (${SHARD_TRACKER_TARGET_ADDR})..." + printf "\nDeploying files to shard_$i tracker (${SHARD_TRACKER_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})..." + printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nDeploying files to shard_$i node 1 (${SHARD_NODE_1_TARGET_ADDR})..." + printf "\nDeploying files to shard_$i node 1 (${SHARD_NODE_1_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nDeploying files to shard_$i node 2 (${SHARD_NODE_2_TARGET_ADDR})..." 
+ printf "\nDeploying files to shard_$i node 2 (${SHARD_NODE_2_TARGET_ADDR})...\n\n" gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE fi @@ -175,16 +182,16 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then # ssh into each instance, install packages and start up the server if [[ $RUN_MODE = "canary" ]]; then printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE else printf "\n\n###########################\n# Running shard_$i tracker #\n###########################\n\n" gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE printf "\n\n##########################\n# Running shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 1" --project $PROJECT_ID --zone $NODE_1_ZONE + gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 1 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_1_ZONE printf "\n\n##########################\n# Running shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 2" --project $PROJECT_ID --zone $NODE_2_ZONE + gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". 
start_node_incremental_gcp.sh $SEASON $i 2 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_2_ZONE fi done fi diff --git a/start_node_gcp.sh b/start_node_gcp.sh index 97565eee6..c1179761d 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -69,14 +69,14 @@ else exit fi +echo "TRACKER_WS_ADDR=$TRACKER_WS_ADDR" +echo "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR" + if [[ "$3" -lt 0 ]] || [[ "$3" -gt 4 ]]; then echo "Invalid account_index argument: $2" exit fi -echo "TRACKER_WS_ADDR=$TRACKER_WS_ADDR" -echo "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR" - export ACCOUNT_INDEX="$3" echo "ACCOUNT_INDEX=$ACCOUNT_INDEX" @@ -91,7 +91,7 @@ export BLOCKCHAIN_DATA_DIR="/home/ain_blockchain_data" printf "\nStarting up Blockchain Node server..\n\n" START_CMD='nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &' -printf "START_CMD=$START_CMD\n" +printf "START_CMD='$START_CMD'\n" eval $START_CMD diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 08e549b37..e1ec4a886 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -1,8 +1,8 @@ #!/bin/sh -if [[ "$#" -lt 2 ]]; then - echo "Usage: sh start_node_incremental_gcp.sh [dev|staging|spring|summer] " - echo "Example: sh start_node_incremental_gcp.sh spring 0 0" +if [[ "$#" -lt 4 ]] || [[ "$#" -gt 4 ]]; then + printf "Usage: sh start_node_incremental_gcp.sh [dev|staging|spring|summer] [fast|full]\n" + printf "Example: sh start_node_incremental_gcp.sh spring 0 0 fast\n" exit fi @@ -60,7 +60,7 @@ elif [[ "$1" = 'dev' ]]; then elif [[ "$2" = 20 ]]; then export TRACKER_WS_ADDR=ws://35.201.248.92:5000 # dev-shard-20-tracker-ip else - echo "Invalid shard ID argument: $2" + printf "Invalid argument: $2\n" exit fi if [[ "$2" -gt 0 ]]; then @@ -75,22 +75,28 @@ elif [[ "$1" = 'dev' ]]; then EOF fi else - echo "Invalid season argument: $1" + printf "Invalid argument: $1\n" exit fi -echo "TRACKER_WS_ADDR=$TRACKER_WS_ADDR" -echo "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR" +printf "TRACKER_WS_ADDR=$TRACKER_WS_ADDR\n" +printf "GENESIS_CONFIGS_DIR=$GENESIS_CONFIGS_DIR\n" if [[ "$3" -lt 0 ]] || [[ "$3" -gt 4 ]]; then - echo "Invalid account_index argument: $2" + printf "Invalid argument: $3\n" exit fi export ACCOUNT_INDEX="$3" -echo "ACCOUNT_INDEX=$ACCOUNT_INDEX" +printf "ACCOUNT_INDEX=$ACCOUNT_INDEX\n" -#export SYNC_MODE="fast" +if [[ "$4" != 'fast' ]] && [[ "$4" != 'full' ]]; then + printf "Invalid argument: $2\n" + exit +fi + +export SYNC_MODE="$4" +printf "SYNC_MODE=$SYNC_MODE\n" export DEBUG=false export CONSOLE_LOG=false @@ -102,21 +108,21 @@ export STAKE=100000 export BLOCKCHAIN_DATA_DIR="/home/ain_blockchain_data" date=$(date '+%Y-%m-%dT%H:%M') -echo "date=$date" +printf "date=$date\n" NEW_DIR_PATH="../ain-blockchain-$date" -echo "NEW_DIR_PATH=$NEW_DIR_PATH" +printf "NEW_DIR_PATH=$NEW_DIR_PATH\n" # 2. Get currently used directory printf "\n#### [Step 2] Get currently used directory ####\n\n" OLD_DIR_PATH=$(find ../ain-blockchain* -maxdepth 0 -type d) -echo "OLD_DIR_PATH=$OLD_DIR_PATH" +printf "OLD_DIR_PATH=$OLD_DIR_PATH\n" # 3. Create a new directory printf "\n#### [Step 3] Create a new directory ####\n\n" MKDIR_CMD="sudo mkdir $NEW_DIR_PATH" -echo "MKDIR_CMD=$MKDIR_CMD" +printf "MKDIR_CMD=$MKDIR_CMD\n" eval $MKDIR_CMD sudo chmod 777 $NEW_DIR_PATH @@ -133,16 +139,16 @@ npm install # 5. 
Kill old node process printf "\n#### [Step 5] Kill old node process ####\n\n" -KILL_CMD='sudo killall node' -printf "KILL_CMD=$KILL_CMD\n\n" +KILL_CMD="sudo killall node" +printf "KILL_CMD='$KILL_CMD'\n\n" eval $KILL_CMD # 6. Start a new node process -sleep 20 printf "\n#### [Step 6] Start a new node process ####\n\n" -START_CMD='nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &' -printf "START_CMD=$START_CMD\n" +sleep 10 +START_CMD="nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &" +printf "START_CMD='$START_CMD'\n" eval $START_CMD # 7. Wait until the new node process catches up @@ -177,4 +183,6 @@ done # 8. Remove old directory keeping the chain data printf "\n#### [Step 8] Remove old directory keeping the chain data ####\n\n" -sudo rm -rf $OLD_DIR_PATH +RM_CMD="sudo rm -rf $OLD_DIR_PATH" +printf "RM_CMD='$RM_CMD'\n" +eval $RM_CMD From e01bcf4d1929eaa046b590f8a0867f9cdeb7e75a Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 17:39:47 +0900 Subject: [PATCH 021/175] Leave more log messages when starting node. --- consensus/index.js | 2 +- node/index.js | 20 ++++++++++++++++---- p2p/index.js | 45 +++++++++++++++++++++++++++++++-------------- p2p/server.js | 8 +++++--- 4 files changed, 53 insertions(+), 22 deletions(-) diff --git a/consensus/index.js b/consensus/index.js index 06732cbf6..32c7752a4 100644 --- a/consensus/index.js +++ b/consensus/index.js @@ -68,7 +68,7 @@ class Consensus { } init(lastBlockWithoutProposal) { - const LOG_HEADER = 'init'; + const LOG_HEADER = 'Consensus.init'; const finalizedNumber = this.node.bc.lastBlockNumber(); const genesisBlock = this.node.bc.getBlockByNumber(0); if (!genesisBlock) { diff --git a/node/index.js b/node/index.js index 11b44c9df..88040c574 100644 --- a/node/index.js +++ b/node/index.js @@ -60,6 +60,7 @@ class BlockchainNode { this.db = this.createDb(StateVersions.EMPTY, initialVersion, this.bc, this.tp, false, true); this.nonce = null; // nonce from current final version this.state = BlockchainNodeStates.STARTING; + logger.info(`Now node in STARTING state!`); this.snapshotDir = path.resolve(SNAPSHOTS_ROOT_DIR, `${PORT}`); FileUtil.createSnapshotDir(this.snapshotDir); } @@ -85,14 +86,15 @@ class BlockchainNode { } init(isFirstNode) { - const LOG_HEADER = 'init'; - logger.info(`[${LOG_HEADER}] Initializing node..`); + const LOG_HEADER = 'BlockchainNode.init'; + let latestSnapshot = null; let latestSnapshotPath = null; let latestSnapshotBlockNumber = -1; // 1. Get the latest snapshot if in the "fast" sync mode. if (SYNC_MODE === SyncModeOptions.FAST) { + logger.info(`[${LOG_HEADER}] Initializing node in 'fast' mode..`); const latestSnapshotInfo = FileUtil.getLatestSnapshotInfo(this.snapshotDir); latestSnapshotPath = latestSnapshotInfo.latestSnapshotPath; latestSnapshotBlockNumber = latestSnapshotInfo.latestSnapshotBlockNumber; @@ -103,28 +105,36 @@ class BlockchainNode { logger.error(`[${LOG_HEADER}] ${err.stack}`); } } + logger.info(`[${LOG_HEADER}] Fast mode sync done!`); + } else { + logger.info(`[${LOG_HEADER}] Initializing node in 'full' mode..`); } // 2. Initialize the blockchain, starting from `latestSnapshotBlockNumber`. + logger.info(`[${LOG_HEADER}] Initializing blockchain..`); const lastBlockWithoutProposal = this.bc.init(isFirstNode, latestSnapshotBlockNumber); // 3. 
Initialize DB (with the latest snapshot, if it exists) + logger.info(`[${LOG_HEADER}] Initializing DB..`); const startingDb = this.createDb(StateVersions.EMPTY, StateVersions.START, this.bc, this.tp, true); startingDb.initDbStates(latestSnapshot); // 4. Execute the chain on the DB and finalize it. + logger.info(`[${LOG_HEADER}] Executing chains on DB..`); this.executeChainOnDb(startingDb); this.cloneAndFinalizeVersion(StateVersions.START, this.bc.lastBlockNumber()); this.nonce = this.getNonceForAddr(this.account.address, false, true); // 5. Execute transactions from the pool. + logger.info(`[${LOG_HEADER}] Executing the transaction from the tx pool..`); this.db.executeTransactionList( this.tp.getValidTransactions(null, this.stateManager.getFinalVersion()), this.bc.lastBlockNumber() + 1); // 6. Node status changed: STARTING -> SYNCING. this.state = BlockchainNodeStates.SYNCING; + logger.info(`[${LOG_HEADER}] Now node in SYNCING state!`); return lastBlockWithoutProposal; } @@ -144,7 +154,7 @@ class BlockchainNode { createDb(baseVersion, newVersion, bc, tp, finalizeVersion, isNodeDb, blockNumberSnapshot) { const LOG_HEADER = 'createDb'; - logger.info(`[${LOG_HEADER}] Creating a new DB by cloning state version: ` + + logger.debug(`[${LOG_HEADER}] Creating a new DB by cloning state version: ` + `${baseVersion} -> ${newVersion}`); const newRoot = this.stateManager.cloneVersion(baseVersion, newVersion); if (!newRoot) { @@ -161,7 +171,7 @@ class BlockchainNode { destroyDb(db) { const LOG_HEADER = 'destroyDb'; - logger.info(`[${LOG_HEADER}] Destroying DB with state version: ${db.stateVersion}`); + logger.debug(`[${LOG_HEADER}] Destroying DB with state version: ${db.stateVersion}`); db.deleteStateVersion(); db.deleteBackupStateVersion(); } @@ -438,6 +448,7 @@ class BlockchainNode { // TODO(liayoo): Ask the tracker server for another peer. logger.info(`[${LOG_HEADER}] Blockchain Node is now synced!`); this.state = BlockchainNodeStates.SERVING; + logger.info(`[${LOG_HEADER}] Now node in SERVING state.`); } return false; } @@ -453,6 +464,7 @@ class BlockchainNode { // TODO(liayoo): Ask the tracker server for another peer. 
logger.info(`[${LOG_HEADER}] Blockchain Node is now synced!`); this.state = BlockchainNodeStates.SERVING; + logger.info(`[${LOG_HEADER}] Now node in SERVING state!`); } return false; } diff --git a/p2p/index.js b/p2p/index.js index dbd108e3f..0c3c3fce0 100644 --- a/p2p/index.js +++ b/p2p/index.js @@ -148,7 +148,6 @@ class P2pClient { } async setTrackerEventHandlers() { - const node = this.server.node; this.trackerWebSocket.on('message', async (message) => { const parsedMsg = JSON.parse(message); logger.info(`\n<< Message from [TRACKER]: ${JSON.stringify(parsedMsg, null, 2)}`); @@ -156,16 +155,8 @@ class P2pClient { logger.debug(`Updated MANAGED peers info: ` + `${JSON.stringify(this.server.managedPeersInfo, null, 2)}`); } - if (node.state === BlockchainNodeStates.STARTING) { - if (parsedMsg.numLivePeers === 0) { - const lastBlockWithoutProposal = node.init(true); - await this.server.tryInitializeShard(); - node.state = BlockchainNodeStates.SERVING; - this.server.consensus.init(lastBlockWithoutProposal); - } else { - // Consensus will be initialized after syncing with peers - node.init(false); - } + if (this.server.node.state === BlockchainNodeStates.STARTING) { + await this.startNode(parsedMsg.numLivePeers); } }); this.trackerWebSocket.on('close', (code) => { @@ -175,6 +166,32 @@ class P2pClient { }); } + async startNode(numLivePeers) { + const LOG_HEADER = 'startNode'; + + if (numLivePeers === 0) { + logger.info(`[${LOG_HEADER}] Starting node without peers..`); + const lastBlockWithoutProposal = this.server.node.init(true); + logger.info(`[${LOG_HEADER}] lastBlockWithoutProposal=${lastBlockWithoutProposal}`); + logger.info(`[${LOG_HEADER}] Trying to initializing shard..`); + if (await this.server.tryInitializeShard()) { + logger.info(`[${LOG_HEADER}] Shard initialization done!`); + } else { + logger.info(`[${LOG_HEADER}] No need to initialize shard.`); + } + this.server.node.state = BlockchainNodeStates.SERVING; + logger.info(`[${LOG_HEADER}] Now node in SERVING state!`); + logger.info(`[${LOG_HEADER}] Initializing consensus process..`); + this.server.consensus.init(lastBlockWithoutProposal); + logger.info(`[${LOG_HEADER}] Consensus process initialized!`); + } else { + // Consensus will be initialized after syncing with peers + logger.info(`[${LOG_HEADER}] Starting node with ${numLivePeers} peers..`); + this.server.node.init(false); + logger.info(`[${LOG_HEADER}] Node initialized!`); + } + } + connectToTracker() { logger.info(`Reconnecting to tracker (${TRACKER_WS_ADDR})`); this.trackerWebSocket = new Websocket(TRACKER_WS_ADDR); @@ -250,8 +267,8 @@ class P2pClient { socket.send(JSON.stringify(payload)); } - setPeerEventHandlers(socket) { - const LOG_HEADER = 'setPeerEventHandlers'; + setClientSidePeerEventHandlers(socket) { + const LOG_HEADER = 'setClientSidePeerEventHandlers'; socket.on('message', (message) => { const parsedMessage = JSON.parse(message); const dataProtoVer = _.get(parsedMessage, 'dataProtoVer'); @@ -442,7 +459,7 @@ class P2pClient { const socket = new Websocket(peerInfo.url); socket.on('open', async () => { logger.info(`Connected to peer(${peerInfo.url}),`); - this.setPeerEventHandlers(socket); + this.setClientSidePeerEventHandlers(socket); this.sendAddress(socket); await this.waitForAddress(socket); this.requestChainSegment(socket, this.server.node.bc.lastBlockNumber()); diff --git a/p2p/server.js b/p2p/server.js index 41f736e37..42b42bbc6 100644 --- a/p2p/server.js +++ b/p2p/server.js @@ -102,7 +102,7 @@ class P2pServer { // Set the number of maximum clients. 
this.wsServer.setMaxListeners(this.maxInbound); this.wsServer.on('connection', (socket) => { - this.setPeerEventHandlers(socket); + this.setServerSidePeerEventHandlers(socket); }); logger.info(`Listening to peer-to-peer connections on: ${P2P_PORT}\n`); await this.setUpIpAddresses(); @@ -345,8 +345,8 @@ class P2pServer { return 0; } - setPeerEventHandlers(socket) { - const LOG_HEADER = 'setPeerEventHandlers'; + setServerSidePeerEventHandlers(socket) { + const LOG_HEADER = 'setServerSidePeerEventHandlers'; socket.on('message', (message) => { try { const parsedMessage = JSON.parse(message); @@ -601,7 +601,9 @@ class P2pServer { if (this.node.isShardReporter && this.node.bc.lastBlockNumber() === 0) { logger.info(`Setting up sharding..`); await this.setUpDbForSharding(); + return true; } + return false; } // TODO(platfowner): Set .shard config for functions, rules, and owners as well. From 82a6706d8626b34dff40631857616d24f579be68 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 18:59:05 +0900 Subject: [PATCH 022/175] Tweak deploy script. --- start_node_incremental_gcp.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index e1ec4a886..f57ba40e7 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -147,7 +147,9 @@ eval $KILL_CMD printf "\n#### [Step 6] Start a new node process ####\n\n" sleep 10 -START_CMD="nohup node --async-stack-traces --max-old-space-size=4000 client/index.js >/dev/null 2>error_logs.txt &" +MAX_OLD_SPACE_SIZE_MB=4000 + +START_CMD="nohup node --async-stack-traces --max-old-space-size=$MAX_OLD_SPACE_SIZE_MB client/index.js >/dev/null 2>error_logs.txt &" printf "START_CMD='$START_CMD'\n" eval $START_CMD From f1423d1d5cf506571ad7f6d08dfe642d68e3cbe0 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Wed, 7 Jul 2021 23:31:19 +0900 Subject: [PATCH 023/175] Use simpler equality expression. --- start_node_gcp.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/start_node_gcp.sh b/start_node_gcp.sh index c1179761d..cb49f378d 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -7,13 +7,13 @@ if [[ "$#" -lt 2 ]]; then fi export GENESIS_CONFIGS_DIR=genesis-configs/testnet -if [[ "$1" == 'spring' ]]; then +if [[ "$1" = 'spring' ]]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 -elif [[ "$1" == 'summer' ]]; then +elif [[ "$1" = 'summer' ]]; then export TRACKER_WS_ADDR=ws://35.194.172.106:5000 -elif [[ "$1" == 'staging' ]]; then +elif [[ "$1" = 'staging' ]]; then export TRACKER_WS_ADDR=ws://35.221.150.73:5000 -elif [[ "$1" == 'dev' ]]; then +elif [[ "$1" = 'dev' ]]; then if [[ "$2" -gt 0 ]]; then export GENESIS_CONFIGS_DIR=genesis-configs/sim-shard fi From c3c913e3ef8c25fe8f3b3514724929f6abf1bba4 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 12 Jul 2021 14:00:49 +0900 Subject: [PATCH 024/175] Introduce validation-only consensus nodes. 
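Validator entries move from a bare stake amount to an object of the form { stake, producing_right }: whitelisted nodes staking within [MIN_STAKE_PER_VALIDATOR, MAX_STAKE_PER_VALIDATOR] keep block-producing rights, while other staked nodes can participate as validation-only voters, up to MAX_NUM_VALIDATORS validators in total. A minimal sketch of the new shape, with placeholder addresses (the actual logic lives in consensus/index.js and consensus/block-pool.js in the diff below):

    // Illustrative only: the new validator entry format.
    const validators = {
      '0xProducerAddr': { stake: 100000, producing_right: true },  // whitelisted, can propose blocks
      '0xVoterAddr': { stake: 100000, producing_right: false },    // validation-only, can only vote
    };
    // Stake totals now sum the nested stake field:
    const totalAtStake = Object.values(validators).reduce((acc, cur) => acc + cur.stake, 0);
    // Proposer selection weighs only the entries with producing_right === true.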
--- blockchain/block.js | 13 ++- common/common-util.js | 4 +- common/constants.js | 16 ++-- common/path-util.js | 6 +- consensus/block-pool.js | 7 +- consensus/index.js | 84 ++++++++++++------- db/rule-util.js | 5 ++ .../afan-shard/genesis_params.json | 2 + genesis-configs/base/genesis_params.json | 2 + genesis-configs/base/genesis_rules.json | 4 +- genesis-configs/sim-shard/genesis_params.json | 2 + genesis-configs/testnet/genesis_params.json | 2 + p2p/server.js | 1 + tracker-server/index.js | 1 + 14 files changed, 101 insertions(+), 48 deletions(-) diff --git a/blockchain/block.js b/blockchain/block.js index 8765e4ee5..3c802f9e1 100644 --- a/blockchain/block.js +++ b/blockchain/block.js @@ -158,6 +158,15 @@ class Block { nonceTracker[tx.address] = tx.tx_body.nonce; } + for (const validator of block.validators) { + if (!Block.validValidatorObject(validator)) { + logger.error( + `[${LOG_HEADER}] Invalid validators format: ${JSON.stringify(block.validators)} ` + + `(${block.number} / ${block.epoch})`); + return false; + } + } + logger.info(`[${LOG_HEADER}] Validated block: ${block.number} / ${block.epoch}`); return true; } @@ -261,7 +270,7 @@ class Block { static buildGenesisStakingTxs(timestamp) { const txs = []; - Object.entries(GENESIS_VALIDATORS).forEach(([address, amount], index) => { + Object.entries(GENESIS_VALIDATORS).forEach(([address, info], index) => { const privateKey = _.get(GenesisAccounts, `${AccountProperties.OTHERS}.${index}.${AccountProperties.PRIVATE_KEY}`); if (!privateKey) { @@ -274,7 +283,7 @@ class Block { operation: { type: 'SET_VALUE', ref: PathUtil.getStakingStakeRecordValuePath(PredefinedDbPaths.CONSENSUS, address, 0, timestamp), - value: amount + value: info.stake } }; txs.push(Transaction.fromTxBody(txBody, privateKey)); diff --git a/common/common-util.js b/common/common-util.js index 0d56116e5..f084aee1b 100644 --- a/common/common-util.js +++ b/common/common-util.js @@ -574,13 +574,13 @@ class CommonUtil { return CommonUtil.metricsToText(metrics); } - static sleep = (ms) => { + static sleep(ms) { return new Promise((resolve) => { setTimeout(resolve, ms); }); }; - static convertEnvVarInputToBool = (input, defaultValue = false) => { + static convertEnvVarInputToBool(input, defaultValue = false) { return input ? 
input.toLowerCase().startsWith('t') : defaultValue; } } diff --git a/common/constants.js b/common/constants.js index ffe89cd31..9bc6150f5 100644 --- a/common/constants.js +++ b/common/constants.js @@ -520,7 +520,7 @@ const SyncModeOptions = { * (priority: base params < genesis_params.json in GENESIS_CONFIGS_DIR < env var) */ const OVERWRITING_BLOCKCHAIN_PARAMS = ['TRACKER_WS_ADDR', 'HOSTING_ENV']; -const OVERWRITING_CONSENSUS_PARAMS = ['MIN_NUM_VALIDATORS', 'EPOCH_MS']; +const OVERWRITING_CONSENSUS_PARAMS = ['MIN_NUM_VALIDATORS', 'MAX_NUM_VALIDATORS', 'EPOCH_MS']; function overwriteGenesisParams(overwritingParams, type) { for (const key of overwritingParams) { @@ -535,7 +535,7 @@ function overwriteGenesisParams(overwritingParams, type) { for (let i = 0; i < GenesisParams.consensus.MIN_NUM_VALIDATORS; i++) { const addr = GenesisAccounts[AccountProperties.OTHERS][i][AccountProperties.ADDRESS]; CommonUtil.setJsObject(whitelist, [addr], true); - CommonUtil.setJsObject(validators, [addr], GenesisParams.consensus.MIN_STAKE_PER_VALIDATOR); + CommonUtil.setJsObject(validators, [addr], { stake: GenesisParams.consensus.MIN_STAKE_PER_VALIDATOR, producing_right: true }); } GenesisParams.consensus.GENESIS_WHITELIST = whitelist; GenesisParams.consensus.GENESIS_VALIDATORS = validators; @@ -546,19 +546,19 @@ overwriteGenesisParams(OVERWRITING_BLOCKCHAIN_PARAMS, 'blockchain'); overwriteGenesisParams(OVERWRITING_CONSENSUS_PARAMS, 'consensus'); // NOTE(minsulee2): If NETWORK_OPTIMIZATION env is set, it tightly limits the outbound connections. -// The minimum network connections are set based on the MIN_NUM_VALIDATORS otherwise. +// The minimum network connections are set based on the MAX_NUM_VALIDATORS otherwise. function initializeNetworkEnvronments() { if (process.env.NETWORK_OPTIMIZATION) { return GenesisParams.network; } else { return { - // NOTE(minsulee2): Need a discussion that MIN_NUM_VALIDATORS and MAX_INBOUND_LIMIT + // NOTE(minsulee2): Need a discussion that MAX_NUM_VALIDATORS and MAX_INBOUND_LIMIT // should not be related to one another. 
P2P_MESSAGE_TIMEOUT_MS: 600000, - MAX_OUTBOUND_LIMIT: GenesisParams.consensus.MIN_NUM_VALIDATORS - 1, - MAX_INBOUND_LIMIT: GenesisParams.consensus.MIN_NUM_VALIDATORS - 1, - DEFAULT_MAX_OUTBOUND: GenesisParams.consensus.MIN_NUM_VALIDATORS - 1, - DEFAULT_MAX_INBOUND: GenesisParams.consensus.MIN_NUM_VALIDATORS - 1 + MAX_OUTBOUND_LIMIT: GenesisParams.consensus.MAX_NUM_VALIDATORS - 1, + MAX_INBOUND_LIMIT: GenesisParams.consensus.MAX_NUM_VALIDATORS - 1, + DEFAULT_MAX_OUTBOUND: GenesisParams.consensus.MAX_NUM_VALIDATORS - 1, + DEFAULT_MAX_INBOUND: GenesisParams.consensus.MAX_NUM_VALIDATORS - 1 } } } diff --git a/common/path-util.js b/common/path-util.js index b7f7bc136..c4529c231 100644 --- a/common/path-util.js +++ b/common/path-util.js @@ -68,6 +68,10 @@ class PathUtil { PredefinedDbPaths.STAKING, PredefinedDbPaths.STAKING_LOCKUP_DURATION]); } + static getStakingServicePath(serviceName) { + return CommonUtil.formatPath([PredefinedDbPaths.STAKING, serviceName]); + } + static getStakingExpirationPath(serviceName, user, stakingKey) { return CommonUtil.formatPath([PredefinedDbPaths.STAKING, serviceName, user, stakingKey, PredefinedDbPaths.STAKING_EXPIRE_AT]); @@ -187,7 +191,7 @@ class PathUtil { } static getConsensusStakingAccountBalancePath(address) { - const accountPath = PathUtil.getServiceAccountPath(PredefinedDbPaths.STAKING, PredefinedDbPaths.CONSENSUS, `${address}|0`); + const accountPath = PathUtil.getServiceAccountPath(PredefinedDbPaths.STAKING, PredefinedDbPaths.CONSENSUS, `${address}|0`); return CommonUtil.appendPath(accountPath, PredefinedDbPaths.BALANCE) } diff --git a/consensus/block-pool.js b/consensus/block-pool.js index 40ede0a0e..87eb938ea 100644 --- a/consensus/block-pool.js +++ b/consensus/block-pool.js @@ -316,8 +316,7 @@ class BlockPool { } const voter = voteTx.address; logger.debug(`[${LOG_HEADER}] voted block: ${JSON.stringify(block, null, 2)}`); - logger.debug(`[${LOG_HEADER}] ${block && block.validators[voter] === stake}`); - if (stake > 0 && block && block.validators[voter] === stake) { + if (stake > 0 && block && get(block, `validators.${voter}.stake`) === stake) { this.hashToBlockInfo[blockHash].tallied += stake; this.tryUpdateNotarized(blockHash); } @@ -345,8 +344,8 @@ class BlockPool { logger.info(`[${LOG_HEADER}] Prev block is unavailable`); return; } - const totalAtStake = Object.values(prevBlock.validators).reduce((a, b) => { - return a + b; + const totalAtStake = Object.values(prevBlock.validators).reduce((acc, cur) => { + return acc + get(cur, 'stake', 0); }, 0); if (currentBlockInfo.tallied && currentBlockInfo.tallied >= totalAtStake * ConsensusConsts.MAJORITY) { diff --git a/consensus/index.js b/consensus/index.js index 32c7752a4..9343ccb20 100644 --- a/consensus/index.js +++ b/consensus/index.js @@ -22,7 +22,9 @@ const { GENESIS_WHITELIST, LIGHTWEIGHT, MIN_NUM_VALIDATORS, + MAX_NUM_VALIDATORS, MIN_STAKE_PER_VALIDATOR, + MAX_STAKE_PER_VALIDATOR, EPOCH_MS, CONSENSUS_PROTOCOL_VERSION } = require('../common/constants'); @@ -80,7 +82,7 @@ class Consensus { try { const targetStake = process.env.STAKE ? Number(process.env.STAKE) : 0; const currentStake = - this.getValidConsensusStake(this.node.stateManager.getFinalVersion(), myAddr); + this.getConsensusStakeFromAddr(this.node.stateManager.getFinalVersion(), myAddr); logger.info(`[${LOG_HEADER}] Current stake: ${currentStake} / Target stake: ${targetStake}`); if (!targetStake && !currentStake) { logger.info(`[${LOG_HEADER}] Node doesn't have any stakes. 
` + @@ -382,11 +384,10 @@ class Consensus { if (this.node.bc.lastBlockNumber() < 1) { const whitelist = GENESIS_WHITELIST; for (const address in whitelist) { - if (Object.prototype.hasOwnProperty.call(whitelist, address)) { - const stakingAccount = tempDb.getValue(PathUtil.getConsensusStakingAccountPath(address)); - if (whitelist[address] === true && stakingAccount && - stakingAccount.balance >= MIN_STAKE_PER_VALIDATOR) { - validators[address] = stakingAccount.balance; + if (whitelist[address] === true) { + const stake = tempDb.getValue(PathUtil.getConsensusStakingAccountBalancePath(address)); + if (stake && MIN_STAKE_PER_VALIDATOR <= stake && stake <= MAX_STAKE_PER_VALIDATOR) { + validators[address] = { stake, producing_right: true }; } } } @@ -399,8 +400,8 @@ class Consensus { if (numValidators < MIN_NUM_VALIDATORS) { throw Error(`Not enough validators: ${JSON.stringify(validators)}`); } - const totalAtStake = Object.values(validators).reduce(function(a, b) { - return a + b; + const totalAtStake = Object.values(validators).reduce((acc, cur) => { + return acc + cur.stake; }, 0); const stateProofHash = LIGHTWEIGHT ? '' : tempDb.getStateProof('/')[ProofProperties.PROOF_HASH]; const proposalBlock = Block.create( @@ -506,10 +507,10 @@ class Consensus { const prevBlock = number > 1 ? prevBlockInfo.block : prevBlockInfo; // Make sure we have at least MIN_NUM_VALIDATORS validators. - if (Object.keys(validators).length < MIN_NUM_VALIDATORS) { + if (Object.keys(validators).length < MIN_NUM_VALIDATORS || Object.keys(validators).length > MAX_NUM_VALIDATORS) { logger.error( - `[${LOG_HEADER}] Validator set smaller than MIN_NUM_VALIDATORS: ` + - `${JSON.stringify(validators)}`); + `[${LOG_HEADER}] Invalid validator set size (${JSON.stringify(validators)})\n` + + `MIN_NUM_VALIDATORS: ${MIN_NUM_VALIDATORS}, MAX_NUM_VALIDATORS: ${MAX_NUM_VALIDATORS}`); return false; } @@ -778,8 +779,8 @@ class Consensus { vote(block) { const myAddr = this.node.account.address; - const myStake = block.validators[myAddr]; - if (!myStake) { + const isValidator = block.validators[myAddr]; + if (!isValidator) { return; } const operation = { @@ -787,7 +788,7 @@ class Consensus { ref: PathUtil.getConsensusVotePath(block.number, myAddr), value: { [PredefinedDbPaths.BLOCK_HASH]: block.hash, - [PredefinedDbPaths.STAKE]: myStake + [PredefinedDbPaths.STAKE]: isValidator.stake } }; const voteTx = this.node.createTransaction({ operation, nonce: -1, gas_price: 1 }); @@ -977,20 +978,42 @@ class Consensus { logger.error(err); throw Error(err); } - const whitelist = this.getWhitelist(stateVersion); + let candidates = []; const validators = {}; - Object.keys(whitelist).forEach((address) => { - const stake = this.getValidConsensusStake(stateVersion, address); - if (whitelist[address] === true && stake >= MIN_STAKE_PER_VALIDATOR) { - validators[address] = stake; + const whitelist = this.getWhitelist(stateVersion); + const stateRoot = this.node.stateManager.getRoot(stateVersion); + const allStakeInfo = DB.getValueFromStateRoot( + stateRoot, PathUtil.getStakingServicePath(PredefinedDbPaths.CONSENSUS)) || {}; + for (const [address, stakeInfo] of Object.entries(allStakeInfo)) { + const stake = this.getConsensusStakeFromAddr(stateVersion, address); + if (stake) { + if (whitelist[address] === true) { + if (MIN_STAKE_PER_VALIDATOR <= stake && stake <= MAX_STAKE_PER_VALIDATOR) { + validators[address] = { stake, producing_right: true }; + } + } else { + candidates.push({ + address, + stake, + expireAt: _.get(stakeInfo, 
`0.${PredefinedDbPaths.STAKING_EXPIRE_AT}`, 0) + }); + } } - }); + } + candidates = _.orderBy(candidates, ['stake', 'expireAt'], ['desc', 'desc']); // TODO(liayoo): How to do tie-breaking? + for (const candidate of candidates) { + if (Object.keys(validators).length < MAX_NUM_VALIDATORS) { + validators[candidate.address] = { stake: candidate.stake, producing_right: false }; + } else { + break; + } + } logger.debug(`[${LOG_HEADER}] validators: ${JSON.stringify(validators, null, 2)}, ` + `whitelist: ${JSON.stringify(whitelist, null, 2)}`); return validators; } - getValidConsensusStake(stateVersion, address) { + getConsensusStakeFromAddr(stateVersion, address) { const stateRoot = this.node.stateManager.getRoot(stateVersion); return DB.getValueFromStateRoot( stateRoot, PathUtil.getConsensusStakingAccountBalancePath(address)) || 0; @@ -1201,22 +1224,25 @@ class Consensus { static selectProposer(seed, validators) { const LOG_HEADER = 'selectProposer'; logger.debug(`[${LOG_HEADER}] seed: ${seed}, validators: ${JSON.stringify(validators)}`); - const alphabeticallyOrderedValidators = Object.keys(validators).sort(); - const totalAtStake = Object.values(validators).reduce((a, b) => { - return a + b; + const validatorsWithProducingRights = _.pickBy(validators, (x) => _.get(x, 'producing_right') === true); + const alphabeticallyOrderedValidators = Object.keys(validatorsWithProducingRights).sort(); + const totalAtStake = Object.values(validatorsWithProducingRights).reduce((acc, cur) => { + return acc + cur.stake; }, 0); const randomNumGenerator = seedrandom(seed); const targetValue = randomNumGenerator() * totalAtStake; let cumulative = 0; for (let i = 0; i < alphabeticallyOrderedValidators.length; i++) { - cumulative += validators[alphabeticallyOrderedValidators[i]]; + const addr = alphabeticallyOrderedValidators[i]; + cumulative += validatorsWithProducingRights[addr].stake; if (cumulative > targetValue) { - logger.info(`Proposer is ${alphabeticallyOrderedValidators[i]}`); - return alphabeticallyOrderedValidators[i]; + logger.info(`Proposer is ${addr}`); + return addr; } } - logger.error(`[${LOG_HEADER}] Failed to get the proposer.\nvalidators: ` + - `${alphabeticallyOrderedValidators}\n` + + logger.error( + `[${LOG_HEADER}] Failed to get the proposer.\n` + + `alphabeticallyOrderedValidators: ${alphabeticallyOrderedValidators}\n` + `totalAtStake: ${totalAtStake}\nseed: ${seed}\ntargetValue: ${targetValue}`); return null; } diff --git a/db/rule-util.js b/db/rule-util.js index 86eeedfc5..c1aa5d868 100644 --- a/db/rule-util.js +++ b/db/rule-util.js @@ -166,6 +166,11 @@ class RuleUtil { return MIN_STAKE_PER_VALIDATOR; } + getMaxStakeAmount() { + const { MAX_STAKE_PER_VALIDATOR } = require('../common/constants'); + return MAX_STAKE_PER_VALIDATOR; + } + getMinNumValidators() { const { MIN_NUM_VALIDATORS } = require('../common/constants'); return MIN_NUM_VALIDATORS; diff --git a/genesis-configs/afan-shard/genesis_params.json b/genesis-configs/afan-shard/genesis_params.json index c7e4124ba..c004991c5 100644 --- a/genesis-configs/afan-shard/genesis_params.json +++ b/genesis-configs/afan-shard/genesis_params.json @@ -14,7 +14,9 @@ }, "consensus": { "MIN_STAKE_PER_VALIDATOR": 100000, + "MAX_STAKE_PER_VALIDATOR": 100000, "MIN_NUM_VALIDATORS": 3, + "MAX_NUM_VALIDATORS": 9, "EPOCH_MS": 3000, "GENESIS_WHITELIST": {}, "GENESIS_VALIDATORS": {} diff --git a/genesis-configs/base/genesis_params.json b/genesis-configs/base/genesis_params.json index 5e586f926..091a47a43 100644 --- a/genesis-configs/base/genesis_params.json 
+++ b/genesis-configs/base/genesis_params.json @@ -14,7 +14,9 @@ }, "consensus": { "MIN_STAKE_PER_VALIDATOR": 100000, + "MAX_STAKE_PER_VALIDATOR": 100000, "MIN_NUM_VALIDATORS": 3, + "MAX_NUM_VALIDATORS": 9, "EPOCH_MS": 3000, "GENESIS_WHITELIST": {}, "GENESIS_VALIDATORS": {} diff --git a/genesis-configs/base/genesis_rules.json b/genesis-configs/base/genesis_rules.json index c6d9a0387..f28d895c0 100644 --- a/genesis-configs/base/genesis_rules.json +++ b/genesis-configs/base/genesis_rules.json @@ -28,11 +28,11 @@ "$number": { ".write": "newData === null && !!getValue('/consensus/number/' + (Number($number) + 1000))", "propose": { - ".write": "newData !== null && util.isDict(newData) && newData.proposer === auth.addr && Number($number) === newData.number && getValue('/consensus/whitelist/' + auth.addr) === true && (lastBlockNumber < 1 || getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') >= util.getMinStakeAmount()) && util.isNumber(newData.gas_cost_total) && (newData.gas_cost_total === 0 || newData.gas_cost_total === getValue('/service_accounts/gas_fee/gas_fee/' + $number + '/balance'))" + ".write": "util.isDict(newData) && newData.proposer === auth.addr && Number($number) === newData.number && getValue('/consensus/whitelist/' + auth.addr) === true && (lastBlockNumber < 1 || (getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') >= util.getMinStakeAmount() && getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') <= util.getMaxStakeAmount())) && util.isNumber(newData.gas_cost_total) && (newData.gas_cost_total === 0 || newData.gas_cost_total === getValue('/service_accounts/gas_fee/gas_fee/' + $number + '/balance'))" }, "vote": { "$user_addr": { - ".write": "auth.addr === $user_addr && util.isDict(newData) && util.isString(newData.block_hash) && util.isNumber(newData.stake) && newData.stake > 0 && getValue('/consensus/whitelist/' + auth.addr) === true && (lastBlockNumber < 1 || getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') >= util.getMinStakeAmount())" + ".write": "auth.addr === $user_addr && util.isDict(newData) && util.isString(newData.block_hash) && util.isNumber(newData.stake) && (lastBlockNumber < 1 || getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') === newData.stake)" } } } diff --git a/genesis-configs/sim-shard/genesis_params.json b/genesis-configs/sim-shard/genesis_params.json index d01e0c7ab..0f9d34fb0 100644 --- a/genesis-configs/sim-shard/genesis_params.json +++ b/genesis-configs/sim-shard/genesis_params.json @@ -14,7 +14,9 @@ }, "consensus": { "MIN_STAKE_PER_VALIDATOR": 100000, + "MAX_STAKE_PER_VALIDATOR": 100000, "MIN_NUM_VALIDATORS": 3, + "MAX_NUM_VALIDATORS": 9, "EPOCH_MS": 3000, "GENESIS_WHITELIST": {}, "GENESIS_VALIDATORS": {} diff --git a/genesis-configs/testnet/genesis_params.json b/genesis-configs/testnet/genesis_params.json index 3ebd89a5e..c77c82532 100644 --- a/genesis-configs/testnet/genesis_params.json +++ b/genesis-configs/testnet/genesis_params.json @@ -14,7 +14,9 @@ }, "consensus": { "MIN_STAKE_PER_VALIDATOR": 100000, + "MAX_STAKE_PER_VALIDATOR": 100000, "MIN_NUM_VALIDATORS": 5, + "MAX_NUM_VALIDATORS": 15, "EPOCH_MS": 3000, "GENESIS_WHITELIST": {}, "GENESIS_VALIDATORS": {} diff --git a/p2p/server.js b/p2p/server.js index 42b42bbc6..ff00d2ab9 100644 --- a/p2p/server.js +++ b/p2p/server.js @@ -241,6 +241,7 @@ class P2pServer { NETWORK_OPTIMIZATION: process.env.NETWORK_OPTIMIZATION, GENESIS_CONFIGS_DIR: process.env.GENESIS_CONFIGS_DIR, 
MIN_NUM_VALIDATORS: process.env.MIN_NUM_VALIDATORS, + MAX_NUM_VALIDATORS: process.env.MAX_NUM_VALIDATORS, ACCOUNT_INDEX: process.env.ACCOUNT_INDEX, P2P_PORT: process.env.P2P_PORT, PORT: process.env.PORT, diff --git a/tracker-server/index.js b/tracker-server/index.js index de4d4558d..47fe17ad0 100755 --- a/tracker-server/index.js +++ b/tracker-server/index.js @@ -304,6 +304,7 @@ function getRuntimeInfo() { NETWORK_OPTIMIZATION: process.env.NETWORK_OPTIMIZATION, GENESIS_CONFIGS_DIR: process.env.GENESIS_CONFIGS_DIR, MIN_NUM_VALIDATORS: process.env.MIN_NUM_VALIDATORS, + MAX_NUM_VALIDATORS: process.env.MAX_NUM_VALIDATORS, ACCOUNT_INDEX: process.env.ACCOUNT_INDEX, P2P_PORT: process.env.P2P_PORT, PORT: process.env.PORT, From 6a7eec07c6481c19a14cdb849476fb9fef768f75 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 12 Jul 2021 14:01:17 +0900 Subject: [PATCH 025/175] Update tests. --- integration/blockchain.test.js | 6 +++--- unittest/block-pool.test.js | 14 ++++++++------ unittest/p2p.test.js | 1 + 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/integration/blockchain.test.js b/integration/blockchain.test.js index 92d9b48e9..2f9db9901 100644 --- a/integration/blockchain.test.js +++ b/integration/blockchain.test.js @@ -398,18 +398,18 @@ describe('Blockchain Cluster', () => { for (let j = 2; j < len; j++) { // voting starts with block#1 (included in block#2's last_votes) let voteSum = 0; const validators = Object.assign({}, blocks[j - 1].validators); - let totalStakedAmount = Object.values(validators).reduce((a, b) => { return a + b; }, 0); + let totalStakedAmount = Object.values(validators).reduce((acc, cur) => { return acc + cur.stake; }, 0); let majority = Math.floor(totalStakedAmount * ConsensusConsts.MAJORITY); for (let k = 0; k < blocks[j].last_votes.length; k++) { const vote = blocks[j].last_votes[k]; - if (!blocks[j - 1].validators[vote.address]) { + if (!blocks[j - 1].validators[vote.address].stake) { assert.fail(`Invalid validator (${vote.address}) is validating block ${blocks[j - 1]}`); } if (vote.tx_body.operation.value.block_hash !== blocks[j - 1].hash) { assert.fail('Invalid vote included in last_votes'); } if (vote.tx_body.operation.type === 'SET_VALUE' && vote.tx_body.operation.value.stake && - blocks[j - 1].validators[vote.address]) { + blocks[j - 1].validators[vote.address].stake) { voteSum += vote.tx_body.operation.value.stake; } } diff --git a/unittest/block-pool.test.js b/unittest/block-pool.test.js index 06b829e42..055651d28 100644 --- a/unittest/block-pool.test.js +++ b/unittest/block-pool.test.js @@ -25,7 +25,7 @@ describe("BlockPool", () => { function createAndAddBlock(node, blockPool, lastBlock, number, epoch) { const block = Block.create( lastBlock.hash, [], [], number, epoch, '', node.account.address, - {[node.account.address]: 100000}, 0, 0); + {[node.account.address]: { stake: 100000, producing_right: true } }, 0, 0); const proposal = getTransaction(node, { operation: { type: 'SET_VALUE', @@ -33,7 +33,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: {[node.account.address]: 100000}, + validators: { [node.account.address]: { stake: 100000, producing_right: true } }, total_at_stake: 100000, proposer: node.account.address, block_hash: block.hash @@ -65,7 +65,8 @@ describe("BlockPool", () => { const lastBlock = node1.bc.lastBlock(); const addr = node1.account.address; const block = Block.create( - lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, {[addr]: 100000}, 0, 0); + 
lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, + {[addr]: { stake: 100000, producing_right: true }}, 0, 0); const proposalTx = getTransaction(node1, { operation: { type: 'SET_VALUE', @@ -73,7 +74,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: {[addr]: 100000}, + validators: {[addr]: { stake: 100000, producing_right: true } }, total_at_stake: 100000, proposer: addr, block_hash: block.hash @@ -93,7 +94,8 @@ describe("BlockPool", () => { const addr = node1.account.address; const lastBlock = node1.bc.lastBlock(); const block = Block.create( - lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, {[addr]: 100000}, 0, 0); + lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, + {[addr]: { stake: 100000, producing_right: true }}, 0, 0); const proposalTx = getTransaction(node1, { operation: { type: 'SET_VALUE', @@ -101,7 +103,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: {[addr]: 100000}, + validators: {[addr]: { stake: 100000, producing_right: true } }, total_at_stake: 100000, proposer: addr, block_hash: block.hash diff --git a/unittest/p2p.test.js b/unittest/p2p.test.js index a2b580414..fa0707b2d 100644 --- a/unittest/p2p.test.js +++ b/unittest/p2p.test.js @@ -212,6 +212,7 @@ describe("p2p", () => { NETWORK_OPTIMIZATION: undefined, GENESIS_CONFIGS_DIR: undefined, MIN_NUM_VALIDATORS: undefined, + MAX_NUM_VALIDATORS: undefined, ACCOUNT_INDEX: undefined, P2P_PORT: undefined, PORT: undefined, From cd1cc650b0d2c05c9d19468ac3924130e7e949e1 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 12 Jul 2021 14:01:52 +0900 Subject: [PATCH 026/175] Add more consensus unit test cases. --- unittest/consensus.test.js | 133 ++++++++++++++++++++++++++++++++++--- 1 file changed, 124 insertions(+), 9 deletions(-) diff --git a/unittest/consensus.test.js b/unittest/consensus.test.js index 7c9a716b0..aae15be32 100644 --- a/unittest/consensus.test.js +++ b/unittest/consensus.test.js @@ -1,7 +1,7 @@ const chai = require('chai'); const expect = chai.expect; const rimraf = require('rimraf'); -const { CHAINS_DIR } = require('../common/constants'); +const { CHAINS_DIR, MIN_STAKE_PER_VALIDATOR, MAX_STAKE_PER_VALIDATOR } = require('../common/constants'); const BlockchainNode = require('../node'); const { setNodeForTesting, getTransaction, addBlock } = require('./test-util') @@ -28,17 +28,19 @@ describe("Consensus", () => { ref: '/afan/test', value: 'foo' }, + nonce: -1, gas_price: 1 } ); addBlock(node1, [tx], [], {}); const lastBlock = node1.bc.lastBlock(); - const voteTx = getTransaction(node1, { + const voteTx = getTransaction(node2, { operation: { type: 'SET_VALUE', - ref: `/consensus/number/1/vote/${node1.account.address}`, - value: { block_hash: lastBlock.hash, stake: 0 } + ref: `/consensus/number/1/vote/${node2.account.address}`, + value: { block_hash: lastBlock.hash, stake: 100000 } }, + nonce: -1, gas_price: 1 } ); @@ -46,27 +48,140 @@ describe("Consensus", () => { }); it("Staked nodes can vote", () => { - const addr = node1.account.address; - const stakeTx = getTransaction(node1, { + const addr = node2.account.address; // Staked node without producing rights + const stakeTx = getTransaction(node2, { operation: { type: 'SET_VALUE', ref: `/staking/consensus/${addr}/0/stake/key1/value`, - value: 200 + value: 100000 }, + nonce: -1, gas_price: 1 } ); addBlock(node1, [stakeTx], [], {}); const lastBlock = node1.bc.lastBlock(); - const voteTx = 
getTransaction(node1, { + const voteTx = getTransaction(node2, { operation: { type: 'SET_VALUE', ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, - value: { block_hash: lastBlock.hash, stake: 200 } + value: { block_hash: lastBlock.hash, stake: 100000 } }, + nonce: -1, gas_price: 1 } ); expect(node1.db.executeTransaction(voteTx).code).to.equal(0); }); + + it('Staked nodes without producing rights cannot propose blocks', () => { + const addr = node2.account.address; + const stakeTx = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/staking/consensus/${addr}/0/stake/key1/value`, + value: 100000 + }, + nonce: -1, + gas_price: 1 + } + ); + addBlock(node1, [stakeTx], [], {}); + const lastBlock = node1.bc.lastBlock(); + const voteTx = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/consensus/number/${lastBlock.number + 1}/propose/${addr}`, + value: { + number: lastBlock.number + 1, + proposer: addr, + gas_cost_total: 0 + } + }, + nonce: -1, + gas_price: 1 + } + ); + expect(node1.db.executeTransaction(voteTx).code).to.equal(103); + }); + + it('Whitelisted validators must stake within MIN_STAKE_PER_VALIDATOR & MAX_STAKE_PER_VALIDATOR to have the producing rights', () => { + let lastBlock = node1.bc.lastBlock(); + const addr = node2.account.address; + + // Staking less than MIN_STAKE_PER_VALIDATOR + const stakeLessThanMin = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/staking/consensus/${addr}/0/stake/key1/value`, + value: MIN_STAKE_PER_VALIDATOR - 1 + }, + nonce: -1, + gas_price: 1 + } + ); + addBlock(node1, [stakeLessThanMin], [], {}); + lastBlock = node1.bc.lastBlock(); + const voteWithStakeLessThanMin = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, + value: { block_hash: lastBlock.hash, stake: 100000 } + }, + nonce: -1, + gas_price: 1 + } + ); + expect(node1.db.executeTransaction(voteWithStakeLessThanMin).code).to.equal(103); // Fails + + // Staking MIN_STAKE_PER_VALIDATOR + const stakeEqualMin = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/staking/consensus/${addr}/0/stake/key2/value`, + value: 1 + }, + nonce: -1, + gas_price: 1 + } + ); + addBlock(node1, [stakeEqualMin], [], {}); + lastBlock = node1.bc.lastBlock(); + const voteWithStakeEqualMin = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, + value: { block_hash: lastBlock.hash, stake: 100000 } + }, + nonce: -1, + gas_price: 1 + } + ); + expect(node1.db.executeTransaction(voteWithStakeEqualMin).code).to.equal(0); // Succeeds + + // Staking more than MAX_STAKE_PER_VALIDATOR + const stakeMoreThanMax = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/staking/consensus/${addr}/0/stake/key3/value`, + value: MAX_STAKE_PER_VALIDATOR - MIN_STAKE_PER_VALIDATOR + 1 // 1 more than MAX_STAKE_PER_VALIDATOR + }, + nonce: -1, + gas_price: 1 + } + ); + addBlock(node1, [stakeMoreThanMax], [], {}); + lastBlock = node1.bc.lastBlock(); + const voteWithStakeMoreThanMax = getTransaction(node2, { + operation: { + type: 'SET_VALUE', + ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, + value: { block_hash: lastBlock.hash, stake: 100000 } + }, + nonce: -1, + gas_price: 1 + } + ); + expect(node1.db.executeTransaction(voteWithStakeMoreThanMax).code).to.equal(103); // Fails + }); }); \ No newline at end of file From 9636fe799f5ad2652f51d917577b13be1bb1b0de Mon Sep 17 00:00:00 2001 From: Lia Yoo 
Date: Mon, 12 Jul 2021 14:02:14 +0900 Subject: [PATCH 027/175] Add consensus integration test. --- integration/consensus.test.js | 289 ++++++++++++++++++++++++++++++++++ unittest/test-util.js | 5 + 2 files changed, 294 insertions(+) create mode 100644 integration/consensus.test.js diff --git a/integration/consensus.test.js b/integration/consensus.test.js new file mode 100644 index 000000000..d8edb696f --- /dev/null +++ b/integration/consensus.test.js @@ -0,0 +1,289 @@ +const _ = require('lodash'); +const chai = require('chai'); +const assert = chai.assert; +const spawn = require('child_process').spawn; +const rimraf = require('rimraf'); +const jayson = require('jayson/promise'); +const PROJECT_ROOT = require('path').dirname(__filename) + '/../'; +const TRACKER_SERVER = PROJECT_ROOT + 'tracker-server/index.js'; +const APP_SERVER = PROJECT_ROOT + 'client/index.js'; +const syncRequest = require('sync-request'); +const { + CURRENT_PROTOCOL_VERSION, + CHAINS_DIR, +} = require('../common/constants'); +const CommonUtil = require('../common/common-util'); +const MAX_ITERATION = 200; +const { + waitUntilTxFinalized, + waitForNewBlocks, + parseOrLog, + getLastBlock, +} = require('../unittest/test-util'); + +const MAX_NUM_VALIDATORS = 4; +const ENV_VARIABLES = [ + { + ACCOUNT_INDEX: 0, MIN_NUM_VALIDATORS: 3, MAX_NUM_VALIDATORS, EPOCH_MS: 1000, DEBUG: false, + CONSOLE_LOG: false, ENABLE_DEV_SET_CLIENT_API: true, ENABLE_GAS_FEE_WORKAROUND: true, + ADDITIONAL_OWNERS: 'test:unittest/data/owners_for_testing.json', + ADDITIONAL_RULES: 'test:unittest/data/rules_for_testing.json' + }, + { + ACCOUNT_INDEX: 1, MIN_NUM_VALIDATORS: 3, MAX_NUM_VALIDATORS, EPOCH_MS: 1000, DEBUG: false, + CONSOLE_LOG: false, ENABLE_DEV_SET_CLIENT_API: true, ENABLE_GAS_FEE_WORKAROUND: true, + ADDITIONAL_OWNERS: 'test:unittest/data/owners_for_testing.json', + ADDITIONAL_RULES: 'test:unittest/data/rules_for_testing.json' + }, + { + ACCOUNT_INDEX: 2, MIN_NUM_VALIDATORS: 3, MAX_NUM_VALIDATORS, EPOCH_MS: 1000, DEBUG: false, + CONSOLE_LOG: false, ENABLE_DEV_SET_CLIENT_API: true, ENABLE_GAS_FEE_WORKAROUND: true, + ADDITIONAL_OWNERS: 'test:unittest/data/owners_for_testing.json', + ADDITIONAL_RULES: 'test:unittest/data/rules_for_testing.json' + }, + { + ACCOUNT_INDEX: 3, MIN_NUM_VALIDATORS: 3, MAX_NUM_VALIDATORS, EPOCH_MS: 1000, DEBUG: false, + CONSOLE_LOG: false, ENABLE_DEV_SET_CLIENT_API: true, ENABLE_GAS_FEE_WORKAROUND: true, + ADDITIONAL_OWNERS: 'test:unittest/data/owners_for_testing.json', + ADDITIONAL_RULES: 'test:unittest/data/rules_for_testing.json' + }, + { + ACCOUNT_INDEX: 4, MIN_NUM_VALIDATORS: 3, MAX_NUM_VALIDATORS, EPOCH_MS: 1000, DEBUG: false, + CONSOLE_LOG: false, ENABLE_DEV_SET_CLIENT_API: true, ENABLE_GAS_FEE_WORKAROUND: true, + ADDITIONAL_OWNERS: 'test:unittest/data/owners_for_testing.json', + ADDITIONAL_RULES: 'test:unittest/data/rules_for_testing.json' + }, +]; + +// Server configurations +const server1 = 'http://localhost:' + String(8081 + Number(ENV_VARIABLES[0].ACCOUNT_INDEX)) +const server2 = 'http://localhost:' + String(8081 + Number(ENV_VARIABLES[1].ACCOUNT_INDEX)) +const server3 = 'http://localhost:' + String(8081 + Number(ENV_VARIABLES[2].ACCOUNT_INDEX)) +const server4 = 'http://localhost:' + String(8081 + Number(ENV_VARIABLES[3].ACCOUNT_INDEX)) +const server5 = 'http://localhost:' + String(8081 + Number(ENV_VARIABLES[4].ACCOUNT_INDEX)) +const serverList = [server1, server2, server3, server4, server5]; + +const JSON_RPC_ENDPOINT = '/json-rpc'; +const JSON_RPC_GET_RECENT_BLOCK = 'ain_getRecentBlock'; + +class 
Process { + constructor(application, envVariables) { + this.application = application; + this.envVariables = envVariables; + this.proc = null; + } + + start(stdioInherit = false) { + if (this.proc) { + throw Error('Process already started'); + } + const options = { + cwd: process.cwd(), + env: { + PATH: process.env.PATH, + ...this.envVariables, + }, + } + if (stdioInherit) { + options.stdio = 'inherit'; + } + this.proc = spawn('node', [this.application], options).on('error', (err) => { + console.error( + `Failed to start server${this.application} with ${this.envVariables} with error: ` + + err.message); + }); + } + + kill() { + this.proc.kill(); + this.proc = null; + } +} + +const SERVER_PROCS = []; +for (let i = 0; i < ENV_VARIABLES.length; i++) { + SERVER_PROCS.push(new Process(APP_SERVER, ENV_VARIABLES[i])); +} + +describe('Consensus', () => { + let trackerProc; + let jsonRpcClient; + let server4Addr; + let server5Addr; + const nodeAddressList = []; + + before(async () => { + rimraf.sync(CHAINS_DIR); + + const promises = []; + // Start up all servers + trackerProc = new Process(TRACKER_SERVER, { CONSOLE_LOG: false }); + trackerProc.start(true); + await CommonUtil.sleep(2000); + for (let i = 0; i < SERVER_PROCS.length; i++) { + const proc = SERVER_PROCS[i]; + proc.start(true); + await CommonUtil.sleep(2000); + const address = + parseOrLog(syncRequest('GET', serverList[i] + '/get_address').body.toString('utf-8')).result; + nodeAddressList.push(address); + }; + jsonRpcClient = jayson.client.http(server2 + JSON_RPC_ENDPOINT); + promises.push(new Promise((resolve) => { + jsonRpcClient.request(JSON_RPC_GET_RECENT_BLOCK, + {protoVer: CURRENT_PROTOCOL_VERSION}, function(err, response) { + if (err) { + resolve(); + throw err; + } + numBlocksOnStartup = response.result.result ? response.result.result.number : 0; + resolve(); + }); + })); + await Promise.all(promises); + + server1Addr = parseOrLog(syncRequest( + 'GET', server1 + '/get_address').body.toString('utf-8')).result; + server2Addr = parseOrLog(syncRequest( + 'GET', server2 + '/get_address').body.toString('utf-8')).result; + server3Addr = parseOrLog(syncRequest( + 'GET', server3 + '/get_address').body.toString('utf-8')).result; + server4Addr = parseOrLog(syncRequest( + 'GET', server4 + '/get_address').body.toString('utf-8')).result; + server5Addr = parseOrLog(syncRequest( + 'GET', server5 + '/get_address').body.toString('utf-8')).result; + }); + + after(() => { + // Teardown all servers + for (let i = 0; i < SERVER_PROCS.length; i++) { + SERVER_PROCS[i].kill(); + } + trackerProc.kill(); + + rimraf.sync(CHAINS_DIR); + }); + + describe('Validators', () => { + it('Number of validators cannot exceed MAX_NUM_VALIDATORS', async () => { + // 1. server4 stakes 100000 + const server4StakeRes = parseOrLog(syncRequest('POST', server4 + '/set_value', {json: { + ref: `/staking/consensus/${server4Addr}/0/stake/${Date.now()}/value`, + value: 100000, + nonce: -1 + }}).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized([server4], server4StakeRes.tx_hash))) { + console.error(`Failed to check finalization of server4's staking tx.`); + } + // 2. 
server4 added to validators & can vote + let iterCount = 0; + let lastBlock = getLastBlock(server1); + while (!lastBlock.validators[server4Addr]) { + if (iterCount >= MAX_ITERATION) { + console.log(`Iteration count exceeded its limit before server4 becomes a validator`); + assert.fail(`server4 is not included in validators`); + } + lastBlock = getLastBlock(server1); + iterCount++; + await CommonUtil.sleep(200); + } + assert.deepEqual(lastBlock.validators[server4Addr].producing_right, false); + await waitForNewBlocks(server1, 1); + const server4Voted = parseOrLog(syncRequest( + 'GET', + `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote/${server4Addr}` + ).body.toString('utf-8')).result; + assert.deepEqual(server4Voted.stake, 100000); + // 3. server5 stakes 100000 + const server5StakeRes = parseOrLog(syncRequest('POST', server5 + '/set_value', {json: { + ref: `/staking/consensus/${server5Addr}/0/stake/${Date.now()}/value`, + value: 100000, + nonce: -1 + }}).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized([server4], server5StakeRes.tx_hash))) { + console.error(`Failed to check finalization of server5's staking tx.`); + } + // 4. server5 added to validators & server4 is evicted (server5's expireAt > server4's expireAt) + iterCount = 0; + lastBlock = getLastBlock(server1); + while (!lastBlock.validators[server5Addr]) { + if (iterCount >= MAX_ITERATION) { + console.log(`Iteration count exceeded its limit before server5 becomes a validator`); + assert.fail(`server5 is not included in validators`); + } + lastBlock = getLastBlock(server1); + assert.deepEqual(Object.keys(lastBlock.validators).length, MAX_NUM_VALIDATORS); + iterCount++; + await CommonUtil.sleep(200); + } + assert.deepEqual(lastBlock.validators[server5Addr].producing_right, false); + await waitForNewBlocks(server1, 1); + const votes = parseOrLog(syncRequest( + 'GET', + `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` + ).body.toString('utf-8')).result; + assert.deepEqual(votes[server4Addr], undefined); + assert.deepEqual(votes[server5Addr].stake, 100000); + }); + + it('When more than MAX_NUM_VALIDATORS validators exist, validatators with bigger stakes get prioritized', async () => { + // 1. server4 stakes 10 more AIN + const server4StakeRes = parseOrLog(syncRequest('POST', server4 + '/set_value', {json: { + ref: `/staking/consensus/${server4Addr}/0/stake/${Date.now()}/value`, + value: 10, + nonce: -1 + }}).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized([server4], server4StakeRes.tx_hash))) { + console.error(`Failed to check finalization of server4's staking tx.`); + } + // 2. server4 added to validators & server5 is evicted + let iterCount = 0; + let lastBlock = getLastBlock(server1); + while (!lastBlock.validators[server4Addr]) { + if (iterCount >= MAX_ITERATION) { + console.log(`Iteration count exceeded its limit before server4 becomes a validator`); + assert.fail(`server4 is not included in validators`); + } + lastBlock = getLastBlock(server1); + iterCount++; + await CommonUtil.sleep(200); + } + await waitForNewBlocks(server1, 1); + let votes = parseOrLog(syncRequest( + 'GET', + `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` + ).body.toString('utf-8')).result; + assert.deepEqual(votes[server5Addr], undefined); + assert.deepEqual(votes[server4Addr].stake, 100010); + // 3. 
server5 stakes 20 more AIN + const server5StakeRes = parseOrLog(syncRequest('POST', server5 + '/set_value', {json: { + ref: `/staking/consensus/${server5Addr}/0/stake/${Date.now()}/value`, + value: 20, + nonce: -1 + }}).body.toString('utf-8')).result; + if (!(await waitUntilTxFinalized([server4], server5StakeRes.tx_hash))) { + console.error(`Failed to check finalization of server5's staking tx.`); + } + // 4. server5 added to validators & server4 is evicted + iterCount = 0; + lastBlock = getLastBlock(server1); + while (!lastBlock.validators[server5Addr]) { + if (iterCount >= MAX_ITERATION) { + console.log(`Iteration count exceeded its limit before server5 becomes a validator`); + assert.fail(`server5 is not included in validators`); + } + lastBlock = getLastBlock(server1); + assert.deepEqual(Object.keys(lastBlock.validators).length, MAX_NUM_VALIDATORS); + iterCount++; + await CommonUtil.sleep(200); + } + await waitForNewBlocks(server1, 1); + votes = parseOrLog(syncRequest( + 'GET', + `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` + ).body.toString('utf-8')).result; + assert.deepEqual(votes[server4Addr], undefined); + assert.deepEqual(votes[server5Addr].stake, 100020); + }); + }) +}); \ No newline at end of file diff --git a/unittest/test-util.js b/unittest/test-util.js index 76ed66e42..a522ff73a 100644 --- a/unittest/test-util.js +++ b/unittest/test-util.js @@ -147,6 +147,10 @@ async function setUpApp(appName, serverList, appConfig) { } } +function getLastBlock(server) { + return parseOrLog(syncRequest('GET', server + '/last_block').body.toString('utf-8')).result; +} + module.exports = { readConfigFile, setNodeForTesting, @@ -157,4 +161,5 @@ module.exports = { waitUntilNodeSyncs, parseOrLog, setUpApp, + getLastBlock, }; From 8ff3b06aca6765f4762bdadde2581450dc92a7cd Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 12 Jul 2021 14:08:49 +0900 Subject: [PATCH 028/175] Add validators format checking logic in block.js. --- blockchain/block.js | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/blockchain/block.js b/blockchain/block.js index 3c802f9e1..631004910 100644 --- a/blockchain/block.js +++ b/blockchain/block.js @@ -134,6 +134,20 @@ class Block { return true; } + static validateValidators(validators) { + if (!CommonUtil.isDict(validators)) return false; + for (const [address, info] of Object.entries(validators)) { + if (!CommonUtil.isCksumAddr(address)) { + return false; + } + if (!CommonUtil.isDict(info) || !CommonUtil.isNumber(info.stake) || + !CommonUtil.isBool(info.producing_right)) { + return false; + } + } + return true; + } + static validateProposedBlock(block) { const LOG_HEADER = 'validateProposedBlock'; @@ -157,14 +171,11 @@ class Block { } nonceTracker[tx.address] = tx.tx_body.nonce; } - - for (const validator of block.validators) { - if (!Block.validValidatorObject(validator)) { - logger.error( - `[${LOG_HEADER}] Invalid validators format: ${JSON.stringify(block.validators)} ` + - `(${block.number} / ${block.epoch})`); - return false; - } + if (!Block.validateValidators(block.validators)) { + logger.error( + `[${LOG_HEADER}] Invalid validators format: ${JSON.stringify(block.validators)} ` + + `(${block.number} / ${block.epoch})`); + return false; } logger.info(`[${LOG_HEADER}] Validated block: ${block.number} / ${block.epoch}`); From 47c9f668e78b0ff0dfd23fab64e1c60f3b57851e Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Mon, 12 Jul 2021 15:26:33 +0900 Subject: [PATCH 029/175] Fix consensus unit test. 
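For reference alongside this fix, a minimal sketch (illustration only, not part of the diff) of the validators format that the `Block.validateValidators()` check from the previous commit accepts. `addr` is assumed to be a checksum-cased address, e.g. a test node's account address as used in these tests:

```
const { Block } = require('../blockchain/block');

// Illustration only: an entry must be keyed by a checksum address and carry a
// numeric stake plus a boolean producing_right flag.
const addr = node2.account.address;
Block.validateValidators({
  [addr]: { stake: 100000, producing_right: true },
});  // -> true
Block.validateValidators({
  [addr]: { stake: '100000' },
});  // -> false (stake is not a number and the flag is missing)
```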
--- unittest/consensus.test.js | 42 ++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/unittest/consensus.test.js b/unittest/consensus.test.js index aae15be32..d2ca33dde 100644 --- a/unittest/consensus.test.js +++ b/unittest/consensus.test.js @@ -1,7 +1,12 @@ const chai = require('chai'); const expect = chai.expect; const rimraf = require('rimraf'); -const { CHAINS_DIR, MIN_STAKE_PER_VALIDATOR, MAX_STAKE_PER_VALIDATOR } = require('../common/constants'); +const { + CHAINS_DIR, + MIN_STAKE_PER_VALIDATOR, + MAX_STAKE_PER_VALIDATOR, + PredefinedDbPaths, +} = require('../common/constants'); const BlockchainNode = require('../node'); const { setNodeForTesting, getTransaction, addBlock } = require('./test-util') @@ -108,13 +113,20 @@ describe("Consensus", () => { it('Whitelisted validators must stake within MIN_STAKE_PER_VALIDATOR & MAX_STAKE_PER_VALIDATOR to have the producing rights', () => { let lastBlock = node1.bc.lastBlock(); const addr = node2.account.address; + // Bypass whitelist rule check (need owner's private key) + const tempDb = node1.createTempDb(node1.db.stateVersion, 'CONSENSUS_UNIT_TEST', lastBlock.number); + tempDb.writeDatabase( + [PredefinedDbPaths.VALUES_ROOT, PredefinedDbPaths.CONSENSUS, PredefinedDbPaths.WHITELIST, addr], + true); + node1.cloneAndFinalizeVersion(tempDb.stateVersion, -1); // Bypass already existing final state version // Staking less than MIN_STAKE_PER_VALIDATOR + let stakeAmount = MIN_STAKE_PER_VALIDATOR - 1; const stakeLessThanMin = getTransaction(node2, { operation: { type: 'SET_VALUE', ref: `/staking/consensus/${addr}/0/stake/key1/value`, - value: MIN_STAKE_PER_VALIDATOR - 1 + value: stakeAmount }, nonce: -1, gas_price: 1 @@ -124,9 +136,13 @@ describe("Consensus", () => { lastBlock = node1.bc.lastBlock(); const voteWithStakeLessThanMin = getTransaction(node2, { operation: { - type: 'SET_VALUE', - ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, - value: { block_hash: lastBlock.hash, stake: 100000 } + type: 'SET_VALUE', + ref: `/consensus/number/${lastBlock.number + 1}/propose/${addr}`, + value: { + number: lastBlock.number + 1, + proposer: addr, + gas_cost_total: 0 + } }, nonce: -1, gas_price: 1 @@ -150,8 +166,12 @@ describe("Consensus", () => { const voteWithStakeEqualMin = getTransaction(node2, { operation: { type: 'SET_VALUE', - ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, - value: { block_hash: lastBlock.hash, stake: 100000 } + ref: `/consensus/number/${lastBlock.number + 1}/propose/${addr}`, + value: { + number: lastBlock.number + 1, + proposer: addr, + gas_cost_total: 0 + } }, nonce: -1, gas_price: 1 @@ -175,8 +195,12 @@ describe("Consensus", () => { const voteWithStakeMoreThanMax = getTransaction(node2, { operation: { type: 'SET_VALUE', - ref: `/consensus/number/${lastBlock.number}/vote/${addr}`, - value: { block_hash: lastBlock.hash, stake: 100000 } + ref: `/consensus/number/${lastBlock.number + 1}/propose/${addr}`, + value: { + number: lastBlock.number + 1, + proposer: addr, + gas_cost_total: 0 + } }, nonce: -1, gas_price: 1 From 37e4d9e9951f6ecc8fc2e574271b60df0d4be109 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 12 Jul 2021 16:36:44 +0900 Subject: [PATCH 030/175] Add deploy_node() function to deploy to node one by one. 
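In outline, `deploy_node()` runs three gcloud commands per node: copy, optional one-time setup, and start. A condensed sketch of that flow is given below (the variables are the script's own, resolved from NODE_TARGET_ADDR_LIST / NODE_ZONE_LIST; see the full diff for details):

```
# Condensed sketch of deploy_node() for node $node_index.
gcloud compute scp --recurse $FILES_FOR_NODE ${node_target_addr}:~/ \
    --project $PROJECT_ID --zone $node_zone                       # 1. copy files
if [[ $OPTIONS = "--setup" ]]; then                                # 2. one-time VM setup
  gcloud compute ssh $node_target_addr --command '. setup_blockchain_ubuntu.sh' \
      --project $PROJECT_ID --zone $node_zone
fi
gcloud compute ssh $node_target_addr \
    --command ". start_node_incremental_gcp.sh $SEASON 0 $node_index $SYNC_MODE" \
    --project $PROJECT_ID --zone $node_zone                        # 3. start the node
```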
--- deploy_blockchain_incremental_gcp.sh | 193 ++++++++++----------------- start_node_incremental_gcp.sh | 4 +- 2 files changed, 72 insertions(+), 125 deletions(-) diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 08ea185fb..0daa6265e 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -56,142 +56,89 @@ fi FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" FILES_FOR_NODE="blockchain/ client/ common/ consensus/ db/ json_rpc/ genesis-configs/ logger/ node/ tx-pool/ p2p/ package.json setup_blockchain_ubuntu.sh start_node_incremental_gcp.sh" -TRACKER_TARGET_ADDR="${GCP_USER}@${SEASON}-tracker-taiwan" -NODE_0_TARGET_ADDR="${GCP_USER}@${SEASON}-node-0-taiwan" -NODE_1_TARGET_ADDR="${GCP_USER}@${SEASON}-node-1-oregon" -NODE_2_TARGET_ADDR="${GCP_USER}@${SEASON}-node-2-singapore" -NODE_3_TARGET_ADDR="${GCP_USER}@${SEASON}-node-3-iowa" -NODE_4_TARGET_ADDR="${GCP_USER}@${SEASON}-node-4-netherlands" - TRACKER_ZONE="asia-east1-b" -NODE_0_ZONE="asia-east1-b" -NODE_1_ZONE="us-west1-b" -NODE_2_ZONE="asia-southeast1-b" -NODE_3_ZONE="us-central1-a" -NODE_4_ZONE="europe-west4-a" - -# 1. Copy files to gcp -printf "\nDeploying parent blockchain...\n" -if [[ $RUN_MODE = "canary" ]]; then - printf "\nCopying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE -else - printf "\nCopying files to parent tracker (${TRACKER_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_TRACKER ${TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nCopying files to parent node 0 (${NODE_0_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nCopying files to parent node 1 (${NODE_1_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nCopying files to parent node 2 (${NODE_2_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE - printf "\nCopying files to parent node 3 (${NODE_3_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_3_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_3_ZONE - printf "\nCopying files to parent node 4 (${NODE_4_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${NODE_4_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_4_ZONE -fi - -# ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) -if [[ $OPTIONS = "--setup" ]]; then - if [[ $RUN_MODE = "canary" ]]; then - printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - else - printf "\n\n##########################\n# Setting up parent tracker #\n###########################\n\n" - gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Setting up parent node 0 #\n##########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". 
setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Setting up parent node 1 #\n##########################\n\n" - gcloud compute ssh $NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Setting up parent node 2 #\n##########################\n\n" - gcloud compute ssh $NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE - printf "\n\n##########################\n# Setting up parent node 3 #\n##########################\n\n" - gcloud compute ssh $NODE_3_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_3_ZONE - printf "\n\n##########################\n# Setting up parent node 4 #\n##########################\n\n" - gcloud compute ssh $NODE_4_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_4_ZONE +NODE_ZONE_LIST=( + "asia-east1-b" \ + "us-west1-b" \ + "asia-southeast1-b" \ + "us-central1-a" \ + "europe-west4-a") + +NUM_PARENT_NODES=5 +NUM_SHARD_NODES=3 + +function deploy_node() { + local node_index="$1" + local node_target_addr=${NODE_TARGET_ADDR_LIST[${node_index}]} + local node_zone=${NODE_ZONE_LIST[${node_index}]} + + printf "//////////////////////////\n" + printf "/ Deploying node $node_index /\n" + printf "//////////////////////////\n\n" + + printf "node_target_addr='$node_target_addr'\n" + printf "node_zone='$node_zone'\n" + + # 1. Copy files to gcp + printf "\n\n[[[[ Copying files for node $node_index ]]]]\n\n" + SCP_CMD="gcloud compute scp --recurse $FILES_FOR_NODE ${node_target_addr}:~/ --project $PROJECT_ID --zone $node_zone" + printf "SCP_CMD='$SCP_CMD'\n\n" + eval $SCP_CMD + + # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) + if [[ $OPTIONS = "--setup" ]]; then + printf "\n\n[[[[ Setting up node $node_index ]]]]\n\n" + SETUP_CMD="gcloud compute ssh $node_target_addr --command '. setup_blockchain_ubuntu.sh' --project $PROJECT_ID --zone $node_zone" + printf "SETUP_CMD='$SETUP_CMD'\n\n" + eval $SETUP_CMD fi -fi -# 2. Set up parent chain + # 2. Start node + printf "\n\n[[[[ Starting node $node_index ]]]]\n\n" + START_CMD="gcloud compute ssh $node_target_addr --command '. start_node_incremental_gcp.sh $SEASON 0 $node_index $SYNC_MODE' --project $PROJECT_ID --zone $node_zone" + printf "START_CMD='$START_CMD'\n\n" + eval $START_CMD +} + +printf "#################################\n" +printf "# Deploying parent blockchain #\n" +printf "########################################################################################\n\n" + +NODE_TARGET_ADDR_LIST=( + "${GCP_USER}@${SEASON}-node-0-taiwan" \ + "${GCP_USER}@${SEASON}-node-1-oregon" \ + "${GCP_USER}@${SEASON}-node-2-singapore" \ + "${GCP_USER}@${SEASON}-node-3-iowa" \ + "${GCP_USER}@${SEASON}-node-4-netherlands") + if [[ $RUN_MODE = "canary" ]]; then - printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE + deploy_node "0" else - printf "\n\n############################\n# Running parent tracker #\n############################\n\n" - gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . 
start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" - gcloud compute ssh $NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n#########################\n# Running parent node 1 #\n#########################\n\n" - gcloud compute ssh $NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 1 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n#########################\n# Running parent node 2 #\n#########################\n\n" - gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 2 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_2_ZONE - printf "\n\n#########################\n# Running parent node 3 #\n#########################\n\n" - gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 3 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_3_ZONE - printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" - gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON 0 4 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_4_ZONE + for j in `seq 0 $(( ${NUM_PARENT_NODES} - 1 ))` + do + deploy_node "$j" + done fi -# 3. Shards if [[ "$NUM_SHARDS" -gt 0 ]]; then - printf "\nDeploying shard blockchains...\n\n" for i in $(seq $NUM_SHARDS) do - printf "\nShard #$i\n\n" - - # generate genesis config files in ./blockchain/shard_$i - if [[ $OPTIONS = "--setup" ]]; then - node ./tools/generateShardGenesisFiles.js $SEASON 10 $i - fi - - SHARD_TRACKER_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-tracker-taiwan" - SHARD_NODE_0_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-0-taiwan" - SHARD_NODE_1_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-1-oregon" - SHARD_NODE_2_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-node-2-singapore" - - # deploy files to GCP instances - if [[ $RUN_MODE = "canary" ]]; then - printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - else - printf "\nDeploying files to shard_$i tracker (${SHARD_TRACKER_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_TRACKER ${SHARD_TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\nDeploying files to shard_$i node 0 (${SHARD_NODE_0_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_0_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\nDeploying files to shard_$i node 1 (${SHARD_NODE_1_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_1_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\nDeploying files to shard_$i node 2 (${SHARD_NODE_2_TARGET_ADDR})...\n\n" - gcloud compute scp --recurse $FILES_FOR_NODE ${SHARD_NODE_2_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $NODE_2_ZONE - fi + printf "###################################\n" + printf "# Deploying shard $i blockchain #\n" + printf "########################################################################################\n\n" - # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) - if [[ $OPTIONS = "--setup" ]]; then - if [[ $RUN_MODE = "canary" ]]; then - printf "\n\n##########################\n# 
Setting up shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - else - printf "\n\n###########################\n# Setting up shard_$i tracker #\n###########################\n\n" - gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Setting up shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". setup_blockchain_ubuntu.sh" --project $PROJECT_ID --zone $NODE_2_ZONE - fi - fi + NODE_TARGET_ADDR_LIST=( \ + "${GCP_USER}@${SEASON}-shard-${i}-node-0-taiwan" \ + "${GCP_USER}@${SEASON}-shard-${i}-node-1-oregon" \ + "${GCP_USER}@${SEASON}-shard-${i}-node-2-singapore") - # ssh into each instance, install packages and start up the server if [[ $RUN_MODE = "canary" ]]; then - printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE + deploy_node "0" else - printf "\n\n###########################\n# Running shard_$i tracker #\n###########################\n\n" - gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 0 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Running shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 1 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Running shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". start_node_incremental_gcp.sh $SEASON $i 2 $SYNC_MODE" --project $PROJECT_ID --zone $NODE_2_ZONE + for j in `seq 0 $(( ${NUM_SHARD_NODES} - 1 ))` + do + deploy_node "$j" + done fi - done + done fi diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index f57ba40e7..9a93d7897 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -137,14 +137,14 @@ cd $NEW_DIR_PATH npm install # 5. Kill old node process -printf "\n#### [Step 5] Kill old node process ####\n\n" +printf "\n#### [Step 5] Kill old node server ####\n\n" KILL_CMD="sudo killall node" printf "KILL_CMD='$KILL_CMD'\n\n" eval $KILL_CMD # 6. 
Start a new node process -printf "\n#### [Step 6] Start a new node process ####\n\n" +printf "\n#### [Step 6] Start new node server ####\n\n" sleep 10 MAX_OLD_SPACE_SIZE_MB=4000 From 3a8b9bc693f67baa938e8d0a7bffb3873c58c6f8 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 12 Jul 2021 17:02:07 +0900 Subject: [PATCH 031/175] Add deploy_tracker() function. --- deploy_blockchain_incremental_gcp.sh | 51 +++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 9 deletions(-) diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index 0daa6265e..d907a7282 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -56,6 +56,9 @@ fi FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" FILES_FOR_NODE="blockchain/ client/ common/ consensus/ db/ json_rpc/ genesis-configs/ logger/ node/ tx-pool/ p2p/ package.json setup_blockchain_ubuntu.sh start_node_incremental_gcp.sh" +NUM_PARENT_NODES=5 +NUM_SHARD_NODES=3 + TRACKER_ZONE="asia-east1-b" NODE_ZONE_LIST=( "asia-east1-b" \ @@ -64,17 +67,43 @@ NODE_ZONE_LIST=( "us-central1-a" \ "europe-west4-a") -NUM_PARENT_NODES=5 -NUM_SHARD_NODES=3 +function deploy_tracker() { + printf "*******************************************************************************\n" + printf "* Deploying tracker *\n" + printf "*******************************************************************************\n\n" + + printf "TRACKER_TARGET_ADDR='$TRACKER_TARGET_ADDR'\n" + printf "TRACKER_ZONE='$TRACKER_ZONE'\n" + + # 1. Copy files to gcp + printf "\n\n[[[[ Copying files for tracker ]]]]\n\n" + SCP_CMD="gcloud compute scp --recurse $FILES_FOR_TRACKER ${TRACKER_TARGET_ADDR}:~/ --project $PROJECT_ID --zone $TRACKER_ZONE" + printf "SCP_CMD='$SCP_CMD'\n\n" + eval $SCP_CMD + + # ssh into each instance, set up the ubuntu VM instance (ONLY NEEDED FOR THE FIRST TIME) + if [[ $OPTIONS = "--setup" ]]; then + printf "\n\n[[[[ Setting up tracker ]]]]\n\n" + SETUP_CMD="gcloud compute ssh $TRACKER_TARGET_ADDR --command '. setup_blockchain_ubuntu.sh' --project $PROJECT_ID --zone $TRACKER_ZONE" + printf "SETUP_CMD='$SETUP_CMD'\n\n" + eval $SETUP_CMD + fi + + # 2. Start tracker + printf "\n\n[[[[ Starting tracker ]]]]\n\n" + START_CMD="gcloud compute ssh $TRACKER_TARGET_ADDR --command '. setup_tracker_gcp.sh && . 
start_tracker_gcp.sh' --project $PROJECT_ID --zone $TRACKER_ZONE" + printf "START_CMD='$START_CMD'\n\n" + eval $START_CMD +} function deploy_node() { local node_index="$1" local node_target_addr=${NODE_TARGET_ADDR_LIST[${node_index}]} local node_zone=${NODE_ZONE_LIST[${node_index}]} - printf "//////////////////////////\n" - printf "/ Deploying node $node_index /\n" - printf "//////////////////////////\n\n" + printf "*******************************************************************************\n" + printf "* Deploying node $node_index *\n" + printf "*******************************************************************************\n\n" printf "node_target_addr='$node_target_addr'\n" printf "node_zone='$node_zone'\n" @@ -100,10 +129,11 @@ function deploy_node() { eval $START_CMD } -printf "#################################\n" +printf "###############################################################################\n" printf "# Deploying parent blockchain #\n" -printf "########################################################################################\n\n" +printf "###############################################################################\n\n" +TRACKER_TARGET_ADDR="${GCP_USER}@${SEASON}-tracker-taiwan" NODE_TARGET_ADDR_LIST=( "${GCP_USER}@${SEASON}-node-0-taiwan" \ "${GCP_USER}@${SEASON}-node-1-oregon" \ @@ -114,6 +144,7 @@ NODE_TARGET_ADDR_LIST=( if [[ $RUN_MODE = "canary" ]]; then deploy_node "0" else + deploy_tracker for j in `seq 0 $(( ${NUM_PARENT_NODES} - 1 ))` do deploy_node "$j" @@ -123,10 +154,11 @@ fi if [[ "$NUM_SHARDS" -gt 0 ]]; then for i in $(seq $NUM_SHARDS) do - printf "###################################\n" + printf "###############################################################################\n" printf "# Deploying shard $i blockchain #\n" - printf "########################################################################################\n\n" + printf "###############################################################################\n\n" + TRACKER_TARGET_ADDR="${GCP_USER}@${SEASON}-shard-${i}-tracker-taiwan" NODE_TARGET_ADDR_LIST=( \ "${GCP_USER}@${SEASON}-shard-${i}-node-0-taiwan" \ "${GCP_USER}@${SEASON}-shard-${i}-node-1-oregon" \ @@ -135,6 +167,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then if [[ $RUN_MODE = "canary" ]]; then deploy_node "0" else + deploy_tracker for j in `seq 0 $(( ${NUM_SHARD_NODES} - 1 ))` do deploy_node "$j" From b406fd62ebf8cf8c68fb60c7232a53bf02cb4d81 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 12 Jul 2021 19:30:16 +0900 Subject: [PATCH 032/175] Deploy tracker incrementally with start_tracker_incremental_gcp.sh. --- deploy_blockchain_incremental_gcp.sh | 18 +++--- start_node_incremental_gcp.sh | 14 +++-- start_tracker_incremental_gcp.sh | 85 ++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+), 16 deletions(-) create mode 100644 start_tracker_incremental_gcp.sh diff --git a/deploy_blockchain_incremental_gcp.sh b/deploy_blockchain_incremental_gcp.sh index d907a7282..37668edf1 100644 --- a/deploy_blockchain_incremental_gcp.sh +++ b/deploy_blockchain_incremental_gcp.sh @@ -53,7 +53,7 @@ if [[ ! 
$REPLY =~ ^[Yy]$ ]]; then [[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell fi -FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" +FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_blockchain_ubuntu.sh start_tracker_incremental_gcp.sh" FILES_FOR_NODE="blockchain/ client/ common/ consensus/ db/ json_rpc/ genesis-configs/ logger/ node/ tx-pool/ p2p/ package.json setup_blockchain_ubuntu.sh start_node_incremental_gcp.sh" NUM_PARENT_NODES=5 @@ -68,9 +68,9 @@ NODE_ZONE_LIST=( "europe-west4-a") function deploy_tracker() { - printf "*******************************************************************************\n" - printf "* Deploying tracker *\n" - printf "*******************************************************************************\n\n" + local num_nodes="$1" + + printf "\n* >> Deploying tracker ********************************************************\n\n" printf "TRACKER_TARGET_ADDR='$TRACKER_TARGET_ADDR'\n" printf "TRACKER_ZONE='$TRACKER_ZONE'\n" @@ -91,7 +91,7 @@ function deploy_tracker() { # 2. Start tracker printf "\n\n[[[[ Starting tracker ]]]]\n\n" - START_CMD="gcloud compute ssh $TRACKER_TARGET_ADDR --command '. setup_tracker_gcp.sh && . start_tracker_gcp.sh' --project $PROJECT_ID --zone $TRACKER_ZONE" + START_CMD="gcloud compute ssh $TRACKER_TARGET_ADDR --command '. start_tracker_incremental_gcp.sh $num_nodes' --project $PROJECT_ID --zone $TRACKER_ZONE" printf "START_CMD='$START_CMD'\n\n" eval $START_CMD } @@ -101,9 +101,7 @@ function deploy_node() { local node_target_addr=${NODE_TARGET_ADDR_LIST[${node_index}]} local node_zone=${NODE_ZONE_LIST[${node_index}]} - printf "*******************************************************************************\n" - printf "* Deploying node $node_index *\n" - printf "*******************************************************************************\n\n" + printf "\n* >> Deploying node $node_index *********************************************************\n\n" printf "node_target_addr='$node_target_addr'\n" printf "node_zone='$node_zone'\n" @@ -144,7 +142,7 @@ NODE_TARGET_ADDR_LIST=( if [[ $RUN_MODE = "canary" ]]; then deploy_node "0" else - deploy_tracker + deploy_tracker "$NUM_PARENT_NODES" for j in `seq 0 $(( ${NUM_PARENT_NODES} - 1 ))` do deploy_node "$j" @@ -167,7 +165,7 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then if [[ $RUN_MODE = "canary" ]]; then deploy_node "0" else - deploy_tracker + deploy_tracker "$NUM_SHARD_NODES" for j in `seq 0 $(( ${NUM_SHARD_NODES} - 1 ))` do deploy_node "$j" diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index 9a93d7897..e4b1944cb 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -136,14 +136,14 @@ printf "\n#### [Step 4] Install dependencies ####\n\n" cd $NEW_DIR_PATH npm install -# 5. Kill old node process +# 5. Kill old node server printf "\n#### [Step 5] Kill old node server ####\n\n" KILL_CMD="sudo killall node" printf "KILL_CMD='$KILL_CMD'\n\n" eval $KILL_CMD -# 6. Start a new node process +# 6. Start a new node server printf "\n#### [Step 6] Start new node server ####\n\n" sleep 10 @@ -153,8 +153,8 @@ START_CMD="nohup node --async-stack-traces --max-old-space-size=$MAX_OLD_SPACE_S printf "START_CMD='$START_CMD'\n" eval $START_CMD -# 7. 
Wait until the new node process catches up -printf "\n#### [Step 7] Wait until the new node process catches up ####\n\n" +# 7. Wait until the new node server catches up +printf "\n#### [Step 7] Wait until the new node server catches up ####\n\n" SECONDS=0 loopCount=0 @@ -162,7 +162,7 @@ loopCount=0 generate_post_data() { cat <\n" + printf "Example: sh start_tracker_incremental_gcp.sh 5\n" + exit +fi + +# 1. Configure env vars +printf "\n#### [Step 1] Configure env vars ####\n\n" + +NUM_NODES="$1" + +date=$(date '+%Y-%m-%dT%H:%M') +printf "date=$date\n" +NEW_DIR_PATH="../ain-blockchain-$date" +printf "NEW_DIR_PATH=$NEW_DIR_PATH\n" + +# 2. Get currently used directory +printf "\n#### [Step 2] Get currently used directory ####\n\n" + +OLD_DIR_PATH=$(find ../ain-blockchain* -maxdepth 0 -type d) +printf "OLD_DIR_PATH=$OLD_DIR_PATH\n" + +# 3. Create a new directory +printf "\n#### [Step 3] Create a new directory ####\n\n" + +MKDIR_CMD="sudo mkdir $NEW_DIR_PATH" +printf "MKDIR_CMD=$MKDIR_CMD\n" +eval $MKDIR_CMD + +sudo chmod 777 $NEW_DIR_PATH +mv * $NEW_DIR_PATH + +# 4. Install dependencies +printf "\n#### [Step 4] Install dependencies ####\n\n" + +cd $NEW_DIR_PATH +npm install + +# 5. Kill old tracker server +printf "\n#### [Step 5] Kill old tracker server ####\n\n" + +KILL_CMD="sudo killall node" +printf "KILL_CMD='$KILL_CMD'\n\n" +eval $KILL_CMD + +# 6. Start new tracker server +printf "\n#### [Step 6] Start new tracker server ####\n\n" + +sleep 10 +export CONSOLE_LOG=false + +START_CMD="nohup node --async-stack-traces tracker-server/index.js >/dev/null 2>error_logs.txt &" +printf "START_CMD='$START_CMD'\n" +eval $START_CMD + +# 7. Wait until the new tracker server catches up +printf "\n#### [Step 7] Wait until the new tracker server catches up ####\n\n" + +SECONDS=0 +loopCount=0 + +while : +do + numAliveNodes=$(curl -m 20 -X GET -H "Content-Type: application/json" "http://localhost:8080/network_status" | jq -r '.numAliveNodes') + printf "\nnumAliveNodes = ${numAliveNodes}\n" + if [[ "$numAliveNodes" = "$NUM_NODES" ]]; then + printf "\nBlockchain Tracker server is running!\n" + printf "\nTime it took to sync in seconds: $SECONDS\n" + break + fi + ((loopCount++)) + printf "\nLoop count: ${loopCount}\n" + sleep 20 +done + +# 8. Remove old directory keeping the chain data +printf "\n#### [Step 8] Remove old directory keeping the chain data ####\n\n" + +RM_CMD="sudo rm -rf $OLD_DIR_PATH" +printf "RM_CMD='$RM_CMD'\n" +eval $RM_CMD + +printf "\n* << Tracker server successfully deployed! ************************************\n\n" From 74f46002d3037bd28e292443419e75610c2baf28 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Mon, 12 Jul 2021 20:52:42 +0900 Subject: [PATCH 033/175] Merge setup stage to start stage. 
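With the setup commands (killing old processes, resetting /home/ain_blockchain_data and the working directory, npm install) folded into the start scripts, each machine is driven by a single ssh command, for example (variables as in the deploy script, values are placeholders):

```
# Illustration: one ssh command per machine now covers both setup and start.
gcloud compute ssh $NODE_0_TARGET_ADDR \
    --command ". start_node_gcp.sh $SEASON 0 0" \
    --project $PROJECT_ID --zone $NODE_0_ZONE
```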
--- deploy_blockchain_gcp.sh | 44 ++++++++++++++++++++-------------------- setup_node_gcp.sh | 17 ---------------- setup_tracker_gcp.sh | 20 ------------------ start_node_gcp.sh | 19 +++++++++++++++++ start_tracker_gcp.sh | 19 +++++++++++++++++ 5 files changed, 60 insertions(+), 59 deletions(-) delete mode 100644 setup_node_gcp.sh delete mode 100644 setup_tracker_gcp.sh diff --git a/deploy_blockchain_gcp.sh b/deploy_blockchain_gcp.sh index 02a55eb59..1040d83e4 100644 --- a/deploy_blockchain_gcp.sh +++ b/deploy_blockchain_gcp.sh @@ -39,8 +39,8 @@ then [[ "$0" = "$BASH_SOURCE" ]] && exit 1 || return 1 # handle exits from shell or function but don't exit interactive shell fi -FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_tracker_gcp.sh setup_blockchain_ubuntu.sh start_tracker_gcp.sh" -FILES_FOR_NODE="blockchain/ client/ common/ consensus/ db/ json_rpc/ genesis-configs/ logger/ node/ tx-pool/ p2p/ package.json setup_node_gcp.sh setup_blockchain_ubuntu.sh start_node_gcp.sh" +FILES_FOR_TRACKER="blockchain/ client/ common/ consensus/ db/ genesis-configs/ logger/ tracker-server/ package.json setup_blockchain_ubuntu.sh start_tracker_gcp.sh" +FILES_FOR_NODE="blockchain/ client/ common/ consensus/ db/ json_rpc/ genesis-configs/ logger/ node/ tx-pool/ p2p/ package.json setup_blockchain_ubuntu.sh start_node_gcp.sh" TRACKER_TARGET_ADDR="${GCP_USER}@${SEASON}-tracker-taiwan" NODE_0_TARGET_ADDR="${GCP_USER}@${SEASON}-node-0-taiwan" @@ -113,18 +113,18 @@ if [[ $OPTIONS = "--setup" ]]; then fi # ssh into each instance, install packages and start up the server -printf "\n\n############################\n# Running parent tracker #\n############################\n\n" -gcloud compute ssh $TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE -printf "\n\n###########################\n# Running parent node 0 #\n###########################\n\n" -gcloud compute ssh $NODE_0_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE -printf "\n\n#########################\n# Running parent node 1 #\n#########################\n\n" -gcloud compute ssh $NODE_1_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 1" --project $PROJECT_ID --zone $NODE_1_ZONE -printf "\n\n#########################\n# Running parent node 2 #\n#########################\n\n" -gcloud compute ssh $NODE_2_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 2" --project $PROJECT_ID --zone $NODE_2_ZONE -printf "\n\n#########################\n# Running parent node 3 #\n#########################\n\n" -gcloud compute ssh $NODE_3_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 3" --project $PROJECT_ID --zone $NODE_3_ZONE -printf "\n\n#########################\n# Running parent node 4 #\n#########################\n\n" -gcloud compute ssh $NODE_4_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE +printf "\n\n###########################\n# Starting parent tracker #\n###########################\n\n" +gcloud compute ssh $TRACKER_TARGET_ADDR --command ". start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE +printf "\n\n##########################\n# Starting parent node 0 #\n##########################\n\n" +gcloud compute ssh $NODE_0_TARGET_ADDR --command ". 
start_node_gcp.sh $SEASON 0 0" --project $PROJECT_ID --zone $NODE_0_ZONE +printf "\n\n##########################\n# Starting parent node 1 #\n##########################\n\n" +gcloud compute ssh $NODE_1_TARGET_ADDR --command ". start_node_gcp.sh $SEASON 0 1" --project $PROJECT_ID --zone $NODE_1_ZONE +printf "\n\n##########################\n# Starting parent node 2 #\n##########################\n\n" +gcloud compute ssh $NODE_2_TARGET_ADDR --command ". start_node_gcp.sh $SEASON 0 2" --project $PROJECT_ID --zone $NODE_2_ZONE +printf "\n\n##########################\n# Starting parent node 3 #\n##########################\n\n" +gcloud compute ssh $NODE_3_TARGET_ADDR --command ". start_node_gcp.sh $SEASON 0 3" --project $PROJECT_ID --zone $NODE_3_ZONE +printf "\n\n##########################\n# Starting parent node 4 #\n##########################\n\n" +gcloud compute ssh $NODE_4_TARGET_ADDR --command ". start_node_gcp.sh $SEASON 0 4" --project $PROJECT_ID --zone $NODE_4_ZONE if [[ "$NUM_SHARDS" -gt 0 ]]; then printf "\nDeploying shard blockchains..." @@ -165,13 +165,13 @@ if [[ "$NUM_SHARDS" -gt 0 ]]; then fi # ssh into each instance, install packages and start up the server - printf "\n\n###########################\n# Running shard_$i tracker #\n###########################\n\n" - gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". setup_tracker_gcp.sh && . start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE - printf "\n\n##########################\n# Running shard_$i node 0 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE - printf "\n\n##########################\n# Running shard_$i node 1 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON $i 1" --project $PROJECT_ID --zone $NODE_1_ZONE - printf "\n\n##########################\n# Running shard_$i node 2 #\n##########################\n\n" - gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". setup_node_gcp.sh && . start_node_gcp.sh $SEASON $i 2" --project $PROJECT_ID --zone $NODE_2_ZONE + printf "\n\n###########################\n# Starting shard_$i tracker #\n###########################\n\n" + gcloud compute ssh $SHARD_TRACKER_TARGET_ADDR --command ". start_tracker_gcp.sh" --project $PROJECT_ID --zone $TRACKER_ZONE + printf "\n\n##########################\n# Starting shard_$i node 0 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_0_TARGET_ADDR --command ". start_node_gcp.sh $SEASON $i 0" --project $PROJECT_ID --zone $NODE_0_ZONE + printf "\n\n##########################\n# Starting shard_$i node 1 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_1_TARGET_ADDR --command ". start_node_gcp.sh $SEASON $i 1" --project $PROJECT_ID --zone $NODE_1_ZONE + printf "\n\n##########################\n# Starting shard_$i node 2 #\n##########################\n\n" + gcloud compute ssh $SHARD_NODE_2_TARGET_ADDR --command ". start_node_gcp.sh $SEASON $i 2" --project $PROJECT_ID --zone $NODE_2_ZONE done fi diff --git a/setup_node_gcp.sh b/setup_node_gcp.sh deleted file mode 100644 index 4915bf23d..000000000 --- a/setup_node_gcp.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -echo 'Killing old jobs..' -sudo killall node - -echo 'Setting up working directory..' 
-cd -sudo rm -rf /home/ain_blockchain_data -sudo mkdir /home/ain_blockchain_data -sudo chmod 777 /home/ain_blockchain_data -sudo rm -rf ../ain-blockchain* -sudo mkdir ../ain-blockchain -sudo chmod 777 ../ain-blockchain -mv * ../ain-blockchain -cd ../ain-blockchain - -echo 'Installing node modules..' -npm install \ No newline at end of file diff --git a/setup_tracker_gcp.sh b/setup_tracker_gcp.sh deleted file mode 100644 index 1539eca38..000000000 --- a/setup_tracker_gcp.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/sh - -echo 'Killing jobs..' -killall node - - -echo 'Setting up working directory..' -cd -sudo rm -rf /home/ain_blockchain_data -sudo mkdir /home/ain_blockchain_data -sudo chmod 777 /home/ain_blockchain_data -sudo rm -rf ../ain-blockchain* -sudo mkdir ../ain-blockchain -sudo chmod 777 ../ain-blockchain -mv * ../ain-blockchain -cd ../ain-blockchain - - -echo 'Installing node modules..' -npm install diff --git a/start_node_gcp.sh b/start_node_gcp.sh index cb49f378d..00f4c44b6 100644 --- a/start_node_gcp.sh +++ b/start_node_gcp.sh @@ -6,6 +6,25 @@ if [[ "$#" -lt 2 ]]; then exit fi + +echo 'Killing old jobs..' +sudo killall node + + +echo 'Setting up working directory..' +cd +sudo rm -rf /home/ain_blockchain_data +sudo mkdir /home/ain_blockchain_data +sudo chmod 777 /home/ain_blockchain_data +sudo rm -rf ../ain-blockchain* +sudo mkdir ../ain-blockchain +sudo chmod 777 ../ain-blockchain +mv * ../ain-blockchain +cd ../ain-blockchain + + +echo 'Installing node modules..' +npm install export GENESIS_CONFIGS_DIR=genesis-configs/testnet if [[ "$1" = 'spring' ]]; then export TRACKER_WS_ADDR=ws://35.221.137.80:5000 diff --git a/start_tracker_gcp.sh b/start_tracker_gcp.sh index 263c3baaa..4023a9e9d 100644 --- a/start_tracker_gcp.sh +++ b/start_tracker_gcp.sh @@ -1,5 +1,24 @@ #!/bin/sh +echo 'Killing jobs..' +killall node + + +echo 'Setting up working directory..' +cd +sudo rm -rf /home/ain_blockchain_data +sudo mkdir /home/ain_blockchain_data +sudo chmod 777 /home/ain_blockchain_data +sudo rm -rf ../ain-blockchain* +sudo mkdir ../ain-blockchain +sudo chmod 777 ../ain-blockchain +mv * ../ain-blockchain +cd ../ain-blockchain + + +echo 'Installing node modules..' +npm install + export CONSOLE_LOG=false echo 'Starting up Blockchain Tracker server..' From e91667af7d3170795f584b5b9fc3454670042c00 Mon Sep 17 00:00:00 2001 From: Dongil Seo Date: Tue, 13 Jul 2021 10:35:48 +0900 Subject: [PATCH 034/175] Use health_check API in incremental deploy. 
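The sync-wait loop in start_node_incremental_gcp.sh now polls the node's /health_check endpoint instead of the net_consensusStatus JSON-RPC state. The probe reduces to the check below (command and flags taken from the diff):

```
# The node server is considered synced once /health_check returns "true".
healthCheck=$(curl -m 20 -X GET -H "Content-Type: application/json" "http://localhost:8080/health_check")
if [[ "$healthCheck" = "true" ]]; then
  printf "\nBlockchain Node server is synced & running!\n"
fi
```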
--- start_node_incremental_gcp.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/start_node_incremental_gcp.sh b/start_node_incremental_gcp.sh index e4b1944cb..79d35a637 100644 --- a/start_node_incremental_gcp.sh +++ b/start_node_incremental_gcp.sh @@ -168,11 +168,11 @@ EOF while : do - consensusStatus=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'net_consensusStatus')" "http://localhost:8080/json-rpc" | jq -r '.result.result.state') - printf "\nconsensusStatus = ${consensusStatus}\n" + healthCheck=$(curl -m 20 -X GET -H "Content-Type: application/json" "http://localhost:8080/health_check") + printf "\nhealthCheck = ${healthCheck}\n" lastBlockNumber=$(curl -m 20 -X POST -H "Content-Type: application/json" --data "$(generate_post_data 'ain_getRecentBlockNumber')" "http://localhost:8080/json-rpc" | jq -r '.result.result') printf "\nlastBlockNumber = ${lastBlockNumber}\n" - if [[ "$consensusStatus" = "RUNNING" ]] && [[ "$lastBlockNumber" -gt 0 ]]; then + if [[ "$healthCheck" = "true" ]]; then printf "\nBlockchain Node server is synced & running!\n" printf "\nTime it took to sync in seconds: $SECONDS\n" break From 27e40df06d180f53579e1f13c190874ae8b607ca Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 13 Jul 2021 10:39:35 +0900 Subject: [PATCH 035/175] Update a comment. --- consensus/index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/consensus/index.js b/consensus/index.js index 9343ccb20..34ed6d0ab 100644 --- a/consensus/index.js +++ b/consensus/index.js @@ -506,7 +506,7 @@ class Consensus { } const prevBlock = number > 1 ? prevBlockInfo.block : prevBlockInfo; - // Make sure we have at least MIN_NUM_VALIDATORS validators. + // Make sure we have validators within MIN_NUM_VALIDATORS and MAX_NUM_VALIDATORS. if (Object.keys(validators).length < MIN_NUM_VALIDATORS || Object.keys(validators).length > MAX_NUM_VALIDATORS) { logger.error( `[${LOG_HEADER}] Invalid validator set size (${JSON.stringify(validators)})\n` + From f7f620bf3172ef1db4d1a08883811e62ac0a0405 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 13 Jul 2021 10:40:17 +0900 Subject: [PATCH 036/175] Break ties by addresses. --- consensus/index.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/consensus/index.js b/consensus/index.js index 34ed6d0ab..e2059c59c 100644 --- a/consensus/index.js +++ b/consensus/index.js @@ -1000,7 +1000,8 @@ class Consensus { } } } - candidates = _.orderBy(candidates, ['stake', 'expireAt'], ['desc', 'desc']); // TODO(liayoo): How to do tie-breaking? + // NOTE(liayoo): tie-breaking by addresses as a temporary solution. + candidates = _.orderBy(candidates, ['stake', 'expireAt', 'address'], ['desc', 'desc', 'asc']); for (const candidate of candidates) { if (Object.keys(validators).length < MAX_NUM_VALIDATORS) { validators[candidate.address] = { stake: candidate.stake, producing_right: false }; From 794a130809c071c89db0e706a721b4010e2b41b3 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 13 Jul 2021 10:52:09 +0900 Subject: [PATCH 037/175] Define constants, producing_right -> proposal_right. 
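After this change a validator entry is keyed through PredefinedDbPaths and the producing_right flag is stored as proposal_right. A minimal sketch of the resulting shape (the stake value is only an example):

```
const { PredefinedDbPaths } = require('../common/constants');

// Illustration only: validator entry shape after this change.
const entry = {
  [PredefinedDbPaths.STAKE]: 100000,         // 'stake'
  [PredefinedDbPaths.PROPOSAL_RIGHT]: true,  // 'proposal_right' (was 'producing_right')
};
```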
--- blockchain/block.js | 6 +++--- common/constants.js | 6 +++++- consensus/index.js | 25 ++++++++++++++++++------- integration/consensus.test.js | 13 +++++++------ unittest/block-pool.test.js | 14 +++++++------- 5 files changed, 40 insertions(+), 24 deletions(-) diff --git a/blockchain/block.js b/blockchain/block.js index 631004910..0b1cad0fe 100644 --- a/blockchain/block.js +++ b/blockchain/block.js @@ -140,8 +140,8 @@ class Block { if (!CommonUtil.isCksumAddr(address)) { return false; } - if (!CommonUtil.isDict(info) || !CommonUtil.isNumber(info.stake) || - !CommonUtil.isBool(info.producing_right)) { + if (!CommonUtil.isDict(info) || !CommonUtil.isNumber(info[PredefinedDbPaths.STAKE]) || + !CommonUtil.isBool(info[PredefinedDbPaths.PROPOSAL_RIGHT])) { return false; } } @@ -294,7 +294,7 @@ class Block { operation: { type: 'SET_VALUE', ref: PathUtil.getStakingStakeRecordValuePath(PredefinedDbPaths.CONSENSUS, address, 0, timestamp), - value: info.stake + value: info[PredefinedDbPaths.STAKE] } }; txs.push(Transaction.fromTxBody(txBody, privateKey)); diff --git a/common/constants.js b/common/constants.js index 9bc6150f5..6d1532b66 100644 --- a/common/constants.js +++ b/common/constants.js @@ -130,6 +130,7 @@ const PredefinedDbPaths = { VOTE: 'vote', BLOCK_HASH: 'block_hash', STAKE: 'stake', + PROPOSAL_RIGHT: 'proposal_right', // Gas fee GAS_FEE: 'gas_fee', COLLECT: 'collect', @@ -535,7 +536,10 @@ function overwriteGenesisParams(overwritingParams, type) { for (let i = 0; i < GenesisParams.consensus.MIN_NUM_VALIDATORS; i++) { const addr = GenesisAccounts[AccountProperties.OTHERS][i][AccountProperties.ADDRESS]; CommonUtil.setJsObject(whitelist, [addr], true); - CommonUtil.setJsObject(validators, [addr], { stake: GenesisParams.consensus.MIN_STAKE_PER_VALIDATOR, producing_right: true }); + CommonUtil.setJsObject(validators, [addr], { + [PredefinedDbPaths.STAKE]: GenesisParams.consensus.MIN_STAKE_PER_VALIDATOR, + [PredefinedDbPaths.PROPOSAL_RIGHT]: true + }); } GenesisParams.consensus.GENESIS_WHITELIST = whitelist; GenesisParams.consensus.GENESIS_VALIDATORS = validators; diff --git a/consensus/index.js b/consensus/index.js index e2059c59c..8c031015c 100644 --- a/consensus/index.js +++ b/consensus/index.js @@ -387,7 +387,10 @@ class Consensus { if (whitelist[address] === true) { const stake = tempDb.getValue(PathUtil.getConsensusStakingAccountBalancePath(address)); if (stake && MIN_STAKE_PER_VALIDATOR <= stake && stake <= MAX_STAKE_PER_VALIDATOR) { - validators[address] = { stake, producing_right: true }; + validators[address] = { + [PredefinedDbPaths.STAKE]: stake, + [PredefinedDbPaths.PROPOSAL_RIGHT]: true + }; } } } @@ -401,7 +404,7 @@ class Consensus { throw Error(`Not enough validators: ${JSON.stringify(validators)}`); } const totalAtStake = Object.values(validators).reduce((acc, cur) => { - return acc + cur.stake; + return acc + cur[PredefinedDbPaths.STAKE]; }, 0); const stateProofHash = LIGHTWEIGHT ? 
'' : tempDb.getStateProof('/')[ProofProperties.PROOF_HASH]; const proposalBlock = Block.create( @@ -989,7 +992,10 @@ class Consensus { if (stake) { if (whitelist[address] === true) { if (MIN_STAKE_PER_VALIDATOR <= stake && stake <= MAX_STAKE_PER_VALIDATOR) { - validators[address] = { stake, producing_right: true }; + validators[address] = { + [PredefinedDbPaths.STAKE]: stake, + [PredefinedDbPaths.PROPOSAL_RIGHT]: true + }; } } else { candidates.push({ @@ -1004,7 +1010,10 @@ class Consensus { candidates = _.orderBy(candidates, ['stake', 'expireAt', 'address'], ['desc', 'desc', 'asc']); for (const candidate of candidates) { if (Object.keys(validators).length < MAX_NUM_VALIDATORS) { - validators[candidate.address] = { stake: candidate.stake, producing_right: false }; + validators[candidate.address] = { + [PredefinedDbPaths.STAKE]: candidate.stake, + [PredefinedDbPaths.PROPOSAL_RIGHT]: false + }; } else { break; } @@ -1225,17 +1234,19 @@ class Consensus { static selectProposer(seed, validators) { const LOG_HEADER = 'selectProposer'; logger.debug(`[${LOG_HEADER}] seed: ${seed}, validators: ${JSON.stringify(validators)}`); - const validatorsWithProducingRights = _.pickBy(validators, (x) => _.get(x, 'producing_right') === true); + const validatorsWithProducingRights = _.pickBy(validators, (x) => { + return _.get(x, PredefinedDbPaths.PROPOSAL_RIGHT) === true; + }); const alphabeticallyOrderedValidators = Object.keys(validatorsWithProducingRights).sort(); const totalAtStake = Object.values(validatorsWithProducingRights).reduce((acc, cur) => { - return acc + cur.stake; + return acc + cur[PredefinedDbPaths.STAKE]; }, 0); const randomNumGenerator = seedrandom(seed); const targetValue = randomNumGenerator() * totalAtStake; let cumulative = 0; for (let i = 0; i < alphabeticallyOrderedValidators.length; i++) { const addr = alphabeticallyOrderedValidators[i]; - cumulative += validatorsWithProducingRights[addr].stake; + cumulative += validatorsWithProducingRights[addr][PredefinedDbPaths.STAKE]; if (cumulative > targetValue) { logger.info(`Proposer is ${addr}`); return addr; diff --git a/integration/consensus.test.js b/integration/consensus.test.js index d8edb696f..3f6257c90 100644 --- a/integration/consensus.test.js +++ b/integration/consensus.test.js @@ -11,6 +11,7 @@ const syncRequest = require('sync-request'); const { CURRENT_PROTOCOL_VERSION, CHAINS_DIR, + PredefinedDbPaths, } = require('../common/constants'); const CommonUtil = require('../common/common-util'); const MAX_ITERATION = 200; @@ -187,13 +188,13 @@ describe('Consensus', () => { iterCount++; await CommonUtil.sleep(200); } - assert.deepEqual(lastBlock.validators[server4Addr].producing_right, false); + assert.deepEqual(lastBlock.validators[server4Addr][PredefinedDbPaths.PROPOSAL_RIGHT], false); await waitForNewBlocks(server1, 1); const server4Voted = parseOrLog(syncRequest( 'GET', `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote/${server4Addr}` ).body.toString('utf-8')).result; - assert.deepEqual(server4Voted.stake, 100000); + assert.deepEqual(server4Voted[PredefinedDbPaths.STAKE], 100000); // 3. 
server5 stakes 100000 const server5StakeRes = parseOrLog(syncRequest('POST', server5 + '/set_value', {json: { ref: `/staking/consensus/${server5Addr}/0/stake/${Date.now()}/value`, @@ -216,14 +217,14 @@ describe('Consensus', () => { iterCount++; await CommonUtil.sleep(200); } - assert.deepEqual(lastBlock.validators[server5Addr].producing_right, false); + assert.deepEqual(lastBlock.validators[server5Addr][PredefinedDbPaths.PROPOSAL_RIGHT], false); await waitForNewBlocks(server1, 1); const votes = parseOrLog(syncRequest( 'GET', `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` ).body.toString('utf-8')).result; assert.deepEqual(votes[server4Addr], undefined); - assert.deepEqual(votes[server5Addr].stake, 100000); + assert.deepEqual(votes[server5Addr][PredefinedDbPaths.STAKE], 100000); }); it('When more than MAX_NUM_VALIDATORS validators exist, validatators with bigger stakes get prioritized', async () => { @@ -254,7 +255,7 @@ describe('Consensus', () => { `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` ).body.toString('utf-8')).result; assert.deepEqual(votes[server5Addr], undefined); - assert.deepEqual(votes[server4Addr].stake, 100010); + assert.deepEqual(votes[server4Addr][PredefinedDbPaths.STAKE], 100010); // 3. server5 stakes 20 more AIN const server5StakeRes = parseOrLog(syncRequest('POST', server5 + '/set_value', {json: { ref: `/staking/consensus/${server5Addr}/0/stake/${Date.now()}/value`, @@ -283,7 +284,7 @@ describe('Consensus', () => { `${server1}/get_value?ref=/consensus/number/${lastBlock.number}/vote` ).body.toString('utf-8')).result; assert.deepEqual(votes[server4Addr], undefined); - assert.deepEqual(votes[server5Addr].stake, 100020); + assert.deepEqual(votes[server5Addr][PredefinedDbPaths.STAKE], 100020); }); }) }); \ No newline at end of file diff --git a/unittest/block-pool.test.js b/unittest/block-pool.test.js index 055651d28..ef65a2956 100644 --- a/unittest/block-pool.test.js +++ b/unittest/block-pool.test.js @@ -2,7 +2,7 @@ const chai = require('chai'); const expect = chai.expect; const rimraf = require('rimraf'); const assert = chai.assert; -const { CHAINS_DIR } = require('../common/constants'); +const { CHAINS_DIR, PredefinedDbPaths } = require('../common/constants'); const BlockPool = require('../consensus/block-pool'); const BlockchainNode = require('../node'); const { Block } = require('../blockchain/block'); @@ -25,7 +25,7 @@ describe("BlockPool", () => { function createAndAddBlock(node, blockPool, lastBlock, number, epoch) { const block = Block.create( lastBlock.hash, [], [], number, epoch, '', node.account.address, - {[node.account.address]: { stake: 100000, producing_right: true } }, 0, 0); + {[node.account.address]: { [PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true } }, 0, 0); const proposal = getTransaction(node, { operation: { type: 'SET_VALUE', @@ -33,7 +33,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: { [node.account.address]: { stake: 100000, producing_right: true } }, + validators: { [node.account.address]: { [PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true } }, total_at_stake: 100000, proposer: node.account.address, block_hash: block.hash @@ -66,7 +66,7 @@ describe("BlockPool", () => { const addr = node1.account.address; const block = Block.create( lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, - {[addr]: { stake: 100000, producing_right: true }}, 0, 0); + {[addr]: { 
[PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true }}, 0, 0); const proposalTx = getTransaction(node1, { operation: { type: 'SET_VALUE', @@ -74,7 +74,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: {[addr]: { stake: 100000, producing_right: true } }, + validators: {[addr]: { [PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true } }, total_at_stake: 100000, proposer: addr, block_hash: block.hash @@ -95,7 +95,7 @@ describe("BlockPool", () => { const lastBlock = node1.bc.lastBlock(); const block = Block.create( lastBlock.hash, [], [], lastBlock.number + 1, lastBlock.epoch + 1, '', addr, - {[addr]: { stake: 100000, producing_right: true }}, 0, 0); + {[addr]: { [PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true }}, 0, 0); const proposalTx = getTransaction(node1, { operation: { type: 'SET_VALUE', @@ -103,7 +103,7 @@ describe("BlockPool", () => { value: { number: block.number, epoch: block.epoch, - validators: {[addr]: { stake: 100000, producing_right: true } }, + validators: {[addr]: { [PredefinedDbPaths.STAKE]: 100000, [PredefinedDbPaths.PROPOSAL_RIGHT]: true } }, total_at_stake: 100000, proposer: addr, block_hash: block.hash From b754ee7811dddb09874a06ef78a75c0d71e48bc3 Mon Sep 17 00:00:00 2001 From: Lia Yoo Date: Tue, 13 Jul 2021 11:11:09 +0900 Subject: [PATCH 038/175] Refactor get path functions in rule-util.js. --- db/rule-util.js | 23 ++++++++++++++++++----- genesis-configs/base/genesis_rules.json | 10 +++++----- 2 files changed, 23 insertions(+), 10 deletions(-) diff --git a/db/rule-util.js b/db/rule-util.js index c1aa5d868..4f55115a6 100644 --- a/db/rule-util.js +++ b/db/rule-util.js @@ -130,10 +130,11 @@ class RuleUtil { return parsed[1]; } - getAppAdminPath(accountName) { + isAppAdmin(accountName, address, getValue) { const { PredefinedDbPaths } = require('../common/constants'); const appName = this.getServiceNameFromServAcntName(accountName); - return `/${PredefinedDbPaths.MANAGE_APP}/${appName}/${PredefinedDbPaths.MANAGE_APP_CONFIG}/${PredefinedDbPaths.MANAGE_APP_CONFIG_ADMIN}`; + return getValue(`/${PredefinedDbPaths.MANAGE_APP}/${appName}/${PredefinedDbPaths.MANAGE_APP_CONFIG}/` + + `${PredefinedDbPaths.MANAGE_APP_CONFIG_ADMIN}/${address}`); } getBalancePath(addrOrServAcnt) { @@ -146,14 +147,26 @@ class RuleUtil { } } - getBillingUserPath(billingServAcntName, userAddr) { + getBalance(addrOrServAcnt, getValue) { + return getValue(this.getBalancePath(addrOrServAcnt)); + } + + isBillingUser(billingServAcntName, userAddr, getValue) { const { PredefinedDbPaths } = require('../common/constants'); const parsed = this.parseServAcntName(billingServAcntName); const appName = parsed[1]; const billingId = parsed[2]; - return `/${PredefinedDbPaths.MANAGE_APP}/${appName}/${PredefinedDbPaths.MANAGE_APP_CONFIG}/` + + return getValue( + `/${PredefinedDbPaths.MANAGE_APP}/${appName}/${PredefinedDbPaths.MANAGE_APP_CONFIG}/` + `${PredefinedDbPaths.MANAGE_APP_CONFIG_BILLING}/${billingId}/` + - `${PredefinedDbPaths.MANAGE_APP_CONFIG_BILLING_USERS}/${userAddr}`; + `${PredefinedDbPaths.MANAGE_APP_CONFIG_BILLING_USERS}/${userAddr}`); + } + + getConsensusStakeBalance(address, getValue) { + const { PredefinedDbPaths } = require('../common/constants'); + return getValue( + `/${PredefinedDbPaths.SERVICE_ACCOUNTS}/${PredefinedDbPaths.STAKING}/` + + `${PredefinedDbPaths.CONSENSUS}/${address}|0/${PredefinedDbPaths.BALANCE}`); } getOwnerAddr() { diff --git 
diff --git a/genesis-configs/base/genesis_rules.json b/genesis-configs/base/genesis_rules.json
index f28d895c0..cd258d5f7 100644
--- a/genesis-configs/base/genesis_rules.json
+++ b/genesis-configs/base/genesis_rules.json
@@ -32,7 +32,7 @@
           },
           "vote": {
             "$user_addr": {
-              ".write": "auth.addr === $user_addr && util.isDict(newData) && util.isString(newData.block_hash) && util.isNumber(newData.stake) && (lastBlockNumber < 1 || getValue('/service_accounts/staking/consensus/' + auth.addr + '|0/balance') === newData.stake)"
+              ".write": "auth.addr === $user_addr && util.isDict(newData) && util.isString(newData.block_hash) && util.isNumber(newData.stake) && (lastBlockNumber < 1 || util.getConsensusStakeBalance(auth.addr, getValue) === newData.stake)"
             }
           }
         }
@@ -52,7 +52,7 @@
           },
           "hold": {
             "$record_id": {
-              ".write": "((util.isServAcntName($source_account) && getValue(util.getAppAdminPath($source_account) + '/' + auth.addr) === true) || (util.isCksumAddr($source_account) && $source_account === auth.addr)) && getValue('/escrow/' + $source_account + '/' + $target_account + '/' + $escrow_key + '/config') !== null && data === null && util.isDict(newData)",
+              ".write": "((util.isServAcntName($source_account) && util.isAppAdmin($source_account, auth.addr, getValue) === true) || (util.isCksumAddr($source_account) && $source_account === auth.addr)) && getValue('/escrow/' + $source_account + '/' + $target_account + '/' + $escrow_key + '/config') !== null && data === null && util.isDict(newData)",
              "result": {
                ".write": "auth.fid === '_hold'"
              }
@@ -75,7 +75,7 @@
       "$from": {
         "$block_number": {
           "$tx_hash": {
-            ".write": "(auth.addr === $from || (util.isServAcntName($from) && getValue(util.getBillingUserPath($from, auth.addr)) === true)) && data === null && util.isDict(newData) && util.isNumber(newData.amount) && newData.amount <= getValue(util.getBalancePath($from))"
+            ".write": "(auth.addr === $from || (util.isServAcntName($from) && util.isBillingUser($from, auth.addr, getValue) === true)) && data === null && util.isDict(newData) && util.isNumber(newData.amount) && newData.amount <= util.getBalance($from, getValue)"
           }
         }
       }
@@ -162,7 +162,7 @@
         "unstake": {
           "$record_id": {
             "value": {
-              ".write": "$user_addr === auth.addr && data === null && util.isNumber(newData) && newData > 0 && newData <= getValue(util.getBalancePath(util.toServiceAccountName('staking', $service_name, $user_addr + '|' + $staking_key)))"
+              ".write": "$user_addr === auth.addr && data === null && util.isNumber(newData) && newData > 0 && newData <= util.getBalance(util.toServiceAccountName('staking', $service_name, $user_addr + '|' + $staking_key), getValue)"
             },
             "result": {
               ".write": "auth.fid === '_unstake'"
@@ -178,7 +178,7 @@
       "$to": {
         "$key": {
           "value": {
-            ".write": "(auth.addr === $from || auth.fid === '_stake' || auth.fid === '_unstake' || auth.fid === '_pay' || auth.fid === '_claim' || auth.fid === '_hold' || auth.fid === '_release' || auth.fid === '_collectFee' || auth.fid === '_distributeFee') && !getValue('transfer/' + $from + '/' + $to + '/' + $key) && (util.isServAcntName($from) || util.isCksumAddr($from)) && (util.isServAcntName($to) || util.isCksumAddr($to)) && $from !== $to && util.isNumber(newData) && getValue(util.getBalancePath($from)) >= newData"
+            ".write": "(auth.addr === $from || auth.fid === '_stake' || auth.fid === '_unstake' || auth.fid === '_pay' || auth.fid === '_claim' || auth.fid === '_hold' || auth.fid === '_release' || auth.fid === '_collectFee' || auth.fid === '_distributeFee') && !getValue('transfer/' + $from + '/' + $to + '/' + $key) && (util.isServAcntName($from) || util.isCksumAddr($from)) && (util.isServAcntName($to) || util.isCksumAddr($to)) && $from !== $to && util.isNumber(newData) && util.getBalance($from, getValue) >= newData"
           },
           "result": {
             ".write": "auth.fid === '_transfer'"
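The rule strings above are where the refactor pays off: path concatenation at the rule site is replaced by a single helper call. A sketch of the transfer balance check written both ways, again assuming `db/rule-util.js` exports the `RuleUtil` class; `$from`, `newData`, and `getValue` stand in for what the rule engine supplies at evaluation time and are not code from the patch:
```
const RuleUtil = require('./db/rule-util');

const util = new RuleUtil();

function transferAmountAllowed($from, newData, getValue) {
  // Old form: build the sender's balance path, then read it at the rule site.
  // return util.isNumber(newData) && getValue(util.getBalancePath($from)) >= newData;

  // New form: the helper takes getValue and reads the balance itself.
  return util.isNumber(newData) && util.getBalance($from, getValue) >= newData;
}

// Example call with a stubbed getValue that always reports a balance of 500.
console.log(transferAmountAllowed('0xSomeCksumAddress', 100, () => 500));  // true
```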
From 46da36e9a5bf1f904cfc59a814b2087549810daf Mon Sep 17 00:00:00 2001
From: Dongil Seo
Date: Tue, 13 Jul 2021 11:58:16 +0900
Subject: [PATCH 039/175] Add _genesis suffix to genesis deploy scripts. Add
 _local suffix to local scripts.

---
 README.md                                     |  4 ++--
 ...gcp.sh => deploy_blockchain_genesis_gcp.sh | 24 +++++++++----------
 reset_blockchain_gcp.sh                       | 20 ++++++++--------
 ...t_node_gcp.sh => start_node_genesis_gcp.sh |  4 ++--
 ...ers_afan.sh => start_servers_afan_local.sh |  0
 start_servers.sh => start_servers_local.sh    |  0
 ...ker_gcp.sh => start_tracker_genesis_gcp.sh |  1 +
 stop_servers.sh => stop_servers_local.sh      |  0
 8 files changed, 27 insertions(+), 26 deletions(-)
 rename deploy_blockchain_gcp.sh => deploy_blockchain_genesis_gcp.sh (92%)
 rename start_node_gcp.sh => start_node_genesis_gcp.sh (96%)
 rename start_servers_afan.sh => start_servers_afan_local.sh (100%)
 rename start_servers.sh => start_servers_local.sh (100%)
 rename start_tracker_gcp.sh => start_tracker_genesis_gcp.sh (99%)
 rename stop_servers.sh => stop_servers_local.sh (100%)

diff --git a/README.md b/README.md
index 061c3f045..fd2e4b1ea 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,7 @@ source setup_tracker_gcp.sh
 - Start tracker server job
 ```
 cd ain-blockchain/
-sh start_tracker_gcp.sh
+sh start_tracker_genesis_gcp.sh
 ```