Merge pull request #14 from initia-labs/fix/convention
fix convention in lib and lintjob
JSHan94 authored Jan 3, 2024
2 parents 3093c16 + 8ce89e2 commit f29ef62
Showing 11 changed files with 1,100 additions and 707 deletions.
1,717 changes: 1,068 additions & 649 deletions bots/package-lock.json

Large diffs are not rendered by default.

7 changes: 4 additions & 3 deletions bots/package.json
@@ -47,12 +47,13 @@
"@testcontainers/postgresql": "^10.2.1",
"@types/chalk": "^2.2.0",
"@types/jest": "^27.5.2",
"@types/koa": "^2.13.12",
"@types/node": "^15.14.9",
"@typescript-eslint/eslint-plugin": "^4.7.0",
"@typescript-eslint/parser": "^4.7.0",
"@typescript-eslint/eslint-plugin": "^6",
"@typescript-eslint/parser": "^6",
"concurrently": "^8.2.1",
"docker-compose": "^0.24.2",
"eslint": "^7.13.0",
"eslint": "^8",
"husky": "^8.0.3",
"jest": "^27.5.1",
"lint-staged": "^10.5.1",
4 changes: 2 additions & 2 deletions bots/src/lib/compressor.ts
@@ -1,14 +1,14 @@
import * as pako from 'pako';

// compress tx data to submit L1
-export function compressor(input: string[]): Buffer {
+export function compress(input: string[]): Buffer {
const recordsWithCommas = input.join(',');
const recordsBuffer = Buffer.from(recordsWithCommas);
return pako.gzip(recordsBuffer);
}

// decompress indexed batch data
-export function decompressor(input: Buffer): string[] {
+export function decompress(input: Buffer): string[] {
const decompressed = pako.inflate(input);
const output: string = Buffer.from(decompressed).toString();
return output.split(',');
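
For reference, a minimal round-trip sketch of the renamed helpers; the 'lib/compressor' specifier assumes the same non-relative import paths used elsewhere in this diff:

import { compress, decompress } from 'lib/compressor';

// compress comma-joins the records and gzips them with pako, so the
// round trip only holds when individual records contain no commas.
const records = ['rawTx1', 'rawTx2', 'rawTx3'];
const batch = compress(records);
const restored = decompress(batch);
// restored deep-equals records
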
8 changes: 4 additions & 4 deletions bots/src/lib/error.ts
@@ -61,9 +61,9 @@ export class APIError extends Error {
}

export function errorHandler(
-callback: (ctx, type: string, code?: string, message?: string) => void
+callback: (ctx: any, type: string, code?: string, message?: string) => void
) {
-return async (ctx, next) => {
+return async (ctx: any, next: any) => {
try {
await next();
} catch (err) {
@@ -78,7 +78,7 @@

sentry.withScope((scope) => {
scope.addEventProcessor((event) =>
-sentry.Handlers.parseRequest(event, ctx.request)
+sentry.addRequestDataToEvent(event, ctx.request)
);
sentry.captureException(errForThrow);
});
@@ -90,7 +90,7 @@
} else {
sentry.withScope((scope) => {
scope.addEventProcessor((event) =>
-sentry.Handlers.parseRequest(event, ctx.request)
+sentry.addRequestDataToEvent(event, ctx.request)
);
sentry.captureException(err);
});
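
The sentry change tracks newer @sentry/node releases, where Handlers.parseRequest was dropped in favor of addRequestDataToEvent. Below is a hypothetical Koa wiring of errorHandler, inferred only from the callback signature above; the repo's actual callback is outside this diff:

import Koa from 'koa';
import { errorHandler } from 'lib/error';

const app = new Koa();

// Hypothetical callback: presumably invoked by errorHandler to shape the
// HTTP error response, while the middleware itself reports to Sentry.
app.use(
  errorHandler((ctx, type, code, message) => {
    ctx.status = 400;
    ctx.body = { type, code, message };
  })
);
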
4 changes: 1 addition & 3 deletions bots/src/lib/query.ts
@@ -1,7 +1,5 @@
import {
BridgeInfo,
-Coin,
-LCDClient,
OutputInfo,
TokenPair
} from '@initia/initia.js';
@@ -19,7 +17,7 @@ import axios from 'axios';
export async function getLastOutputInfo(
bridgeId: number
): Promise<OutputInfo | null> {
-const [outputInfos, _pagination] = await config.l1lcd.ophost.outputInfos(
+const [outputInfos] = await config.l1lcd.ophost.outputInfos(
bridgeId,
{
'pagination.limit': '1',
1 change: 0 additions & 1 deletion bots/src/lib/tx.ts
@@ -1,4 +1,3 @@
-import { delay } from 'bluebird';
import {
LCDClient,
Msg,
50 changes: 13 additions & 37 deletions bots/src/lib/util.ts
@@ -1,13 +1,10 @@
import { SHA3 } from 'sha3';
-import { sha256 } from '@initia/initia.js';

-export function sha3_256(value: Buffer | string | number) {
-value = toBuffer(value);
-
-return new SHA3(256).update(value as Buffer).digest();
+export function sha3_256(value: Buffer | string | number): Buffer {
+return new SHA3(256).update(toBuffer(value)).digest();
}

-function toBuffer(value: any) {
+function toBuffer(value: any): Buffer {
if (!Buffer.isBuffer(value)) {
if (Array.isArray(value)) {
value = Buffer.from(value);
@@ -18,7 +15,7 @@ function toBuffer(value: any) {
value = Buffer.from(value);
}
} else if (typeof value === 'number') {
-value = intToBuffer(value);
+value = numberToBuffer(value);
} else if (value === null || value === undefined) {
value = Buffer.allocUnsafe(0);
} else if (value.toArray) {
@@ -32,8 +29,8 @@ function toBuffer(value: any) {
return value;
}

-function isHexString(value: any, length?: number) {
-if (typeof value !== 'string' || !value.match(/^0x[0-9A-Fa-f]*$/)) {
+function isHexString(value: string, length?: number): boolean {
+if (!value.match(/^0x[0-9A-Fa-f]*$/)) {
return false;
}

@@ -44,46 +41,25 @@ function isHexString(value: any, length?: number) {
return true;
}

-function padToEven(value: any) {
-if (typeof value !== 'string') {
-throw new Error(
-`while padding to even, value must be string, is currently ${typeof value}, while padToEven.`
-);
-}
-
+function padToEven(value: string): string {
if (value.length % 2) {
value = `0${value}`;
}

return value;
}

-function stripHexPrefix(value: any) {
-if (typeof value !== 'string') {
-return value;
-}
-
+function stripHexPrefix(value: string): string {
return isHexPrefixed(value) ? value.slice(2) : value;
}

-function isHexPrefixed(value: any) {
-if (typeof value !== 'string') {
-throw new Error(
-"value must be type 'string', is currently type " +
-typeof value +
-', while checking isHexPrefixed.'
-);
-}
-
+function isHexPrefixed(value: string): boolean {
return value.slice(0, 2) === '0x';
}

-function intToBuffer(i: number) {
-const hex = intToHex(i);
-return Buffer.from(padToEven(hex.slice(2)), 'hex');
+function numberToBuffer(i: number): Buffer {
+return Buffer.from(padToEven(numberToHexString(i).slice(2)), 'hex');
}

-function intToHex(i: number) {
-const hex = i.toString(16);
-return `0x${hex}`;
+function numberToHexString(i: number): string {
+return `0x${i.toString(16)}`;
}
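
A short usage sketch of the exported helper (the renamed numberToBuffer/numberToHexString stay private to the module), assuming the unchanged parts of toBuffer keep the usual string and Buffer coercions:

import { sha3_256 } from 'lib/util';

// toBuffer coerces Buffers, 0x-prefixed hex strings, plain strings, and
// numbers before hashing, so each call yields a 32-byte SHA3-256 Buffer.
const fromHex = sha3_256('0xdeadbeef');
const fromText = sha3_256('raw batch data');
const fromNumber = sha3_256(42);

console.log(fromHex.toString('hex'));
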
6 changes: 3 additions & 3 deletions bots/src/orm/CamelToSnakeNamingStrategy.js
@@ -1,5 +1,5 @@
-const { DefaultNamingStrategy } = require('typeorm');
-const { snakeCase } = require('lodash');
+import { DefaultNamingStrategy } from 'typeorm';
+import { snakeCase } from 'lodash';

class CamelToSnakeNamingStrategy extends DefaultNamingStrategy {
tableName(targetName, userSpecifiedName) {
@@ -16,4 +16,4 @@ class CamelToSnakeNamingStrategy extends DefaultNamingStrategy {
}
}

-module.exports = CamelToSnakeNamingStrategy;
+export default CamelToSnakeNamingStrategy;
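
With the module switched to ES exports, a hypothetical TypeORM DataSource wiring would look like the sketch below; the connection options and import path are placeholders, since the repo's actual setup (e.g. worker/batchSubmitter/db) is not part of this diff:

import { DataSource } from 'typeorm';
import CamelToSnakeNamingStrategy from 'orm/CamelToSnakeNamingStrategy';

// Intended effect: camelCase entity properties such as bridgeId map to
// snake_case columns like bridge_id without per-column name overrides.
const dataSource = new DataSource({
  type: 'postgres',
  host: 'localhost',
  database: 'bots',
  namingStrategy: new CamelToSnakeNamingStrategy(),
  entities: ['src/orm/**/*Entity.ts']
});

export default dataSource;
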
2 changes: 1 addition & 1 deletion bots/src/orm/challenger/DeletedOutputEntity.ts
@@ -1,4 +1,4 @@
-import { Column, Entity, Index, PrimaryColumn } from 'typeorm';
+import { Column, Entity, PrimaryColumn } from 'typeorm';

@Entity('challenger_deleted_output')
export default class ChallengedOutputEntity {
4 changes: 2 additions & 2 deletions bots/src/service/batch/BatchService.ts
@@ -1,7 +1,7 @@
import { RecordEntity } from 'orm';
import { APIError, ErrorTypes } from 'lib/error';
import { getDB } from 'worker/batchSubmitter/db';
-import { decompressor } from 'lib/compressor';
+import { decompress } from 'lib/compressor';

interface GetBatchResponse {
bridgeId: number;
@@ -27,7 +27,7 @@ export async function getBatch(batchIndex: number): Promise<GetBatchResponse> {
return {
bridgeId: batch.bridgeId,
batchIndex: batch.batchIndex,
-batch: decompressor(batch.batch)
+batch: decompress(batch.batch)
};
} finally {
queryRunner.release();
4 changes: 2 additions & 2 deletions bots/src/worker/batchSubmitter/batchSubmitter.ts
@@ -2,7 +2,7 @@ import { getDB } from './db';
import { DataSource, EntityManager } from 'typeorm';
import { batchLogger, batchLogger as logger } from 'lib/logger';
import { BlockBulk, RPCClient } from 'lib/rpc';
-import { compressor } from 'lib/compressor';
+import { compress } from 'lib/compressor';
import { ExecutorOutputEntity, RecordEntity } from 'orm';
import { Wallet, MnemonicKey, MsgRecordBatch } from '@initia/initia.js';
import { delay } from 'bluebird';
@@ -88,7 +88,7 @@ export class BatchSubmitter {
throw new Error(`Error getting block bulk from L2`);
}

-return compressor(bulk.blocks);
+return compress(bulk.blocks);
}

async getStoredBatch(manager: EntityManager): Promise<RecordEntity | null> {
