mirror of
https://github.com/Instadapp/trustwallet-assets.git
synced 2024-07-29 22:37:31 +00:00
[Internal] New infra for running checks (not as jest tests) (#2938)
* CMC mapping update.
* New check infrastructure, move root folder test to new infra.
* Move list of allowed files to config.
* Include new check in other tests.
* More generic way to call checks.
* Organize fix and update actions behind interfaces.
* Organize checks into steps, multiple steps per action.
* Simplify checkStep class/instance creation.
* Migrate chain logo checks.
* Migrate asset folder check.
* Migrate further chain checks.
* Migrate eth fork folder checks.
* Migrate binance chain check.
* Extra output.
* Output improvements.
* Async fix.
* Migrate Tron check.
* Add Tron check.
* Remove Tron check from old.
* White/blacklist check in new infra, combined with fix.
* Refine ETH checks.
* Remove from old infra.
* Migrate CMC check to new infra.
* Migrate validator tests to new check infra.
* Migrate Json files validity check to new check infra.
* Whitelist check fix.
* Cleanup helpers.ts.
* Move helpers.ts.
* Cleanup of models.ts.
* Move models.ts.
* Move index.test.ts.
* Update with BEP8 support.
* Descriptive names for jobs within the builds.

Co-authored-by: Catenocrypt <catenocrypt@users.noreply.github.com>
This commit is contained in:
parent 4390942c1b
commit 102f2b88d4
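For orientation, this is the shape of the check infrastructure the commit introduces, condensed from the new script/action/interface.ts and the check runner in this diff. It is a simplified TypeScript sketch rather than the committed code: ExampleAction is hypothetical, and the loop drops the bluebird iteration and chalk-colored output the real runner uses.

// A single check step: returns an error message, or "" on success.
interface CheckStepInterface {
    getName(): string;
    check(): Promise<string>;
}

// An action groups check steps with optional fix/update hooks.
interface ActionInterface {
    getName(): string;
    getChecks(): CheckStepInterface[];
    fix(): Promise<void>;
    update(): Promise<void>;
}

// Hypothetical action with one check step; unused hooks are set to null,
// as the actions in this commit do (assumes non-strict null checks).
class ExampleAction implements ActionInterface {
    getName(): string { return "Example"; }
    getChecks(): CheckStepInterface[] {
        return [{
            getName: () => "Example check",
            check: async () => "" // return a non-empty error string to fail
        }];
    }
    fix = null;
    update = null;
}

// Simplified runner: walk every action and every step, collect failures.
async function checkActionList(actions: ActionInterface[]): Promise<number> {
    let returnCode = 0;
    for (const action of actions) {
        if (!action.getChecks) {
            continue;
        }
        for (const step of action.getChecks()) {
            const error = await step.check();
            if (error && error.length > 0) {
                console.log(`X '${step.getName()}': ${error}`);
                returnCode = 1;
            }
        }
    }
    return returnCode;
}

The "check", "fix" and "update" npm scripts added in package.json below drive the corresponding checkAll, fixAll and updateAll entry points over one shared list of such actions.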
.github/workflows/fix-dryrun.yml (vendored, 4 lines changed)

@@ -7,7 +7,7 @@ on:
  pull_request:
    branches: [master]
jobs:
  scripts:
  fix-dryrun:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
@@ -21,6 +21,8 @@ jobs:
      - name: Show fix result (diff); run 'npm run fix' locally
        if: success()
        run: git status
      - name: Run check
        run: npm run check
      - name: Run test
        run: npm t
.github/workflows/fix.yml (vendored, 5 lines changed)

@@ -3,7 +3,7 @@ on:
  push:
    branches: [ master ]
jobs:
  scripts:
  fix:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
@@ -18,8 +18,9 @@ jobs:
      - name: Run fix script
        run: npm run fix
      - name: Show fix result (diff); run 'npm run fix' locally
        if: success()
        run: git status
      - name: Run check
        run: npm run check
      - name: Run test
        run: npm t
      - name: Commit changes if any
.github/workflows/periodic-update.yml (vendored, 4 lines changed)

@@ -4,7 +4,7 @@ on:
    # Run twice per day (at 7:00UTC/12amPST, 19:00UTC/12pmPST)
    - cron: '0 7,19 * * *'
jobs:
  scripts:
  preiodic-update:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
@@ -21,6 +21,8 @@ jobs:
      - name: Show fix result (diff); run 'npm run fix' locally
        if: success()
        run: git status
      - name: Run check
        run: npm run check
      - name: Run test
        run: npm t
      - name: Commit changes test pased
.github/workflows/pr-ci.yml (vendored, 4 lines changed)

@@ -15,4 +15,6 @@ jobs:
        with:
          node-version: '12.x'
      - uses: bahmutov/npm-install@v1
      - run: npm t
      - name: Run check
        run: npm run check
      - run: npm t
.github/workflows/s3_upload.yml (vendored, 2 lines changed)

@@ -4,7 +4,7 @@ on:
    branches:
      - master
jobs:
  build:
  upload-s3:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v1
.github/workflows/test.yml (vendored, 4 lines changed)

@@ -1,4 +1,4 @@
name: Test
name: Check
on:
  push:
    branches: [ master ]
@@ -12,5 +12,7 @@ jobs:
          node-version: 12
      - name: Install Dependencies
        run: npm ci
      - name: Run check
        run: npm run check
      - name: Run test
        run: npm t
jest.config.js

@@ -1,6 +1,6 @@
module.exports = {
    "roots": [
        "<rootDir>/src"
        "<rootDir>/test"
    ],
    testMatch: [
        "**/__tests__/**/*.+(ts|tsx|js)",
package.json

@@ -5,6 +5,7 @@
  "main": "index.js",
  "scripts": {
    "test": "jest",
    "check": "ts-node ./script/main/check",
    "fix": "ts-node ./script/main/fix",
    "update": "ts-node ./script/main/update"
  },
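The "check", "fix" and "update" scripts above point at ts-node entry points under script/main/ that are not part of this diff. A minimal entry script compatible with the checkAll runner introduced by this commit might look roughly like this (hypothetical sketch; the exact import path and file contents are assumptions):

// script/main/check.ts (hypothetical): run all checks, exit non-zero on failure.
import { checkAll } from "../action/index";

checkAll()
    .then(returnCode => process.exit(returnCode))
    .catch(e => { console.error(e); process.exit(1); });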
pricing/coinmarketcap/check.ts (new file, 165 lines)

@@ -0,0 +1,165 @@
import { CheckStepInterface } from "../../script/action/interface";
import { readFileSync } from "../../script/common/filesystem";
import { mapTiker, TickerType } from "../../script-old/models";
import { isChecksum } from "../../script/common/eth-web3";
import { isTRC10, isTRC20 } from "../../script/action/tron";
import { retrieveAssetSymbols } from "../../script/action/binance";

export function getChecks(): CheckStepInterface[] {
    const cmcMap: mapTiker[] = JSON.parse(readFileSync("./pricing/coinmarketcap/mapping.json"));
    return [
        {
            getName: () => { return "Must have items";},
            check: async () => {
                if (cmcMap.length == 0) {
                    return `CMC map must have items`;
                }
                return "";
            }
        },
        {
            getName: () => { return `Items must be sorted by "id" in ascending order`;},
            check: async () => {
                var error: string = "";
                cmcMap.forEach((el, i) => {
                    if (i > 0) {
                        const prevID = cmcMap[i - 1].id;
                        const curID = el.id;
                        if (curID < prevID) {
                            error += `Item ${curID} must be greather or equal to ${prevID}\n`;
                        }
                    }
                });
                return error;
            }
        },
        {
            getName: () => { return `Items must be sorted by "coin" in ascending order if have same "id"`;},
            check: async () => {
                var error: string = "";
                cmcMap.forEach((el, i) => {
                    if (i > 0) {
                        const prevEl = cmcMap[i - 1]
                        const prevCoin = prevEl.coin
                        const prevID = cmcMap[i - 1].id
                        const curCoin = el.coin
                        const curID = el.id
                        if (prevID == curID) {
                            if (curCoin < prevCoin) {
                                error += `Item ${JSON.stringify(el)} must be greather or equal to ${JSON.stringify(prevEl)}\n`;
                            }
                        }
                    }
                });
                return error;
            }
        },
        {
            getName: () => { return "Properies value shoud not contain spaces";},
            check: async () => {
                var error: string = "";
                cmcMap.forEach((el, i) => {
                    Object.keys(el).forEach(key => {
                        const val = el[key]
                        if (typeof val === "string") {
                            if (val.indexOf(" ") >= 0) {
                                error += ` Property value "${val}" should not contain space\n`;
                            }
                        }
                    })
                });
                return error;
            }
        },
        {
            getName: () => { return "Params should have value and correct type";},
            check: async () => {
                var error: string = "";
                cmcMap.forEach((el) => {
                    const {coin, type, id, token_id} = el;
                    if (typeof coin !== "number") {
                        error += `Coin ${coin} must be type "number"\n`;
                    }
                    if (type !== "token" && type !== "coin") {
                        error += `Element with id ${id} has wrong type: "${type}"\n`;
                    }
                    if (type === "token") {
                        if (!token_id) {
                            error += `token_id ${token_id} with id ${id} must be type not empty\n`;
                        }
                    }
                    if (type === "coin") {
                        if ("token_in" in el) {
                            error += `Element with id ${id} should not have property "token_id"\n`;
                        }
                    }
                });
                return error;
            }
        },
        {
            getName: () => { return `"token_id" should be in correct format`;},
            check: async () => {
                var error: string = "";
                const bep2Symbols = await retrieveAssetSymbols();
                cmcMap.forEach((el) => {
                    const {coin, token_id, type, id} = el
                    switch (coin) {
                        case 60:
                            if (type === TickerType.Token) {
                                if (!isChecksum(token_id)) {
                                    error += `"token_id" ${token_id} with id ${id} must be in checksum'n`;
                                }
                            }
                            break;
                        case 195:
                            if (type === TickerType.Token) {
                                if (!isTRC10(token_id) && !isTRC20(token_id)) {
                                    error += `"token_id" ${token_id} with id ${id} must be in TRC10 or TRC20\n`;
                                }
                            }
                            break;
                        case 714:
                            if (type === TickerType.Token) {
                                if (!(bep2Symbols.indexOf(token_id) >= 0)) {
                                    error += `"token_id" ${token_id} with id ${id} must be BEP2 symbol\n`;
                                }
                            }
                            break;
                        default:
                            break;
                    }
                });
                return error;
            }
        },
        {
            getName: () => { return `"token_id" shoud be unique`;},
            check: async () => {
                var error: string = "";
                const mappedList = cmcMap.reduce((acm, val) => {
                    if (val.hasOwnProperty("token_id")) {
                        if (acm.hasOwnProperty(val.token_id)) {
                            acm[val.token_id] == ++acm[val.token_id]
                        } else {
                            acm[val.token_id] = 0
                        }
                    }
                    return acm
                }, {});
                cmcMap.forEach((el) => {
                    if (el.hasOwnProperty("token_id")) {
                        if (mappedList[el.token_id] > 0) {
                            error += `CMC map ticker with "token_id" ${el.token_id} shoud be unique'n`;
                        }
                    }
                });
                return error;
            }
        },
    ];
}
pricing/coinmarketcap/cmc-action.ts (new file, 15 lines)

@@ -0,0 +1,15 @@
import { ActionInterface, CheckStepInterface } from "../../script/action/interface";
import { run } from "./script";
import { getChecks } from "./check";

export class Coinmarketcap implements ActionInterface {
    getName(): string { return "Coinmarketcap mapping"; }

    getChecks(): CheckStepInterface[] { return getChecks(); }

    fix = null;

    async update(): Promise<void> {
        await run();
    }
}
pricing/coinmarketcap/script.ts

@@ -1,21 +1,21 @@
import { toChecksum } from "../../src/test/helpers"
const BluebirdPromise = require("bluebird")
const axios = require("axios")
const chalk = require('chalk')
const fs = require("fs")
const path = require('path')
const constants = require('bip44-constants')
import { readFileSync } from "../../script/common/filesystem";
import { ethForkChains } from "../../script/common/blockchains";
import {
    readFileSync,
    toChecksum,
    getChainAssetLogoPath,
    isPathExistsSync,
    makeDirSync,
    getChainAssetPath,
    ethSidechains,
    getChainBlacklist,
    getChainWhitelist,
} from "../../src/test/helpers";
import { TickerType, mapTiker, PlatformType } from "../../src/test/models";
} from "../../script-old/helpers";
import { TickerType, mapTiker, PlatformType } from "../../script-old/models";

// Steps required to run this:
// 1. (Optional) CMC API key already setup, use yours if needed. Install script deps "npm i" if hasn't been run before.
@@ -62,7 +62,7 @@ const allContracts: mapTiker[] = [] // Temp storage for mapped assets
let bnbOwnerToSymbol = {} // e.g: bnb1tawge8u97slduhhtumm03l4xl4c46dwv5m9yzk: WISH-2D5
let bnbOriginalSymbolToSymbol = {} // e.g: WISH: WISH-2D5

async function run() {
export async function run() {
    try {
        await Promise.all([initState(), setBinanceTokens()])
        const [totalCrypto, coins] = await Promise.all([getTotalActiveCryptocurrencies(), getTickers()])
@@ -196,7 +196,7 @@ async function initState () {
}

async function mapChainsAssetsLists() {
    ethSidechains.forEach(chain => {
    ethForkChains.forEach(chain => {
        Object.assign(mappedChainsWhitelistAssets, {[chain]: {}})
        Object.assign(mappedChainsBlacklistAssets, {[chain]: {}})

@@ -352,7 +352,3 @@ function log(string, cb?) {
// }
// })
// }

export async function update() {
    await run();
}
@@ -1,14 +1,16 @@
import * as fs from "fs"
const isImage = require("is-image");
import { rootDirAllowedFiles } from "../script/common/repo-structure";
import { ethForkChains, Ethereum } from "../script/common/blockchains";
import {
    getFileExt,
    getFileName
} from "../script/common/filesystem";
import {
    Ethereum,
    chainsFolderPath,
    ethSidechains,
    getChainAssetPath,
    getChainAssetsPath,
    getChainPath,
    getFileExt,
    getFileName,
    getRootDirFilesList,
    isChecksum,
    isEthereumAddress,
@@ -17,12 +19,11 @@ import {
    logoExtension,
    makeDirIfDoestExist,
    readDirSync,
    rootDirAllowedFiles,
    toChecksum,
    isDirContainLogo
} from "../src/test/helpers"
} from "./helpers"

ethSidechains.forEach(chain => {
ethForkChains.forEach(chain => {
    const chainAssetsPath = getChainAssetsPath(chain)

    readDirSync(chainAssetsPath).forEach(async asset => {
@@ -1,17 +1,11 @@
const bluebird = require("bluebird")
const nestedProperty = require("nested-property");
import { writeFileSync, readDirSync } from "../script/common/filesystem";
import {
    chainsFolderPath,
    getChainInfoPath,
    isChainInfoExistSync,
    writeFileSync,
    readDirSync,
    readFileSync,
    getChainAssetInfoPath,
    getChainAssetsPath,
    isPathExistsSync
} from "../src/test/helpers"
import { CoinInfoList } from "../src/test/models";
} from "./helpers"
import { CoinInfoList } from "./models";

const dafaultInfoTemplate: CoinInfoList =
{
95
script-old/helpers.ts
Normal file
95
script-old/helpers.ts
Normal file
|
@ -0,0 +1,95 @@
|
|||
import * as fs from "fs"
|
||||
import * as path from "path"
|
||||
const Web3 = require('web3')
|
||||
const web3 = new Web3('ws://localhost:8546');
|
||||
|
||||
export const logoName = `logo`
|
||||
export const infoName = `info`
|
||||
|
||||
export const logoExtension = "png"
|
||||
export const jsonExtension = "json"
|
||||
|
||||
const whiteList = `whitelist.${jsonExtension}`
|
||||
const blackList = `blacklist.${jsonExtension}`
|
||||
|
||||
export const logo = `${logoName}.${logoExtension}`
|
||||
export const info = `${infoName}.${jsonExtension}`
|
||||
|
||||
export const root = './'
|
||||
export const chainsFolderPath = path.join(process.cwd(), '/blockchains')
|
||||
export const getChainLogoPath = (chain: string): string => `${chainsFolderPath}/${chain}/info/${logo}`
|
||||
export const getChainInfoPath = (chain: string): string => `${chainsFolderPath}/${chain}/info/${info}`
|
||||
export const getChainAssetsPath = (chain: string): string => `${chainsFolderPath}/${chain}/assets`
|
||||
export const getChainPath = (chain: string): string => `${chainsFolderPath}/${chain}`
|
||||
|
||||
export const getChainAssetPath = (chain: string, address: string) => `${getChainAssetsPath(chain)}/${address}`
|
||||
export const getAllChainsList = (): string[] => readDirSync(chainsFolderPath)
|
||||
export const getChainAssetLogoPath = (chain: string, address: string) => `${getChainAssetsPath(chain)}/${address}/${logo}`
|
||||
export const getChainWhitelistPath = (chain: string): string => `${chainsFolderPath}/${chain}/${whiteList}`
|
||||
export const getChainBlacklistPath = (chain: string): string => `${chainsFolderPath}/${chain}/${blackList}`
|
||||
export const getChainWhitelist = (chain: string): string[] => {
|
||||
if (isChainWhitelistExistSync(chain)) {
|
||||
return JSON.parse(readFileSync(getChainWhitelistPath(chain)))
|
||||
}
|
||||
return []
|
||||
}
|
||||
export const getChainBlacklist = (chain: string): string[] => {
|
||||
if (isChainBlacklistExistSync(chain)) {
|
||||
return JSON.parse(readFileSync(getChainBlacklistPath(chain)))
|
||||
}
|
||||
return []
|
||||
}
|
||||
export const getRootDirFilesList = (): string[] => readDirSync(root)
|
||||
|
||||
export const readDirSync = (path: string): string[] => fs.readdirSync(path)
|
||||
export const makeDirSync = (path: string) => fs.mkdirSync(path)
|
||||
export const isPathExistsSync = (path: string): boolean => fs.existsSync(path)
|
||||
export const isDirContainLogo = (path: string): boolean => fs.existsSync(`${path}/${logo}`)
|
||||
export const isChainWhitelistExistSync = (chain: string): boolean => isPathExistsSync(getChainWhitelistPath(chain))
|
||||
export const isChainBlacklistExistSync = (chain: string): boolean => isPathExistsSync(getChainBlacklistPath(chain))
|
||||
export const isChainInfoExistSync = (chain: string): boolean => isPathExistsSync(getChainInfoPath(chain))
|
||||
export const readFileSync = (path: string) => fs.readFileSync(path, 'utf8')
|
||||
|
||||
export const isChecksum = (address: string): boolean => web3.utils.checkAddressChecksum(address)
|
||||
export const toChecksum = (address: string): string => web3.utils.toChecksumAddress(address)
|
||||
|
||||
export const isEthereumAddress = (address: string): boolean => {
|
||||
return web3.utils.isAddress(address)
|
||||
}
|
||||
|
||||
export const isPathDir = (path: string): boolean => {
|
||||
try {
|
||||
return fs.lstatSync(path).isDirectory()
|
||||
} catch (e) {
|
||||
console.log(`Path: ${path} is not a directory with error: ${e.message}`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export const isPathDirEmpty = (path: string): boolean => {
|
||||
try {
|
||||
if (isPathDir(path)) {
|
||||
return fs.readdirSync(path).length == 0
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Error isPathDirEmpty`, error)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
export const removeDir = (path: string) => {
|
||||
fs.rmdirSync(path, {recursive: true})
|
||||
}
|
||||
|
||||
export const makeDirIfDoestExist = async (dirPath: string, dirName: string) => {
|
||||
const path = `${dirPath}/${dirName}`
|
||||
await fs.mkdir(path, {recursive: true}, (err) => {
|
||||
if (err) {
|
||||
console.error(`Error creating dir at path ${path} with result ${err}`)
|
||||
} else {
|
||||
console.log(`Created direcotry at ${path}`)
|
||||
}
|
||||
})
|
||||
}
|
script-old/models.ts (new file, 44 lines)

@@ -0,0 +1,44 @@
export interface CoinInfoList {
    name: string;
    website: string;
    source_code: string;
    whitepaper: string;
    short_description: string;
    explorer: string;
    socials: Social[];
    details: Detail[];
}

interface Detail {
    language: string;
    description: string;
}

interface Social {
    name: string;
    url: string;
    handle: string;
}

// CoinmarketCap
export interface mapTiker {
    coin: number
    type: mapType
    token_id?: string
    id: number
}

type mapType = TickerType.Coin | TickerType.Token

export enum TickerType {
    Token = "token",
    Coin = "coin"
}

export enum PlatformType {
    Ethereum = "Ethereum",
    Binance = "Binance Coin",
    TRON = "TRON",
    OMNI = "Omni",
    VeChain = "VeChain"
}
@@ -1,5 +1,5 @@
var axios = require("axios");
import { toChecksum } from "../src/test/helpers"
import { toChecksum } from "./helpers"

// Returns array of ERC-721, ERC-1155 contract addresses in checksum
export const getOpenseaCollectionAddresses = async () => {
@@ -2,11 +2,11 @@ import {
    getChainAssetPath,
    getChainAssetsPath,
    isPathDir,
    isPathDirEmpthy,
    isPathDirEmpty,
    readDirSync,
    removeDir,
    getAllChainsList,
} from "../src/test/helpers"
} from "./helpers"

getAllChainsList().forEach(async chain => {
    const chainAssetsPath = getChainAssetsPath(chain)
@@ -15,9 +15,9 @@ getAllChainsList().forEach(async chain => {
    readDirSync(chainAssetsPath).forEach(async (asset) => {
        const assetPath = getChainAssetPath(chain, asset);
        const isDir = await isPathDir(assetPath);
        const isPathEmpthy = await isPathDirEmpthy(assetPath);
        const isPathEmpty = await isPathDirEmpty(assetPath);

        if (isDir && isPathEmpthy) {
        if (isDir && isPathEmpty) {
            removeDir(assetPath)
            console.log(`Removed empty folder at path ${assetPath}`);
        }
@@ -1,7 +1,7 @@
const { execSync } = require('child_process');
const path = require('path')
const axios = require('axios')
import { readDirSync } from "../src/test/helpers";
import { readDirSync } from "./helpers";

const assetsPath = path.resolve(`${__dirname}/../blockchains/tron/assets`)
const chainAddresses = readDirSync(assetsPath)
@ -4,6 +4,10 @@ import * as fs from "fs";
|
|||
import * as path from "path";
|
||||
import * as chalk from 'chalk';
|
||||
import * as config from "../common/config";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { getChainAssetsPath } from "../common/repo-structure";
|
||||
import { Binance } from "../common/blockchains";
|
||||
import { readDirSync } from "../common/filesystem";
|
||||
|
||||
import {
|
||||
getChainAssetLogoPath,
|
||||
|
@ -11,15 +15,27 @@ import {
|
|||
} from "../common/repo-structure";
|
||||
|
||||
const binanceChain = "binance"
|
||||
const binanceAssetsUrl = config.getConfig("binance_assets_url", "https://explorer.binance.org/api/v1/assets?page=1&rows=1000");
|
||||
const binanceUrlTokens2 = config.getConfig("binance_url_tokens2", "https://dex-atlantic.binance.org/api/v1/tokens?limit=1000");
|
||||
const binanceUrlTokens8 = config.getConfig("binance_url_tokens8", "https://dex-atlantic.binance.org/api/v1/mini/tokens?limit=1000");
|
||||
const binanceUrlTokenAssets = config.getConfig("binance_url_token_assets", "https://explorer.binance.org/api/v1/assets?page=1&rows=1000");
|
||||
|
||||
async function retrieveBep2AssetList() {
|
||||
console.log(`Retrieving assets info from: ${binanceAssetsUrl}`);
|
||||
const { assetInfoList } = await axios.get(binanceAssetsUrl).then(r => r.data);
|
||||
console.log(`Retrieved ${assetInfoList.length} asset infos`);
|
||||
async function retrieveBep2AssetList(): Promise<any[]> {
|
||||
console.log(` Retrieving token asset infos from: ${binanceUrlTokenAssets}`);
|
||||
const { assetInfoList } = await axios.get(binanceUrlTokenAssets).then(r => r.data);
|
||||
console.log(` Retrieved ${assetInfoList.length} token asset infos`);
|
||||
return assetInfoList
|
||||
}
|
||||
|
||||
export async function retrieveAssetSymbols(): Promise<string[]> {
|
||||
console.log(` Retrieving token infos (${binanceUrlTokens2}, ${binanceUrlTokens8})`);
|
||||
const bep2assets = await axios.get(binanceUrlTokens2);
|
||||
const bep8assets = await axios.get(binanceUrlTokens8);
|
||||
const symbols = bep2assets.data.map(({ symbol }) => symbol)
|
||||
.concat(bep8assets.data.map(({ symbol }) => symbol));
|
||||
console.log(` Retrieved ${symbols.length} symbols`);
|
||||
return symbols;
|
||||
}
|
||||
|
||||
function fetchImage(url) {
|
||||
return axios.get(url, { responseType: "stream" })
|
||||
.then(r => r.data)
|
||||
|
@ -73,16 +89,42 @@ async function fetchMissingImages(toFetch: any[]): Promise<string[]> {
|
|||
return fetchedAssets;
|
||||
}
|
||||
|
||||
export async function update() {
|
||||
// retrieve missing token images; BEP2 (bep8 not supported)
|
||||
const bep2InfoList = await retrieveBep2AssetList();
|
||||
const blacklist: string[] = require(getChainBlacklistPath(binanceChain));
|
||||
export class BinanceAction implements ActionInterface {
|
||||
getName(): string { return "Binance chain"; }
|
||||
|
||||
const toFetch = findImagesToFetch(bep2InfoList, blacklist);
|
||||
const fetchedAssets = await fetchMissingImages(toFetch);
|
||||
getChecks(): CheckStepInterface[] {
|
||||
return [
|
||||
{
|
||||
getName: () => { return "Binance chain; assets must exist on chain"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const tokenSymbols = await retrieveAssetSymbols();
|
||||
const assets = readDirSync(getChainAssetsPath(Binance));
|
||||
assets.forEach(asset => {
|
||||
if (!(tokenSymbols.indexOf(asset) >= 0)) {
|
||||
error += `Asset ${asset} missing on chain\n`;
|
||||
}
|
||||
});
|
||||
console.log(` ${assets.length} assets checked.`);
|
||||
return error;
|
||||
}
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
fix = null;
|
||||
|
||||
async update(): Promise<void> {
|
||||
// retrieve missing token images; BEP2 (bep8 not supported)
|
||||
const bep2InfoList = await retrieveBep2AssetList();
|
||||
const blacklist: string[] = require(getChainBlacklistPath(binanceChain));
|
||||
|
||||
if (fetchedAssets.length > 0) {
|
||||
console.log(`Fetched ${fetchedAssets.length} asset(s):`);
|
||||
fetchedAssets.forEach(asset => console.log(` ${asset}`));
|
||||
const toFetch = findImagesToFetch(bep2InfoList, blacklist);
|
||||
const fetchedAssets = await fetchMissingImages(toFetch);
|
||||
|
||||
if (fetchedAssets.length > 0) {
|
||||
console.log(`Fetched ${fetchedAssets.length} asset(s):`);
|
||||
fetchedAssets.forEach(asset => console.log(` ${asset}`));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
script/action/cosmos.ts (new file, 38 lines)

@@ -0,0 +1,38 @@
import { Cosmos } from "../common/blockchains";
import { getChainValidatorsAssets } from "../common/repo-structure";
import { ActionInterface, CheckStepInterface } from "./interface";
import { isLowerCase } from "../common/types";

export class CosmosAction implements ActionInterface {
    getName(): string { return "Cosmos chain"; }

    getChecks(): CheckStepInterface[] {
        return [
            {
                getName: () => { return "Cosmos validator assets must have correct format"},
                check: async () => {
                    var error: string = "";
                    const assets = getChainValidatorsAssets(Cosmos);
                    const prefix = "cosmosvaloper1";
                    const expLength = 52;
                    assets.forEach(addr => {
                        if (!(addr.startsWith(prefix))) {
                            error += `Address ${addr} should start with '${prefix}'\n`;
                        }
                        if (addr.length != expLength) {
                            error += `Address ${addr} should have length ${expLength}\n`;
                        }
                        if (!isLowerCase(addr)) {
                            error += `Address ${addr} should be in lowercase\n`;
                        }
                    });
                    return error;
                }
            },
        ];
    }

    fix = null;

    update = null;
}
@ -8,16 +8,22 @@ import {
|
|||
isChainAssetInfoExistSync,
|
||||
logoName,
|
||||
logoExtension,
|
||||
logoFullName
|
||||
logoFullName,
|
||||
getChainAssetLogoPath
|
||||
} from "../common/repo-structure";
|
||||
import { formatJsonFile } from "../common/json";
|
||||
import {
|
||||
getFileName,
|
||||
getFileExt,
|
||||
gitMove,
|
||||
readDirSync
|
||||
readDirSync,
|
||||
isPathExistsSync,
|
||||
} from "../common/filesystem";
|
||||
import { isChecksum, toChecksum } from "../common/eth-web3";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { isLogoOK } from "../common/image";
|
||||
import { isAssetInfoOK } from "../common/asset-info";
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
function formatInfos() {
|
||||
console.log(`Formatting info files...`);
|
||||
|
@ -60,7 +66,53 @@ function checkAddressChecksums() {
|
|||
});
|
||||
}
|
||||
|
||||
export async function fix() {
|
||||
formatInfos();
|
||||
checkAddressChecksums();
|
||||
export class EthForks implements ActionInterface {
|
||||
getName(): string { return "Ethereum forks"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
var steps: CheckStepInterface[] = [];
|
||||
ethForkChains.forEach(chain => {
|
||||
steps.push(
|
||||
{
|
||||
getName: () => { return `Folder structure for chain ${chain} (ethereum fork)`;},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const assetsFolder = getChainAssetsPath(chain);
|
||||
const assetsList = getChainAssetsList(chain);
|
||||
console.log(` Found ${assetsList.length} assets for chain ${chain}`);
|
||||
await bluebird.each(assetsList, async (address) => {
|
||||
const assetPath = `${assetsFolder}/${address}`;
|
||||
if (!isPathExistsSync(assetPath)) {
|
||||
error += `Expect directory at path: ${assetPath}\n`;
|
||||
}
|
||||
if (!isChecksum(address)) {
|
||||
error += `Expect asset at path ${assetPath} in checksum\n`;
|
||||
}
|
||||
const assetLogoPath = getChainAssetLogoPath(chain, address);
|
||||
if (!isPathExistsSync(assetLogoPath)) {
|
||||
error += `Missing file at path '${assetLogoPath}'\n`;
|
||||
}
|
||||
const [isOK, dimensionMsg] = await isLogoOK(assetLogoPath);
|
||||
if (!isOK) {
|
||||
error += dimensionMsg + "\n";
|
||||
}
|
||||
const [isInfoOK, infoMsg] = isAssetInfoOK(chain, address);
|
||||
if (!isInfoOK) {
|
||||
error += infoMsg + "\n";
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
return steps;
|
||||
}
|
||||
|
||||
async fix(): Promise<void> {
|
||||
formatInfos();
|
||||
checkAddressChecksums();
|
||||
}
|
||||
|
||||
update = null;
|
||||
}
|
||||
|
|
100
script/action/folders-and-files.ts
Normal file
100
script/action/folders-and-files.ts
Normal file
|
@ -0,0 +1,100 @@
|
|||
import {
|
||||
readDirSync,
|
||||
isPathExistsSync
|
||||
} from "../common/filesystem";
|
||||
import { CheckStepInterface, ActionInterface } from "./interface";
|
||||
import {
|
||||
chainsPath,
|
||||
getChainLogoPath,
|
||||
getChainAssetsPath,
|
||||
getChainAssetPath,
|
||||
assetFolderAllowedFiles,
|
||||
getChainFolderFilesList,
|
||||
chainFolderAllowedFiles,
|
||||
rootDirAllowedFiles
|
||||
} from "../common/repo-structure";
|
||||
import { isLogoOK } from "../common/image";
|
||||
import { isLowerCase } from "../common/types";
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
const foundChains = readDirSync(chainsPath)
|
||||
|
||||
export class FoldersFiles implements ActionInterface {
|
||||
getName(): string { return "Folders and Files"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
return [
|
||||
{
|
||||
getName: () => { return "Repository root dir"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const dirActualFiles = readDirSync(".");
|
||||
dirActualFiles.forEach(file => {
|
||||
if (!(rootDirAllowedFiles.indexOf(file) >= 0)) {
|
||||
error += `File "${file}" should not be in root or added to predifined list\n`;
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
{
|
||||
getName: () => { return "Chain folders are lowercase, contain only predefined list of files"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
foundChains.forEach(chain => {
|
||||
if (!isLowerCase(chain)) {
|
||||
error += `Chain folder must be in lowercase "${chain}"\n`;
|
||||
}
|
||||
getChainFolderFilesList(chain).forEach(file => {
|
||||
if (!(chainFolderAllowedFiles.indexOf(file) >= 0)) {
|
||||
error += `File '${file}' not allowed in chain folder: ${chain}\n`;
|
||||
}
|
||||
});
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
{
|
||||
getName: () => { return "Chain folders have logo, and correct size"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
await bluebird.each(foundChains, async (chain) => {
|
||||
const chainLogoPath = getChainLogoPath(chain);
|
||||
if (!isPathExistsSync(chainLogoPath)) {
|
||||
error += `File missing at path "${chainLogoPath}"\n`;
|
||||
}
|
||||
const [isOk, error1] = await isLogoOK(chainLogoPath);
|
||||
if (!isOk) {
|
||||
error += error1 + "\n";
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
{
|
||||
getName: () => { return "Asset folders contain only predefined set of files"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
foundChains.forEach(chain => {
|
||||
const assetsPath = getChainAssetsPath(chain);
|
||||
if (isPathExistsSync(assetsPath)) {
|
||||
readDirSync(assetsPath).forEach(address => {
|
||||
const assetFiles = getChainAssetPath(chain, address)
|
||||
readDirSync(assetFiles).forEach(assetFolderFile => {
|
||||
if (!(assetFolderAllowedFiles.indexOf(assetFolderFile) >= 0)) {
|
||||
error += `File '${assetFolderFile}' not allowed at this path: ${assetsPath}\n`;
|
||||
}
|
||||
});
|
||||
}) ;
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
fix = null;
|
||||
|
||||
update = null;
|
||||
}
|
script/action/interface.ts (new file, 15 lines)

@@ -0,0 +1,15 @@
// A single check step
export interface CheckStepInterface {
    getName(): string;
    // return error or null/empty on success
    check(): Promise<string>;
}

// An action for a check, fix, or update, or a combination.
export interface ActionInterface {
    getName(): string;
    // return check steps for check (0, 1, or more)
    getChecks(): CheckStepInterface[];
    fix(): Promise<void>;
    update(): Promise<void>;
}
script/action/json.ts (new file, 35 lines)

@@ -0,0 +1,35 @@
import { chainsPath, pricingFolderPath } from "../common/repo-structure";
import { findFiles } from "../common/filesystem";
import { ActionInterface, CheckStepInterface } from "./interface";
import { isValidJSON } from "../common/json";
import * as bluebird from "bluebird";

export class JsonAction implements ActionInterface {
    getName(): string { return "Json files"; }

    getChecks(): CheckStepInterface[] {
        return [
            {
                getName: () => { return "Check all JSON files to have valid content"},
                check: async () => {
                    var error: string = "";
                    const files = [
                        ...findFiles(chainsPath, 'json'),
                        ...findFiles(pricingFolderPath, 'json')
                    ];

                    await bluebird.each(files, async file => {
                        if (!isValidJSON(file)) {
                            error += `${file} path contains invalid JSON\n`;
                        }
                    });
                    return error;
                }
            },
        ];
    }

    fix = null;

    update = null;
}
script/action/kava.ts (new file, 38 lines)

@@ -0,0 +1,38 @@
import { Kava } from "../common/blockchains";
import { getChainValidatorsAssets } from "../common/repo-structure";
import { ActionInterface, CheckStepInterface } from "./interface";
import { isLowerCase } from "../common/types";

export class KavaAction implements ActionInterface {
    getName(): string { return "Kava chain"; }

    getChecks(): CheckStepInterface[] {
        return [
            {
                getName: () => { return "Kava validator assets must have correct format"},
                check: async () => {
                    var error: string = "";
                    const assets = getChainValidatorsAssets(Kava);
                    const prefix = "kavavaloper1";
                    const expLength = 50;
                    assets.forEach(addr => {
                        if (!(addr.startsWith(prefix))) {
                            error += `Address ${addr} should start with '${prefix}'\n`;
                        }
                        if (addr.length != expLength) {
                            error += `Address ${addr} should have length ${expLength}\n`;
                        }
                        if (!isLowerCase(addr)) {
                            error += `Address ${addr} should be in lowercase\n`;
                        }
                    });
                    return error;
                }
            },
        ];
    }

    fix = null;

    update = null;
}
script/action/logo-size.ts

@@ -13,6 +13,7 @@ import {
    isPathExistsSync
} from "../common/filesystem";
import { resizeIfTooLarge } from "../common/image";
import { ActionInterface } from "./interface";

async function downsize(chains) {
    console.log(`Checking all logos for downsizing ...`);
@@ -56,7 +57,12 @@ async function downsize(chains) {
    console.log(`Checking logos completed, ${totalCountChecked} logos checked, ${totalCountUpdated} logos updated`);
}

export async function fix() {
    const foundChains = readDirSync(chainsPath);
    await downsize(foundChains);
export class LogoSize implements ActionInterface {
    getName(): string { return "Logo sizes"; }
    getChecks = null;
    async fix(): Promise<void> {
        const foundChains = readDirSync(chainsPath);
        await downsize(foundChains);
    }
    update = null;
}
script/action/terra.ts (new file, 38 lines)

@@ -0,0 +1,38 @@
import { Terra } from "../common/blockchains";
import { getChainValidatorsAssets } from "../common/repo-structure";
import { ActionInterface, CheckStepInterface } from "./interface";
import { isLowerCase } from "../common/types";

export class TerraAction implements ActionInterface {
    getName(): string { return "Terra chain"; }

    getChecks(): CheckStepInterface[] {
        return [
            {
                getName: () => { return "Terra validator assets must have correct format"},
                check: async () => {
                    var error: string = "";
                    const assets = getChainValidatorsAssets(Terra);
                    const prefix = "terravaloper1";
                    const expLength = 51;
                    assets.forEach(addr => {
                        if (!(addr.startsWith(prefix))) {
                            error += `Address ${addr} should start with '${prefix}'\n`;
                        }
                        if (addr.length != expLength) {
                            error += `Address ${addr} should have length ${expLength}\n`;
                        }
                        if (!isLowerCase(addr)) {
                            error += `Address ${addr} should be in lowercase\n`;
                        }
                    });
                    return error;
                }
            },
        ];
    }

    fix = null;

    update = null;
}
|
|||
import axios from "axios";
|
||||
import * as eztz from "eztz-lib";
|
||||
import {
|
||||
validatorsList,
|
||||
getChainValidatorsPath,
|
||||
getChainValidatorsListPath
|
||||
getChainValidatorsListPath,
|
||||
getChainValidatorsAssets
|
||||
} from "../common/repo-structure";
|
||||
import { Tezos } from "../common/blockchains";
|
||||
import { readFileSync } from "../common/filesystem";
|
||||
import { writeJsonFile } from "../common/json";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { ValidatorModel } from "../common/validator-models";
|
||||
|
||||
import {
|
||||
BakingBadBaker,
|
||||
ValidatorModel
|
||||
} from "../../src/test/models";
|
||||
interface BakingBadBaker {
|
||||
address: string,
|
||||
freeSpace: number
|
||||
// serviceHealth: string // active or Dead is a working baker who was a public baker but for some reason stopped paying his delegators, Closed is a permanently closed service (we store them for historical purposes only
|
||||
fee: number
|
||||
minDelegation: number
|
||||
openForDelegation: boolean
|
||||
payoutDelay: number
|
||||
payoutPeriod: number
|
||||
serviceHealth: string
|
||||
}
|
||||
|
||||
function getChainValidatorsList(chain: string): ValidatorModel[] {
|
||||
return JSON.parse(readFileSync(`${(getChainValidatorsPath(chain))}/${validatorsList}`));
|
||||
|
@ -64,6 +75,30 @@ async function gen_validators_tezos() {
|
|||
writeJsonFile(getChainValidatorsListPath(Tezos), newbakers)
|
||||
}
|
||||
|
||||
export async function update() {
|
||||
await gen_validators_tezos();
|
||||
export class TezosAction implements ActionInterface {
|
||||
getName(): string { return "Tezos"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
return [
|
||||
{
|
||||
getName: () => { return "Tezos validator assets must have correct format"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const assets = getChainValidatorsAssets(Tezos);
|
||||
assets.forEach(addr => {
|
||||
if (!(eztz.crypto.checkAddress(addr))) {
|
||||
error += `Address ${addr} must be valid Tezos address'\n`;
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
fix = null;
|
||||
|
||||
async update(): Promise<void> {
|
||||
await gen_validators_tezos();
|
||||
}
|
||||
}
|
||||
|
|
67
script/action/tron.ts
Normal file
67
script/action/tron.ts
Normal file
|
@ -0,0 +1,67 @@
|
|||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { getChainAssetsPath } from "../common/repo-structure";
|
||||
import { Tron } from "../common/blockchains";
|
||||
import { readDirSync, isPathExistsSync } from "../common/filesystem";
|
||||
import { getChainAssetLogoPath, getChainValidatorsAssets } from "../common/repo-structure";
|
||||
import { isLowerCase, isUpperCase } from "../common/types";
|
||||
import { isLogoOK } from "../common/image";
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
export function isTRC10(str: string): boolean {
|
||||
return (/^\d+$/.test(str));
|
||||
}
|
||||
|
||||
export function isTRC20(address: string): boolean {
|
||||
return address.length == 34 &&
|
||||
address.startsWith("T") &&
|
||||
isLowerCase(address) == false &&
|
||||
isUpperCase(address) == false;
|
||||
}
|
||||
|
||||
export class TronAction implements ActionInterface {
|
||||
getName(): string { return "Tron chain"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
return [
|
||||
{
|
||||
getName: () => { return "Tron assets should be TRC10 or TRC20, logo of correct size"; },
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const path = getChainAssetsPath(Tron);
|
||||
const assets = readDirSync(path);
|
||||
await bluebird.each(assets, async (asset) => {
|
||||
if (!isTRC10(asset) && !isTRC20(asset)) {
|
||||
error += `Asset ${asset} at path '${path}' is not TRC10 nor TRC20\n`;
|
||||
}
|
||||
const assetsLogoPath = getChainAssetLogoPath(Tron, asset);
|
||||
if (!isPathExistsSync(assetsLogoPath)) {
|
||||
error += `Missing file at path '${assetsLogoPath}'\n`;
|
||||
}
|
||||
const [isOk, sizeMsg] = await isLogoOK(assetsLogoPath);
|
||||
if (!isOk) {
|
||||
error += sizeMsg + "\n";
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
},
|
||||
{
|
||||
getName: () => { return "Tron validator assets must have correct format"},
|
||||
check: async () => {
|
||||
var error: string = "";
|
||||
const assets = getChainValidatorsAssets(Tron);
|
||||
assets.forEach(addr => {
|
||||
if (!(isTRC20(addr))) {
|
||||
error += `Address ${addr} should be TRC20 address'\n`;
|
||||
}
|
||||
});
|
||||
return error;
|
||||
}
|
||||
}
|
||||
];
|
||||
}
|
||||
|
||||
fix = null;
|
||||
|
||||
update = null;
|
||||
}
|
|
@ -1,22 +1,123 @@
|
|||
import * as eth_forks from "./eth-forks";
|
||||
import * as logo_size from "./logo-size";
|
||||
import * as validators from "./validators";
|
||||
import * as whitelists from "./whitelists";
|
||||
import * as binance from "./binance";
|
||||
import * as coinmarketcap from "../../pricing/coinmarketcap/script";
|
||||
import * as tezos from "./tezos";
|
||||
import { BinanceAction } from "./binance";
|
||||
import { CosmosAction } from "./cosmos";
|
||||
import { EthForks } from "./eth-forks";
|
||||
import { FoldersFiles } from "./folders-and-files";
|
||||
import { JsonAction } from "./json";
|
||||
import { KavaAction } from "./kava";
|
||||
import { LogoSize } from "./logo-size";
|
||||
import { TerraAction } from "./terra";
|
||||
import { TezosAction } from "./tezos";
|
||||
import { TronAction } from "./tron";
|
||||
import { Validators } from "./validators";
|
||||
import { WavesAction } from "./waves";
|
||||
import { Whitelist } from "./whitelists";
|
||||
import { Coinmarketcap } from "../../pricing/coinmarketcap/cmc-action";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import * as chalk from 'chalk';
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
export function fixAll() {
|
||||
const actionList: ActionInterface[] = [
|
||||
new FoldersFiles(),
|
||||
new EthForks(),
|
||||
new LogoSize(),
|
||||
new Whitelist(),
|
||||
new Validators(),
|
||||
new JsonAction(),
|
||||
// chains:
|
||||
new BinanceAction(),
|
||||
new CosmosAction(),
|
||||
new KavaAction(),
|
||||
new TerraAction(),
|
||||
new TezosAction(),
|
||||
new TronAction(),
|
||||
new WavesAction(),
|
||||
new Coinmarketcap()
|
||||
];
|
||||
|
||||
async function checkStepList(steps: CheckStepInterface[]): Promise<number> {
|
||||
var returnCode = 0;
|
||||
await bluebird.each(steps, async (step) => {
|
||||
try {
|
||||
//console.log(` Running check step '${step.getName()}'...`);
|
||||
const error = await step.check();
|
||||
if (error && error.length > 0) {
|
||||
console.log(`- ${chalk.red('X')} '${step.getName()}': '${error}'`);
|
||||
returnCode = 1;
|
||||
} else {
|
||||
console.log(`- ${chalk.green('✓')} '${step.getName()}' OK`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`- ${chalk.red('X')} '${step.getName()}': Caught error: ${error.message}`);
|
||||
returnCode = 2;
|
||||
}
|
||||
});
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
async function checkActionList(actions: ActionInterface[]): Promise<number> {
|
||||
console.log("Running checks...");
|
||||
var returnCode = 0;
|
||||
await bluebird.each(actions, async (action) => {
|
||||
try {
|
||||
if (action.getChecks) {
|
||||
const steps = action.getChecks();
|
||||
if (steps && steps.length > 0) {
|
||||
console.log(` Action '${action.getName()}' has ${steps.length} check steps`);
|
||||
const ret1 = await checkStepList(steps);
|
||||
if (ret1 != 0) {
|
||||
returnCode = ret1;
|
||||
} else {
|
||||
console.log(`- ${chalk.green('✓')} Action '${action.getName()}' OK, all ${steps.length} steps`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`- ${chalk.red('X')} '${action.getName()}' Caught error: ${error.message}`);
|
||||
returnCode = 3;
|
||||
}
|
||||
});
|
||||
console.log(`All checks done, returnCode ${returnCode}`);
|
||||
return returnCode;
|
||||
}
|
||||
|
||||
async function fixByList(actions: ActionInterface[]) {
|
||||
console.log("Running fixes...");
|
||||
eth_forks.fix();
|
||||
logo_size.fix();
|
||||
validators.fix();
|
||||
whitelists.fix();
|
||||
await bluebird.each(actions, async (action) => {
|
||||
try {
|
||||
if (action.fix) {
|
||||
console.log(`Fix '${action.getName()}':`);
|
||||
await action.fix();
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Caught error: ${error.message}`);
|
||||
}
|
||||
});
|
||||
console.log("All fixes done.");
|
||||
}
|
||||
|
||||
export function updateAll() {
|
||||
async function updateByList(actions: ActionInterface[]) {
|
||||
console.log("Running updates (using external data sources) ...");
|
||||
tezos.update();
|
||||
binance.update();
|
||||
coinmarketcap.update();
|
||||
await bluebird.each(actions, async (action) => {
|
||||
try {
|
||||
if (action.update) {
|
||||
console.log(`Update '${action.getName()}':`);
|
||||
await action.update();
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Caught error: ${error.message}`);
|
||||
}
|
||||
});
|
||||
console.log("All updates done.");
|
||||
}
|
||||
|
||||
export async function checkAll(): Promise<number> {
|
||||
return await checkActionList(actionList);
|
||||
}
|
||||
|
||||
export async function fixAll() {
|
||||
await fixByList(actionList);
|
||||
}
|
||||
|
||||
export async function updateAll() {
|
||||
await updateByList(actionList);
|
||||
}
|
||||
|
|
|
@ -1,6 +1,16 @@
|
|||
import { stakingChains } from "../common/blockchains";
|
||||
import { getChainValidatorsListPath } from "../common/repo-structure";
|
||||
import { formatSortJsonFile } from "../common/json";
|
||||
import {
|
||||
getChainValidatorsListPath,
|
||||
getChainValidatorAssetLogoPath,
|
||||
getChainValidatorsAssets
|
||||
} from "../common/repo-structure";
|
||||
import { isPathExistsSync } from "../common/filesystem";
|
||||
import { formatSortJsonFile, readJsonFile } from "../common/json";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { isValidJSON } from "../common/json";
|
||||
import { ValidatorModel } from "../common/validator-models";
|
||||
import { isLogoOK } from "../common/image";
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
function formatValidators() {
|
||||
stakingChains.forEach(chain => {
|
||||
|
@ -9,6 +19,84 @@ function formatValidators() {
|
|||
})
|
||||
}
|
||||
|
||||
export async function fix() {
|
||||
formatValidators();
|
||||
function getChainValidatorsList(chain: string): ValidatorModel[] {
|
||||
return readJsonFile(getChainValidatorsListPath(chain));
|
||||
}
|
||||
|
||||
function isValidatorHasAllKeys(val: ValidatorModel): boolean {
|
||||
return typeof val.id === "string"
|
||||
&& typeof val.name === "string"
|
||||
&& typeof val.description === "string"
|
||||
&& typeof val.website === "string";
|
||||
}
|
||||
|
||||
export class Validators implements ActionInterface {
|
||||
getName(): string { return "Validators"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
var steps = [
|
||||
{
|
||||
getName: () => { return "Make sure tests added for new staking chain"},
|
||||
check: async () => {
|
||||
if (stakingChains.length != 7) {
|
||||
return `Wrong number of staking chains ${stakingChains.length}`;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
},
|
||||
];
|
||||
stakingChains.forEach(chain => {
|
||||
steps.push(
|
||||
{
|
||||
getName: () => { return `Make sure chain ${chain} has valid list file, has logo`},
|
||||
check: async () => {
|
||||
const validatorsListPath = getChainValidatorsListPath(chain);
|
||||
if (!isValidJSON(validatorsListPath)) {
|
||||
return `Not valid Json file at path ${validatorsListPath}`;
|
||||
}
|
||||
|
||||
var error: string = "";
|
||||
const validatorsList = getChainValidatorsList(chain);
|
||||
const chainValidatorsAssetsList = getChainValidatorsAssets(chain);
|
||||
await bluebird.each(validatorsList, async (val: ValidatorModel) => {
|
||||
if (!isValidatorHasAllKeys(val)) {
|
||||
error += `Some key and/or type missing for validator ${JSON.stringify(val)}\n`;
|
||||
}
|
||||
|
||||
const id = val.id;
|
||||
const path = getChainValidatorAssetLogoPath(chain, id);
|
||||
if (!isPathExistsSync(path)) {
|
||||
error += `Chain ${chain} asset ${id} logo must be present at path ${path}\n`;
|
||||
}
|
||||
const [isOk, logoMsg] = await isLogoOK(path);
|
||||
if (!isOk) {
|
||||
error += logoMsg + "\n";
|
||||
}
|
||||
|
||||
// Make sure validator has corresponding logo
|
||||
if (!(chainValidatorsAssetsList.indexOf(id) >= 0)) {
|
||||
error += `Expecting image asset for validator ${id} on chain ${chain}\n`;
|
||||
}
|
||||
});
|
||||
|
||||
// Make sure validator asset logo has corresponding info
|
||||
chainValidatorsAssetsList.forEach(valAssetLogoID => {
|
||||
if (validatorsList.filter(v => v.id === valAssetLogoID).length != 1) {
|
||||
error += `Expect validator logo ${valAssetLogoID} to have info\n`;
|
||||
}
|
||||
});
|
||||
|
||||
return error;
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
return steps;
|
||||
}
|
||||
|
||||
async fix(): Promise<void> {
|
||||
formatValidators();
|
||||
}
|
||||
|
||||
update = null;
|
||||
}
|
||||
|
|
script/action/waves.ts (new file, 37 lines)

@@ -0,0 +1,37 @@
import { Waves } from "../common/blockchains";
import { getChainValidatorsAssets } from "../common/repo-structure";
import { ActionInterface, CheckStepInterface } from "./interface";
import { isLowerCase, isUpperCase } from "../common/types";

export function isWavesAddress(address: string): boolean {
    return address.length == 35 &&
        address.startsWith("3P") &&
        isLowerCase(address) == false &&
        isUpperCase(address) == false;
}

export class WavesAction implements ActionInterface {
    getName(): string { return "Waves chain"; }

    getChecks(): CheckStepInterface[] {
        return [
            {
                getName: () => { return "Waves validator assets must have correct format"},
                check: async () => {
                    var error: string = "";
                    const assets = getChainValidatorsAssets(Waves);
                    assets.forEach(addr => {
                        if (!(isWavesAddress(addr))) {
                            error += `Address ${addr} should be a Waves address'\n`;
                        }
                    });
                    return error;
                }
            },
        ];
    }

    fix = null;

    update = null;
}
@ -1,39 +1,79 @@
|
|||
import { chainsWithBlacklist } from "../common/blockchains";
|
||||
import { getChainAssetsList, getChainWhitelistPath, getChainBlacklistPath } from "../common/repo-structure";
|
||||
import { readFileSync, writeFileSync } from "../common/filesystem";
|
||||
import { sortElements, makeUnique, arrayDiff } from "../common/types";
|
||||
import { arrayDiff, findCommonElementOrDuplicate } from "../common/types";
|
||||
import { ActionInterface, CheckStepInterface } from "./interface";
|
||||
import { formatSortJson, formatUniqueSortJson } from "../common/json";
|
||||
import * as bluebird from "bluebird";
|
||||
|
||||
function formatWhiteBlackList() {
|
||||
chainsWithBlacklist.forEach(async chain => {
|
||||
const assets = getChainAssetsList(chain);
|
||||
|
||||
const whitelistPath = getChainWhitelistPath(chain);
|
||||
const blacklistPath = getChainBlacklistPath(chain);
|
||||
async function checkUpdateWhiteBlackList(chain: string, checkOnly: boolean ): Promise<[boolean, string]> {
|
||||
let wrongMsg = "";
|
||||
const assets = getChainAssetsList(chain);
|
||||
|
||||
const currentWhitelist = JSON.parse(readFileSync(whitelistPath));
|
||||
const currentBlacklist = JSON.parse(readFileSync(blacklistPath));
|
||||
|
||||
let newBlackList = [];
|
||||
// Some chains required pulling lists from other sources
|
||||
// switch (chain) {
|
||||
// case Ethereum:
|
||||
// const nftList = await getOpenseaCollectionAddresses()
|
||||
// newBlackList = currentBlacklist.concat(nftList)
|
||||
// break;
|
||||
// default:
|
||||
// newBlackList = newBlackList.concat(currentBlacklist)
|
||||
// break;
|
||||
// }
|
||||
|
||||
const removedAssets = arrayDiff(currentWhitelist, assets);
|
||||
newBlackList = currentBlacklist.concat(removedAssets);
|
||||
|
||||
writeFileSync(whitelistPath, JSON.stringify(sortElements(assets), null, 4));
|
||||
writeFileSync(blacklistPath, JSON.stringify(makeUnique(sortElements(newBlackList)), null, 4));
|
||||
console.log(`Updated white and blacklists for chain ${chain}`);
|
||||
})
|
||||
const whitelistPath = getChainWhitelistPath(chain);
|
||||
const blacklistPath = getChainBlacklistPath(chain);
|
||||
|
||||
const currentWhitelistText = readFileSync(whitelistPath);
|
||||
const currentBlacklistText = readFileSync(blacklistPath);
|
||||
const currentWhitelist = JSON.parse(currentWhitelistText);
|
||||
const currentBlacklist = JSON.parse(currentBlacklistText);
|
||||
|
||||
const commonElementsOrDuplicated = findCommonElementOrDuplicate(currentWhitelist, currentBlacklist);
|
||||
if (commonElementsOrDuplicated && commonElementsOrDuplicated.length > 0) {
|
||||
wrongMsg += `Blacklist and whitelist for chain ${chain} should have no common elements or duplicates, found ${commonElementsOrDuplicated.length}, ${commonElementsOrDuplicated[0]}\n`;
|
||||
}
|
||||
const removedAssets = arrayDiff(currentWhitelist, assets);
|
||||
if (removedAssets && removedAssets.length > 0) {
|
||||
wrongMsg += `Whitelist for chain ${chain} contains non-exitent assets, found ${removedAssets.length}, ${removedAssets[0]}\n`;
|
||||
}
|
||||
|
||||
const niceWhite = formatSortJson(assets);
|
||||
if (niceWhite !== currentWhitelistText) {
|
||||
wrongMsg += `Whitelist for chain ${chain} has inconsistent content of formatting\n`;
|
||||
}
|
||||
const newBlackList = currentBlacklist.concat(removedAssets);
|
||||
const niceBlack = formatUniqueSortJson(newBlackList);
|
||||
if (niceBlack !== currentBlacklistText) {
|
||||
wrongMsg += `Blacklist for chain ${chain} has inconsistent content of formatting\n`;
|
||||
}
|
||||
|
||||
if (wrongMsg.length > 0) {
|
||||
if (!checkOnly) {
|
||||
// update
|
||||
writeFileSync(whitelistPath, niceWhite);
|
||||
writeFileSync(blacklistPath, niceBlack);
|
||||
console.log(`Updated white and blacklists for chain ${chain}`);
|
||||
}
|
||||
}
|
||||
return [(wrongMsg.length == 0), wrongMsg];
|
||||
}
|
||||
|
||||
export async function fix() {
|
||||
formatWhiteBlackList();
|
||||
export class Whitelist implements ActionInterface {
|
||||
getName(): string { return "Whitelists"; }
|
||||
|
||||
getChecks(): CheckStepInterface[] {
|
||||
const steps: CheckStepInterface[] = [];
|
||||
chainsWithBlacklist.forEach(chain => {
|
||||
steps.push(
|
||||
{
|
||||
getName: () => { return `Whitelist and blacklist for ${chain} should be consistent with assets`},
|
||||
check: async () => {
|
||||
const [isOK, msg] = await checkUpdateWhiteBlackList(chain, true);
|
||||
if (!isOK) {
|
||||
return msg;
|
||||
}
|
||||
return "";
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
return steps;
|
||||
}
|
||||
|
||||
|
||||
async fix(): Promise<void> {
|
||||
await bluebird.each(chainsWithBlacklist, async (chain) => await checkUpdateWhiteBlackList(chain, false));
|
||||
}
|
||||
|
||||
update = null;
|
||||
}
|
||||
|
|
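For context, the Whitelist action above follows the ActionInterface pattern of the new check infrastructure: getChecks() returns read-only steps (checkUpdateWhiteBlackList with checkOnly = true), while fix() re-runs the same routine with checkOnly = false so it may rewrite the list files. A minimal driver sketch, assuming only the interface shape visible in this diff (check() resolves to "" on success or an error message on failure); the driver itself is illustrative, not code from the repository:

import { ActionInterface, CheckStepInterface } from "./script/action/interface";

// Illustrative driver: run every check step of one action and count failures.
async function runActionChecks(action: ActionInterface): Promise<number> {
    const steps: CheckStepInterface[] = action.getChecks ? action.getChecks() : [];
    let errors = 0;
    for (const step of steps) {
        const error = await step.check();
        if (error && error.length > 0) {
            console.log(`Check failed: ${action.getName()} / ${step.getName()}: ${error}`);
            errors += 1;
        }
    }
    return errors;
}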
45
script/common/asset-info.ts
Normal file
|
@ -0,0 +1,45 @@
|
|||
import { getChainAssetInfoPath } from "./repo-structure";
|
||||
import { readFileSync, isPathExistsSync } from "./filesystem";
|
||||
import { arrayDiff } from "./types";
|
||||
import { isValidJSON } from "../common/json";
|
||||
|
||||
const requiredKeys = ["explorer", "name", "website", "short_description"];
|
||||
|
||||
function isAssetInfoHasAllKeys(path: string): [boolean, string] {
|
||||
const info = JSON.parse(readFileSync(path));
|
||||
const infoKeys = Object.keys(info);
|
||||
|
||||
const hasAllKeys = requiredKeys.every(k => info.hasOwnProperty(k));
|
||||
|
||||
if (!hasAllKeys) {
|
||||
return [false, `Info at path '${path}' missing next key(s): ${arrayDiff(requiredKeys, infoKeys)}`];
|
||||
}
|
||||
|
||||
const isKeysCorrectType =
|
||||
typeof info.explorer === "string" && info.explorer != ""
|
||||
&& typeof info.name === "string" && info.name != ""
|
||||
&& typeof info.website === "string"
|
||||
&& typeof info.short_description === "string";
|
||||
|
||||
return [isKeysCorrectType, `Check keys '${requiredKeys}' vs. '${infoKeys}'`];
|
||||
}
|
||||
|
||||
export function isAssetInfoOK(chain: string, address: string): [boolean, string] {
|
||||
const assetInfoPath = getChainAssetInfoPath(chain, address);
|
||||
if (!isPathExistsSync(assetInfoPath)) {
|
||||
return [true, `Info file doesn't exist, no need to check`]
|
||||
}
|
||||
|
||||
if (!isValidJSON(assetInfoPath)) {
|
||||
console.log(`JSON at path: '${assetInfoPath}' is invalid`);
|
||||
return [false, `JSON at path: '${assetInfoPath}' is invalid`];
|
||||
}
|
||||
|
||||
const [hasAllKeys, msg] = isAssetInfoHasAllKeys(assetInfoPath);
|
||||
if (!hasAllKeys) {
|
||||
console.log(msg);
|
||||
return [false, msg];
|
||||
}
|
||||
|
||||
return [true, ''];
|
||||
}
|
|
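isAssetInfoOK returns a [boolean, string] tuple instead of throwing, so callers can collect messages and keep checking. A small usage sketch; the chain and address below are only example values, and the import path follows the file location shown above:

import { isAssetInfoOK } from "./script/common/asset-info";

// Validate one asset's optional info.json and report the problem, if any.
const [infoOk, infoMsg] = isAssetInfoOK("ethereum", "0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee");
if (!infoOk) {
    console.log(`info.json problem: ${infoMsg}`);
}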
@ -11,7 +11,7 @@ export const readDirSync = (path: string): string[] => fs.readdirSync(path);
|
|||
export const isPathExistsSync = (path: string): boolean => fs.existsSync(path);
|
||||
export const getFileSizeInKilobyte = (path: string): number => fs.statSync(path).size / 1000;
|
||||
|
||||
export function execRename(command: string, cwd: string) {
|
||||
function execRename(command: string, cwd: string) {
|
||||
console.log(`Running command ${command}`);
|
||||
execSync(command, {encoding: "utf-8", cwd: cwd});
|
||||
}
|
||||
|
@ -24,3 +24,20 @@ export function gitMove(path: string, oldName: string, newName: string) {
|
|||
console.log(`Renaming file or folder at path ${path}: ${oldName} => ${newName}`);
|
||||
execRename(gitMoveCommand(oldName, newName), path);
|
||||
}
|
||||
|
||||
export function findFiles(base: string, ext: string, files: string[] = [], result: string[] = []): string[] {
|
||||
files = fs.readdirSync(base) || files;
|
||||
result = result || [];
|
||||
|
||||
files.forEach(file => {
|
||||
var newbase = path.join(base, file);
|
||||
if (fs.statSync(newbase).isDirectory()) {
|
||||
result = findFiles(newbase, ext, fs.readdirSync(newbase), result);
|
||||
} else {
|
||||
if (file.substr(-1*(ext.length+1)) == '.' + ext) {
|
||||
result.push(newbase);
|
||||
}
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
|
|
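findFiles recursively collects files with a given extension; the migrated index test later in this diff uses it to gather every JSON file under the blockchains and pricing folders. A sketch of that call, assuming the path constants exported from repo-structure shown further below:

import { findFiles } from "./script/common/filesystem";
import { chainsPath, pricingFolderPath } from "./script/common/repo-structure";

// Collect every .json file from both folder trees.
const jsonFiles: string[] = [
    ...findFiles(chainsPath, "json"),
    ...findFiles(pricingFolderPath, "json")
];
console.log(`Found ${jsonFiles.length} JSON files`);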
@ -8,8 +8,8 @@ import {
|
|||
import * as chalk from 'chalk';
|
||||
import * as config from "../common/config";
|
||||
|
||||
//export const minLogoWidth = 64;
|
||||
//export const minLogoHeight = 64;
|
||||
export const minLogoWidth = config.getConfig("image_min_logo_width", 64);
|
||||
export const minLogoHeight = config.getConfig("image_min_logo_height", 64);
|
||||
export const maxLogoWidth = config.getConfig("image_max_logo_width", 512);
|
||||
export const maxLogoHeight = config.getConfig("image_max_logo_height", 512);
|
||||
export const maxLogoSizeInKilobyte = config.getConfig("image_logo_size_kb", 100);
|
||||
|
@ -18,6 +18,11 @@ export function isDimensionTooLarge(width: number, height: number): boolean {
|
|||
return (width > maxLogoWidth) || (height > maxLogoHeight);
|
||||
}
|
||||
|
||||
export function isDimensionOK(width: number, height: number): boolean {
|
||||
return (width <= maxLogoWidth) && (height <= maxLogoHeight) &&
|
||||
(width >= minLogoWidth) && (height >= minLogoHeight);
|
||||
}
|
||||
|
||||
export function calculateTargetSize(srcWidth: number, srcHeight: number, targetWidth: number, targetHeight: number): {width: number, height: number} {
|
||||
if (srcWidth == 0 || srcHeight == 0) {
|
||||
return {width: targetWidth, height: targetHeight};
|
||||
|
@ -29,14 +34,43 @@ export function calculateTargetSize(srcWidth: number, srcHeight: number, targetW
|
|||
};
|
||||
}
|
||||
|
||||
// check logo dimensions (pixel) and size (kilobytes)
|
||||
export async function isLogoOK(path: string): Promise<[boolean, string]> {
|
||||
var [isOK, msg] = await isLogoDimensionOK(path);
|
||||
if (!isOK) {
|
||||
return [false, msg];
|
||||
}
|
||||
[isOK, msg] = await isLogoSizeOK(path);
|
||||
if (!isOK) {
|
||||
return [false, msg];
|
||||
}
|
||||
return [true, ""];
|
||||
}
|
||||
|
||||
const getImageDimensions = (path: string) => image_size.imageSize(path);
|
||||
|
||||
async function isLogoDimensionOK(path: string): Promise<[boolean, string]> {
|
||||
const { width, height } = getImageDimensions(path)
|
||||
if (isDimensionOK(width, height)) {
|
||||
return [true, ""];
|
||||
}
|
||||
return [false, `Image at path ${path} must have dimensions: min: ${minLogoWidth}x${minLogoHeight} and max: ${maxLogoWidth}x${maxLogoHeight}, instead got ${width}x${height}`];
|
||||
}
|
||||
|
||||
async function compressTinyPNG(path: string) {
|
||||
console.log(`Compressing image via tinypng at path ${path}`);
|
||||
const source = await tinify.fromFile(path);
|
||||
await source.toFile(path);
|
||||
}
|
||||
|
||||
async function isLogoSizeOK(path: string): Promise<[boolean, string]> {
|
||||
const sizeKilobyte = getFileSizeInKilobyte(path);
|
||||
if (sizeKilobyte > maxLogoSizeInKilobyte) {
|
||||
return [false, `Logo ${path} is too large, ${sizeKilobyte} kB instead of max ${maxLogoSizeInKilobyte} kB`];
|
||||
}
|
||||
return [true, ''];
|
||||
}
|
||||
|
||||
// return true if image updated
|
||||
export async function resizeIfTooLarge(path: string): Promise<boolean> {
|
||||
let updated: boolean = false;
|
||||
|
|
|
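isLogoOK chains the dimension and size checks and reports the first failure as a [boolean, string] tuple, with the limits coming from config (64x64 to 512x512 and 100 kB by default). A usage sketch; the chain name is an arbitrary example:

import { isLogoOK } from "./script/common/image";
import { getChainLogoPath } from "./script/common/repo-structure";

// Check one chain logo against the configured dimension and size limits.
async function checkChainLogo(chain: string): Promise<void> {
    const [logoOk, logoMsg] = await isLogoOK(getChainLogoPath(chain));
    if (!logoOk) {
        console.log(logoMsg);
    }
}

checkChainLogo("ethereum");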
@ -2,22 +2,44 @@ import {
|
|||
readFileSync,
|
||||
writeFileSync
|
||||
} from "./filesystem";
|
||||
import { sortElements } from "./types";
|
||||
import { sortElements, makeUnique } from "./types";
|
||||
|
||||
export function isValidJSON(path: string): boolean {
|
||||
try {
|
||||
let rawdata = readFileSync(path);
|
||||
JSON.parse(rawdata);
|
||||
return true;
|
||||
} catch {
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function formatJson(content: any) {
|
||||
return JSON.stringify(content, null, 4);
|
||||
}
|
||||
|
||||
export function formatSortJson(content: any) {
|
||||
return JSON.stringify(sortElements(content), null, 4);
|
||||
}
|
||||
|
||||
export function formatUniqueSortJson(content: any) {
|
||||
return JSON.stringify(makeUnique(sortElements(content)), null, 4);
|
||||
}
|
||||
|
||||
export function formatJsonFile(filename: string, silent: boolean = false) {
|
||||
const jsonContent = JSON.parse(readFileSync(filename));
|
||||
writeFileSync(filename, JSON.stringify(jsonContent, null, 4));
|
||||
if (!silent) {
|
||||
console.log(`Formatted json file ${filename}`);
|
||||
}
|
||||
writeFileSync(filename, formatJson(JSON.parse(readFileSync(filename))));
|
||||
console.log(`Formatted json file ${filename}`);
|
||||
}
|
||||
|
||||
export function formatSortJsonFile(filename: string) {
|
||||
const jsonContent = JSON.parse(readFileSync(filename));
|
||||
writeFileSync(filename, JSON.stringify(sortElements(jsonContent), null, 4));
|
||||
writeFileSync(filename, formatSortJson(JSON.parse(readFileSync(filename))));
|
||||
console.log(`Formatted json file ${filename}`);
|
||||
}
|
||||
|
||||
export function readJsonFile(path: string): any {
|
||||
return JSON.parse(readFileSync(path));
|
||||
}
|
||||
|
||||
export function writeJsonFile(path: string, data: any) {
|
||||
writeFileSync(path, JSON.stringify(data, null, 4));
|
||||
}
|
||||
|
|
|
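formatSortJson sorts before serializing, and formatUniqueSortJson additionally removes duplicates; the whitelist check earlier in this diff relies on these to compare the on-disk text against a canonically formatted version. A small example of the difference (the symbols are made up):

import { formatSortJson, formatUniqueSortJson } from "./script/common/json";

const symbols = ["bnb", "ada", "bnb"];
console.log(formatSortJson(symbols));        // sorted, 4-space-indented JSON: ["ada", "bnb", "bnb"]
console.log(formatUniqueSortJson(symbols));  // sorted and deduplicated: ["ada", "bnb"]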
@ -3,6 +3,7 @@ import {
|
|||
isPathExistsSync,
|
||||
readDirSync
|
||||
} from "./filesystem";
|
||||
import * as config from "./config";
|
||||
|
||||
export const logoName = `logo`;
|
||||
export const infoName = `info`;
|
||||
|
@ -15,6 +16,14 @@ const whiteList = `whitelist.${jsonExtension}`;
|
|||
const blackList = `blacklist.${jsonExtension}`;
|
||||
export const validatorsList = `${listName}.${jsonExtension}`
|
||||
|
||||
export const assetFolderAllowedFiles = [logoFullName, infoFullName];
|
||||
export const chainFolderAllowedFiles = [
|
||||
"assets",
|
||||
whiteList,
|
||||
blackList,
|
||||
"validators",
|
||||
infoName
|
||||
]
|
||||
export const chainsPath: string = path.join(process.cwd(), '/blockchains');
|
||||
export const getChainPath = (chain: string): string => `${chainsPath}/${chain}`;
|
||||
export const getChainLogoPath = (chain: string): string => `${getChainPath(chain)}/info/${logoFullName}`;
|
||||
|
@ -24,13 +33,19 @@ export const getChainAssetLogoPath = (chain: string, asset: string): string => `
|
|||
export const getChainAssetInfoPath = (chain: string, asset: string): string => `${getChainAssetPath(chain, asset)}/${infoFullName}`;
|
||||
export const getChainWhitelistPath = (chain: string): string => `${getChainPath(chain)}/${whiteList}`;
|
||||
export const getChainBlacklistPath = (chain: string): string => `${getChainPath(chain)}/${blackList}`;
|
||||
export const pricingFolderPath = path.join(process.cwd(), '/pricing');
|
||||
|
||||
export const getChainValidatorsPath = (chain: string): string => `${getChainPath(chain)}/validators`;
|
||||
export const getChainValidatorsListPath = (chain: string): string => `${getChainValidatorsPath(chain)}/list.${jsonExtension}`;
|
||||
export const getChainValidatorsListPath = (chain: string): string => `${getChainValidatorsPath(chain)}/${validatorsList}`;
|
||||
export const getChainValidatorsAssetsPath = (chain: string): string => `${getChainValidatorsPath(chain)}/assets`
|
||||
export const getChainValidatorAssetLogoPath = (chain: string, asset: string): string => `${getChainValidatorsAssetsPath(chain)}/${asset}/${logoFullName}`
|
||||
|
||||
export const isChainAssetInfoExistSync = (chain: string, address: string) => isPathExistsSync(getChainAssetInfoPath(chain, address));
|
||||
|
||||
export const getChainFolderFilesList = (chain: string) => readDirSync(getChainPath(chain))
|
||||
export const getChainAssetsList = (chain: string): string[] => readDirSync(getChainAssetsPath(chain));
|
||||
export const getChainAssetFilesList = (chain: string, address: string) => readDirSync(getChainAssetPath(chain, address));
|
||||
export const getChainValidatorsAssets = (chain: string): string[] => readDirSync(getChainValidatorsAssetsPath(chain));
|
||||
|
||||
const defaultRootDirAllowedFiles = [".github", "blockchains", "dapps", "media", "script", "test", ".gitignore", "LICENSE", "package-lock.json", "package.json", "README.md", ".git", "Gemfile", "Gemfile.lock"];
|
||||
export const rootDirAllowedFiles = config.getConfig("folders_rootdir_allowed_files", defaultRootDirAllowedFiles);
|
||||
|
|
|
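The path helpers compose every repository location from the chain name and asset address, so callers never build paths by hand. For instance (the address is only an example; paths are relative to the working directory):

import { getChainAssetLogoPath, getChainWhitelistPath } from "./script/common/repo-structure";

// Roughly <cwd>/blockchains/ethereum/assets/0x7Bb0.../logo.png
console.log(getChainAssetLogoPath("ethereum", "0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee"));
// Roughly <cwd>/blockchains/ethereum/whitelist.json
console.log(getChainWhitelistPath("ethereum"));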
@ -1,3 +1,6 @@
|
|||
export const isLowerCase = (str: string): boolean => str.toLowerCase() === str;
|
||||
export const isUpperCase = (str: string): boolean => str.toUpperCase() === str;
|
||||
|
||||
export const mapList = arr => {
|
||||
return arr.reduce((acm, val) => {
|
||||
acm[val] = "";
|
||||
|
@ -27,3 +30,22 @@ export function arrayDiff(a: string[], b: string[]): string[] {
|
|||
const mappedB = mapList(b);
|
||||
return a.filter(e => !mappedB.hasOwnProperty(e));
|
||||
}
|
||||
|
||||
export function findDuplicate(list: string[]): string {
|
||||
let m = new Map<string, number>();
|
||||
let duplicate: string = null;
|
||||
list.forEach(val => {
|
||||
if (m.has(val)) {
|
||||
duplicate = val;
|
||||
} else {
|
||||
m.set(val, 0);
|
||||
}
|
||||
});
|
||||
return duplicate;
|
||||
}
|
||||
|
||||
// Check that two lists have no common elements, and no duplicates in either.
|
||||
// Do a single check: checking for duplicates in the concatenated list.
|
||||
export function findCommonElementOrDuplicate(list1: string[], list2: string[]) {
|
||||
return findDuplicate(list1.concat(list2));
|
||||
}
|
||||
|
|
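As the comment above notes, one duplicate scan over the concatenated list covers both properties at once: duplicates inside either list and overlap between them. It returns the offending element, or null when the lists are clean, for example:

import { findCommonElementOrDuplicate } from "./script/common/types";

console.log(findCommonElementOrDuplicate(["a", "b"], ["c", "d"])); // null: disjoint and duplicate-free
console.log(findCommonElementOrDuplicate(["a", "b"], ["b", "c"])); // "b": common element
console.log(findCommonElementOrDuplicate(["a", "a"], ["c", "d"])); // "a": duplicate within the first list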
26
script/common/validator-models.ts
Normal file
|
@ -0,0 +1,26 @@
|
|||
export interface ValidatorModel {
|
||||
id: string,
|
||||
name: string,
|
||||
description: string,
|
||||
website: string,
|
||||
staking: Staking
|
||||
payout: Payout
|
||||
status: ValidatorStatus
|
||||
}
|
||||
|
||||
interface Staking {
|
||||
freeSpace: number,
|
||||
minDelegation: number
|
||||
openForDelegation: boolean
|
||||
}
|
||||
|
||||
interface Payout {
|
||||
commission: number // in %
|
||||
payoutDelay: number // in cycles
|
||||
payoutPeriod: number
|
||||
}
|
||||
|
||||
interface ValidatorStatus {
|
||||
disabled: boolean;
|
||||
note: string;
|
||||
}
|
|
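A list.json entry conforming to ValidatorModel would look roughly like the following; every value here is hypothetical and only illustrates the field types (the id doubles as the validator's on-chain address and its folder name under validators/assets):

import { ValidatorModel } from "./script/common/validator-models";

// Hypothetical example entry; real ids are chain-specific validator addresses.
const exampleValidator: ValidatorModel = {
    id: "example-validator-address",
    name: "Example Validator",
    description: "Illustrative staking provider entry",
    website: "https://example.com",
    staking: { freeSpace: 1000000, minDelegation: 10, openForDelegation: true },
    payout: { commission: 10, payoutDelay: 0, payoutPeriod: 1 },
    status: { disabled: false, note: "" }
};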
@ -1,6 +1,11 @@
|
|||
{
|
||||
"image_max_logo_width": 512,
|
||||
"image_max_logo_height": 512,
|
||||
"image_min_logo_width": 64,
|
||||
"image_min_logo_height": 64,
|
||||
"image_logo_size_kb": 100,
|
||||
"binance_assets_url": "https://explorer.binance.org/api/v1/assets?page=1&rows=1000"
|
||||
"folders_rootdir_allowed_files": [".github", "blockchains", "dapps", "media", "node_modules", "script-old", "script", "test", ".gitignore", "azure-pipelines.yml", "jest.config.js", "LICENSE", "package-lock.json", "package.json", "README.md", ".git", "pricing", "Dangerfile", "Gemfile", "Gemfile.lock"],
|
||||
"binance_url_tokens2": "https://dex-atlantic.binance.org/api/v1/tokens?limit=1000",
|
||||
"binance_url_tokens8": "https://dex-atlantic.binance.org/api/v1/mini/tokens?limit=1000",
|
||||
"binance_url_token_assets": "https://explorer.binance.org/api/v1/assets?page=1&rows=1000"
|
||||
}
|
||||
|
|
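All of these keys are read through the config helper with a fallback default, as the image constants earlier in this diff show (for example image_max_logo_width falls back to 512 when the key is missing). Reading one of the new URL keys would look like this sketch, whose getConfig signature is inferred from that usage:

import * as config from "./script/common/config";

// Falls back to the same URL the config file ships with.
const binanceAssetsUrl: string = config.getConfig(
    "binance_url_token_assets",
    "https://explorer.binance.org/api/v1/assets?page=1&rows=1000"
);
console.log(`Binance token assets endpoint: ${binanceAssetsUrl}`);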
13
script/main/check.ts
Normal file
|
@ -0,0 +1,13 @@
|
|||
import { checkAll } from "../action/update-all";
|
||||
|
||||
export async function main() {
|
||||
try {
|
||||
const returnCode = await checkAll();
|
||||
process.exit(returnCode);
|
||||
} catch(err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
|
@ -1,8 +1,8 @@
|
|||
import { fixAll } from "../action/update-all";
|
||||
|
||||
export function main() {
|
||||
export async function main() {
|
||||
try {
|
||||
fixAll();
|
||||
await fixAll();
|
||||
} catch(err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import { updateAll } from "../action/update-all";
|
||||
|
||||
export function main() {
|
||||
export async function main() {
|
||||
try {
|
||||
updateAll();
|
||||
await updateAll();
|
||||
} catch(err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
|
|
|
@ -1,355 +0,0 @@
|
|||
import * as fs from "fs"
|
||||
import * as path from "path"
|
||||
import { ValidatorModel } from "./models";
|
||||
const axios = require('axios')
|
||||
const Web3 = require('web3')
|
||||
const web3 = new Web3('ws://localhost:8546');
|
||||
import { CoinType } from "@trustwallet/wallet-core";
|
||||
const sizeOf = require("image-size");
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
export const getChainName = (id: CoinType): string => CoinType.id(id) // 60 => ethereum
|
||||
export const Binance = getChainName(CoinType.binance)
|
||||
export const Classic = getChainName(CoinType.classic)
|
||||
export const Cosmos = getChainName(CoinType.cosmos)
|
||||
export const Ethereum = getChainName(CoinType.ethereum)
|
||||
export const GoChain = getChainName(CoinType.gochain)
|
||||
export const IoTeX = getChainName(CoinType.iotex)
|
||||
export const POA = getChainName(CoinType.poa)
|
||||
export const Tezos = getChainName(CoinType.tezos)
|
||||
export const ThunderCore = getChainName(CoinType.thundertoken)
|
||||
export const Terra = getChainName(CoinType.terra)
|
||||
export const TomoChain = getChainName(CoinType.tomochain)
|
||||
export const Tron = getChainName(CoinType.tron)
|
||||
export const Kava = "kava" // TODO add to kava to tw types
|
||||
export const Wanchain = getChainName(CoinType.wanchain)
|
||||
export const Waves = getChainName(CoinType.waves)
|
||||
export const Solana = "solana"
|
||||
|
||||
export const ethSidechains = [Ethereum, Classic, POA, TomoChain, GoChain, Wanchain, ThunderCore]
|
||||
export const stakingChains = [Tezos, Cosmos, IoTeX, Tron, Waves, Kava, Terra]
|
||||
|
||||
export const logoName = `logo`
|
||||
export const infoName = `info`
|
||||
export const listName = `list`
|
||||
|
||||
export const logoExtension = "png"
|
||||
export const jsonExtension = "json"
|
||||
|
||||
const whiteList = `whitelist.${jsonExtension}`
|
||||
const blackList = `blacklist.${jsonExtension}`
|
||||
|
||||
const validatorsList = `${listName}.${jsonExtension}`
|
||||
|
||||
export const logo = `${logoName}.${logoExtension}`
|
||||
export const info = `${infoName}.${jsonExtension}`
|
||||
|
||||
|
||||
export const root = './'
|
||||
export const chainsFolderPath = path.join(process.cwd(), '/blockchains')
|
||||
export const pricingFolderPath = path.join(process.cwd(), '/pricing')
|
||||
export const getChainLogoPath = (chain: string): string => `${chainsFolderPath}/${chain}/info/${logo}`
|
||||
export const getChainInfoPath = (chain: string): string => `${chainsFolderPath}/${chain}/info/${info}`
|
||||
export const getChainAssetInfoPath = (chain: string, address: string): string => `${chainsFolderPath}/${chain}/assets/${address}/${info}`
|
||||
export const getChainAssetsPath = (chain: string): string => `${chainsFolderPath}/${chain}/assets`
|
||||
export const getChainPath = (chain: string): string => `${chainsFolderPath}/${chain}`
|
||||
|
||||
export const minLogoWidth = 64
|
||||
export const minLogoHeight = 64
|
||||
export const maxLogoWidth = 512
|
||||
export const maxLogoHeight = 512
|
||||
|
||||
export const maxAssetLogoSizeInKilobyte = 100
|
||||
|
||||
export const getChainAssetPath = (chain: string, address: string) => `${getChainAssetsPath(chain)}/${address}`
|
||||
export const getAllChainsList = (): string[] => readDirSync(chainsFolderPath)
|
||||
export const getChainAssetLogoPath = (chain: string, address: string) => `${getChainAssetsPath(chain)}/${address}/${logo}`
|
||||
export const getChainAssetFilesList = (chain: string, address: string) => readDirSync(getChainAssetPath(chain, address))
|
||||
export const getChainFolderFilesList = (chain: string) => readDirSync(getChainPath(chain))
|
||||
export const getChainAssetsList = (chain: string): string[] => readDirSync(getChainAssetsPath(chain))
|
||||
export const getChainValidatorsPath = (chain: string): string => `${chainsFolderPath}/${chain}/validators`
|
||||
export const getChainValidatorsAssets = (chain: string): string[] => readDirSync(getChainValidatorsAssetsPath(chain))
|
||||
export const getChainValidatorsListPath = (chain: string): string => `${(getChainValidatorsPath(chain))}/list.${jsonExtension}`
|
||||
export const getChainValidatorsList = (chain: string): ValidatorModel[] => JSON.parse(readFileSync(`${(getChainValidatorsPath(chain))}/${validatorsList}`))
|
||||
export const getChainValidatorsAssetsPath = (chain: string): string => `${getChainValidatorsPath(chain)}/assets`
|
||||
export const getChainValidatorAssetLogoPath = (chain: string, asset: string): string => `${getChainValidatorsAssetsPath(chain)}/${asset}/${logo}`
|
||||
export const getChainWhitelistPath = (chain: string): string => `${chainsFolderPath}/${chain}/${whiteList}`
|
||||
export const getChainBlacklistPath = (chain: string): string => `${chainsFolderPath}/${chain}/${blackList}`
|
||||
export const getChainWhitelist = (chain: string): string[] => {
|
||||
if (isChainWhitelistExistSync(chain)) {
|
||||
return JSON.parse(readFileSync(getChainWhitelistPath(chain)))
|
||||
}
|
||||
return []
|
||||
}
|
||||
export const getChainBlacklist = (chain: string): string[] => {
|
||||
if (isChainBlacklistExistSync(chain)) {
|
||||
return JSON.parse(readFileSync(getChainBlacklistPath(chain)))
|
||||
}
|
||||
return []
|
||||
}
|
||||
export const getRootDirFilesList = (): string[] => readDirSync(root)
|
||||
|
||||
export const readDirSync = (path: string): string[] => fs.readdirSync(path)
|
||||
export const makeDirSync = (path: string) => fs.mkdirSync(path)
|
||||
export const isPathExistsSync = (path: string): boolean => fs.existsSync(path)
|
||||
export const isDirContainLogo = (path: string): boolean => fs.existsSync(`${path}/${logo}`)
|
||||
export const isChainWhitelistExistSync = (chain: string): boolean => isPathExistsSync(getChainWhitelistPath(chain))
|
||||
export const isChainBlacklistExistSync = (chain: string): boolean => isPathExistsSync(getChainBlacklistPath(chain))
|
||||
export const isChainInfoExistSync = (chain: string): boolean => isPathExistsSync(getChainInfoPath(chain))
|
||||
export const isChainAssetInfoExistSync = (chain: string, address: string) => isPathExistsSync(getChainAssetInfoPath(chain, address))
|
||||
export const readFileSync = (path: string) => fs.readFileSync(path, 'utf8')
|
||||
export const writeFileSync = (path: string, str: string) => fs.writeFileSync(path, str)
|
||||
export const writeJSONToPath = (path: string, data: any) => fs.writeFileSync(path, JSON.stringify(data, null, 4))
|
||||
|
||||
export const isLowerCase = (str: string): boolean => str.toLowerCase() === str
|
||||
export const isUpperCase = (str: string): boolean => str.toUpperCase() === str
|
||||
export const isChecksum = (address: string): boolean => web3.utils.checkAddressChecksum(address)
|
||||
export const toChecksum = (address: string): string => web3.utils.toChecksumAddress(address)
|
||||
|
||||
export const getFileName = (name: string): string => path.basename(name, path.extname(name))
|
||||
export const getFileExt = (name: string): string => name.slice((Math.max(0, name.lastIndexOf(".")) || Infinity) + 1)
|
||||
|
||||
export async function getBinanceTokenSymbols() {
|
||||
const bep2assets = await axios.get(`https://dex-atlantic.binance.org/api/v1/tokens?limit=1000`);
|
||||
const bep8assets = await axios.get(`https://dex-atlantic.binance.org/api/v1/mini/tokens?limit=1000`);
|
||||
return bep2assets.data.map(({ symbol }) => symbol)
|
||||
.concat(bep8assets.data.map(({ symbol }) => symbol));
|
||||
}
|
||||
|
||||
export const isTRC10 = (str: string): boolean => (/^\d+$/.test(str))
|
||||
export const isTRC20 = (address: string) => {
|
||||
return address.length == 34 &&
|
||||
address.startsWith("T") &&
|
||||
isLowerCase(address) == false &&
|
||||
isUpperCase(address) == false
|
||||
}
|
||||
|
||||
export const isEthereumAddress = (address: string): boolean => {
|
||||
return web3.utils.isAddress(address)
|
||||
}
|
||||
|
||||
export const isWavesAddress = (address: string) => {
|
||||
return address.length == 35 &&
|
||||
address.startsWith("3P") &&
|
||||
isLowerCase(address) == false &&
|
||||
isUpperCase(address) == false
|
||||
}
|
||||
|
||||
export const isSolanaAddress = (address: string) => {
|
||||
// return address.length == 44
|
||||
return true
|
||||
}
|
||||
|
||||
export const isPathDir = (path: string): boolean => {
|
||||
try {
|
||||
return fs.lstatSync(path).isDirectory()
|
||||
} catch (e) {
|
||||
console.log(`Path: ${path} is not a directory with error: ${e.message}`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export const isPathDirEmpthy = (path: string): boolean => {
|
||||
try {
|
||||
if (isPathDir(path)) {
|
||||
return fs.readdirSync(path).length == 0
|
||||
} else {
|
||||
false
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(`Error isPathDirEmpthy`, error)
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
export const removeDir = (path: string) => {
|
||||
fs.rmdirSync(path, {recursive: true})
|
||||
}
|
||||
|
||||
export const makeDirIfDoestExist = async (dirPath: string, dirName: string) => {
|
||||
const path = `${dirPath}/${dirName}`
|
||||
await fs.mkdir(path, {recursive: true}, (err) => {
|
||||
if (err) {
|
||||
console.error(`Error creating dir at path ${path} with result ${err}`)
|
||||
} else {
|
||||
console.log(`Created directory at ${path}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export const sortDesc = arr => arr.sort((a, b) => a - b)
|
||||
export const getUnique = arr => Array.from(new Set(arr))
|
||||
export const mapList = arr => {
|
||||
return arr.reduce((acm, val) => {
|
||||
acm[val] = ""
|
||||
return acm
|
||||
}, {})
|
||||
}
|
||||
|
||||
export function findDuplicate(list: string[]): string {
|
||||
let m = new Map<string, number>()
|
||||
let duplicate: string = null
|
||||
list.forEach(val => {
|
||||
if (m.has(val)) {
|
||||
duplicate = val
|
||||
} else {
|
||||
m.set(val, 0)
|
||||
}
|
||||
})
|
||||
return duplicate
|
||||
}
|
||||
|
||||
// Check that two lists have no common elements, and no duplicates in either.
|
||||
// Do a single check: checking for duplicates in the concatenated list.
|
||||
export function findCommonElementOrDuplicate(list1: string[], list2: string[]) {
|
||||
return findDuplicate(list1.concat(list2))
|
||||
}
|
||||
|
||||
export const getImageDimensions = (path: string) => sizeOf(path)
|
||||
|
||||
export function isLogoDimensionOK(path: string): [boolean, string] {
|
||||
const { width, height } = getImageDimensions(path)
|
||||
if (((width >= minLogoWidth && width <= maxLogoWidth) && (height >= minLogoHeight && height <= maxLogoHeight))) {
|
||||
return [true, '']
|
||||
} else {
|
||||
return [false, `Image at path ${path} must have dimensions: min:${minLogoWidth}x${minLogoHeight} and max:${maxLogoWidth}x${maxLogoHeight} instead ${width}x${height}`]
|
||||
}
|
||||
}
|
||||
|
||||
export function isLogoSizeOK(path: string): [boolean, string, number] {
|
||||
const sizeInKylobyte = getFileSizeInKilobyte(path)
|
||||
|
||||
if (sizeInKylobyte <= maxAssetLogoSizeInKilobyte) {
|
||||
return [true, ``, sizeInKylobyte]
|
||||
}
|
||||
return [false, `Logo at path ${path} with size ${sizeInKylobyte} exceeded max allowed size ${maxAssetLogoSizeInKilobyte} kB`, sizeInKylobyte]
|
||||
}
|
||||
|
||||
export const calculateAspectRatioFit = (srcWidth: number, srcHeight: number, maxWidth: number, maxHeight: number) => {
|
||||
const ratio = Math.min(maxWidth / srcWidth, maxHeight / srcHeight)
|
||||
return { width: Math.round(srcWidth * ratio), height: Math.round(srcHeight * ratio) }
|
||||
}
|
||||
|
||||
export const findFiles = (base: string, ext: string, files: string[] = [], result: string[] = []) => {
|
||||
files = fs.readdirSync(base) || files
|
||||
result = result || result
|
||||
|
||||
files.forEach(
|
||||
function (file) {
|
||||
var newbase = path.join(base, file)
|
||||
if (fs.statSync(newbase).isDirectory()) {
|
||||
result = findFiles(newbase, ext, fs.readdirSync(newbase), result)
|
||||
} else {
|
||||
if (file.substr(-1*(ext.length+1)) == '.' + ext) {
|
||||
result.push(newbase)
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
return result
|
||||
}
|
||||
|
||||
export const isValidJSON = (path: string) => {
|
||||
let rawdata = fs.readFileSync(path, 'utf8')
|
||||
try {
|
||||
JSON.parse(rawdata)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export function getMoveCommandFromTo(oldName: string, newName: string): string {
|
||||
return `git mv ${oldName} ${newName}-temp && git mv ${newName}-temp ${newName}`
|
||||
}
|
||||
|
||||
export function execRename(path: string, command: string) {
|
||||
console.log(`Running command ${command}`)
|
||||
execSync(command, {encoding: "utf-8", cwd: path})
|
||||
}
|
||||
|
||||
export const isValidatorHasAllKeys = (val: ValidatorModel): boolean => {
|
||||
return typeof val.id === "string"
|
||||
&& typeof val.name === "string"
|
||||
&& typeof val.description === "string"
|
||||
&& typeof val.website === "string"
|
||||
}
|
||||
|
||||
export function isAssetInfoOK(chain: string, address: string): [boolean, string] {
|
||||
if (!isChainAssetInfoExistSync(chain, address)) {
|
||||
return [true, `Info file doesn't exist, no need to check`]
|
||||
}
|
||||
|
||||
const assetInfoPath = getChainAssetInfoPath(chain, address)
|
||||
const isInfoJSONValid = isValidJSON(assetInfoPath)
|
||||
if (!isInfoJSONValid) {
|
||||
console.log(`JSON at path: ${assetInfoPath} is invalid`)
|
||||
return [false, `JSON at path: ${assetInfoPath} is invalid`]
|
||||
}
|
||||
|
||||
const [hasAllKeys, msg] = isAssetInfoHasAllKeys(assetInfoPath)
|
||||
if (!hasAllKeys) {
|
||||
console.log({msg})
|
||||
return [false, msg]
|
||||
}
|
||||
|
||||
return [true, ``]
|
||||
}
|
||||
|
||||
export function isAssetInfoHasAllKeys(path: string): [boolean, string] {
|
||||
const info = JSON.parse(readFileSync(path))
|
||||
const infoKeys = Object.keys(info)
|
||||
const requiredKeys = ["explorer", "name", "website", "short_description"] // Find better solution getting AssetInfo interface keys
|
||||
|
||||
const hasAllKeys = requiredKeys.every(k => info.hasOwnProperty(k))
|
||||
|
||||
if (!hasAllKeys) {
|
||||
return [false, `Info at path ${path} missing next key(s): ${getArraysDiff(requiredKeys, infoKeys)}`]
|
||||
}
|
||||
|
||||
const isKeysCorrentType = typeof info.explorer === "string" && info.explorer != ""
|
||||
&& typeof info.name === "string" && info.name != ""
|
||||
&& typeof info.website === "string"
|
||||
&& typeof info.short_description === "string"
|
||||
|
||||
return [isKeysCorrentType, `Check keys ${requiredKeys} vs ${infoKeys}`]
|
||||
}
|
||||
|
||||
export const getArraysDiff = (arr1 :string[], arr2: string[]): string[] => arr1.filter(d => !arr2.includes(d))
|
||||
export const getFileSizeInKilobyte = (path: string): number => fs.statSync(path).size / 1000
|
||||
|
||||
export const rootDirAllowedFiles = [
|
||||
".github",
|
||||
"blockchains",
|
||||
"dapps",
|
||||
"media",
|
||||
"node_modules",
|
||||
"script-old",
|
||||
"script",
|
||||
"src",
|
||||
".gitignore",
|
||||
"azure-pipelines.yml",
|
||||
"jest.config.js",
|
||||
"LICENSE",
|
||||
"package-lock.json",
|
||||
"package.json",
|
||||
"README.md",
|
||||
".git",
|
||||
"pricing",
|
||||
"Dangerfile",
|
||||
"Gemfile",
|
||||
"Gemfile.lock"
|
||||
]
|
||||
|
||||
export const assetFolderAllowedFiles = [
|
||||
logo,
|
||||
info
|
||||
]
|
||||
|
||||
export const chainFolderAllowedFiles = [
|
||||
"assets",
|
||||
"whitelist.json",
|
||||
"blacklist.json",
|
||||
"validators",
|
||||
infoName
|
||||
]
|
|
@ -1,538 +0,0 @@
|
|||
const eztz = require('eztz-lib')
|
||||
|
||||
import {
|
||||
Binance, Cosmos, Tezos, Tron, IoTeX, Waves, Kava, Terra,
|
||||
assetFolderAllowedFiles,
|
||||
chainFolderAllowedFiles,
|
||||
chainsFolderPath,
|
||||
ethSidechains,
|
||||
findFiles,
|
||||
getBinanceTokenSymbols,
|
||||
getChainAssetLogoPath,
|
||||
getChainAssetPath,
|
||||
getChainAssetsPath,
|
||||
getChainFolderFilesList,
|
||||
getChainBlacklistPath,
|
||||
getChainLogoPath,
|
||||
getChainValidatorAssetLogoPath,
|
||||
getChainValidatorsAssets,
|
||||
getChainValidatorsListPath,
|
||||
getChainWhitelistPath,
|
||||
getChainAssetsList,
|
||||
getChainValidatorsList,
|
||||
findDuplicate,
|
||||
findCommonElementOrDuplicate,
|
||||
isLogoDimensionOK,
|
||||
isLogoSizeOK,
|
||||
isLowerCase,
|
||||
isPathDir,
|
||||
isPathExistsSync,
|
||||
isTRC10, isTRC20, isWavesAddress,
|
||||
isValidJSON,
|
||||
isAssetInfoOK,
|
||||
isValidatorHasAllKeys,
|
||||
pricingFolderPath,
|
||||
readDirSync,
|
||||
readFileSync,
|
||||
rootDirAllowedFiles,
|
||||
stakingChains,
|
||||
} from "./helpers"
|
||||
import { ValidatorModel, mapTiker, TickerType } from "./models";
|
||||
import { getHandle } from "../../script-old/gen_info";
|
||||
|
||||
import {
|
||||
isChecksum,
|
||||
toChecksum
|
||||
} from "../../script/common/eth-web3";
|
||||
import {
|
||||
isDimensionTooLarge,
|
||||
calculateTargetSize
|
||||
} from "../../script/common/image";
|
||||
import {
|
||||
mapList,
|
||||
sortElements,
|
||||
makeUnique,
|
||||
arrayDiff
|
||||
} from "../../script/common/types";
|
||||
import { findImagesToFetch } from "../../script/action/binance";
|
||||
|
||||
describe("Check repository root dir", () => {
|
||||
const dirActualFiles = readDirSync(".")
|
||||
test("Root should contains only predefined files", () => {
|
||||
dirActualFiles.forEach(file => {
|
||||
expect(rootDirAllowedFiles.indexOf(file), `File "${file}" should not be in root or added to predefined list`).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe(`Test "blockchains" folder`, () => {
|
||||
const foundChains = readDirSync(chainsFolderPath)
|
||||
|
||||
test(`Chain should have "logo.png" image`, () => {
|
||||
foundChains.forEach(chain => {
|
||||
const chainLogoPath = getChainLogoPath(chain)
|
||||
expect(isPathExistsSync(chainLogoPath), `File missing at path "${chainLogoPath}"`).toBe(true)
|
||||
const [isOk, msg] = isLogoDimensionOK(chainLogoPath)
|
||||
expect(isOk, msg).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
test("Chain folder must have lowercase naming", () => {
|
||||
foundChains.forEach(chain => {
|
||||
expect(isLowerCase(chain), `Chain folder must be in lowercase "${chain}"`).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe(`Asset folder should contain only predefined list of files`, () => {
|
||||
readDirSync(chainsFolderPath).forEach(chain => {
|
||||
const assetsPath = getChainAssetsPath(chain)
|
||||
|
||||
if (isPathExistsSync(assetsPath)) {
|
||||
test(`Test asset folder allowed files on chain: ${chain}`, () => {
|
||||
readDirSync(assetsPath).forEach(address => {
|
||||
const assetFiles = getChainAssetPath(chain, address)
|
||||
readDirSync(assetFiles).forEach(assetFolderFile => {
|
||||
expect(assetFolderAllowedFiles.indexOf(assetFolderFile),`File "${assetFolderFile}" not allowed at this path: ${assetsPath}`).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe(`Chain folder should contain only predefined list of files`, () => {
|
||||
readDirSync(chainsFolderPath).forEach(chain => {
|
||||
getChainFolderFilesList(chain).forEach(file => {
|
||||
expect(chainFolderAllowedFiles.indexOf(file),`File "${typeof file}" ${file} not allowed in chain folder: ${chain}`).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Check Ethereum side-chain folders", () => {
|
||||
ethSidechains.forEach(chain => {
|
||||
const assetsFolder = getChainAssetsPath(chain)
|
||||
const assetsList = getChainAssetsList(chain)
|
||||
test(`Test chain ${chain} folder (${assetsList.length} assets)`, () => {
|
||||
assetsList.forEach(address => {
|
||||
const assetPath = `${assetsFolder}/${address}`
|
||||
expect(isPathDir(assetPath), `Expect directory at path: ${assetPath}`).toBe(true)
|
||||
|
||||
const checksum = isChecksum(address)
|
||||
expect(checksum, `Expect asset at path ${assetPath} in checksum`).toBe(true)
|
||||
|
||||
const assetLogoPath = getChainAssetLogoPath(chain, address)
|
||||
expect(isPathExistsSync(assetLogoPath), `Missing file at path "${assetLogoPath}"`).toBe(true)
|
||||
|
||||
const [isDimensionOK, dimensionMsg] = isLogoDimensionOK(assetLogoPath)
|
||||
expect(isDimensionOK, dimensionMsg).toBe(true)
|
||||
|
||||
const [isLogoOK, sizeMsg] = isLogoSizeOK(assetLogoPath)
|
||||
expect(isLogoOK, sizeMsg).toBe(true)
|
||||
|
||||
const [isInfoOK, InfoMsg] = isAssetInfoOK(chain, address)
|
||||
expect(isInfoOK, InfoMsg).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe(`Check "binace" folder`, () => {
|
||||
it("Asset must exist on chain", async () => {
|
||||
const tokenSymbols = await getBinanceTokenSymbols()
|
||||
console.log(tokenSymbols);
|
||||
const assets = readDirSync(getChainAssetsPath(Binance))
|
||||
|
||||
assets.forEach(asset => {
|
||||
expect(tokenSymbols.indexOf(asset), `Asset ${asset} missing on chain`).not.toBe(-1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe(`Check "tron" folder`, () => {
|
||||
const path = getChainAssetsPath(Tron)
|
||||
|
||||
test("Expect asset to be TRC10 or TRC20", () => {
|
||||
readDirSync(path).forEach(asset => {
|
||||
expect(isTRC10(asset) || isTRC20(asset), `Asset ${asset} at path ${path} non TRC10 nor TRC20`).toBe(true)
|
||||
|
||||
const assetsLogoPath = getChainAssetLogoPath(Tron, asset)
|
||||
expect(isPathExistsSync(assetsLogoPath), `Missing file at path "${assetsLogoPath}"`).toBe(true)
|
||||
const [isOk, msg] = isLogoDimensionOK(assetsLogoPath)
|
||||
expect(isOk, msg).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Check Staking chains", () => {
|
||||
test("Make sure tests added for new staking chain", () => {
|
||||
expect(stakingChains.length).toBe(7)
|
||||
})
|
||||
|
||||
stakingChains.forEach(chain => {
|
||||
const validatorsListPath = getChainValidatorsListPath(chain)
|
||||
const validatorsList = getChainValidatorsList(chain)
|
||||
|
||||
test(`Chain ${chain} validator must have correct structure and valid JSON format`, () => {
|
||||
validatorsList.forEach((val: ValidatorModel) => {
|
||||
expect(isValidatorHasAllKeys(val), `Some key and/or type missing for validator ${JSON.stringify(val)}`).toBe(true)
|
||||
expect(isValidJSON(validatorsListPath), `Not valid json file at path ${validatorsListPath}`).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
test(`Chain ${chain} validator must have corresponding asset logo`, () => {
|
||||
validatorsList.forEach(({ id }) => {
|
||||
const path = getChainValidatorAssetLogoPath(chain, id)
|
||||
expect(isPathExistsSync(path), `Chain ${chain} asset ${id} logo must be present at path ${path}`).toBe(true)
|
||||
|
||||
const [isOk, msg] = isLogoDimensionOK(path)
|
||||
expect(isOk, msg).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
const chainValidatorsAssetsList = getChainValidatorsAssets(chain)
|
||||
switch (chain) {
|
||||
case Cosmos:
|
||||
testCosmosValidatorsAddress(chainValidatorsAssetsList)
|
||||
break;
|
||||
case Kava:
|
||||
testKavaValidatorsAddress(chainValidatorsAssetsList)
|
||||
break;
|
||||
case Terra:
|
||||
testTerraValidatorsAddress(chainValidatorsAssetsList)
|
||||
break;
|
||||
case Tezos:
|
||||
testTezosValidatorsAssets(chainValidatorsAssetsList)
|
||||
break;
|
||||
case Tron:
|
||||
testTronValidatorsAssets(chainValidatorsAssetsList)
|
||||
break;
|
||||
case Waves:
|
||||
testWavesValidatorsAssets(chainValidatorsAssetsList)
|
||||
break;
|
||||
// case Solana:
|
||||
// testSolanaValidatorsAssets(chainValidatorsAssetsList)
|
||||
// break;
|
||||
// TODO Add IoTeX when staking is supported by Trust
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
test("Make sure validator has corresponding logo", () => {
|
||||
validatorsList.forEach(val => {
|
||||
expect(chainValidatorsAssetsList.indexOf(val.id), `Expecting image asset for validator ${val.id} on chain ${chain}`)
|
||||
.toBeGreaterThanOrEqual(0)
|
||||
})
|
||||
})
|
||||
|
||||
test("Make sure validator asset logo has corresponding info", () => {
|
||||
chainValidatorsAssetsList.forEach(valAssetLogoID => {
|
||||
expect(validatorsList.filter(v => v.id === valAssetLogoID).length, `Expect validator logo ${valAssetLogoID} to have info`)
|
||||
.toBe(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
function testTezosValidatorsAssets(assets: string[]) {
|
||||
test("Tezos assets must be correctly formated tz1 address", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(eztz.crypto.checkAddress(addr), `Invalid Tezos address: ${addr}`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function testTronValidatorsAssets(assets: string[]) {
|
||||
test("TRON assets must be correctly formated", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(isTRC20(addr), `Address ${addr} should be TRC20`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
function testWavesValidatorsAssets(assets: string[]) {
|
||||
test("WAVES assets must have correct format", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(isWavesAddress(addr), `Address ${addr} should be WAVES formatted`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// function testSolanaValidatorsAssets(assets: string[]) {
|
||||
// test("Solana assets must have correct format", () => {
|
||||
// assets.forEach(addr => {
|
||||
// expect(isSolanaAddress(addr), `Address ${addr} should be Solana formated`).toBe(true)
|
||||
// })
|
||||
// })
|
||||
// }
|
||||
|
||||
function testCosmosValidatorsAddress(assets: string[]) {
|
||||
test("Cosmos assets must have correct format", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(addr.startsWith("cosmosvaloper1"), `Address ${addr} should start from "cosmosvaloper1"`).toBe(true)
|
||||
expect(addr.length, `Address ${addr} should have length 52`).toBe(52)
|
||||
expect(isLowerCase(addr), `Address ${addr} should be in lowercase`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function testKavaValidatorsAddress(assets: string[]) {
|
||||
test("Kava assets must have correct format", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(addr.startsWith("kavavaloper1"), `Address ${addr} should start from "kavavaloper1"`).toBe(true)
|
||||
expect(addr.length, `Address ${addr} should have length 50`).toBe(50)
|
||||
expect(isLowerCase(addr), `Address ${addr} should be in lowercase`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
function testTerraValidatorsAddress(assets: string[]) {
|
||||
test("Terra assets must have correct format", () => {
|
||||
assets.forEach(addr => {
|
||||
expect(addr.startsWith("terravaloper1"), `Address ${addr} should start from "terravaloper1"`).toBe(true)
|
||||
expect(addr.length, `Address ${addr} should have length 51`).toBe(51)
|
||||
expect(isLowerCase(addr), `Address ${addr} should be in lowercase`).toBe(true)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
describe("Test Coinmarketcap mapping", () => {
|
||||
const cmcMap: mapTiker[] = JSON.parse(readFileSync("./pricing/coinmarketcap/mapping.json"))
|
||||
|
||||
test("Must have items", () => {
|
||||
expect(cmcMap.length, `CMC map must have items`).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
test(`Items must be sorted by "id" in ascending order`, () => {
|
||||
cmcMap.forEach((el, i) => {
|
||||
if (i > 0) {
|
||||
const prevID = cmcMap[i - 1].id
|
||||
const curID = el.id
|
||||
expect(curID, `Item ${curID} must be greater than or equal to ${prevID}`)
|
||||
.toBeGreaterThanOrEqual(prevID)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test(`Items must be sorted by "coin" in ascending order if have same "id"`, () => {
|
||||
cmcMap.forEach((el, i) => {
|
||||
if (i > 0) {
|
||||
const prevEl = cmcMap[i - 1]
|
||||
|
||||
const prevCoin = prevEl.coin
|
||||
const prevID = cmcMap[i - 1].id
|
||||
|
||||
const curCoin = el.coin
|
||||
const curID = el.id
|
||||
|
||||
if (prevID == curID) {
|
||||
expect(curCoin, `Item ${JSON.stringify(el)} must be greater than or equal to ${JSON.stringify(prevEl)}`)
|
||||
.toBeGreaterThanOrEqual(prevCoin)
|
||||
}
|
||||
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test("Properies value shoud not contain spaces", () => {
|
||||
cmcMap.forEach(el => {
|
||||
Object.keys(el).forEach(key => {
|
||||
const val = el[key]
|
||||
if (typeof val === "string") {
|
||||
expect(val.indexOf(" ") >= 0, ` Property value "${val}" should not contain space`).toBe(false)
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
test("Params should have value and correct type", () => {
|
||||
cmcMap.forEach(el => {
|
||||
const {coin, type, id, token_id} = el
|
||||
|
||||
expect(typeof coin, `Coin ${coin} must be type "number"`).toBe("number")
|
||||
|
||||
expect(["token", "coin"], `Element with id ${id} has wrong type: "${type}"`).toContain(type)
|
||||
if (type === "token") {
|
||||
expect(token_id, `token_id ${token_id} with id ${id} must not be empty`).toBeTruthy()
|
||||
}
|
||||
|
||||
if (type === "coin") {
|
||||
expect(el, `Element with id ${id} should not have property "token_id"`).not.toHaveProperty("token_id")
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test(`"token_id" should be in correct format`, async () => {
|
||||
const bepSymbols = await getBinanceTokenSymbols()
|
||||
|
||||
cmcMap.forEach(el => {
|
||||
const {coin, token_id, type, id} = el
|
||||
switch (coin) {
|
||||
case 60:
|
||||
if (type === TickerType.Token) {
|
||||
expect(isChecksum(token_id), `"token_id" ${token_id} with id ${id} must be in checksum`).toBe(true)
|
||||
break;
|
||||
}
|
||||
case 195:
|
||||
if (type === TickerType.Token) {
|
||||
expect(isTRC10(token_id) || isTRC20(token_id), `"token_id" ${token_id} with id ${id} must be in TRC10 or TRC20`).toBe(true)
|
||||
break;
|
||||
}
|
||||
case 714:
|
||||
if (type === TickerType.Token) {
|
||||
expect(bepSymbols.indexOf(token_id), `"token_id" ${token_id} with id ${id} must be BEP2 or BEP8 symbol`).toBeGreaterThan(0)
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
test(`"token_id" shoud be unique`, () => {
|
||||
const mappedList = cmcMap.reduce((acm, val) => {
|
||||
if (val.hasOwnProperty("token_id")) {
|
||||
if (acm.hasOwnProperty(val.token_id)) {
|
||||
acm[val.token_id] == ++acm[val.token_id]
|
||||
} else {
|
||||
acm[val.token_id] = 0
|
||||
}
|
||||
}
|
||||
return acm
|
||||
}, {})
|
||||
|
||||
cmcMap.forEach(el => {
|
||||
if (el.hasOwnProperty("token_id")) {
|
||||
expect(mappedList[el.token_id], `CMC map ticker with "token_id" ${el.token_id} should be unique`).toBeLessThanOrEqual(0)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Test blacklist and whitelist", () => {
|
||||
const assetsChains = readDirSync(chainsFolderPath).filter(chain => isPathExistsSync(getChainAssetsPath(chain)))
|
||||
|
||||
assetsChains.forEach(chain => {
|
||||
// Test uniqueness of blacklist and whitelist, and non-intersection among the two:
|
||||
// test by a single check: checking for duplicates in the concatenated list.
|
||||
const whiteList = JSON.parse(readFileSync(getChainWhitelistPath(chain)))
|
||||
const blackList = JSON.parse(readFileSync(getChainBlacklistPath(chain)))
|
||||
test(`Blacklist and whitelist should have no common elements or duplicates (${chain})`, () => {
|
||||
expect(findCommonElementOrDuplicate(whiteList, blackList), `Found a duplicate or common element`).toBe(null)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("Test coins info.json file", () => {
|
||||
|
||||
});
|
||||
|
||||
describe("Test all JSON files to have valid content", () => {
|
||||
const files = [
|
||||
...findFiles(chainsFolderPath, 'json'),
|
||||
...findFiles(pricingFolderPath, 'json')
|
||||
]
|
||||
|
||||
files.forEach(file => {
|
||||
expect(isValidJSON(file), `${file} path contains invalid JSON`).toBe(true)
|
||||
});
|
||||
})
|
||||
|
||||
describe("Test helper functions", () => {
|
||||
test(`Test getHandle`, () => {
|
||||
const urls = [
|
||||
{
|
||||
url: "https://twitter.com/aeternity",
|
||||
expected: "aeternity"
|
||||
},
|
||||
{
|
||||
url: "https://www.reddit.com/r/Aeternity",
|
||||
expected: "Aeternity"
|
||||
}
|
||||
]
|
||||
|
||||
urls.forEach(u => {
|
||||
expect(getHandle(u.url), `Getting handle from url ${u}`).toBe(u.expected)
|
||||
})
|
||||
})
|
||||
|
||||
test(`Test findDuplicate`, () => {
|
||||
expect(findDuplicate(["a", "bb", "ccc"]), `No duplicates`).toBe(null)
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb"]), `One double duplicate`).toBe("bb")
|
||||
expect(findDuplicate([]), `Empty array`).toBe(null)
|
||||
expect(findDuplicate(["a"]), `One element`).toBe(null)
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb", "d", "bb"]), `One trip[le duplicate`).toBe("bb")
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb", "a"]), `Two double duplicates`).toBe("a")
|
||||
})
|
||||
|
||||
test(`Test findCommonElementOrDuplicate`, () => {
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "22", "333"]), `No intersection or duplicates`).toBe(null)
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "bb", "333"]), `Common element`).toBe("bb")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc", "bb"], ["1", "22", "333"]), `Duplicate in first`).toBe("bb")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "22", "333", "22"]), `Duplicate in second`).toBe("22")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc", "1", "bb"], ["1", "22", "333", "22"]), `Intersection and duplicates`).toBe("22")
|
||||
expect(findCommonElementOrDuplicate([], []), `Empty lists`).toBe(null)
|
||||
})
|
||||
});
|
||||
|
||||
describe("Test eth-web3 helpers", () => {
|
||||
test(`Test isChecksum`, () => {
|
||||
expect(isChecksum("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee"), `checksum`).toBe(true);
|
||||
expect(isChecksum("0x7bb09bc8ade747178e95b1d035ecbeebbb18cfee"), `lowercase`).toBe(false);
|
||||
expect(isChecksum("0x7Bb09bC8aDE747178e95B1D035ecBe"), `too short`).toBe(false);
|
||||
});
|
||||
test(`Test toChecksum`, () => {
|
||||
expect(toChecksum("0x7bb09bc8ade747178e95b1d035ecbeebbb18cfee"), `from lowercase`).toEqual("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee");
|
||||
expect(toChecksum("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee"), `from checksum`).toEqual("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test image helpers", () => {
|
||||
test(`Test isDimensionTooLarge`, () => {
|
||||
expect(isDimensionTooLarge(256, 256), `256x256`).toBe(false);
|
||||
expect(isDimensionTooLarge(64, 64), `64x64`).toBe(false);
|
||||
expect(isDimensionTooLarge(800, 800), `800x800`).toBe(true);
|
||||
expect(isDimensionTooLarge(256, 800), `256x800`).toBe(true);
|
||||
expect(isDimensionTooLarge(800, 256), `800x256`).toBe(true);
|
||||
});
|
||||
test(`Test calculateReducedSize`, () => {
|
||||
expect(calculateTargetSize(256, 256, 512, 512), `small 1.0`).toEqual({width: 512, height: 512});
|
||||
expect(calculateTargetSize(800, 800, 512, 512), `large 1.0`).toEqual({width: 512, height: 512});
|
||||
expect(calculateTargetSize(200, 100, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
|
||||
expect(calculateTargetSize(100, 200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
|
||||
expect(calculateTargetSize(1200, 600, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
|
||||
expect(calculateTargetSize(600, 1200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
|
||||
expect(calculateTargetSize(256, 0, 512, 512), `zero`).toEqual({width: 512, height: 512});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test type helpers", () => {
|
||||
test(`Test mapList`, () => {
|
||||
expect(mapList(["a", "b", "c"]), `3 elems`).toEqual({"a": "", "b":"", "c": ""});
|
||||
});
|
||||
test(`Test sortElements`, () => {
|
||||
expect(sortElements(["c", "a", "b"]), `3 elems`).toEqual(["a", "b", "c"]);
|
||||
expect(sortElements(["C", "a", "b"]), `mixed case`).toEqual(["a", "b", "C"]);
|
||||
expect(sortElements(["1", "2", "11"]), `numerical`).toEqual(["1", "2", "11"]);
|
||||
expect(sortElements(["C", "a", "1", "b", "2", "11"]), `complex`).toEqual(["1", "2", "11", "a", "b", "C"]);
|
||||
});
|
||||
test(`Test makeUnique`, () => {
|
||||
expect(makeUnique(["a", "b", "c", "b"]), `4 elems with 1 duplicate`).toEqual(["a", "b", "c"]);
|
||||
});
|
||||
test(`Test arrayDiff`, () => {
|
||||
expect(arrayDiff(["a", "b", "c"], ["c"]), `4 elems with 1 duplicate`).toEqual(["a", "b"]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test action binance", () => {
|
||||
test(`Test findImagesToFetch`, () => {
|
||||
const assetsInfoListNonexisting: any[] = [{asset: "A1", assetImg: "imgurl1"}, {asset: "A2", assetImg: "imgurl2"}];
|
||||
const assetsInfoListExisting: any[] = [{asset: "BUSD-BD1", assetImg: "imgurlBUSD"}, {asset: "ETH-1C9", assetImg: "imgurlETH"}];
|
||||
const blackListEmpty: string[] = [];
|
||||
const blackListA1: string[] = ["A1"];
|
||||
expect(findImagesToFetch(assetsInfoListNonexisting, blackListEmpty), `2 nonexisting`).toEqual(assetsInfoListNonexisting);
|
||||
expect(findImagesToFetch(assetsInfoListNonexisting, blackListA1), `2 nonexisting with 1 blacklisted`).toEqual([{asset: "A2", assetImg: "imgurl2"}]);
|
||||
expect(findImagesToFetch(assetsInfoListExisting, blackListEmpty), `2 existing`).toEqual([]);
|
||||
expect(findImagesToFetch([], []), `empty`).toEqual([]);
|
||||
});
|
||||
});
|
|
@ -1,91 +0,0 @@
|
|||
export interface ValidatorModel {
|
||||
id: string,
|
||||
name: string,
|
||||
description: string,
|
||||
website: string,
|
||||
staking: Staking
|
||||
payout: Payout
|
||||
status: ValidatorStatus
|
||||
}
|
||||
|
||||
interface Staking {
|
||||
freeSpace: number,
|
||||
minDelegation: number
|
||||
openForDelegation: boolean
|
||||
}
|
||||
|
||||
interface Payout {
|
||||
commission: number // in %
|
||||
payoutDelay: number // in cycles
|
||||
payoutPeriod: number
|
||||
}
|
||||
|
||||
interface ValidatorStatus {
|
||||
disabled: boolean;
|
||||
note: string;
|
||||
}
|
||||
|
||||
// Minimal property requirements for asset info file
|
||||
export interface AssetInfo {
|
||||
explorer: string;
|
||||
name: string;
|
||||
short_description: string;
|
||||
website: string;
|
||||
}
|
||||
|
||||
export interface CoinInfoList {
|
||||
name: string;
|
||||
website: string;
|
||||
source_code: string;
|
||||
whitepaper: string;
|
||||
short_description: string;
|
||||
explorer: string;
|
||||
socials: Social[];
|
||||
details: Detail[];
|
||||
}
|
||||
|
||||
interface Detail {
|
||||
language: string;
|
||||
description: string;
|
||||
}
|
||||
|
||||
interface Social {
|
||||
name: string;
|
||||
url: string;
|
||||
handle: string;
|
||||
}
|
||||
|
||||
// CoinmarketCap
|
||||
export interface mapTiker {
|
||||
coin: number
|
||||
type: mapType
|
||||
token_id?: string
|
||||
id: number
|
||||
}
|
||||
|
||||
export type mapType = TickerType.Coin | TickerType.Token
|
||||
|
||||
export enum TickerType {
|
||||
Token = "token",
|
||||
Coin = "coin"
|
||||
}
|
||||
|
||||
export enum PlatformType {
|
||||
Ethereum = "Ethereum",
|
||||
Binance = "Binance Coin",
|
||||
TRON = "TRON",
|
||||
OMNI = "Omni",
|
||||
VeChain = "VeChain"
|
||||
}
|
||||
|
||||
export interface BakingBadBaker {
|
||||
address: string,
|
||||
freeSpace: number
|
||||
// serviceHealth: string // active or Dead is a working baker who was a public baker but for some reason stopped paying his delegators, Closed is a permanently closed service (we store them for historical purposes only
|
||||
fee: number
|
||||
minDelegation: number
|
||||
openForDelegation: boolean
|
||||
payoutDelay: number
|
||||
payoutPeriod: number
|
||||
serviceHealth: string
|
||||
}
|
108
test/index.test.ts
Normal file
|
@ -0,0 +1,108 @@
|
|||
import {
|
||||
findDuplicate,
|
||||
findCommonElementOrDuplicate,
|
||||
} from "../script/common/types";
|
||||
import {
|
||||
isChecksum,
|
||||
toChecksum
|
||||
} from "../script/common/eth-web3";
|
||||
import {
|
||||
isDimensionTooLarge,
|
||||
isDimensionOK,
|
||||
calculateTargetSize
|
||||
} from "../script/common/image";
|
||||
import {
|
||||
mapList,
|
||||
sortElements,
|
||||
makeUnique,
|
||||
arrayDiff
|
||||
} from "../script/common/types";
|
||||
import { findImagesToFetch } from "../script/action/binance";
|
||||
|
||||
describe("Test eth-web3 helpers", () => {
|
||||
test(`Test isChecksum`, () => {
|
||||
expect(isChecksum("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee"), `checksum`).toBe(true);
|
||||
expect(isChecksum("0x7bb09bc8ade747178e95b1d035ecbeebbb18cfee"), `lowercase`).toBe(false);
|
||||
expect(isChecksum("0x7Bb09bC8aDE747178e95B1D035ecBe"), `too short`).toBe(false);
|
||||
});
|
||||
test(`Test toChecksum`, () => {
|
||||
expect(toChecksum("0x7bb09bc8ade747178e95b1d035ecbeebbb18cfee"), `from lowercase`).toEqual("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee");
|
||||
expect(toChecksum("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee"), `from checksum`).toEqual("0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee");
|
||||
});
|
||||
});
|
||||
|
||||
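As a rough guide to what these assertions exercise: a minimal sketch of checksum helpers built on web3.js utilities. The real script/common/eth-web3 module is not shown in this diff, so treat this as an assumption about its shape rather than its actual code.

import Web3 from "web3";

// Sketch only: assumes the helpers wrap web3.js address utilities.
export function isChecksum(address: string): boolean {
    // isAddress rejects malformed or too-short input; checkAddressChecksum
    // rejects addresses whose mixed-case checksum does not match.
    return Web3.utils.isAddress(address) && Web3.utils.checkAddressChecksum(address);
}

export function toChecksum(address: string): string {
    return Web3.utils.toChecksumAddress(address);
}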
describe("Test image helpers", () => {
|
||||
test(`Test isDimensionTooLarge`, () => {
|
||||
expect(isDimensionTooLarge(256, 256), `256x256`).toBe(false);
|
||||
expect(isDimensionTooLarge(64, 64), `64x64`).toBe(false);
|
||||
expect(isDimensionTooLarge(800, 800), `800x800`).toBe(true);
|
||||
expect(isDimensionTooLarge(256, 800), `256x800`).toBe(true);
|
||||
expect(isDimensionTooLarge(800, 256), `800x256`).toBe(true);
|
||||
});
|
||||
test(`Test isDimensionOK`, () => {
|
||||
expect(isDimensionOK(256, 256), `256x256`).toBe(true);
|
||||
expect(isDimensionOK(64, 64), `64x64`).toBe(true);
|
||||
expect(isDimensionOK(800, 800), `800x800`).toBe(false);
|
||||
expect(isDimensionOK(256, 800), `256x800`).toBe(false);
|
||||
expect(isDimensionOK(800, 256), `800x256`).toBe(false);
|
||||
expect(isDimensionOK(60, 60), `60x60`).toBe(false);
|
||||
expect(isDimensionOK(64, 60), `64x60`).toBe(false);
|
||||
expect(isDimensionOK(60, 64), `60x64`).toBe(false);
|
||||
});
|
||||
test(`Test calculateReducedSize`, () => {
|
||||
expect(calculateTargetSize(256, 256, 512, 512), `small 1.0`).toEqual({width: 512, height: 512});
|
||||
expect(calculateTargetSize(800, 800, 512, 512), `large 1.0`).toEqual({width: 512, height: 512});
|
||||
expect(calculateTargetSize(200, 100, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
|
||||
expect(calculateTargetSize(100, 200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
|
||||
expect(calculateTargetSize(1200, 600, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
|
||||
expect(calculateTargetSize(600, 1200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
|
||||
expect(calculateTargetSize(256, 0, 512, 512), `zero`).toEqual({width: 512, height: 512});
|
||||
});
|
||||
});
|
||||
|
||||
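For orientation, a sketch consistent with the size expectations above, assuming a 64px minimum edge, a 512px maximum edge, and aspect-preserving scaling into the target box; the real script/common/image helpers may differ in detail.

// Sketch only; MAX_DIMENSION and MIN_DIMENSION are assumptions inferred from the tests.
const MAX_DIMENSION = 512;
const MIN_DIMENSION = 64;

export function isDimensionTooLarge(width: number, height: number): boolean {
    return width > MAX_DIMENSION || height > MAX_DIMENSION;
}

export function isDimensionOK(width: number, height: number): boolean {
    return !isDimensionTooLarge(width, height)
        && width >= MIN_DIMENSION && height >= MIN_DIMENSION;
}

export function calculateTargetSize(srcWidth: number, srcHeight: number,
    targetWidth: number, targetHeight: number): { width: number, height: number } {
    if (srcWidth === 0 || srcHeight === 0) {
        // degenerate input: fall back to the full target size
        return { width: targetWidth, height: targetHeight };
    }
    if (srcWidth >= srcHeight) {
        // landscape or square: pin the width, scale the height by the aspect ratio
        return { width: targetWidth, height: Math.round(targetWidth * srcHeight / srcWidth) };
    }
    // portrait: pin the height, scale the width
    return { width: Math.round(targetHeight * srcWidth / srcHeight), height: targetHeight };
}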
describe("Test type helpers", () => {
|
||||
test(`Test mapList`, () => {
|
||||
expect(mapList(["a", "b", "c"]), `3 elems`).toEqual({"a": "", "b":"", "c": ""});
|
||||
});
|
||||
test(`Test sortElements`, () => {
|
||||
expect(sortElements(["c", "a", "b"]), `3 elems`).toEqual(["a", "b", "c"]);
|
||||
expect(sortElements(["C", "a", "b"]), `mixed case`).toEqual(["a", "b", "C"]);
|
||||
expect(sortElements(["1", "2", "11"]), `numerical`).toEqual(["1", "2", "11"]);
|
||||
expect(sortElements(["C", "a", "1", "b", "2", "11"]), `complex`).toEqual(["1", "2", "11", "a", "b", "C"]);
|
||||
});
|
||||
test(`Test makeUnique`, () => {
|
||||
expect(makeUnique(["a", "b", "c", "b"]), `4 elems with 1 duplicate`).toEqual(["a", "b", "c"]);
|
||||
});
|
||||
test(`Test arrayDiff`, () => {
|
||||
expect(arrayDiff(["a", "b", "c"], ["c"]), `4 elems with 1 duplicate`).toEqual(["a", "b"]);
|
||||
});
|
||||
test(`Test findDuplicate`, () => {
|
||||
expect(findDuplicate(["a", "bb", "ccc"]), `No duplicates`).toBe(null)
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb"]), `One double duplicate`).toBe("bb")
|
||||
expect(findDuplicate([]), `Empty array`).toBe(null)
|
||||
expect(findDuplicate(["a"]), `One element`).toBe(null)
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb", "d", "bb"]), `One triple duplicate`).toBe("bb")
|
||||
expect(findDuplicate(["a", "bb", "ccc", "bb", "a"]), `Two double duplicates`).toBe("a")
|
||||
});
|
||||
test(`Test findCommonElementOrDuplicate`, () => {
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "22", "333"]), `No intersection or duplicates`).toBe(null)
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "bb", "333"]), `Common element`).toBe("bb")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc", "bb"], ["1", "22", "333"]), `Duplicate in first`).toBe("bb")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc"], ["1", "22", "333", "22"]), `Duplicate in second`).toBe("22")
|
||||
expect(findCommonElementOrDuplicate(["a", "bb", "ccc", "1", "bb"], ["1", "22", "333", "22"]), `Intersection and duplicates`).toBe("22")
|
||||
expect(findCommonElementOrDuplicate([], []), `Empty lists`).toBe(null)
|
||||
});
|
||||
});
|
||||
|
||||
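The list helpers exercised above could look roughly like the following; behaviour is inferred from the assertions, so names and details should be read as assumptions about script/common/types rather than its actual contents.

// Inferred sketch of the list helpers under test.
export function makeUnique(list: string[]): string[] {
    return Array.from(new Set(list));
}

export function arrayDiff(a: string[], b: string[]): string[] {
    return a.filter(elem => !b.includes(elem));
}

export function mapList(list: string[]): { [key: string]: string } {
    const map: { [key: string]: string } = {};
    list.forEach(elem => { map[elem] = ""; });
    return map;
}

// Returns the last duplicate encountered, or null if the list has none
// (consistent with the "Two double duplicates" case expecting "a").
export function findDuplicate(list: string[]): string | null {
    const seen = new Set<string>();
    let duplicate: string | null = null;
    list.forEach(elem => {
        if (seen.has(elem)) {
            duplicate = elem;
        } else {
            seen.add(elem);
        }
    });
    return duplicate;
}

// A common element of two lists is a duplicate of their concatenation,
// so both cases reduce to findDuplicate.
export function findCommonElementOrDuplicate(list1: string[], list2: string[]): string | null {
    return findDuplicate(list1.concat(list2));
}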
describe("Test action binance", () => {
|
||||
test(`Test findImagesToFetch`, () => {
|
||||
const assetsInfoListNonexisting: any[] = [{asset: "A1", assetImg: "imgurl1"}, {asset: "A2", assetImg: "imgurl2"}];
|
||||
const assetsInfoListExisting: any[] = [{asset: "BUSD-BD1", assetImg: "imgurlBUSD"}, {asset: "ETH-1C9", assetImg: "imgurlETH"}];
|
||||
const blackListEmpty: string[] = [];
|
||||
const blackListA1: string[] = ["A1"];
|
||||
expect(findImagesToFetch(assetsInfoListNonexisting, blackListEmpty), `2 nonexisting`).toEqual(assetsInfoListNonexisting);
|
||||
expect(findImagesToFetch(assetsInfoListNonexisting, blackListA1), `2 nonexisting with 1 blacklisted`).toEqual([{asset: "A2", assetImg: "imgurl2"}]);
|
||||
expect(findImagesToFetch(assetsInfoListExisting, blackListEmpty), `2 existing`).toEqual([]);
|
||||
expect(findImagesToFetch([], []), `empty`).toEqual([]);
|
||||
});
|
||||
});
|
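Finally, a hedged sketch of the filter these cases imply: blacklisted symbols are skipped, as are assets whose logo already exists in the repository. The on-disk path layout assumed below is an illustration; see script/action/binance.ts for the actual logic.

import * as fs from "fs";
import * as path from "path";

interface BinanceAssetInfo { asset: string, assetImg: string }

// Sketch only: assumes logos live at blockchains/binance/assets/<ASSET>/logo.png.
export function findImagesToFetch(assetInfoList: BinanceAssetInfo[], blacklist: string[]): BinanceAssetInfo[] {
    return assetInfoList
        // drop blacklisted symbols
        .filter(({ asset }) => !blacklist.includes(asset))
        // drop assets whose logo is already present locally
        .filter(({ asset }) => !fs.existsSync(path.join("blockchains", "binance", "assets", asset, "logo.png")));
}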