[Internal] Refactored fix builds (#2920)

* Empty infrastructure for new-housekeeping build.

* Move updateBEP action to new new-housekeeping build infra.

* Remove old updateBEP.

* New-housekeeping-dryrun run.

* Include new top-level folder name script-new.

* Remove update:bep2 from old daily-run.

* Use imports instead of require.

* Small refactor for testability.

* Organize scripts into subfolders.

* updateBEP2: refactor and add tests.

* Move formatting validators to new-housekeeping, add new helpers.

* Move info and black/whitelist fixing to new-housekeeping.

* New fix command.

* New 'fix' target; Move ETH checksum fix to new-housekeeping.

* Move logo size check and resize to new-housekeeping.

* Improved async error handling.

* Build renames.

* Move (old) BEP2 and CMC update to periodic update build.

* Rename (add missing).

* Rename builds.

* Renames ('fix').

* rename

* Invoke new scripts (as well) from period-update.

* Move cmc update to new-periodic.

* Move tezos validator update to new-periodic.

* Missing file.

* Leftover.

* Cleanup

* Rename of unused openseacontracts.

* CMC should not be run always.

* Break main/fixAndUpdate function into two.

* Show diff in build after changes.

* Cleanup

* Rename, script-old.

* Cleanup, remove old fix build definitions.

* Renames, remove new- prefix.

* CMC mapping update.

* Config infrastructure; add binance URL to config.

* Add image size parameters to config.

* Rename.

Co-authored-by: Catenocrypt <catenocrypt@users.noreply.github.com>
Co-authored-by: Andrew M <35627271+zachzwei@users.noreply.github.com>
This commit is contained in:
Adam R 2020-07-29 15:42:51 +02:00 committed by GitHub
parent 515a1a0cbb
commit 079617ac38
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 684 additions and 335 deletions

View File

@ -1,4 +1,4 @@
name: Fixes Dryrun
name: Fixes and Consistency Updates - Dry run
on:
push:
branches:
@ -7,7 +7,7 @@ on:
pull_request:
branches: [master]
jobs:
fix-consistency:
scripts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@ -16,12 +16,11 @@ jobs:
node-version: 12
- name: Install Dependencies
run: npm ci
- name: Run fix scripts
run: |
npm run resize
npm run format:all
npm run gen:list
- name: Run fix script
run: npm run fix
- name: Show git status (diff)
if: success()
run: git status
- name: Run test
run: npm t

View File

@ -3,23 +3,19 @@ on:
push:
branches: [ master ]
jobs:
fix-consistency:
scripts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
token: ${{ secrets.DANGER_GITHUB_API_TOKEN }}
- uses: actions/setup-node@v1
with:
node-version: 12
- name: Install Dependencies
run: npm ci
- name: Run fix scripts
run: |
npm run resize
npm run format:all
npm run gen:list
- name: Run fix script
run: npm run fix
- name: Show git status (diff)
if: success()
run: git status
- name: Run test
run: npm t

View File

@ -4,7 +4,7 @@ on:
# Run twice per day (at 7:00UTC/12amPST, 19:00UTC/12pmPST)
- cron: '0 7,19 * * *'
jobs:
update:
scripts:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
@ -17,10 +17,10 @@ jobs:
- name: Install Dependencies
run: npm ci
- name: Run scripts
run: |
npm run gen:validators:tezos
npm run update:bep2
npm run map:cmc
run: npm run update
- name: Show git status (diff)
if: success()
run: git status
- name: Run test
run: npm t
- name: Commit changes test pased

View File

@ -152,8 +152,8 @@
"TLyqzVGLV1srkB7dToTAEqgDSfPtXRJZYH",
"TMhjbHzJCiMWqa5oqn2DF3mGw5yh9oQNf2",
"TMwFHYXLJaRUPeW6421aqXL4ZEzPRFGkGT",
"TNZXVQUKQ8Gnjq2BqLvt2kC5WbeDQc1q3j",
"TNjt5fShPVJ4YpsLuU4THuBbg58g2bZoLk",
"TNZXVQUKQ8Gnjq2BqLvt2kC5WbeDQc1q3j",
"TPRuU2GbXPwvvSuggE4xKMRtzsfwYfvBWq",
"TQhfmGiZvRt3oe9vKv6W9WXRr4oErxc8RE",
"TR7NHqjeKQxGTCi8q8ZY4pL8otSzgjLj6t",

View File

@ -5,16 +5,8 @@
"main": "index.js",
"scripts": {
"test": "jest",
"update:bep2": "node ./script/updateBEP2",
"checksum": "ts-node ./script/erc20_to_checksum",
"format:all": "ts-node ./script/format_files_name",
"arrange:all": "ts-node ./script/arrange_files",
"gen:list": "ts-node ./script/gen_list",
"gen:info": "ts-node ./script/gen_info",
"gen:validators:tezos": "ts-node ./script/gen_validators_tezos",
"resize": "ts-node ./script/resize_images",
"map:cmc": "ts-node ./pricing/coinmarketcap/script",
"magic": "npm run update:bep2 && npm run checksum && npm run resize && npm run gen:list && npm t"
"fix": "ts-node ./script/main/fix",
"update": "ts-node ./script/main/update"
},
"repository": {
"type": "git",

View File

@ -1,5 +1,5 @@
import { toChecksum } from "../../src/test/helpers"
const BluebirbPromise = require("bluebird")
const BluebirdPromise = require("bluebird")
const axios = require("axios")
const chalk = require('chalk')
const fs = require("fs")
@ -20,7 +20,7 @@ import { TickerType, mapTiker, PlatformType } from "../../src/test/models";
// Steps required to run this:
// 1. (Optional) CMC API key already setup, use yours if needed. Install script deps "npm i" if hasn't been run before.
// 2. Pull down tokens repo https://github.com/trustwallet/assets and point COIN_IMAGE_BASE_PATH and TOKEN_IMAGE_BASE_PATH to it.
// 3. Run: `npm run gen:list`
// 3. Run: `npm run update`
const CMC_PRO_API_KEY = `df781835-e5f4-4448-8b0a-fe31402ab3af` // Free Basic Plan api key is enough to run script
const CMC_LATEST_BASE_URL = `https://pro-api.coinmarketcap.com/v1/global-metrics/quotes/latest?`
@ -61,14 +61,13 @@ const allContracts: mapTiker[] = [] // Temp storage for mapped assets
let bnbOwnerToSymbol = {} // e.g: bnb1tawge8u97slduhhtumm03l4xl4c46dwv5m9yzk: WISH-2D5
let bnbOriginalSymbolToSymbol = {} // e.g: WISH: WISH-2D5
run()
async function run() {
try {
await Promise.all([initState(), setBinanceTokens()])
const [totalCrypto, coins] = await Promise.all([getTotalActiveCryptocurrencies(), getTickers()])
// setBIP44Constants()
log(`Found ${totalCrypto} on CMC`, chalk.yellowBright)
await BluebirbPromise.mapSeries(coins, processCoin)
await BluebirdPromise.mapSeries(coins, processCoin)
addCustom()
printContracts()
@ -351,4 +350,8 @@ function log(string, cb?) {
// coinName: row[2]
// }
// })
// }
// }
// Entry point for the periodic update build: runs the full CMC mapping flow.
export async function update() {
    await run();
}

87
script/action/binance.ts Normal file
View File

@ -0,0 +1,87 @@
import axios from "axios";
import * as bluebird from "bluebird";
import * as fs from "fs";
import * as path from "path";
import * as chalk from 'chalk';
import * as config from "../common/config";
import {
getChainAssetLogoPath,
getChainBlacklistPath
} from "../common/repo-structure";
// Chain folder name and asset-list endpoint (URL overridable via config key "binance_assets_url").
const binanceChain = "binance"
const binanceAssetsUrl = config.getConfig("binance_assets_url", "https://explorer.binance.org/api/v1/assets?page=1&rows=1000");
// Downloads the full Binance asset info list from the explorer API.
async function retrieveAssetList() {
    console.log(`Retrieving assets info from: ${binanceAssetsUrl}`);
    const response = await axios.get(binanceAssetsUrl);
    const assetInfoList = response.data.assetInfoList;
    console.log(`Retrieved ${assetInfoList.length} asset infos`);
    return assetInfoList
}
// Requests an image as a stream; rethrows with a message including the URL.
// NOTE: preserves the original contract of throwing a plain string on failure.
async function fetchImage(url) {
    try {
        const response = await axios.get(url, { responseType: "stream" });
        return response.data;
    } catch (err) {
        throw `Error fetchImage: ${url} ${err.message}`;
    }
}
/// Returns the assets whose logo image is missing locally and which are not
/// blacklisted. Each returned entry has the shape {asset, assetImg}.
export function findImagesToFetch(assetInfoList: any, blacklist: string[]): any[] {
    const missing: any[] = [];
    console.log(`Checking for asset images to be fetched`);
    for (const { asset, assetImg } of assetInfoList) {
        process.stdout.write(`.${asset} `);
        if (!assetImg) {
            continue;
        }
        if (blacklist.includes(asset)) {
            console.log();
            console.log(`${asset} is blacklisted`);
            continue;
        }
        const imagePath = getChainAssetLogoPath(binanceChain, asset);
        if (!fs.existsSync(imagePath)) {
            console.log(chalk.red(`Missing image: ${asset}`));
            missing.push({ asset, assetImg });
        }
    }
    console.log();
    console.log(`${missing.length} asset image(s) to be fetched`);
    return missing;
}
// Downloads the given images ({asset, assetImg}) into the repo structure,
// one at a time. Returns the asset symbols that were fetched successfully.
async function fetchMissingImages(toFetch: any[]): Promise<string[]> {
    console.log(`Attempting to fetch ${toFetch.length} asset image(s)`);
    const fetchedAssets: string[] = [];
    await bluebird.each(toFetch, async ({ asset, assetImg }) => {
        if (assetImg) {
            const imagePath = getChainAssetLogoPath(binanceChain, asset);
            // Create the target directory synchronously before writing. The
            // original fire-and-forget fs.mkdir callback could lose the race
            // with createWriteStream below, and a `throw` inside that callback
            // was uncatchable from this function.
            fs.mkdirSync(path.dirname(imagePath), { recursive: true });
            const stream = await fetchImage(assetImg);
            // Wait until the file is fully written before reporting success;
            // the original resolved as soon as the pipe was set up.
            await new Promise<void>((resolve, reject) => {
                stream.pipe(fs.createWriteStream(imagePath))
                    .on("finish", resolve)
                    .on("error", reject);
            });
            fetchedAssets.push(asset)
            console.log(`Fetched image ${asset} ${imagePath} from ${assetImg}`)
        }
    });
    console.log();
    return fetchedAssets;
}
// Update entry point for the Binance chain: fetch the asset list, skip
// blacklisted assets, download missing logo images, and report what was fetched.
export async function update() {
    const assetInfoList = await retrieveAssetList();
    const blacklist: string[] = require(getChainBlacklistPath(binanceChain));
    const toFetch = findImagesToFetch(assetInfoList, blacklist);
    const fetchedAssets = await fetchMissingImages(toFetch);
    if (fetchedAssets.length > 0) {
        console.log(`Fetched ${fetchedAssets.length} asset(s):`);
        fetchedAssets.forEach(asset => console.log(` ${asset}`));
    }
}

View File

@ -0,0 +1,66 @@
import { ethForkChains } from "../common/blockchains";
import {
getChainAssetsPath,
getChainAssetsList,
getChainAssetPath,
getChainAssetInfoPath,
getChainAssetFilesList,
isChainAssetInfoExistSync,
logoName,
logoExtension,
logoFullName
} from "../common/repo-structure";
import { formatJsonFile } from "../common/json";
import {
getFileName,
getFileExt,
gitMove,
readDirSync
} from "../common/filesystem";
import { isChecksum, toChecksum } from "../common/eth-web3";
function formatInfos() {
console.log(`Formatting info files...`);
ethForkChains.forEach(chain => {
let count: number = 0;
const chainAssets = getChainAssetsList(chain);
chainAssets.forEach(address => {
if (isChainAssetInfoExistSync(chain, address)) {
const chainAssetInfoPath = getChainAssetInfoPath(chain, address);
formatJsonFile(chainAssetInfoPath, true);
++count;
}
})
console.log(`Formatted ${count} info files for chain ${chain} (total ${chainAssets.length})`);
})
}
// Git-renames an asset folder to the checksum form of its address when it
// is not already in checksum case.
function checkAddressChecksum(assetsFolderPath: string, address: string) {
    if (isChecksum(address)) {
        return;
    }
    const checksumAddress = toChecksum(address);
    gitMove(assetsFolderPath, address, checksumAddress);
    console.log(`Renamed to checksum format ${checksumAddress}`);
}
function checkAddressChecksums() {
console.log(`Checking for checksum formats ...`);
ethForkChains.forEach(chain => {
const assetsPath = getChainAssetsPath(chain);
readDirSync(assetsPath).forEach(address => {
getChainAssetFilesList(chain, address).forEach(file => {
if (getFileName(file) == logoName && getFileExt(file) !== logoExtension) {
console.log(`Renaming incorrect asset logo extension ${file} ...`);
gitMove(getChainAssetPath(chain, address), file, logoFullName);
}
});
checkAddressChecksum(assetsPath, address);
});
});
}
// Fix entry point for ETH-fork chains: format info JSON files first, then
// normalize logo extensions and checksum-case the asset folder names.
export async function fix() {
    formatInfos();
    checkAddressChecksums();
}

View File

@ -0,0 +1,62 @@
import * as bluebird from "bluebird";
import {
chainsPath,
getChainLogoPath,
getChainAssetsPath,
getChainAssetLogoPath,
getChainValidatorsListPath,
getChainValidatorAssetLogoPath
} from "../common/repo-structure";
import {
readDirSync,
readFileSync,
isPathExistsSync
} from "../common/filesystem";
import { resizeIfTooLarge } from "../common/image";
// Checks every logo (chain logo, chain assets, validator logos) on the given
// chains and downsizes/compresses the ones exceeding the configured limits.
// Chains are processed concurrently (bluebird.map); images within a chain
// sequentially (mapSeries).
async function downsize(chains) {
    console.log(`Checking all logos for downsizing ...`);
    let totalCountChecked: number = 0;
    let totalCountUpdated: number = 0;
    await bluebird.map(chains, async chain => {
        let countChecked: number = 0;
        let countUpdated: number = 0;
        // The chain's own logo
        const path = getChainLogoPath(chain);
        countChecked++;
        countUpdated += await resizeIfTooLarge(path) ? 1 : 0;
        // Check and resize if needed chain assets
        const assetsPath = getChainAssetsPath(chain);
        if (isPathExistsSync(assetsPath)) {
            await bluebird.mapSeries(readDirSync(assetsPath), async asset => {
                const path = getChainAssetLogoPath(chain, asset);
                countChecked++;
                countUpdated += await resizeIfTooLarge(path) ? 1 : 0;
            })
        }
        // Check and resize if needed chain validators image
        const chainValidatorsList = getChainValidatorsListPath(chain);
        if (isPathExistsSync(chainValidatorsList)) {
            const validatorsList = JSON.parse(readFileSync(getChainValidatorsListPath(chain)));
            await bluebird.mapSeries(validatorsList, async ({ id }) => {
                const path = getChainValidatorAssetLogoPath(chain, id);
                countChecked++;
                countUpdated += await resizeIfTooLarge(path) ? 1 : 0;
            })
        }
        // Single-threaded event loop: safe to accumulate across concurrent chains.
        totalCountChecked += countChecked;
        totalCountUpdated += countUpdated;
        if (countUpdated > 0) {
            console.log(`Checking logos on chain ${chain} completed, ${countChecked} checked, ${countUpdated} logos updated`);
        }
    });
    console.log(`Checking logos completed, ${totalCountChecked} logos checked, ${totalCountUpdated} logos updated`);
}
// Fix entry point: downsize oversized logos across all chain folders.
export async function fix() {
    const foundChains = readDirSync(chainsPath);
    await downsize(foundChains);
}

View File

@ -1,13 +1,23 @@
const axios = require('axios')
import {
getChainValidatorsList,
getChainValidatorsListPath,
Tezos,
writeJSONToPath
} from "../src/test/helpers";
import { BakingBadBaker } from "../src/test/models";
import axios from "axios";
import {
validatorsList,
getChainValidatorsPath,
getChainValidatorsListPath
} from "../common/repo-structure";
import { Tezos } from "../common/blockchains";
import { readFileSync } from "../common/filesystem";
import { writeJsonFile } from "../common/json";
(async function(){
import {
BakingBadBaker,
ValidatorModel
} from "../../src/test/models";
// Reads and parses the validators list.json of the given chain.
function getChainValidatorsList(chain: string): ValidatorModel[] {
    const listPath = `${getChainValidatorsPath(chain)}/${validatorsList}`;
    return JSON.parse(readFileSync(listPath));
}
async function gen_validators_tezos() {
const bakers: BakingBadBaker[] = await axios.get(`https://api.baking-bad.org/v2/bakers`).then(res => res.data)
const bakersMap: {[key: string]: BakingBadBaker} = bakers.reduce((acm, val) => {
acm[val.address] = val
@ -51,5 +61,9 @@ import { BakingBadBaker } from "../src/test/models";
return acm
}, [])
writeJSONToPath(getChainValidatorsListPath(Tezos), newbakers)
})()
writeJsonFile(getChainValidatorsListPath(Tezos), newbakers)
}
// Update entry point: regenerate the Tezos validators list from baking-bad data.
export async function update() {
    await gen_validators_tezos();
}

View File

@ -0,0 +1,22 @@
import * as eth_forks from "./eth-forks";
import * as logo_size from "./logo-size";
import * as validators from "./validators";
import * as whitelists from "./whitelists";
import * as binance from "./binance";
import * as coinmarketcap from "../../pricing/coinmarketcap/script";
import * as tezos from "./tezos";
// Runs all local fix scripts (no external data sources).
// Each fix() action is async; awaiting them here means failures propagate to
// the caller instead of becoming unhandled promise rejections (the original
// fired them off without awaiting, so errors could never be caught).
export async function fixAll() {
    console.log("Running fixes...");
    await eth_forks.fix();
    await logo_size.fix();
    await validators.fix();
    await whitelists.fix();
}
// Runs all update scripts that pull from external data sources.
// Updates run sequentially and are awaited so that errors propagate to the
// caller (the original ignored the returned promises) and the network-heavy
// steps don't interleave their console output.
export async function updateAll() {
    console.log("Running updates (using external data sources) ...");
    await tezos.update();
    await binance.update();
    await coinmarketcap.update();
}

View File

@ -0,0 +1,14 @@
import { stakingChains } from "../common/blockchains";
import { getChainValidatorsListPath } from "../common/repo-structure";
import { formatSortJsonFile } from "../common/json";
// Rewrites each staking chain's validators list as sorted, 4-space-indented JSON.
function formatValidators() {
    for (const chain of stakingChains) {
        formatSortJsonFile(getChainValidatorsListPath(chain));
    }
}
// Fix entry point: normalize validator list formatting.
export async function fix() {
    formatValidators();
}

View File

@ -0,0 +1,39 @@
import { chainsWithBlacklist } from "../common/blockchains";
import { getChainAssetsList, getChainWhitelistPath, getChainBlacklistPath } from "../common/repo-structure";
import { readFileSync, writeFileSync } from "../common/filesystem";
import { sortElements, makeUnique, arrayDiff } from "../common/types";
// Rewrites each chain's whitelist (all current assets, sorted) and blacklist
// (previous blacklist plus any whitelisted assets whose folder disappeared).
function formatWhiteBlackList() {
    // Plain (non-async) callback: the original used an async forEach callback
    // whose rejections are silently dropped; nothing in the live code awaits,
    // so errors now propagate synchronously to the caller.
    chainsWithBlacklist.forEach(chain => {
        const assets = getChainAssetsList(chain);
        const whitelistPath = getChainWhitelistPath(chain);
        const blacklistPath = getChainBlacklistPath(chain);
        // NOTE(review): assumes whitelist/blacklist files already exist for each
        // chain in chainsWithBlacklist (the old script created them if missing) — confirm.
        const currentWhitelist = JSON.parse(readFileSync(whitelistPath));
        const currentBlacklist = JSON.parse(readFileSync(blacklistPath));
        let newBlackList = [];
        // Some chains required pulling lists from other sources
        // switch (chain) {
        //     case Ethereum:
        //         const nftList = await getOpenseaCollectionAddresses()
        //         newBlackList = currentBlacklist.concat(nftList)
        //         break;
        //     default:
        //         newBlackList = newBlackList.concat(currentBlacklist)
        //         break;
        // }
        // Anything whitelisted but no longer present in the assets folder gets blacklisted.
        const removedAssets = arrayDiff(currentWhitelist, assets);
        newBlackList = currentBlacklist.concat(removedAssets);
        writeFileSync(whitelistPath, JSON.stringify(sortElements(assets), null, 4));
        writeFileSync(blacklistPath, JSON.stringify(makeUnique(sortElements(newBlackList)), null, 4));
        console.log(`Updated white and blacklists for chain ${chain}`);
    })
}
// Fix entry point: regenerate white/black lists from the assets folders.
export async function fix() {
    formatWhiteBlackList();
}

View File

@ -0,0 +1,25 @@
import { CoinType } from "@trustwallet/wallet-core";
// Canonical chain folder names derived from wallet-core CoinTypes, plus the
// chain groupings used by the fix/update scripts.
export const getChainName = (id: CoinType): string => CoinType.id(id); // 60 => ethereum

export const Binance = getChainName(CoinType.binance);
export const Classic = getChainName(CoinType.classic);
export const Cosmos = getChainName(CoinType.cosmos);
export const Ethereum = getChainName(CoinType.ethereum);
export const GoChain = getChainName(CoinType.gochain);
export const IoTeX = getChainName(CoinType.iotex);
export const NEO = getChainName(CoinType.neo);
export const NULS = getChainName(CoinType.nuls);
export const POA = getChainName(CoinType.poa);
export const Tezos = getChainName(CoinType.tezos);
export const ThunderCore = getChainName(CoinType.thundertoken);
export const Terra = getChainName(CoinType.terra);
export const TomoChain = getChainName(CoinType.tomochain);
export const Tron = getChainName(CoinType.tron);
export const Kava = getChainName(CoinType.kava);
export const Wanchain = getChainName(CoinType.wanchain);
export const Waves = getChainName(CoinType.waves);
export const Solana = getChainName(CoinType.solana);

// Chains that use Ethereum-style (checksummed hex) addresses.
export const ethForkChains = [Ethereum, Classic, POA, TomoChain, GoChain, Wanchain, ThunderCore];
// Chains with validator (staking) lists in the repo.
export const stakingChains = [Tezos, Cosmos, IoTeX, Tron, Waves, Kava, Terra];
// Chains that maintain whitelist.json / blacklist.json.
export const chainsWithBlacklist = ethForkChains.concat(Tron, Terra, NEO, NULS);

15
script/common/config.ts Normal file
View File

@ -0,0 +1,15 @@
const configFileName = "../config.json";

// Load the config file once; tolerate a missing/unreadable file so getConfig()
// can fall back to defaults. (The original `!configData` guard below was
// unreachable: a failing require() throws before it ever runs.)
let configData: any = undefined;
try {
    configData = require(configFileName);
} catch (err) {
    console.log(`Could not load config file ${configFileName}, using defaults`);
}

// Returns the config value for `key`, or `defaultValue` when the config file
// or the key is missing (a message is logged in either case).
export function getConfig(key: string, defaultValue: any): any {
    if (!configData) {
        console.log(`Missing config, config file: ${configFileName}`);
        return defaultValue;
    }
    if (!(key in configData)) {
        console.log(`Missing config entry, key ${key}, config file: ${configFileName}`);
        return defaultValue;
    }
    return configData[key];
}

View File

@ -0,0 +1,6 @@
const Web3 = require('web3');

// Only the stateless web3.utils helpers are used here, so no provider is
// needed. The original constructed a websocket provider to localhost:8546,
// opening an unused connection that could fail or keep the process alive.
const web3 = new Web3();

// True when the address is already in EIP-55 checksum case.
export const isChecksum = (address: string): boolean => web3.utils.checkAddressChecksum(address);
// Converts an address to EIP-55 checksum case.
export const toChecksum = (address: string): string => web3.utils.toChecksumAddress(address);

View File

@ -0,0 +1,26 @@
import * as fs from "fs";
import * as path from "path";
import { execSync } from "child_process";
// Thin synchronous wrappers around fs/path shared by the scripts.

// "dir/logo.png" => "logo" (base name without extension)
export function getFileName(name: string): string {
    return path.basename(name, path.extname(name));
}

// "logo.png" => "png"; names without a "." (or with only a leading ".") => ""
export function getFileExt(name: string): string {
    return name.slice((Math.max(0, name.lastIndexOf(".")) || Infinity) + 1);
}

export function readFileSync(filePath: string) {
    return fs.readFileSync(filePath, 'utf8');
}

export function writeFileSync(filePath: string, data: any) {
    fs.writeFileSync(filePath, data);
}

export function readDirSync(dirPath: string): string[] {
    return fs.readdirSync(dirPath);
}

export function isPathExistsSync(somePath: string): boolean {
    return fs.existsSync(somePath);
}

// Kilobytes (1000 bytes), not kibibytes.
export function getFileSizeInKilobyte(filePath: string): number {
    return fs.statSync(filePath).size / 1000;
}
// Runs a shell command (used for git renames) synchronously in the given
// working directory; throws if the command exits non-zero.
export function execRename(command: string, cwd: string) {
    console.log(`Running command ${command}`);
    execSync(command, {encoding: "utf-8", cwd: cwd});
}
// Builds a two-step `git mv` through a temp name (likely to support case-only
// renames on case-insensitive filesystems — confirm). Paths are quoted so
// names containing spaces or shell metacharacters don't break the command.
function gitMoveCommand(oldName: string, newName: string): string {
    return `git mv "${oldName}" "${newName}-temp" && git mv "${newName}-temp" "${newName}"`;
}
// Renames a file or folder inside `path` via git (preserving history).
export function gitMove(path: string, oldName: string, newName: string) {
    console.log(`Renaming file or folder at path ${path}: ${oldName} => ${newName}`);
    execRename(gitMoveCommand(oldName, newName), path);
}

73
script/common/image.ts Normal file
View File

@ -0,0 +1,73 @@
import * as sharp from "sharp";
import * as tinify from "tinify";
import * as image_size from "image-size";
import {
writeFileSync,
getFileSizeInKilobyte
} from "./filesystem";
import * as chalk from 'chalk';
import * as config from "../common/config";
//export const minLogoWidth = 64;
//export const minLogoHeight = 64;

// Logo size limits; overridable through script/config.json.
export const maxLogoWidth = config.getConfig("image_max_logo_width", 512);
export const maxLogoHeight = config.getConfig("image_max_logo_height", 512);
export const maxLogoSizeInKilobyte = config.getConfig("image_logo_size_kb", 100);
// True when either dimension exceeds the configured maximum.
export function isDimensionTooLarge(width: number, height: number): boolean {
    return width > maxLogoWidth || height > maxLogoHeight;
}
// Scales (srcWidth, srcHeight) to fit within (targetWidth, targetHeight)
// while preserving aspect ratio. A degenerate zero-sized source maps to the
// full target box.
export function calculateTargetSize(srcWidth: number, srcHeight: number, targetWidth: number, targetHeight: number): {width: number, height: number} {
    if (srcWidth == 0 || srcHeight == 0) {
        return { width: targetWidth, height: targetHeight };
    }
    const scale = Math.min(targetWidth / srcWidth, targetHeight / srcHeight);
    return {
        width: Math.round(scale * srcWidth),
        height: Math.round(scale * srcHeight)
    };
}
// Reads image dimensions from file metadata (via the image-size package).
const getImageDimensions = (path: string) => image_size.imageSize(path);

// Compresses an image in place through the TinyPNG service.
// NOTE(review): no tinify.key is set in this file — confirm it is configured elsewhere.
async function compressTinyPNG(path: string) {
    console.log(`Compressing image via tinypng at path ${path}`);
    const source = await tinify.fromFile(path);
    await source.toFile(path);
}
// Checks one image: downsizes it if its dimensions exceed the configured
// maximum, then compresses via TinyPNG if the file size still exceeds the
// limit. Errors in either step are logged, not thrown.
// Returns true if the image file was updated.
export async function resizeIfTooLarge(path: string): Promise<boolean> {
    let updated: boolean = false;

    const { width: srcWidth, height: srcHeight } = getImageDimensions(path);
    if (isDimensionTooLarge(srcWidth, srcHeight)) {
        const { width, height } = calculateTargetSize(srcWidth, srcHeight, maxLogoWidth, maxLogoHeight);
        console.log(`Resizing image at ${path} from ${srcWidth}x${srcHeight} => ${width}x${height}`)
        await sharp(path).resize(width, height).toBuffer()
            .then(data => {
                writeFileSync(path, data);
                updated = true;
            })
            .catch(e => {
                // Resize failure is non-fatal: keep the original file.
                console.log(chalk.red(e.message));
            });
    }

    // If file size > max limit, compress with tinypng
    const sizeKilobyte = getFileSizeInKilobyte(path);
    if (sizeKilobyte > maxLogoSizeInKilobyte) {
        console.log(`Resizing image at path ${path} from ${sizeKilobyte} kB`);
        await compressTinyPNG(path)
            .then(() => {
                updated = true;
                console.log(`Resized image at path ${path} from ${sizeKilobyte} kB => ${getFileSizeInKilobyte(path)} kB`);
            })
            .catch(e => {
                console.log(chalk.red(e.message));
            });
    }

    return updated;
}

23
script/common/json.ts Normal file
View File

@ -0,0 +1,23 @@
import {
readFileSync,
writeFileSync
} from "./filesystem";
import { sortElements } from "./types";
// Rewrites a JSON file with standard 4-space indentation.
// `silent` suppresses the per-file log line (used for bulk formatting).
export function formatJsonFile(filename: string, silent: boolean = false) {
    const jsonContent = JSON.parse(readFileSync(filename));
    writeFileSync(filename, JSON.stringify(jsonContent, null, 4));
    if (!silent) {
        // Fixed log interpolation: `$(unknown)` is not a template placeholder.
        console.log(`Formatted json file ${filename}`);
    }
}
// Rewrites a JSON file sorted (numbers numerically, strings case-insensitively)
// with standard 4-space indentation.
export function formatSortJsonFile(filename: string) {
    const jsonContent = JSON.parse(readFileSync(filename));
    writeFileSync(filename, JSON.stringify(sortElements(jsonContent), null, 4));
    // Fixed log interpolation: `$(unknown)` is not a template placeholder.
    console.log(`Formatted json file ${filename}`);
}
// Serializes `data` as 4-space-indented JSON to `path`.
export function writeJsonFile(path: string, data: any) {
    writeFileSync(path, JSON.stringify(data, null, 4));
}

View File

@ -0,0 +1,36 @@
import * as path from "path";
import {
isPathExistsSync,
readDirSync
} from "./filesystem";
// Repo layout constants and path helpers.
// Layout: blockchains/<chain>/{info/logo.png, assets/<asset>/..., validators/..., whitelist.json, blacklist.json}

export const logoName = `logo`;
export const infoName = `info`;
export const listName = `list`
export const logoExtension = "png";
export const jsonExtension = "json";

export const logoFullName = `${logoName}.${logoExtension}`;
export const infoFullName = `${infoName}.${jsonExtension}`;
const whiteList = `whitelist.${jsonExtension}`;
const blackList = `blacklist.${jsonExtension}`;
export const validatorsList = `${listName}.${jsonExtension}`

export const chainsPath: string = path.join(process.cwd(), '/blockchains');
export const getChainPath = (chain: string): string => `${chainsPath}/${chain}`;
export const getChainLogoPath = (chain: string): string => `${getChainPath(chain)}/info/${logoFullName}`;
export const getChainAssetsPath = (chain: string): string => `${getChainPath(chain)}/assets`;
export const getChainAssetPath = (chain: string, asset: string) => `${getChainAssetsPath(chain)}/${asset}`;
export const getChainAssetLogoPath = (chain: string, asset: string): string => `${getChainAssetPath(chain, asset)}/${logoFullName}`;
export const getChainAssetInfoPath = (chain: string, asset: string): string => `${getChainAssetPath(chain, asset)}/${infoFullName}`;
export const getChainWhitelistPath = (chain: string): string => `${getChainPath(chain)}/${whiteList}`;
export const getChainBlacklistPath = (chain: string): string => `${getChainPath(chain)}/${blackList}`;
export const getChainValidatorsPath = (chain: string): string => `${getChainPath(chain)}/validators`;
// Consistency: reuse the validatorsList constant (same value) instead of
// hard-coding `list.${jsonExtension}` a second time.
export const getChainValidatorsListPath = (chain: string): string => `${getChainValidatorsPath(chain)}/${validatorsList}`;
export const getChainValidatorsAssetsPath = (chain: string): string => `${getChainValidatorsPath(chain)}/assets`
export const getChainValidatorAssetLogoPath = (chain: string, asset: string): string => `${getChainValidatorsAssetsPath(chain)}/${asset}/${logoFullName}`

export const isChainAssetInfoExistSync = (chain: string, address: string) => isPathExistsSync(getChainAssetInfoPath(chain, address));
export const getChainAssetsList = (chain: string): string[] => readDirSync(getChainAssetsPath(chain));
export const getChainAssetFilesList = (chain: string, address: string) => readDirSync(getChainAssetPath(chain, address));

29
script/common/types.ts Normal file
View File

@ -0,0 +1,29 @@
// Builds a lookup object from an array: each element becomes a key with value "".
export const mapList = (arr: any[]): {[key: string]: string} => {
    const lookup: {[key: string]: string} = {};
    for (const item of arr) {
        lookup[item] = "";
    }
    return lookup;
}
// Sorts in place and returns the array: numeric-looking values compare as
// numbers, strings compare case-insensitively, other pairs compare equal.
export const sortElements = (arr: any[]): any[] => {
    const compare = (a: any, b: any): number => {
        if (!isNaN(a) && !isNaN(b)) {
            return a - b; // numerical comparison
        }
        const bothStrings = (typeof a === 'string' || a instanceof String)
            && (typeof b === 'string' || b instanceof String);
        if (bothStrings) {
            return a.toLowerCase() > b.toLowerCase() ? 1 : -1;
        }
        return 0;
    };
    arr.sort(compare);
    return arr;
}
// Deduplicates, keeping first-occurrence order.
export const makeUnique = (arr: any[]): any[] => [...new Set(arr)];
// Remove from set a elements of set b.
export function arrayDiff(a: string[], b: string[]): string[] {
    const exclude = new Set(b);
    return a.filter(item => !exclude.has(item));
}

6
script/config.json Normal file
View File

@ -0,0 +1,6 @@
{
"image_max_logo_width": 512,
"image_max_logo_height": 512,
"image_logo_size_kb": 100,
"binance_assets_url": "https://explorer.binance.org/api/v1/assets?page=1&rows=1000"
}

View File

@ -1,10 +0,0 @@
import { ethSidechains, readDirSync, getChainAssetsPath } from "../src/test/helpers"
import { checksumAssetsFolder } from './format_files_name'
ethSidechains.forEach(chain => {
const chainAssetsPath = getChainAssetsPath(chain)
readDirSync(chainAssetsPath).forEach(addr => {
checksumAssetsFolder(chainAssetsPath, addr)
})
})

View File

@ -1,42 +0,0 @@
import {
ethSidechains,
readDirSync,
getChainAssetsPath,
getChainAssetFilesList,
isChecksum,
toChecksum,
getFileName,
getFileExt,
getMoveCommandFromTo,
execRename,
logoName,
logoExtension,
logo,
getChainAssetPath
} from "../src/test/helpers"
ethSidechains.forEach(chain => {
const assetsPath = getChainAssetsPath(chain)
readDirSync(assetsPath).forEach(address => {
getChainAssetFilesList(chain, address).forEach(file => {
if (getFileName(file) == logoName && getFileExt(file) !== logoExtension) {
console.log(`Renaming incorrect asset logo extension ${file} ...`)
renameAndMove(getChainAssetPath(chain, address), file, logo)
}
})
checksumAssetsFolder(assetsPath, address)
})
})
export function checksumAssetsFolder(assetsFolderPath: string, addr: string) {
if (!isChecksum(addr)) {
renameAndMove(assetsFolderPath, addr, toChecksum(addr))
}
}
export function renameAndMove(path: string, oldName: string, newName: string) {
console.log(` Renaming file or folder at path ${path}: ${oldName} => ${newName}`)
execRename(path, getMoveCommandFromTo(oldName, newName))
}

View File

@ -1,92 +0,0 @@
const fs = require('fs')
import { getOpenseaCollectionAddresses } from "./opesea_contrats"
import {
Ethereum, Terra, Tron,
getChainAssetInfoPath,
getChainAssetsList,
ethSidechains,
getChainBlacklistPath,
getChainValidatorsListPath,
getChainWhitelistPath,
getUnique,
isChainAssetInfoExistSync,
isChainBlacklistExistSync,
isChainWhitelistExistSync,
mapList,
readFileSync,
sortDesc,
stakingChains,
writeFileSync,
} from '../src/test/helpers'
formatWhiteBlackList()
formatValidators()
formatInfo()
function formatWhiteBlackList() {
ethSidechains.concat(Tron, Terra, "neo", 'nuls').forEach(async chain => {
const assets = getChainAssetsList(chain)
const whitelistPath = getChainWhitelistPath(chain)
const blacklistPath = getChainBlacklistPath(chain)
//Create inital lists if they do not exists
if (!isChainWhitelistExistSync(chain)) {
writeFileSync(whitelistPath, `[]`)
}
if (!isChainBlacklistExistSync(chain)) {
writeFileSync(blacklistPath, `[]`)
}
const currentWhitelist = JSON.parse(readFileSync(whitelistPath))
const currentBlacklist = JSON.parse(readFileSync(blacklistPath))
let newBlackList = []
// Some chains required pulling lists from other sources
// switch (chain) {
// case Ethereum:
// const nftList = await getOpenseaCollectionAddresses()
// newBlackList = currentBlacklist.concat(nftList)
// break;
// default:
// newBlackList = newBlackList.concat(currentBlacklist)
// break;
// }
const removedAssets = getRemovedAddressesFromAssets(assets, currentWhitelist)
newBlackList = currentBlacklist.concat(removedAssets)
fs.writeFileSync(whitelistPath, JSON.stringify(sortDesc(assets), null, 4))
fs.writeFileSync(blacklistPath, JSON.stringify(getUnique(sortDesc(newBlackList)), null, 4))
})
}
function formatValidators() {
stakingChains.forEach(chain => {
const validatorsPath = getChainValidatorsListPath(chain)
const currentValidatorsList = JSON.parse(readFileSync(validatorsPath))
fs.writeFileSync(validatorsPath, JSON.stringify(currentValidatorsList, null, 4))
})
}
function formatInfo() {
ethSidechains.forEach(chain => {
const chainAssets = getChainAssetsList(chain)
chainAssets.forEach(address => {
if (isChainAssetInfoExistSync(chain, address)) {
const chainAssetInfoPath = getChainAssetInfoPath(chain, address)
const currentAssetInfo = JSON.parse(readFileSync(chainAssetInfoPath))
fs.writeFileSync(chainAssetInfoPath, JSON.stringify(currentAssetInfo, null, 4))
}
})
})
}
function getRemovedAddressesFromAssets(assets: string[], whiteList: string[]): string[] {
const mappedAssets = mapList(assets)
const removed = whiteList.filter(a => !mappedAssets.hasOwnProperty(a))
return removed
}

12
script/main/fix.ts Normal file
View File

@ -0,0 +1,12 @@
import { fixAll } from "../action/update-all";
// CLI entry: run all fix actions; exit non-zero on failure.
// main is async and awaits fixAll so that rejections from async fix steps are
// caught here instead of escaping as unhandled promise rejections (the
// original's try/catch could not catch them).
export async function main() {
    try {
        await fixAll();
    } catch (err) {
        console.error(err);
        process.exit(1);
    }
}

main();

12
script/main/update.ts Normal file
View File

@ -0,0 +1,12 @@
import { updateAll } from "../action/update-all";
// CLI entry: run all update actions; exit non-zero on failure.
// main is async and awaits updateAll so that rejections from async update
// steps are caught here instead of escaping as unhandled promise rejections
// (the original's try/catch could not catch them).
export async function main() {
    try {
        await updateAll();
    } catch (err) {
        console.error(err);
        process.exit(1);
    }
}

main();

View File

@ -1,101 +0,0 @@
import {
maxLogoWidth,
maxLogoHeight,
readDirSync,
chainsFolderPath,
getChainLogoPath,
calculateAspectRatioFit,
getImageDimensions,
getChainAssetsPath,
getChainAssetLogoPath,
isPathExistsSync,
writeFileSync,
readFileSync,
getChainValidatorsListPath,
getChainValidatorAssetLogoPath,
maxAssetLogoSizeInKilobyte,
getFileSizeInKilobyte
} from "../src/test/helpers"
const sharp = require('sharp')
const bluebird = require("bluebird")
const foundChains = readDirSync(chainsFolderPath)
const tinify = require("tinify");
tinify.key = "MXxhvmhjMkMM6CVccGrfyQm2RHpTf1G7"; // Key is free to get, gives 500 uploads per month
/**
 * Walk every chain folder and downsize chain, asset and validator logos whose
 * dimensions exceed the configured maximum; images that are still above the
 * size limit afterwards are compressed through the TinyPNG service.
 */
function downsize() {
    console.log(`Start resizing`)
    bluebird.map(foundChains, async chain => {
        console.log(`Resizing assets on chain ${chain}`)
        const chainLogoPath = getChainLogoPath(chain)
        // FIX: property was misspelled `heigth`, so srcHeight was always
        // undefined and oversized chain logos were never detected/resized.
        const { width: srcWidth, height: srcHeight } = getImageDimensions(chainLogoPath)
        // Check and resize if needed chain logo
        if (isDownsizing(srcWidth, srcHeight)) {
            await resize(srcWidth, srcHeight, chainLogoPath)
        }
        // Check and resize if needed chain assets
        const assetsPath = getChainAssetsPath(chain)
        if (isPathExistsSync(assetsPath)) {
            // FIX: await the series so the completion log below only prints
            // after every asset of this chain has actually been processed.
            await bluebird.mapSeries(readDirSync(assetsPath), async asset => {
                const assetPath = getChainAssetLogoPath(chain, asset)
                const { width: srcWidth, height: srcHeight } = getImageDimensions(assetPath)
                if (isDownsizing(srcWidth, srcHeight)) {
                    await resize(srcWidth, srcHeight, assetPath)
                }
                // If size still > max limit, compress with tinypng
                const sizeKilobyte = getFileSizeInKilobyte(assetPath)
                if (sizeKilobyte > maxAssetLogoSizeInKilobyte) {
                    await compressTinyPNG(assetPath)
                    console.log(`Successfully resized image at path ${assetPath} from ${sizeKilobyte} => ${getFileSizeInKilobyte(assetPath)}`)
                }
            })
        }
        // Check and resize if needed chain validators image
        const chainValidatorsList = getChainValidatorsListPath(chain)
        if (isPathExistsSync(chainValidatorsList)) {
            const validatorsList = JSON.parse(readFileSync(getChainValidatorsListPath(chain)))
            // FIX: await for the same reason as above.
            await bluebird.mapSeries(validatorsList, async ({ id }) => {
                const path = getChainValidatorAssetLogoPath(chain, id)
                const { width: srcWidth, height: srcHeight } = getImageDimensions(path)
                if (isDownsizing(srcWidth, srcHeight)) {
                    await resize(srcWidth, srcHeight, path)
                }
                // If size still > max limit, compress with tinypng
                const sizeKilobyte = getFileSizeInKilobyte(path)
                if (sizeKilobyte > maxAssetLogoSizeInKilobyte) {
                    await compressTinyPNG(path)
                }
            })
        }
        console.log(` Resizing assets on chain ${chain} completed`)
    })
}

downsize()
// True when either dimension exceeds the configured logo maximum,
// i.e. the image does NOT fit within maxLogoWidth x maxLogoHeight.
function isDownsizing(srcWidth: number, srcHeight: number): boolean {
    const fitsWidth = srcWidth <= maxLogoWidth
    const fitsHeight = srcHeight <= maxLogoHeight
    return !(fitsWidth && fitsHeight)
}
// Scale the image at `path` down to fit the maximum logo dimensions while
// preserving aspect ratio. Failures are logged and swallowed (best effort).
async function resize(srcWidth: number, srcHeight: number, path: string) {
    const target = calculateAspectRatioFit(srcWidth, srcHeight, maxLogoWidth, maxLogoHeight)
    console.log(`  Resizing image at ${path} from ${srcWidth}x${srcHeight} => ${target.width}x${target.height}`)
    await sharp(path)
        .resize(target.width, target.height)
        .toBuffer()
        .then(buffer => writeFileSync(path, buffer))
        .catch(err => {
            console.log(err.message)
        })
}
// Upload the image at `path` to the TinyPNG API and overwrite the local file
// with the compressed result.
export async function compressTinyPNG(path: string) {
    console.log(`Compressing image via tinypng at path ${path}`)
    const compressed = await tinify.fromFile(path);
    await compressed.toFile(path);
}

View File

@ -1,40 +0,0 @@
const axios = require("axios")
const bluebird = require("bluebird")
const fs = require("fs")
const path = require("path")
const chalk = require('chalk')
const blacklist = require('../blockchains/binance/blacklist.json')
// Fetch the full BEP2 asset list from the Binance explorer and download any
// missing logos, skipping blacklisted assets.
;(async () => {
    const { assetInfoList } = await axios.get(`https://explorer.binance.org/api/v1/assets?page=1&rows=1000`).then(r => r.data)
    // Hoisted: target directory is the same for every asset (loop-invariant).
    const binanceDir = path.join(__dirname, `../blockchains/binance`)
    await bluebird.each(assetInfoList, async ({ asset, assetImg }) => {
        if (!assetImg || blacklist.includes(asset)) {
            return
        }
        const imagePath = `${binanceDir}/assets/${asset}/logo.png`
        if (fs.existsSync(imagePath)) {
            // Logo already present.
            console.log(chalk.green(`${asset}`))
            return
        }
        console.log(chalk.red(`${asset}`))
        // FIX: synchronous recursive mkdir guarantees the directory exists
        // before the write stream below is created. The old async-callback
        // mkdir raced with createWriteStream, and a throw inside its callback
        // could not be caught by the surrounding handler.
        fs.mkdirSync(`${binanceDir}/assets/${asset}`, { recursive: true })
        const imageStream = await fetchImage(assetImg)
        imageStream.pipe(fs.createWriteStream(imagePath))
    })
    // Request the image as a stream; rethrow with URL context on failure.
    function fetchImage(url) {
        return axios.get(url, { responseType: "stream" }).then(r => r.data).catch(err => {
            throw `Error fetchImage: ${url} ${err.message}`
        })
    }
})().catch(err => {
    console.error(err)
    process.exit(1)
})

View File

@ -318,6 +318,7 @@ export const rootDirAllowedFiles = [
"dapps",
"media",
"node_modules",
"script-old",
"script",
"src",
".gitignore",

View File

@ -22,7 +22,6 @@ import {
getChainValidatorsList,
findDuplicate,
findCommonElementOrDuplicate,
isChecksum,
isLogoDimensionOK,
isLogoSizeOK,
isLowerCase,
@ -32,7 +31,6 @@ import {
isValidJSON,
isAssetInfoOK,
isValidatorHasAllKeys,
mapList,
pricingFolderPath,
readDirSync,
readFileSync,
@ -40,7 +38,23 @@ import {
stakingChains,
} from "./helpers"
import { ValidatorModel, mapTiker, TickerType } from "./models";
import { getHandle } from "../../script/gen_info";
import { getHandle } from "../../script-old/gen_info";
import {
isChecksum,
toChecksum
} from "../../script/common/eth-web3";
import {
isDimensionTooLarge,
calculateTargetSize
} from "../../script/common/image";
import {
mapList,
sortElements,
makeUnique,
arrayDiff
} from "../../script/common/types";
import { findImagesToFetch } from "../../script/action/binance";
describe("Check repository root dir", () => {
const dirActualFiles = readDirSync(".")
@ -459,3 +473,65 @@ describe("Test helper functions", () => {
expect(findCommonElementOrDuplicate([], []), `Empty lists`).toBe(null)
})
});
describe("Test eth-web3 helpers", () => {
    // Shared fixture: the same address in checksummed and lowercase form.
    const checksummed = "0x7Bb09bC8aDE747178e95B1D035ecBeEBbB18cFee";
    const lowercased = "0x7bb09bc8ade747178e95b1d035ecbeebbb18cfee";

    test(`Test isChecksum`, () => {
        expect(isChecksum(checksummed), `checksum`).toBe(true);
        expect(isChecksum(lowercased), `lowercase`).toBe(false);
        expect(isChecksum("0x7Bb09bC8aDE747178e95B1D035ecBe"), `too short`).toBe(false);
    });
    test(`Test toChecksum`, () => {
        expect(toChecksum(lowercased), `from lowercase`).toEqual(checksummed);
        expect(toChecksum(checksummed), `from checksum`).toEqual(checksummed);
    });
});
describe("Test image helpers", () => {
test(`Test isDimensionTooLarge`, () => {
expect(isDimensionTooLarge(256, 256), `256x256`).toBe(false);
expect(isDimensionTooLarge(64, 64), `64x64`).toBe(false);
expect(isDimensionTooLarge(800, 800), `800x800`).toBe(true);
expect(isDimensionTooLarge(256, 800), `256x800`).toBe(true);
expect(isDimensionTooLarge(800, 256), `800x256`).toBe(true);
});
test(`Test calculateReducedSize`, () => {
expect(calculateTargetSize(256, 256, 512, 512), `small 1.0`).toEqual({width: 512, height: 512});
expect(calculateTargetSize(800, 800, 512, 512), `large 1.0`).toEqual({width: 512, height: 512});
expect(calculateTargetSize(200, 100, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
expect(calculateTargetSize(100, 200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
expect(calculateTargetSize(1200, 600, 512, 512), `small 2.0`).toEqual({width: 512, height: 256});
expect(calculateTargetSize(600, 1200, 512, 512), `small 0.5`).toEqual({width: 256, height: 512});
expect(calculateTargetSize(256, 0, 512, 512), `zero`).toEqual({width: 512, height: 512});
});
});
describe("Test type helpers", () => {
test(`Test mapList`, () => {
expect(mapList(["a", "b", "c"]), `3 elems`).toEqual({"a": "", "b":"", "c": ""});
});
test(`Test sortElements`, () => {
expect(sortElements(["c", "a", "b"]), `3 elems`).toEqual(["a", "b", "c"]);
expect(sortElements(["C", "a", "b"]), `mixed case`).toEqual(["a", "b", "C"]);
expect(sortElements(["1", "2", "11"]), `numerical`).toEqual(["1", "2", "11"]);
expect(sortElements(["C", "a", "1", "b", "2", "11"]), `complex`).toEqual(["1", "2", "11", "a", "b", "C"]);
});
test(`Test makeUnique`, () => {
expect(makeUnique(["a", "b", "c", "b"]), `4 elems with 1 duplicate`).toEqual(["a", "b", "c"]);
});
test(`Test arrayDiff`, () => {
expect(arrayDiff(["a", "b", "c"], ["c"]), `4 elems with 1 duplicate`).toEqual(["a", "b"]);
});
});
describe("Test action binance", () => {
    test(`Test findImagesToFetch`, () => {
        // Assets whose logos are not present in the repository.
        const missingAssets: any[] = [{asset: "A1", assetImg: "imgurl1"}, {asset: "A2", assetImg: "imgurl2"}];
        // Assets whose logos already exist in blockchains/binance.
        const existingAssets: any[] = [{asset: "BUSD-BD1", assetImg: "imgurlBUSD"}, {asset: "ETH-1C9", assetImg: "imgurlETH"}];
        expect(findImagesToFetch(missingAssets, []), `2 nonexisting`).toEqual(missingAssets);
        expect(findImagesToFetch(missingAssets, ["A1"]), `2 nonexisting with 1 blacklisted`).toEqual([{asset: "A2", assetImg: "imgurl2"}]);
        expect(findImagesToFetch(existingAssets, []), `2 existing`).toEqual([]);
        expect(findImagesToFetch([], []), `empty`).toEqual([]);
    });
});