diff --git a/package.json b/package.json index a4cc56d4..62a20560 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,7 @@ "homepage": "https://github.com/InstaDApp/dsa-connectors-new#readme", "dependencies": { "@openzeppelin/contracts": "^3.4.0-solc-0.7", + "@typechain/ethers-v5": "^8.0.5", "@typechain/hardhat": "^3.0.0", "@uniswap/v3-core": "^1.0.0", "@uniswap/v3-periphery": "^1.2.1", diff --git a/scripts/constant/addresses.ts b/scripts/constant/addresses.ts index 86af9b6a..28b6afec 100644 --- a/scripts/constant/addresses.ts +++ b/scripts/constant/addresses.ts @@ -9,4 +9,3 @@ export const addresses = { instaIndex: "0x2971AdFa57b20E5a416aE5a708A8655A9c74f723", }, }; -  \ No newline at end of file diff --git a/scripts/constant/constant.ts b/scripts/constant/constant.ts index 0139c186..14fcbe2f 100644 --- a/scripts/constant/constant.ts +++ b/scripts/constant/constant.ts @@ -1,6 +1,6 @@ export const constants = { - address_zero: "0x0000000000000000000000000000000000000000", - eth_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", - max_value: "115792089237316195423570985008687907853269984665640564039457584007913129639935" - }; - \ No newline at end of file + address_zero: "0x0000000000000000000000000000000000000000", + eth_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", + max_value: + "115792089237316195423570985008687907853269984665640564039457584007913129639935", +}; diff --git a/scripts/deployAvaxConnector.js b/scripts/deployAvaxConnector.js deleted file mode 100644 index f385fc8c..00000000 --- a/scripts/deployAvaxConnector.js +++ /dev/null @@ -1,27 +0,0 @@ -const hre = require("hardhat"); -const { ethers, deployments, getUnnamedAccounts } = hre; -const { deploy } = deployments; - - -async function main() { - - const deployer = (await getUnnamedAccounts())[0] - - const connector = "ConnectV2InstaPoolV3Avalanche" - - const connectorInstance = await deploy(connector, { - from: deployer, - }) - console.log(`${connector} deployed: `, connectorInstance.address); - - await hre.run("sourcify", { - address: connectorInstance.address, - }) -} - -main() - .then(() => process.exit(0)) - .catch(error => { - console.error(error); - process.exit(1); - }); \ No newline at end of file diff --git a/scripts/deployCompoundMapping.js b/scripts/deployCompoundMapping.js deleted file mode 100644 index 498b00d5..00000000 --- a/scripts/deployCompoundMapping.js +++ /dev/null @@ -1,82 +0,0 @@ -const hre = require("hardhat"); -const { ethers } = hre; - -async function main() { - - const CONNECTORS_V2 = "0x97b0B3A8bDeFE8cB9563a3c610019Ad10DB8aD11"; - - const ctokenMapping = { - "ETH-A": "0x4ddc2d193948926d02f9b1fe9e1daa0718270ed5", - "BAT-A": "0x6c8c6b02e7b2be14d4fa6022dfd6d75921d90e4e", - "COMP-A": "0x70e36f6bf80a52b3b46b3af8e106cc0ed743e8e4", - "DAI-A": "0x5d3a536e4d6dbd6114cc1ead35777bab948e3643", - "REP-A": "0x158079ee67fce2f58472a96584a73c7ab9ac95c1", - "UNI-A": "0x35a18000230da775cac24873d00ff85bccded550", - "USDC-A": "0x39aa39c021dfbae8fac545936693ac917d5e7563", - "USDT-A": "0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9", - "WBTC-A": "0xc11b1268c1a384e55c48c2391d8d480264a3a7f4", - "WBTC-B": "0xccF4429DB6322D5C611ee964527D42E5d685DD6a", - "ZRX-A": "0xb3319f5d18bc0d84dd1b4825dcde5d5f7266d407", - "YFI-A": "0x80a2ae356fc9ef4305676f7a3e2ed04e12c33946", - "SUSHI-A": "0x4b0181102a0112a2ef11abee5563bb4a3176c9d7", - "MKR-A": "0x95b4ef2869ebd94beb4eee400a99824bf5dc325b", - "AAVE-A": "0xe65cdb6479bac1e22340e4e755fae7e509ecd06c", - "TUSD-A": "0x12392f67bdf24fae0af363c24ac620a2f67dad86", - "LINK-A": 
"0xface851a4921ce59e912d19329929ce6da6eb0c7", - } - - const tokenMapping = { - "ETH-A": "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", - "BAT-A": "0x0D8775F648430679A709E98d2b0Cb6250d2887EF", - "COMP-A": "0xc00e94cb662c3520282e6f5717214004a7f26888", - "DAI-A": "0x6b175474e89094c44da98b954eedeac495271d0f", - "REP-A": "0x1985365e9f78359a9B6AD760e32412f4a445E862", - "UNI-A": "0x1f9840a85d5af5bf1d1762f925bdaddc4201f984", - "USDC-A": "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48", - "USDT-A": "0xdac17f958d2ee523a2206206994597c13d831ec7", - "WBTC-A": "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", - "WBTC-B": "0x2260fac5e5542a773aa44fbcfedf7c193bc2c599", - "ZRX-A": "0xe41d2489571d322189246dafa5ebde1f4699f498", - "YFI-A": "0x0bc529c00C6401aEF6D220BE8C6Ea1667F6Ad93e", - "SUSHI-A": "0x6B3595068778DD592e39A122f4f5a5cF09C90fE2", - "MKR-A": "0x9f8F72aA9304c8B593d555F12eF6589cC3A579A2", - "AAVE-A": "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9", - "TUSD-A": "0x0000000000085d4780B73119b644AE5ecd22b376", - "LINK-A": "0x514910771af9ca656af840dff83e8264ecf986ca", - } - - const Mapping = await ethers.getContractFactory("InstaCompoundMapping"); - const mapping = await Mapping.deploy( - CONNECTORS_V2, - Object.keys(ctokenMapping), - Object.values(tokenMapping), - Object.values(ctokenMapping) - ); - await mapping.deployed(); - - console.log(`InstaCompoundMapping Deployed: ${mapping.address}`); - - try { - await hre.run("verify:verify", { - address: mapping.address, - constructorArguments: [ - CONNECTORS_V2, - Object.keys(ctokenMapping), - Object.values(tokenMapping), - Object.values(ctokenMapping) - ] - } - ) -} catch (error) { - console.log(`Failed to verify: InstaCompoundMapping@${mapping.address}`) - console.log(error) - console.log() -} -} - -main() - .then(() => process.exit(0)) - .catch(error => { - console.error(error); - process.exit(1); - }); \ No newline at end of file diff --git a/scripts/deployConnectorsFromCmd.js b/scripts/deployConnectorsFromCmd.js deleted file mode 100644 index f2584fa5..00000000 --- a/scripts/deployConnectorsFromCmd.js +++ /dev/null @@ -1,105 +0,0 @@ -const fs = require("fs"); -const hre = require("hardhat"); -const { ethers } = hre; - -let args = process.argv; -args = args.splice(2, args.length); -let params = {}; - -for (let i = 0; i < args.length; i += 2) { - if (args[i][0] !== "-" || args[i][1] !== "-") { - console.log("Please add '--' for the key"); - process.exit(-1); - } - let key = args[i].slice(2, args[i].length); - params[key] = args[i + 1]; -} - -if (!params.hasOwnProperty('connector')) { - console.error("Should include connector params") - process.exit(-1); -} - -if (!params.hasOwnProperty('network')) { - console.error("Should include network params") - process.exit(-1); -} - -if (!params.hasOwnProperty('gasPrice')) { - console.error("Should include gas params") - process.exit(-1); -} - -let privateKey = process.env.PRIVATE_KEY; -let provider = new ethers.providers.JsonRpcProvider(hre.config.networks[params['network']].url); -let wallet = new ethers.Wallet(privateKey, provider); - -hre.network.name = params['networkName']; -hre.network.config = hre.config.networks[params['networkName']]; -hre.network.provider = provider; -let contracts = []; - -const parseFile = async (filePath) => { - const data = fs.readFileSync(filePath, "utf-8"); - let parsedData = data.split("contract "); - parsedData = parsedData[parsedData.length - 1].split(" "); - parsedData = parsedData[0]; - return parsedData; -} - -const parseDir = async (root, basePath, addPath) => { - for(let i = 0; i < 
root.length; i++) { - addPath = "/" + root[i]; - const dir = fs.readdirSync(basePath + addPath); - if(dir.indexOf("main.sol") !== -1) { - const fileData = await parseFile(basePath + addPath + "/main.sol"); - contracts.push(fileData) - } else { - await parseDir(dir, basePath + addPath, ""); - } - } -} - -const main = async () => { - const mainnet = fs.readdirSync("./contracts/mainnet/connectors/"); - const polygon = fs.readdirSync("./contracts/polygon/connectors/"); - let basePathMainnet = "./contracts/mainnet/connectors/"; - let basePathPolygon = "./contracts/polygon/connectors/"; - - const connectorName = params['connector']; - - await parseDir(mainnet, basePathMainnet, ""); - await parseDir(polygon, basePathPolygon, ""); - - if(contracts.indexOf(connectorName) === -1) { - throw new Error("can not find the connector!\n" + "supported connector names are:\n" + contracts.join("\n")); - } - - const Connector = await ethers.getContractFactory(connectorName); - const connector = await Connector.connect(wallet).deploy({ gasPrice: ethers.utils.parseUnits(params['gasPrice'], "gwei") }); - await connector.deployed(); - - console.log(`${connectorName} Deployed: ${connector.address}`); - try { - await hre.run("verify:verify", { - address: connector.address, - constructorArguments: [] - } - ) - } catch (error) { - console.log(`Failed to verify: ${connectorName}@${connector.address}`) - console.log(error) - } - - return connector.address -} - -main() - .then(() => { - console.log("Done successfully"); - process.exit(0) - }) - .catch(err => { - console.log("error:", err); - process.exit(1); - }) \ No newline at end of file diff --git a/scripts/deployInstaMappingController.js b/scripts/deployInstaMappingController.js deleted file mode 100644 index f6919bb2..00000000 --- a/scripts/deployInstaMappingController.js +++ /dev/null @@ -1,36 +0,0 @@ -const hre = require('hardhat') -const { ethers } = hre - -async function main () { - if (hre.network.name === 'mainnet') { - console.log( - '\n\n Deploying Contracts to mainnet. Hit ctrl + c to abort' - ) - } else if (hre.network.name === 'hardhat') { - console.log( - '\n\n Deploying Contracts to hardhat.' - ) - } - - const InstaMappingController = await ethers.getContractFactory('InstaMappingController') - const instaMappingController = await InstaMappingController.deploy() - await instaMappingController.deployed() - - console.log('InstaMappingController deployed: ', instaMappingController.address) - - if (hre.network.name === 'mainnet') { - await hre.run('verify:verify', { - address: instaMappingController.address, - constructorArguments: [] - }) - } else if (hre.network.name === 'hardhat') { - console.log("Contracts deployed.") - } -} - -main() - .then(() => process.exit(0)) - .catch(error => { - console.error(error) - process.exit(1) - }) diff --git a/scripts/deployMappingContract.js b/scripts/deployMappingContract.js deleted file mode 100644 index 7fce2acd..00000000 --- a/scripts/deployMappingContract.js +++ /dev/null @@ -1,38 +0,0 @@ -const hre = require('hardhat') -const { ethers } = hre - -async function main () { - if (hre.network.name === 'mainnet') { - console.log( - '\n\n Deploying Contracts to mainnet. Hit ctrl + c to abort' - ) - } else if (hre.network.name === 'hardhat') { - console.log( - '\n\n Deploying Contracts to hardhat.' 
- ) - } - - const mappingContract = "CONTRACT_NAME" - - const InstaProtocolMapping = await ethers.getContractFactory(mappingContract) - const instaProtocolMapping = await InstaProtocolMapping.deploy() - await instaProtocolMapping.deployed() - - console.log(`${mappingContract} deployed: `, instaProtocolMapping.address) - - if (hre.network.name === 'mainnet') { - await hre.run('verify:verify', { - address: instaProtocolMapping.address, - constructorArguments: [] - }) - } else if (hre.network.name === 'hardhat') { - console.log("Contracts deployed.") - } -} - -main() - .then(() => process.exit(0)) - .catch(error => { - console.error(error) - process.exit(1) - }) diff --git a/scripts/deploySingle.js b/scripts/deploySingle.js deleted file mode 100644 index 50ed7298..00000000 --- a/scripts/deploySingle.js +++ /dev/null @@ -1,23 +0,0 @@ -const hre = require("hardhat"); -const { ethers } = hre; - -const deployConnector = require("./deployConnector"); - -async function main() { - const address = await deployConnector("ConnectOne") // Example - - const connectorsAbi = [ - "function addConnectors(string[] _connectorNames, address[] _connectors)" - ] - - const connectorsContract = new ethers.Contract("0x84b457c6D31025d56449D5A01F0c34bF78636f67", connectorsAbi, ethers.provider); - - await connectorsContract.addConnectors(['1inch'], [address]) -} - -main() - .then(() => process.exit(0)) - .catch(error => { - console.error(error); - process.exit(1); - }); \ No newline at end of file diff --git a/scripts/encodeFlashcastData.js b/scripts/encodeFlashcastData.js deleted file mode 100644 index a674c1af..00000000 --- a/scripts/encodeFlashcastData.js +++ /dev/null @@ -1,16 +0,0 @@ -const abis = require("./constant/abis"); -const addresses = require("./constant/addresses"); -const { web3 } = hre; - -const encodeSpells = require("./encodeSpells.js") - - -module.exports = function (spells) { - const encodeSpellsData = encodeSpells(spells); - const targetType = "string[]"; - let argTypes = [targetType, "bytes[]"]; - return web3.eth.abi.encodeParameters(argTypes, [ - encodeSpellsData[0], - encodeSpellsData[1], - ]); -}; diff --git a/scripts/polygon/buildDSAv2.js b/scripts/polygon/buildDSAv2.js deleted file mode 100644 index 967d879f..00000000 --- a/scripts/polygon/buildDSAv2.js +++ /dev/null @@ -1,15 +0,0 @@ -const hre = require("hardhat"); -const { ethers } = hre; -const addresses = require("./constant/addresses"); -const abis = require("../constant/abis"); - -const instaImplementations_m1 = require("../../deployements/mainnet/Implementation_m1.sol/InstaImplementationM1.json") - -module.exports = async function (owner) { - const instaIndex = await ethers.getContractAt(abis.core.instaIndex, addresses.core.instaIndex) - - const tx = await instaIndex.build(owner, 2, owner); - const receipt = await tx.wait() - const event = receipt.events.find(a => a.event === "LogAccountCreated") - return await ethers.getContractAt(instaImplementations_m1.abi, event.args.account) -}; diff --git a/scripts/polygon/buildDSAv2.ts b/scripts/polygon/buildDSAv2.ts new file mode 100644 index 00000000..de623b32 --- /dev/null +++ b/scripts/polygon/buildDSAv2.ts @@ -0,0 +1,20 @@ +import { ethers } from "hardhat"; + +import { addresses } from "./constant/addresses"; +import { abis } from "../constant/abis"; + +import { abi } from "../../deployements/mainnet/Implementation_m1.sol/InstaImplementationM1.json"; + +module.exports = async function(owner: any) { + const instaIndex = await ethers.getContractAt( + abis.core.instaIndex, + 
addresses.core.instaIndex + ); + + const tx = await instaIndex.build(owner, 2, owner); + const receipt = await tx.wait(); + const event = receipt.events.find( + (a: { event: string }) => a.event === "LogAccountCreated" + ); + return await ethers.getContractAt(abi, event.args.account); +}; diff --git a/scripts/polygon/constant/addresses.js b/scripts/polygon/constant/addresses.ts similarity index 91% rename from scripts/polygon/constant/addresses.js rename to scripts/polygon/constant/addresses.ts index baaaa3da..d60133c0 100644 --- a/scripts/polygon/constant/addresses.js +++ b/scripts/polygon/constant/addresses.ts @@ -1,4 +1,4 @@ -module.exports = { +export const addresses = { connectors: { basic: "0x1cAF5EC802ca602E98139AD96A8f2B7BC524264E", auth: "0xf6474aD0dA75A0dE15D2c915e601D9f754B9e6fe", diff --git a/scripts/polygon/constant/constant.js b/scripts/polygon/constant/constant.js deleted file mode 100644 index 62933314..00000000 --- a/scripts/polygon/constant/constant.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = { - address_zero: "0x0000000000000000000000000000000000000000", - eth_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", - matic_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", - max_value: "115792089237316195423570985008687907853269984665640564039457584007913129639935" - }; - \ No newline at end of file diff --git a/scripts/polygon/constant/constant.ts b/scripts/polygon/constant/constant.ts new file mode 100644 index 00000000..4059e9e5 --- /dev/null +++ b/scripts/polygon/constant/constant.ts @@ -0,0 +1,7 @@ +export const constant = { + address_zero: "0x0000000000000000000000000000000000000000", + eth_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", + matic_addr: "0xEeeeeEeeeEeEeeEeEeEeeEEEeeeeEeeeeeeeEEeE", + max_value: + "115792089237316195423570985008687907853269984665640564039457584007913129639935", +}; diff --git a/scripts/polygon/constant/tokens.js b/scripts/polygon/constant/tokens.js deleted file mode 100644 index 55946203..00000000 --- a/scripts/polygon/constant/tokens.js +++ /dev/null @@ -1,30 +0,0 @@ -module.exports = { - "matic": { - "type": "token", - "symbol": "MATIC", - "name": "Matic", - "address": "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", - "decimals": 18 - }, - "eth": { - "type": "token", - "symbol": "ETH", - "name": "Ethereum", - "address": "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", - "decimals": 18 - }, - "dai": { - "type": "token", - "symbol": "DAI", - "name": "DAI Stable", - "address": "0x8f3Cf7ad23Cd3CaDbD9735AFf958023239c6A063", - "decimals": 18 - }, - "usdc": { - "type": "token", - "symbol": "USDC", - "name": "USD Coin", - "address": "0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174", - "decimals": 6 - } -} \ No newline at end of file diff --git a/scripts/polygon/constant/tokens.ts b/scripts/polygon/constant/tokens.ts new file mode 100644 index 00000000..d621b68e --- /dev/null +++ b/scripts/polygon/constant/tokens.ts @@ -0,0 +1,30 @@ +export const tokens = { + matic: { + type: "token", + symbol: "MATIC", + name: "Matic", + address: "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", + decimals: 18, + }, + eth: { + type: "token", + symbol: "ETH", + name: "Ethereum", + address: "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee", + decimals: 18, + }, + dai: { + type: "token", + symbol: "DAI", + name: "DAI Stable", + address: "0x8f3Cf7ad23Cd3CaDbD9735AFf958023239c6A063", + decimals: 18, + }, + usdc: { + type: "token", + symbol: "USDC", + name: "USD Coin", + address: "0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174", + decimals: 6, + }, +}; diff --git 
a/scripts/polygon/deployAndEnableConnector.js b/scripts/polygon/deployAndEnableConnector.js deleted file mode 100644 index 52d80b5e..00000000 --- a/scripts/polygon/deployAndEnableConnector.js +++ /dev/null @@ -1,19 +0,0 @@ -const abis = require("../constant/abis"); -const addresses = require("./constant/addresses"); - -const hre = require("hardhat"); -const { ethers, waffle } = hre; -const { deployContract } = waffle; -const fs = require("fs") - - -module.exports = async function ({connectorName, contractArtifact, signer, connectors}) { - const connectorInstanace = await deployContract(signer, contractArtifact, []); - - await connectors.connect(signer).addConnectors([connectorName], [connectorInstanace.address]) - - addresses.connectors[connectorName] = connectorInstanace.address - abis.connectors[connectorName] = contractArtifact.abi; - - return connectorInstanace; -}; diff --git a/scripts/polygon/deployAndEnableConnector.ts b/scripts/polygon/deployAndEnableConnector.ts new file mode 100644 index 00000000..a62806dd --- /dev/null +++ b/scripts/polygon/deployAndEnableConnector.ts @@ -0,0 +1,26 @@ +import "@nomiclabs/hardhat-waffle"; + +import { addresses } from "./constant/addresses"; +import { abis } from "../constant/abis"; + +import * as hre from "hardhat"; +const { ethers, waffle } = hre; +const { deployContract } = waffle; + +module.exports = async function({ + connectorName, + contractArtifact, + signer, + connectors, +}) { + const connectorInstanace = await deployContract(signer, contractArtifact, []); + + await connectors + .connect(signer) + .addConnectors([connectorName], [connectorInstanace.address]); + + addresses.connectors[connectorName] = connectorInstanace.address; + abis.connectors[connectorName] = contractArtifact.abi; + + return connectorInstanace; +}; diff --git a/scripts/polygon/encodeSpells.js b/scripts/polygon/encodeSpells.js deleted file mode 100644 index ba1cddcb..00000000 --- a/scripts/polygon/encodeSpells.js +++ /dev/null @@ -1,18 +0,0 @@ -const abis = require("../constant/abis"); -const addresses = require("./constant/addresses"); -const { web3 } = hre; - -module.exports = function (spells) { - const targets = spells.map(a => a.connector) - const calldatas = spells.map(a => { - const functionName = a.method; - // console.log(functionName) - const abi = abis.connectors[a.connector].find(b => { - return b.name === functionName - }); - // console.log(functionName) - if (!abi) throw new Error("Couldn't find function") - return web3.eth.abi.encodeFunctionCall(abi, a.args) - }) - return [targets, calldatas] -}; diff --git a/scripts/polygon/encodeSpells.ts b/scripts/polygon/encodeSpells.ts new file mode 100644 index 00000000..c7e43755 --- /dev/null +++ b/scripts/polygon/encodeSpells.ts @@ -0,0 +1,18 @@ +import { addresses } from "./constant/addresses"; +import { abis } from "../constant/abis"; +import { web3 } from "hardhat"; + +module.exports = function(spells: any[]) { + const targets = spells.map((a) => a.connector); + const calldatas = spells.map((a) => { + const functionName = a.method; + // console.log(functionName) + const abi = abis.connectors[a.connector].find((b: { name: any }) => { + return b.name === functionName; + }); + // console.log(functionName) + if (!abi) throw new Error("Couldn't find function"); + return web3.eth.abi.encodeFunctionCall(abi, a.args); + }); + return [targets, calldatas]; +}; diff --git a/scripts/polygon/getMasterSigner.js b/scripts/polygon/getMasterSigner.ts similarity index 73% rename from scripts/polygon/getMasterSigner.js rename to 
scripts/polygon/getMasterSigner.ts index 9edb0417..558f3a27 100644 --- a/scripts/polygon/getMasterSigner.js +++ b/scripts/polygon/getMasterSigner.ts @@ -1,7 +1,6 @@ -const hre = require("hardhat"); -const { ethers } = hre; -const addresses = require("./constant/addresses"); -const abis = require("../constant/abis"); +import { ethers, network } from "hardhat"; +import { addresses } from "./constant/addresses"; +import { abis } from "../constant/abis"; module.exports = async function() { const [_, __, ___, wallet3] = await ethers.getSigners(); @@ -12,7 +11,7 @@ module.exports = async function() { ); const masterAddress = await instaIndex.master(); // TODO: make it constant? - await hre.network.provider.request({ + await network.provider.request({ method: "hardhat_impersonateAccount", params: [masterAddress], }); diff --git a/status-checks/check.js b/status-checks/check.js deleted file mode 100644 index c0848d77..00000000 --- a/status-checks/check.js +++ /dev/null @@ -1,370 +0,0 @@ -const fs = require('fs') -const path = require('path') - -const forbiddenStrings = ['selfdestruct'] - -const getConnectorsList = async (connectorsRootsDirs) => { - try { - const connectors = [] - for (let index = 0; index < connectorsRootsDirs.length; index++) { - const dirs = [connectorsRootsDirs[index]] - while (dirs.length) { - const currentDir = dirs.pop() - const subs = fs.readdirSync(currentDir, { withFileTypes: true }) - for (let index = 0; index < subs.length; index++) { - const sub = subs[index] - if (sub.isFile() && sub.name === 'main.sol') { - connectors.push(currentDir) - } else if (sub.isDirectory()) { - dirs.push(`${currentDir}/${sub.name}`) - } - } - } - } - return connectors.map(dir => ({ path: dir })) - } catch (error) { - return Promise.reject(error) - } -} - -const checkCodeForbidden = async (code, codePath) => { - try { - const forbidden = [] - for (let i1 = 0; i1 < forbiddenStrings.length; i1++) { - const forbiddenStr = forbiddenStrings[i1] - const strs = code.split('\n') - for (let i2 = 0; i2 < strs.length; i2++) { - if (strs[i2].includes(forbiddenStr)) { - forbidden.push(`found '${forbiddenStr}' in ${codePath}:${i2 + 1}`) - } - } - } - return forbidden - } catch (error) { - return Promise.reject(error) - } -} - -const checkForbidden = async (parentPath, codePath = './main.sol') => { - try { - if (codePath.startsWith('@')) { - codePath = path.resolve('node_modules', `./${codePath}`) - } else { - codePath = path.resolve(parentPath, codePath) - } - const code = fs.readFileSync(codePath, { encoding: 'utf8' }) - const forbidden = await checkCodeForbidden(code, codePath) - if (code.includes('import')) { - const importsPathes = code - .split('\n') - .filter(str => str.includes('import') && str.includes('from') && str.includes('.sol')) - .map(str => str.split('from')[1].replace(/["; ]/gi, '')) - for (let index = 0; index < importsPathes.length; index++) { - const forbiddenErrors = await checkForbidden( - path.parse(codePath).dir, - importsPathes[index] - ) - forbidden.push(...forbiddenErrors) - } - } - return codePath.endsWith('main.sol') ? 
{ forbiddenErrors: forbidden, code } : forbidden - } catch (error) { - return Promise.reject(error) - } -} - -const checkEvents = async (connector) => { - try { - const errors = [] - const warnings = [] - const eventsPath = `${connector.path}/events.sol` - const mainPath = `${connector.path}/main.sol` - if (connector.events.length) { - const eventNames = [] - for (let i1 = 0; i1 < connector.mainEvents.length; i1++) { - const mainEvent = connector.mainEvents[i1] - const name = mainEvent.split('(')[0] - eventNames.push(name) - const event = connector.events.find(e => e.split('(')[0].split(' ')[1] === name) - if (event) { - const mainEventArgs = mainEvent.split('(')[1].split(')')[0].split(',').map(a => a.trim()) - const eventArgs = event.split('(')[1].split(')')[0].split(',').map(a => a.trim()) - if (mainEventArgs.length !== eventArgs.length) { - errors.push(`arguments amount don't match for ${name} at ${mainPath}:${connector.mainEventsLines[i1]}`) - continue - } - for (let i2 = 0; i2 < mainEventArgs.length; i2++) { - if (!mainEventArgs[i2].startsWith(eventArgs[i2].split(' ')[0])) { - errors.push(`invalid argument #${i2 + 1} for ${name} at ${mainPath}:${connector.mainEventsLines[i1]}`) - } - } - } else { - errors.push(`event ${name} missing at ${eventsPath}`) - } - } - if (connector.mainEvents.length < connector.events.length) { - const deprecatedEvents = connector.events.filter(e => { - let used = false - for (let index = 0; index < eventNames.length; index++) { - if (e.split('(')[0].split(' ')[1] === eventNames[index]) used = true - } - return !used - }) - warnings.push(`${deprecatedEvents.map(e => e.split('(')[0].split(' ')[1]).join(', ')} event(s) not used at ${connector.path}/main.sol`) - } - } else { - warnings.push(`missing events file for ${connector.path}/main.sol`) - } - return { eventsErrors: errors, eventsWarnings: warnings } - } catch (error) { - return Promise.reject(error) - } -} - -const getCommments = async (strs) => { - try { - const comments = [] - let type - for (let index = strs.length - 1; index >= 0; index--) { - const str = strs[index] - if (!type) { - if (str.trim().startsWith('//')) { - type = 'single' - } else if (str.trim().startsWith('*/')) { - type = 'multiple' - } - } - if (type === 'single' && str.trim().startsWith('//')) { - comments.push(str.replace(/[/]/gi, '').trim()) - } else if (type === 'multiple' && !str.trim().startsWith('/**') && !str.trim().startsWith('*/')) { - comments.push(str.replace(/[*]/gi, '').trim()) - } else if (type === 'single' && !str.trim().startsWith('//')) { - break - } else if (type === 'multiple' && str.trim().startsWith('/**')) { - break - } - } - return comments - } catch (error) { - return Promise.reject(error) - } -} - -const parseCode = async (connector) => { - try { - const strs = connector.code.split('\n') - const events = [] - const eventsFirstLines = [] - let func = [] - let funcs = [] - let event = [] - let mainEvents = [] - let firstLine - let mainEventsLines = [] - for (let index = 0; index < strs.length; index++) { - const str = strs[index] - if (str.includes('function') && !str.trim().startsWith('//')) { - func = [str] - firstLine = index + 1 - } else if (func.length && !str.trim().startsWith('//')) { - func.push(str) - } - if (func.length && str.startsWith(`${func[0].split('function')[0]}}`)) { - funcs.push({ - raw: func.map(str => str.trim()).join(' '), - comments: await getCommments(strs.slice(0, firstLine)), - firstLine - }) - func = [] - } - } - const allPublicFuncs = funcs - .filter(({ raw }) => { - return 
raw.includes('external') || raw.includes('public') - }) - .map(f => { - const name = f.raw.split('(')[0].split('function')[1].trim() - return { - ...f, - name - } - }) - funcs = allPublicFuncs - .filter(({ raw }) => { - if (raw.includes('returns')) { - const returns = raw.split('returns')[1].split('(')[1].split(')')[0] - return returns.includes('string') && returns.includes('bytes') - } - return false - }) - .map(f => { - const args = f.raw.split('(')[1].split(')')[0].split(',') - .map(arg => arg.trim()) - .filter(arg => arg !== '') - return { - ...f, - args - } - }) - const eventsPath = `${connector.path}/events.sol` - if (fs.existsSync(eventsPath)) { - mainEvents = funcs - .map(({ raw }) => raw.split('_eventName')[2].trim().split('"')[1]) - .filter(raw => !!raw) - mainEventsLines = mainEvents.map(me => strs.findIndex(str => str.includes(me)) + 1) - const eventsCode = fs.readFileSync(eventsPath, { encoding: 'utf8' }) - const eventsStrs = eventsCode.split('\n') - for (let index = 0; index < eventsStrs.length; index++) { - const str = eventsStrs[index] - if (str.includes('event')) { - event = [str] - firstLine = index + 1 - } else if (event.length && !str.trim().startsWith('//')) { - event.push(str) - } - if (event.length && str.includes(')')) { - events.push(event.map(str => str.trim()).join(' ')) - eventsFirstLines.push(firstLine) - event = [] - } - } - } - return { - ...connector, - events, - eventsFirstLines, - mainEvents, - mainEventsLines, - funcs, - allPublicFuncs - } - } catch (error) { - return Promise.reject(error) - } -} - -const checkComments = async (connector) => { - try { - const errors = [] - for (let i1 = 0; i1 < connector.funcs.length; i1++) { - const func = connector.funcs[i1] - for (let i2 = 0; i2 < func.args.length; i2++) { - const argName = func.args[i2].split(' ').pop() - if (!func.comments.some( - comment => comment.startsWith('@param') && comment.split(' ')[1] === argName - )) { - errors.push(`argument ${argName} has no @param for function ${func.name} at ${connector.path}/main.sol:${func.firstLine}`) - } - } - const reqs = ['@dev', '@notice'] - for (let i3 = 0; i3 < reqs.length; i3++) { - if (!func.comments.some(comment => comment.startsWith(reqs[i3]))) { - errors.push(`no ${reqs[i3]} for function ${func.name} at ${connector.path}/main.sol:${func.firstLine}`) - } - } - } - return errors - } catch (error) { - return Promise.reject(error) - } -} - -const checkPublicFuncs = async (connector) => { - try { - const errors = [] - for (let i1 = 0; i1 < connector.allPublicFuncs.length; i1++) { - const { raw, firstLine, name } = connector.allPublicFuncs[i1] - if (!raw.includes('payable')) { - errors.push(`public function ${name} is not payable at ${connector.path}/main.sol:${firstLine}`) - } - } - return errors - } catch (error) { - return Promise.reject(error) - } -} - -const checkName = async (connector) => { - try { - const strs = connector.code.split('\n') - let haveName = false - for (let index = strs.length - 1; index > 0; index--) { - const str = strs[index] - if (str.includes('string') && str.includes('public') && str.includes('name = ')) { - haveName = true - } - } - return haveName ? 
[] : [`name variable missing in ${connector.path}/main.sol`] - } catch (error) { - return Promise.reject(error) - } -} - -const checkHeadComments = async (connector) => { - try { - const errors = [] - const strs = connector.code.split('\n') - let haveTitle = false - let haveDev = false - for (let index = 0; index < strs.length; index++) { - if (!strs[index].includes('{')) { - if (strs[index].includes('@title')) haveTitle = true - if (strs[index].includes('@dev')) haveDev = true - } else { - break - } - } - if (!haveTitle) errors.push(`@title missing in ${connector.path}/main.sol`) - if (!haveDev) errors.push(`@dev missing in ${connector.path}/main.sol`) - return errors - } catch (error) { - return Promise.reject(error) - } -} - -async function checkMain () { - try { - const connectorsRootsDirsDefault = ['mainnet', 'polygon'].map(v=> `contracts/${v}/connectors`) - const customPathArg = process.argv.find(a => a.startsWith('connector=')) - const connectorsRootsDirs = customPathArg - ? [customPathArg.slice(10)] - : connectorsRootsDirsDefault - const errors = [] - const warnings = [] - const connectors = await getConnectorsList(connectorsRootsDirs) - for (let index = 0; index < connectors.length; index++) { - const { forbiddenErrors, code } = await checkForbidden(connectors[index].path) - connectors[index].code = code - connectors[index] = await parseCode(connectors[index]) - const { eventsErrors, eventsWarnings } = await checkEvents(connectors[index]) - const commentsErrors = await checkComments(connectors[index]) - const nameErrors = await checkName(connectors[index]) - const headCommentsErrors = await checkHeadComments(connectors[index]) - const publicFuncsErrors = await checkPublicFuncs(connectors[index]) - - errors.push(...forbiddenErrors) - errors.push(...eventsErrors) - errors.push(...commentsErrors) - errors.push(...nameErrors) - errors.push(...headCommentsErrors) - errors.push(...publicFuncsErrors) - warnings.push(...eventsWarnings) - } - if (errors.length) { - console.log('\x1b[31m%s\x1b[0m', `Total errors: ${errors.length}`) - errors.forEach(error => console.log('\x1b[31m%s\x1b[0m', error)) - } else { - console.log('\x1b[32m%s\x1b[0m', 'No Errors Found') - } - if (warnings.length) { - console.log('\x1b[33m%s\x1b[0m', `Total warnings: ${warnings.length}`) - warnings.forEach(warning => console.log('\x1b[33m%s\x1b[0m', warning)) - } else { - console.log('\x1b[32m%s\x1b[0m', 'No Warnings Found') - } - if (errors.length) return Promise.reject(errors.join('\n')) - } catch (error) { - console.error('check execution error:', error) - } -} -module.exports = checkMain diff --git a/status-checks/check.ts b/status-checks/check.ts new file mode 100644 index 00000000..9c0b442d --- /dev/null +++ b/status-checks/check.ts @@ -0,0 +1,448 @@ +import * as fs from "fs"; +import * as path from "path"; + +const forbiddenStrings: any = ["selfdestruct"]; + +const getConnectorsList = async (connectorsRootsDirs: string | any[]) => { + try { + const connectors = []; + for (let index = 0; index < connectorsRootsDirs.length; index++) { + const dirs = [connectorsRootsDirs[index]]; + while (dirs.length) { + const currentDir = dirs.pop(); + const subs = fs.readdirSync(currentDir, { withFileTypes: true }); + for (let index = 0; index < subs.length; index++) { + const sub = subs[index]; + if (sub.isFile() && sub.name === "main.sol") { + connectors.push(currentDir); + } else if (sub.isDirectory()) { + dirs.push(`${currentDir}/${sub.name}`); + } + } + } + } + return connectors.map((dir) => ({ path: dir })); + } catch 
(error) { + return Promise.reject(error); + } +}; + +const checkCodeForbidden = async (code: string, codePath: string) => { + try { + const forbidden = []; + for (let i1 = 0; i1 < forbiddenStrings.length; i1++) { + const forbiddenStr = forbiddenStrings[i1]; + const strs = code.split("\n"); + for (let i2 = 0; i2 < strs.length; i2++) { + if (strs[i2].includes(forbiddenStr)) { + forbidden.push(`found '${forbiddenStr}' in ${codePath}:${i2 + 1}`); + } + } + } + return forbidden; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkForbidden = async (parentPath: string, codePath = "./main.sol") => { + try { + if (codePath.startsWith("@")) { + codePath = path.resolve("node_modules", `./${codePath}`); + } else { + codePath = path.resolve(parentPath, codePath); + } + const code = fs.readFileSync(codePath, { encoding: "utf8" }); + const forbidden: any = await checkCodeForbidden(code, codePath); + if (code.includes("import")) { + const importsPathes = code + .split("\n") + .filter( + (str) => + str.includes("import") && + str.includes("from") && + str.includes(".sol") + ) + .map((str) => str.split("from")[1].replace(/["; ]/gi, "")); + for (let index = 0; index < importsPathes.length; index++) { + const forbiddenErrors = await checkForbidden( + path.parse(codePath).dir, + importsPathes[index] + ); + forbidden.push(...forbiddenErrors); + } + } + return codePath.endsWith("main.sol") + ? { forbiddenErrors: forbidden, code } + : forbidden; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkEvents = async (connector: { + path: any; + events?: any; + mainEvents?: any; + mainEventsLines?: any; +}) => { + try { + const errors = []; + const warnings = []; + const eventsPath = `${connector.path}/events.sol`; + const mainPath = `${connector.path}/main.sol`; + if (connector.events.length) { + const eventNames = []; + for (let i1 = 0; i1 < connector.mainEvents.length; i1++) { + const mainEvent = connector.mainEvents[i1]; + const name = mainEvent.split("(")[0]; + eventNames.push(name); + const event = connector.events.find( + (e: string) => e.split("(")[0].split(" ")[1] === name + ); + if (event) { + const mainEventArgs = mainEvent + .split("(")[1] + .split(")")[0] + .split(",") + .map((a: string) => a.trim()); + const eventArgs = event + .split("(")[1] + .split(")")[0] + .split(",") + .map((a: string) => a.trim()); + if (mainEventArgs.length !== eventArgs.length) { + errors.push( + `arguments amount don't match for ${name} at ${mainPath}:${connector.mainEventsLines[i1]}` + ); + continue; + } + for (let i2 = 0; i2 < mainEventArgs.length; i2++) { + if (!mainEventArgs[i2].startsWith(eventArgs[i2].split(" ")[0])) { + errors.push( + `invalid argument #${i2 + 1} for ${name} at ${mainPath}:${ + connector.mainEventsLines[i1] + }` + ); + } + } + } else { + errors.push(`event ${name} missing at ${eventsPath}`); + } + } + if (connector.mainEvents.length < connector.events.length) { + const deprecatedEvents = connector.events.filter((e) => { + let used = false; + for (let index = 0; index < eventNames.length; index++) { + if (e.split("(")[0].split(" ")[1] === eventNames[index]) + used = true; + } + return !used; + }); + warnings.push( + `${deprecatedEvents + .map((e: string) => e.split("(")[0].split(" ")[1]) + .join(", ")} event(s) not used at ${connector.path}/main.sol` + ); + } + } else { + warnings.push(`missing events file for ${connector.path}/main.sol`); + } + return { eventsErrors: errors, eventsWarnings: warnings }; + } catch (error) { + return Promise.reject(error); + } +}; 
+ +const getCommments = async (strs: string | any[]) => { + try { + const comments = []; + let type: string; + for (let index = strs.length - 1; index >= 0; index--) { + const str = strs[index]; + if (!type) { + if (str.trim().startsWith("//")) { + type = "single"; + } else if (str.trim().startsWith("*/")) { + type = "multiple"; + } + } + if (type === "single" && str.trim().startsWith("//")) { + comments.push(str.replace(/[/]/gi, "").trim()); + } else if ( + type === "multiple" && + !str.trim().startsWith("/**") && + !str.trim().startsWith("*/") + ) { + comments.push(str.replace(/[*]/gi, "").trim()); + } else if (type === "single" && !str.trim().startsWith("//")) { + break; + } else if (type === "multiple" && str.trim().startsWith("/**")) { + break; + } + } + return comments; + } catch (error) { + return Promise.reject(error); + } +}; + +const parseCode = async (connector: { path: any; code?: any }) => { + try { + const strs = connector.code.split("\n"); + const events = []; + const eventsFirstLines = []; + let func = []; + let funcs = []; + let event = []; + let mainEvents = []; + let firstLine: number; + let mainEventsLines = []; + for (let index = 0; index < strs.length; index++) { + const str = strs[index]; + if (str.includes("function") && !str.trim().startsWith("//")) { + func = [str]; + firstLine = index + 1; + } else if (func.length && !str.trim().startsWith("//")) { + func.push(str); + } + if (func.length && str.startsWith(`${func[0].split("function")[0]}}`)) { + funcs.push({ + raw: func.map((str) => str.trim()).join(" "), + comments: await getCommments(strs.slice(0, firstLine)), + firstLine, + }); + func = []; + } + } + const allPublicFuncs = funcs + .filter(({ raw }) => { + return raw.includes("external") || raw.includes("public"); + }) + .map((f) => { + const name = f.raw + .split("(")[0] + .split("function")[1] + .trim(); + return { + ...f, + name, + }; + }); + funcs = allPublicFuncs + .filter(({ raw }) => { + if (raw.includes("returns")) { + const returns = raw + .split("returns")[1] + .split("(")[1] + .split(")")[0]; + return returns.includes("string") && returns.includes("bytes"); + } + return false; + }) + .map((f) => { + const args = f.raw + .split("(")[1] + .split(")")[0] + .split(",") + .map((arg) => arg.trim()) + .filter((arg) => arg !== ""); + return { + ...f, + args, + }; + }); + const eventsPath = `${connector.path}/events.sol`; + if (fs.existsSync(eventsPath)) { + mainEvents = funcs + .map( + ({ raw }) => + raw + .split("_eventName")[2] + .trim() + .split('"')[1] + ) + .filter((raw) => !!raw); + mainEventsLines = mainEvents.map( + (me) => strs.findIndex((str: string | any[]) => str.includes(me)) + 1 + ); + const eventsCode = fs.readFileSync(eventsPath, { encoding: "utf8" }); + const eventsStrs = eventsCode.split("\n"); + for (let index = 0; index < eventsStrs.length; index++) { + const str = eventsStrs[index]; + if (str.includes("event")) { + event = [str]; + firstLine = index + 1; + } else if (event.length && !str.trim().startsWith("//")) { + event.push(str); + } + if (event.length && str.includes(")")) { + events.push(event.map((str) => str.trim()).join(" ")); + eventsFirstLines.push(firstLine); + event = []; + } + } + } + return { + ...connector, + events, + eventsFirstLines, + mainEvents, + mainEventsLines, + funcs, + allPublicFuncs, + }; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkComments = async (connector) => { + try { + const errors = []; + for (let i1 = 0; i1 < connector.funcs.length; i1++) { + const func = 
connector.funcs[i1]; + for (let i2 = 0; i2 < func.args.length; i2++) { + const argName = func.args[i2].split(" ").pop(); + if ( + !func.comments.some( + (comment: string) => + comment.startsWith("@param") && comment.split(" ")[1] === argName + ) + ) { + errors.push( + `argument ${argName} has no @param for function ${func.name} at ${connector.path}/main.sol:${func.firstLine}` + ); + } + } + const reqs = ["@dev", "@notice"]; + for (let i3 = 0; i3 < reqs.length; i3++) { + if (!func.comments.some((comment) => comment.startsWith(reqs[i3]))) { + errors.push( + `no ${reqs[i3]} for function ${func.name} at ${connector.path}/main.sol:${func.firstLine}` + ); + } + } + } + return errors; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkPublicFuncs = async (connector: { + path: any; + allPublicFuncs?: any; +}) => { + try { + const errors = []; + for (let i1 = 0; i1 < connector.allPublicFuncs.length; i1++) { + const { raw, firstLine, name } = connector.allPublicFuncs[i1]; + if (!raw.includes("payable")) { + errors.push( + `public function ${name} is not payable at ${connector.path}/main.sol:${firstLine}` + ); + } + } + return errors; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkName = async (connector: { path: any; code?: any }) => { + try { + const strs = connector.code.split("\n"); + let haveName = false; + for (let index = strs.length - 1; index > 0; index--) { + const str = strs[index]; + if ( + str.includes("string") && + str.includes("public") && + str.includes("name = ") + ) { + haveName = true; + } + } + return haveName + ? [] + : [`name variable missing in ${connector.path}/main.sol`]; + } catch (error) { + return Promise.reject(error); + } +}; + +const checkHeadComments = async (connector: { path: any; code?: any }) => { + try { + const errors = []; + const strs = connector.code.split("\n"); + let haveTitle = false; + let haveDev = false; + for (let index = 0; index < strs.length; index++) { + if (!strs[index].includes("{")) { + if (strs[index].includes("@title")) haveTitle = true; + if (strs[index].includes("@dev")) haveDev = true; + } else { + break; + } + } + if (!haveTitle) errors.push(`@title missing in ${connector.path}/main.sol`); + if (!haveDev) errors.push(`@dev missing in ${connector.path}/main.sol`); + return errors; + } catch (error) { + return Promise.reject(error); + } +}; + +async function checkMain() { + try { + const connectorsRootsDirsDefault = ["mainnet", "polygon"].map( + (v) => `contracts/${v}/connectors` + ); + const customPathArg = process.argv.find((a) => a.startsWith("connector=")); + const connectorsRootsDirs = customPathArg + ? 
[customPathArg.slice(10)] + : connectorsRootsDirsDefault; + const errors = []; + const warnings = []; + const connectors = await getConnectorsList(connectorsRootsDirs); + for (let index = 0; index < connectors.length; index++) { + const { forbiddenErrors, code } = await checkForbidden( + connectors[index].path + ); + connectors[index].code = code; + connectors[index] = await parseCode(connectors[index]); + const { eventsErrors, eventsWarnings } = await checkEvents( + connectors[index] + ); + const commentsErrors = await checkComments(connectors[index]); + const nameErrors = await checkName(connectors[index]); + const headCommentsErrors = await checkHeadComments(connectors[index]); + const publicFuncsErrors = await checkPublicFuncs(connectors[index]); + + errors.push(...forbiddenErrors); + errors.push(...eventsErrors); + errors.push(...commentsErrors); + errors.push(...nameErrors); + errors.push(...headCommentsErrors); + errors.push(...publicFuncsErrors); + warnings.push(...eventsWarnings); + } + if (errors.length) { + console.log("\x1b[31m%s\x1b[0m", `Total errors: ${errors.length}`); + errors.forEach((error) => console.log("\x1b[31m%s\x1b[0m", error)); + } else { + console.log("\x1b[32m%s\x1b[0m", "No Errors Found"); + } + if (warnings.length) { + console.log("\x1b[33m%s\x1b[0m", `Total warnings: ${warnings.length}`); + warnings.forEach((warning) => console.log("\x1b[33m%s\x1b[0m", warning)); + } else { + console.log("\x1b[32m%s\x1b[0m", "No Warnings Found"); + } + if (errors.length) return Promise.reject(errors.join("\n")); + } catch (error) { + console.error("check execution error:", error); + } +} +export default checkMain; diff --git a/status-checks/checks.js b/status-checks/checks.js deleted file mode 100644 index 871dccf0..00000000 --- a/status-checks/checks.js +++ /dev/null @@ -1,13 +0,0 @@ -const checkMain = require('./check') - -module.exports = [{ - name: 'Solidity check', - callback: async () => { - try { - await checkMain() - return 'Check passed!' 
- } catch (error) { - throw new Error('Check failed!') - } - } -}] diff --git a/status-checks/checks.ts b/status-checks/checks.ts new file mode 100644 index 00000000..be76ccdf --- /dev/null +++ b/status-checks/checks.ts @@ -0,0 +1,15 @@ +import checkMain from "./check"; + +module.exports = [ + { + name: "Solidity check", + callback: async () => { + try { + await checkMain(); + return "Check passed!"; + } catch (error) { + throw new Error("Check failed!"); + } + }, + }, +]; diff --git a/status-checks/huskyCheck.js b/status-checks/huskyCheck.js deleted file mode 100644 index c7f7fc82..00000000 --- a/status-checks/huskyCheck.js +++ /dev/null @@ -1,9 +0,0 @@ -const checkMain = require('./check'); - -(async function runHusky () { - try { - await checkMain() - } catch (error) { - process.exit(1) - } -})() diff --git a/status-checks/huskyCheck.ts b/status-checks/huskyCheck.ts new file mode 100644 index 00000000..658a76d9 --- /dev/null +++ b/status-checks/huskyCheck.ts @@ -0,0 +1,9 @@ +import checkMain from "./check"; + +(async function runHusky() { + try { + await checkMain(); + } catch (error) { + process.exit(1); + } +})(); diff --git a/status-checks/index.js b/status-checks/index.js deleted file mode 100644 index 0be169ba..00000000 --- a/status-checks/index.js +++ /dev/null @@ -1,54 +0,0 @@ -const cp = require('child_process') -const fetch = require('node-fetch') - -const checks = require('./checks') - -const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/') - -function getCurrentCommitSha () { - return cp - .execSync('git rev-parse HEAD') - .toString() - .trim() -} -// The SHA provied by GITHUB_SHA is the merge (PR) commit. -// We need to get the current commit sha ourself. -const sha = getCurrentCommitSha() - -async function setStatus (context, state, description) { - return fetch(`https://api.github.com/repos/${owner}/${repo}/statuses/${sha}`, { - method: 'POST', - body: JSON.stringify({ - state, - description, - context - }), - headers: { - Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, - 'Content-Type': 'application/json' - } - }) -} - -(async () => { - console.log(`Starting status checks for commit ${sha}`) - - // Run in parallel - await Promise.all( - checks.map(async check => { - const { name, callback } = check - - await setStatus(name, 'pending', 'Running check..') - - try { - const response = await callback() - await setStatus(name, 'success', response) - } catch (err) { - const message = err ? err.message : 'Something went wrong' - await setStatus(name, 'failure', message) - } - }) - ) - - console.log('Finished status checks') -})() diff --git a/status-checks/index.ts b/status-checks/index.ts new file mode 100644 index 00000000..738beed3 --- /dev/null +++ b/status-checks/index.ts @@ -0,0 +1,57 @@ +import * as cp from "child_process"; +import fetch from "node-fetch"; + +import checks from "./checks"; + +const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); + +function getCurrentCommitSha() { + return cp + .execSync("git rev-parse HEAD") + .toString() + .trim(); +} +// The SHA provided by GITHUB_SHA is the merge (PR) commit. +// We need to get the current commit sha ourselves. 
+const sha = getCurrentCommitSha(); + +async function setStatus(context, state, description) { + return fetch( + `https://api.github.com/repos/${owner}/${repo}/statuses/${sha}`, + { + method: "POST", + body: JSON.stringify({ + state, + description, + context, + }), + headers: { + Authorization: `Bearer ${process.env.GITHUB_TOKEN}`, + "Content-Type": "application/json", + }, + } + ); +} + +(async () => { + console.log(`Starting status checks for commit ${sha}`); + + // Run in parallel + await Promise.all( + checks.map(async (check) => { + const { name, callback } = check; + + await setStatus(name, "pending", "Running check.."); + + try { + const response = await callback(); + await setStatus(name, "success", response); + } catch (err) { + const message = err ? err.message : "Something went wrong"; + await setStatus(name, "failure", message); + } + }) + ); + + console.log("Finished status checks"); +})();
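A quick way to exercise the ported checker outside of CI is to call its default export directly. The snippet below is a minimal local runner and is not part of this diff; it assumes ts-node (or a compiled build) and execution from the repository root, and simply mirrors the pattern already used in status-checks/huskyCheck.ts.

// Hypothetical local runner (sketch, assumes ts-node from the repo root):
// checkMain() resolves when every connector passes its checks and rejects
// with the joined error list otherwise.
import checkMain from "./status-checks/check";

checkMain()
  .then(() => console.log("connector checks passed"))
  .catch((errors) => {
    console.error(errors);
    process.exit(1);
  });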