add spark import permit

carlos53093 2024-02-12 14:26:07 -05:00
parent f3f965890d
commit 7f1cc0c919
4 changed files with 729 additions and 0 deletions

@@ -0,0 +1,25 @@
//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
pragma experimental ABIEncoderV2;
contract Events {
event LogSparkImportWithPermit(
address indexed user,
bool convertStable,
address[] supplyTokens,
address[] borrowTokens,
uint256[] flashLoanFees,
uint256[] supplyAmts,
uint256[] stableBorrowAmts,
uint256[] variableBorrowAmts
);
event LogSparkImportWithPermitAndCollateral(
address indexed user,
bool convertStable,
address[] supplyTokens,
address[] borrowTokens,
uint256[] flashLoanFees,
uint256[] supplyAmts,
uint256[] stableBorrowAmts,
uint256[] variableBorrowAmts,
bool[] enableCollateral
);
}
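Note that the resolver in this commit does not emit these events itself; it returns an _eventName string plus abi-encoded _eventParam bytes for the DSA to log. As an illustrative sketch (the contract and function names below are not part of the commit), the LogSparkImportWithPermit payload can be decoded with the parameter order used by the resolver:

//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;

// Illustrative only: decodes the _eventParam bytes returned for LogSparkImportWithPermit.
contract SparkImportEventDecoder {
    function decodeImportWithPermit(bytes memory eventParam)
        external
        pure
        returns (
            address user,
            bool convertStable,
            address[] memory supplyTokens,
            address[] memory borrowTokens,
            uint256[] memory flashLoanFees,
            uint256[] memory supplyAmts,
            uint256[] memory stableBorrowAmts,
            uint256[] memory variableBorrowAmts
        )
    {
        (
            user,
            convertStable,
            supplyTokens,
            borrowTokens,
            flashLoanFees,
            supplyAmts,
            stableBorrowAmts,
            variableBorrowAmts
        ) = abi.decode(
            eventParam,
            (address, bool, address[], address[], uint256[], uint256[], uint256[], uint256[])
        );
    }
}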

@@ -0,0 +1,344 @@
//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
import { DSMath } from "../../common/math.sol";
import { Basic } from "../../common/basic.sol";
import { TokenInterface, AccountInterface } from "../../common/interfaces.sol";
import { SparkInterface, SparkPoolProviderInterface, SparkDataProviderInterface, STokenInterface } from "./interface.sol";
import "./events.sol";
abstract contract Helper is DSMath, Basic {
/**
* @dev Spark referral code
*/
uint16 internal constant referralCode = 0;
/**
* @dev Spark Lending Pool Provider
*/
SparkPoolProviderInterface internal constant sparkProvider =
SparkPoolProviderInterface(0x02C3eA4e34C0cBd694D2adFa2c690EECbC1793eE);
/**
* @dev Spark Protocol Data Provider
*/
SparkDataProviderInterface internal constant sparkData =
SparkDataProviderInterface(0xFc21d6d146E6086B8359705C8b28512a983db0cb);
function getIsColl(address token, address user)
internal
view
returns (bool isCol)
{
(, , , , , , , , isCol) = sparkData.getUserReserveData(token, user);
}
struct ImportData {
address[] _supplyTokens;
address[] _borrowTokens;
STokenInterface[] sTokens;
uint256[] supplyAmts;
uint256[] variableBorrowAmts;
uint256[] variableBorrowAmtsWithFee;
uint256[] stableBorrowAmts;
uint256[] stableBorrowAmtsWithFee;
uint256[] totalBorrowAmts;
uint256[] totalBorrowAmtsWithFee;
bool convertStable;
}
struct ImportInputData {
address[] supplyTokens;
address[] borrowTokens;
bool convertStable;
uint256[] flashLoanFees;
}
struct SignedPermits {
uint8[] v;
bytes32[] r;
bytes32[] s;
uint256[] expiry;
}
}
contract SparkHelpers is Helper {
function getBorrowAmount(address _token, address userAccount)
internal
view
returns (uint256 stableBorrow, uint256 variableBorrow)
{
(
,
address stableDebtTokenAddress,
address variableDebtTokenAddress
) = sparkData.getReserveTokensAddresses(_token);
stableBorrow = STokenInterface(stableDebtTokenAddress).balanceOf(
userAccount
);
variableBorrow = STokenInterface(variableDebtTokenAddress).balanceOf(
userAccount
);
}
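/**
* @dev Snapshots the user's stable and variable debt for every borrow token, adds the
* flash-loan fee once per token (to the variable leg if it exists, otherwise to the stable leg),
* and approves the Spark pool to pull the totals for repayment.
*/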
function getBorrowAmounts(
address userAccount,
SparkInterface spark,
ImportInputData memory inputData,
ImportData memory data
) internal returns (ImportData memory) {
if (inputData.borrowTokens.length > 0) {
data._borrowTokens = new address[](inputData.borrowTokens.length);
data.variableBorrowAmts = new uint256[](
inputData.borrowTokens.length
);
data.variableBorrowAmtsWithFee = new uint256[](
inputData.borrowTokens.length
);
data.stableBorrowAmts = new uint256[](
inputData.borrowTokens.length
);
data.stableBorrowAmtsWithFee = new uint256[](
inputData.borrowTokens.length
);
data.totalBorrowAmts = new uint256[](inputData.borrowTokens.length);
data.totalBorrowAmtsWithFee = new uint256[](
inputData.borrowTokens.length
);
for (uint256 i = 0; i < inputData.borrowTokens.length; i++) {
for (uint256 j = i; j < inputData.borrowTokens.length; j++) {
if (j != i) {
require(
inputData.borrowTokens[i] !=
inputData.borrowTokens[j],
"token-repeated"
);
}
}
}
for (uint256 i = 0; i < inputData.borrowTokens.length; i++) {
address _token = inputData.borrowTokens[i] == ethAddr
? wethAddr
: inputData.borrowTokens[i];
data._borrowTokens[i] = _token;
(
data.stableBorrowAmts[i],
data.variableBorrowAmts[i]
) = getBorrowAmount(_token, userAccount);
if (data.variableBorrowAmts[i] != 0) {
data.variableBorrowAmtsWithFee[i] = add(
data.variableBorrowAmts[i],
inputData.flashLoanFees[i]
);
data.stableBorrowAmtsWithFee[i] = data.stableBorrowAmts[i];
} else {
data.stableBorrowAmtsWithFee[i] = add(
data.stableBorrowAmts[i],
inputData.flashLoanFees[i]
);
}
data.totalBorrowAmts[i] = add(
data.stableBorrowAmts[i],
data.variableBorrowAmts[i]
);
data.totalBorrowAmtsWithFee[i] = add(
data.stableBorrowAmtsWithFee[i],
data.variableBorrowAmtsWithFee[i]
);
if (data.totalBorrowAmts[i] > 0) {
uint256 _amt = data.totalBorrowAmts[i];
TokenInterface(_token).approve(address(spark), _amt);
}
}
}
return data;
}
function getSupplyAmounts(
address userAccount,
ImportInputData memory inputData,
ImportData memory data
) internal view returns (ImportData memory) {
data.supplyAmts = new uint256[](inputData.supplyTokens.length);
data._supplyTokens = new address[](inputData.supplyTokens.length);
data.sTokens = new STokenInterface[](inputData.supplyTokens.length);
for (uint256 i = 0; i < inputData.supplyTokens.length; i++) {
for (uint256 j = i; j < inputData.supplyTokens.length; j++) {
if (j != i) {
require(
inputData.supplyTokens[i] != inputData.supplyTokens[j],
"token-repeated"
);
}
}
}
for (uint256 i = 0; i < inputData.supplyTokens.length; i++) {
address _token = inputData.supplyTokens[i] == ethAddr
? wethAddr
: inputData.supplyTokens[i];
(address _sToken, , ) = sparkData.getReserveTokensAddresses(_token);
data._supplyTokens[i] = _token;
data.sTokens[i] = STokenInterface(_sToken);
data.supplyAmts[i] = data.sTokens[i].balanceOf(userAccount);
}
return data;
}
function _paybackBehalfOne(
SparkInterface spark,
address token,
uint256 amt,
uint256 rateMode,
address user
) private {
spark.repay(token, amt, rateMode, user);
}
function _PaybackStable(
uint256 _length,
SparkInterface spark,
address[] memory tokens,
uint256[] memory amts,
address user
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
_paybackBehalfOne(spark, tokens[i], amts[i], 1, user);
}
}
}
function _PaybackVariable(
uint256 _length,
SparkInterface spark,
address[] memory tokens,
uint256[] memory amts,
address user
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
_paybackBehalfOne(spark, tokens[i], amts[i], 2, user);
}
}
}
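/**
* @dev Consumes the signed permits so this DSA becomes an approved spender (for uint256(-1))
* on each of the user's sTokens.
*/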
function _PermitSTokens(
address userAccount,
STokenInterface[] memory sTokenContracts,
address[] memory tokens,
uint8[] memory v,
bytes32[] memory r,
bytes32[] memory s,
uint256[] memory expiry
) internal {
for (uint256 i = 0; i < tokens.length; i++) {
sTokenContracts[i].permit(
userAccount,
address(this),
uint256(-1),
expiry[i],
v[i],
r[i],
s[i]
);
}
}
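/**
* @dev Pulls the user's sToken balances into the DSA and enables each underlying asset as
* collateral if it is not already.
*/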
function _TransferStokens(
uint256 _length,
SparkInterface spark,
STokenInterface[] memory stokenContracts,
uint256[] memory amts,
address[] memory tokens,
address userAccount
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
uint256 _amt = amts[i];
require(
stokenContracts[i].transferFrom(
userAccount,
address(this),
_amt
),
"allowance?"
);
if (!getIsColl(tokens[i], address(this))) {
spark.setUserUseReserveAsCollateral(tokens[i], true);
}
}
}
}
function _TransferStokensWithCollateral(
uint256 _length,
SparkInterface spark,
STokenInterface[] memory stokenContracts,
uint256[] memory amts,
address[] memory tokens,
bool[] memory colEnable,
address userAccount
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
uint256 _amt = amts[i];
require(
stokenContracts[i].transferFrom(
userAccount,
address(this),
_amt
),
"allowance?"
);
if (!getIsColl(tokens[i], address(this))) {
spark.setUserUseReserveAsCollateral(tokens[i], colEnable[i]);
}
}
}
}
function _BorrowVariable(
uint256 _length,
SparkInterface spark,
address[] memory tokens,
uint256[] memory amts
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
_borrowOne(spark, tokens[i], amts[i], 2);
}
}
}
function _BorrowStable(
uint256 _length,
SparkInterface spark,
address[] memory tokens,
uint256[] memory amts
) internal {
for (uint256 i = 0; i < _length; i++) {
if (amts[i] > 0) {
_borrowOne(spark, tokens[i], amts[i], 1);
}
}
}
function _borrowOne(
SparkInterface spark,
address token,
uint256 amt,
uint256 rateMode
) private {
spark.borrow(token, amt, rateMode, referralCode, address(this));
}
}
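For reference, the (v, r, s, expiry) values consumed by _PermitSTokens are expected to come from an EIP-2612-style signature over the sToken's permit digest, signed by the EOA with this DSA as spender and uint256(-1) as the value. The following is a hedged sketch of how that digest could be computed, assuming the sToken exposes the usual DOMAIN_SEPARATOR() and nonces() getters; the interface and contract names are illustrative and not part of this commit:

//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;

// Assumed EIP-2612-style view functions on the sToken; verify against the deployed token.
interface IERC20PermitView {
    function DOMAIN_SEPARATOR() external view returns (bytes32);
    function nonces(address owner) external view returns (uint256);
}

contract SparkPermitDigestSketch {
    // Standard EIP-2612 Permit typehash.
    bytes32 internal constant PERMIT_TYPEHASH =
        keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)");

    // Digest the EOA signs so the DSA (spender) can call permit for uint256(-1) on its sTokens.
    function permitDigest(
        address sToken,
        address owner,
        address spender,
        uint256 deadline
    ) external view returns (bytes32) {
        return keccak256(
            abi.encodePacked(
                "\x19\x01",
                IERC20PermitView(sToken).DOMAIN_SEPARATOR(),
                keccak256(
                    abi.encode(
                        PERMIT_TYPEHASH,
                        owner,
                        spender,
                        uint256(-1),
                        IERC20PermitView(sToken).nonces(owner),
                        deadline
                    )
                )
            )
        );
    }
}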

@@ -0,0 +1,105 @@
//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
interface SparkInterface {
function supply(
address asset,
uint256 amount,
address onBehalfOf,
uint16 referralCode
) external;
function withdraw(
address _asset,
uint256 _amount,
address _to
) external;
function borrow(
address _asset,
uint256 _amount,
uint256 _interestRateMode,
uint16 _referralCode,
address _onBehalfOf
) external;
function repay(
address _asset,
uint256 _amount,
uint256 _rateMode,
address _onBehalfOf
) external;
function setUserUseReserveAsCollateral(
address _asset,
bool _useAsCollateral
) external;
function swapBorrowRateMode(address _asset, uint256 _rateMode) external;
}
interface STokenInterface {
function scaledBalanceOf(address _user) external view returns (uint256);
function isTransferAllowed(address _user, uint256 _amount)
external
view
returns (bool);
function balanceOf(address _user) external view returns (uint256);
function permit(
address owner,
address spender,
uint256 value,
uint256 deadline,
uint8 v,
bytes32 r,
bytes32 s
) external;
function transferFrom(
address,
address,
uint256
) external returns (bool);
function allowance(address, address) external returns (uint256);
}
interface SparkPoolProviderInterface {
function getPool() external view returns (address);
}
interface SparkDataProviderInterface {
function getReserveTokensAddresses(address _asset)
external
view
returns (
address sTokenAddress,
address stableDebtTokenAddress,
address variableDebtTokenAddress
);
function getUserReserveData(address _asset, address _user)
external
view
returns (
uint256 currentSTokenBalance,
uint256 currentStableDebt,
uint256 currentVariableDebt,
uint256 principalStableDebt,
uint256 scaledVariableDebt,
uint256 stableBorrowRate,
uint256 liquidityRate,
uint40 stableRateLastUpdated,
bool usageAsCollateralEnabled
);
}
interface SparkAddressProviderRegistryInterface {
function getAddressesProvidersList()
external
view
returns (address[] memory);
}

@@ -0,0 +1,255 @@
//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
pragma experimental ABIEncoderV2;
/**
* @title Spark import connector.
* @dev Import EOA's Spark position to DSA's Spark position.
*/
import { TokenInterface, AccountInterface } from "../../common/interfaces.sol";
import { SparkInterface, STokenInterface } from "./interface.sol";
import "./helpers.sol";
import "./events.sol";
contract SparkImportPermitResolver is SparkHelpers {
function _importSpark(
address userAccount,
ImportInputData memory inputData,
SignedPermits memory permitData
) internal returns (string memory _eventName, bytes memory _eventParam) {
require(
AccountInterface(address(this)).isAuth(userAccount),
"user-account-not-auth"
);
require(inputData.supplyTokens.length > 0, "0-length-not-allowed");
ImportData memory data;
SparkInterface spark = SparkInterface(sparkProvider.getPool());
data = getBorrowAmounts(userAccount, spark, inputData, data);
data = getSupplyAmounts(userAccount, inputData, data);
// Pay back the borrowed amounts.
_PaybackStable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.stableBorrowAmts,
userAccount
);
_PaybackVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.variableBorrowAmts,
userAccount
);
// Permit this address to transfer the user's sTokens.
_PermitSTokens(
userAccount,
data.sTokens,
data._supplyTokens,
permitData.v,
permitData.r,
permitData.s,
permitData.expiry
);
// Transfer sTokens to this address.
_TransferStokens(
data._supplyTokens.length,
spark,
data.sTokens,
data.supplyAmts,
data._supplyTokens,
userAccount
);
// Borrow assets after migrating the position.
if (data.convertStable) {
_BorrowVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.totalBorrowAmtsWithFee
);
} else {
_BorrowStable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.stableBorrowAmtsWithFee
);
_BorrowVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.variableBorrowAmtsWithFee
);
}
_eventName = "LogSparkImportWithPermit(address,bool,address[],address[],uint256[],uint256[],uint256[],uint256[])";
_eventParam = abi.encode(
userAccount,
inputData.convertStable,
inputData.supplyTokens,
inputData.borrowTokens,
inputData.flashLoanFees,
data.supplyAmts,
data.stableBorrowAmts,
data.variableBorrowAmts
);
}
function _importSparkWithCollateral(
address userAccount,
ImportInputData memory inputData,
SignedPermits memory permitData,
bool[] memory enableCollateral
) internal returns (string memory _eventName, bytes memory _eventParam) {
require(
AccountInterface(address(this)).isAuth(userAccount),
"user-account-not-auth"
);
require(inputData.supplyTokens.length > 0, "0-length-not-allowed");
require(enableCollateral.length == inputData.supplyTokens.length, "supplytokens-enableCol-len-not-same");
ImportData memory data;
SparkInterface spark = SparkInterface(sparkProvider.getPool());
data = getBorrowAmounts(userAccount, spark, inputData, data);
data = getSupplyAmounts(userAccount, inputData, data);
// Pay back the borrowed amounts.
_PaybackStable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.stableBorrowAmts,
userAccount
);
_PaybackVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.variableBorrowAmts,
userAccount
);
// Permit this address to transfer the user's sTokens.
_PermitSTokens(
userAccount,
data.sTokens,
data._supplyTokens,
permitData.v,
permitData.r,
permitData.s,
permitData.expiry
);
// Transfer sTokens to this address.
_TransferStokensWithCollateral(
data._supplyTokens.length,
spark,
data.sTokens,
data.supplyAmts,
data._supplyTokens,
enableCollateral,
userAccount
);
// Borrow assets after migrating the position.
if (data.convertStable) {
_BorrowVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.totalBorrowAmtsWithFee
);
} else {
_BorrowStable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.stableBorrowAmtsWithFee
);
_BorrowVariable(
data._borrowTokens.length,
spark,
data._borrowTokens,
data.variableBorrowAmtsWithFee
);
}
_eventName = "LogSparkImportWithPermitAndCollateral(address,bool,address[],address[],uint256[],uint256[],uint256[],uint256[],bool[])";
_eventParam = abi.encode(
userAccount,
inputData.convertStable,
inputData.supplyTokens,
inputData.borrowTokens,
inputData.flashLoanFees,
data.supplyAmts,
data.stableBorrowAmts,
data.variableBorrowAmts,
enableCollateral
);
}
/**
* @dev Import Spark position.
* @notice Import EOA's Spark position to DSA's Spark position
* @param userAccount The address of the EOA from which the Spark position will be imported
* @param inputData The struct containing all the necessary input data
* @param permitData The struct containing signed permit data like v, r, s, expiry
*/
function importSpark(
address userAccount,
ImportInputData memory inputData,
SignedPermits memory permitData
)
external
payable
returns (string memory _eventName, bytes memory _eventParam)
{
(_eventName, _eventParam) = _importSpark(
userAccount,
inputData,
permitData
);
}
/**
* @dev Import Spark position (with collateral).
* @notice Import EOA's Spark position to DSA's Spark position
* @param userAccount The address of the EOA from which the Spark position will be imported
* @param inputData The struct containing all the necessary input data
* @param permitData The struct containing signed permit data like v, r, s, expiry
* @param enableCollateral The boolean array to enable selected collaterals in the imported position
*/
function importSparkWithCollateral(
address userAccount,
ImportInputData memory inputData,
SignedPermits memory permitData,
bool[] memory enableCollateral
)
external
payable
returns (string memory _eventName, bytes memory _eventParam)
{
(_eventName, _eventParam) = _importSparkWithCollateral(
userAccount,
inputData,
permitData,
enableCollateral
);
}
}
contract ConnectV2SparkImportPermit is SparkImportPermitResolver {
string public constant name = "Spark-import-permit-v1.0";
}
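A hedged usage sketch, not part of the commit: the connector is designed to run in the DSA's own context (it checks isAuth on address(this) and pulls the sTokens to address(this)), so callers typically encode importSpark as spell calldata for the DSA to execute. The structs below mirror ImportInputData and SignedPermits from helpers.sol, and the contract and function names are illustrative; the same pattern applies to importSparkWithCollateral with a trailing bool[] argument.

//SPDX-License-Identifier: MIT
pragma solidity ^0.7.0;
pragma experimental ABIEncoderV2;

// Illustrative encoder for the spell calldata a DSA would forward to this connector.
contract SparkImportSpellEncoder {
    // Mirrors ImportInputData in helpers.sol (field order matters for ABI encoding).
    struct ImportInputData {
        address[] supplyTokens;
        address[] borrowTokens;
        bool convertStable;
        uint256[] flashLoanFees;
    }

    // Mirrors SignedPermits in helpers.sol.
    struct SignedPermits {
        uint8[] v;
        bytes32[] r;
        bytes32[] s;
        uint256[] expiry;
    }

    // Returns the calldata for importSpark(address, ImportInputData, SignedPermits).
    function encodeImportSpark(
        address userAccount,
        ImportInputData memory inputData,
        SignedPermits memory permitData
    ) external pure returns (bytes memory) {
        return abi.encodeWithSignature(
            "importSpark(address,(address[],address[],bool,uint256[]),(uint8[],bytes32[],bytes32[],uint256[]))",
            userAccount,
            inputData,
            permitData
        );
    }
}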