diff --git a/api/_dexes/uniswap/adapter.ts b/api/_dexes/uniswap/adapter.ts new file mode 100644 index 000000000..3b686ad84 --- /dev/null +++ b/api/_dexes/uniswap/adapter.ts @@ -0,0 +1,252 @@ +import { MixedRouteSDK, Trade as RouterTrade } from "@uniswap/router-sdk"; +import { + Currency, + CurrencyAmount, + Ether, + Token, + TradeType, +} from "@uniswap/sdk-core"; +import { Pair, Route as V2Route } from "@uniswap/v2-sdk"; +import { Pool, Route as V3Route, FeeAmount } from "@uniswap/v3-sdk"; +import { BigNumber } from "ethers"; + +export type TokenInRoute = { + address: string; + chainId: number; + symbol: string; + decimals: string; + name?: string; + buyFeeBps?: string; + sellFeeBps?: string; +}; + +export enum PoolType { + V2Pool = "v2-pool", + V3Pool = "v3-pool", + V4Pool = "v4-pool", +} + +export type V2Reserve = { + token: TokenInRoute; + quotient: string; +}; + +export type V2PoolInRoute = { + type: PoolType.V2Pool; + address?: string; + tokenIn: TokenInRoute; + tokenOut: TokenInRoute; + reserve0: V2Reserve; + reserve1: V2Reserve; + amountIn?: string; + amountOut?: string; +}; + +export type V3PoolInRoute = { + type: PoolType.V3Pool; + address?: string; + tokenIn: TokenInRoute; + tokenOut: TokenInRoute; + sqrtRatioX96: string; + liquidity: string; + tickCurrent: string; + fee: string; + amountIn?: string; + amountOut?: string; +}; + +export type PartialClassicQuote = { + // We need tokenIn/Out to support native currency + tokenIn: string; + tokenOut: string; + tradeType: TradeType; + route: Array<(V3PoolInRoute | V2PoolInRoute)[]>; +}; + +interface RouteResult { + routev3: V3Route<Currency, Currency> | null; + routev2: V2Route<Currency, Currency> | null; + mixedRoute: MixedRouteSDK | null; + inputAmount: CurrencyAmount<Currency>; + outputAmount: CurrencyAmount<Currency>; +} + +// Helper class to convert routing-specific quote entities to RouterTrade entities +// the returned RouterTrade can then be used to build the UniswapTrade entity in this package +export class RouterTradeAdapter { + // Generate a RouterTrade using fields from a classic quote response + static fromClassicQuote(quote: PartialClassicQuote) { + const { route } = quote; + + if (!route) throw new Error("Expected route to be present"); + if (!route.length) + throw new Error("Expected there to be at least one route"); + if (route.some((r) => !r.length)) + throw new Error("Expected all routes to have at least one pool"); + const firstRoute = route[0]; + + const tokenInData = firstRoute[0].tokenIn; + const tokenOutData = firstRoute[firstRoute.length - 1].tokenOut; + + if (!tokenInData || !tokenOutData) + throw new Error("Expected both tokenIn and tokenOut to be present"); + if (tokenInData.chainId !== tokenOutData.chainId) + throw new Error("Expected tokenIn and tokenOut to have the same chainId"); + + const parsedCurrencyIn = RouterTradeAdapter.toCurrency(false, tokenInData); + const parsedCurrencyOut = RouterTradeAdapter.toCurrency( + false, + tokenOutData + ); + + const typedRoutes: RouteResult[] = route.map((subRoute) => { + const rawAmountIn = subRoute[0].amountIn; + const rawAmountOut = subRoute[subRoute.length - 1].amountOut; + + if (!rawAmountIn || !rawAmountOut) { + throw new Error( + "Expected both raw amountIn and raw amountOut to be present" + ); + } + + const inputAmount = CurrencyAmount.fromRawAmount( + parsedCurrencyIn, + rawAmountIn + ); + const outputAmount = CurrencyAmount.fromRawAmount( + parsedCurrencyOut, + rawAmountOut + ); + + const isOnlyV2 = RouterTradeAdapter.isVersionedRoute( + PoolType.V2Pool, + subRoute + ); + const isOnlyV3 = 
RouterTradeAdapter.isVersionedRoute( + PoolType.V3Pool, + subRoute + ); + + return { + routev3: isOnlyV3 + ? new V3Route( + (subRoute as V3PoolInRoute[]).map(RouterTradeAdapter.toPool), + parsedCurrencyIn, + parsedCurrencyOut + ) + : null, + routev2: isOnlyV2 + ? new V2Route( + (subRoute as V2PoolInRoute[]).map(RouterTradeAdapter.toPair), + parsedCurrencyIn, + parsedCurrencyOut + ) + : null, + mixedRoute: + !isOnlyV3 && !isOnlyV2 + ? new MixedRouteSDK( + subRoute.map(RouterTradeAdapter.toPoolOrPair), + parsedCurrencyIn, + parsedCurrencyOut + ) + : null, + inputAmount, + outputAmount, + }; + }); + + return new RouterTrade({ + v2Routes: typedRoutes + .filter((route) => route.routev2) + .map((route) => ({ + routev2: route.routev2 as V2Route, + inputAmount: route.inputAmount, + outputAmount: route.outputAmount, + })), + v3Routes: typedRoutes + .filter((route) => route.routev3) + .map((route) => ({ + routev3: route.routev3 as V3Route, + inputAmount: route.inputAmount, + outputAmount: route.outputAmount, + })), + // TODO: ROUTE-219 - Support v4 trade in universal-router sdk + v4Routes: [], + mixedRoutes: typedRoutes + .filter((route) => route.mixedRoute) + .map((route) => ({ + mixedRoute: route.mixedRoute as MixedRouteSDK, + inputAmount: route.inputAmount, + outputAmount: route.outputAmount, + })), + tradeType: quote.tradeType, + }); + } + + private static toCurrency(isNative: boolean, token: TokenInRoute): Currency { + if (isNative) { + return Ether.onChain(token.chainId); + } + return this.toToken(token); + } + + private static toPoolOrPair = ( + pool: V3PoolInRoute | V2PoolInRoute + ): Pool | Pair => { + return pool.type === PoolType.V3Pool + ? RouterTradeAdapter.toPool(pool) + : RouterTradeAdapter.toPair(pool); + }; + + private static toToken(token: TokenInRoute): Token { + const { chainId, address, decimals, symbol, buyFeeBps, sellFeeBps } = token; + return new Token( + chainId, + address, + parseInt(decimals.toString()), + symbol, + /* name */ undefined, + false, + buyFeeBps ? BigNumber.from(buyFeeBps) : undefined, + sellFeeBps ? 
BigNumber.from(sellFeeBps) : undefined ); } + + private static toPool({ + fee, + sqrtRatioX96, + liquidity, + tickCurrent, + tokenIn, + tokenOut, + }: V3PoolInRoute): Pool { + return new Pool( + RouterTradeAdapter.toToken(tokenIn), + RouterTradeAdapter.toToken(tokenOut), + parseInt(fee) as FeeAmount, + sqrtRatioX96, + liquidity, + parseInt(tickCurrent) + ); + } + + private static toPair = ({ reserve0, reserve1 }: V2PoolInRoute): Pair => { + return new Pair( + CurrencyAmount.fromRawAmount( + RouterTradeAdapter.toToken(reserve0.token), + reserve0.quotient + ), + CurrencyAmount.fromRawAmount( + RouterTradeAdapter.toToken(reserve1.token), + reserve1.quotient + ) + ); + }; + + private static isVersionedRoute<T extends V2PoolInRoute | V3PoolInRoute>( + type: PoolType, + route: (V3PoolInRoute | V2PoolInRoute)[] + ): route is T[] { + return route.every((pool) => pool.type === type); + } +} diff --git a/api/_dexes/uniswap/trading-api.ts b/api/_dexes/uniswap/trading-api.ts new file mode 100644 index 000000000..1a381b307 --- /dev/null +++ b/api/_dexes/uniswap/trading-api.ts @@ -0,0 +1,154 @@ +import { TradeType } from "@uniswap/sdk-core"; +import axios, { AxiosError } from "axios"; + +import { Swap } from "../types"; +import { V2PoolInRoute, V3PoolInRoute } from "./adapter"; + +export type UniswapClassicQuoteFromApi = { + chainId: number; + input: { + amount: string; + token: string; + }; + output: { + amount: string; + token: string; + recipient: string; + }; + swapper: string; + route: Array<(V3PoolInRoute | V2PoolInRoute)[]>; + slippage: number; + tradeType: "EXACT_OUTPUT" | "EXACT_INPUT"; + quoteId: string; +}; + +export type UniswapIndicativeQuoteFromApi = Awaited< + ReturnType<typeof getUniswapClassicIndicativeQuoteFromApi> +>; + +export type UniswapParamForApi = Omit & { + swapper: string; + slippageTolerance?: number; +}; + +export const UNISWAP_TRADING_API_BASE_URL = + process.env.UNISWAP_TRADING_API_BASE_URL || + "https://trading-api-labs.interface.gateway.uniswap.org/v1"; + +export const UNISWAP_API_KEY = + process.env.UNISWAP_API_KEY || "JoyCGj29tT4pymvhaGciK4r1aIPvqW6W53xT1fwo"; + +/** + * Based on https://uniswap-docs.readme.io/reference/aggregator_quote-1 + */ +export async function getUniswapClassicQuoteFromApi( + swap: UniswapParamForApi, + tradeType: TradeType +) { + const response = await axios.post<{ + requestId: string; + routing: "CLASSIC"; + quote: UniswapClassicQuoteFromApi; + }>( + `${UNISWAP_TRADING_API_BASE_URL}/quote`, + { + type: + tradeType === TradeType.EXACT_INPUT ? "EXACT_INPUT" : "EXACT_OUTPUT", + tokenInChainId: swap.tokenIn.chainId, + tokenOutChainId: swap.tokenOut.chainId, + tokenIn: swap.tokenIn.address, + tokenOut: swap.tokenOut.address, + swapper: swap.swapper, + slippageTolerance: swap.slippageTolerance, + autoSlippage: swap.slippageTolerance ? undefined : "DEFAULT", + amount: swap.amount, + urgency: "urgent", + routingPreference: "CLASSIC", + }, + { + headers: { + "x-api-key": UNISWAP_API_KEY, + }, + } + ); + return response.data; +} + +export async function getUniswapClassicIndicativeQuoteFromApi( + swap: UniswapParamForApi, + tradeType: TradeType, + useFallback: boolean = true +) { + try { + const response = await axios.post<{ + requestId: string; + input: { + amount: string; + chainId: number; + token: string; + }; + output: { + amount: string; + chainId: number; + token: string; + }; + }>( + `${UNISWAP_TRADING_API_BASE_URL}/indicative_quote`, + { + type: + tradeType === TradeType.EXACT_INPUT ? 
"EXACT_INPUT" : "EXACT_OUTPUT", + amount: swap.amount, + tokenInChainId: swap.tokenIn.chainId, + tokenOutChainId: swap.tokenOut.chainId, + tokenIn: swap.tokenIn.address, + tokenOut: swap.tokenOut.address, + }, + { + headers: { + "x-api-key": UNISWAP_API_KEY, + }, + } + ); + return response.data; + } catch (error) { + if (error instanceof AxiosError && error.response?.status === 404) { + if (useFallback) { + const { quote } = await getUniswapClassicQuoteFromApi(swap, tradeType); + return quote; + } + } + throw error; + } +} + +export async function getUniswapClassicCalldataFromApi( + classicQuote: UniswapClassicQuoteFromApi +) { + const response = await axios.post<{ + requestId: string; + swap: { + to: string; + from: string; + data: string; + value: string; + gasLimit: string; + chainId: number; + maxFeePerGas: string; + maxPriorityFeePerGas: string; + gasPrice: string; + }; + }>( + `${UNISWAP_TRADING_API_BASE_URL}/swap`, + { + quote: classicQuote, + simulateTransaction: false, + urgency: "urgent", + }, + { + headers: { + "x-api-key": UNISWAP_API_KEY, + }, + } + ); + return response.data; +} diff --git a/api/_dexes/uniswap/utils.ts b/api/_dexes/uniswap/utils.ts new file mode 100644 index 000000000..9699a7adc --- /dev/null +++ b/api/_dexes/uniswap/utils.ts @@ -0,0 +1,50 @@ +import { CHAIN_IDs, TOKEN_SYMBOLS_MAP } from "@across-protocol/constants"; +import { Percent } from "@uniswap/sdk-core"; + +import { Token, QuoteFetchStrategy } from "../types"; + +export type UniswapQuoteFetchStrategy = QuoteFetchStrategy; + +// Maps testnet chain IDs to their prod counterparts. Used to get the prod token +// info for testnet tokens. +const TESTNET_TO_PROD = { + [CHAIN_IDs.SEPOLIA]: CHAIN_IDs.MAINNET, + [CHAIN_IDs.BASE_SEPOLIA]: CHAIN_IDs.BASE, + [CHAIN_IDs.OPTIMISM_SEPOLIA]: CHAIN_IDs.OPTIMISM, + [CHAIN_IDs.ARBITRUM_SEPOLIA]: CHAIN_IDs.ARBITRUM, +}; + +export const UNISWAP_TRADING_API_BASE_URL = + process.env.UNISWAP_TRADING_API_BASE_URL || + "https://trading-api-labs.interface.gateway.uniswap.org/v1"; + +export const UNISWAP_API_KEY = + process.env.UNISWAP_API_KEY || "JoyCGj29tT4pymvhaGciK4r1aIPvqW6W53xT1fwo"; + +export function getProdToken(token: Token) { + const prodChainId = TESTNET_TO_PROD[token.chainId] || token.chainId; + + const prodToken = + TOKEN_SYMBOLS_MAP[token.symbol as keyof typeof TOKEN_SYMBOLS_MAP]; + const prodTokenAddress = prodToken?.addresses[prodChainId]; + + if (!prodToken || !prodTokenAddress) { + throw new Error( + `Prod token not found for ${token.symbol} on chain ${token.chainId}` + ); + } + + return { + ...prodToken, + chainId: prodChainId, + address: prodTokenAddress, + }; +} + +export function floatToPercent(value: number) { + return new Percent( + // max. slippage decimals is 2 + Number(value.toFixed(2)) * 100, + 10_000 + ); +} diff --git a/api/_dexes/utils.ts b/api/_dexes/utils.ts index 8ecfef6b8..6fa452b55 100644 --- a/api/_dexes/utils.ts +++ b/api/_dexes/utils.ts @@ -396,6 +396,7 @@ export function getQuoteFetchStrategy( return ( strategies.swapPairs?.[chainId]?.[tokenInSymbol]?.[tokenOutSymbol] ?? strategies.chains?.[chainId] ?? + strategies.default ?? 
defaultQuoteFetchStrategy ); } diff --git a/api/relay/_queue.ts b/api/relay/_queue.ts new file mode 100644 index 000000000..9ed25c961 --- /dev/null +++ b/api/relay/_queue.ts @@ -0,0 +1,50 @@ +import { Client } from "@upstash/qstash"; + +import { RelayRequest, RelayStrategy, RelayStrategyName } from "./_types"; +import { resolveVercelEndpoint } from "../_utils"; + +const client = new Client({ + token: process.env.QSTASH_TOKEN!, +}); + +export async function pushRelayRequestToQueue({ + request, + strategy, +}: { + request: RelayRequest; + strategy: RelayStrategy; +}) { + const strategyName = strategy.strategyName; + const queue = getRelayRequestQueue(strategyName, request.chainId); + await queue.upsert({ + parallelism: strategy.queueParallelism, + }); + + const baseUrl = resolveVercelEndpoint(true); + const response = await queue.enqueueJSON({ + retries: 3, + contentBasedDeduplication: true, + headers: new Headers({ + "Retry-After": "10", + }), + url: `${baseUrl}/api/relay/jobs/process`, + body: { + request, + strategyName, + }, + }); + return response; +} + +function getRelayRequestQueue( + strategyName: RelayStrategyName, + chainId: number +) { + const queueName = + strategyName === "gelato" + ? `relay-request-queue-gelato` + : `relay-request-queue-${strategyName}-${chainId}`; + return client.queue({ + queueName, + }); +} diff --git a/api/relay/_strategies/gelato.ts b/api/relay/_strategies/gelato.ts new file mode 100644 index 000000000..9ce8572f6 --- /dev/null +++ b/api/relay/_strategies/gelato.ts @@ -0,0 +1,120 @@ +import axios from "axios"; +import { RelayRequest, RelayStrategy } from "../_types"; +import { encodeCalldataForRelayRequest } from "../_utils"; + +const GELATO_API_KEY = process.env.GELATO_API_KEY; + +export function getGelatoStrategy(): RelayStrategy { + return { + strategyName: "gelato", + queueParallelism: 1, + relay: async (request: RelayRequest) => { + const encodedCalldata = encodeCalldataForRelayRequest(request); + + const taskId = await relayWithGelatoApi({ + chainId: request.chainId, + target: request.to, + data: encodedCalldata, + }); + + let txHash: string | undefined; + + while (true) { + const taskStatus = await getGelatoTaskStatus(taskId); + + if ( + ["Cancelled", "NotFound", "ExecReverted", "Blacklisted"].includes( + taskStatus.taskState + ) + ) { + throw new GelatoTaskStatusError(taskStatus); + } + + if (taskStatus.transactionHash) { + txHash = taskStatus.transactionHash; + break; + } + + await new Promise((resolve) => setTimeout(resolve, 1_000)); + } + + return txHash; + }, + }; +} + +const gelatoBaseUrl = "https://api.gelato.digital"; + +async function relayWithGelatoApi({ + chainId, + target, + data, +}: { + chainId: number; + target: string; + data: string; +}) { + if (!GELATO_API_KEY) { + throw new Error("Can not call Gelato API: key is not set"); + } + + const response = await axios.post( + `${gelatoBaseUrl}/relays/v2/sponsored-call`, + { + chainId, + target, + data, + sponsorApiKey: GELATO_API_KEY, + } + ); + + return response.data.taskId as string; +} + +type TaskStatus = { + taskState: + | "CheckPending" + | "ExecPending" + | "ExecSuccess" + | "ExecReverted" + | "WaitingForConfirmation" + | "Blacklisted" + | "Cancelled" + | "NotFound"; + chainId: number; + taskId: string; + creationDate: string; + lastCheckDate?: string; + lastCheckMessage?: string; + transactionHash?: string; + blockNumber?: number; + executionDate?: string; + gasUsed?: string; + effectiveGasPrice?: string; +}; + +async function getGelatoTaskStatus(taskId: string) { + const response = 
await axios.get<{ task: TaskStatus }>( + `${gelatoBaseUrl}/tasks/status/${taskId}` + ); + return response.data.task; +} + +class GelatoTaskStatusError extends Error { + taskStatus: TaskStatus; + + constructor(taskStatus: TaskStatus) { + super( + `Can not relay request via Gelato due to task state ${taskStatus.taskState}` + ); + this.taskStatus = taskStatus; + } + + toJSON() { + return { + name: this.name, + message: this.message, + taskStatus: this.taskStatus, + }; + } +} diff --git a/api/relay/_strategies/index.ts b/api/relay/_strategies/index.ts new file mode 100644 index 000000000..9b621c0eb --- /dev/null +++ b/api/relay/_strategies/index.ts @@ -0,0 +1,11 @@ +import { RelayStrategy, RelayStrategyName } from "../_types"; +import { getGelatoStrategy } from "./gelato"; +import { getLocalSignersStrategy } from "./local-signers"; + +const gelatoStrategy = getGelatoStrategy(); +const localSignersStrategy = getLocalSignersStrategy(); + +export const strategiesByName = { + [gelatoStrategy.strategyName]: gelatoStrategy, + [localSignersStrategy.strategyName]: localSignersStrategy, +} as Record; diff --git a/api/relay/_strategies/local-signers.ts b/api/relay/_strategies/local-signers.ts new file mode 100644 index 000000000..d895f1b82 --- /dev/null +++ b/api/relay/_strategies/local-signers.ts @@ -0,0 +1,92 @@ +import { Wallet, utils } from "ethers"; + +import { RelayRequest, RelayStrategy } from "../_types"; +import { encodeCalldataForRelayRequest } from "../_utils"; +import { redisCache } from "../../_cache"; +import { getProvider } from "../../_utils"; + +const localSignerPrivateKeys = + process.env.LOCAL_SIGNER_PRIVATE_KEYS!.split(","); +const balanceAlertThreshold = utils.parseEther("0.000001"); // TODO: Refine value + +export function getLocalSignersStrategy(): RelayStrategy { + return { + strategyName: "local-signers", + queueParallelism: 1, // TODO: Should be dynamic based on the number of local signers + relay: async (request: RelayRequest) => { + const encodedCalldata = encodeCalldataForRelayRequest(request); + + if (localSignerPrivateKeys.length === 0) { + throw new Error( + "Can not relay tx via local signers: No local signers found" + ); + } + + for (const signerPrivateKey of localSignerPrivateKeys) { + const provider = getProvider(request.chainId); + const wallet = new Wallet(signerPrivateKey, provider); + try { + await lockSigner(wallet.address, request.chainId); + + const balance = await wallet.getBalance(); + if (balance.lt(balanceAlertThreshold)) { + // TODO: Send PD alert + } + + const txRequest = { + chainId: request.chainId, + to: request.to, + data: encodedCalldata, + from: wallet.address, + }; + const tx = await wallet.sendTransaction(txRequest); + const receipt = await tx.wait(); + return receipt.transactionHash; + } catch (error) { + if (error instanceof SignerLockedError) { + continue; + } + throw error; + } finally { + await unlockSigner(wallet.address, request.chainId); + } + } + + throw new Error( + "Can not relay tx via local signers: All local signers are locked" + ); + }, + }; +} + +async function lockSigner(signerAddress: string, chainId: number) { + const lockKey = getLockKey(signerAddress, chainId); + const lockValue = await redisCache.get(lockKey); + + if (lockValue) { + throw new SignerLockedError(signerAddress, chainId); + } + + await redisCache.set(lockKey, "true", 30); +} + +async function unlockSigner(signerAddress: string, chainId: number) { + const lockKey = getLockKey(signerAddress, chainId); + + const lockValue = await redisCache.get(lockKey); + if 
(!lockValue) { + return; + } + + await redisCache.del(lockKey); +} + +function getLockKey(signerAddress: string, chainId: number) { + return `signer-lock:${signerAddress}:${chainId}`; +} + +class SignerLockedError extends Error { + constructor(signerAddress: string, chainId: number) { + super(`Signer ${signerAddress} on chain ${chainId} is already locked`); + } +} diff --git a/api/relay/_types.ts b/api/relay/_types.ts new file mode 100644 index 000000000..95ea1076c --- /dev/null +++ b/api/relay/_types.ts @@ -0,0 +1,19 @@ +import { validateMethodArgs } from "./_utils"; + +export type RelayStrategyName = "gelato" | "local-signers"; + +export type RelayRequest = { + chainId: number; + to: string; + methodNameAndArgs: ReturnType; + signatures: { + permit: string; // use this for all auth signatures + deposit: string; + }; +}; + +export type RelayStrategy = { + strategyName: RelayStrategyName; + queueParallelism: number; + relay: (request: RelayRequest) => Promise; +}; diff --git a/api/relay/_utils.ts b/api/relay/_utils.ts new file mode 100644 index 000000000..de5465602 --- /dev/null +++ b/api/relay/_utils.ts @@ -0,0 +1,468 @@ +import { assert, Infer, type } from "superstruct"; +import { utils } from "ethers"; + +import { bytes32, hexString, positiveIntStr, validAddress } from "../_utils"; +import { getPermitTypedData } from "../_permit"; +import { InvalidParamError } from "../_errors"; +import { + encodeDepositWithAuthCalldata, + encodeDepositWithPermitCalldata, + encodeSwapAndBridgeWithAuthCalldata, + encodeSwapAndBridgeWithPermitCalldata, + getDepositTypedData, + getSwapAndDepositTypedData, +} from "../_spoke-pool-periphery"; +import { RelayRequest } from "./_types"; +import { redisCache } from "../_cache"; +import { getReceiveWithAuthTypedData } from "../_transfer-with-auth"; + +export const GAS_SPONSOR_ADDRESS = + process.env.GAS_SPONSOR_ADDRESS ?? 
+ "0x0000000000000000000000000000000000000000"; + +const SubmissionFeesSchema = type({ + amount: positiveIntStr(), + recipient: validAddress(), +}); + +const BaseDepositDataSchema = type({ + inputToken: validAddress(), + outputToken: validAddress(), + outputAmount: positiveIntStr(), + depositor: validAddress(), + recipient: validAddress(), + destinationChainId: positiveIntStr(), + exclusiveRelayer: validAddress(), + quoteTimestamp: positiveIntStr(), + fillDeadline: positiveIntStr(), + exclusivityParameter: positiveIntStr(), + message: hexString(), +}); + +const SwapAndDepositDataSchema = type({ + submissionFees: SubmissionFeesSchema, + depositData: BaseDepositDataSchema, + swapToken: validAddress(), + exchange: validAddress(), + transferType: positiveIntStr(), + swapTokenAmount: positiveIntStr(), + minExpectedInputTokenAmount: positiveIntStr(), + routerCalldata: hexString(), +}); + +const DepositDataSchema = type({ + submissionFees: SubmissionFeesSchema, + baseDepositData: BaseDepositDataSchema, + inputAmount: positiveIntStr(), +}); + +export const DepositWithPermitArgsSchema = type({ + signatureOwner: validAddress(), + depositData: DepositDataSchema, + deadline: positiveIntStr(), +}); + +export const SwapAndDepositWithPermitArgsSchema = type({ + signatureOwner: validAddress(), + swapAndDepositData: SwapAndDepositDataSchema, + deadline: positiveIntStr(), +}); + +export const DepositWithAuthArgsSchema = type({ + signatureOwner: validAddress(), + depositData: DepositDataSchema, + validAfter: positiveIntStr(), + validBefore: positiveIntStr(), + nonce: bytes32(), +}); + +export const SwapAndDepositWithAuthArgsSchema = type({ + signatureOwner: validAddress(), + swapAndDepositData: SwapAndDepositDataSchema, + validAfter: positiveIntStr(), + validBefore: positiveIntStr(), + nonce: bytes32(), +}); + +export const allowedMethodNames = [ + "depositWithPermit", + "swapAndBridgeWithPermit", + "depositWithAuthorization", + "swapAndBridgeWithAuthorization", +] as const; + +export function validateMethodArgs( + methodName: (typeof allowedMethodNames)[number], + args: any +) { + if (methodName === "depositWithPermit") { + assert(args, DepositWithPermitArgsSchema); + return { + args: args as Infer, + methodName, + } as const; + } else if (methodName === "swapAndBridgeWithPermit") { + assert(args, SwapAndDepositWithPermitArgsSchema); + return { + args: args as Infer, + methodName, + } as const; + } else if (methodName === "depositWithAuthorization") { + assert(args, DepositWithAuthArgsSchema); + return { + args: args as Infer, + methodName, + } as const; + } else if (methodName === "swapAndBridgeWithAuthorization") { + assert(args, SwapAndDepositWithAuthArgsSchema); + return { + args: args as Infer, + methodName, + } as const; + } + throw new Error(`Invalid method name: ${methodName}`); +} + +export async function verifySignatures({ + methodNameAndArgs, + signatures, + chainId, + to, +}: RelayRequest) { + const { methodName, args } = methodNameAndArgs; + + let signatureOwner: string; + let getPermitTypedDataPromise: + | ReturnType + | undefined; + let getDepositTypedDataPromise: ReturnType< + typeof getDepositTypedData | typeof getSwapAndDepositTypedData + >; + let getReceiveWithAuthTypedDataPromise: + | ReturnType + | undefined; + + if (methodName === "depositWithPermit") { + const { signatureOwner: _signatureOwner, deadline, depositData } = args; + signatureOwner = _signatureOwner; + getPermitTypedDataPromise = getPermitTypedData({ + tokenAddress: depositData.baseDepositData.inputToken, + chainId, + 
ownerAddress: signatureOwner, + spenderAddress: to, // SpokePoolV3Periphery address + value: depositData.inputAmount, + deadline: Number(deadline), + }); + getDepositTypedDataPromise = getDepositTypedData({ + chainId, + depositData, + }); + } else if (methodName === "swapAndBridgeWithPermit") { + const { + signatureOwner: _signatureOwner, + deadline, + swapAndDepositData, + } = args; + signatureOwner = _signatureOwner; + getPermitTypedDataPromise = getPermitTypedData({ + tokenAddress: swapAndDepositData.swapToken, + chainId, + ownerAddress: signatureOwner, + spenderAddress: to, // SpokePoolV3Periphery address + value: swapAndDepositData.swapTokenAmount, + deadline: Number(deadline), + }); + getDepositTypedDataPromise = getSwapAndDepositTypedData({ + chainId, + swapAndDepositData, + }); + } else if (methodName === "depositWithAuthorization") { + const { + signatureOwner: _signatureOwner, + validAfter, + validBefore, + nonce, + depositData, + } = args; + signatureOwner = _signatureOwner; + getReceiveWithAuthTypedDataPromise = getReceiveWithAuthTypedData({ + tokenAddress: depositData.baseDepositData.inputToken, + chainId, + ownerAddress: signatureOwner, + spenderAddress: to, // SpokePoolV3Periphery address + value: depositData.inputAmount, + validAfter: Number(validAfter), + validBefore: Number(validBefore), + nonce, + }); + getDepositTypedDataPromise = getDepositTypedData({ + chainId, + depositData, + }); + } else if (methodName === "swapAndBridgeWithAuthorization") { + const { + signatureOwner: _signatureOwner, + validAfter, + validBefore, + nonce, + swapAndDepositData, + } = args; + signatureOwner = _signatureOwner; + getReceiveWithAuthTypedDataPromise = getReceiveWithAuthTypedData({ + tokenAddress: swapAndDepositData.swapToken, + chainId, + ownerAddress: signatureOwner, + spenderAddress: to, // SpokePoolV3Periphery address + value: swapAndDepositData.swapTokenAmount, + validAfter: Number(validAfter), + validBefore: Number(validBefore), + nonce, + }); + getDepositTypedDataPromise = getSwapAndDepositTypedData({ + chainId, + swapAndDepositData, + }); + } else { + throw new Error( + `Can not verify signatures for invalid method name: ${methodName}` + ); + } + + if (getPermitTypedDataPromise) { + const [permitTypedData, depositTypedData] = await Promise.all([ + getPermitTypedDataPromise, + getDepositTypedDataPromise, + ]); + + const recoveredPermitSignerAddress = utils.verifyTypedData( + permitTypedData.eip712.domain, + permitTypedData.eip712.types, + permitTypedData.eip712.message, + signatures.permit + ); + + if (recoveredPermitSignerAddress !== signatureOwner) { + throw new InvalidParamError({ + message: "Invalid permit signature", + param: "signatures.permit", + }); + } + + const recoveredDepositSignerAddress = utils.verifyTypedData( + depositTypedData.eip712.domain, + depositTypedData.eip712.types, + depositTypedData.eip712.message, + signatures.deposit + ); + if (recoveredDepositSignerAddress !== signatureOwner) { + throw new InvalidParamError({ + message: "Invalid deposit signature", + param: "signatures.deposit", + }); + } + } else if (getReceiveWithAuthTypedDataPromise) { + const [authTypedData, depositTypedData] = await Promise.all([ + getReceiveWithAuthTypedDataPromise, + getDepositTypedDataPromise, + ]); + + const recoveredAuthSignerAddress = utils.verifyTypedData( + authTypedData.eip712.domain, + authTypedData.eip712.types, + authTypedData.eip712.message, + signatures.permit + ); + + if (recoveredAuthSignerAddress !== signatureOwner) { + throw new InvalidParamError({ + message: 
"Invalid Authorization signature", + param: "signatures.permit", + }); + } + + const recoveredDepositSignerAddress = utils.verifyTypedData( + depositTypedData.eip712.domain, + depositTypedData.eip712.types, + depositTypedData.eip712.message, + signatures.deposit + ); + + if (recoveredDepositSignerAddress !== signatureOwner) { + throw new InvalidParamError({ + message: "Invalid deposit signature", + param: "signatures.deposit", + }); + } + } +} + +export function encodeCalldataForRelayRequest(request: RelayRequest) { + let encodedCalldata: string; + + if (request.methodNameAndArgs.methodName === "depositWithPermit") { + encodedCalldata = encodeDepositWithPermitCalldata({ + ...request.methodNameAndArgs.args, + deadline: Number(request.methodNameAndArgs.args.deadline), + depositDataSignature: request.signatures.deposit, + permitSignature: request.signatures.permit, + }); + } else if ( + request.methodNameAndArgs.methodName === "swapAndBridgeWithPermit" + ) { + encodedCalldata = encodeSwapAndBridgeWithPermitCalldata({ + ...request.methodNameAndArgs.args, + deadline: Number(request.methodNameAndArgs.args.deadline), + swapAndDepositDataSignature: request.signatures.deposit, + permitSignature: request.signatures.permit, + }); + } else if ( + request.methodNameAndArgs.methodName === "depositWithAuthorization" + ) { + encodedCalldata = encodeDepositWithAuthCalldata({ + ...request.methodNameAndArgs.args, + validAfter: Number(request.methodNameAndArgs.args.validAfter), + validBefore: Number(request.methodNameAndArgs.args.validBefore), + nonce: request.methodNameAndArgs.args.nonce, + receiveWithAuthSignature: request.signatures.permit, + depositDataSignature: request.signatures.deposit, + }); + } else if ( + request.methodNameAndArgs.methodName === "swapAndBridgeWithAuthorization" + ) { + encodedCalldata = encodeSwapAndBridgeWithAuthCalldata({ + ...request.methodNameAndArgs.args, + validAfter: Number(request.methodNameAndArgs.args.validAfter), + validBefore: Number(request.methodNameAndArgs.args.validBefore), + nonce: request.methodNameAndArgs.args.nonce, + receiveWithAuthSignature: request.signatures.permit, + swapAndDepositDataSignature: request.signatures.deposit, + }); + } + // TODO: Add cases for `withPermit2` + else { + throw new Error(`Can not encode calldata for relay request`); + } + return encodedCalldata; +} + +export function getRelayRequestHash(request: RelayRequest) { + return utils.keccak256( + utils.defaultAbiCoder.encode( + ["bytes", "bytes"], + [request.signatures.permit, request.signatures.deposit] + ) + ); +} + +type CachedRelayRequest = + | { + status: "pending"; + request: RelayRequest; + messageId: string; + } + | { + status: "success"; + request: RelayRequest; + txHash: string; + messageId: string; + } + | { + status: "failure"; + request: RelayRequest; + error: Error; + messageId: string; + } + | { + status: "unknown"; + }; +// TODO: Refine value +const cachedRelayRequestTTL = 5 * 60 * 60 * 24; // 5 days + +export async function getCachedRelayRequest( + requestOrHash: RelayRequest | string +): Promise { + const cachedRelayRequest = await redisCache.get( + getRelayRequestCacheKey(requestOrHash) + ); + + if (!cachedRelayRequest) { + return { + status: "unknown", + }; + } + + return cachedRelayRequest; +} + +export async function setCachedRelayRequestPending(params: { + messageId: string; + request: RelayRequest; +}) { + await redisCache.set( + getRelayRequestCacheKey(params.request), + { + status: "pending", + messageId: params.messageId, + request: params.request, + }, + 
cachedRelayRequestTTL + ); +} + +export async function setCachedRelayRequestFailure(params: { + request: RelayRequest; + error: Error; +}) { + const cachedRelayRequest = await getCachedRelayRequest(params.request); + + if (!cachedRelayRequest || cachedRelayRequest.status === "unknown") { + throw new Error("Request not found in cache"); + } + + if (cachedRelayRequest.status === "success") { + throw new Error( + "Can not set 'failure' status for request that is already successful" + ); + } + + await redisCache.set( + getRelayRequestCacheKey(params.request), + { + status: "failure", + messageId: cachedRelayRequest.messageId, + request: cachedRelayRequest.request, + error: params.error, + }, + cachedRelayRequestTTL + ); +} + +export async function setCachedRelayRequestSuccess(params: { + request: RelayRequest; + txHash: string; +}) { + const cachedRelayRequest = await getCachedRelayRequest(params.request); + + if (!cachedRelayRequest || cachedRelayRequest.status === "unknown") { + throw new Error("Request not found in cache"); + } + + await redisCache.set( + getRelayRequestCacheKey(params.request), + { + status: "success", + messageId: cachedRelayRequest.messageId, + request: cachedRelayRequest.request, + txHash: params.txHash, + }, + cachedRelayRequestTTL + ); +} + +function getRelayRequestCacheKey(requestOrHash: RelayRequest | string) { + const requestHash = + typeof requestOrHash === "string" + ? requestOrHash + : getRelayRequestHash(requestOrHash); + return `relay-request:${requestHash}`; +} diff --git a/api/relay/index.ts b/api/relay/index.ts new file mode 100644 index 000000000..c5d13003b --- /dev/null +++ b/api/relay/index.ts @@ -0,0 +1,92 @@ +import { VercelRequest, VercelResponse } from "@vercel/node"; +import { object, number, assert, enums } from "superstruct"; + +import { handleErrorCondition } from "../_errors"; +import { getLogger, hexString, validAddress } from "../_utils"; +import { + allowedMethodNames, + getRelayRequestHash, + setCachedRelayRequestPending, + validateMethodArgs, + verifySignatures, +} from "./_utils"; +import { strategiesByName } from "./_strategies"; +import { CHAIN_IDs } from "../_constants"; +import { pushRelayRequestToQueue } from "./_queue"; +import { RelayRequest } from "./_types"; + +export const BaseRelayRequestBodySchema = object({ + chainId: number(), + to: validAddress(), + methodName: enums(allowedMethodNames), + argsWithoutSignatures: object(), + signatures: object({ + permit: hexString(), + deposit: hexString(), + }), +}); + +const strategies = { + default: strategiesByName["gelato"], + [CHAIN_IDs.WORLD_CHAIN]: strategiesByName["local-signers"], +}; + +export default async function handler( + request: VercelRequest, + response: VercelResponse +) { + const logger = getLogger(); + logger.debug({ + at: "Relay", + message: "Request body", + body: request.body, + }); + + try { + if (request.method !== "POST") { + return response.status(405).json({ error: "Method not allowed" }); + } + + assert(request.body, BaseRelayRequestBodySchema); + + // Validate method-specific request body + const methodNameAndArgs = validateMethodArgs( + request.body.methodName, + request.body.argsWithoutSignatures + ); + + // Verify signatures + const { signatures, chainId, to } = request.body; + await verifySignatures({ + methodNameAndArgs, + signatures, + chainId, + to, + }); + + // Push request to queue + const strategy = strategies[chainId] ?? 
strategies.default; + const relayRequest: RelayRequest = { + chainId, + to, + methodNameAndArgs, + signatures, + }; + const queueResponse = await pushRelayRequestToQueue({ + request: relayRequest, + strategy, + }); + + // Store requestId in database + await setCachedRelayRequestPending({ + messageId: queueResponse.messageId, + request: relayRequest, + }); + + response.status(200).json({ + requestHash: getRelayRequestHash(relayRequest), + }); + } catch (error) { + return handleErrorCondition("api/relay", response, logger, error); + } +} diff --git a/api/relay/jobs/process.ts b/api/relay/jobs/process.ts new file mode 100644 index 000000000..0d3973bf8 --- /dev/null +++ b/api/relay/jobs/process.ts @@ -0,0 +1,127 @@ +import { VercelRequest, VercelResponse } from "@vercel/node"; +import { assert, enums, number, object, type } from "superstruct"; +import { Receiver } from "@upstash/qstash"; + +import { handleErrorCondition, InvalidParamError } from "../../_errors"; +import { getLogger, hexString, validAddress } from "../../_utils"; +import { + validateMethodArgs, + verifySignatures, + setCachedRelayRequestSuccess, + setCachedRelayRequestFailure, + getCachedRelayRequest, + allowedMethodNames, +} from "../_utils"; +import { RelayRequest, RelayStrategyName } from "../_types"; +import { strategiesByName } from "../_strategies"; + +const messageReceiver = new Receiver({ + currentSigningKey: process.env.QSTASH_CURRENT_SIGNING_KEY!, + nextSigningKey: process.env.QSTASH_NEXT_SIGNING_KEY!, +}); + +const RelayProcessJobBodySchema = type({ + strategyName: enums(Object.keys(strategiesByName)), + request: type({ + chainId: number(), + to: validAddress(), + methodNameAndArgs: type({ + methodName: enums(allowedMethodNames), + args: object(), + }), + signatures: object({ + permit: hexString(), + deposit: hexString(), + }), + }), +}); + +export default async function handler(req: VercelRequest, res: VercelResponse) { + const logger = getLogger(); + logger.debug({ + at: "Relay/jobs/process", + message: "Request body", + body: req.body, + }); + + try { + if (req.method !== "POST") { + return res.status(405).json({ error: "Method not allowed" }); + } + + // Verify message comes from QSTASH + const isValid = await messageReceiver.verify({ + signature: (req.headers["upstash-signature"] || + req.headers["Upstash-Signature"]) as string, + body: JSON.stringify(req.body), + }); + + if (!isValid) { + return res.status(401).json({ error: "Unauthorized" }); + } + + assert(req.body, RelayProcessJobBodySchema); + const { request, strategyName } = req.body; + + // Validate method-specific request body + const methodNameAndArgs = validateMethodArgs( + request.methodNameAndArgs.methodName, + request.methodNameAndArgs.args + ); + + // Verify user signatures + const { signatures, chainId, to } = request; + await verifySignatures({ + methodNameAndArgs, + signatures, + chainId, + to, + }); + + const strategy = strategiesByName[strategyName as RelayStrategyName]; + const relayRequest: RelayRequest = { + chainId, + to, + methodNameAndArgs, + signatures, + }; + + // Get cached request + const cachedRequest = await getCachedRelayRequest(relayRequest); + + if ( + !cachedRequest || + cachedRequest.status === "unknown" || + cachedRequest.status === "success" + ) { + throw new InvalidParamError({ + param: "request", + message: "Request not found in cache or succeeded already", + }); + } + + const { messageId } = cachedRequest; + + // Handle request via strategy + try { + const txHash = await strategy.relay(relayRequest); + // Store requestId 
in database + await setCachedRelayRequestSuccess({ + request: relayRequest, + txHash, + }); + return res.status(200).json({ + messageId, + txHash, + }); + } catch (error) { + await setCachedRelayRequestFailure({ + request: relayRequest, + error: error as Error, + }); + throw error; + } + } catch (error) { + return handleErrorCondition("relay/jobs/process", res, logger, error); + } +} diff --git a/api/relay/status.ts b/api/relay/status.ts new file mode 100644 index 000000000..0a029adf4 --- /dev/null +++ b/api/relay/status.ts @@ -0,0 +1,45 @@ +import { VercelResponse } from "@vercel/node"; +import { assert, type, Infer } from "superstruct"; + +import { handleErrorCondition, InputError } from "../_errors"; +import { getLogger, hexString } from "../_utils"; +import { getCachedRelayRequest } from "./_utils"; +import { TypedVercelRequest } from "../_types"; + +const RelayRequestStatusSchema = type({ + requestHash: hexString(), +}); + +type RelayRequestStatusType = Infer; + +export default async function handler( + request: TypedVercelRequest, + response: VercelResponse +) { + const logger = getLogger(); + logger.debug({ + at: "Relay/status", + message: "Request params", + params: request.query, + }); + + try { + assert(request.query, RelayRequestStatusSchema); + + const cachedRelayRequest = await getCachedRelayRequest( + request.query.requestHash + ); + + if (cachedRelayRequest.status === "unknown") { + throw new InputError({ + message: `Request with hash ${request.query.requestHash} is unknown`, + code: "INVALID_PARAM", + param: "requestHash", + }); + } + + response.status(200).json(cachedRelayRequest); + } catch (error) { + return handleErrorCondition("api/relay/status", response, logger, error); + } +} diff --git a/api/swap/auth/_service.ts b/api/swap/auth/_service.ts new file mode 100644 index 000000000..49c3dc142 --- /dev/null +++ b/api/swap/auth/_service.ts @@ -0,0 +1,122 @@ +import { assert, Infer, optional, type } from "superstruct"; +import { BigNumber } from "ethers"; +import * as sdk from "@across-protocol/sdk"; + +import { TypedVercelRequest } from "../../_types"; +import { positiveIntStr } from "../../_utils"; +import { getCrossSwapQuotes } from "../../_dexes/cross-swap-service"; +import { + handleBaseSwapQueryParams, + BaseSwapQueryParams, + buildBaseSwapResponseJson, +} from "../_utils"; +import { getSwapRouter02Strategy } from "../../_dexes/uniswap/swap-router-02"; +import { InvalidParamError } from "../../_errors"; +import { QuoteFetchStrategies } from "../../_dexes/utils"; +import { buildAuthTxPayload } from "./_utils"; +import { GAS_SPONSOR_ADDRESS } from "../../relay/_utils"; +import { getBalance } from "../../_erc20"; + +export const AuthSwapQueryParamsSchema = type({ + authDeadline: optional(positiveIntStr()), +}); + +export type AuthSwapQueryParams = Infer; + +const DEFAULT_AUTH_DEADLINE = sdk.utils.getCurrentTime() + 60 * 60 * 24 * 365; // 1 year + +// For auth-based flows, we have to use the `SpokePoolPeriphery` as an entry point +const quoteFetchStrategies: QuoteFetchStrategies = { + default: getSwapRouter02Strategy("SpokePoolPeriphery"), +}; + +export async function handleAuthSwap( + request: TypedVercelRequest +) { + const { + authDeadline: _authDeadline, + authStart: _authStart, + ...restQuery + } = request.query; + assert( + { + authDeadline: _authDeadline, + }, + AuthSwapQueryParamsSchema + ); + const authDeadline = Number(_authDeadline ?? DEFAULT_AUTH_DEADLINE); + const authStart = Number(_authStart ?? 
sdk.utils.getCurrentTime()); + + if (authDeadline < Math.floor(Date.now() / 1000)) { + throw new InvalidParamError({ + message: "auth deadline must be a UNIX timestamp (seconds) in the future", + param: "authDeadline", + }); + } + + // `/swap` specific params validation + quote generation + const { + isInputNative, + isOutputNative, + inputToken, + outputToken, + amount, + amountType, + refundOnOrigin, + refundAddress, + recipient, + depositor, + slippageTolerance, + refundToken, + } = await handleBaseSwapQueryParams(restQuery); + + const crossSwapQuotes = await getCrossSwapQuotes( + { + amount, + inputToken, + outputToken, + depositor, + recipient: recipient || depositor, + slippageTolerance: Number(slippageTolerance), + type: amountType, + refundOnOrigin, + refundAddress, + isInputNative, + isOutputNative, + }, + quoteFetchStrategies + ); + // Build tx for auth + const crossSwapTxForAuth = await buildAuthTxPayload({ + crossSwapQuotes, + authDeadline, + authStart, + // FIXME: Calculate proper fees + submissionFees: { + amount: "0", + recipient: GAS_SPONSOR_ADDRESS, + }, + }); + + const balance = await getBalance({ + chainId: inputToken.chainId, + tokenAddress: inputToken.address, + owner: crossSwapQuotes.crossSwap.depositor, + }); + + const responseJson = buildBaseSwapResponseJson({ + inputTokenAddress: inputToken.address, + originChainId: inputToken.chainId, + permitSwapTx: crossSwapTxForAuth, + inputAmount: amount, + bridgeQuote: crossSwapQuotes.bridgeQuote, + originSwapQuote: crossSwapQuotes.originSwapQuote, + destinationSwapQuote: crossSwapQuotes.destinationSwapQuote, + refundToken, + balance, + // Allowance does not matter for auth-based flows + allowance: BigNumber.from(0), + }); + + return responseJson; +} diff --git a/api/swap/auth/_utils.ts b/api/swap/auth/_utils.ts new file mode 100644 index 000000000..81828dd40 --- /dev/null +++ b/api/swap/auth/_utils.ts @@ -0,0 +1,168 @@ +import { + CrossSwapQuotes, + DepositEntryPointContract, + OriginSwapEntryPointContract, +} from "../../_dexes/types"; +import { getReceiveWithAuthTypedData } from "../../_transfer-with-auth"; +import { + getDepositTypedData, + getSwapAndDepositTypedData, +} from "../../_spoke-pool-periphery"; +import { + extractDepositDataStruct, + extractSwapAndDepositDataStruct, +} from "../../_dexes/utils"; +import { BigNumberish, BytesLike, utils } from "ethers"; +import { SpokePoolV3PeripheryInterface } from "../../_typechain/SpokePoolV3Periphery"; + +export type AuthTxPayload = Awaited>; + +export async function buildAuthTxPayload({ + crossSwapQuotes, + authDeadline, + authStart = 0, + submissionFees, +}: { + crossSwapQuotes: CrossSwapQuotes; + authDeadline: number; + authStart?: number; + submissionFees?: { + amount: BigNumberish; + recipient: string; + }; +}) { + const { originSwapQuote, bridgeQuote, crossSwap, contracts } = + crossSwapQuotes; + const originChainId = crossSwap.inputToken.chainId; + const { originSwapEntryPoint, depositEntryPoint, originRouter } = contracts; + + let entryPointContract: + | DepositEntryPointContract + | OriginSwapEntryPointContract; + let getDepositTypedDataPromise: + | ReturnType + | ReturnType; + let methodNameAndArgsWithoutSignatures: + | { + methodName: "depositWithAuthorization"; + argsWithoutSignatures: { + signatureOwner: string; + depositData: SpokePoolV3PeripheryInterface.DepositDataStruct; + validAfter: BigNumberish; + validBefore: BigNumberish; + nonce: BytesLike; + }; + } + | { + methodName: "swapAndBridgeWithAuthorization"; + argsWithoutSignatures: { + signatureOwner: 
string; + swapAndDepositData: SpokePoolV3PeripheryInterface.SwapAndDepositDataStruct; + validAfter: BigNumberish; + validBefore: BigNumberish; + nonce: BytesLike; + }; + }; + + // random non-sequential nonce + const nonce = utils.hexlify(utils.randomBytes(32)); + + const validAfter = authStart; + const validBefore = authDeadline; + + if (originSwapQuote) { + if (!originSwapEntryPoint) { + throw new Error( + `'originSwapEntryPoint' needs to be defined for origin swap quotes` + ); + } + // Only SpokePoolPeriphery supports transfer with auth + if (originSwapEntryPoint.name !== "SpokePoolPeriphery") { + throw new Error( + `Transfer with auth is not supported for origin swap entry point contract '${originSwapEntryPoint.name}'` + ); + } + + if (!originRouter) { + throw new Error( + `'originRouter' needs to be defined for origin swap quotes` + ); + } + const swapAndDepositData = + await extractSwapAndDepositDataStruct(crossSwapQuotes); + entryPointContract = originSwapEntryPoint; + + getDepositTypedDataPromise = getSwapAndDepositTypedData({ + swapAndDepositData: swapAndDepositData, + chainId: originChainId, + }); + methodNameAndArgsWithoutSignatures = { + methodName: "swapAndBridgeWithAuthorization", + argsWithoutSignatures: { + signatureOwner: crossSwap.depositor, + swapAndDepositData, + validAfter, + validBefore, + nonce, + }, + }; + } else { + if (!depositEntryPoint) { + throw new Error( + `'depositEntryPoint' needs to be defined for bridge quotes` + ); + } + + if (depositEntryPoint.name !== "SpokePoolPeriphery") { + throw new Error( + `auth is not supported for deposit entry point contract '${depositEntryPoint.name}'` + ); + } + const depositDataStruct = await extractDepositDataStruct( + crossSwapQuotes, + submissionFees + ); + entryPointContract = depositEntryPoint; + getDepositTypedDataPromise = getDepositTypedData({ + depositData: depositDataStruct, + chainId: originChainId, + }); + methodNameAndArgsWithoutSignatures = { + methodName: "depositWithAuthorization", + argsWithoutSignatures: { + signatureOwner: crossSwap.depositor, + depositData: depositDataStruct, + validAfter, + validBefore, + nonce, + }, + }; + } + + const [authTypedData, depositTypedData] = await Promise.all([ + getReceiveWithAuthTypedData({ + tokenAddress: + originSwapQuote?.tokenIn.address || bridgeQuote.inputToken.address, + chainId: originChainId, + ownerAddress: crossSwap.depositor, + spenderAddress: entryPointContract.address, + value: originSwapQuote?.maximumAmountIn || bridgeQuote.inputAmount, + nonce, + validAfter, + validBefore, + }), + getDepositTypedDataPromise, + ]); + + return { + eip712: { + permit: authTypedData.eip712, + deposit: depositTypedData.eip712, + }, + swapTx: { + chainId: originChainId, + to: entryPointContract.address, + ...methodNameAndArgsWithoutSignatures, + }, + }; +} diff --git a/api/swap/auth/index.ts b/api/swap/auth/index.ts new file mode 100644 index 000000000..a7a213f8c --- /dev/null +++ b/api/swap/auth/index.ts @@ -0,0 +1,33 @@ +import { VercelResponse } from "@vercel/node"; + +import { TypedVercelRequest } from "../../_types"; +import { getLogger, handleErrorCondition } from "../../_utils"; +import { BaseSwapQueryParams } from "../_utils"; + +import { handleAuthSwap, AuthSwapQueryParams } from "./_service"; + +const handler = async ( + request: TypedVercelRequest<BaseSwapQueryParams & AuthSwapQueryParams>, + response: VercelResponse +) => { + const logger = getLogger(); + logger.debug({ + at: "Swap/auth", + message: "Query data", + query: request.query, + }); + try { + const responseJson = await handleAuthSwap(request);
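+    // Flow sketch (assumes `buildBaseSwapResponseJson`, defined outside this diff, surfaces the +    // payload assembled by `buildAuthTxPayload`): the caller signs the two EIP-712 objects +    // (`eip712.permit`, `eip712.deposit`) and submits them together with `swapTx` +    // (`methodName`, `argsWithoutSignatures`) to `/api/relay`, which validates the body +    // against `BaseRelayRequestBodySchema` before queueing the relay. + 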
logger.debug({ + at: "Swap/auth", + message: "Response data", + responseJson, + }); + response.status(200).json(responseJson); + } catch (error: unknown) { + return handleErrorCondition("swap/auth", response, logger, error); + } +}; + +export default handler; diff --git a/api/swap/index.ts b/api/swap/index.ts index f23a4070a..da8c9905b 100644 --- a/api/swap/index.ts +++ b/api/swap/index.ts @@ -4,10 +4,16 @@ import { TypedVercelRequest } from "../_types"; import { getLogger, handleErrorCondition } from "../_utils"; import { handleBaseSwapQueryParams, BaseSwapQueryParams } from "./_utils"; import { handleApprovalSwap } from "./approval/_service"; +import { handlePermitSwap } from "./permit/_service"; +import { handleAuthSwap } from "./auth/_service"; +import { getPermitArgsFromContract } from "../_permit"; +import { getReceiveWithAuthArgsFromContract } from "../_transfer-with-auth"; -type SwapFlowType = "approval"; +type SwapFlowType = "permit" | "transfer-with-auth" | "approval"; const swapFlowTypeToHandler = { + permit: handlePermitSwap, + "transfer-with-auth": handleAuthSwap, approval: handleApprovalSwap, }; @@ -23,10 +29,31 @@ export default async function handler( }); try { // `/swap` only validate shared base params - await handleBaseSwapQueryParams(request.query); + const { inputToken, depositor, amount, recipient } = + await handleBaseSwapQueryParams(request.query); - // TODO: Enable other swap flow types in the future - const swapFlowType = "approval"; + // Determine swap flow by checking if required args and methods are supported + let swapFlowType: SwapFlowType; + const args = { + tokenAddress: inputToken.address, + chainId: inputToken.chainId, + ownerAddress: depositor, + spenderAddress: recipient || depositor, + value: amount, + }; + const [permitArgsResult, transferWithAuthArgsResult] = + await Promise.allSettled([ + getPermitArgsFromContract(args), + getReceiveWithAuthArgsFromContract(args), + ]); + + if (permitArgsResult.status === "fulfilled") { + swapFlowType = "permit"; + } else if (transferWithAuthArgsResult.status === "fulfilled") { + swapFlowType = "transfer-with-auth"; + } else { + swapFlowType = "approval"; + } const handler = swapFlowTypeToHandler[swapFlowType as SwapFlowType]; const responseJson = await handler(request); diff --git a/api/swap/permit/_service.ts b/api/swap/permit/_service.ts new file mode 100644 index 000000000..0b7168fe1 --- /dev/null +++ b/api/swap/permit/_service.ts @@ -0,0 +1,124 @@ +import { BigNumber } from "ethers"; +import { assert, Infer, optional, type } from "superstruct"; + +import { TypedVercelRequest } from "../../_types"; +import { getLogger, positiveIntStr } from "../../_utils"; +import { getCrossSwapQuotes } from "../../_dexes/cross-swap-service"; +import { + handleBaseSwapQueryParams, + BaseSwapQueryParams, + buildBaseSwapResponseJson, +} from "../_utils"; +import { getSwapRouter02Strategy } from "../../_dexes/uniswap/swap-router-02"; +import { InvalidParamError } from "../../_errors"; +import { buildPermitTxPayload } from "./_utils"; +import { QuoteFetchStrategies } from "../../_dexes/utils"; +import { GAS_SPONSOR_ADDRESS } from "../../relay/_utils"; +import { getBalance } from "../../_erc20"; + +export const PermitSwapQueryParamsSchema = type({ + permitDeadline: optional(positiveIntStr()), +}); + +export type PermitSwapQueryParams = Infer; + +const DEFAULT_PERMIT_DEADLINE = + Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 365; // 1 year + +// For permit-based flows, we have to use the `SpokePoolPeriphery` as an entry point +const 
quoteFetchStrategies: QuoteFetchStrategies = { + default: getSwapRouter02Strategy("SpokePoolPeriphery"), +}; + +export async function handlePermitSwap( + request: TypedVercelRequest +) { + const logger = getLogger(); + logger.debug({ + at: "Swap/permit", + message: "Query data", + query: request.query, + }); + // `/swap/permit` specific params validation + const { permitDeadline: _permitDeadline, ...restQuery } = request.query; + assert( + { + permitDeadline: _permitDeadline, + }, + PermitSwapQueryParamsSchema + ); + const permitDeadline = Number(_permitDeadline ?? DEFAULT_PERMIT_DEADLINE); + + if (permitDeadline < Math.floor(Date.now() / 1000)) { + throw new InvalidParamError({ + message: + "Permit deadline must be a UNIX timestamp (seconds) in the future", + param: "permitDeadline", + }); + } + + // `/swap` specific params validation + quote generation + const { + isInputNative, + isOutputNative, + inputToken, + outputToken, + amount, + amountType, + refundOnOrigin, + refundAddress, + recipient, + depositor, + slippageTolerance, + refundToken, + } = await handleBaseSwapQueryParams(restQuery); + + const crossSwapQuotes = await getCrossSwapQuotes( + { + amount, + inputToken, + outputToken, + depositor, + recipient: recipient || depositor, + slippageTolerance: Number(slippageTolerance), + type: amountType, + refundOnOrigin, + refundAddress, + isInputNative, + isOutputNative, + }, + quoteFetchStrategies + ); + // Build tx for permit + const crossSwapTxForPermit = await buildPermitTxPayload({ + crossSwapQuotes, + permitDeadline, + // FIXME: Calculate proper fees + submissionFees: { + amount: "0", + recipient: GAS_SPONSOR_ADDRESS, + }, + }); + + const balance = await getBalance({ + chainId: inputToken.chainId, + tokenAddress: inputToken.address, + owner: crossSwapQuotes.crossSwap.depositor, + }); + + const responseJson = buildBaseSwapResponseJson({ + inputTokenAddress: inputToken.address, + originChainId: inputToken.chainId, + permitSwapTx: crossSwapTxForPermit, + inputAmount: amount, + bridgeQuote: crossSwapQuotes.bridgeQuote, + originSwapQuote: crossSwapQuotes.originSwapQuote, + destinationSwapQuote: crossSwapQuotes.destinationSwapQuote, + refundToken, + balance, + // Allowance does not matter for permit-based flows + allowance: BigNumber.from(0), + }); + + return responseJson; +} diff --git a/api/swap/permit/_utils.ts b/api/swap/permit/_utils.ts new file mode 100644 index 000000000..f42fa81b1 --- /dev/null +++ b/api/swap/permit/_utils.ts @@ -0,0 +1,152 @@ +import { BigNumberish } from "ethers"; + +import { + CrossSwapQuotes, + DepositEntryPointContract, + OriginSwapEntryPointContract, +} from "../../_dexes/types"; +import { getPermitTypedData } from "../../_permit"; +import { + getDepositTypedData, + getSwapAndDepositTypedData, +} from "../../_spoke-pool-periphery"; +import { + extractDepositDataStruct, + extractSwapAndDepositDataStruct, +} from "../../_dexes/utils"; +import { SpokePoolV3PeripheryInterface } from "../../_typechain/SpokePoolV3Periphery"; + +export type PermitTxPayload = Awaited>; + +export async function buildPermitTxPayload({ + crossSwapQuotes, + permitDeadline, + submissionFees, +}: { + crossSwapQuotes: CrossSwapQuotes; + permitDeadline: number; + submissionFees?: { + amount: BigNumberish; + recipient: string; + }; +}) { + const { originSwapQuote, bridgeQuote, crossSwap, contracts } = + crossSwapQuotes; + const originChainId = crossSwap.inputToken.chainId; + const { originSwapEntryPoint, depositEntryPoint, originRouter } = contracts; + + let entryPointContract: + | 
DepositEntryPointContract + | OriginSwapEntryPointContract; + let getDepositTypedDataPromise: + | ReturnType + | ReturnType; + let methodNameAndArgsWithoutSignatures: + | { + methodName: "depositWithPermit"; + argsWithoutSignatures: { + signatureOwner: string; + depositData: SpokePoolV3PeripheryInterface.DepositDataStruct; + deadline: BigNumberish; + }; + } + | { + methodName: "swapAndBridgeWithPermit"; + argsWithoutSignatures: { + signatureOwner: string; + swapAndDepositData: SpokePoolV3PeripheryInterface.SwapAndDepositDataStruct; + deadline: BigNumberish; + }; + }; + + if (originSwapQuote) { + if (!originSwapEntryPoint) { + throw new Error( + `'originSwapEntryPoint' needs to be defined for origin swap quotes` + ); + } + // Only SpokePoolPeriphery supports permit + if (originSwapEntryPoint.name !== "SpokePoolPeriphery") { + throw new Error( + `Permit is not supported for origin swap entry point contract '${originSwapEntryPoint.name}'` + ); + } + + if (!originRouter) { + throw new Error( + `'originRouter' needs to be defined for origin swap quotes` + ); + } + + const swapAndDepositData = + await extractSwapAndDepositDataStruct(crossSwapQuotes); + entryPointContract = originSwapEntryPoint; + getDepositTypedDataPromise = getSwapAndDepositTypedData({ + swapAndDepositData: swapAndDepositData, + chainId: originChainId, + }); + methodNameAndArgsWithoutSignatures = { + methodName: "swapAndBridgeWithPermit", + argsWithoutSignatures: { + signatureOwner: crossSwap.depositor, + swapAndDepositData, + deadline: permitDeadline, + }, + }; + } else { + if (!depositEntryPoint) { + throw new Error( + `'depositEntryPoint' needs to be defined for bridge quotes` + ); + } + + if (depositEntryPoint.name !== "SpokePoolPeriphery") { + throw new Error( + `Permit is not supported for deposit entry point contract '${depositEntryPoint.name}'` + ); + } + const depositDataStruct = await extractDepositDataStruct( + crossSwapQuotes, + submissionFees + ); + entryPointContract = depositEntryPoint; + getDepositTypedDataPromise = getDepositTypedData({ + depositData: depositDataStruct, + chainId: originChainId, + }); + methodNameAndArgsWithoutSignatures = { + methodName: "depositWithPermit", + argsWithoutSignatures: { + signatureOwner: crossSwap.depositor, + depositData: depositDataStruct, + deadline: permitDeadline, + }, + }; + } + + const [permitTypedData, depositTypedData] = await Promise.all([ + getPermitTypedData({ + tokenAddress: + originSwapQuote?.tokenIn.address || bridgeQuote.inputToken.address, + chainId: originChainId, + ownerAddress: crossSwap.depositor, + spenderAddress: entryPointContract.address, + value: originSwapQuote?.maximumAmountIn || bridgeQuote.inputAmount, + deadline: permitDeadline, + }), + getDepositTypedDataPromise, + ]); + return { + eip712: { + permit: permitTypedData.eip712, + deposit: depositTypedData.eip712, + }, + swapTx: { + chainId: originChainId, + to: entryPointContract.address, + methodName: methodNameAndArgsWithoutSignatures.methodName, + argsWithoutSignatures: + methodNameAndArgsWithoutSignatures.argsWithoutSignatures, + }, + }; +} diff --git a/api/swap/permit/index.ts b/api/swap/permit/index.ts new file mode 100644 index 000000000..127eb3132 --- /dev/null +++ b/api/swap/permit/index.ts @@ -0,0 +1,39 @@ +import { VercelResponse } from "@vercel/node"; +import { Infer, optional, type } from "superstruct"; + +import { TypedVercelRequest } from "../../_types"; +import { getLogger, handleErrorCondition, positiveIntStr } from "../../_utils"; +import { BaseSwapQueryParams } from "../_utils"; 
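+ +// Flow sketch (assumption; the client side and `buildBaseSwapResponseJson` live outside this diff): +// `handlePermitSwap` returns EIP-712 payloads for the ERC-2612 permit and the periphery deposit; +// the caller signs both and submits them to `/api/relay` with `methodName` set to +// `depositWithPermit` or `swapAndBridgeWithPermit` (see api/relay/index.ts).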
+import { handlePermitSwap } from "./_service"; + +export const PermitSwapQueryParamsSchema = type({ + permitDeadline: optional(positiveIntStr()), +}); + +export type PermitSwapQueryParams = Infer; + +const handler = async ( + request: TypedVercelRequest, + response: VercelResponse +) => { + const logger = getLogger(); + logger.debug({ + at: "Swap/permit", + message: "Query data", + query: request.query, + }); + try { + const responseJson = await handlePermitSwap(request); + + logger.debug({ + at: "Swap/permit", + message: "Response data", + responseJson, + }); + response.status(200).json(responseJson); + } catch (error: unknown) { + return handleErrorCondition("swap/permit", response, logger, error); + } +}; + +export default handler; diff --git a/vercel.json b/vercel.json index 863e71739..2526fcdaa 100644 --- a/vercel.json +++ b/vercel.json @@ -38,6 +38,9 @@ }, "api/cron-ping-endpoints.ts": { "maxDuration": 90 + }, + "api/relay/jobs/process.ts": { + "maxDuration": 90 } }, "rewrites": [