@@ -79,6 +79,7 @@ export function generateProtocolAdaptorsList2({ allImports, config, adapterType,

const methodology = getMethodologyDataByBaseAdapter(moduleObject, adapterType, infoItem.category)
if (methodology) infoItem.methodology = methodology
if (moduleObject.breakdownMethodology) infoItem.breakdownMethodology = moduleObject.breakdownMethodology
if (childProtocols.length > 0) infoItem.childProtocols = childProtocols

response.push(infoItem)
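For reviewers, here is a minimal sketch of what the new `breakdownMethodology` pass-through carries, assuming the `IJSON<IJSON<string>>` shape declared in `types.ts` further down; the keys and descriptions are invented for illustration only.

```ts
// Hypothetical shape only: the outer keys stand in for dimension record types and
// the inner keys are free-form breakdown labels, per IJSON<IJSON<string>> below.
const breakdownMethodology: Record<string, Record<string, string>> = {
  Fees: {
    "Swap fees": "Fees paid by traders on each swap",
    "Liquidation fees": "Penalties collected when positions are liquidated",
  },
  Revenue: {
    "Protocol share": "Portion of swap fees routed to the treasury",
  },
};

// The change above simply copies the field through to the response item when present.
const infoItem: { breakdownMethodology?: Record<string, Record<string, string>> } = {};
if (breakdownMethodology) infoItem.breakdownMethodology = breakdownMethodology;
```
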
1 change: 1 addition & 0 deletions defi/src/adaptors/data/index.ts
@@ -10,6 +10,7 @@ import { baseIconsUrl } from "../../constants";
let dimensionsConfig: any
getDimensionsConfig()

// TODO: reduce the places this is called to improve performance
export const importModule = (adaptorType: AdapterType) => async (mod: string) => {
const { default: module } = await import('@defillama/dimension-adapters/' + dimensionsConfig[adaptorType].imports[mod].moduleFilePath)
setModuleDefaults(module)
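The TODO above flags that `importModule` is invoked from many call sites. One possible direction, sketched under the assumption that caching the dynamic-import promise is safe; this is not the repo's actual implementation.

```ts
type ModuleLoader = (mod: string) => Promise<any>;

// Wrap a loader in a per-module promise cache so each adapter file is
// dynamically imported at most once per process, even with many callers.
function memoizeLoader(loader: ModuleLoader): ModuleLoader {
  const cache = new Map<string, Promise<any>>();
  return (mod: string) => {
    if (!cache.has(mod)) cache.set(mod, loader(mod));
    return cache.get(mod)!;
  };
}

// Usage sketch: const load = memoizeLoader(importModule(adaptorType));
```
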
92 changes: 88 additions & 4 deletions defi/src/adaptors/data/types.ts
@@ -8,13 +8,13 @@ export interface ICleanRecordsConfig {
export type ChartBreakdownOptions = 'daily' | 'weekly' | 'monthly'

export type ProtocolDimensionsExtraConfig = {
defaultChartView?: ChartBreakdownOptions;
adapter: string;
genuineSpikes?: string[] // list of unix timestamps with valid spikes,
defaultChartView?: ChartBreakdownOptions;
adapter: string;
genuineSpikes?: string[] // list of unix timestamps with valid spikes,
}

export type DimensionsConfig = {
[K in AdapterType]?: string | ProtocolDimensionsExtraConfig;
[K in AdapterType]?: string | ProtocolDimensionsExtraConfig;
}
export interface ProtocolAdaptor extends Protocol {
defillamaId: string
@@ -27,11 +27,14 @@ export interface ProtocolAdaptor extends Protocol {
adapterType?: ProtocolType
methodologyURL: string
methodology?: string | IJSON<string> | any
breakdownMethodology?: IJSON<IJSON<string>> | any
allAddresses?: Array<string>
startFrom?: number
childProtocols?: Array<ProtocolAdaptor>
doublecounted?: boolean,
isDead?: boolean,
childMethodologies?: IJSON<any> // applicable only for parent protocols, key is child protocol id
childBreakdownMethodologies?: IJSON<IJSON<IJSON<string>> | any> // applicable only for parent protocols, key is child protocol id
}

export interface IConfig {
@@ -191,3 +194,84 @@ export const AdaptorRecordTypeMap = Object.entries(AdaptorRecordType).reduce((ac
export const AdaptorRecordTypeMapReverse = Object.entries(AdaptorRecordType).reduce((acc, [key, value]) => ({ ...acc, [value]: key }), {} as IJSON<string>)

export const ADAPTER_TYPES = Object.values(AdapterType).filter((adapterType: any) => adapterType !== AdapterType.PROTOCOLS)

export type DimensionsDataRecord = {
value: number,
chains: IJSON<number>
labelBreakdown?: IJSON<number> // not stored in the db; populated by the transform function when reading records from the db
}

export type DimensionsDataRecordMap = Partial<Record<AdaptorRecordType, DimensionsDataRecord>>

export type DIMENSIONS_DB_RECORD = {
id: string,
timestamp: number,
timeS: string,
type: AdapterType,
data: {
aggregated: DimensionsDataRecordMap,
},
bl?: Partial<Record<AdaptorRecordType, IJSON<number>>>
}


export type PROTOCOL_SUMMARY = {
records: IJSON<{
aggObject: DimensionsDataRecordMap,
}>, // key is timeS
aggregatedRecords: {
yearly: IJSON<DimensionsDataRecordMap>, // the per-chain breakdown is probably not needed here and is ignored
quarterly: IJSON<DimensionsDataRecordMap>,
monthly: IJSON<DimensionsDataRecordMap>,
},
info: Protocol,
dataTypes: Set<AdaptorRecordType>, // set of all record types present in records
misc?: IJSON<any>, // not currently used
summaries: Partial<Record<AdaptorRecordType, RecordSummary>>,
}

export type DIMENSIONS_ADAPTER_CACHE = {
lastUpdated: number, // cached
protocols: { // cached
[id: string]: {
records: {
[timeS: string]: {
timestamp: number,
aggObject: DimensionsDataRecordMap,
},
},
}
},
protocolSummaries?: IJSON<PROTOCOL_SUMMARY>, // key is protocol id
parentProtocolSummaries?: IJSON<PROTOCOL_SUMMARY>, // key is parent protocol id
summaries?: Partial<Record<AdaptorRecordType, RecordSummary>>,
allChains?: string[]
}


export type RecordSummary = {
total24h: number
total48hto24h: number
chart: IJSON<number>
chartBreakdown: IJSON<IJSON<number>>
earliestTimestamp?: number
chainSummary?: IJSON<RecordSummary>
total7d?: number
total30d?: number
total14dto7d?: number
total60dto30d?: number
total1y?: number
recordCount: number
}

export type ProtocolSummary = RecordSummary & {
change_1d?: number
change_7d?: number
change_1m?: number
change_7dover7d?: number
average1y?: number
monthlyAverage1y?: number
totalAllTime?: number
breakdown24h?: any
breakdown30d?: any
}
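
The rolling-window fields on `RecordSummary`/`ProtocolSummary` imply derived percentages; for example `change_1d` would plausibly compare `total24h` against `total48hto24h`. A small illustrative helper follows; the formula is an assumption, not code from this PR.

```ts
// Assumed percentage-change helper for the rolling-window totals above.
function pctChange(current: number, previous: number): number | undefined {
  if (!previous) return undefined; // skip brand-new listings with no prior window
  return ((current - previous) / previous) * 100;
}

const summary = { total24h: 120_000, total48hto24h: 100_000 };
const change_1d = pctChange(summary.total24h, summary.total48hto24h); // 20
```
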
26 changes: 13 additions & 13 deletions defi/src/adaptors/db-utils/db2.ts
@@ -1,12 +1,12 @@
import { AdapterType } from "@defillama/dimension-adapters/adapters/types"
import * as sdk from "@defillama/sdk"
import { sliceIntoChunks } from "@defillama/sdk/build/util"
import { Op, } from "sequelize"
import { initializeTVLCacheDB } from "../../api2/db"
import { Tables } from "../../api2/db/tables"
import dynamodb from "../../utils/shared/dynamodb"
import { initializeTVLCacheDB } from "../../api2/db"
import { AdapterRecord2 } from "./AdapterRecord2"
import { AdapterType } from "@defillama/dimension-adapters/adapters/types"
import { Op, } from "sequelize"
import { sliceIntoChunks } from "@defillama/sdk/build/util"
import { IJSON } from "../data/types"
import { AdapterRecord2 } from "./AdapterRecord2"

let isInitialized: any

@@ -67,7 +67,7 @@ export async function storeAdapterRecordBulk(records: AdapterRecord2[]) {
}

await Tables.DIMENSIONS_DATA.bulkCreate(pgItems, {
updateOnDuplicate: ['timestamp', 'data', 'type', 'bl', 'blc']
updateOnDuplicate: ['timestamp', 'data', 'type', 'bl']
});

async function writeChunkToDDB(chunk: any, retriesLeft = 3) {
@@ -87,7 +87,7 @@ export async function storeAdapterRecordBulk(records: AdapterRecord2[]) {
}
}

export async function getAllItemsUpdatedAfter({ adapterType, timestamp }: { adapterType: AdapterType, timestamp: number }) {
export async function getAllItemsUpdatedAfter({ adapterType, timestamp, transform = a => a }: { adapterType: AdapterType, timestamp: number, transform?: (a: any) => any }) {
await init()
if (timestamp < 946684800) timestamp = 946684800 // 2000-01-01

@@ -101,14 +101,14 @@ export async function getAllItemsUpdatedAfter({ adapterType, timestamp }: { adap
while (true) {
const batch: any = await Tables.DIMENSIONS_DATA.findAll({
where: { type: adapterType, updatedat: { [Op.gte]: timestamp * 1000 } },
attributes: ['data', 'timestamp', 'id', 'timeS'],
attributes: ['data', 'timestamp', 'id', 'timeS', 'bl'],
raw: true,
order: [['timestamp', 'ASC']],
offset,
limit,
})

result = result.concat(batch)
result = result.concat(batch.map(transform))
// sdk.log(`getAllItemsUpdatedAfter(${adapterType}) found ${batch.length} total fetched: ${result.length} items updated after ${new Date(timestamp * 1000)}`)
if (batch.length < limit) break
offset += limit
@@ -120,7 +120,7 @@ export async function getAllItemsUpdatedAfter({ adapterType, timestamp }: { adap
}


export async function getAllItemsAfter({ adapterType, timestamp = 0 }: { adapterType: AdapterType, timestamp?: number }) {
export async function getAllItemsAfter({ adapterType, timestamp = 0, transform = a => a }: { adapterType: AdapterType, timestamp?: number, transform?: (a: any) => any }) {
await init()
if (timestamp < 946684800) timestamp = 946684800 // 2000-01-01
const filterCondition: any = { timestamp: { [Op.gte]: timestamp } }
@@ -135,14 +135,14 @@ export async function getAllItemsAfter({ adapterType, timestamp = 0 }: { adapter
while (true) {
const batch: any = await Tables.DIMENSIONS_DATA.findAll({
where: filterCondition,
attributes: ['data', 'timestamp', 'id', 'timeS'],
attributes: ['data', 'timestamp', 'id', 'timeS', 'bl'],
raw: true,
order: [['timestamp', 'ASC']],
offset,
limit,
})

result = result.concat(batch)
result = result.concat(batch.map(transform))
sdk.log(`getAllItemsAfter(${adapterType}, ${timestamp}) found ${batch.length} total fetched: ${result.length} items after ${new Date(timestamp * 1000)}`)
if (batch.length < limit) break
offset += limit
@@ -178,4 +178,4 @@ export async function getAllDimensionsRecordsTimeS({ adapterType, id, timestamp
})

return result
}
}
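
A hedged sketch of how the new `transform` hook and the now-selected `bl` column might be used together, mirroring the `labelBreakdown` comment in `types.ts`; the merge logic and the raw-row field names are assumptions, not the actual caller.

```ts
// Example raw row shape as returned by findAll({ raw: true }) with the added 'bl' attribute.
type RawRow = {
  id: string;
  timeS: string;
  timestamp: number;
  data: {
    aggregated: Record<string, {
      value: number;
      chains: Record<string, number>;
      labelBreakdown?: Record<string, number>;
    }>;
  };
  bl?: Record<string, Record<string, number>>;
};

// Copy each record type's label totals onto its aggregated record at read time.
const liftLabelBreakdown = (row: RawRow): RawRow => {
  for (const [recordType, labels] of Object.entries(row.bl ?? {})) {
    const agg = row.data?.aggregated?.[recordType];
    if (agg) agg.labelBreakdown = labels;
  }
  return row;
};

// Usage sketch: getAllItemsAfter({ adapterType, transform: liftLabelBreakdown })
```
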
41 changes: 0 additions & 41 deletions defi/src/adaptors/handlers/helpers/processEventParameters.ts

This file was deleted.

1 change: 0 additions & 1 deletion defi/src/api2/cache/index.ts
@@ -11,7 +11,6 @@ import { Protocol } from "../../protocols/types";
import { shuffleArray } from "../../utils/shared/shuffleArray";
import PromisePool from "@supercharge/promise-pool";
import { getProtocolAllTvlData } from "../utils/cachedFunctions";
// import { getDimensionsCacheV2, } from "../utils/dimensionsUtils";
import { getTwitterOverviewFileV2 } from "../../../dev-metrics/utils/r2";
import { RUN_TYPE } from "../utils";
import { updateProtocolMetadataUsingCache } from "../../protocols/data";