@putkoff/abstract-solcatcher 0.0.9

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (210)
  1. package/dist/cjs/cli.d.ts +2 -0
  2. package/dist/cjs/cli.d.ts.map +1 -0
  3. package/dist/cjs/index.d.ts +47 -0
  4. package/dist/cjs/index.d.ts.map +1 -0
  5. package/dist/cjs/index.js +156807 -0
  6. package/dist/cjs/index.js.map +1 -0
  7. package/dist/cjs/runtime.d.ts +4 -0
  8. package/dist/cjs/runtime.d.ts.map +1 -0
  9. package/dist/cjs/server.d.ts +729 -0
  10. package/dist/cjs/server.d.ts.map +1 -0
  11. package/dist/cjs/types/cli.d.ts +2 -0
  12. package/dist/cjs/types/config/defaultConfig.d.ts +2 -0
  13. package/dist/cjs/types/config/loadConfig.d.ts +2 -0
  14. package/dist/cjs/types/config/types.d.ts +4 -0
  15. package/dist/cjs/types/env/index.d.ts +38 -0
  16. package/dist/cjs/types/env/loadEnv.d.ts +2 -0
  17. package/dist/cjs/types/env/postgres.d.ts +8 -0
  18. package/dist/cjs/types/env/queues.d.ts +12 -0
  19. package/dist/cjs/types/env/rabbit.d.ts +8 -0
  20. package/dist/cjs/types/env/solana.d.ts +8 -0
  21. package/dist/cjs/types/index.d.ts +1 -0
  22. package/dist/cjs/types/main.d.ts +1 -0
  23. package/dist/cjs/types/runtime/bootstrap.d.ts +1 -0
  24. package/dist/cjs/types/runtime/start.d.ts +1 -0
  25. package/dist/cjs/types/server/db/connections.d.ts +21 -0
  26. package/dist/cjs/types/server/db/env_imports.d.ts +34 -0
  27. package/dist/cjs/types/server/db/index.d.ts +4 -0
  28. package/dist/cjs/types/server/db/init_imports.d.ts +18 -0
  29. package/dist/cjs/types/server/db/module_imports.d.ts +1 -0
  30. package/dist/cjs/types/server/db/tokenUtils/index.d.ts +1 -0
  31. package/dist/cjs/types/server/db/tokenUtils/pubKeyUtils.d.ts +10 -0
  32. package/dist/cjs/types/server/index.d.ts +4 -0
  33. package/dist/cjs/types/server/rabbitMq/core/concurrency.d.ts +2 -0
  34. package/dist/cjs/types/server/rabbitMq/core/consumer.d.ts +1 -0
  35. package/dist/cjs/types/server/rabbitMq/core/index.d.ts +5 -0
  36. package/dist/cjs/types/server/rabbitMq/core/publisher.d.ts +1 -0
  37. package/dist/cjs/types/server/rabbitMq/core/rabbitManager.d.ts +7 -0
  38. package/dist/cjs/types/server/rabbitMq/core/websocketServer.d.ts +6 -0
  39. package/dist/cjs/types/server/rabbitMq/index.d.ts +5 -0
  40. package/dist/cjs/types/server/rabbitMq/main/index.d.ts +1 -0
  41. package/dist/cjs/types/server/rabbitMq/main/services.d.ts +1 -0
  42. package/dist/cjs/types/server/rabbitMq/processors/get_pair_data.d.ts +62 -0
  43. package/dist/cjs/types/server/rabbitMq/processors/index.d.ts +5 -0
  44. package/dist/cjs/types/server/rabbitMq/processors/logParser.d.ts +18 -0
  45. package/dist/cjs/types/server/rabbitMq/processors/make_meta_list.d.ts +51 -0
  46. package/dist/cjs/types/server/rabbitMq/processors/metaDataProcessor.d.ts +70 -0
  47. package/dist/cjs/types/server/rabbitMq/processors/processLogs.d.ts +14 -0
  48. package/dist/cjs/types/server/rabbitMq/producers/index.d.ts +4 -0
  49. package/dist/cjs/types/server/rabbitMq/solana/index.d.ts +2 -0
  50. package/dist/cjs/types/server/rabbitMq/solana/pumpFunMonitor.d.ts +1 -0
  51. package/dist/cjs/types/server/rabbitMq/solana/solanaLogMonitor.d.ts +8 -0
  52. package/dist/cjs/types/server/schemas/accounts/index.d.ts +18 -0
  53. package/dist/cjs/types/server/schemas/getTransactions/index.d.ts +13 -0
  54. package/dist/cjs/types/server/schemas/index.d.ts +7 -0
  55. package/dist/cjs/types/server/schemas/logs/index.d.ts +41 -0
  56. package/dist/cjs/types/server/schemas/main/index.d.ts +4 -0
  57. package/dist/cjs/types/server/schemas/metaData/index.d.ts +97 -0
  58. package/dist/cjs/types/server/schemas/pairs/index.d.ts +137 -0
  59. package/dist/cjs/types/server/schemas/transactions/index.d.ts +19 -0
  60. package/dist/cjs/types/server/utils/envUtils.d.ts +7 -0
  61. package/dist/cjs/types/server/utils/index.d.ts +7 -0
  62. package/dist/cjs/types/server/utils/jsonUtils.d.ts +10 -0
  63. package/dist/cjs/types/server/utils/networkUtils.d.ts +3 -0
  64. package/dist/cjs/types/server/utils/pathUtils.d.ts +11 -0
  65. package/dist/cjs/types/server/utils/rpcUtils.d.ts +54 -0
  66. package/dist/cjs/types/shared/config.d.ts +1 -0
  67. package/dist/cjs/types/shared/constants.d.ts +4 -0
  68. package/dist/cjs/types/shared/env_imports.d.ts +35 -0
  69. package/dist/cjs/types/shared/index.d.ts +5 -0
  70. package/dist/cjs/types/shared/init_imports.d.ts +1 -0
  71. package/dist/cjs/types/shared/module_imports.d.ts +1 -0
  72. package/dist/esm/cli.d.ts +2 -0
  73. package/dist/esm/cli.d.ts.map +1 -0
  74. package/dist/esm/index.d.ts +47 -0
  75. package/dist/esm/index.d.ts.map +1 -0
  76. package/dist/esm/index.js +156679 -0
  77. package/dist/esm/index.js.map +1 -0
  78. package/dist/esm/runtime.d.ts +4 -0
  79. package/dist/esm/runtime.d.ts.map +1 -0
  80. package/dist/esm/server.d.ts +729 -0
  81. package/dist/esm/server.d.ts.map +1 -0
  82. package/dist/esm/types/cli.d.ts +2 -0
  83. package/dist/esm/types/config/defaultConfig.d.ts +2 -0
  84. package/dist/esm/types/config/loadConfig.d.ts +2 -0
  85. package/dist/esm/types/config/types.d.ts +4 -0
  86. package/dist/esm/types/env/index.d.ts +38 -0
  87. package/dist/esm/types/env/loadEnv.d.ts +2 -0
  88. package/dist/esm/types/env/postgres.d.ts +8 -0
  89. package/dist/esm/types/env/queues.d.ts +12 -0
  90. package/dist/esm/types/env/rabbit.d.ts +8 -0
  91. package/dist/esm/types/env/solana.d.ts +8 -0
  92. package/dist/esm/types/index.d.ts +1 -0
  93. package/dist/esm/types/main.d.ts +1 -0
  94. package/dist/esm/types/runtime/bootstrap.d.ts +1 -0
  95. package/dist/esm/types/runtime/start.d.ts +1 -0
  96. package/dist/esm/types/server/db/connections.d.ts +21 -0
  97. package/dist/esm/types/server/db/env_imports.d.ts +34 -0
  98. package/dist/esm/types/server/db/index.d.ts +4 -0
  99. package/dist/esm/types/server/db/init_imports.d.ts +18 -0
  100. package/dist/esm/types/server/db/module_imports.d.ts +1 -0
  101. package/dist/esm/types/server/db/tokenUtils/index.d.ts +1 -0
  102. package/dist/esm/types/server/db/tokenUtils/pubKeyUtils.d.ts +10 -0
  103. package/dist/esm/types/server/index.d.ts +4 -0
  104. package/dist/esm/types/server/rabbitMq/core/concurrency.d.ts +2 -0
  105. package/dist/esm/types/server/rabbitMq/core/consumer.d.ts +1 -0
  106. package/dist/esm/types/server/rabbitMq/core/index.d.ts +5 -0
  107. package/dist/esm/types/server/rabbitMq/core/publisher.d.ts +1 -0
  108. package/dist/esm/types/server/rabbitMq/core/rabbitManager.d.ts +7 -0
  109. package/dist/esm/types/server/rabbitMq/core/websocketServer.d.ts +6 -0
  110. package/dist/esm/types/server/rabbitMq/index.d.ts +5 -0
  111. package/dist/esm/types/server/rabbitMq/main/index.d.ts +1 -0
  112. package/dist/esm/types/server/rabbitMq/main/services.d.ts +1 -0
  113. package/dist/esm/types/server/rabbitMq/processors/get_pair_data.d.ts +62 -0
  114. package/dist/esm/types/server/rabbitMq/processors/index.d.ts +5 -0
  115. package/dist/esm/types/server/rabbitMq/processors/logParser.d.ts +18 -0
  116. package/dist/esm/types/server/rabbitMq/processors/make_meta_list.d.ts +51 -0
  117. package/dist/esm/types/server/rabbitMq/processors/metaDataProcessor.d.ts +70 -0
  118. package/dist/esm/types/server/rabbitMq/processors/processLogs.d.ts +14 -0
  119. package/dist/esm/types/server/rabbitMq/producers/index.d.ts +4 -0
  120. package/dist/esm/types/server/rabbitMq/solana/index.d.ts +2 -0
  121. package/dist/esm/types/server/rabbitMq/solana/pumpFunMonitor.d.ts +1 -0
  122. package/dist/esm/types/server/rabbitMq/solana/solanaLogMonitor.d.ts +8 -0
  123. package/dist/esm/types/server/schemas/accounts/index.d.ts +18 -0
  124. package/dist/esm/types/server/schemas/getTransactions/index.d.ts +13 -0
  125. package/dist/esm/types/server/schemas/index.d.ts +7 -0
  126. package/dist/esm/types/server/schemas/logs/index.d.ts +41 -0
  127. package/dist/esm/types/server/schemas/main/index.d.ts +4 -0
  128. package/dist/esm/types/server/schemas/metaData/index.d.ts +97 -0
  129. package/dist/esm/types/server/schemas/pairs/index.d.ts +137 -0
  130. package/dist/esm/types/server/schemas/transactions/index.d.ts +19 -0
  131. package/dist/esm/types/server/utils/envUtils.d.ts +7 -0
  132. package/dist/esm/types/server/utils/index.d.ts +7 -0
  133. package/dist/esm/types/server/utils/jsonUtils.d.ts +10 -0
  134. package/dist/esm/types/server/utils/networkUtils.d.ts +3 -0
  135. package/dist/esm/types/server/utils/pathUtils.d.ts +11 -0
  136. package/dist/esm/types/server/utils/rpcUtils.d.ts +54 -0
  137. package/dist/esm/types/shared/config.d.ts +1 -0
  138. package/dist/esm/types/shared/constants.d.ts +4 -0
  139. package/dist/esm/types/shared/env_imports.d.ts +35 -0
  140. package/dist/esm/types/shared/index.d.ts +5 -0
  141. package/dist/esm/types/shared/init_imports.d.ts +1 -0
  142. package/dist/esm/types/shared/module_imports.d.ts +1 -0
  143. package/dist/types/cli.d.ts +2 -0
  144. package/dist/types/config/defaultConfig.d.ts +2 -0
  145. package/dist/types/config/loadConfig.d.ts +2 -0
  146. package/dist/types/config/types.d.ts +4 -0
  147. package/dist/types/env/index.d.ts +38 -0
  148. package/dist/types/env/loadEnv.d.ts +2 -0
  149. package/dist/types/env/postgres.d.ts +8 -0
  150. package/dist/types/env/queues.d.ts +12 -0
  151. package/dist/types/env/rabbit.d.ts +8 -0
  152. package/dist/types/env/solana.d.ts +8 -0
  153. package/dist/types/index.d.ts +1 -0
  154. package/dist/types/main.d.ts +1 -0
  155. package/dist/types/runtime/bootstrap.d.ts +1 -0
  156. package/dist/types/runtime/start.d.ts +1 -0
  157. package/dist/types/server/db/connections.d.ts +21 -0
  158. package/dist/types/server/db/env_imports.d.ts +34 -0
  159. package/dist/types/server/db/index.d.ts +4 -0
  160. package/dist/types/server/db/init_imports.d.ts +18 -0
  161. package/dist/types/server/db/module_imports.d.ts +1 -0
  162. package/dist/types/server/db/pubKeyUtils.d.ts +10 -0
  163. package/dist/types/server/db/tokenUtils/extrapolateBondingCurve.d.ts +43 -0
  164. package/dist/types/server/db/tokenUtils/index.d.ts +1 -0
  165. package/dist/types/server/db/tokenUtils/pubKeyUtils.d.ts +10 -0
  166. package/dist/types/server/index.d.ts +4 -0
  167. package/dist/types/server/rabbitMq/core/concurrency.d.ts +2 -0
  168. package/dist/types/server/rabbitMq/core/consumer.d.ts +1 -0
  169. package/dist/types/server/rabbitMq/core/index.d.ts +5 -0
  170. package/dist/types/server/rabbitMq/core/publisher.d.ts +1 -0
  171. package/dist/types/server/rabbitMq/core/rabbitManager.d.ts +7 -0
  172. package/dist/types/server/rabbitMq/core/websocketServer.d.ts +6 -0
  173. package/dist/types/server/rabbitMq/index.d.ts +5 -0
  174. package/dist/types/server/rabbitMq/main/index.d.ts +1 -0
  175. package/dist/types/server/rabbitMq/main/services.d.ts +1 -0
  176. package/dist/types/server/rabbitMq/processors/get_pair_data.d.ts +62 -0
  177. package/dist/types/server/rabbitMq/processors/index.d.ts +5 -0
  178. package/dist/types/server/rabbitMq/processors/logParser.d.ts +18 -0
  179. package/dist/types/server/rabbitMq/processors/make_meta_list.d.ts +51 -0
  180. package/dist/types/server/rabbitMq/processors/metaDataProcessor.d.ts +70 -0
  181. package/dist/types/server/rabbitMq/processors/processLogs.d.ts +14 -0
  182. package/dist/types/server/rabbitMq/producers/index.d.ts +4 -0
  183. package/dist/types/server/rabbitMq/solana/index.d.ts +2 -0
  184. package/dist/types/server/rabbitMq/solana/pumpFunMonitor.d.ts +1 -0
  185. package/dist/types/server/rabbitMq/solana/solanaLogMonitor.d.ts +8 -0
  186. package/dist/types/server/schemas/accounts/index.d.ts +18 -0
  187. package/dist/types/server/schemas/getTransactions/index.d.ts +13 -0
  188. package/dist/types/server/schemas/index.d.ts +7 -0
  189. package/dist/types/server/schemas/logs/index.d.ts +41 -0
  190. package/dist/types/server/schemas/main/index.d.ts +4 -0
  191. package/dist/types/server/schemas/metaData/index.d.ts +97 -0
  192. package/dist/types/server/schemas/pairs/index.d.ts +137 -0
  193. package/dist/types/server/schemas/transactions/index.d.ts +19 -0
  194. package/dist/types/server/utils/envUtils.d.ts +7 -0
  195. package/dist/types/server/utils/index.d.ts +7 -0
  196. package/dist/types/server/utils/jsonUtils.d.ts +10 -0
  197. package/dist/types/server/utils/networkUtils.d.ts +3 -0
  198. package/dist/types/server/utils/pathUtils.d.ts +11 -0
  199. package/dist/types/server/utils/rabbitUtils.d.ts +25 -0
  200. package/dist/types/server/utils/rpcUtils.d.ts +54 -0
  201. package/dist/types/shared/config.d.ts +1 -0
  202. package/dist/types/shared/constants.d.ts +4 -0
  203. package/dist/types/shared/env_imports.d.ts +35 -0
  204. package/dist/types/shared/index.d.ts +5 -0
  205. package/dist/types/shared/init_imports.d.ts +1 -0
  206. package/dist/types/shared/module_imports.d.ts +1 -0
  207. package/dist/types/test/debugPrint.d.ts +1 -0
  208. package/dist/types/test/getMeta.d.ts +0 -0
  209. package/dist/types/test/index.d.ts +1 -0
  210. package/package.json +113 -0
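The hunk below is the single-line source map for server.d.ts; its sourcesContent field embeds the package's TypeScript sources, including a small RabbitMQ layer (initRabbitMQ, startConsumer, sendToQueue, shutdownRabbitMQ). As a reading aid, here is a minimal sketch of how those helpers appear to compose, based only on the embedded sources. The import paths and the "log-intake" queue name are assumptions drawn from the embedded src/ tree, not published entry points of the package:

// Sketch only: paths mirror the embedded src/ tree; the package's real
// exports are not shown in this diff.
import { initRabbitMQ, shutdownRabbitMQ } from "./src/server/rabbitMq/core/rabbitManager";
import { startConsumer } from "./src/server/rabbitMq/core/consumer";
import { sendToQueue } from "./src/server/rabbitMq/core/publisher";

async function main() {
  // Opens one connection with separate publish and consume channels and
  // asserts each queue as durable (per the embedded rabbitManager.ts).
  await initRabbitMQ(["log-intake"], ["log-intake"]);

  // Each message is JSON-parsed and run through a shared p-queue capped at
  // 15 concurrent handlers; success acks the message, failure nacks it.
  await startConsumer("log-intake", async (msg) => {
    console.log("received", msg.signature);
  });

  // Publishes a persistent JSON payload; the helper logs msg.signature as its tag.
  await sendToQueue("log-intake", { signature: "abc", slot: 1, logs: [] });

  // shutdownRabbitMQ(false) closes channels and the connection without
  // calling process.exit(0).
  process.once("SIGINT", () => {
    void shutdownRabbitMQ(false);
  });
}

main().catch(console.error);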
@@ -0,0 +1 @@
+ {"version":3,"file":"server.d.ts","sources":["../../src/server/rabbitMq/core/concurrency.ts","../../src/server/rabbitMq/core/consumer.ts","../../src/server/rabbitMq/core/publisher.ts","../../src/server/rabbitMq/core/rabbitManager.ts","../../src/server/db/connections.ts","../../src/server/db/tokenUtils/pubKeyUtils.ts","../../src/server/rabbitMq/core/websocketServer.ts","../../src/server/rabbitMq/solana/pumpFunMonitor.ts","../../src/server/rabbitMq/solana/solanaLogMonitor.ts","../../src/server/rabbitMq/processors/get_pair_data.ts","../../src/server/rabbitMq/processors/logParser.ts","../../src/server/rabbitMq/processors/make_meta_list.ts","../../src/server/rabbitMq/processors/metaDataProcessor.ts","../../src/server/rabbitMq/processors/processLogs.ts","../../src/server/rabbitMq/main/services.ts","../../src/server/schemas/accounts/index.ts","../../src/server/schemas/getTransactions/index.ts","../../src/server/schemas/logs/index.ts","../../src/server/schemas/main/index.ts","../../src/server/schemas/metaData/index.ts","../../src/server/schemas/pairs/index.ts","../../src/server/schemas/transactions/index.ts","../../src/server/utils/envUtils.ts","../../src/server/utils/networkUtils.ts","../../src/server/utils/pathUtils.ts","../../src/server/utils/rpcUtils.ts","../../src/server/utils/jsonUtils.ts"],"sourcesContent":["import PQueue from \"p-queue\";\n\nexport const concurrencyQueue = new PQueue({\n concurrency: 15,\n});\n","import type { ConsumeMessage } from \"amqplib\";\nimport { getChannels } from \"./rabbitManager\";\nimport { concurrencyQueue } from \"./concurrency\";\nimport { logger } from \"./../../db\";\n\nexport const startConsumer = async (\n queue: string,\n handler: (msg: any) => Promise<void>,\n noAck = false\n) => {\n const { consumeChannel } = getChannels();\n await consumeChannel.assertQueue(queue, { durable: true });\n await consumeChannel.prefetch(10);\n\n logger.info(`👂 Consuming ${queue}`);\n\n consumeChannel.consume(queue, (msg: ConsumeMessage | null) => {\n if (!msg) return;\n\n concurrencyQueue.add(async () => {\n try {\n const parsed = JSON.parse(msg.content.toString());\n await handler(parsed);\n if (!noAck) consumeChannel.ack(msg);\n } catch (err) {\n logger.error(`❌ Consumer error: ${err}`);\n consumeChannel.nack(msg, false, false);\n }\n });\n });\n};\n","import { getChannels } from \"./rabbitManager\";\nimport { logger } from \"./../../db\";\n\nexport const sendToQueue = async (\n queue: string,\n payload: any,\n log = true\n) => {\n const { publishChannel } = getChannels();\n const buffer = Buffer.from(JSON.stringify(payload));\n\n publishChannel.sendToQueue(queue, buffer, { persistent: true });\n\n if (log) {\n const tag =\n payload?.signature ??\n payload?.params?.[0] ??\n queue;\n logger.info(`📤 Sent → ${queue}: ${tag}`);\n }\n};\n","import type { Channel, ChannelModel } from \"amqplib\";\nimport amqp from \"amqplib\";\nimport {loadRabbitEnv} from './../../../env/rabbit'\nimport { logger } from \"./../../db\";\n\nlet channelModel: ChannelModel | null = null;\nlet publishChannel: Channel | null = null;\nlet consumeChannel: Channel | null = null;\n\nexport const getChannels = () => {\n if (!publishChannel || !consumeChannel) {\n throw new Error(\"RabbitMQ not initialized\");\n }\n return { publishChannel, consumeChannel };\n};\n\nexport const initRabbitMQ = async (\n publishQueues: string[] = [],\n consumeQueues: string[] = []\n): Promise<void> => {\n logger.info(\"🔌 Connecting to RabbitMQ...\");\n const rabbit_env = loadRabbitEnv()\n channelModel = await 
amqp.connect(rabbit_env.url);\n\n publishChannel = await channelModel.createChannel();\n consumeChannel = await channelModel.createChannel();\n\n for (const q of publishQueues) {\n await publishChannel.assertQueue(q, { durable: true });\n }\n\n for (const q of consumeQueues) {\n await consumeChannel.assertQueue(q, { durable: true });\n }\n\n channelModel.on(\"close\", () => {\n logger.warn(\"RabbitMQ connection closed — reconnecting...\");\n setTimeout(reconnectRabbitMQ, 5000);\n });\n\n channelModel.on(\"error\", (err: any) => {\n logger.error(`RabbitMQ error: ${err.message}`);\n });\n\n logger.info(\"✅ RabbitMQ ready\");\n};\n\nconst reconnectRabbitMQ = async () => {\n try {\n await shutdownRabbitMQ(false);\n await initRabbitMQ();\n } catch {\n logger.error(\"Reconnect failed, retrying in 5s\");\n setTimeout(reconnectRabbitMQ, 5000);\n }\n};\n\nexport const shutdownRabbitMQ = async (exit = true) => {\n logger.warn(\"🛑 Shutting down RabbitMQ...\");\n\n await publishChannel?.close().catch(() => null);\n await consumeChannel?.close().catch(() => null);\n await channelModel?.close().catch(() => null);\n\n publishChannel = null;\n consumeChannel = null;\n channelModel = null;\n\n if (exit) process.exit(0);\n};\n","import {loadSolanaEnv} from './../../env/solana'\nimport {loadPostgresEnv} from './../../env/postgres'\nimport {pkg} from './init_imports';\nimport {logger} from './module_imports.js'\nimport type { QueryResult as PGQueryResult } from 'pg';\nconst { Pool } = pkg;\n\n\n// Singleton pool to prevent multiple Pool instances\n// Singleton pattern to ensure only one pool is created\n\n// Singleton pattern to ensure only one pool is created\nlet pool: pkg.Pool | null = null;\n\nfunction initPool(): pkg.Pool {\n const solEnv = loadSolanaEnv()\n const newPool = new Pool({\n connectionString: loadPostgresEnv().url,\n max: solEnv.maxDbClients,\n idleTimeoutMillis: solEnv.idleTimeoutMs,\n connectionTimeoutMillis: solEnv.connectionTimeoutMs,\n });\n\n newPool.on(\"error\", (err: any) => {\n logger.error(\"🔥 PG pool error\", err);\n process.exit(1);\n });\n\n return newPool;\n}\n\nexport function getPool(): pkg.Pool {\n if (!pool) {\n pool = initPool();\n }\n return pool;\n}\n\nexport async function endPool() {\n if (!pool) return;\n await pool.end();\n pool = null;\n}\n\n/**\n * Runs a SQL query using the connection pool.\n * Implements retry logic in case of 'too many clients' errors.\n * @param query - SQL query string\n * @param params - Query parameters (optional)\n * @param successMsg - Log message on successful query\n * @param failMsg - Log message on failed query\n * @param retries - Number of retries for 'too many clients' errors\n * @returns Query result\n */\nexport async function poolQuery(query: string, params?: any[]) {\n return getPool().query(query, params);\n}\n\nexport async function withTransaction<T>(\n fn: (client: any) => Promise<T>\n): Promise<T> {\n const client = await getPool().connect();\n try {\n await client.query(\"BEGIN\");\n const result = await fn(client);\n await client.query(\"COMMIT\");\n return result;\n } catch (err) {\n await client.query(\"ROLLBACK\");\n throw err;\n } finally {\n client.release();\n }\n}\n\n\n/**\n * Fetch all users from the 'users' table.\n */\nexport async function fetchAllRows(tableName:any): Promise<any> {\n try {\n const queryText = `SELECT * FROM ${tableName};`;\n const result: PGQueryResult<any> = await poolQuery(queryText);\n return result.rows; // This is an array of user objects\n } catch (error) {\n console.error('Error 
fetching all users:', error);\n throw error; // Re-throw or handle\n }\n}\nexport function extractRow(obj: any) { \n if (obj?.rows && obj.rows.length > 0) { \n return obj.rows[0]; \n } \n} \nexport function extractId(obj: any) { \n if (obj && obj?.id) { \n return obj.id; \n } let row = extractRow(obj) \n if (row && row?.id) { \n return row.id; \n } \n}\n","import { PublicKey, isValidBase58} from \"./../module_imports\";\n\n\nclass KeyManager {\n private cache?: Map<string, PublicKey>;\n\n constructor(enableCache = true) {\n if (enableCache) this.cache = new Map();\n }\n\n getPubkey(obj: string | PublicKey): PublicKey {\n if (obj instanceof PublicKey) return obj;\n\n if (typeof obj === \"string\") {\n if (!isValidBase58(obj)) {\n throw new Error(`Invalid Base58 string: ${obj}`);\n }\n\n if (this.cache?.has(obj)) {\n return this.cache.get(obj)!;\n }\n\n const pk = new PublicKey(obj);\n this.cache?.set(obj, pk);\n return pk;\n }\n\n throw new Error(`Invalid input for PublicKey`);\n }\n}\n\nexport const KeyManager_server = new KeyManager(false); // no cache\n\n\n// Validate or convert an object to a PublicKey\nexport function getPubkey_server(obj: any): PublicKey {\n return KeyManager_server.getPubkey(obj)\n}\n// Ensure the output is a valid PublicKey string\nexport function getPubkeyString_server(obj: string | PublicKey): string {\n return getPubkey_server(obj).toBase58();\n}\n","import WebSocket from \"ws\";\nimport type { TransactionItem } from \"../../db\";\nimport { logger } from \"../../db\";\nimport {loadSolanaEnv} from './../../../env/solana'\n\nconst FILE_LOCATION = \"src/rabbitMq/websocketServer.ts\";\n\n\n\ntype WebSocketServer = typeof WebSocket.Server;\n\nlet wss: InstanceType<WebSocketServer> | null = null;\nexport function getWebSocketServer(): InstanceType<WebSocketServer> {\n\n if (wss) return wss;\n const solanaEnv = loadSolanaEnv()\n const WS_PORT = solanaEnv.broadcastPort;\n logger.info(\"🚀 Starting WebSocketServer\", {\n file: FILE_LOCATION,\n port: WS_PORT,\n });\n\n wss = new WebSocket.Server({\n port: WS_PORT,\n clientTracking: true,\n });\n\n wss.on(\"connection\", (ws: WebSocket) => {\n logger.info(\"🔌 WebSocket client connected\", { file: FILE_LOCATION });\n\n ws.on(\"close\", () => {\n logger.info(\"❌ WebSocket client disconnected\", {\n file: FILE_LOCATION,\n });\n });\n\n ws.on(\"error\", (err: Error) => {\n logger.error(\"🔥 WebSocket client error\", {\n error: err.message,\n file: FILE_LOCATION,\n });\n });\n });\n\n const shutdown = () => {\n if (!wss) return;\n\n logger.warn(\"🛑 Shutting down WebSocketServer\", {\n file: FILE_LOCATION,\n port: WS_PORT,\n });\n\n for (const client of wss.clients) {\n try {\n client.close();\n } catch {}\n }\n\n wss.close();\n wss = null;\n };\n\n process.once(\"SIGINT\", shutdown);\n process.once(\"SIGTERM\", shutdown);\n process.once(\"exit\", shutdown);\n\n return wss;\n}\n\n// ---------------------------------------------------------------------------\n// Broadcast helper (safe even if server already exists)\n// ---------------------------------------------------------------------------\nexport function broadcastTransaction(txn: TransactionItem): void {\n const server = getWebSocketServer();\n const payload = JSON.stringify(txn);\n\n for (const client of server.clients) {\n if (client.readyState === WebSocket.OPEN) {\n client.send(payload);\n }\n }\n}\n","import {\n SOLANA_MAINNET_RPC_URL,\n SOLANA_MAINNET_WS_ENDPOINT,\n SOLANA_PUMP_FUN_PROGRAM_ID,\n} from \"./../../db\";\nimport {loadQueueEnv} from 
'./../../../env/queues';\nimport { startSolanaLogMonitor } from \"./solanaLogMonitor\";\n\nexport const startPumpFunMonitor = () =>\n\n startSolanaLogMonitor({\n programId: SOLANA_PUMP_FUN_PROGRAM_ID,\n rpcUrl: SOLANA_MAINNET_RPC_URL,\n wsEndpoint: SOLANA_MAINNET_WS_ENDPOINT,\n outputQueue: loadQueueEnv().logIntake\n });\n","import { PublicKey,logger } from \"./../../db\";\nimport { Connection } from \"@solana/web3.js\";\nimport { sendToQueue } from \"../core\";\n\n\ninterface MonitorOpts {\n programId: string;\n rpcUrl: string;\n wsEndpoint?: string;\n outputQueue: string;\n}\n\nexport const startSolanaLogMonitor = async ({\n programId,\n rpcUrl,\n wsEndpoint,\n outputQueue,\n}: MonitorOpts) => {\n while (true) {\n try {\n const conn = new Connection(rpcUrl, {\n commitment: \"processed\",\n wsEndpoint,\n });\n\n const pubkey = new PublicKey(programId);\n\n logger.info(`📡 Subscribing to logs: ${programId}`);\n\n const subId = conn.onLogs(pubkey, async (log, ctx) => {\n if (!log.err && log.signature) {\n await sendToQueue(outputQueue, {\n signature: log.signature,\n slot: ctx.slot,\n logs: log.logs,\n programId,\n }, false);\n }\n });\n\n await new Promise(() => {}); // block forever\n conn.removeOnLogsListener(subId);\n } catch (err) {\n logger.error(\"Solana WS error — retrying in 5s\");\n await new Promise(r => setTimeout(r, 5000));\n }\n }\n};\n","// src/revisions/get_transactions.ts\nimport {loadSolanaEnv} from './../../../env/solana';\nimport { upsertTransaction, searchTransactionBySignature,getMetaDataFromId,fetchLogEntryBySignature } from './../../schemas';\nimport { fetchRpc,findKeyValue } from './../../utils';\nimport { processParsedLogs,processTxns,getMetaFoundation,getOrFetchMetaData } from './../processors';\n\nimport {logger,decodeInstructionData,getLogString} from './../../db';\n/**\n * Returns the necessary headers for HTTP requests.\n */\nexport function get_headers() {\n return {\n \"Content-Type\": \"application/json\",\n };\n}\n/**\n * Extracts a value for a given key from the data.\n *\n * @param data - The data object.\n * @param key - The key to extract.\n * @returns The value associated with the key or undefined.\n */\nfunction getAnyValue(data: any, key: string): any {\n return data && data.length > 0 ? 
data[0][key] : undefined;\n}\n/**\n * Interface representing the structure of the RPC response for getTransaction.\n */\ninterface RpcResponse {\n jsonrpc: string;\n result: TransactionResult | null;\n id: number;\n}\n\ninterface TransactionResult {\n blockTime: number;\n meta: {\n computeUnitsConsumed: number;\n err: any;\n fee: number;\n innerInstructions: Array<any>;\n loadedAddresses: {\n readonly: Array<any>;\n writable: Array<any>;\n };\n logMessages: Array<string>;\n postBalances: Array<number>;\n postTokenBalances: Array<any>;\n preBalances: Array<number>;\n preTokenBalances: Array<any>;\n rewards: Array<any>;\n status: {\n Ok: any;\n };\n };\n slot: number;\n transaction: {\n message: {\n accountKeys: Array<string>;\n addressTableLookups: Array<any>;\n header: {\n numReadonlySignedAccounts: number;\n numReadonlyUnsignedAccounts: number;\n numRequiredSignatures: number;\n };\n instructions: Array<any>;\n recentBlockhash: string;\n };\n signatures: Array<string>;\n };\n version: number;\n}\n/**\n * Pauses execution for the given number of milliseconds.\n *\n * @param ms - Milliseconds to sleep.\n * @returns A promise that resolves after the specified delay.\n */\nfunction sleep(ms: number): Promise<void> {\n return new Promise(resolve => setTimeout(resolve, ms));\n}\n\n\n/**\n * Asynchronous function to get public keys from mint metadata.\n *\n * @param mint - The mint address as a string.\n * @param url - The RPC URL (optional).\n * @param consumerLogger - The logger to use (optional).\n * @returns A promise that resolves to an array of public key strings.\n */\nexport async function getPubkeysFromMintMetaData(\n mint: string,\n url: string | null = null,\n consumerLogger: any = logger\n): Promise<any> {\n const env = loadSolanaEnv()\n const SOLANA_FALLBACK_RPC_URL = env.fallbackRpcUrl\n url = url || SOLANA_FALLBACK_RPC_URL;\n const maxAttempts = 43;\n const initialDelayMs = 1000; // 1 second\n const backoffFactor = 2; // Exponential backoff factor\n\n let metaId: any;\n\n for (let attempt = 1; attempt <= maxAttempts; attempt++) {\n try {\n // Fetch meta ID using the RPC call\n const metaIdResponse = await getMetaFoundation(mint, SOLANA_FALLBACK_RPC_URL);\n metaId =metaIdResponse\n getLogString({message:`metadata response == ${metaIdResponse}`,details:`Attempt ${attempt}: Meta ID Response: ${JSON.stringify(metaIdResponse)}`,logType:'info'})\n\n if (!metaIdResponse) {\n getLogString({message:`Attempt ${attempt}: Failed to fetch meta ID for mint: ${mint}`, details:metaIdResponse,logType:'error'});\n } else {\n \n // Determine the type of metaIdResponse and extract metaId accordingly\n if (typeof metaIdResponse === 'object' && metaIdResponse !== null && 'id' in metaIdResponse) {\n metaId = metaIdResponse.id;\n } else if (typeof metaIdResponse === 'number') {\n metaId = metaIdResponse;\n } else {\n getLogString({message:`Attempt ${attempt}: Invalid meta ID response type for mint: ${mint}`, details:metaIdResponse,logType:'error'});\n }\n\n if (metaId) {\n getLogString({message:`Attempt ${attempt}: Successfully obtained meta ID: ${metaId}`});\n break; // Exit the loop if metaId is successfully obtained\n }\n }\n } catch (error) {\n getLogString({message:`Attempt ${attempt}: Error fetching meta ID for mint ${mint}:`,details: error,logType:'error'});\n }\n\n if (attempt < maxAttempts) {\n const delay = initialDelayMs * Math.pow(backoffFactor, attempt - 1);\n getLogString({message:`Waiting for ${delay}ms before next attempt...`});\n await sleep(delay);\n } else {\n 
getLogString({message:`Exceeded maximum attempts (${maxAttempts}) to fetch meta ID for mint: ${mint}`,logType:'error'});\n }\n }\n \n if (!metaId) {\n getLogString({message:`Failed to retrieve meta ID after ${maxAttempts} attempts for mint: ${mint}`,logType:'error'});\n return [];\n }\n\n try {\n // Fetch metadata from the database asynchronously\n const metaData = await getMetaDataFromId(metaId);\n if (!metaData) {\n getLogString({message:`No metadata found in the database for meta ID: ${metaId}`,logType:'error'});\n return [];\n }\n\n // Process the metadata to extract public keys\n const publicKey: string = metaData.meta_data.metadata.publicKey;\n return publicKey;\n } catch (error) {\n getLogString({message:`Error in getPubkeysFromMintMetaData for mint ${mint}:`,details:error,logType:'error'});\n return [];\n }\n}\n/**\n * Calls the RPC method with the given parameters.\n *\n * @param method - The RPC method to call.\n * @param params - The parameters for the RPC method.\n * @param url - The RPC URL (optional).\n * @param consumerLogger - The logger to use (optional).\n * @returns The JSON response from the RPC call.\n */\nexport async function call_rpc_data(\n method: string,\n params: any\n) {\n // Set the default RPC URL if none is provided\n try {\n // Corrected parameter order: url, method, params\n const response = await fetchRpc(method, params);\n\n //consumerLogger.info(`📥 response body: ${JSON.stringify(responseBody)}`);\n return response;\n } catch (error) {\n getLogString({message:`Error in call_rpc_data: ${error}`,logType:'error'});\n return undefined; // Explicitly return undefined in case of error\n }\n}\n/**\n * Fetches signatures for a given address.\n *\n * @param address - The address to fetch signatures for.\n * @param until - Optional parameter to fetch signatures until a certain point.\n * @param limit - The maximum number of signatures to fetch.\n * @param url - The RPC URL (optional).\n * @param consumerLogger - The logger to use (optional).\n * @returns An array of signatures.\n */\nexport async function fetch_getSignaturesForAddress(\n address: string,\n until: any = null,\n limit: number = 1000,\n url: string | null = null,\n consumerLogger: any = logger\n) {\n const method = 'getSignaturesForAddress';\n const params = [address, { until, limit }];\n const response = await call_rpc_data(method, params);\n\n return response;\n}\n\n/**\n * Fetches a transaction by its signature.\n *\n * @param signature - The signature of the transaction.\n * @param url - The RPC URL (optional).\n * @param consumerLogger - The logger to use (optional).\n * @returns The transaction object.\n */\nexport async function fetch_getTransaction(\n signature: string,\n url: string | null = null,\n consumerLogger: any = logger\n) {\n const method = 'getTransaction';\n const params = [signature, { maxSupportedTransactionVersion: 0 }];\n const response = await call_rpc_data(method, params);\n\n return response;\n}\n\n/**\n * Retrieves a transaction by signature. 
If not found in the database, fetches it via RPC,\n * upserts it into the database, and then returns it.\n *\n * @param signature - The signature of the transaction to retrieve.\n * @returns The transaction object or null if not found.\n */\nexport async function get_or_fetch_transaction(signature: string,consumerLogger:any=logger): Promise<any> {\n try {\n // Search for the transaction in the database\n let transaction:any = await searchTransactionBySignature(signature);\n if (transaction) {\n //logger.info(`🔍 Transaction found in DB for signature: ${signature}`);\n return transaction;\n }\n\n //logger.info(`🔍 No transaction found in DB for signature: ${signature}. Fetching via RPC...`);\n\n // Fetch the transaction via RPC\n transaction = await fetch_getTransaction(signature);\n if (!transaction) {\n logger.error(`❌ Failed to fetch transaction via RPC for signature: ${signature}`);\n return null;\n }\n\n // Extract signatures and slot from the transaction\n let signatures = findKeyValue(transaction, 'signatures') || [];\n const slot = findKeyValue(transaction, 'slot') || null;\n const program_id:any =null;\n // Ensure the main signature is included in the signatures array\n signatures = signatures.includes(signature) ? signatures : [...signatures, signature];\n\n // Upsert the transaction into the database\n await upsertTransaction(transaction);\n //logger.info(`✅ Transaction upserted into DB for signature: ${signature}`);\n const logs = findKeyValue(transaction, 'logMessages') || null;\n let pairMsg = { signature, slot, program_id, logs ,signatures}\n pairMsg = await processParsedLogs(pairMsg,consumerLogger)\n //logger.info(JSON.stringify({ signature, slot, program_id, logs ,signatures}))\n await processTxns(pairMsg,consumerLogger)\n return transaction;\n } catch (error) {\n logger.error(`❌ Error in get_or_fetch_transaction for signature ${signature}: ${error}`);\n return null;\n }\n}\n/**\n * Extracts the user address from parsed logs.\n *\n * @param parsedLogs - The parsed logs object.\n * @returns A promise that resolves to the user address string or null.\n */\nexport async function getUserAddress(parsedLogs: any): Promise<string | null> {\n try {\n if (!parsedLogs) {\n return null;\n }\n\n const logs: any[] = parsedLogs.logs;\n if (!logs || logs.length === 0) {\n return null;\n }\n\n // Filter logs that contain both 'data' and 'logs'\n const relevantLogs = logs.filter(log => log.data && log.logs);\n if (relevantLogs.length === 0) {\n return null;\n }\n\n for (const log of relevantLogs) {\n if (log.logs.includes('Instruction: Create')) {\n const dataItem = log.data[0];\n const decodedData = await decodeInstructionData(dataItem);\n if (decodedData && decodedData.user_address) {\n return decodedData.user_address;\n }\n }\n }\n\n return null;\n } catch (error) {\n logger.error(`Error in getUserAddress: ${error}`);\n return null;\n }\n}\nfunction extractData<T>(data:any): T {\n return Array.isArray(data) && data.length === 1 ? data[0] : data;\n }\nexport async function get_decodedData(parsed_logs:any ): Promise<any | null> {\n\n for (let inocationData of parsed_logs) {\n let { program_id, invocationNumber, logs, data } = inocationData;\n data = extractData(data)\n if (logs && data){\n let invocationLogs = logs[0]\n let dataStr = typeof data === 'string' ? 
data : String(data);\n if (invocationLogs.includes('Instruction: Create')){\n \n const decodedData = await decodeInstructionData(dataStr);\n return decodedData;\n \n }\n }\n }\n}\nfunction getPublicKeyExplicit(metadata: any): string | null {\n if (!metadata || !metadata.meta_data) {\n return null;\n }\n \n // If 'meta_data.publicKey' exists, use it\n if (metadata.meta_data.publicKey) {\n return metadata.meta_data.publicKey;\n }\n \n // Otherwise, check 'meta_data.mint.publicKey'\n if (metadata.meta_data.mint && metadata.meta_data.mint.publicKey) {\n return metadata.meta_data.mint.publicKey;\n }\n \n return null;\n }\n \nexport async function get_pair_data(mint:any,consumerLogger:any=logger):Promise<any>{\n consumerLogger.info(`mint = ${mint}`)\n const env = loadSolanaEnv()\n const SOLANA_FALLBACK_RPC_URL = env.fallbackRpcUrl\n const metaIdResponse = await getMetaFoundation(mint, SOLANA_FALLBACK_RPC_URL);\n let metadata:any = await getOrFetchMetaData(mint, null, null, consumerLogger)\n consumerLogger.info(`metadata == ${JSON.stringify(metadata)}`);\n let publicKey: any = metadata.meta_data.metadata.publicKey;\n if (publicKey && publicKey.length >0){\n publicKey=publicKey[0]\n }\n //const publicKey = await getPubkeysFromMintMetaData(mint,null,consumerLogger);\n //consumerLogger.info(`publicKey == ${publicKey}`);\n let signatures:any = await fetch_getSignaturesForAddress(publicKey)\n //consumerLogger.info(`publicKey == ${JSON.stringify(signatures)}`);\n let signature = signatures[signatures.length-1].signature\n const transaction = await get_or_fetch_transaction(signature);\n const logData:any = await fetchLogEntryBySignature(signature);\n let log_id = logData.id\n const decodedData = await get_decodedData(logData.logs)\n let user_address = decodedData.user_address\n let bonding_curve = decodedData.bonding_curve\n return {signature,log_id,bonding_curve,user_address}\n}\n","//src/rabbitMq/processors/logProcessor.ts\nimport { insertLogEntry } from './../../schemas';\nimport {logger} from './../../db'\nexport interface InvocationRecord {\n program_id: string;\n invocationNumber: number;\n logs: string[];\n data: string[];\n }\n \n interface ParsedLogs {\n invocations: InvocationRecord[];\n }\nexport async function processParsedLogs(pairMsg:any,consumerLogger:any=logger):Promise<any>{\n //let start:any=Date.now()\n let { signature, slot, program_id, logs ,signatures} = pairMsg;\n let parsedLogs:any=null\n let log_id:any = null\n try{\n\n // Parse logs and insert\n //start=Date.now();\n parsedLogs = parseProgramLogs(logs);\n //start=Date.now();\n log_id = await insertLogEntry(signature, slot, program_id, parsedLogs,signatures);\n \n \n return { signature, slot, program_id,log_id,parsedLogs,signatures}\n }catch (error) {\n consumerLogger.error(`❌ Error Parsing Log: ${JSON.stringify(error)}`);\n \n logs=null\n pairMsg=null\n parsedLogs=null\n log_id= null\n // Handle message requeue in case of processing errors\n }\n\n}\n // Precompiled Regular Expressions\n const INVOKE_REGEX = /^Program\\s+(\\S+)\\s+invoke\\s+\\[(\\d+)\\]/;\n const SUCCESS_REGEX = /^Program\\s+(\\S+)\\s+success/;\n const LOG_REGEX = /^Program\\s+log:\\s+(.*)/;\n const DATA_REGEX = /^Program\\s+data:\\s+(.*)/;\n \n /**\n * Parses program logs to extract invocation records, logs, and data.\n * Optimized for memory efficiency.\n * @param {string[]} logs - Array of log strings from Solana transactions.\n * @returns {ParsedLogs} - Parsed invocation records.\n */\n export function parseProgramLogs(logs: string[]): ParsedLogs {\n const 
invocationStack: InvocationRecord[] = [];\n const rootInvocations: InvocationRecord[] = [];\n for (const line of logs) {\n let match: RegExpMatchArray | null;\n \n if ((match = INVOKE_REGEX.exec(line))) {\n // New invocation\n const program_id = match[1];\n const invokeNumber = parseInt(match[2], 10);\n \n // Safeguard against excessive stack depth\n if (invocationStack.length > 1000) { // Example limit\n console.warn('Invocation stack exceeded 1000 levels. Skipping further invocations.');\n continue;\n }\n \n const invocation: InvocationRecord = {\n program_id,\n invocationNumber: invokeNumber,\n logs: [],\n data: [],\n };\n \n invocationStack.push(invocation);\n \n // If this is a top-level invocation, add to rootInvocations\n if (invocationStack.length === 1) {\n rootInvocations.push(invocation);\n }\n \n } else if ((match = SUCCESS_REGEX.exec(line))) {\n // Invocation success, pop from stack if matches\n const program_id = match[1];\n const lastInvocation = invocationStack[invocationStack.length - 1];\n \n if (lastInvocation && lastInvocation.program_id === program_id) {\n invocationStack.pop();\n } else {\n console.warn(`Success log for program ${program_id} does not match the invocation stack.`);\n }\n \n } else if ((match = LOG_REGEX.exec(line))) {\n // Log line, append to current invocation\n const logMessage = match[1];\n const currentInvocation = invocationStack[invocationStack.length - 1];\n \n if (currentInvocation) {\n currentInvocation.logs.push(logMessage);\n } else {\n console.warn(`Log message \"${logMessage}\" found outside of any invocation.`);\n }\n \n } else if ((match = DATA_REGEX.exec(line))) {\n // Data line, append to current invocation\n const dataMessage = match[1];\n const currentInvocation = invocationStack[invocationStack.length - 1];\n \n if (currentInvocation) {\n currentInvocation.data.push(dataMessage);\n } else {\n console.warn(`Data message \"${dataMessage}\" found outside of any invocation.`);\n }\n \n }\n // Ignore other lines (e.g., 'consumed ... 
compute units')\n }\n \n // Filter out invocations that have empty logs and data\n const filteredInvocations:any = rootInvocations.filter(\n (invocation) => invocation.logs.length > 0 || invocation.data.length > 0\n );\n \n // Clear temporary references to allow garbage collection\n invocationStack.length = 0;\n //console.log(filteredInvocations)\n return filteredInvocations;\n }\n","\nimport { poolQuery,fetchAccountInfo} from './../../db'; // Adjust import to match your code\nimport { inspect } from 'util'; // for debugging\n\n// Schema definition - explicit structure\ninterface TokenMintInfo {\n decimals: number;\n freezeAuthority: string | null;\n isInitialized: boolean;\n mintAuthority: string | null;\n supply: string;\n}\n\ninterface TokenAccountData {\n parsed: {\n info: TokenMintInfo;\n type: 'mint';\n };\n program: string;\n space: number;\n}\n\ninterface TokenAccountValue {\n data: TokenAccountData;\n executable: boolean;\n lamports: number;\n owner: string;\n rentEpoch: number;\n space: number;\n}\n\ninterface TokenInfoResponse {\n context: {\n apiVersion: string;\n slot: number;\n };\n value: TokenAccountValue;\n}\n\nexport async function get_token_info(mint: string) {\n try {\n return await fetchAccountInfo(mint,true);\n\n } catch (err) {\n console.error('Error fetching token info:', err);\n throw err;\n }\n}\n\n// Validation registry pattern\nconst validators = {\n tokenMintInfo: (data: any): data is TokenMintInfo => {\n return (\n typeof data.decimals === 'number' &&\n (data.freezeAuthority === null || typeof data.freezeAuthority === 'string') &&\n typeof data.isInitialized === 'boolean' &&\n (data.mintAuthority === null || typeof data.mintAuthority === 'string') &&\n typeof data.supply === 'string'\n );\n },\n};\n\n/**\n * Converts your \"dbMetaData\" format into the \"metadata2\" format.\n *\n * @param dbMetaData - The original object with properties like mint, name, decimals, meta_data, etc.\n * @returns An object in the \"metadata2\" shape, with { publickey, tokeninfo, metadata }.\n */\nexport async function convertDbMetaDataToMetadata2(dbMetaData: any) {\n let publickey= dbMetaData?.mint ?? 
null\n let metadata = dbMetaData.meta_data\n let tokeninfo = await get_token_info(publickey) \n \n return {publickey,tokeninfo,metadata};\n}\n\n/**\n * Inserts or updates a record in the `metadata2` table with columns:\n * 1) publickey (TEXT, unique or primary key)\n * 2) tokeninfo (JSONB)\n * 3) metadata (JSONB)\n *\n * If publickey already exists, it updates the tokeninfo and metadata columns.\n *\n * @param publicKey - The unique public key string\n * @param tokenInfo - JSON object for token info\n * @param meta - JSON object for metadata\n * @param tableName - (Optional) override table name, defaults to \"metadata2\"\n */\nexport async function upsertMetadata2(\n publicKey: string,\n tokenInfo: any,\n meta: any,\n tableName = 'metadata2'\n): Promise<any> {\n try {\n const query = `\n INSERT INTO ${tableName} (publickey, tokeninfo, metadata)\n VALUES ($1, $2, $3)\n ON CONFLICT (publickey)\n DO UPDATE\n SET tokeninfo = EXCLUDED.tokeninfo,\n metadata = EXCLUDED.metadata\n RETURNING *\n `;\n\n const result = await poolQuery(query, [publicKey, tokenInfo, meta]);\n if (result.rows.length === 0) {\n console.log(`No rows returned from upsert for publicKey: ${publicKey}`);\n return null;\n }\n\n const upsertedRow = result.rows[0];\n console.log('Upsert successful for:', publicKey, upsertedRow);\n return upsertedRow;\n } catch (error) {\n console.error('Error in upsertMetadata2:', error);\n throw error;\n }\n}\n\n\n\n\nexport async function saveAsMetadata2(dbMetaData: any) {\n // 1) Convert dbMetaData to the \"metadata2\" structure\n const metadata2 = await convertDbMetaDataToMetadata2(dbMetaData);\n\n // 2) Destructure out the three columns for your table\n let {publickey,tokeninfo,metadata } = metadata2;\n\n // 3) Upsert into metadata2\n // This returns the newly inserted or updated row\n const savedRecord = await upsertMetadata2(publickey, tokeninfo, metadata);\n return savedRecord;\n}\nasync function main() {\n // Suppose you have a list of old-style dbMetaData objects\n const metadataList = await poolQuery(`SELECT * FROM metadata`);\n \n let records = metadataList.rows\n console.log(records)\n for (const dbMetaData of records) {\n try {\n \n \n const savedRecord = await saveAsMetadata2(dbMetaData);\n \n console.log('Saved record =>', savedRecord);\n } catch (err) {\n console.error('Error saving record:', err);\n }\n }\n}\n\n\n","// src/rabbitMq/processors/metaDataProcessor.ts\nimport {loadSolanaEnv} from './../../../env/solana';\nimport { insertMetaData, getMetadataRowFromDb, getMetadataIdFromMint } from './../../schemas';\nimport { SOLANA_PUMP_FUN_PROGRAM_ID, getLogString,SOLANA_MAINNET_RPC_URL,logger,getPubkeyString_server } from './../../db';\nimport { createUmi } from \"@metaplex-foundation/umi-bundle-defaults\";\nimport {\n createSignerFromKeypair,\n generateSigner,\n signerIdentity,\n} from \"@metaplex-foundation/umi\";\nimport {\n fetchDigitalAsset,\n mplTokenMetadata,\n} from \"@metaplex-foundation/mpl-token-metadata\";\nimport { fetchIPFSData,convertBigInts } from './../../utils';\n\nconst FILE_LOCATION = 'src/rabbitMq/processors/metaDataProcessor.ts';\n\n\n/**\n * Initializes and configures the UMI instance.\n * @param url - The RPC URL.\n * @returns The configured UMI instance.\n */\nexport async function initializeUmi(url: string): Promise<any> {\n const functionName = 'initializeUmi';\n try {\n const umi = await createUmi(url || SOLANA_MAINNET_RPC_URL);\n umi.use(mplTokenMetadata());\n umi.use(signerIdentity(createSignerFromKeypair(umi, generateSigner(umi))));\n 
getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'UMI initialized successfully',details:{ url },logType:'info'});\n return umi;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to initialize UMI',details:{ url},logType:'info'});\n throw error;\n }\n}\nasync function getUrlUmi(url: string | undefined, umi: any) {\n return {\n url: url || SOLANA_MAINNET_RPC_URL,\n umi: umi || (await initializeUmi(url || SOLANA_MAINNET_RPC_URL))\n };\n}\n/**\n * Fetches metadata for a given mint address.\n * @param mint - The mint address.\n * @param url - The RPC URL.\n * @param umi - Optional UMI instance.\n * @param consumerLogger - Custom logger.\n * @param maxMetadataSize - Maximum size limit for metadata.\n * @returns The metadata or null if failed.\n */\nexport async function callMetaData(\n mint: any,\n url: any =null,\n umi: any = null,\n consumerLogger = logger,\n maxMetadataSize: number = 1e6\n): Promise<any> {\n const functionName = 'callMetaData';\n\n if (!mint || typeof mint !== 'string') {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Invalid mint address provided',details:null,logType:'warn'});\n return null;\n }\n\n try {\n const { url: url2, umi: umi2 } = await getUrlUmi(url, umi);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata',details:{ mint },logType:'info'});\n mint = getPubkeyString_server(mint)\n const metaData = await Promise.race([\n fetchDigitalAsset(umi2, mint),\n new Promise((_, reject) =>\n setTimeout(() => reject(new Error(`Timeout fetching metadata for mint ${mint}`)), 5000)\n ),\n ]);\n\n if (!metaData) throw new Error('Metadata fetch returned null');\n\n const safeMetaData = convertBigInts({ mint, ...metaData });\n const size = JSON.stringify(safeMetaData).length;\n if (size > maxMetadataSize) {\n throw new Error(`Metadata size (${size} bytes) exceeds limit (${maxMetadataSize} bytes)`);\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata fetched successfully',details:{ mint },logType:'info'});\n return safeMetaData;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata',details:{ mint},logType:'error'});\n return null;\n }\n}\n\n/**\n * Retrieves or fetches metadata for a mint address.\n * @param mint - The mint address.\n * @param url - Optional RPC URL.\n * @param umi - Optional UMI instance.\n * @param consumerLogger - Custom logger.\n * @returns The metadata or null if not found/fetched.\n */\nexport async function getOrFetchMetaData(\n mint: string,\n url: any = null,\n umi: any = null,\n consumerLogger: any = logger\n): Promise<any> {\n const functionName = 'getOrFetchMetaData';\n\n if (!mint) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No mint address provided',details:null,logType:'warn'});\n return null;\n }\n\n try {\n const cachedMetaData = await getMetadataRowFromDb(mint, consumerLogger);\n if (cachedMetaData && cachedMetaData.processed) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Returning cached metadata',details:{ mint },logType:'info'});\n return cachedMetaData;\n }\n\n const { url: url2, umi: umi2 } = await getUrlUmi(url, umi);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching new metadata',details:{ mint },logType:'info'});\n const metaData = await 
call_and_upsert_metadata(mint, url2, umi2, consumerLogger);\n\n if (!metaData) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata',details:{ mint },logType:'warn'});\n return null;\n }\n\n return metaData;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Error processing metadata fetch',details:{ mint},logType:'error'});\n return null;\n }\n}\n\n/**\n * Fetches metadata from Metaplex and upserts it into the database.\n * @param mint - The mint address.\n * @param url - Optional RPC URL.\n * @param umi - Optional UMI instance.\n * @param consumerLogger - Custom logger.\n * @returns The metadata or null if failed.\n */\nexport async function getMetaFoundation(\n mint: string,\n url: any = null,\n umi: any = null,\n consumerLogger: any = logger\n): Promise<any> {\n const functionName = 'getMetaFoundation';\n\n try {\n const { url: url2, umi: umi2 } = await getUrlUmi(url, umi);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata from Metaplex',details:{ mint },logType:'info'});\n const metaData = await callMetaData(mint, url2, umi2, consumerLogger);\n\n if (!metaData) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata fetched from Metaplex',details:{ mint },logType:'warn'});\n return null;\n }\n\n metaData.processed = true;\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Inserting fetched metadata',details:{ mint },logType:'info'});\n const insertedMetaData = await insertMetaData(metaData, metaData?.uri, mint, null, consumerLogger);\n\n if (!insertedMetaData) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to insert metadata',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata processed successfully',details:{ mint },logType:'info'});\n return insertedMetaData;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Error in metadata fetch and insert',details:{ mint},logType:'error'});\n return null;\n }\n}\n\n/**\n * Fetches or processes metadata to retrieve its ID.\n * @param mint - The mint address.\n * @param decodedData - Optional decoded metadata.\n * @param decimals - Decimal precision.\n * @param consumerLogger - Custom logger.\n * @returns The metadata ID or null if failed.\n */\nexport async function getOrFetchMetaDataId(\n mint: string,\n decodedData: any | Partial<any> | null = null,\n decimals: number = 6,\n consumerLogger: any = logger\n): Promise<any> {\n const functionName = 'getOrFetchMetaDataId';\n\n try {\n if (decodedData) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Processing provided decoded data',details:{ mint },logType:'info'});\n const metaId = await processMetaData(mint, decodedData, decimals, consumerLogger);\n if (metaId) return metaId;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata',details:{ mint },logType:'info'});\n const metaData = await getOrFetchMetaData(mint, null, null, consumerLogger);\n\n if (metaData && metaData.id) {\n return metaData.id;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata ID retrieved',details:{ mint },logType:'warn'});\n return null;\n } catch (error: any) {\n 
getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to process metadata ID',details:{ mint},logType:'error'});\n return null;\n }\n}\n\n/**\n * Processes and inserts metadata into the database.\n * @param mint - The mint address.\n * @param decodedData - Decoded metadata.\n * @param decimals - Decimal precision.\n * @param consumerLogger - Custom logger.\n * @returns The metadata ID or null if failed.\n */\nexport async function processMetaData(\n mint: string | null = null,\n decodedData: any | Partial<any>,\n decimals: number = 6,\n consumerLogger: any = logger\n): Promise<number | null> {\n const functionName = 'processMetaData';\n\n try {\n mint = mint || decodedData.mint;\n if (!mint) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No mint provided',details:null,logType:'error'});\n return null;\n }\n\n const existingMetaData = await getMetadataRowFromDb(mint, consumerLogger);\n if (existingMetaData) {\n const metaId = await getMetadataIdFromMint(mint, consumerLogger);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Using existing metadata',details:{ mint},logType:'info'});\n return metaId;\n }\n\n const uri = decodedData?.uri || '';\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Inserting new metadata',details:{ mint },logType:'info'});\n const metaData = await insertMetaData(decodedData, uri, mint, { decimals }, consumerLogger);\n const metaId = await getMetadataIdFromMint(mint, consumerLogger);\n\n if (!metaId) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to retrieve metadata ID after insert',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Created new metadata with ID: ${metaId}`,details:{ mint },logType:'info'});\n return metaId;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to process metadata',details:{ mint},logType:'error'});\n return null;\n }\n}\n\n/**\n * Processes decoded metadata based on program ID.\n * @param decodedData - Decoded metadata.\n * @param program_id - Program ID.\n * @param tokenDecimals - Token decimals.\n * @param consumerLogger - Custom logger.\n * @returns The metadata ID.\n */\nexport async function processMetaDataDecoded(\n decodedData: any,\n program_id: any = null,\n tokenDecimals: number = 6,\n consumerLogger: any = logger\n): Promise<any> {\n const functionName = 'processMetaDataDecoded';\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Processing decoded metadata',details:{ mint: decodedData.mint },logType:'info'});\n const decimals = program_id === SOLANA_PUMP_FUN_PROGRAM_ID ? 
6 : tokenDecimals;\n const metaId = await processMetaData(decodedData.mint, decodedData, decimals, consumerLogger);\n\n if (!metaId) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata ID returned',details:{ mint: decodedData.mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Metadata processed with ID: ${metaId}`,details:{ mint: decodedData.mint },logType:'info'});\n return metaId;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to process decoded metadata',details:{ mint: decodedData.mint},logType:'error'});\n throw error; // Throwing error here as it's more critical\n }\n}\n\n/**\n * Calls metadata and upserts it with IPFS data.\n * @param mint - The mint address.\n * @param url - Optional RPC URL.\n * @param umi - Optional UMI instance.\n * @param consumerLogger - Custom logger.\n * @returns The upserted metadata.\n */\nexport async function call_and_upsert_metadata(\n mint: string,\n url: any = null,\n umi: any = null,\n consumerLogger = logger\n): Promise<any> {\n const functionName = 'call_and_upsert_metadata';\n const env = loadSolanaEnv()\n const SOLANA_FALLBACK_RPC_URL = env.fallbackRpcUrl\n try {\n url = url || SOLANA_FALLBACK_RPC_URL;\n const { url: url2, umi: umi2 } = await getUrlUmi(url, umi);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata',details:{ mint },logType:'info'});\n let metadata = await callMetaData(mint, url2, umi2, consumerLogger);\n\n if (!metadata) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata fetched',details:{ mint },logType:'warn'});\n return null;\n }\n\n const tokenInfo = convertBigInts(metadata.mint);\n let meta_data = convertBigInts(metadata.metadata);\n const uri = meta_data.uri;\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching IPFS data',details:{ mint},logType:'info'});\n let jsonMetadata = await fetchIPFSData(uri, consumerLogger);\n jsonMetadata = convertBigInts(jsonMetadata);\n\n metadata = { ...jsonMetadata, ...meta_data };\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Upserting metadata',details:{ mint },logType:'info'});\n const upsertedMetaData = await insertMetaData(metadata, uri, mint, tokenInfo, consumerLogger);\n\n if (!upsertedMetaData) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to upsert metadata',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata upserted successfully',details:{ mint },logType:'info'});\n return upsertedMetaData;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Error in metadata fetch and upsert',details:{ mint},logType:'error'});\n return null;\n }\n}","// src/rabbitMq/processors/processLogs.ts\nimport {loadQueueEnv} from './../../../env/queues';\nimport { getLogString,logger,decodeInstructionData, decodeTransactionData,bondingCurveSpec } from './../../db';\nimport { fetchPairByMintAndProgramId, insertPair, appendTcnToPair,upsertTransaction } from './../../schemas';\nimport { processMetaData } from './../processors';\nimport { sendToQueue } from './../producers';\nimport type { TcnData, TransactionItem,PairData } from './../../db';\nimport 
{emptyObjectToNull,extractData,getPubkeyString} from './../../utils';\nimport { broadcastTransaction } from './../core';\n\n\n\nconst FILE_LOCATION = 'src/rabbitMq/processors/processLogs.ts';\nconst PRICE_TOKEN = \"So11111111111111111111111111111111111111112\";\n\n\n/**\n * Processes transaction logs to extract TCNs and handle pair creation/updates.\n * @param txnMsg - The transaction message from WebSocket.\n * @param consumerLogger - Custom logger instance.\n * @returns The processed transaction data or null on critical failure.\n */\nexport async function getTcns(txnMsg: any, consumerLogger: any = logger): Promise<TransactionItem | null> {\n const env = loadQueueEnv()\n const QUEUE_SIGNATURE_CALL = env.signatureCall\n const QUEUE_META_DATA_CALL = env.metaDataCall\n const QUEUE_TRANSACTION_CALL = env.txnCall\n const functionName = 'getTcns';\n const { signature, slot, program_id, parsedLogs, log_id } = txnMsg || {};\n getLogString({file_location:FILE_LOCATION,function_name:'getTcns',message:'txnMsg',details:JSON.stringify(txnMsg),logType:'info',consumerLogger:consumerLogger});\n\n const programIdNormalized:any =\n program_id ? getPubkeyString(program_id) : null;\n\n if (!parsedLogs) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Missing or undefined parsedLogs',details:JSON.stringify(txnMsg),logType:'error',consumerLogger:consumerLogger});\n return null;\n }\n\n const logsArray = Array.isArray(parsedLogs) ? parsedLogs : [parsedLogs];\n const txnData: TransactionItem = {\n signature,\n slot,\n program_id,\n log_id,\n pair_id: null,\n meta_id: null,\n tcns: [],\n };\n\n let invocation = -1;\n let meta_id: any = null;\n let pair_id: any= null;\n\n try {\n \n\n for (const logEntry of logsArray) {\n const { invocationNumber, logs, data } = logEntry || {};\n const processedData = extractData(logs,data);\n if (!logs || !processedData) continue;\n\n const dataStr = typeof processedData === 'string' ? 
processedData : String(processedData);\n if (!dataStr.trim()) continue;\n\n // Transaction (vdt) branch\n if (dataStr.startsWith('vdt')) {\n invocation++;\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Decoding transaction data',details:{ signature},logType:'debug',consumerLogger:consumerLogger});\n const decodedData: TcnData = await decodeTransactionData(dataStr, null, { invocation });\n txnData.tcns.push(decodedData);\n const mint:any = decodedData.mint;\n\n pair_id = await fetchPairByMintAndProgramId(mint, programIdNormalized, consumerLogger);\n pair_id = emptyObjectToNull(pair_id);\n\n if (!meta_id) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Processing metadata for transaction',details:{ mint },logType:'info',consumerLogger:consumerLogger});\n meta_id = await processMetaData(mint, decodedData, 6, consumerLogger);\n meta_id = emptyObjectToNull(meta_id);\n }\n\n if (!pair_id) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Creating new pair for transaction',details:{ mint },logType:'info',consumerLogger:consumerLogger});\n const curves = await bondingCurveSpec(mint, programIdNormalized, null, null);\n const { user_address } = decodedData;\n \n\n const pairData: PairData = {\n signature,\n program_id: programIdNormalized,\n mint,\n price_token: PRICE_TOKEN,\n user_address,\n bonding_curve: curves.bonding_curve,\n associated_bonding_curve: curves.associated_bonding_curve,\n meta_id,\n log_id,\n };\n\n\n pair_id = await insertPair(pairData, consumerLogger);\n if (pair_id) {\n const payloadA = { mint, pair_id, program_id:programIdNormalized };\n const payloadB = { params: [mint] };\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Sending transaction pair to queues',details:{ mint},logType:'info',consumerLogger:consumerLogger});\n await Promise.all([\n sendToQueue(QUEUE_TRANSACTION_CALL, payloadA),\n sendToQueue(QUEUE_META_DATA_CALL, payloadB),\n ]);\n } else {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to insert new pair',details:{ mint },logType:'warn',consumerLogger:consumerLogger});\n }\n }\n\n // Pair creation branch\n } else if (logs.length > 0 && logs[0].includes('Instruction: Create')) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Processing pair creation',details:{ signature },logType:'info',consumerLogger:consumerLogger});\n const decodedData:any = decodeInstructionData(dataStr);\n const { mint, user_address, bonding_curve } = decodedData;\n \n const curves = await bondingCurveSpec(mint, programIdNormalized, bonding_curve, null);\n meta_id = await processMetaData(mint, decodedData, 6, consumerLogger);\n\n const pairData: PairData = {\n signature,\n program_id:programIdNormalized,\n mint,\n price_token: PRICE_TOKEN,\n user_address,\n bonding_curve,\n associated_bonding_curve: curves.associated_bonding_curve,\n meta_id,\n log_id,\n };\n if (!pairData.bonding_curve) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Refusing to insert pair without bonding_curve',details:pairData,logType:'error',consumerLogger:consumerLogger});\n throw new Error('bonding_curve is required');\n }\n\n pair_id = await insertPair(pairData, consumerLogger);\n if (pair_id && user_address) {\n const payloadA = { params: [user_address, { until: signature || null, limit: 1000 }] };\n const payloadB = { params: [mint] };\n 
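// Editor's note: the sendToQueue fan-out just below runs under Promise.all, so one
// rejected publish rejects the whole batch. A hedged alternative sketch (fanOut is a
// hypothetical helper, not part of this package) that records per-queue failures
// and lets the remaining publishes stand:
async function fanOut(batch: Array<[string, any]>): Promise<void> {
  const results = await Promise.allSettled(
    batch.map(([queue, payload]) => sendToQueue(queue, payload))
  );
  results.forEach((r, i) => {
    if (r.status === 'rejected') {
      // Log and move on; the other queues already received their payloads.
      consumerLogger?.warn?.(`publish to ${batch[i][0]} failed: ${r.reason}`);
    }
  });
}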
getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Sending pair creation to queues',details:{ mint},logType:'info',consumerLogger:consumerLogger});\n await Promise.all([\n sendToQueue(QUEUE_SIGNATURE_CALL, payloadA),\n sendToQueue(QUEUE_META_DATA_CALL, payloadB),\n ]);\n }\n }\n\n // Update transaction data\n txnData.log_id = emptyObjectToNull(txnData.log_id) || log_id;\n txnData.pair_id = emptyObjectToNull(txnData.pair_id) || pair_id;\n txnData.meta_id = emptyObjectToNull(txnData.meta_id) || meta_id;\n if (txnData.pair_id) {\n await appendTcnToPair(txnData.pair_id, signature, consumerLogger);\n }\n }\n \n if (invocation > -1) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Upserting transaction',details:{ signature},logType:'info',consumerLogger:consumerLogger});\n const txn_id = await upsertTransaction(txnData);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Transaction processed with ID: ${txn_id}`,details:{ signature },logType:'info',consumerLogger:consumerLogger});\n }\n\n return txnData;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Error processing transaction logs',details:{ signature},logType:'error',consumerLogger:consumerLogger});\n return txnData; // Return partial data for debugging\n }\n}\n\n/**\n * Entry point for processing transactions from WebSocket.\n * @param pairMsg - The transaction message.\n * @param consumerLogger - Custom logger instance.\n */\nexport async function processTxns(pairMsg: any, consumerLogger: any = logger): Promise<void> {\n const functionName = 'processTxns';\n\n if (!pairMsg) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No transaction message provided',details:null,logType:'error',consumerLogger:consumerLogger});\n return;\n }\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Starting transaction processing',details:{ signature: pairMsg.signature },logType:'info',consumerLogger:consumerLogger});\n const result = await getTcns(pairMsg, consumerLogger);\n if (!result) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No transaction data returned',details:{ signature: pairMsg.signature },logType:'warn',consumerLogger:consumerLogger});\n } else {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Transaction processing completed',details:{ signature: pairMsg.signature },logType:'info',consumerLogger:consumerLogger});\n // Broadcast to WebSocket clients\n broadcastTransaction(result);\n }\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to process transaction',details:{ signature: pairMsg.signature},logType:'error',consumerLogger:consumerLogger});\n throw error;\n }\n}","import { loadEnvConfig } from \"../../../env\"; // NEW\nimport { startConsumer, initRabbitMQ } from \"./../core\";\nimport { startPumpFunMonitor } from \"./../solana\";\nimport { processParsedLogs, processTxns } from \"./../processors\";\nimport { setupIndexes } from \"./../../schemas\";\nimport { getLogString, logger } from \"./../../db\";\n\nexport const startAllServices = async (): Promise<void> => {\n const fn = \"startAllServices\";\n\n // 🔐 0. 
Bootstrap + freeze env ONCE\n const env = loadEnvConfig();\n\n while (true) {\n try {\n getLogString({\n function_name: fn,\n message: \"Starting services\",\n details: null,\n file_location: null,\n logType: \"info\",\n });\n\n // 1. Ensure DB is ready\n await setupIndexes(logger);\n\n // 2. Initialize RabbitMQ\n await initRabbitMQ(\n [env.queues.signatureCall, env.queues.metaDataCall],\n [env.queues.logIntake, env.queues.txnEntry]\n );\n\n // 3. Solana ingest\n startPumpFunMonitor();\n\n // 4. Consumers\n startConsumer(env.queues.logIntake, processParsedLogs);\n startConsumer(env.queues.txnEntry, processTxns);\n\n break;\n } catch (err: any) {\n getLogString({\n function_name: fn,\n message: \"Startup error - retrying\",\n details: err?.message,\n file_location: null,\n logType: \"error\",\n });\n\n await new Promise((r) => setTimeout(r, 5000));\n }\n }\n};\n","\nimport { _,poolQuery,path,fs,withTransaction} from './../../db';\nimport {getDirname} from './../../utils';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\ninterface SignatureEntry {\n signature: string;\n}\n\ntype SignatureToAccountMap = { [signature: string]: number };\ntype WalletAccountMap = { [address: string]: number };\n\nexport async function createOrAlterTable(): Promise<void> {\n const query = queries.createOrAlterTable\n await poolQuery(query);\n}\n\nexport async function fetchExistingAccounts(): Promise<{[address: string]: number}> {\n const res = await poolQuery(queries.fetchExistingAccounts);\n const result: {[address: string]: number} = {};\n for (const row of res.rows) {\n result[row.address] = row.assigned_account;\n }\n return result;\n}\n\nexport function updateSignatureToAccountMap(\n signatureToAccount: SignatureToAccountMap,\n wallets: Array<[string, Set<string>]>,\n walletAccountMap: WalletAccountMap,\n nextAccountNumber: number\n): { walletAccountMap: WalletAccountMap; nextAccountNumber: number } {\n for (const [wAddr, wSigs] of wallets) {\n const candidateAccounts = new Set<number>();\n for (const s of wSigs) {\n if (signatureToAccount[s] !== undefined) {\n candidateAccounts.add(signatureToAccount[s]);\n }\n }\n\n let accNum: number;\n if (candidateAccounts.size === 0) {\n accNum = nextAccountNumber;\n nextAccountNumber += 1;\n walletAccountMap[wAddr] = accNum;\n for (const s of wSigs) {\n signatureToAccount[s] = accNum;\n }\n } else {\n accNum = Math.min(...candidateAccounts);\n walletAccountMap[wAddr] = accNum;\n for (const s of wSigs) {\n signatureToAccount[s] = accNum;\n }\n }\n }\n return { walletAccountMap, nextAccountNumber };\n}\n\n/* ------------------------------------------------------------------ */\n/* DB mutation (NO TRANSACTION LOGIC) */\n/* ------------------------------------------------------------------ */\n\nexport async function upsertWalletAssignments(\n client: any,\n walletAccountMap: WalletAccountMap\n): Promise<void> {\n const query = queries.upsertWalletAssignments;\n\n for (const [w_addr, acc_num] of Object.entries(walletAccountMap)) {\n await client.query(query, [w_addr, acc_num, w_addr]);\n }\n}\n\n\nexport async function processSingleAddress(\n address: string,\n signatures: string[]\n): Promise<void> {\n await createOrAlterTable();\n\n await withTransaction(async (client:any) => {\n const existingAccounts = await fetchExistingAccounts();\n\n const signatureToAccount: SignatureToAccountMap = {};\n for (const [addr, acc] of 
Object.entries(existingAccounts)) {\n for (const sig of addr.split(\",\")) {\n signatureToAccount[sig] = acc;\n }\n }\n\n let walletAccountMap: WalletAccountMap = {};\n let nextAccountNumber =\n Object.values(existingAccounts).length > 0\n ? Math.max(...Object.values(existingAccounts)) + 1\n : 1;\n\n const wallets: Array<[string, Set<string>]> = [\n [address, new Set(signatures)],\n ];\n\n ({ walletAccountMap, nextAccountNumber } =\n updateSignatureToAccountMap(\n signatureToAccount,\n wallets,\n walletAccountMap,\n nextAccountNumber\n ));\n\n await upsertWalletAssignments(client, walletAccountMap);\n });\n}\nexport async function processAllAddresses(): Promise<void> {\n await createOrAlterTable();\n\n const res = await poolQuery(queries.processAllAddresses);\n\n const wallets: Array<[string, Set<string>]> = [];\n\n for (const row of res.rows) {\n if (!Array.isArray(row.signatures)) continue;\n\n const sigs = new Set<string>();\n for (const entry of row.signatures as SignatureEntry[]) {\n if (entry?.signature) sigs.add(entry.signature);\n }\n\n wallets.push([row.user_address, sigs]);\n }\n\n await withTransaction(async (client) => {\n const existingAccounts = await fetchExistingAccounts();\n\n const signatureToAccount: SignatureToAccountMap = {};\n for (const [addr, acc] of Object.entries(existingAccounts)) {\n for (const sig of addr.split(\",\")) {\n signatureToAccount[sig] = acc;\n }\n }\n\n let walletAccountMap: WalletAccountMap = {};\n let nextAccountNumber =\n Object.values(existingAccounts).length > 0\n ? Math.max(...Object.values(existingAccounts)) + 1\n : 1;\n\n ({ walletAccountMap, nextAccountNumber } =\n updateSignatureToAccountMap(\n signatureToAccount,\n wallets,\n walletAccountMap,\n nextAccountNumber\n ));\n\n await upsertWalletAssignments(client, walletAccountMap);\n });\n}","\nimport {getDirname} from './../../utils';\nimport { _ ,poolQuery,logger,QueryResult,path,fs} from './../../db';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\n\n/**\n * --- Database Functions for Transactions ---\n */\n\n/**\n * Creates the transactions table if it doesn't exist.\n */\nconst getTransactionsTableCreation = queries.getTransactionsTableCreation\n\nexport const createGetTransactionsTable = async (): Promise<void> => {\n try {\n await poolQuery(getTransactionsTableCreation);\n logger.info('✅ Transactions table has been created successfully.');\n } catch (error) {\n logger.error('❌ Error creating transactions table:', error);\n throw error;\n }\n};\n\ninterface TransactionRecord {\n id: number;\n signature: string;\n slot: number | null;\n signatures: string[];\n transaction: object;\n updated_at: Date;\n}\n\nexport const upsertGetTransaction = async (\n signature: string,\n slot: number | null,\n signatures: string[] = [],\n transaction: object\n): Promise<void> => {\n const upsertQuery = queries.upsertGetTransaction\n try {\n if (!signatures.includes(signature)) {\n signatures.push(signature);\n }\n await poolQuery(upsertQuery, [signature, slot, signatures, transaction]);\n logger.info(`✅ Upserted transaction with signature: ${signature}`);\n } catch (error) {\n logger.error(`❌ Error upserting transaction with signature ${signature}:`, error);\n throw error;\n }\n};\n\nexport const searchTransactionBySignature = async (\n searchSignature: string\n): Promise<TransactionRecord | null> => {\n const searchQuery = queries.searchTransactionBySignature\n try {\n 
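// Editor's note: a hedged usage sketch for upsertGetTransaction above; the signature,
// slot and payload values are illustrative placeholders, not real data:
async function exampleUpsertGetTransaction(): Promise<void> {
  await upsertGetTransaction(
    'ExampleSignature111',   // primary signature (placeholder)
    123456789,               // slot (placeholder)
    [],                      // related signatures; the primary one is appended if missing
    { meta: null }           // raw transaction object, persisted as JSON
  );
}
// Note that the helper push()es the primary signature into the array it is given,
// so a caller passing a shared array will observe that mutation.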
const result: QueryResult<TransactionRecord> = await poolQuery(searchQuery, [searchSignature]);\n if (result.rows.length > 0) {\n logger.info(`🔍 Transaction found for signature: ${searchSignature}`);\n return result.rows[0];\n } else {\n logger.info(`🔍 No transaction found for signature: ${searchSignature}`);\n return null;\n }\n } catch (error) {\n logger.error(`❌ Error searching for transaction with signature ${searchSignature}:`, error);\n throw error;\n }\n};\n\nexport const searchTransactionById = async (\n id: string | number\n): Promise<TransactionRecord | null> => {\n const searchQuery = queries.searchTransactionById\n try {\n const result: QueryResult<TransactionRecord> = await poolQuery(searchQuery, [id]);\n if (result.rows.length > 0) {\n logger.info(`🔍 Transaction found for id: ${id}`);\n return result.rows[0];\n } else {\n logger.info(`🔍 No transaction found for id: ${id}`);\n return null;\n }\n } catch (error) {\n logger.error(`❌ Error searching for transaction with id ${id}:`, error);\n throw error;\n }\n};\n\n","// logsDb.ts\n\n\nimport type {LogData} from './../../db';\nimport { Joi,logger,QueryResult,poolQuery,path,fs,endPool} from './../../db';\nimport {getDirname} from './../../utils';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\nconst all_signatures = []\n/**\n * SQL query to create the logData table.\n */\n\nconst logDataTableCreation = queries.logDataTableCreation;\n\n\nconst logsSchema = Joi.object({\n // Define the expected structure of your logs\n message: Joi.string().required(),\n // Add other fields as necessary\n});\n\n\n\n/**\n * Function to create the logData table.\n */\nexport const createLogDataTable = async (): Promise<void> => {\n try {\n await poolQuery(logDataTableCreation);\n console.log('logData table has been created successfully.');\n } catch (error) {\n console.error('Error creating logData table:', error);\n throw error;\n }\n};\n\n/**\n * Function to insert a new log entry.\n * Implements Upsert to handle duplicate signatures gracefully.\n * @param signature - The unique signature of the log.\n * @param slot - The slot number.\n * @param program_id - Program ID associated with the entry.\n * @param logs - The log data in JSON format.\n * @returns The inserted or existing log entry ID.\n */\nexport const insertLogEntry = async (\n signature: string,\n slot: number,\n program_id: string,\n logs: object,\n signatures?: string[] // Optional parameter\n): Promise<number> => {\n try {\n const query = queries.insertLogEntry;\n const values = [signature, slot, program_id, JSON.stringify(logs), signatures || null];\n const result = await poolQuery(query, values);\n return result.rows[0].id;\n } catch (error: any) {\n if (error.code === '23505') {\n logger.error('Duplicate key error:', error);\n const logEntry = await fetchLogEntryBySignature(signature);\n if (logEntry?.rows && logEntry.rows.length > 0) {\n return logEntry.rows[0].id;\n } else {\n throw new Error('Duplicate key error but no existing log entry found.');\n }\n } else {\n logger.error('Error inserting/upserting log entry:', error);\n throw error;\n }\n }\n};\n\n/**\n * Function to update a log entry by its signature.\n * @param signature - The unique signature of the log to update.\n * @param slot - The slot number.\n * @param logs - The updated log data in JSON format.\n * @param program_id - Program ID associated with the entry.\n * @param pair_id - The ID of the pair.\n * @param txn_id - The ID of the transaction.\n * @param signatures - Optional list of related signatures.\n * @returns The updated log entry.\n 
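 *
 * @example
 * // hedged usage sketch; the signature, slot and id values are illustrative placeholders
 * await updateLogEntry('ExampleSignature111', 123456789, { logs: [] }, 'ExampleProgramId', 42, 7);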
*/\nexport const updateLogEntry = async (\n signature: string,\n slot: number,\n logs: object,\n program_id: string,\n pair_id: number,\n txn_id: number,\n signatures?: string[]\n): Promise<QueryResult> => {\n try {\n const query = queries.updateLogEntry;\n const values = [signature, slot, JSON.stringify(logs), program_id, pair_id, txn_id, signatures || null];\n const result = await poolQuery(query, values);\n return result;\n } catch (error) {\n console.error('Error updating log entry:', error);\n throw error;\n }\n};\n\n/**\n * Function to fetch a log entry by its signature.\n * @param signature - The unique signature of the log.\n * @returns The log entry.\n */\nexport const fetchLogEntryBySignature = async (signature: string): Promise<QueryResult> => {\n try {\n const query = queries.fetchLogEntryBySignature;\n const values = [signature];\n const result = await poolQuery(query, values);\n return result;\n } catch (error) {\n console.error('Error fetching log entry:', error);\n throw error;\n }\n};\n/**\n * Fetches a log entry by its ID.\n * @param id - The ID of the log entry.\n * @param filters - Optional filters (like `{ sorted: false }`); accepted for API compatibility but not currently applied to the query.\n * @returns The log entry if it exists, otherwise returns null.\n */\nexport const fetchLogEntryById = async (\n id: number,\n filters: { sorted?: boolean } = {}\n): Promise<LogData | null> => {\n try {\n // `filters.sorted` has no effect yet: queries.fetchLogEntryById is used either way.\n const query = queries.fetchLogEntryById;\n const values = [id];\n const result: QueryResult = await poolQuery(query, values);\n\n if (result?.rows && result.rows.length > 0) {\n console.log('✅ Log entry fetched successfully:', result.rows[0]);\n return result.rows[0] as LogData;\n } else {\n console.log('⚠️ No log entry found for ID:', id);\n return null;\n }\n } catch (error) {\n console.error('❌ Error fetching log entry:', error);\n throw error;\n }\n};\n","\nimport { poolQuery, getLogString,logger,path,fs} from './../../db';\nimport {getDirname} from './../../utils';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\nconst FILE_LOCATION = 'src/dbConfigs/schemas/main';\n\n/**\n * Tallies TCNs (assuming this is a standalone operation).\n */\nasync function tally_tcns(consumerLogger: any = logger): Promise<void> {\n const functionName = 'tally_tcns';\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Tallying TCNs',details:null,logType:'info',consumerLogger:consumerLogger});\n await poolQuery(queries.tally_tcns);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'TCNs tallied successfully',details:null,logType:'info',consumerLogger:consumerLogger});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to tally TCNs',details:{ error: error.message },logType:'error',consumerLogger:consumerLogger});\n throw error;\n }\n}\n\n/**\n * Sets up indexes for the transactions, pairs and metadata tables.\n */\nexport const setupIndexes = async (consumerLogger: any = logger): Promise<void> => {\n const functionName = 'setupIndexes';\n // Define index queries by category\n const querie = queries.setupIndexes\n const index_queries_batch = {\n 
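// Editor's note: the entries below are read from queries.json, which is not shown in
// this diff. Assuming standard Postgres DDL, each is presumably of the shape:
//   CREATE INDEX IF NOT EXISTS idx_transactions_pair_id ON transactions (pair_id);
// i.e. idempotent statements that are safe to re-run on every startup.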
transactions: [\n querie.transactions_pair_id,\n querie.transactions_program_id,\n querie.transactions_updated_at,\n querie.transactions_tcns_gin,\n ],\n pairs: [\n querie.pairs_program_id,\n querie.pairs_signature,\n ],\n metaData: [\n querie.metadata_id,\n ],\n };\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Starting index setup',details:null,logType:'info',consumerLogger:consumerLogger});\n\n for (const [key, indexQueries] of Object.entries(index_queries_batch)) {\n for (const indexQuery of indexQueries) {\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Creating index for ${key}`,details:{ query: indexQuery.slice(0)},logType:'info',consumerLogger:consumerLogger});\n await poolQuery(indexQuery);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Index created successfully for ${key}`,details:null,logType:'info',consumerLogger:consumerLogger});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Failed to create index for ${key}`,details:{ error: error.message },logType:'error',consumerLogger:consumerLogger});\n throw error; // Re-throw to halt execution on failure\n }\n }\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'All indexes setup completed',details:null,logType:'info',consumerLogger:consumerLogger});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Error during index setup',details:{ error: error.message },logType:'error',consumerLogger:consumerLogger});\n throw error; // Ensure caller can handle the error\n }\n};","// metaDataDb.ts\n\nimport type {TokenMetadata} from './../../db';\nimport {convertBigInts,fetchIPFSData,getDirname} from './../../utils';\nimport { QueryResult, poolQuery,getLogString,extractRow,logger,path,fs,extractId} from './../../db';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\n\n\n/**\n * SQL query to create the metadata table.\n */\nconst metaDataTableCreation =queries.metaDataTableCreation;\n\nconst FILE_LOCATION = 'src/dbConfigs/metaDataDb.ts'; // Adjust based on actual file location\n\n/**\n * Inserts metadata into the database with retry logic for deadlocks.\n * @param data - The metadata to insert.\n * @returns The inserted ID.\n */\nexport const insertMetaDataDb = async (data: TokenMetadata): Promise<number> => {\n const MAX_RETRIES = 3;\n const RETRY_DELAY = 500; // milliseconds\n let retries = MAX_RETRIES;\n\n const values = [\n data.mint || null,\n data.discriminator || null,\n data.name || null,\n data.symbol || null,\n data.uri || null,\n data.decimals || null,\n data.image || null,\n data.mintAuthority || null,\n data.freezeAuthority || null,\n data.twitter || null,\n data.website || null,\n data.isMutable ?? null,\n data.primarySaleHappened ?? null,\n data.description || null,\n JSON.stringify(data.meta_data || {}),\n data.creation_date ?? null,\n data.tokeninfo ?? null,\n data.processed ?? 
false\n ];\n\n while (retries > 0) {\n try {\n const result: QueryResult = await poolQuery(queries.insertMetaDataDb, values);\n const insertedId = result.rows[0]?.id;\n getLogString({file_location:FILE_LOCATION,function_name:'insertMetaDataDb',message:`Successfully inserted metadata with ID: ${insertedId}`,details:{ mint: data.mint },logType:'info'});\n return insertedId;\n } catch (error: any) {\n if (error.message.includes('deadlock detected') && retries > 1) {\n retries--;\n getLogString({file_location:FILE_LOCATION,function_name:'insertMetaDataDb',message:`Deadlock detected, retrying (${MAX_RETRIES - retries}/${MAX_RETRIES})`,details:{ mint: data.mint },logType:'warn'});\n await new Promise(resolve => setTimeout(resolve, RETRY_DELAY));\n } else {\n getLogString({file_location:FILE_LOCATION,function_name:'insertMetaDataDb',message:'Failed to insert metadata',details:{ mint: data.mint},logType:'error'});\n throw error;\n }\n }\n }\n getLogString({file_location:FILE_LOCATION,function_name:'insertMetaDataDb',message:'Exhausted retries for metadata insertion',details:{ mint: data.mint },logType:'error'});\n throw new Error('Failed to insert metadata after retries');\n};\n\n/**\n * Inserts or updates metadata, fetching from IPFS if needed.\n * @param metaData - Initial metadata object.\n * @param uri - URI for IPFS fetch.\n * @param mint - Mint address.\n * @param tokeninfo - Additional token info.\n * @param consumerLogger - Custom logger.\n * @returns Processed metadata or null/error.\n */\nexport async function insertMetaData(\n metaData: any = null,\n uri: string | null = null,\n mint: any = null,\n tokeninfo: any = null,\n consumerLogger: any = logger\n): Promise<any> {\n const functionName = 'insertMetaData';\n\n metaData = metaData || {};\n let jsonMetadata = {};\n\n // Step 1: Resolve URI\n try {\n uri = uri || metaData?.uri || metaData?.metadata?.uri;\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Resolving URI',details:{ mint},logType:'debug'});\n if (!metaData && !uri) {\n const warnMsg = !metaData ? 
'No metadata provided' : `No URI provided for mint: ${mint}`;\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:warnMsg,details:null,logType:'warn'});\n return null;\n }\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to resolve URI',details:{ mint },logType:'error'});\n return null;\n }\n\n // Step 2: Fetch metadata from IPFS\n if (uri) {\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching IPFS data',details:{ uri},logType:'info'});\n jsonMetadata = await fetchIPFSData(uri, consumerLogger);\n if (!jsonMetadata) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No IPFS data returned',details:{uri},logType:'warn'});\n jsonMetadata = {};\n }\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch IPFS data',details:{ uri,error: error.message },logType:'error'});\n jsonMetadata = {};\n }\n }\n\n // Step 3: Merge metadata\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Merging metadata',details:{ mint },logType:'debug'});\n metaData = { ...metaData, ...jsonMetadata };\n if (!Object.keys(metaData).length) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Merged metadata is empty',details:{ mint},logType:'warn'});\n return null;\n }\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to merge metadata',details:{ mint,error: error.message },logType:'error'});\n return null;\n }\n\n // Step 4: DB insertion\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Preparing database insertion',details:{ mint },logType:'info'});\n metaData = convertBigInts(metaData);\n const dbMetaData: TokenMetadata = {\n mint: metaData.metadata?.mint || mint || null,\n name: metaData?.name || null,\n symbol: metaData?.symbol || null,\n uri: uri || metaData?.uri || metaData?.metadata?.uri || null,\n decimals: metaData.mint?.decimals ?? 6,\n image: metaData?.image || null,\n mintAuthority: metaData.mint?.mintAuthority || null,\n freezeAuthority: metaData.mint?.freezeAuthority || null,\n twitter: metaData?.twitter || null,\n website: metaData?.website || null,\n isMutable: metaData.metadata?.isMutable ?? false,\n primarySaleHappened: metaData.metadata?.primarySaleHappened ?? 
false,\n description: metaData?.description || null,\n meta_data: metaData || {},\n creation_date: new Date().toISOString(),\n discriminator: metaData?.discriminator || 'default_discriminator',\n processed: true,\n tokeninfo: tokeninfo\n };\n\n let metaId = await getMetadataIdFromMint(mint);\n if (!metaId) {\n metaId = await insertMetaDataDb(dbMetaData);\n } else {\n await updateMetaDataById(metaId, dbMetaData);\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Successfully processed metadata with ID: ${metaId}`,details:{ mint },logType:'info'});\n return dbMetaData; // Return the processed metadata\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to insert metadata into DB',details:{ mint, error:error.message },logType:'error'});\n throw error; // Rethrow for caller to handle\n }\n};\n\n/**\n * Creates the metadata table if it doesn't exist.\n * @param consumerLogger - Custom logger instance.\n */\nexport const createMetaDataTable = async (consumerLogger: any = logger): Promise<void> => {\n const functionName = 'createMetaDataTable';\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Attempting to create metadata table',details:null,logType:'info'});\n await poolQuery(queries.metaDataTableCreation);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata table created successfully',details:null,logType:'info'});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to create metadata table',details:{ error: error.message },logType:'error'});\n throw error;\n }\n};\n\n\n/**\n * Updates a metadata entry in the metadata table by ID.\n * @param id - The ID of the metadata entry to update.\n * @param fields - An object containing the fields to update.\n * @param consumerLogger - Custom logger instance.\n * @returns The updated metadata entry.\n */\nexport const updateMetaDataById = async (\n id: number,\n fields: Partial<{\n name: string | null;\n symbol: string | null;\n uri: string | null;\n decimals: number | null;\n image: string | null;\n mintAuthority: string | null;\n freezeAuthority: string | null;\n twitter: string | null;\n website: string | null;\n isMutable: boolean | null;\n primarySaleHappened: boolean | null;\n description: string | null;\n meta_data: Record<string, any> | null;\n discriminator: string | null;\n creation_date: string | null;\n tokeninfo: any;\n processed: boolean | null;\n }>,\n consumerLogger: any = logger\n): Promise<any> => {\n const functionName = 'updateMetaDataById';\n \n try {\n const setClauses = Object.entries(fields)\n .filter(([_, value]) => value !== undefined) // Skip undefined values\n .map(([key], index) => `${key} = $${index + 2}`)\n .join(', ');\n\n if (!setClauses) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No fields to update',details:{ id },logType:'warn'});\n return null;\n }\n\n const values = [id, ...Object.values(fields).filter(v => v !== undefined)];\n const query = `UPDATE metadata SET ${setClauses}, updated_at = NOW() WHERE id = $1 RETURNING *;`;\n\n const result = await poolQuery(query, values);\n const row = extractRow(result);\n\n if (!row) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found to update',details:{ id },logType:'warn'});\n return null;\n }\n\n 
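// Editor's note: a worked example of the SET-clause builder above. For
// fields = { name: 'Foo', processed: true } it generates:
//   UPDATE metadata SET name = $2, processed = $3, updated_at = NOW()
//   WHERE id = $1 RETURNING *;
// with values = [id, 'Foo', true]; undefined entries are filtered out of both
// the SET list and the value array, keeping the $n placeholders aligned.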
getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata updated successfully',details:{ id },logType:'info'});\n return row;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to update metadata',details:{ id},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Inserts or updates token metadata in the database.\n * @param data - The metadata to upsert.\n * @param consumerLogger - Custom logger instance.\n */\nexport async function upsertTokenMetadata(data: any, consumerLogger: any = logger): Promise<void> {\n const functionName = 'upsertTokenMetadata';\n const query = queries.upsertTokenMetadata;\n const values = [\n data.mint,\n data.discriminator,\n data.name,\n data.symbol,\n data.uri,\n JSON.stringify(data.meta_data),\n ];\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Attempting to upsert metadata',details:{ mint: data.mint },logType:'info'});\n await poolQuery(query, values);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata upserted successfully',details:{ mint: data.mint },logType:'info'});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to upsert metadata',details:{ mint: data.mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Sets the processed flag for metadata by mint.\n * @param mint - The mint address.\n * @param consumerLogger - Custom logger instance.\n * @returns The ID of the updated row or null if not found.\n */\nexport async function setMetaDataProcessed(mint: string, consumerLogger: any = logger): Promise<number | null> {\n const functionName = 'setMetaDataProcessed';\n const query = queries.setMetaDataProcessed;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Setting metadata processed',details:{ mint },logType:'info'});\n const result = await poolQuery(query, [mint]);\n const id = extractId(result);\n\n if (!id) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found to set processed',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata marked as processed',details:{ mint},logType:'info'});\n return id;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to set metadata processed',details:{ mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Checks if metadata is processed by mint.\n * @param mint - The mint address.\n * @param consumerLogger - Custom logger instance.\n * @returns The processed status or null if not found.\n */\nexport async function checkMetaDataProcessed(mint: string, consumerLogger: any = logger): Promise<boolean | null> {\n const functionName = 'checkMetaDataProcessed';\n const query = queries.checkMetaDataProcessed;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Checking metadata processed status',details:{ mint },logType:'debug'});\n const result = await poolQuery(query, [mint]);\n\n if (result.rows.length === 0) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found',details:{ mint },logType:'warn'});\n return null;\n }\n\n const processed = result.rows[0].processed;\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Metadata processed status: 
${processed}`,details:{ mint },logType:'info'});\n return processed;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to check metadata processed status',details:{ mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Fetches metadata by mint address.\n * @param mint - The mint address.\n * @param consumerLogger - Custom logger instance.\n * @returns The metadata object or null if not found.\n */\nexport async function getMetadataRowFromDb(mint: string, consumerLogger: any = logger): Promise<Record<string, any> | null> {\n const functionName = 'getMetadataRowFromDb';\n const query = queries.getMetadataRowFromDb;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata',details:{ mint },logType:'debug'});\n const result: QueryResult = await poolQuery(query, [mint]);\n const row = extractRow(result);\n\n if (!row) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata fetched successfully',details:{ mint },logType:'info'});\n return row;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata',details:{ mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Fetches metadata by mint address (alternative version).\n * @param mint - The mint address.\n * @param consumerLogger - Custom logger instance.\n * @returns The metadata object or null if not found.\n */\nexport async function getMetadataFromDb(mint: string, consumerLogger: any = logger): Promise<Record<string, any> | null> {\n const functionName = 'getMetadataFromDb';\n const query = queries.getMetadataFromDb;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata',details:{ mint },logType:'debug'});\n const result = await poolQuery(query, [mint]);\n const row = extractRow(result);\n\n if (!row) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata fetched successfully',details:{ mint },logType:'info'});\n return row;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata',details:{ mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Fetches metadata ID by mint address.\n * @param mint - The mint address.\n * @param consumerLogger - Custom logger instance.\n * @returns The metadata ID or null if not found.\n */\nexport async function getMetadataIdFromMint(mint: string, consumerLogger: any = logger): Promise<number | null> {\n const functionName = 'getMetadataIdFromMint';\n const query = queries.getMetadataIdFromMint;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata ID',details:{ mint },logType:'debug'});\n const result = await poolQuery(query, [mint]);\n const id = extractId(result);\n\n if (!id) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata ID found',details:{ mint },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata ID fetched successfully',details:{ 
mint},logType:'info'});\n return id;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata ID',details:{ mint},logType:'error'});\n throw error;\n }\n};\n\n/**\n * Fetches metadata by ID.\n * @param meta_id - The metadata ID.\n * @param consumerLogger - Custom logger instance.\n * @returns The metadata object or null if not found.\n */\nexport async function getMetaDataFromId(meta_id: string, consumerLogger: any = logger): Promise<Record<string, any> | null> {\n const functionName = 'getMetaDataFromId';\n const query = queries.getMetaDataFromId;\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Fetching metadata by ID',details:{ meta_id },logType:'debug'});\n const result = await poolQuery(query, [meta_id]);\n const row = extractRow(result);\n\n if (!row) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No metadata found for ID',details:{ meta_id },logType:'warn'});\n return null;\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Metadata fetched successfully',details:{ meta_id },logType:'info'});\n return row;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to fetch metadata by ID',details:{ meta_id},logType:'error'});\n throw error;\n }\n};\n\nconst insertBatchMetaData = async (dataArray: TokenMetadata[]): Promise<number[]> => {\n const query = queries.insertBatchMetaData;\n\n const values = [\n dataArray.map((d) => d.mint),\n dataArray.map((d) => d.discriminator),\n dataArray.map((d) => d.name),\n dataArray.map((d) => d.symbol),\n dataArray.map((d) => d.uri),\n dataArray.map((d) => d.decimals),\n dataArray.map((d) => d.image),\n dataArray.map((d) => d.mintAuthority),\n dataArray.map((d) => d.freezeAuthority),\n dataArray.map((d) => d.twitter),\n dataArray.map((d) => d.website),\n dataArray.map((d) => d.isMutable),\n dataArray.map((d) => d.primarySaleHappened),\n dataArray.map((d) => JSON.stringify(d.meta_data || {})),\n dataArray.map((d) => d.creation_date ? 
new Date(d.creation_date) : null),\n new Array(dataArray.length).fill(new Date()), // updated_at\n ];\n\n const result = await poolQuery(query, values);\n return result.rows.map((row:any) => row.id);\n};\nexport async function createMetaData2(){\n const query = queries.createMetaData2;\n await poolQuery(query);\n}\n","// src/dbConfigs/schemas/pairsDb.ts\nimport { SOLANA_PUMP_FUN_PROGRAM_ID,QueryResult,logger,extractId, poolQuery, getLogString,path,fs,endPool} from './../../db';\nimport {getDirname,stripQuotes} from './../../utils';\nconst __dirname = getDirname(import.meta.url);\nconst query_js = JSON.parse(\n fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')\n);\nconst queries = query_js.queries\n\n\nconst FILE_LOCATION = 'src/dbConfigs/schemas/pairsDb.ts';\n\n/**\n * Creates the pairs table if it doesn't exist.\n * @param consumerLogger - Custom logger instance.\n */\nexport const createPairsTable = async (consumerLogger: any = logger): Promise<void> => {\n const functionName = 'createPairsTable';\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Attempting to create pairs table',details:null,logType:'info',consumerLogger:consumerLogger});\n await poolQuery(queries.pairsTableCreation);\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Pairs table created successfully',details:null,logType:'info',consumerLogger:consumerLogger});\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to create pairs table',details:{ error: error.message },logType:'error',consumerLogger:consumerLogger});\n throw error;\n }\n};\n\n/**\n * Upserts a pair into the pairs table.\n * @param pairsData - The pair data to upsert.\n * @param consumerLogger - Custom logger instance.\n * @returns The upserted pair ID.\n */\nexport const upsertPair = async (pairsData: any, consumerLogger: any = logger): Promise<number> => {\n const functionName = 'upsertPair';\n const query = queries.upsertPair;\n\n const values = [\n stripQuotes(pairsData.signature),\n stripQuotes(pairsData.program_id || SOLANA_PUMP_FUN_PROGRAM_ID),\n stripQuotes(pairsData.mint),\n stripQuotes(pairsData.price_token),\n stripQuotes(pairsData.user_address),\n stripQuotes(pairsData.bonding_curve),\n stripQuotes(pairsData.associated_bonding_curve),\n pairsData.meta_id,\n pairsData.log_id\n ];\n\n try {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Attempting to upsert pair',details:{ mint: pairsData.mint },logType:'info',consumerLogger:consumerLogger});\n const result: QueryResult = await poolQuery(query, values);\n const id = extractId(result);\n\n if (!id) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'No ID returned after upsert',details:{ mint: pairsData.mint },logType:'warn',consumerLogger:consumerLogger});\n throw new Error('Upsert failed to return an ID');\n }\n\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:`Pair upserted successfully with ID: ${id}`,details:{ mint: pairsData.mint },logType:'info',consumerLogger:consumerLogger});\n return id;\n } catch (error: any) {\n getLogString({file_location:FILE_LOCATION,function_name:functionName,message:'Failed to upsert pair',details:{ mint: pairsData.mint},logType:'error',consumerLogger:consumerLogger});\n throw error;\n }\n};\n\n/**\n * Upserts a pair with stricter conflict resolution.\n * @param pairsData - The pair data to upsert.\n * @param consumerLogger - Custom 
 * logger instance.
 * @returns The upserted pair ID.
 */
export const upsertAllPair = async (pairsData: any, consumerLogger: any = logger): Promise<number> => {
  const functionName = 'upsertAllPair';
  const query = queries.upsertAllPair;

  const values = [
    stripQuotes(pairsData.signature),
    stripQuotes(pairsData.program_id || SOLANA_PUMP_FUN_PROGRAM_ID),
    stripQuotes(pairsData.mint),
    stripQuotes(pairsData.price_token),
    stripQuotes(pairsData.user_address),
    stripQuotes(pairsData.bonding_curve),
    stripQuotes(pairsData.associated_bonding_curve),
    pairsData.meta_id,
    pairsData.log_id
  ];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to upsert all pair', details: { mint: pairsData.mint }, logType: 'info', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);
    const id = extractId(result);

    if (!id) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No ID returned after upsert', details: { mint: pairsData.mint }, logType: 'warn', consumerLogger });
      throw new Error('Upsert failed to return an ID');
    }

    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair upserted successfully with ID: ${id}`, details: { mint: pairsData.mint }, logType: 'info', consumerLogger });
    return id;
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to upsert all pair', details: { mint: pairsData.mint }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Inserts a pair into the pairs table (uses upsert logic).
 * @param pairsData - The pair data to insert.
 * @param consumerLogger - Custom logger instance.
 * @returns The inserted pair ID.
 */
export const insertPair = async (pairsData: any, consumerLogger: any = logger): Promise<number> => {
  const functionName = 'insertPair';
  const query = queries.upsertPair; // Note: uses the upsertPair query, as in the original implementation

  const values = [
    stripQuotes(pairsData.signature),
    stripQuotes(pairsData.program_id || SOLANA_PUMP_FUN_PROGRAM_ID),
    stripQuotes(pairsData.mint),
    stripQuotes(pairsData.price_token),
    stripQuotes(pairsData.user_address),
    stripQuotes(pairsData.bonding_curve),
    stripQuotes(pairsData.associated_bonding_curve),
    pairsData.meta_id,
    pairsData.log_id
  ];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to insert pair', details: { mint: pairsData.mint }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);
    const id = extractId(result);

    if (!id) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No ID returned after insert', details: { mint: pairsData.mint }, logType: 'warn', consumerLogger });
      throw new Error('Insert failed to return an ID');
    }

    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair inserted successfully with ID: ${id}`, details: { mint: pairsData.mint }, logType: 'info', consumerLogger });
    return id;
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to insert pair', details: { mint: pairsData.mint }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Updates a pair by ID with provided fields.
 * @param id - The pair ID to update.
 * @param fields - Fields to update.
 * @param consumerLogger - Custom logger instance.
 * @returns True if updated, false if no changes or not found.
 */
export const updatePairByIds = async (
  id: number,
  fields: Partial<{
    mint: string;
    price_token: string;
    user_address: string;
    program_id: string;
    bonding_curve: string;
    associated_bonding_curve: string;
    signature: string;
    meta_id: number;
    log_id: number;
  }>,
  consumerLogger: any = logger
): Promise<boolean> => {
  const functionName = 'updatePairByIds';

  try {
    await poolQuery('BEGIN');

    // Fetch existing row
    const selectQuery = queries.updatePairByIds; // Note: this key holds a SELECT query in queries.json
    const selectResult = await poolQuery(selectQuery, [id]);

    if (!selectResult.rows.length) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found to update', details: { id }, logType: 'warn', consumerLogger });
      await poolQuery('ROLLBACK');
      return false;
    }

    const existingData = selectResult.rows[0];
    const updatedData = { ...existingData, ...fields };

    // Clean up quotes
    ['bonding_curve', 'associated_bonding_curve'].forEach(key => {
      if (updatedData[key]) updatedData[key] = stripQuotes(updatedData[key]);
    });

    const setClauses = Object.entries(updatedData)
      .filter(([_, value]) => value !== undefined)
      .map(([key], index) => `${key} = $${index + 2}`)
      .join(', ');

    if (!setClauses) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No fields to update', details: { id }, logType: 'warn', consumerLogger });
      await poolQuery('ROLLBACK');
      return false;
    }

    const values = [id, ...Object.values(updatedData).filter(v => v !== undefined)];
    const query = `UPDATE pairs SET ${setClauses}, updated_at = NOW() WHERE id = $1 RETURNING *;`;

    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to update pair', details: { id }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);
    await poolQuery('COMMIT');

    if (result.rows.length > 0) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Pair updated successfully', details: { id }, logType: 'info', consumerLogger });
      return true;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No changes made to pair', details: { id }, logType: 'warn', consumerLogger });
      return false;
    }
  } catch (error: any) {
    await poolQuery('ROLLBACK');
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to update pair', details: { id }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Appends a TCN to the `tcns` JSONB array of a pair.
 * @param id - The pair ID.
 * @param tcn - The TCN to append.
 * @param consumerLogger - Custom logger instance.
 * @returns The query result.
 */
export const appendTcnToPair = async (
  id: number,
  tcn: string,
  consumerLogger: any = logger
): Promise<QueryResult> => {
  const functionName = 'appendTcnToPair';
  const query = queries.appendTcnToPair;
  const values = [id, tcn];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to append TCN', details: { id }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);

    if (result.rows.length > 0) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'TCN appended successfully', details: { id }, logType: 'info', consumerLogger });
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found to append TCN', details: { id }, logType: 'warn', consumerLogger });
    }
    return result;
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to append TCN', details: { id }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Updates a pair entry by ID.
 * @param id - The pair ID.
 * @param fields - Fields to update.
 * @param consumerLogger - Custom logger instance.
 * @returns True if updated, false if no-op or not found.
 */
export const updatePairById = async (
  id: number,
  fields: Partial<{
    mint: string;
    price_token: string;
    user_address: string;
    program_id: string;
    bonding_curve: string;
    associated_bonding_curve: string;
    signature: string;
    meta_id: number;
    log_id: number;
  }>,
  consumerLogger: any = logger
): Promise<boolean> => {
  const functionName = 'updatePairById';

  if (!Object.keys(fields).length) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No fields provided to update', details: { id }, logType: 'warn', consumerLogger });
    return false;
  }

  try {
    const setClauses = Object.entries(fields)
      .map(([key], index) => `${key} = $${index + 2}`)
      .join(', ');
    const values = [id, ...Object.values(fields)];
    const query = `UPDATE pairs SET ${setClauses}, updated_at = NOW() WHERE id = $1 RETURNING id;`;

    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to update pair', details: { id }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);

    if (result.rows.length > 0) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Pair updated successfully', details: { id }, logType: 'info', consumerLogger });
      return true;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found to update', details: { id }, logType: 'warn', consumerLogger });
      return false;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to update pair', details: { id }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Appends a TCN to a transaction's `tcns` array.
 * @param id - The transaction ID.
 * @param tcn - The TCN to append.
 * @param consumerLogger - Custom logger instance.
 * @returns The query result.
 */
export const appendTcnToTransaction = async (
  id: number,
  tcn: string,
  consumerLogger: any = logger
): Promise<QueryResult> => {
  const functionName = 'appendTcnToTransaction';
  const query = queries.appendTcnToTransaction;
  const values = [id, tcn];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to append TCN to transaction', details: { id }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);

    if (result.rows.length > 0) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'TCN appended successfully to transaction', details: { id }, logType: 'info', consumerLogger });
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No transaction found to append TCN', details: { id }, logType: 'warn', consumerLogger });
    }
    return result;
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to append TCN to transaction', details: { id }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Fetches a pair ID by genesis signature.
 * @param genesis_signature - The genesis signature.
 * @param consumerLogger - Custom logger instance.
 * @returns The pair ID or null if not found.
 */
export const fetchPairByGenesisSignature = async (
  genesis_signature: string,
  consumerLogger: any = logger
): Promise<number | null> => {
  const functionName = 'fetchPairByGenesisSignature';
  const query = queries.fetchPairByGenesisSignature;
  const values = [genesis_signature];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair by genesis signature', details: { genesis_signature }, logType: 'debug', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);

    if (result.rows.length > 0) {
      const id = result.rows[0].id;
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Pair fetched successfully', details: { genesis_signature }, logType: 'info', consumerLogger });
      return id;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { genesis_signature }, logType: 'warn', consumerLogger });
      return null;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair', details: { genesis_signature }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Fetches a pair ID by mint.
 * @param mint - The mint address.
 * @param consumerLogger - Custom logger instance.
 * @returns The pair ID or null if not found.
 */
export const fetchPairByMint = async (
  mint: string,
  consumerLogger: any = logger
): Promise<number | null> => {
  const functionName = 'fetchPairByMint';
  const query = queries.fetchPairByMint;
  const values = [mint];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair by mint', details: { mint }, logType: 'debug', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);
    const id = extractId(result);

    if (!id) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { mint }, logType: 'warn', consumerLogger });
      return null;
    }

    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair fetched successfully with ID: ${id}`, details: { mint }, logType: 'info', consumerLogger });
    return id;
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair', details: { mint }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Fetches a pair ID by mint and program ID.
 * @param mint - The mint address.
 * @param programId - The program ID.
 * @param consumerLogger - Custom logger instance.
 * @returns The pair ID or null if not found.
 */
export const fetchPairByMintAndProgramId = async (
  mint: string,
  programId: string,
  consumerLogger: any = logger
): Promise<number | null> => {
  const functionName = 'fetchPairByMintAndProgramId';
  const query = queries.fetchPairByMintAndProgramId;
  const values = [mint, programId];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair by mint and program ID', details: { mint }, logType: 'debug', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);

    if (result.rows.length > 0) {
      const id = result.rows[0].id;
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair fetched successfully with ID: ${id}`, details: { mint }, logType: 'info', consumerLogger });
      return id;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { mint }, logType: 'warn', consumerLogger });
      return null;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair', details: { mint, error: error.message }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Fetches pair data by ID, fixing broken JSON in associated_bonding_curve.
 * @param id - The pair ID.
 * @param consumerLogger - Custom logger instance.
 * @returns The pair data or null if not found.
 */
export const fetchPairDataById = async (
  id: string | number,
  consumerLogger: any = logger
): Promise<Record<string, any> | null> => {
  const functionName = 'fetchPairDataById';
  const query = queries.fetchPairDataById;
  const values = [id];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair data by ID', details: { id }, logType: 'debug', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);

    if (result.rows.length > 0) {
      const pairData = result.rows[0];
      pairData.associated_bonding_curve = stripQuotes(pairData.associated_bonding_curve);
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Pair data fetched successfully', details: { id }, logType: 'info', consumerLogger });
      return pairData;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { id }, logType: 'warn', consumerLogger });
      return null;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair data', details: { id }, logType: 'error', consumerLogger });
    return null; // returns null instead of throwing, matching the original behavior
  }
};

/**
 * Fetches a pair ID by mint and program ID.
 * @param mint - The mint address.
 * @param programId - The program ID.
 * @param consumerLogger - Custom logger instance.
 * @returns The pair ID or null if not found.
 */
export const fetchPairIdByMintAndProgramId = async (
  mint: string,
  programId: string,
  consumerLogger: any = logger
): Promise<number | null> => {
  const functionName = 'fetchPairIdByMintAndProgramId';
  const query = queries.fetchPairIdByMintAndProgramId;
  const values = [mint, programId];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair ID by mint and program ID', details: { mint }, logType: 'debug', consumerLogger });
    const result: QueryResult = await poolQuery(query, values);

    if (result.rows.length > 0) {
      const id = result.rows[0].id;
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair ID fetched successfully: ${id}`, details: { mint }, logType: 'info', consumerLogger });
      return id;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { mint }, logType: 'warn', consumerLogger });
      return null;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair ID', details: { mint, error: error.message }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Fetches a pair by ID with optional filters.
 * @param id - The pair ID.
 * @param filters - Optional filters (e.g., sorted).
 * @param consumerLogger - Custom logger instance.
 * @returns The pair ID or null if not found.
 */
export const fetchPairById = async (
  id: number,
  filters: { sorted?: boolean } = {},
  consumerLogger: any = logger
): Promise<number | null> => {
  const functionName = 'fetchPairById';
  let query = queries.fetchPairById;
  const values: any[] = [id];
  const paramIndex = 2;

  if (filters.sorted !== undefined) {
    query += ` AND sorted = $${paramIndex}`;
    values.push(filters.sorted);
  }

  query += ` LIMIT 1;`;

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Fetching pair by ID', details: { id }, logType: 'debug', consumerLogger });
    const result = await poolQuery(query, values);

    if (result.rows.length > 0) {
      const foundId = result.rows[0].id; // renamed from `id` to avoid shadowing the parameter
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: `Pair fetched successfully with ID: ${foundId}`, details: { id }, logType: 'info', consumerLogger });
      return foundId; // returns the ID rather than the full row, to match the declared return type
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found', details: { id }, logType: 'warn', consumerLogger });
      return null;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to fetch pair', details: { id }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Deletes a pair by genesis signature.
 * @param genesis_signature - The genesis signature.
 * @param consumerLogger - Custom logger instance.
 * @returns True if deleted, false if not found.
 */
export const deletePairBySignature = async (
  genesis_signature: string,
  consumerLogger: any = logger
): Promise<boolean> => {
  const functionName = 'deletePairBySignature';
  const query = queries.deletePairBySignature;
  const values = [genesis_signature];

  try {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Attempting to delete pair', details: { genesis_signature }, logType: 'info', consumerLogger });
    const result = await poolQuery(query, values);

    if (result.rows.length > 0) {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Pair deleted successfully', details: { genesis_signature }, logType: 'info', consumerLogger });
      return true;
    } else {
      getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'No pair found to delete', details: { genesis_signature }, logType: 'warn', consumerLogger });
      return false;
    }
  } catch (error: any) {
    getLogString({ file_location: FILE_LOCATION, function_name: functionName, message: 'Failed to delete pair', details: { genesis_signature }, logType: 'error', consumerLogger });
    throw error;
  }
};

/**
 * Function to close the PostgreSQL connection pool.
 */
export const closePool = async (consumerLogger: any = logger): Promise<void> => {
  try {
    await endPool();
    // consumerLogger.log('✅ PostgreSQL connection pool has been closed.');
  } catch (error) {
    consumerLogger.error('❌ Error closing PostgreSQL connection pool:', error);
  }
};

// ---- next embedded source file ----

// src/schemas/transactions/upsertTransaction.ts
import { _, QueryResult, poolQuery, logger, path, fs, withTransaction } from './../../db';
import type { TcnData, TransactionItem } from './../../db';
import { getDirname } from './../../utils';

const __dirname = getDirname(import.meta.url);
const query_js = JSON.parse(
  fs.readFileSync(path.join(__dirname, 'queries.json'), 'utf8')
);
const queries = query_js.queries;
const transactionsTableCreation = queries.getTransactionsTableCreation;

export const createTransactionsTable = async (): Promise<void> => {
  // Index creation for optimizing queries on pair_id
  const createTransactionsTableCreateIndexQuery = queries.createTransactionsTableCreateIndexQuery;
  try {
    await poolQuery(transactionsTableCreation);
    logger.info('✅ Transactions table has been created successfully.');

    await poolQuery(createTransactionsTableCreateIndexQuery);
    logger.info('✅ Index on pair_id created successfully.');
  } catch (error) {
    logger.error('❌ Error setting up transactions table and indexes:', error);
    throw error;
  }
};

// Max retries for handling deadlocks
const MAX_RETRIES = 3;

// Retry delay (exponential backoff)
const retryDelay = (attempt: number) => 50 * Math.pow(2, attempt);

export async function upsertTransaction(txData: TransactionItem): Promise<any> {
  try {
    await poolQuery('BEGIN');

    // Check if the row exists
    const upsertTransactionSelectQuery = queries.upsertTransactionSelectQuery;
    const selectResult = await poolQuery(upsertTransactionSelectQuery, [txData.signature]);

    let existingTcns = [];
    if (selectResult?.rows && selectResult.rows.length > 0) {
      existingTcns = selectResult.rows[0].tcns || [];
    }

    // Deduplicate tcns in application logic
    const mergedTcns = _.uniqBy([...existingTcns, ...txData.tcns], 'invocation');

    // Upsert with deduplicated tcns
    const upsertQuery = queries.upsertTransactionUpsertQuery;
    const upsertValues = [
      txData.signature,
      txData.slot || null,
      txData.program_id || null,
      txData.pair_id || null,
      txData.log_id || null,
      txData.meta_id || null,
      JSON.stringify(mergedTcns),
    ];

    const upsertResult = await poolQuery(upsertQuery, upsertValues);

    await poolQuery('COMMIT');
    if (upsertResult?.rows && upsertResult.rows.length > 0) {
      return upsertResult.rows[0].id;
    }
  } catch (error: any) {
    await poolQuery('ROLLBACK');
    logger.error(`❌ Error in upsertTransaction: ${error}`);
    throw error;
  }
}

/**
 * Upserts TCN data into transactions.tcns array using (signature, invocation) as unique keys.
 */
export async function upsertTransactios( // NB: exported name mirrors the "upsertTransactios" key in queries.json
  txData: TransactionItem
): Promise<number> {
  let attempt = 0;

  while (attempt <= MAX_RETRIES) {
    try {
      return await withTransaction<number>(async (client) => {
        // Lock row
        const selectRes = await client.query(
          queries.upsertTransactionsSelectQuery,
          [txData.signature]
        );

        // INSERT PATH
        if (selectRes.rows.length === 0) {
          const insertRes = await client.query(
            queries.upsertTransactios,
            [
              txData.signature,
              txData.slot ?? null,
              txData.program_id ?? null,
              txData.pair_id ?? null,
              txData.log_id ?? null,
              txData.meta_id ?? null,
              JSON.stringify(txData.tcns),
            ]
          );

          const txnId = insertRes.rows[0].id;
          logger.info(`✅ Inserted new transaction row for signature=${txData.signature}, id=${txnId}`);
          return txnId;
        }

        // UPDATE PATH
        const existingRow = selectRes.rows[0];
        const existingTcns: TcnData[] = existingRow.tcns ?? [];
        const newTcn = txData.tcns[0];

        const idx = existingTcns.findIndex(
          (t) => t.invocation === newTcn.invocation
        );

        if (idx >= 0) {
          existingTcns[idx] = newTcn;
          logger.info(`⚠️ Updated existing TCN invocation=${newTcn.invocation}`);
        } else {
          existingTcns.push(newTcn);
          logger.info(`⚠️ Added new TCN invocation=${newTcn.invocation}`);
        }

        const updateRes = await client.query(
          queries.upsertTransactiosUpdateQuery,
          [
            txData.signature,
            txData.slot ?? null,
            txData.program_id ?? null,
            txData.pair_id ?? null,
            txData.log_id ?? null,
            txData.meta_id ?? null,
            JSON.stringify(existingTcns),
          ]
        );

        const updatedId = updateRes.rows[0].id;
        logger.info(`✅ Upserted TCN invocation=${newTcn.invocation}, id=${updatedId}`);
        return updatedId;
      });
    } catch (error: any) {
      if (error.code === "40P01") { // PostgreSQL deadlock_detected
        attempt++;
        logger.warn(`⚠️ Deadlock detected, retry ${attempt}/${MAX_RETRIES}`);

        if (attempt > MAX_RETRIES) {
          logger.error("❌ Max retries exceeded");
          throw error;
        }

        await new Promise((r) => setTimeout(r, retryDelay(attempt)));
        continue;
      }

      logger.error(`❌ Error in upsertTransactios: ${error.message}`);
      throw error;
    }
  }

  throw new Error("Unreachable");
}

/**
 * Fetches a transaction by its ID.
 * @param id - The ID of the transaction.
 * @returns The transaction data or null if not found.
 */
export const fetchTransactionById = async (
  id: number
): Promise<any | null> => {
  try {
    const query = queries.fetchTransactionById;
    const values = [id];
    const result: QueryResult<any> = await poolQuery(query, values);

    if (result?.rows && result.rows.length === 0) {
      logger.warn(`🔍 No transaction found with ID: ${id}`);
      return null;
    }
    logger.info(`✅ Transaction fetched successfully with ID: ${id}`);
    return result.rows[0];
  } catch (error) {
    logger.error(`❌ Error fetching transaction by ID ${id}:`, error);
    throw error;
  }
};

/**
 * Fetches a transaction by its signature.
 * @param signature - The signature of the transaction.
 * @returns The transaction data or null if not found.
 */
export const fetchTransactionBySignature = async (
  signature: string
): Promise<any | null> => {
  try {
    const query = queries.fetchTransactionBySignature;
    const values = [signature];
    const result: QueryResult<any> = await poolQuery(query, values);

    if (result?.rows && result.rows.length === 0) {
      logger.warn(`🔍 No transaction found with signature: ${signature}`);
      return null;
    }
    logger.info(`✅ Transaction fetched successfully with signature: ${signature}`);
    return result.rows[0];
  } catch (error) {
    logger.error(`❌ Error fetching transaction by signature ${signature}:`, error);
    throw error;
  }
};

// ---- next embedded source file ----

import { config, DotenvConfigOutput } from 'dotenv';
import { getParentDirectory } from './pathUtils';
import * as fs from 'fs';
import * as path from 'path';

/**
 * Loads environment variables from a specified file or directory.
 * If a directory is provided, it looks for a `.env` file within that directory.
 * If a file is provided, it ensures the file is named `.env` before loading.
 *
 * @param inputPath - The file or directory path to load the `.env` file from.
 * @returns The result of dotenv.config()
 * @throws Will throw an error if the path does not exist,
 *         if a provided file is not named `.env`,
 *         or if a `.env` file does not exist in the provided directory.
 */
function loadEnv(inputPath: string): DotenvConfigOutput {
  // Resolve the absolute path
  const resolvedPath = path.resolve(inputPath);
  let envPath: string;

  // Check if the path exists
  if (!fs.existsSync(resolvedPath)) {
    throw new Error(`Path does not exist: ${resolvedPath}`);
  }

  const stats = fs.statSync(resolvedPath);

  if (stats.isFile()) {
    // If it's a file, verify that it's named `.env`
    if (path.basename(resolvedPath) !== '.env') {
      throw new Error(`Provided file is not a .env file: ${resolvedPath}`);
    }
    envPath = resolvedPath;
  } else if (stats.isDirectory()) {
    // If it's a directory, append `.env` to the path
    envPath = path.join(resolvedPath, '.env');

    if (!fs.existsSync(envPath)) {
      throw new Error(`.env file not found in directory: ${resolvedPath}`);
    }
  } else {
    // If it's neither a file nor a directory, throw an error
    throw new Error(`Provided path is neither a file nor a directory: ${resolvedPath}`);
  }

  // Load environment variables from the determined `.env` path
  const result = config({ path: envPath });

  if (result.error) {
    throw result.error;
  }

  return result;
}

/**
 * Retrieves the value of an environment variable.
 * @param key - The environment variable key.
 * @param pathDir - Optional directory path to load the .env file from.
 * @returns The value of the environment variable or null if not found.
 */
export function getEnvValue(key: string, pathDir?: string): string | null {
  pathDir = pathDir || getParentDirectory();
  if (pathDir) {
    try {
      loadEnv(pathDir);
    } catch (error) {
      console.error(error);
      return null;
    }
  }

  return process.env[key] || null;
}

// ---- next embedded source file ----

import os from 'os';
import ip from 'ip';

export const staticIp = '192.168.0.100';

function getLocalIP(): string[] {
  const interfaces = os.networkInterfaces();
  const addresses: string[] = [];

  for (const ifaceName in interfaces) {
    const iface = interfaces[ifaceName];
    if (!iface) continue;

    for (const alias of iface) {
      if (alias.family === 'IPv4' && !alias.internal) {
        addresses.push(alias.address);
      }
    }
  }

  return addresses;
}

function isIpInSubnet(ipAddress: string, subnet: string): boolean {
  return ip.cidrSubnet(subnet).contains(ipAddress);
}

export function isServerIp(): boolean {
  // True when any non-internal local IPv4 address falls inside the server subnet.
  // (The original returned from inside a forEach callback, so the function itself
  // always resolved to undefined; some() produces the intended boolean.)
  return getLocalIP().some(addr => isIpInSubnet(addr, '192.168.0.100/24'));
}

export function get_host(): string {
  // Branches on the boolean result of isServerIp(); the original compared the
  // (always undefined) return value against null, so the first branch never ran.
  return isServerIp() ? 'localhost' : '127.0.0.1';
}

// ---- next embedded source file ----

import { fileURLToPath } from 'url';
import * as path from 'path';
import * as url from 'url';

/**
 * Returns the absolute path of the current file.
 */
function getAbsolutePath(): string {
  return fileURLToPath(import.meta.url);
}

/**
 * Equivalent of __dirname for ESM
 */
export function getDirname(moduleUrl: string): string {
  return path.dirname(fileURLToPath(moduleUrl));
}

function getAbsDir(): string {
  return path.dirname(getAbsolutePath());
}

function getSolcatcherDir(): string {
  return path.dirname(getAbsDir());
}

function getSrcDir(): string {
  return path.join(getSolcatcherDir(), "src");
}

export function getParentDirectory(): string {
  return path.dirname(getSrcDir());
}

export function getAbsPath(subPath: string): string {
  return path.join(getAbsDir(), subPath);
}

export function getEnvPath(): string {
  return path.join(getParentDirectory(), '.env');
}

export function getDbConfigsPath(): string {
  return path.join(getParentDirectory(), 'dbConfigs');
}

export function getSchemasPath(): string {
  return path.join(getDbConfigsPath(), 'schemas');
}

export function getSchemasDirPath(subPath: string): string {
  return path.join(getSchemasPath(), subPath);
}

// Helper functions
export function getBaseDomain(inputUrl: string): string {
  const parsedUrl = url.parse(inputUrl);
  return parsedUrl.host || '';
}

console.log(getEnvPath()); // debug leftover: logs the resolved .env path at module load

// ---- next embedded source file ----

import { fetch, getLogString, logger, loadInnerJson } from './../db';
import { getIfNone } from './../utils';
export { fetchRpc } from './../db';

export interface RPCParams {
  url?: any;
  method: any;
  params: any;
}

// Function to check if response is of a certain type
export function isRawResponse(response: any, typeObj: any): boolean {
  return response instanceof typeObj;
}

// Function to get text response
export async function getTextResponse(response: Response): Promise<string | null> {
  try {
    return await response.text();
  } catch (e) {
    return null;
  }
}

// Function to get HTTP headers
export function getHeaders(): HeadersInit {
  return {
    'Content-Type': 'application/json',
  };
}

/**
 * Fetches data from an IPFS URL.
 * @param url - The IPFS URL to fetch data from.
 * @returns The fetched data or null if an error occurs.
 */
export async function fetchIPFSData(url: string, consumerLogger: any = logger): Promise<any> {
  const functionName = 'fetchIPFSData';
  url = url.replace('cf-', '');
  try {
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(getLogString({ function_name: functionName, message: `Network response was not ok for URL: ${url}`, details: null, file_location: null, logType: null }));
    }
    const data = await response.json();
    return data;
  } catch (error: any) {
    consumerLogger.error(getLogString({ function_name: functionName, message: 'Failed to fetch IPFS data', details: { url, error: error.message }, logType: 'error' }));
    throw error;
  }
}

// Function to get JSON response
export async function getJsonResponse(
  response: Response,
  responseResult: string = 'result',
  loadNestedJson: boolean = true
): Promise<any | null> {
  try {
    const responseJson = await response.json();
    let result = typeof responseJson === 'object' && responseJson !== null
      ? (responseJson[responseResult] || responseJson)
      : responseJson;

    if (loadNestedJson) {
      result = loadInnerJson(result);
    }

    if (result !== null && result !== undefined) {
      return result;
    }

    // Fallback to the last key if 'result' is not found
    if (typeof responseJson === 'object' && responseJson !== null) {
      const keys = Object.keys(responseJson);
      const lastKey = keys.length > 0 ? keys[keys.length - 1] : null;
      return lastKey ? responseJson[lastKey] : null;
    }

    return null;
  } catch (e) {
    return null;
  }
}

// Function to get combined response
export async function getResponse(
  response: Response,
  responseResult: string | undefined = undefined,
  rawResponse: boolean = false,
  loadNestedJson: boolean = true
): Promise<any | string | Response> {
  if (rawResponse) {
    return response;
  }

  const jsonResponse = await getJsonResponse(response, responseResult, loadNestedJson);
  if (jsonResponse !== null) {
    return jsonResponse;
  }

  const textResponse = await getTextResponse(response);
  if (textResponse) {
    return textResponse;
  }

  return response; // Return raw content as a last resort
}

// Function to create RPC payload
export function getRpcPayload(
  method: string,
  params?: any[],
  id?: number | string,
  jsonrpc?: string
): any | null {
  if (!method) {
    return null;
  }
  const payload = JSON.stringify({
    jsonrpc: getIfNone(jsonrpc, '2.0'),
    id: getIfNone(id, 1),
    method: method,
    params: getIfNone(params, []),
  });
  return payload;
}

// Function to clean queries based on interval
export function cleanQueries(priorQueries: any[] | undefined, interval: number): any[] {
  const timeIntervalCutoff = Date.now() / 1000 - interval;
  // Ensure priorQueries is an array
  return (priorQueries || []).filter(query => query.time >= timeIntervalCutoff);
}

// Function to check request per single RPC
export function isRequestPerSingleRpc(
  priorQueries: any[],
  method: string,
  maxRequests: number,
  interval: number
): false | number {
  const timeIntervalCutoff = Date.now() / 1000 - interval;
  const qualifyingRequests = priorQueries.filter(query => query.method === method && query.time >= timeIntervalCutoff);
  const lenQualifying = qualifyingRequests.length;
  if (lenQualifying >= maxRequests) {
    console.log(`request_per_single_rpc ${lenQualifying} of ${maxRequests} hit within ${interval} seconds`);
    return parseFloat(String(qualifyingRequests[lenQualifying - maxRequests].time)) - timeIntervalCutoff;
  }
  return false;
}

// Function to check request per single IP
export function isRequestPerSingleIp(
  priorQueries: any[],
  maxRequests: number,
  interval: number
): false | number {
  const timeIntervalCutoff = Date.now() / 1000 - interval;
  const qualifyingRequests = priorQueries.filter(query => query.time >= timeIntervalCutoff);
  const lenQualifying = qualifyingRequests.length;
  if (lenQualifying >= maxRequests) {
    console.log(`request_per_single_ip ${lenQualifying} of limit ${maxRequests} hit within ${interval} seconds`);
    return parseFloat(String(qualifyingRequests[lenQualifying - maxRequests].time)) - timeIntervalCutoff;
  }
  return false;
}

// Function to check data per IP
export function isDataPerIp(
  priorQueries: any[],
  maxData: number,
  interval: number,
  avgData: number
): false | number {
  const timeIntervalCutoff = Date.now() / 1000 - interval;
  let totalData = priorQueries
    .filter(query => query.time >= timeIntervalCutoff)
    .reduce((sum, query) => sum + (query.data || 0), 0) + avgData;
  if (totalData >= maxData) {
    let targetTime = priorQueries[0].time;
    console.log(`data_per_ip ${totalData} of limit ${maxData} hit within ${interval} seconds`);
    if (avgData) {
      const targetData = maxData - avgData;
      for (const data of priorQueries) {
        totalData -= data.data || 0;
        if (totalData <= targetData) {
          targetTime = data.time;
          break;
        }
      }
    }
    return parseFloat(String(targetTime)) - timeIntervalCutoff;
  }
  return false;
}

// Function to check all limits
export function getIsLimit(
  priorQueries: any[] | undefined,
  method: string = 'default_method',
  rateLimit: number = 40,
  rpsLimit: number = 100,
  interval: number = 10,
  dataInterval: number = 30,
  dataLimit: number = 100,
  avgData: number = 0
): false | number {
  // Ensure priorQueries is an array
  priorQueries = priorQueries || [];

  const requestPerSingleRpc = isRequestPerSingleRpc(priorQueries, method, rateLimit, interval);
  if (requestPerSingleRpc !== false) {
    return requestPerSingleRpc;
  }

  const requestPerSingleIp = isRequestPerSingleIp(priorQueries, rpsLimit, interval);
  if (requestPerSingleIp !== false) {
    return requestPerSingleIp;
  }

  const dataPerSingleIp = isDataPerIp(priorQueries, dataLimit, dataInterval, avgData);
  if (dataPerSingleIp !== false) {
    return dataPerSingleIp;
  }

  return false;
}

// Handles fetch requests with error handling and response validation
export async function fetchData(url: string, method: string, headers: { [key: string]: string }, body: any): Promise<any> {
  try {
    const response = await fetch(url, {
      method,
      headers,
      body: JSON.stringify(body)
    });
    return response;
  } catch (error) {
    logger.error(`FetchData failed for URL: ${url}, Method: ${method}, Error: ${error}`);
    throw error; // propagate so callers do not receive undefined
  }
}

// Prepares parameters for an RPC call (url, headers, payload)
export async function getRpcParams(
  method: string,
  params: any[] | null = null,
  url: string | null = null,
  id: number | string | null = null,
  jsonrpc: string | null = null,
  headers: { [key: string]: string } | null = null,
  call_method: string = 'POST'
): Promise<{ url: string; call_method: string; headers: { [key: string]: string }; payload: any }> {
  headers = headers ?? { 'Content-Type': 'application/json' };
  url = url ?? 'https://api.mainnet-beta.solana.com';
  id = id ?? 1;
  jsonrpc = jsonrpc ?? '2.0';
  params = params ?? [];
  const payload = {
    method,
    params,
    id,
    jsonrpc,
  };
  return { url, call_method, headers, payload };
}

// Calls the RPC endpoint with proper request headers and payload
export async function callRpc(
  method: string,
  params: any = null,
  url: any = null,
  id: any = null,
  jsonrpc: string | null = null,
  headers: { [key: string]: string } | null = null,
  call_method: string = 'POST'
): Promise<any> {
  try {
    // Arguments follow getRpcParams' (method, params, url, ...) order; the
    // original swapped `url` and `params`, sending the endpoint as the param list.
    const { url: rpcUrl, call_method: rpcMethod, headers: rpcHeaders, payload } = await getRpcParams(
      method,
      params,
      url,
      id,
      jsonrpc,
      headers,
      call_method
    );

    logger.info(`Calling RPC at ${rpcUrl} with payload: ${JSON.stringify(payload)}`);

    const response = await fetchData(rpcUrl, rpcMethod, rpcHeaders, payload);

    logger.info(`Response from RPC: ${JSON.stringify(response)}`);

    return response;
  } catch (error) {
    logger.error(`Error in callRpc for method: ${method}, Error: ${error}`);
    throw error;
  }
}

export interface RpcPayload {
  method: string;
  params?: any[];
  id?: number | string;
  jsonrpc?: string;
  headers?: { [key: string]: string };
  url_1_only?: boolean;
  url_2_only?: boolean;
}

export interface RpcResponse {
  result?: any;
  error?: any;
}

// ---- next embedded source file ----

import { fs, path, fileURLToPath } from './../db';

export function safeDumpToFile(data: any, filePath: string): void {
  try {
    fs.writeFileSync(filePath, JSON.stringify(data, null, 2));
  } catch (error) {
    console.error(`Error writing to file ${filePath}:`, error);
  }
}

export function safeReadFromJson(filePath: string): any {
  try {
    if (fs.existsSync(filePath)) {
      const rawData = fs.readFileSync(filePath, 'utf-8');
      return JSON.parse(rawData);
    } else {
      return {};
    }
  } catch (error) {
    console.error(`Error reading from file ${filePath}:`, error);
    return {};
  }
}

/**
 * Serializes the input to JSON if it's an object, excluding circular references.
 *
 * @param data - The data to serialize.
 * @returns The serialized JSON string or the original data.
 */
export function dumpIfJson(data: any): any {
  if (typeof data === 'object') {
    try {
      // Use a replacer function to omit circular references
      const seen = new WeakSet();
      return JSON.stringify(data, (key, value) => {
        if (typeof value === 'object' && value !== null) {
          if (seen.has(value)) {
            return; // Omit circular reference
          }
          seen.add(value);
        }
        return value;
      });
    } catch (error) {
      console.error('Failed to serialize object:', error);
      return null;
    }
  }
  return data;
}

export function loadQueries<T = any>(
  callerUrl: string,
  subPath: string = 'queries.json'
): T {
  const callerFile = fileURLToPath(callerUrl);
  const callerDir = path.dirname(callerFile);

  const fullPath = path.resolve(callerDir, subPath);

  if (!fs.existsSync(fullPath)) {
    throw new Error(`Query file not found: ${fullPath}`);
  }

  const raw = fs.readFileSync(fullPath, 'utf-8');

  try {
    const parsed = JSON.parse(raw);
    if (!parsed?.queries) {
      throw new Error(`Missing "queries" key in ${fullPath}`);
    }
    return parsed.queries as T;
  } catch (err: any) {
    throw new Error(`Failed to parse ${fullPath}: ${err.message}`);
  }
}

(End of embedded sources. The source map's "names" field ["PQueue", "p_queue"] and its base64-VLQ "mappings" field are omitted here as non-readable generated data.)
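For orientation, here is a minimal sketch of how the pair-schema helpers above compose. The root import path and every field value are illustrative assumptions (the actual export surface and the expected shape of pairsData live in the package's db module and queries.json):

// Hypothetical usage sketch; values and import path are illustrative only.
import { upsertAllPair, fetchPairByMint, appendTcnToPair, closePool } from '@putkoff/abstract-solcatcher';

async function recordPair(): Promise<void> {
  // Field names mirror the `values` array that upsertAllPair binds to its query.
  const pairId = await upsertAllPair({
    signature: 'exampleSignature111',             // hypothetical transaction signature
    mint: 'So11111111111111111111111111111111111111112',
    price_token: '0.000001',
    user_address: 'exampleUserAddress111',        // hypothetical wallet address
    bonding_curve: 'exampleCurveAddress111',      // hypothetical account
    associated_bonding_curve: 'exampleAssocCurve111',
    meta_id: 1,
    log_id: 1,
  });
  console.log('upserted pair id:', pairId);

  const foundId = await fetchPairByMint('So11111111111111111111111111111111111111112');
  if (foundId !== null) {
    await appendTcnToPair(foundId, 'example-tcn'); // appends to the pair's tcns JSONB array
  }
  await closePool();
}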
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
@@ -0,0 +1,2 @@
+ import type { SolcatcherConfig } from "./types";
+ export declare const defaultConfig: SolcatcherConfig;
@@ -0,0 +1,2 @@
+ import type { SolcatcherConfig } from "./types";
+ export declare function loadSolcatcherConfig(explicitPath?: string): SolcatcherConfig;
@@ -0,0 +1,4 @@
+ export interface SolcatcherConfig {
+     envPath?: string;
+     autoLoadEnv?: boolean;
+ }
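A config object for this interface could look like the following sketch. Both fields are optional per the declaration; the path and the root import are illustrative assumptions:

// Hypothetical usage sketch of the declared SolcatcherConfig shape.
import type { SolcatcherConfig } from '@putkoff/abstract-solcatcher';

const config: SolcatcherConfig = {
  envPath: '/srv/solcatcher/.env', // hypothetical location of the .env file
  autoLoadEnv: true,               // opt in to loading it automatically
};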
@@ -0,0 +1,38 @@
+ export declare function loadEnvConfig(): {
+     rabbit: {
+         host: string;
+         port: string;
+         user: string;
+         pass: string;
+         name: string;
+         url: string;
+     };
+     postgres: {
+         host: string;
+         port: string;
+         user: string;
+         pass: string;
+         name: string;
+         url: string;
+     };
+     queues: {
+         queueName: string;
+         logEntry: string;
+         pairEntry: string;
+         txnEntry: string;
+         rpcCall: string;
+         txnCall: string;
+         signatureCall: string;
+         metaDataCall: string;
+         signaturesCall: string;
+         logIntake: string;
+     };
+     solana: {
+         maxDbClients: number;
+         idleTimeoutMs: number;
+         connectionTimeoutMs: number;
+         fallbackRpcUrl: string;
+         fallbackWsEndpoint: string;
+         broadcastPort: number;
+     };
+ };
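Going only by the declared return shape, consuming code might look like this sketch (the import path is assumed, and the function's defaulting behavior is not visible from the declaration):

// Hypothetical usage sketch of the declared loadEnvConfig() return shape.
import { loadEnvConfig } from '@putkoff/abstract-solcatcher';

const env = loadEnvConfig();
// Each group mirrors the declaration: connection pieces plus a composed URL.
console.log(env.rabbit.url);            // e.g. an amqp:// connection string
console.log(env.postgres.url);          // e.g. a postgres:// connection string
console.log(env.queues.logEntry);       // queue name for log-entry messages
console.log(env.solana.fallbackRpcUrl); // RPC endpoint used when none is configured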
@@ -0,0 +1,2 @@
+ import type { SolcatcherConfig } from "../config/types";
+ export declare function loadEnvFromConfig(config: SolcatcherConfig): void;
@@ -0,0 +1,8 @@
+ export declare function loadPostgresEnv(): {
+     host: string;
+     port: string;
+     user: string;
+     pass: string;
+     name: string;
+     url: string;
+ };
@@ -0,0 +1,12 @@
+ export declare function loadQueueEnv(): {
+     queueName: string;
+     logEntry: string;
+     pairEntry: string;
+     txnEntry: string;
+     rpcCall: string;
+     txnCall: string;
+     signatureCall: string;
+     metaDataCall: string;
+     signaturesCall: string;
+     logIntake: string;
+ };
@@ -0,0 +1,8 @@
+ export declare function loadRabbitEnv(): {
+     host: string;
+     port: string;
+     user: string;
+     pass: string;
+     name: string;
+     url: string;
+ };
@@ -0,0 +1,8 @@
+ export declare function loadSolanaEnv(): {
+     maxDbClients: number;
+     idleTimeoutMs: number;
+     connectionTimeoutMs: number;
+     fallbackRpcUrl: string;
+     fallbackWsEndpoint: string;
+     broadcastPort: number;
+ };
@@ -0,0 +1 @@
+ export * from './shared';
@@ -0,0 +1 @@
+ export declare function startSolcatcher(): Promise<void>;
@@ -0,0 +1 @@
+ export declare function bootstrapEnv(): void;
@@ -0,0 +1 @@
+ export declare function startSolcatcher(): Promise<void>;
@@ -0,0 +1,21 @@
+ import { pkg } from './init_imports';
+ export declare function getPool(): pkg.Pool;
+ export declare function endPool(): Promise<void>;
+ /**
+  * Runs a SQL query using the connection pool.
+  * Implements retry logic in case of 'too many clients' errors.
+  * @param query - SQL query string
+  * @param params - Query parameters (optional)
+  * @param retries - Number of retries for 'too many clients' errors
+  * @returns Query result
+  */
+ export declare function poolQuery(query: string, params?: any[]): Promise<pkg.QueryResult<any>>;
+ export declare function withTransaction<T>(fn: (client: any) => Promise<T>): Promise<T>;
+ /**
+  * Fetch all rows from the given table.
+  */
+ export declare function fetchAllRows(tableName: any): Promise<any>;
+ export declare function extractRow(obj: any): any;
+ export declare function extractId(obj: any): any;
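The declarations above pair a retrying poolQuery with a withTransaction helper that hands a client to a callback. A minimal sketch of using both follows; the table, SQL, and root import path are illustrative assumptions, and whether withTransaction issues BEGIN/COMMIT itself is inferred from its use elsewhere in the bundle:

// Hypothetical usage sketch of the declared connection helpers.
import { poolQuery, withTransaction, extractId, endPool } from '@putkoff/abstract-solcatcher';

async function demo(): Promise<void> {
  // Single parameterized query through the shared pool.
  const res = await poolQuery('SELECT id FROM pairs WHERE mint = $1 LIMIT 1;', ['exampleMint111']);
  const id = extractId(res);

  // Multi-statement work handed to withTransaction's client callback.
  await withTransaction(async (client) => {
    await client.query('UPDATE pairs SET updated_at = NOW() WHERE id = $1;', [id]);
    await client.query('INSERT INTO pair_audit (pair_id) VALUES ($1);', [id]); // hypothetical audit table
  });

  await endPool();
}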
@@ -0,0 +1,34 @@
+ export declare const RABBITMQ_HOST: string;
+ export declare const RABBITMQ_PORT: string;
+ export declare const RABBITMQ_USER: string;
+ export declare const RABBITMQ_NAME: string;
+ export declare const RABBITMQ_PASS: string;
+ export declare const RABBITMQ_URL: string;
+ export declare const RABBITMQ_UI_HOST: string;
+ export declare const RABBITMQ_UI_PORT: string;
+ export declare const RABBITMQ_UI_USER: string;
+ export declare const RABBITMQ_UI_NAME: string;
+ export declare const RABBITMQ_UI_PASS: string;
+ export declare const RABBITMQ_UI_URL: string;
+ export declare const POSTGRESQL_HOST: string;
+ export declare const POSTGRESQL_PORT: string;
+ export declare const POSTGRESQL_USER: string;
+ export declare const POSTGRESQL_NAME: string;
+ export declare const POSTGRESQL_PASS: string;
+ export declare const POSTGRESQL_DB_URL: string;
+ export declare const QUEUE_NAME: string;
+ export declare const QUEUE_LOG_INTAKE: string;
+ export declare const QUEUE_LOG_ENTRY: string;
+ export declare const QUEUE_PAIR_ENTRY: string;
+ export declare const QUEUE_TXN_ENTRY: string;
+ export declare const QUEUE_RPC_CALL: string;
+ export declare const QUEUE_TRANSACTION_CALL: string;
+ export declare const QUEUE_SIGNATURE_CALL: string;
+ export declare const QUEUE_META_DATA_CALL: string;
+ export declare const QUEUE_GET_SIGNATURES_CALL: string;
+ export declare const QUEUE_LOG_GETEM: string;
+ export declare const SOLANA_DB_MAX_CLIENTS: number;
+ export declare const SOLANA_IDLE_TIMEOUT_MS: number;
+ export declare const SOLANA_CONNECTION_TIMEOUT_MS: number;
+ export declare const SOLANA_FALLBACK_RPC_URL: string;
+ export declare const SOLANA_FALLBACK_WS_ENDPOINT: string;
@@ -0,0 +1,4 @@
+ export * from './connections';
+ export * from './init_imports';
+ export * from './module_imports';
+ export * from './tokenUtils';
@@ -0,0 +1,18 @@
+ import * as amqp from "amqplib";
+ import type { Connection, Channel, ConsumeMessage, ChannelModel } from "amqplib";
+ import type { DotenvConfigOutput } from "dotenv";
+ import * as fs from 'fs';
+ import * as path from 'path';
+ import * as url from 'url';
+ import { fileURLToPath } from "url";
+ import pkg from 'pg';
+ import type { QueryResult } from 'pg';
+ import dotenv from 'dotenv';
+ import axios from "axios";
+ import PQueue from 'p-queue';
+ import fetch, { Response } from 'node-fetch';
+ import _ from 'lodash';
+ import Joi from 'joi';
+ import WebSocket from "ws";
+ export { WebSocket, dotenv, amqp, fs, path, fileURLToPath, QueryResult, pkg, PQueue, fetch, Response, _, Joi, axios, url };
+ export { Connection, Channel, ConsumeMessage, ChannelModel, DotenvConfigOutput, };
@@ -0,0 +1 @@
+ export * from './../../shared';
@@ -0,0 +1 @@
+ export * from './pubKeyUtils';
@@ -0,0 +1,10 @@
+ import { PublicKey } from "./../module_imports";
+ declare class KeyManager {
+     private cache?;
+     constructor(enableCache?: boolean);
+     getPubkey(obj: string | PublicKey): PublicKey;
+ }
+ export declare const KeyManager_server: KeyManager;
+ export declare function getPubkey_server(obj: any): PublicKey;
+ export declare function getPubkeyString_server(obj: string | PublicKey): string;
+ export {};
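Going by the declaration, getPubkey_server normalizes either a base58 string or an existing PublicKey through the shared KeyManager_server instance, and getPubkeyString_server does the reverse. A usage sketch, with the root import path assumed and the mint address purely illustrative:

// Hypothetical usage sketch of the declared pubkey helpers.
import { getPubkey_server, getPubkeyString_server } from '@putkoff/abstract-solcatcher';

const key = getPubkey_server('So11111111111111111111111111111111111111112'); // -> PublicKey
const str = getPubkeyString_server(key);                                     // -> base58 string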
@@ -0,0 +1,4 @@
+ export * from './rabbitMq';
+ export * from './schemas';
+ export * from './utils';
+ export * from './db';