pyswordfish-3.0.4.0-cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (644)
  1. pyswordfish-3.0.4.0.dist-info/METADATA +53 -0
  2. pyswordfish-3.0.4.0.dist-info/RECORD +644 -0
  3. pyswordfish-3.0.4.0.dist-info/WHEEL +5 -0
  4. pyswordfish-3.0.4.0.dist-info/top_level.txt +1 -0
  5. swordfish/__init__.py +102 -0
  6. swordfish/_config.py +901 -0
  7. swordfish/_connection.py +1418 -0
  8. swordfish/_core.py +17 -0
  9. swordfish/_engine.py +1456 -0
  10. swordfish/_enums.py +142 -0
  11. swordfish/_function_bindings.py +23594 -0
  12. swordfish/_function_tools.py +124 -0
  13. swordfish/_helper.py +57 -0
  14. swordfish/_runtime.py +158 -0
  15. swordfish/_sqlbuilder.py +549 -0
  16. swordfish/_streaming.py +333 -0
  17. swordfish/_swordfishcpp.cp312-win_amd64.pyd +0 -0
  18. swordfish/_swordfishcpp.pyi +4784 -0
  19. swordfish/_translator.py +580 -0
  20. swordfish/asset/dolphindb.dos +71 -0
  21. swordfish/asset/tzdb/Africa/Abidjan +0 -0
  22. swordfish/asset/tzdb/Africa/Accra +0 -0
  23. swordfish/asset/tzdb/Africa/Addis_Ababa +0 -0
  24. swordfish/asset/tzdb/Africa/Algiers +0 -0
  25. swordfish/asset/tzdb/Africa/Asmara +0 -0
  26. swordfish/asset/tzdb/Africa/Asmera +0 -0
  27. swordfish/asset/tzdb/Africa/Bamako +0 -0
  28. swordfish/asset/tzdb/Africa/Bangui +0 -0
  29. swordfish/asset/tzdb/Africa/Banjul +0 -0
  30. swordfish/asset/tzdb/Africa/Bissau +0 -0
  31. swordfish/asset/tzdb/Africa/Blantyre +0 -0
  32. swordfish/asset/tzdb/Africa/Brazzaville +0 -0
  33. swordfish/asset/tzdb/Africa/Bujumbura +0 -0
  34. swordfish/asset/tzdb/Africa/Cairo +0 -0
  35. swordfish/asset/tzdb/Africa/Casablanca +0 -0
  36. swordfish/asset/tzdb/Africa/Ceuta +0 -0
  37. swordfish/asset/tzdb/Africa/Conakry +0 -0
  38. swordfish/asset/tzdb/Africa/Dakar +0 -0
  39. swordfish/asset/tzdb/Africa/Dar_es_Salaam +0 -0
  40. swordfish/asset/tzdb/Africa/Djibouti +0 -0
  41. swordfish/asset/tzdb/Africa/Douala +0 -0
  42. swordfish/asset/tzdb/Africa/El_Aaiun +0 -0
  43. swordfish/asset/tzdb/Africa/Freetown +0 -0
  44. swordfish/asset/tzdb/Africa/Gaborone +0 -0
  45. swordfish/asset/tzdb/Africa/Harare +0 -0
  46. swordfish/asset/tzdb/Africa/Johannesburg +0 -0
  47. swordfish/asset/tzdb/Africa/Juba +0 -0
  48. swordfish/asset/tzdb/Africa/Kampala +0 -0
  49. swordfish/asset/tzdb/Africa/Khartoum +0 -0
  50. swordfish/asset/tzdb/Africa/Kigali +0 -0
  51. swordfish/asset/tzdb/Africa/Kinshasa +0 -0
  52. swordfish/asset/tzdb/Africa/Lagos +0 -0
  53. swordfish/asset/tzdb/Africa/Libreville +0 -0
  54. swordfish/asset/tzdb/Africa/Lome +0 -0
  55. swordfish/asset/tzdb/Africa/Luanda +0 -0
  56. swordfish/asset/tzdb/Africa/Lubumbashi +0 -0
  57. swordfish/asset/tzdb/Africa/Lusaka +0 -0
  58. swordfish/asset/tzdb/Africa/Malabo +0 -0
  59. swordfish/asset/tzdb/Africa/Maputo +0 -0
  60. swordfish/asset/tzdb/Africa/Maseru +0 -0
  61. swordfish/asset/tzdb/Africa/Mbabane +0 -0
  62. swordfish/asset/tzdb/Africa/Mogadishu +0 -0
  63. swordfish/asset/tzdb/Africa/Monrovia +0 -0
  64. swordfish/asset/tzdb/Africa/Nairobi +0 -0
  65. swordfish/asset/tzdb/Africa/Ndjamena +0 -0
  66. swordfish/asset/tzdb/Africa/Niamey +0 -0
  67. swordfish/asset/tzdb/Africa/Nouakchott +0 -0
  68. swordfish/asset/tzdb/Africa/Ouagadougou +0 -0
  69. swordfish/asset/tzdb/Africa/Porto-Novo +0 -0
  70. swordfish/asset/tzdb/Africa/Sao_Tome +0 -0
  71. swordfish/asset/tzdb/Africa/Timbuktu +0 -0
  72. swordfish/asset/tzdb/Africa/Tripoli +0 -0
  73. swordfish/asset/tzdb/Africa/Tunis +0 -0
  74. swordfish/asset/tzdb/Africa/Windhoek +0 -0
  75. swordfish/asset/tzdb/America/Adak +0 -0
  76. swordfish/asset/tzdb/America/Anchorage +0 -0
  77. swordfish/asset/tzdb/America/Anguilla +0 -0
  78. swordfish/asset/tzdb/America/Antigua +0 -0
  79. swordfish/asset/tzdb/America/Araguaina +0 -0
  80. swordfish/asset/tzdb/America/Argentina/Buenos_Aires +0 -0
  81. swordfish/asset/tzdb/America/Argentina/Catamarca +0 -0
  82. swordfish/asset/tzdb/America/Argentina/ComodRivadavia +0 -0
  83. swordfish/asset/tzdb/America/Argentina/Cordoba +0 -0
  84. swordfish/asset/tzdb/America/Argentina/Jujuy +0 -0
  85. swordfish/asset/tzdb/America/Argentina/La_Rioja +0 -0
  86. swordfish/asset/tzdb/America/Argentina/Mendoza +0 -0
  87. swordfish/asset/tzdb/America/Argentina/Rio_Gallegos +0 -0
  88. swordfish/asset/tzdb/America/Argentina/Salta +0 -0
  89. swordfish/asset/tzdb/America/Argentina/San_Juan +0 -0
  90. swordfish/asset/tzdb/America/Argentina/San_Luis +0 -0
  91. swordfish/asset/tzdb/America/Argentina/Tucuman +0 -0
  92. swordfish/asset/tzdb/America/Argentina/Ushuaia +0 -0
  93. swordfish/asset/tzdb/America/Aruba +0 -0
  94. swordfish/asset/tzdb/America/Asuncion +0 -0
  95. swordfish/asset/tzdb/America/Atikokan +0 -0
  96. swordfish/asset/tzdb/America/Atka +0 -0
  97. swordfish/asset/tzdb/America/Bahia +0 -0
  98. swordfish/asset/tzdb/America/Bahia_Banderas +0 -0
  99. swordfish/asset/tzdb/America/Barbados +0 -0
  100. swordfish/asset/tzdb/America/Belem +0 -0
  101. swordfish/asset/tzdb/America/Belize +0 -0
  102. swordfish/asset/tzdb/America/Blanc-Sablon +0 -0
  103. swordfish/asset/tzdb/America/Boa_Vista +0 -0
  104. swordfish/asset/tzdb/America/Bogota +0 -0
  105. swordfish/asset/tzdb/America/Boise +0 -0
  106. swordfish/asset/tzdb/America/Buenos_Aires +0 -0
  107. swordfish/asset/tzdb/America/Cambridge_Bay +0 -0
  108. swordfish/asset/tzdb/America/Campo_Grande +0 -0
  109. swordfish/asset/tzdb/America/Cancun +0 -0
  110. swordfish/asset/tzdb/America/Caracas +0 -0
  111. swordfish/asset/tzdb/America/Catamarca +0 -0
  112. swordfish/asset/tzdb/America/Cayenne +0 -0
  113. swordfish/asset/tzdb/America/Cayman +0 -0
  114. swordfish/asset/tzdb/America/Chicago +0 -0
  115. swordfish/asset/tzdb/America/Chihuahua +0 -0
  116. swordfish/asset/tzdb/America/Coral_Harbour +0 -0
  117. swordfish/asset/tzdb/America/Cordoba +0 -0
  118. swordfish/asset/tzdb/America/Costa_Rica +0 -0
  119. swordfish/asset/tzdb/America/Creston +0 -0
  120. swordfish/asset/tzdb/America/Cuiaba +0 -0
  121. swordfish/asset/tzdb/America/Curacao +0 -0
  122. swordfish/asset/tzdb/America/Danmarkshavn +0 -0
  123. swordfish/asset/tzdb/America/Dawson +0 -0
  124. swordfish/asset/tzdb/America/Dawson_Creek +0 -0
  125. swordfish/asset/tzdb/America/Denver +0 -0
  126. swordfish/asset/tzdb/America/Detroit +0 -0
  127. swordfish/asset/tzdb/America/Dominica +0 -0
  128. swordfish/asset/tzdb/America/Edmonton +0 -0
  129. swordfish/asset/tzdb/America/Eirunepe +0 -0
  130. swordfish/asset/tzdb/America/El_Salvador +0 -0
  131. swordfish/asset/tzdb/America/Ensenada +0 -0
  132. swordfish/asset/tzdb/America/Fort_Nelson +0 -0
  133. swordfish/asset/tzdb/America/Fort_Wayne +0 -0
  134. swordfish/asset/tzdb/America/Fortaleza +0 -0
  135. swordfish/asset/tzdb/America/Glace_Bay +0 -0
  136. swordfish/asset/tzdb/America/Godthab +0 -0
  137. swordfish/asset/tzdb/America/Goose_Bay +0 -0
  138. swordfish/asset/tzdb/America/Grand_Turk +0 -0
  139. swordfish/asset/tzdb/America/Grenada +0 -0
  140. swordfish/asset/tzdb/America/Guadeloupe +0 -0
  141. swordfish/asset/tzdb/America/Guatemala +0 -0
  142. swordfish/asset/tzdb/America/Guayaquil +0 -0
  143. swordfish/asset/tzdb/America/Guyana +0 -0
  144. swordfish/asset/tzdb/America/Halifax +0 -0
  145. swordfish/asset/tzdb/America/Havana +0 -0
  146. swordfish/asset/tzdb/America/Hermosillo +0 -0
  147. swordfish/asset/tzdb/America/Indiana/Indianapolis +0 -0
  148. swordfish/asset/tzdb/America/Indiana/Knox +0 -0
  149. swordfish/asset/tzdb/America/Indiana/Marengo +0 -0
  150. swordfish/asset/tzdb/America/Indiana/Petersburg +0 -0
  151. swordfish/asset/tzdb/America/Indiana/Tell_City +0 -0
  152. swordfish/asset/tzdb/America/Indiana/Vevay +0 -0
  153. swordfish/asset/tzdb/America/Indiana/Vincennes +0 -0
  154. swordfish/asset/tzdb/America/Indiana/Winamac +0 -0
  155. swordfish/asset/tzdb/America/Indianapolis +0 -0
  156. swordfish/asset/tzdb/America/Inuvik +0 -0
  157. swordfish/asset/tzdb/America/Iqaluit +0 -0
  158. swordfish/asset/tzdb/America/Jamaica +0 -0
  159. swordfish/asset/tzdb/America/Jujuy +0 -0
  160. swordfish/asset/tzdb/America/Juneau +0 -0
  161. swordfish/asset/tzdb/America/Kentucky/Louisville +0 -0
  162. swordfish/asset/tzdb/America/Kentucky/Monticello +0 -0
  163. swordfish/asset/tzdb/America/Knox_IN +0 -0
  164. swordfish/asset/tzdb/America/Kralendijk +0 -0
  165. swordfish/asset/tzdb/America/La_Paz +0 -0
  166. swordfish/asset/tzdb/America/Lima +0 -0
  167. swordfish/asset/tzdb/America/Los_Angeles +0 -0
  168. swordfish/asset/tzdb/America/Louisville +0 -0
  169. swordfish/asset/tzdb/America/Lower_Princes +0 -0
  170. swordfish/asset/tzdb/America/Maceio +0 -0
  171. swordfish/asset/tzdb/America/Managua +0 -0
  172. swordfish/asset/tzdb/America/Manaus +0 -0
  173. swordfish/asset/tzdb/America/Marigot +0 -0
  174. swordfish/asset/tzdb/America/Martinique +0 -0
  175. swordfish/asset/tzdb/America/Matamoros +0 -0
  176. swordfish/asset/tzdb/America/Mazatlan +0 -0
  177. swordfish/asset/tzdb/America/Mendoza +0 -0
  178. swordfish/asset/tzdb/America/Menominee +0 -0
  179. swordfish/asset/tzdb/America/Merida +0 -0
  180. swordfish/asset/tzdb/America/Metlakatla +0 -0
  181. swordfish/asset/tzdb/America/Mexico_City +0 -0
  182. swordfish/asset/tzdb/America/Miquelon +0 -0
  183. swordfish/asset/tzdb/America/Moncton +0 -0
  184. swordfish/asset/tzdb/America/Monterrey +0 -0
  185. swordfish/asset/tzdb/America/Montevideo +0 -0
  186. swordfish/asset/tzdb/America/Montreal +0 -0
  187. swordfish/asset/tzdb/America/Montserrat +0 -0
  188. swordfish/asset/tzdb/America/Nassau +0 -0
  189. swordfish/asset/tzdb/America/New_York +0 -0
  190. swordfish/asset/tzdb/America/Nipigon +0 -0
  191. swordfish/asset/tzdb/America/Nome +0 -0
  192. swordfish/asset/tzdb/America/Noronha +0 -0
  193. swordfish/asset/tzdb/America/North_Dakota/Beulah +0 -0
  194. swordfish/asset/tzdb/America/North_Dakota/Center +0 -0
  195. swordfish/asset/tzdb/America/North_Dakota/New_Salem +0 -0
  196. swordfish/asset/tzdb/America/Ojinaga +0 -0
  197. swordfish/asset/tzdb/America/Panama +0 -0
  198. swordfish/asset/tzdb/America/Pangnirtung +0 -0
  199. swordfish/asset/tzdb/America/Paramaribo +0 -0
  200. swordfish/asset/tzdb/America/Phoenix +0 -0
  201. swordfish/asset/tzdb/America/Port-au-Prince +0 -0
  202. swordfish/asset/tzdb/America/Port_of_Spain +0 -0
  203. swordfish/asset/tzdb/America/Porto_Acre +0 -0
  204. swordfish/asset/tzdb/America/Porto_Velho +0 -0
  205. swordfish/asset/tzdb/America/Puerto_Rico +0 -0
  206. swordfish/asset/tzdb/America/Punta_Arenas +0 -0
  207. swordfish/asset/tzdb/America/Rainy_River +0 -0
  208. swordfish/asset/tzdb/America/Rankin_Inlet +0 -0
  209. swordfish/asset/tzdb/America/Recife +0 -0
  210. swordfish/asset/tzdb/America/Regina +0 -0
  211. swordfish/asset/tzdb/America/Resolute +0 -0
  212. swordfish/asset/tzdb/America/Rio_Branco +0 -0
  213. swordfish/asset/tzdb/America/Rosario +0 -0
  214. swordfish/asset/tzdb/America/Santa_Isabel +0 -0
  215. swordfish/asset/tzdb/America/Santarem +0 -0
  216. swordfish/asset/tzdb/America/Santiago +0 -0
  217. swordfish/asset/tzdb/America/Santo_Domingo +0 -0
  218. swordfish/asset/tzdb/America/Sao_Paulo +0 -0
  219. swordfish/asset/tzdb/America/Scoresbysund +0 -0
  220. swordfish/asset/tzdb/America/Shiprock +0 -0
  221. swordfish/asset/tzdb/America/Sitka +0 -0
  222. swordfish/asset/tzdb/America/St_Barthelemy +0 -0
  223. swordfish/asset/tzdb/America/St_Johns +0 -0
  224. swordfish/asset/tzdb/America/St_Kitts +0 -0
  225. swordfish/asset/tzdb/America/St_Lucia +0 -0
  226. swordfish/asset/tzdb/America/St_Thomas +0 -0
  227. swordfish/asset/tzdb/America/St_Vincent +0 -0
  228. swordfish/asset/tzdb/America/Swift_Current +0 -0
  229. swordfish/asset/tzdb/America/Tegucigalpa +0 -0
  230. swordfish/asset/tzdb/America/Thule +0 -0
  231. swordfish/asset/tzdb/America/Thunder_Bay +0 -0
  232. swordfish/asset/tzdb/America/Tijuana +0 -0
  233. swordfish/asset/tzdb/America/Toronto +0 -0
  234. swordfish/asset/tzdb/America/Tortola +0 -0
  235. swordfish/asset/tzdb/America/Vancouver +0 -0
  236. swordfish/asset/tzdb/America/Virgin +0 -0
  237. swordfish/asset/tzdb/America/Whitehorse +0 -0
  238. swordfish/asset/tzdb/America/Winnipeg +0 -0
  239. swordfish/asset/tzdb/America/Yakutat +0 -0
  240. swordfish/asset/tzdb/America/Yellowknife +0 -0
  241. swordfish/asset/tzdb/Antarctica/Casey +0 -0
  242. swordfish/asset/tzdb/Antarctica/Davis +0 -0
  243. swordfish/asset/tzdb/Antarctica/DumontDUrville +0 -0
  244. swordfish/asset/tzdb/Antarctica/Macquarie +0 -0
  245. swordfish/asset/tzdb/Antarctica/Mawson +0 -0
  246. swordfish/asset/tzdb/Antarctica/McMurdo +0 -0
  247. swordfish/asset/tzdb/Antarctica/Palmer +0 -0
  248. swordfish/asset/tzdb/Antarctica/Rothera +0 -0
  249. swordfish/asset/tzdb/Antarctica/South_Pole +0 -0
  250. swordfish/asset/tzdb/Antarctica/Syowa +0 -0
  251. swordfish/asset/tzdb/Antarctica/Troll +0 -0
  252. swordfish/asset/tzdb/Antarctica/Vostok +0 -0
  253. swordfish/asset/tzdb/Arctic/Longyearbyen +0 -0
  254. swordfish/asset/tzdb/Asia/Aden +0 -0
  255. swordfish/asset/tzdb/Asia/Almaty +0 -0
  256. swordfish/asset/tzdb/Asia/Amman +0 -0
  257. swordfish/asset/tzdb/Asia/Anadyr +0 -0
  258. swordfish/asset/tzdb/Asia/Aqtau +0 -0
  259. swordfish/asset/tzdb/Asia/Aqtobe +0 -0
  260. swordfish/asset/tzdb/Asia/Ashgabat +0 -0
  261. swordfish/asset/tzdb/Asia/Ashkhabad +0 -0
  262. swordfish/asset/tzdb/Asia/Atyrau +0 -0
  263. swordfish/asset/tzdb/Asia/Baghdad +0 -0
  264. swordfish/asset/tzdb/Asia/Bahrain +0 -0
  265. swordfish/asset/tzdb/Asia/Baku +0 -0
  266. swordfish/asset/tzdb/Asia/Bangkok +0 -0
  267. swordfish/asset/tzdb/Asia/Barnaul +0 -0
  268. swordfish/asset/tzdb/Asia/Beirut +0 -0
  269. swordfish/asset/tzdb/Asia/Bishkek +0 -0
  270. swordfish/asset/tzdb/Asia/Brunei +0 -0
  271. swordfish/asset/tzdb/Asia/Calcutta +0 -0
  272. swordfish/asset/tzdb/Asia/Chita +0 -0
  273. swordfish/asset/tzdb/Asia/Choibalsan +0 -0
  274. swordfish/asset/tzdb/Asia/Chongqing +0 -0
  275. swordfish/asset/tzdb/Asia/Chungking +0 -0
  276. swordfish/asset/tzdb/Asia/Colombo +0 -0
  277. swordfish/asset/tzdb/Asia/Dacca +0 -0
  278. swordfish/asset/tzdb/Asia/Damascus +0 -0
  279. swordfish/asset/tzdb/Asia/Dhaka +0 -0
  280. swordfish/asset/tzdb/Asia/Dili +0 -0
  281. swordfish/asset/tzdb/Asia/Dubai +0 -0
  282. swordfish/asset/tzdb/Asia/Dushanbe +0 -0
  283. swordfish/asset/tzdb/Asia/Famagusta +0 -0
  284. swordfish/asset/tzdb/Asia/Gaza +0 -0
  285. swordfish/asset/tzdb/Asia/Harbin +0 -0
  286. swordfish/asset/tzdb/Asia/Hebron +0 -0
  287. swordfish/asset/tzdb/Asia/Ho_Chi_Minh +0 -0
  288. swordfish/asset/tzdb/Asia/Hong_Kong +0 -0
  289. swordfish/asset/tzdb/Asia/Hovd +0 -0
  290. swordfish/asset/tzdb/Asia/Irkutsk +0 -0
  291. swordfish/asset/tzdb/Asia/Istanbul +0 -0
  292. swordfish/asset/tzdb/Asia/Jakarta +0 -0
  293. swordfish/asset/tzdb/Asia/Jayapura +0 -0
  294. swordfish/asset/tzdb/Asia/Jerusalem +0 -0
  295. swordfish/asset/tzdb/Asia/Kabul +0 -0
  296. swordfish/asset/tzdb/Asia/Kamchatka +0 -0
  297. swordfish/asset/tzdb/Asia/Karachi +0 -0
  298. swordfish/asset/tzdb/Asia/Kashgar +0 -0
  299. swordfish/asset/tzdb/Asia/Kathmandu +0 -0
  300. swordfish/asset/tzdb/Asia/Katmandu +0 -0
  301. swordfish/asset/tzdb/Asia/Khandyga +0 -0
  302. swordfish/asset/tzdb/Asia/Kolkata +0 -0
  303. swordfish/asset/tzdb/Asia/Krasnoyarsk +0 -0
  304. swordfish/asset/tzdb/Asia/Kuala_Lumpur +0 -0
  305. swordfish/asset/tzdb/Asia/Kuching +0 -0
  306. swordfish/asset/tzdb/Asia/Kuwait +0 -0
  307. swordfish/asset/tzdb/Asia/Macao +0 -0
  308. swordfish/asset/tzdb/Asia/Macau +0 -0
  309. swordfish/asset/tzdb/Asia/Magadan +0 -0
  310. swordfish/asset/tzdb/Asia/Makassar +0 -0
  311. swordfish/asset/tzdb/Asia/Manila +0 -0
  312. swordfish/asset/tzdb/Asia/Muscat +0 -0
  313. swordfish/asset/tzdb/Asia/Nicosia +0 -0
  314. swordfish/asset/tzdb/Asia/Novokuznetsk +0 -0
  315. swordfish/asset/tzdb/Asia/Novosibirsk +0 -0
  316. swordfish/asset/tzdb/Asia/Omsk +0 -0
  317. swordfish/asset/tzdb/Asia/Oral +0 -0
  318. swordfish/asset/tzdb/Asia/Phnom_Penh +0 -0
  319. swordfish/asset/tzdb/Asia/Pontianak +0 -0
  320. swordfish/asset/tzdb/Asia/Pyongyang +0 -0
  321. swordfish/asset/tzdb/Asia/Qatar +0 -0
  322. swordfish/asset/tzdb/Asia/Qyzylorda +0 -0
  323. swordfish/asset/tzdb/Asia/Rangoon +0 -0
  324. swordfish/asset/tzdb/Asia/Riyadh +0 -0
  325. swordfish/asset/tzdb/Asia/Saigon +0 -0
  326. swordfish/asset/tzdb/Asia/Sakhalin +0 -0
  327. swordfish/asset/tzdb/Asia/Samarkand +0 -0
  328. swordfish/asset/tzdb/Asia/Seoul +0 -0
  329. swordfish/asset/tzdb/Asia/Shanghai +0 -0
  330. swordfish/asset/tzdb/Asia/Singapore +0 -0
  331. swordfish/asset/tzdb/Asia/Srednekolymsk +0 -0
  332. swordfish/asset/tzdb/Asia/Taipei +0 -0
  333. swordfish/asset/tzdb/Asia/Tashkent +0 -0
  334. swordfish/asset/tzdb/Asia/Tbilisi +0 -0
  335. swordfish/asset/tzdb/Asia/Tehran +0 -0
  336. swordfish/asset/tzdb/Asia/Tel_Aviv +0 -0
  337. swordfish/asset/tzdb/Asia/Thimbu +0 -0
  338. swordfish/asset/tzdb/Asia/Thimphu +0 -0
  339. swordfish/asset/tzdb/Asia/Tokyo +0 -0
  340. swordfish/asset/tzdb/Asia/Tomsk +0 -0
  341. swordfish/asset/tzdb/Asia/Ujung_Pandang +0 -0
  342. swordfish/asset/tzdb/Asia/Ulaanbaatar +0 -0
  343. swordfish/asset/tzdb/Asia/Ulan_Bator +0 -0
  344. swordfish/asset/tzdb/Asia/Urumqi +0 -0
  345. swordfish/asset/tzdb/Asia/Ust-Nera +0 -0
  346. swordfish/asset/tzdb/Asia/Vientiane +0 -0
  347. swordfish/asset/tzdb/Asia/Vladivostok +0 -0
  348. swordfish/asset/tzdb/Asia/Yakutsk +0 -0
  349. swordfish/asset/tzdb/Asia/Yangon +0 -0
  350. swordfish/asset/tzdb/Asia/Yekaterinburg +0 -0
  351. swordfish/asset/tzdb/Asia/Yerevan +0 -0
  352. swordfish/asset/tzdb/Atlantic/Azores +0 -0
  353. swordfish/asset/tzdb/Atlantic/Bermuda +0 -0
  354. swordfish/asset/tzdb/Atlantic/Canary +0 -0
  355. swordfish/asset/tzdb/Atlantic/Cape_Verde +0 -0
  356. swordfish/asset/tzdb/Atlantic/Faeroe +0 -0
  357. swordfish/asset/tzdb/Atlantic/Faroe +0 -0
  358. swordfish/asset/tzdb/Atlantic/Jan_Mayen +0 -0
  359. swordfish/asset/tzdb/Atlantic/Madeira +0 -0
  360. swordfish/asset/tzdb/Atlantic/Reykjavik +0 -0
  361. swordfish/asset/tzdb/Atlantic/South_Georgia +0 -0
  362. swordfish/asset/tzdb/Atlantic/St_Helena +0 -0
  363. swordfish/asset/tzdb/Atlantic/Stanley +0 -0
  364. swordfish/asset/tzdb/Australia/ACT +0 -0
  365. swordfish/asset/tzdb/Australia/Adelaide +0 -0
  366. swordfish/asset/tzdb/Australia/Brisbane +0 -0
  367. swordfish/asset/tzdb/Australia/Broken_Hill +0 -0
  368. swordfish/asset/tzdb/Australia/Canberra +0 -0
  369. swordfish/asset/tzdb/Australia/Currie +0 -0
  370. swordfish/asset/tzdb/Australia/Darwin +0 -0
  371. swordfish/asset/tzdb/Australia/Eucla +0 -0
  372. swordfish/asset/tzdb/Australia/Hobart +0 -0
  373. swordfish/asset/tzdb/Australia/LHI +0 -0
  374. swordfish/asset/tzdb/Australia/Lindeman +0 -0
  375. swordfish/asset/tzdb/Australia/Lord_Howe +0 -0
  376. swordfish/asset/tzdb/Australia/Melbourne +0 -0
  377. swordfish/asset/tzdb/Australia/NSW +0 -0
  378. swordfish/asset/tzdb/Australia/North +0 -0
  379. swordfish/asset/tzdb/Australia/Perth +0 -0
  380. swordfish/asset/tzdb/Australia/Queensland +0 -0
  381. swordfish/asset/tzdb/Australia/South +0 -0
  382. swordfish/asset/tzdb/Australia/Sydney +0 -0
  383. swordfish/asset/tzdb/Australia/Tasmania +0 -0
  384. swordfish/asset/tzdb/Australia/Victoria +0 -0
  385. swordfish/asset/tzdb/Australia/West +0 -0
  386. swordfish/asset/tzdb/Australia/Yancowinna +0 -0
  387. swordfish/asset/tzdb/Brazil/Acre +0 -0
  388. swordfish/asset/tzdb/Brazil/DeNoronha +0 -0
  389. swordfish/asset/tzdb/Brazil/East +0 -0
  390. swordfish/asset/tzdb/Brazil/West +0 -0
  391. swordfish/asset/tzdb/CET +0 -0
  392. swordfish/asset/tzdb/CST6CDT +0 -0
  393. swordfish/asset/tzdb/Canada/Atlantic +0 -0
  394. swordfish/asset/tzdb/Canada/Central +0 -0
  395. swordfish/asset/tzdb/Canada/Eastern +0 -0
  396. swordfish/asset/tzdb/Canada/Mountain +0 -0
  397. swordfish/asset/tzdb/Canada/Newfoundland +0 -0
  398. swordfish/asset/tzdb/Canada/Pacific +0 -0
  399. swordfish/asset/tzdb/Canada/Saskatchewan +0 -0
  400. swordfish/asset/tzdb/Canada/Yukon +0 -0
  401. swordfish/asset/tzdb/Chile/Continental +0 -0
  402. swordfish/asset/tzdb/Chile/EasterIsland +0 -0
  403. swordfish/asset/tzdb/Cuba +0 -0
  404. swordfish/asset/tzdb/EET +0 -0
  405. swordfish/asset/tzdb/EST +0 -0
  406. swordfish/asset/tzdb/EST5EDT +0 -0
  407. swordfish/asset/tzdb/Egypt +0 -0
  408. swordfish/asset/tzdb/Eire +0 -0
  409. swordfish/asset/tzdb/Etc/GMT +0 -0
  410. swordfish/asset/tzdb/Etc/GMT+0 +0 -0
  411. swordfish/asset/tzdb/Etc/GMT+1 +0 -0
  412. swordfish/asset/tzdb/Etc/GMT+10 +0 -0
  413. swordfish/asset/tzdb/Etc/GMT+11 +0 -0
  414. swordfish/asset/tzdb/Etc/GMT+12 +0 -0
  415. swordfish/asset/tzdb/Etc/GMT+2 +0 -0
  416. swordfish/asset/tzdb/Etc/GMT+3 +0 -0
  417. swordfish/asset/tzdb/Etc/GMT+4 +0 -0
  418. swordfish/asset/tzdb/Etc/GMT+5 +0 -0
  419. swordfish/asset/tzdb/Etc/GMT+6 +0 -0
  420. swordfish/asset/tzdb/Etc/GMT+7 +0 -0
  421. swordfish/asset/tzdb/Etc/GMT+8 +0 -0
  422. swordfish/asset/tzdb/Etc/GMT+9 +0 -0
  423. swordfish/asset/tzdb/Etc/GMT-0 +0 -0
  424. swordfish/asset/tzdb/Etc/GMT-1 +0 -0
  425. swordfish/asset/tzdb/Etc/GMT-10 +0 -0
  426. swordfish/asset/tzdb/Etc/GMT-11 +0 -0
  427. swordfish/asset/tzdb/Etc/GMT-12 +0 -0
  428. swordfish/asset/tzdb/Etc/GMT-13 +0 -0
  429. swordfish/asset/tzdb/Etc/GMT-14 +0 -0
  430. swordfish/asset/tzdb/Etc/GMT-2 +0 -0
  431. swordfish/asset/tzdb/Etc/GMT-3 +0 -0
  432. swordfish/asset/tzdb/Etc/GMT-4 +0 -0
  433. swordfish/asset/tzdb/Etc/GMT-5 +0 -0
  434. swordfish/asset/tzdb/Etc/GMT-6 +0 -0
  435. swordfish/asset/tzdb/Etc/GMT-7 +0 -0
  436. swordfish/asset/tzdb/Etc/GMT-8 +0 -0
  437. swordfish/asset/tzdb/Etc/GMT-9 +0 -0
  438. swordfish/asset/tzdb/Etc/GMT0 +0 -0
  439. swordfish/asset/tzdb/Etc/Greenwich +0 -0
  440. swordfish/asset/tzdb/Etc/UCT +0 -0
  441. swordfish/asset/tzdb/Etc/UTC +0 -0
  442. swordfish/asset/tzdb/Etc/Universal +0 -0
  443. swordfish/asset/tzdb/Etc/Zulu +0 -0
  444. swordfish/asset/tzdb/Europe/Amsterdam +0 -0
  445. swordfish/asset/tzdb/Europe/Andorra +0 -0
  446. swordfish/asset/tzdb/Europe/Astrakhan +0 -0
  447. swordfish/asset/tzdb/Europe/Athens +0 -0
  448. swordfish/asset/tzdb/Europe/Belfast +0 -0
  449. swordfish/asset/tzdb/Europe/Belgrade +0 -0
  450. swordfish/asset/tzdb/Europe/Berlin +0 -0
  451. swordfish/asset/tzdb/Europe/Bratislava +0 -0
  452. swordfish/asset/tzdb/Europe/Brussels +0 -0
  453. swordfish/asset/tzdb/Europe/Bucharest +0 -0
  454. swordfish/asset/tzdb/Europe/Budapest +0 -0
  455. swordfish/asset/tzdb/Europe/Busingen +0 -0
  456. swordfish/asset/tzdb/Europe/Chisinau +0 -0
  457. swordfish/asset/tzdb/Europe/Copenhagen +0 -0
  458. swordfish/asset/tzdb/Europe/Dublin +0 -0
  459. swordfish/asset/tzdb/Europe/Gibraltar +0 -0
  460. swordfish/asset/tzdb/Europe/Guernsey +0 -0
  461. swordfish/asset/tzdb/Europe/Helsinki +0 -0
  462. swordfish/asset/tzdb/Europe/Isle_of_Man +0 -0
  463. swordfish/asset/tzdb/Europe/Istanbul +0 -0
  464. swordfish/asset/tzdb/Europe/Jersey +0 -0
  465. swordfish/asset/tzdb/Europe/Kaliningrad +0 -0
  466. swordfish/asset/tzdb/Europe/Kiev +0 -0
  467. swordfish/asset/tzdb/Europe/Kirov +0 -0
  468. swordfish/asset/tzdb/Europe/Lisbon +0 -0
  469. swordfish/asset/tzdb/Europe/Ljubljana +0 -0
  470. swordfish/asset/tzdb/Europe/London +0 -0
  471. swordfish/asset/tzdb/Europe/Luxembourg +0 -0
  472. swordfish/asset/tzdb/Europe/Madrid +0 -0
  473. swordfish/asset/tzdb/Europe/Malta +0 -0
  474. swordfish/asset/tzdb/Europe/Mariehamn +0 -0
  475. swordfish/asset/tzdb/Europe/Minsk +0 -0
  476. swordfish/asset/tzdb/Europe/Monaco +0 -0
  477. swordfish/asset/tzdb/Europe/Moscow +0 -0
  478. swordfish/asset/tzdb/Europe/Nicosia +0 -0
  479. swordfish/asset/tzdb/Europe/Oslo +0 -0
  480. swordfish/asset/tzdb/Europe/Paris +0 -0
  481. swordfish/asset/tzdb/Europe/Podgorica +0 -0
  482. swordfish/asset/tzdb/Europe/Prague +0 -0
  483. swordfish/asset/tzdb/Europe/Riga +0 -0
  484. swordfish/asset/tzdb/Europe/Rome +0 -0
  485. swordfish/asset/tzdb/Europe/Samara +0 -0
  486. swordfish/asset/tzdb/Europe/San_Marino +0 -0
  487. swordfish/asset/tzdb/Europe/Sarajevo +0 -0
  488. swordfish/asset/tzdb/Europe/Saratov +0 -0
  489. swordfish/asset/tzdb/Europe/Simferopol +0 -0
  490. swordfish/asset/tzdb/Europe/Skopje +0 -0
  491. swordfish/asset/tzdb/Europe/Sofia +0 -0
  492. swordfish/asset/tzdb/Europe/Stockholm +0 -0
  493. swordfish/asset/tzdb/Europe/Tallinn +0 -0
  494. swordfish/asset/tzdb/Europe/Tirane +0 -0
  495. swordfish/asset/tzdb/Europe/Tiraspol +0 -0
  496. swordfish/asset/tzdb/Europe/Ulyanovsk +0 -0
  497. swordfish/asset/tzdb/Europe/Uzhgorod +0 -0
  498. swordfish/asset/tzdb/Europe/Vaduz +0 -0
  499. swordfish/asset/tzdb/Europe/Vatican +0 -0
  500. swordfish/asset/tzdb/Europe/Vienna +0 -0
  501. swordfish/asset/tzdb/Europe/Vilnius +0 -0
  502. swordfish/asset/tzdb/Europe/Volgograd +0 -0
  503. swordfish/asset/tzdb/Europe/Warsaw +0 -0
  504. swordfish/asset/tzdb/Europe/Zagreb +0 -0
  505. swordfish/asset/tzdb/Europe/Zaporozhye +0 -0
  506. swordfish/asset/tzdb/Europe/Zurich +0 -0
  507. swordfish/asset/tzdb/Factory +0 -0
  508. swordfish/asset/tzdb/GB +0 -0
  509. swordfish/asset/tzdb/GB-Eire +0 -0
  510. swordfish/asset/tzdb/GMT +0 -0
  511. swordfish/asset/tzdb/GMT+0 +0 -0
  512. swordfish/asset/tzdb/GMT-0 +0 -0
  513. swordfish/asset/tzdb/GMT0 +0 -0
  514. swordfish/asset/tzdb/Greenwich +0 -0
  515. swordfish/asset/tzdb/HST +0 -0
  516. swordfish/asset/tzdb/Hongkong +0 -0
  517. swordfish/asset/tzdb/Iceland +0 -0
  518. swordfish/asset/tzdb/Indian/Antananarivo +0 -0
  519. swordfish/asset/tzdb/Indian/Chagos +0 -0
  520. swordfish/asset/tzdb/Indian/Christmas +0 -0
  521. swordfish/asset/tzdb/Indian/Cocos +0 -0
  522. swordfish/asset/tzdb/Indian/Comoro +0 -0
  523. swordfish/asset/tzdb/Indian/Kerguelen +0 -0
  524. swordfish/asset/tzdb/Indian/Mahe +0 -0
  525. swordfish/asset/tzdb/Indian/Maldives +0 -0
  526. swordfish/asset/tzdb/Indian/Mauritius +0 -0
  527. swordfish/asset/tzdb/Indian/Mayotte +0 -0
  528. swordfish/asset/tzdb/Indian/Reunion +0 -0
  529. swordfish/asset/tzdb/Iran +0 -0
  530. swordfish/asset/tzdb/Israel +0 -0
  531. swordfish/asset/tzdb/Jamaica +0 -0
  532. swordfish/asset/tzdb/Japan +0 -0
  533. swordfish/asset/tzdb/Kwajalein +0 -0
  534. swordfish/asset/tzdb/Libya +0 -0
  535. swordfish/asset/tzdb/MET +0 -0
  536. swordfish/asset/tzdb/MST +0 -0
  537. swordfish/asset/tzdb/MST7MDT +0 -0
  538. swordfish/asset/tzdb/Mexico/BajaNorte +0 -0
  539. swordfish/asset/tzdb/Mexico/BajaSur +0 -0
  540. swordfish/asset/tzdb/Mexico/General +0 -0
  541. swordfish/asset/tzdb/NZ +0 -0
  542. swordfish/asset/tzdb/NZ-CHAT +0 -0
  543. swordfish/asset/tzdb/Navajo +0 -0
  544. swordfish/asset/tzdb/PRC +0 -0
  545. swordfish/asset/tzdb/PST8PDT +0 -0
  546. swordfish/asset/tzdb/Pacific/Apia +0 -0
  547. swordfish/asset/tzdb/Pacific/Auckland +0 -0
  548. swordfish/asset/tzdb/Pacific/Bougainville +0 -0
  549. swordfish/asset/tzdb/Pacific/Chatham +0 -0
  550. swordfish/asset/tzdb/Pacific/Chuuk +0 -0
  551. swordfish/asset/tzdb/Pacific/Easter +0 -0
  552. swordfish/asset/tzdb/Pacific/Efate +0 -0
  553. swordfish/asset/tzdb/Pacific/Enderbury +0 -0
  554. swordfish/asset/tzdb/Pacific/Fakaofo +0 -0
  555. swordfish/asset/tzdb/Pacific/Fiji +0 -0
  556. swordfish/asset/tzdb/Pacific/Funafuti +0 -0
  557. swordfish/asset/tzdb/Pacific/Galapagos +0 -0
  558. swordfish/asset/tzdb/Pacific/Gambier +0 -0
  559. swordfish/asset/tzdb/Pacific/Guadalcanal +0 -0
  560. swordfish/asset/tzdb/Pacific/Guam +0 -0
  561. swordfish/asset/tzdb/Pacific/Honolulu +0 -0
  562. swordfish/asset/tzdb/Pacific/Johnston +0 -0
  563. swordfish/asset/tzdb/Pacific/Kiritimati +0 -0
  564. swordfish/asset/tzdb/Pacific/Kosrae +0 -0
  565. swordfish/asset/tzdb/Pacific/Kwajalein +0 -0
  566. swordfish/asset/tzdb/Pacific/Majuro +0 -0
  567. swordfish/asset/tzdb/Pacific/Marquesas +0 -0
  568. swordfish/asset/tzdb/Pacific/Midway +0 -0
  569. swordfish/asset/tzdb/Pacific/Nauru +0 -0
  570. swordfish/asset/tzdb/Pacific/Niue +0 -0
  571. swordfish/asset/tzdb/Pacific/Norfolk +0 -0
  572. swordfish/asset/tzdb/Pacific/Noumea +0 -0
  573. swordfish/asset/tzdb/Pacific/Pago_Pago +0 -0
  574. swordfish/asset/tzdb/Pacific/Palau +0 -0
  575. swordfish/asset/tzdb/Pacific/Pitcairn +0 -0
  576. swordfish/asset/tzdb/Pacific/Pohnpei +0 -0
  577. swordfish/asset/tzdb/Pacific/Ponape +0 -0
  578. swordfish/asset/tzdb/Pacific/Port_Moresby +0 -0
  579. swordfish/asset/tzdb/Pacific/Rarotonga +0 -0
  580. swordfish/asset/tzdb/Pacific/Saipan +0 -0
  581. swordfish/asset/tzdb/Pacific/Samoa +0 -0
  582. swordfish/asset/tzdb/Pacific/Tahiti +0 -0
  583. swordfish/asset/tzdb/Pacific/Tarawa +0 -0
  584. swordfish/asset/tzdb/Pacific/Tongatapu +0 -0
  585. swordfish/asset/tzdb/Pacific/Truk +0 -0
  586. swordfish/asset/tzdb/Pacific/Wake +0 -0
  587. swordfish/asset/tzdb/Pacific/Wallis +0 -0
  588. swordfish/asset/tzdb/Pacific/Yap +0 -0
  589. swordfish/asset/tzdb/Poland +0 -0
  590. swordfish/asset/tzdb/Portugal +0 -0
  591. swordfish/asset/tzdb/ROC +0 -0
  592. swordfish/asset/tzdb/ROK +0 -0
  593. swordfish/asset/tzdb/Singapore +0 -0
  594. swordfish/asset/tzdb/Turkey +0 -0
  595. swordfish/asset/tzdb/UCT +0 -0
  596. swordfish/asset/tzdb/US/Alaska +0 -0
  597. swordfish/asset/tzdb/US/Aleutian +0 -0
  598. swordfish/asset/tzdb/US/Arizona +0 -0
  599. swordfish/asset/tzdb/US/Central +0 -0
  600. swordfish/asset/tzdb/US/East-Indiana +0 -0
  601. swordfish/asset/tzdb/US/Eastern +0 -0
  602. swordfish/asset/tzdb/US/Hawaii +0 -0
  603. swordfish/asset/tzdb/US/Indiana-Starke +0 -0
  604. swordfish/asset/tzdb/US/Michigan +0 -0
  605. swordfish/asset/tzdb/US/Mountain +0 -0
  606. swordfish/asset/tzdb/US/Pacific +0 -0
  607. swordfish/asset/tzdb/US/Samoa +0 -0
  608. swordfish/asset/tzdb/UTC +0 -0
  609. swordfish/asset/tzdb/Universal +0 -0
  610. swordfish/asset/tzdb/W-SU +0 -0
  611. swordfish/asset/tzdb/WET +0 -0
  612. swordfish/asset/tzdb/Zulu +0 -0
  613. swordfish/asset/tzdb/iso3166.tab +274 -0
  614. swordfish/asset/tzdb/leapseconds +61 -0
  615. swordfish/asset/tzdb/posixrules +0 -0
  616. swordfish/asset/tzdb/tzdata.zi +4150 -0
  617. swordfish/asset/tzdb/tzmap_gen.py +27 -0
  618. swordfish/asset/tzdb/tzmapping +501 -0
  619. swordfish/asset/tzdb/windowsZones.xml +781 -0
  620. swordfish/asset/tzdb/zone.tab +448 -0
  621. swordfish/asset/tzdb/zone1970.tab +382 -0
  622. swordfish/connection.py +33 -0
  623. swordfish/data.py +806 -0
  624. swordfish/engine.py +28 -0
  625. swordfish/enums.py +32 -0
  626. swordfish/function.py +3 -0
  627. swordfish/infos.py +53 -0
  628. swordfish/io.py +11 -0
  629. swordfish/libSwordfish.dll +0 -0
  630. swordfish/libclucene-contribs-lib.dll +0 -0
  631. swordfish/libclucene-core.dll +0 -0
  632. swordfish/libclucene-shared.dll +0 -0
  633. swordfish/libgcc_s_seh-1.dll +0 -0
  634. swordfish/libstdc++-6.dll +0 -0
  635. swordfish/libwinpthread-1.dll +0 -0
  636. swordfish/module.py +57 -0
  637. swordfish/plugins/__init__.py +17 -0
  638. swordfish/plugins/backtest/__init__.py +38 -0
  639. swordfish/plugins/backtest/backtest.py +4228 -0
  640. swordfish/plugins/backtest/translator.py +820 -0
  641. swordfish/plugins/matching_engine_simulator.py +247 -0
  642. swordfish/streaming.py +19 -0
  643. swordfish/tools.py +71 -0
  644. swordfish/types.py +30 -0
swordfish/_swordfishcpp.pyi
@@ -0,0 +1,4784 @@
from __future__ import annotations

import abc
from decimal import Decimal
from enum import Enum
from pathlib import Path
from types import FunctionType
from typing import (
    Any,
    BinaryIO,
    Dict,
    List,
    Literal,
    Optional,
    overload,
    Tuple,
    Union,
)
from typing_extensions import Self

import numpy as np
import pandas as pd

from ._engine import (
    Builder,
    StreamBroadcastEngineBuilder,
    CrossSectionalEngineBuilder,
    TimeSeriesEngineBuilder,
    ReactiveStateEngineBuilder,
    StreamFilterEngineBuilder,
)
from .plugins import matching_engine_simulator as plugin_simulator
from .types import TypeDict
from .function import DFLT
from .connection import Connection


def sw_init(args: List[str]) -> None: ...
def sw_uninit() -> None: ...
def sw_check() -> bool: ...
def sw_is_ce_edition() -> bool: ...
def sw_info(host: str, port: int, alias: str): ...


def set_dynamic_config(config_name: str, config_value: Any): ...


def _global_exec(script: str, vars: Optional[Dict[str, Any]] = None) -> Constant: ...
def _global_call(function: str, *args) -> Constant: ...
def _global_vars(var_dict: Dict[str, Any]) -> bool: ...
def _global_undef(name: str) -> None: ...
def _global_sql(script: str, vars: Optional[Dict[str, Any]] = None) -> Constant: ...


EXPARAM_DEFAULT = -0x7fffffff - 1


class Session:
    """
    A Swordfish session that provides script execution and function-calling
    capabilities.
    """
    def exec(self, script: str, vars: Optional[Dict[str, Any]] = None) -> Constant:
        """
        Executes a Swordfish script and returns the result.
        """
        ...

    def call(self, function: str, *args) -> Constant:
        """
        Calls a Swordfish function with the provided arguments.
        """
        ...

    def variable(self, val_maps: Dict[str, Any]) -> bool:
        """
        Defines variables in the Swordfish session from Python values.

        Returns
        -------
        bool
            True if the variables are successfully defined, False otherwise.
        """
        ...

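# Editor's illustration (not part of the package stub): a minimal Session
# round-trip, assuming the in-process default connection declared below; the
# scripts themselves are hypothetical.
def _example_session_roundtrip() -> None:
    session = DefaultSessionConnectionImpl.create().session()
    session.variable({"x": 10})              # define a server-side variable from Python
    result = session.exec("x + 5")           # run a Swordfish script -> Constant
    total = session.call("sum", [1, 2, 3])   # call a function by name
    print(result, total)
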
class RemoteSession(Session):
    pass


class ConnectionImpl:
    def __enter__(self) -> ConnectionImpl: ...
    def __exit__(self, exc_type, exc_value, traceback): ...
    def sql(self, sql: str, *, vars: Dict[str, Any]) -> Constant: ...
    def session(self) -> Session: ...


class BaseConnectionImpl(ConnectionImpl):
    pass


class DefaultSessionConnectionImpl(BaseConnectionImpl):
    @classmethod
    def create(cls) -> DefaultSessionConnectionImpl: ...


class RemoteConnectionImpl(ConnectionImpl):
    @classmethod
    def create(cls, host: str, port: int, user: str = "", password: str = "") -> RemoteConnectionImpl: ...

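# Editor's illustration (not part of the package stub): connections support the
# context-manager protocol, and sql() takes keyword-only vars. The host, port,
# and query below are hypothetical.
def _example_remote_query() -> Constant:
    with RemoteConnectionImpl.create("127.0.0.1", 8848) as conn:
        return conn.sql("select * from t where id < v", vars={"v": 100})
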
class Constant:
    """
    The base class for all Swordfish objects.

    All data types (such as `Int`, `String`) and data forms (such as `Vector`,
    `Table`) inherit from this class.

    Provides common operations and properties for all Swordfish data objects.
    """
    def __str__(self) -> str: ...
    def __repr__(self) -> str: ...
    def __copy__(self) -> Constant: ...
    def __deepcopy__(self, memo) -> Constant: ...
    def __bool__(self) -> bool: ...
    def __int__(self) -> int: ...
    def __len__(self) -> int: ...
    def __hash__(self) -> int: ...
    def __contains__(self, o) -> Bool: ...
    def __neg__(self) -> Constant: ...
    def __abs__(self) -> Constant: ...
    def __add__(self, o: Union[Constant, Any]) -> Constant: ...
    def __radd__(self, o: Union[Constant, Any]) -> Constant: ...
    def __sub__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rsub__(self, o: Union[Constant, Any]) -> Constant: ...
    def __mul__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rmul__(self, o: Union[Constant, Any]) -> Constant: ...
    def __truediv__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rtruediv__(self, o: Union[Constant, Any]) -> Constant: ...
    def __floordiv__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rfloordiv__(self, o: Union[Constant, Any]) -> Constant: ...
    def __mod__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rmod__(self, o: Union[Constant, Any]) -> Constant: ...
    def __pow__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rpow__(self, o: Union[Constant, Any]) -> Constant: ...
    def __lt__(self, o: Union[Constant, Any]) -> Constant: ...
    def __le__(self, o: Union[Constant, Any]) -> Constant: ...
    def __eq__(self, o: Union[Constant, Any]) -> Constant: ...
    def __ne__(self, o: Union[Constant, Any]) -> Constant: ...
    def __gt__(self, o: Union[Constant, Any]) -> Constant: ...
    def __ge__(self, o: Union[Constant, Any]) -> Constant: ...
    def __and__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rand__(self, o: Union[Constant, Any]) -> Constant: ...
    def __or__(self, o: Union[Constant, Any]) -> Constant: ...
    def __ror__(self, o: Union[Constant, Any]) -> Constant: ...
    def __xor__(self, o: Union[Constant, Any]) -> Constant: ...
    def __rxor__(self, o: Union[Constant, Any]) -> Constant: ...

    @property
    def form(self) -> DataForm:
        """
        Retrieves the data form (DataForm) of a Constant object.

        Returns
        -------
        DataForm
            The data form representing the structure of the data (e.g., SCALAR,
            VECTOR, TABLE).
        """
        ...

    @property
    def type(self) -> DataType:
        """
        Retrieves the data type (DataType) of a Constant object.

        Returns
        -------
        DataType
            The data type representing the type of the data (e.g., INT, FLOAT,
            STRING).
        """
        ...

    def is_null(self) -> Constant:
        """
        Checks if the object is a NULL value or contains NULL elements.

        Returns
        -------
        Constant
            A boolean Constant indicating whether elements are NULL. For scalar
            input, returns a single boolean value. For non-scalar input, returns
            a result with the same shape as the input.
        """
        ...

    def rows(self) -> Int:
        """
        Returns the number of rows in the object.

        Returns
        -------
        Int
            The number of rows. For scalar objects, returns 1.
        """
        ...

    def cols(self) -> Int:
        """
        Returns the number of columns in the object.

        Returns
        -------
        Int
            The number of columns. For scalar objects, returns 1.
        """
        ...

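# Editor's illustration (not part of the package stub): Constant overloads the
# standard Python operators and exposes form/type metadata.
def _example_constant_ops(v: Vector) -> None:
    doubled = v * 2            # arithmetic yields a new Constant
    mask = v.is_null()         # elementwise NULL check, same shape as v
    print(v.form, v.type)      # e.g., VECTOR, INT
    print(v.rows(), v.cols())  # scalars report 1 for both
    print(len(doubled), mask)
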
class Iterator(Constant):
    """
    Iterator for Constant objects.

    Provides a standard way to iterate over the elements of any Swordfish object.
    This follows Python's iterator protocol and can be used in for loops and other
    iteration contexts.
    """
    def __iter__(self) -> Iterator: ...
    def __next__(self) -> Constant: ...


class Scalar(Constant):
    """
    Superclass for scalar types.

    Represents single-value data types like `Int`, `String`, and `Float`. Inherits
    from `Constant` and provides functionality specific to scalar values.
    """

    def to_python(self) -> Any:
        """Converts the Scalar to a corresponding Python type.

        Returns:
            Any: A Python object that represents the same value as the Scalar.
            The exact type depends on the Scalar's data type (e.g., int, str, float).
        """
        ...


class EnumInt(Scalar):
    """
    A base class for enumerated integer constants.

    This class serves as the parent class for various enumeration types, such as
    DataType, DataForm, and ObjectType.
    """
    def __init__(self, desc: str, val: int, type: int) -> None: ...
    def __int__(self) -> int: ...
    def __getitem__(self) -> Any: ...
    def set_function(self, func): ...


class DataType(EnumInt):
    """
    Enumeration defining Swordfish data types.

    Defines various data types, such as INT, FLOAT, STRING, etc. Inherits from
    EnumInt and provides type information for Constant objects. The data type of a
    Constant object can be retrieved using ``Constant.type``.
    """
    ...


class DataForm(EnumInt):
    """
    Enumeration defining Swordfish data forms.

    Defines various data forms, such as SCALAR, VECTOR, TABLE, etc. Inherits from
    EnumInt and provides structural information for Constant objects. The data form
    of a Constant object can be retrieved using ``Constant.form``.
    """
    ...


class ObjectType(EnumInt):
    """
    Enumeration defining Swordfish object types.

    Defines object types including VAR (local variable), SHARED (shared variable),
    and DEF (function definition). This helps categorize different kinds of objects
    in the Swordfish system.
    """
    ...


class LogLevel(EnumInt):
    """
    Enumeration representing log levels.

    Defines logging levels including DEBUG, INFO, WARNING, and ERROR. Used to
    control the verbosity and filtering of log messages in the system.
    """
    ...


class FunctionDef(Constant):
    """
    Represents a function definition.

    Inherits from the Constant class and provides a way to treat function
    definitions as first-class Swordfish objects.
    """
    # FIXME:
    @overload
    def __init__(self, func: FunctionType, *, name: str = "<lambda>", aggregation: bool = None):
        """
        Initializes a FunctionDef object from a Python function.

        Creates a lambda function that can be used in Swordfish from a Python
        function object.

        Parameters
        ----------
        func : FunctionType
            The Python function to be wrapped.
        name : str, optional
            The name of the function. Defaults to "<lambda>".
        aggregation : bool, optional
            Indicates whether this is an aggregate function. Defaults to None.
        """
        ...

    # FIXME:
    @overload
    def __init__(self, code: str, *, state: bool = False):
        """
        Initializes a FunctionDef object from a Swordfish function definition string.

        Creates a function definition from a Swordfish-syntax code string containing
        a single anonymous function definition.

        Parameters
        ----------
        code : str
            A string containing a single anonymous function definition in Swordfish
            syntax.
        state : bool, optional
            State parameter for the function. Defaults to False.
        """
        ...

    def __copy__(self) -> FunctionDef: ...
    def __deepcopy__(self, memo) -> FunctionDef: ...
    def __get__(self): ...
    def __call__(self, *args, **kwargs) -> Constant: ...
    def set_meta(self, signature, alias) -> None: ...

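# Editor's illustration (not part of the package stub): the two declared
# overloads wrap a Python callable or parse a Swordfish anonymous-function
# string (the Swordfish syntax shown is assumed).
def _example_function_defs() -> None:
    py_func = FunctionDef(lambda a, b: a + b, name="add2")
    sw_func = FunctionDef("def (a, b) { return a + b }")
    print(py_func(1, 2), sw_func(3, 4))   # FunctionDef objects are callable
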
class Vector(Constant):
    """
    Represents a one-dimensional vector.

    Inherits from the Constant class and provides functionality for working with
    vector data structures. Supports conversion to/from Python lists, tuples, and
    NumPy arrays.
    """

    def __getitem__(self, index) -> Scalar: ...
    def __setitem__(self, index, value) -> None: ...
    def __iter__(self) -> Iterator: ...

    @classmethod
    def from_list(cls, data: list, type: DataType = None) -> Vector:
        """
        Constructs a Vector object from a Python list.

        Parameters
        ----------
        data : list
            The input data as a Python list instance.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the vector elements.

        Returns
        -------
        Vector
            A new Vector object containing the data from the input list, converted
            to the specified data type.
        """
        ...

    @classmethod
    def from_tuple(cls, data: tuple, type: DataType = None) -> Vector:
        """
        Constructs a Vector object from a Python tuple.

        Parameters
        ----------
        data : tuple
            The input data as a Python tuple instance.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the vector elements.

        Returns
        -------
        Vector
            A new Vector object containing the data from the input tuple, converted
            to the specified data type.
        """
        ...

    @classmethod
    def from_numpy(cls, data: np.ndarray, type: DataType = None) -> Vector:
        """
        Constructs a Vector object from a NumPy array.

        Parameters
        ----------
        data : np.ndarray
            The input data as a 1-dimensional ndarray.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the vector elements.

        Returns
        -------
        Vector
            A new Vector object containing the data from the input NumPy array.
        """
        ...

    def to_numpy(self) -> np.ndarray:
        """
        Converts the Vector object to a NumPy ndarray.

        Returns
        -------
        np.ndarray
            A new 1-dimensional NumPy array containing all the elements of the
            Vector.
        """
        ...

    def to_list(self) -> list:
        """
        Converts the Vector object to a Python list.

        Returns
        -------
        list
            A new Python list containing all the elements of the Vector.
        """
        ...

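# Editor's illustration (not part of the package stub): a list -> Vector ->
# NumPy round-trip. The DataType.INT member is assumed from the docstrings.
def _example_vector_roundtrip() -> None:
    v = Vector.from_list([1, 2, 3], type=DataType.INT)   # DataType.INT assumed
    arr = v.to_numpy()                                   # 1-D ndarray
    v2 = Vector.from_numpy(np.array([1.5, 2.5]))         # type inferred from dtype
    print(arr, v2.to_list())
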
class AnyVector(Vector):
    """
    A versatile vector container that can store elements of any type.

    Extends the Vector class to allow storage of heterogeneous elements, making it
    suitable for mixed-type data scenarios.
    """
    def __getitem__(self, index) -> Constant: ...
    def __setitem__(self, index, value) -> None: ...

    @classmethod
    def from_list(cls, data: list) -> AnyVector:
        """
        Constructs an AnyVector object from a Python list.

        Parameters
        ----------
        data : list
            The input data as a Python list instance.

        Returns
        -------
        AnyVector
            A new AnyVector object containing the data from the input list. The
            elements retain their original types.
        """
        ...

    @classmethod
    def from_tuple(cls, data: tuple) -> AnyVector:
        """
        Constructs an AnyVector object from a Python tuple.

        Parameters
        ----------
        data : tuple
            The input data as a Python tuple instance.

        Returns
        -------
        AnyVector
            A new AnyVector object containing the data from the input tuple. The
            elements retain their original types.
        """
        ...

    @classmethod
    def from_numpy(cls, data: np.ndarray) -> AnyVector:
        """
        Constructs an AnyVector object from a NumPy ndarray.

        Parameters
        ----------
        data : np.ndarray
            The input data as a NumPy ndarray.

        Returns
        -------
        AnyVector
            A new AnyVector object containing the data from the input ndarray.
        """
        ...

    def to_numpy(self) -> np.ndarray:
        """
        Converts the AnyVector to a NumPy ndarray.

        Returns
        -------
        np.ndarray
            A NumPy array containing the data from the AnyVector. The array has
            dtype="object" and each element is a Constant.
        """
        ...

    def to_list(self) -> list:
        """
        Converts the AnyVector to a Python list.

        Returns
        -------
        list
            A Python list containing the data from the AnyVector. Each element in
            the list is a Constant object.
        """
        ...

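# Editor's illustration (not part of the package stub): AnyVector holds
# heterogeneous elements; to_numpy() yields an object-dtype array of Constants.
def _example_any_vector() -> None:
    av = AnyVector.from_list([1, "two", 3.0])
    print(av.to_numpy().dtype)   # object
    print(av.to_list())          # elements are Constant objects
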
class ArrayVector(Vector):
    """
    A vector container designed to store arrays as its elements.
    """
    def __getitem__(self, index) -> Vector: ...
    def __setitem__(self, index, value) -> None: ...

    @classmethod
    def from_list(cls, data: list, type: DataType = None) -> ArrayVector:
        """
        Constructs an ArrayVector object from a Python list of arrays.

        Parameters
        ----------
        data : list
            A list where each element is an array-like object.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the array elements. If None, the type will be inferred.

        Returns
        -------
        ArrayVector
            A new ArrayVector object containing the arrays from the input list.
        """
        ...

    @classmethod
    def from_tuple(cls, data: tuple, type: DataType = None) -> ArrayVector:
        """
        Constructs an ArrayVector object from a Python tuple.

        Parameters
        ----------
        data : tuple
            A tuple where each element is an array-like object.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the array elements. If None, the type will be inferred.

        Returns
        -------
        ArrayVector
            A new ArrayVector object containing the arrays from the input tuple.
        """
        ...

    @classmethod
    def from_numpy(cls, data: np.ndarray, type: DataType = None) -> ArrayVector:
        """
        Constructs an ArrayVector object from a NumPy ndarray.

        Parameters
        ----------
        data : np.ndarray
            A NumPy array.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the array elements. If None, the type will be inferred from the
            NumPy array's dtype.

        Returns
        -------
        ArrayVector
            A new ArrayVector object containing the arrays from the input NumPy
            array.
        """
        ...

    def to_numpy(self) -> np.ndarray:
        """
        Converts the ArrayVector to a NumPy ndarray.

        Returns
        -------
        np.ndarray
            A NumPy array with dtype="object".
        """
        ...

    def to_list(self) -> list:
        """
        Converts the ArrayVector to a Python list of lists.

        Returns
        -------
        list
            A Python list where each element is also a list.
        """
        ...

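# Editor's illustration (not part of the package stub): each element of an
# ArrayVector is itself an array, and indexing returns a Vector.
def _example_array_vector() -> None:
    avec = ArrayVector.from_list([[1, 2], [3, 4, 5]])
    first = avec[0]                       # a Vector holding [1, 2]
    print(first.to_list(), avec.to_list())
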
class Pair(Constant):
    """
    Represents a pair of values in Swordfish.

    A container that holds exactly two values. Instances of this class should not
    be created directly. Use the swordfish.pair() function to create Pair objects.
    """
    def __getitem__(self, index) -> Scalar: ...
    def __setitem__(self, index, value) -> None: ...
    def __iter__(self) -> Iterator: ...

    def to_list(self) -> list:
        """
        Converts the Pair to a Python list.

        Returns
        -------
        list
            A Python list containing two elements. Each element is converted to its
            corresponding Python type.
        """
        ...

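# Editor's illustration (not part of the package stub): per the docstring,
# Pair objects come from the package-level pair() helper; its exact signature
# is assumed here.
def _example_pair() -> None:
    import swordfish as sf
    p = sf.pair(1, 10)               # assumed: swordfish.pair(lower, upper)
    print(p[0], p[1], p.to_list())   # exactly two elements
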
class Matrix(Vector):
    """
    Represents a two-dimensional matrix.
    """
    def __getitem__(self, index) -> Constant: ...
    def __setitem__(self, index, value) -> None: ...

    @classmethod
    def from_numpy(cls, data: np.ndarray, type: DataType = None) -> Matrix:
        """
        Constructs a Matrix object from a one-dimensional or two-dimensional NumPy
        ndarray.

        Parameters
        ----------
        data : np.ndarray
            A 1D or 2D NumPy array to be converted into a Matrix.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the matrix elements. If None, the type will be inferred from
            the NumPy array's dtype.

        Returns
        -------
        Matrix
            A new Matrix object containing the data from the input NumPy array.
        """
        ...

    def to_numpy(self) -> np.ndarray:
        """
        Converts the Matrix to a two-dimensional NumPy ndarray.

        Returns
        -------
        np.ndarray
            A 2D NumPy array containing the data from the Matrix.
        """
        ...

    def to_list(self) -> list:
        """
        Converts the Matrix to a nested Python list.

        Returns
        -------
        list
            A list of lists representing the Matrix. Each inner list corresponds
            to a column of the Matrix.
        """
        ...

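# Editor's illustration (not part of the package stub): note that to_list()
# is column-major per the docstring above (one inner list per column).
def _example_matrix() -> None:
    m = Matrix.from_numpy(np.arange(6).reshape(2, 3))
    print(m.to_numpy().shape)   # (2, 3)
    print(m.to_list())          # 3 inner lists, one per column
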
class Set(Constant):
    """
    Represents a container with no duplicate values.
    """
    def __iter__(self) -> Iterator: ...

    @classmethod
    def from_set(cls, data: set, type: DataType = None) -> Set:
        """
        Constructs a Set object from a Python set.

        Parameters
        ----------
        data : set
            A Python set containing the elements to be included in the new Set.
        type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the Set elements. If None, the type will be inferred from the
            input set's elements. Defaults to None.

        Returns
        -------
        Set
            A new Set object containing the elements from the input Python set.
        """
        ...

    def to_set(self) -> set:
        """
        Converts the Set to a Python set.

        Returns
        -------
        set
            A Python set containing the elements of this Set. Each element in the
            returned set is a Constant object.
        """
        ...

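# Editor's illustration (not part of the package stub): a Python set round-trip;
# membership tests go through Constant.__contains__.
def _example_set_roundtrip() -> None:
    s = Set.from_set({1, 2, 3})
    back = s.to_set()           # elements come back as Constant objects
    print(len(back), 2 in s)
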
class Dictionary(Constant):
    """
    Represents a container type that holds unique key-value pairs.

    A mapping structure similar to Python's dict, but with Swordfish-specific type
    handling and conversion capabilities.
    """
    def __getitem__(self, index) -> Constant: ...
    def __setitem__(self, index, value) -> None: ...
    def __iter__(self) -> Iterator: ...

    @classmethod
    def from_dict(cls, data: dict, *, key_type: DataType = None, val_type: DataType = None) -> Dictionary:
        """
        Constructs a Dictionary object from a Python dict.

        Parameters
        ----------
        data : dict
            A Python dict containing the key-value pairs to be included in the new
            Dictionary.
        key_type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the Dictionary keys. If None, the type will be inferred from
            the input dict's keys. Defaults to None.
        val_type : DataType, optional
            An enumeration value from the DataType enum, specifying the target data
            type for the Dictionary values. If None, the type will be inferred from
            the input dict's values. Defaults to None.

        Returns
        -------
        Dictionary
            A new Dictionary object containing the key-value pairs from the input
            Python dict.
        """
        ...

    def to_dict(self) -> dict:
        """
        Converts the Dictionary to a Python dict.

        Returns
        -------
        dict
            A Python dict representing this Dictionary object. If the Dictionary's
            value type is ANY, keys are converted to their corresponding Python
            types and values remain Constant objects; otherwise, both keys and
            values are converted to their corresponding Python types.
        """
        ...

    def keys(self) -> Constant:
        """
        Retrieves the keys of the Dictionary.

        Returns
        -------
        Constant
            The retrieved dictionary keys.
        """
        ...

    def values(self) -> Constant:
        """
        Retrieves the values of the Dictionary.

        Returns
        -------
        Constant
            The retrieved dictionary values.
        """
        ...

    def items(self) -> DictionaryItems:
        """
        Retrieves an iterator over the Dictionary's key-value pairs.

        Returns
        -------
        DictionaryItems
            An iterable object that yields key-value pairs from the Dictionary.
        """
        ...


class DictionaryItems:
    def __iter__(self) -> DictionaryItemsIterator: ...


class DictionaryItemsIterator:
    def __next__(self) -> AnyVector: ...

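# Editor's illustration (not part of the package stub): a dict round-trip and
# items() iteration, which yields each key-value pair as an AnyVector.
def _example_dictionary() -> None:
    d = Dictionary.from_dict({"a": 1, "b": 2})
    for kv in d.items():        # kv is an AnyVector: (key, value)
        print(kv[0], kv[1])
    print(d.to_dict())
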
850
+ class Table(Constant):
851
+ """
852
+ Represents a tabular data structure.
853
+
854
+ In tables, data is logically organized in a row-and-column format. Each row
855
+ represents a unique record, and each column represents a field in the record.
856
+ Provides comprehensive functionality for data manipulation and analysis.
857
+ """
858
+ def __getitem__(self, index) -> Constant: ...
859
+ def __setitem__(self, index, value) -> None: ...
860
+ def __iter__(self) -> Iterator: ...
861
+ def __getattr__(self, name: str) -> Constant: ...
862
+
863
+ @classmethod
864
+ def from_pandas(cls, data: pd.DataFrame, *, types: Dict[str, DataType] = None) -> Table:
865
+ """
866
+ Creates a Table instance from a Pandas DataFrame.
867
+
868
+ Parameters
869
+ ----------
870
+ data : pd.DataFrame
871
+ The Pandas DataFrame to convert.
872
+ types : Dict[str, DataType], optional
873
+ Column type mappings where keys are column names and values are DataType
874
+ enumerations. If None, types are inferred automatically.
875
+
876
+ Returns
877
+ -------
878
+ Table
879
+ A new Table instance containing the DataFrame data.
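+
+ Examples
+ --------
+ A minimal round-trip sketch; column types are inferred when ``types`` is
+ omitted:
+
+ >>> import pandas as pd
+ >>> import swordfish as sf
+ >>> df = pd.DataFrame({"id": [1, 2], "name": ["a", "b"]})
+ >>> t = sf.data.Table.from_pandas(df)
+ >>> t.to_pandas().shape
+ (2, 2)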
880
+ """
881
+ ...
882
+
883
+ def to_pandas(self) -> pd.DataFrame:
884
+ """
885
+ Converts this Table to a Pandas DataFrame.
886
+
887
+ Returns
888
+ -------
889
+ pd.DataFrame
890
+ A DataFrame with equivalent data and column types automatically mapped
891
+ to compatible Pandas dtypes.
892
+ """
893
+ ...
894
+
895
+ @property
896
+ def types(self) -> Dict[str, DataType]:
897
+ """
898
+ Returns the data types of all table columns.
899
+
900
+ Returns
901
+ -------
902
+ Dict[str, DataType]
903
+ Mapping of column names to their corresponding DataType values.
904
+ """
905
+ ...
906
+
907
+ @property
908
+ def name(self) -> str:
909
+ """
910
+ Returns the table's name.
911
+
912
+ Returns
913
+ -------
914
+ str
915
+ The assigned name of this table.
916
+ """
917
+ ...
918
+
919
+ @property
920
+ def is_shared(self) -> bool:
921
+ """
922
+ Indicates whether this table is shared across sessions.
923
+
924
+ Returns
925
+ -------
926
+ bool
927
+ True if shared, False if private to the current session.
928
+ """
929
+ ...
930
+
931
+ def share(self, name: str, readonly: bool = False) -> Self:
932
+ """
933
+ Makes this table accessible across sessions with the specified name.
934
+
935
+ Parameters
936
+ ----------
937
+ name : str
938
+ Global name for the shared table.
939
+ readonly : bool, optional
940
+ Whether to restrict the table to read-only access. Defaults to False.
941
+
942
+ Returns
943
+ -------
944
+ Self
945
+ This table instance for method chaining.
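+
+ Examples
+ --------
+ A minimal sketch (``t`` is a Table, e.g. built via ``from_pandas`` above;
+ the shared name is illustrative):
+
+ >>> shared = t.share("demo_shared", readonly=True)
+ >>> shared.is_shared
+ True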
946
+ """
947
+ ...
948
+
949
+ def schema(self) -> Dictionary:
950
+ """
951
+ Returns the table's schema information.
952
+
953
+ Returns
954
+ -------
955
+ Dictionary
956
+ Column names mapped to their respective data types.
957
+ """
958
+ ...
959
+
960
+ def head(self, n: Constant = DFLT) -> Constant:
961
+ """
962
+ Returns the first n rows of the table.
963
+
964
+ Parameters
965
+ ----------
966
+ n : Constant, optional
967
+ Number of rows to return. Uses default if not specified.
968
+
969
+ Returns
970
+ -------
971
+ Constant
972
+ A table containing the first n rows.
973
+ """
974
+ ...
975
+
976
+ def tail(self, n: Constant = DFLT) -> Constant:
977
+ """
978
+ Retrieves the last n rows of the table.
979
+
980
+ Parameters
981
+ ----------
982
+ n : Constant, optional
983
+ The number of rows to retrieve. Defaults to DFLT.
984
+
985
+ Returns
986
+ -------
987
+ Constant
988
+ A subset of the table containing the last n rows.
989
+ """
990
+ ...
991
+
992
+ def count(self) -> Constant:
993
+ """
994
+ Counts the number of rows in the table.
995
+
996
+ Returns
997
+ -------
998
+ Constant
999
+ The number of rows in the table.
1000
+ """
1001
+ ...
1002
+
1003
+ def summary(self, interpolation: Constant = DFLT, characteristic: Constant = DFLT,
1004
+ percentile: Constant = DFLT, precision: Constant = DFLT,
1005
+ partitionSampling: Constant = DFLT) -> Constant:
1006
+ """
1007
+ Computes comprehensive summary statistics for numeric columns.
1008
+
1009
+ Parameters
1010
+ ----------
1011
+ interpolation : Constant, optional
1012
+ Percentile interpolation method. Available options: "linear" (default),
1013
+ "nearest", "lower", "higher", "midpoint".
1014
+ characteristic : Constant, optional
1015
+ Statistics to calculate. Options: "avg" (mean), "std" (standard deviation).
1016
+ Default computes both ["avg", "std"].
1017
+ percentile : Constant, optional
1018
+ List of percentile values (0-1) to compute. Default is [0.25, 0.50, 0.75]
1019
+ for 25th, 50th, and 75th percentiles.
1020
+ precision : Constant, optional
1021
+ Convergence threshold for iterative calculations. Recommended range:
1022
+ [1e-9, 1e-3]. Default: 1e-3.
1023
+ partitionSampling : Constant, optional
1024
+ For partitioned tables, either the number of partitions to sample
1025
+ (integer) or the sampling ratio in (0, 1]. No effect on non-partitioned tables.
1026
+
1027
+ Returns
1028
+ -------
1029
+ Constant
1030
+ Summary table with min, max, count, mean, std dev, and percentiles for
1031
+ each numeric column.
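+
+ Examples
+ --------
+ A minimal sketch requesting only the median for each numeric column
+ (``t`` is a Table as above):
+
+ >>> stats = t.summary(percentile=[0.5])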
1032
+ """
1033
+ ...
1034
+
1035
+ def sortBy_(self, sortColumns: Constant, sortDirections: Constant = DFLT) -> Constant:
1036
+ """
1037
+ Sorts the table in-place by specified columns and directions.
1038
+
1039
+ For partitioned tables, sorting occurs within each partition independently.
1040
+ Parallel processing is used when the ``localExecutors`` configuration parameter is greater than 0.
1041
+
1042
+ Parameters
1043
+ ----------
1044
+ sortColumns : Constant
1045
+ Column name(s) to sort by. Accepts string, list of strings, or meta code
1046
+ expression.
1047
+ sortDirections : Constant, optional
1048
+ Sort order for each column. True/1 for ascending (default), False/0 for
1049
+ descending.
1050
+
1051
+ Returns
1052
+ -------
1053
+ Constant
1054
+ The sorted table.
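+
+ Examples
+ --------
+ A minimal sketch sorting by two columns, the first ascending and the
+ second descending (``t`` is a Table as above):
+
+ >>> t.sortBy_(["name", "id"], [True, False])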
1055
+ """
1056
+ ...
1057
+
1058
+
1059
+ class Void(Scalar):
1060
+ VOID_VALUE: Void
1061
+ """
1062
+ A void value constant representing no data.
1063
+ """
1064
+ NULL_VALUE: Void
1065
+ """
1066
+ A null value constant representing the absence of a value.
1067
+ """
1068
+ DFLT_VALUE: Void
1069
+ """
1070
+ A default value constant for void type.
1071
+ """
1072
+ def __init__(self) -> None: ...
1073
+
1074
+ def is_nothing(self) -> bool:
1075
+ """
1076
+ Checks if the current value represents "Nothing".
1077
+
1078
+ This method verifies whether the current instance holds the `VOID_VALUE`,
1079
+ which signifies an absence of meaningful data. Typically used to check if an
1080
+ argument has been properly provided.
1081
+
1082
+ Returns
1083
+ -------
1084
+ bool
1085
+ True if the current value is `VOID_VALUE`; False otherwise.
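+
+ Examples
+ --------
+ A minimal sketch using the class constants defined above:
+
+ >>> import swordfish as sf
+ >>> sf.data.Void.VOID_VALUE.is_nothing()
+ True
+ >>> sf.data.Void.NULL_VALUE.is_nothing()
+ False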
1086
+ """
1087
+ ...
1088
+
1089
+ def is_default(self) -> bool:
1090
+ """
1091
+ Checks if the current value represents the default value.
1092
+
1093
+ This method verifies whether the current instance holds the `DFLT_VALUE`.
1094
+
1095
+ Returns
1096
+ -------
1097
+ bool
1098
+ True if the current value is DFLT_VALUE, False otherwise.
1099
+ """
1100
+ ...
1101
+
1102
+
1103
+ class Bool(Scalar):
1104
+ """Represents a Swordfish Bool object, initialized optionally with a Python bool value.
1105
+
1106
+ Examples:
1107
+ >>> import swordfish as sf
1108
+ >>> sf.data.Bool()
1109
+ Bool(null)
1110
+ >>> sf.data.Bool(True)
1111
+ Bool(true)
1112
+ """
1113
+ NULL_VALUE: Bool
1114
+
1115
+ @overload
1116
+ def __init__(self, data: bool) -> None:
1117
+ """Initializes Bool with a boolean value."""
1118
+ ...
1119
+
1120
+ @overload
1121
+ def __init__(self) -> None:
1122
+ """Initializes Bool with null value."""
1123
+ ...
1124
+
1125
+
1126
+ class Char(Scalar):
1127
+ """Represents a Swordfish Char object, initialized optionally with a Python str or int value.
1128
+
1129
+ Examples:
1130
+ >>> import swordfish as sf
1131
+ >>> sf.data.Char()
1132
+ Char(null)
1133
+ >>> sf.data.Char('c')
1134
+ Char(c)
1135
+ >>> sf.data.Char(100)
1136
+ Char(d)
1137
+ """
1138
+ NULL_VALUE: Char
1139
+
1140
+ @overload
1141
+ def __init__(self, data: str) -> None:
1142
+ """Initializes Char with a string value."""
1143
+ ...
1144
+
1145
+ @overload
1146
+ def __init__(self, data: int) -> None:
1147
+ """Initializes Char with an integer value."""
1148
+ ...
1149
+
1150
+ @overload
1151
+ def __init__(self) -> None:
1152
+ """Initializes Char with null value."""
1153
+ ...
1154
+
1155
+
1156
+ class Short(Scalar):
1157
+ """Represents a Swordfish Short object, initialized optionally with a Python int value.
1158
+
1159
+ Examples:
1160
+ >>> import swordfish as sf
1161
+ >>> sf.data.Short()
1162
+ Short(null)
1163
+ >>> sf.data.Short(28)
1164
+ Short(28)
1165
+ """
1166
+ NULL_VALUE: Short
1167
+
1168
+ @overload
1169
+ def __init__(self, data: int) -> None:
1170
+ """Initializes Short with an integer value."""
1171
+ ...
1172
+
1173
+ @overload
1174
+ def __init__(self) -> None:
1175
+ """Initializes Short with null value."""
1176
+ ...
1177
+
1178
+
1179
+ class Int(Scalar):
1180
+ """Represents a Swordfish Int object, initialized optionally with a Python int value.
1181
+
1182
+ Examples:
1183
+ >>> import swordfish as sf
1184
+ >>> sf.data.Int()
1185
+ Int(null)
1186
+ >>> sf.data.Int(23)
1187
+ Int(23)
1188
+ """
1189
+ NULL_VALUE: Int
1190
+
1191
+ @overload
1192
+ def __init__(self, data: int) -> None:
1193
+ """Initializes Int with an integer value."""
1194
+ ...
1195
+
1196
+ @overload
1197
+ def __init__(self) -> None:
1198
+ """Initializes Int with null value."""
1199
+ ...
1200
+
1201
+
1202
+ class Long(Scalar):
1203
+ """Represents a Swordfish Long object, initialized optionally with a Python int value.
1204
+
1205
+ Examples:
1206
+ >>> import swordfish as sf
1207
+ >>> sf.data.Long()
1208
+ Long(null)
1209
+ >>> sf.data.Long(123)
1210
+ Long(123)
1211
+ """
1212
+ NULL_VALUE: Long
1213
+
1214
+ @overload
1215
+ def __init__(self, data: int) -> None:
1216
+ """Initializes Long with an integer value."""
1217
+ ...
1218
+
1219
+ @overload
1220
+ def __init__(self) -> None:
1221
+ """Initializes Long with null value."""
1222
+ ...
1223
+
1224
+
1225
+ class Float(Scalar):
1226
+ """
1227
+ Represents a Swordfish Float object, initialized optionally with a Python float
1228
+ value.
1229
+
1230
+ Parameters
1231
+ ----------
1232
+ data : float, optional
1233
+ A Python float used to initialize the Float object.
1234
+
1235
+ Examples
1236
+ --------
1237
+ >>> import swordfish as sf
1238
+ >>> sf.data.Float()
1239
+ Float(null)
1240
+ >>> sf.data.Float(3.14)
1241
+ Float(3.14)
1242
+ """
1243
+ NULL_VALUE: Float
1244
+
1245
+ @overload
1246
+ def __init__(self, data: float) -> None:
1247
+ """Initialize Float with a float value."""
1248
+ ...
1249
+
1250
+ @overload
1251
+ def __init__(self) -> None:
1252
+ """Initialize Float with null value."""
1253
+ ...
1254
+
1255
+
1256
+ class Double(Scalar):
1257
+ """
1258
+ Represents a Swordfish Double object, initialized optionally with a Python float
1259
+ value.
1260
+
1261
+ Parameters
1262
+ ----------
1263
+ data : float, optional
1264
+ A Python float used to initialize the Double object.
1265
+
1266
+ Examples
1267
+ --------
1268
+ >>> import swordfish as sf
1269
+ >>> sf.data.Double()
1270
+ Double(null)
1271
+ >>> sf.data.Double(3.14)
1272
+ Double(3.14)
1273
+ """
1274
+ NULL_VALUE: Double
1275
+
1276
+ @overload
1277
+ def __init__(self, data: float) -> None:
1278
+ """Initialize Double with a float value."""
1279
+ ...
1280
+
1281
+ @overload
1282
+ def __init__(self) -> None:
1283
+ """Initialize Double with null value."""
1284
+ ...
1285
+
1286
+
1287
+ class String(Scalar):
1288
+ """
1289
+ Represents a Swordfish String object, initialized optionally with a Python str
1290
+ value.
1291
+
1292
+ Parameters
1293
+ ----------
1294
+ data : str, optional
1295
+ A Python str used to initialize the String object.
1296
+
1297
+ Examples
1298
+ --------
1299
+ >>> import swordfish as sf
1300
+ >>> sf.data.String()
1301
+ String(null)
1302
+ >>> sf.data.String("hello")
1303
+ String(hello)
1304
+ """
1305
+ NULL_VALUE: String
1306
+
1307
+ @overload
1308
+ def __init__(self, data: str) -> None:
1309
+ """Initialize String with a string value."""
1310
+ ...
1311
+
1312
+ @overload
1313
+ def __init__(self) -> None:
1314
+ """Initialize String with null value."""
1315
+ ...
1316
+
1317
+
1318
+ class Blob(Scalar):
1319
+ """
1320
+ Represents a Swordfish Blob object, initialized optionally with a Python str
1321
+ value.
1322
+
1323
+ Parameters
1324
+ ----------
1325
+ data : str, optional
1326
+ A Python str used to initialize the Blob object.
1327
+
1328
+ Examples
1329
+ --------
1330
+ >>> import swordfish as sf
1331
+ >>> sf.data.Blob()
1332
+ Blob(null)
1333
+ >>> sf.data.Blob(b"hello")
1334
+ Blob(hello)
1335
+ """
1336
+ NULL_VALUE: Blob
1337
+
1338
+ @overload
1339
+ def __init__(self, data: str) -> None:
1340
+ """Initialize Blob with a string value."""
1341
+ ...
1342
+
1343
+ @overload
1344
+ def __init__(self) -> None:
1345
+ """Initialize Blob with null value."""
1346
+ ...
1347
+
1348
+
1349
+ class Date(Scalar):
1350
+ """
1351
+ Represents a Swordfish Date object, initialized in one of three ways: with no
1352
+ arguments, with a Python int value, or with three ints indicating year, month,
1353
+ and day.
1354
+
1355
+ Parameters
1356
+ ----------
1357
+ data : int, optional
1358
+ A Python int used to initialize the Date object.
1359
+ year : int, optional
1360
+ The year component of the Date object.
1361
+ month : int, optional
1362
+ The month component of the Date object.
1363
+ day : int, optional
1364
+ The day component of the Date object.
1365
+
1366
+ Examples
1367
+ --------
1368
+ >>> import swordfish as sf
1369
+ >>> sf.data.Date()
1370
+ Date(null)
1371
+ >>> sf.data.Date(1)
1372
+ Date(1970.01.02)
1373
+ >>> sf.data.Date(2000, 1, 1)
1374
+ Date(2000.01.01)
1375
+ """
1376
+ NULL_VALUE: Date
1377
+
1378
+ @overload
1379
+ def __init__(self, data: int) -> None:
1380
+ """Initialize Date with an integer value."""
1381
+ ...
1382
+
1383
+ @overload
1384
+ def __init__(self, year: int, month: int, day: int) -> None:
1385
+ """Initialize Date with year, month, and day values."""
1386
+ ...
1387
+
1388
+ @overload
1389
+ def __init__(self) -> None:
1390
+ """Initialize Date with null value."""
1391
+ ...
1392
+
1393
+
1394
+ class Month(Scalar):
1395
+ """
1396
+ Represents a Swordfish Month object, initialized in one of three ways: with no
1397
+ arguments, with a Python int value, or two ints indicating year and month.
1398
+
1399
+ Parameters
1400
+ ----------
1401
+ data : int, optional
1402
+ A Python int used to initialize the Month object.
1403
+ year : int, optional
1404
+ The year component of the Month object.
1405
+ month : int, optional
1406
+ The month component of the Month object.
1407
+
1408
+ Examples
1409
+ --------
1410
+ >>> import swordfish as sf
1411
+ >>> sf.data.Month()
1412
+ Month(null)
1413
+ >>> sf.data.Month(1)
1414
+ Month(0000.02M)
1415
+ >>> sf.data.Month(2025, 2)
1416
+ Month(2025.02M)
1417
+ """
1418
+ NULL_VALUE: Month
1419
+
1420
+ @overload
1421
+ def __init__(self, data: int) -> None:
1422
+ """Initialize Month with an integer value."""
1423
+ ...
1424
+
1425
+ @overload
1426
+ def __init__(self, year: int, month: int) -> None:
1427
+ """Initialize Month with year and month values."""
1428
+ ...
1429
+
1430
+ @overload
1431
+ def __init__(self) -> None:
1432
+ """Initialize Month with null value."""
1433
+ ...
1434
+
1435
+
1436
+ class Time(Scalar):
1437
+ """
1438
+ Represents a Swordfish Time object, initialized in one of three ways: with no
1439
+ arguments, with a Python int value, or with separate ints indicating hour,
1440
+ minute, second, and millisecond.
1441
+
1442
+ Parameters
1443
+ ----------
1444
+ data : int, optional
1445
+ A Python int used to initialize the Time object.
1446
+ hour : int, optional
1447
+ The hour component of the Time object.
1448
+ minute : int, optional
1449
+ The minute component of the Time object.
1450
+ second : int, optional
1451
+ The second component of the Time object.
1452
+ millisecond : int, optional
1453
+ The millisecond component of the Time object.
1454
+
1455
+ Examples
1456
+ --------
1457
+ >>> import swordfish as sf
1458
+ >>> sf.data.Time()
1459
+ Time(null)
1460
+ >>> sf.data.Time(100)
1461
+ Time(00:00:00.100)
1462
+ >>> sf.data.Time(12, 1, 2, 0)
1463
+ Time(12:01:02.000)
1464
+ """
1465
+ NULL_VALUE: Time
1466
+
1467
+ @overload
1468
+ def __init__(self, data: int) -> None:
1469
+ """Initialize Time with an integer value."""
1470
+ ...
1471
+
1472
+ @overload
1473
+ def __init__(self, hour: int, minute: int, second: int, millisecond: int) -> None:
1474
+ """Initialize Time with hour, minute, second, and millisecond values."""
1475
+ ...
1476
+
1477
+ @overload
1478
+ def __init__(self) -> None:
1479
+ """Initialize Time with null value."""
1480
+ ...
1481
+
1482
+
1483
+ class Minute(Scalar):
1484
+ """
1485
+ Represents a Swordfish Minute object, initialized in one of three ways: with no
1486
+ arguments, with a Python int value, or with two ints indicating hour and minute.
1487
+
1488
+ Parameters
1489
+ ----------
1490
+ data : int, optional
1491
+ A Python int used to initialize the Minute object.
1492
+ hour : int, optional
1493
+ The hour component of the Minute object.
1494
+ minute : int, optional
1495
+ The minute component of the Minute object.
1496
+
1497
+ Examples
1498
+ --------
1499
+ >>> import swordfish as sf
1500
+ >>> sf.data.Minute()
1501
+ Minute(null)
1502
+ >>> sf.data.Minute(20)
1503
+ Minute(00:20m)
1504
+ >>> sf.data.Minute(11, 50)
1505
+ Minute(11:50m)
1506
+ """
1507
+ NULL_VALUE: Minute
1508
+
1509
+ @overload
1510
+ def __init__(self, data: int) -> None:
1511
+ """Initialize Minute with an integer value."""
1512
+ ...
1513
+
1514
+ @overload
1515
+ def __init__(self, hour: int, minute: int) -> None:
1516
+ """Initialize Minute with hour and minute values."""
1517
+ ...
1518
+
1519
+ @overload
1520
+ def __init__(self) -> None:
1521
+ """Initialize Minute with null value."""
1522
+ ...
1523
+
1524
+
1525
+ class Second(Scalar):
1526
+ """
1527
+ Represents a Swordfish Second object, initialized in one of three ways: with no
1528
+ arguments, with a Python int value, or with separate ints indicating hour,
1529
+ minute, and second.
1530
+
1531
+ Parameters
1532
+ ----------
1533
+ data : int, optional
1534
+ A Python int used to initialize the Second object.
1535
+ hour : int, optional
1536
+ The hour component of the Second object.
1537
+ minute : int, optional
1538
+ The minute component of the Second object.
1539
+ second : int, optional
1540
+ The second component of the Second object.
1541
+
1542
+ Examples
1543
+ --------
1544
+ >>> import swordfish as sf
1545
+ >>> sf.data.Second()
1546
+ Second(null)
1547
+ >>> sf.data.Second(10)
1548
+ Second(00:00:10)
1549
+ >>> sf.data.Second(10,20,30)
1550
+ Second(10:20:30)
1551
+ """
1552
+ NULL_VALUE: Second
1553
+
1554
+ @overload
1555
+ def __init__(self, data: int) -> None:
1556
+ """Initialize Second with an integer value."""
1557
+ ...
1558
+
1559
+ @overload
1560
+ def __init__(self, hour: int, minute: int, second: int) -> None:
1561
+ """Initialize Second with hour, minute, and second values."""
1562
+ ...
1563
+
1564
+ @overload
1565
+ def __init__(self) -> None:
1566
+ """Initialize Second with null value."""
1567
+ ...
1568
+
1569
+
1570
+ class DateTime(Scalar):
1571
+ """
1572
+ Represents a Swordfish DateTime object, initialized in one of three ways: with no
1573
+ arguments, with a Python int value, or with separate ints for year, month, day,
1574
+ hour, minute, and second.
1575
+
1576
+ Parameters
1577
+ ----------
1578
+ data : int, optional
1579
+ A Python int used to initialize the DateTime object (e.g., a timestamp).
1580
+ year : int, optional
1581
+ The year component of the DateTime object.
1582
+ month : int, optional
1583
+ The month component of the DateTime object.
1584
+ day : int, optional
1585
+ The day component of the DateTime object.
1586
+ hour : int, optional
1587
+ The hour component of the DateTime object.
1588
+ minute : int, optional
1589
+ The minute component of the DateTime object.
1590
+ second : int, optional
1591
+ The second component of the DateTime object.
1592
+
1593
+ Examples
1594
+ --------
1595
+ >>> import swordfish as sf
1596
+ >>> sf.data.DateTime()
1597
+ DateTime(null)
1598
+ >>> sf.data.DateTime(20)
1599
+ DateTime(1970.01.01T00:00:20)
1600
+ >>> sf.data.DateTime(2025,1,2,12,0,45)
1601
+ DateTime(2025.01.02T12:00:45)
1602
+ """
1603
+ NULL_VALUE: DateTime
1604
+
1605
+ @overload
1606
+ def __init__(self, data: int) -> None:
1607
+ """Initialize DateTime with an integer value."""
1608
+ ...
1609
+
1610
+ @overload
1611
+ def __init__(self, year: int, month: int, day: int, hour: int, minute: int,
1612
+ second: int) -> None:
1613
+ """Initialize DateTime with year, month, day, hour, minute, and second values."""
1614
+ ...
1615
+
1616
+ @overload
1617
+ def __init__(self) -> None:
1618
+ """Initialize DateTime with null value."""
1619
+ ...
1620
+
1621
+
1622
+ class Timestamp(Scalar):
1623
+ """
1624
+ Represents a Swordfish Timestamp object.
1625
+
1626
+ Parameters
1627
+ ----------
1628
+ data : int, optional
1629
+ A Python int used to initialize the Timestamp object.
1630
+ year : int, optional
1631
+ The year component of the Timestamp object.
1632
+ month : int, optional
1633
+ The month component of the Timestamp object.
1634
+ day : int, optional
1635
+ The day component of the Timestamp object.
1636
+ hour : int, optional
1637
+ The hour component of the Timestamp object.
1638
+ minute : int, optional
1639
+ The minute component of the Timestamp object.
1640
+ second : int, optional
1641
+ The second component of the Timestamp object.
1642
+ millisecond : int, optional
1643
+ The millisecond component of the Timestamp object.
1644
+
1645
+ Examples
1646
+ --------
1647
+ >>> import swordfish as sf
1648
+ >>> sf.data.Timestamp()
1649
+ Timestamp(null)
1650
+ >>> sf.data.Timestamp(0)
1651
+ Timestamp(1970.01.01T00:00:00.000)
1652
+ >>> sf.data.Timestamp(2025, 1, 1, 12, 0, 20, 0)
1653
+ Timestamp(2025.01.01T12:00:20.000)
1654
+ """
1655
+ NULL_VALUE: Timestamp
1656
+
1657
+ @overload
1658
+ def __init__(self, data: int) -> None:
1659
+ """Initialize Timestamp with an integer value."""
1660
+ ...
1661
+
1662
+ @overload
1663
+ def __init__(
1664
+ self, year: int, month: int, day: int, hour: int, minute: int, second: int,
1665
+ millisecond: int
1666
+ ) -> None:
1667
+ """Initialize Timestamp with year, month, day, hour, minute, second, and millisecond values."""
1668
+ ...
1669
+
1670
+ @overload
1671
+ def __init__(self) -> None:
1672
+ """Initialize Timestamp with null value."""
1673
+ ...
1674
+
1675
+
1676
+ class NanoTime(Scalar):
1677
+ """
1678
+ Represents a Swordfish NanoTime object.
1679
+
1680
+ Parameters
1681
+ ----------
1682
+ data : int, optional
1683
+ A Python int used to initialize the NanoTime object (e.g., a timestamp).
1684
+ hour : int, optional
1685
+ The hour component of the NanoTime object.
1686
+ minute : int, optional
1687
+ The minute component of the NanoTime object.
1688
+ second : int, optional
1689
+ The second component of the NanoTime object.
1690
+ nanosecond : int, optional
1691
+ The nanosecond component of the NanoTime object.
1692
+
1693
+ Examples
1694
+ --------
1695
+ >>> import swordfish as sf
1696
+ >>> sf.data.NanoTime()
1697
+ NanoTime(null)
1698
+ >>> sf.data.NanoTime(3)
1699
+ NanoTime(00:00:00.000000003)
1700
+ >>> sf.data.NanoTime(18, 0, 40, 30)
1701
+ NanoTime(18:00:40.000000030)
1702
+ """
1703
+ NULL_VALUE: NanoTime
1704
+
1705
+ @overload
1706
+ def __init__(self, data: int) -> None:
1707
+ """Initialize NanoTime with an integer value."""
1708
+ ...
1709
+
1710
+ @overload
1711
+ def __init__(
1712
+ self, hour: int, minute: int, second: int, nanosecond: int
1713
+ ) -> None:
1714
+ """Initialize NanoTime with hour, minute, second, and nanosecond values."""
1715
+ ...
1716
+
1717
+ @overload
1718
+ def __init__(self) -> None:
1719
+ """Initialize NanoTime with null value."""
1720
+ ...
1721
+
1722
+
1723
+ class NanoTimestamp(Scalar):
1724
+ """
1725
+ Represents a Swordfish NanoTimestamp object.
1726
+
1727
+ Parameters
1728
+ ----------
1729
+ data : int, optional
1730
+ A Python int used to initialize the NanoTimestamp object (e.g., a timestamp).
1731
+ year : int, optional
1732
+ The year component of the NanoTimestamp object.
1733
+ month : int, optional
1734
+ The month component of the NanoTimestamp object.
1735
+ day : int, optional
1736
+ The day component of the NanoTimestamp object.
1737
+ hour : int, optional
1738
+ The hour component of the NanoTimestamp object.
1739
+ minute : int, optional
1740
+ The minute component of the NanoTimestamp object.
1741
+ second : int, optional
1742
+ The second component of the NanoTimestamp object.
1743
+ nanosecond : int, optional
1744
+ The nanosecond component of the NanoTimestamp object.
1745
+
1746
+ Examples
1747
+ --------
1748
+ >>> import swordfish as sf
1749
+ >>> sf.data.NanoTimestamp()
1750
+ NanoTimestamp(null)
1751
+ >>> sf.data.NanoTimestamp(15)
1752
+ NanoTimestamp(1970.01.01T00:00:00.000000015)
1753
+ >>> sf.data.NanoTimestamp(2025, 1, 1, 7, 0, 0, 0)
1754
+ NanoTimestamp(2025.01.01T07:00:00.000000000)
1755
+ """
1756
+ NULL_VALUE: "NanoTimestamp"
1757
+
1758
+ @overload
1759
+ def __init__(self, data: int) -> None:
1760
+ """Initialize NanoTimestamp with an integer value."""
1761
+ ...
1762
+
1763
+ @overload
1764
+ def __init__(
1765
+ self, year: int, month: int, day: int, hour: int, minute: int, second: int,
1766
+ nanosecond: int
1767
+ ) -> None:
1768
+ """Initialize NanoTimestamp with year, month, day, hour, minute, second, and nanosecond values."""
1769
+ ...
1770
+
1771
+ @overload
1772
+ def __init__(self) -> None:
1773
+ """Initialize NanoTimestamp with null value."""
1774
+ ...
1775
+
1776
+
1777
+ class DateHour(Scalar):
1778
+ """
1779
+ Represents a Swordfish DateHour object.
1780
+
1781
+ Parameters
1782
+ ----------
1783
+ data : int, optional
1784
+ A Python int used to initialize the DateHour object.
1785
+ year : int, optional
1786
+ The year component of the DateHour object.
1787
+ month : int, optional
1788
+ The month component of the DateHour object.
1789
+ day : int, optional
1790
+ The day component of the DateHour object.
1791
+ hour : int, optional
1792
+ The hour component of the DateHour object.
1793
+
1794
+ Examples
1795
+ --------
1796
+ >>> import swordfish as sf
1797
+ >>> sf.data.DateHour()
1798
+ DateHour(null)
1799
+ >>> sf.data.DateHour(1000)
1800
+ DateHour(1970.02.11T16)
1801
+ >>> sf.data.DateHour(2025,2,2,9)
1802
+ DateHour(2025.02.02T09)
1803
+ """
1804
+ NULL_VALUE: DateHour
1805
+
1806
+ @overload
1807
+ def __init__(self, data: int) -> None:
1808
+ """Initialize DateHour with an integer value."""
1809
+ ...
1810
+
1811
+ @overload
1812
+ def __init__(self, year: int, month: int, day: int, hour: int) -> None:
1813
+ """Initialize DateHour with year, month, day, and hour values."""
1814
+ ...
1815
+
1816
+ @overload
1817
+ def __init__(self) -> None:
1818
+ """Initialize DateHour with null value."""
1819
+ ...
1820
+
1821
+
1822
+ class Uuid(Scalar):
1823
+ """Represents a Swordfish Uuid object."""
1824
+ NULL_VALUE: Uuid
1825
+
1826
+
1827
+ class Int128(Scalar):
1828
+ """Represents a Swordfish Int128 object."""
1829
+ NULL_VALUE: Int128
1830
+
1831
+
1832
+ class Ipaddr(Scalar):
1833
+ """Represents a Swordfish Ipaddr object."""
1834
+ NULL_VALUE: Ipaddr
1835
+
1836
+
1837
+ class Duration(Scalar):
1838
+ """
1839
+ Represents a Swordfish Duration object.
1840
+
1841
+ Parameters
1842
+ ----------
1843
+ data : str
1844
+ A Python str used to initialize the Duration object.
1845
+ val : int
1846
+ The value of the Duration.
1847
+ unit : str, optional
1848
+ The unit of the Duration. Defaults to "ns".
1849
+
1850
+ Examples
1851
+ --------
1852
+ >>> import swordfish as sf
1853
+ >>> sf.data.Duration("20w")
1854
+ Duration(20w)
1855
+ >>> sf.data.Duration(3, "m")
1856
+ Duration(3m)
1857
+ >>> sf.data.Duration(10)
1858
+ Duration(10ns)
1859
+ """
1860
+ NULL_VALUE: Duration
1861
+
1862
+ @overload
1863
+ def __init__(self, data: str) -> None:
1864
+ """Initialize Duration with a string value."""
1865
+ ...
1866
+
1867
+ @overload
1868
+ def __init__(self, val: int, unit: str = "ns") -> None:
1869
+ """Initialize Duration with value and unit."""
1870
+ ...
1871
+
1872
+
1873
+ class Handle(Scalar):
1874
+ """Represents a Swordfish Handle object."""
1875
+
1876
+
1877
+ class Resource(Scalar):
1878
+ """Represents a Swordfish Resource object."""
1879
+
1880
+
1881
+ class MetaCode(Scalar):
1882
+ """
1883
+ Represents a Swordfish MetaCode object.
1884
+ """
1885
+
1886
+ def eval(self, conn: Connection = None) -> Constant:
1887
+ """
1888
+ Evaluates the MetaCode.
1889
+
1890
+ Parameters
1891
+ ----------
1892
+ conn : Connection, optional
1893
+ The connection on which to evaluate this MetaCode.
1894
+
1895
+ Returns
1896
+ -------
1897
+ Constant
1898
+ The evaluated result of the MetaCode.
1899
+ """
1900
+ ...
1901
+
1902
+
1903
+ class Decimal32(Scalar):
1904
+ """
1905
+ Represents a Swordfish Decimal32 object.
1906
+
1907
+ Parameters
1908
+ ----------
1909
+ data : int or Decimal
1910
+ The raw data representation or value of the Decimal32.
1911
+ scale : int, optional
1912
+ The scale of the Decimal32. Defaults to EXPARAM_DEFAULT.
1913
+
1914
+ Examples
1915
+ --------
1916
+ >>> import swordfish as sf
1917
+ >>> sf.data.Decimal32(314, 2)
1918
+ Decimal32(3.14, scale=2)
1919
+ >>> sf.data.Decimal32(3.141,3)
1920
+ Decimal32(3.141, scale=3)
1921
+ """
1922
+ NULL_VALUE: Decimal32
1923
+
1924
+ @overload
1925
+ def __init__(self, data: int, scale: int = EXPARAM_DEFAULT) -> None:
1926
+ """Initialize Decimal32 with an integer value and scale."""
1927
+ ...
1928
+
1929
+ @overload
1930
+ def __init__(self, data: Decimal, scale: int = EXPARAM_DEFAULT) -> None:
1931
+ """Initialize Decimal32 with a Decimal value and scale."""
1932
+ ...
1933
+
1934
+
1935
+ class Decimal64(Scalar):
1936
+ """
1937
+ Represents a Swordfish Decimal64 object.
1938
+
1939
+ Parameters
1940
+ ----------
1941
+ data : int or Decimal
1942
+ The raw data representation or value of the Decimal64.
1943
+ scale : int, optional
1944
+ The scale of the Decimal64. Defaults to EXPARAM_DEFAULT.
1945
+
1946
+ Examples
1947
+ --------
1948
+ >>> import swordfish as sf
1949
+ >>> sf.data.Decimal64(12345, 3)
1950
+ Decimal64(12.345, scale=3)
1951
+ >>> sf.data.Decimal64(3.14,2)
1952
+ Decimal64(3.14, scale=2)
1953
+ """
1954
+ NULL_VALUE: Decimal64
1955
+
1956
+ @overload
1957
+ def __init__(self, data: int, scale: int = EXPARAM_DEFAULT) -> None:
1958
+ """Initialize Decimal64 with an integer value and scale."""
1959
+ ...
1960
+
1961
+ @overload
1962
+ def __init__(self, data: Decimal, scale: int = EXPARAM_DEFAULT) -> None:
1963
+ """Initialize Decimal64 with a Decimal value and scale."""
1964
+ ...
1965
+
1966
+
1967
+ class Decimal128(Scalar):
1968
+ """
1969
+ Represents a Swordfish Decimal128 object.
1970
+
1971
+ Parameters
1972
+ ----------
1973
+ data : int or Decimal
1974
+ The raw data representation or value of the Decimal128.
1975
+ scale : int, optional
1976
+ The scale of the Decimal128. Defaults to EXPARAM_DEFAULT.
1977
+
1978
+ Examples
1979
+ --------
1980
+ >>> import swordfish as sf
1981
+ >>> sf.data.Decimal128(12345, 6)
1982
+ Decimal128(0.012345, scale=6)
1983
+ >>> sf.data.Decimal128(3.14,5)
1984
+ Decimal128(3.14000, scale=5)
1985
+ """
1986
+ NULL_VALUE: Decimal128
1987
+
1988
+ @overload
1989
+ def __init__(self, data: int, scale: int = EXPARAM_DEFAULT) -> None:
1990
+ """Initialize Decimal128 with an integer value and scale."""
1991
+ ...
1992
+
1993
+ @overload
1994
+ def __init__(self, data: Decimal, scale: int = EXPARAM_DEFAULT) -> None:
1995
+ """Initialize Decimal128 with a Decimal value and scale."""
1996
+ ...
1997
+
1998
+
1999
+ class Point(Scalar):
2000
+ """
2001
+ Represents a Swordfish Point object, defined by x and y coordinates.
2002
+
2003
+ Parameters
2004
+ ----------
2005
+ x : float
2006
+ The x-coordinate of the Point.
2007
+ y : float
2008
+ The y-coordinate of the Point.
2009
+ """
2010
+ def __init__(self, x: float, y: float) -> None:
2011
+ """Initializes Point with x and y coordinates.
2012
+
2013
+ Args:
2014
+ x (float): The x-coordinate.
2015
+ y (float): The y-coordinate.
2016
+ """
2017
+ ...
2018
+
2019
+
2020
+ class Unknown(Constant):
2021
+ ...
2022
+
2023
+
2024
+ #####################################################################
2025
+ # Enum Module
2026
+ #####################################################################
2027
+
2028
+
2029
+ def create_type_enum(type: int, exparam: int) -> DataType: ...
2030
+
2031
+
2032
+ def create_array_type_enum(sub_type: DataType) -> DataType: ...
2033
+
2034
+
2035
+ def create_form_enum(form: int) -> DataForm: ...
2036
+
2037
+
2038
+ ALL: EnumInt
2039
+ FIRST: EnumInt
2040
+ LAST: EnumInt
2041
+ NONE: EnumInt
2042
+
2043
+ VOID: DataType
2044
+ BOOL: DataType
2045
+ CHAR: DataType
2046
+ SHORT: DataType
2047
+ INT: DataType
2048
+ LONG: DataType
2049
+ DATE: DataType
2050
+ MONTH: DataType
2051
+ TIME: DataType
2052
+ MINUTE: DataType
2053
+ SECOND: DataType
2054
+ DATETIME: DataType
2055
+ TIMESTAMP: DataType
2056
+ NANOTIME: DataType
2057
+ NANOTIMESTAMP: DataType
2058
+ FLOAT: DataType
2059
+ DOUBLE: DataType
2060
+ SYMBOL: DataType
2061
+ STRING: DataType
2062
+ UUID: DataType
2063
+ FUNCTIONDEF: DataType
2064
+ HANDLE: DataType
2065
+ CODE: DataType
2066
+ DATASOURCE: DataType
2067
+ RESOURCE: DataType
2068
+ ANY: DataType
2069
+ DICTIONARY: DataType
2070
+ DATEHOUR: DataType
2071
+ IPADDR: DataType
2072
+ INT128: DataType
2073
+ BLOB: DataType
2074
+ COMPLEX: DataType
2075
+ POINT: DataType
2076
+ DURATION: DataType
2077
+ OBJECT: DataType
2078
+
2079
+ SCALAR: DataForm
2080
+ VECTOR: DataForm
2081
+ PAIR: DataForm
2082
+ MATRIX: DataForm
2083
+ SET: DataForm
2084
+ DICT: DataForm
2085
+ TABLE: DataForm
2086
+
2087
+ VAR: ObjectType
2088
+ SHARED: ObjectType
2089
+ DEF: ObjectType
2090
+
2091
+ DEBUG: LogLevel
2092
+ INFO: LogLevel
2093
+ ERROR: LogLevel
2094
+ WARNING: LogLevel
2095
+
2096
+
2097
+ #####################################################################
2098
+ # Exception Module
2099
+ #####################################################################
2100
+
2101
+
2102
+ class Warning(Exception):
2103
+ ...
2104
+
2105
+
2106
+ class Error(Exception):
2107
+ ...
2108
+
2109
+
2110
+ class InterfaceError(Error):
2111
+ ...
2112
+
2113
+
2114
+ class DatabaseError(Error):
2115
+ ...
2116
+
2117
+
2118
+ class DataError(DatabaseError):
2119
+ ...
2120
+
2121
+
2122
+ class OperationalError(DatabaseError):
2123
+ ...
2124
+
2125
+
2126
+ class IntegrityError(DatabaseError):
2127
+ ...
2128
+
2129
+
2130
+ class InternalError(DatabaseError):
2131
+ ...
2132
+
2133
+
2134
+ class ProgrammingError(DatabaseError):
2135
+ ...
2136
+
2137
+
2138
+ class NotSupportedError(DatabaseError):
2139
+ ...
2140
+
2141
+
2142
+ #####################################################################
2143
+ # IO Module
2144
+ #####################################################################
2145
+
2146
+
2147
+ def dump(obj: Constant, file: BinaryIO) -> None:
2148
+ """
2149
+ Serialize a Constant object and write the serialized data to a writable
2150
+ BinaryIO object.
2151
+
2152
+ Parameters
2153
+ ----------
2154
+ obj : Constant
2155
+ The object to serialize. Must be a Constant object.
2156
+ file : BinaryIO
2157
+ A writable BinaryIO object to store the serialized data.
2158
+ """
2159
+ ...
2160
+
2161
+
2162
+ def load(file: BinaryIO) -> Constant:
2163
+ """
2164
+ Read serialized data from a readable BinaryIO object and deserialize it into a
2165
+ Constant object.
2166
+
2167
+ Parameters
2168
+ ----------
2169
+ file : BinaryIO
2170
+ A readable BinaryIO object.
2171
+
2172
+ Returns
2173
+ -------
2174
+ Constant
2175
+ The deserialized object.
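+
+ Examples
+ --------
+ A file round-trip sketch using an in-memory buffer (the ``sf.io`` module
+ path is an assumption):
+
+ >>> import io
+ >>> import swordfish as sf
+ >>> buf = io.BytesIO()
+ >>> sf.io.dump(sf.data.Int(7), buf)
+ >>> _ = buf.seek(0)
+ >>> sf.io.load(buf)
+ Int(7)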
2176
+ """
2177
+ ...
2178
+
2179
+
2180
+ def dumps(obj: Constant) -> bytes:
2181
+ """
2182
+ Serialize a Constant object and return the serialized data as bytes.
2183
+
2184
+ Parameters
2185
+ ----------
2186
+ obj : Constant
2187
+ The object to serialize. Must be a Constant object.
2188
+
2189
+ Returns
2190
+ -------
2191
+ bytes
2192
+ The serialized representation of `obj`.
2193
+ """
2194
+ ...
2195
+
2196
+
2197
+ def loads(data: bytes) -> Constant:
2198
+ """
2199
+ Deserialize a Constant object from a bytes-like object.
2200
+
2201
+ Parameters
2202
+ ----------
2203
+ data : bytes
2204
+ The serialized data.
2205
+
2206
+ Returns
2207
+ -------
2208
+ Constant
2209
+ The deserialized `Constant` object.
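+
+ Examples
+ --------
+ A bytes round-trip sketch (module path assumed as in ``load`` above):
+
+ >>> import swordfish as sf
+ >>> sf.io.loads(sf.io.dumps(sf.data.Int(42)))
+ Int(42)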
2210
+ """
2211
+ ...
2212
+
2213
+
2214
+ #####################################################################
2215
+ # Streaming Engine Module
2216
+ #####################################################################
2217
+
2218
+
2219
+ class EngineType(Enum):
2220
+ """
2221
+ Enumeration of streaming engine types in Swordfish.
2222
+ """
2223
+
2224
+ StreamBroadcastEngine: int
2225
+ TimeSeriesEngine: int
2226
+ CrossSectionalEngine: int
2227
+ ReactiveStateEngine: int
2228
+ StreamFilterEngine: int
2229
+ ExtensionEngine: int
2230
+
2231
+ @classmethod
+ def get_from_str(cls, name: str) -> EngineType:
2232
+ """
2233
+ Returns the corresponding EngineType for a given string.
2234
+
2235
+ Parameters
2236
+ ----------
2237
+ name : str
2238
+ String representation of the EngineType.
2239
+
2240
+ Returns
2241
+ -------
2242
+ EngineType
2243
+ Matching EngineType enum member.
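+
+ Examples
+ --------
+ A minimal sketch; the accepted spelling is assumed to match the member name:
+
+ >>> import swordfish as sf
+ >>> sf.engine.EngineType.get_from_str("TimeSeriesEngine") == sf.engine.EngineType.TimeSeriesEngine
+ True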
2244
+ """
2245
+ ...
2246
+
2247
+
2248
+ class EngineStat:
2249
+ ...
2250
+
2251
+
2252
+ class StreamEngine(Table, abc.ABC):
2253
+ """
2254
+ Abstract base class representing a streaming engine.
2255
+
2256
+ This class serves as the base for all streaming engine types in Swordfish.
2257
+ """
2258
+
2259
+ engine_type: EngineType
2260
+ """
2261
+ The type of the streaming engine.
2262
+ """
2263
+
2264
+ stat: EngineStat
2265
+ """
2266
+ Descriptive statistics related to the streaming engine.
2267
+ """
2268
+
2269
+ @classmethod
2270
+ def create(cls, name: str, *args, **kwargs) -> Builder: ...
2271
+
2272
+ @classmethod
2273
+ def list(cls) -> List[Tuple[str, EngineType, str]]: ...
2274
+
2275
+ @classmethod
2276
+ def get(cls, name: str) -> Self: ...
2277
+
2278
+
2279
+ def _create_engine(engine_type: EngineType, *args) -> StreamEngine: ...
2280
+
2281
+
2282
+ class StreamBroadcastEngineStat(EngineStat):
2283
+ user: str
2284
+ """
2285
+ Name of the user who created the streaming engine.
2286
+ """
2287
+ status: Literal["OK", "FATAL"]
2288
+ """
2289
+ Status of the streaming engine. "OK" means available; "FATAL" means unavailable.
2290
+ """
2291
+ last_err_msg: str
2292
+ """
2293
+ The latest error message.
2294
+ """
2295
+ num_groups: int
2296
+ """
2297
+ The number of groups that the streaming engine has handled.
2298
+ """
2299
+ num_rows: int
2300
+ """
2301
+ The number of records that have entered the streaming engine.
2302
+ """
2303
+ num_metrics: int
2304
+ """
2305
+ The number of metrics calculated by the streaming engine.
2306
+ """
2307
+ metrics: str
2308
+ """
2309
+ The metacode of the metrics calculated by the streaming engine.
2310
+ """
2311
+ snapshot_dir: str
2312
+ """
2313
+ The directory where engine snapshots are saved.
2314
+ """
2315
+ snapshot_interval: int
2316
+ """
2317
+ The interval at which snapshots are saved.
2318
+ """
2319
+ snapshot_msg_id: int
2320
+ """
2321
+ The msgId of the engine snapshot.
2322
+ """
2323
+ snapshot_timestamp: Timestamp
2324
+ """
2325
+ The timestamp of the snapshot.
2326
+ """
2327
+ garbage_size: int
2328
+ """
2329
+ The threshold of the number of records in memory that triggers memory cleaning.
2330
+ """
2331
+ memory_used: int
2332
+ """
2333
+ The amount of memory used by the engine (in bytes).
2334
+ """
2335
+
2336
+
2337
+ class StreamBroadcastEngine(StreamEngine):
2338
+ """
2339
+ The stream broadcast engine distributes the same data stream to different target tables.
2340
+
2341
+ Use this engine when you need to process a single stream of data in multiple ways.
2342
+ For example, save one copy to disk while sending another copy to a computing engine
2343
+ for further processing.
2344
+
2345
+ ``StreamBroadcastEngine.create`` returns a Builder object; call ``submit`` on it to
2346
+ create an Engine object into which you can ingest data for stream processing.
2347
+ """
2348
+ engine_type: EngineType
2349
+ stat: StreamBroadcastEngineStat
2350
+
2351
+ @classmethod
2352
+ def create(
2353
+ cls, name: str, table_schema: Union[Table, TypeDict], outputs: List[Table]
2354
+ ) -> StreamBroadcastEngineBuilder:
2355
+ """
2356
+ Creates a new instance of a StreamBroadcastEngine.
2357
+
2358
+ Parameters
2359
+ ----------
2360
+ name : str
2361
+ The name of the engine. It can contain letters, numbers, and "_", and must
2362
+ start with a letter.
2363
+ table_schema : Union[Table, TypeDict]
2364
+ Specifies the column names and corresponding types of the input stream.
2365
+ If a Table is provided, its schema must match the schema of the subscribed
2366
+ stream table. Only the schema is used; the table may or may not contain data.
2367
+ outputs : List[Table]
2368
+ A list of two or more tables. The schema of each table must match
2369
+ ``table_schema``.
2370
+
2371
+ Returns
2372
+ -------
2373
+ StreamBroadcastEngineBuilder
2374
+ A builder object to configure and create the StreamBroadcastEngine.
2375
+
2376
+ Examples
2377
+ --------
2378
+ >>> import swordfish as sf
2379
+ >>> table_schema = {"id": "LONG", "name": "STRING"}
2380
+ >>> output_table1 = sf.table(types=table_schema)
2381
+ >>> output_table2 = sf.table(types=table_schema)
2382
+ >>> my_engine = sf.engine.StreamBroadcastEngine.create(
2383
+ ... "MainStreamEngine", table_schema, [output_table1, output_table2]
2384
+ ... ).submit()
2385
+ """
2386
+ ...
2387
+
2388
+
2389
+ class TimeSeriesEngineStat(EngineStat):
2390
+ user: str
2391
+ """
2392
+ Name of the user who created the streaming engine.
2393
+ """
2394
+ status: Literal["OK", "FATAL"]
2395
+ """
2396
+ Status of the streaming engine. "OK" means available; "FATAL" means unavailable.
2397
+ """
2398
+ last_err_msg: str
2399
+ """
2400
+ The latest error message.
2401
+ """
2402
+ window_time: str
2403
+ """
2404
+ The size of the data window.
2405
+ """
2406
+ step: int
2407
+ """
2408
+ The duration between 2 adjacent windows.
2409
+ """
2410
+ use_system_time: bool
2411
+ """
2412
+ Whether the calculations are performed based on the system time when data is
2413
+ ingested into the engine.
2414
+ """
2415
+ num_groups: int
2416
+ """
2417
+ The number of groups that the streaming engine has handled.
2418
+ """
2419
+ num_rows: int
2420
+ """
2421
+ The number of records that have entered the streaming engine.
2422
+ """
2423
+ num_metrics: int
2424
+ """
2425
+ The number of metrics calculated by the streaming engine.
2426
+ """
2427
+ metrics: str
2428
+ """
2429
+ The metacode of the metrics calculated by the streaming engine.
2430
+ """
2431
+ snapshot_dir: str
2432
+ """
2433
+ The directory where engine snapshots are saved.
2434
+ """
2435
+ snapshot_interval: int
2436
+ """
2437
+ The interval at which snapshots are saved.
2438
+ """
2439
+ snapshot_msg_id: int
2440
+ """
2441
+ The msgId of the engine snapshot.
2442
+ """
2443
+ snapshot_timestamp: Timestamp
2444
+ """
2445
+ The timestamp of the snapshot.
2446
+ """
2447
+ garbage_size: int
2448
+ """
2449
+ The threshold of the number of records in memory that triggers memory cleaning.
2450
+ """
2451
+ memory_used: int
2452
+ """
2453
+ The amount of memory currently used by the engine (in bytes).
2454
+ """
2455
+
2456
+
2457
+ class TimeSeriesEngine(StreamEngine):
2458
+ """The time-series streaming engine conducts real-time time-series calculations with moving
2459
+ windows.
2460
+
2461
+ ``TimeSeriesEngine.create`` returns a Builder object; call ``submit`` on it to create an
2462
+ Engine object into which you can ingest data for stream processing.
2463
+
2464
+ There are two types of aggregate operators in the time-series engine: incremental operators
2465
+ and full operators. Incremental operators incrementally aggregate the data as they arrive
2466
+ without keeping the historical data. Full operators (e.g., user-defined aggregate functions,
2467
+ unoptimized built-in aggregate functions, or functions with nested state functions) keep all
2468
+ the data in a window and recompute the output as a full refresh whenever new data arrives.
2469
+
2470
+ The following aggregate operators in the time-series engine are optimized for incremental
2471
+ computations: ``corr``, ``covar``, ``first``, ``last``, ``max``, ``med``, ``min``,
2472
+ ``percentile``, ``quantile``, ``std``, ``var``, ``sum``, ``sum2``, ``sum3``, ``sum4``,
2473
+ ``wavg``, ``wsum``, ``count``, ``firstNot``, ``ifirstNot``, ``lastNot``, ``ilastNot``, ``imax``, ``imin``,
2474
+ ``nunique``, ``prod``, ``sem``, ``mode``, ``searchK``, ``beta``, ``avg``.
2475
+
2476
+ **Windowing Logic**
2477
+
2478
+ Window boundaries: The engine automatically adjusts the starting point of the first window.
2479
+ (See parameter description for ``step`` and ``round_time``, and the Alignment Rules section).
2480
+
2481
+ Window properties:
2482
+
2483
+ ``window_size`` - the size of each window;
2484
+ ``closed`` - whether the left and right boundaries of a window are inclusive or exclusive;
2485
+ ``step`` - the duration of time between windows;
2486
+ ``use_system_time`` - specifies whether values are windowed based on the time column in
2487
+ the data or on the system time of data ingestion.
2488
+
2489
+ **Calculation Rules**
2490
+
2491
+ - If ``time_col`` is specified, its values must be increasing. If ``key_col`` is specified to
2492
+ group the data, the values in ``time_col`` must be increasing within each group specified by
2493
+ ``key_col``. Otherwise, out-of-order data will be discarded.
2494
+ - If ``use_system_time`` = true, the calculation of a window is triggered as soon as the
2495
+ window ends. If ``use_system_time`` = false (with ``time_col`` specified), the calculation
2496
+ of a window is triggered by the arrival of the next record after the window ends. To
2497
+ trigger the calculation for the uncalculated windows, you can specify the parameter
2498
+ ``update_time`` or ``force_trigger_time``.
2499
+ - If ``fill`` is unspecified or "None", only windows with calculation results are output. If
2500
+ ``fill`` is specified, all windows are output, and the empty windows are filled using the
2501
+ specified filling method.
2502
+ - If ``update_time`` = 0, incoming records in the current window can be immediately
2503
+ calculated and output.
2504
+
2505
+ **Other Features**
2506
+
2507
+ - Data/state cleanup: You can set a cleanup rule to clear historical data. (See parameters
2508
+ ``key_purge_filter`` and ``key_purge_freq_in_sec``)
2509
+ - Snapshot: Snapshot mechanism is used to restore the streaming engine to the latest
2510
+ snapshot after system interruption. (See parameters ``snapshot_dir`` and
2511
+ ``snapshot_interval_in_msg_count``)
2512
+
2513
+ **Alignment Rules**
2514
+
2515
+ To facilitate observation and comparison of calculation results, the engine automatically
2516
+ adjusts the starting point of the first window. The alignment size (integer) is determined by
2517
+ the parameters `step`, `round_time`, and the precision of `time_col`. When the time-series
2518
+ engine calculates within groups, the windows of all groups are aligned uniformly, and the
2519
+ window boundaries are identical across groups.
2520
+
2521
+ - Case 1: `time_col` is of type Minute (HH:mm)
2522
+
2523
+ +----------------+-------------------+
2524
+ | step | alignment_size |
2525
+ +================+===================+
2526
+ | 0 ~ 2 | 2 |
2527
+ +----------------+-------------------+
2528
+ | 3 | 3 |
2529
+ +----------------+-------------------+
2530
+ | 4 ~ 5 | 5 |
2531
+ +----------------+-------------------+
2532
+ | 6 ~ 10 | 10 |
2533
+ +----------------+-------------------+
2534
+ | 11 ~ 15 | 15 |
2535
+ +----------------+-------------------+
2536
+ | 16 ~ 20 | 20 |
2537
+ +----------------+-------------------+
2538
+ | 21 ~ 30 | 30 |
2539
+ +----------------+-------------------+
2540
+ | > 30 | 60 (1 hour) |
2541
+ +----------------+-------------------+
2542
+
2543
+ If `round_time` = True:
2544
+ - The value of `alignment_size` is the same as the above table if `step` ≤ 30.
2545
+ - If `step` > 30, then:
2546
+
2547
+ +----------------+-------------------+
2548
+ | step | alignment_size |
2549
+ +================+===================+
2550
+ | 31 ~ 60 | 60 (1 hour) |
2551
+ +----------------+-------------------+
2552
+ | 61 ~ 120 | 120 (2 hours) |
2553
+ +----------------+-------------------+
2554
+ | 121 ~ 180 | 180 (3 hours) |
2555
+ +----------------+-------------------+
2556
+ | 181 ~ 300 | 300 (5 hours) |
2557
+ +----------------+-------------------+
2558
+ | 301 ~ 600 | 600 (10 hours) |
2559
+ +----------------+-------------------+
2560
+ | 601 ~ 900 | 900 (15 hours) |
2561
+ +----------------+-------------------+
2562
+ | 901 ~ 1200 | 1200 (20 hours) |
2563
+ +----------------+-------------------+
2564
+ | 1201 ~ 1800 | 1800 (30 hours) |
2565
+ +----------------+-------------------+
2566
+ | > 1800 | 3600 (60 hours) |
2567
+ +----------------+-------------------+
2568
+
2569
+ - Case 2: `time_col` is of type DateTime (yyyy-MM-dd HH:mm:ss) or Second (HH:mm:ss)
2570
+
2571
+ If `round_time` = False:
2572
+
2573
+ +----------------+-------------------+
2574
+ | step | alignment_size |
2575
+ +================+===================+
2576
+ | 0 ~ 2 | 2 |
2577
+ +----------------+-------------------+
2578
+ | 3 | 3 |
2579
+ +----------------+-------------------+
2580
+ | 4 ~ 5 | 5 |
2581
+ +----------------+-------------------+
2582
+ | 6 ~ 10 | 10 |
2583
+ +----------------+-------------------+
2584
+ | 11 ~ 15 | 15 |
2585
+ +----------------+-------------------+
2586
+ | 16 ~ 20 | 20 |
2587
+ +----------------+-------------------+
2588
+ | 21 ~ 30 | 30 |
2589
+ +----------------+-------------------+
2590
+ | > 30 | 60 (1 minute) |
2591
+ +----------------+-------------------+
2592
+
2593
+ If `round_time` = True:
2594
+ - The value of `alignment_size` is the same as the above table if `step` ≤ 30.
2595
+ - If `step` > 30, then:
2596
+
2597
+ +----------------+-------------------+
2598
+ | step | alignment_size |
2599
+ +================+===================+
2600
+ | 31 ~ 60 | 60 (1 minute) |
2601
+ +----------------+-------------------+
2602
+ | 61 ~ 120 | 120 (2 minutes) |
2603
+ +----------------+-------------------+
2604
+ | 121 ~ 180 | 180 (3 minutes) |
2605
+ +----------------+-------------------+
2606
+ | 181 ~ 300 | 300 (5 minutes) |
2607
+ +----------------+-------------------+
2608
+ | 301 ~ 600 | 600 (10 minutes) |
2609
+ +----------------+-------------------+
2610
+ | 601 ~ 900 | 900 (15 minutes) |
2611
+ +----------------+-------------------+
2612
+ | 901 ~ 1200 | 1200 (20 minutes) |
2613
+ +----------------+-------------------+
2614
+ | 1201 ~ 1800 | 1800 (30 minutes) |
2615
+ +----------------+-------------------+
2616
+ | > 1800 | 3600 (1 hour) |
2617
+ +----------------+-------------------+
2618
+
2619
+ - Case 3: `time_col` is of type Timestamp (yyyy-MM-dd HH:mm:ss.mmm) or Time (HH:mm:ss.mmm)
2620
+
2621
+ If `round_time` = False:
2622
+
2623
+ +----------------+-------------------+
2624
+ | step | alignment_size |
2625
+ +================+===================+
2626
+ | 0 ~ 2ms | 2ms |
2627
+ +----------------+-------------------+
2628
+ | 3ms ~ 5ms | 5ms |
2629
+ +----------------+-------------------+
2630
+ | 6ms ~ 10ms | 10ms |
2631
+ +----------------+-------------------+
2632
+ | 11ms ~ 20ms | 20ms |
2633
+ +----------------+-------------------+
2634
+ | 21ms ~ 25ms | 25ms |
2635
+ +----------------+-------------------+
2636
+ | 26ms ~ 50ms | 50ms |
2637
+ +----------------+-------------------+
2638
+ | 51ms ~ 100ms | 100ms |
2639
+ +----------------+-------------------+
2640
+ | 101ms ~ 200ms | 200ms |
2641
+ +----------------+-------------------+
2642
+ | 201ms ~ 250ms | 250ms |
2643
+ +----------------+-------------------+
2644
+ | 251ms ~ 500ms | 500ms |
2645
+ +----------------+-------------------+
2646
+ | > 500ms | 1000ms |
2647
+ +----------------+-------------------+
2648
+
2649
+ If `round_time` = True:
2650
+ - The value of `alignment_size` is the same as the above table if `step` ≤ 30000.
2651
+ - If `step` > 30000, then:
2652
+
2653
+ +------------------+---------------------+
2654
+ | step | alignment_size |
2655
+ +==================+=====================+
2656
+ | 30001 ~ 60000 | 60000 (1 minute) |
2657
+ +------------------+---------------------+
2658
+ | 60001 ~ 120000 | 120000 (2 minutes) |
2659
+ +------------------+---------------------+
2660
+ | 120001 ~ 300000 | 300000 (5 minutes) |
2661
+ +------------------+---------------------+
2662
+ | 300001 ~ 600000 | 600000 (10 minutes) |
2663
+ +------------------+---------------------+
2664
+ | 600001 ~ 900000 | 900000 (15 minutes) |
2665
+ +------------------+---------------------+
2666
+ | 900001 ~ 1200000 | 1200000 (20 minutes)|
2667
+ +------------------+---------------------+
2668
+ | 1200001 ~ 1800000| 1800000 (30 minutes)|
2669
+ +------------------+---------------------+
2670
+ | > 1800000 | 3600000 (1 hour) |
2671
+ +------------------+---------------------+
2672
+
2673
+ - Case 4: `time_col` is of type NanoTimestamp (yyyy-MM-dd HH:mm:ss.nnnnnnnnn) or NanoTime
2674
+ (HH:mm:ss.nnnnnnnnn)
2675
+
2676
+ If `round_time` = False:
2677
+
2678
+ +----------------+-------------------+
2679
+ | step | alignment_size |
2680
+ +================+===================+
2681
+ | 0 ~ 2ns | 2ns |
2682
+ +----------------+-------------------+
2683
+ | 3ns ~ 5ns | 5ns |
2684
+ +----------------+-------------------+
2685
+ | 6ns ~ 10ns | 10ns |
2686
+ +----------------+-------------------+
2687
+ | 11ns ~ 20ns | 20ns |
2688
+ +----------------+-------------------+
2689
+ | 21ns ~ 25ns | 25ns |
2690
+ +----------------+-------------------+
2691
+ | 26ns ~ 50ns | 50ns |
2692
+ +----------------+-------------------+
2693
+ | 51ns ~ 100ns | 100ns |
2694
+ +----------------+-------------------+
2695
+ | 101ns ~ 200ns | 200ns |
2696
+ +----------------+-------------------+
2697
+ | 201ns ~ 250ns | 250ns |
2698
+ +----------------+-------------------+
2699
+ | 251ns ~ 500ns | 500ns |
2700
+ +----------------+-------------------+
2701
+ | > 500ns | 1000ns |
2702
+ +----------------+-------------------+
2703
+
2704
+ If `round_time` = True:
2705
+
2706
+ +----------------+-------------------+
2707
+ | step | alignment_size |
2708
+ +================+===================+
2709
+ | 1000ns ~ 1ms | 1ms |
2710
+ +----------------+-------------------+
2711
+ | 1ms ~ 10ms | 10ms |
2712
+ +----------------+-------------------+
2713
+ | 10ms ~ 100ms | 100ms |
2714
+ +----------------+-------------------+
2715
+ | 100ms ~ 1s | 1s |
2716
+ +----------------+-------------------+
2717
+ | 1s ~ 2s | 2s |
2718
+ +----------------+-------------------+
2719
+ | 2s ~ 3s | 3s |
2720
+ +----------------+-------------------+
2721
+ | 3s ~ 5s | 5s |
2722
+ +----------------+-------------------+
2723
+ | 5s ~ 10s | 10s |
2724
+ +----------------+-------------------+
2725
+ | 10s ~ 15s | 15s |
2726
+ +----------------+-------------------+
2727
+ | 15s ~ 20s | 20s |
2728
+ +----------------+-------------------+
2729
+ | 20s ~ 30s | 30s |
2730
+ +----------------+-------------------+
2731
+ | > 30s | 1min |
2732
+ +----------------+-------------------+
2733
+
2734
+ If the time of the first record is x (of type Timestamp), then the starting time of
2735
+ the first window is adjusted to be `timeType_cast(x/alignment_size*alignment_size+step-
2736
+ window_size)`, where "/" produces only the integer part after division. For example, if the
2737
+ time of the first record is 2018.10.08T01:01:01.365, ``window_size`` = 120000, and ``step`` =
2738
+ 60000, then ``alignment_size`` = 60000, and the starting time of the first window is
2739
+ ``timestamp(2018.10.08T01:01:01.365/60000*60000+60000-120000)=2018.10.08T01:00:00.000``.
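+
+ Examples
+ --------
+ A minimal sketch of the builder flow; the schema, output table, and
+ one-minute window (60000 ms) are illustrative, and ``metrics`` stands for
+ a previously constructed MetaCode such as ``<[avg(price)]>`` (see
+ ``create`` below for the full parameter list):
+
+ >>> import swordfish as sf
+ >>> schema = {"time": "TIMESTAMP", "sym": "STRING", "price": "DOUBLE"}
+ >>> out = sf.table(types={"time": "TIMESTAMP", "sym": "STRING", "avg_price": "DOUBLE"})
+ >>> engine = sf.engine.TimeSeriesEngine.create(
+ ...     "demoTSEngine", schema, out, 60000, 60000, metrics,
+ ...     time_col="time", key_col="sym",
+ ... ).submit()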
2740
+ """
2741
+ engine_type: EngineType
2742
+ stat: TimeSeriesEngineStat
2743
+
2744
+ @classmethod
2745
+ def create(
2746
+ cls, name: str, table_schema: Union[Table, TypeDict], outputs: Table,
2747
+ window_size: Union[int, List[int]], step: int, metrics: Union[MetaCode, AnyVector], *,
2748
+ time_col: Optional[Union[List[str], str]] = None,
2749
+ use_system_time: bool = False,
2750
+ key_col: Optional[Union[List[str], str]] = None,
2751
+ garbage_size: int = 5000,
2752
+ update_time: Optional[int] = None,
2753
+ use_window_start_time: bool = False,
2754
+ round_time: bool = True,
2755
+ snapshot_dir: Optional[Union[Path, str]] = None,
2756
+ snapshot_interval_in_msg_count: Optional[int] = None,
2757
+ fill: Union[Literal["none", "null", "ffill"], Constant, List[Union[Literal["null", "ffill"], Constant]]] = "none",
2758
+ force_trigger_time: Optional[int] = None,
2759
+ key_purge_freq_in_sec: Optional[int] = None,
2760
+ closed: Literal["left", "right"] = "left",
2761
+ output_elapsed_microseconds: bool = False,
2762
+ sub_window: Optional[Union[int, Constant]] = None,
2763
+ parallelism: int = 1,
2764
+ accepted_delay: int = 0,
2765
+ output_handler: Optional[FunctionDef] = None,
2766
+ msg_as_table: bool = False,
2767
+ ) -> TimeSeriesEngineBuilder:
2768
+ """
2769
+ Creates a time-series streaming engine with the specified parameters.
2770
+
2771
+ Parameters
2772
+ ----------
2773
+ name : str
2774
+ The name of the engine. Can contain letters, numbers, and "_", and must
2775
+ start with a letter.
2776
+ table_schema : Union[Table, TypeDict]
2777
+ Column names and types of the input stream. If a Table is provided,
2778
+ its schema must match the subscribed stream table.
2779
+ outputs : Table
2780
+ The output table for results. Can be in-memory or DFS. Create an empty
2781
+ table and specify column names and types before calling `create`.
2782
+ Output columns:
2783
+
2784
+ - First column is TIMESTAMP type.
2785
+
2786
+ - If `use_system_time` is True, stores calculation start time.
2787
+
2788
+ - If False, uses `time_col` values.
2789
+
2790
+ - Next column is `context_by_col` (if specified).
2791
+
2792
+ - If `output_elapsed_microseconds` is True, add LONG and INT columns.
2793
+
2794
+ - Remaining columns store metric results. If a metric result is an array vector,
2795
+ the output column must be array vector type.
2796
+
2797
+ window_size : int or list of int
+ Size(s) of the calculation windows.
+ step : int
+ Step size for moving windows. `window_size` must be a multiple of `step`.
+ Unit depends on `use_system_time`:
+
+ - If True, the unit is millisecond.
+
+ - If False, the unit matches `time_col`.
+
+ metrics : MetaCode or AnyVector
+ Calculation formulas. Can be:
+
+ - Aggregate functions, e.g., `<[sum(qty), avg(price)]>`.
+
+ - Expressions on previous results, e.g., `<[avg(price1)-avg(price2)]>`.
+
+ - Calculations on multiple columns, e.g., `<[std(price1-price2)]>`.
+
+ - Functions with multiple returns, e.g., `<func(price) as `col1`col2>`.
+
+ Column names in `metrics` are not case-sensitive and can differ from
+ input table columns. Nested aggregate functions are not supported.
+ time_col : Optional[Union[List[str], str]], optional
+ Time column(s) for the stream table. Default is None.
+ use_system_time : bool, optional
+ Whether to use system time for calculations. Default is False.
+ key_col : Optional[Union[List[str], str]], optional
+ Grouping column(s). Default is None.
+ garbage_size : int, optional
+ Threshold for garbage collection of historical data. Default is 5000.
+ update_time : Optional[int], optional
+ Interval to trigger window calculations before the window ends. Default is None.
+ use_window_start_time : bool, optional
+ Whether the output table time column uses the window start time. Default is False.
+ round_time : bool, optional
+ Aligns the window boundary based on the alignment rules above. Default is True.
+ snapshot_dir : Optional[Union[Path, str]], optional
+ Directory to save engine snapshots. Default is None.
+ snapshot_interval_in_msg_count : Optional[int], optional
+ Number of messages to receive before saving the next snapshot. Default is None.
+ fill : Union[Literal["none", "null", "ffill"], Constant,
+ List[Union[Literal["null", "ffill"], Constant]]], optional
+ Filling method(s) for empty windows in a group. Default is "none".
+ force_trigger_time : Optional[int], optional
+ Waiting time before forcibly triggering calculation in uncalculated windows.
+ Default is None.
+ key_purge_freq_in_sec : Optional[int], optional
+ Interval in seconds at which to remove inactive groups. Default is None.
+ closed : Literal["left", "right"], optional
+ Whether the left or right boundary is included in the window. Default is "left".
+ output_elapsed_microseconds : bool, optional
+ Whether to output the elapsed time (in microseconds). Default is False.
+ sub_window : Optional[Union[int, Constant]], optional
+ Range of the subwindow within the window defined by `window_size`. Default is None.
+ parallelism : int, optional
+ Number of worker threads for parallel computation. Default is 1.
+ accepted_delay : int, optional
+ Maximum delay for each window to accept data. Default is 0.
+ output_handler : Optional[FunctionDef], optional
+ Unary or partial function to handle output. If set, the engine does not
+ write results to the output table directly. Default is None.
+ msg_as_table : bool, optional
+ Whether output data is passed to `output_handler` as a table or an AnyVector.
+ Default is False.
+
+ Returns
+ -------
+ TimeSeriesEngineBuilder
+ Instance for further configuration and execution.
+
+ Examples
+ --------
+ >>> import swordfish as sf
+ >>> table_schema = {"timestamp": "DATETIME", "sensor_id": "LONG",
+ ... "temperature": "DOUBLE", "humidity": "DOUBLE"}
+ >>> output_table_1 = sf.table(types={"timestamp": "DATETIME",
+ ... "sensor_id": "LONG", "temperature": "DOUBLE"})
+ >>> output_table_2 = sf.table(types={"timestamp": "DATETIME",
+ ... "sensor_id": "LONG", "humidity": "DOUBLE"})
+ >>> my_engine = sf.engine.TimeSeriesEngine.create(
+ ... name="SensorTimeSeriesEngine",
+ ... table_schema=table_schema,
+ ... outputs=[output_table_1, output_table_2],
+ ... window_size=5,
+ ... step=1,
+ ... metrics=["temperature", "humidity"],
+ ... time_col="timestamp",
+ ... use_system_time=True,
+ ... key_col="sensor_id",
+ ... garbage_size=5000,
+ ... update_time=1000,
+ ... snapshot_dir="/path/to/snapshot/dir",
+ ... snapshot_interval_in_msg_count=100,
+ ... fill="ffill",
+ ... parallelism=4,
+ ... accepted_delay=10,
+ ... output_handler=None,
+ ... msg_as_table=True,
+ ... ).submit()
+ """
+ ...
+
+
+ class CrossSectionalEngineStat(EngineStat):
+ user: str
+ """
+ Name of the user who created the streaming engine.
+ """
+ status: Literal["OK", "FATAL"]
+ """
+ Status of the streaming engine. "OK" means available; "FATAL" means unavailable.
+ """
+ last_err_msg: str
+ """
+ The latest error message from the engine.
+ """
+ num_rows: int
+ """
+ The number of records that have entered the streaming engine.
+ """
+ num_metrics: int
+ """
+ The number of metrics calculated by the streaming engine.
+ """
+ metrics: str
+ """
+ The metacode of the metrics calculated by the streaming engine.
+ """
+ triggering_pattern: str
+ """
+ How calculations are triggered in the engine.
+ """
+ triggering_interval: int
+ """
+ The duration in milliseconds between two adjacent calculations.
+ """
+ snapshot_dir: str
+ """
+ The directory where engine snapshots are saved.
+ """
+ snapshot_interval: int
+ """
+ The interval at which to save snapshots.
+ """
+ snapshot_msg_id: int
+ """
+ The message ID of the engine snapshot.
+ """
+ snapshot_timestamp: Timestamp
+ """
+ The timestamp when the snapshot was created.
+ """
+ memory_used: int
+ """
+ The amount of memory currently used by the engine (in bytes).
+ """
+
+
+ class CrossSectionalEngine(StreamEngine):
+ """
+ The cross-sectional streaming engine is used for real-time computing on
+ cross-sectional data, which is a collection of observations (behaviors) for
+ multiple subjects (entities such as different stocks) at a single point in time.
+
+ ``CrossSectionalEngine.create`` returns a Builder object; call its ``submit``
+ method to create a keyed table object with the `key_col` parameter as the key.
+ The keyed table is updated every time a new record arrives. If the
+ `last_batch_only` parameter is set to True, the table only maintains the latest
+ record in each group. When new data is ingested into the engine:
+
+ - If `metrics` and `output` are specified, the engine first updates the keyed
+ table, then performs calculations on the latest data and outputs the results
+ to `output`.
+
+ - If `metrics` and `output` are not specified, the engine only updates the keyed
+ table.
+
+ Calculation can be triggered by the number of records or by a time interval. See
+ the ``create`` parameters `triggering_pattern` and `triggering_interval`. Note that
+ if `context_by_col` is specified, the data will be grouped by the specified
+ columns and calculated by group.
+
+ The snapshot mechanism is used to restore the streaming engine to the latest
+ snapshot after a system interruption. (See the ``create`` parameters `snapshot_dir`
+ and `snapshot_interval_in_msg_count`.)
+ """
+ engine_type: EngineType
+ stat: CrossSectionalEngineStat
+
+ @classmethod
+ def create(
+ cls, name: str, table_schema: Union[Table, TypeDict],
+ key_col: Union[List[str], str],
+ *,
+ metrics=None,
+ output: Optional[Table] = None,
+ triggering_pattern: Literal["per_batch", "per_row", "interval", "key_count",
+ "data_interval"] = "per_batch",
+ triggering_interval: Any = None,
+ use_system_time: bool = True,
+ time_col: Optional[str] = None,
+ last_batch_only: bool = False,
+ context_by_col: Optional[Union[List[str], str]] = None,
+ snapshot_dir: Optional[Union[Path, str]] = None,
+ snapshot_interval_in_msg_count: Optional[int] = None,
+ output_elapsed_microseconds: bool = False,
+ round_time: bool = True,
+ key_filter: Optional[MetaCode] = None,
+ updated_context_groups_only: bool = False,
+ ) -> CrossSectionalEngineBuilder:
+ """
3009
+ Creates a cross-sectional streaming engine with the specified parameters and
3010
+ configuration.
3011
+
3012
+ Parameters
3013
+ ----------
3014
+ name : str
3015
+ The name of the engine. It can contain letters, numbers and "_" and must
3016
+ start with a letter.
3017
+ table_schema : Union[Table, TypeDict]
3018
+ Specifies the column names and corresponding types of the input stream.
3019
+ If a Table is provided, its schema must match the schema of the subscribed
3020
+ stream table. Whether the table contains data or not doesn't matter.
3021
+ key_col : Union[List[str], str]
3022
+ One or more columns in the stream table as the key columns. For each key
3023
+ entry, only the latest record is used in the calculation.
3024
+ metrics : optional
3025
+ The formulas for calculation using MetaCode or an AnyVector. Defaults to
3026
+ None.
3027
+ output : Table
3028
+ The output table for the results. It can be an in-memory table or a DFS
3029
+ table. Create an empty table and specify the column names and types before
3030
+ calling `create`. Make sure the column types match the calculation results
3031
+ of the corresponding metrics. The columns in the output table are in the
3032
+ following order:
3033
+
3034
+ - The first column is of TIMESTAMP type.
3035
+
3036
+ - If ``use_system_time`` = True, the column stores the time when each
3037
+ calculation starts.
3038
+
3039
+ - If ``use_system_time`` = False, it takes the values of ``time_col``.
3040
+
3041
+ - The following column is the ``context_by_col`` (if specified).
3042
+
3043
+ - If the ``output_elapsed_microseconds`` is set to True, specify two more
3044
+ columns: a LONG column and an INT column.
3045
+
3046
+ - The remaining columns store the calculation results of metrics.
3047
+
3048
+ triggering_pattern : Literal["per_batch", "per_row", "interval", "key_count", "data_interval"], optional
3049
+ Specifies how to trigger the calculations.
3050
+ triggering_interval : Any, optional
3051
+ The triggering interval for the system based on the triggering pattern.
3052
+ Defaults to None.
3053
+ use_system_time : bool, optional
3054
+ Whether the calculations are performed based on the system time when data
3055
+ is ingested into the engine. Defaults to True.
3056
+ time_col : Optional[str], optional
3057
+ The time column in the stream table to which the engine subscribes if
3058
+ ``use_system_time`` = False. Defaults to None.
3059
+ last_batch_only : bool, optional
3060
+ Whether to keep only the records with the latest timestamp in the engine.
3061
+ Defaults to False.
3062
+ context_by_col : Optional[Union[List[str], str]], optional
3063
+ The grouping column(s) by which calculations are performed within groups.
3064
+ Only takes effect if `metrics` and `output` are specified. Defaults to None.
3065
+ snapshot_dir : Optional[Union[Path, str]], optional
3066
+ The directory where the streaming engine snapshot is saved. Defaults to None.
3067
+ snapshot_interval_in_msg_count : Optional[int], optional
3068
+ The number of messages to receive before saving the next snapshot. Defaults
3069
+ to None.
3070
+ output_elapsed_microseconds : bool, optional
3071
+ Whether to output the elapsed time (in microseconds). Defaults to False.
3072
+ round_time : bool, optional
3073
+ Aligns the window boundary based on the specified alignment rule. Defaults
3074
+ to True.
3075
+ key_filter : Optional[MetaCode], optional
3076
+ The conditions for filtering keys in the keyed table returned by the engine.
3077
+ Defaults to None.
3078
+ updated_context_groups_only : bool, optional
3079
+ Whether to compute only the groups updated with new data since the last
3080
+ output. Defaults to False.
3081
+
3082
+ Returns
3083
+ -------
3084
+ CrossSectionalEngineBuilder
3085
+ An instance of ``CrossSectionalEngineBuilder`` that allows further
3086
+ configuration and execution of the cross-sectional engine. This object
3087
+ enables setting up the opional parameters.
3088
+
3089
+ Examples
3090
+ --------
3091
+ >>> import swordfish as sf
3092
+ >>> table_schema = {"timestamp": "DATETIME", "symbol": "STRING", "price":
3093
+ ... "DOUBLE", "volume": "LONG"}
3094
+ >>> output_table = sf.table(types={"symbol": "STRING", "avg_price": "DOUBLE",
3095
+ ... "total_volume": "LONG"})
3096
+ >>> my_engine = sf.engine.CrossSectionalEngine.create(
3097
+ ... name="StockAnalysisEngine",
3098
+ ... table_schema=table_schema,
3099
+ ... key_col="symbol",
3100
+ ... metrics=["avg(price)", "sum(volume)"],
3101
+ ... output=output_table,
3102
+ ... triggering_pattern="interval",
3103
+ ... triggering_interval=10,
3104
+ ... use_system_time=True,
3105
+ ... time_col="timestamp",
3106
+ ... last_batch_only=False,
3107
+ ... snapshot_dir="/path/to/snapshot",
3108
+ ... snapshot_interval_in_msg_count=1000,
3109
+ ... round_time=True,
3110
+ ... updated_context_groups_only=True
3111
+ ... ).submit()
3112
+ """
3113
+ ...
3114
+
+
+ class ReactiveStateEngineStat(EngineStat):
+ user: str
+ """
+ Name of the user who created the streaming engine.
+ """
+ status: Literal["OK", "FATAL"]
+ """
+ Status of the streaming engine. "OK" means available; "FATAL" means unavailable.
+ """
+ last_err_msg: str
+ """
+ The latest error message from the engine.
+ """
+ num_groups: int
+ """
+ The number of groups that the streaming engine has handled.
+ """
+ num_rows: int
+ """
+ The number of records that have entered the streaming engine.
+ """
+ num_metrics: int
+ """
+ The number of metrics calculated by the streaming engine.
+ """
+ snapshot_dir: str
+ """
+ The directory where engine snapshots are saved.
+ """
+ snapshot_interval: int
+ """
+ The interval at which to save snapshots.
+ """
+ snapshot_msg_id: int
+ """
+ The message ID of the engine snapshot.
+ """
+ snapshot_timestamp: Timestamp
+ """
+ The timestamp when the snapshot was created.
+ """
+ memory_used: int
+ """
+ The amount of memory currently used by the engine (in bytes).
+ """
+
+
+ class ReactiveStateEngine(StreamEngine):
+ """
+ The reactive state streaming engine maintains and updates states for stateful
+ computations, ensuring efficient processing of continuous data streams. It
+ triggers an output for each input record, supports only vectorized functions
+ as operators, and optimizes stateful operations.
+
+ .. note::
+ Only the following optimized state functions can be used in the engine.
+ Alternatively, you can implement a stateful indicator by defining a user-
+ defined function and declaring it with the keyword @state before the
+ definition. Aggregate functions should be avoided.
+
+ Cumulative functions: `cumavg`, `cumsum`, `cumprod`, `cumcount`, `cummin`,
+ `cummax`, `cumvar`, `cumvarp`, `cumstd`, `cumstdp`, `cumcorr`, `cumcovar`,
+ `cumbeta`, `cumwsum`, `cumwavg`, `cumfirstNot`, `cumlastNot`, `cummed`,
+ `cumpercentile`, `cumnunique`, `cumPositiveStreak`, `cummdd`
+
+ Moving functions: `ema`, `mavg`, `msum`, `mcount`, `mprod`, `mvar`, `mvarp`,
+ `mstd`, `mstdp`, `mskew`, `mkurtosis`, `mmin`, `mmax`, `mimin`, `mimax`,
+ `mmed`, `mpercentile`, `mrank`, `mcorr`, `mcovar`, `mbeta`, `mwsum`,
+ `mwavg`, `mmad`, `mfirst`, `mlast`, `mslr`, `tmove`, `tmfirst`, `tmlast`,
+ `tmsum`, `tmavg`, `tmcount`, `tmvar`, `tmvarp`, `tmstd`, `tmstdp`,
+ `tmprod`, `tmskew`, `tmkurtosis`, `tmmin`, `tmmax`, `tmmed`,
+ `tmpercentile`, `tmrank`, `tmcovar`, `tmbeta`, `tmcorr`, `tmwavg`,
+ `tmwsum`, `tmoving`, `moving`, `sma`, `wma`, `dema`, `tema`, `trima`,
+ `linearTimeTrend`, `talib`, `t3`, `ma`, `mmaxPositiveStreak`
+
+ .. note::
+ If `talib` is used as a state function, its first parameter must be a state
+ function.
+
+ Row-based functions: `rowMin`, `rowMax`, `rowAnd`, `rowOr`, `rowXor`,
+ `rowProd`, `rowSum`, `rowSum2`, `rowSize`, `rowCount`, `rowAvg`,
+ `rowKurtosis`, `rowSkew`, `rowVar`, `rowVarp`, `rowStd`, `rowStdp`
+
+ Order-sensitive functions: `deltas`, `ratios`, `ffill`, `move`, `prev`,
+ `iterate`, `ewmMean`, `ewmVar`, `ewmStd`, `ewmCov`, `ewmCorr`,
+ `prevState`, `percentChange`
+
+ TopN functions: `msumTopN`, `mavgTopN`, `mstdpTopN`, `mstdTopN`,
+ `mvarpTopN`, `mvarTopN`, `mcorrTopN`, `mbetaTopN`, `mcovarTopN`,
+ `mwsumTopN`, `cumwsumTopN`, `cumsumTopN`, `cumvarTopN`, `cumvarpTopN`,
+ `cumstdTopN`, `cumstdpTopN`, `cumcorrTopN`, `cumbetaTopN`, `cumavgTopN`,
+ `cumskewTopN`, `cumkurtosisTopN`, `mskewTopN`, `mkurtosisTopN`,
+ `tmsumTopN`, `tmavgTopN`, `tmstdTopN`, `tmstdpTopN`, `tmvarTopN`,
+ `tmvarpTopN`, `tmskewTopN`, `tmkurtosisTopN`, `tmbetaTopN`, `tmcorrTopN`,
+ `tmcovarTopN`, `tmwsumTopN`
+
+ Higher-order functions: `segmentby` (whose first parameter can only take
+ `cumsum`, `cummax`, `cummin`, `cumcount`, `cumavg`, `cumstd`, `cumvar`,
+ `cumstdp`, `cumvarp`), `moving`, `byColumn`, `accumulate`, `window`
+
+ Others: `talibNull`, `topRange`, `lowRange`, `trueRange`
+
+ Functions that can only be used in the reactive state engine:
+ `stateIterate`, `conditionalIterate`, `genericStateIterate`,
+ `genericTStateIterate`
+
+ Calculation Rules
+ -----------------
+ The reactive state engine outputs a result for each input. If multiple
+ records are ingested into the reactive state engine at the same time, the
+ data is calculated in batches. The number of records in each batch is
+ determined by the system.
+
+ - To output only the results that meet the specified conditions, set the
+ parameter `filter`.
+ - To perform calculations by group, set the parameter `key_col`.
+ - To preserve the insertion order of the records in the output table, set
+ the parameter `keep_order`.
+
+ Features
+ --------
+ - State cleanup: States in the engine are maintained by group. A large
+ number of groups may lead to high memory overhead, and you can set a
+ cleanup rule to clear data that are no longer needed. (See parameters
+ `key_purge_filter` and `key_purge_freq_in_second`.)
+ - Snapshot: The snapshot mechanism is used to restore the streaming engine to
+ the latest snapshot after a system interruption. (See parameters
+ `snapshot_dir` and `snapshot_interval_in_msg_count`.)
+ """
+ engine_type: EngineType
+ stat: ReactiveStateEngineStat
+
+ @classmethod
+ def create(
+ cls, name: str, table_schema: Union[Table, TypeDict],
+ output: Table, metrics,
+ *,
+ key_col: Optional[Union[List[str], str]] = None,
+ filter: Optional[MetaCode] = None,
+ snapshot_dir: Optional[Union[Path, str]] = None,
+ snapshot_interval_in_msg_count: Optional[int] = None,
+ keep_order: Optional[bool] = None,
+ key_purge_filter: Optional[MetaCode] = None,
+ key_purge_freq_in_second: Optional[int] = None,
+ output_elapsed_microseconds: bool = False,
+ key_capacity: int = 1024,
+ parallelism: int = 1,
+ output_handler: Optional[FunctionDef] = None,
+ msg_as_table: bool = False,
+ ) -> ReactiveStateEngineBuilder:
+ """
3267
+ Creates a reactive state streaming engine with the specified parameters
3268
+ and configuration.
3269
+
3270
+ Parameters
3271
+ ----------
3272
+ name : str
3273
+ The name of the engine. It can contain letters, numbers and "_" and
3274
+ must start with a letter.
3275
+ table_schema : Union[Table, TypeDict]
3276
+ Specifies the column names and corresponding types of the input
3277
+ stream. If a Table is provided, its schema must match the schema of
3278
+ the subscribed stream table. Whether the table contains data or not
3279
+ doesn't matter.
3280
+ output : Table
3281
+ The output table for the results. It can be an in-memory table or a
3282
+ DFS table. Create an empty table and specify the column names and
3283
+ types before calling `create`. The columns in the output table are
3284
+ in the following order: (1) If `key_col` is specified, the first few
3285
+ columns must match its order. (2) If `output_elapsed_microseconds`
3286
+ is set to True, specify two more columns: a LONG column for elapsed
3287
+ time of each batch and an INT column for total records in each
3288
+ batch. (3) The remaining columns store the calculation results of
3289
+ metrics. Make sure the column types match the calculation results of
3290
+ the corresponding metrics.
3291
+ metrics
3292
+ MetaCode specifying the formulas for calculation. The metacode can
3293
+ include one or more expressions, built-in or user-defined functions,
3294
+ or a constant scalar/vector. Note that the output column for a
3295
+ constant vector must be in array vector form.
3296
+ key_col : Optional[Union[List[str], str]], optional
3297
+ The grouping column(s) for the calculation. Defaults to None.
3298
+ filter : Optional[MetaCode], optional
3299
+ The filtering conditions for the output table. Defaults to None.
3300
+ snapshot_dir : Optional[Union[Path, str]], optional
3301
+ The directory where the streaming engine snapshot is saved. Defaults
3302
+ to None.
3303
+ snapshot_interval_in_msg_count : Optional[int], optional
3304
+ The number of messages to receive before saving the next snapshot.
3305
+ Defaults to None.
3306
+ keep_order : Optional[bool], optional
3307
+ Whether to preserve the insertion order of records in the output
3308
+ table. Defaults to None.
3309
+ key_purge_filter : Optional[MetaCode], optional
3310
+ The filtering conditions to identify the data to be purged from the
3311
+ cache. Defaults to None.
3312
+ key_purge_freq_in_second : Optional[int], optional
3313
+ The time interval (in seconds) to trigger a purge. Defaults to None.
3314
+ output_elapsed_microseconds : bool, optional
3315
+ Whether to output the elapsed time (in microseconds). Defaults to
3316
+ False.
3317
+ key_capacity : int, optional
3318
+ A positive integer indicating the amount of memory allocated for
3319
+ buffering state of each group. Defaults to 1024.
3320
+ parallelism : int, optional
3321
+ A positive integer no greater than 63, indicating the maximum number
3322
+ of workers that can run in parallel. Defaults to 1.
3323
+ output_handler : Optional[FunctionDef], optional
3324
+ A unary function or a partial function with a single unfixed
3325
+ parameter. If set, the engine will not write the calculation results
3326
+ to the output table directly. Instead, the results will be passed as
3327
+ a parameter to the specified function. Defaults to None.
3328
+ msg_as_table : bool, optional
3329
+ Whether the output data is passed into the function (specified by
3330
+ `output_handler`) as a table or as an AnyVector. Defaults to False.
3331
+
3332
+ Returns
3333
+ -------
3334
+ ReactiveStateEngineBuilder
3335
+ An instance of `ReactiveStateEngineBuilder` that allows further
3336
+ configuration and execution of the reactive state engine. This
3337
+ object enables setting up the optional parameters.
3338
+
3339
+ Examples
3340
+ --------
3341
+ >>> import swordfish as sf
3342
+ >>> table_schema = {"timestamp": "DATETIME", "device_id": "STRING",
3343
+ ... "temperature": "DOUBLE", "status": "STRING"}
3344
+ >>> output_table = sf.table(types={"device_id": "STRING",
3345
+ ... "max_temperature": "DOUBLE", "last_status": "STRING"})
3346
+ >>> my_engine = sf.engine.ReactiveStateEngine.create(
3347
+ ... name="DeviceStateTracker",
3348
+ ... table_schema=table_schema,
3349
+ ... output=output_table,
3350
+ ... metrics=["max(temperature)", "last(status)"],
3351
+ ... key_col="device_id",
3352
+ ... filter=None,
3353
+ ... snapshot_dir="/path/to/snapshot",
3354
+ ... snapshot_interval_in_msg_count=1000,
3355
+ ... keep_order=True,
3356
+ ... key_purge_filter=None,
3357
+ ... key_purge_freq_in_second=60,
3358
+ ... key_capacity=4096,
3359
+ ... parallelism=2,
3360
+ ... output_handler=None,
3361
+ ... msg_as_table=True,
3362
+ ... ).submit()
3363
+ """
3364
+ ...
3365
+
+
+ class StreamFilterEngineStat(EngineStat):
+ user: str
+ """
+ Name of the user who created the streaming engine.
+ """
+
+ status: Literal["OK", "FATAL"]
+ """
+ Status of the streaming engine. "OK" means available; "FATAL" means unavailable.
+ """
+
+ last_err_msg: str
+ """
+ The latest error message from the engine.
+ """
+
+ num_rows: int
+ """
+ The number of records that have entered the streaming engine.
+ """
+
+ filters: str
+ """
+ The metacode of the filters used by the streaming engine.
+ """
+
+
+ filter_dict = Dict[Literal["timeRange", "condition", "handler"], Any]
+
+
+ class StreamFilterEngine(StreamEngine):
+ engine_type: EngineType
+ stat: StreamFilterEngineStat
+
+ @classmethod
+ def create(
+ cls, name: str, table_schema: Union[Table, TypeDict],
+ filter: Union[filter_dict, List[filter_dict]],
+ *,
+ msg_schema: Optional[Dict] = None,
+ time_col: Optional[str] = None,
+ condition_col: Optional[str] = None,
+ ) -> StreamFilterEngineBuilder:
+ ...
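+
+ # A minimal usage sketch (an assumption-laden illustration, not part of this
+ # stub: the filter keys follow ``filter_dict`` above, the condition value and
+ # schema are hypothetical, and ``submit()`` is assumed to behave as for the
+ # other engine builders in this file):
+ #
+ # >>> import swordfish as sf
+ # >>> schema = {"timestamp": "TIMESTAMP", "sym": "STRING", "price": "DOUBLE"}
+ # >>> out = sf.table(types=schema)
+ # >>> sf.engine.StreamFilterEngine.create(
+ # ...     name="PriceFilterEngine",
+ # ...     table_schema=schema,
+ # ...     filter={"condition": "<price > 100>", "handler": out},
+ # ...     time_col="timestamp",
+ # ... ).submit()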
+
+
+ #####################################################################
+ # Storage Module
+ #####################################################################
+
+
+ class StorageType(Enum):
+ """
+ Swordfish provides the following storage engines.
+
+ OLAP
+ ----
+ Well-suited for large-scale data analysis (e.g., querying trading volumes
+ for all stocks in a specific time period). The OLAP engine utilizes data
+ partitioning to horizontally divide large datasets into multiple
+ partitions based on specified rules. Within each partition, the engine
+ employs columnar storage for data management. Data partitioning allows
+ for selective column access, reducing unnecessary I/O operations and
+ significantly enhancing query performance.
+
+ TSDB
+ ----
+ Implemented based on the LSM-Tree (Log Structured Merge Tree) model. The
+ TSDB engine is optimized for handling time-series data, providing
+ improved performance and efficiency in data storage, retrieval, and
+ analysis.
+
+ PKEY
+ ----
+ Designed to store data with a unique identifier for each record within a
+ table. The PKEY engine enables faster sorting, searching, and querying
+ operations. It is suitable for real-time updates and efficient queries
+ (e.g., real-time data analysis through CDC integration with OLTP
+ systems).
+ """
+ OLAP: "StorageType" = 0
+ OLTP: "StorageType" = 1
+ TSDB: "StorageType" = 2
+ PKEY: "StorageType" = 4
+
+
+ class OLTPConnectionImpl(BaseConnectionImpl):
+ @classmethod
+ def connect(cls, url: str, option: dict) -> "OLTPConnectionImpl": ...
+
+ def begin_transaction(self): ...
+ def check_transaction(self): ...
+ def commit(self): ...
+ def rollback(self): ...
+
+ def create_table(self, name: str, **kwargs): ...
+ def drop_table(self, name: str): ...
+ def list_table(self): ...
+ def exists_table(self, name: str) -> bool: ...
+ def get_table(self, name: str): ...
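+
+ # A minimal transaction sketch (an illustration only; the url and option
+ # values are hypothetical placeholders, and only the method names above are
+ # taken from this stub):
+ #
+ # >>> conn = OLTPConnectionImpl.connect("<oltp-url>", {})
+ # >>> conn.begin_transaction()
+ # >>> try:
+ # ...     conn.create_table("orders")
+ # ...     conn.commit()
+ # ... except Exception:
+ # ...     conn.rollback()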
+
+
+ class SchemaImpl:
+ def create_partitioned_table(self, *args) -> Table: ...
+ def create_dimension_table(self, *args) -> Table: ...
+ def list_table(self) -> List[str]: ...
+ def exists_table(self, name: str) -> bool: ...
+ def drop_table(self, name: str): ...
+ def truncate_table(self, name: str): ...
+ def get_table(self, name: str) -> Table: ...
+ def get_engine_type(self) -> StorageType: ...
+ def get_handle(self) -> Handle: ...
+
+
+ class CatalogConnectionImpl(BaseConnectionImpl):
+ @classmethod
+ def connect(cls, catalog: str) -> "CatalogConnectionImpl": ...
+
+ def create_schema(self, **kwargs) -> SchemaImpl: ...
+ def list_schema(self) -> List[str]: ...
+ def exists_schema(self, name: str) -> bool: ...
+ def drop_schema(self, name: str): ...
+ def get_schema(self, name: str) -> SchemaImpl: ...
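+
+ # A minimal catalog navigation sketch (an illustration only; the catalog and
+ # schema names and the create_schema keyword are hypothetical, while the
+ # method names come from the stubs above):
+ #
+ # >>> conn = CatalogConnectionImpl.connect("my_catalog")
+ # >>> if conn.exists_schema("trading"):
+ # ...     schema = conn.get_schema("trading")
+ # ... else:
+ # ...     schema = conn.create_schema(name="trading")
+ # >>> schema.list_table()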
+
+
+ #####################################################################
+ # Streaming Module
+ #####################################################################
+
+
+ class PersistenceMetaInfo:
+ size_in_memory: int
+ """
+ The number of records currently stored in memory.
+ """
+
+ asyn_write: bool
+ """
+ Whether data is persisted to disk in asynchronous mode.
+ """
+
+ total_size: int
+ """
+ The total number of records in the stream table.
+ """
+
+ compress: bool
+ """
+ Whether data is stored in compression mode.
+ """
+
+ memory_offset: int
+ """
+ The offset position of the first message in memory relative to all records in
+ the stream table.
+ """
+
+ size_on_disk: int
+ """
+ The number of records that have been persisted to disk.
+ """
+
+ retention_minutes: int
+ """
+ How long (in minutes) the log file will be retained.
+ """
+
+ persistence_dir: str
+ """
+ The directory path where persistent data is stored.
+ """
+
+ hash_value: int
+ """
+ The identifier of the thread responsible for persisting the table to disk.
+ """
+
+ disk_offset: int
+ """
+ The offset position of the first message on disk relative to all records in
+ the stream table.
+ """
+
+
+ class StreamTableInfo:
+ cache_size: int
+ """
+ When cache is purged by size, the threshold for the number of records to be
+ retained in memory is determined based on ``cache_size``.
+ """
+
+ cache_purge_time_column: Optional[str]
+ """
+ The time column in the stream table. When cache is purged by time, it will be
+ conducted based on this column.
+ """
+
+ cache_purge_interval: Duration
+ """
+ The interval to trigger a purge when cache is purged by time.
+ """
+
+ cache_retention_time: Duration
+ """
+ The retention time of cached data when cache is purged by time.
+ """
+
+ rows_in_memory: int
+ """
+ The number of rows currently stored in memory.
+ """
+
+ total_rows: int
+ """
+ The total number of rows in the stream table.
+ """
+
+ memory_used: int
+ """
+ Memory used by the stream table (in bytes).
+ """
+
+
+ class StreamTable(Table):
+ """
+ Stream tables are tables that support real-time data ingestion.
+ """
+ @overload
+ def enable_persistence(
+ self, *,
+ asyn_write: bool = True,
+ compress: bool = True,
+ cache_size: Optional[int] = None,
+ retention_minutes: int = 1440,
+ flush_mode: Literal["async", "sync"] = "async",
+ pre_cache: Optional[int] = None,
+ ) -> Self:
+ ...
+
+ @overload
+ def enable_persistence(
+ self, *,
+ asyn_write: bool = True,
+ compress: bool = True,
+ retention_minutes: int = 1440,
+ flush_mode: Literal["async", "sync"] = "async",
+ pre_cache: Optional[int] = None,
+ cache_purge_time_column: Optional[str] = None,
+ cache_purge_interval: Optional[Duration] = None,
+ cache_retention_time: Optional[Duration] = None,
+ ) -> Self:
+ ...
+
+ def enable_persistence(
+ self, *,
+ asyn_write: bool = True,
+ compress: bool = True,
+ cache_size: Optional[int] = None,
+ retention_minutes: int = 1440,
+ flush_mode: Literal["async", "sync"] = "async",
+ pre_cache: Optional[int] = None,
+ cache_purge_time_column: Optional[str] = None,
+ cache_purge_interval: Optional[Duration] = None,
+ cache_retention_time: Optional[Duration] = None,
+ ) -> Self:
+ """
3633
+ Enables persistence for the stream table, allowing different configurations for
3634
+ cache purge.
3635
+
3636
+ **Prerequisites**
3637
+
3638
+ To enable persistence, specify the ``persistenceDir`` configuration. The
3639
+ persistence location of the table is ``<PERSISTENCE_DIR>/<TABLE_NAME>``. The
3640
+ directory contains data files (named like ``data0.log``, ``data1.log``...) and an
3641
+ index file ``index.log``. The data that has been persisted to disk will be loaded
3642
+ into memory after Swordfish is restarted.
3643
+
3644
+ **Persistence Modes**
3645
+
3646
+ The parameter ``asyn_write`` informs the system whether table persistence is in
3647
+ asynchronous mode. With asynchronous mode (default), new data are pushed to a
3648
+ queue and persistence threads will write the data to disk later. With synchronous
3649
+ mode, the table append operation keeps running until new data are persisted to the
3650
+ disk. In general, asynchronous mode achieves higher throughput.
3651
+
3652
+ With asynchronous mode, table persistence is conducted by a single persistence
3653
+ thread, and the persistence thread may handle multiple tables. If there is only
3654
+ one table to be persisted, an increase in the number of persistence threads
3655
+ doesn't improve performance.
3656
+
3657
+ Note that if asynchronous mode is enabled for data persistence or flush, data
3658
+ loss may occur due to server crash.
3659
+
3660
+ **Cache Purge Settings**
3661
+
3662
+ Stream tables keep all data in memory by default. To prevent excessive memory
3663
+ usage, you can clear cached data using either of the following methods:
3664
+
3665
+ - Cache purge by size: Set ``cache_size`` to specify a threshold for the number
3666
+ of records retained. Older records exceeding the threshold will be removed.
3667
+ The threshold is determined as follows:
3668
+
3669
+ - If the number of records appended in one batch does not exceed
3670
+ ``cache_size``, the threshold is 2.5 * ``cache_size``.
3671
+
3672
+ - If the number of records appended in one batch exceeds ``cache_size``, the
3673
+ threshold is 1.2 * (appended records + ``cache_size``).
3674
+
3675
+ - Cache purge by time: Set ``cache_purge_time_column``, ``cache_purge_interval``
3676
+ and ``cache_retention_time``. The system will clean up data based on the
3677
+ ``cache_purge_time_column``. Each time when a new record arrives, the system
3678
+ obtains the time difference between the new record and the oldest record kept
3679
+ in memory. If the time difference exceeds ``cache_purge_interval``, the system
3680
+ will retain only the data with timestamps within ``cache_retention_time`` of
3681
+ the new data.
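+
+ For example, with ``cache_size`` = 1000, a batch of 500 appended records gives a
+ threshold of 2.5 * 1000 = 2500 records, while a single batch of 2000 records
+ gives a threshold of 1.2 * (2000 + 1000) = 3600 records.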
+
+
+ Parameters
+ ----------
+ asyn_write : bool, optional
+ Whether to enable asynchronous writes. Defaults to True.
+ compress : bool, optional
+ Whether to save a table to disk in compression mode. Defaults to True.
+ cache_size : Optional[int], optional
+ Used to determine the maximum number of records to retain in memory.
+ Defaults to None.
+ retention_minutes : int, optional
+ How long (in minutes) a log file larger than 1 GB is kept after the
+ last update. Defaults to 1440.
+ flush_mode : {'async', 'sync'}, optional
+ Whether to enable synchronous disk flush. Defaults to "async".
+ pre_cache : Optional[int], optional
+ The number of records to be loaded into memory from the persisted stream
+ table on disk when Swordfish is initialized. Defaults to None.
+ cache_purge_time_column : Optional[str], optional
+ The time column in the stream table. Defaults to None.
+ cache_purge_interval : Optional[Duration], optional
+ The interval to trigger cache purge. Defaults to None.
+ cache_retention_time : Optional[Duration], optional
+ The retention time of cached data. Must be smaller than
+ ``cache_purge_interval``. Defaults to None.
+
+ Returns
+ -------
+ Self
+ The StreamTable with persistence enabled.
+
+ Examples
+ --------
+ Enable persistence and set cache purge by size:
+
+ >>> import swordfish as sf
+ >>> table = sf.streaming.table(names=["id", "name", "age", "created_at"],
+ ... types=["INT", "STRING", "INT", "TIMESTAMP"], size=0, capacity=10)
+ >>> table.share("my_table")
+ >>> table.enable_persistence(
+ ... asyn_write=True,
+ ... compress=False,
+ ... cache_size=1024,
+ ... retention_minutes=720,
+ ... flush_mode="sync",
+ ... pre_cache=100,
+ ... )
+
+ Enable persistence and set cache purge by time:
+
+ >>> table.enable_persistence(
+ ... asyn_write=True,
+ ... compress=False,
+ ... retention_minutes=720,
+ ... flush_mode="sync",
+ ... pre_cache=100,
+ ... cache_purge_time_column="created_at",
+ ... cache_purge_interval=sf.data.Duration("2H"),
+ ... cache_retention_time=sf.data.Duration("10m"),
+ ... )
+ """
+ ...
+
+ @overload
+ def enable_cache_purge(
+ self, *,
+ cache_size: Optional[int] = None,
+ ) -> Self:
+ ...
+
+ @overload
+ def enable_cache_purge(
+ self, *,
+ cache_purge_time_column: Optional[str] = None,
+ cache_purge_interval: Optional[Duration] = None,
+ cache_retention_time: Optional[Duration] = None,
+ ) -> Self:
+ ...
+
+ def enable_cache_purge(
+ self, *,
+ cache_size: Optional[int] = None,
+ cache_purge_time_column: Optional[str] = None,
+ cache_purge_interval: Optional[Duration] = None,
+ cache_retention_time: Optional[Duration] = None,
+ ) -> Self:
+ """
+ Enables cache purge for a non-persisted stream table.
+
+ To prevent excessive memory usage, you can clear cached data using either of the
+ following methods:
+
+ - Cache purge by size: Set ``cache_size`` to specify a threshold for the number
+ of records retained. Older records exceeding the threshold will be removed.
+ The threshold is determined as follows:
+
+   - If the number of records appended in one batch does not exceed
+ ``cache_size``, the threshold is 2.5 * ``cache_size``.
+
+   - If the number of records appended in one batch exceeds ``cache_size``, the
+ threshold is 1.2 * (appended records + ``cache_size``).
+
+ - Cache purge by time: Set ``cache_purge_time_column``, ``cache_purge_interval``
+ and ``cache_retention_time``. The system will clean up data based on the
+ ``cache_purge_time_column``. Each time a new record arrives, the system
+ obtains the time difference between the new record and the oldest record kept
+ in memory. If the time difference exceeds ``cache_purge_interval``, the system
+ will retain only the data with timestamps within ``cache_retention_time`` of
+ the new data.
+
+ .. note::
+ If a record has not been enqueued for publishing, it will not be removed.
+
+ Parameters
+ ----------
+ cache_size : Optional[int], optional
+ Used to determine the maximum number of records to retain in memory.
+ Defaults to None.
+ cache_purge_time_column : Optional[str], optional
+ The time column in the stream table. Defaults to None.
+ cache_purge_interval : Optional[Duration], optional
+ The interval to trigger cache purge. Defaults to None.
+ cache_retention_time : Optional[Duration], optional
+ The retention time of cached data. Must be smaller than
+ ``cache_purge_interval``. Defaults to None.
+
+ Returns
+ -------
+ Self
+ The StreamTable with cache purge enabled.
+
+ Examples
+ --------
+ Cache purge by size:
+
+ >>> table.enable_cache_purge(cache_size=1024)
+
+ Cache purge by time:
+
+ >>> table.enable_cache_purge(
+ ... cache_purge_time_column="created_at",
+ ... cache_purge_interval=sf.data.Duration("2H"),
+ ... cache_retention_time=sf.data.Duration("10m")
+ ... )
+ """
+ ...
+
+ def disable_persistence(self) -> Self:
+ """
+ Disables the table's persistence to disk. Any future update of the table will not
+ be persisted to disk.
+
+ Returns
+ -------
+ Self
+ Return the current table instance.
+
+ Examples
+ --------
+ >>> table.disable_persistence()
+ """
+ ...
+
+ def clear_persistence(self) -> Self:
+ """
+ Stops the table's persistence to disk, and deletes the contents of the table on
+ disk while the table schema remains.
+
+ Returns
+ -------
+ Self
+ Return the current table instance.
+
+ Examples
+ --------
+ >>> table.clear_persistence()
+ """
+ ...
+
+ def set_timestamp_column(self, name: str) -> Self:
+ """
+ Sets the timestamp column in the table.
+
+ Parameters
+ ----------
+ name : str
+ The name of the column to be set as the timestamp column.
+
+ Returns
+ -------
+ Self
+ Return the current table instance.
+
+ Examples
+ --------
+ >>> table.set_timestamp_column("column_name")
+ """
+ ...
+
+ def set_filter_column(self, name: str) -> Self:
+ """
+ Sets the filter column in the table.
+
+ Parameters
+ ----------
+ name : str
+ The name of the column to be set as the filter column.
+
+ Returns
+ -------
+ Self
+ Return the current table instance.
+
+ Examples
+ --------
+ >>> table.set_filter_column("column_name")
+ """
+ ...
+
+ @property
+ def is_persisted(self) -> bool:
+ """
+ Checks whether the table has been persisted.
+
+ Returns
+ -------
+ bool
+ True if the table is persisted, False otherwise.
+ """
+ ...
+
+ @property
+ def is_cache_purge(self) -> bool:
+ """
+ Checks whether cache purging is enabled for the table.
+
+ Returns
+ -------
+ bool
+ True if cache purging is enabled, False otherwise.
+ """
+ ...
+
+ @property
+ def persistence_meta(self) -> PersistenceMetaInfo:
+ """
+ Retrieves metadata information related to the table's persistence.
+
+ Returns
+ -------
+ PersistenceMetaInfo
+ The PersistenceMetaInfo for the table, which includes details about
+ persistence settings.
+ """
+ ...
+
+ @property
+ def info(self) -> StreamTableInfo:
+ """
+ Retrieves information about the stream table.
+
+ Returns
+ -------
+ StreamTableInfo
+ The StreamTableInfo for the table.
+ """
+ ...
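+
+ # A minimal inspection sketch (assumes a table persisted as in the
+ # enable_persistence example above; only the properties defined in this
+ # stub are used):
+ #
+ # >>> meta = table.persistence_meta
+ # >>> meta.size_on_disk, meta.size_in_memory, meta.asyn_write
+ # >>> table.info.total_rows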
+
+ @property
+ def timestamp_column(self) -> str:
+ """
+ Gets the name of the timestamp column used in the table.
+
+ Returns
+ -------
+ str
+ The name of the timestamp column.
+ """
+ ...
+
+ @property
+ def filter_column(self) -> str:
+ """
+ Gets the name of the filter column used in the table for filtering data.
+
+ Returns
+ -------
+ str
+ The name of the filter column.
+ """
+ ...
+
+ def subscribe(
+ self, action_name, handler, *,
+ offset: int = -1, msg_as_table: bool = False, batch_size: int = 0,
+ throttle: float = 1, hash: int = -1, reconnect: bool = False, filter=None,
+ persist_offset: bool = False, time_trigger: bool = False,
+ handler_need_msg_id: bool = False,
+ ) -> "SubscriptionHelper":
+ """
+ Subscribes to a stream table on a local or remote server. A handler can also
+ be specified to process the subscribed data.
+
+ Submit and return the subscription topic, which is a combination of the alias
+ of the node where the stream table is located, the stream table name, and the
+ subscription task name (``action_name``), separated by "_". If the subscription
+ topic already exists, an exception is thrown.
+
+ - If ``batch_size`` is specified, ``handler`` will be triggered if either the
+ number of unprocessed messages reaches ``batch_size`` or the time elapsed
+ since the handler was last triggered reaches ``throttle`` seconds.
+
+ - If the subscribed table is overwritten, to keep the subscription we need to
+ cancel the subscription with ``Topic.unsubscribe`` and then subscribe to the
+ new table.
+
+ Here is how to set the socket buffer size in Linux:
+
+ - In the Linux terminal, run the following commands:
+
+ .. code-block:: shell
+
+ sudo sysctl -w net.core.rmem_default=1048576
+ sudo sysctl -w net.core.rmem_max=1048576
+ sudo sysctl -w net.core.wmem_default=1048576
+ sudo sysctl -w net.core.wmem_max=1048576
+
+ - Alternatively, add or modify the values of *net.core.rmem_default*,
+ *net.core.rmem_max*, *net.core.wmem_default* and *net.core.wmem_max* to
+ 1048576 in the */etc/sysctl.conf* file, and then run ``sudo sysctl -p``.
+
+ Parameters
+ ----------
+ action_name
+ A string indicating the subscription task name. It starts with a letter and
+ can contain letters, digits, and underscores.
+ handler
+ A unary/binary function or a table, which is used to process the subscribed
+ data.
+
+ - If ``handler`` is a unary function, the only parameter of the function is
+ the subscribed data, which can be a Table or an AnyVector of the
+ subscribed table columns.
+
+ - ``handler`` must be specified as a binary function when
+ ``handler_need_msg_id`` = True. The parameters of the function are
+ msg_body and msg_id. For details, see ``handler_need_msg_id``.
+
+ - If ``handler`` is a table, the subscribed data will be inserted into it
+ directly. It can be a streaming engine, a shared table (including stream
+ table, in-memory table, keyed table, indexed table), or a DFS table.
+
+ offset : int, optional
+ The position of the first message where the subscription begins. Defaults to
+ -1.
+ msg_as_table : bool, optional
+ Indicates whether the subscribed data is ingested into ``handler`` as a Table
+ or as an AnyVector. Defaults to False.
+ batch_size : int, optional
+ The number of unprocessed messages to trigger the ``handler``. Defaults to 0.
+ throttle : float, optional
+ The maximum waiting seconds before the handler processes the incoming
+ messages if the ``batch_size`` condition has not been reached. Defaults to 1.
+ hash : int, optional
+ A hash value indicating which subscription executor will process the incoming
+ messages for this subscription. Defaults to -1.
+ reconnect : bool, optional
+ Specifies whether to automatically attempt to resume the subscription if
+ interrupted. Defaults to False.
+ filter : optional
+ The filter condition(s). Defaults to None.
+ persist_offset : bool, optional
+ Indicates whether to persist the offset of the last processed message in the
+ current subscription. Defaults to False.
+ time_trigger : bool, optional
+ If set to True, ``handler`` will be triggered at the intervals specified by
+ ``throttle`` even if no new messages arrive. Defaults to False.
+ handler_need_msg_id : bool, optional
+ Determines the required parameters for the ``handler``. If True, the
+ ``handler`` must accept both msg_body (messages to be ingested) and msg_id
+ (ID of the last ingested message). If False, the ``handler`` only requires
+ msg_body. Defaults to False.
+
+ Returns
+ -------
+ SubscriptionHelper
+ An instance of SubscriptionHelper that allows further configuration and
+ submission of the subscription. This object enables setting up the optional
+ parameters.
+
+ Examples
+ --------
+ >>> def my_handler(message):
+ ... print(f"Received message: {message}")
+ ...
+ >>> subscription = table.subscribe(
+ ... action_name="action_name",
+ ... handler=my_handler,
+ ... offset=0,
+ ... batch_size=10,
+ ... reconnect=True,
+ ... persist_offset=True
+ ... )
+ """
+ ...
+
+
+ def convert_stream_table(*args) -> StreamTable: ...
+ def create_stream_table(*args) -> StreamTable: ...
+
+
+ class SubscriptionHelper:
+ """
+ A helper class for managing the stream subscription, allowing further
+ configuration and submission of the subscription.
+ """
+ def offset(self, val: int = -1) -> Self:
+ """
+ Sets the position of the first message where the subscription begins.
+
+ Parameters
+ ----------
+ val : int, optional
+ The offset position. Defaults to -1.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ A message is a row of the stream table. The offset is relative to the
+ first row of the stream table when it is created. If `val` is
+ unspecified or -1, the subscription starts with the next new message.
+ If `val` is -2, the system retrieves the persisted offset on disk and
+ starts the subscription from there. If some rows were cleared from
+ memory due to the cache size limit, they are still considered in
+ determining where the subscription starts.
+ """
+ ...
+
+ def msg_as_table(self, val: bool = False) -> Self:
+ """
+ Sets whether the subscribed data is ingested into ``handler`` as a table
+ or as an AnyVector.
+
+ Parameters
+ ----------
+ val : bool, optional
+ Whether to ingest the subscribed data into the handler as a table.
+ Defaults to False.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ If `val` is True, the subscribed data is ingested into the handler as a
+ table, allowing it to be processed with SQL statements. The default
+ value is False, meaning the subscribed data is ingested as an AnyVector
+ of columns.
+ """
+ ...
+
+ def batch_size(self, val: int = 0) -> Self:
+ """
+ Sets the number of unprocessed messages required to trigger the
+ ``handler``.
+
+ Parameters
+ ----------
+ val : int, optional
+ The batch size threshold. Defaults to 0.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ If ``val`` is positive, the handler does not process messages until the
+ number of unprocessed messages reaches ``val``. If ``val`` is
+ unspecified or non-positive, the handler processes incoming messages as
+ soon as they arrive.
+ """
+ ...
+
+ def throttle(self, val: float = 1) -> Self:
+ """
+ Sets the maximum waiting time before the ``handler`` processes incoming
+ messages if the `batch_size` condition has not been met.
+
+ Parameters
+ ----------
+ val : float, optional
+ The maximum waiting time in seconds. Defaults to 1.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ This value is in seconds. This parameter has no effect if ``batch_size``
+ is not specified. To set ``val`` to less than 1 second, the
+ ``subThrottle`` configuration must be modified.
+ """
+ ...
+
+ def hash(self, val: int = -1) -> Self:
+ """
+ Sets the hash value determining the subscription executor.
+
+ Parameters
+ ----------
+ val : int, optional
+ The hash value for assigning an executor. Defaults to -1.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ This non-negative integer specifies which subscription executor will
+ process the incoming messages. If `val` is unspecified, the system
+ automatically assigns an executor. To synchronize messages from multiple
+ subscriptions, set the same hash value for all of them to ensure they
+ are processed by the same executor.
+ """
+ ...
+
+ def reconnect(self, val: bool = False) -> Self:
+ """
+ Sets whether the subscription can be automatically resumed if
+ interrupted.
+
+ Parameters
+ ----------
+ val : bool, optional
+ Whether to enable automatic resubscription. Defaults to False.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ If `val` is True, the subscription attempts to resume and retrieve all
+ streaming data since the interruption. Behavior depends on the
+ interruption type:
+
+ - If the network is disconnected but both nodes remain running,
+ reconnection occurs automatically when the network is restored.
+
+ - If the publisher node crashes, the subscriber retries resubscribing
+ after the publisher restarts:
+
+   - If the publisher adopts data persistence mode, resubscription
+ succeeds only after persisted data has been loaded and the
+ publisher reaches the row of data where the subscription was
+ interrupted.
+
+   - If the publisher does not adopt data persistence, resubscription
+ fails.
+
+ - If the subscriber node crashes, automatic resubscription is not
+ possible and the subscription must be submitted again.
+
+ """
+ ...
+
4256
+ def filter(self, val=None) -> Self:
4257
+ """
4258
+ Sets the filter condition(s) for the subscription.
4259
+
4260
+ Parameters
4261
+ ----------
4262
+ val : optional
4263
+ The filter condition(s) for the subscription. Defaults to None.
4264
+
4265
+ Returns
4266
+ -------
4267
+ Self
4268
+ The instance itself.
4269
+
4270
+
4271
+ .. note::
4272
+ Must be used with the ``set_filter_column`` function. The filter can be
4273
+ used in the following ways:
4274
+
4275
+ - Value filtering: A Vector specifying allowed values.
4276
+
4277
+ - Range filtering: A Pair defining an inclusive lower bound and an
4278
+ exclusive upper bound.
4279
+
4280
+ - Hash filtering: An AnyVector where:
4281
+
4282
+ - The first element is the number of buckets.
4283
+
4284
+ - The second element is either a scalar specifying the bucket index
4285
+ (starting from 0) or a Pair specifying an index range (inclusive
4286
+ lower bound, exclusive upper bound).
4287
+
4288
+ - Custom function filtering: A FunctionDef or a str (indicating function
4289
+ name or lambda expression). The subscribed data is passed into the
4290
+ function as a table, and the function result is sent to the
4291
+ subscriber.
4292
+
4293
+ `filter` does not support Boolean types.
4294
+ """
4295
+ ...
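The four filter forms, sketched below under the assumption that plain Python containers are accepted where the note says Vector, Pair, and AnyVector (the binding may instead require its own container types), and that ``set_filter_column`` has already been applied to the publishing table. ``myFilterFunc`` is a hypothetical function name.

```python
# Hedged sketches of each filter form from the note above.
sub.filter(["AAPL", "MSFT"])  # value filtering: keep only these symbols
sub.filter((100.0, 200.0))    # range filtering: lower bound inclusive,
                              # upper bound exclusive
sub.filter([8, 2])            # hash filtering: 8 buckets, bucket index 2
sub.filter([8, (0, 4)])       # hash filtering: buckets 0 through 3
sub.filter("myFilterFunc")    # custom filtering: function applied to the
                              # subscribed data as a table
```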
+
+ def persist_offset(self, val: bool = False) -> Self:
+ """
+ Sets whether to persist the offset of the last processed message.
+
+ Parameters
+ ----------
+ val : bool, optional
+ Whether to persist the last processed message offset. Defaults to
+ False.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ The persisted offset is useful for resubscription and can be retrieved
+ using ``Topic.processed_offset``.
+
+ To resubscribe from the persisted offset, set `persist_offset` to True
+ and `remove_offset` in `unsubscribe` to False.
+ """
+ ...
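A sketch of the resubscription flow the note describes: persist the offset, unsubscribe without removing it, and read it back later. ``sub`` is an assumed builder as before.

```python
# Hedged sketch: persist the processed offset across an unsubscribe.
topic = sub.persist_offset(True).submit()
# ... messages are processed for a while ...
topic.unsubscribe(remove_offset=False)  # keep the persisted offset
resume_from = topic.processed_offset    # offset of the last processed message
```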
+
+ def time_trigger(self, val: bool = False) -> Self:
+ """
+ Sets whether the handler is triggered at intervals even if no new
+ messages arrive.
+
+ Parameters
+ ----------
+ val : bool, optional
+ Whether to trigger the handler at fixed intervals. Defaults to False.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ If `val` is True, the handler triggers at the intervals specified by
+ `throttle`, even when no new messages are received.
+ """
+ ...
+
+ def handler_need_msg_id(self, val: bool = False) -> Self:
+ """
+ Sets whether the ``handler`` requires message IDs.
+
+ Parameters
+ ----------
+ val : bool, optional
+ Whether the handler requires message IDs. Defaults to False.
+
+ Returns
+ -------
+ Self
+ The instance itself.
+
+
+ .. note::
+ If `val` is True, the handler must accept two parameters:
+
+ - `msg_body`: The messages ingested into the streaming engine.
+
+ - `msg_id`: The ID of the last ingested message.
+
+ If `val` is False, the handler must accept only one parameter:
+ `msg_body`.
+ """
+ ...
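The two handler shapes this flag selects between, as plain-function sketches:

```python
# handler_need_msg_id(False) -- the default: one parameter.
def handler(msg_body):
    print("rows in batch:", len(msg_body))

# handler_need_msg_id(True): two parameters; msg_id identifies the last
# ingested message and can be recorded for checkpointing.
def handler_with_id(msg_body, msg_id):
    print("rows in batch:", len(msg_body), "last msg id:", msg_id)
```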
+
+ def submit(self) -> "Topic":
+ """
+ Submits the current state of the subscription.
+
+ Returns
+ -------
+ Topic
+ The topic representing the submitted subscription.
+ """
+ ...
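Putting the builder methods together, a hedged end-to-end sketch (again assuming ``sub`` comes from an earlier, unshown subscribe call):

```python
topic = (
    sub
    .batch_size(500)       # trigger on 500 buffered messages
    .throttle(1.0)         # or after 1 second
    .reconnect(True)       # resume automatically after interruptions
    .persist_offset(True)  # remember the last processed offset
    .submit()              # returns a Topic handle for the subscription
)
print(topic)  # Topic implements __str__
```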
+
+
+ class TopicInfo:
+ node_alias: str
+ """The alias of the node that hosts the topic."""
+ table_name: str
+ """The name of the table for the topic."""
+ action_name: str
+ """The action associated with the topic."""
+
+
+ class SubscriptionStat:
+ worker_id: int
+ """Worker ID. An empty column means the subscriber node has not received data."""
+ type: Literal["tcp", "udp"]
+ """The subscription method, which can be tcp (TCP) or udp (UDP multicast)."""
+ queue_depth_limit: int
+ """The maximum depth (number of records) of the message queue allowed on the
+ subscriber node."""
+ queue_depth: int
+ """Current depth (number of records) of the message queue on the subscriber node."""
+ processed_msg_count: int
+ """The number of messages that have been processed."""
+ last_msg_id: int
+ """The last message ID."""
+ failed_msg_count: int
+ """The number of messages that failed to be processed."""
+ last_failed_msg_id: int
+ """The last failed message ID."""
+ last_failed_timestamp: Timestamp
+ """The timestamp of the latest failed message."""
+ last_err_msg: str
+ """The last error information on the failed message."""
+ msg_as_table: bool
+ """Indicates how the subscribed data is ingested into the handler. True means the
+ data is ingested as a table, and False means it is ingested as an AnyVector."""
+ batch_size: int
+ """The number of messages batch-processed by the handler."""
+ throttle: float
+ """The waiting time (in seconds) for the handler to process the messages if the
+ ``batch_size`` condition has not been reached since the last processing."""
+ hash: int
+ """Indicates which subscription executor processes the incoming messages."""
+ filter: str
+ """The filtering column of a stream table."""
+ persist_offset: bool
+ """Indicates whether to persist the offset of the last processed message."""
+ time_trigger: bool
+ """True means that the handler is triggered at the intervals specified by
+ ``throttle`` even if no new messages arrive."""
+ handler_need_msg_id: bool
+ """True means that the handler accepts two parameters: ``msgBody`` and ``msgId``.
+ Defaults to False."""
+
+
+ class Topic:
+ info: TopicInfo
+ """Information related to the topic."""
+
+ stat: SubscriptionStat
+ """Statistics related to the topic's subscription."""
+
+ def __str__(self) -> str: ...
+
+ def unsubscribe(self, remove_offset: bool = True):
+ """
+ Unsubscribe from the topic.
+
+ Parameters
+ ----------
+ remove_offset : bool, optional
+ Whether to remove the current offset. Defaults to True.
+ """
+ ...
+
+ def remove_offset(self) -> None:
+ """
+ Remove the stored offset.
+ """
+ ...
+
+ @property
+ def processed_offset(self) -> int:
+ """
+ Get the processed data offset.
+
+ Returns
+ -------
+ int
+ The offset.
+ """
+ ...
+
+ @classmethod
+ def get_with_detail(cls, table_name: str, action_name: str,
+ node_alias: str = "") -> "Topic":
+ """
+ Retrieve a topic along with detailed information by specifying the table
+ name, action name, and optional node alias.
+
+ Parameters
+ ----------
+ table_name : str
+ Name of the table associated with the topic.
+ action_name : str
+ Action name related to the topic.
+ node_alias : str, optional
+ Alias of the node. Defaults to "".
+
+ Returns
+ -------
+ Topic
+ Topic related to the table and action.
+ """
+ ...
+
+ @classmethod
+ def get_with_topic(cls, topic: str) -> "Topic":
+ """
+ Retrieve a topic based on the topic string.
+
+ Parameters
+ ----------
+ topic : str
+ Topic string identifier.
+
+ Returns
+ -------
+ Topic
+ Topic corresponding to the given topic identifier.
+ """
+ ...
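A sketch of both lookup paths. The table name, action name, and the slash-separated topic string format are illustrative assumptions, not values confirmed by this stub.

```python
# Look up by table and action, then inspect subscription statistics.
t = Topic.get_with_detail("trades", "computeVwap")
print(t.info.node_alias, t.info.table_name, t.info.action_name)
print(t.stat.processed_msg_count, t.stat.queue_depth)

# Look up by a topic string identifier (format assumed for illustration).
t2 = Topic.get_with_topic("node1/trades/computeVwap")
t2.unsubscribe()  # remove_offset defaults to True
```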
+
+
+ #####################################################################
+ # Plugin Module
+ #####################################################################
+
+
+ class MatchingEngineSimulatorStat(EngineStat):
+ """Statistics for the matching engine simulator."""
+
+
+ class MatchingEngineSimulator(StreamEngine):
+ engine_type: EngineType
+ stat: MatchingEngineSimulatorStat
+
+ @classmethod
+ def create(
+ cls,
+ name: str,
+ exchange: Union[plugin_simulator.Exchange, str],
+ data_type: Union[plugin_simulator.MarketDataType, int],
+ order_detail_output: Table,
+ quote_schema: Union[Table, TypeDict] = None,
+ user_order_schema: Union[Table, TypeDict] = None,
+ *,
+ config: plugin_simulator.MatchingEngineSimulatorConfig = None,
+ ) -> plugin_simulator.MatchingEngineSimulatorBuilder:
+ """
+ Create and configure a matching engine simulator instance.
+
+ Parameters
+ ----------
+ name : str
+ Unique engine name.
+ exchange : Union[plugin_simulator.Exchange, str]
+ Market type.
+ data_type : Union[plugin_simulator.MarketDataType, int]
+ Market data type.
+ order_detail_output : Table
+ Output table for trade details.
+ quote_schema : Union[Table, TypeDict], optional
+ Schema of the market quote data.
+ user_order_schema : Union[Table, TypeDict], optional
+ Schema of the user order data.
+ config : plugin_simulator.MatchingEngineSimulatorConfig, optional
+ Additional configuration for the simulator.
+
+ Returns
+ -------
+ plugin_simulator.MatchingEngineSimulatorBuilder
+ An instance of ``MatchingEngineSimulatorBuilder``.
+ """
+ ...
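A hedged creation sketch. The exchange literal, the data-type code, and the pre-built tables (``details``, ``quotes``, ``orders``) are illustrative assumptions; only the parameter names come from the stub above.

```python
builder = MatchingEngineSimulator.create(
    name="sim_demo",
    exchange="XSHE",              # market type (assumed literal)
    data_type=1,                  # market data type (assumed code)
    order_detail_output=details,  # pre-created output Table for trade details
    quote_schema=quotes,          # Table/TypeDict describing market data
    user_order_schema=orders,     # Table/TypeDict describing user orders
)
# Finalizing the engine from the returned builder is not covered by this
# excerpt.
```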
+
+ def reset(self, cancel_order: bool = False):
+ """Clear cached orders and market data.
+
+ Parameters
+ ----------
+ cancel_order : bool, optional
+ Whether to cancel all unfilled user orders.
+
+ - If True, all unfilled user orders are canceled, and the corresponding
+ cancellation information is written to the trade detail table (specified via the order_detail_output parameter in create).
+
+ - Defaults to False, meaning no cancellation is performed.
+ """
+ ...
+
+ def drop(self):
+ """Drop the matching engine.
+ """
+ ...
+
+ def get_open_orders(self, symbol: str = None) -> Table:
+ """Get all unfilled user orders as a table.
+
+ Parameters
+ ----------
+ symbol : str, optional
+ A STRING scalar specifying a stock for which to retrieve all unfilled orders.
+
+ Returns
+ -------
+ Table
+ Returns a table containing the following columns:
+
+ .. list-table::
+ :header-rows: 1
+ :widths: 20 15 65
+
+ * - Name
+ - Type
+ - Description
+ * - orderId
+ - LONG
+ - Order ID
+ * - timestamp
+ - TIMESTAMP
+ - Timestamp
+ * - symbol
+ - STRING
+ - Stock symbol
+ * - price
+ - DOUBLE
+ - Order price
+ * - totalQty
+ - LONG
+ - User order quantity
+ * - openQty
+ - LONG
+ - Remaining quantity of the user order
+ * - direction
+ - INT
+ - 1 = Buy, 2 = Sell
+ * - isMatching
+ - INT
+ - Whether the order has reached matching time
+ * - openVolumeWithBetterPrice
+ - LONG
+ - Total unfilled order volume at prices better than the order price
+ * - openVolumeWithWorsePrice
+ - LONG
+ - Total unfilled order volume at prices worse than the order price
+ * - openVolumeAtOrderPrice
+ - LONG
+ - Total unfilled order volume at the order price
+ * - priorOpenVolumeAtOrderPrice
+ - LONG
+ - Total unfilled order volume at the order price with an earlier timestamp than this order
+ * - depthWithBetterPrice
+ - INT
+ - Number of price levels better than the order price
+ * - updateTime
+ - TIMESTAMP
+ - Latest update time
+
+ .. note::
+ The columns openVolumeWithBetterPrice, openVolumeWithWorsePrice, openVolumeAtOrderPrice,
+ priorOpenVolumeAtOrderPrice, depthWithBetterPrice, and updateTime are included only
+ when output_queue_position=1. (See the config parameter description in the create interface for details.)
+
+ """
+ ...
+
+ @property
+ def symbol_list(self) -> Vector:
+ """Retrieve the list of stock symbols in the engine.
+
+ Returns
+ -------
+ Vector
+ A string vector indicating the list of stock symbols.
+ """
+ ...
+
+ def insert_market(self, msg_body: Constant) -> None:
+ """Insert market data (table or tuple).
+
+ Parameters
+ ----------
+ msg_body : Constant
+ Either a table object or a tuple representing market data. Its format
+ must conform to the target table structure specified when creating the engine
+ (``quote_schema``). In particular, when msg_body is a tuple,
+ if a column in the target table is an array vector, the corresponding element in the tuple
+ must be either an array vector (e.g., arrayVector([2], 23.42 23.43)) or a tuple containing
+ only a regular vector (e.g., [23.42 23.43]).
+ """
+ ...
+
+ def insert_order(self, msg_body: Constant) -> Vector:
+ """Insert user order data. Returns the order IDs.
+
+ Parameters
+ ----------
+ msg_body : Constant
+ Either a table object or a tuple representing user order data. Its format
+ must conform to the target table structure specified when creating the engine
+ (``user_order_schema``). In particular, when msg_body is a tuple,
+ if a column in the target table is an array vector, the corresponding element in the tuple
+ must be either an array vector (e.g., arrayVector([2], 23.42 23.43)) or a tuple containing
+ only a regular vector (e.g., [23.42 23.43]).
+
+ Returns
+ -------
+ Vector
+ A LONG vector indicating the order IDs.
+ """
+ ...
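A sketch of the tuple form described in the two methods above. The column order follows a hypothetical schema; the point of interest is the array-vector column passed as a tuple wrapping one regular vector. ``engine`` and ``ts`` are assumed to be a created simulator and a pre-built timestamp value.

```python
# Hedged sketch: columns follow an assumed quote_schema/user_order_schema.
engine.insert_market((
    "000001",          # symbol
    ts,                # timestamp (pre-built value, assumed)
    [[23.42, 23.43]],  # array-vector column: a tuple of one regular vector
))
order_ids = engine.insert_order((
    "000001",  # symbol
    ts,        # timestamp
    23.42,     # price
    100,       # quantity
    1,         # direction: 1 = buy
))
```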
+
+ def set_limit_price(self, data: Table) -> bool:
+ """Set limit up/down prices.
+
+ Parameters
+ ----------
+ data : Table
+ A table containing three columns: symbol (STRING), upLimitPrice (DOUBLE), and downLimitPrice (DOUBLE).
+
+ Returns
+ -------
+ bool
+ Returns True if the settings are applied successfully.
+ """
+ ...
+
+ def set_prev_close(self, prev_close: Union[Dict[str, float], Dictionary]) -> bool:
+ """Set the previous closing prices for the matching engine simulator.
+
+ Parameters
+ ----------
+ prev_close : Union[Dict[str, float], Dictionary]
+ A dictionary where keys are stock symbols (strings) and values are the
+ corresponding previous closing prices (floats).
+
+ Returns
+ -------
+ bool
+ Returns True if the settings are applied successfully.
+ """
+ ...
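Seeding the simulator before inserting data, sketched below. The plain-dict form of ``set_prev_close`` is taken directly from the stub; the Table passed to ``set_limit_price`` is left abstract since no table constructor appears in this excerpt.

```python
# `limits` is assumed to be a Table with columns symbol (STRING),
# upLimitPrice (DOUBLE), and downLimitPrice (DOUBLE), built elsewhere.
ok = engine.set_limit_price(limits)

# Previous closes can be passed as a plain Python dict per the stub.
ok = ok and engine.set_prev_close({"000001": 23.42, "000002": 11.05})
```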
+
+ def get_snapshot(self, symbol: str = None) -> Table:
+ """Get a market snapshot from the engine.
+
+ Parameters
+ ----------
+ symbol : str, optional
+ A STRING scalar specifying a stock. If not provided, snapshots for all stocks are retrieved.
+
+ Returns
+ -------
+ Table
+ A table with the following schema:
+
+ +----------------------+-----------+----------------------------------------+
+ | Name                 | Type      | Description                            |
+ +======================+===========+========================================+
+ | symbol               | STRING    | Stock symbol                           |
+ +----------------------+-----------+----------------------------------------+
+ | timestamp            | TIMESTAMP | Time                                   |
+ +----------------------+-----------+----------------------------------------+
+ | avgTradePriceAtBid   | DOUBLE    | Average trade price at bid             |
+ +----------------------+-----------+----------------------------------------+
+ | avgTradePriceAtOffer | DOUBLE    | Average trade price at offer           |
+ +----------------------+-----------+----------------------------------------+
+ | totalTradeQtyAtBid   | LONG      | Total traded quantity at bid           |
+ +----------------------+-----------+----------------------------------------+
+ | totalTradeQtyAtOffer | LONG      | Total traded quantity at offer         |
+ +----------------------+-----------+----------------------------------------+
+ | bidPrice             | DOUBLE[]  | List of bid prices                     |
+ +----------------------+-----------+----------------------------------------+
+ | bidQty               | LONG[]    | List of bid quantities                 |
+ +----------------------+-----------+----------------------------------------+
+ | offerPrice           | DOUBLE[]  | List of offer prices                   |
+ +----------------------+-----------+----------------------------------------+
+ | offerQty             | LONG[]    | List of offer quantities               |
+ +----------------------+-----------+----------------------------------------+
+ | lastPrice            | DOUBLE    | Last price                             |
+ +----------------------+-----------+----------------------------------------+
+ | highPrice            | DOUBLE    | Highest price                          |
+ +----------------------+-----------+----------------------------------------+
+ | lowPrice             | DOUBLE    | Lowest price                           |
+ +----------------------+-----------+----------------------------------------+
+ """
+ ...