ingestr 0.12.5__tar.gz → 0.12.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ingestr might be problematic.

Files changed (197)
  1. {ingestr-0.12.5 → ingestr-0.12.6}/Makefile +1 -1
  2. {ingestr-0.12.5 → ingestr-0.12.6}/PKG-INFO +2 -1
  3. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/github.md +3 -3
  4. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/main.py +1 -1
  5. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/asana_source/__init__.py +4 -1
  6. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/facebook_ads/__init__.py +4 -1
  7. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/filesystem/__init__.py +3 -1
  8. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/github/__init__.py +7 -3
  9. ingestr-0.12.6/ingestr/src/google_analytics/__init__.py +106 -0
  10. ingestr-0.12.5/ingestr/src/google_analytics/helpers/data_processing.py → ingestr-0.12.6/ingestr/src/google_analytics/helpers.py +29 -33
  11. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/gorgias/__init__.py +12 -4
  12. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/hubspot/__init__.py +8 -1
  13. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/klaviyo/_init_.py +78 -13
  14. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/shopify/__init__.py +14 -0
  15. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/slack/__init__.py +4 -0
  16. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/sources.py +19 -9
  17. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/stripe_analytics/__init__.py +4 -1
  18. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/tiktok_ads/__init__.py +6 -1
  19. ingestr-0.12.6/ingestr/src/version.py +1 -0
  20. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/__init__.py +6 -0
  21. {ingestr-0.12.5 → ingestr-0.12.6}/requirements-dev.txt +1 -1
  22. {ingestr-0.12.5 → ingestr-0.12.6}/requirements.txt +9 -8
  23. ingestr-0.12.5/ingestr/src/google_analytics/__init__.py +0 -70
  24. ingestr-0.12.5/ingestr/src/google_analytics/helpers/__init__.py +0 -70
  25. ingestr-0.12.5/ingestr/src/version.py +0 -1
  26. {ingestr-0.12.5 → ingestr-0.12.6}/.dockerignore +0 -0
  27. {ingestr-0.12.5 → ingestr-0.12.6}/.githooks/pre-commit-hook.sh +0 -0
  28. {ingestr-0.12.5 → ingestr-0.12.6}/.github/workflows/deploy-docs.yml +0 -0
  29. {ingestr-0.12.5 → ingestr-0.12.6}/.github/workflows/secrets-scan.yml +0 -0
  30. {ingestr-0.12.5 → ingestr-0.12.6}/.github/workflows/tests.yml +0 -0
  31. {ingestr-0.12.5 → ingestr-0.12.6}/.gitignore +0 -0
  32. {ingestr-0.12.5 → ingestr-0.12.6}/.gitleaksignore +0 -0
  33. {ingestr-0.12.5 → ingestr-0.12.6}/.python-version +0 -0
  34. {ingestr-0.12.5 → ingestr-0.12.6}/.vale.ini +0 -0
  35. {ingestr-0.12.5 → ingestr-0.12.6}/Dockerfile +0 -0
  36. {ingestr-0.12.5 → ingestr-0.12.6}/LICENSE.md +0 -0
  37. {ingestr-0.12.5 → ingestr-0.12.6}/README.md +0 -0
  38. {ingestr-0.12.5 → ingestr-0.12.6}/docs/.vitepress/config.mjs +0 -0
  39. {ingestr-0.12.5 → ingestr-0.12.6}/docs/.vitepress/theme/custom.css +0 -0
  40. {ingestr-0.12.5 → ingestr-0.12.6}/docs/.vitepress/theme/index.js +0 -0
  41. {ingestr-0.12.5 → ingestr-0.12.6}/docs/commands/example-uris.md +0 -0
  42. {ingestr-0.12.5 → ingestr-0.12.6}/docs/commands/ingest.md +0 -0
  43. {ingestr-0.12.5 → ingestr-0.12.6}/docs/getting-started/core-concepts.md +0 -0
  44. {ingestr-0.12.5 → ingestr-0.12.6}/docs/getting-started/incremental-loading.md +0 -0
  45. {ingestr-0.12.5 → ingestr-0.12.6}/docs/getting-started/quickstart.md +0 -0
  46. {ingestr-0.12.5 → ingestr-0.12.6}/docs/getting-started/telemetry.md +0 -0
  47. {ingestr-0.12.5 → ingestr-0.12.6}/docs/index.md +0 -0
  48. {ingestr-0.12.5 → ingestr-0.12.6}/docs/media/athena.png +0 -0
  49. {ingestr-0.12.5 → ingestr-0.12.6}/docs/media/github.png +0 -0
  50. {ingestr-0.12.5 → ingestr-0.12.6}/docs/media/googleanalytics.png +0 -0
  51. {ingestr-0.12.5 → ingestr-0.12.6}/docs/media/tiktok.png +0 -0
  52. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/adjust.md +0 -0
  53. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/airtable.md +0 -0
  54. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/appsflyer.md +0 -0
  55. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/asana.md +0 -0
  56. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/athena.md +0 -0
  57. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/bigquery.md +0 -0
  58. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/chess.md +0 -0
  59. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/csv.md +0 -0
  60. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/custom_queries.md +0 -0
  61. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/databricks.md +0 -0
  62. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/duckdb.md +0 -0
  63. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/dynamodb.md +0 -0
  64. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/facebook-ads.md +0 -0
  65. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/google_analytics.md +0 -0
  66. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/gorgias.md +0 -0
  67. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/gsheets.md +0 -0
  68. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/hubspot.md +0 -0
  69. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/kafka.md +0 -0
  70. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/klaviyo.md +0 -0
  71. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/mongodb.md +0 -0
  72. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/mssql.md +0 -0
  73. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/mysql.md +0 -0
  74. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/notion.md +0 -0
  75. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/oracle.md +0 -0
  76. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/postgres.md +0 -0
  77. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/redshift.md +0 -0
  78. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/s3.md +0 -0
  79. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/sap-hana.md +0 -0
  80. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/shopify.md +0 -0
  81. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/slack.md +0 -0
  82. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/snowflake.md +0 -0
  83. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/sqlite.md +0 -0
  84. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/stripe.md +0 -0
  85. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/tiktok-ads.md +0 -0
  86. {ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/zendesk.md +0 -0
  87. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/.gitignore +0 -0
  88. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/adjust/__init__.py +0 -0
  89. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/adjust/adjust_helpers.py +0 -0
  90. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/airtable/__init__.py +0 -0
  91. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/appsflyer/_init_.py +0 -0
  92. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/appsflyer/client.py +0 -0
  93. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/arrow/__init__.py +0 -0
  94. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/asana_source/helpers.py +0 -0
  95. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/asana_source/settings.py +0 -0
  96. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/chess/__init__.py +0 -0
  97. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/chess/helpers.py +0 -0
  98. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/chess/settings.py +0 -0
  99. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/destinations.py +0 -0
  100. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/dynamodb/__init__.py +0 -0
  101. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/facebook_ads/exceptions.py +0 -0
  102. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/facebook_ads/helpers.py +0 -0
  103. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/facebook_ads/settings.py +0 -0
  104. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/factory.py +0 -0
  105. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/filesystem/helpers.py +0 -0
  106. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/filesystem/readers.py +0 -0
  107. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/filters.py +0 -0
  108. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/github/helpers.py +0 -0
  109. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/github/queries.py +0 -0
  110. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/github/settings.py +0 -0
  111. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/google_sheets/README.md +0 -0
  112. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/google_sheets/__init__.py +0 -0
  113. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/google_sheets/helpers/__init__.py +0 -0
  114. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/google_sheets/helpers/api_calls.py +0 -0
  115. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/google_sheets/helpers/data_processing.py +0 -0
  116. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/gorgias/helpers.py +0 -0
  117. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/hubspot/helpers.py +0 -0
  118. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/hubspot/settings.py +0 -0
  119. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/kafka/__init__.py +0 -0
  120. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/kafka/helpers.py +0 -0
  121. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/klaviyo/client.py +0 -0
  122. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/klaviyo/helpers.py +0 -0
  123. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/mongodb/__init__.py +0 -0
  124. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/mongodb/helpers.py +0 -0
  125. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/notion/__init__.py +0 -0
  126. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/notion/helpers/__init__.py +0 -0
  127. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/notion/helpers/client.py +0 -0
  128. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/notion/helpers/database.py +0 -0
  129. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/notion/settings.py +0 -0
  130. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/shopify/exceptions.py +0 -0
  131. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/shopify/helpers.py +0 -0
  132. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/shopify/settings.py +0 -0
  133. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/slack/helpers.py +0 -0
  134. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/slack/settings.py +0 -0
  135. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/sql_database/__init__.py +0 -0
  136. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/sql_database/callbacks.py +0 -0
  137. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/stripe_analytics/helpers.py +0 -0
  138. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/stripe_analytics/settings.py +0 -0
  139. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/table_definition.py +0 -0
  140. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/telemetry/event.py +0 -0
  141. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/testdata/fakebqcredentials.json +0 -0
  142. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/tiktok_ads/tiktok_helpers.py +0 -0
  143. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/time.py +0 -0
  144. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/helpers/__init__.py +0 -0
  145. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/helpers/api_helpers.py +0 -0
  146. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/helpers/credentials.py +0 -0
  147. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/helpers/talk_api.py +0 -0
  148. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/zendesk/settings.py +0 -0
  149. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/.gitignore +0 -0
  150. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/create_replace.csv +0 -0
  151. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/delete_insert_expected.csv +0 -0
  152. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/delete_insert_part1.csv +0 -0
  153. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/delete_insert_part2.csv +0 -0
  154. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/merge_expected.csv +0 -0
  155. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/merge_part1.csv +0 -0
  156. {ingestr-0.12.5 → ingestr-0.12.6}/ingestr/testdata/merge_part2.csv +0 -0
  157. {ingestr-0.12.5 → ingestr-0.12.6}/package-lock.json +0 -0
  158. {ingestr-0.12.5 → ingestr-0.12.6}/package.json +0 -0
  159. {ingestr-0.12.5 → ingestr-0.12.6}/pyproject.toml +0 -0
  160. {ingestr-0.12.5 → ingestr-0.12.6}/resources/demo.gif +0 -0
  161. {ingestr-0.12.5 → ingestr-0.12.6}/resources/demo.tape +0 -0
  162. {ingestr-0.12.5 → ingestr-0.12.6}/resources/ingestr.svg +0 -0
  163. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/AMPM.yml +0 -0
  164. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Acronyms.yml +0 -0
  165. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Colons.yml +0 -0
  166. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Contractions.yml +0 -0
  167. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/DateFormat.yml +0 -0
  168. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Ellipses.yml +0 -0
  169. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/EmDash.yml +0 -0
  170. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Exclamation.yml +0 -0
  171. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/FirstPerson.yml +0 -0
  172. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Gender.yml +0 -0
  173. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/GenderBias.yml +0 -0
  174. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/HeadingPunctuation.yml +0 -0
  175. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Headings.yml +0 -0
  176. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Latin.yml +0 -0
  177. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/LyHyphens.yml +0 -0
  178. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/OptionalPlurals.yml +0 -0
  179. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Ordinal.yml +0 -0
  180. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/OxfordComma.yml +0 -0
  181. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Parens.yml +0 -0
  182. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Passive.yml +0 -0
  183. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Periods.yml +0 -0
  184. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Quotes.yml +0 -0
  185. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Ranges.yml +0 -0
  186. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Semicolons.yml +0 -0
  187. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Slang.yml +0 -0
  188. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Spacing.yml +0 -0
  189. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Spelling.yml +0 -0
  190. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Units.yml +0 -0
  191. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/We.yml +0 -0
  192. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/Will.yml +0 -0
  193. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/WordList.yml +0 -0
  194. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/meta.json +0 -0
  195. {ingestr-0.12.5 → ingestr-0.12.6}/styles/Google/vocab.txt +0 -0
  196. {ingestr-0.12.5 → ingestr-0.12.6}/styles/bruin/Ingestr.yml +0 -0
  197. {ingestr-0.12.5 → ingestr-0.12.6}/styles/config/vocabularies/bruin/accept.txt +0 -0
{ingestr-0.12.5 → ingestr-0.12.6}/Makefile
@@ -24,7 +24,7 @@ test-specific: venv
 	. venv/bin/activate; pytest -rP -vv --tb=short --capture=no -k $(test)
 
 lint-ci:
-	ruff check ingestr --fix && ruff format ingestr
+	ruff format ingestr && ruff check ingestr --fix
 	mypy --config-file pyproject.toml --explicit-package-bases ingestr
 
 lint: venv
{ingestr-0.12.5 → ingestr-0.12.6}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ingestr
-Version: 0.12.5
+Version: 0.12.6
 Summary: ingestr is a command-line application that ingests data from various sources and stores them in any database.
 Project-URL: Homepage, https://github.com/bruin-data/ingestr
 Project-URL: Issues, https://github.com/bruin-data/ingestr/issues
@@ -26,6 +26,7 @@ Requires-Dist: google-api-python-client==2.130.0
 Requires-Dist: google-cloud-bigquery-storage==2.24.0
 Requires-Dist: mysql-connector-python==9.1.0
 Requires-Dist: pendulum==3.0.0
+Requires-Dist: psutil==6.1.1
 Requires-Dist: psycopg2-binary==2.9.10
 Requires-Dist: py-machineid==0.6.0
 Requires-Dist: pyairtable==2.3.3
{ingestr-0.12.5 → ingestr-0.12.6}/docs/supported-sources/github.md
@@ -14,9 +14,9 @@ github://?access_token=<access_token>&owner=<owner>&repo=<repo>
 
 URI parameters:
 
-- `access_token`: Access Token used for authentication with the GitHub API
-- `owner`: Refers to the owner of the repository
-- `repo`: Refers to the name of the repository
+- `access_token` (optional): Access Token used for authentication with the GitHub API
+- `owner` (required): Refers to the owner of the repository
+- `repo` (required): Refers to the name of the repository
 
 
 ## Setting up a GitHub Integration
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/main.py
@@ -444,7 +444,7 @@ def ingest(
 
    progressInstance: Collector = SpinnerCollector()
    if progress == Progress.log:
-        progressInstance = LogCollector(dump_system_stats=False)
+        progressInstance = LogCollector()
 
    is_pipelines_dir_temp = False
    if pipelines_dir is None:
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/asana_source/__init__.py
@@ -150,7 +150,10 @@ def tasks(
     project_array: t.List[TDataItem],
     access_token: str = dlt.secrets.value,
     modified_at: dlt.sources.incremental[str] = dlt.sources.incremental(
-        "modified_at", initial_value=DEFAULT_START_DATE
+        "modified_at",
+        initial_value=DEFAULT_START_DATE,
+        range_end="closed",
+        range_start="closed",
     ),
     fields: Iterable[str] = TASK_FIELDS,
 ) -> Iterable[TDataItem]:
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/facebook_ads/__init__.py
@@ -159,7 +159,10 @@ def facebook_insights_source(
     )
     def facebook_insights(
         date_start: dlt.sources.incremental[str] = dlt.sources.incremental(
-            "date_start", initial_value=initial_load_start_date_str
+            "date_start",
+            initial_value=initial_load_start_date_str,
+            range_end="closed",
+            range_start="closed",
         ),
     ) -> Iterator[TDataItems]:
         start_date = get_start_date(date_start, attribution_window_days_lag)
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/filesystem/__init__.py
@@ -38,7 +38,9 @@ def readers(
     """
     filesystem_resource = filesystem(bucket_url, credentials, file_glob=file_glob)
     filesystem_resource.apply_hints(
-        incremental=dlt.sources.incremental("modification_date")
+        incremental=dlt.sources.incremental("modification_date"),
+        range_end="closed",
+        range_start="closed",
     )
     return (
         filesystem_resource | dlt.transformer(name="read_csv")(_read_csv),
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/github/__init__.py
@@ -14,7 +14,7 @@ from .helpers import get_reactions_data, get_rest_pages, get_stargazers
 def github_reactions(
     owner: str,
     name: str,
-    access_token: str = dlt.secrets.value,
+    access_token: str,
     items_per_page: int = 100,
     max_items: Optional[int] = None,
 ) -> Sequence[DltResource]:
@@ -89,7 +89,11 @@ def github_repo_events(
     @dlt.resource(primary_key="id", table_name=lambda i: i["type"])
     def repo_events(
         last_created_at: dlt.sources.incremental[str] = dlt.sources.incremental(
-            "created_at", initial_value="1970-01-01T00:00:00Z", last_value_func=max
+            "created_at",
+            initial_value="1970-01-01T00:00:00Z",
+            last_value_func=max,
+            range_end="closed",
+            range_start="closed",
         ),
     ) -> Iterator[TDataItems]:
         repos_path = (
@@ -114,7 +118,7 @@ def github_repo_events(
 def github_stargazers(
     owner: str,
     name: str,
-    access_token: str = dlt.secrets.value,
+    access_token: str,
     items_per_page: int = 100,
     max_items: Optional[int] = None,
 ) -> Sequence[DltResource]:
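The change repeated across the asana_source, facebook_ads, filesystem, and github hunks above, and again in the gorgias, hubspot, klaviyo, shopify, and slack hunks below, is the addition of range_start="closed" and range_end="closed" to dlt.sources.incremental, which makes both ends of the incremental window inclusive. The snippet below is a minimal, illustrative sketch rather than code from ingestr: the resource name and its rows are made up, and it assumes a dlt release that accepts these range arguments, as the dependencies pinned by this package evidently do.

import dlt


# Illustrative resource, not part of ingestr: it only demonstrates the
# closed-boundary cursor configuration this release applies to its sources.
@dlt.resource(primary_key="id", write_disposition="merge")
def example_rows(
    updated_at=dlt.sources.incremental(
        "updated_at",
        initial_value="2024-01-01T00:00:00Z",
        range_end="closed",    # the window end boundary is inclusive
        range_start="closed",  # the window start boundary is inclusive
    ),
):
    # A real resource would query an API from updated_at.last_value onwards;
    # static rows keep the sketch self-contained.
    yield {"id": 1, "updated_at": "2024-01-02T00:00:00Z"}
    yield {"id": 2, "updated_at": "2024-01-03T00:00:00Z"}


if __name__ == "__main__":
    pipeline = dlt.pipeline(pipeline_name="closed_range_demo", destination="duckdb")
    pipeline.run(example_rows())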
ingestr-0.12.6/ingestr/src/google_analytics/__init__.py (new file)
@@ -0,0 +1,106 @@
+"""
+Defines all the sources and resources needed for Google Analytics V4
+"""
+
+from typing import Iterator, List, Optional, Union
+
+import dlt
+from dlt.common import pendulum
+from dlt.common.typing import DictStrAny, TDataItem
+from dlt.extract import DltResource
+from dlt.sources.credentials import GcpOAuthCredentials, GcpServiceAccountCredentials
+from google.analytics.data_v1beta import BetaAnalyticsDataClient
+from google.analytics.data_v1beta.types import (
+    Dimension,
+    Metric,
+)
+
+from .helpers import get_report
+
+
+@dlt.source(max_table_nesting=0)
+def google_analytics(
+    datetime_dimension: str,
+    credentials: Union[
+        GcpOAuthCredentials, GcpServiceAccountCredentials
+    ] = dlt.secrets.value,
+    property_id: int = dlt.config.value,
+    queries: List[DictStrAny] = dlt.config.value,
+    start_date: Optional[pendulum.DateTime] = pendulum.datetime(2024, 1, 1),
+    end_date: Optional[pendulum.DateTime] = None,
+    rows_per_page: int = 10000,
+) -> List[DltResource]:
+    try:
+        property_id = int(property_id)
+    except ValueError:
+        raise ValueError(
+            f"{property_id} is an invalid google property id. Please use a numeric id, and not your Measurement ID like G-7F1AE12JLR"
+        )
+    if property_id == 0:
+        raise ValueError(
+            "Google Analytics property id is 0. Did you forget to configure it?"
+        )
+    if not rows_per_page:
+        raise ValueError("Rows per page cannot be 0")
+    # generate access token for credentials if we are using OAuth2.0
+    if isinstance(credentials, GcpOAuthCredentials):
+        credentials.auth("https://www.googleapis.com/auth/analytics.readonly")
+
+    # Build the service object for Google Analytics api.
+    client = BetaAnalyticsDataClient(credentials=credentials.to_native_credentials())
+    if len(queries) > 1:
+        raise ValueError(
+            "Google Analytics supports a single query ingestion at a time, please give only one query"
+        )
+    query = queries[0]
+
+    # always add "date" to dimensions so we are able to track the last day of a report
+    dimensions = query["dimensions"]
+
+    @dlt.resource(
+        name="basic_report",
+        merge_key=datetime_dimension,
+        write_disposition="merge",
+    )
+    def basic_report(
+        incremental=dlt.sources.incremental(
+            datetime_dimension,
+            initial_value=start_date,
+            end_value=end_date,
+            range_end="closed",
+            range_start="closed",
+        ),
+    ) -> Iterator[TDataItem]:
+        start_date = incremental.last_value
+        end_date = incremental.end_value
+        if start_date is None:
+            start_date = pendulum.datetime(2024, 1, 1)
+        if end_date is None:
+            end_date = pendulum.yesterday()
+        yield from get_report(
+            client=client,
+            property_id=property_id,
+            dimension_list=[Dimension(name=dimension) for dimension in dimensions],
+            metric_list=[Metric(name=metric) for metric in query["metrics"]],
+            per_page=rows_per_page,
+            start_date=start_date,
+            end_date=end_date,
+        )
+
+    # res = dlt.resource(
+    #     basic_report, name="basic_report", merge_key=datetime_dimension, write_disposition="merge"
+    # )(
+    #     client=client,
+    #     rows_per_page=rows_per_page,
+    #     property_id=property_id,
+    #     dimensions=dimensions,
+    #     metrics=query["metrics"],
+    #     resource_name=resource_name,
+    #     last_date=dlt.sources.incremental(
+    #         datetime_dimension,
+    #         initial_value=start_date,
+    #         end_value=end_date,
+    #     ),
+    # )
+
+    return [basic_report]
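Within ingestr this new source is driven through the CLI's URI factory, but the signature above can also be exercised directly with dlt. The following is a hypothetical sketch, not code shipped in the package: the property id, query, and pipeline names are placeholders, and GCP credentials are expected to come from dlt secrets, since credentials defaults to dlt.secrets.value.

import dlt

from ingestr.src.google_analytics import google_analytics

# Placeholder values: use a real numeric GA4 property id (not a G-XXXX
# measurement id) and whichever dimensions/metrics you need. Credentials are
# resolved from dlt secrets because the source declares them that way.
source = google_analytics(
    datetime_dimension="date",
    property_id=123456789,
    queries=[{"dimensions": ["date", "country"], "metrics": ["activeUsers"]}],
)

pipeline = dlt.pipeline(
    pipeline_name="ga4_basic_report",
    destination="duckdb",
    dataset_name="ga4",
)
pipeline.run(source)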
ingestr-0.12.5/ingestr/src/google_analytics/helpers/data_processing.py → ingestr-0.12.6/ingestr/src/google_analytics/helpers.py
@@ -57,9 +57,9 @@ def get_report(
     property_id: int,
     dimension_list: List[Dimension],
     metric_list: List[Metric],
-    limit: int,
-    start_date: str,
-    end_date: str,
+    per_page: int,
+    start_date: pendulum.DateTime,
+    end_date: pendulum.DateTime,
 ) -> Iterator[TDataItem]:
     """
     Gets all the possible pages of reports with the given query parameters.
@@ -79,30 +79,36 @@ def get_report(
         Generator of all rows of data in the report.
     """
 
-    request = RunReportRequest(
-        property=f"properties/{property_id}",
-        dimensions=dimension_list,
-        metrics=metric_list,
-        limit=limit,
-        date_ranges=[DateRange(start_date=start_date, end_date=end_date)],
+    print(
+        "fetching for daterange", start_date.to_date_string(), end_date.to_date_string()
     )
-    # process request
-    response = client.run_report(request)
-    processed_response_generator = process_report(response=response)
-    yield from processed_response_generator
 
+    offset = 0
+    while True:
+        request = RunReportRequest(
+            property=f"properties/{property_id}",
+            dimensions=dimension_list,
+            metrics=metric_list,
+            limit=per_page,
+            offset=offset,
+            date_ranges=[
+                DateRange(
+                    start_date=start_date.to_date_string(),
+                    end_date=end_date.to_date_string(),
+                )
+            ],
+        )
+        # process request
+        response = client.run_report(request)
+        processed_response_generator = process_report(response=response)
+        # import pdb; pdb.set_trace()
+        yield from processed_response_generator
+        offset += per_page
+        if len(response.rows) < per_page or offset > 1000000:
+            break
 
-def process_report(response: RunReportResponse) -> Iterator[TDataItems]:
-    """
-    Receives a single page for a report response, processes it, and returns a generator for every row of data in the report page.
-
-    Args:
-        response: The API response for a single page of the report.
-
-    Yields:
-        Generator of dictionaries for every row of the report page.
-    """
 
+def process_report(response: RunReportResponse) -> Iterator[TDataItems]:
     metrics_headers = [header.name for header in response.metric_headers]
     dimensions_headers = [header.name for header in response.dimension_headers]
 
@@ -156,16 +162,6 @@ def process_metric_value(metric_type: MetricType, value: str) -> Union[str, int,
 
 
 def _resolve_dimension_value(dimension_name: str, dimension_value: str) -> Any:
-    """
-    Helper function that receives a dimension's name and value and converts it to a datetime object if needed.
-
-    Args:
-        dimension_name: Name of the dimension.
-        dimension_value: Value of the dimension.
-
-    Returns:
-        The value of the dimension with the correct data type.
-    """
     if dimension_name == "date":
         return pendulum.from_format(dimension_value, "YYYYMMDD", tz="UTC")
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/gorgias/__init__.py
@@ -116,7 +116,9 @@ def gorgias_source(
         },
     )
     def customers(
-        updated_datetime=dlt.sources.incremental("updated_datetime", start_date_obj),
+        updated_datetime=dlt.sources.incremental(
+            "updated_datetime", start_date_obj, range_end="closed", range_start="closed"
+        ),
     ) -> Iterable[TDataItem]:
         """
         The resource for customers on your Gorgias domain, supports incremental loading and pagination.
@@ -290,7 +292,9 @@ def gorgias_source(
         },
     )
     def tickets(
-        updated_datetime=dlt.sources.incremental("updated_datetime", start_date_obj),
+        updated_datetime=dlt.sources.incremental(
+            "updated_datetime", start_date_obj, range_end="closed", range_start="closed"
+        ),
     ) -> Iterable[TDataItem]:
         """
         The resource for tickets on your Gorgias domain, supports incremental loading and pagination.
@@ -481,7 +485,9 @@ def gorgias_source(
         },
     )
     def ticket_messages(
-        updated_datetime=dlt.sources.incremental("updated_datetime", start_date_obj),
+        updated_datetime=dlt.sources.incremental(
+            "updated_datetime", start_date_obj, range_end="closed", range_start="closed"
+        ),
     ) -> Iterable[TDataItem]:
         """
         The resource for ticket messages on your Gorgias domain, supports incremental loading and pagination.
@@ -566,7 +572,9 @@ def gorgias_source(
         },
     )
     def satisfaction_surveys(
-        updated_datetime=dlt.sources.incremental("updated_datetime", start_date_obj),
+        updated_datetime=dlt.sources.incremental(
+            "updated_datetime", start_date_obj, range_end="closed", range_start="closed"
+        ),
     ) -> Iterable[TDataItem]:
         """
         The resource for satisfaction surveys on your Gorgias domain, supports incremental loading and pagination.
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/hubspot/__init__.py
@@ -278,4 +278,11 @@ def hubspot_events_for_objects(
         write_disposition="append",
         selected=True,
         table_name=lambda e: name + "_" + str(e["eventType"]),
-    )(dlt.sources.incremental("occurredAt", initial_value=start_date.isoformat()))
+    )(
+        dlt.sources.incremental(
+            "occurredAt",
+            initial_value=start_date.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        )
+    )
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/klaviyo/_init_.py
@@ -33,7 +33,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="append", primary_key="id", parallelized=True)
     def events(
-        datetime=dlt.sources.incremental("datetime", start_date_obj.isoformat()),
+        datetime=dlt.sources.incremental(
+            "datetime",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(datetime.start_value), pendulum.now()
@@ -44,7 +49,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id", parallelized=True)
     def profiles(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated.start_value), pendulum.now()
@@ -55,7 +65,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id", parallelized=True)
     def campaigns(
-        updated_at=dlt.sources.incremental("updated_at", start_date_obj.isoformat()),
+        updated_at=dlt.sources.incremental(
+            "updated_at",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated_at.start_value), pendulum.now()
@@ -69,7 +84,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id")
     def metrics(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_metrics(create_client(), updated.start_value)
 
@@ -83,7 +103,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id", name="catalog-variants")
     def catalog_variants(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_catalog_variant(create_client(), updated.start_value)
 
@@ -91,19 +116,34 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
         write_disposition="merge", primary_key="id", name="catalog-categories"
     )
     def catalog_categories(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_catalog_categories(create_client(), updated.start_value)
 
     @dlt.resource(write_disposition="merge", primary_key="id", name="catalog-items")
     def catalog_items(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_catalog_item(create_client(), updated.start_value)
 
     @dlt.resource(write_disposition="append", primary_key="id", parallelized=True)
     def forms(
-        updated_at=dlt.sources.incremental("updated_at", start_date_obj.isoformat()),
+        updated_at=dlt.sources.incremental(
+            "updated_at",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated_at.start_value), pendulum.now()
@@ -114,13 +154,23 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id")
     def lists(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_lists(create_client(), updated.start_value)
 
     @dlt.resource(write_disposition="append", primary_key="id", parallelized=True)
     def images(
-        updated_at=dlt.sources.incremental("updated_at", start_date_obj.isoformat()),
+        updated_at=dlt.sources.incremental(
+            "updated_at",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated_at.start_value), pendulum.now()
@@ -130,13 +180,23 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="merge", primary_key="id")
     def segments(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         yield from client.fetch_segments(create_client(), updated.start_value)
 
     @dlt.resource(write_disposition="append", primary_key="id", parallelized=True)
     def flows(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated.start_value), pendulum.now()
@@ -146,7 +206,12 @@ def klaviyo_source(api_key: str, start_date: TAnyDateTime) -> Iterable[DltResour
 
     @dlt.resource(write_disposition="append", primary_key="id", parallelized=True)
     def templates(
-        updated=dlt.sources.incremental("updated", start_date_obj.isoformat()),
+        updated=dlt.sources.incremental(
+            "updated",
+            start_date_obj.isoformat(),
+            range_end="closed",
+            range_start="closed",
+        ),
     ) -> Iterable[TDataItem]:
         intervals = split_date_range(
             pendulum.parse(updated.start_value), pendulum.now()
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/shopify/__init__.py
@@ -158,6 +158,8 @@ def shopify_source(
             initial_value=start_date_obj,
             end_value=end_date_obj,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
         created_at_min: pendulum.DateTime = created_at_min_obj,
         items_per_page: int = items_per_page,
@@ -606,6 +608,8 @@ def shopify_source(
             initial_value=start_date_obj,
             end_value=end_date_obj,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
         created_at_min: pendulum.DateTime = created_at_min_obj,
         items_per_page: int = items_per_page,
@@ -640,6 +644,8 @@ def shopify_source(
             initial_value=start_date_obj,
             end_value=end_date_obj,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
         created_at_min: pendulum.DateTime = created_at_min_obj,
         items_per_page: int = items_per_page,
@@ -671,6 +677,8 @@ def shopify_source(
             "created_at",
             initial_value=start_date_obj,
             end_value=end_date_obj,
+            range_end="closed",
+            range_start="closed",
         ),
         items_per_page: int = items_per_page,
     ) -> Iterable[TDataItem]:
@@ -689,6 +697,8 @@ def shopify_source(
             "updated_at",
             initial_value=start_date_obj,
             end_value=end_date_obj,
+            range_end="closed",
+            range_start="closed",
         ),
         items_per_page: int = items_per_page,
     ) -> Iterable[TDataItem]:
@@ -730,6 +740,8 @@ def shopify_source(
             initial_value=start_date_obj,
             end_value=end_date_obj,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
         items_per_page: int = items_per_page,
     ) -> Iterable[TDataItem]:
@@ -1807,6 +1819,8 @@ query discountNodes($after: String, $query: String, $first: Int) {
             "updatedAt",
             initial_value=start_date_obj,
             end_value=end_date_obj,
+            range_end="closed",
+            range_start="closed",
         ),
         items_per_page: int = items_per_page,
     ) -> Iterable[TDataItem]:
{ingestr-0.12.5 → ingestr-0.12.6}/ingestr/src/slack/__init__.py
@@ -175,6 +175,8 @@ def slack_source(
             initial_value=start_dt,
             end_value=end_dt,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
     ) -> Iterable[TDataItem]:
         """
@@ -198,6 +200,8 @@ def slack_source(
             initial_value=start_dt,
             end_value=end_dt,
             allow_external_schedulers=True,
+            range_end="closed",
+            range_start="closed",
         ),
     ) -> Iterable[TDataItem]:
         """Yield all messages for a given channel as a DLT resource. Keep blocks column without normalization.