datajunction-server 0.0.19.dev0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,128 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ env/
12
+ build/
13
+ develop-eggs/
14
+ dist/
15
+ downloads/
16
+ eggs/
17
+ .eggs/
18
+ lib/
19
+ lib64/
20
+ parts/
21
+ sdist/
22
+ var/
23
+ wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+
28
+ # PyInstaller
29
+ # Usually these files are written by a python script from a template
30
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
31
+ *.manifest
32
+ *.spec
33
+
34
+ # Installer logs
35
+ pip-log.txt
36
+ pip-delete-this-directory.txt
37
+
38
+ # Unit test / coverage reports
39
+ htmlcov/
40
+ .tox/
41
+ .coverage
42
+ .coverage.*
43
+ .cache
44
+ nosetests.xml
45
+ coverage.xml
46
+ *.cover
47
+ .hypothesis/
48
+
49
+ # Translations
50
+ *.mo
51
+ *.pot
52
+
53
+ # Django stuff:
54
+ *.log
55
+ local_settings.py
56
+
57
+ # Flask stuff:
58
+ instance/
59
+ .webassets-cache
60
+
61
+ # Scrapy stuff:
62
+ .scrapy
63
+
64
+ # Sphinx documentation
65
+ docs/api/*
66
+ docs/_rst/*
67
+ docs/_build/*
68
+
69
+ # PyBuilder
70
+ target/
71
+
72
+ # Jupyter Notebook
73
+ .ipynb_checkpoints
74
+
75
+ # pyenv
76
+ .python-version
77
+
78
+ # celery beat schedule file
79
+ celerybeat-schedule
80
+
81
+ # SageMath parsed files
82
+ *.sage.py
83
+
84
+ # virtualenv
85
+ .venv
86
+ venv/
87
+ ENV/
88
+
89
+ # Spyder project settings
90
+ .spyderproject
91
+ .spyproject
92
+
93
+ # Rope project settings
94
+ .ropeproject
95
+
96
+ # mkdocs documentation
97
+ /site
98
+
99
+ # mypy
100
+ .mypy_cache/
101
+
102
+ *.sqlite
103
+ dj.db
104
+ djqs.db
105
+ *.swp
106
+
107
+ # VS Code
108
+ .vscode
109
+
110
+ # Idea
111
+ .idea
112
+
113
+ # MacOS
114
+ .DS_Store
115
+ .pdm-python
116
+ .pdm.toml
117
+
118
+ # oauth credentials
119
+ client_secret*
120
+
121
+ # random notebooks
122
+ Untitled*
123
+ .notebook_executed
124
+
125
+ # postgres
126
+ postgres_metadata
127
+ postgres_superset
128
+ node_modules
@@ -0,0 +1,99 @@
1
+ Metadata-Version: 2.3
2
+ Name: datajunction-server
3
+ Version: 0.0.19.dev0
4
+ Summary: DataJunction server library for running a DataJunction server
5
+ Project-URL: Homepage, https://datajunction.io
6
+ Project-URL: Repository, https://github.com/DataJunction/dj
7
+ License: MIT
8
+ Keywords: metrics,semanticlayer
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Requires-Python: <4.0,>=3.10
15
+ Requires-Dist: alembic>=1.10.3
16
+ Requires-Dist: antlr4-python3-runtime==4.13.1
17
+ Requires-Dist: bcrypt>=4.0.1
18
+ Requires-Dist: cachelib<1.0.0,>=0.10.2
19
+ Requires-Dist: cachetools>=5.3.1
20
+ Requires-Dist: celery<6.0.0,>=5.2.7
21
+ Requires-Dist: cryptography<=45.0.0
22
+ Requires-Dist: fastapi-cache2>=0.2.1
23
+ Requires-Dist: fastapi>=0.110.0
24
+ Requires-Dist: google-api-python-client>=2.95.0
25
+ Requires-Dist: google-auth-httplib2>=0.1.0
26
+ Requires-Dist: google-auth-oauthlib>=1.0.0
27
+ Requires-Dist: jinja2>=3.1.4
28
+ Requires-Dist: line-profiler>=4.0.3
29
+ Requires-Dist: msgpack<2.0.0,>=1.0.5
30
+ Requires-Dist: nbformat>=5.10.4
31
+ Requires-Dist: opentelemetry-instrumentation-fastapi==0.38b0
32
+ Requires-Dist: passlib>=1.7.4
33
+ Requires-Dist: psycopg>=3.1.16
34
+ Requires-Dist: pydantic-settings>=2.10.1
35
+ Requires-Dist: pydantic<2.11,>=2.0
36
+ Requires-Dist: python-dotenv<1.0.0,>=0.19.0
37
+ Requires-Dist: python-jose>=3.3.0
38
+ Requires-Dist: python-multipart>=0.0.20
39
+ Requires-Dist: redis<5.0.0,>=4.5.4
40
+ Requires-Dist: requests<=2.29.0,>=2.28.2
41
+ Requires-Dist: rich<14.0.0,>=13.3.3
42
+ Requires-Dist: sqlalchemy-utils<1.0.0,>=0.40.0
43
+ Requires-Dist: sqlalchemy>=2
44
+ Requires-Dist: sse-starlette<=2.0.0,>=1.6.0
45
+ Requires-Dist: strawberry-graphql>=0.235.0
46
+ Requires-Dist: types-cachetools>=5.3.0.6
47
+ Requires-Dist: yarl<2.0.0,>=1.8.2
48
+ Provides-Extra: all
49
+ Requires-Dist: snowflake-connector-python>=3.0.0; extra == 'all'
50
+ Provides-Extra: snowflake
51
+ Requires-Dist: snowflake-connector-python>=3.0.0; extra == 'snowflake'
52
+ Provides-Extra: transpilation
53
+ Requires-Dist: sqlglot>=18.0.1; extra == 'transpilation'
54
+ Provides-Extra: uvicorn
55
+ Requires-Dist: uvicorn[standard]>=0.21.1; extra == 'uvicorn'
56
+ Description-Content-Type: text/markdown
57
+
58
+ # DataJunction
59
+
60
+ ## Introduction
61
+
62
+ DataJunction (DJ) is an open source **metrics platform** that allows users to define
63
+ metrics and the data models behind them using **SQL**, serving as a **semantic layer**
64
+ on top of a physical data warehouse. By leveraging this metadata, DJ can enable efficient
65
+ retrieval of metrics data across different dimensions and filters.
66
+
67
+ ![DataJunction](docs/static/datajunction-illustration.png)
68
+
69
+ ## Getting Started
70
+
71
+ To launch the DataJunction UI with a minimal DataJunction backend, start the default docker compose environment.
72
+
73
+ ```sh
74
+ docker compose up
75
+ ```
76
+
77
+ If you'd like to launch the full suite of services, including open-source implementations of the DataJunction query service and
78
+ DataJunction reflection service specifications, use the `demo` profile.
79
+
80
+ ```sh
81
+ docker compose --profile demo up
82
+ ```
83
+
84
+ DJUI: [http://localhost:3000/](http://localhost:3000/)
85
+ DJ Swagger Docs: [http://localhost:8000/docs](http://localhost:8000/docs)
86
+ DJQS Swagger Docs: [http://localhost:8001/docs](http://localhost:8001/docs)
87
+ Jaeger UI: [http://localhost:16686/search](http://localhost:16686/search)
88
+ Jupyter Lab: [http://localhost:8888](http://localhost:8888)
89
+
90
+ ## How does this work?
91
+
92
+ At its core, DJ stores metrics and their upstream abstractions as interconnected nodes.
93
+ These nodes can represent a variety of elements, such as tables in a data warehouse
94
+ (**source nodes**), SQL transformation logic (**transform nodes**), dimensions logic,
95
+ metrics logic, and even selections of metrics, dimensions, and filters (**cube nodes**).
96
+
97
+ By parsing each node's SQL into an AST and through dimensional links between columns,
98
+ DJ can infer a graph of dependencies between nodes, which allows it to find the
99
+ appropriate join paths between nodes to generate queries for metrics.
@@ -0,0 +1,42 @@
1
+ # DataJunction
2
+
3
+ ## Introduction
4
+
5
+ DataJunction (DJ) is an open source **metrics platform** that allows users to define
6
+ metrics and the data models behind them using **SQL**, serving as a **semantic layer**
7
+ on top of a physical data warehouse. By leveraging this metadata, DJ can enable efficient
8
+ retrieval of metrics data across different dimensions and filters.
9
+
10
+ ![DataJunction](docs/static/datajunction-illustration.png)
11
+
12
+ ## Getting Started
13
+
14
+ To launch the DataJunction UI with a minimal DataJunction backend, start the default docker compose environment.
15
+
16
+ ```sh
17
+ docker compose up
18
+ ```
19
+
20
+ If you'd like to launch the full suite of services, including open-source implementations of the DataJunction query service and
21
+ DataJunction reflection service specifications, use the `demo` profile.
22
+
23
+ ```sh
24
+ docker compose --profile demo up
25
+ ```
26
+
27
+ DJUI: [http://localhost:3000/](http://localhost:3000/)
28
+ DJ Swagger Docs: [http://localhost:8000/docs](http://localhost:8000/docs)
29
+ DJQS Swagger Docs: [http://localhost:8001/docs](http://localhost:8001/docs)
30
+ Jaeger UI: [http://localhost:16686/search](http://localhost:16686/search)
31
+ Jupyter Lab: [http://localhost:8888](http://localhost:8888)
32
+
33
+ ## How does this work?
34
+
35
+ At its core, DJ stores metrics and their upstream abstractions as interconnected nodes.
36
+ These nodes can represent a variety of elements, such as tables in a data warehouse
37
+ (**source nodes**), SQL transformation logic (**transform nodes**), dimensions logic,
38
+ metrics logic, and even selections of metrics, dimensions, and filters (**cube nodes**).
39
+
40
+ By parsing each node's SQL into an AST and through dimensional links between columns,
41
+ DJ can infer a graph of dependencies between nodes, which allows it to find the
42
+ appropriate join paths between nodes to generate queries for metrics.
@@ -0,0 +1,550 @@
1
+ enum Aggregability {
2
+ FULL
3
+ LIMITED
4
+ NONE
5
+ }
6
+
7
+ type AggregationRule {
8
+ type: Aggregability!
9
+ level: [String!]
10
+ }
11
+
12
+ type Attribute {
13
+ attributeType: AttributeTypeName!
14
+ }
15
+
16
+ type AttributeTypeName {
17
+ namespace: String!
18
+ name: String!
19
+ }
20
+
21
+ type AvailabilityState {
22
+ catalog: String!
23
+ schema_: String
24
+ table: String!
25
+ validThroughTs: Int!
26
+ url: String
27
+ categoricalPartitions: [String!]
28
+ temporalPartitions: [String!]
29
+ minTemporalPartition: [String!]
30
+ maxTemporalPartition: [String!]
31
+ partitions: [PartitionAvailability!]
32
+ }
33
+
34
+ type Backfill {
35
+ spec: [PartitionBackfill!]
36
+ urls: [String!]
37
+ }
38
+
39
+ type Catalog {
40
+ name: String!
41
+ engines: [Engine!]
42
+ }
43
+
44
+ type Column {
45
+ name: String!
46
+ displayName: String
47
+ type: String!
48
+ attributes: [Attribute!]!
49
+ dimension: NodeName
50
+ partition: Partition
51
+ }
52
+
53
+ type ColumnMetadata {
54
+ name: String!
55
+ type: String!
56
+ semanticEntity: SemanticEntity
57
+ semanticType: SemanticType
58
+ }
59
+
60
+ input CubeDefinition {
61
+ cube: String = null
62
+ metrics: [String!] = null
63
+ dimensions: [String!] = null
64
+ filters: [String!] = null
65
+ orderby: [String!] = null
66
+ }
67
+
68
+ type DJError {
69
+ code: ErrorCode!
70
+ message: String
71
+ context: String
72
+ }
73
+
74
+ """Date with time (isoformat)"""
75
+ scalar DateTime
76
+
77
+ type DecomposedMetric {
78
+ components: [MetricComponent!]!
79
+ derivedQuery: String!
80
+ derivedExpression: String!
81
+ }
82
+
83
+ enum Dialect {
84
+ SPARK
85
+ TRINO
86
+ DRUID
87
+ POSTGRES
88
+ CLICKHOUSE
89
+ DUCKDB
90
+ REDSHIFT
91
+ SNOWFLAKE
92
+ SQLITE
93
+ }
94
+
95
+ type DialectInfo {
96
+ name: String!
97
+ pluginClass: String!
98
+ }
99
+
100
+ type DimensionAttribute {
101
+ name: String!
102
+ attribute: String
103
+ role: String
104
+ properties: [String!]!
105
+ type: String!
106
+ DimensionNode: Node
107
+
108
+ """The dimension node this attribute belongs to"""
109
+ dimensionNode: Node!
110
+ }
111
+
112
+ type DimensionLink {
113
+ dimension: NodeName!
114
+ joinType: JoinType!
115
+ joinSql: String!
116
+ joinCardinality: JoinCardinality
117
+ role: String
118
+ foreignKeys: JSON!
119
+ }
120
+
121
+ type Engine {
122
+ name: String!
123
+ version: String!
124
+ uri: String
125
+ dialect: Dialect
126
+ }
127
+
128
+ input EngineSettings {
129
+ """The name of the engine used by the generated SQL"""
130
+ name: String!
131
+
132
+ """The version of the engine used by the generated SQL"""
133
+ version: String
134
+ }
135
+
136
+ enum ErrorCode {
137
+ UNKNOWN_ERROR
138
+ NOT_IMPLEMENTED_ERROR
139
+ ALREADY_EXISTS
140
+ INVALID_FILTER_PATTERN
141
+ INVALID_COLUMN_IN_FILTER
142
+ INVALID_VALUE_IN_FILTER
143
+ INVALID_ARGUMENTS_TO_FUNCTION
144
+ INVALID_SQL_QUERY
145
+ MISSING_COLUMNS
146
+ UNKNOWN_NODE
147
+ NODE_TYPE_ERROR
148
+ INVALID_DIMENSION_JOIN
149
+ INVALID_COLUMN
150
+ QUERY_SERVICE_ERROR
151
+ INVALID_ORDER_BY
152
+ COMPOUND_BUILD_EXCEPTION
153
+ MISSING_PARENT
154
+ TYPE_INFERENCE
155
+ MISSING_PARAMETER
156
+ AUTHENTICATION_ERROR
157
+ OAUTH_ERROR
158
+ INVALID_LOGIN_CREDENTIALS
159
+ USER_NOT_FOUND
160
+ UNAUTHORIZED_ACCESS
161
+ INCOMPLETE_AUTHORIZATION
162
+ INVALID_PARENT
163
+ INVALID_DIMENSION
164
+ INVALID_METRIC
165
+ INVALID_DIMENSION_LINK
166
+ INVALID_CUBE
167
+ TAG_NOT_FOUND
168
+ CATALOG_NOT_FOUND
169
+ INVALID_NAMESPACE
170
+ }
171
+
172
+ type GeneratedSQL {
173
+ node: Node!
174
+ sql: String!
175
+ columns: [ColumnMetadata!]!
176
+ dialect: Dialect!
177
+ upstreamTables: [String!]!
178
+ errors: [DJError!]!
179
+ }
180
+
181
+ """
182
+ The `JSON` scalar type represents JSON values as specified by [ECMA-404](https://ecma-international.org/wp-content/uploads/ECMA-404_2nd_edition_december_2017.pdf).
183
+ """
184
+ scalar JSON @specifiedBy(url: "https://ecma-international.org/wp-content/uploads/ECMA-404_2nd_edition_december_2017.pdf")
185
+
186
+ enum JoinCardinality {
187
+ ONE_TO_ONE
188
+ ONE_TO_MANY
189
+ MANY_TO_ONE
190
+ MANY_TO_MANY
191
+ }
192
+
193
+ enum JoinType {
194
+ LEFT
195
+ RIGHT
196
+ INNER
197
+ FULL
198
+ CROSS
199
+ }
200
+
201
+ type MaterializationConfig {
202
+ name: String
203
+ config: JSON!
204
+ schedule: String!
205
+ job: String
206
+ backfills: [Backfill!]!
207
+ strategy: String
208
+ }
209
+
210
+ type MaterializationPlan {
211
+ units: [MaterializationUnit!]!
212
+ }
213
+
214
+ type MaterializationUnit {
215
+ upstream: VersionedRef!
216
+ grainDimensions: [VersionedRef!]!
217
+ measures: [MetricComponent!]!
218
+ filterRefs: [VersionedRef!]!
219
+ filters: [String!]!
220
+ }
221
+
222
+ type MetricComponent {
223
+ name: String!
224
+ expression: String!
225
+ aggregation: String
226
+ rule: AggregationRule!
227
+ }
228
+
229
+ enum MetricDirection {
230
+ HIGHER_IS_BETTER
231
+ LOWER_IS_BETTER
232
+ NEUTRAL
233
+ }
234
+
235
+ type MetricMetadata {
236
+ direction: MetricDirection
237
+ unit: Unit
238
+ significantDigits: Int
239
+ minDecimalExponent: Int
240
+ maxDecimalExponent: Int
241
+ expression: String!
242
+ incompatibleDruidFunctions: [String!]!
243
+ }
244
+
245
+ type Node {
246
+ id: Union!
247
+ name: String!
248
+ type: NodeType!
249
+ currentVersion: String!
250
+ createdAt: DateTime!
251
+ deactivatedAt: DateTime
252
+ current: NodeRevision!
253
+ revisions: [NodeRevision!]!
254
+ tags: [TagBase!]!
255
+ createdBy: User!
256
+ owners: [User!]!
257
+ editedBy: [String!]!
258
+ }
259
+
260
+ type NodeConnection {
261
+ pageInfo: PageInfo!
262
+ edges: [NodeEdge!]!
263
+ }
264
+
265
+ type NodeEdge {
266
+ node: Node!
267
+ }
268
+
269
+ enum NodeMode {
270
+ PUBLISHED
271
+ DRAFT
272
+ }
273
+
274
+ type NodeName {
275
+ name: String!
276
+ }
277
+
278
+ type NodeNameVersion {
279
+ name: String!
280
+ currentVersion: String!
281
+ }
282
+
283
+ type NodeRevision {
284
+ id: Union!
285
+ type: NodeType!
286
+ name: String!
287
+ displayName: String
288
+ version: String!
289
+ status: NodeStatus!
290
+ mode: NodeMode
291
+ description: String!
292
+ updatedAt: DateTime!
293
+ customMetadata: JSON
294
+ query: String
295
+ parents: [NodeNameVersion!]!
296
+ availability: AvailabilityState
297
+ materializations: [MaterializationConfig!]
298
+ schema_: String
299
+ table: String
300
+ requiredDimensions: [Column!]
301
+ catalog: Catalog
302
+ columns(attributes: [String!] = null): [Column!]!
303
+ dimensionLinks: [DimensionLink!]!
304
+ primaryKey: [String!]!
305
+ metricMetadata: MetricMetadata
306
+ extractedMeasures: DecomposedMetric
307
+ cubeMetrics: [NodeRevision!]!
308
+ cubeDimensions: [DimensionAttribute!]!
309
+ }
310
+
311
+ enum NodeSortField {
312
+ NAME
313
+ DISPLAY_NAME
314
+ TYPE
315
+ STATUS
316
+ MODE
317
+ CREATED_AT
318
+ UPDATED_AT
319
+ }
320
+
321
+ enum NodeStatus {
322
+ VALID
323
+ INVALID
324
+ }
325
+
326
+ enum NodeType {
327
+ SOURCE
328
+ TRANSFORM
329
+ METRIC
330
+ DIMENSION
331
+ CUBE
332
+ }
333
+
334
+ enum OAuthProvider {
335
+ BASIC
336
+ GITHUB
337
+ GOOGLE
338
+ }
339
+
340
+ type PageInfo {
341
+ """When paginating forwards, are there more nodes?"""
342
+ hasNextPage: Boolean!
343
+
344
+   """When paginating backwards, are there more nodes?"""
345
+ hasPrevPage: Boolean!
346
+
347
+ """When paginating back, the cursor to continue."""
348
+ startCursor: String
349
+
350
+ """When paginating forwards, the cursor to continue."""
351
+ endCursor: String
352
+ }
353
+
354
+ type Partition {
355
+ type_: PartitionType!
356
+ format: String
357
+ granularity: String
358
+ expression: String
359
+ }
360
+
361
+ type PartitionAvailability {
362
+ minTemporalPartition: [String!]
363
+ maxTemporalPartition: [String!]
364
+ value: [String]!
365
+ validThroughTs: Int
366
+ }
367
+
368
+ type PartitionBackfill {
369
+ columnName: String!
370
+ values: [String!]
371
+ range: [String!]
372
+ }
373
+
374
+ enum PartitionType {
375
+ TEMPORAL
376
+ CATEGORICAL
377
+ }
378
+
379
+ type Query {
380
+ """List available catalogs"""
381
+ listCatalogs: [Catalog!]!
382
+
383
+ """List all available engines"""
384
+ listEngines: [Engine!]!
385
+
386
+ """List all supported SQL dialects"""
387
+ listDialects: [DialectInfo!]!
388
+
389
+ """Find nodes based on the search parameters."""
390
+ findNodes(
391
+ """A fragment of a node name to search for"""
392
+ fragment: String = null
393
+
394
+ """Filter to nodes with these names"""
395
+ names: [String!] = null
396
+
397
+ """Filter nodes to these node types"""
398
+ nodeTypes: [NodeType!] = null
399
+
400
+ """Filter to nodes tagged with these tags"""
401
+ tags: [String!] = null
402
+
403
+ """Limit nodes"""
404
+ limit: Int = 1000
405
+ orderBy: NodeSortField! = CREATED_AT
406
+ ascending: Boolean! = false
407
+ ): [Node!]!
408
+
409
+ """Find nodes based on the search parameters with pagination"""
410
+ findNodesPaginated(
411
+ """A fragment of a node name to search for"""
412
+ fragment: String = null
413
+
414
+ """Filter to nodes with these names"""
415
+ names: [String!] = null
416
+
417
+ """Filter nodes to these node types"""
418
+ nodeTypes: [NodeType!] = null
419
+
420
+ """Filter to nodes tagged with these tags"""
421
+ tags: [String!] = null
422
+
423
+ """Filter to nodes edited by this user"""
424
+ editedBy: String = null
425
+
426
+ """Filter to nodes in this namespace"""
427
+ namespace: String = null
428
+
429
+ """Filter to nodes with this mode (published or draft)"""
430
+ mode: NodeMode = null
431
+ after: String = null
432
+ before: String = null
433
+
434
+ """Limit nodes"""
435
+ limit: Int = 100
436
+ orderBy: NodeSortField! = CREATED_AT
437
+ ascending: Boolean! = false
438
+ ): NodeConnection!
439
+
440
+ """Get common dimensions for one or more nodes"""
441
+ commonDimensions(
442
+ """A list of nodes to find common dimensions for"""
443
+ nodes: [String!] = null
444
+ ): [DimensionAttribute!]!
445
+
446
+ """Find downstream nodes (optionally, of a given type) from a given node."""
447
+ downstreamNodes(
448
+ """The node name to find downstream nodes for."""
449
+ nodeName: String!
450
+
451
+ """The node type to filter the downstream nodes on."""
452
+ nodeType: NodeType = null
453
+
454
+ """Whether to include deactivated nodes in the result."""
455
+ includeDeactivated: Boolean! = false
456
+ ): [Node!]!
457
+
458
+ """Get measures SQL for a list of metrics, dimensions, and filters."""
459
+ measuresSql(
460
+ cube: CubeDefinition!
461
+ engine: EngineSettings = null
462
+
463
+ """Whether to use materialized nodes where applicable"""
464
+ useMaterialized: Boolean! = true
465
+
466
+ """
467
+ Whether to include all columns or only those necessary for the metrics and dimensions in the cube
468
+ """
469
+ includeAllColumns: Boolean! = false
470
+
471
+ """
472
+ Whether to pre-aggregate to the requested dimensions so that subsequent queries are more efficient.
473
+ """
474
+ preaggregate: Boolean! = false
475
+
476
+ """Query parameters to include in the SQL"""
477
+ queryParameters: JSON = null
478
+ ): [GeneratedSQL!]!
479
+
480
+ """
481
+ Get materialization plan for a list of metrics, dimensions, and filters.
482
+ """
483
+ materializationPlan(cube: CubeDefinition!): MaterializationPlan!
484
+
485
+ """Find DJ node tags based on the search parameters."""
486
+ listTags(tagNames: [String!] = null, tagTypes: [String!] = null): [Tag!]!
487
+
488
+ """List all DJ node tag types"""
489
+ listTagTypes: [String!]!
490
+ }
491
+
492
+ type SemanticEntity {
493
+ name: String!
494
+
495
+ """The node this semantic entity is sourced from"""
496
+ node: String!
497
+
498
+ """The column on the node this semantic entity is sourced from"""
499
+ column: String!
500
+ }
501
+
502
+ enum SemanticType {
503
+ MEASURE
504
+ METRIC
505
+ DIMENSION
506
+ TIMESTAMP
507
+ }
508
+
509
+ type Tag {
510
+ name: String!
511
+ tagType: String!
512
+ description: String
513
+ displayName: String
514
+ tagMetadata: JSON
515
+
516
+ """The nodes with this tag"""
517
+ nodes: [Node!]!
518
+ }
519
+
520
+ type TagBase {
521
+ name: String!
522
+ tagType: String!
523
+ description: String
524
+ displayName: String
525
+ tagMetadata: JSON
526
+ }
527
+
528
+ """BigInt field"""
529
+ scalar Union
530
+
531
+ type Unit {
532
+ name: String!
533
+ label: String
534
+ category: String
535
+ abbreviation: String
536
+ }
537
+
538
+ type User {
539
+ id: Union!
540
+ username: String!
541
+ email: String
542
+ name: String
543
+ oauthProvider: OAuthProvider!
544
+ isAdmin: Boolean!
545
+ }
546
+
547
+ type VersionedRef {
548
+ name: String!
549
+ version: String!
550
+ }
@@ -0,0 +1,166 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [tool.hatch.build]
6
+ include = [
7
+ "datajunction_server/api/graphql/schema.graphql",
8
+ ]
9
+
10
+ [tool.hatch.build.targets.wheel]
11
+ packages = ["datajunction_server"]
12
+ include = ["alembic/**", "alembic.ini"]
13
+
14
+ [tool.hatch.metadata]
15
+ allow-direct-references = true
16
+
17
+ [tool.pdm]
18
+ [tool.pdm.build]
19
+ includes = ["dj"]
20
+
21
+ [[tool.pdm.autoexport]]
22
+ filename = "requirements/docker.txt"
23
+ groups = ["default", "uvicorn", "transpilation"]
24
+ without-hashes = true
25
+
26
+ [[tool.pdm.autoexport]]
27
+ filename = "requirements/test.txt"
28
+ groups = ["default", "test"]
29
+ without-hashes = true
30
+
31
+ [project]
32
+ name = "datajunction-server"
33
+ dynamic = ["version"]
34
+ description = "DataJunction server library for running a DataJunction server"
35
+ repository = "https://github.com/DataJunction/dj"
36
+ keywords = ["semanticlayer", "metrics"]
37
+ dependencies = [
38
+ # Database and ORM
39
+ "alembic>=1.10.3",
40
+ "SQLAlchemy-Utils<1.0.0,>=0.40.0",
41
+ "sqlalchemy>=2",
42
+ "psycopg>=3.1.16",
43
+
44
+ # FastAPI and web framework
45
+ "fastapi>=0.110.0",
46
+ "sse-starlette>=1.6.0,<=2.0.0",
47
+
48
+ # Authentication and security
49
+ "passlib>=1.7.4",
50
+ "python-jose>=3.3.0",
51
+ "cryptography<=45.0.0",
52
+ "bcrypt>=4.0.1",
53
+
54
+ # Google APIs
55
+ "google-api-python-client>=2.95.0",
56
+ "google-auth-httplib2>=0.1.0",
57
+ "google-auth-oauthlib>=1.0.0",
58
+
59
+ # Instrumentation and monitoring
60
+ "opentelemetry-instrumentation-fastapi==0.38b0",
61
+ "line-profiler>=4.0.3",
62
+
63
+ # Task queues
64
+ "celery<6.0.0,>=5.2.7",
65
+
66
+ # Data serialization and caching
67
+ "fastapi-cache2>=0.2.1",
68
+ "cachetools>=5.3.1",
69
+ "types-cachetools>=5.3.0.6",
70
+ "cachelib<1.0.0,>=0.10.2",
71
+ "msgpack<2.0.0,>=1.0.5",
72
+ "redis<5.0.0,>=4.5.4",
73
+
74
+ # Query parsing
75
+ "antlr4-python3-runtime==4.13.1",
76
+
77
+ # Utilities and formatting
78
+ "requests<=2.29.0,>=2.28.2",
79
+ "python-dotenv<1.0.0,>=0.19.0",
80
+ "rich<14.0.0,>=13.3.3",
81
+ "yarl<2.0.0,>=1.8.2",
82
+ "jinja2>=3.1.4",
83
+ "python-multipart>=0.0.20",
84
+ "nbformat>=5.10.4",
85
+
86
+ # GraphQL
87
+ "strawberry-graphql>=0.235.0",
88
+
89
+ # Data validation
90
+ "pydantic<2.11,>=2.0",
91
+ "pydantic-settings>=2.10.1",
92
+ ]
93
+ requires-python = ">=3.10,<4.0"
94
+ readme = "README.md"
95
+ license = {text = "MIT"}
96
+ classifiers = [
97
+ "Programming Language :: Python :: 3.10",
98
+ "Programming Language :: Python :: 3.11",
99
+ "Programming Language :: Python :: 3.12",
100
+ "License :: OSI Approved :: MIT License",
101
+ "Operating System :: OS Independent"
102
+ ]
103
+
104
+ [project.optional-dependencies]
105
+ uvicorn = [
106
+ "uvicorn[standard]>=0.21.1",
107
+ ]
108
+ transpilation = [
109
+ "sqlglot>=18.0.1",
110
+ ]
111
+
112
+ # Query client dependencies for different data warehouse vendors
113
+ snowflake = [
114
+ "snowflake-connector-python>=3.0.0",
115
+ ]
116
+
117
+ all = [
118
+ "snowflake-connector-python>=3.0.0",
119
+ ]
120
+
121
+ [project.entry-points.'superset.db_engine_specs']
122
+ dj = 'datajunction_server.superset:DJEngineSpec'
123
+
124
+ [tool.hatch.version]
125
+ path = "datajunction_server/__about__.py"
126
+
127
+ [project.urls]
128
+ Homepage = "https://datajunction.io"
129
+ Repository = "https://github.com/DataJunction/dj"
130
+
131
+ [tool.coverage.run]
132
+ source = ['datajunction_server/']
133
+ concurrency = ["thread,greenlet"]
134
+
135
+ [tool.pytest.ini_options]
136
+ asyncio_mode = "auto"
137
+ testpaths = [
138
+ "tests",
139
+ ]
140
+
141
+ [tool.pdm.dev-dependencies]
142
+ test = [
143
+ "codespell>=2.2.4",
144
+ "freezegun>=1.2.2",
145
+ "pre-commit>=3.2.2",
146
+ "pylint>=3.0.3",
147
+ "pytest-asyncio<=0.22",
148
+ "pytest-cov>=4.0.0",
149
+ "pytest-integration>=0.2.2",
150
+ "pytest-mock>=3.10.0",
151
+ "pytest>=7.3.0",
152
+ "requests-mock>=1.10.0",
153
+ "typing-extensions>=4.5.0",
154
+ "pytest-xdist>=3.3.0",
155
+ "duckdb==0.8.1",
156
+ "testcontainers>=3.7.1",
157
+ "httpx>=0.27.0",
158
+ "greenlet>=3.0.3",
159
+ "gevent>=24.2.1",
160
+ "sqlparse<1.0.0,>=0.4.3",
161
+ "asgi-lifespan>=2",
162
+ ]
163
+
164
+ [tool.ruff.lint]
165
+ ignore = ["F811"]
166
+ exclude = ["datajunction_server/sql/parsing/backends/antlr4.py"]