confluent-pyflink 1.0.dev1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. confluent_pyflink-1.0.dev1/MANIFEST.in +22 -0
  2. confluent_pyflink-1.0.dev1/PKG-INFO +96 -0
  3. confluent_pyflink-1.0.dev1/README.md +76 -0
  4. confluent_pyflink-1.0.dev1/confluent-flink-jars/version.py +24 -0
  5. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/PKG-INFO +96 -0
  6. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/SOURCES.txt +44 -0
  7. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/dependency_links.txt +1 -0
  8. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/not-zip-safe +1 -0
  9. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/requires.txt +4 -0
  10. confluent_pyflink-1.0.dev1/confluent_pyflink.egg-info/top_level.txt +1 -0
  11. confluent_pyflink-1.0.dev1/pyproject.toml +75 -0
  12. confluent_pyflink-1.0.dev1/setup.cfg +4 -0
  13. confluent_pyflink-1.0.dev1/src/confluent_pyflink/__init__.py +0 -0
  14. confluent_pyflink-1.0.dev1/src/confluent_pyflink/context/__init__.py +4 -0
  15. confluent_pyflink-1.0.dev1/src/confluent_pyflink/context/confluent_context.py +92 -0
  16. confluent_pyflink-1.0.dev1/src/confluent_pyflink/context/context.py +120 -0
  17. confluent_pyflink-1.0.dev1/src/confluent_pyflink/context/java_gateway_manager.py +417 -0
  18. confluent_pyflink-1.0.dev1/src/confluent_pyflink/context/local_context.py +58 -0
  19. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/__init__.py +76 -0
  20. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/catalog.py +47 -0
  21. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/changelog_mode.py +20 -0
  22. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/data_view.py +21 -0
  23. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/descriptors.py +21 -0
  24. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/environment_settings.py +216 -0
  25. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/explain_detail.py +21 -0
  26. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/expression.py +39 -0
  27. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/expressions.py +127 -0
  28. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/functions.py +63 -0
  29. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/module.py +21 -0
  30. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/result_kind.py +20 -0
  31. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/schema.py +21 -0
  32. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/serializers.py +21 -0
  33. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/sql_dialect.py +21 -0
  34. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/statement_set.py +20 -0
  35. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table.py +33 -0
  36. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table_config.py +21 -0
  37. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table_descriptor.py +192 -0
  38. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table_environment.py +215 -0
  39. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table_result.py +21 -0
  40. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/table_schema.py +21 -0
  41. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/types.py +28 -0
  42. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/udf.py +41 -0
  43. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/utils/__init__.py +18 -0
  44. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/utils/confluent_settings.py +230 -0
  45. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/utils/confluent_tools.py +206 -0
  46. confluent_pyflink-1.0.dev1/src/confluent_pyflink/table/window.py +21 -0
@@ -0,0 +1,22 @@
1
+ ################################################################################
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+ ################################################################################
18
+
19
+ global-exclude *.py[cod] __pycache__ .DS_Store
20
+ graft deps/bin
21
+ include README.md confluent-flink-jars/version.py
22
+ recursive-exclude pyflink/fn_execution *
@@ -0,0 +1,96 @@
1
+ Metadata-Version: 2.4
2
+ Name: confluent-pyflink
3
+ Version: 1.0.dev1
4
+ Summary: Confluent Apache Flink Table API Python
5
+ Author-email: Confluent <dev@confluent.io>
6
+ License-Expression: Apache-2.0
7
+ Project-URL: Homepage, https://confluent.io
8
+ Project-URL: Examples, https://github.com/confluentinc/flink-table-api-python-examples
9
+ Classifier: Development Status :: 4 - Beta
10
+ Classifier: Programming Language :: Python :: 3.9
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Requires-Python: >=3.9
15
+ Description-Content-Type: text/markdown
16
+ Requires-Dist: apache-flink~=2.1.0
17
+ Requires-Dist: pydantic-settings>=2.10.1
18
+ Requires-Dist: pyyaml
19
+ Requires-Dist: confluent-flink-jars==2.1.dev1
20
+
21
+ # Confluent Apache Flink Table API Python
22
+
23
+ This package contains the client library for running Apache Flink's Table API on Confluent Cloud.
24
+
25
+ The [Table API](https://nightlies.apache.org/flink/flink-docs-stable/docs/dev/python/table_api_tutorial/) enables a programmatic
26
+ way of developing, testing, and submitting Flink pipelines for processing data streams.
27
+ Streams can be finite or infinite, with insert-only or changelog data. The latter allows for dealing with *Change Data
28
+ Capture* (CDC) events.
29
+
30
+ Within the API, you conceptually work with tables that change over time - inspired by relational databases. Write
31
+ a *Table Program* as a declarative and structured graph of data transformations. Table API is inspired by SQL and complements
32
+ it with additional tools for juggling real-time data. You can mix and match Flink SQL with Table API at any time as they
33
+ go hand in hand.
34
+
35
+ ## Table API on Confluent Cloud
36
+
37
+ Table API on Confluent Cloud is a client-side library that delegates Flink API calls to Confluent’s public
38
+ REST API. It submits [Statements](https://docs.confluent.io/cloud/current/api.html#tag/Statements-(sqlv1)) and retrieves
39
+ [StatementResults](https://docs.confluent.io/cloud/current/api.html#tag/Statement-Results-(sqlv1)).
40
+
41
+ Table programs are implemented against [Flink's open source Table API for Python](https://github.com/apache/flink/tree/master/flink-python/pyflink/table).
42
+ This package repackages Flink's Python API and bundles the Confluent-specific components for powering the `TableEnvironment` without the need
43
+ for a local Flink cluster. While using those packages, Flink internal components such as
44
+ `CatalogStore`, `Catalog`, `Planner`, `Executor`, and configuration are managed by the plugin and fully integrate with
45
+ Confluent Cloud, including access to Apache Kafka®, Schema Registry, and Flink Compute Pools.
46
+
47
+ Note: The Table API plugin is in Open Preview stage.
48
+
49
+ ### Motivating Example
50
+
51
+ The following code shows how a Table API program is structured. Subsequent sections will go into more details how you
52
+ can use the examples of this repository to play around with Flink on Confluent Cloud.
53
+
54
+ ```python
55
+ from confluent_pyflink.table.utils import ConfluentSettings, ConfluentTools
56
+ from confluent_pyflink.table import TableEnvironment, Row
57
+ from confluent_pyflink.table.expressions import col, row
58
+
59
+
60
+ def run():
61
+ # Setup connection properties to Confluent Cloud
62
+ settings = ConfluentSettings.from_global_variables()
63
+ env = TableEnvironment.create(settings)
64
+
65
+ # Run your first Flink statement in Table API
66
+ env.from_elements([row("Hello world!")]).execute().print()
67
+
68
+ # Or use SQL
69
+ env.sql_query("SELECT 'Hello world!'").execute().print()
70
+
71
+ # Structure your code with Table objects - the main ingredient of Table API.
72
+ table = (
73
+ env.from_path("examples.marketplace.clicks")
74
+ .filter(col("user_agent").like("Mozilla%"))
75
+ .select(col("click_id"), col("user_id"))
76
+ )
77
+
78
+ table.print_schema()
79
+ print(table.explain())
80
+
81
+ # Use the provided tools to test on a subset of the streaming data
82
+ expected = ConfluentTools.collect_materialized_limit(table, 50)
83
+ actual = [Row(42, 500)]
84
+ if expected != actual:
85
+ print("Results don't match!")
86
+
87
+
88
+ if __name__ == "__main__":
89
+ run()
90
+ ```
91
+
92
+ ### Further Examples
93
+
94
+ For further examples, please see Confluent's
95
+ [Apache Flink® Table API on Confluent Cloud Examples](https://github.com/confluentinc/flink-table-api-python-examples)
96
+ repository.
@@ -0,0 +1,76 @@
1
+ # Confluent Apache Flink Table API Python
2
+
3
+ This package contains the client library for running Apache Flink's Table API on Confluent Cloud.
4
+
5
+ The [Table API](https://nightlies.apache.org/flink/flink-docs-stable/docs/dev/python/table_api_tutorial/) enables a programmatic
6
+ way of developing, testing, and submitting Flink pipelines for processing data streams.
7
+ Streams can be finite or infinite, with insert-only or changelog data. The latter allows for dealing with *Change Data
8
+ Capture* (CDC) events.
9
+
10
+ Within the API, you conceptually work with tables that change over time - inspired by relational databases. Write
11
+ a *Table Program* as a declarative and structured graph of data transformations. Table API is inspired by SQL and complements
12
+ it with additional tools for juggling real-time data. You can mix and match Flink SQL with Table API at any time as they
13
+ go hand in hand.
14
+
15
+ ## Table API on Confluent Cloud
16
+
17
+ Table API on Confluent Cloud is a client-side library that delegates Flink API calls to Confluent’s public
18
+ REST API. It submits [Statements](https://docs.confluent.io/cloud/current/api.html#tag/Statements-(sqlv1)) and retrieves
19
+ [StatementResults](https://docs.confluent.io/cloud/current/api.html#tag/Statement-Results-(sqlv1)).
20
+
21
+ Table programs are implemented against [Flink's open source Table API for Python](https://github.com/apache/flink/tree/master/flink-python/pyflink/table).
22
+ This package repackages Flink's Python API and bundles the Confluent-specific components for powering the `TableEnvironment` without the need
23
+ for a local Flink cluster. While using those packages, Flink internal components such as
24
+ `CatalogStore`, `Catalog`, `Planner`, `Executor`, and configuration are managed by the plugin and fully integrate with
25
+ Confluent Cloud, including access to Apache Kafka®, Schema Registry, and Flink Compute Pools.
26
+
27
+ Note: The Table API plugin is in Open Preview stage.
28
+
29
+ ### Motivating Example
30
+
31
+ The following code shows how a Table API program is structured. Subsequent sections will go into more detail on how you
32
+ can use the examples of this repository to play around with Flink on Confluent Cloud.
33
+
34
+ ```python
35
+ from confluent_pyflink.table.utils import ConfluentSettings, ConfluentTools
36
+ from confluent_pyflink.table import TableEnvironment, Row
37
+ from confluent_pyflink.table.expressions import col, row
38
+
39
+
40
+ def run():
41
+ # Setup connection properties to Confluent Cloud
42
+ settings = ConfluentSettings.from_global_variables()
43
+ env = TableEnvironment.create(settings)
44
+
45
+ # Run your first Flink statement in Table API
46
+ env.from_elements([row("Hello world!")]).execute().print()
47
+
48
+ # Or use SQL
49
+ env.sql_query("SELECT 'Hello world!'").execute().print()
50
+
51
+ # Structure your code with Table objects - the main ingredient of Table API.
52
+ table = (
53
+ env.from_path("examples.marketplace.clicks")
54
+ .filter(col("user_agent").like("Mozilla%"))
55
+ .select(col("click_id"), col("user_id"))
56
+ )
57
+
58
+ table.print_schema()
59
+ print(table.explain())
60
+
61
+ # Use the provided tools to test on a subset of the streaming data
62
+ expected = ConfluentTools.collect_materialized_limit(table, 50)
63
+ actual = [Row(42, 500)]
64
+ if expected != actual:
65
+ print("Results don't match!")
66
+
67
+
68
+ if __name__ == "__main__":
69
+ run()
70
+ ```
71
+
72
+ ### Further Examples
73
+
74
+ For further examples, please see Confluent's
75
+ [Apache Flink® Table API on Confluent Cloud Examples](https://github.com/confluentinc/flink-table-api-python-examples)
76
+ repository.
@@ -0,0 +1,24 @@
1
+ ################################################################################
2
+ # Licensed to the Apache Software Foundation (ASF) under one
3
+ # or more contributor license agreements. See the NOTICE file
4
+ # distributed with this work for additional information
5
+ # regarding copyright ownership. The ASF licenses this file
6
+ # to you under the Apache License, Version 2.0 (the
7
+ # "License"); you may not use this file except in compliance
8
+ # with the License. You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+ ################################################################################
18
+
19
+ """
20
+ The confluent-flink-jars version will be consistent with the flink version and follow the PEP440.
21
+ .. seealso:: https://www.python.org/dev/peps/pep-0440
22
+ """
23
+
24
+ __version__ = "2.1.dev1"
@@ -0,0 +1,96 @@
1
+ Metadata-Version: 2.4
2
+ Name: confluent-pyflink
3
+ Version: 1.0.dev1
4
+ Summary: Confluent Apache Flink Table API Python
5
+ Author-email: Confluent <dev@confluent.io>
6
+ License-Expression: Apache-2.0
7
+ Project-URL: Homepage, https://confluent.io
8
+ Project-URL: Examples, https://github.com/confluentinc/flink-table-api-python-examples
9
+ Classifier: Development Status :: 4 - Beta
10
+ Classifier: Programming Language :: Python :: 3.9
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Requires-Python: >=3.9
15
+ Description-Content-Type: text/markdown
16
+ Requires-Dist: apache-flink~=2.1.0
17
+ Requires-Dist: pydantic-settings>=2.10.1
18
+ Requires-Dist: pyyaml
19
+ Requires-Dist: confluent-flink-jars==2.1.dev1
20
+
21
+ # Confluent Apache Flink Table API Python
22
+
23
+ This package contains the client library for running Apache Flink's Table API on Confluent Cloud.
24
+
25
+ The [Table API](https://nightlies.apache.org/flink/flink-docs-stable/docs/dev/python/table_api_tutorial/) enables a programmatic
26
+ way of developing, testing, and submitting Flink pipelines for processing data streams.
27
+ Streams can be finite or infinite, with insert-only or changelog data. The latter allows for dealing with *Change Data
28
+ Capture* (CDC) events.
29
+
30
+ Within the API, you conceptually work with tables that change over time - inspired by relational databases. Write
31
+ a *Table Program* as a declarative and structured graph of data transformations. Table API is inspired by SQL and complements
32
+ it with additional tools for juggling real-time data. You can mix and match Flink SQL with Table API at any time as they
33
+ go hand in hand.
34
+
35
+ ## Table API on Confluent Cloud
36
+
37
+ Table API on Confluent Cloud is a client-side library that delegates Flink API calls to Confluent’s public
38
+ REST API. It submits [Statements](https://docs.confluent.io/cloud/current/api.html#tag/Statements-(sqlv1)) and retrieves
39
+ [StatementResults](https://docs.confluent.io/cloud/current/api.html#tag/Statement-Results-(sqlv1)).
40
+
41
+ Table programs are implemented against [Flink's open source Table API for Python](https://github.com/apache/flink/tree/master/flink-python/pyflink/table).
42
+ This package repackages Flink's Python API and bundles the Confluent-specific components for powering the `TableEnvironment` without the need
43
+ for a local Flink cluster. While using those packages, Flink internal components such as
44
+ `CatalogStore`, `Catalog`, `Planner`, `Executor`, and configuration are managed by the plugin and fully integrate with
45
+ Confluent Cloud, including access to Apache Kafka®, Schema Registry, and Flink Compute Pools.
46
+
47
+ Note: The Table API plugin is in Open Preview stage.
48
+
49
+ ### Motivating Example
50
+
51
+ The following code shows how a Table API program is structured. Subsequent sections will go into more detail on how you
52
+ can use the examples of this repository to play around with Flink on Confluent Cloud.
53
+
54
+ ```python
55
+ from confluent_pyflink.table.utils import ConfluentSettings, ConfluentTools
56
+ from confluent_pyflink.table import TableEnvironment, Row
57
+ from confluent_pyflink.table.expressions import col, row
58
+
59
+
60
+ def run():
61
+ # Setup connection properties to Confluent Cloud
62
+ settings = ConfluentSettings.from_global_variables()
63
+ env = TableEnvironment.create(settings)
64
+
65
+ # Run your first Flink statement in Table API
66
+ env.from_elements([row("Hello world!")]).execute().print()
67
+
68
+ # Or use SQL
69
+ env.sql_query("SELECT 'Hello world!'").execute().print()
70
+
71
+ # Structure your code with Table objects - the main ingredient of Table API.
72
+ table = (
73
+ env.from_path("examples.marketplace.clicks")
74
+ .filter(col("user_agent").like("Mozilla%"))
75
+ .select(col("click_id"), col("user_id"))
76
+ )
77
+
78
+ table.print_schema()
79
+ print(table.explain())
80
+
81
+ # Use the provided tools to test on a subset of the streaming data
82
+ expected = ConfluentTools.collect_materialized_limit(table, 50)
83
+ actual = [Row(42, 500)]
84
+ if expected != actual:
85
+ print("Results don't match!")
86
+
87
+
88
+ if __name__ == "__main__":
89
+ run()
90
+ ```
91
+
92
+ ### Further Examples
93
+
94
+ For further examples, please see Confluent's
95
+ [Apache Flink® Table API on Confluent Cloud Examples](https://github.com/confluentinc/flink-table-api-python-examples)
96
+ repository.
@@ -0,0 +1,44 @@
1
+ MANIFEST.in
2
+ README.md
3
+ pyproject.toml
4
+ confluent-flink-jars/version.py
5
+ confluent_pyflink.egg-info/PKG-INFO
6
+ confluent_pyflink.egg-info/SOURCES.txt
7
+ confluent_pyflink.egg-info/dependency_links.txt
8
+ confluent_pyflink.egg-info/not-zip-safe
9
+ confluent_pyflink.egg-info/requires.txt
10
+ confluent_pyflink.egg-info/top_level.txt
11
+ src/confluent_pyflink/__init__.py
12
+ src/confluent_pyflink/context/__init__.py
13
+ src/confluent_pyflink/context/confluent_context.py
14
+ src/confluent_pyflink/context/context.py
15
+ src/confluent_pyflink/context/java_gateway_manager.py
16
+ src/confluent_pyflink/context/local_context.py
17
+ src/confluent_pyflink/table/__init__.py
18
+ src/confluent_pyflink/table/catalog.py
19
+ src/confluent_pyflink/table/changelog_mode.py
20
+ src/confluent_pyflink/table/data_view.py
21
+ src/confluent_pyflink/table/descriptors.py
22
+ src/confluent_pyflink/table/environment_settings.py
23
+ src/confluent_pyflink/table/explain_detail.py
24
+ src/confluent_pyflink/table/expression.py
25
+ src/confluent_pyflink/table/expressions.py
26
+ src/confluent_pyflink/table/functions.py
27
+ src/confluent_pyflink/table/module.py
28
+ src/confluent_pyflink/table/result_kind.py
29
+ src/confluent_pyflink/table/schema.py
30
+ src/confluent_pyflink/table/serializers.py
31
+ src/confluent_pyflink/table/sql_dialect.py
32
+ src/confluent_pyflink/table/statement_set.py
33
+ src/confluent_pyflink/table/table.py
34
+ src/confluent_pyflink/table/table_config.py
35
+ src/confluent_pyflink/table/table_descriptor.py
36
+ src/confluent_pyflink/table/table_environment.py
37
+ src/confluent_pyflink/table/table_result.py
38
+ src/confluent_pyflink/table/table_schema.py
39
+ src/confluent_pyflink/table/types.py
40
+ src/confluent_pyflink/table/udf.py
41
+ src/confluent_pyflink/table/window.py
42
+ src/confluent_pyflink/table/utils/__init__.py
43
+ src/confluent_pyflink/table/utils/confluent_settings.py
44
+ src/confluent_pyflink/table/utils/confluent_tools.py
@@ -0,0 +1,4 @@
1
+ apache-flink~=2.1.0
2
+ pydantic-settings>=2.10.1
3
+ pyyaml
4
+ confluent-flink-jars==2.1.dev1
@@ -0,0 +1 @@
1
+ confluent_pyflink
@@ -0,0 +1,75 @@
1
+ [build-system]
2
+ requires = ["setuptools>=75.8", "wheel"]
3
+ build-backend = "setuptools.build_meta"
4
+
5
+ [project]
6
+ name = "confluent-pyflink"
7
+ description = "Confluent Apache Flink Table API Python"
8
+ readme = "README.md"
9
+ version = "1.0.dev1"
10
+ dependencies = [
11
+ "apache-flink~=2.1.0",
12
+ "pydantic-settings>=2.10.1",
13
+ "pyyaml",
14
+ "confluent-flink-jars==2.1.dev1"
15
+ ]
16
+ authors = [
17
+ {name = "Confluent", email = "dev@confluent.io"}
18
+ ]
19
+ license = "Apache-2.0"
20
+ requires-python = ">=3.9"
21
+ classifiers = [
22
+ "Development Status :: 4 - Beta",
23
+ "Programming Language :: Python :: 3.9",
24
+ "Programming Language :: Python :: 3.10",
25
+ "Programming Language :: Python :: 3.11",
26
+ "Programming Language :: Python :: 3.12",
27
+ ]
28
+ dynamic = ["scripts"]
29
+
30
+ [project.urls]
31
+ Homepage = "https://confluent.io"
32
+ Examples = "https://github.com/confluentinc/flink-table-api-python-examples"
33
+
34
+ [dependency-groups]
35
+ dev = [
36
+ "ruff",
37
+ "pytest>=8.4.1"
38
+ ]
39
+
40
+ [tool.setuptools]
41
+ zip-safe = false
42
+ include-package-data = true
43
+
44
+ [tool.setuptools.packages.find]
45
+ where = ["src", "deps"]
46
+ include = [
47
+ 'confluent_pyflink',
48
+ 'confluent_pyflink.context',
49
+ 'confluent_pyflink.table',
50
+ 'confluent_pyflink.table.utils',
51
+ ]
52
+
53
+ [tool.ruff]
54
+ line-length = 101
55
+ indent-width = 4
56
+ target-version = "py39"
57
+ extend-exclude = ["setup_pyshell.py"]
58
+
59
+ [tool.ruff.lint]
60
+ select = ["E", "F"]
61
+ ignore = []
62
+ fixable = ["ALL"]
63
+ unfixable = []
64
+
65
+ [tool.ruff.format]
66
+ quote-style = "double"
67
+ indent-style = "space"
68
+ skip-magic-trailing-comma = false
69
+ line-ending = "auto"
70
+
71
+ [tool.uv.sources]
72
+ confluent-flink-jars = { path = "confluent-flink-jars" }
73
+
74
+ [tool.pytest.ini_options]
75
+ addopts = ["--junit-xml", "TEST-result.xml",]
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
@@ -0,0 +1,4 @@
1
+ from .confluent_context import ConfluentContext
2
+ from .local_context import LocalContext
3
+
4
+ __all__ = ["ConfluentContext", "LocalContext"]
@@ -0,0 +1,92 @@
1
+ from .context import Context, FlinkDirectories, InvalidFlinkHomeException
2
+ import os
3
+ import logging
4
+ from pathlib import Path
5
+
6
+ logger = logging.getLogger(__name__)
7
+
8
+
9
class ConfluentContext(Context):
    """
    An implementation of the Context class for Confluent PyFlink applications.
    Loads classes and jars from the Confluent Flink distribution, for use with Confluent Cloud.

    Use it as a context manager::

        with ConfluentContext():
            settings = ConfluentSettings(...)
            env = TableEnvironment.create(settings)
            env.from_elements([row("Hello world!")]).execute().print()
    """

    def _find_confluent_flink_home(self) -> Path:
        """
        Find and return the Confluent Flink home directory.

        Searches for Confluent Flink home in the following order:
        1. CONFLUENT_FLINK_HOME environment variable
        2. confluent_pyflink module path

        :raises InvalidFlinkHomeException: if no home directory can be determined.
        """
        # If the environment has set CONFLUENT_FLINK_HOME, trust it.
        env_home = os.environ.get("CONFLUENT_FLINK_HOME")
        if env_home is not None:
            return Path(env_home)

        try:
            # Fall back to the location of the installed confluent_pyflink
            # package. __path__ may hold several entries; keep the last one,
            # matching the original scan order.
            module_homes = list(__import__("confluent_pyflink").__path__)
        except Exception as exception:
            raise InvalidFlinkHomeException(
                "Unable to find CONFLUENT_FLINK_HOME (Flink distribution directory) due to:"
            ) from exception

        if module_homes:
            return Path(module_homes[-1])

        # Raised outside the try block so this specific message is not
        # swallowed and re-wrapped by the generic "due to:" handler above
        # (the original raised it inside the try, hiding the diagnostic).
        raise InvalidFlinkHomeException(
            "Could not find valid CONFLUENT_FLINK_HOME (Flink distribution directory) "
            "in current environment."
        )

    def _get_flink_directories(self) -> FlinkDirectories:
        """
        Build and return the Flink directory layout for this context.

        The Flink home comes from the base Context lookup, while the default
        lib directory points at the Confluent Flink distribution's ``lib``.
        """
        flink_home = self._find_flink_home()
        confluent_flink_home = self._find_confluent_flink_home()
        flink_dirs = FlinkDirectories.build_flink_dirs(
            home=flink_home, default_lib=confluent_flink_home / "lib"
        )
        logger.debug(f"Using the following Flink directories:\n{flink_dirs}")
        return flink_dirs

    def _classes_to_load(self) -> set[str]:
        """
        Return the Java class names (or package wildcards) to expose through
        the gateway, including the Confluent plugin classes
        (``io.confluent.flink.plugin.*``).
        """
        return {
            "org.apache.flink.table.api.*",
            "org.apache.flink.table.legacy.api.*",
            "org.apache.flink.table.api.config.*",
            "org.apache.flink.table.api.java.*",
            "org.apache.flink.table.api.bridge.java.*",
            "org.apache.flink.table.api.dataview.*",
            "org.apache.flink.table.catalog.*",
            "org.apache.flink.table.descriptors.*",
            "org.apache.flink.table.legacy.descriptors.*",
            "org.apache.flink.table.descriptors.python.*",
            "org.apache.flink.table.expressions.*",
            "org.apache.flink.table.sources.*",
            "org.apache.flink.table.legacy.sources.*",
            "org.apache.flink.table.sinks.*",
            "org.apache.flink.table.legacy.sinks.*",
            "org.apache.flink.table.types.*",
            "org.apache.flink.table.types.logical.*",
            "org.apache.flink.table.util.python.*",
            "org.apache.flink.api.common.python.*",
            "org.apache.flink.api.common.typeinfo.TypeInformation",
            "org.apache.flink.api.common.typeinfo.Types",
            "org.apache.flink.api.java.ExecutionEnvironment",
            "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment",
            "org.apache.flink.python.util.PythonDependencyUtils",
            "org.apache.flink.python.PythonOptions",
            "org.apache.flink.client.python.PythonGatewayServer",
            "org.apache.flink.streaming.api.functions.python.*",
            "org.apache.flink.streaming.api.operators.python.process.*",
            "org.apache.flink.streaming.api.operators.python.embedded.*",
            "org.apache.flink.streaming.api.typeinfo.python.*",
            "io.confluent.flink.plugin.*",
        }