clickhouse-orm 2.2.2__tar.gz → 3.0.1__tar.gz

This diff shows the content of publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,90 @@
+ Metadata-Version: 2.4
+ Name: clickhouse_orm
+ Version: 3.0.1
+ Summary: A simple ORM for working with the Clickhouse database. Maintenance fork of infi.clickhouse_orm.
+ Author-email: Oliver Margetts <oliver.margetts@gmail.com>
+ Description-Content-Type: text/markdown
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: System Administrators
+ Classifier: License :: OSI Approved :: BSD License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Classifier: Topic :: Database
+ License-File: LICENSE
+ Requires-Dist: requests
+ Requires-Dist: pytz
+ Requires-Dist: docker==7.1.0 ; extra == "dev"
+ Requires-Dist: pytest==9.0.2 ; extra == "dev"
+ Requires-Dist: ruff==0.14.14 ; extra == "dev"
+ Project-URL: Homepage, https://github.com/SuadeLabs/clickhouse_orm
+ Project-URL: Repository, https://github.com/SuadeLabs/clickhouse_orm
+ Provides-Extra: dev
+
+ A fork of [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm) aimed at more frequent maintenance and bugfixes.
+
+ [![Tests](https://github.com/SuadeLabs/clickhouse_orm/actions/workflows/python-test.yml/badge.svg)](https://github.com/SuadeLabs/clickhouse_orm/actions/workflows/python-test.yml)
+ ![PyPI](https://img.shields.io/pypi/v/clickhouse_orm)
+
+ Introduction
+ ============
+
+ This project is a simple ORM for working with the [ClickHouse database](https://clickhouse.yandex/).
+ It allows you to define model classes whose instances can be written to the database and read from it.
+
+ Let's jump right in with a simple example of monitoring CPU usage. First we need to define the model class,
+ connect to the database and create a table for the model:
+
+ ```python
+ from clickhouse_orm import Database, Model, DateTimeField, UInt16Field, Float32Field, Memory, F
+
+ class CPUStats(Model):
+
+     timestamp = DateTimeField()
+     cpu_id = UInt16Field()
+     cpu_percent = Float32Field()
+
+     engine = Memory()
+
+ db = Database('demo')
+ db.create_table(CPUStats)
+ ```
+
+ Now we can collect usage statistics per CPU, and write them to the database:
+
+ ```python
+ import psutil, time, datetime
+
+ psutil.cpu_percent(percpu=True)  # first sample should be discarded
+ while True:
+     time.sleep(1)
+     stats = psutil.cpu_percent(percpu=True)
+     timestamp = datetime.datetime.now()
+     db.insert([
+         CPUStats(timestamp=timestamp, cpu_id=cpu_id, cpu_percent=cpu_percent)
+         for cpu_id, cpu_percent in enumerate(stats)
+     ])
+ ```
+
+ Querying the table is easy, using either the query builder or raw SQL:
+
+ ```python
+ # Calculate what percentage of the time CPU 1 was over 95% busy
+ queryset = CPUStats.objects_in(db)
+ total = queryset.filter(CPUStats.cpu_id == 1).count()
+ busy = queryset.filter(CPUStats.cpu_id == 1, CPUStats.cpu_percent > 95).count()
+ print('CPU 1 was busy {:.2f}% of the time'.format(busy * 100.0 / total))
+
+ # Calculate the average usage per CPU
+ for row in queryset.aggregate(CPUStats.cpu_id, average=F.avg(CPUStats.cpu_percent)):
+     print('CPU {row.cpu_id}: {row.average:.2f}%'.format(row=row))
+ ```
+
+ This and other examples can be found in the `examples` folder.
+
+ To learn more, please visit the [documentation](docs/toc.md).
+
@@ -0,0 +1,62 @@
+ A fork of [infi.clickhouse_orm](https://github.com/Infinidat/infi.clickhouse_orm) aimed at more frequent maintenance and bugfixes.
+
+ [![Tests](https://github.com/SuadeLabs/clickhouse_orm/actions/workflows/python-test.yml/badge.svg)](https://github.com/SuadeLabs/clickhouse_orm/actions/workflows/python-test.yml)
+ ![PyPI](https://img.shields.io/pypi/v/clickhouse_orm)
+
+ Introduction
+ ============
+
+ This project is a simple ORM for working with the [ClickHouse database](https://clickhouse.yandex/).
+ It allows you to define model classes whose instances can be written to the database and read from it.
+
+ Let's jump right in with a simple example of monitoring CPU usage. First we need to define the model class,
+ connect to the database and create a table for the model:
+
+ ```python
+ from clickhouse_orm import Database, Model, DateTimeField, UInt16Field, Float32Field, Memory, F
+
+ class CPUStats(Model):
+
+     timestamp = DateTimeField()
+     cpu_id = UInt16Field()
+     cpu_percent = Float32Field()
+
+     engine = Memory()
+
+ db = Database('demo')
+ db.create_table(CPUStats)
+ ```
+
+ Now we can collect usage statistics per CPU, and write them to the database:
+
+ ```python
+ import psutil, time, datetime
+
+ psutil.cpu_percent(percpu=True)  # first sample should be discarded
+ while True:
+     time.sleep(1)
+     stats = psutil.cpu_percent(percpu=True)
+     timestamp = datetime.datetime.now()
+     db.insert([
+         CPUStats(timestamp=timestamp, cpu_id=cpu_id, cpu_percent=cpu_percent)
+         for cpu_id, cpu_percent in enumerate(stats)
+     ])
+ ```
+
+ Querying the table is easy, using either the query builder or raw SQL:
+
+ ```python
+ # Calculate what percentage of the time CPU 1 was over 95% busy
+ queryset = CPUStats.objects_in(db)
+ total = queryset.filter(CPUStats.cpu_id == 1).count()
+ busy = queryset.filter(CPUStats.cpu_id == 1, CPUStats.cpu_percent > 95).count()
+ print('CPU 1 was busy {:.2f}% of the time'.format(busy * 100.0 / total))
+
+ # Calculate the average usage per CPU
+ for row in queryset.aggregate(CPUStats.cpu_id, average=F.avg(CPUStats.cpu_percent)):
+     print('CPU {row.cpu_id}: {row.average:.2f}%'.format(row=row))
+ ```
+
+ This and other examples can be found in the `examples` folder.
+
+ To learn more, please visit the [documentation](docs/toc.md).
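
The README demonstrates the query builder but not the raw SQL route it mentions. A minimal sketch of the latter, assuming this fork keeps `Database.select` from infi.clickhouse_orm (which builds an ad hoc model from the result columns when `model_class` is omitted); the table name below is illustrative:

```python
# Raw SQL equivalent of the aggregation above, reusing `db` from the README
sql = "SELECT cpu_id, avg(cpu_percent) AS average FROM demo.cpustats GROUP BY cpu_id"
for row in db.select(sql):
    print(row.cpu_id, row.average)
```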
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  from inspect import isclass
 
  from .database import *  # noqa: F401, F403
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import datetime
  import logging
  import re
@@ -13,13 +15,11 @@ from .utils import Page, import_submodules, parse_tsv
  logger = logging.getLogger("clickhouse_orm")
 
 
- class DatabaseException(Exception):
+ class DatabaseException(Exception):  # noqa: N818
      """
      Raised when a database operation fails.
      """
 
-     pass
-
 
  class ServerError(DatabaseException):
      """
@@ -35,7 +35,7 @@ class ServerError(DatabaseException):
          # just skip custom init
          # if non-standard message format
          self.message = message
-         super(ServerError, self).__init__(message)
+         super().__init__(message)
 
      ERROR_PATTERNS = (
          # ClickHouse prior to v19.3.3
@@ -55,6 +55,14 @@ class ServerError(DatabaseException):
              """,
              re.VERBOSE | re.DOTALL,
          ),
+         # ClickHouse v21+
+         re.compile(
+             r"""
+             Code:\ (?P<code>\d+).
+             \ (?P<type1>[^ \n]+):\ (?P<msg>.+)
+             """,
+             re.VERBOSE | re.DOTALL,
+         ),
      )
 
      @classmethod
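
A quick sanity check of the new v21+ pattern; the error text below is hypothetical but follows the `Code: N. Type: message` shape the regex targets:

```python
import re

pattern = re.compile(
    r"""
    Code:\ (?P<code>\d+).
    \ (?P<type1>[^ \n]+):\ (?P<msg>.+)
    """,
    re.VERBOSE | re.DOTALL,
)
m = pattern.match("Code: 60. DB::Exception: Table demo.cpustats does not exist.")
print(m.group("code"), m.group("type1"))  # 60 DB::Exception
```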
@@ -75,19 +83,21 @@ class ServerError(DatabaseException):
 
      def __str__(self):
          if self.code is not None:
-             return "{} ({})".format(self.message, self.code)
+             return f"{self.message} ({self.code})"
 
 
- class Database(object):
+ class Database:
      """
      Database instances connect to a specific ClickHouse database for running queries,
      inserting data and other operations.
      """
 
+     _default_url = "http://localhost:8123/"
+
      def __init__(
          self,
          db_name,
-         db_url="http://localhost:8123/",
+         db_url=None,
          username=None,
          password=None,
          readonly=False,
@@ -111,7 +121,7 @@ class Database(object):
          - `log_statements`: when True, all database statements are logged.
          """
          self.db_name = db_name
-         self.db_url = db_url
+         self.db_url = db_url or self._default_url
          self.readonly = False
          self.timeout = timeout
          self.request_session = requests.Session()
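
With the default now resolved inside `__init__`, an explicit URL still wins, and subclasses can presumably override `_default_url`. A sketch, assuming a reachable server (the constructor probes for the database's existence):

```python
from clickhouse_orm import Database

db = Database("demo")  # db_url=None falls back to Database._default_url
assert db.db_url == "http://localhost:8123/"
custom = Database("demo", db_url="http://clickhouse:8123/")  # explicit URL wins
```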
@@ -432,7 +442,7 @@ class Database(object):
          except ServerError as e:
              logger.exception("Cannot determine server version (%s), assuming 1.1.0", e)
              ver = "1.1.0"
-         return tuple(int(n) for n in ver.split(".")) if as_tuple else ver
+         return tuple(int(n) for n in ver.split(".") if n.isdigit()) if as_tuple else ver
 
      def _is_existing_database(self):
          r = self._send("SELECT count() FROM system.databases WHERE name = '%s'" % self.db_name)
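
The added `if n.isdigit()` guards against version strings with non-numeric components, which would previously crash `int()`. A pure-Python illustration (the version string is hypothetical but follows ClickHouse's LTS naming):

```python
ver = "21.8.3.44-lts"
print(tuple(int(n) for n in ver.split(".") if n.isdigit()))  # (21, 8, 3)
```

Note that a component such as `44-lts` is skipped entirely rather than parsed, which is sufficient for the major/minor comparisons the ORM makes.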
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import logging
 
  from .utils import comma_join, get_subclass_names
@@ -5,7 +7,7 @@ from .utils import comma_join, get_subclass_names
  logger = logging.getLogger("clickhouse_orm")
 
 
- class Engine(object):
+ class Engine:
      def create_table_sql(self, db):
          raise NotImplementedError()  # pragma: no cover
 
@@ -44,9 +46,9 @@ class MergeTree(Engine):
              list,
              tuple,
          ), "partition_key must be tuple or list if present"
-         assert (replica_table_path is None) == (
-             replica_name is None
-         ), "both replica_table_path and replica_name must be specified"
+         assert (replica_table_path is None) == (replica_name is None), (
+             "both replica_table_path and replica_name must be specified"
+         )
 
          # These values conflict with each other (old and new syntax of table engines.
          # So let's control only one of them is given.
@@ -145,7 +147,7 @@ class CollapsingMergeTree(MergeTree):
          partition_key=None,
          primary_key=None,
      ):
-         super(CollapsingMergeTree, self).__init__(
+         super().__init__(
              date_col,
              order_by,
              sampling_expr,
@@ -158,7 +160,7 @@ class CollapsingMergeTree(MergeTree):
          self.sign_col = sign_col
 
      def _build_sql_params(self, db):
-         params = super(CollapsingMergeTree, self)._build_sql_params(db)
+         params = super()._build_sql_params(db)
          params.append(self.sign_col)
          return params
 
@@ -176,7 +178,7 @@ class SummingMergeTree(MergeTree):
          partition_key=None,
          primary_key=None,
      ):
-         super(SummingMergeTree, self).__init__(
+         super().__init__(
              date_col,
              order_by,
              sampling_expr,
@@ -190,7 +192,7 @@ class SummingMergeTree(MergeTree):
          self.summing_cols = summing_cols
 
      def _build_sql_params(self, db):
-         params = super(SummingMergeTree, self)._build_sql_params(db)
+         params = super()._build_sql_params(db)
          if self.summing_cols:
              params.append("(%s)" % comma_join(self.summing_cols))
          return params
@@ -209,7 +211,7 @@ class ReplacingMergeTree(MergeTree):
          partition_key=None,
          primary_key=None,
      ):
-         super(ReplacingMergeTree, self).__init__(
+         super().__init__(
              date_col,
              order_by,
              sampling_expr,
@@ -222,7 +224,7 @@ class ReplacingMergeTree(MergeTree):
          self.ver_col = ver_col
 
      def _build_sql_params(self, db):
-         params = super(ReplacingMergeTree, self)._build_sql_params(db)
+         params = super()._build_sql_params(db)
          if self.ver_col:
              params.append(self.ver_col)
          return params
@@ -332,7 +334,7 @@ class Distributed(Engine):
 
      def _build_sql_params(self, db):
          if self.table_name is None:
-             raise ValueError("Cannot create {} engine: specify an underlying table".format(self.__class__.__name__))
+             raise ValueError(f"Cannot create {self.__class__.__name__} engine: specify an underlying table")
 
          params = ["`%s`" % p for p in [self.cluster, db.db_name, self.table_name]]
          if self.sharding_key:
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import datetime
  from calendar import timegm
  from decimal import Decimal, localcontext
@@ -5,7 +7,6 @@ from ipaddress import IPv4Address, IPv6Address
  from logging import getLogger
  from uuid import UUID
 
- import iso8601
  import pytz
  from pytz import BaseTzInfo
 
@@ -27,12 +28,12 @@ class Field(FunctionOperatorsMixin):
      db_type = None  # should be overridden by concrete subclasses
 
      def __init__(self, default=None, alias=None, materialized=None, readonly=None, codec=None):
-         assert [default, alias, materialized].count(
-             None
-         ) >= 2, "Only one of default, alias and materialized parameters can be given"
-         assert (
-             alias is None or isinstance(alias, F) or isinstance(alias, str) and alias != ""
-         ), "Alias parameter must be a string or function object, if given"
+         assert [default, alias, materialized].count(None) >= 2, (
+             "Only one of default, alias and materialized parameters can be given"
+         )
+         assert alias is None or isinstance(alias, F) or isinstance(alias, str) and alias != "", (
+             "Alias parameter must be a string or function object, if given"
+         )
          assert (
              materialized is None or isinstance(materialized, F) or isinstance(materialized, str) and materialized != ""
          ), "Materialized parameter must be a string or function object, if given"
@@ -117,7 +118,7 @@ class Field(FunctionOperatorsMixin):
          elif self.default:
              default = self.to_db_string(self.default)
              sql += " DEFAULT %s" % default
-         if self.codec and db and db.has_codec_support:
+         if self.codec and db and db.has_codec_support and not self.alias:
              sql += " CODEC(%s)" % self.codec
          return sql
 
@@ -141,7 +142,6 @@ class Field(FunctionOperatorsMixin):
 
 
  class StringField(Field):
-
      class_default = ""
      db_type = "String"
 
@@ -157,10 +157,10 @@ class FixedStringField(StringField):
      def __init__(self, length, default=None, alias=None, materialized=None, readonly=None):
          self._length = length
          self.db_type = "FixedString(%d)" % length
-         super(FixedStringField, self).__init__(default, alias, materialized, readonly)
+         super().__init__(default, alias, materialized, readonly)
 
      def to_python(self, value, timezone_in_use):
-         value = super(FixedStringField, self).to_python(value, timezone_in_use)
+         value = super().to_python(value, timezone_in_use)
          return value.rstrip("\0")
 
      def validate(self, value):
@@ -171,7 +171,6 @@ class FixedStringField(StringField):
 
 
  class DateField(Field):
-
      min_value = datetime.date(1970, 1, 1)
      max_value = datetime.date(2105, 12, 31)
      class_default = min_value
@@ -198,7 +197,6 @@ class DateField(Field):
 
 
  class DateTimeField(Field):
-
      class_default = datetime.datetime.fromtimestamp(0, pytz.utc)
      db_type = "DateTime"
 
@@ -231,11 +229,8 @@ class DateTimeField(Field):
              return datetime.datetime.utcfromtimestamp(value).replace(tzinfo=pytz.utc)
          except ValueError:
              pass
-         try:
-             # left the date naive in case of no tzinfo set
-             dt = iso8601.parse_date(value, default_timezone=None)
-         except iso8601.ParseError as e:
-             raise ValueError(str(e))
+         # leave the date naive if no tzinfo is set
+         dt = datetime.datetime.fromisoformat(value)
 
          # convert naive to aware
          if dt.tzinfo is None or dt.tzinfo.utcoffset(dt) is None:
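
A small check of the `iso8601` to `datetime.fromisoformat` swap; on the Python 3.11+ versions this package targets, `fromisoformat` also accepts a trailing `Z`:

```python
import datetime

print(datetime.datetime.fromisoformat("2024-01-02T03:04:05"))        # naive
print(datetime.datetime.fromisoformat("2024-01-02T03:04:05+00:00"))  # aware
print(datetime.datetime.fromisoformat("2024-01-02T03:04:05Z"))       # aware on 3.11+
```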
@@ -316,58 +311,50 @@ class BaseIntField(Field):
 
 
  class UInt8Field(BaseIntField):
-
      min_value = 0
-     max_value = 2 ** 8 - 1
+     max_value = 2**8 - 1
      db_type = "UInt8"
 
 
  class UInt16Field(BaseIntField):
-
      min_value = 0
-     max_value = 2 ** 16 - 1
+     max_value = 2**16 - 1
      db_type = "UInt16"
 
 
  class UInt32Field(BaseIntField):
-
      min_value = 0
-     max_value = 2 ** 32 - 1
+     max_value = 2**32 - 1
      db_type = "UInt32"
 
 
  class UInt64Field(BaseIntField):
-
      min_value = 0
-     max_value = 2 ** 64 - 1
+     max_value = 2**64 - 1
      db_type = "UInt64"
 
 
  class Int8Field(BaseIntField):
-
-     min_value = -(2 ** 7)
-     max_value = 2 ** 7 - 1
+     min_value = -(2**7)
+     max_value = 2**7 - 1
      db_type = "Int8"
 
 
  class Int16Field(BaseIntField):
-
-     min_value = -(2 ** 15)
-     max_value = 2 ** 15 - 1
+     min_value = -(2**15)
+     max_value = 2**15 - 1
      db_type = "Int16"
 
 
  class Int32Field(BaseIntField):
-
-     min_value = -(2 ** 31)
-     max_value = 2 ** 31 - 1
+     min_value = -(2**31)
+     max_value = 2**31 - 1
      db_type = "Int32"
 
 
  class Int64Field(BaseIntField):
-
-     min_value = -(2 ** 63)
-     max_value = 2 ** 63 - 1
+     min_value = -(2**63)
+     max_value = 2**63 - 1
      db_type = "Int64"
 
 
@@ -389,12 +376,10 @@ class BaseFloatField(Field):
 
 
  class Float32Field(BaseFloatField):
-
      db_type = "Float32"
 
 
  class Float64Field(BaseFloatField):
-
      db_type = "Float64"
 
 
@@ -414,7 +399,7 @@ class DecimalField(Field):
          self.exp = Decimal(10) ** -self.scale  # for rounding to the required scale
          self.max_value = Decimal(10 ** (self.precision - self.scale)) - self.exp
          self.min_value = -self.max_value
-         super(DecimalField, self).__init__(default, alias, materialized, readonly)
+         super().__init__(default, alias, materialized, readonly)
 
      def to_python(self, value, timezone_in_use):
          if not isinstance(value, Decimal):
@@ -440,19 +425,19 @@ class DecimalField(Field):
 
 
  class Decimal32Field(DecimalField):
      def __init__(self, scale, default=None, alias=None, materialized=None, readonly=None):
-         super(Decimal32Field, self).__init__(9, scale, default, alias, materialized, readonly)
+         super().__init__(9, scale, default, alias, materialized, readonly)
          self.db_type = "Decimal32(%d)" % scale
 
 
  class Decimal64Field(DecimalField):
      def __init__(self, scale, default=None, alias=None, materialized=None, readonly=None):
-         super(Decimal64Field, self).__init__(18, scale, default, alias, materialized, readonly)
+         super().__init__(18, scale, default, alias, materialized, readonly)
          self.db_type = "Decimal64(%d)" % scale
 
 
  class Decimal128Field(DecimalField):
      def __init__(self, scale, default=None, alias=None, materialized=None, readonly=None):
-         super(Decimal128Field, self).__init__(38, scale, default, alias, materialized, readonly)
+         super().__init__(38, scale, default, alias, materialized, readonly)
          self.db_type = "Decimal128(%d)" % scale
 
 
@@ -465,7 +450,7 @@ class BaseEnumField(Field):
          self.enum_cls = enum_cls
          if default is None:
              default = list(enum_cls)[0]
-         super(BaseEnumField, self).__init__(default, alias, materialized, readonly, codec)
+         super().__init__(default, alias, materialized, readonly, codec)
 
      def to_python(self, value, timezone_in_use):
          if isinstance(value, self.enum_cls):
@@ -512,24 +497,21 @@ class BaseEnumField(Field):
 
 
  class Enum8Field(BaseEnumField):
-
      db_type = "Enum8"
 
 
  class Enum16Field(BaseEnumField):
-
      db_type = "Enum16"
 
 
  class ArrayField(Field):
-
      class_default = []
 
      def __init__(self, inner_field, default=None, alias=None, materialized=None, readonly=None, codec=None):
          assert isinstance(inner_field, Field), "The first argument of ArrayField must be a Field instance"
          assert not isinstance(inner_field, ArrayField), "Multidimensional array fields are not supported by the ORM"
          self.inner_field = inner_field
-         super(ArrayField, self).__init__(default, alias, materialized, readonly, codec)
+         super().__init__(default, alias, materialized, readonly, codec)
 
      def to_python(self, value, timezone_in_use):
          if isinstance(value, str):
@@ -556,7 +538,6 @@ class ArrayField(Field):
 
 
  class UUIDField(Field):
-
      class_default = UUID(int=0)
      db_type = "UUID"
 
@@ -579,7 +560,6 @@ class UUIDField(Field):
 
 
  class IPv4Field(Field):
-
      class_default = 0
      db_type = "IPv4"
 
@@ -596,7 +576,6 @@ class IPv4Field(Field):
 
 
  class IPv6Field(Field):
-
      class_default = 0
      db_type = "IPv6"
 
@@ -613,18 +592,17 @@ class IPv6Field(Field):
 
 
  class NullableField(Field):
-
      class_default = None
 
      def __init__(self, inner_field, default=None, alias=None, materialized=None, extra_null_values=None, codec=None):
-         assert isinstance(
-             inner_field, Field
-         ), "The first argument of NullableField must be a Field instance. Not: {}".format(inner_field)
+         assert isinstance(inner_field, Field), (
+             f"The first argument of NullableField must be a Field instance. Not: {inner_field}"
+         )
          self.inner_field = inner_field
          self._null_values = [None]
          if extra_null_values:
              self._null_values.extend(extra_null_values)
-         super(NullableField, self).__init__(default, alias, materialized, readonly=None, codec=codec)
+         super().__init__(default, alias, materialized, readonly=None, codec=codec)
 
      def to_python(self, value, timezone_in_use):
          if value == "\\N" or value in self._null_values:
@@ -648,18 +626,18 @@ class NullableField(Field):
 
  class LowCardinalityField(Field):
      def __init__(self, inner_field, default=None, alias=None, materialized=None, readonly=None, codec=None):
-         assert isinstance(
-             inner_field, Field
-         ), "The first argument of LowCardinalityField must be a Field instance. Not: {}".format(inner_field)
-         assert not isinstance(
-             inner_field, LowCardinalityField
-         ), "LowCardinality inner fields are not supported by the ORM"
-         assert not isinstance(
-             inner_field, ArrayField
-         ), "Array field inside LowCardinality are not supported by the ORM. Use Array(LowCardinality) instead"
+         assert isinstance(inner_field, Field), (
+             f"The first argument of LowCardinalityField must be a Field instance. Not: {inner_field}"
+         )
+         assert not isinstance(inner_field, LowCardinalityField), (
+             "LowCardinality inner fields are not supported by the ORM"
+         )
+         assert not isinstance(inner_field, ArrayField), (
+             "Array field inside LowCardinality are not supported by the ORM. Use Array(LowCardinality) instead"
+         )
          self.inner_field = inner_field
          self.class_default = self.inner_field.class_default
-         super(LowCardinalityField, self).__init__(default, alias, materialized, readonly, codec)
+         super().__init__(default, alias, materialized, readonly, codec)
 
      def to_python(self, value, timezone_in_use):
          return self.inner_field.to_python(value, timezone_in_use)
@@ -676,9 +654,7 @@ class LowCardinalityField(Field):
          else:
              sql = self.inner_field.get_sql(with_default_expression=False)
              logger.warning(
-                 "LowCardinalityField not supported on clickhouse-server version < 19.0 using {} as fallback".format(
-                     self.inner_field.__class__.__name__
-                 )
+                 f"LowCardinalityField not supported on clickhouse-server version < 19.0 using {self.inner_field.__class__.__name__} as fallback"
              )
          if with_default_expression:
              sql += self._extra_params(db)
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  from functools import wraps
  from inspect import Parameter, signature
  from types import FunctionType
@@ -86,7 +88,7 @@ def parametric(func):
      return wrapper
 
 
- class FunctionOperatorsMixin(object):
+ class FunctionOperatorsMixin:
      """
      A mixin for implementing Python operators using F objects.
      """
@@ -186,7 +188,6 @@ class FunctionOperatorsMixin(object):
 
 
  class FMeta(type):
-
      FUNCTION_COMBINATORS = {
          "type_conversion": [
              {"suffix": "OrZero"},
@@ -409,7 +410,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 
      @staticmethod
      def toQuarter(d, timezone=NO_VALUE):
-         return F("toQuarter", d, timezone)
+         return F("toQuarter", d, timezone) if timezone else F("toQuarter", d)
 
      @staticmethod
      def toMonth(d, timezone=NO_VALUE):
@@ -421,7 +422,7 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 
      @staticmethod
      def toISOWeek(d, timezone=NO_VALUE):
-         return F("toISOWeek", d, timezone)
+         return F("toISOWeek", d, timezone) if timezone else F("toISOWeek", d)
 
      @staticmethod
      def toDayOfYear(d, timezone=NO_VALUE):
@@ -509,15 +510,15 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
 
      @staticmethod
      def toYYYYMM(dt, timezone=NO_VALUE):
-         return F("toYYYYMM", dt, timezone)
+         return F("toYYYYMM", dt, timezone) if timezone else F("toYYYYMM", dt)
 
      @staticmethod
      def toYYYYMMDD(dt, timezone=NO_VALUE):
-         return F("toYYYYMMDD", dt, timezone)
+         return F("toYYYYMMDD", dt, timezone) if timezone else F("toYYYYMMDD", dt)
 
      @staticmethod
      def toYYYYMMDDhhmmss(dt, timezone=NO_VALUE):
-         return F("toYYYYMMDDhhmmss", dt, timezone)
+         return F("toYYYYMMDDhhmmss", dt, timezone) if timezone else F("toYYYYMMDDhhmmss", dt)
 
      @staticmethod
      def toRelativeYearNum(d, timezone=NO_VALUE):
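
The effect of the `if timezone` guards, assuming `F.to_sql()` renders the call as in infi.clickhouse_orm: omitting the optional timezone no longer emits a dangling argument:

```python
from clickhouse_orm import F

print(F.toYYYYMM(F.now()).to_sql())         # toYYYYMM(now())
print(F.toYYYYMM(F.now(), "UTC").to_sql())  # toYYYYMM(now(), 'UTC')
```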
@@ -911,8 +912,6 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
      def replace(haystack, pattern, replacement):
          return F("replace", haystack, pattern, replacement)
 
-     replaceAll = replace
-
      @staticmethod
      def replaceAll(haystack, pattern, replacement):
          return F("replaceAll", haystack, pattern, replacement)
@@ -1649,6 +1648,16 @@ class F(Cond, FunctionOperatorsMixin, metaclass=FMeta):
      def varSamp(x):
          return F("varSamp", x)
 
+     @staticmethod
+     @aggregate
+     def stddevPop(expr):
+         return F("stddevPop", expr)
+
+     @staticmethod
+     @aggregate
+     def stddevSamp(expr):
+         return F("stddevSamp", expr)
+
      @staticmethod
      @aggregate
      @parametric
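
The new aggregates slot straight into the README's queryset example; a sketch reusing `CPUStats` and `db` from above:

```python
from clickhouse_orm import F

qs = CPUStats.objects_in(db)
for row in qs.aggregate(CPUStats.cpu_id, sd=F.stddevPop(CPUStats.cpu_percent)):
    print(f"CPU {row.cpu_id}: stddev {row.sd:.2f}%")
```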
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import logging
 
  from .engines import MergeTree
@@ -84,10 +86,12 @@ class AlterTable(ModelOperation):
          is_regular_field = not (field.materialized or field.alias)
          if name not in table_fields:
              logger.info(" Add column %s", name)
-             assert prev_name, "Cannot add a column to the beginning of the table"
              cmd = "ADD COLUMN %s %s" % (name, field.get_sql(db=database))
              if is_regular_field:
-                 cmd += " AFTER %s" % prev_name
+                 if prev_name:
+                     cmd += " AFTER %s" % prev_name
+                 else:
+                     cmd += " FIRST"
              self._alter_table(database, cmd)
 
          if is_regular_field:
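
A minimal sketch of the two commands the new branch can emit (column names are hypothetical); a column with no predecessor is now placed `FIRST` instead of tripping the removed assertion:

```python
def add_column_cmd(name, sql_type, prev_name=None):
    # Mirrors the branch above: AFTER when a predecessor exists, FIRST otherwise
    cmd = "ADD COLUMN %s %s" % (name, sql_type)
    if prev_name:
        cmd += " AFTER %s" % prev_name
    else:
        cmd += " FIRST"
    return cmd

print(add_column_cmd("new_col", "UInt8"))              # ADD COLUMN new_col UInt8 FIRST
print(add_column_cmd("new_col", "UInt8", "timestamp"))  # ADD COLUMN new_col UInt8 AFTER timestamp
```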
@@ -151,18 +155,18 @@ class AlterConstraints(ModelOperation):
      def apply(self, database):
          logger.info(" Alter constraints for %s", self.table_name)
          existing = self._get_constraint_names(database)
-         # Go over constraints in the model
+         no_longer_needed = existing - {c.name for c in self.model_class._constraints.values()}
+         # Drop old constraints first as they can conflict
+         for name in no_longer_needed:
+             logger.info(" Drop constraint %s", name)
+             self._alter_table(database, "DROP CONSTRAINT `%s`" % name)
+
+         # Add any new constraints
          for constraint in self.model_class._constraints.values():
              # Check if it's a new constraint
              if constraint.name not in existing:
                  logger.info(" Add constraint %s", constraint.name)
                  self._alter_table(database, "ADD %s" % constraint.create_table_sql())
-             else:
-                 existing.remove(constraint.name)
-         # Remaining constraints in `existing` are obsolete
-         for name in existing:
-             logger.info(" Drop constraint %s", name)
-             self._alter_table(database, "DROP CONSTRAINT `%s`" % name)
 
      def _get_constraint_names(self, database):
          """
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  import sys
  from collections import OrderedDict
  from itertools import chain
@@ -125,7 +127,6 @@ class ModelBase(type):
      ad_hoc_model_cache = {}
 
      def __new__(metacls, name, bases, attrs):
-
          # Collect fields, constraints and indexes from parent classes
          fields = {}
          constraints = {}
@@ -170,7 +171,7 @@ class ModelBase(type):
              _defaults=defaults,
              _has_funcs_as_defaults=has_funcs_as_defaults,
          )
-         model = super(ModelBase, metacls).__new__(metacls, str(name), bases, attrs)
+         model = super().__new__(metacls, str(name), bases, attrs)
 
          # Let each field, constraint and index know its parent and its own name
          for n, obj in chain(fields, constraints.items(), indexes.items()):
@@ -180,24 +181,24 @@ class ModelBase(type):
          return model
 
      @classmethod
-     def create_ad_hoc_model(metacls, fields, model_name="AdHocModel"):
+     def create_ad_hoc_model(cls, fields, model_name="AdHocModel"):
          # fields is a list of tuples (name, db_type)
          # Check if model exists in cache
          fields = list(fields)
          cache_key = model_name + " " + str(fields)
-         if cache_key in metacls.ad_hoc_model_cache:
-             return metacls.ad_hoc_model_cache[cache_key]
+         if cache_key in cls.ad_hoc_model_cache:
+             return cls.ad_hoc_model_cache[cache_key]
          # Create an ad hoc model class
          attrs = {}
          for name, db_type in fields:
-             attrs[name] = metacls.create_ad_hoc_field(db_type)
-         model_class = metacls.__new__(metacls, model_name, (Model,), attrs)
+             attrs[name] = cls.create_ad_hoc_field(db_type)
+         model_class = cls.__new__(cls, model_name, (Model,), attrs)
          # Add the model class to the cache
-         metacls.ad_hoc_model_cache[cache_key] = model_class
+         cls.ad_hoc_model_cache[cache_key] = model_class
          return model_class
 
      @classmethod
-     def create_ad_hoc_field(metacls, db_type):
+     def create_ad_hoc_field(cls, db_type):
          import clickhouse_orm.fields as orm_fields
 
          # Enums
@@ -215,13 +216,18 @@ class ModelBase(type):
          )
          # Arrays
          if db_type.startswith("Array"):
-             inner_field = metacls.create_ad_hoc_field(db_type[6:-1])
+             inner_field = cls.create_ad_hoc_field(db_type[6:-1])
              return orm_fields.ArrayField(inner_field)
          # Tuples (poor man's version - convert to array)
          if db_type.startswith("Tuple"):
              types = [s.strip() for s in db_type[6:-1].split(",")]
+             # newer versions are essentially "named tuples"
+             if any(" " in t for t in types):
+                 assert all(" " in t for t in types), "Either all or none of the tuple types must be named - " + db_type
+                 types = [t.split(" ", 1)[1] for t in types]
+
              assert len(set(types)) == 1, "No support for mixed types in tuples - " + db_type
-             inner_field = metacls.create_ad_hoc_field(types[0])
+             inner_field = cls.create_ad_hoc_field(types[0])
              return orm_fields.ArrayField(inner_field)
          # FixedString
          if db_type.startswith("FixedString"):
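
How the named-tuple branch normalises a newer ClickHouse type string; a plain-Python walk-through of the snippet above:

```python
db_type = "Tuple(a UInt32, b UInt32)"
types = [s.strip() for s in db_type[6:-1].split(",")]  # ['a UInt32', 'b UInt32']
if any(" " in t for t in types):
    # strip the names, keeping only the types
    types = [t.split(" ", 1)[1] for t in types]        # ['UInt32', 'UInt32']
print(types)
```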
@@ -235,11 +241,11 @@ class ModelBase(type):
              return field_class(*args)
          # Nullable
          if db_type.startswith("Nullable"):
-             inner_field = metacls.create_ad_hoc_field(db_type[9:-1])
+             inner_field = cls.create_ad_hoc_field(db_type[9:-1])
              return orm_fields.NullableField(inner_field)
          # LowCardinality
          if db_type.startswith("LowCardinality"):
-             inner_field = metacls.create_ad_hoc_field(db_type[15:-1])
+             inner_field = cls.create_ad_hoc_field(db_type[15:-1])
              return orm_fields.LowCardinalityField(inner_field)
          # Simple fields
          name = db_type + "Field"
@@ -276,7 +282,7 @@ class Model(metaclass=ModelBase):
          invalid values will cause a `ValueError` to be raised.
          Unrecognized field names will cause an `AttributeError`.
          """
-         super(Model, self).__init__()
+         super().__init__()
          # Assign default values
          self.__dict__.update(self._defaults)
          # Assign field values from keyword arguments
@@ -299,9 +305,9 @@ class Model(metaclass=ModelBase):
                  field.validate(value)
              except ValueError:
                  tp, v, tb = sys.exc_info()
-                 new_msg = "{} (field '{}')".format(v, name)
+                 new_msg = f"{v} (field '{name}')"
                  raise tp.with_traceback(tp(new_msg), tb)
-         super(Model, self).__setattr__(name, value)
+         super().__setattr__(name, value)
 
      def set_database(self, db):
          """
@@ -535,7 +541,7 @@ class DistributedModel(Model):
          This is done automatically when the instance is read from the database or written to it.
          """
          assert isinstance(self.engine, Distributed), "engine must be an instance of engines.Distributed"
-         res = super(DistributedModel, self).set_database(db)
+         res = super().set_database(db)
          return res
 
      @classmethod
@@ -579,7 +585,7 @@ class DistributedModel(Model):
          storage_models = [b for b in cls.__bases__ if issubclass(b, Model) and not issubclass(b, DistributedModel)]
          if not storage_models:
              raise TypeError(
-                 "When defining Distributed engine without the table_name " "ensure that your model has a parent model"
+                 "When defining Distributed engine without the table_name ensure that your model has a parent model"
              )
 
          if len(storage_models) > 1:
@@ -601,9 +607,7 @@ class DistributedModel(Model):
          cls.fix_engine_table()
 
          parts = [
-             "CREATE TABLE IF NOT EXISTS `{0}`.`{1}` AS `{0}`.`{2}`".format(
-                 db.db_name, cls.table_name(), cls.engine.table_name
-             ),
+             f"CREATE TABLE IF NOT EXISTS `{db.db_name}`.`{cls.table_name()}` AS `{db.db_name}`.`{cls.engine.table_name}`",
              "ENGINE = " + cls.engine.create_table_sql(db),
          ]
          return "\n".join(parts)
@@ -1,3 +1,5 @@
+ from __future__ import annotations
+
  from copy import copy, deepcopy
  from math import ceil
 
@@ -10,7 +12,7 @@ from .utils import Page, arg_to_sql, comma_join, string_or_func
  # - check that field names are valid
 
 
- class Operator(object):
+ class Operator:
      """
      Base class for filtering operators.
      """
@@ -161,7 +163,7 @@ register_operator("iendswith", LikeOperator("%{}", False))
  register_operator("iexact", IExactOperator())
 
 
- class Cond(object):
+ class Cond:
      """
      An abstract object for storing a single query condition Field + Operator + Value.
      """
@@ -193,8 +195,7 @@ class FieldCond(Cond):
          return res
 
 
- class Q(object):
-
+ class Q:
      AND_MODE = "AND"
      OR_MODE = "OR"
 
@@ -217,7 +218,6 @@ class Q(object):
          if mode == l_child._mode and not l_child._negate:
              q = deepcopy(l_child)
              q._children.append(deepcopy(r_child))
-
          else:
              q = cls()
              q._children = [l_child, r_child]
@@ -249,7 +249,7 @@ class Q(object):
              sql = condition_sql[0]
          else:
              # Each condition must be enclosed in brackets, or order of operations may be wrong
-             sql = "(%s)" % ") {} (".format(self._mode).join(condition_sql)
+             sql = "(%s)" % f") {self._mode} (".join(condition_sql)
 
          if self._negate:
              sql = "NOT (%s)" % sql
@@ -288,7 +288,7 @@ class Q(object):
          return q
 
 
- class QuerySet(object):
+ class QuerySet:
      """
      A queryset is an object that represents a database query using a specific `Model`.
      It is lazy, meaning that it does not hit the database until you iterate over its
@@ -300,6 +300,7 @@ class QuerySet(object):
          Initializer. It is possible to create a queryset like this, but the standard
          way is to use `MyModel.objects_in(database)`.
          """
+         self.model = model_cls
          self._model_cls = model_cls
          self._database = database
          self._order_by = []
@@ -343,7 +344,7 @@ class QuerySet(object):
          # Slice
          assert s.step in (None, 1), "step is not supported in slices"
          start = s.start or 0
-         stop = s.stop or 2 ** 63 - 1
+         stop = s.stop or 2**63 - 1
          assert start >= 0 and stop >= 0, "negative indexes are not supported"
          assert start <= stop, "start of slice cannot be smaller than its end"
          qs = copy(self)
@@ -626,7 +627,7 @@ class AggregateQuerySet(QuerySet):
          ```
          At least one calculated field is required.
          """
-         super(AggregateQuerySet, self).__init__(base_qs._model_cls, base_qs._database)
+         super().__init__(base_qs._model_cls, base_qs._database)
          assert calculated_fields, "No calculated fields specified for aggregation"
          self._fields = grouping_fields
          self._grouping_fields = grouping_fields
@@ -2,6 +2,9 @@
  This file contains system readonly models that can be got from the database
  https://clickhouse.tech/docs/en/system_tables/
  """
+
+ from __future__ import annotations
+
  from .database import Database
  from .fields import DateTimeField, StringField, UInt8Field, UInt32Field, UInt64Field
  from .models import Model
@@ -1,15 +1,27 @@
+ from __future__ import annotations
+
  import codecs
  import importlib
  import pkgutil
  import re
- from collections import namedtuple
  from datetime import date, datetime, timedelta, tzinfo
  from inspect import isclass
- from types import ModuleType
- from typing import Any, Dict, Iterable, List, Optional, Type, Union
+ from typing import TYPE_CHECKING, NamedTuple
+
+ if TYPE_CHECKING:
+     from collections.abc import Iterable
+     from types import ModuleType
+     from typing import Any
+
+
+ class Page(NamedTuple):
+     """A simple data structure for paginated results."""
 
- Page = namedtuple("Page", "objects number_of_objects pages_total number page_size")
- Page.__doc__ += "\nA simple data structure for paginated results."
+     objects: list[Any]
+     number_of_objects: int
+     pages_total: int
+     number: int
+     page_size: int
 
 
  def escape(value: str, quote: bool = True) -> str:
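
`Page` keeps the same shape as the old namedtuple, so both attribute and index access still work; a quick check:

```python
from clickhouse_orm.utils import Page

page = Page(objects=["a", "b"], number_of_objects=2, pages_total=1, number=1, page_size=100)
print(page.pages_total, page[2])  # 1 1
```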
@@ -25,7 +37,7 @@ def escape(value: str, quote: bool = True) -> str:
      return value
 
 
- def unescape(value: str) -> Optional[str]:
+ def unescape(value: str) -> str | None:
      if value == "\\N":
          return None
      return codecs.escape_decode(value)[0].decode("utf-8")
@@ -70,7 +82,7 @@ def arg_to_sql(arg: Any) -> str:
      return str(arg)
 
 
- def parse_tsv(line: Union[bytes, str]) -> List[str]:
+ def parse_tsv(line: bytes | str) -> list[str]:
      if isinstance(line, bytes):
          line = line.decode()
      if line and line[-1] == "\n":
@@ -78,7 +90,7 @@ def parse_tsv(line: bytes | str) -> list[str]:
      return [unescape(value) for value in line.split("\t")]
 
 
- def parse_array(array_string: str) -> List[Any]:
+ def parse_array(array_string: str) -> list[Any]:
      """
      Parse an array or tuple string as returned by clickhouse. For example:
      "['hello', 'world']" ==> ["hello", "world"]
@@ -112,7 +124,7 @@ def parse_array(array_string: str) -> list[Any]:
      array_string = array_string[match.end() - 1 :]
 
 
- def import_submodules(package_name: str) -> Dict[str, ModuleType]:
+ def import_submodules(package_name: str) -> dict[str, ModuleType]:
      """
      Import all submodules of a module.
      """
@@ -141,7 +153,7 @@ def is_iterable(obj: Any) -> bool:
      return False
 
 
- def get_subclass_names(locals: Dict[str, Any], base_class: Type):
+ def get_subclass_names(locals: dict[str, Any], base_class: type):
      return [c.__name__ for c in locals.values() if isclass(c) and issubclass(c, base_class)]
 
 
@@ -0,0 +1,95 @@
+ [build-system]
+ requires = ["flit_core >=3.2,<4"]
+ build-backend = "flit_core.buildapi"
+
+ [project]
+ name = "clickhouse_orm"
+ version = "3.0.1"
+ readme = "README.md"
+ description = "A simple ORM for working with the Clickhouse database. Maintenance fork of infi.clickhouse_orm."
+ authors = [
+     {name = "Oliver Margetts", email = "oliver.margetts@gmail.com"}
+ ]
+ license = { text = "BSD-3-Clause" }
+ classifiers = [
+     "Intended Audience :: Developers",
+     "Intended Audience :: System Administrators",
+     "License :: OSI Approved :: BSD License",
+     "Operating System :: OS Independent",
+     "Programming Language :: Python",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Programming Language :: Python :: 3.13",
+     "Programming Language :: Python :: 3.14",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+     "Topic :: Database",
+ ]
+ # requires_python = ">=3.11"
+ dependencies = [
+     "requests",
+     "pytz",
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "docker==7.1.0",
+     "pytest==9.0.2",
+     "ruff==0.14.14",
+ ]
+
+ [project.urls]
+ Homepage = "https://github.com/SuadeLabs/clickhouse_orm"
+ Repository = "https://github.com/SuadeLabs/clickhouse_orm"
+
+ [tool.ruff]
+ line-length = 120
+ target-version = "py311"
+ # File Selection
+ force-exclude = true  # don't check excluded files even if passed directly
+ extend-exclude = ["./venv"]
+
+ [tool.ruff.lint]
+ # Rule Selection
+ # to read about ruff rules check this: https://beta.ruff.rs/docs/rules/
+ select = [
+     "E",
+     "W",  # pycodestyle: E, W
+     "F",  # pyflakes: F
+     "B",  # flake8-bugbear: B
+     "I",  # isort: I
+     "ISC",  # flake8-implicit-str-concat: ISC
+     "N",  # pep8-naming: N
+     "PYI",  # flake8-pyi: PYI
+     "RUF013",  # ruff: RUF (Specifically implicit-optional)
+     "RUF022",  # unsorted-dunder-all: https://docs.astral.sh/ruff/rules/unsorted-dunder-all/
+     "RUF023",  # unsorted-dunder-slots: https://docs.astral.sh/ruff/rules/unsorted-dunder-slots/
+     "RUF101",  # redirected-noqa: https://docs.astral.sh/ruff/rules/redirected-noqa/
+     "T10",  # flake8-debugger
+     "TC",  # flake8-type-checking
+     "UP",  # pyupgrade: U
+ ]
+
+ ignore = [
+     "B904",  # raising without from clause
+     "B905",  # zip without strict parameter
+     "E501",  # line-too-long
+     "F403",  # from module import *
+     "F405",  # name defined from star imports
+     "N802",  # function names often mirror clickhouse function names
+     "N806",  # dynamically created classes
+     "N999",  # migration module names: 1234.py
+     "UP031",  # percent formatting
+ ]
+
+ [tool.ruff.lint.isort]
+ required-imports = ["from __future__ import annotations"]
+ relative-imports-order = "closest-to-furthest"
+ combine-as-imports = true
+ split-on-trailing-comma = false
+ section-order = [
+     "future",
+     "standard-library",
+     "third-party",
+     "first-party",
+     "local-folder",
+ ]
@@ -1,26 +0,0 @@
- Metadata-Version: 2.1
- Name: clickhouse-orm
- Version: 2.2.2
- Summary: A simple ORM for working with the Clickhouse database. Maintainance fork of infi.clickhouse_orm.
- Home-page: https://github.com/SuadeLabs/clickhouse_orm
- License: BSD
- Author: olliemath
- Author-email: oliver.margetts@gmail.com
- Requires-Python: >=3.6.2,<4
- Classifier: Intended Audience :: Developers
- Classifier: Intended Audience :: System Administrators
- Classifier: License :: OSI Approved :: BSD License
- Classifier: License :: Other/Proprietary License
- Classifier: Operating System :: OS Independent
- Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.6
- Classifier: Programming Language :: Python :: 3.7
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Topic :: Database
- Classifier: Topic :: Software Development :: Libraries :: Python Modules
- Requires-Dist: iso8601
- Requires-Dist: pytz
- Requires-Dist: requests
- Project-URL: Repository, https://github.com/SuadeLabs/clickhouse_orm
@@ -1,52 +0,0 @@
- [tool.black]
- line-length = 120
-
- [tool.isort]
- multi_line_output = 3
- include_trailing_comma = true
- force_grid_wrap = 0
- use_parentheses = true
- ensure_newline_before_comments = true
- line_length = 120
-
- [tool.poetry]
- name = "clickhouse_orm"
- version = "2.2.2"
- description = "A simple ORM for working with the Clickhouse database. Maintainance fork of infi.clickhouse_orm."
- authors = ["olliemath <oliver.margetts@gmail.com>"]
- license = "BSD"
- homepage = "https://github.com/SuadeLabs/clickhouse_orm"
- repository = "https://github.com/SuadeLabs/clickhouse_orm"
- classifiers = [
-     "Intended Audience :: Developers",
-     "Intended Audience :: System Administrators",
-     "License :: OSI Approved :: BSD License",
-     "Operating System :: OS Independent",
-     "Programming Language :: Python",
-     "Programming Language :: Python :: 3.6",
-     "Programming Language :: Python :: 3.7",
-     "Programming Language :: Python :: 3.8",
-     "Programming Language :: Python :: 3.9",
-     "Topic :: Software Development :: Libraries :: Python Modules",
-     "Topic :: Database"
- ]
-
- [tool.poetry.dependencies]
- python = ">=3.6.2,<4"
- requests = "*"
- pytz = "*"
- iso8601 = "*"
-
- [tool.poetry.dev-dependencies]
- flake8 = "^3.9.2"
- flake8-bugbear = "^21.4.3"
- pep8-naming = "^0.12.0"
- pytest = "^6.2.4"
- flake8-isort = "^4.0.0"
- black = {version = "^21.7b0", markers = "platform_python_implementation == 'CPython'"}
- isort = "^5.9.2"
- freezegun = "^1.1.0"
-
- [build-system]
- requires = ["poetry-core>=1.0.0"]
- build-backend = "poetry.core.masonry.api"
@@ -1,30 +0,0 @@
- # -*- coding: utf-8 -*-
- from setuptools import setup
-
- packages = \
- ['clickhouse_orm']
-
- package_data = \
- {'': ['*']}
-
- install_requires = \
- ['iso8601', 'pytz', 'requests']
-
- setup_kwargs = {
-     'name': 'clickhouse-orm',
-     'version': '2.2.2',
-     'description': 'A simple ORM for working with the Clickhouse database. Maintainance fork of infi.clickhouse_orm.',
-     'long_description': None,
-     'author': 'olliemath',
-     'author_email': 'oliver.margetts@gmail.com',
-     'maintainer': None,
-     'maintainer_email': None,
-     'url': 'https://github.com/SuadeLabs/clickhouse_orm',
-     'packages': packages,
-     'package_data': package_data,
-     'install_requires': install_requires,
-     'python_requires': '>=3.6.2,<4',
- }
-
-
- setup(**setup_kwargs)