mage-ai 0.9.76__py3-none-any.whl → 0.9.77__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mage-ai has been flagged as possibly problematic.
Files changed (37)
  1. mage_ai/api/resources/GitFileResource.py +8 -0
  2. mage_ai/cli/main.py +6 -1
  3. mage_ai/data_preparation/executors/block_executor.py +8 -3
  4. mage_ai/data_preparation/executors/pipeline_executor.py +35 -19
  5. mage_ai/data_preparation/models/block/__init__.py +29 -22
  6. mage_ai/data_preparation/models/block/outputs.py +1 -1
  7. mage_ai/data_preparation/models/constants.py +2 -0
  8. mage_ai/data_preparation/storage/local_storage.py +4 -1
  9. mage_ai/io/config.py +1 -0
  10. mage_ai/io/mssql.py +16 -9
  11. mage_ai/io/postgres.py +3 -0
  12. mage_ai/orchestration/db/migrations/versions/39d36f1dab73_create_genericjob.py +47 -0
  13. mage_ai/orchestration/db/models/oauth.py +2 -1
  14. mage_ai/orchestration/db/models/schedules.py +105 -0
  15. mage_ai/orchestration/job_manager.py +19 -0
  16. mage_ai/orchestration/notification/sender.py +2 -2
  17. mage_ai/orchestration/pipeline_scheduler_original.py +146 -1
  18. mage_ai/orchestration/queue/config.py +11 -1
  19. mage_ai/orchestration/queue/process_queue.py +2 -0
  20. mage_ai/server/api/base.py +41 -0
  21. mage_ai/server/api/constants.py +1 -0
  22. mage_ai/server/constants.py +1 -1
  23. mage_ai/server/scheduler_manager.py +2 -0
  24. mage_ai/server/terminal_server.py +3 -0
  25. mage_ai/settings/server.py +2 -0
  26. mage_ai/streaming/sources/kafka.py +2 -1
  27. mage_ai/tests/data_preparation/executors/test_block_executor.py +3 -3
  28. mage_ai/tests/data_preparation/models/test_variable.py +2 -0
  29. mage_ai/tests/io/create_table/test_postgresql.py +3 -2
  30. mage_ai/tests/orchestration/notification/test_sender.py +5 -1
  31. mage_ai/tests/streaming/sources/test_kafka.py +2 -2
  32. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/METADATA +70 -107
  33. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/RECORD +37 -36
  34. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/WHEEL +1 -1
  35. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/entry_points.txt +0 -0
  36. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/licenses/LICENSE +0 -0
  37. {mage_ai-0.9.76.dist-info → mage_ai-0.9.77.dist-info}/top_level.txt +0 -0
mage_ai/orchestration/pipeline_scheduler_original.py

@@ -32,6 +32,7 @@ from mage_ai.orchestration.db.models.schedules import (
     Backfill,
     BlockRun,
     EventMatcher,
+    GenericJob,
     PipelineRun,
     PipelineSchedule,
 )
@@ -928,6 +929,81 @@ class PipelineScheduler:
             self.memory_usage_failure(tags=tags)


+def on_pipeline_run_cancelled(
+    job_id: int,
+    pipeline_run_id: int,
+    cancelled_block_run_ids: List,
+):
+    job = GenericJob.query.get(job_id)
+    if job.status not in [
+        GenericJob.JobStatus.INITIAL,
+        GenericJob.JobStatus.QUEUED,
+        GenericJob.JobStatus.RUNNING,
+    ]:
+        return
+    job.mark_running()
+
+    if not pipeline_run_id:
+        job.mark_failed(
+            metadata=dict(
+                error_message='Pipeline run id is not provided in the job.',
+            ),
+        )
+        return
+    if not cancelled_block_run_ids:
+        job.mark_failed(
+            metadata=dict(
+                error_message='Cancelled block run ids are not provided in the job.',
+            ),
+        )
+        return
+
+    try:
+        # Run callback blocks for cancelled block runs
+        pipeline_run = PipelineRun.query.get(pipeline_run_id)
+        if not pipeline_run:
+            raise Exception(f'Fail to retrieve pipeline run with id {pipeline_run_id}')
+        pipeline_schedule = pipeline_run.pipeline_schedule
+        pipeline = get_pipeline_from_platform(
+            pipeline_run.pipeline_uuid,
+            repo_path=pipeline_schedule.repo_path if pipeline_schedule else None,
+        )
+        if pipeline_run.status != PipelineRun.PipelineRunStatus.CANCELLED:
+            job.mark_failed(
+                metadata=dict(
+                    error_message=f'Pipeline run {pipeline_run_id} is not in cancelled status.',
+                ),
+            )
+            return
+        for block_run_id in cancelled_block_run_ids:
+            block_run = BlockRun.query.get(block_run_id)
+            if not block_run:
+                continue
+            if block_run.status != BlockRun.BlockRunStatus.CANCELLED:
+                continue
+            ExecutorFactory.get_block_executor(
+                pipeline,
+                block_run.block_uuid,
+                block_run_id=block_run.id,
+                execution_partition=pipeline_run.execution_partition,
+                executor_type=ExecutorType.LOCAL_PYTHON,
+            ).execute_callback(
+                callback='on_cancelled',
+                global_vars=pipeline_run.get_variables(),
+                logging_tags=None,
+                pipeline_run=pipeline_run,
+                block_run_id=block_run_id,
+            )
+    except Exception as e:
+        job.mark_failed(
+            metadata=dict(
+                error_message=f'Failed to run pipeline run cancellation hook: {e}.',
+            ),
+        )
+        return
+    job.mark_completed()
+
+
 def run_integration_streams(
     streams: List[Dict],
     *args,
@@ -1406,8 +1482,9 @@ def cancel_block_runs_and_jobs(
            block_runs_to_cancel.append(b)
            if b.status == BlockRun.BlockRunStatus.RUNNING:
                running_blocks.append(b)
+    cancelled_block_run_ids = [b.id for b in block_runs_to_cancel]
     BlockRun.batch_update_status(
-        [b.id for b in block_runs_to_cancel],
+        cancelled_block_run_ids,
         BlockRun.BlockRunStatus.CANCELLED,
     )

@@ -1435,6 +1512,18 @@ def cancel_block_runs_and_jobs(
        for b in running_blocks:
            job_manager.kill_block_run_job(b.id)

+    GenericJob.enqueue_cancel_pipeline_run(pipeline_run.id, cancelled_block_run_ids)
+
+
+def kill_cancelled_runs(pipeline_run_id: int, cancelled_block_run_ids: List):
+    job_manager = get_job_manager()
+    if pipeline_run_id and job_manager.has_pipeline_run_job(pipeline_run_id):
+        job_manager.kill_pipeline_run_job(pipeline_run_id)
+    if cancelled_block_run_ids:
+        for block_run_id in cancelled_block_run_ids:
+            if job_manager.has_block_run_job(block_run_id):
+                job_manager.kill_block_run_job(block_run_id)
+

 def check_sla():
     repo_pipelines = set(
@@ -1732,6 +1821,13 @@ def schedule_all():
            logger.exception(f'Failed to schedule {r}')
            traceback.print_exc()
            continue
+
+    try:
+        schedule_generic_jobs()
+    except Exception as e:
+        logger.exception(f'Failed to schedule generic jobs {e}')
+        traceback.print_exc()
+
     job_manager = get_job_manager()
     if job_manager is None:
         logger.info('Job manager is None.')
@@ -1912,3 +2008,52 @@ def sync_schedules(pipeline_uuids: List[str]):
         trigger_configs.append(pipeline_trigger)

     PipelineSchedule.create_or_update_batch(trigger_configs)
+
+
+def schedule_generic_jobs():
+    """
+    Schedule generic jobs that are in INITIAL status.
+
+    This method schedules jobs like cancel_pipeline_run that have been enqueued
+    by the server process and are waiting to be executed by the job manager.
+    """
+    try:
+        # Get all jobs with INITIAL status across all repositories
+        jobs = GenericJob.get_jobs_with_initial_status()
+        job_manager = get_job_manager()
+
+        if not jobs:
+            return
+
+        logger.info(f'Scheduling {len(jobs)} generic jobs')
+
+        # Schedule each job with the job manager
+        for job in jobs:
+            try:
+                if job.job_type == GenericJob.JobType.CANCEL_PIPELINE_RUN:
+                    pipeline_run_id = job.payload.get('pipeline_run_id')
+                    cancelled_block_run_ids = job.payload.get('cancelled_block_run_ids')
+                    try:
+                        kill_cancelled_runs(pipeline_run_id, cancelled_block_run_ids)
+                    except Exception:
+                        pass
+                    # Add job to job manager for execution
+                    job_manager.add_job(
+                        JobType.GENERIC_JOB,
+                        job.id,
+                        on_pipeline_run_cancelled,
+                        # args
+                        job.id,
+                        pipeline_run_id,
+                        cancelled_block_run_ids,
+                    )
+                    job.mark_queued()
+                    logger.info(f'Scheduled generic job {job.id} ({job.job_type})')
+                else:
+                    raise ValueError(f"Unknown job type: {job.job_type}")
+            except Exception as e:
+                error_message = f"Failed to schedule generic job: {str(e)}"
+                logger.exception(f'Failed to schedule generic job {job.id}: {error_message}')
+                job.mark_failed({'error': error_message})
+    except Exception as e:
+        logger.exception(f'Failed to schedule generic jobs: {e}')
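
Taken together, the scheduler changes above introduce a GenericJob-based cancellation flow. Below is a rough, non-runnable sketch of the lifecycle, using only names that appear in this diff (all surrounding setup is elided):

```python
# Sketch of the cancellation lifecycle introduced in this release.

# 1. The server process cancels a pipeline run and enqueues a job instead of
#    running cancellation callbacks inline:
GenericJob.enqueue_cancel_pipeline_run(pipeline_run.id, cancelled_block_run_ids)

# 2. On each scheduler tick, schedule_all() calls schedule_generic_jobs(),
#    which kills any still-running processes and queues the hook:
kill_cancelled_runs(pipeline_run_id, cancelled_block_run_ids)
job_manager.add_job(
    JobType.GENERIC_JOB,
    job.id,
    on_pipeline_run_cancelled,
    job.id,
    pipeline_run_id,
    cancelled_block_run_ids,
)

# 3. The job manager executes on_pipeline_run_cancelled, which runs the
#    'on_cancelled' callback for every block run left in CANCELLED status.
```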
mage_ai/orchestration/queue/config.py

@@ -1,8 +1,11 @@
+import traceback
 from dataclasses import dataclass

 from mage_ai.shared.config import BaseConfig
 from mage_ai.shared.enum import StrEnum

+DEFAULT_CONCURRENCY = 20
+

 class QueueType(StrEnum):
     CELERY = 'celery'
@@ -17,5 +20,12 @@ class ProcessQueueConfig(BaseConfig):
 @dataclass
 class QueueConfig(BaseConfig):
     queue_type: QueueType = QueueType.PROCESS
-    concurrency: int = 20
+    concurrency: int = DEFAULT_CONCURRENCY
     process_queue_config: ProcessQueueConfig = None
+
+    def __post_init__(self):
+        try:
+            self.concurrency = int(self.concurrency)
+        except Exception:
+            traceback.print_exc()
+            self.concurrency = DEFAULT_CONCURRENCY
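
The new `__post_init__` makes `concurrency` robust to string values, which is how the setting often arrives when loaded from YAML or an environment variable. A minimal sketch of the behavior, assuming the dataclass can be constructed directly (rather than via `BaseConfig` loading):

```python
from mage_ai.orchestration.queue.config import QueueConfig

# A string value is coerced to int by __post_init__.
config = QueueConfig(concurrency='40')
assert config.concurrency == 40

# An unparseable value falls back to DEFAULT_CONCURRENCY (20)
# after printing the traceback.
config = QueueConfig(concurrency='forty')
assert config.concurrency == 20
```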
mage_ai/orchestration/queue/process_queue.py

@@ -19,6 +19,7 @@ from mage_ai.settings import (
     HOSTNAME,
     REDIS_URL,
     SENTRY_DSN,
+    SENTRY_SERVER_NAME,
     SENTRY_TRACES_SAMPLE_RATE,
     SERVER_LOGGING_FORMAT,
     SERVER_VERBOSITY,
@@ -287,6 +288,7 @@ class Worker(mp.Process):
            sentry_sdk.init(
                self.dsn,
                traces_sample_rate=SENTRY_TRACES_SAMPLE_RATE,
+                server_name=SENTRY_SERVER_NAME,
            )
            initialize_new_relic()

mage_ai/server/api/base.py

@@ -1,12 +1,15 @@
 import asyncio
 import json
+import re
 import traceback
+import urllib.parse

 import dateutil.parser
 import simplejson
 import tornado.web

 from mage_ai.api.middleware import OAuthMiddleware
+from mage_ai.server.api.constants import PATH_TRAVERSAL_PATTERN
 from mage_ai.shared.parsers import encode_complex
 from mage_ai.shared.strings import camel_to_snake_case
 from mage_ai.usage_statistics.constants import EventNameType
@@ -127,11 +130,49 @@ class BaseApiHandler(BaseHandler, OAuthMiddleware):
         super().initialize(**kwargs)
         self.is_health_check = kwargs.get('is_health_check', False)

+    def is_safe_path(self, user_input):
+        """
+        Check if the user input is safe and doesn't contain path traversal sequences.
+        """
+        return re.match(PATH_TRAVERSAL_PATTERN, user_input) is not None
+
     def prepare(self):
         from mage_ai.server.server import latest_user_activity

         if not self.is_health_check:
             latest_user_activity.update_latest_activity()
+
+        # Validate the request path by decoding from bytes to string if necessary
+        decoded_path = self.request.path
+        if isinstance(decoded_path, bytes):
+            decoded_path = decoded_path.decode('utf-8')  # Decode only if it's a bytes object
+
+        decoded_path = urllib.parse.unquote(decoded_path)  # Decode URL-encoded characters
+        if not self.is_safe_path(decoded_path):
+            self.set_status(400)  # Bad Request
+            self.write("Error: Invalid path (path traversal detected)")
+            self.finish()
+            return
+
+        # Validate query parameters (if any)
+        for key, value in self.request.arguments.items():
+            # Decode each key and value from bytes to string if necessary
+            decoded_key = key
+            if isinstance(decoded_key, bytes):
+                decoded_key = decoded_key.decode('utf-8')
+
+            decoded_value = value[0]
+            if isinstance(decoded_value, bytes):
+                decoded_value = decoded_value.decode('utf-8')
+
+            decoded_value = urllib.parse.unquote(decoded_value)  # Decode URL-encoded characters
+            if not self.is_safe_path(decoded_value):
+                self.set_status(400)
+                self.write(
+                    f"Error: Invalid parameter value for '{decoded_key}' (path traversal detected)"
+                )
+                self.finish()
+                return
         super().prepare()

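The handler unquotes the path and parameter values before validating them because URL-encoded traversal sequences only become visible after decoding. For example:

```python
import urllib.parse

# '%2e%2e%2f' is '../' URL-encoded; the raw path contains no literal '..',
# so the safety check has to run on the decoded form.
encoded = '/api/files/%2e%2e%2f%2e%2e%2fetc/passwd'  # hypothetical request path
decoded = urllib.parse.unquote(encoded)
assert decoded == '/api/files/../../etc/passwd'
```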
mage_ai/server/api/constants.py

@@ -3,4 +3,5 @@ ENDPOINTS_BYPASS_OAUTH_CHECK = []
 HEADER_API_KEY = 'X-API-KEY'
 HEADER_OAUTH_TOKEN = 'OAUTH-TOKEN'
 OAUTH_CLIENT_ID_KEY = 'OAUTH_CLIENT_ID'
+PATH_TRAVERSAL_PATTERN = r"^(?!.*(\.\.\/|\.\.\\)).*$"
 URL_PARAMETER_API_KEY = 'api_key'
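
The pattern relies on a negative lookahead: a string matches only if it contains neither `../` nor `..\`. A quick check of its behavior:

```python
import re

PATH_TRAVERSAL_PATTERN = r"^(?!.*(\.\.\/|\.\.\\)).*$"

# A normal path matches (no traversal sequence anywhere in the string).
assert re.match(PATH_TRAVERSAL_PATTERN, '/api/pipelines/example') is not None

# Any occurrence of '../' (or '..\') fails the negative lookahead.
assert re.match(PATH_TRAVERSAL_PATTERN, '/api/files/../../etc/passwd') is None
```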
mage_ai/server/constants.py

@@ -12,4 +12,4 @@ DATAFRAME_OUTPUT_SAMPLE_COUNT = 10
 # Dockerfile depends on it because it runs ./scripts/install_mage.sh and uses
 # the last line to determine the version to install.
 VERSION = \
-    '0.9.76'
+    '0.9.77'
mage_ai/server/scheduler_manager.py

@@ -12,6 +12,7 @@ from mage_ai.server.logger import Logger
 from mage_ai.services.newrelic import initialize_new_relic
 from mage_ai.settings import (
     SENTRY_DSN,
+    SENTRY_SERVER_NAME,
     SENTRY_TRACES_SAMPLE_RATE,
     SERVER_LOGGING_FORMAT,
     SERVER_VERBOSITY,
@@ -35,6 +36,7 @@ def run_scheduler():
        sentry_sdk.init(
            sentry_dsn,
            traces_sample_rate=SENTRY_TRACES_SAMPLE_RATE,
+            server_name=SENTRY_SERVER_NAME,
        )
     (enable_new_relic, application) = initialize_new_relic()
     try:
mage_ai/server/terminal_server.py

@@ -126,6 +126,9 @@ class TerminalWebsocketServer(terminado.TermSocket):
                    Entity.PROJECT,
                    get_project_uuid(),
                )
+            else:
+                # If the user is deleted, set valid to False
+                valid = False
            if not valid or is_disable_pipeline_edit_access():
                return self.send_json_message(
                    ['stdout', f'{command[1]}\nUnauthorized access to the terminal.'])
mage_ai/settings/server.py

@@ -154,6 +154,7 @@ USE_UNIQUE_TERMINAL = os.getenv('USE_UNIQUE_TERMINAL', None)
 # Sentry Configuration
 SENTRY_DSN = os.getenv('SENTRY_DSN')
 SENTRY_TRACES_SAMPLE_RATE = os.getenv('SENTRY_TRACES_SAMPLE_RATE', 1.0)
+SENTRY_SERVER_NAME = os.getenv('SENTRY_SERVER_NAME', None)

 # New Relic Configuration
 ENABLE_NEW_RELIC = os.getenv('ENABLE_NEW_RELIC', False)
@@ -265,6 +266,7 @@ MAGE_SETTINGS_ENVIRONMENT_VARIABLES = [
     'USE_UNIQUE_TERMINAL',
     'SENTRY_DSN',
     'SENTRY_TRACES_SAMPLE_RATE',
+    'SENTRY_SERVER_NAME',
     'MAGE_PUBLIC_HOST',
     'SCHEDULER_TRIGGER_INTERVAL',
     'REQUIRE_USER_PERMISSIONS',
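
The new `SENTRY_SERVER_NAME` setting is threaded through to `sentry_sdk.init` in both the scheduler and the process-queue workers, so events from different hosts can be told apart. A minimal sketch of the effect (DSN and name below are placeholders):

```python
import os
import sentry_sdk

# Placeholder values; in Mage these come from the environment.
os.environ['SENTRY_SERVER_NAME'] = 'mage-scheduler-1'

sentry_sdk.init(
    'https://examplePublicKey@o0.ingest.sentry.io/0',  # placeholder DSN
    traces_sample_rate=1.0,
    server_name=os.getenv('SENTRY_SERVER_NAME'),  # tags every event with this name
)
```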
mage_ai/streaming/sources/kafka.py

@@ -91,7 +91,7 @@ class KafkaSource(BaseSource):
            api_version=self.config.api_version,
            auto_offset_reset=self.config.auto_offset_reset,
            max_partition_fetch_bytes=self.config.max_partition_fetch_bytes,
-            enable_auto_commit=True,
+            enable_auto_commit=False,
        )
        if self.config.security_protocol == SecurityProtocol.SSL:
            consumer_kwargs['security_protocol'] = SecurityProtocol.SSL
@@ -270,6 +270,7 @@ class KafkaSource(BaseSource):
                message_values.append(message)
            if len(message_values) > 0:
                handler(message_values)
+                self.consumer.commit()

    def test_connection(self):
        self.consumer._client.check_version(timeout=5)
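
Disabling auto-commit and committing only after `handler(message_values)` returns moves the source to at-least-once delivery: offsets advance only for batches that were actually processed, so a crash mid-batch replays messages instead of losing them. A standalone sketch of the same pattern with kafka-python (topic, broker, group, and handler are placeholders, not Mage code):

```python
from kafka import KafkaConsumer  # kafka-python


def handle_batch(values):  # placeholder handler
    print(values)


consumer = KafkaConsumer(
    'example_topic',                     # placeholder topic
    bootstrap_servers='localhost:9092',  # placeholder broker
    group_id='example_group',            # placeholder consumer group
    enable_auto_commit=False,            # offsets are no longer committed automatically
)
for message in consumer:
    handle_batch([message.value])
    consumer.commit()  # commit only after the batch was handled successfully
```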
mage_ai/tests/data_preparation/executors/test_block_executor.py

@@ -98,7 +98,7 @@ class BlockExecutorTest(BaseApiTestCase):
         self.block_executor._execute_conditional = MagicMock(return_value=True)
         self.block_executor._execute = MagicMock(return_value={'result': 'success'})
         # self.block.run_tests = MagicMock()
-        self.block_executor._execute_callback = MagicMock()
+        self.block_executor.execute_callback = MagicMock()

         result = self.block_executor.execute(
             analyze_outputs=analyze_outputs,
@@ -167,7 +167,7 @@ class BlockExecutorTest(BaseApiTestCase):
         # update_tests=False,
         # dynamic_block_uuid=dynamic_block_uuid,
         # )
-        self.block_executor._execute_callback.assert_called_with(
+        self.block_executor.execute_callback.assert_called_with(
             'on_success',
             block_run_id=None,
             callback_kwargs=dict(retry=dict(attempts=1)),
@@ -245,7 +245,7 @@ class BlockExecutorTest(BaseApiTestCase):
         self.block.callback_blocks = [MagicMock(), MagicMock()]
         self.block.callback_block = MagicMock()

-        self.block_executor._execute_callback(
+        self.block_executor.execute_callback(
             callback='on_success',
             global_vars=dict(retry=dict(attempts=1)),
             logging_tags={},
mage_ai/tests/data_preparation/models/test_variable.py

@@ -180,6 +180,7 @@ class VariableTest(DBTestCase):
                [1, 'test'],
                [2, 'test2'],
            ],
+            orient="row",
            schema=['col1', 'col2'],
        )
        df2 = pl.DataFrame(
@@ -187,6 +188,7 @@ class VariableTest(DBTestCase):
                [1, 'test', 3.123, 41414123123124],
                [2, 'test2', 4.321, 12111111],
            ],
+            orient="row",
            schema=['col1', 'col2', 'col3', 'col4'],
        )
        df2 = df2.cast({'col4': pl.Int64})
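
Recent polars versions warn about (or change) orientation inference when a DataFrame is built from a list of rows, since a nested list is ambiguous between row-major and column-major data; passing `orient="row"` pins down the interpretation. For example:

```python
import polars as pl

# Without orient="row", polars has to infer whether the nested lists are
# rows or columns; making it explicit keeps the test stable across versions.
df = pl.DataFrame(
    [
        [1, 'test'],
        [2, 'test2'],
    ],
    schema=['col1', 'col2'],
    orient='row',
)
assert df.shape == (2, 2)
assert df.columns == ['col1', 'col2']
```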
mage_ai/tests/io/create_table/test_postgresql.py

@@ -33,14 +33,14 @@ class TestTablePostgres(DBTestCase):
        test_cases = [
            [],
            [123],
-            [['abc', 'def']],
+            [['àabc', 'deèéf']],
            [['08:00', '12:00'], ['15:00', '20:00']],
            [['08:00', '12:00'], []],
        ]
        expected = [
            '{}',
            '{123}',
-            '{{"abc", "def"}}',
+            '{{"àabc", "deèéf"}}',
            '{{"08:00", "12:00"}, {"15:00", "20:00"}}',
            '{{"08:00", "12:00"}, {}}',
        ]
@@ -49,6 +49,7 @@ class TestTablePostgres(DBTestCase):
            simplejson.dumps(
                val,
                default=encode_complex,
+                ensure_ascii=False,
                ignore_nan=True,
            )
        )
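
The test now uses accented strings, and `ensure_ascii=False` keeps them as literal UTF-8 instead of `\uXXXX` escapes, matching the values Postgres actually stores. The difference in isolation:

```python
import simplejson

# Default behavior escapes non-ASCII characters.
assert simplejson.dumps(['àabc']) == '["\\u00e0abc"]'

# ensure_ascii=False emits the characters verbatim.
assert simplejson.dumps(['àabc'], ensure_ascii=False) == '["àabc"]'
```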
mage_ai/tests/orchestration/notification/test_sender.py

@@ -112,9 +112,11 @@ class NotificationSenderTests(DBTestCase):
            f'`{pipeline_run.pipeline_schedule.name}` '
            f'at execution time `{pipeline_run.execution_date}`. Error: None'
        )
+        title = 'Failed to run Mage pipeline test_pipeline'
        mock_send_teams_message.assert_called_once_with(
            notification_config.teams_config,
            message,
+            title
        )

    @patch('mage_ai.orchestration.notification.sender.send_teams_message')
@@ -131,9 +133,11 @@ class NotificationSenderTests(DBTestCase):
            f'`{pipeline_run.pipeline_schedule.name}` '
            f'at execution time `{pipeline_run.execution_date}`.'
        )
+        title = 'Successfully ran Pipeline test_pipeline'
        mock_send_teams_message.assert_called_once_with(
            notification_config.teams_config,
            message,
+            title
        )

    @patch('mage_ai.orchestration.notification.sender.send_teams_message')
@@ -162,4 +166,4 @@ class NotificationSenderTests(DBTestCase):
            notification_config.opsgenie_config,
            message=ANY,
            description=ANY,
-        )
+        )
mage_ai/tests/streaming/sources/test_kafka.py

@@ -33,7 +33,7 @@ class KafkaTests(TestCase):
            api_version='0.10.2',
            auto_offset_reset='latest',
            max_partition_fetch_bytes=1048576,
-            enable_auto_commit=True,
+            enable_auto_commit=False,
        )

    def test_init_client_with_topics(self):
@@ -56,7 +56,7 @@ class KafkaTests(TestCase):
            api_version='0.10.2',
            auto_offset_reset='latest',
            max_partition_fetch_bytes=1048576,
-            enable_auto_commit=True,
+            enable_auto_commit=False,
        )

    def test_init_client_with_missing_topic_or_topics(self):