tickflow 0.1.0.dev0__py3-none-any.whl → 0.1.0.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,180 @@
+"""Instrument resources for TickFlow API."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, List, Optional, Union, overload
+
+from ._base import AsyncResource, SyncResource
+
+if TYPE_CHECKING:
+    from ..generated_model import Instrument, InstrumentType
+
+
+class Instruments(SyncResource):
+    """Synchronous interface for instrument endpoints.
+
+    Examples
+    --------
+    >>> client = TickFlow(api_key="your-key")
+    >>> inst = client.instruments.get("600000.SH")
+    >>> print(f"{inst['symbol']}: {inst['name']}")
+    """
+
+    @overload
+    def get(self, symbol: str) -> "Instrument": ...
+
+    @overload
+    def get(self, symbol: List[str]) -> List["Instrument"]: ...
+
+    def get(
+        self, symbol: Union[str, List[str]]
+    ) -> Union["Instrument", List["Instrument"]]:
+        """Get metadata for one or more instruments.
+
+        Parameters
+        ----------
+        symbol : str or list of str
+            Instrument code(s). Can be a single symbol string or a list of symbols.
+
+        Returns
+        -------
+        Instrument or list of Instrument
+            If a single symbol is provided, returns a single Instrument dict.
+            If a list is provided, returns a list of Instrument dicts.
+
+            Each Instrument contains:
+            - symbol: Full symbol code (e.g., "600000.SH")
+            - code: Exchange-specific code (e.g., "600000")
+            - exchange: Exchange code (e.g., "SH")
+            - region: Region code (e.g., "CN")
+            - name: Instrument name
+            - instrument_type: Type (stock, etf, index, etc.)
+            - ext: Market-specific extension data
+
+        Examples
+        --------
+        >>> # Single instrument
+        >>> inst = client.instruments.get("600000.SH")
+        >>> print(inst['name'])
+
+        >>> # Multiple instruments
+        >>> insts = client.instruments.get(["600000.SH", "AAPL.US"])
+        >>> for i in insts:
+        ...     print(f"{i['symbol']}: {i['name']}")
+        """
+        if isinstance(symbol, str):
+            response = self._client.get("/v1/instruments", params={"symbols": symbol})
+            data = response["data"]
+            return data[0] if data else {}
+        else:
+            # Use POST for batch queries
+            response = self._client.post("/v1/instruments", json={"symbols": symbol})
+            return response["data"]
+
+    def batch(self, symbols: List[str]) -> List["Instrument"]:
+        """Get metadata for multiple instruments.
+
+        This method uses POST to handle large batches without URL length limits.
+
+        Parameters
+        ----------
+        symbols : list of str
+            List of symbol codes (up to 1000).
+
+        Returns
+        -------
+        list of Instrument
+            List of instrument metadata dicts.
+
+        Examples
+        --------
+        >>> insts = client.instruments.batch(["600000.SH", "000001.SZ", "AAPL.US"])
+        >>> for i in insts:
+        ...     print(f"{i['symbol']}: {i['name']}")
+        """
+        response = self._client.post("/v1/instruments", json={"symbols": symbols})
+        return response["data"]
+
+
+class AsyncInstruments(AsyncResource):
+    """Asynchronous interface for instrument endpoints.
+
+    Examples
+    --------
+    >>> async with AsyncTickFlow(api_key="your-key") as client:
+    ...     inst = await client.instruments.get("600000.SH")
+    """
+
+    @overload
+    async def get(self, symbol: str) -> "Instrument": ...
+
+    @overload
+    async def get(self, symbol: List[str]) -> List["Instrument"]: ...
+
+    async def get(
+        self, symbol: Union[str, List[str]]
+    ) -> Union["Instrument", List["Instrument"]]:
+        """Get metadata for one or more instruments.
+
+        Parameters
+        ----------
+        symbol : str or list of str
+            Instrument code(s). Can be a single symbol string or a list of symbols.
+
+        Returns
+        -------
+        Instrument or list of Instrument
+            If a single symbol is provided, returns a single Instrument dict.
+            If a list is provided, returns a list of Instrument dicts.
+
+            Each Instrument contains:
+            - symbol: Full symbol code (e.g., "600000.SH")
+            - code: Exchange-specific code (e.g., "600000")
+            - exchange: Exchange code (e.g., "SH")
+            - region: Region code (e.g., "CN")
+            - name: Instrument name
+            - instrument_type: Type (stock, etf, index, etc.)
+            - ext: Market-specific extension data
+
+        Examples
+        --------
+        >>> # Single instrument
+        >>> inst = await client.instruments.get("600000.SH")
+        >>> print(inst['name'])
+
+        >>> # Multiple instruments
+        >>> insts = await client.instruments.get(["600000.SH", "AAPL.US"])
+        """
+        if isinstance(symbol, str):
+            response = await self._client.get(
+                "/v1/instruments", params={"symbols": symbol}
+            )
+            data = response["data"]
+            return data[0] if data else {}
+        else:
+            response = await self._client.post(
+                "/v1/instruments", json={"symbols": symbol}
+            )
+            return response["data"]
+
+    async def batch(self, symbols: List[str]) -> List["Instrument"]:
+        """Get metadata for multiple instruments.
+
+        This method uses POST to handle large batches without URL length limits.
+
+        Parameters
+        ----------
+        symbols : list of str
+            List of symbol codes (up to 1000).
+
+        Returns
+        -------
+        list of Instrument
+            List of instrument metadata dicts.
+
+        Examples
+        --------
+        >>> insts = await client.instruments.batch(["600000.SH", "000001.SZ", "AAPL.US"])
+        """
+        response = await self._client.post("/v1/instruments", json={"symbols": symbols})
+        return response["data"]
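
The new module exposes the same surface twice: synchronously via Instruments and asynchronously via AsyncInstruments. A minimal usage sketch follows, assuming the TickFlow and AsyncTickFlow clients shown in the docstrings are importable from a top-level tickflow package (the import path is an assumption, not confirmed by the diff):

import asyncio

from tickflow import AsyncTickFlow, TickFlow  # assumed import path

# Synchronous use: a single symbol returns one dict, a list returns a list of dicts.
client = TickFlow(api_key="your-key")
single = client.instruments.get("600000.SH")
many = client.instruments.get(["600000.SH", "AAPL.US"])
print(single["name"], len(many))


async def main() -> None:
    # Asynchronous use: same surface, awaited inside the context manager.
    async with AsyncTickFlow(api_key="your-key") as aclient:
        insts = await aclient.instruments.batch(["600000.SH", "000001.SZ"])
        for inst in insts:
            print(inst["symbol"], inst["name"])


asyncio.run(main())
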
@@ -2,7 +2,19 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Union, overload
+import asyncio
+import concurrent.futures
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    List,
+    Literal,
+    Optional,
+    Tuple,
+    Union,
+    overload,
+)
 
 from .._types import NOT_GIVEN, NotGiven
 from ._base import AsyncResource, SyncResource
@@ -12,6 +24,9 @@ if TYPE_CHECKING:
 
     from ..generated_model import CompactKlineData, Period
 
+# Maximum symbols per batch request (API limit)
+MAX_SYMBOLS_PER_BATCH = 100
+
 
 def _klines_to_dataframe(
     data: "CompactKlineData", symbol: Optional[str] = None
@@ -86,6 +101,52 @@ def _batch_klines_to_dataframe(data: Dict[str, "CompactKlineData"]) -> "pd.DataF
     return combined
 
 
+def _chunk_list(lst: List[str], chunk_size: int) -> List[List[str]]:
+    """Split a list into chunks of specified size.
+
+    Parameters
+    ----------
+    lst : list
+        The list to chunk.
+    chunk_size : int
+        Maximum size of each chunk.
+
+    Returns
+    -------
+    list of list
+        List of chunks.
+    """
+    return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)]
+
+
+def _get_progress_bar(total: int, desc: str, show_progress: bool):
+    """Return a tqdm progress bar, or None if disabled or unavailable.
+
+    Parameters
+    ----------
+    total : int
+        Total number of items.
+    desc : str
+        Description for the progress bar.
+    show_progress : bool
+        Whether to show the progress bar.
+
+    Returns
+    -------
+    tqdm.tqdm or None
+        A progress bar when show_progress is True and tqdm is installed, else None.
+    """
+    if show_progress:
+        try:
+            from tqdm.auto import tqdm
+
+            return tqdm(total=total, desc=desc, leave=False)
+        except ImportError:
+            # tqdm not installed, fall back to no progress bar
+            pass
+    return None
+
+
 class Klines(SyncResource):
     """Synchronous interface for K-line (OHLCV) data endpoints.
 
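
The two helpers above drive the new batching behaviour: symbol lists are cut into slices of at most MAX_SYMBOLS_PER_BATCH entries, and tqdm stays an optional dependency. A toy illustration of the chunking arithmetic (standalone, not part of the package; the symbol strings are made up):

# Standalone illustration of the same chunking arithmetic used by _chunk_list.
def chunk_list(lst, chunk_size):
    return [lst[i : i + chunk_size] for i in range(0, len(lst), chunk_size)]


symbols = [f"SYM{i:03d}.SH" for i in range(250)]  # hypothetical symbols
chunks = chunk_list(symbols, 100)  # MAX_SYMBOLS_PER_BATCH is 100
print([len(c) for c in chunks])  # [100, 100, 50]
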
@@ -176,7 +237,7 @@ class Klines(SyncResource):
         >>> # Calculate 20-day moving average
         >>> df["ma20"] = df["close"].rolling(20).mean()
         """
-        params = {"symbol": symbol}
+        params: Dict[str, Any] = {"symbol": symbol}
         if not isinstance(period, NotGiven):
             params["period"] = period
         if not isinstance(count, NotGiven):
@@ -193,6 +254,28 @@ class Klines(SyncResource):
             return _klines_to_dataframe(data, symbol=symbol)
         return data
 
+    def _fetch_batch_chunk(
+        self,
+        symbols: List[str],
+        params: Dict[str, Any],
+    ) -> Tuple[Dict[str, "CompactKlineData"], List[Tuple[str, Exception]]]:
+        """Fetch a single batch chunk.
+
+        Returns
+        -------
+        tuple
+            (data dict, list of (symbol, error) for failed symbols)
+        """
+        symbols_str = ",".join(symbols)
+        chunk_params = {**params, "symbols": symbols_str}
+
+        try:
+            response = self._client.get("/v1/klines/batch", params=chunk_params)
+            return response["data"], []
+        except Exception as e:
+            # Return empty data and record the error for all symbols in this chunk
+            return {}, [(s, e) for s in symbols]
+
     @overload
     def batch(
         self,
@@ -203,6 +286,8 @@ class Klines(SyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: Literal[False] = False,
+        show_progress: bool = False,
+        max_workers: int = 5,
     ) -> Dict[str, "CompactKlineData"]: ...
 
     @overload
@@ -215,6 +300,8 @@ class Klines(SyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: Literal[True],
+        show_progress: bool = False,
+        max_workers: int = 5,
     ) -> "pd.DataFrame": ...
 
     def batch(
@@ -226,13 +313,19 @@ class Klines(SyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: bool = False,
+        show_progress: bool = False,
+        max_workers: int = 5,
     ) -> Union[Dict[str, "CompactKlineData"], "pd.DataFrame"]:
-        """Get K-line data for multiple symbols in a single request.
+        """Get K-line data for multiple symbols in batched concurrent requests.
+
+        This method works around the API limit of 100 symbols per request by
+        splitting the symbol list into chunks and fetching them concurrently.
+        A failed chunk does not affect other chunks; partial results are returned.
 
         Parameters
         ----------
         symbols : list of str
-            List of symbol codes.
+            List of symbol codes. May exceed 100; the list is chunked automatically.
         period : str, optional
             K-line period. Defaults to "1d".
         count : int, optional
@@ -244,6 +337,10 @@ class Klines(SyncResource):
         as_dataframe : bool, optional
             If True, return a combined pandas DataFrame with MultiIndex.
             If False (default), return a dict mapping symbols to K-line data.
+        show_progress : bool, optional
+            If True, display a progress bar (requires tqdm). Default False.
+        max_workers : int, optional
+            Maximum number of concurrent requests. Default 5.
 
         Returns
         -------
@@ -251,19 +348,25 @@ class Klines(SyncResource):
             If as_dataframe=False: dict mapping symbol codes to CompactKlineData.
             If as_dataframe=True: pandas DataFrame with MultiIndex (symbol, timestamp).
 
+        Notes
+        -----
+        - The API limits batch requests to 100 symbols. This method automatically
+          splits larger requests into chunks.
+        - Failed chunks do not abort the request; their symbols are simply missing from the result.
+        - Use `show_progress=True` for large requests to see progress.
+
         Examples
         --------
-        >>> # Get raw data for multiple symbols
-        >>> data = client.klines.batch(["600000.SH", "000001.SZ"])
-        >>> for symbol, klines in data.items():
-        ...     print(f"{symbol}: {len(klines['timestamp'])} bars")
-
-        >>> # Get as combined DataFrame
-        >>> df = client.klines.batch(["600000.SH", "000001.SZ"], as_dataframe=True)
-        >>> print(df.loc["600000.SH"].tail())
+        >>> # Get data for 500 symbols with progress bar
+        >>> symbols = client.exchanges.get_symbols("SH")[:500]
+        >>> df = client.klines.batch(symbols, as_dataframe=True, show_progress=True)
+        >>> print(f"Got data for {len(df.index.get_level_values('symbol').unique())} symbols")
         """
-        symbols_str = ",".join(symbols)
-        params = {"symbols": symbols_str}
+        if not symbols:
+            return {} if not as_dataframe else _batch_klines_to_dataframe({})
+
+        # Build base params
+        params: Dict[str, Any] = {}
         if not isinstance(period, NotGiven):
             params["period"] = period
         if not isinstance(count, NotGiven):
@@ -273,12 +376,56 @@ class Klines(SyncResource):
         if not isinstance(end_time, NotGiven) and end_time is not None:
             params["end_time"] = end_time
 
-        response = self._client.get("/v1/klines/batch", params=params)
-        data = response["data"]
+        # Split symbols into chunks
+        chunks = _chunk_list(symbols, MAX_SYMBOLS_PER_BATCH)
+
+        # If only one chunk, no need for concurrency
+        if len(chunks) == 1:
+            data, errors = self._fetch_batch_chunk(chunks[0], params)
+            if as_dataframe:
+                return _batch_klines_to_dataframe(data)
+            return data
+
+        # Setup progress bar
+        pbar = _get_progress_bar(len(chunks), "Fetching K-lines", show_progress)
+
+        # Fetch chunks concurrently
+        all_data: Dict[str, "CompactKlineData"] = {}
+        all_errors: List[Tuple[str, Exception]] = []
+
+        try:
+            with concurrent.futures.ThreadPoolExecutor(
+                max_workers=max_workers
+            ) as executor:
+                futures = {
+                    executor.submit(self._fetch_batch_chunk, chunk, params): chunk
+                    for chunk in chunks
+                }
+
+                for future in concurrent.futures.as_completed(futures):
+                    try:
+                        data, errors = future.result()
+                        all_data.update(data)
+                        all_errors.extend(errors)
+                    except Exception as e:
+                        chunk = futures[future]
+                        all_errors.extend((s, e) for s in chunk)
+
+                    if pbar:
+                        pbar.update(1)
+        finally:
+            if pbar:
+                pbar.close()
+
+        # Failed chunks are swallowed rather than raised (a logging hook could go here)
+        if all_errors:
+            # Silently continue: partial results are better than no results.
+            # Users can check whether their symbols are present in the result.
+            pass
 
         if as_dataframe:
-            return _batch_klines_to_dataframe(data)
-        return data
+            return _batch_klines_to_dataframe(all_data)
+        return all_data
 
 
 class AsyncKlines(AsyncResource):
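
Since a failed chunk only drops its own symbols from the output, callers of the synchronous batch() can detect gaps by diffing their request against the returned keys, as the in-code comment suggests. A small caller-side sketch, assuming a client constructed as in the docstrings above:

# Caller-side check for partial results (sketch; `client` is a TickFlow instance).
requested = ["600000.SH", "000001.SZ", "AAPL.US"]
data = client.klines.batch(requested, period="1d", count=30)

missing = set(requested) - set(data)  # symbols whose chunk failed or returned nothing
if missing:
    print(f"No K-line data for: {sorted(missing)}")
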
@@ -361,7 +508,7 @@ class AsyncKlines(AsyncResource):
         >>> df = await client.klines.get("600000.SH", period="1d", as_dataframe=True)
         >>> print(df.tail())
         """
-        params = {"symbol": symbol}
+        params: Dict[str, Any] = {"symbol": symbol}
         if not isinstance(period, NotGiven):
             params["period"] = period
         if not isinstance(count, NotGiven):
@@ -378,6 +525,27 @@ class AsyncKlines(AsyncResource):
             return _klines_to_dataframe(data, symbol=symbol)
         return data
 
+    async def _fetch_batch_chunk(
+        self,
+        symbols: List[str],
+        params: Dict[str, Any],
+    ) -> Tuple[Dict[str, "CompactKlineData"], List[Tuple[str, Exception]]]:
+        """Fetch a single batch chunk asynchronously.
+
+        Returns
+        -------
+        tuple
+            (data dict, list of (symbol, error) for failed symbols)
+        """
+        symbols_str = ",".join(symbols)
+        chunk_params = {**params, "symbols": symbols_str}
+
+        try:
+            response = await self._client.get("/v1/klines/batch", params=chunk_params)
+            return response["data"], []
+        except Exception as e:
+            return {}, [(s, e) for s in symbols]
+
     @overload
     async def batch(
         self,
@@ -388,6 +556,8 @@ class AsyncKlines(AsyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: Literal[False] = False,
+        show_progress: bool = False,
+        max_concurrency: int = 5,
     ) -> Dict[str, "CompactKlineData"]: ...
 
     @overload
@@ -400,6 +570,8 @@ class AsyncKlines(AsyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: Literal[True],
+        show_progress: bool = False,
+        max_concurrency: int = 5,
     ) -> "pd.DataFrame": ...
 
     async def batch(
@@ -411,13 +583,19 @@ class AsyncKlines(AsyncResource):
         start_time: Union[int, None, NotGiven] = NOT_GIVEN,
         end_time: Union[int, None, NotGiven] = NOT_GIVEN,
         as_dataframe: bool = False,
+        show_progress: bool = False,
+        max_concurrency: int = 5,
     ) -> Union[Dict[str, "CompactKlineData"], "pd.DataFrame"]:
-        """Get K-line data for multiple symbols in a single request.
+        """Get K-line data for multiple symbols in batched concurrent requests.
+
+        This method works around the API limit of 100 symbols per request by
+        splitting the symbol list into chunks and fetching them concurrently.
+        A failed chunk does not affect other chunks; partial results are returned.
 
         Parameters
         ----------
         symbols : list of str
-            List of symbol codes.
+            List of symbol codes. May exceed 100; the list is chunked automatically.
         period : str, optional
             K-line period. Defaults to "1d".
         count : int, optional
@@ -429,6 +607,10 @@ class AsyncKlines(AsyncResource):
         as_dataframe : bool, optional
             If True, return a combined pandas DataFrame with MultiIndex.
             If False (default), return a dict mapping symbols to K-line data.
+        show_progress : bool, optional
+            If True, display a progress bar (requires tqdm). Default False.
+        max_concurrency : int, optional
+            Maximum number of concurrent requests. Default 5.
 
         Returns
         -------
@@ -436,13 +618,24 @@ class AsyncKlines(AsyncResource):
             If as_dataframe=False: dict mapping symbol codes to CompactKlineData.
             If as_dataframe=True: pandas DataFrame with MultiIndex (symbol, timestamp).
 
+        Notes
+        -----
+        - The API limits batch requests to 100 symbols. This method automatically
+          splits larger requests into chunks.
+        - Failed chunks do not abort the request; their symbols are simply missing from the result.
+        - Use `show_progress=True` for large requests to see progress.
+
         Examples
         --------
-        >>> df = await client.klines.batch(["600000.SH", "000001.SZ"], as_dataframe=True)
-        >>> print(df.loc["600000.SH"].tail())
+        >>> # Get data for many symbols with progress bar
+        >>> symbols = await client.exchanges.get_symbols("SH")
+        >>> df = await client.klines.batch(symbols[:500], as_dataframe=True, show_progress=True)
         """
-        symbols_str = ",".join(symbols)
-        params = {"symbols": symbols_str}
+        if not symbols:
+            return {} if not as_dataframe else _batch_klines_to_dataframe({})
+
+        # Build base params
+        params: Dict[str, Any] = {}
         if not isinstance(period, NotGiven):
             params["period"] = period
         if not isinstance(count, NotGiven):
@@ -452,9 +645,50 @@ class AsyncKlines(AsyncResource):
         if not isinstance(end_time, NotGiven) and end_time is not None:
             params["end_time"] = end_time
 
-        response = await self._client.get("/v1/klines/batch", params=params)
-        data = response["data"]
+        # Split symbols into chunks
+        chunks = _chunk_list(symbols, MAX_SYMBOLS_PER_BATCH)
+
+        # If only one chunk, no need for semaphore
+        if len(chunks) == 1:
+            data, errors = await self._fetch_batch_chunk(chunks[0], params)
+            if as_dataframe:
+                return _batch_klines_to_dataframe(data)
+            return data
+
+        # Setup progress bar
+        pbar = _get_progress_bar(len(chunks), "Fetching K-lines", show_progress)
+
+        # Use semaphore to limit concurrency
+        semaphore = asyncio.Semaphore(max_concurrency)
+
+        async def fetch_with_semaphore(
+            chunk: List[str],
+        ) -> Tuple[Dict[str, "CompactKlineData"], List[Tuple[str, Exception]]]:
+            async with semaphore:
+                result = await self._fetch_batch_chunk(chunk, params)
+                if pbar:
+                    pbar.update(1)
+                return result
+
+        # Fetch all chunks concurrently
+        all_data: Dict[str, "CompactKlineData"] = {}
+        all_errors: List[Tuple[str, Exception]] = []
+
+        try:
+            tasks = [fetch_with_semaphore(chunk) for chunk in chunks]
+            results = await asyncio.gather(*tasks, return_exceptions=True)
+
+            for i, result in enumerate(results):
+                if isinstance(result, Exception):
+                    all_errors.extend((s, result) for s in chunks[i])
+                else:
+                    data, errors = result
+                    all_data.update(data)
+                    all_errors.extend(errors)
+        finally:
+            if pbar:
+                pbar.close()
 
         if as_dataframe:
-            return _batch_klines_to_dataframe(data)
-        return data
+            return _batch_klines_to_dataframe(all_data)
+        return all_data
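
The asynchronous variant follows the same shape, with an asyncio.Semaphore capping in-flight requests at max_concurrency. A minimal end-to-end sketch, assuming AsyncTickFlow is importable from a top-level tickflow package (import path assumed) and reusing the exchanges.get_symbols call from the package's own docstring example:

import asyncio

from tickflow import AsyncTickFlow  # assumed import path


async def main() -> None:
    async with AsyncTickFlow(api_key="your-key") as client:
        # get_symbols is used this way in the docstring example above.
        symbols = await client.exchanges.get_symbols("SH")
        df = await client.klines.batch(
            symbols[:500],
            period="1d",
            as_dataframe=True,
            show_progress=True,  # requires tqdm
            max_concurrency=5,   # at most 5 requests in flight
        )
        print(df.index.get_level_values("symbol").nunique(), "symbols returned")


asyncio.run(main())
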