pybgpkitstream-0.1.0.tar.gz

@@ -0,0 +1,65 @@ PKG-INFO
Metadata-Version: 2.3
Name: pybgpkitstream
Version: 0.1.0
Summary: Drop-in replacement for PyBGPStream using BGPKIT
Author: JustinLoye
Author-email: JustinLoye <jloye@iij.ad.jp>
Requires-Dist: aiohttp>=3.12.15
Requires-Dist: pybgpkit>=0.6.2
Requires-Dist: pydantic>=2.11.9
Requires-Python: >=3.10
Description-Content-Type: text/markdown

# PyBGPKITStream

A drop-in replacement for PyBGPStream using BGPKIT.

## Features

- Effortlessly switch to another BGP stream implementation:
  - Designed as a seamless, drop-in replacement ([example](tests/test_stream.py#L38))
- Lazy message generation: time-ordered BGP messages are produced on the fly, consuming minimal memory, which makes it suitable for large datasets
- Supports multiple route collectors
- Supports both RIBs and updates
- Caching with concurrent downloads; the cache is fully compatible with the BGPKIT parser's own caching mechanism
- [Good-enough performance](examples/perf.ipynb)
- A CLI tool

## Quick start

```python
import datetime
from pybgpkitstream import BGPStreamConfig, BGPKITStream

config = BGPStreamConfig(
    start_time=datetime.datetime(2010, 9, 1, 0, 0),
    end_time=datetime.datetime(2010, 9, 1, 1, 59),
    collectors=["route-views.sydney", "route-views.wide"],
    data_types=["ribs", "updates"],
)

stream = BGPKITStream.from_config(config)

n_elems = 0
for _ in stream:
    n_elems += 1

print(f"Processed {n_elems} BGP elements")
```
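
For comparison, here is the same loop written against PyBGPStream — a minimal sketch, assuming `pybgpstream` is installed; the constructor arguments follow PyBGPStream's documented API, with a single record type for simplicity:

```python
import pybgpstream

# Same time window and collectors as the quick start above,
# but served by PyBGPStream instead of BGPKIT.
stream = pybgpstream.BGPStream(
    from_time="2010-09-01 00:00:00",
    until_time="2010-09-01 01:59:00 UTC",
    collectors=["route-views.sydney", "route-views.wide"],
    record_type="updates",
)

n_elems = sum(1 for _ in stream)
print(f"Processed {n_elems} BGP elements")
```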

## Motivation

PyBGPStream is great, but its implementation is complex and it stops working whenever UC San Diego experiences a power outage.
The BGPKIT broker and parser are also great, but on their own they cannot produce a single time-ordered stream of BGP messages spanning multiple collectors and multiple data types.

## Missing features

- Live mode
- `pybgpkitstream.BGPElement` is not fully compatible with `pybgpstream.BGPElem`: missing `record_type` (BGPKIT limitation), `project` (BGPKIT limitation), `router` (could be improved), and `router_ip` (could be improved)
- CLI output is not yet compatible with `bgpdump -m` or `bgpreader` (for now, a similar-looking output is produced)

## Issues

- The program will crash when working with many update files per collector (roughly more than a few hours of data), but only when caching is disabled. This might be caused by the [BGPKIT parser not being lazy](https://github.com/bgpkit/bgpkit-parser/pull/239). See [details and a workaround](examples/many_updates.ipynb)
- Filters are designed with BGPKIT in mind and can differ slightly from PyBGPStream's (see the filtering sketch after this list). See [this file](tests/pybgpstream_utils.py) for a conversion to PyBGPStream filters. Note that for now the filters have not been heavily tested...
- ... just like the rest of the project. Use at your own risk. The only tests are in /tests
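
As referenced in the issues list, filters use BGPKIT's field names. A minimal filtering sketch using this package's own `FilterOptions` model (the filter values themselves are hypothetical):

```python
import datetime
from pybgpkitstream import BGPStreamConfig, FilterOptions, BGPKITStream

# Hypothetical filter: announcements of sub-prefixes of 192.0.2.0/24
# originated by AS64500.
filters = FilterOptions(
    origin_asn=64500,
    prefix_sub="192.0.2.0/24",
    update_type="announce",
)

config = BGPStreamConfig(
    start_time=datetime.datetime(2010, 9, 1, 0, 0),
    end_time=datetime.datetime(2010, 9, 1, 1, 59),
    collectors=["route-views.wide"],
    data_types=["updates"],
    filters=filters,
)

for elem in BGPKITStream.from_config(config):
    print(elem)
```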
@@ -0,0 +1,53 @@ README.md
(Identical to the long description embedded in PKG-INFO above; the content is not repeated here.)
@@ -0,0 +1,28 @@ pyproject.toml
[project]
name = "pybgpkitstream"
version = "0.1.0"
description = "Drop-in replacement for PyBGPStream using BGPKIT"
readme = "README.md"
authors = [
    { name = "JustinLoye", email = "jloye@iij.ad.jp" }
]
requires-python = ">=3.10"
dependencies = [
    "aiohttp>=3.12.15",
    "pybgpkit>=0.6.2",
    "pydantic>=2.11.9",
]

[build-system]
requires = ["uv_build>=0.8.12,<0.9.0"]
build-backend = "uv_build"

[dependency-groups]
test = [
    "jupyter>=1.1.1",
    "pybgpstream>=2.0.2",
    "pytest>=8.4.2",
]

[project.scripts]
pybgpkitstream = "pybgpkitstream.cli:main"
@@ -0,0 +1,4 @@ __init__.py
from .bgpstreamconfig import BGPStreamConfig, FilterOptions
from .bgpkitstream import BGPKITStream

__all__ = ["BGPStreamConfig", "FilterOptions", "BGPKITStream"]
@@ -0,0 +1,44 @@ bgpelement.py
from typing import NamedTuple, TypedDict

# The keys contain '-', so the functional TypedDict syntax is required.
# total=False: not every element carries every field (see _maybe_field below).
ElementFields = TypedDict(
    "ElementFields",
    {
        "next-hop": str,
        "as-path": str,
        "communities": list[str],
        "prefix": str,
        # State fields are probed by __str__ for pybgpstream compatibility,
        # even though the BGPKIT conversion does not currently set them.
        "old-state": str,
        "new-state": str,
    },
    total=False,
)


class BGPElement(NamedTuple):
    """Compatible with pybgpstream.BGPElem"""

    type: str
    collector: str
    time: float
    peer_asn: int
    peer_address: str
    fields: ElementFields

    def __str__(self):
        """Credit to pybgpstream"""
        return "%s|%f|%s|%s|%s|%s|%s|%s|%s|%s|%s" % (
            self.type,
            self.time,
            self.collector,
            self.peer_asn,
            self.peer_address,
            self._maybe_field("prefix"),
            self._maybe_field("next-hop"),
            self._maybe_field("as-path"),
            " ".join(self.fields["communities"])
            if "communities" in self.fields
            else None,
            self._maybe_field("old-state"),
            self._maybe_field("new-state"),
        )

    def _maybe_field(self, field):
        """Credit to pybgpstream"""
        return self.fields[field] if field in self.fields else None
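
For illustration, a hypothetical element (values invented, not taken from any real collector dump) and the `bgpreader`-style pipe-separated line its `__str__` produces:

```python
from pybgpkitstream.bgpelement import BGPElement

elem = BGPElement(
    type="A",
    collector="route-views.wide",
    time=1283299200.0,
    peer_asn=2497,
    peer_address="202.249.2.86",
    fields={
        "prefix": "192.0.2.0/24",
        "next-hop": "202.249.2.86",
        "as-path": "2497 64500",
        "communities": [],
    },
)
print(elem)
# A|1283299200.000000|route-views.wide|2497|202.249.2.86|192.0.2.0/24|202.249.2.86|2497 64500||None|None
```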
@@ -0,0 +1,228 @@ bgpkitstream.py
import asyncio
import os
import re
import datetime
from typing import Iterator, Literal
from collections import defaultdict
from itertools import chain
from heapq import merge
from operator import itemgetter
import binascii
import logging

import aiohttp
import bgpkit
from bgpkit.bgpkit_broker import BrokerItem

from pybgpkitstream.bgpstreamconfig import BGPStreamConfig
from pybgpkitstream.bgpelement import BGPElement


logger = logging.getLogger(__name__)


def convert_bgpkit_elem(element, is_rib: bool, collector: str) -> BGPElement:
    """Convert a pybgpkit element to a pybgpstream-like element"""
    return BGPElement(
        type="R" if is_rib else element.elem_type,
        collector=collector,
        time=element.timestamp,
        peer_asn=element.peer_asn,
        peer_address=element.peer_ip,
        fields={
            "next-hop": element.next_hop,
            "as-path": element.as_path,
            "communities": [] if not element.communities else element.communities,
            "prefix": element.prefix,
        },
    )


def crc32(input_str: str) -> str:
    input_bytes = input_str.encode("utf-8")
    crc = binascii.crc32(input_bytes) & 0xFFFFFFFF
    return f"{crc:08x}"


class BGPKITStream:
    def __init__(
        self,
        ts_start: float,
        ts_end: float,
        collector_id: str,
        data_type: list[Literal["update", "rib"]],
        cache_dir: str | None,
        filters: dict | None = None,
    ):
        self.ts_start = ts_start
        self.ts_end = ts_end
        self.collector_id = collector_id
        self.data_type = data_type
        self.cache_dir = cache_dir
        # Avoid a mutable default argument
        self.filters = filters if filters is not None else {}

        self.broker = bgpkit.Broker()

    @staticmethod
    def _generate_cache_filename(url):
        """Generate a cache filename compatible with the BGPKIT parser."""

        hash_suffix = crc32(url)

        if "updates." in url:
            data_type = "updates"
        elif "rib" in url or "view" in url:
            data_type = "rib"
        else:
            raise ValueError("Could not infer data type from url")

        # Look for patterns like rib.20100901.0200 or updates.20100831.2345
        timestamp_match = re.search(r"(\d{8})\.(\d{4})", url)
        if timestamp_match:
            timestamp = f"{timestamp_match.group(1)}.{timestamp_match.group(2)}"
        else:
            raise ValueError("Could not parse timestamp from url")

        if url.endswith(".bz2"):
            compression_ext = "bz2"
        elif url.endswith(".gz"):
            compression_ext = "gz"
        else:
            raise ValueError("Could not parse extension from url")

        return f"cache-{data_type}.{timestamp}.{hash_suffix}.{compression_ext}"

    def _set_urls(self):
        """Set archive file URLs with the bgpkit broker"""
        self.urls = {"rib": defaultdict(list), "update": defaultdict(list)}
        for data_type in self.data_type:
            items: list[BrokerItem] = self.broker.query(
                ts_start=datetime.datetime.fromtimestamp(self.ts_start)
                - datetime.timedelta(minutes=1),
                ts_end=datetime.datetime.fromtimestamp(self.ts_end),
                collector_id=self.collector_id,
                data_type=data_type,
            )
            for item in items:
                self.urls[data_type][item.collector_id].append(item.url)

    async def _download_file(self, semaphore, session, url, filepath, data_type, rc):
        """Helper coroutine to download a single file, controlled by a semaphore"""
        async with semaphore:
            logger.debug(f"{filepath} is a cache miss. Downloading {url}")
            try:
                async with session.get(url) as resp:
                    resp.raise_for_status()
                    with open(filepath, "wb") as fd:
                        async for chunk in resp.content.iter_chunked(8192):
                            fd.write(chunk)
                return data_type, rc, filepath
            except aiohttp.ClientError as e:
                logger.error(f"Failed to download {url}: {e}")
                # Return None on failure so asyncio.gather doesn't cancel everything.
                return None

    async def _prefetch_data(self):
        """Download archive files concurrently and cache them in `self.cache_dir`"""
        self.paths = {"rib": defaultdict(list), "update": defaultdict(list)}
        tasks = []

        CONCURRENT_DOWNLOADS = 10
        semaphore = asyncio.Semaphore(CONCURRENT_DOWNLOADS)

        conn = aiohttp.TCPConnector()
        async with aiohttp.ClientSession(connector=conn) as session:
            # Create all the download tasks.
            for data_type in self.data_type:
                for rc, rc_urls in self.urls[data_type].items():
                    for url in rc_urls:
                        filename = self._generate_cache_filename(url)
                        filepath = os.path.join(self.cache_dir, filename)

                        if os.path.exists(filepath):
                            logger.debug(f"{filepath} is a cache hit")
                            self.paths[data_type][rc].append(filepath)
                        else:
                            task = asyncio.create_task(
                                self._download_file(
                                    semaphore, session, url, filepath, data_type, rc
                                )
                            )
                            tasks.append(task)

            if tasks:
                logger.info(
                    f"Starting download of {len(tasks)} files with a concurrency of {CONCURRENT_DOWNLOADS}..."
                )
                results = await asyncio.gather(*tasks)

                # Process the results, skipping any 'None' values from failed downloads.
                for result in results:
                    if result:
                        data_type, rc, filepath = result
                        self.paths[data_type][rc].append(filepath)
                logger.info("All downloads finished.")

    def _create_tagged_iterator(self, iterator, is_rib, collector):
        """Create a generator that tags elements with metadata missing in bgpkit."""
        return ((elem.timestamp, elem, is_rib, collector) for elem in iterator)

    def __iter__(self) -> Iterator[BGPElement]:
        self._set_urls()

        if self.cache_dir:
            asyncio.run(self._prefetch_data())

        # One iterator per (data_type, collector) combination,
        # to be merged according to the elements' timestamps
        iterators_to_merge = []

        for data_type in self.data_type:
            is_rib = data_type == "rib"

            # Get rib or update files per collector
            if self.cache_dir:
                rc_to_urls = self.paths[data_type]
            else:
                rc_to_urls = self.urls[data_type]

            # Chain rib or update iterators to get one stream per collector / data_type
            for rc, urls in rc_to_urls.items():
                parsers = [bgpkit.Parser(url=url, filters=self.filters) for url in urls]

                chained_iterator = chain.from_iterable(parsers)

                # Add metadata lost by bgpkit for compatibility with pybgpstream
                iterators_to_merge.append((chained_iterator, is_rib, rc))

        # Make a generator to tag each bgpkit element with metadata
        # Benefit 1: full compatibility with pybgpstream
        # Benefit 2: it exposes an easy-to-access key for heapq to merge on
        tagged_iterators = [
            self._create_tagged_iterator(it, is_rib, rc)
            for it, is_rib, rc in iterators_to_merge
        ]

        # Merge and convert to pybgpstream format
        for timestamp, bgpkit_elem, is_rib, rc in merge(
            *tagged_iterators, key=itemgetter(0)
        ):
            if self.ts_start <= timestamp <= self.ts_end:
                yield convert_bgpkit_elem(bgpkit_elem, is_rib, rc)

    @classmethod
    def from_config(cls, config: BGPStreamConfig):
        return cls(
            ts_start=config.start_time.timestamp(),
            ts_end=config.end_time.timestamp(),
            collector_id=",".join(config.collectors),
            data_type=[
                dtype[:-1] for dtype in config.data_types
            ],  # strip the plural "s": "ribs" -> "rib", "updates" -> "update"
            cache_dir=str(config.cache_dir) if config.cache_dir else None,
            filters=config.filters.model_dump(exclude_unset=True)
            if config.filters
            else {},
        )
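
The ordering guarantee of the stream comes from `heapq.merge` over the per-collector, per-data-type iterators, keyed on the timestamp that `_create_tagged_iterator` places first in each tuple. A self-contained sketch of the same pattern (toy tuples, not real BGP messages):

```python
from heapq import merge
from operator import itemgetter

# Two already time-sorted streams, e.g. one per collector. Keying on the
# first tuple element keeps merge from ever comparing the payloads.
wide = [(1.0, "wide update"), (4.0, "wide update")]
sydney = [(2.0, "sydney update"), (3.0, "sydney update")]

for ts, payload in merge(wide, sydney, key=itemgetter(0)):
    print(ts, payload)
# 1.0 wide update
# 2.0 sydney update
# 3.0 sydney update
# 4.0 wide update
```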
@@ -0,0 +1,62 @@ bgpstreamconfig.py
import datetime
from pydantic import BaseModel, Field, DirectoryPath
from typing import Literal
from ipaddress import IPv4Address, IPv6Address


class FilterOptions(BaseModel):
    """A unified model for the available filter options."""

    origin_asn: int | None = Field(
        default=None, description="Filter by the origin AS number."
    )
    prefix: str | None = Field(
        default=None, description="Filter by an exact prefix match."
    )
    prefix_super: str | None = Field(
        default=None,
        description="Filter by the exact prefix and its more general super-prefixes.",
    )
    prefix_sub: str | None = Field(
        default=None,
        description="Filter by the exact prefix and its more specific sub-prefixes.",
    )
    prefix_super_sub: str | None = Field(
        default=None,
        description="Filter by the exact prefix and both its super- and sub-prefixes.",
    )
    peer_ip: str | IPv4Address | IPv6Address | None = Field(
        default=None, description="Filter by the IP address of a single BGP peer."
    )
    peer_ips: list[str | IPv4Address | IPv6Address] | None = Field(
        default=None, description="Filter by a list of BGP peer IP addresses."
    )
    peer_asn: str | None = Field(
        default=None, description="Filter by the AS number of the BGP peer."
    )
    update_type: Literal["withdraw", "announce"] | None = Field(
        default=None, description="Filter by the BGP update message type."
    )
    as_path: str | None = Field(
        default=None, description="Filter by a regular expression matching the AS path."
    )


class BGPStreamConfig(BaseModel):
    """
    Unified BGPStream config.

    Filters are primarily written for BGPKIT, but utilities to convert them to
    pybgpstream filters are provided in tests/pybgpstream_utils.
    """

    start_time: datetime.datetime = Field(description="Start of the stream")
    end_time: datetime.datetime = Field(description="End of the stream")
    collectors: list[str] = Field(description="List of collectors to get data from")
    data_types: list[Literal["ribs", "updates"]] = Field(
        description="List of archive files to consider (`ribs` or `updates`)"
    )
    cache_dir: DirectoryPath | None = Field(
        default=None,
        description="Directory for caching downloaded files.",
    )
    filters: FilterOptions | None = Field(default=None, description="Optional filters")
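
Because `BGPStreamConfig` is a pydantic model, malformed configurations fail fast at construction time. A small sketch (note that `data_types` only accepts the plural literals, and `DirectoryPath` requires the cache directory to already exist):

```python
import datetime
from pydantic import ValidationError

from pybgpkitstream import BGPStreamConfig

try:
    BGPStreamConfig(
        start_time=datetime.datetime(2010, 9, 1),
        end_time=datetime.datetime(2010, 9, 1, 2),
        collectors=["route-views.wide"],
        data_types=["rib"],  # invalid: must be "ribs" or "updates"
    )
except ValidationError as e:
    print(e)
```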
@@ -0,0 +1,151 @@ cli.py
import argparse
import sys
import datetime

from pybgpkitstream import BGPStreamConfig, FilterOptions
from pybgpkitstream import BGPKITStream


def main():
    parser = argparse.ArgumentParser(
        description="Stream and filter BGP data.",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    # Arguments with default values for BGPStreamConfig
    parser.add_argument(
        "--start-time",
        type=datetime.datetime.fromisoformat,
        default=datetime.datetime(2010, 9, 1, 0, 0),
        help="Start of the stream in ISO format.",
    )
    parser.add_argument(
        "--end-time",
        type=datetime.datetime.fromisoformat,
        default=datetime.datetime(2010, 9, 1, 2, 0),
        help="End of the stream in ISO format.",
    )
    parser.add_argument(
        "--collectors",
        type=str,
        nargs="+",
        default=["route-views.sydney", "route-views.wide"],
        help="List of collectors to get data from.",
    )
    parser.add_argument(
        "--data-types",
        type=str,
        nargs="+",
        choices=["ribs", "updates"],
        default=["updates"],
        help="List of archive types to consider ('ribs' or 'updates').",
    )
    parser.add_argument(
        "--cache-dir",
        type=str,
        default=None,
        help="Directory for caching downloaded files.",
    )

    # Arguments for FilterOptions
    parser.add_argument(
        "--origin-asn",
        type=int,
        default=None,
        help="Filter by the origin AS number.",
    )
    parser.add_argument(
        "--prefix",
        type=str,
        default=None,
        help="Filter by an exact prefix match.",
    )
    parser.add_argument(
        "--prefix-super",
        type=str,
        default=None,
        help="Filter by the exact prefix and its more general super-prefixes.",
    )
    parser.add_argument(
        "--prefix-sub",
        type=str,
        default=None,
        help="Filter by the exact prefix and its more specific sub-prefixes.",
    )
    parser.add_argument(
        "--prefix-super-sub",
        type=str,
        default=None,
        help="Filter by the exact prefix and both its super- and sub-prefixes.",
    )
    parser.add_argument(
        "--peer-ip",
        type=str,  # Note: argparse does not directly handle Union types, so we use a string here.
        default=None,
        help="Filter by the IP address of a single BGP peer.",
    )
    parser.add_argument(
        "--peer-ips",
        type=str,
        nargs="+",
        default=None,
        help="Filter by a list of BGP peer IP addresses.",
    )
    parser.add_argument(
        "--peer-asn",
        type=str,
        default=None,
        help="Filter by the AS number of the BGP peer.",
    )
    parser.add_argument(
        "--update-type",
        type=str,
        choices=["withdraw", "announce"],
        default=None,
        help="Filter by the BGP update message type.",
    )
    parser.add_argument(
        "--as-path",
        type=str,
        default=None,
        help="Filter by a regular expression matching the AS path.",
    )

    args = parser.parse_args()

    # Only pass filters the user actually provided: constructing FilterOptions
    # with explicit None values would make every field look "set" to
    # model_dump(exclude_unset=True) in BGPKITStream.from_config.
    filter_kwargs = {
        key: value
        for key, value in {
            "origin_asn": args.origin_asn,
            "prefix": args.prefix,
            "prefix_super": args.prefix_super,
            "prefix_sub": args.prefix_sub,
            "prefix_super_sub": args.prefix_super_sub,
            "peer_ip": args.peer_ip,
            "peer_ips": args.peer_ips,
            "peer_asn": args.peer_asn,
            "update_type": args.update_type,
            "as_path": args.as_path,
        }.items()
        if value is not None
    }
    filter_options = FilterOptions(**filter_kwargs) if filter_kwargs else None

    config = BGPStreamConfig(
        start_time=args.start_time,
        end_time=args.end_time,
        collectors=args.collectors,
        data_types=args.data_types,
        cache_dir=args.cache_dir,
        filters=filter_options,
    )

    try:
        for element in BGPKITStream.from_config(config):
            print(element)
    except Exception as e:
        print(f"An error occurred during streaming: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()