warp-beacon 2.6.85.tar.gz → 2.6.87.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {warp_beacon-2.6.85/warp_beacon.egg-info → warp_beacon-2.6.87}/PKG-INFO +2 -1
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/pyproject.toml +2 -1
- warp_beacon-2.6.87/warp_beacon/__version__.py +2 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scheduler/instagram_human.py +3 -3
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scheduler/scheduler.py +4 -3
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/__init__.py +2 -12
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/abstract.py +11 -6
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/instagram.py +4 -6
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/wb_instagrapi.py +9 -10
- warp_beacon-2.6.87/warp_beacon/scraper/utils.py +20 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/abstract.py +4 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87/warp_beacon.egg-info}/PKG-INFO +2 -1
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon.egg-info/requires.txt +1 -0
- warp_beacon-2.6.85/warp_beacon/__version__.py +0 -2
- warp_beacon-2.6.85/warp_beacon/scraper/utils.py +0 -4
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/LICENSE +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/MANIFEST.in +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/README.md +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/assets/placeholder.gif +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/etc/.gitignore +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/etc/accounts.json +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/etc/proxies.json +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/etc/warp_beacon.conf +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/etc/warp_beacon.service +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/setup.cfg +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/setup.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/compress/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/compress/video.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/jobs/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/jobs/abstract.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/jobs/download_job.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/jobs/types.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/jobs/upload_job.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/mediainfo/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/mediainfo/abstract.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/mediainfo/audio.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/mediainfo/silencer.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/mediainfo/video.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scheduler/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/account_selector.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/exceptions.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/fail_handler.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/captcha.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/link_resolver.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/music.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/shorts.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/youtube.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/storage/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/storage/mongo.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/bot.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/caption_shortener.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/download_status.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/edit_message.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/handlers.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/placeholder_message.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/progress_bar.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/progress_file_reader.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/types.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/telegram/utils.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/uploader/__init__.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/warp_beacon.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/yt_auth.py +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon.egg-info/SOURCES.txt +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon.egg-info/dependency_links.txt +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon.egg-info/entry_points.txt +0 -0
- {warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon.egg-info/top_level.txt +0 -0
{warp_beacon-2.6.85/warp_beacon.egg-info → warp_beacon-2.6.87}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warp_beacon
-Version: 2.6.85
+Version: 2.6.87
 Summary: Telegram bot for expanding external media links
 Home-page: https://github.com/sb0y/warp_beacon
 Author: Andrey Bagrintsev
@@ -241,6 +241,7 @@ Requires-Dist: yt_dlp
 Requires-Dist: pydub
 Requires-Dist: SpeechRecognition
 Requires-Dist: playwright
+Requires-Dist: fake-useragent
 Dynamic: author
 Dynamic: home-page
 Dynamic: license-file
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scheduler/instagram_human.py

@@ -17,7 +17,7 @@ class InstagramHuman(object):
 		self.operations_count = 0

 	def watch_content(self, media: list) -> None:
-		for m in media[:random.randint(1,
+		for m in media[:random.randint(1, 15)]:
 			try:
 				logging.info("Wathing content with pk '%s'", str(m.pk))
 				content = self.scrapler.cl.media_info_v1(m.pk)
@@ -34,7 +34,7 @@ class InstagramHuman(object):
 			timeline_initialized = True
 			self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
 			logging.info("Starting to watch related reels with media_pk '%d'", last_pk)
-			media = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4,
+			media = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4, 15), last_media_pk=last_pk)
 			self.operations_count += 1
 			self.watch_content(media)

@@ -43,7 +43,7 @@ class InstagramHuman(object):
 		if not timeline_initialized:
 			self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
 			logging.info("Starting to explore reels with media_pk '%d'", last_pk)
-			media = self.scrapler.download_hndlr(self.scrapler.cl.explore_reels, amount=random.randint(4,
+			media = self.scrapler.download_hndlr(self.scrapler.cl.explore_reels, amount=random.randint(4, 15), last_media_pk=last_pk)
 			self.operations_count += 1
 			self.watch_content(media)
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scheduler/scheduler.py

@@ -15,10 +15,8 @@ class IGScheduler(object):
 	yt_sessions_dir = "/var/warp_beacon"

 	def __init__(self, downloader: warp_beacon.scraper.AsyncDownloader) -> None:
-		self.downloader = None
 		self.running = True
 		self.thread = None
-		self.event = None
 		self.state = {"remaining": randrange(8400, 26200), "yt_sess_exp": []}
 		self.downloader = downloader
 		self.event = threading.Event()
@@ -108,6 +106,9 @@ class IGScheduler(object):

 	def validate_yt_session(self) -> bool:
 		try:
+			if self.downloader.yt_validate_event.is_set():
+				return True
+			self.downloader.yt_validate_event.set()
 			logging.info("Setting YT validate task ...")
 			self.downloader.queue_task(warp_beacon.jobs.download_job.DownloadJob.build(
 				session_validation=True,
@@ -147,7 +148,7 @@
 			self.handle_time_planning()

 			start_time = time.time()
-			logging.info("Next scheduler activity in '%
+			logging.info("Next scheduler activity in '%d' seconds", int(min_val))
 			logging.info("IG timeout '%d' secs", int(self.state["remaining"]))
 			self.event.wait(timeout=min_val)
 			self.event.clear()
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/__init__.py

@@ -33,18 +33,6 @@ PROXY_FILE = os.environ.get("PROXY_FILE", default="/var/warp_beacon/proxies.json")
 class AsyncDownloader(object):
 	TG_FILE_LIMIT = 2147483648 # 2 GiB
 	__JOE_BIDEN_WAKEUP = None
-	workers = None
-	allow_loop = None
-	job_queue = None
-	uploader = None
-	workers_count = 0
-	auth_event = None
-	manager = None
-	acc_selector = None
-	scheduler = None
-	scrolling_now = None
-	process_context = None
-	status_pipe = None

 	def __init__(self, uploader: AsyncUploader, pipe_connection: multiprocessing.connection.Connection, workers_count: int) -> None:
 		self.workers = []
@@ -58,6 +46,7 @@ class AsyncDownloader(object):
 		self.uploader = uploader
 		self.workers_count = workers_count
 		self.status_pipe = pipe_connection
+		self.yt_validate_event = multiprocessing.Event()
 		if os.environ.get("TG_PREMIUM", default="false") == "true":
 			self.TG_FILE_LIMIT = 4294967296 # 4 GiB

@@ -160,6 +149,7 @@ class AsyncDownloader(object):
 			actor.request_yt_auth = self.request_yt_auth
 			actor.auth_event = self.auth_event
 			actor.status_pipe = self.status_pipe
+			actor.yt_validate_event = self.yt_validate_event
 			# job retry loop
 			while self.allow_loop.value == 1:
 				try:
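The removed class-level names above were plain defaults that `__init__` immediately shadowed with instance assignments; keeping such state on the instance avoids accidentally sharing a value across objects. A minimal illustration of the difference (not warp_beacon code):

```python
# Illustration only: why per-object state belongs in __init__ assignments
# rather than class-level "declarations".
class WithClassDefaults:
	workers = None  # one attribute shared by the class until shadowed


class WithInstanceAttrs:
	def __init__(self) -> None:
		self.workers = []  # created fresh for every instance


a, b = WithClassDefaults(), WithClassDefaults()
WithClassDefaults.workers = ["oops"]  # mutating the class leaks into both objects
print(a.workers, b.workers)           # ['oops'] ['oops']

c, d = WithInstanceAttrs(), WithInstanceAttrs()
c.workers.append("job")
print(c.workers, d.workers)           # ['job'] []
```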
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/abstract.py

@@ -1,23 +1,28 @@
-import os
-import time
-import pathlib
-from abc import ABC, abstractmethod
-from typing import Callable, Union
 import logging
 import multiprocessing
 import multiprocessing.connection
+import os
 import socket
-import
+import pathlib
+import time
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Callable, Union

 from PIL import Image
 from pillow_heif import register_heif_opener

+import requests.packages.urllib3.util.connection as urllib3_cn
+
+if TYPE_CHECKING:
+	from multiprocessing.synchronize import Event as EventType
+
 class ScraperAbstract(ABC):
 	def __init__(self, account: tuple, proxy: dict = None) -> None:
 		self.original_gai_family = None
 		self.send_message_to_admin_func: Callable = lambda: None
 		self.request_yt_auth: Callable = lambda: None
 		self.status_pipe: multiprocessing.connection.Connection = None
+		self.yt_validate_event: EventType = None
 		self.auth_event = None
 		self.account = None
 		self.account_index = 0
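The new `TYPE_CHECKING` block imports the multiprocessing `Event` type for annotations only, so it is never executed at runtime. A minimal standalone sketch of the same pattern (the `Worker` class here is illustrative, not part of warp_beacon):

```python
# Type-only import: resolved by mypy/pyright, skipped at runtime.
import multiprocessing
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
	from multiprocessing.synchronize import Event as EventType


class Worker:
	def __init__(self) -> None:
		# the annotation is only inspected by type checkers
		self.validate_event: Optional["EventType"] = None


w = Worker()
w.validate_event = multiprocessing.Event()
print(w.validate_event.is_set())  # False
```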
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/instagram.py

@@ -12,9 +12,9 @@ import logging
 import email
 import imaplib
 import json
+from urllib.parse import urljoin, urlparse
 import requests
 import urllib3
-from urllib.parse import urljoin, urlparse

 from instagrapi import exceptions
 from instagrapi.exceptions import UnknownError as IGUnknownError
@@ -31,6 +31,7 @@ from warp_beacon.jobs.download_job import DownloadJob
 from warp_beacon.telegram.utils import Utils
 from warp_beacon.scraper.instagram.wb_instagrapi import WBClient
 from warp_beacon.telegram.types import ReportType
+from warp_beacon.scraper.utils import ScraperUtils

 INST_SESSION_FILE_TPL = "/var/warp_beacon/inst_session_account_%d.json"

@@ -61,10 +62,7 @@ class InstagramScraper(ScraperAbstract):
 			"Accept": "*/*",
 			"Accept-Encoding": "gzip, deflate, br",
 			"Accept-Language": "en-US,en;q=0.9",
-			"User-Agent": (
-				"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
-				"(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"
-			)
+			"User-Agent": ScraperUtils.get_ua()
 		})
 		self.cl.set_progress_callback(self.download_progress)

@@ -96,7 +94,7 @@ class InstagramScraper(ScraperAbstract):
 			"uuid": uuids.get("uuid", self.cl.generate_uuid()),
 			"client_session_id": self.client_session_id,
 			"advertising_id": uuids.get("advertising_id", self.cl.generate_uuid()),
-			"device_id": uuids.get("device_id", self.cl.
+			"device_id": uuids.get("device_id", self.cl.generate_android_device_id())
 		})

 	def safe_write_session(self) -> None:
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/instagram/wb_instagrapi.py

@@ -7,6 +7,8 @@ import requests
 from instagrapi import Client
 from instagrapi.exceptions import VideoNotDownload

+from warp_beacon.scraper.utils import ScraperUtils
+
 class WBClient(Client):
 	"""
 	patched instagrapi
@@ -17,10 +19,7 @@ class WBClient(Client):
 		self.session = requests.Session()
 		# may be I should remove '"Sec-Fetch-*", "Upgrade-Insecure-Requests", "DNT"' ?
 		self.session.headers.update({
-			"User-Agent": (
-				"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
-				"(KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36"
-			),
+			"User-Agent": ScraperUtils.get_ua(),
 			"Accept": (
 				"text/html,application/xhtml+xml,application/xml;"
 				"q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8"
@@ -29,12 +28,12 @@ class WBClient(Client):
 			"Accept-Encoding": "gzip, deflate, br",
 			"Referer": "https://www.instagram.com/",
 			"Connection": "keep-alive",
-			"Sec-Fetch-Site": "same-origin",
-			"Sec-Fetch-Mode": "navigate",
-			"Sec-Fetch-User": "?1",
-			"Sec-Fetch-Dest": "document",
-			"Upgrade-Insecure-Requests": "1",
-			"DNT": "1",
+			#"Sec-Fetch-Site": "same-origin",
+			#"Sec-Fetch-Mode": "navigate",
+			#"Sec-Fetch-User": "?1",
+			#"Sec-Fetch-Dest": "document",
+			#"Upgrade-Insecure-Requests": "1",
+			#"DNT": "1",
 		})
 		self.essential_params = {"oe", "oh", "_nc_ht", "_nc_cat", "_nc_oc", "_nc_ohc", "_nc_gid"}
warp_beacon-2.6.87/warp_beacon/scraper/utils.py (new file)

@@ -0,0 +1,20 @@
+import logging
+
+from fake_useragent import UserAgent
+
+class ScraperUtils(object):
+	@staticmethod
+	def get_ua_dict() -> dict:
+		random_client = None
+		try:
+			ua = UserAgent(browsers=['Facebook', 'Android'], platforms=['mobile', 'tablet'], os=['Android', 'iOS'])
+			random_client = ua.getRandom
+			logging.info("Select random UA: %s", random_client)
+		except Exception as e:
+			logging.warning("Exception occurrd while generating random client UA!", exc_info=e)
+			random_client = {'useragent': 'Mozilla/5.0 (Linux; Android 14; SM-S911B Build/UP1A.231005.007; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/133.0.6943.117 Mobile Safari/537.36 [FB_IAB/FB4A;FBAV/502.0.0.66.79;IABMV/1;]', 'percent': 0.017937771404345798, 'type': 'mobile', 'device_brand': 'Samsung', 'browser': 'Facebook', 'browser_version': '502.0.0', 'browser_version_major_minor': 502.0, 'os': 'Android', 'os_version': '14', 'platform': 'Linux aarch64'}
+		return random_client
+
+	@staticmethod
+	def get_ua() -> str:
+		return ScraperUtils.get_ua_dict()["useragent"]
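The new helper draws a random mobile/Facebook user agent from fake-useragent (now declared in Requires-Dist) and falls back to a hard-coded Android UA if that fails. A short usage sketch, mirroring how the Instagram client headers above consume it:

```python
# Illustrative usage of the new helper; requires fake-useragent to be installed.
import requests

from warp_beacon.scraper.utils import ScraperUtils

session = requests.Session()
# get_ua() returns the 'useragent' string from get_ua_dict(); on any
# fake-useragent failure it falls back to the hard-coded Android/Facebook UA.
session.headers.update({"User-Agent": ScraperUtils.get_ua()})
print(session.headers["User-Agent"])
```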
{warp_beacon-2.6.85 → warp_beacon-2.6.87}/warp_beacon/scraper/youtube/abstract.py

@@ -63,6 +63,10 @@ class YoutubeAbstract(ScraperAbstract):
 			logging.error("Failed to refresh Youtube session!")
 			logging.exception(e)

+		# avoid task acquiring in parallel worker
+		self.yt_validate_event.clear()
+		#time.sleep(35)
+
 		return 0

 	def get_video_id(self, url: str) -> Optional[str]:
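Taken together with the scheduler and downloader changes above, `yt_validate_event` gates the YouTube session-validation job: the scheduler skips queuing a new task while the event is set, and the worker clears it once its refresh attempt finishes. A minimal sketch of that gating pattern (class and function names are illustrative, not the package's actual API):

```python
# Shared multiprocessing.Event used to queue a validation task at most once
# at a time.
import multiprocessing


class Downloader:
	def __init__(self) -> None:
		self.yt_validate_event = multiprocessing.Event()
		self.tasks = []  # stand-in for the real job queue

	def queue_task(self, task: str) -> None:
		self.tasks.append(task)


def schedule_validation(downloader: Downloader) -> bool:
	# scheduler side (cf. IGScheduler.validate_yt_session): skip if a
	# validation task is already in flight
	if downloader.yt_validate_event.is_set():
		return True
	downloader.yt_validate_event.set()
	downloader.queue_task("session_validation")
	return True


def finish_validation(downloader: Downloader) -> None:
	# worker side (cf. YoutubeAbstract): release the gate once the refresh
	# attempt is done, successful or not
	downloader.yt_validate_event.clear()


d = Downloader()
schedule_validation(d)   # queues the task and sets the gate
schedule_validation(d)   # no-op: the gate is already set
finish_validation(d)     # worker clears the gate when done
print(d.tasks)           # ['session_validation'] — queued exactly once
```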
{warp_beacon-2.6.85 → warp_beacon-2.6.87/warp_beacon.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warp_beacon
-Version: 2.6.85
+Version: 2.6.87
 Summary: Telegram bot for expanding external media links
 Home-page: https://github.com/sb0y/warp_beacon
 Author: Andrey Bagrintsev
@@ -241,6 +241,7 @@ Requires-Dist: yt_dlp
 Requires-Dist: pydub
 Requires-Dist: SpeechRecognition
 Requires-Dist: playwright
+Requires-Dist: fake-useragent
 Dynamic: author
 Dynamic: home-page
 Dynamic: license-file