warp-beacon 2.6.96__tar.gz → 2.6.98__tar.gz

This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in the public registry.
Files changed (70)
  1. {warp_beacon-2.6.96/warp_beacon.egg-info → warp_beacon-2.6.98}/PKG-INFO +1 -1
  2. warp_beacon-2.6.98/warp_beacon/__version__.py +2 -0
  3. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scheduler/instagram_human.py +136 -31
  4. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/instagram.py +5 -1
  5. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/wb_instagrapi.py +32 -2
  6. {warp_beacon-2.6.96 → warp_beacon-2.6.98/warp_beacon.egg-info}/PKG-INFO +1 -1
  7. warp_beacon-2.6.96/warp_beacon/__version__.py +0 -2
  8. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/LICENSE +0 -0
  9. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/MANIFEST.in +0 -0
  10. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/README.md +0 -0
  11. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/assets/cc-group-black.png +0 -0
  12. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/assets/placeholder.gif +0 -0
  13. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/etc/.gitignore +0 -0
  14. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/etc/accounts.json +0 -0
  15. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/etc/proxies.json +0 -0
  16. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/etc/warp_beacon.conf +0 -0
  17. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/etc/warp_beacon.service +0 -0
  18. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/pyproject.toml +0 -0
  19. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/setup.cfg +0 -0
  20. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/setup.py +0 -0
  21. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/__init__.py +0 -0
  22. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/compress/__init__.py +0 -0
  23. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/compress/video.py +0 -0
  24. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/jobs/__init__.py +0 -0
  25. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/jobs/abstract.py +0 -0
  26. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/jobs/download_job.py +0 -0
  27. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/jobs/types.py +0 -0
  28. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/jobs/upload_job.py +0 -0
  29. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/mediainfo/__init__.py +0 -0
  30. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/mediainfo/abstract.py +0 -0
  31. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/mediainfo/audio.py +0 -0
  32. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/mediainfo/silencer.py +0 -0
  33. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/mediainfo/video.py +0 -0
  34. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scheduler/__init__.py +0 -0
  35. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scheduler/scheduler.py +0 -0
  36. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/__init__.py +0 -0
  37. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/abstract.py +0 -0
  38. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/account_selector.py +0 -0
  39. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/exceptions.py +0 -0
  40. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/fail_handler.py +0 -0
  41. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/__init__.py +0 -0
  42. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/captcha.py +0 -0
  43. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/link_resolver.py +0 -0
  44. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/utils.py +0 -0
  45. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/youtube/__init__.py +0 -0
  46. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/youtube/abstract.py +0 -0
  47. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/youtube/music.py +0 -0
  48. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/youtube/shorts.py +0 -0
  49. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/youtube/youtube.py +0 -0
  50. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/storage/__init__.py +0 -0
  51. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/storage/mongo.py +0 -0
  52. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/__init__.py +0 -0
  53. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/bot.py +0 -0
  54. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/caption_shortener.py +0 -0
  55. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/download_status.py +0 -0
  56. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/edit_message.py +0 -0
  57. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/handlers.py +0 -0
  58. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/placeholder_message.py +0 -0
  59. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/progress_bar.py +0 -0
  60. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/progress_file_reader.py +0 -0
  61. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/types.py +0 -0
  62. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/telegram/utils.py +0 -0
  63. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/uploader/__init__.py +0 -0
  64. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/warp_beacon.py +0 -0
  65. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/yt_auth.py +0 -0
  66. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon.egg-info/SOURCES.txt +0 -0
  67. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon.egg-info/dependency_links.txt +0 -0
  68. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon.egg-info/entry_points.txt +0 -0
  69. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon.egg-info/requires.txt +0 -0
  70. {warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon.egg-info/top_level.txt +0 -0
{warp_beacon-2.6.96/warp_beacon.egg-info → warp_beacon-2.6.98}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warp_beacon
-Version: 2.6.96
+Version: 2.6.98
 Summary: Telegram bot for expanding external media links
 Home-page: https://github.com/sb0y/warp_beacon
 Author: Andrey Bagrintsev
warp_beacon-2.6.98/warp_beacon/__version__.py (added)
@@ -0,0 +1,2 @@
+__version__ = "2.6.98"
+
{warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scheduler/instagram_human.py
@@ -1,5 +1,6 @@
 import time
 import random
+from typing import Optional
 from datetime import datetime
 
 import logging
@@ -8,33 +9,109 @@ from instagrapi.types import UserShort
 from warp_beacon.scraper.instagram.instagram import InstagramScraper
 
 class InstagramHuman(object):
-	scrapler = None
 	default_profiles = ["nasa", "natgeo", "9gag", "spotify", "nba"]
-	operations_count = 0
 
 	def __init__(self, scrapler: InstagramScraper) -> None:
 		self.scrapler = scrapler
 		self.operations_count = 0
 
+	def browse_timeline(self) -> Optional[dict]:
+		feed = None
+		items = []
+		try:
+			reason = random.choice(["cold_start_fetch", "pull_to_refresh"])
+			feed = self.scrapler.cl.get_timeline_feed(reason=reason)
+			self.operations_count += 1
+			items = feed.get("feed_items", [])
+		except Exception as e:
+			logging.warning("Failed to get timeline feed!", exc_info=e)
+			return
+
+		seen = []
+		if items:
+			for item in items:
+				media = item.get("media_or_ad")
+				logging.debug("Item content: %s", media)
+				if not media:
+					continue
+				media_id = media.get("id")
+				#user_id = media.get("user", {}).get("pk")
+
+				if media_id:
+					seen.append(str(media_id))
+				if random.random() < 0.5:
+					try:
+						#self.scrapler.cl.media_like(media_id)
+						self.scrapler.cl.media_comments(media_id)
+						self.operations_count += 1
+					except Exception as e:
+						logging.warning("Failed to see comments to media '%s'", media_id, exc_info=e)
+				self.random_pause()
+
+		if seen:
+			try:
+				self.scrapler.cl.media_seen(seen)
+				self.operations_count += 1
+			except Exception as e:
+				logging.warning("Failed to mark timeline feed as seen", exc_info=e)
+
+		return feed
+
+	def watch_stories(self) -> None:
+		logging.info("Simulating stories watch ...")
+		stories = None
+		try:
+			stories = self.scrapler.cl.user_stories_v1(self.scrapler.cl.user_id)
+			self.operations_count += 1
+		except Exception as e:
+			logging.warning("Failed to get user stories!", exc_info=e)
+
+		if not stories:
+			return
+
+		seen = []
+		for m in stories[:random.randint(1, len(stories))]:
+			try:
+				logging.info("Wathing story with pk '%s'", str(m.id))
+				seen.append(str(m.id))
+				self.random_pause()
+			except Exception as e:
+				logging.warning("Exception while watching content", exc_info=e)
+
+		if seen:
+			try:
+				self.scrapler.cl.media_seen(seen)
+				self.operations_count += 1
+				logging.info("Marked '%d' stories as seen", len(seen))
+			except Exception as e:
+				logging.warning("Failed to mark seen watched watch stories!", exc_info=e)
+
 	def watch_content(self, media: list) -> None:
 		if not media:
 			return
+		seen = []
 		for m in media[:random.randint(1, len(media))]:
 			try:
-				logging.info("Wathing content with pk '%s'", str(m.pk))
+				logging.info("Wathing content with pk '%s'", str(m.id))
 				content = self.scrapler.cl.media_info_v1(m.pk)
-				logging.info("Watched content with id '%s'", str(content.pk))
+				seen.append(str(content.id))
+				logging.info("Watched content with id '%s'", str(content.id))
 				self.operations_count += 1
 				self.random_pause()
 			except Exception as e:
 				logging.warning("Exception while watching content")
 				logging.exception(e)
+		try:
+			self.scrapler.cl.media_seen(seen)
+		except Exception as e:
+			logging.warning("Failed to mark seen watched videos!", exc_info=e)
 
 	def scroll_content(self, last_pk: int) -> None:
 		timeline_initialized = False
 		if random.random() > 0.5:
 			timeline_initialized = True
-			self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
+			#self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
+			self.scrapler.timeline_cursor = self.browse_timeline()
 		logging.info("Starting to watch related reels with media_pk '%d'", last_pk)
 		media = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4, 10), last_media_pk=last_pk)
 		self.operations_count += 1
@@ -43,7 +120,8 @@ class InstagramHuman(object):
 		if random.random() > 0.7:
 			time.sleep(random.uniform(2, 20))
 		if not timeline_initialized:
-			self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
+			#self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, reason="cold_start_fetch")
+			self.scrapler.timeline_cursor = self.browse_timeline()
 		logging.info("Starting to explore reels with media_pk '%d'", last_pk)
 		media = self.scrapler.download_hndlr(self.scrapler.cl.explore_reels, amount=random.randint(4, 10), last_media_pk=last_pk)
 		self.operations_count += 1
@@ -65,15 +143,13 @@ class InstagramHuman(object):
 	def morning_routine(self) -> None:
 		try:
 			logging.info("Starting morning activity simulation")
-			self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, "pull_to_refresh", self.scrapler.timeline_cursor.get("next_max_id"))
-			self.operations_count += 1
+			#self.scrapler.timeline_cursor = self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, "pull_to_refresh", self.scrapler.timeline_cursor.get("next_max_id"))
+			#self.operations_count += 1
+			self.scrapler.timeline_cursor = self.browse_timeline()
 			time.sleep(random.uniform(3, 7))
 			if random.random() > 0.5:
-				logging.info("Checking direct ...")
-				self.scrapler.download_hndlr(self.scrapler.cl.direct_active_presence)
-				self.operations_count += 1
-				time.sleep(random.uniform(2, 5))
-			if random.random() > 0.3:
+				self.check_direct()
+			if random.random() > 0.6:
 				self.scrapler.download_hndlr(self.scrapler.cl.notification_like_and_comment_on_photo_user_tagged, "everyone")
 				self.operations_count += 1
 				self.random_pause()
@@ -82,6 +158,8 @@ class InstagramHuman(object):
 				self.scrapler.download_hndlr(self.scrapler.cl.get_reels_tray_feed, "pull_to_refresh")
 				self.operations_count += 1
 				self.random_pause()
+			if random.random() > 0.4:
+				self.watch_stories()
 			if random.random() > 0.8:
 				self.profile_view()
 		except Exception as e:
@@ -91,13 +169,19 @@ class InstagramHuman(object):
 	def daytime_routine(self) -> None:
 		try:
 			logging.info("Starting day fast check activity simulation")
-			self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, "pull_to_refresh")
-			self.operations_count += 1
+			#self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, "pull_to_refresh")
+			#self.operations_count += 1
+			self.browse_timeline()
 			time.sleep(random.uniform(2, 5))
 			if random.random() > 0.5:
 				self.scrapler.download_hndlr(self.scrapler.cl.get_reels_tray_feed, "pull_to_refresh")
 				self.operations_count += 1
 				self.random_pause()
+
+			if random.random() > 0.4:
+				self.watch_stories()
+				self.random_pause()
+
 			if random.random() > 0.4:
 				logging.info("Watching reels ...")
 				reels = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4, 15))
@@ -111,21 +195,18 @@ class InstagramHuman(object):
 	def evening_routine(self) -> None:
 		try:
 			logging.info("Starting evening active user simulation")
-			self.scrapler.download_hndlr(self.scrapler.cl.get_timeline_feed, "pull_to_refresh")
-			self.operations_count += 1
+			self.browse_timeline()
 			time.sleep(random.uniform(2, 5))
-			self.scrapler.download_hndlr(self.scrapler.cl.get_reels_tray_feed, "pull_to_refresh")
-			self.operations_count += 1
-			time.sleep(random.uniform(2, 5))
-			if random.random() > 0.5:
-				self.scrapler.download_hndlr(self.scrapler.cl.direct_active_presence)
-				self.operations_count += 1
-				time.sleep(random.uniform(2, 5))
 			if random.random() > 0.5:
+				self.check_direct()
+			if random.random() > 0.6:
 				logging.info("Checking notifications, tags ...")
 				self.scrapler.download_hndlr(self.scrapler.cl.notification_like_and_comment_on_photo_user_tagged, "everyone")
 				self.operations_count += 1
 				self.random_pause()
+			if random.random() > 0.4:
+				self.watch_stories()
+				self.random_pause()
 			if random.random() > 0.4:
 				logging.info("Watching reels ...")
 				reels = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4, 10))
@@ -144,9 +225,10 @@ class InstagramHuman(object):
 		try:
 			logging.info("Starting night activity simulation")
 			if random.random() > 0.7:
-				self.scrapler.download_hndlr(self.scrapler.cl.direct_active_presence)
-				self.operations_count += 1
-				self.random_pause(short=True)
+				self.check_direct()
+			if random.random() > 0.7:
+				self.watch_stories()
+				self.random_pause()
 			if random.random() > 0.5:
 				logging.info("Watching reels ...")
 				reels = self.scrapler.download_hndlr(self.scrapler.cl.reels, amount=random.randint(4, 15))
@@ -162,6 +244,30 @@ class InstagramHuman(object):
 		logging.info("Pause for '%.2f' sec ...", round(pause, 2))
 		time.sleep(pause)
 
+	def check_direct(self) -> None:
+		logging.info("Checking direct ...")
+		self.scrapler.download_hndlr(self.scrapler.cl.direct_active_presence)
+		self.operations_count += 1
+		self.random_pause()
+		threads = self.scrapler.download_hndlr(self.scrapler.cl.direct_threads, amount=random.randint(3, 7))
+		self.operations_count += 1
+		for thread in threads:
+			try:
+				messages = self.scrapler.cl.direct_messages(thread.id, amount=random.randint(5, 15))
+				self.operations_count += 1
+				if not messages:
+					continue
+				msg_sample = random.sample(messages, k=random.randint(1, min(len(messages), 5)))
+				for msg in msg_sample:
+					if random.random() < 0.85:
+						self.scrapler.cl.direct_message_seen(msg.thread_id, msg.id)
+						self.operations_count += 1
+						self.random_pause()
+				self.random_pause()
+			except Exception as e:
+				logging.warning("Failed to read thread %s", thread.id)
+				logging.exception(e)
+
 	def profile_view(self) -> None:
 		try:
 			logging.info("profile_view ...")
@@ -190,11 +296,10 @@ class InstagramHuman(object):
 			self.operations_count += 1
 			self.random_pause()
 
+		#self.scrapler.cl.explore_page_media_info
+
 		if random.random() > 0.5:
-			logging.info("Checking direct ...")
-			self.scrapler.download_hndlr(self.scrapler.cl.direct_active_presence)
-			self.operations_count += 1
-			self.random_pause()
+			self.check_direct()
 
 		if random.random() > 0.3:
 			logging.info("Checking notifications, tags ...")
{warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/instagram.py
@@ -282,7 +282,7 @@ class InstagramScraper(ScraperAbstract):
 			if len(st_parts) > 1:
 				effective_story_id = st_parts[0]
 				logging.info("Effective story id is '%s'", effective_story_id)
-				effective_url = "https://www.instagram.com/stories/%s/%s/" % (story_info.user.username, effective_story_id)
+				effective_url = f"https://www.instagram.com/stories/{story_info.user.username}/{effective_story_id}/"
 			if story_info.media_type == 1: # photo
 				path = str(self.download_hndlr(self.cl.story_download_by_url, url=story_info.thumbnail_url, folder='/tmp'))
 				path_lowered = path.lower()
@@ -357,6 +357,10 @@ class InstagramScraper(ScraperAbstract):
 				res.append(self.download_photo(url=media_info.thumbnail_url, media_info=media_info))
 			elif media_info.media_type == 8: # Album
 				res.append(self.download_album(media_info=media_info))
+			try:
+				self.cl.media_seen([media_info.id])
+			except Exception as e:
+				logging.warning("Failed to mark seen with id = '%s'", media_info.id, exc_info=e)
 		elif scrap_type == "story":
 			story_info = self.cl.story_info(media_id)
 			logging.info("media_type for story is '%d'", story_info.media_type)
{warp_beacon-2.6.96 → warp_beacon-2.6.98}/warp_beacon/scraper/instagram/wb_instagrapi.py
@@ -1,11 +1,17 @@
 import logging
 from typing import Callable
+from copy import deepcopy
 from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
 from pathlib import Path
 import requests
 
 from instagrapi import Client
-from instagrapi.exceptions import VideoNotDownload
+from instagrapi.types import Media
+from instagrapi.exceptions import (
+	ClientError,
+	ClientLoginRequired,
+	VideoNotDownload
+)
 
 from warp_beacon.scraper.utils import ScraperUtils
@@ -160,4 +166,28 @@ class WBClient(Client):
 				)
 			except Exception as e:
 				logging.warning("Progress callback raised an exception!", exc_info=e)
-		return path.resolve()
+		return path.resolve()
+
+	def media_info(self, media_pk: str, use_cache: bool = True) -> Media:
+		"""
+		Get Media Information from PK
+
+		Parameters
+		----------
+		media_pk: str
+			Unique identifier of the media
+		use_cache: bool, optional
+			Whether or not to use information from cache, default value is True
+
+		Returns
+		-------
+		Media
+			An object of Media type
+		"""
+		media_pk = self.media_pk(media_pk)
+		if not use_cache or media_pk not in self._medias_cache:
+			media = self.media_info_v1(media_pk)
+			self._medias_cache[media_pk] = media
+		return deepcopy(
+			self._medias_cache[media_pk]
+		)  # return copy of cache (dict changes protection)
{warp_beacon-2.6.96 → warp_beacon-2.6.98/warp_beacon.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: warp_beacon
-Version: 2.6.96
+Version: 2.6.98
 Summary: Telegram bot for expanding external media links
 Home-page: https://github.com/sb0y/warp_beacon
 Author: Andrey Bagrintsev
warp_beacon-2.6.96/warp_beacon/__version__.py (removed)
@@ -1,2 +0,0 @@
-__version__ = "2.6.96"
-