abstract-webtools 0.1.6.78__tar.gz → 0.1.6.80__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/PKG-INFO +1 -1
  2. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/setup.py +1 -1
  3. abstract_webtools-0.1.6.80/src/abstract_webtools/managers/allss//.py +19 -0
  4. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/videoDownloader.py +60 -20
  5. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools.egg-info/PKG-INFO +1 -1
  6. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools.egg-info/SOURCES.txt +1 -0
  7. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/README.md +0 -0
  8. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/pyproject.toml +0 -0
  9. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/setup.cfg +0 -0
  10. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/__init__.py +0 -0
  11. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/abstract_usurpit.py +0 -0
  12. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/abstract_webtools.py +0 -0
  13. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/big_user_agent_list.py +0 -0
  14. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/main.py +0 -0
  15. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/__init__.py +0 -0
  16. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/cipherManager.py +0 -0
  17. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/crawlManager.py +0 -0
  18. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/crawlmgr2.py +0 -0
  19. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/curlMgr.py +0 -0
  20. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/domainManager.py +0 -0
  21. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/dynamicRateLimiter.py +0 -0
  22. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/get_test.py +0 -0
  23. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/linkManager/__init__.py +0 -0
  24. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/linkManager/linkManager.py +0 -0
  25. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/mySocketClient.py +0 -0
  26. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/networkManager.py +0 -0
  27. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/requestManager/__init__.py +0 -0
  28. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/requestManager/requestManager.py +0 -0
  29. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/seleniumManager.py +0 -0
  30. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/soupManager/__init__.py +0 -0
  31. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/soupManager/asoueces.py +0 -0
  32. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/soupManager/soupManager.py +0 -0
  33. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/sslManager.py +0 -0
  34. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/tlsAdapter.py +0 -0
  35. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/urlManager/__init__.py +0 -0
  36. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/urlManager/urlManager.py +0 -0
  37. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/userAgentManager.py +0 -0
  38. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/managers/videoDownloader2.py +0 -0
  39. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/soup_gui.py +0 -0
  40. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/url_grabber.py +0 -0
  41. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools/url_grabber_new.py +0 -0
  42. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools.egg-info/dependency_links.txt +0 -0
  43. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools.egg-info/requires.txt +0 -0
  44. {abstract_webtools-0.1.6.78 → abstract_webtools-0.1.6.80}/src/abstract_webtools.egg-info/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract_webtools
3
- Version: 0.1.6.78
3
+ Version: 0.1.6.80
4
4
  Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
5
5
  Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
6
6
  Author: putkoff
@@ -4,7 +4,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
4
4
  long_description = fh.read()
5
5
  setuptools.setup(
6
6
  name='abstract_webtools',
7
- version='0.1.6.78',
7
+ version='0.1.6.80',
8
8
  author='putkoff',
9
9
  author_email='partners@abstractendeavors.com',
10
10
  description='Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.',
@@ -0,0 +1,19 @@
1
+ allss = """BASE_DIR = Path('/var/www/clownworld')
2
+ TEMPLATES_DIR = BASE_DIR / 'templates'
3
+ TEMPLATES_FOLDER_PATH = TEMPLATES_DIR / 'videos'
4
+ FLASK_APP_DIR = BASE_DIR / 'video_player'
5
+ VIDEO_REACT = BASE_DIR / 'bolshevid'
6
+ BUILD_DIR = VIDEO_REACT / 'build'
7
+ STATIC_DIR = BUILD_DIR / 'static'
8
+ IMGS_DIR = STATIC_DIR / 'imgs'
9
+ CSS_DIR = STATIC_DIR / 'css'
10
+ JS_DIR = STATIC_DIR / 'js'
11
+ DATA_DIR = BASE_DIR / 'data'
12
+ VIDEOS_DIR = DATA_DIR / 'videos'
13
+ USERS_DIR = DATA_DIR / 'users'
14
+ DOWNLOADS_DIR = DATA_DIR / 'downloads'
15
+ DOWNLOADS_VIDS_DIR = DOWNLOADS_DIR / 'videos'"""
16
+ ste = ''
17
+ for alls in allss.split('\n'):
18
+ ste+=f"{alls.split(' ')[0]},"
19
+ input(ste)
@@ -240,49 +240,89 @@ def optimize_video_for_safari(input_file, reencode=False):
240
240
 
241
241
  try:
242
242
  subprocess.run(command, check=True)
243
- shutil.copy2(local_output, input_file)
243
+
244
+ os.remove(input_file)
245
+ shutil.move(local_output, input_file)
244
246
  print(f"Optimized video saved as {input_file}")
245
247
  except subprocess.CalledProcessError as e:
246
248
  print(f"Error during optimization: {e}")
247
249
  return input_file
248
250
  finally:
249
251
  shutil.rmtree(tmp_dir)
250
-
251
- def downloadvideo(url, directory=False, rename_display=True, thumbnails=True, audio=False,safari_optimize=False):
252
def bool_or_default(obj, default=True):
    """Return *obj* unless it is None, in which case return *default*.

    Unlike ``obj or default`` this preserves falsy-but-meaningful values
    such as False and 0; only None is replaced.
    """
    # `is None` (not `== None`): identity check is the correct and
    # idiomatic test, and avoids surprises from custom __eq__ methods.
    if obj is None:
        obj = default
    return obj
256
def get_video_info(url, download_directory=None, output_filename=None, get_info=None, download_video=None):
    """Build and return a VideoDownloader for *url*.

    Args:
        url: Video URL handed to VideoDownloader.
        download_directory: Target directory; defaults to the current
            working directory when None.
        output_filename: Output file name; defaults to a temp name
            derived from the URL via get_temp_file_name().
        get_info: Whether to fetch metadata (default True).
        download_video: Whether to actually download (default False).

    Returns:
        The constructed VideoDownloader instance.
    """
    # BUG FIX: the body previously read an undefined name `directory`
    # (the parameter is `download_directory`), which raised NameError.
    directory = download_directory or os.getcwd()
    output_filename = output_filename or get_temp_file_name(url)
    # BUG FIX: previously called undefined `bool_or_true`; the helper
    # defined in this module is `bool_or_default` (default True).
    get_info = bool_or_default(get_info)
    download_video = bool_or_default(download_video, default=False)
    video_mgr = VideoDownloader(
        url=url,
        download_directory=directory,
        download_video=download_video,
        get_info=get_info,
        output_filename=output_filename
    )
    return video_mgr
269
def get_temp_id(url):
    """Derive a filesystem-safe id from the last (up to) 20 characters of *url*."""
    text = str(url)
    # Clamp the tail length to the string length (a plain [-20:] slice
    # would behave the same, but mirror the original's explicit clamp).
    tail = 20 if len(text) >= 20 else len(text)
    sanitized = re.sub(r'[^\w\d.-]', '_', text)
    return sanitized[-tail:]
276
def get_temp_file_name(url):
    """Return a temporary mp4 filename derived from *url* (via get_temp_id)."""
    return f"temp_{get_temp_id(url)}.mp4"
280
def get_display_id(info):
    """Return the video's 'display_id', falling back to 'id' when absent/falsy."""
    return info.get('display_id') or info.get('id')
283
def get_video_title(info):
    """Return the video title truncated to 30 characters ('video' if missing)."""
    return info.get('title', 'video')[:30]
286
def get_safe_title(title):
    """Replace filesystem-unsafe characters in *title* with underscores."""
    unsafe_chars = r'[^\w\d.-]'
    return re.sub(unsafe_chars, '_', title)
290
+ def downloadvideo(url, directory=None,output_filename=None, rename_display=None, thumbnails=None, audio=None,safari_optimize=None,download_video=None,*args,**kwargs):
291
+ rename_display = bool_or_default(rename_display)
292
+ thumbnails= bool_or_default(thumbnails)
293
+ audio= bool_or_default(thumbnails,default=False)
294
+ safari_optimize=bool_or_default(thumbnails,default=True)
295
+ download_video =bool_or_default(download_video,default=True)
296
+ output_filename = output_filename or get_temp_file_name(url)
297
+ video_mgr = get_video_info(url,download_directory=directory,output_filename=output_filename,download_video=download_video)
298
+ info = video_mgr.info
299
+ display_id = get_display_id(info)
265
300
  os.makedirs(directory, exist_ok=True)
266
-
267
- if rename_display and info and 'file_path' in info:
301
+ video_directory = os.path.join(directory, display_id)
302
+ os.makedirs(video_directory, exist_ok=True)
303
+ file_path = None
304
+ if info:
305
+ file_path = info.get('file_path')
306
+ if rename_display and file_path:
268
307
  # Rename using metadata
269
- video_id = info.get('id', temp_id)
270
- title = info.get('title', 'video')[:30] # Limit to 30 chars
271
- safe_title = re.sub(r'[^\w\d.-]', '_', title)
272
- final_filename = f"{safe_title}_{video_id}.mp4"
273
- new_path = os.path.join(directory, final_filename)
308
+ video_id = info.get('id', get_temp_id(url))
309
+ title = output_filename or get_video_title(info)
310
+ safe_title = get_safe_title(title)
311
+ final_filename = output_filename or f"{safe_title}_{video_id}"
312
+ final_filename = f"{final_filename}.mp4"
313
+ new_path = os.path.join(video_directory, final_filename)
274
314
  if os.path.exists(info['file_path']):
275
315
  os.rename(info['file_path'], new_path)
276
316
  info['file_path'] = new_path
317
+ info['file_path'] = new_path
277
318
 
278
319
  # *** Here we call the optimization function ***
279
320
  video_path = info.get('file_path')
280
321
  if video_path and video_path.lower().endswith('.mp4') and safari_optimize:
281
322
  info['file_path'] = optimize_video_for_safari(video_path,reencode=safari_optimize)
282
-
283
- info_path = os.path.join(directory, 'info.json')
323
+ info_path = os.path.join(video_directory, 'info.json')
284
324
  if thumbnails:
285
- info = get_thumbnails(directory, info)
325
+ info = get_thumbnails(video_directory, info)
286
326
  if audio:
287
327
  try:
288
328
  info = download_audio(directory, info)
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: abstract_webtools
3
- Version: 0.1.6.78
3
+ Version: 0.1.6.80
4
4
  Summary: Abstract Web Tools is a Python package that provides various utility functions for web scraping tasks. It is built on top of popular libraries such as `requests`, `BeautifulSoup`, and `urllib3` to simplify the process of fetching and parsing web content.
5
5
  Home-page: https://github.com/AbstractEndeavors/abstract_essentials/tree/main/abstract_webtools
6
6
  Author: putkoff
@@ -16,6 +16,7 @@ src/abstract_webtools.egg-info/dependency_links.txt
16
16
  src/abstract_webtools.egg-info/requires.txt
17
17
  src/abstract_webtools.egg-info/top_level.txt
18
18
  src/abstract_webtools/managers/__init__.py
19
+ src/abstract_webtools/managers/allss\.py
19
20
  src/abstract_webtools/managers/cipherManager.py
20
21
  src/abstract_webtools/managers/crawlManager.py
21
22
  src/abstract_webtools/managers/crawlmgr2.py