overleaf-sync-fixed 1.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- overleaf_sync_fixed-1.2.0/.gitignore +118 -0
- overleaf_sync_fixed-1.2.0/LICENSE +21 -0
- overleaf_sync_fixed-1.2.0/PKG-INFO +73 -0
- overleaf_sync_fixed-1.2.0/README.md +51 -0
- overleaf_sync_fixed-1.2.0/README_CN.md +51 -0
- overleaf_sync_fixed-1.2.0/olsync/__init__.py +3 -0
- overleaf_sync_fixed-1.2.0/olsync/debug_ids.py +55 -0
- overleaf_sync_fixed-1.2.0/olsync/olbrowserlogin.py +99 -0
- overleaf_sync_fixed-1.2.0/olsync/olclient.py +243 -0
- overleaf_sync_fixed-1.2.0/olsync/olsync.py +479 -0
- overleaf_sync_fixed-1.2.0/pyproject.toml +25 -0
- overleaf_sync_fixed-1.2.0/requirements.txt +6 -0
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
# Byte-compiled / optimized / DLL files
|
|
2
|
+
__pycache__/
|
|
3
|
+
*.py[cod]
|
|
4
|
+
*$py.class
|
|
5
|
+
|
|
6
|
+
# C extensions
|
|
7
|
+
*.so
|
|
8
|
+
|
|
9
|
+
# Distribution / packaging
|
|
10
|
+
.Python
|
|
11
|
+
build/
|
|
12
|
+
develop-eggs/
|
|
13
|
+
dist/
|
|
14
|
+
downloads/
|
|
15
|
+
eggs/
|
|
16
|
+
.eggs/
|
|
17
|
+
lib/
|
|
18
|
+
lib64/
|
|
19
|
+
parts/
|
|
20
|
+
sdist/
|
|
21
|
+
var/
|
|
22
|
+
wheels/
|
|
23
|
+
*.egg-info/
|
|
24
|
+
.installed.cfg
|
|
25
|
+
*.egg
|
|
26
|
+
MANIFEST
|
|
27
|
+
pip-wheel-metadata/
|
|
28
|
+
|
|
29
|
+
# PyInstaller
|
|
30
|
+
# Usually these files are written by a python script from a template
|
|
31
|
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
|
32
|
+
*.manifest
|
|
33
|
+
*.spec
|
|
34
|
+
|
|
35
|
+
# Installer logs
|
|
36
|
+
pip-log.txt
|
|
37
|
+
pip-delete-this-directory.txt
|
|
38
|
+
|
|
39
|
+
# Unit test / coverage reports
|
|
40
|
+
htmlcov/
|
|
41
|
+
.tox/
|
|
42
|
+
.coverage
|
|
43
|
+
.coverage.*
|
|
44
|
+
.cache
|
|
45
|
+
nosetests.xml
|
|
46
|
+
coverage.xml
|
|
47
|
+
*.cover
|
|
48
|
+
.hypothesis/
|
|
49
|
+
.pytest_cache/
|
|
50
|
+
|
|
51
|
+
# Translations
|
|
52
|
+
*.mo
|
|
53
|
+
*.pot
|
|
54
|
+
|
|
55
|
+
# Django stuff:
|
|
56
|
+
*.log
|
|
57
|
+
local_settings.py
|
|
58
|
+
db.sqlite3
|
|
59
|
+
|
|
60
|
+
# Flask stuff:
|
|
61
|
+
instance/
|
|
62
|
+
.webassets-cache
|
|
63
|
+
|
|
64
|
+
# Scrapy stuff:
|
|
65
|
+
.scrapy
|
|
66
|
+
|
|
67
|
+
# Sphinx documentation
|
|
68
|
+
docs/_build/
|
|
69
|
+
|
|
70
|
+
# PyBuilder
|
|
71
|
+
target/
|
|
72
|
+
|
|
73
|
+
# Jupyter Notebook
|
|
74
|
+
.ipynb_checkpoints
|
|
75
|
+
|
|
76
|
+
# pyenv
|
|
77
|
+
.python-version
|
|
78
|
+
|
|
79
|
+
# celery beat schedule file
|
|
80
|
+
celerybeat-schedule
|
|
81
|
+
|
|
82
|
+
# SageMath parsed files
|
|
83
|
+
*.sage.py
|
|
84
|
+
|
|
85
|
+
# Environments
|
|
86
|
+
.env
|
|
87
|
+
.venv
|
|
88
|
+
env/
|
|
89
|
+
venv/
|
|
90
|
+
ENV/
|
|
91
|
+
env.bak/
|
|
92
|
+
venv.bak/
|
|
93
|
+
|
|
94
|
+
# Spyder project settings
|
|
95
|
+
.spyderproject
|
|
96
|
+
.spyproject
|
|
97
|
+
|
|
98
|
+
# Rope project settings
|
|
99
|
+
.ropeproject
|
|
100
|
+
|
|
101
|
+
# mkdocs documentation
|
|
102
|
+
/site
|
|
103
|
+
|
|
104
|
+
# mypy
|
|
105
|
+
.mypy_cache/
|
|
106
|
+
|
|
107
|
+
#PyCharm
|
|
108
|
+
.idea/
|
|
109
|
+
|
|
110
|
+
# vscode
|
|
111
|
+
.vscode
|
|
112
|
+
.DS_Store
|
|
113
|
+
|
|
114
|
+
# dev testing
|
|
115
|
+
setup.py
|
|
116
|
+
test*
|
|
117
|
+
.olauth
|
|
118
|
+
.olignore
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2021 Moritz Glöckl
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: overleaf-sync-fixed
|
|
3
|
+
Version: 1.2.0
|
|
4
|
+
Summary: Overleaf Two-Way Sync Tool
|
|
5
|
+
Home-page: https://github.com/lawrencee/overleaf-sync-fixed
|
|
6
|
+
Keywords: overleaf sync latex tex fix
|
|
7
|
+
Author: Lawrence Yang (Fixed)
|
|
8
|
+
Author-email: lawrenceeyang@gmail.com
|
|
9
|
+
Requires-Python: >=3
|
|
10
|
+
Description-Content-Type: text/markdown
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Intended Audience :: Science/Research
|
|
13
|
+
Classifier: Programming Language :: Python :: 3
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Requires-Dist: requests == 2.*
|
|
16
|
+
Requires-Dist: beautifulsoup4 == 4.11.1
|
|
17
|
+
Requires-Dist: yaspin == 2.*
|
|
18
|
+
Requires-Dist: python-dateutil~=2.8.1
|
|
19
|
+
Requires-Dist: click == 8.*
|
|
20
|
+
Requires-Dist: PySide6 == 6.*
|
|
21
|
+
|
|
22
|
+
# Overleaf Sync (Fixed)
|
|
23
|
+
|
|
24
|
+
A robust two-way synchronization tool for Overleaf projects, updated to support the latest Overleaf web interface (2024+).
|
|
25
|
+
|
|
26
|
+
## Features
|
|
27
|
+
- **Two-way Sync**: Seamlessly sync changes between local computer and Overleaf.
|
|
28
|
+
- **Robust Uploads**: Specialized strategy for `.tex`, `.bib`, `.cls`, and `.sty` files to ensure 100% content delivery.
|
|
29
|
+
- **No Premium Required**: Works with free Overleaf accounts; no Git or Dropbox subscription needed.
|
|
30
|
+
- **Conflict Awareness**: Intelligent sync logic that prioritizes local changes and warns about potential conflicts.
|
|
31
|
+
|
|
32
|
+
## Installation
|
|
33
|
+
|
|
34
|
+
### From PyPI (Recommended)
|
|
35
|
+
```bash
|
|
36
|
+
pip install overleaf-sync-fixed
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
### From Source
|
|
40
|
+
```bash
|
|
41
|
+
git clone https://github.com/lawrencee/overleaf-sync-fixed
|
|
42
|
+
cd overleaf-sync-fixed
|
|
43
|
+
pip install .
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## Usage
|
|
47
|
+
|
|
48
|
+
### 1. Login
|
|
49
|
+
Authenticate using your `overleaf_session2` cookie.
|
|
50
|
+
```bash
|
|
51
|
+
ols login
|
|
52
|
+
```
|
|
53
|
+
*Note: Get the cookie from Browser DevTools -> Application -> Cookies -> `overleaf_session2`.*
|
|
54
|
+
|
|
55
|
+
### 2. List Projects
|
|
56
|
+
```bash
|
|
57
|
+
ols list
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### 3. Download & Sync
|
|
61
|
+
Download a project to the current directory:
|
|
62
|
+
```bash
|
|
63
|
+
ols download "Your Project Name"
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
Once in the project directory, run two-way sync:
|
|
67
|
+
```bash
|
|
68
|
+
ols
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
---
|
|
72
|
+
**Disclaimer**: This tool is not affiliated with Overleaf. Use at your own risk.
|
|
73
|
+
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# Overleaf Sync (Fixed)
|
|
2
|
+
|
|
3
|
+
A robust two-way synchronization tool for Overleaf projects, updated to support the latest Overleaf web interface (2024+).
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
- **Two-way Sync**: Seamlessly sync changes between local computer and Overleaf.
|
|
7
|
+
- **Robust Uploads**: Specialized strategy for `.tex`, `.bib`, `.cls`, and `.sty` files to ensure 100% content delivery.
|
|
8
|
+
- **No Premium Required**: Works with free Overleaf accounts; no Git or Dropbox subscription needed.
|
|
9
|
+
- **Conflict Awareness**: Intelligent sync logic that prioritizes local changes and warns about potential conflicts.
|
|
10
|
+
|
|
11
|
+
## Installation
|
|
12
|
+
|
|
13
|
+
### From PyPI (Recommended)
|
|
14
|
+
```bash
|
|
15
|
+
pip install overleaf-sync-fixed
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
### From Source
|
|
19
|
+
```bash
|
|
20
|
+
git clone https://github.com/lawrencee/overleaf-sync-fixed
|
|
21
|
+
cd overleaf-sync-fixed
|
|
22
|
+
pip install .
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## Usage
|
|
26
|
+
|
|
27
|
+
### 1. Login
|
|
28
|
+
Authenticate using your `overleaf_session2` cookie.
|
|
29
|
+
```bash
|
|
30
|
+
ols login
|
|
31
|
+
```
|
|
32
|
+
*Note: Get the cookie from Browser DevTools -> Application -> Cookies -> `overleaf_session2`.*
|
|
33
|
+
|
|
34
|
+
### 2. List Projects
|
|
35
|
+
```bash
|
|
36
|
+
ols list
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
### 3. Download & Sync
|
|
40
|
+
Download a project to the current directory:
|
|
41
|
+
```bash
|
|
42
|
+
ols download "Your Project Name"
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
Once in the project directory, run two-way sync:
|
|
46
|
+
```bash
|
|
47
|
+
ols
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
---
|
|
51
|
+
**Disclaimer**: This tool is not affiliated with Overleaf. Use at your own risk.
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
# Overleaf 同步工具 (修复版)
|
|
2
|
+
|
|
3
|
+
一个稳健的 Overleaf 项目双向同步工具,已更新以支持最新的 Overleaf 界面(2024+)。
|
|
4
|
+
|
|
5
|
+
## 特性
|
|
6
|
+
- **双向同步**: 在本地计算机和 Overleaf 之间无缝同步更改。
|
|
7
|
+
- **稳健上传**: 针对 `.tex`, `.bib`, `.cls` 和 `.sty` 文件采用专门的上传策略,确保内容准确送达。
|
|
8
|
+
- **无需高级版**: 适用于免费 Overleaf 账户;无需 Git 或 Dropbox 订阅。
|
|
9
|
+
- **冲突处理**: 智能同步逻辑,优先处理本地更改并对潜在冲突发出警告。
|
|
10
|
+
|
|
11
|
+
## 安装方式
|
|
12
|
+
|
|
13
|
+
### 通过 PyPI 安装 (推荐)
|
|
14
|
+
```bash
|
|
15
|
+
pip install overleaf-sync-fixed
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
### 从源码安装
|
|
19
|
+
```bash
|
|
20
|
+
git clone https://github.com/lawrencee/overleaf-sync-fixed
|
|
21
|
+
cd overleaf-sync-fixed
|
|
22
|
+
pip install .
|
|
23
|
+
```
|
|
24
|
+
|
|
25
|
+
## 使用方法
|
|
26
|
+
|
|
27
|
+
### 1. 登录
|
|
28
|
+
使用 `overleaf_session2` cookie 进行身份验证。
|
|
29
|
+
```bash
|
|
30
|
+
ols login
|
|
31
|
+
```
|
|
32
|
+
*提示:从浏览器开发者工具 (F12) -> Application -> Cookies -> 获取 `overleaf_session2` 的值。*
|
|
33
|
+
|
|
34
|
+
### 2. 列出项目
|
|
35
|
+
```bash
|
|
36
|
+
ols list
|
|
37
|
+
```
|
|
38
|
+
|
|
39
|
+
### 3. 下载与同步
|
|
40
|
+
将项目下载到当前目录:
|
|
41
|
+
```bash
|
|
42
|
+
ols download "你的项目名称"
|
|
43
|
+
```
|
|
44
|
+
|
|
45
|
+
在项目目录中,运行双向同步:
|
|
46
|
+
```bash
|
|
47
|
+
ols
|
|
48
|
+
```
|
|
49
|
+
|
|
50
|
+
---
|
|
51
|
+
**免责声明**: 本工具与 Overleaf 官方无关。请自行承担使用风险。
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import re
|
|
2
|
+
import sys
|
|
3
|
+
import pickle
|
|
4
|
+
import os
|
|
5
|
+
import requests as reqs
|
|
6
|
+
from olsync.olclient import OverleafClient
|
|
7
|
+
|
|
8
|
+
def debug_ids(project_id):
    """Fetch project pages and print every 24-char hex ID found in the HTML.

    Debugging helper for locating folder/doc ObjectIds of an Overleaf
    project: any ID sharing a long common prefix with *project_id* is
    flagged as a strong candidate (IDs created close together share a
    timestamp/machine prefix).

    Requires a pickled auth store (``.olauth``) in the current directory.
    Prints results to stdout; returns None.
    """
    cookie_path = ".olauth"
    if not os.path.isfile(cookie_path):
        print("Error: .olauth not found")
        return

    with open(cookie_path, 'rb') as f:
        store = pickle.load(f)

    client = OverleafClient(store["cookie"], store["csrf"])

    urls = [
        f"https://www.overleaf.com/project/{project_id}",
        f"https://www.overleaf.com/project/{project_id}/editor"
    ]

    print(f"Project ID: {project_id}")

    for url in urls:
        print(f"\nFetching {url}...")
        r = reqs.get(url, cookies=client._cookie, headers=client._headers)
        if r.status_code != 200:
            print(f"Failed to fetch {url}: {r.status_code}")
            continue

        # Overleaf ids look like Mongo ObjectIds: 24 lowercase hex chars.
        ids = re.findall(r'[a-f0-9]{24}', r.text)
        # was sorted(list(set(ids))) -- the list() wrapper is redundant
        unique_ids = sorted(set(ids))

        print(f"Found {len(ids)} ID-like strings ({len(unique_ids)} unique):")
        for candidate in unique_ids:
            # Length of the common prefix between candidate and project id.
            overlap = 0
            for c1, c2 in zip(project_id, candidate):
                if c1 != c2:
                    break
                overlap += 1

            suffix = ""
            if candidate == project_id:
                suffix = " (PROJECT ID)"
            elif overlap >= 20:
                suffix = f" (STRONG CANDIDATE, overlap {overlap})"

            print(f"  {candidate}{suffix}")
|
|
50
|
+
|
|
51
|
+
if __name__ == "__main__":
    # Expect exactly one positional argument: the Overleaf project id.
    args = sys.argv[1:]
    if args:
        debug_ids(args[0])
    else:
        print("Usage: python debug_ids.py <project_id>")
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
"""Ol Browser Login Utility"""
|
|
2
|
+
##################################################
|
|
3
|
+
# MIT License
|
|
4
|
+
##################################################
|
|
5
|
+
# File: olbrowserlogin.py
|
|
6
|
+
# Description: Overleaf Browser Login Utility
|
|
7
|
+
# Author: Moritz Glöckl
|
|
8
|
+
# License: MIT
|
|
9
|
+
# Version: 1.2.0
|
|
10
|
+
##################################################
|
|
11
|
+
|
|
12
|
+
from PySide6.QtCore import *
|
|
13
|
+
from PySide6.QtWidgets import *
|
|
14
|
+
from PySide6.QtWebEngineWidgets import *
|
|
15
|
+
from PySide6.QtWebEngineCore import QWebEngineProfile, QWebEngineSettings, QWebEnginePage
|
|
16
|
+
|
|
17
|
+
# Where to get the CSRF Token and where to send the login request to
|
|
18
|
+
LOGIN_URL = "https://www.overleaf.com/login"
|
|
19
|
+
PROJECT_URL = "https://www.overleaf.com/project" # The dashboard URL
|
|
20
|
+
# JS snippet to extract the csrfToken
|
|
21
|
+
JAVASCRIPT_CSRF_EXTRACTOR = "document.getElementsByName('ol-csrfToken')[0].content"
|
|
22
|
+
# Name of the cookies we want to extract
|
|
23
|
+
COOKIE_NAMES = ["overleaf_session2", "GCLB"]
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class OlBrowserLoginWindow(QMainWindow):
    """
    Overleaf Browser Login Utility
    Opens a browser window to securely login the user and returns relevant login data.

    Collects the session cookies named in COOKIE_NAMES as they are set by the
    site, and extracts the CSRF token from the project dashboard once the user
    has successfully logged in and been redirected there.
    """

    def __init__(self, *args, **kwargs):
        super(OlBrowserLoginWindow, self).__init__(*args, **kwargs)

        self.webview = QWebEngineView()

        # _cookies: name -> value for the cookies listed in COOKIE_NAMES
        self._cookies = {}
        # _csrf: CSRF token scraped from the dashboard after login
        self._csrf = ""
        # _login_success: set True only once the CSRF extraction callback ran
        self._login_success = False

        # Use a dedicated, non-persistent profile so credentials never touch disk.
        self.profile = QWebEngineProfile(self.webview)
        self.cookie_store = self.profile.cookieStore()
        # Capture cookies as the login flow sets them.
        self.cookie_store.cookieAdded.connect(self.handle_cookie_added)
        self.profile.setPersistentCookiesPolicy(QWebEngineProfile.NoPersistentCookies)

        # JS must be enabled: the CSRF token is read via a JS snippet.
        self.profile.settings().setAttribute(QWebEngineSettings.JavascriptEnabled, True)

        webpage = QWebEnginePage(self.profile, self)
        self.webview.setPage(webpage)
        self.webview.load(QUrl.fromUserInput(LOGIN_URL))
        # Fires after every navigation; handle_load_finished decides whether
        # the user has reached the dashboard yet.
        self.webview.loadFinished.connect(self.handle_load_finished)

        self.setCentralWidget(self.webview)
        self.resize(600, 700)

    def handle_load_finished(self):
        """On each page load: if we landed on the dashboard, scrape the CSRF
        token via JS, mark the login successful and quit the event loop."""
        def callback(result):
            # result is the value of the ol-csrfToken <meta> tag.
            self._csrf = result
            self._login_success = True
            QCoreApplication.quit()

        if self.webview.url().toString() == PROJECT_URL:
            self.webview.page().runJavaScript(
                JAVASCRIPT_CSRF_EXTRACTOR, 0, callback
            )

    def handle_cookie_added(self, cookie):
        """Store the value of any cookie whose name is in COOKIE_NAMES."""
        cookie_name = cookie.name().data().decode('utf-8')
        if cookie_name in COOKIE_NAMES:
            self._cookies[cookie_name] = cookie.value().data().decode('utf-8')

    @property
    def cookies(self):
        # Captured session cookies (dict: name -> value).
        return self._cookies

    @property
    def csrf(self):
        # CSRF token; empty string until login completed.
        return self._csrf

    @property
    def login_success(self):
        # True once the dashboard was reached and the CSRF token extracted.
        return self._login_success
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def login():
    """Run the interactive browser login and return the captured credentials.

    Returns a dict ``{"cookie": <dict of cookies>, "csrf": <token>}`` on a
    successful login, or ``None`` if the window was closed without logging in.
    """
    from PySide6.QtCore import QLoggingCategory
    # Suppress the noisy WebEngine context banner on startup.
    QLoggingCategory.setFilterRules('''\
qt.webenginecontext.info=false
''')

    app = QApplication([])
    window = OlBrowserLoginWindow()
    window.show()
    # Blocks until the window quits the event loop (login done or closed).
    app.exec()

    if window.login_success:
        return {"cookie": window.cookies, "csrf": window.csrf}
    return None
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
"""Overleaf Client"""
|
|
2
|
+
##################################################
|
|
3
|
+
# MIT License
|
|
4
|
+
##################################################
|
|
5
|
+
# File: olclient.py
|
|
6
|
+
# Description: Overleaf API Wrapper
|
|
7
|
+
# Author: Moritz Glöckl
|
|
8
|
+
# License: MIT
|
|
9
|
+
# Version: 1.3.3
|
|
10
|
+
##################################################
|
|
11
|
+
|
|
12
|
+
import os
|
|
13
|
+
import re
|
|
14
|
+
import html
|
|
15
|
+
import requests as reqs
|
|
16
|
+
from bs4 import BeautifulSoup
|
|
17
|
+
import json
|
|
18
|
+
import uuid
|
|
19
|
+
import time
|
|
20
|
+
import traceback
|
|
21
|
+
|
|
22
|
+
# Optional compatibility shim for the websocket-client package: if the module
# is importable but exposes no top-level `SSLError` attribute (presumably
# removed in some releases -- TODO confirm against installed version), patch in
# a stand-in exception class so `websocket.SSLError` remains a valid name for
# any caller that references it. The package itself is entirely optional.
try:
    import websocket
    if not hasattr(websocket, 'SSLError'):
        class SSLError(Exception): pass
        websocket.SSLError = SSLError
except ImportError:
    pass
|
|
29
|
+
|
|
30
|
+
# URLs
|
|
31
|
+
LOGIN_URL = "https://www.overleaf.com/login"
|
|
32
|
+
PROJECT_URL = "https://www.overleaf.com/project"
|
|
33
|
+
DOWNLOAD_URL = "https://www.overleaf.com/project/{}/download/zip"
|
|
34
|
+
UPLOAD_URL = "https://www.overleaf.com/project/{}/upload"
|
|
35
|
+
FOLDER_URL = "https://www.overleaf.com/project/{}/folder"
|
|
36
|
+
DELETE_URL = "https://www.overleaf.com/project/{}/doc/{}"
|
|
37
|
+
COMPILE_URL = "https://www.overleaf.com/project/{}/compile?enable_pdf_caching=true"
|
|
38
|
+
BASE_URL = "https://www.overleaf.com"
|
|
39
|
+
PATH_SEP = "/"
|
|
40
|
+
|
|
41
|
+
class OverleafClient(object):
    """Thin wrapper around the Overleaf web interface.

    Authenticates every request with the session cookie dict captured at
    login plus the matching CSRF token (sent as ``X-Csrf-Token`` on
    state-changing requests).
    """

    @staticmethod
    def filter_projects(json_content, more_attrs=None):
        """Yield projects that are neither archived nor trashed.

        :param json_content: iterable of project dicts as returned by the
            dashboard.
        :param more_attrs: optional dict of key/value pairs each project
            must additionally match exactly.
        """
        more_attrs = more_attrs or {}
        for p in json_content:
            if not p.get("archived") and not p.get("trashed"):
                if all(p.get(k) == v for k, v in more_attrs.items()):
                    yield p

    def __init__(self, cookie=None, csrf=None):
        """:param cookie: dict of session cookies (e.g. overleaf_session2).
        :param csrf: CSRF token for state-changing requests."""
        self._cookie = cookie
        self._csrf = csrf
        # Browser-like headers; a bare requests UA may be served different
        # (or blocked) markup.
        self._headers = {
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.9",
        }

    def all_projects(self):
        """Return the list of active (non-archived, non-trashed) projects.

        Tries the ``ol-projects`` meta tag first, then falls back to locating
        an embedded JSON array in the page source.

        :raises PermissionError: if authentication failed / cookie expired.
        :raises AttributeError: if no project list could be located.
        """
        projects_page = reqs.get(PROJECT_URL, cookies=self._cookie, headers=self._headers)
        if not projects_page.ok:
            if projects_page.status_code in [401, 403]:
                raise PermissionError("Authentication failed. Cookie expired.")
            raise reqs.HTTPError(f"HTTP {projects_page.status_code}")

        text = html.unescape(projects_page.text)
        json_content = None

        # Preferred: the dashboard embeds the project list in a meta tag.
        soup = BeautifulSoup(text, 'html.parser')
        meta_projects = soup.find('meta', {'name': 'ol-projects'})
        if meta_projects:
            try:
                json_content = json.loads(meta_projects.get('content'))
            except Exception:
                pass  # fall through to the marker scan below

        if json_content is None:
            # Fallback: scan the raw page for a JSON array following one of
            # these markers, using a small bracket-matching parser that is
            # aware of string literals and escapes.
            markers = ['"projects":[', 'preloadedProjects:[', 'initialProjects:[']
            for marker in markers:
                start_pos = text.find(marker)
                if start_pos != -1:
                    start_idx = start_pos + len(marker) - 1  # index of the '['
                    bc, in_str, esc = 0, False, False
                    for i in range(start_idx, len(text)):
                        c = text[i]
                        if esc:
                            esc = False
                            continue
                        if c == '\\':
                            esc = True
                            continue
                        if c == '"':
                            in_str = not in_str
                            continue
                        if not in_str:
                            if c == '[':
                                bc += 1
                            elif c == ']':
                                bc -= 1
                                if bc == 0:
                                    try:
                                        json_content = json.loads(text[start_idx:i + 1])
                                        break
                                    except Exception:
                                        continue
                if json_content:
                    break

        if json_content is None:
            if "login" in projects_page.url or "Log In" in text:
                raise PermissionError("Authentication failed. Cookie might be expired.")
            raise AttributeError("Could not find project list. Overleaf layout may have changed.")

        if isinstance(json_content, dict) and 'projects' in json_content:
            json_content = json_content['projects']
        return list(OverleafClient.filter_projects(json_content))

    def get_project(self, project_name):
        """Return the project dict whose name equals *project_name*.

        :raises ValueError: if no active project has that name.
        """
        projects = self.all_projects()
        project = next((p for p in projects if p['name'] == project_name), None)
        if not project:
            raise ValueError(f"Project '{project_name}' not found.")
        return project

    def download_project(self, project_id):
        """Return the project's zip archive as raw bytes."""
        r = reqs.get(DOWNLOAD_URL.format(project_id), stream=True, cookies=self._cookie, headers=self._headers)
        return r.content

    def create_folder(self, project_id, parent_folder_id, folder_name):
        """Create a folder in the project; return the response JSON or None."""
        params = {"name": folder_name}
        if parent_folder_id:
            params["parent_folder_id"] = parent_folder_id
        h = self._headers.copy()
        h.update({"X-Csrf-Token": self._csrf})
        r = reqs.post(FOLDER_URL.format(project_id), cookies=self._cookie, headers=h, json=params)
        if r.ok:
            return r.json()
        return None

    def get_project_infos(self, project_id, verbose_error_logging=False, file_list=None):
        """Best-effort discovery of the project's root folder id.

        Returns a minimal project-info dict ``{'rootFolder': {'_id': ...,
        'folders': [], 'docs': []}}``. Tries, in order: the metadata
        endpoint, a create-then-delete folder probe (whose response reveals
        the parent/root folder id), and finally falls back to the project id
        itself. Never raises; always returns a dict.

        :param verbose_error_logging: accepted for interface compatibility.
        :param file_list: accepted for interface compatibility (unused here).
        """
        project_meta_json = None
        try:
            url = BASE_URL + f"/project/{project_id}/metadata"
            h = self._headers.copy()
            h.update({"X-Csrf-Token": self._csrf, "X-Requested-With": "XMLHttpRequest", "Referer": BASE_URL + f"/project/{project_id}"})
            r = reqs.get(url, cookies=self._cookie, headers=h)
            if r.status_code == 200:
                project_meta_json = r.json()
        except Exception:
            pass  # metadata endpoint is optional; other strategies follow

        # Discovery by creation: make a throwaway folder, read its parent id
        # from the response (that is the root folder), then delete it again.
        try:
            name = f"olsync_find_{uuid.uuid4().hex[:6]}"
            h = self._headers.copy()
            h.update({"X-Csrf-Token": self._csrf, "Referer": BASE_URL + f"/project/{project_id}"})
            r = reqs.post(BASE_URL + f"/project/{project_id}/folder", cookies=self._cookie, headers=h, json={"name": name})
            if r.ok:
                data = r.json()
                root_id = data.get('parent_folder_id') or data.get('root_folder_id') or data.get('parentFolderId')
                new_id = data.get('_id')
                if new_id:
                    reqs.delete(BASE_URL + f"/project/{project_id}/folder/{new_id}", cookies=self._cookie, headers=h, json={})
                if root_id:
                    return {'rootFolder': {'_id': root_id, 'folders': [], 'docs': []}}
        except Exception:
            pass  # best-effort probe; fall through

        if project_meta_json and 'projectMeta' in project_meta_json:
            # NOTE(review): heuristic suffix match inherited from the
            # original implementation -- confirm these suffixes are still
            # meaningful for current Overleaf metadata keys.
            for k in project_meta_json['projectMeta'].keys():
                if k.endswith("92") or k.endswith("c86"):
                    return {'rootFolder': {'_id': k, 'folders': [], 'docs': []}}

        # Last resort: some endpoints accept the project id in place of the
        # root folder id.
        return {'rootFolder': {'_id': project_id, 'folders': [], 'docs': []}}

    def find_file_id_by_name(self, project_id, file_name, verbose_error_logging=False):
        """Scrape the editor page for the id of the entity named *file_name*.

        Returns the 24-char hex id, or None if not found / on any error.
        """
        try:
            r = reqs.get(BASE_URL + f"/project/{project_id}", cookies=self._cookie, headers=self._headers)
            if r.status_code == 200:
                # Match  "name":"<file_name>" ... "_id"/"id":"<24-hex>"
                pattern = r'["\']name["\']\s*:\s*["\']' + re.escape(file_name) + r'["\'].*?["\'](?:_id|id)["\']\s*:\s*["\']([a-f0-9]{24})["\']'
                match = re.search(pattern, r.text, re.DOTALL)
                if match:
                    return match.group(1)
        except Exception:
            pass  # treat any failure as "not found"
        return None

    def update_doc(self, project_id, doc_id, content):
        """Overwrite a doc's content via the doc REST endpoint.

        Tries the content both as a single string and as a list of lines
        (different server versions accept different shapes).
        Returns True on success, False otherwise.
        """
        url = BASE_URL + f"/project/{project_id}/doc/{doc_id}"
        h = self._headers.copy()
        h.update({"X-Csrf-Token": self._csrf, "Content-Type": "application/json", "Referer": BASE_URL + f"/project/{project_id}/editor"})
        for payload in [{"content": content}, {"content": content.split('\n')}]:
            r = reqs.put(url, cookies=self._cookie, headers=h, json=payload)
            if r.ok:
                return True
        return False

    def upload_file(self, project_id, project_infos, file_name, file_size, file, verbose_error_logging=False):
        """
        Uploads a file. Handles text files via Doc API and others via Multipart.

        :param project_infos: dict from :meth:`get_project_infos` (may be None).
        :param file: binary file-like object positioned anywhere (rewound here).
        :returns: True on success.
        :raises requests.HTTPError: when all upload strategies failed.
        """
        clean_name = file_name.split(PATH_SEP)[-1]

        # 1. TEXT FILE STRATEGY: update/create via the doc API, which
        # preserves content exactly for LaTeX source files.
        if file_name.endswith(('.tex', '.bib', '.cls', '.sty')):
            if verbose_error_logging:
                print(f"[DEBUG] Text File Strategy for {clean_name}")
            try:
                if hasattr(file, 'seek'):
                    file.seek(0)
                content = file.read()
                if isinstance(content, bytes):
                    content = content.decode('utf-8', errors='replace')

                fid = self.find_file_id_by_name(project_id, clean_name, verbose_error_logging)
                if fid:
                    if verbose_error_logging:
                        print(f"[DEBUG] Updating existing {fid}")
                    if self.update_doc(project_id, fid, content):
                        return True
                    # In-place update failed: delete and recreate below.
                    self.delete_file(project_id, project_infos, file_name)
                    time.sleep(0.5)

                doc_url = BASE_URL + f"/project/{project_id}/doc"
                h = self._headers.copy()
                h.update({"X-Csrf-Token": self._csrf, "Referer": BASE_URL + f"/project/{project_id}/editor"})
                folder_id = project_infos.get('rootFolder', {}).get('_id') if project_infos else None
                # Try with the discovered folder id first, then without one.
                for p_id in [folder_id, None]:
                    r = reqs.post(doc_url, cookies=self._cookie, headers=h, json={"name": clean_name, "parent_folder_id": p_id})
                    if r.ok:
                        nid = r.json().get('_id')
                        if nid:
                            self.update_doc(project_id, nid, content)
                        return True
            except Exception as e:
                if verbose_error_logging:
                    print(f"[DEBUG] Doc API failed: {e}")
                # Fall through to the multipart strategy.

        # 2. MULTIPART STRATEGY: the regular browser upload endpoint.
        url = UPLOAD_URL.format(project_id)
        h = self._headers.copy()
        h.update({"X-Csrf-Token": self._csrf, "X-Requested-With": "XMLHttpRequest", "Referer": BASE_URL + f"/project/{project_id}/editor", "Origin": BASE_URL})
        folder_id = project_infos.get('rootFolder', {}).get('_id') if project_infos else None
        # Try plausible folder ids in order; "" lets the server choose.
        for fid in [folder_id, "", project_id]:
            if hasattr(file, 'seek'):
                file.seek(0)
            data = {
                # folder_id is never None here: the ternary substitutes "".
                "folder_id": fid if fid is not None else "",
                "qquuid": str(uuid.uuid4()),
                "qqfilename": clean_name,
                "qqtotalfilesize": file_size,
                "upload_params": '{"is_not_standard_upload":true}',
                "csrf_token": self._csrf,
                "_csrf": self._csrf,
            }
            try:
                r = reqs.post(url, cookies=self._cookie, data=data, files={"qqfile": (clean_name, file, 'application/octet-stream')}, headers=h)
                if r.status_code == 200 and r.json().get("success"):
                    return True
            except Exception:
                pass  # try the next candidate folder id

        raise reqs.HTTPError(f"Upload failed for {clean_name}")

    def delete_file(self, project_id, project_infos, file_name):
        """Delete the doc named *file_name*; True iff the server returned 204."""
        fid = self.find_file_id_by_name(project_id, file_name.split(PATH_SEP)[-1])
        if not fid:
            return False
        h = self._headers.copy()
        h.update({"X-Csrf-Token": self._csrf})
        r = reqs.delete(DELETE_URL.format(project_id, fid), cookies=self._cookie, headers=h, json={})
        return r.status_code == 204

    def download_pdf(self, project_id):
        """Compile the project and download the resulting PDF.

        :returns: (pdf_path, pdf_bytes) on success, None if the PDF download
            itself failed.
        :raises requests.HTTPError: if compilation failed.
        """
        h = self._headers.copy()
        h.update({"X-Csrf-Token": self._csrf})
        body = {"check": "silent", "draft": False, "incrementalCompilesEnabled": True, "rootDoc_id": "", "stopOnFirstError": False}
        r = reqs.post(COMPILE_URL.format(project_id), cookies=self._cookie, headers=h, json=body)
        if not r.ok:
            raise reqs.HTTPError()
        res = r.json()
        if res["status"] != "success":
            raise reqs.HTTPError()
        pdf = next(v for v in res['outputFiles'] if v['type'] == 'pdf')
        dr = reqs.get(BASE_URL + pdf['url'], cookies=self._cookie, headers=h)
        if dr.ok:
            return pdf['path'], dr.content
        return None
|
|
@@ -0,0 +1,479 @@
|
|
|
1
|
+
"""Overleaf Two-Way Sync Tool"""
|
|
2
|
+
##################################################
|
|
3
|
+
# MIT License
|
|
4
|
+
##################################################
|
|
5
|
+
# File: olsync.py
|
|
6
|
+
# Description: Overleaf Two-Way Sync
|
|
7
|
+
# Author: Moritz Glöckl
|
|
8
|
+
# License: MIT
|
|
9
|
+
# Version: 1.2.0
|
|
10
|
+
##################################################
|
|
11
|
+
|
|
12
|
+
import click
|
|
13
|
+
import os
|
|
14
|
+
from yaspin import yaspin
|
|
15
|
+
import pickle
|
|
16
|
+
import zipfile
|
|
17
|
+
import io
|
|
18
|
+
import dateutil.parser
|
|
19
|
+
import glob
|
|
20
|
+
import fnmatch
|
|
21
|
+
import traceback
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
try:
|
|
25
|
+
# Import for pip installation / wheel
|
|
26
|
+
from olsync.olclient import OverleafClient
|
|
27
|
+
import olsync.olbrowserlogin as olbrowserlogin
|
|
28
|
+
except ImportError:
|
|
29
|
+
# Import for development
|
|
30
|
+
from olclient import OverleafClient
|
|
31
|
+
import olbrowserlogin
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def find_olauth(start_path, filename=".olauth"):
    """Search for the auth file in *start_path* and every ancestor directory.

    Returns the path to the first match as a string (nearest directory wins),
    or None when no ancestor up to the filesystem root contains *filename*.
    """
    start = Path(start_path).resolve()
    # Path.parents enumerates each ancestor up to (and including) the root.
    for directory in (start, *start.parents):
        candidate = directory / filename
        if candidate.is_file():
            return str(candidate)
    return None
|
|
45
|
+
|
|
46
|
+
@click.group(invoke_without_command=True)
@click.option('-l', '--local-only', 'local', is_flag=True, help="Sync local project files to Overleaf only.")
@click.option('-r', '--remote-only', 'remote', is_flag=True,
              help="Sync remote project files from Overleaf to local file system only.")
@click.option('-n', '--name', 'project_name', default="",
              help="Specify the Overleaf project name instead of the default name of the sync directory.")
@click.option('--store-path', 'cookie_path', default=".olauth", type=click.Path(exists=False),
              help="Relative path to load the persisted Overleaf cookie.")
@click.option('-p', '--path', 'sync_path', default=".", type=click.Path(exists=True),
              help="Path of the project to sync.")
@click.option('-i', '--olignore', 'olignore_path', default=".olignore", type=click.Path(exists=False),
              help="Path to the .olignore file relative to sync path (ignored if syncing from remote to local). See "
                   "fnmatch / unix filename pattern matching for information on how to use it.")
@click.option('-v', '--verbose', 'verbose', is_flag=True, help="Enable extended error logging.")
@click.version_option(package_name='overleaf-sync-fixed')
@click.pass_context
def main(ctx, local, remote, project_name, cookie_path, sync_path, olignore_path, verbose):
    # Entry point of the CLI. When invoked without a subcommand it performs a
    # two-way sync of the project in `sync_path` with its Overleaf counterpart;
    # `-l`/`-r` restrict the direction. Subcommands (login/list/download) are
    # dispatched by click and skip this body entirely.
    if verbose:
        click.echo("[DEBUG] Running olsync.py version with enhanced debugging")
    if ctx.invoked_subcommand is None:
        # Search for .olauth in current or parent directories, but only when
        # the user did not override the default store path explicitly.
        resolved_cookie_path = find_olauth(os.getcwd(), cookie_path) if cookie_path == ".olauth" else cookie_path

        if not resolved_cookie_path or not os.path.isfile(resolved_cookie_path):
            raise click.ClickException(
                "Persisted Overleaf cookie not found. Please login or check store path.")

        # The cookie store is a pickled dict with "cookie" and "csrf" keys,
        # written by login_handler().
        with open(resolved_cookie_path, 'rb') as f:
            store = pickle.load(f)

        overleaf_client = OverleafClient(store["cookie"], store["csrf"])

        # Change the current directory to the specified sync path; all file
        # paths below are relative to the project root.
        os.chdir(sync_path)

        # Default project name is the name of the sync directory.
        project_name = project_name or os.path.basename(os.getcwd())

        project = execute_action(
            lambda: overleaf_client.get_project(project_name),
            "Querying project",
            "Project queried successfully.",
            "Project could not be queried.",
            verbose)

        # The remote state is obtained as a zip archive; its namelist() acts
        # as the remote file listing for all comparisons below.
        zip_file = execute_action(
            lambda: zipfile.ZipFile(io.BytesIO(
                overleaf_client.download_project(project["id"]))),
            "Downloading project",
            "Project downloaded successfully.",
            "Project could not be downloaded.",
            verbose)

        # project_infos (folder/file ids) is optional: syncing remote->local
        # works without it, but uploads/deletes need it.
        project_infos = execute_action(
            lambda: overleaf_client.get_project_infos(project["id"], verbose, file_list=zip_file.namelist()),
            "Querying project details",
            "Project details queried successfully.",
            "Project details could not be queried (Optional).",
            verbose,
            is_optional=True)

        if not project_infos and verbose:
            click.echo("[DEBUG] Proceeding without project_infos. File uploads/deletes will fail.")

        # No direction flag given -> sync both ways; deletions are then
        # suppressed (see the `and not sync` guards below).
        sync = not (local or remote)

        # Sync local to remote
        if local or sync:
            sync_func(
                files_from=olignore_keep_list(olignore_path),
                deleted_files=[f for f in zip_file.namelist() if f not in olignore_keep_list(olignore_path) and not sync],
                create_file_at_to=lambda name: overleaf_client.upload_file(
                    project["id"], project_infos, name, os.path.getsize(name), open(name, 'rb'), verbose),
                delete_file_at_to=lambda name: overleaf_client.delete_file(project["id"], project_infos, name),
                create_file_at_from=lambda name: write_file(name, zip_file.read(name)),
                from_exists_in_to=lambda name: name in zip_file.namelist(),
                from_equal_to_to=lambda name: open(name, 'rb').read() == zip_file.read(name),
                # Local-to-remote: if not equal, local takes priority
                from_newer_than_to=lambda name: True,
                from_name="local",
                to_name="remote",
                verbose=verbose,
                ask=False)

        # Sync remote to local
        if remote or sync:
            sync_func(
                files_from=zip_file.namelist(),
                deleted_files=[f for f in olignore_keep_list(olignore_path) if f not in zip_file.namelist() and not sync],
                create_file_at_to=lambda name: write_file(name, zip_file.read(name)),
                delete_file_at_to=lambda name: delete_file(name),
                create_file_at_from=lambda name: overleaf_client.upload_file(
                    project["id"], project_infos, name, os.path.getsize(name), open(name, 'rb'), verbose),
                from_exists_in_to=lambda name: os.path.isfile(name),
                from_equal_to_to=lambda name: open(name, 'rb').read() == zip_file.read(name),
                # Compare the project's last-update time against the local
                # file's mtime to warn before overwriting newer local edits.
                from_newer_than_to=lambda name: dateutil.parser.isoparse(project["lastUpdated"]).timestamp() >
                                                os.path.getmtime(name),
                from_name="remote",
                to_name="local",
                verbose=verbose,
                ask=True)
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
@main.command()
@click.option('--path', 'cookie_path', default=".olauth", type=click.Path(exists=False),
              help="Path to store the persisted Overleaf cookie.")
@click.option('-v', '--verbose', 'verbose', is_flag=True, help="Enable extended error logging.")
def login(cookie_path, verbose):
    # Run the browser login flow and persist the session cookie to cookie_path.
    # Ask before clobbering an existing cookie file; the prompt is only shown
    # when the file actually exists.
    if os.path.isfile(cookie_path):
        if not click.confirm('Persisted Overleaf cookie already exist. Do you want to override it?'):
            return
    click.clear()
    success_message = ("Login successful. Cookie persisted as `"
                       + click.format_filename(cookie_path)
                       + "`. You may now sync your project.")
    execute_action(lambda: login_handler(cookie_path), "Login", success_message,
                   "Login failed. Please try again.", verbose)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
@main.command(name='list')
@click.option('--store-path', 'cookie_path', default=".olauth", type=click.Path(exists=False),
              help="Relative path to load the persisted Overleaf cookie.")
@click.option('-v', '--verbose', 'verbose', is_flag=True, help="Enable extended error logging.")
def list_projects(cookie_path, verbose):
    # List all active Overleaf projects, most recently updated first.
    def query_projects():
        # Returns True so execute_action() treats the query as a success even
        # when the account has no projects.
        all_p = overleaf_client.all_projects()
        if not all_p:
            click.echo("\nNo active projects found.")
            return True
        for index, p in enumerate(sorted(all_p, key=lambda x: x['lastUpdated'], reverse=True)):
            if not index:
                # Blank separator before the first row only.
                click.echo("\n")
            click.echo(f"{dateutil.parser.isoparse(p['lastUpdated']).strftime('%m/%d/%Y, %H:%M:%S')} - {p['name']}")
        return True

    if not os.path.isfile(cookie_path):
        raise click.ClickException(
            "Persisted Overleaf cookie not found. Please login or check store path.")

    # Cookie store: pickled dict with "cookie" and "csrf" keys (see login_handler).
    with open(cookie_path, 'rb') as f:
        store = pickle.load(f)

    overleaf_client = OverleafClient(store["cookie"], store["csrf"])

    click.clear()
    execute_action(query_projects, "Querying all projects",
                   "Querying all projects successful.",
                   "Querying all projects failed.", verbose)
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
@main.command(name='download')
@click.argument('project_name', required=False)
@click.option('--pdf', is_flag=True, help="Download compiled PDF instead of source code.")
@click.option('--path', 'download_path', default=".", type=click.Path(exists=True), help="Directory to download to.")
@click.option('--store-path', 'cookie_path', default=".olauth", type=click.Path(exists=False),
              help="Relative path to load the persisted Overleaf cookie.")
@click.option('-v', '--verbose', 'verbose', is_flag=True, help="Enable extended error logging.")
def download(project_name, pdf, download_path, cookie_path, verbose):
    """Download project source (default) or compiled PDF (--pdf)."""
    if not os.path.isfile(cookie_path):
        raise click.ClickException(
            "Persisted Overleaf cookie not found. Please login or check store path.")

    # Cookie store: pickled dict with "cookie" and "csrf" keys (see login_handler).
    with open(cookie_path, 'rb') as f:
        store = pickle.load(f)

    overleaf_client = OverleafClient(store["cookie"], store["csrf"])

    # Fall back to the current directory name when no project name was given.
    project_name = project_name or os.path.basename(os.getcwd())

    project = execute_action(
        lambda: overleaf_client.get_project(project_name),
        "Querying project",
        "Project queried successfully.",
        "Project could not be queried.",
        verbose)

    if pdf:
        # Download compiled PDF. download_pdf() returns (file_name, content).
        file_name, content = execute_action(
            lambda: overleaf_client.download_pdf(project["id"]),
            "Compiling and downloading PDF",
            "PDF downloaded successfully.",
            "PDF could not be downloaded.",
            verbose)
        if file_name and content:
            target_path = os.path.join(download_path, file_name)
            with open(target_path, 'wb') as f:
                f.write(content)
            click.echo(f"\n✅ Saved PDF to: {target_path}")
    else:
        # Download source ZIP.
        content = execute_action(
            lambda: overleaf_client.download_project(project["id"]),
            "Downloading project source",
            "Source downloaded successfully.",
            "Source could not be downloaded.",
            verbose)

        # Create a directory with the project name to ensure it matches for future syncs.
        # Fix: use makedirs(exist_ok=True) instead of a racy exists()+makedirs()
        # pair; also report creation only when the directory was actually absent.
        target_dir = os.path.join(download_path, project["name"])
        if not os.path.isdir(target_dir):
            os.makedirs(target_dir, exist_ok=True)
            click.echo(f"Created directory: {target_dir}")

        # Fix: dropped the redundant local `import zipfile, io` — both modules
        # are already imported at module level (used by main()).
        with zipfile.ZipFile(io.BytesIO(content)) as z:
            z.extractall(target_dir)
        click.echo(f"\n✅ Extracted source to: {os.path.abspath(target_dir)}")
        click.echo(f"💡 Hint: You can now 'cd \"{project['name']}\"' and run 'ols' to sync.")
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
def login_handler(path):
    """Run the browser login flow and pickle the resulting session to *path*.

    Returns True on success, False when the browser login was aborted.
    """
    session = olbrowserlogin.login()
    if session is None:
        return False
    with open(path, 'wb+') as fh:
        pickle.dump(session, fh)
    return True
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def delete_file(path):
    """Delete the local file at *path*, tolerating files that are already gone.

    No-op when dirname(path) == path (a root-like path that must never be
    removed) or when the parent directory does not exist — in that case the
    file cannot exist either.
    """
    parent = os.path.dirname(path)
    if parent == path:
        # Root-like path: nothing sensible to delete.
        return

    if parent != '' and not os.path.exists(parent):
        # Parent directory is gone, so the file is already gone too.
        return

    # Fix: the original called os.remove() unconditionally, which raises
    # FileNotFoundError when the file was already removed (e.g. deleted
    # manually between listing and syncing). Make the delete idempotent.
    if os.path.isfile(path):
        os.remove(path)
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def write_file(path, content):
    """Write *content* (bytes) to the local file *path*.

    Creates missing parent directories on the way. No-op when
    dirname(path) == path, i.e. *path* is a root-like directory.
    """
    parent = os.path.dirname(path)
    if parent == path:
        # Root-like path, not a writable file.
        return

    # Ensure the parent directory chain exists before opening the file.
    if parent != '' and not os.path.exists(parent):
        os.makedirs(parent)

    with open(path, 'wb+') as fh:
        fh.write(content)
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def sync_func(files_from, deleted_files, create_file_at_to, delete_file_at_to, create_file_at_from, from_exists_in_to,
              from_equal_to_to, from_newer_than_to, from_name,
              to_name, verbose=False, ask=True):
    """One-directional sync driven entirely by caller-supplied callbacks.

    files_from: iterable of file names on the "from" side.
    deleted_files: names present on "to" but gone from "from".
    create_file_at_to / delete_file_at_to / create_file_at_from: callbacks
        performing the actual transfer/removal; each takes a file name.
    from_exists_in_to / from_equal_to_to / from_newer_than_to: predicates
        comparing a file across the two sides.
    ask: when True, prompt interactively before overwriting with an older
        version and before deleting/restoring; when False, deletions are
        ignored and updates proceed unconditionally.
    """
    click.echo("\nSyncing files from [%s] to [%s]" % (from_name, to_name))
    click.echo('=' * 40)

    # Classification buckets filled below, then processed in order.
    newly_add_list = []
    update_list = []
    delete_list = []
    restore_list = []
    not_restored_list = []
    not_sync_list = []
    synced_list = []

    # Pass 1: classify every "from" file as new / changed / identical.
    for name in files_from:
        if from_exists_in_to(name):
            if not from_equal_to_to(name):
                # Changed file. If interactive and the "from" copy is older,
                # ask before overwriting; a declined prompt skips the file.
                if ask and not from_newer_than_to(name) and not click.confirm(
                        '\n-> Warning: last-edit time stamp of file <%s> from [%s] is older than [%s].\nContinue to '
                        'overwrite with an older version?' % (name, from_name, to_name)):
                    not_sync_list.append(name)
                    continue
                update_list.append(name)
            else:
                synced_list.append(name)
        else:
            newly_add_list.append(name)

    # Pass 2: decide the fate of files that disappeared from the "from" side.
    # Non-interactive runs always ignore them (safe default).
    for name in deleted_files:
        if ask:
            delete_choice = click.prompt(
                '\n-> Warning: file <%s> does not exist on [%s] anymore (but it still exists on [%s]).'
                '\nShould the file be [d]eleted, [r]estored or [i]gnored?' % (name, from_name, to_name),
                default="i",
                type=click.Choice(['d', 'r', 'i']))
        else:
            delete_choice = "i"

        if delete_choice == "d":
            delete_list.append(name)
        elif delete_choice == "r":
            restore_list.append(name)
        elif delete_choice == "i":
            not_restored_list.append(name)

    # Create brand-new files on the "to" side. Failures are warned about,
    # not fatal — remaining files still get a chance to sync.
    click.echo(
        "\n[NEW] Following new file(s) created on [%s]" % to_name)
    for name in newly_add_list:
        click.echo("\t%s" % name)
        try:
            create_file_at_to(name)
        except Exception as e:
            if verbose:
                click.echo(f"\t[WARN] Failed to create {name} on {to_name}: {e}")
            else:
                click.echo(f"\t[WARN] Failed to create {name}")

    # Restore files the user chose to keep: re-create them on the "from" side.
    # NOTE(review): the header intentionally reuses the [NEW] wording with
    # from_name as the target.
    click.echo(
        "\n[NEW] Following new file(s) created on [%s]" % from_name)
    for name in restore_list:
        click.echo("\t%s" % name)
        try:
            create_file_at_from(name)
        except Exception as e:
            click.echo(f"\t[WARN] Failed to restore {name}: {e}")

    # Update changed files on the "to" side.
    click.echo(
        "\n[UPDATE] Following file(s) updated on [%s]" % to_name)
    for name in update_list:
        click.echo("\t%s" % name)
        try:
            if to_name == "remote":
                # Delete and recreate is much more reliable for remote updates
                delete_file_at_to(name)
                create_file_at_to(name)
            else:
                create_file_at_to(name)
        except Exception as e:
            if verbose:
                click.echo(f"\t[WARN] Failed to update {name} on {to_name}: {e}")
            else:
                click.echo(f"\t[WARN] Failed to update {name}")

    # Delete files the user chose to remove. Unlike creates/updates, a failed
    # delete aborts the whole run with a ClickException.
    click.echo(
        "\n[DELETE] Following file(s) deleted on [%s]" % to_name)
    for name in delete_list:
        click.echo("\t%s" % name)
        try:
            delete_file_at_to(name)
        except Exception as e:
            if verbose:
                print(traceback.format_exc())
            raise click.ClickException(f"\n[ERROR] An error occurred while deleting file(s) on [{to_name}]: {str(e)}")

    # Summary sections: unchanged, skipped (declined overwrite), and ignored
    # deletions.
    click.echo(
        "\n[SYNC] Following file(s) are up to date")
    for name in synced_list:
        click.echo("\t%s" % name)

    click.echo(
        "\n[SKIP] Following file(s) on [%s] have not been synced to [%s]" % (from_name, to_name))
    for name in not_sync_list:
        click.echo("\t%s" % name)

    click.echo(
        "\n[SKIP] Following file(s) on [%s] have not been synced to [%s]" % (to_name, from_name))
    for name in not_restored_list:
        click.echo("\t%s" % name)

    click.echo("")
    click.echo("✅ Synced files from [%s] to [%s]" % (from_name, to_name))
    click.echo("")
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def execute_action(action, progress_message, success_message, fail_message, verbose_error_logging=False, is_optional=False):
    """Run *action* under a yaspin spinner and report success or failure.

    Returns the (truthy) result of action() on success. On failure: raises
    click.ClickException, unless is_optional is True, in which case a warning
    is printed and None is returned. A falsy return value from action()
    (None, False, empty) counts as failure.
    """
    with yaspin(text=progress_message, color="green") as spinner:
        success = None
        error_detail = ""
        try:
            if verbose_error_logging:
                click.echo(f"\n[DEBUG] Starting action: {progress_message}")
            success = action()
            if verbose_error_logging:
                click.echo(f"[DEBUG] Action result: {success}")
        except Exception as e:
            # Keep the exception text so it can be appended to fail_message.
            error_detail = str(e) or "Unknown exception occurred"
            if verbose_error_logging:
                click.echo(f"[DEBUG] Exception caught: {error_detail}")
                click.echo(traceback.format_exc())
            success = False

        if success:
            spinner.write(success_message)
            spinner.ok("✅ ")
            return success
        else:
            spinner.fail("💥 ")
            # Enrich the failure message: exception detail takes precedence;
            # otherwise flag that the action silently returned None.
            if error_detail:
                fail_message = f"{fail_message} ({error_detail})"
            elif success is None:
                fail_message = f"{fail_message} (Action returned None)"

            if verbose_error_logging:
                click.echo(f"[DEBUG] Failing with: {fail_message}")

            if is_optional:
                # Optional actions degrade to a warning instead of aborting.
                click.echo(f"⚠️ Warning: {fail_message}")
                return None
            else:
                raise click.ClickException(fail_message)
|
|
440
|
+
|
|
441
|
+
def olignore_keep_list(olignore_path):
    """
    The list of files to keep synced, with support for sub-folders.
    Should only be called when syncing from local to remote.
    """
    # Everything under the current working directory (glob skips dotfiles).
    candidates = glob.glob('**', recursive=True)

    # Names we NEVER sync to Overleaf, wherever they occur in a path.
    system_ignore = [
        '.olauth', '.olignore', '.git', '.DS_Store',
        '__pycache__', 'debug_dashboard.html', 'debug_project_page.html'
    ]

    # Apply user-defined fnmatch patterns from the .olignore file, if present.
    if os.path.isfile(olignore_path):
        with open(olignore_path, 'r') as fh:
            patterns = fh.read().splitlines()
        candidates = [c for c in candidates
                      if not any(fnmatch.fnmatch(c, pat) for pat in patterns)]

    # Drop system-ignored entries (checking every path component) and
    # directories; normalize the survivors to POSIX-style paths.
    kept = []
    for entry in candidates:
        entry_path = Path(entry)
        if any(part in system_ignore for part in entry_path.parts):
            continue
        if entry_path.is_dir():
            continue
        kept.append(entry_path.as_posix())

    return kept
|
|
476
|
+
|
|
477
|
+
|
|
478
|
+
# Allow running the module directly (the packaged entry point is `ols`).
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["flit"]
|
|
3
|
+
build-backend = "flit.buildapi"
|
|
4
|
+
|
|
5
|
+
[tool.flit.metadata]
|
|
6
|
+
module = "olsync"
|
|
7
|
+
dist-name = "overleaf-sync-fixed"
|
|
8
|
+
description-file = "README.md"
|
|
9
|
+
author = "Lawrence Yang (Fixed)"
|
|
10
|
+
author-email = "lawrenceeyang@gmail.com"
|
|
11
|
+
home-page = "https://github.com/lawrencee/overleaf-sync-fixed"
|
|
12
|
+
classifiers = ["License :: OSI Approved :: MIT License", "Intended Audience :: Science/Research", "Programming Language :: Python :: 3"]
|
|
13
|
+
requires-python = ">=3"
|
|
14
|
+
requires = [
|
|
15
|
+
"requests == 2.*",
|
|
16
|
+
"beautifulsoup4 == 4.11.1",
|
|
17
|
+
"yaspin == 2.*",
|
|
18
|
+
"python-dateutil~=2.8.1",
|
|
19
|
+
"click == 8.*",
|
|
20
|
+
"PySide6 == 6.*"
|
|
21
|
+
]
|
|
22
|
+
keywords = "overleaf sync latex tex fix"
|
|
23
|
+
|
|
24
|
+
[tool.flit.scripts]
|
|
25
|
+
ols = "olsync.olsync:main"
|