olj-cli 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
olj/__init__.py ADDED
@@ -0,0 +1,7 @@
1
"""OnlineJobs.ph CLI - Automate job applications and scraping."""

# Package metadata; the public surface is declared in __all__ below.
__version__ = "0.1.0"
__author__ = "kuugang"
__email__ = "jakebajo21@gmail.com"

__all__ = ["__version__", "__author__", "__email__"]
olj/__main__.py ADDED
@@ -0,0 +1,6 @@
1
"""Allow running olj as: python -m olj"""

from olj.cli import main

# Delegate straight to the CLI entry point when executed as a module.
if __name__ == "__main__":
    main()
olj/cli.py ADDED
@@ -0,0 +1,392 @@
1
+ import argparse
2
+ import json
3
+ import logging
4
+ import os
5
+ import time
6
+ from typing import cast
7
+ from urllib.parse import urlencode
8
+
9
+ from bs4 import BeautifulSoup
10
+ from curl_cffi import requests
11
+
12
# --- Logging setup ---
# Root-level config: INFO by default; main() raises this to DEBUG with --debug.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
logger = logging.getLogger(__name__)

# OnlineJobs.ph endpoints used by the commands below.
AUTHENTICATE_URL = "https://www.onlinejobs.ph/authenticate"  # login form POST target
LOGIN_URL = "https://www.onlinejobs.ph/login"  # login page; source of the CSRF token
JOBS_URL = "https://www.onlinejobs.ph/jobseekers/jobsearch"  # job-search listing pages
APPLY_URL = "https://www.onlinejobs.ph/apply"  # application form fetch + submit

# Browser-like request headers sent on every request (paired with
# curl_cffi's impersonate="chrome" in make_session) so requests look
# like an ordinary Chrome session.
HEADERS = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "accept-encoding": "gzip, deflate, br, zstd",
    "accept-language": "en-US,en;q=0.9",
    "cache-control": "max-age=0",
    "content-type": "application/x-www-form-urlencoded",
    "origin": "https://www.onlinejobs.ph",
    "priority": "u=0, i",
    "referer": "https://www.onlinejobs.ph/login",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/145.0.0.0 Safari/537.36",
}
37
+
38
+
39
+ # ---------------------------------------------------------------------------
40
+ # Session helpers
41
+ # ---------------------------------------------------------------------------
42
+
43
+
44
def make_session(cookies: dict | None = None) -> requests.Session:
    """Build a curl_cffi session impersonating Chrome.

    Args:
        cookies: Optional cookie jar (as a plain dict) to preload, e.g.
            the output of ``login``.

    Returns:
        A session with the shared browser-like HEADERS applied.
    """
    sess = requests.Session(impersonate="chrome")
    sess.headers.update(HEADERS)
    if cookies:
        sess.cookies.update(cookies)
    return sess
50
+
51
+
52
def get_input_value(soup: BeautifulSoup, name: str) -> str | None:
    """Return the ``value`` attribute of the ``<input>`` named *name*.

    Logs a warning and returns ``None`` when no such input exists in *soup*.
    """
    field = soup.find("input", {"name": name})
    if field is not None:
        return cast(str, field.get("value"))
    logger.warning(f"Input field '{name}' not found")
    return None
58
+
59
+
60
+ # ---------------------------------------------------------------------------
61
+ # Commands
62
+ # ---------------------------------------------------------------------------
63
+
64
+
65
def login(email: str, password: str) -> dict | None:
    """Login and print cookies as JSON.

    Fetches the login page for a CSRF token, posts the credentials, and on
    success prints the session cookies as a JSON object to stdout.

    Returns:
        The cookie dict on success, ``None`` when the credentials are
        rejected.

    Raises:
        SystemExit: when no CSRF token can be extracted from the login page.
    """

    def login_failed(html: str) -> bool:
        # The site renders <p class="error"> with a fixed message on bad
        # credentials; anything else is treated as success.
        page = BeautifulSoup(html, "html.parser")
        error_tag = page.find("p", class_="error")
        if not error_tag:
            return False
        return (
            "email address or password is incorrect"
            in error_tag.get_text(strip=True).lower()
        )

    session = make_session()

    logger.info("Fetching CSRF token from login page...")
    login_page = session.get(LOGIN_URL)
    csrf_token = get_input_value(
        BeautifulSoup(login_page.text, "html.parser"), "csrf-token"
    )
    if not csrf_token:
        logger.error("No CSRF token found. Exiting...")
        raise SystemExit(1)

    logger.info(f"Logging in as {email}...")
    auth_resp = session.post(
        AUTHENTICATE_URL,
        data={
            "csrf_token": csrf_token,
            "info[email]": email,
            "info[password]": password,
            "login": "Login →",
        },
    )
    if login_failed(auth_resp.text):
        logger.error("Login failed: invalid email or password!")
        return None

    logger.info("Login successful")

    cookies = dict(session.cookies)
    print(json.dumps(cookies))
    return cookies
109
+
110
+
111
def apply(
    cookies: dict,
    job_url: str,
    subject: str,
    message: str,
    contact_info: str,
    apply_points: int = 1,
) -> None:
    """Apply to a job using saved cookies.

    Four-step flow: load the job page, short-circuit if already applied,
    fetch the apply form via POST, then submit the application email.

    Args:
        cookies: Session cookies from ``login`` (plain dict).
        job_url: Full URL of the job posting.
        subject: Email subject line.
        message: Email message body.
        contact_info: Contact details appended to the application.
        apply_points: Points to spend on the application (default 1).
    """

    def is_already_applied(soup: BeautifulSoup) -> bool:
        # The job page shows a disabled red "Applied" button once you have
        # applied; its presence is the only signal checked here.
        button = soup.find(
            "button",
            class_="btn btn-danger btn-rounded btn-addpad fs-16",
            disabled=True,
        )
        if button and "applied" in button.get_text(strip=True).lower():
            return True
        return False

    session = make_session(cookies)

    # Step 1: Job page → contact_email, job_id, back_id
    logger.info(f"Fetching job page: {job_url}")
    resp = session.get(job_url)

    soup = BeautifulSoup(resp.text, "html.parser")
    if is_already_applied(soup):
        logger.error("You have already applied to this job!")
        return

    # NOTE(review): any of these may be None if the page layout changes or
    # the session cookies are stale; the flow continues regardless — confirm
    # whether a hard failure would be preferable.
    csrf_token = get_input_value(soup, "csrf-token")
    contact_email = get_input_value(soup, "contact_email")
    job_id = get_input_value(soup, "job_id")
    back_id = get_input_value(soup, "back_id")

    logger.debug(f"{csrf_token=} {contact_email=} {job_id=} {back_id=}")

    # Step 2: POST to /apply → get the actual apply form
    logger.info("Fetching apply form...")
    apply_resp = session.post(
        APPLY_URL,
        data={
            "csrf_token": csrf_token,
            "contact_email": contact_email,
            "job_id": job_id,
            "back_id": back_id,
        },
    )
    apply_soup = BeautifulSoup(apply_resp.text, "html.parser")

    # Step 3: Parse apply form fields (fresh CSRF token from the form page)
    csrf_token = get_input_value(apply_soup, "csrf-token")
    info_name = get_input_value(apply_soup, "info[name]")
    info_email = get_input_value(apply_soup, "info[email]")
    sent_to_e_id = get_input_value(apply_soup, "sent_to_e_id")
    email_sent_count_today = get_input_value(apply_soup, "email_sent_count_today")

    logger.debug(
        f"{info_name=} {info_email=} {sent_to_e_id=} {email_sent_count_today=}"
    )

    # Step 4: Submit
    # NOTE(review): this payload uses the key "csrf-token" while the step-2
    # POST uses "csrf_token" — presumably mirroring the two distinct site
    # forms, but worth confirming against the live form field names.
    data = {
        "csrf-token": csrf_token,
        "info[name]": info_name,
        "info[email]": info_email,
        "info[subject]": subject,
        "info[message]": message,
        "points": apply_points,
        "op": "Send Email",
        "contact_email": contact_email,
        "email_sent_count_today": email_sent_count_today,
        "back_id": back_id,
        "sent_to_e_id": sent_to_e_id,
        "job_id": job_id,
        "contact_info": contact_info,
    }

    logger.info(f"Submitting application for job {job_id}...")
    logger.debug(f"Payload: {data}")
    # NOTE(review): the response is not inspected — "Successfully applied" is
    # logged regardless of what the server returned; verify success markers.
    session.post(APPLY_URL, data=data)
    logger.info(f"Successfully applied to job_id {job_id}")
194
+
195
+
196
def jobs(
    search_filter: str | None = None,
    pages: int | None = None,
) -> list[dict] | None:
    """Scrape jobs and print as JSON.

    Args:
        search_filter: Optional keyword filter for the search.
        pages: Number of listing pages to scrape; when falsy, scrapes page
            after page until an empty page is returned.

    Returns:
        The list of job dicts (also printed to stdout as indented JSON).
        Despite the ``| None`` annotation, every path returns a list.
    """
    session = make_session()

    def get_jobs_url(page: int, params: dict | None = None) -> str:
        # Page 1 is the bare search URL; later pages use a result offset in
        # the path. The (page - 1) * 30 math assumes 30 results per page —
        # TODO confirm against the live site's page size.
        url = JOBS_URL if page == 1 else f"{JOBS_URL}/{(page - 1) * 30}"
        if params:
            url = f"{url}?{urlencode(params)}"
        return url

    def parse_jobs(html: str) -> list[dict]:
        # Extract one dict per job card; every field degrades to "" when its
        # tag is missing so a layout change yields empty fields, not a crash.
        soup = BeautifulSoup(html, "html.parser")
        all_jobs: list[dict] = []
        cards = soup.find_all("div", class_="jobpost-cat-box")
        logger.debug(f"Found {len(cards)} job cards in HTML")

        for card in cards:
            link_tag = card.find("a", href=True)
            url = f"https://www.onlinejobs.ph{link_tag['href']}" if link_tag else ""

            title_tag = card.find("h4")
            title = ""
            if title_tag:
                # Drop the inline <span> badge so it doesn't pollute the title.
                badge = title_tag.find("span")
                if badge:
                    badge.extract()
                title = title_tag.get_text(strip=True)

            # Meta line looks like "Poster • Posted on <date>".
            posted_by, posted_on = "", ""
            meta_p = card.find("p", class_="fs-13")
            if meta_p:
                text = meta_p.get_text(strip=True)
                if "•" in text:
                    parts = text.split("•")
                    posted_by = parts[0].strip()
                    posted_on = parts[1].replace("Posted on", "").strip()

            rate_dd = card.find("dd")
            rate = rate_dd.get_text(strip=True) if rate_dd else ""

            all_jobs.append(
                {
                    "url": url,
                    "title": title,
                    "posted_by": posted_by,
                    "posted_on": posted_on,
                    "rate": rate,
                }
            )

        return all_jobs

    def enrich(job_list: list[dict]) -> list[dict]:
        # Mutates each job dict in place, adding a "description" key fetched
        # from the job's detail page.
        for i, job in enumerate(job_list, 1):
            url = job.get("url")
            if not url:
                logger.warning(f"Job #{i} has no URL, skipping description fetch")
                job["description"] = ""
                continue
            logger.debug(
                f"Fetching description for job #{i}: {job.get('title', 'N/A')}"
            )
            resp = session.get(url)
            soup = BeautifulSoup(resp.text, "html.parser")
            desc_tag = soup.find("p", id="job-description")
            job["description"] = desc_tag.get_text(strip=True) if desc_tag else ""
            if not desc_tag:
                logger.warning(f"No description found for job #{i}: {url}")
            # Politeness delay between detail-page requests.
            time.sleep(0.5)
        return job_list

    def fetch_page(page: int) -> list[dict]:
        # One listing page: build the search query, fetch, parse, enrich.
        params = {
            "jobkeyword": search_filter or "",
            "skill_tags": "",
            "gig": "on",
            "partTime": "on",
            "fullTime": "on",
            "isFromJobsearchForm": "1",
        }
        url = get_jobs_url(page, params)
        logger.info(f"Fetching jobs page {page}: {url}")
        resp = session.get(url)
        job_list = parse_jobs(resp.text)
        logger.info(
            f"Page {page}: parsed {len(job_list)} jobs, enriching with descriptions..."
        )
        return enrich(job_list)

    all_jobs: list[dict] = []

    if pages:
        # Fixed page count requested.
        for page in range(1, pages + 1):
            all_jobs.extend(fetch_page(page))
            logger.info(f"Total jobs collected so far: {len(all_jobs)}")
    else:
        # Unbounded: keep paging until a page yields zero cards.
        logger.info("No page limit — scraping until no jobs found")
        page = 1
        while True:
            page_jobs = fetch_page(page)
            if not page_jobs:
                logger.info(f"No jobs found on page {page}. Stopping.")
                break
            all_jobs.extend(page_jobs)
            logger.info(f"Total jobs collected so far: {len(all_jobs)}")
            page += 1

    logger.info(f"Scraping complete. Total jobs: {len(all_jobs)}")
    print(json.dumps(all_jobs, indent=2))
    return all_jobs
309
+
310
+
311
+ # ---------------------------------------------------------------------------
312
+ # CLI
313
+ # ---------------------------------------------------------------------------
314
+
315
+
316
+ def parse_args() -> argparse.Namespace:
317
+ parser = argparse.ArgumentParser(
318
+ description="OnlineJobs.ph CLI",
319
+ formatter_class=argparse.RawTextHelpFormatter,
320
+ )
321
+ parser.add_argument("--debug", action="store_true", help="Enable debug logging")
322
+
323
+ sub = parser.add_subparsers(dest="command", required=True)
324
+
325
+ # -- login --
326
+ login_p = sub.add_parser("login", help="Login and output cookies as JSON")
327
+ login_p.add_argument(
328
+ "--email", required=True, help="Account email (or set OLJ_EMAIL)"
329
+ )
330
+ login_p.add_argument(
331
+ "--password", required=True, help="Account password (or set OLJ_PASSWORD)"
332
+ )
333
+
334
+ # -- apply --
335
+ apply_p = sub.add_parser("apply", help="Apply to a job posting")
336
+ apply_p.add_argument(
337
+ "--cookies", required=True, help="JSON cookies string from `login`"
338
+ )
339
+ apply_p.add_argument("--job-url", required=True, help="Full URL of the job posting")
340
+ apply_p.add_argument("--subject", required=True, help="Email subject")
341
+ apply_p.add_argument("--message", required=True, help="Email message body")
342
+ apply_p.add_argument(
343
+ "--contact-info", required=True, help="Contact info to include"
344
+ )
345
+ apply_p.add_argument(
346
+ "--apply-points", type=int, default=1, help="Points to spend (default: 1)"
347
+ )
348
+
349
+ # -- jobs --
350
+ jobs_p = sub.add_parser("jobs", help="Search and scrape job listings")
351
+ jobs_p.add_argument("--filter", dest="search_filter", help="Keyword filter")
352
+ jobs_p.add_argument("--pages", type=int, help="Number of pages to scrape")
353
+
354
+ return parser.parse_args()
355
+
356
+
357
def main() -> list | dict | None:
    """CLI entry point: parse arguments and dispatch to a command.

    Returns:
        Whatever the dispatched command returns — cookies dict for ``login``,
        job list for ``jobs``, ``None`` for ``apply``.
    """
    args = parse_args()

    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
        logger.debug("Debug logging enabled")

    command = args.command

    if command == "login":
        # CLI flags win; fall back to environment variables.
        email = args.email or os.environ.get("OLJ_EMAIL", "")
        password = args.password or os.environ.get("OLJ_PASSWORD", "")
        if not (email and password):
            logger.error(
                "--email and --password are required (or set OLJ_EMAIL / OLJ_PASSWORD)"
            )
            raise SystemExit(1)
        return login(email, password)

    if command == "apply":
        apply(
            cookies=json.loads(args.cookies),
            job_url=args.job_url,
            subject=args.subject,
            message=args.message,
            contact_info=args.contact_info,
            apply_points=args.apply_points,
        )
        return None

    if command == "jobs":
        return jobs(search_filter=args.search_filter, pages=args.pages)

    return None
389
+
390
+
391
# Allow direct execution of this module as a script.
if __name__ == "__main__":
    main()
@@ -0,0 +1,200 @@
1
+ Metadata-Version: 2.4
2
+ Name: olj-cli
3
+ Version: 0.1.0
4
+ Summary: A command-line tool to automate job applications and scraping on OnlineJobs.ph
5
+ Author-email: kuugang <jakebajo21@gmail.com>
6
+ License: MIT License
7
+
8
+ Copyright (c) 2026 Kuugang
9
+
10
+ Permission is hereby granted, free of charge, to any person obtaining a copy
11
+ of this software and associated documentation files (the "Software"), to deal
12
+ in the Software without restriction, including without limitation the rights
13
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
14
+ copies of the Software, and to permit persons to whom the Software is
15
+ furnished to do so, subject to the following conditions:
16
+
17
+ The above copyright notice and this permission notice shall be included in all
18
+ copies or substantial portions of the Software.
19
+
20
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
23
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
25
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
26
+ SOFTWARE.
27
+
28
+ Project-URL: Repository, https://github.com/Kuugang/olj-cli
29
+ Project-URL: Homepage, https://github.com/Kuugang/olj-cli
30
+ Project-URL: Bug Tracker, https://github.com/Kuugang/olj-cli/issues
31
+ Keywords: cli,automation,scraping,jobsearch,onlinejobs
32
+ Classifier: Development Status :: 3 - Alpha
33
+ Classifier: Environment :: Console
34
+ Classifier: Intended Audience :: End Users/Desktop
35
+ Classifier: Natural Language :: English
36
+ Classifier: Operating System :: OS Independent
37
+ Classifier: Programming Language :: Python :: 3
38
+ Classifier: Programming Language :: Python :: 3.11
39
+ Classifier: Programming Language :: Python :: 3.12
40
+ Classifier: Topic :: Internet :: WWW/HTTP
41
+ Classifier: Topic :: Office/Business
42
+ Classifier: Topic :: Utilities
43
+ Requires-Python: >=3.11
44
+ Description-Content-Type: text/markdown
45
+ License-File: LICENSE
46
+ Requires-Dist: beautifulsoup4>=4.14.3
47
+ Requires-Dist: curl-cffi>=0.15.0
48
+ Requires-Dist: requests>=2.33.1
49
+ Dynamic: license-file
50
+
51
+ # OnlineJobs.ph CLI
52
+
53
+ A command-line tool to automate job applications and scraping on [OnlineJobs.ph](https://www.onlinejobs.ph).
54
+
55
+ ## Features
56
+
57
+ - **Login**: Authenticate with OnlineJobs.ph and export session cookies
58
+ - **Apply**: Automatically apply to job postings with custom messages and contact info
59
+ - **Jobs**: Search and scrape job listings with descriptions
60
+
61
+ ## Installation
62
+
63
+ ### Requirements
64
+
65
+ - Python 3.11+
66
+ - Dependencies: `requests`, `beautifulsoup4`, `curl-cffi`
67
+
68
+ ### Setup
69
+
70
+ ```bash
71
+ pip install olj-cli
72
+ ```
73
+
74
+ ## Example Usage
75
+
76
+ ### 1. Login — Get Session Cookies
77
+
78
+ Authenticate and save your session cookies for use in other commands.
79
+
80
+ ```bash
81
+ COOKIES=$(python main.py login --email you@example.com --password secret)
82
+ ```
83
+
84
+ This prints the cookies as JSON to stdout, which you can store in the `COOKIES` variable.
85
+
86
+ ### 2. Apply to a Job
87
+
88
+ Submit an application to a specific job posting.
89
+
90
+ ```bash
91
+ python main.py apply \
92
+ --cookies "$COOKIES" \
93
+ --job-url "https://www.onlinejobs.ph/jobseekers/job/12345" \
94
+ --subject "Applying for Part time developer" \
95
+ --message "I would like to apply, thank you." \
96
+ --contact-info "Email: you@example.com | GitHub: yourhandle"
97
+ ```
98
+
99
+ **Parameters:**
100
+
101
+ - `--cookies`: JSON cookies string from the `login` command
102
+ - `--job-url`: Full URL of the job posting
103
+ - `--subject`: Email subject line
104
+ - `--message`: Email message body
105
+ - `--contact-info`: Your contact information
106
+ - `--apply-points` (optional): Points to spend (default: 1)
107
+
108
+ ### 3. Scrape Jobs
109
+
110
+ Search and scrape job listings with full descriptions.
111
+
112
+ ```bash
113
+ python main.py jobs --filter "python developer" --pages 3
114
+ ```
115
+
116
+ **Parameters:**
117
+
118
+ - `--filter` (optional): Keyword filter for search
119
+ - `--pages` (optional): Number of pages to scrape (if not specified, scrapes until no jobs found)
120
+
121
+ **Output:** JSON array of jobs with `url`, `title`, `posted_by`, `posted_on`, `rate`, and `description`
122
+
123
+ ## Commands
124
+
125
+ ### `login`
126
+
127
+ Authenticate with OnlineJobs.ph and output session cookies as JSON.
128
+
129
+ ```bash
130
+ python main.py login --email <email> --password <password>
131
+ ```
132
+
133
+ **Environment Variables:**
134
+
135
+ - `OLJ_EMAIL`: Account email (alternative to `--email`)
136
+ - `OLJ_PASSWORD`: Account password (alternative to `--password`)
137
+
138
+ ### `apply`
139
+
140
+ Apply to a job posting using authenticated session.
141
+
142
+ ```bash
143
+ python main.py apply --cookies <JSON> --job-url <url> --subject <subject> --message <message> --contact-info <info>
144
+ ```
145
+
146
+ ### `jobs`
147
+
148
+ Search and scrape job listings.
149
+
150
+ ```bash
151
+ python main.py jobs [--filter <keyword>] [--pages <number>]
152
+ ```
153
+
154
+ ## Debug
155
+
156
+ Enable debug logging for any command:
157
+
158
+ ```bash
159
+ python main.py --debug jobs --filter "react"
160
+ ```
161
+
162
+ ## Example Workflow
163
+
164
+ ```bash
165
+ # 1. Search for jobs (no authentication needed)
166
+ python main.py jobs --filter "python developer" --pages 3
167
+
168
+ # 2. Login to get cookies (if you want to apply)
169
+ COOKIES=$(python main.py login --email you@example.com --password secret)
170
+
171
+ # 3. Apply to a specific job
172
+ python main.py apply \
173
+ --cookies "$COOKIES" \
174
+ --job-url "https://www.onlinejobs.ph/jobseekers/job/1604447" \
175
+ --subject "Applying for Senior Magento 2 Developer" \
176
+ --message "I would like to apply, thank you." \
177
+ --contact-info "Email: you@example.com | GitHub: yourhandle"
178
+ ```
179
+
180
+ ## How It Works
181
+
182
+ ### Login Flow
183
+
184
+ 1. Fetches the login page to extract CSRF token
185
+ 2. Submits credentials to authenticate endpoint
186
+ 3. Stores session cookies for subsequent requests
187
+
188
+ ### Apply Flow
189
+
190
+ 1. Fetches the job posting page
191
+ 2. Extracts CSRF token, job ID, and other metadata
192
+ 3. Fetches the application form
193
+ 4. Submits the application with subject, message, and contact info
194
+
195
+ ### Jobs Scraping
196
+
197
+ 1. Fetches job listing pages with optional keyword filter
198
+ 2. Parses job cards to extract title, URL, poster, and date
199
+ 3. Fetches each job's detail page to extract full description
200
+ 4. Returns complete job data as JSON
@@ -0,0 +1,9 @@
1
+ olj/__init__.py,sha256=yC1JDligyQDy2FM5GVXnENXQxR05WBQh_TP4Ho3T9s0,201
2
+ olj/__main__.py,sha256=ItL5CYytyBu5oUgJMYEQUZL-mMmSIRLij_S876edAOk,107
3
+ olj/cli.py,sha256=WBCoSBwStv1R1hLeTv-1POAitOqwbDzcwUSFF0CYMmQ,12874
4
+ olj_cli-0.1.0.dist-info/licenses/LICENSE,sha256=QT95UKZOn8MUidEHfVZMjOiICtlIjcxfRth7B1BW5Pg,1064
5
+ olj_cli-0.1.0.dist-info/METADATA,sha256=l29INYlvxek6ylr_8LJ3JZf_bqZhQa8vNuInhX12osU,6190
6
+ olj_cli-0.1.0.dist-info/WHEEL,sha256=aeYiig01lYGDzBgS8HxWXOg3uV61G9ijOsup-k9o1sk,91
7
+ olj_cli-0.1.0.dist-info/entry_points.txt,sha256=iEVX3a2SEEtaDo7SgdQq-6cTTFKPc8iM4c5KptjURXo,41
8
+ olj_cli-0.1.0.dist-info/top_level.txt,sha256=MrQ53ptyt9NSXIS5Un9S1lmkcDWknoG4TBT7mI9wcGw,4
9
+ olj_cli-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.1)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ olj-cli = olj.cli:main
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Kuugang
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1 @@
1
+ olj