ragflow-cli 0.24.0__tar.gz → 0.25.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/PKG-INFO +3 -2
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/README.md +1 -1
- ragflow_cli-0.25.0/http_client.py +182 -0
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/parser.py +287 -6
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/pyproject.toml +3 -3
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/PKG-INFO +3 -2
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/SOURCES.txt +3 -0
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/requires.txt +1 -0
- ragflow_cli-0.25.0/ragflow_cli.egg-info/top_level.txt +5 -0
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.py +25 -0
- ragflow_cli-0.25.0/ragflow_client.py +2241 -0
- ragflow_cli-0.25.0/user.py +77 -0
- ragflow_cli-0.24.0/ragflow_cli.egg-info/top_level.txt +0 -2
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/dependency_links.txt +0 -0
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/entry_points.txt +0 -0
- {ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/setup.cfg +0 -0
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragflow-cli
-Version: 0.24.0
+Version: 0.25.0
 Summary: Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring.
 Author-email: Lynn <lynn_inf@hotmail.com>
 License: Apache License, Version 2.0
@@ -10,6 +10,7 @@ Requires-Dist: requests<3.0.0,>=2.30.0
 Requires-Dist: beartype<1.0.0,>=0.20.0
 Requires-Dist: pycryptodomex>=3.10.0
 Requires-Dist: lark>=1.1.0
+Requires-Dist: requests-toolbelt>=1.0.0
 
 # RAGFlow Admin Service & CLI
 
@@ -61,7 +62,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
 1. Ensure the Admin Service is running.
 2. Install ragflow-cli.
 ```bash
-pip install ragflow-cli==0.24.0
+pip install ragflow-cli==0.25.0
 ```
 3. Launch the CLI client:
 ```bash
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/README.md

@@ -48,7 +48,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
 1. Ensure the Admin Service is running.
 2. Install ragflow-cli.
 ```bash
-pip install ragflow-cli==0.24.0
+pip install ragflow-cli==0.25.0
 ```
 3. Launch the CLI client:
 ```bash
ragflow_cli-0.25.0/http_client.py (new file)

@@ -0,0 +1,182 @@
+#
+# Copyright 2026 The InfiniFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import time
+import json
+import typing
+from typing import Any, Dict, Optional
+
+import requests
+# from requests.sessions import HTTPAdapter
+
+
+class HttpClient:
+    def __init__(
+        self,
+        host: str = "127.0.0.1",
+        port: int = 9381,
+        api_version: str = "v1",
+        api_key: Optional[str] = None,
+        connect_timeout: float = 5.0,
+        read_timeout: float = 60.0,
+        verify_ssl: bool = False,
+    ) -> None:
+        self.host = host
+        self.port = port
+        self.api_version = api_version
+        self.api_key = api_key
+        self.login_token: str | None = None
+        self.connect_timeout = connect_timeout
+        self.read_timeout = read_timeout
+        self.verify_ssl = verify_ssl
+
+    def api_base(self) -> str:
+        return f"{self.host}:{self.port}/api/{self.api_version}"
+
+    def non_api_base(self) -> str:
+        return f"{self.host}:{self.port}/{self.api_version}"
+
+    def build_url(self, path: str, use_api_base: bool = True) -> str:
+        base = self.api_base() if use_api_base else self.non_api_base()
+        if self.verify_ssl:
+            return f"https://{base}/{path.lstrip('/')}"
+        else:
+            return f"http://{base}/{path.lstrip('/')}"
+
+    def _headers(self, auth_kind: Optional[str], extra: Optional[Dict[str, str]]) -> Dict[str, str]:
+        headers = {}
+        if auth_kind == "api" and self.api_key:
+            headers["Authorization"] = f"Bearer {self.api_key}"
+        elif auth_kind == "web" and self.login_token:
+            headers["Authorization"] = self.login_token
+        elif auth_kind == "admin" and self.login_token:
+            headers["Authorization"] = self.login_token
+        else:
+            pass
+        if extra:
+            headers.update(extra)
+        return headers
+
+    def request(
+        self,
+        method: str,
+        path: str,
+        *,
+        use_api_base: bool = True,
+        auth_kind: Optional[str] = "api",
+        headers: Optional[Dict[str, str]] = None,
+        json_body: Optional[Dict[str, Any]] = None,
+        data: Any = None,
+        files: Any = None,
+        params: Optional[Dict[str, Any]] = None,
+        stream: bool = False,
+        iterations: int = 1,
+    ) -> requests.Response | dict:
+        url = self.build_url(path, use_api_base=use_api_base)
+        merged_headers = self._headers(auth_kind, headers)
+        # timeout: Tuple[float, float] = (self.connect_timeout, self.read_timeout)
+        session = requests.Session()
+        # adapter = HTTPAdapter(pool_connections=100, pool_maxsize=100)
+        # session.mount("http://", adapter)
+        http_function = typing.Any
+        match method:
+            case "GET":
+                http_function = session.get
+            case "POST":
+                http_function = session.post
+            case "PUT":
+                http_function = session.put
+            case "DELETE":
+                http_function = session.delete
+            case "PATCH":
+                http_function = session.patch
+            case _:
+                raise ValueError(f"Invalid HTTP method: {method}")
+
+        if iterations > 1:
+            response_list = []
+            total_duration = 0.0
+            for _ in range(iterations):
+                start_time = time.perf_counter()
+                response = http_function(url, headers=merged_headers, json=json_body, data=data, stream=stream)
+                # response = session.get(url, headers=merged_headers, json=json_body, data=data, stream=stream)
+                # response = requests.request(
+                #     method=method,
+                #     url=url,
+                #     headers=merged_headers,
+                #     json=json_body,
+                #     data=data,
+                #     files=files,
+                #     params=params,
+                #     stream=stream,
+                #     verify=self.verify_ssl,
+                # )
+                end_time = time.perf_counter()
+                total_duration += end_time - start_time
+                response_list.append(response)
+            return {"duration": total_duration, "response_list": response_list}
+        else:
+            return http_function(url, headers=merged_headers, json=json_body, data=data, stream=stream)
+            # return session.get(url, headers=merged_headers, json=json_body, data=data, stream=stream)
+            # return requests.request(
+            #     method=method,
+            #     url=url,
+            #     headers=merged_headers,
+            #     json=json_body,
+            #     data=data,
+            #     files=files,
+            #     params=params,
+            #     stream=stream,
+            #     verify=self.verify_ssl,
+            # )
+
+    def request_json(
+        self,
+        method: str,
+        path: str,
+        *,
+        use_api_base: bool = True,
+        auth_kind: Optional[str] = "api",
+        headers: Optional[Dict[str, str]] = None,
+        json_body: Optional[Dict[str, Any]] = None,
+        data: Any = None,
+        files: Any = None,
+        params: Optional[Dict[str, Any]] = None,
+        stream: bool = False,
+    ) -> Dict[str, Any]:
+        response = self.request(
+            method,
+            path,
+            use_api_base=use_api_base,
+            auth_kind=auth_kind,
+            headers=headers,
+            json_body=json_body,
+            data=data,
+            files=files,
+            params=params,
+            stream=stream,
+        )
+        try:
+            return response.json()
+        except Exception as exc:
+            raise ValueError(f"Non-JSON response from {path}: {exc}") from exc
+
+    @staticmethod
+    def parse_json_bytes(raw: bytes) -> Dict[str, Any]:
+        try:
+            return json.loads(raw.decode("utf-8"))
+        except Exception as exc:
+            raise ValueError(f"Invalid JSON payload: {exc}") from exc
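For orientation, here is a minimal usage sketch of the new `HttpClient` (not part of the package itself): the host, port, and API key are placeholders, and `/datasets` merely stands in for an Admin Service path. It exercises both the plain `request_json` call and the `iterations > 1` timing branch shown above.

```python
from http_client import HttpClient

# Placeholder connection details; substitute real Admin Service values.
client = HttpClient(host="127.0.0.1", port=9381, api_key="YOUR_API_KEY")

# Single request: request_json() decodes the JSON body or raises ValueError.
payload = client.request_json("GET", "/datasets")  # assumed path
print(payload)

# iterations > 1 flips request() into its benchmarking mode, returning the
# summed wall-clock duration plus every raw response.
timing = client.request("GET", "/datasets", iterations=10)
print(f"10 GETs took {timing['duration']:.3f}s")
print([resp.status_code for resp in timing["response_list"]])
```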
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/parser.py

@@ -77,10 +77,17 @@ sql_command: login_user
     | drop_user_dataset
     | list_user_datasets
     | list_user_dataset_files
+    | list_user_dataset_documents
+    | list_user_datasets_metadata
+    | list_user_documents_metadata_summary
     | list_user_agents
     | list_user_chats
     | create_user_chat
     | drop_user_chat
+    | create_dataset_table
+    | drop_dataset_table
+    | create_metadata_table
+    | drop_metadata_table
     | list_user_model_providers
     | list_user_default_models
     | parse_dataset_docs
@@ -88,15 +95,35 @@ sql_command: login_user
     | parse_dataset_async
     | import_docs_into_dataset
     | search_on_datasets
+    | get_chunk
+    | list_chunks
+    | insert_dataset_from_file
+    | insert_metadata_from_file
+    | update_chunk
+    | set_metadata
+    | remove_tags
+    | remove_chunks
+    | create_chat_session
+    | drop_chat_session
+    | list_chat_sessions
+    | chat_on_session
+    | list_server_configs
+    | show_fingerprint
+    | set_license
+    | set_license_config
+    | show_license
+    | check_license
     | benchmark
 
 // meta command definition
 meta_command: "\\" meta_command_name [meta_args]
 
+COMMA: ","
+
 meta_command_name: /[a-zA-Z?]+/
 meta_args: (meta_arg)+
 
-meta_arg: /[
+meta_arg: /[^\s"',]+/ | quoted_string
 
 // command definition
 
@@ -117,6 +144,7 @@ ALTER: "ALTER"i
 ACTIVE: "ACTIVE"i
 ADMIN: "ADMIN"i
 PASSWORD: "PASSWORD"i
+DATASET_TABLE: "DATASET TABLE"i
 DATASET: "DATASET"i
 DATASETS: "DATASETS"i
 OF: "OF"i
@@ -151,11 +179,18 @@ DEFAULT: "DEFAULT"i
 CHATS: "CHATS"i
 CHAT: "CHAT"i
 FILES: "FILES"i
+DOCUMENT: "DOCUMENT"i
+DOCUMENTS: "DOCUMENTS"i
+METADATA: "METADATA"i
+SUMMARY: "SUMMARY"i
 AS: "AS"i
 PARSE: "PARSE"i
 IMPORT: "IMPORT"i
 INTO: "INTO"i
+IN: "IN"i
 WITH: "WITH"i
+VECTOR: "VECTOR"i
+SIZE: "SIZE"i
 PARSER: "PARSER"i
 PIPELINE: "PIPELINE"i
 SEARCH: "SEARCH"i
@@ -170,8 +205,28 @@ ASYNC: "ASYNC"i
 SYNC: "SYNC"i
 BENCHMARK: "BENCHMARK"i
 PING: "PING"i
-
-
+SESSION: "SESSION"i
+SESSIONS: "SESSIONS"i
+SERVER: "SERVER"i
+FINGERPRINT: "FINGERPRINT"i
+LICENSE: "LICENSE"i
+CHECK: "CHECK"i
+CONFIG: "CONFIG"i
+INDEX: "INDEX"i
+TABLE: "TABLE"i
+CHUNK: "CHUNK"i
+CHUNKS: "CHUNKS"i
+GET: "GET"i
+INSERT: "INSERT"i
+PAGE: "PAGE"i
+KEYWORDS: "KEYWORDS"i
+AVAILABLE: "AVAILABLE"i
+FILE: "FILE"i
+UPDATE: "UPDATE"i
+REMOVE: "REMOVE"i
+TAGS: "TAGS"i
+
+login_user: LOGIN USER quoted_string (PASSWORD quoted_string)? ";"
 list_services: LIST SERVICES ";"
 show_service: SHOW SERVICE NUMBER ";"
 startup_service: STARTUP SERVICE NUMBER ";"
@@ -215,6 +270,14 @@ list_variables: LIST VARS ";"
 list_configs: LIST CONFIGS ";"
 list_environments: LIST ENVS ";"
 
+show_fingerprint: SHOW FINGERPRINT ";"
+set_license: SET LICENSE quoted_string ";"
+set_license_config: SET LICENSE CONFIG NUMBER NUMBER ";"
+show_license: SHOW LICENSE ";"
+check_license: CHECK LICENSE ";"
+
+list_server_configs: LIST SERVER CONFIGS ";"
+
 benchmark: BENCHMARK NUMBER NUMBER user_statement
 
 user_statement: ping_server
@@ -246,6 +309,13 @@ user_statement: ping_server
     | list_user_default_models
     | import_docs_into_dataset
     | search_on_datasets
+    | update_chunk
+    | set_metadata
+    | remove_tags
+    | create_chat_session
+    | drop_chat_session
+    | list_chat_sessions
+    | chat_on_session
 
 ping_server: PING ";"
 show_current_user: SHOW CURRENT USER ";"
@@ -270,24 +340,46 @@ create_user_dataset_with_parser: CREATE DATASET quoted_string WITH EMBEDDING quo
 create_user_dataset_with_pipeline: CREATE DATASET quoted_string WITH EMBEDDING quoted_string PIPELINE quoted_string ";"
 drop_user_dataset: DROP DATASET quoted_string ";"
 list_user_dataset_files: LIST FILES OF DATASET quoted_string ";"
+list_user_dataset_documents: LIST DOCUMENTS OF DATASET quoted_string ";"
+list_user_datasets_metadata: LIST METADATA OF DATASETS quoted_string (COMMA quoted_string)* ";"
+list_user_documents_metadata_summary: LIST METADATA SUMMARY OF DATASET quoted_string (DOCUMENTS quoted_string (COMMA quoted_string)*)? ";"
 list_user_agents: LIST AGENTS ";"
 list_user_chats: LIST CHATS ";"
 create_user_chat: CREATE CHAT quoted_string ";"
 drop_user_chat: DROP CHAT quoted_string ";"
+create_chat_session: CREATE CHAT quoted_string SESSION ";"
+drop_chat_session: DROP CHAT quoted_string SESSION quoted_string ";"
+list_chat_sessions: LIST CHAT quoted_string SESSIONS ";"
+chat_on_session: CHAT quoted_string ON quoted_string SESSION quoted_string ";"
 list_user_model_providers: LIST MODEL PROVIDERS ";"
 list_user_default_models: LIST DEFAULT MODELS ";"
 import_docs_into_dataset: IMPORT quoted_string INTO DATASET quoted_string ";"
 search_on_datasets: SEARCH quoted_string ON DATASETS quoted_string ";"
+get_chunk: GET CHUNK quoted_string ";"
+list_chunks: LIST CHUNKS OF DOCUMENT quoted_string ("PAGE" NUMBER)? ("SIZE" NUMBER)? ("KEYWORDS" quoted_string)? ("AVAILABLE" NUMBER)? ";"
+set_metadata: SET METADATA OF DOCUMENT quoted_string TO quoted_string ";"
+remove_tags: REMOVE TAGS quoted_string (COMMA quoted_string)* FROM DATASET quoted_string ";"
+remove_chunks: REMOVE CHUNKS quoted_string (COMMA quoted_string)* FROM DOCUMENT quoted_string ";"
+    | REMOVE ALL CHUNKS FROM DOCUMENT quoted_string ";"
 
 parse_dataset_docs: PARSE quoted_string OF DATASET quoted_string ";"
 parse_dataset_sync: PARSE DATASET quoted_string SYNC ";"
 parse_dataset_async: PARSE DATASET quoted_string ASYNC ";"
 
-
+// Internal CLI only for GO
+create_dataset_table: CREATE DATASET TABLE quoted_string VECTOR SIZE NUMBER ";"
+drop_dataset_table: DROP DATASET TABLE quoted_string ";"
+create_metadata_table: CREATE METADATA TABLE ";"
+drop_metadata_table: DROP METADATA TABLE ";"
+insert_dataset_from_file: INSERT DATASET FROM FILE quoted_string ";"
+insert_metadata_from_file: INSERT METADATA FROM FILE quoted_string ";"
+update_chunk: UPDATE CHUNK quoted_string OF DATASET quoted_string SET quoted_string ";"
+
+identifier_list: identifier (COMMA identifier)*
 
 identifier: WORD
 quoted_string: QUOTED_STRING
-status: WORD
+status: ON | WORD
 
 QUOTED_STRING: /'[^']+'/ | /"[^"]+"/
 WORD: /[a-zA-Z0-9_\-\.]+/
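The transformer changes that follow index into `items` positionally, which relies on Lark keeping named terminals (COMMA, FROM, DATASET, and so on) in the parse tree while filtering anonymous literals such as ";". Here is a self-contained sketch of that mechanic using a trimmed-down copy of the new `remove_tags` rule; it is an illustration, not the shipped grammar.

```python
from lark import Lark, Transformer

# Minimal grammar fragment (assumed subset of parser.py's grammar).
GRAMMAR = r"""
start: remove_tags
remove_tags: REMOVE TAGS quoted_string (COMMA quoted_string)* FROM DATASET quoted_string ";"
REMOVE: "REMOVE"i
TAGS: "TAGS"i
FROM: "FROM"i
DATASET: "DATASET"i
COMMA: ","
quoted_string: QUOTED_STRING
QUOTED_STRING: /'[^']+'/ | /"[^"]+"/
%import common.WS
%ignore WS
"""

class DemoTransformer(Transformer):
    def remove_tags(self, items):
        # Same token-walking pattern as RAGFlowCLITransformer: collect
        # quoted_string subtrees until FROM, then unwrap the dataset name.
        tags, dataset_name = [], None
        for i, item in enumerate(items):
            if hasattr(item, "type") and item.type == "FROM":
                dataset_name = items[i + 2].children[0].strip("'\"")
                break
            if hasattr(item, "children") and item.children:
                tags.append(item.children[0].strip("'\""))
        return {"type": "remove_tags", "dataset_name": dataset_name, "tags": tags}

parser = Lark(GRAMMAR, start="start")
tree = parser.parse("REMOVE TAGS 'draft', 'obsolete' FROM DATASET 'kb1';")
print(DemoTransformer().transform(tree).children[0])
# -> {'type': 'remove_tags', 'dataset_name': 'kb1', 'tags': ['draft', 'obsolete']}
```

The same skip-the-Token, unwrap-the-Tree pattern recurs in most of the new transformer methods below.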
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/parser.py (continued)

@@ -307,7 +399,13 @@ class RAGFlowCLITransformer(Transformer):
 
     def login_user(self, items):
         email = items[2].children[0].strip("'\"")
-        return {"type": "login_user", "email": email}
+        if len(items) == 5:
+            # With password: LOGIN USER email PASSWORD password
+            password = items[4].children[0].strip("'\"")
+            return {"type": "login_user", "email": email, "password": password}
+        else:
+            # Without password: LOGIN USER email
+            return {"type": "login_user", "email": email}
 
     def ping_server(self, items):
         return {"type": "ping_server"}
@@ -459,6 +557,27 @@ class RAGFlowCLITransformer(Transformer):
     def list_environments(self, items):
         return {"type": "list_environments"}
 
+    def show_fingerprint(self, items):
+        return {"type": "show_fingerprint"}
+
+    def set_license(self, items):
+        license = items[2].children[0].strip("'\"")
+        return {"type": "set_license", "license": license}
+
+    def set_license_config(self, items):
+        value1: int = int(items[3])
+        value2: int = int(items[4])
+        return {"type": "set_license_config", "value1": value1, "value2": value2}
+
+    def show_license(self, items):
+        return {"type": "show_license"}
+
+    def check_license(self, items):
+        return {"type": "check_license"}
+
+    def list_server_configs(self, items):
+        return {"type": "list_server_configs"}
+
     def create_model_provider(self, items):
         provider_name = items[3].children[0].strip("'\"")
         provider_key = items[4].children[0].strip("'\"")
@@ -538,6 +657,28 @@
         dataset_name = items[4].children[0].strip("'\"")
         return {"type": "list_user_dataset_files", "dataset_name": dataset_name}
 
+    def list_user_dataset_documents(self, items):
+        dataset_name = items[4].children[0].strip("'\"")
+        return {"type": "list_user_dataset_documents", "dataset_name": dataset_name}
+
+    def list_user_datasets_metadata(self, items):
+        dataset_names = []
+        dataset_names.append(items[4].children[0].strip("'\""))
+        for i in range(5, len(items)):
+            if items[i] and hasattr(items[i], 'children') and items[i].children:
+                dataset_names.append(items[i].children[0].strip("'\""))
+        return {"type": "list_user_datasets_metadata", "dataset_names": dataset_names}
+
+    def list_user_documents_metadata_summary(self, items):
+        dataset_name = items[5].children[0].strip("'\"")
+        doc_ids = []
+        if len(items) > 6 and items[6] == "DOCUMENTS":
+            for i in range(7, len(items)):
+                if items[i] and hasattr(items[i], 'children') and items[i].children:
+                    doc_id = items[i].children[0].strip("'\"")
+                    doc_ids.append(doc_id)
+        return {"type": "list_user_documents_metadata_summary", "dataset_name": dataset_name, "document_ids": doc_ids}
+
     def list_user_agents(self, items):
         return {"type": "list_user_agents"}
 
@@ -552,6 +693,30 @@
         chat_name = items[2].children[0].strip("'\"")
         return {"type": "drop_user_chat", "chat_name": chat_name}
 
+    def create_dataset_table(self, items):
+        dataset_name = None
+        vector_size = None
+        for i, item in enumerate(items):
+            if hasattr(item, 'data') and item.data == 'quoted_string':
+                dataset_name = item.children[0].strip("'\"")
+            if hasattr(item, 'type') and item.type == 'NUMBER':
+                if i > 0 and items[i-1].type == 'SIZE' and items[i-2].type == 'VECTOR':
+                    vector_size = int(item)
+        return {"type": "create_dataset_table", "dataset_name": dataset_name, "vector_size": vector_size}
+
+    def drop_dataset_table(self, items):
+        dataset_name = None
+        for item in items:
+            if hasattr(item, 'data') and item.data == 'quoted_string':
+                dataset_name = item.children[0].strip("'\"")
+        return {"type": "drop_dataset_table", "dataset_name": dataset_name}
+
+    def create_metadata_table(self, items):
+        return {"type": "create_metadata_table"}
+
+    def drop_metadata_table(self, items):
+        return {"type": "drop_metadata_table"}
+
     def list_user_model_providers(self, items):
         return {"type": "list_user_model_providers"}
 
@@ -575,6 +740,25 @@
         dataset_name = items[2].children[0].strip("'\"")
         return {"type": "parse_dataset", "dataset_name": dataset_name, "method": "async"}
 
+    def create_chat_session(self, items):
+        chat_name = items[2].children[0].strip("'\"")
+        return {"type": "create_chat_session", "chat_name": chat_name}
+
+    def drop_chat_session(self, items):
+        chat_name = items[2].children[0].strip("'\"")
+        session_id = items[4].children[0].strip("'\"")
+        return {"type": "drop_chat_session", "chat_name": chat_name, "session_id": session_id}
+
+    def list_chat_sessions(self, items):
+        chat_name = items[2].children[0].strip("'\"")
+        return {"type": "list_chat_sessions", "chat_name": chat_name}
+
+    def chat_on_session(self, items):
+        message = items[1].children[0].strip("'\"")
+        chat_name = items[3].children[0].strip("'\"")
+        session_id = items[5].children[0].strip("'\"")
+        return {"type": "chat_on_session", "message": message, "chat_name": chat_name, "session_id": session_id}
+
     def import_docs_into_dataset(self, items):
         document_list_str = items[1].children[0].strip("'\"")
         document_paths = document_list_str.split(",")
@@ -593,6 +777,103 @@
         datasets = datasets.split(" ")
         return {"type": "search_on_datasets", "datasets": datasets, "question": question}
 
+    def get_chunk(self, items):
+        chunk_id = items[2].children[0].strip("'\"")
+        return {"type": "get_chunk", "chunk_id": chunk_id}
+
+    def insert_dataset_from_file(self, items):
+        file_path = items[4].children[0].strip("'\"")
+        return {"type": "insert_dataset_from_file", "file_path": file_path}
+
+    def insert_metadata_from_file(self, items):
+        file_path = items[4].children[0].strip("'\"")
+        return {"type": "insert_metadata_from_file", "file_path": file_path}
+
+    def update_chunk(self, items):
+        def get_quoted_value(item):
+            if hasattr(item, 'children') and item.children:
+                return item.children[0].strip("'\"")
+            return str(item).strip("'\"")
+
+        chunk_id = get_quoted_value(items[2])
+        dataset_name = get_quoted_value(items[5])
+        json_body = get_quoted_value(items[7])
+        return {"type": "update_chunk", "chunk_id": chunk_id, "dataset_name": dataset_name, "json_body": json_body}
+
+    def set_metadata(self, items):
+        doc_id = items[4].children[0].strip("'\"")
+        meta_json = items[6].children[0].strip("'\"")
+        return {"type": "set_metadata", "doc_id": doc_id, "meta": meta_json}
+
+    def remove_tags(self, items):
+        # items: REMOVE, TAGS, quoted_string(tag1), quoted_string(tag2), ..., FROM, DATASET, quoted_string(dataset_name), ";"
+        tags = []
+        # Start from index 2 (after TAGS keyword) and parse quoted strings until FROM
+        for i in range(2, len(items)):
+            item = items[i]
+            # Check for FROM token to stop
+            if hasattr(item, 'type') and item.type == 'FROM':
+                break
+            if hasattr(item, 'children') and item.children:
+                tag = item.children[0].strip("'\"")
+                tags.append(tag)
+        # Find dataset_name: quoted_string after DATASET
+        dataset_name = None
+        for i, item in enumerate(items):
+            # Check if item is a DATASET token
+            if hasattr(item, 'type') and item.type == 'DATASET':
+                # Next item should be quoted_string
+                dataset_name = items[i + 1].children[0].strip("'\"")
+                break
+        return {"type": "remove_tags", "dataset_name": dataset_name, "tags": tags}
+
+    def remove_chunks(self, items):
+        # Handle two cases:
+        # 1. REMOVE CHUNKS quoted_string (COMMA quoted_string)* FROM DOCUMENT quoted_string ";"
+        # 2. REMOVE ALL CHUNKS FROM DOCUMENT quoted_string ";"
+
+        # Check if it's "REMOVE ALL CHUNKS"
+        for item in items:
+            if hasattr(item, 'type') and item.type == 'ALL':
+                # Find doc_id
+                for j, inner_item in enumerate(items):
+                    if hasattr(inner_item, 'type') and inner_item.type == 'DOCUMENT':
+                        doc_id = items[j + 1].children[0].strip("'\"")
+                        return {"type": "remove_chunks", "doc_id": doc_id, "delete_all": True}
+
+        # Otherwise, we have chunk_ids
+        chunk_ids = []
+        doc_id = None
+        for i, item in enumerate(items):
+            if hasattr(item, 'type') and item.type == 'DOCUMENT':
+                doc_id = items[i + 1].children[0].strip("'\"")
+            elif hasattr(item, 'children') and item.children:
+                val = item.children[0].strip("'\"")
+                # Skip if it's "FROM" or "DOCUMENT"
+                if val.upper() in ['FROM', 'DOCUMENT']:
+                    continue
+                chunk_ids.append(val)
+
+        return {"type": "remove_chunks", "doc_id": doc_id, "chunk_ids": chunk_ids}
+
+    def list_chunks(self, items):
+        doc_id = items[4].children[0].strip("'\"")
+        result = {"type": "list_chunks", "doc_id": doc_id}
+
+        # Parse optional parameters: PAGE, SIZE, KEYWORDS, AVAILABLE
+        # items structure varies based on which params are present
+        for i, item in enumerate(items):
+            if str(item) == "PAGE":
+                result["page"] = int(items[i + 1])
+            elif str(item) == "SIZE":
+                result["size"] = int(items[i + 1])
+            elif str(item) == "KEYWORDS":
+                result["keywords"] = items[i + 1].children[0].strip("'\"")
+            elif str(item) == "AVAILABLE":
+                result["available_int"] = int(items[i + 1])
+
+        return result
+
     def benchmark(self, items):
         concurrency: int = int(items[1])
         iterations: int = int(items[2])
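The transformer stops at command dicts; a separate layer has to turn them into HTTP calls (in this package that is ragflow_client.py, whose 2241 added lines are not shown in this section). A hypothetical dispatch sketch follows; the endpoint paths are invented for illustration and are not taken from the real client.

```python
from http_client import HttpClient

def dispatch(client: HttpClient, cmd: dict) -> dict:
    # Hypothetical routing; the real paths live in ragflow_client.py.
    match cmd["type"]:
        case "get_chunk":
            return client.request_json("GET", f"/chunks/{cmd['chunk_id']}")  # assumed path
        case "remove_chunks":
            body = ({"delete_all": True} if cmd.get("delete_all")
                    else {"chunk_ids": cmd["chunk_ids"]})
            return client.request_json(
                "DELETE", f"/documents/{cmd['doc_id']}/chunks", json_body=body)  # assumed path
        case _:
            raise NotImplementedError(cmd["type"])

client = HttpClient(api_key="YOUR_API_KEY")
print(dispatch(client, {"type": "get_chunk", "chunk_id": "abc123"}))
```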
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "ragflow-cli"
-version = "0.24.0"
+version = "0.25.0"
 description = "Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring. "
 authors = [{ name = "Lynn", email = "lynn_inf@hotmail.com" }]
 license = { text = "Apache License, Version 2.0" }
@@ -11,17 +11,17 @@ dependencies = [
     "beartype>=0.20.0,<1.0.0",
     "pycryptodomex>=3.10.0",
     "lark>=1.1.0",
+    "requests-toolbelt>=1.0.0",
 ]
 
 [dependency-groups]
 test = [
     "pytest>=8.3.5",
     "requests>=2.32.3",
-    "requests-toolbelt>=1.0.0",
 ]
 
 [tool.setuptools]
-py-modules = ["ragflow_cli", "parser"]
+py-modules = ["ragflow_cli", "parser", "http_client", "ragflow_client", "user"]
 
 [project.scripts]
 ragflow-cli = "ragflow_cli:main"
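Because `py-modules` now lists the three new modules, they install as top-level imports alongside `ragflow_cli` and `parser`, which also matches the rewritten top_level.txt (+5 entries) in the file list above. A quick post-install sanity check, assuming a clean environment:

```python
# After `pip install ragflow-cli==0.25.0`, each new module resolves at top level.
import http_client
import ragflow_client
import user

print(http_client.HttpClient)
```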
{ragflow_cli-0.24.0 → ragflow_cli-0.25.0}/ragflow_cli.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragflow-cli
-Version: 0.24.0
+Version: 0.25.0
 Summary: Admin Service's client of [RAGFlow](https://github.com/infiniflow/ragflow). The Admin Service provides user management and system monitoring.
 Author-email: Lynn <lynn_inf@hotmail.com>
 License: Apache License, Version 2.0
@@ -10,6 +10,7 @@ Requires-Dist: requests<3.0.0,>=2.30.0
 Requires-Dist: beartype<1.0.0,>=0.20.0
 Requires-Dist: pycryptodomex>=3.10.0
 Requires-Dist: lark>=1.1.0
+Requires-Dist: requests-toolbelt>=1.0.0
 
 # RAGFlow Admin Service & CLI
 
@@ -61,7 +62,7 @@ It consists of a server-side Service and a command-line client (CLI), both imple
 1. Ensure the Admin Service is running.
 2. Install ragflow-cli.
 ```bash
-pip install ragflow-cli==0.24.0
+pip install ragflow-cli==0.25.0
 ```
 3. Launch the CLI client:
 ```bash