cloudpss 4.0.2__tar.gz → 4.1.1a1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/PKG-INFO +1 -1
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/__init__.py +2 -3
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/function/functionExecution.py +1 -1
- cloudpss-4.1.1a1/cloudpss/job/__init__.py +5 -0
- cloudpss-4.1.1a1/cloudpss/job/job.py +240 -0
- cloudpss-4.1.1a1/cloudpss/job/jobMachine.py +11 -0
- cloudpss-4.1.1a1/cloudpss/job/jobPolicy.py +129 -0
- cloudpss-4.1.1a1/cloudpss/job/jobQueue.py +14 -0
- cloudpss-4.1.1a1/cloudpss/job/jobReceiver.py +33 -0
- cloudpss-4.1.1a1/cloudpss/job/jobTres.py +6 -0
- cloudpss-4.1.1a1/cloudpss/job/messageStreamReceiver.py +127 -0
- cloudpss-4.1.1a1/cloudpss/job/messageStreamSender.py +77 -0
- cloudpss-4.1.1a1/cloudpss/job/view/EMTView.py +216 -0
- cloudpss-4.1.1a1/cloudpss/job/view/IESLabSimulationView.py +5 -0
- cloudpss-4.1.1a1/cloudpss/job/view/IESLabTypicalDayView.py +27 -0
- cloudpss-4.1.1a1/cloudpss/job/view/IESView.py +103 -0
- cloudpss-4.1.1a1/cloudpss/job/view/PowerFlowView.py +80 -0
- cloudpss-4.1.1a1/cloudpss/job/view/__init__.py +42 -0
- cloudpss-4.1.1a1/cloudpss/job/view/view.py +122 -0
- cloudpss-4.1.1a1/cloudpss/model/model.py +759 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/revision.py +9 -9
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/topology.py +4 -2
- cloudpss-4.1.1a1/cloudpss/runner/MessageStreamReceiver.py +193 -0
- cloudpss-4.1.1a1/cloudpss/runner/__init__.py +10 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/receiver.py +2 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/runner.py +91 -18
- cloudpss-4.1.1a1/cloudpss/utils/AsyncIterable.py +27 -0
- cloudpss-4.1.1a1/cloudpss/utils/IO.py +164 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/utils/__init__.py +5 -1
- cloudpss-4.1.1a1/cloudpss/utils/httpAsyncRequest.py +88 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/utils/httprequests.py +6 -1
- cloudpss-4.1.1a1/cloudpss/utils/yamlLoader.py +80 -0
- cloudpss-4.1.1a1/cloudpss/version.py +1 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss.egg-info/PKG-INFO +1 -1
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss.egg-info/SOURCES.txt +21 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss.egg-info/requires.txt +2 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/setup.py +2 -2
- cloudpss-4.1.1a1/test/test-topology.py +26 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/test.py +1 -1
- cloudpss-4.0.2/cloudpss/model/model.py +0 -718
- cloudpss-4.0.2/cloudpss/runner/__init__.py +0 -8
- cloudpss-4.0.2/cloudpss/utils/yamlLoader.py +0 -81
- cloudpss-4.0.2/cloudpss/version.py +0 -1
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/README.md +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/function/__init__.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/function/function.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/function/job.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/DataManageModel.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/EvaluationModel.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/IESLabPlan.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/IESLabSimulation.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/PlanModel.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/ieslab/__init__.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/__init__.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/implements/__init__.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/implements/component.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/implements/diagram.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/implements/implement.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/model/jobDefinitions.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/project/__init__.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/project/project.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/IESLabEvaluationResult.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/IESLabPlanResult.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/IESLabTypicalDayResult.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/result.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/storage.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/runner/transform.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/utils/dataEncoder.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/utils/graphqlUtil.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/utils/matlab.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss/verify.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss.egg-info/dependency_links.txt +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/cloudpss.egg-info/top_level.txt +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/setup.cfg +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/test-sdk.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/test-snapshot.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/test7950.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testAsync.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testEvent.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testRt copy.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testRt.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testSend.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/test_in_new_web_1.py +0 -0
- {cloudpss-4.0.2 → cloudpss-4.1.1a1}/test/testb.py +0 -0
@@ -6,17 +6,16 @@ from .model import Model, ModelRevision, ModelTopology
|
|
6
6
|
from .project import Project
|
7
7
|
from .utils import MatlabDataEncoder, DateTimeEncode
|
8
8
|
from . import function
|
9
|
-
|
9
|
+
from .job import Job
|
10
10
|
from .function import FunctionExecution
|
11
11
|
from .version import __version__
|
12
12
|
__all__ = [
|
13
13
|
'setToken', 'Model', 'ModelRevision', 'ModelTopology', 'Runner', 'Result',
|
14
14
|
'PowerFlowResult', 'EMTResult', 'MatlabDataEncoder', 'DateTimeEncode',
|
15
|
-
'function', 'Project', 'currentJob', 'IESLabSimulation', 'IESLabPlan','__version__'
|
15
|
+
'function', 'Project', 'currentJob', 'IESLabSimulation', 'IESLabPlan','__version__','Job'
|
16
16
|
]
|
17
17
|
|
18
18
|
|
19
|
-
|
20
19
|
def currentJob():
|
21
20
|
"""
|
22
21
|
获取当前的 currentExecution 实例
|
@@ -163,7 +163,7 @@ class FunctionExecution(object):
|
|
163
163
|
sys.exit(code)
|
164
164
|
|
165
165
|
def print(self, data):
|
166
|
-
print(json.dumps(data, cls=MatlabDataEncoder), flush=True)
|
166
|
+
print(json.dumps(data, cls=MatlabDataEncoder)+'\n', flush=True)
|
167
167
|
|
168
168
|
def log(self, content, level='info', html=False, key=None):
|
169
169
|
'''
|
@@ -0,0 +1,240 @@
|
|
1
|
+
import asyncio
|
2
|
+
import random
|
3
|
+
import re
|
4
|
+
import time
|
5
|
+
from cloudpss.utils.AsyncIterable import CustomAsyncIterable
|
6
|
+
|
7
|
+
from cloudpss.utils.httpAsyncRequest import graphql_fetch
|
8
|
+
from .view import getViewClass
|
9
|
+
|
10
|
+
from cloudpss.utils.IO import IO
|
11
|
+
from .messageStreamReceiver import MessageStreamReceiver
|
12
|
+
|
13
|
+
from cloudpss.utils.graphqlUtil import graphql_request
|
14
|
+
from .jobPolicy import JobPolicy
|
15
|
+
from .jobMachine import JobMachine
|
16
|
+
from .messageStreamSender import MessageStreamSender
|
17
|
+
|
18
|
+
|
19
|
+
class Job(object):
    """A CloudPSS computation job.

    Wraps the job record returned by the GraphQL API and provides helpers to
    create, fetch and abort jobs, and to stream their input/output messages.
    """

    def __init__(
        self,
        id,
        args,
        createTime,
        startTime,
        endTime,
        status,
        context,
        user,
        priority,
        policy,
        machine,
        input,
        output,
        position,
    ):
        super(Job, self).__init__()
        self.id = id
        self.args = args
        self.createTime = createTime
        self.startTime = startTime
        self.endTime = endTime
        # Stored as job_status because the raw `status` field would clash
        # with the status() method below.
        self.job_status = status
        self.context = context
        self.user = user
        self.priority = priority
        self.policy = policy  # type: ignore
        self.machine = JobMachine(**machine)  # type: ignore
        self.input = input
        self.output = output
        self.position = position
        self.__receiver = None
        self.__sender = None
        self._result = None

    @staticmethod
    async def fetch(id):
        """Fetch the job record with the given id from the server.

        :param id: job id (must not be None)
        :returns: a Job instance
        :raises Exception: when id is None or the server reports errors
        """
        if id is None:
            raise Exception("id is None")
        query = """query($_a:JobInput!){
            job(input:$_a){
                id
                args
                createTime
                startTime
                endTime
                status
                context
                user
                priority
                policy {
                    name
                    queue
                    tres {
                        cpu
                        ecpu
                        mem
                    }
                    priority
                    maxDuration
                }
                machine {
                    id
                    name
                    tres {
                        cpu
                        ecpu
                        mem
                    }
                }
                input
                output
                position
            }
        }"""
        variables = {"_a": {"id": id}}

        r = await graphql_fetch(query, variables)
        if "errors" in r:
            raise Exception(r["errors"])
        return Job(**r["data"]["job"])

    @staticmethod
    def fetchMany(*args):
        """Fetch several jobs concurrently.

        :returns: an async iterable yielding Job instances
        """
        jobs = CustomAsyncIterable(Job.fetch, *args)
        return jobs

    @staticmethod
    async def create(revisionHash, job, config, name=None, rid="", policy=None, **kwargs):
        """Create (submit) a new computation job.

        :param revisionHash: model revision hash
        :param job: computation scheme; the model's first scheme when empty
        :param config: parameter scheme; the model's first scheme when empty
        :param name: job name; defaults to the scheme names when empty
        :param rid: model rid, may be empty
        :param policy: scheduling-policy overrides, may be None

        :returns: the freshly created Job instance

        >>> runner = Runner.runRevision(revision,job,config,'')
        """

        # Build the scheduling policy from explicit overrides plus the
        # @-prefixed entries carried in the job args.
        if policy is None:
            policy = {}
        if policy.get("tres", None) is None:
            policy["tres"] = {}
        policy["queue"] = job["args"].get("@queue", 1)
        policy["priority"] = job["args"].get("@priority", 0)
        tres = {"cpu": 1, "ecpu": 0, "mem": 0}
        tresStr = job["args"].get("@tres", "")
        # "@tres" is a whitespace-separated list of k=v resource entries.
        for t in re.split(r"\s+", tresStr):
            if t == "":
                continue
            k, v = t.split("=")
            tres[k] = float(v)  # type: ignore
        policy["tres"] = tres

        query = """mutation($input:CreateJobInput!){job:createJob(input:$input){id input output status position}}"""
        function = job["rid"].replace("job-definition/cloudpss/", "function/CloudPSS/")
        variables = {
            "input": {
                "args": {
                    **job["args"],
                    "_ModelRevision": revisionHash,
                    "_ModelArgs": config["args"],
                },
                "context": [
                    function,
                    rid,
                    f"model/@sdk/{str(int(time.time() * random.random()))}",
                ],
                "policy": policy,
            }
        }
        r = await graphql_fetch(query, variables)
        if "errors" in r:
            raise Exception(r["errors"])
        id = r["data"]["job"]["id"]
        return await Job.fetch(id)

    @staticmethod
    async def abort(id, timeout):
        """Abort the running job `id`, allowing `timeout` for a graceful stop."""
        query = """mutation ($input: AbortJobInput!) {
            job: abortJob(input: $input) {
                id
                status
            }
        }
        """
        variables = {"input": {"id": id, "timeout": timeout}}
        await graphql_fetch(query, variables)

    @staticmethod
    def load(file, format="yaml"):
        """Deserialize a job from `file` (delegates to IO.load)."""
        return IO.load(file, format)

    @staticmethod
    def dump(job, file, format="yaml", compress="gzip"):
        """Serialize `job` to `file` (delegates to IO.dump)."""
        return IO.dump(job, file, format, compress)

    async def read(self, receiver=None, dev=False, **kwargs):
        """Attach and connect a receiver streaming this job's output.

        :param receiver: optional externally supplied receiver
        :returns: the connected receiver
        """
        if receiver is not None:
            # BUG FIX: the supplied receiver used to be stored in
            # self.__sender, so it was silently ignored.
            self.__receiver = receiver
        if self.__receiver is None:
            self.__receiver = MessageStreamReceiver(self, dev)
        await self.__receiver.connect(**kwargs)
        return self.__receiver

    async def write(self, sender=None, dev=False, **kwargs) -> "MessageStreamSender":
        """Attach and connect a sender feeding this job's input.

        :param sender: optional externally supplied sender
        :returns: the connected sender
        """
        if sender is not None:
            self.__sender = sender
        if self.__sender is None:
            self.__sender = MessageStreamSender(self, dev)
        await self.__sender.connect(**kwargs)
        return self.__sender

    def status(self):
        """Return the receiver's connection status, or 0 when not connected."""
        if self.__receiver is not None:
            return self.__receiver.status
        return 0

    @property
    def result(self):
        """Lazily build and cache the view over this job's output stream."""
        if self._result is None:
            viewType = getViewClass(self.context[0])
            self._result = asyncio.run(self.view(viewType))
        return self._result

    async def view(self, viewType):
        """Connect both streams and wrap them in the given view class."""
        receiver = await self.read()
        sender = await self.write()
        return viewType(receiver, sender)
|
@@ -0,0 +1,129 @@
|
|
1
|
+
from cloudpss.job.jobQueue import JobQueue
|
2
|
+
from cloudpss.job.jobTres import JobTres
|
3
|
+
from cloudpss.utils.graphqlUtil import graphql_request
|
4
|
+
|
5
|
+
|
6
|
+
class JobPolicy(object):
    """A job scheduling policy record fetched via the GraphQL API."""

    def __init__(self, id, name, users, functions, tres, minPriority,
                 maxPriority, maxDuration, createTime, updateTime, visibility,
                 queue):
        self.id = id
        self.name = name
        self.users = users
        self.functions = functions
        # FIX: removed a stray debug print(tres) left over from development.
        self.tres = []
        for tre in tres:
            self.tres.append(JobTres(**tre))  # type: ignore
        self.minPriority = minPriority
        self.maxPriority = maxPriority
        self.maxDuration = maxDuration
        self.createTime = createTime
        self.updateTime = updateTime
        self.visibility = visibility
        self.queue = JobQueue(**queue)  # type: ignore

    @staticmethod
    def fetch(id):
        """Fetch a single policy by id.

        :raises Exception: when the server reports errors
        """
        query = '''query($input:JobPolicyInput!)
        {
            jobPolicy(input:$input)
            {
                id
                name
                users
                functions
                tres {
                    cpu
                    ecpu
                    mem
                }
                minPriority
                maxPriority
                maxDuration
                createTime
                updateTime
                visibility
                queue {
                    id
                    name
                    scheduler
                    machines {
                        id
                        name
                        tres {
                            cpu
                            ecpu
                            mem
                        }
                        valid
                    }
                    createTime
                    updateTime
                    load
                }
            }

        }'''
        variables = {'input': {'id': id}}
        r = graphql_request(query, variables)
        # FIX: removed a stray debug print(r) left over from development.
        if 'errors' in r:
            raise Exception(r['errors'])
        return JobPolicy(**r['data']['jobPolicy'])

    @staticmethod
    def fetchMany(input):
        """Fetch a page of policies matching `input`.

        :returns: list of JobPolicy
        :raises Exception: when the server reports errors
        """
        query = '''query($input:JobPoliciesInput!)
        {
            jobPolicies(input:$input)
            {
                items {
                    id
                    name
                    users
                    functions
                    tres {
                        cpu
                        ecpu
                        mem
                    }
                    minPriority
                    maxPriority
                    maxDuration
                    createTime
                    updateTime
                    visibility
                    queue {
                        id
                        name
                        scheduler
                        machines {
                            id
                            name
                            tres {
                                cpu
                                ecpu
                                mem
                            }
                            valid
                        }
                        createTime
                        updateTime
                        load
                    }
                }
                cursor
                count
                total
            }
        }'''
        variables = {'input': input}
        r = graphql_request(query, variables)
        if 'errors' in r:
            raise Exception(r['errors'])
        policies = []
        for policy in r['data']['jobPolicies']['items']:
            policies.append(JobPolicy(**policy))
        return policies
|
@@ -0,0 +1,14 @@
|
|
1
|
+
from cloudpss.job.jobMachine import JobMachine
|
2
|
+
|
3
|
+
|
4
|
+
class JobQueue(object):
    """A scheduling queue together with the machines attached to it."""

    def __init__(self, id, name, scheduler, machines, createTime, updateTime,
                 load):
        self.id = id
        self.name = name
        self.scheduler = scheduler
        # Materialize each raw machine mapping into a JobMachine instance.
        self.machines = []
        for machine in machines:
            self.machines.append(JobMachine(**machine))
        self.createTime = createTime
        self.updateTime = updateTime
        self.load = load
|
@@ -0,0 +1,33 @@
|
|
1
|
+
class JobReceiver(object):
    """Base class for job message receivers.

    Holds an append-only message buffer and iterates over it; iteration keeps
    a cursor so new messages appended later can still be consumed.
    """

    def __init__(self):
        # FIX: `messages` and `index` were also declared as *class*
        # attributes; a class-level mutable list risks accidental state
        # sharing across instances. They are now instance-only (behavior is
        # unchanged because __init__ always assigned them anyway).
        self.index = 0
        self.messages = []

    def __len__(self):
        return len(self.messages)

    def __iter__(self):
        return self

    def __next__(self):
        # Serve messages up to the buffer's current length; the cursor
        # persists, so iteration can resume after new messages arrive.
        if self.index < len(self.messages):
            message = self.messages[self.index]
            self.index += 1
            return message
        raise StopIteration()

    def view(self, ViewType):
        """
        Wrap this receiver in a view of the given type.

        :params ViewType: view class to instantiate

        :returns: the view instance built over this receiver

        >>> view= receiver.view(EMTView)
        """
        return ViewType(self)
|
@@ -0,0 +1,127 @@
|
|
1
|
+
import logging
|
2
|
+
|
3
|
+
from cloudpss.utils.httpAsyncRequest import websocket_connect
|
4
|
+
from .jobReceiver import JobReceiver
|
5
|
+
import os
|
6
|
+
from urllib.parse import urlparse
|
7
|
+
import pytz
|
8
|
+
|
9
|
+
utc_tz = pytz.timezone("UTC")
|
10
|
+
|
11
|
+
from ..utils.IO import IO
|
12
|
+
|
13
|
+
|
14
|
+
class Message(object):
    """Value object pairing a message-stream id with its access token."""

    def __init__(self, id, token):
        self.id, self.token = id, token
|
18
|
+
|
19
|
+
|
20
|
+
class MessageStreamReceiver(JobReceiver):
    """Receives a job's output message stream over a websocket."""

    def __init__(self, job, dev=False):
        super().__init__()
        self.job = job
        self.dev = dev
        self.origin = os.environ.get("CLOUDPSS_API_URL", "https://cloudpss.net/")

    async def receive(self, id, fr0m, on_open, on_message, on_error, on_close):
        """
        Open the websocket for message stream `id` and wire up the callbacks.

        id: stream id
        fr0m: position to start reading from; None uses the server default
        on_open: callback fired when the connection is established
        on_message: callback fired for each incoming message
        on_error: callback fired on errors
        on_close: callback fired when the connection closes
        """
        if id is None:
            raise Exception("id is None")
        u = list(urlparse(self.origin))
        head = "wss" if u[0] == "https" else "ws"

        path = head + "://" + str(u[1]) + "/api/streams/id/" + id
        if fr0m is not None:
            # BUG FIX: the path has no query string yet, so the first query
            # parameter must be introduced with '?', not '&'.
            path = path + "?from=" + str(fr0m)
        logging.info(f"receive data from websocket: {path}")
        ws = await websocket_connect(
            path,
            open_func=on_open,
            receive_func=on_message,
            error_func=on_error,
            closed_func=on_close,
        )

        return ws

    # Compatibility layer mirroring part of the legacy Receiver interface.

    def on_message(self, message):
        # Messages arrive double-encoded: an ubjson envelope whose "data"
        # field is itself ubjson.
        data = IO.deserialize(message, "ubjson")
        msg = IO.deserialize(data["data"], "ubjson")
        logging.debug(f"receive message: {msg}")
        if msg.get("type", None) == "log" and self.dev:
            print(msg)
        self.messages.append(msg)

    def on_error(self, ws, error):
        # Errors are surfaced to consumers as a synthetic log message.
        logging.info("MessageStreamReceiver error")
        msg = {
            "type": "log",
            "verb": "create",
            "version": 1,
            "data": {
                "level": "error",
                "content": "websocket error",
            },
        }
        self.messages.append(msg)

    def on_close(self, *args, **kwargs):
        logging.info("MessageStreamReceiver close")
        self._status = 0
        msg = {
            "type": "log",
            "verb": "create",
            "version": 1,
            "data": {
                "level": "error",
                "content": "websocket closed",
            },
        }
        self.messages.append(msg)

    def on_open(self):
        self._status = 1
        self.__hasOpen = True

    def close(self, ws):
        self._status = 0
        ws.close()

    @property
    def status(self):
        # 1 while the websocket is open, 0 otherwise.
        return self._status

    @property
    def end(self):
        """True once the stream is closed."""
        return not self._status

    async def connect(self):
        """Connect to the job's output stream and keep the socket on self.ws."""
        self._status = 0
        self.ws = await self.receive(
            self.job.output,
            None,
            self.on_open,
            self.on_message,
            self.on_error,
            self.on_close,
        )
|
@@ -0,0 +1,77 @@
|
|
1
|
+
import sys, os
|
2
|
+
import threading
|
3
|
+
from urllib.parse import urlparse
|
4
|
+
|
5
|
+
from cloudpss.utils.httpAsyncRequest import websocket_connect
|
6
|
+
|
7
|
+
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
|
8
|
+
|
9
|
+
import websocket
|
10
|
+
|
11
|
+
from cloudpss.utils.IO import IO
|
12
|
+
import time
|
13
|
+
import logging
|
14
|
+
|
15
|
+
|
16
|
+
class MessageStreamSender:
    """Sends input messages to a running job over a websocket."""

    def __init__(self, job, dev=False):
        super().__init__()
        self.job = job
        self.dev = dev
        self.origin = os.environ.get("CLOUDPSS_API_URL", "https://cloudpss.net/")

    # Compatibility layer mirroring part of the legacy Receiver interface.

    def on_message(self, ws, message):
        # BUG FIX: logging.info("on_message", message) passed `message` as a
        # %-format argument with no placeholder, which triggers a formatting
        # error inside the logging machinery instead of logging the payload.
        logging.info("on_message: %s", message)

    def on_error(self, ws, error):
        logging.info("on_error")

    def on_close(self, *args, **kwargs):
        # Brief grace period so in-flight frames can drain before the status
        # flips to closed.
        time.sleep(0.5)
        self._status = 0

        logging.info("on_close")

    def on_open(self):
        self._status = 1
        logging.info("on_open")

    def close(self):
        self._status = 0
        self.ws.close()

    @property
    def status(self):
        # 1 while the websocket is open, 0 otherwise.
        return self._status

    def write(self, message):
        """Serialize `message` as ubjson and push it down the socket."""
        data = IO.serialize(message, "ubjson", None)
        self.ws.send_bytes(data)

    async def connect(self):
        """Open the websocket for this job's input stream (token-addressed)."""
        logging.info("connect")
        self._status = 0
        if self.job.input is None:
            raise Exception("id is None")
        u = list(urlparse(self.origin))
        head = "wss" if u[0] == "https" else "ws"

        path = head + "://" + str(u[1]) + "/api/streams/token/" + self.job.input
        logging.info(f"receive data from websocket: {path}")

        self.ws = await websocket_connect(
            path,
            open_func=self.on_open,
            receive_func=self.on_message,
            error_func=self.on_error,
            closed_func=self.on_close,
        )
|