cloudpss 4.1.1b8-py3-none-any.whl → 4.1.1b9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. cloudpss/asyncio/__init__.py +8 -0
  2. cloudpss/asyncio/job/__init__.py +5 -0
  3. cloudpss/asyncio/job/job.py +116 -0
  4. cloudpss/asyncio/job/messageStreamReceiver.py +121 -0
  5. cloudpss/asyncio/job/messageStreamSender.py +45 -0
  6. cloudpss/asyncio/model/__init__.py +5 -0
  7. cloudpss/asyncio/model/model.py +257 -0
  8. cloudpss/asyncio/model/revision.py +41 -0
  9. cloudpss/asyncio/model/topology.py +34 -0
  10. cloudpss/asyncio/utils/__init__.py +6 -0
  11. cloudpss/{utils → asyncio/utils}/httpAsyncRequest.py +2 -2
  12. cloudpss/dslab/dslab.py +115 -21
  13. cloudpss/function/functionExecution.py +10 -0
  14. cloudpss/ieslab/DataManageModel.py +65 -16
  15. cloudpss/ieslab/IESLabPlan.py +78 -0
  16. cloudpss/ieslab/IESLabSimulation.py +59 -32
  17. cloudpss/ieslab/PlanModel.py +11 -7
  18. cloudpss/job/job.py +119 -132
  19. cloudpss/job/jobReceiver.py +7 -1
  20. cloudpss/job/messageStreamReceiver.py +42 -99
  21. cloudpss/job/messageStreamSender.py +5 -42
  22. cloudpss/job/view/EMTView.py +9 -11
  23. cloudpss/job/view/IESLabTypicalDayView.py +114 -5
  24. cloudpss/job/view/view.py +36 -7
  25. cloudpss/model/implements/diagram.py +113 -0
  26. cloudpss/model/jobDefinitions.py +6 -6
  27. cloudpss/model/model.py +181 -193
  28. cloudpss/model/revision.py +29 -34
  29. cloudpss/model/topology.py +12 -14
  30. cloudpss/runner/MessageStreamReceiver.py +2 -0
  31. cloudpss/utils/httprequests.py +6 -5
  32. cloudpss/version.py +1 -1
  33. {cloudpss-4.1.1b8.dist-info → cloudpss-4.1.1b9.dist-info}/METADATA +1 -1
  34. {cloudpss-4.1.1b8.dist-info → cloudpss-4.1.1b9.dist-info}/RECORD +37 -31
  35. cloudpss/job/jobMachine.py +0 -11
  36. cloudpss/job/jobPolicy.py +0 -129
  37. cloudpss/job/jobQueue.py +0 -14
  38. cloudpss/job/jobTres.py +0 -6
  39. /cloudpss/{utils → asyncio/utils}/AsyncIterable.py +0 -0
  40. {cloudpss-4.1.1b8.dist-info → cloudpss-4.1.1b9.dist-info}/WHEEL +0 -0
  41. {cloudpss-4.1.1b8.dist-info → cloudpss-4.1.1b9.dist-info}/top_level.txt +0 -0
cloudpss/job/job.py CHANGED
@@ -2,23 +2,53 @@ import asyncio
  import random
  import re
  import time
- from cloudpss.utils.AsyncIterable import CustomAsyncIterable

- from cloudpss.utils.httpAsyncRequest import graphql_fetch
+ from cloudpss.job.view.view import View
  from .view import getViewClass

  from cloudpss.utils.IO import IO
  from .messageStreamReceiver import MessageStreamReceiver

  from cloudpss.utils.graphqlUtil import graphql_request
- from .jobPolicy import JobPolicy
- from .jobMachine import JobMachine
  from .messageStreamSender import MessageStreamSender
- from typing import Any, Callable, TypeVar
+ from typing import Any, Callable, Generic, TypeVar
  F = TypeVar('F', bound=Callable[..., Any])
- class Job(object):
+ T = TypeVar('T', bound=Callable[..., View])
+ class Job(Generic[T]):
  """docstring for Job"""
-
+ __jobQuery = """query($_a:JobInput!){
+ job(input:$_a){
+ id
+ args
+ createTime
+ startTime
+ endTime
+ status
+ context
+ user
+ priority
+ policy {
+ name
+ queue
+ tres {
+ cpu
+ ecpu
+ mem
+ }
+ priority
+ maxDuration
+ }
+ machine {
+ id
+ name
+ }
+ input
+ output
+ position
+ }
+ }"""
+
+ __createJobQuery = """mutation($input:CreateJobInput!){job:createJob(input:$input){id input output status position}}"""
  def __init__(
  self,
  id,
@@ -47,7 +77,7 @@ class Job(object):
  self.user = user
  self.priority = priority
  self.policy = policy # type: ignore
- self.machine = JobMachine(**machine) # type: ignore
+ self.machine = machine # type: ignore
  self.input = input
  self.output = output
  self.position = position
@@ -57,82 +87,32 @@ class Job(object):

  @staticmethod
  def fetch(id):
- return asyncio.run(Job.fetchAsync(id))
- @staticmethod
- async def fetchAsync(id):
  """
  获取job信息
  """
  if id is None:
  raise Exception("id is None")
- query = """query($_a:JobInput!){
- job(input:$_a){
- id
- args
- createTime
- startTime
- endTime
- status
- context
- user
- priority
- policy {
- name
- queue
- tres {
- cpu
- ecpu
- mem
- }
- priority
- maxDuration
- }
- machine {
- id
- name
- tres {
- cpu
- ecpu
- mem
- }
- }
- input
- output
- position
- }
- }"""
+
  variables = {"_a": {"id": id}}

- r = await graphql_fetch(query, variables)
+ r = graphql_request(Job.__jobQuery, variables)
  if "errors" in r:
  raise Exception(r["errors"])
  return Job(**r["data"]["job"])
-
- @staticmethod
- def fetchMany(*args):
- """
- 批量获取任务信息
- """
- jobs = CustomAsyncIterable(Job.fetchAsync,*args)
- return jobs
+
+
+ # @staticmethod
+ # def fetchMany(*args):
+ # """
+ # 批量获取任务信息
+ # """
+ # # jobs = CustomAsyncIterable(Job.fetch,*args)
+ # # return jobs


+
  @staticmethod
- async def create(revisionHash, job, config, name=None, rid="", policy=None, **kwargs):
- """
- 创建一个运行任务
-
- :params: revision 项目版本号
- :params: job 调用仿真时使用的计算方案,为空时使用项目的第一个计算方案
- :params: config 调用仿真时使用的参数方案,为空时使用项目的第一个参数方案
- :params: name 任务名称,为空时使用项目的参数方案名称和计算方案名称
- :params: rid 项目rid,可为空
-
- :return: 返回一个运行实例
-
- >>> runner = Runner.runRevision(revision,job,config,'')
- """
-
+ def __createJobVariables(job, config, revisionHash, rid, policy, **kwargs):
  # 处理policy字段
  if policy is None:
  policy = {}
@@ -148,15 +128,21 @@ class Job(object):
  k, v = t.split("=")
  tres[k] = float(v) # type: ignore
  policy["tres"] = tres
-
- query = """mutation($input:CreateJobInput!){job:createJob(input:$input){id input output status position}}"""
  function = job["rid"].replace("job-definition/cloudpss/", "function/CloudPSS/")
+ implement = kwargs.get("implement", None)
+ debug = job["args"].get("@debug", None )
+ debugargs={}
+ if debug is not None:
+ t= [ i.split('=') for i in re.split(r'\s+',debug) if i.find('=')>0]
+ for i in t:
+ debugargs[i[0]]=i[1]
  variables = {
  "input": {
  "args": {
  **job["args"],
  "_ModelRevision": revisionHash,
  "_ModelArgs": config["args"],
+ "implement":implement
  },
  "context": [
  function,
@@ -164,29 +150,33 @@ class Job(object):
  f"model/@sdk/{str(int(time.time() * random.random()))}",
  ],
  "policy": policy,
+ "debug":debugargs
  }
  }
- r = await graphql_fetch(query, variables)
- if "errors" in r:
- raise Exception(r["errors"])
- id = r["data"]["job"]["id"]
- return await Job.fetchAsync(id)
-
+ return variables
  @staticmethod
- async def abort(id, timeout):
+ def create(revisionHash, job, config, name=None, rid="", policy=None, **kwargs):
  """
- 结束当前运行的算例
+ 创建一个运行任务

+ :params: revision 项目版本号
+ :params: job 调用仿真时使用的计算方案,为空时使用项目的第一个计算方案
+ :params: config 调用仿真时使用的参数方案,为空时使用项目的第一个参数方案
+ :params: name 任务名称,为空时使用项目的参数方案名称和计算方案名称
+ :params: rid 项目rid,可为空
+
+ :return: 返回一个运行实例
+
+ >>> runner = Runner.runRevision(revision,job,config,'')
  """
- query = """mutation ($input: AbortJobInput!) {
- job: abortJob(input: $input) {
- id
- status
- }
- }
- """
- variables = {"input": {"id": id, "timeout": timeout}}
- await graphql_fetch(query, variables)
+ variables=Job.__createJobVariables(job, config, revisionHash, rid, policy)
+ r = graphql_request(Job.__createJobQuery, variables)
+ if "errors" in r:
+ raise Exception(r["errors"])
+ id = r["data"]["job"]["id"]
+ return Job.fetch(id)
+
+

  @staticmethod
  def load(file, format="yaml"):
@@ -196,49 +186,30 @@
  def dump(job, file, format="yaml", compress="gzip"):
  return IO.dump(job, file, format, compress)

- async def read(self, receiver=None, dev=False, **kwargs):
- """
- 使用接收器获取当前运行实例的输出
- """
- if receiver is not None:
- self.__receiver = receiver
- if self.__receiver is None:
- self.__receiver = MessageStreamReceiver(self, dev)
- await self.__receiver.connect(**kwargs)
- return self.__receiver
+

- def read_legacy(self, receiver=None, dev=False, **kwargs):
+ def read(self, receiver=None, **kwargs):
  """
  使用接收器获取当前运行实例的输出
  """
  if receiver is not None:
  self.__receiver = receiver
  if self.__receiver is None:
- self.__receiver = MessageStreamReceiver(self, dev)
- self.__receiver.connect_legacy(**kwargs)
+ self.__receiver = MessageStreamReceiver(self)
+ self.__receiver.connect(**kwargs)
  return self.__receiver

- async def write(self, sender=None, dev=False, **kwargs) -> MessageStreamSender:
- """
- 使用发送器为当前运行实例输入
- """
-
- if sender is not None:
- self.__sender = sender
- if self.__sender is None:
- self.__sender = MessageStreamSender(self, dev)
- await self.__sender.connect(**kwargs)
- return self.__sender

- def write_legacy(self, sender=None, dev=False, **kwargs) -> MessageStreamSender:
+
+ def write(self, sender=None, **kwargs) -> MessageStreamSender:
  """
- 使用发送器为当前运行实例输入同步方法
+ 使用发送器为当前运行实例输入
  """

  if sender is not None:
  self.__sender = sender
  if self.__sender is None:
- self.__sender = MessageStreamSender(self, dev)
+ self.__sender = MessageStreamSender(self)
  self.__sender.connect_legacy(**kwargs)
  return self.__sender

@@ -246,43 +217,59 @@
  """
  return: 0: 运行中 1: 运行完成 2: 运行失败

-
+ time.sleep(0)
  if self.__receiver is not None:
  return self.__receiver.status
  if self.__receiver is None:
- self.__connect_legacy()
+ self.__connect()
+
  return 0

- def __connect_legacy(self):
+ def __connect(self):
  """
  连接接收器和发送器
  """
  viewType = getViewClass(self.context[0])
- self._result = self.view_legacy(viewType)
+ self._result = self.view(viewType)

  @property
- def result(self):
+ def result(self)->T:
  """
  获取当前运行实例的输出
  """
  if self._result is None:
- self.__connect_legacy()
+ self.__connect()
  return self._result


- def view_legacy(self, viewType:F)->F:
+ def view(self, viewType:F=None)->F:
  """
  获取当前运行实例的输出
  """
- receiver = self.read_legacy()
- sender = self.write_legacy()
+ receiver = self.read()
+ sender = self.write()
+ if viewType is None:
+ viewType = getViewClass(self.context[0])
+
  return viewType(receiver, sender)

- async def view(self, viewType:F)->F:
+
+
+ def abort(self,timeout=3):
  """
- 获取当前运行实例的输出
+ 中断当前运行实例
  """
- receiver = await self.read()
- sender = await self.write()
- self._result= viewType(receiver, sender)
- return self._result
+ query = '''mutation ($input: AbortJobInput!) {
+ job: abortJob(input: $input) {
+ id
+ status
+ }
+ }
+ '''
+ variables = {
+ 'input': {
+ 'id': self.taskId,
+ 'timeout': timeout
+ }
+ }
+ graphql_request(query, variables)
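
For orientation, a minimal sketch of how the reworked synchronous Job API might be driven after this change (the job id is a hypothetical placeholder; Job.create(...) would normally supply the instance):

import time
from cloudpss.job.job import Job

job = Job.fetch("some-job-id")   # hypothetical id; blocking graphql_request replaces the old await Job.fetchAsync(id)
while job.status() == 0:         # 0: running, 1: finished, 2: failed
    time.sleep(1)
print(job.result)                # property lazily connects receiver/sender and builds the view for context[0]
# job.abort(timeout=3)           # abort is now an instance method issuing the abortJob mutation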
cloudpss/job/jobReceiver.py CHANGED
@@ -1,3 +1,4 @@
+ from deprecated import deprecated
  class JobReceiver(object):
  messages = []
  index = 0
@@ -30,4 +31,9 @@ class JobReceiver(object):

  >>> view= receiver.view(EMTView)
  """
- return ViewType(self)
+ return ViewType(self)
+
+ @property
+ @deprecated(version='3.0', reason="该方法将在 5.0 版本移除")
+ def message(self):
+ return self.messages
cloudpss/job/messageStreamReceiver.py CHANGED
@@ -1,9 +1,5 @@
  import logging
-
- import aiohttp
- from aiohttp import WSMsgType
-
- from cloudpss.utils.httpAsyncRequest import websocket_connect
+ import sys
  from .jobReceiver import JobReceiver
  import os
  from urllib.parse import urlparse
@@ -24,91 +20,36 @@ class Message(object):


  class MessageStreamReceiver(JobReceiver):
- def __init__(self, job, dev=False):
+ def __init__(self, job):
  super().__init__()
  self.job = job
- self.dev = dev
+ self.id =self.job.output
  self.origin = os.environ.get("CLOUDPSS_API_URL", "https://cloudpss.net/")
  self.__hasOpen = False

- def __receive_legacy(self, id, fr0m):
- """
- 同步方法读取消息流中的数据
- id: 消息流id
- fr0m: 从哪个位置开始读取,如果为0则从头开始读取
- on_open: 连接建立时的回调函数
- on_message: 收到消息时的回调函数
- on_error: 发生错误时的回调函数
- on_close: 连接关闭时的回调函数
- """
- if id is None:
- raise Exception("id is None")
- u = list(urlparse(self.origin))
- head = "wss" if u[0] == "https" else "ws"
-
- path = head + "://" + str(u[1]) + "/api/streams/id/" + id
- if fr0m is not None:
- path = path + "&from=" + str(fr0m)
- logging.info(f"receive data from websocket: {path}")
- ws = websocket.WebSocketApp(
- path,
- on_open=self.__on_open,
- on_message=self.__on_message_legacy,
- on_error=self.__on_error,
- on_close=self.__on_close,
- )
-
- return ws
-
- async def __receive(self, id, fr0m):
- """
- 读取消息流中的数据
- id: 消息流id
- fr0m: 从哪个位置开始读取,如果为0则从头开始读取
- on_open: 连接建立时的回调函数
- on_message: 收到消息时的回调函数
- on_error: 发生错误时的回调函数
- on_close: 连接关闭时的回调函数
- """
- if id is None:
+ def __path(self, from_=None):
+ if self.id is None:
  raise Exception("id is None")
  u = list(urlparse(self.origin))
  head = "wss" if u[0] == "https" else "ws"
-
- path = head + "://" + str(u[1]) + "/api/streams/id/" + id
- if fr0m is not None:
- path = path + "&from=" + str(fr0m)
- logging.info(f"MessageStreamReceiver data from websocket: {path}")
- async for msg in websocket_connect(
- path,
- open_func=self.__on_open,
- ):
- if msg.type == WSMsgType.BINARY:
- decode = self.__on_message(msg.data)
- yield decode
- elif msg.type == WSMsgType.TEXT:
- decode = self.__on_message(msg.data)
- yield decode
- elif msg.type == WSMsgType.CLOSED:
- logging.debug("WebSocket连接已关闭")
- self.__on_close()
-
- break
- elif msg.type == WSMsgType.ERROR:
- logging.debug(f"WebSocket连接发生错误:{msg.data}")
- self.__on_error(msg.data)
- break
- self._status=1
+ path = head + "://" + str(u[1]) + "/api/streams/id/" + self.id
+ if from_ is not None:
+ path = path + "?from=" + str(from_)
+ return path
+
  ###下面是兼容Receiver部分功能实现
  def __on_message_legacy(self, *args, **kwargs):
- if len(args) == 1:
+
+ if type(args[0]) != websocket.WebSocketApp:
  message = args[0]
  else:
  message = args[1]
  return self.__on_message(message)

  def __on_message(self, message):
+
  data = IO.deserialize(message, "ubjson")
+ self.ws.url = self.__path(data["id"])
  msg = IO.deserialize(data["data"], "ubjson")
  self.messages.append(msg)
  if(msg['type']=='terminate'):
@@ -130,9 +71,10 @@ class MessageStreamReceiver(JobReceiver):
  self.messages.append(msg)

  def __on_close(self, *args, **kwargs):
- if len(args)>0:
+ if len(args)>1:
  msg =args[2]
- if msg.startswith("CMS_NO_STREAM_ID:"):
+
+ if msg is not None and msg.startswith("CMS_NO_STREAM_ID:"):
  self._status = 1
  msg = {
  "type": "log",
@@ -145,7 +87,6 @@ class MessageStreamReceiver(JobReceiver):
  self.messages.append(msg)
  return
  logging.debug("MessageStreamReceiver close")
- self._status = 1
  msg = {
  "type": "log",
  "verb": "create",
@@ -156,7 +97,7 @@ class MessageStreamReceiver(JobReceiver):
  },
  }
  self.messages.append(msg)
-
+ self._status = 1

  def __on_open(self,ws, *args, **kwargs):
  self.ws = ws
@@ -173,31 +114,33 @@ class MessageStreamReceiver(JobReceiver):
  def status(self):
  return self._status

- @property
- def end(self):
- return not self._status
+ def waitFor(self,timeOut=sys.maxsize):
+ """
+ 阻塞方法,直到任务完成

- def connect_legacy(self):
+ :params timeOut: 超时时间
+ """
+ start = time.time()
+ while self.status == 0:
+ time.sleep(0)
+ if time.time()-start>timeOut:
+ raise Exception("time out")
+
+
+ def connect(self):
  self._status = 1
- self.ws = self.__receive_legacy(
- self.job.output,
- None,
+ path = self.__path()
+ logging.info(f"receive data from websocket: {path}")
+ self.ws = websocket.WebSocketApp(
+ path,
+ on_open=self.__on_open,
+ on_message=self.__on_message_legacy,
+ on_error=self.__on_error,
+ on_close=self.__on_close,
  )
- thread = threading.Thread(target=self.ws.run_forever, args=(None, None, 6, 3))
+ thread = threading.Thread(target=self.ws.run_forever, kwargs={'ping_interval':60,'ping_timeout':5,'reconnect':True})
  thread.setDaemon(True)
  thread.start()
  while not self.__hasOpen:
- time.sleep(0.2)
-
- async def connect(self):
- self._status = 0
- self.receiver= self.__receive(
- self.job.output,
- None,
- )
- # asyncio.create_task(
- # self.__receive(
- # self.job.output,
- # None
- # )
- # )
+ time.sleep(0)
+
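
As a reference point, a small sketch of how the rewritten receiver might be exercised on its own (assumes an existing Job instance whose output stream id is valid):

receiver = MessageStreamReceiver(job)   # stream id is now taken from job.output
receiver.connect()                      # runs WebSocketApp.run_forever in a daemon thread with ping/reconnect settings
receiver.waitFor(timeOut=300)           # new blocking helper; raises "time out" after 300 s if the stream never terminates
for msg in receiver.messages:
    print(msg.get("type"))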
cloudpss/job/messageStreamSender.py CHANGED
@@ -2,10 +2,6 @@ import asyncio
  import sys, os
  import threading
  from urllib.parse import urlparse
-
- import aiohttp
-
-
  sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

  import websocket
@@ -16,10 +12,9 @@ import logging


  class MessageStreamSender:
- def __init__(self, job, dev=False):
+ def __init__(self, job):
  super().__init__()
  self.job = job
- self.dev = dev
  self.origin = os.environ.get("CLOUDPSS_API_URL", "https://cloudpss.net/")
  self.__hasOpen = False

@@ -56,19 +51,9 @@
  def status(self):
  return self._status

- async def write_async(self, message):
- if self.websocket:
- data = IO.serialize(message, "ubjson", None)
- await self.websocket.send_bytes(data)
- else:
- logging.info("websocket is None")
-
  def write(self, message):
- if self.ws:
- data = IO.serialize(message, "ubjson", None)
- self.ws.send(data,websocket.ABNF.OPCODE_BINARY)
- else:
- asyncio.run(self.write_async(message))
+ data = IO.serialize(message, "ubjson", None)
+ self.ws.send(data,websocket.ABNF.OPCODE_BINARY)

  def connect_legacy(self):
  """
@@ -99,28 +84,6 @@
  time.sleep(0.2)
  return self.ws

- async def receive_data(self):
- if self.websocket:
- data = await self.websocket.receive()
- if data.type == aiohttp.WSMsgType.TEXT:
- self.__on_message(data.data)
- elif data.type == aiohttp.WSMsgType.CLOSED:
- self.__on_close()
- elif data.type == aiohttp.WSMsgType.ERROR:
- self.__on_error(data.data)
- else:
- logging.info("WebSocket connection not established")
-
- async def connect(self):
- self._status = 0
- if self.job.input is None:
- raise Exception("id is None")
- if self.job.input == "00000000-0000-0000-0000-000000000000":
- return
- u = list(urlparse(self.origin))
- head = "wss" if u[0] == "https" else "ws"
+

- path = head + "://" + str(u[1]) + "/api/streams/token/" + self.job.input
- logging.info(f"MessageStreamSender data from websocket: {path}")
- async with aiohttp.ClientSession() as session:
- self.websocket = await session.ws_connect(path)
+
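
Finally, a corresponding sketch for the slimmed-down sender, whose only remaining connection path is connect_legacy (the message payload below is hypothetical):

sender = MessageStreamSender(job)                  # dev flag dropped from the constructor
sender.connect_legacy()                            # synchronous connection path retained by this diff
sender.write({"type": "event", "payload": "..."})  # serialized to ubjson and sent as a binary frame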