IGJSP 0.0.3__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,125 @@
+ {
+     "nbJobs": [
+         0,
+         1,
+         2,
+         3,
+         4
+     ],
+     "nbMchs": [
+         0,
+         1
+     ],
+     "speed": 1,
+     "timeEnergy": [
+         {
+             "jobId": 0,
+             "operations": {
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 39,
+                             "energyCons": 67
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 28,
+                             "energyCons": 75
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 1,
+             "operations": {
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 10,
+                             "energyCons": 90
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 67,
+                             "energyCons": 51
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 2,
+             "operations": {
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 34,
+                             "energyCons": 71
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 84,
+                             "energyCons": 43
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 3,
+             "operations": {
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 45,
+                             "energyCons": 63
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 56,
+                             "energyCons": 57
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 4,
+             "operations": {
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 79,
+                             "energyCons": 45
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 10,
+                             "energyCons": 90
+                         }
+                     ]
+                 }
+             }
+         }
+     ],
+     "minMakespan": 118,
+     "minEnergy": 652,
+     "maxMinMakespan": 334,
+     "maxMinEnergy": 0
+ }
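The added JSON instances share one schema: for each job, an "operations" map keyed by machine id, each entry holding a "speed-scaling" list of {procTime, energyCons} pairs, followed by aggregate fields. A minimal loading sketch follows; the file name instance_5x2.json is hypothetical (the diff does not show the added file's path), and the interpretation of the bounds is an inference: for this 5x2 instance the longest job matches "minMakespan" (118) and the total energy of all operations matches "minEnergy" (652).

import json

# Hypothetical file name; the diff does not show the path of this added instance.
with open("instance_5x2.json") as f:
    inst = json.load(f)

# Sum of operation times per job and total energy over all operations,
# using the single speed level present when "speed" == 1.
job_lengths = {
    job["jobId"]: sum(op["speed-scaling"][0]["procTime"] for op in job["operations"].values())
    for job in inst["timeEnergy"]
}
total_energy = sum(
    op["speed-scaling"][0]["energyCons"]
    for job in inst["timeEnergy"]
    for op in job["operations"].values()
)

print(max(job_lengths.values()), inst["minMakespan"])  # 118 118
print(total_energy, inst["minEnergy"])                 # 652 652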
@@ -0,0 +1,23 @@
+ Number of jobs: 5
+ Number of machines: 2
+ 
+ Processing times:
+ 39 28
+ 10 67
+ 34 84
+ 45 56
+ 79 10
+ 
+ Energy consumption:
+ 67 75
+ 90 51
+ 71 43
+ 63 57
+ 45 90
+ 
+ Machine order:
+ 0 1
+ 1 0
+ 1 0
+ 1 0
+ 0 1
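The plain-text companion files use a small Taillard-like layout: a two-line header, then one row per job for processing times, energy consumption, and machine order. The rows appear to list operations in the machine-order sequence rather than by machine index; for example, job 1 above has order "1 0", so 10 is its time on machine 1 and 67 on machine 0, matching the JSON instance. A parsing sketch, with a hypothetical file name:

import numpy as np

def read_instance(path):
    # Keep only non-empty lines; the file separates sections with blank lines.
    with open(path) as f:
        lines = [ln.strip() for ln in f if ln.strip()]
    n_jobs = int(lines[0].split(":")[1])
    n_mchs = int(lines[1].split(":")[1])

    def block(label):
        start = lines.index(label) + 1
        return np.array([[int(x) for x in lines[start + j].split()] for j in range(n_jobs)])

    proc = block("Processing times:")      # (n_jobs, n_mchs), in operation order
    energy = block("Energy consumption:")  # (n_jobs, n_mchs), in operation order
    order = block("Machine order:")        # machine visited by each successive operation
    return n_jobs, n_mchs, proc, energy, order

# Hypothetical path for the 5x2 file shown above.
print(read_instance("instance_5x2.txt")[2])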
@@ -0,0 +1,289 @@
+ {
+     "nbJobs": [
+         0,
+         1,
+         2,
+         3,
+         4
+     ],
+     "nbMchs": [
+         0,
+         1,
+         2,
+         3,
+         4,
+         5
+     ],
+     "speed": 1,
+     "timeEnergy": [
+         {
+             "jobId": 0,
+             "operations": {
+                 "5": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 10,
+                             "energyCons": 90
+                         }
+                     ]
+                 },
+                 "4": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 79,
+                             "energyCons": 45
+                         }
+                     ]
+                 },
+                 "2": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 56,
+                             "energyCons": 57
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 84,
+                             "energyCons": 43
+                         }
+                     ]
+                 },
+                 "3": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 45,
+                             "energyCons": 63
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 34,
+                             "energyCons": 71
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 1,
+             "operations": {
+                 "4": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 46,
+                             "energyCons": 63
+                         }
+                     ]
+                 },
+                 "2": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 72,
+                             "energyCons": 48
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 42,
+                             "energyCons": 65
+                         }
+                     ]
+                 },
+                 "3": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 28,
+                             "energyCons": 75
+                         }
+                     ]
+                 },
+                 "5": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 32,
+                             "energyCons": 72
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 43,
+                             "energyCons": 65
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 2,
+             "operations": {
+                 "2": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 27,
+                             "energyCons": 76
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 61,
+                             "energyCons": 54
+                         }
+                     ]
+                 },
+                 "4": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 68,
+                             "energyCons": 50
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 50,
+                             "energyCons": 60
+                         }
+                     ]
+                 },
+                 "3": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 72,
+                             "energyCons": 48
+                         }
+                     ]
+                 },
+                 "5": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 60,
+                             "energyCons": 54
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 3,
+             "operations": {
+                 "3": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 31,
+                             "energyCons": 73
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 68,
+                             "energyCons": 50
+                         }
+                     ]
+                 },
+                 "2": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 47,
+                             "energyCons": 62
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 36,
+                             "energyCons": 69
+                         }
+                     ]
+                 },
+                 "4": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 44,
+                             "energyCons": 64
+                         }
+                     ]
+                 },
+                 "5": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 60,
+                             "energyCons": 54
+                         }
+                     ]
+                 }
+             }
+         },
+         {
+             "jobId": 4,
+             "operations": {
+                 "2": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 36,
+                             "energyCons": 69
+                         }
+                     ]
+                 },
+                 "0": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 36,
+                             "energyCons": 69
+                         }
+                     ]
+                 },
+                 "5": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 49,
+                             "energyCons": 61
+                         }
+                     ]
+                 },
+                 "1": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 42,
+                             "energyCons": 65
+                         }
+                     ]
+                 },
+                 "3": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 33,
+                             "energyCons": 71
+                         }
+                     ]
+                 },
+                 "4": {
+                     "speed-scaling": [
+                         {
+                             "procTime": 36,
+                             "energyCons": 69
+                         }
+                     ]
+                 }
+             }
+         }
+     ],
+     "minMakespan": 338,
+     "minEnergy": 1875,
+     "maxMinMakespan": 1089,
+     "maxMinEnergy": 0
+ }
@@ -0,0 +1,23 @@
+ Number of jobs: 5
+ Number of machines: 6
+ 
+ Processing times:
+ 10 79 56 84 45 34
+ 46 72 42 28 32 43
+ 27 61 68 50 72 60
+ 31 68 47 36 44 60
+ 36 36 49 42 33 36
+ 
+ Energy consumption:
+ 90 45 57 43 63 71
+ 63 48 65 75 72 65
+ 76 54 50 60 48 54
+ 73 50 62 69 64 54
+ 69 69 61 65 71 69
+ 
+ Machine order:
+ 5 4 2 0 3 1
+ 4 2 1 3 5 0
+ 2 1 4 0 3 5
+ 3 0 2 1 4 5
+ 2 0 5 1 3 4
IGJSP/generador.py CHANGED
@@ -11,7 +11,6 @@ from pathlib import Path
  import networkx as nx
  import numpy as np
  from scipy.stats import expon, norm, uniform
- from pprint import pprint
 
 
  def f(x):
@@ -50,7 +49,10 @@ class JSP:
 
  self.ProcessingTime = np.zeros((self.numJobs, self.numMchs, self.speed), dtype=int)
  self.EnergyConsumption = np.zeros((self.numJobs, self.numMchs, self.speed), dtype=int)
- self.Orden = np.zeros((self.numJobs, self.numMchs), dtype=int)
+ 
+ 
+ if len(self.Orden) == 0:
+     self.Orden = np.zeros((self.numJobs, self.numMchs), dtype=int)
 
  if self.rddd == 0:
      release_date_tasks = np.array([0] * self.numJobs)
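The guard above means a machine order passed into the constructor is no longer reset to zeros; only an empty Orden gets the default initialization. A rough sketch of the intended effect (constructor keyword names are taken from the wrapper code further down in this diff; the exact call pattern is an assumption):

import numpy as np
from IGJSP.generador import JSP  # module path as shown in this diff

# Caller-supplied machine order for a small 2-job x 3-machine instance.
orden = np.array([[0, 2, 1],
                  [1, 0, 2]])

jsp = JSP(jobs=2, machines=3, Orden=orden)
jsp.fill_random_values(speed=1, rddd=0, distribution="uniform", seed=1234)

# In 0.0.3 Orden was unconditionally re-created as zeros; with the new
# `if len(self.Orden) == 0` guard the supplied order should survive.
print(jsp.Orden)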
@@ -163,56 +165,52 @@ class JSP:
  json.dump(self.JSP, f, indent=4)
 
  def saveDznFile(self, InputDir, OutputDir):
-     indexProblema = OutputDir.split("/")[-2]
-     OutputDir = "/".join(OutputDir.split("/")[:-2])
-     # indexProblema = os.path.basename(os.path.normpath(OutputDir))
+     indexProblema = os.path.basename(os.path.normpath(OutputDir))
      with open(f"{InputDir}", 'rb') as f:
          data: JSP = pickle.load(f)
-     print(self.speed)
-     # for t in [0, 1, 2]:
-     t = data.rddd
-     for s in range(1,self.speed+1):
-         s0, sf, sp = [0,s,1]
-         time = data.ProcessingTime[:, :, s0:sf:sp]
-         energy = data.EnergyConsumption[:, :, s0:sf:sp]
-         precedence = np.full((data.numJobs, data.numMchs), 0)
- 
-         replace_data = {
-             "machines": data.numMchs,
-             "jobs": data.numJobs,
-             "Speed": s,
-             "time": list(time.flatten()),
-             "energy": list(energy.flatten())
-         }
-         if t == 1:
-             replace_data["releaseDate"] = [data.ReleaseDueDate[job, 0] for job in range(data.numJobs)]
-             replace_data["dueDate"] = [data.ReleaseDueDate[job, 1] for job in range(data.numJobs)]
-         elif t == 2:
-             replace_data["releaseDate"] = list(data.ReleaseDueDate[:, :, 0].flatten())
-             replace_data["dueDate"] = list(data.ReleaseDueDate[:, :, 1].flatten())
- 
-         for job in range(data.numJobs):
-             for i, prioridad in enumerate(range(data.numMchs)):
-                 precedence[job, data.Orden[job, prioridad]] = i
-         replace_data["precedence"] = list(precedence.flatten())
- 
-         new_object = data.change_rddd_type(t).select_speeds(list(range(s0, sf, sp)))
-         with open(f"./Minizinc/Types/RD/type{t}.dzn", "r", encoding="utf-8") as file:
-             filedata = file.read()
-         for kk, v in replace_data.items():
-             filedata = filedata.replace("{" + kk + "}", str(v))
- 
-         os.makedirs(f"{OutputDir}/", exist_ok=True)
- 
-         with open(f"{OutputDir}/{indexProblema}-{t}-{s}.dzn", "w+", encoding="utf-8") as new:
-             new.write(filedata)
-         # print(f"{OutputDir}/{indexProblema}")
-         # with open(f"{OutputDir}/{indexProblema}", "wb") as new:
-         # pickle.dump(new_object, new)
+     for t in [0, 1, 2]:
+         for s0, sf, sp, s in [(2, 3, 1, 1), (0, 5, 2, 3), (0, 5, 1, 5)]:
+             time = data.ProcessingTime[:, :, s0:sf:sp]
+             energy = data.EnergyConsumption[:, :, s0:sf:sp]
+             precedence = np.full((data.numJobs, data.numMchs), 0)
+ 
+             replace_data = {
+                 "machines": data.numMchs,
+                 "jobs": data.numJobs,
+                 "Speed": s,
+                 "time": list(time.flatten()),
+                 "energy": list(energy.flatten())
+             }
+ 
+             if t == 1:
+                 replace_data["releaseDate"] = [data.ReleaseDueDate[job, :, 0].min() for job in range(data.numJobs)]
+                 replace_data["dueDate"] = [data.ReleaseDueDate[job, :, 0].max() for job in range(data.numJobs)]
+             elif t == 2:
+                 replace_data["releaseDate"] = list(data.ReleaseDueDate[:, :, 0].flatten())
+                 replace_data["dueDate"] = list(data.ReleaseDueDate[:, :, 1].flatten())
+ 
+             for job in range(data.numJobs):
+                 for i, prioridad in enumerate(range(data.numMchs)):
+                     precedence[job, data.Orden[job, prioridad]] = i
+             replace_data["precedence"] = list(precedence.flatten())
+ 
+             new_object = data.change_rddd_type(t).select_speeds(list(range(s0, sf, sp)))
+ 
+             with open(f"./Minizinc/Types/RD/type{t}.dzn", "r", encoding="utf-8") as file:
+                 filedata = file.read()
+             for kk, v in replace_data.items():
+                 filedata = filedata.replace("{" + kk + "}", str(v))
+ 
+             os.makedirs(f"{OutputDir}", exist_ok=True)
+ 
+             with open(f"{OutputDir}/{indexProblema}-{t}-{s}.dzn", "w+", encoding="utf-8") as new:
+                 new.write(filedata)
+ 
+             with open(f"{OutputDir}/{indexProblema}", "wb") as new:
+                 pickle.dump(new_object, new)
 
  def saveTaillardStandardFile(self, path):
-     os.makedirs("/".join(path.split("/")[:-1]),exist_ok=True)
-     with open(path, 'w+') as f:
+     with open(path, 'w') as f:
          # Escribir el encabezado con el número de trabajos y máquinas
          f.write(f"Number of jobs: {self.numJobs}\n")
          f.write(f"Number of machines: {self.numMchs}\n\n")
@@ -253,7 +251,9 @@ class JSP:
  return self
  new_object = copy.deepcopy(self)
  new_object.speed = len(speeds)
+ 
  new_object.ProcessingTime = new_object.ProcessingTime[:, :, speeds]
+ 
  new_object.EnergyConsumption = new_object.EnergyConsumption[:, :, speeds]
  new_object.generate_maxmin_objective_values()
  return new_object
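select_speeds keeps only the requested speed levels by slicing the third axis of the (jobs, machines, speed) tensors and resetting speed to len(speeds). A small stand-alone illustration of that slicing, on a toy array rather than a real instance:

import numpy as np

# Toy stand-in for ProcessingTime with 2 jobs, 3 machines, 5 speed levels.
proc = np.arange(2 * 3 * 5).reshape(2, 3, 5)

# The (s0, sf, sp) = (0, 5, 2) case used by the rewritten saveDznFile -> speeds [0, 2, 4].
speeds = list(range(0, 5, 2))
subset = proc[:, :, speeds]   # same fancy indexing select_speeds applies

print(subset.shape)           # (2, 3, 3); new_object.speed would be len(speeds) == 3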
@@ -444,33 +444,14 @@ class JSP:
  # jsp.fill_random_values(speed=3, rddd=2, distribution="uniform", seed=1234)
  # jsp.saveTaillardStandardFile("./output_taillard.txt")
 
- class Generator:
-     def __init__( self,json = False, dzn = False, taillard = False, savepath="./"):
-         self.json = json
-         self.dzn = dzn
-         self.taillard = taillard
-         self.savepath = savepath
- 
-     def generate_new_instance(self, jobs = 10, machines = 4, ProcessingTime=[], EnergyConsumption=[], Orden=[],speed = 1, ReleaseDateDueDate = 0, distribution = "uniform" , seed = 0, tpm=[]):
-         jsp = JSP(jobs=jobs, machines=machines,ProcessingTime=ProcessingTime,EnergyConsumption=EnergyConsumption,Orden=Orden)
-         jsp.fill_random_values(speed = speed, rddd = ReleaseDateDueDate, distribution = distribution, seed = seed,tpm = tpm)
- 
-         if not (self.json or self.dzn or self.taillard): return jsp
- 
-         j = str(jobs)
-         m = str(machines)
-         jm_path = str(j)+"_"+str(m)+"/"
- 
-         i = seed
- 
-         if self.json:
-             jsp.saveJsonFile(f"{self.savepath}/JSON/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.json")
-             pass
-         if self.dzn:
-             pkl_path = f"{self.savepath}/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.pkl"
-             jsp.savePythonFile(pkl_path)
-             jsp.saveDznFile(pkl_path,f"{self.savepath}/DZN/"+jm_path)#f"{j}x{m}_{i}")
-             os.remove(pkl_path)
-         if self.taillard:
-             jsp.saveTaillardStandardFile(f"{self.savepath}/TAILLARD/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.txt")
-         return jsp
+ # class generador:
+ #     def __init__( self,jobs, machines, ProcessingTime=[], EnergyConsumption=[], ReleaseDateDueDate=[], Orden=[],speed = 1, rddd = 0, distribution = "uniform" , seed = 1, tpm=[],json=False,dzn=False,taillard=False ,savepath = ""):
+ #         jsp = JSP(jobs=jobs, machines=machines,ProcessingTime=ProcessingTime,EnergyConsumption=EnergyConsumption,ReleaseDateDueDate=ReleaseDateDueDate,Orden=Orden)
+ #         jsp.fill_random_values(speed=speed, rddd=rddd, distribution=distribution, seed=seed,tpm=tpm)
+ #         if json:
+ #             jsp.saveJsonFile(f"{savepath}/"+jm_path+f"{j}x{m}_{i}.json")
+ 
+ #         if dzn:
+ #             jsp.saveTaillardStandardFile("./output_taillard.txt")
+ #         if taillard:
+ #             jsp.saveTaillardStandardFile("./output_taillard.txt")
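With the Generator wrapper removed (and the old generador stub left commented out), instances are produced by driving JSP directly. A rough end-to-end sketch assembled from the calls visible in this diff; the paths and parameter values are illustrative, and target directories should already exist because saveTaillardStandardFile no longer calls os.makedirs in 0.0.4:

from IGJSP.generador import JSP  # module path as shown in this diff

jsp = JSP(jobs=10, machines=4)
# Five speed levels and rddd=2, so the rewritten saveDznFile's fixed speed slices
# (which run up to index 5) and its t == 1 / t == 2 branches all have data to use.
jsp.fill_random_values(speed=5, rddd=2, distribution="uniform", seed=0)

jsp.saveJsonFile("./JSON/10x4_0.json")
jsp.saveTaillardStandardFile("./TAILLARD/10x4_0.txt")

# As the removed wrapper did: pickle the instance, then point saveDznFile at the
# pickle plus an output directory whose basename becomes the .dzn file prefix.
jsp.savePythonFile("./10x4_0.pkl")
jsp.saveDznFile("./10x4_0.pkl", "./DZN/10_4")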