IGJSP 0.0.10__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,11 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = {releaseDate};
10
+ dueDate = {dueDate};
11
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,11 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
+ dueDate = array2d(JOBS,MACHINES,{dueDate});
11
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,8 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
@@ -0,0 +1,10 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = {releaseDate};
10
+ dueDate = {dueDate};
@@ -0,0 +1,10 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
+ dueDate = array2d(JOBS,MACHINES,{dueDate});
IGJSP/generador.py CHANGED
@@ -1,22 +1,20 @@
1
1
  import copy
2
+ import datetime
2
3
  import json
3
- import multiprocessing
4
4
  import os
5
5
  import pickle
6
- import random
7
- from concurrent.futures import ProcessPoolExecutor, as_completed
8
- from datetime import datetime
9
- from itertools import combinations, product
10
- from pathlib import Path
6
+ import re
7
+ from itertools import combinations
8
+
11
9
  import networkx as nx
12
10
  import numpy as np
13
- from scipy.stats import expon, norm, uniform
11
+
12
+ np.set_printoptions(linewidth=np.inf, threshold=np.inf, formatter={"int": lambda x: f"{x}"})
13
+
14
14
  from pprint import pprint
15
- import sys
16
15
 
17
- import importlib_resources
18
- from importlib.resources import read_text
19
- minizinc_files = importlib_resources.files("IGJSP")
16
+ from scipy.stats import expon, norm, uniform
17
+
20
18
 
21
19
  def f(x):
22
20
  return int(np.exp(-int(x)/100)*100)
@@ -27,6 +25,13 @@ def g(x):
27
25
  def t(c):
28
26
  return 4.0704 * np.log(2) / np.log(1 + (c* 2.5093)**3)
29
27
 
28
+
29
+ #################################################################################
30
+ # #
31
+ # JSP #
32
+ # #
33
+ #################################################################################
34
+
30
35
  class JSP:
31
36
  def __init__(self, jobs, machines, ProcessingTime=np.array([]), EnergyConsumption=np.array([]), ReleaseDateDueDate=np.array([]), Orden=np.array([])) -> None:
32
37
  self.numJobs = jobs
@@ -41,14 +46,14 @@ class JSP:
41
46
  np.random.seed(seed)
42
47
  self.rddd = rddd
43
48
  self.speed = speed
44
- if not tpm:
49
+ if not tpm or len(tpm) != self.numMchs:
45
50
  if distribution == "uniform":
46
51
  tpm = np.random.uniform(10, 100, self.numMchs)
47
52
  elif distribution == "normal":
48
53
  tpm = [max(10, data) for data in np.random.normal(50, 20, self.numMchs)]
49
54
  else:
50
55
  tpm = expon(loc=10, scale=20).rvs(self.numMchs)
51
-
56
+
52
57
  energyPer, timePer = self._particionate_speed_space(speed)
53
58
  self._generate_standar_operation_cost(distribution)
54
59
 
@@ -71,7 +76,6 @@ class JSP:
71
76
 
72
77
  self._jobToMachine(release_date_tasks, timePer, distribution)
73
78
  self.generate_maxmin_objective_values()
74
- self.vectorization()
75
79
 
76
80
  def _particionate_speed_space(self, speed):
77
81
  energyPer = np.linspace(0.5, 3, speed) if speed > 1 else [1]
@@ -121,6 +125,7 @@ class JSP:
121
125
  return expon(loc=duration, scale=duration/2).rvs()
122
126
 
123
127
  def savePythonFile(self, path):
128
+ os.makedirs(os.path.dirname(path), exist_ok=True)
124
129
  with open(path, 'wb') as f:
125
130
  pickle.dump(self, f)
126
131
 
@@ -139,19 +144,19 @@ class JSP:
139
144
  for job in range(self.numJobs):
140
145
  new = {
141
146
  "jobId": job,
142
- "operations": {}
147
+ "operations": {},
143
148
  }
149
+
144
150
  for machine in self.Orden[job]:
145
151
  machine = int(machine)
146
- new["operations"][machine] = {
147
- "speed-scaling": [
148
- {
149
- "procTime": int(proc),
150
- "energyCons": int(energy)
151
- }
152
- for proc, energy in zip(self.ProcessingTime[job, machine], self.EnergyConsumption[job, machine])
153
- ]
154
- }
152
+ new["operations"][machine] = {"speed-scaling" :
153
+ [
154
+ {"procTime" : int(proc),
155
+ "energyCons" : int(energy)
156
+ }
157
+ for proc, energy in zip(self.ProcessingTime[job, machine],self.EnergyConsumption[job, machine])
158
+ ]
159
+ }
155
160
  if self.rddd == 2:
156
161
  new["operations"][machine]["release-date"] = int(self.ReleaseDueDate[job][machine][0])
157
162
  new["operations"][machine]["due-date"] = int(self.ReleaseDueDate[job][machine][1])
@@ -164,8 +169,28 @@ class JSP:
164
169
  self.JSP["timeEnergy"].append(new)
165
170
 
166
171
  os.makedirs(os.path.dirname(path), exist_ok=True)
167
- with open(path, 'w+' ) as f:
168
- json.dump(self.JSP, f, indent=4)
172
+ with open(path, 'w+') as f:
173
+ # Generate the JSON with standard indentation
174
+ json_str = json.dumps(self.JSP, indent=4)
175
+
176
+ # Selective compression: only arrays of plain numbers
177
+ def compress_simple_arrays(match):
178
+ # Compress only if the array contains just numbers and commas
179
+ content = match.group(1)
180
+ if re.match(r'^(\s*\d+\s*,)*\s*\d+\s*$', content):
181
+ # Remove spaces and line breaks
182
+ return '[' + re.sub(r'\s+', '', content) + ']'
183
+ return match.group(0) # Keep as-is if the array is not simple
184
+
185
+ # Find arrays that can be compressed
186
+ json_str = re.sub(
187
+ r'\[([\s\S]*?)\]',
188
+ compress_simple_arrays,
189
+ json_str,
190
+ flags=re.DOTALL
191
+ )
192
+
193
+ f.write(json_str)
169
194
 
170
195
  def saveDznFile(self, InputDir, OutputDir):
171
196
  indexProblema = OutputDir.split("/")[-2]
@@ -186,35 +211,30 @@ class JSP:
186
211
  "machines": data.numMchs,
187
212
  "jobs": data.numJobs,
188
213
  "Speed": s,
189
- "time": list(map(int,time.flatten())),
190
- "energy": list(map(int,energy.flatten()))
214
+ "time": str(time.flatten()).replace(" ", ", "),
215
+ "energy": str(energy.flatten()).replace(" ", ", ")
191
216
  }
217
+
192
218
  if t == 1:
193
- replace_data["releaseDate"] = [data.ReleaseDueDate[job, 0] for job in range(data.numJobs)]
194
- replace_data["dueDate"] = [data.ReleaseDueDate[job, 1] for job in range(data.numJobs)]
219
+ replace_data["releaseDate"] = str([int(data.ReleaseDueDate[job, 0]) for job in range(data.numJobs)]).replace(", ", " ")
220
+ replace_data["dueDate"] = str([int(data.ReleaseDueDate[job, 1]) for job in range(data.numJobs)]).replace(", ", " ")
195
221
  elif t == 2:
196
- replace_data["releaseDate"] = list(data.ReleaseDueDate[:, :, 0].flatten())
197
- replace_data["dueDate"] = list(data.ReleaseDueDate[:, :, 1].flatten())
222
+ replace_data["releaseDate"] = str(data.ReleaseDueDate[:, :, 0].flatten()).replace(", ", " ")
223
+ replace_data["dueDate"] = str(data.ReleaseDueDate[:, :, 1].flatten()).replace(", ", " ")
198
224
 
199
225
  for job in range(data.numJobs):
200
226
  for i, prioridad in enumerate(range(data.numMchs)):
201
227
  precedence[job, data.Orden[job, prioridad]] = i
202
- replace_data["precedence"] = list(map(int,precedence.flatten()))
203
228
 
204
- # new_object = data.change_rddd_type(t).select_speeds(list(range(s0, sf, sp)))
205
- with open(minizinc_files.joinpath("Minizinc/Types/RD", f"type{t}.dzn"), "r", encoding="utf-8") as file:
206
- filedata = file.read()
207
- # filedata = file
208
- for kk, v in replace_data.items():
209
- filedata = filedata.replace("{" + kk + "}", str(v))
229
+ replace_data["precedence"] = str(precedence.flatten()).replace(" ", ",")
210
230
 
231
+ with open(f"./Minizinc/Types/RD/JSP/type{t}.dzn", "r", encoding="utf-8") as file:
232
+ filedata = file.read()
233
+ for k, v in replace_data.items():
234
+ filedata = filedata.replace("{" + k + "}", str(v))
211
235
  os.makedirs(f"{OutputDir}/", exist_ok=True)
212
-
213
236
  with open(f"{OutputDir}/{indexProblema}-{t}-{s}.dzn", "w+", encoding="utf-8") as new:
214
237
  new.write(filedata)
215
- # print(f"{OutputDir}/{indexProblema}")
216
- # with open(f"{OutputDir}/{indexProblema}", "wb") as new:
217
- # pickle.dump(new_object, new)
218
238
 
219
239
  def saveTaillardStandardFile(self, path):
220
240
  os.makedirs("/".join(path.split("/")[:-1]),exist_ok=True)
@@ -253,7 +273,6 @@ class JSP:
253
273
  f.write(f"{machine} ")
254
274
  f.write("\n")
255
275
 
256
-
257
276
  def select_speeds(self, speeds):
258
277
  if self.speed == len(speeds):
259
278
  return self
@@ -283,16 +302,16 @@ class JSP:
283
302
  return new_object
284
303
 
285
304
  def generate_maxmin_objective_values(self):
286
- self.max_makespan = sum([max(self.ProcessingTime[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
305
+ max_makespan = sum([max(self.ProcessingTime[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
287
306
  self.min_makespan = max([sum([min(self.ProcessingTime[job, machine, :]) for machine in range(self.numMchs)]) for job in range(self.numJobs)])
288
- self.max_min_makespan = self.max_makespan - self.min_makespan
289
- self.max_energy = sum([max(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
307
+ self.max_min_makespan = max_makespan - self.min_makespan
308
+ max_energy = sum([max(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
290
309
  self.min_energy = sum([min(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
291
- self.max_min_energy = self.max_energy - self.min_energy
310
+ self.max_min_energy = max_energy - self.min_energy
292
311
  if self.rddd == 1:
293
- self.max_tardiness = sum([max(0, self.max_makespan - self.ReleaseDueDate[job, 1]) for job in range(self.numJobs)])
312
+ self.max_tardiness = sum([max(0, max_makespan - self.ReleaseDueDate[job, 1]) for job in range(self.numJobs)])
294
313
  elif self.rddd == 2:
295
- self.max_tardiness = np.sum([max(0, np.int64(self.max_makespan - self.ReleaseDueDate[job, machine, 1])) for job in range(self.numJobs) for machine in range(self.numMchs)])
314
+ self.max_tardiness = np.sum([max(0, np.int64(max_makespan - self.ReleaseDueDate[job, machine, 1])) for job in range(self.numJobs) for machine in range(self.numMchs)])
296
315
 
297
316
  def norm_makespan(self, makespan):
298
317
  return (makespan - self.min_makespan) / self.max_min_makespan
@@ -394,86 +413,7 @@ class JSP:
394
413
 
395
414
  def generate_schedule_image(self, schedule):
396
415
  pass
397
-
398
- def vectorization(self):
399
- vectorization = {}
400
- # Caracteristicas básicas
401
- vectorization["jobs"] = self.numJobs
402
- vectorization["machines"] = self.numMchs
403
- vectorization["rddd"] = self.rddd
404
- vectorization["speed"] = self.speed
405
- vectorization["max_makespan"] = self.max_makespan
406
- vectorization["min_makespan"] = self.min_makespan
407
- vectorization["max_sum_energy"] = self.max_energy
408
- vectorization["min_sum_energy"] = self.min_energy
409
- vectorization["max_tardiness"] = self.max_tardiness if self.rddd != 0 else 0
410
- vectorization["min_window"] = 0
411
- vectorization["max_window"] = 0
412
- vectorization["mean_window"] = 0
413
- vectorization["overlap"] = 0
414
-
415
- # Caracteristicas complejas
416
- if self.rddd == 0:
417
- vectorization["min_window"] = -1
418
- vectorization["max_window"] = -1
419
- vectorization["mean_window"] = -1
420
- vectorization["overlap"] = -1
421
- else:
422
- if self.rddd == 1:
423
- # Ventana de cada trabajo
424
- for job in range(self.numJobs):
425
- tproc_min = np.sum(np.min(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
426
- tproc_max = np.sum(np.max(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
427
- tproc_mean = np.sum(np.mean(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
428
- window = self.ReleaseDueDate[job,1] - self.ReleaseDueDate[job,0]
429
- vectorization["min_window"] += window / tproc_max
430
- vectorization["max_window"] += window / tproc_min
431
- vectorization["mean_window"] += window / tproc_mean
432
- vectorization["min_window"] = vectorization["min_window"] / self.numJobs
433
- vectorization["max_window"] = vectorization["max_window"] / self.numJobs
434
- vectorization["mean_window"] = vectorization["mean_window"] / self.numJobs
435
- # Overlap entre trabajos
436
- for job in range(self.numJobs):
437
- for job2 in range(job + 1, self.numJobs):
438
- diff = min(self.ReleaseDueDate[job,1],self.ReleaseDueDate[job2,1])-max(self.ReleaseDueDate[job,0], self.ReleaseDueDate[job2,0])
439
- if diff > 0:
440
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job,1] - self.ReleaseDueDate[job,0])
441
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job2,1] - self.ReleaseDueDate[job2,0])
442
- vectorization["overlap"] = vectorization["overlap"] / (self.numJobs * (self.numJobs - 1))
443
- else:
444
- # Ventana de cada operacion
445
- for job in range(self.numJobs):
446
- for machine in range(self.numMchs):
447
- tproc_min = np.min(self.ProcessingTime[job,machine,:])
448
- tproc_max = np.max(self.ProcessingTime[job,machine,:])
449
- tproc_mean = np.mean(self.ProcessingTime[job,machine,:])
450
- window = self.ReleaseDueDate[job,machine,1] - self.ReleaseDueDate[job,machine,0]
451
- vectorization["min_window"] += window / tproc_max
452
- vectorization["max_window"] += window / tproc_min
453
- vectorization["mean_window"] += window / tproc_mean
454
- vectorization["min_window"] = vectorization["min_window"] / (self.numJobs * self.numMchs)
455
- vectorization["max_window"] = vectorization["max_window"] / (self.numJobs * self.numMchs)
456
- vectorization["mean_window"] = vectorization["mean_window"] / (self.numJobs * self.numMchs)
457
- # Overlap entre operaciones
458
- for job1 in range(self.numJobs):
459
- for machine1 in range(self.numMchs):
460
- for job2 in range(job1 + 1, self.numJobs):
461
- diff = min(self.ReleaseDueDate[job1,machine1,1],self.ReleaseDueDate[job2,machine1,1])-max(self.ReleaseDueDate[job1,machine1,0], self.ReleaseDueDate[job2,machine1,0])
462
- if diff > 0:
463
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job1,machine1,1] - self.ReleaseDueDate[job1,machine1,0])
464
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job2,machine1,1] - self.ReleaseDueDate[job2,machine1,0])
465
- vectorization["overlap"] = vectorization["overlap"] / (self.numJobs * (self.numJobs - 1) * self.numMchs)
466
- # Estadísticos de los datos
467
- vectorization["max_processing_time_value"] = np.max(self.ProcessingTime)
468
- vectorization["min_processing_time_value"] = np.min(self.ProcessingTime)
469
- vectorization["mean_processing_time_value"] = np.mean(self.ProcessingTime)
470
-
471
- vectorization["max_energy_value"] = np.max(self.ProcessingTime)
472
- vectorization["min_energy_value"] = np.min(self.ProcessingTime)
473
- vectorization["mean_energy_value"] = np.mean(self.ProcessingTime)
474
- self.features = vectorization
475
- return vectorization
476
-
416
+
477
417
  def disjuntive_graph(self):
478
418
  vertex = list(range(self.numJobs * self.numMchs + 2))
479
419
  A = {v: [] for v in vertex}
@@ -524,36 +464,258 @@ class JSP:
524
464
  if operation > 0 and operation < self.numMchs - 1:
525
465
  graph.add_edge((job * self.numMchs + operation - 1, job * self.numMchs + operation))
526
466
 
527
- # if __name__ == "__main__":
528
- # jsp = JSP(jobs=5, machines=5)
529
- # jsp.fill_random_values(speed=3, rddd=2, distribution="uniform", seed=1234)
530
- # jsp.saveTaillardStandardFile("./output_taillard.txt")
467
+
468
+
469
+ #################################################################################
470
+ # #
471
+ # FJSP #
472
+ # #
473
+ #################################################################################
474
+
475
+
476
+ class FJSP(JSP):
477
+ def __init__(self, jobs, machines, ProcessingTime=np.array([]), EnergyConsumption=np.array([]), ReleaseDateDueDate=np.array([]), Orden=np.array([]), AvailableMachines = np.array([])) -> None:
478
+
479
+ super().__init__(jobs, machines, ProcessingTime, EnergyConsumption, ReleaseDateDueDate, Orden)
480
+ self.available_machines = AvailableMachines
481
+
482
+ def fill_random_values(self, speed, rddd, distribution, seed, tpm=[]):
483
+
484
+ super().fill_random_values(speed, rddd, distribution, seed, tpm)
485
+
486
+ self.available_machines = np.random.choice([0, 1], size=(self.numJobs, self.numMchs))
487
+
488
+ # Ensure each job (row) has at least one available machine
489
+ for job in range(self.numJobs):
490
+ if np.sum(self.available_machines[job, :]) == 0:
491
+ columna_aleatoria = np.random.randint(0, self.numMchs)
492
+ self.available_machines[job, columna_aleatoria] = 1
493
+
494
+ # def savePythonFile(self, path):
495
+ # with open(path, 'wb') as f:
496
+ # pickle.dump(self, f)
497
+
498
+ def saveJsonFile(self, path):
499
+ self.JSP = {
500
+ "nbJobs": list(range(self.numJobs)),
501
+ "nbMchs": list(range(self.numMchs)),
502
+ "speed": self.speed,
503
+ "timeEnergy": [],
504
+ "minMakespan": int(self.min_makespan),
505
+ "minEnergy": int(self.min_energy),
506
+ "maxMinMakespan": int(self.max_min_makespan),
507
+ "maxMinEnergy": int(self.max_min_energy)
508
+ }
509
+
510
+ for job in range(self.numJobs):
511
+ new = {
512
+ "jobId": job,
513
+ "operations": {},
514
+ "available_machines": self.available_machines[job, : ].tolist()
515
+ }
516
+
517
+ #new["available_machines_prueba"] = self.available_machines[job, self.Orden[job] ].tolist() #for i in self.Orden[job]]
518
+
519
+ for machine in self.Orden[job]:
520
+ machine = int(machine)
521
+ new["operations"][machine] = {"speed-scaling" :
522
+ [
523
+ {"procTime" : int(proc),
524
+ "energyCons" : int(energy)
525
+ }
526
+ for proc, energy in zip(self.ProcessingTime[job, machine],self.EnergyConsumption[job, machine])
527
+ ]
528
+ }
529
+ if self.rddd == 2:
530
+ new["operations"][machine]["release-date"] = int(self.ReleaseDueDate[job][machine][0])
531
+ new["operations"][machine]["due-date"] = int(self.ReleaseDueDate[job][machine][1])
532
+ if self.rddd == 1:
533
+ new["release-date"] = int(self.ReleaseDueDate[job][0])
534
+ new["due-date"] = int(self.ReleaseDueDate[job][1])
535
+ if self.rddd == 2:
536
+ new["release-date"] = int(min(self.ReleaseDueDate[job, :, 0]))
537
+ new["due-date"] = int(max(self.ReleaseDueDate[job, :, 1]))
538
+ self.JSP["timeEnergy"].append(new)
539
+
540
+ os.makedirs(os.path.dirname(path), exist_ok=True)
541
+ with open(path, 'w+') as f:
542
+ # Generate the JSON with standard indentation
543
+ json_str = json.dumps(self.JSP, indent=4)
544
+
545
+ # Selective compression: only arrays of plain numbers
546
+ def compress_simple_arrays(match):
547
+ # Compress only if the array contains just numbers and commas
548
+ content = match.group(1)
549
+ if re.match(r'^(\s*\d+\s*,)*\s*\d+\s*$', content):
550
+ # Remove spaces and line breaks
551
+ return '[' + re.sub(r'\s+', '', content) + ']'
552
+ return match.group(0) # Keep as-is if the array is not simple
553
+
554
+ # Find arrays that can be compressed
555
+ json_str = re.sub(
556
+ r'\[([\s\S]*?)\]',
557
+ compress_simple_arrays,
558
+ json_str,
559
+ flags=re.DOTALL
560
+ )
561
+
562
+ f.write(json_str)
563
+
564
+ def saveDznFile(self, InputDir, OutputDir):
565
+ indexProblema = OutputDir.split("/")[-2]
566
+ OutputDir = "/".join(OutputDir.split("/")[:-2])
567
+ # indexProblema = os.path.basename(os.path.normpath(OutputDir))
568
+ with open(f"{InputDir}", 'rb') as f:
569
+ data: FJSP = pickle.load(f)
570
+ # print(self.speed)
571
+ # for t in [0, 1, 2]:
572
+ t = data.rddd
573
+ for s in range(1,self.speed+1):
574
+ s0, sf, sp = [0,s,1]
575
+ time = data.ProcessingTime[:, :, s0:sf:sp]
576
+ energy = data.EnergyConsumption[:, :, s0:sf:sp]
577
+ precedence = np.full((data.numJobs, data.numMchs), 0)
578
+
579
+ replace_data = {
580
+ "machines": data.numMchs,
581
+ "jobs": data.numJobs,
582
+ "Speed": s,
583
+ "time": str(time.flatten()).replace(" ", ", "),
584
+ "energy": str(energy.flatten()).replace(" ", ", ")
585
+ }
586
+ if t == 1:
587
+ replace_data["releaseDate"] = str([int(data.ReleaseDueDate[job, 0]) for job in range(data.numJobs)]).replace(" ", ",")
588
+ replace_data["dueDate"] = str([int(data.ReleaseDueDate[job, 1]) for job in range(data.numJobs)]).replace(" ", ",")
589
+ elif t == 2:
590
+ replace_data["releaseDate"] = str(data.ReleaseDueDate[:, :, 0].flatten()).replace(" ", ",")
591
+ replace_data["dueDate"] = str(data.ReleaseDueDate[:, :, 1].flatten()).replace(" ", ",")
592
+
593
+ for job in range(data.numJobs):
594
+ for i, prioridad in enumerate(range(data.numMchs)):
595
+ precedence[job, data.Orden[job, prioridad]] = i
596
+ replace_data["precedence"] = str(precedence.flatten()).replace(" ", ",")
597
+
598
+ replace_data["available_machines"] = str(data.available_machines.flatten()).replace(" ", ", ")
599
+
600
+ with open(f"./Minizinc/Types/RD/FJSP/type{t}.dzn", "r", encoding="utf-8") as file:
601
+ filedata = file.read()
602
+ for kk, v in replace_data.items():
603
+ filedata = filedata.replace("{" + kk + "}", str(v))
604
+
605
+ os.makedirs(f"{OutputDir}/", exist_ok=True)
606
+
607
+ with open(f"{OutputDir}/{indexProblema}-{t}-{s}.dzn", "w+", encoding="utf-8") as new:
608
+ new.write(filedata)
609
+ # print(f"{OutputDir}/{indexProblema}")
610
+ # with open(f"{OutputDir}/{indexProblema}", "wb") as new:
611
+ # pickle.dump(new_object, new)
612
+
613
+ def saveTaillardStandardFile(self, path):
614
+ os.makedirs("/".join(path.split("/")[:-1]),exist_ok=True)
615
+ with open(path, 'w+') as f:
616
+ # Write the header with the number of jobs and machines
617
+ f.write(f"Number of jobs: {self.numJobs}\n")
618
+ f.write(f"Number of machines: {self.numMchs}\n\n")
619
+
620
+ # Escribir la matriz de tiempos de procesamiento
621
+ f.write("Processing times:\n")
622
+ for job in range(self.numJobs):
623
+ # Collect all the processing times of this row
624
+ tiempos = []
625
+ for machine_index in range(self.numMchs):
626
+ machine = self.Orden[job, machine_index]
627
+ processing_time = self.ProcessingTime[job, machine, 0]
628
+ tiempos.append(str(processing_time))
629
+
630
+ # Join the times with commas and write them
631
+ linea = ", ".join(tiempos)
632
+ f.write(linea + "\n")
633
+
634
+ f.write("\n")
635
+
636
+ # Write the energy-consumption matrix
637
+ f.write("Energy consumption:\n")
638
+ for job in range(self.numJobs):
639
+ consumos = []
640
+ for machine_index in range(self.numMchs):
641
+ machine = self.Orden[job, machine_index]
642
+ energy_consumption = self.EnergyConsumption[job, machine, 0]
643
+ consumos.append(str(energy_consumption))
644
+ f.write(", ".join(consumos) + "\n")
645
+
646
+ f.write("\n")
647
+
648
+ # Write the machine order for each job
649
+ f.write("Machine order:\n")
650
+ for job in range(self.numJobs):
651
+ maquinas = []
652
+ for machine_index in range(self.numMchs):
653
+ machine = self.Orden[job, machine_index]
654
+ maquinas.append(str(machine))
655
+ f.write(", ".join(maquinas) + "\n")
656
+
657
+ f.write("\n")
658
+
659
+ f.write("Available machines:\n")
660
+ for job in range(self.numJobs):
661
+ disponibles = []
662
+ for machine_index in range(self.numMchs):
663
+ disponibles.append(str(self.available_machines[job, machine_index]))
664
+ f.write(", ".join(disponibles) + "\n")
665
+
666
+
531
667
 
532
668
  class Generator:
533
- def __init__( self,json = False, dzn = False, taillard = False, savepath="./"):
669
+ def __init__(self, json = False, dzn = False, taillard = False, savepath = "./", single_folder_output = False):
534
670
  self.json = json
535
671
  self.dzn = dzn
536
672
  self.taillard = taillard
537
673
  self.savepath = savepath
674
+ self.single_folder_output = single_folder_output
538
675
 
539
- def generate_new_instance(self, jobs = 10, machines = 4,speed = 1, ReleaseDateDueDate = 0, distribution = "uniform" , seed = 0, tpm=[]):
540
- jsp = JSP(jobs=jobs, machines=machines)
541
- jsp.fill_random_values(speed = speed, rddd = ReleaseDateDueDate, distribution = distribution, seed = seed,tpm = tpm)
542
- if not (self.json or self.dzn or self.taillard): return jsp
543
-
544
- j = str(jobs)
545
- m = str(machines)
546
- jm_path = str(j)+"_"+str(m)+"/"
547
-
548
- i = seed
549
-
550
- if self.json:
551
- jsp.saveJsonFile(f"{self.savepath}/JSON/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.json")
552
- if self.dzn:
553
- pkl_path = f"{self.savepath}/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.pkl"
554
- jsp.savePythonFile(pkl_path)
555
- jsp.saveDznFile(pkl_path,f"{self.savepath}/DZN/"+jm_path)#f"{j}x{m}_{i}")
556
- os.remove(pkl_path)
557
- if self.taillard:
558
- jsp.saveTaillardStandardFile(f"{self.savepath}/TAILLARD/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.txt")
559
- return jsp
676
+ def generate_new_instance(self, jobs = 10, machines = 4, speed = 1, ReleaseDateDueDate = 0, distribution = "uniform" , seed = 0, tpm=[], instance_type = "JSP", size = 1):
677
+
678
+ match instance_type:
679
+ case "JSP":
680
+ jsp_instance = JSP(jobs = jobs, machines = machines)
681
+ case "FJSP":
682
+ jsp_instance = FJSP(jobs = jobs, machines = machines)
683
+
684
+ for index in range(1, size + 1):
685
+
686
+ jsp_instance.fill_random_values(speed = speed, rddd = ReleaseDateDueDate, distribution = distribution, seed = seed,tpm = tpm)
687
+
688
+ # Determinar el nombre de salida basado en `outputName` y los parámetros actuales
689
+ problem_path = self.savepath.format(size = size, jobs =jobs, machines = machines, release_due_date = ReleaseDateDueDate,
690
+ speed_scaling = speed, distribution = distribution, seed=seed)
691
+
692
+ if not (self.json or self.dzn or self.taillard): return jsp_instance
693
+
694
+ j = str(jobs)
695
+ m = str(machines)
696
+ jm_path = str(j)+"_"+str(m)+"/"
697
+
698
+ i = seed
699
+
700
+ if self.single_folder_output:
701
+ if self.json:
702
+ jsp_instance.saveJsonFile(problem_path + jm_path.split("/")[0] + f"_{j}x{m}_{i}.json")
703
+ if self.dzn:
704
+ pkl_path = f"{problem_path}/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.pkl"
705
+ jsp_instance.savePythonFile(pkl_path)
706
+ jsp_instance.saveDznFile(pkl_path, problem_path + jm_path)#f"{j}x{m}_{i}")
707
+ os.remove(pkl_path)
708
+ if self.taillard:
709
+ jsp_instance.saveTaillardStandardFile(problem_path + jm_path.split("/")[0] + f"_{j}x{m}_{i}.txt")
710
+ else:
711
+ if self.json:
712
+ jsp_instance.saveJsonFile(f"{problem_path}/JSON/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.json")
713
+ if self.dzn:
714
+ pkl_path = f"{problem_path}/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.pkl"
715
+ jsp_instance.savePythonFile(pkl_path)
716
+ jsp_instance.saveDznFile(pkl_path,f"{problem_path}/DZN/" + jm_path)#f"{j}x{m}_{i}")
717
+ os.remove(pkl_path)
718
+ if self.taillard:
719
+ jsp_instance.saveTaillardStandardFile(f"{problem_path}/TAILLARD/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.txt")
720
+
721
+ return jsp_instance
IGJSP/main.py CHANGED
@@ -18,10 +18,7 @@ try:
18
18
  parser.add_argument('-M','--machines', type=json.loads, default='[4]')
19
19
  # Semilla
20
20
  parser.add_argument('-s','--seeds', type= json.loads, default='[0]')
21
- # Tipo de problema
22
- # 1 - JSP, 2 - FlowJSP, 3 - FlexibleJSP, 4 - OpenJSP
23
- # parser.add_argument('-T','--type', type=int, default=1)
24
- # Niveles de dificultad
21
+
25
22
  # Speed Scaling
26
23
  parser.add_argument('-S', '--speed-scaling', type=int, default=1)
27
24
 
@@ -29,7 +26,7 @@ try:
29
26
  # 0 -> Tiempo infinito
30
27
  # 1 -> Tiempo por trabajo
31
28
  # 2 -> Tiempo por tarea de cada trabajo
32
- parser.add_argument('-RDDD', '--release-due', type=int, default=0)
29
+ parser.add_argument('-RDDD', '--release-due', type=int, default=0, choices=[0, 1, 2])
33
30
  # Time
34
31
  # parser.add_argument('-Ti', '--time', type=int, default=0)
35
32
  # Path
@@ -48,7 +45,16 @@ try:
48
45
  # Taillard save
49
46
  parser.add_argument('-t','--taillard', type=bool, default=False)
50
47
 
48
+ # Instance type (JSP or FJSP)
49
+ parser.add_argument('-T', '--type', type=int, default=1, choices=[1, 2])
50
+
51
51
  args = parser.parse_args()
52
+
53
+ type_dict = {
54
+ 1: "JSP",
55
+ 2: "FJSP"
56
+ }
57
+
52
58
  np.random.seed(args.seeds)
53
59
 
54
60
  start = time.time()
@@ -64,7 +70,8 @@ try:
64
70
  args.seeds = args.seeds+ list(p_s[:args.quantity-len(args.seeds)])
65
71
  for i in trange(args.quantity,desc='Quantity',leave=False):
66
72
  generator.savepath = args.path+"/instancesGenerated_"+str(i)
67
- generator.generate_new_instance(jobs=j, machines=m,ReleaseDateDueDate =np.array(args.release_due),speed = args.speed_scaling,distribution = args.distribution, seed = args.seeds[i])
73
+ generator.generate_new_instance(jobs=j, machines=m, ReleaseDateDueDate=np.array(args.release_due), speed = args.speed_scaling,
74
+ distribution=args.distribution, seed=args.seeds[i], instance_type=type_dict[args.type])
68
75
 
69
76
  except Exception as e:
70
77
  raise
@@ -0,0 +1,304 @@
1
+ Metadata-Version: 2.4
2
+ Name: IGJSP
3
+ Version: 1.0.1
4
+ Summary: Instance generator for JSP
5
+ Project-URL: Homepage, https://gps.blogs.upv.es/
6
+ Author-email: GPS-UPV <gps@dsic.upv.es>
7
+ License-File: LICENSE
8
+ Classifier: License :: OSI Approved :: MIT License
9
+ Classifier: Operating System :: OS Independent
10
+ Classifier: Programming Language :: Python :: 3
11
+ Requires-Python: >=3.9
12
+ Requires-Dist: numpy
13
+ Requires-Dist: scipy
14
+ Requires-Dist: tqdm
15
+ Description-Content-Type: text/markdown
16
+
17
+ # Instance Generator for JSP & FJSP (Energy‑aware)
18
+
19
+ ## Description
20
+
21
+ Instance generator for the **Job Shop Scheduling Problem (JSP)** and the **Flexible Job Shop Scheduling Problem (FJSP)** with speed‑scaling and optional release/due dates. The generator produces instances in **JSON**, **MiniZinc DZN**, and **Taillard-like text** formats, and it is designed for reproducible experiments via a random seed.
22
+
23
+ ### Key features
24
+
25
+ - Supports **JSP** and **FJSP** (`instance_type="JSP"` or `instance_type="FJSP"`).
26
+ - **Energy‑aware speed scaling**: each operation can be executed at one of several speeds; processing time and energy consumption are linked per speed.
27
+ - **Release/Due date modes** (`ReleaseDateDueDate`): `0` (none), `1` (per job), `2` (per operation).
28
+ - Multiple output formats: **JSON**, **DZN** (MiniZinc templates), **Taillard-style** text.
29
+ - **Distributions** for data generation: `uniform`, `normal`, `exponential`.
30
+ - Reproducibility via **seed**.
31
+ - **FJSP** adds a per‑job binary vector of **available machines**.
32
+
33
+ > **About value ranges & scaling**
34
+ >
35
+ > - With the **uniform** distribution, base operation costs are sampled within **[10, 100]**.
36
+ > - Initial *release times* are sampled from **[0, 100]** (in steps of 10) and then normalized to start at 0.
37
+ > - **Energy consumption** values are normalized into **[1, 100]** by construction.
38
+ > - **Processing times** are derived from base costs and speed scaling and **are not capped** at 100 (they can exceed 100), especially with `normal`/`exponential` draws.
39
+ >
40
+ > As a result, energy values and the initial release‑date seeds are within 0–100; if you need strict 0–100 ranges for *all* fields, set an external rescaling on the produced arrays or constrain generation to `distribution="uniform"` and adjust your post‑processing accordingly. A minimal rescaling sketch follows this note.
41
+
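For instance, a minimal post-scaling sketch (an illustrative helper, not part of the IGJSP API) that affinely maps an array such as `ProcessingTime` into [1, 100]:

```python
import numpy as np

def rescale_to_range(values, lo=1, hi=100):
    """Affinely map a numeric array into [lo, hi]; illustrative helper, not part of IGJSP."""
    values = np.asarray(values, dtype=float)
    span = values.max() - values.min()
    if span == 0:
        # Constant array: everything collapses to the lower bound.
        return np.full(values.shape, lo, dtype=int)
    return np.rint(lo + (values - values.min()) * (hi - lo) / span).astype(int)

# Usage (hypothetical): inst.ProcessingTime = rescale_to_range(inst.ProcessingTime)
```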
42
+ ---
43
+
44
+ ## Python API
45
+
46
+ ### Generator initialization
47
+
48
+ ```python
49
+ from IGJSP.generador import Generator
50
+
51
+ gen = Generator(
52
+ json=False, # write JSON files
53
+ dzn=False, # write DZN files (MiniZinc)
54
+ taillard=False, # write Taillard-like txt
55
+ savepath="./output", # base output directory/template
56
+ single_folder_output=False # put artifacts in a single folder
57
+ )
58
+ ```
59
+
60
+ ### Instance creation
61
+
62
+ ```python
63
+ obj = gen.generate_new_instance(
64
+ jobs=10, machines=4,
65
+ speed=1, # number of speed levels
66
+ ReleaseDateDueDate=0, # 0 (none), 1 (per job), 2 (per operation)
67
+ distribution="uniform", # 'uniform' | 'normal' | 'exponential'
68
+ seed=1,
69
+ tpm=[], # optional per-machine time scale
70
+ instance_type="JSP", # 'JSP' (default) or 'FJSP'
71
+ size=1 # how many instances to emit (looped)
72
+ )
73
+ ```
74
+
75
+ If all three output flags (`json`, `dzn`, `taillard`) are `False`, the function returns the in‑memory instance object (`JSP` or `FJSP`). Otherwise, it writes files under `savepath` and returns the last instance created.
76
+
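A minimal sketch of the in-memory path, assuming the attribute names defined in `IGJSP/generador.py` (`numJobs`, `numMchs`, `ProcessingTime`, `Orden`):

```python
from IGJSP.generador import Generator

gen = Generator()                      # no output flags: nothing is written to disk
inst = gen.generate_new_instance(jobs=5, machines=3, speed=2, seed=42)

print(inst.numJobs, inst.numMchs)      # 5 3
print(inst.ProcessingTime.shape)       # expected (5, 3, 2): job x machine x speed level
print(inst.Orden[0])                   # machine visiting order of job 0
```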
77
+ ---
78
+
79
+ ## Generating a JSP problem instance
80
+
81
+ To generate an instance of the problem, use the `Generator` class (module `IGJSP.generador`). Initialize it and then call `generate_new_instance` with the parameters below.
82
+
83
+ ### Parameters (generation)
84
+
85
+ 1. **Jobs:** `jobs` — number of jobs. Default: `10`
86
+ 2. **Machines:** `machines` — number of machines. Default: `4`
87
+ 3. **Release and Due Date:** `ReleaseDateDueDate`
88
+ - `0`: neither jobs nor operations have release/due times (default)
89
+ - `1`: each job has a release and due date
90
+ - `2`: each operation has a release and due date
91
+ 4. **Speeds:** `speed` — number of speed levels. Default: `1`
92
+ 5. **Distribution:** `distribution` — `uniform`, `normal`, or `exponential`. Default: `uniform`
93
+ 6. **Seed:** `seed` — random seed for reproducibility. Default: `0`
94
+
95
+ ### Parameters (output)
96
+
97
+ - **JSON:** `json` (bool) — write JSON file(s). Default: `False`
98
+ - **DZN:** `dzn` (bool) — write MiniZinc DZN file(s). Default: `False`
99
+ - **Taillard:** `taillard` (bool) — write Taillard-like text file. Default: `False`
100
+ - **Save Path:** `savepath` (str) — base path/template for outputs. Default: `./`
101
+ - **Single folder:** `single_folder_output` (bool) — whether to write all artifacts into a single folder. Default: `False`
102
+
103
+ ### Example (JSP)
104
+
105
+ ```python
106
+ from IGJSP.generador import Generator
107
+ generator = Generator(json=True, savepath="output")
108
+ generator.generate_new_instance(
109
+ jobs=2, machines=4,
110
+ ReleaseDateDueDate=2,
111
+ distribution="exponential",
112
+ seed=53
113
+ )
114
+ ```
115
+
116
+ ### Example of generated JSON (JSP)
117
+
118
+ ```json
119
+ {
120
+ "nbJobs": [
121
+ 0,
122
+ 1
123
+ ],
124
+ "nbMchs": [
125
+ 0,
126
+ 1,
127
+ 2,
128
+ 3
129
+ ],
130
+ "speed": 1,
131
+ "timeEnergy": [
132
+ {
133
+ "jobId": 0,
134
+ "operations": {
135
+ "0": {
136
+ "speed-scaling": [
137
+ {
138
+ "procTime": 8,
139
+ "energyCons": 92
140
+ }
141
+ ],
142
+ "release-date": 30,
143
+ "due-date": 41
144
+ },
145
+ "2": {
146
+ "speed-scaling": [
147
+ {
148
+ "procTime": 17,
149
+ "energyCons": 84
150
+ }
151
+ ],
152
+ "release-date": 41,
153
+ "due-date": 77
154
+ },
155
+ "3": {
156
+ "speed-scaling": [
157
+ {
158
+ "procTime": 3,
159
+ "energyCons": 97
160
+ }
161
+ ],
162
+ "release-date": 77,
163
+ "due-date": 80
164
+ },
165
+ "1": {
166
+ "speed-scaling": [
167
+ {
168
+ "procTime": 7,
169
+ "energyCons": 93
170
+ }
171
+ ],
172
+ "release-date": 80,
173
+ "due-date": 88
174
+ }
175
+ },
176
+ "release-date": 30,
177
+ "due-date": 88
178
+ },
179
+ {
180
+ "jobId": 1,
181
+ "operations": {
182
+ "1": {
183
+ "speed-scaling": [
184
+ {
185
+ "procTime": 4,
186
+ "energyCons": 96
187
+ }
188
+ ],
189
+ "release-date": 0,
190
+ "due-date": 5
191
+ },
192
+ "3": {
193
+ "speed-scaling": [
194
+ {
195
+ "procTime": 3,
196
+ "energyCons": 97
197
+ }
198
+ ],
199
+ "release-date": 5,
200
+ "due-date": 9
201
+ },
202
+ "2": {
203
+ "speed-scaling": [
204
+ {
205
+ "procTime": 1,
206
+ "energyCons": 99
207
+ }
208
+ ],
209
+ "release-date": 9,
210
+ "due-date": 10
211
+ },
212
+ "0": {
213
+ "speed-scaling": [
214
+ {
215
+ "procTime": 6,
216
+ "energyCons": 94
217
+ }
218
+ ],
219
+ "release-date": 10,
220
+ "due-date": 17
221
+ }
222
+ },
223
+ "release-date": 0,
224
+ "due-date": 17
225
+ }
226
+ ],
227
+ "minMakespan": 35,
228
+ "minEnergy": 752,
229
+ "maxMinMakespan": 14,
230
+ "maxMinEnergy": 0
231
+ }
232
+ ```
233
+
234
+ ---
235
+
236
+ ## Generating an FJSP problem instance
237
+
238
+ Set `instance_type="FJSP"` to enable flexible routing. In the JSON output, each job includes an `available_machines` binary vector of length `nbMchs`, indicating where the job's operations can be processed (`1` = available, `0` = not available).
239
+
240
+ ### Example (FJSP)
241
+
242
+ ```python
243
+ from IGJSP.generador import Generator
244
+ generator = Generator(json=True, savepath="output")
245
+ generator.generate_new_instance(
246
+ jobs=3, machines=3,
247
+ speed=1,
248
+ ReleaseDateDueDate=0,
249
+ distribution="uniform",
250
+ seed=7,
251
+ instance_type="FJSP"
252
+ )
253
+ ```
254
+
255
+ ### Example of generated JSON (FJSP)
256
+
257
+ Abridged example to illustrate the additional `available_machines` field:
258
+
259
+ ```json
260
+ {
261
+ "nbJobs": [0,1,2],
262
+ "nbMchs": [0,1,2],
263
+ "speed": 1,
264
+ "timeEnergy": [
265
+ {
266
+ "jobId": 0,
267
+ "available_machines": [1,0,1],
268
+ "operations": {
269
+ "0": { "speed-scaling": [ { "procTime": 12, "energyCons": 90 } ] },
270
+ "2": { "speed-scaling": [ { "procTime": 18, "energyCons": 84 } ] },
271
+ "1": { "speed-scaling": [ { "procTime": 11, "energyCons": 89 } ] }
272
+ }
273
+ },
274
+ {
275
+ "jobId": 1,
276
+ "available_machines": [1,1,0],
277
+ "operations": {
278
+ "2": { "speed-scaling": [ { "procTime": 7, "energyCons": 93 } ] },
279
+ "0": { "speed-scaling": [ { "procTime": 5, "energyCons": 95 } ] },
280
+ "1": { "speed-scaling": [ { "procTime": 13, "energyCons": 88 } ] }
281
+ }
282
+ }
283
+ ],
284
+ "minMakespan": 123,
285
+ "minEnergy": 456,
286
+ "maxMinMakespan": 78,
287
+ "maxMinEnergy": 90
288
+ }
289
+ ```
290
+
291
+ ---
292
+
293
+ ## Notes on outputs
294
+
295
+ - **JSON**: Contains `nbJobs`, `nbMchs`, `speed`, and a `timeEnergy` list with per‑job `operations`. For `ReleaseDateDueDate=1` (per job) or `2` (per operation), `release-date`/`due-date` fields are added accordingly (see the reading sketch after this list).
296
+ - **DZN**: The generator writes `.dzn` files from the MiniZinc data templates under `Minizinc/Types/RD/` (separate `JSP/` and `FJSP/` template sets), parameterized by the selected RD mode and speed level.
297
+ - **Taillard-like**: Writes textual matrices for processing times, energy consumption, and the job‑specific machine order; the FJSP variant also appends an `Available machines:` section (binary rows per job).
298
+
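As a minimal reading sketch for the JSON output (the file path below is a placeholder; only the fields listed above are assumed):

```python
import json

# Placeholder path: adjust to wherever the generator wrote the instance.
with open("output/JSON/10_4_10x4_0.json") as f:
    instance = json.load(f)

print(len(instance["nbJobs"]), "jobs on", len(instance["nbMchs"]), "machines")
for job in instance["timeEnergy"]:
    for machine, op in job["operations"].items():
        # One (procTime, energyCons) pair per speed level.
        pairs = [(s["procTime"], s["energyCons"]) for s in op["speed-scaling"]]
        print("job", job["jobId"], "machine", machine, pairs)
```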
299
+ ---
300
+
301
+ ## Reproducibility & scaling tips
302
+
303
+ - Use a fixed `seed` to reproduce instances exactly (see the sketch below).
304
+ - For tighter value ranges (e.g., unit testing), prefer `distribution="uniform"` and post‑scale arrays if you require strict bounds (e.g., map processing times to `[1,100]` after generation). Energy values are already normalized to `[1,100]` by design.
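A minimal reproducibility check, assuming generation is deterministic for a fixed seed as suggested by `generador.py`:

```python
import numpy as np
from IGJSP.generador import Generator

gen = Generator()  # in-memory only, nothing written to disk
a = gen.generate_new_instance(jobs=5, machines=3, speed=2, distribution="uniform", seed=7)
b = gen.generate_new_instance(jobs=5, machines=3, speed=2, distribution="uniform", seed=7)

# Same parameters and seed are expected to yield identical instance data.
assert np.array_equal(a.ProcessingTime, b.ProcessingTime)
assert np.array_equal(a.EnergyConsumption, b.EnergyConsumption)
```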
@@ -0,0 +1,18 @@
1
+ IGJSP/generador.py,sha256=w9SxkVC3oj4inIH9gP_CKvOecMm84_TVS08Kbc12A6E,34349
2
+ IGJSP/main.py,sha256=Sia5Ss8O3HWBdshvPLJKUMaZIoQPHy6x8yzvojojPFo,2838
3
+ IGJSP/Minizinc/Models/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
4
+ IGJSP/Minizinc/Models/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
5
+ IGJSP/Minizinc/Models/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
6
+ IGJSP/Minizinc/Types/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
7
+ IGJSP/Minizinc/Types/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
8
+ IGJSP/Minizinc/Types/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
9
+ IGJSP/Minizinc/Types/RD/FJSP/type0.dzn,sha256=lhHhITCHzWrChnmOsyVMPoNGdIJ9S09bg9JfmwgY16Q,330
10
+ IGJSP/Minizinc/Types/RD/FJSP/type1.dzn,sha256=bUGYq03ZlQId5xdDwEy5s2Y_iE0LkVZO3camiGKva2A,400
11
+ IGJSP/Minizinc/Types/RD/FJSP/type2.dzn,sha256=Wz1MnkSL5GUPsbh1eq0leoaQRImkNqQqkXh9fWoGhRM,446
12
+ IGJSP/Minizinc/Types/RD/JSP/type0.dzn,sha256=wNuPQkXBXPSpPaPz2WFhp4pGDgfSimtg4I93UfwC01Q,263
13
+ IGJSP/Minizinc/Types/RD/JSP/type1.dzn,sha256=Xbt9StzCgEqqh_HS9tWGrTVtu-OEnf5Yq5Ty91AkzoM,333
14
+ IGJSP/Minizinc/Types/RD/JSP/type2.dzn,sha256=L2nc7bPJEhyuaEwgw0ZCpC52CpVJILQU_WQdKn8GUZs,379
15
+ igjsp-1.0.1.dist-info/METADATA,sha256=xBsUY3HSGQvowsRaCPdf1wmwU8N9N_c9QykR2Y7dZs4,10609
16
+ igjsp-1.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
17
+ igjsp-1.0.1.dist-info/licenses/LICENSE,sha256=f7RDRO-z_nMoooAya7NAb8sXtrHR6WnttYtyUc9fB-c,1116
18
+ igjsp-1.0.1.dist-info/RECORD,,
@@ -1,8 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
@@ -1,10 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
9
- releaseDate = {releaseDate};
10
- dueDate = {dueDate};
@@ -1,10 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
9
- releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
- dueDate = array2d(JOBS,MACHINES,{dueDate});
@@ -1,216 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: IGJSP
3
- Version: 0.0.10
4
- Summary: Instance generator for JSP
5
- Project-URL: Homepage, https://gps.blogs.upv.es/
6
- Author-email: GPS-UPV <gps@dsic.upv.es>
7
- License-File: LICENSE
8
- Classifier: License :: OSI Approved :: MIT License
9
- Classifier: Operating System :: OS Independent
10
- Classifier: Programming Language :: Python :: 3
11
- Requires-Python: >=3.9
12
- Requires-Dist: numpy
13
- Requires-Dist: scipy
14
- Requires-Dist: tqdm
15
- Description-Content-Type: text/markdown
16
-
17
- # Instance Generator Job Shop Scheduling
18
-
19
- ## Description
20
- Benchmark generator for the Job Shop Problem (BG-JSP)
21
-
22
- ## Generating a JSP problem instance
23
-
24
- To generate an instance of the problem, we will use the Generator class, located in the Generador module.
25
- To do this, we initialize the generator, giving it the following parameters:
26
-
27
- 1. **JSON:`json`**
28
- - **Description**: Parameter that indicates if the generated instance will be stored in JSON format.
29
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
30
- - **Example of possible values**: `True`, `False`
31
- - **Default value**: `False`
32
-
33
- 2. **DZN:`dzn`**
34
- - **Description**: Parameter that indicates if the generated instance will be stored in DZN format.
35
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
36
- - **Example of possible values**: `True`, `False`
37
- - **Default value**: `False`
38
-
39
- 3. **Taillard:`taillard`**
40
- - **Description**: Parameter that indicates if the generated instance will be stored in taillard format.
41
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
42
- - **Example of possible values**: `True`, `False`
43
- - **Default value**: `False`
44
-
45
- 4. **Save Path:`savepath`**
46
- - **Description**: Path where the problem instance file will be generated.
47
- - **Possible values**: String.
48
- - **Example of possible values**: `./problems`, `./instances`
49
- - **Default value**: `./output`
50
-
51
-
52
- Once the generator has been initialized, we proceed to generate different instances of the JSP problem with different values for this initialization; for that we use the following function using the following parameters to customize the generated instances:
53
-
54
- 1. **Jobs:`jobs`**
55
- - **Description**: Number of jobs that will have the problem generated.
56
- - **Possible values**: Integer value.
57
- - **Example of possible values**: `3`, `4`.
58
- - **Default value**: `10`
59
-
60
- 2. **Machines:`machines`**
61
- - **Description**: Number of machines that will have the problem generated.
62
- - **Possible values**: Integer value.
63
- - **Example of possible values**: `6`, `2`.
64
- - **Default value**: `4`
65
-
66
- 3. **Release and Due Date:`ReleaseDateDueDate`**
67
- - **Descripción**: Establish that each task has an instant of release and completion limit.
68
- - **Possible values**:
69
- - `0`: Neither the works nor the operations of each of them will have an instant release or time limit for completion.
70
- - `1`: The work will have an instant of release and instant of completion limit.
71
- - `2`: The operations of each job will have a release time and a limiting end time.
72
- - **Example of possible values**: `1`, `2`
73
- - **Default value**: `0`
74
-
75
- 4. **Speeds:`speed`**
76
- - **Description**: Number of speeds that will be available to perform each task.
77
- - **Possible values**: Integer value.
78
- - **Example of possible values**: `3`, `5`
79
- - **Default value**: `1`
80
-
81
- 5. **Distribution:`distribution`**
82
- - **Description**: Type of distribution to be followed for data generation.
83
- - **Possible values**: You can only set one of the following values: `uniform` `normal` `exponential.`
84
- - **Example of possible values**: `uniform`, `exponential`
85
- - **Default value**: `normal`
86
-
87
- 6. **Seed:`seed`**
88
- - **Description**: Base number for data generation.
89
- - **Possible values**: Integer value.
90
- - **Example of possible values**: `84`, `32`
91
- - **Default value**: `1`
92
-
93
- ## Example of JSON generated
94
-
95
- This JSON shows how the data generated from a JSP problem with 2 machines and 4 jobs will look. For this generation, we have the following code:
96
- ``` python
97
- from IGJSP.generador import Generator
98
- generator = Generator(json=True,savepath="output")
99
- generator.generate_new_instance(jobs=4,machines=2,ReleaseDateDueDate=2,distribution="exponential",seed=53)
100
- ```
101
-
102
- ```json
103
- {
104
- "nbJobs": [
105
- 0,
106
- 1
107
- ],
108
- "nbMchs": [
109
- 0,
110
- 1,
111
- 2,
112
- 3
113
- ],
114
- "speed": 1,
115
- "timeEnergy": [
116
- {
117
- "jobId": 0,
118
- "operations": {
119
- "0": {
120
- "speed-scaling": [
121
- {
122
- "procTime": 8,
123
- "energyCons": 92
124
- }
125
- ],
126
- "release-date": 30,
127
- "due-date": 41
128
- },
129
- "2": {
130
- "speed-scaling": [
131
- {
132
- "procTime": 17,
133
- "energyCons": 84
134
- }
135
- ],
136
- "release-date": 41,
137
- "due-date": 77
138
- },
139
- "3": {
140
- "speed-scaling": [
141
- {
142
- "procTime": 3,
143
- "energyCons": 97
144
- }
145
- ],
146
- "release-date": 77,
147
- "due-date": 80
148
- },
149
- "1": {
150
- "speed-scaling": [
151
- {
152
- "procTime": 7,
153
- "energyCons": 93
154
- }
155
- ],
156
- "release-date": 80,
157
- "due-date": 88
158
- }
159
- },
160
- "release-date": 30,
161
- "due-date": 88
162
- },
163
- {
164
- "jobId": 1,
165
- "operations": {
166
- "1": {
167
- "speed-scaling": [
168
- {
169
- "procTime": 4,
170
- "energyCons": 96
171
- }
172
- ],
173
- "release-date": 0,
174
- "due-date": 5
175
- },
176
- "3": {
177
- "speed-scaling": [
178
- {
179
- "procTime": 3,
180
- "energyCons": 97
181
- }
182
- ],
183
- "release-date": 5,
184
- "due-date": 9
185
- },
186
- "2": {
187
- "speed-scaling": [
188
- {
189
- "procTime": 1,
190
- "energyCons": 99
191
- }
192
- ],
193
- "release-date": 9,
194
- "due-date": 10
195
- },
196
- "0": {
197
- "speed-scaling": [
198
- {
199
- "procTime": 6,
200
- "energyCons": 94
201
- }
202
- ],
203
- "release-date": 10,
204
- "due-date": 17
205
- }
206
- },
207
- "release-date": 0,
208
- "due-date": 17
209
- }
210
- ],
211
- "minMakespan": 35,
212
- "minEnergy": 752,
213
- "maxMinMakespan": 14,
214
- "maxMinEnergy": 0
215
- }
216
- ```
@@ -1,24 +0,0 @@
1
- IGJSP/generador.py,sha256=lOgB0hDKLboO-eubDL8DV7dvK8XfnQ2MvXRc_CkkZdk,27747
2
- IGJSP/main.py,sha256=qnpAdI9Nymfbb2t674EHKCZr1vG80tGyVT5RKMiGvZM,2727
3
- IGJSP/Minizinc/Models/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
4
- IGJSP/Minizinc/Models/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
5
- IGJSP/Minizinc/Models/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
6
- IGJSP/Minizinc/Types/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
7
- IGJSP/Minizinc/Types/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
8
- IGJSP/Minizinc/Types/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
9
- IGJSP/Minizinc/Types/RD/type0.dzn,sha256=alo54TrPd2svp-YeOq8rNhb42Aj5w4N1y1WYRxZ-aWM,225
10
- IGJSP/Minizinc/Types/RD/type1.dzn,sha256=r3f1uOoDnscjpXHbpvOWPGhGN0tmwkJ3NYIRP-NGk5M,298
11
- IGJSP/Minizinc/Types/RD/type2.dzn,sha256=ddJxLqR_WlUpycHhA_QulxiTx3o9neIzNESEt59VUx0,344
12
- IGJSP/Minizinc/Models/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
13
- IGJSP/Minizinc/Models/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
14
- IGJSP/Minizinc/Models/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
15
- IGJSP/Minizinc/Types/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
16
- IGJSP/Minizinc/Types/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
17
- IGJSP/Minizinc/Types/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
18
- IGJSP/Minizinc/Types/RD/type0.dzn,sha256=alo54TrPd2svp-YeOq8rNhb42Aj5w4N1y1WYRxZ-aWM,225
19
- IGJSP/Minizinc/Types/RD/type1.dzn,sha256=r3f1uOoDnscjpXHbpvOWPGhGN0tmwkJ3NYIRP-NGk5M,298
20
- IGJSP/Minizinc/Types/RD/type2.dzn,sha256=ddJxLqR_WlUpycHhA_QulxiTx3o9neIzNESEt59VUx0,344
21
- igjsp-0.0.10.dist-info/METADATA,sha256=cAMGyVrod2bEHE5AUU5OG5uR96i3tVS8C162l07hCzA,7407
22
- igjsp-0.0.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
23
- igjsp-0.0.10.dist-info/licenses/LICENSE,sha256=f7RDRO-z_nMoooAya7NAb8sXtrHR6WnttYtyUc9fB-c,1116
24
- igjsp-0.0.10.dist-info/RECORD,,
File without changes