IGJSP 0.0.9__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,11 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = {releaseDate};
10
+ dueDate = {dueDate};
11
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,11 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
+ dueDate = array2d(JOBS,MACHINES,{dueDate});
11
+ available_machines = array2d(JOBS,MACHINES,{available_machines});
@@ -0,0 +1,8 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
@@ -0,0 +1,10 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = {releaseDate};
10
+ dueDate = {dueDate};
@@ -0,0 +1,10 @@
1
+ %SETS
2
+ JOBS = 1..{jobs};
3
+ MACHINES = 1..{machines};
4
+ SPEED = {Speed};
5
+
6
+ time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
+ energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
+ precedence = array2d(JOBS,MACHINES,{precedence});
9
+ releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
+ dueDate = array2d(JOBS,MACHINES,{dueDate});
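The `.dzn` templates above are plain text with `{placeholder}` tokens (`{jobs}`, `{machines}`, `{Speed}`, `{time}`, ...). A minimal sketch of how such a template is filled by string substitution, mirroring the `saveDznFile` logic that appears later in this diff; the template body and the data values below are illustrative, not taken from a real instance:

```python
# Minimal sketch: fill a type0-style template by replacing "{key}" tokens.
template = """%SETS
JOBS = 1..{jobs};
MACHINES = 1..{machines};
SPEED = {Speed};

time = array3d(JOBS,MACHINES,1..SPEED,{time});
energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
precedence = array2d(JOBS,MACHINES,{precedence});
"""

replace_data = {
    "jobs": 2,
    "machines": 2,
    "Speed": 1,
    "time": [8, 17, 3, 7],        # flattened jobs x machines x speeds
    "energy": [92, 84, 97, 93],
    "precedence": [0, 1, 1, 0],   # position of each machine in each job's order
}

filedata = template
for key, value in replace_data.items():
    filedata = filedata.replace("{" + key + "}", str(value))

print(filedata)
```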
IGJSP/generador.py CHANGED
@@ -1,15 +1,15 @@
1
1
  import copy
2
+ import datetime
2
3
  import json
3
- import multiprocessing
4
4
  import os
5
5
  import pickle
6
- import random
7
- from concurrent.futures import ProcessPoolExecutor, as_completed
8
- from datetime import datetime
9
- from itertools import combinations, product
10
- from pathlib import Path
6
+ from itertools import combinations
11
7
  import networkx as nx
12
8
  import numpy as np
9
+ import re
10
+
11
+ np.set_printoptions(linewidth=np.inf, threshold=np.inf, formatter={"int": lambda x: f"{x}"})
12
+
13
13
  from scipy.stats import expon, norm, uniform
14
14
  from pprint import pprint
15
15
 
@@ -37,14 +37,14 @@ class JSP:
37
37
  np.random.seed(seed)
38
38
  self.rddd = rddd
39
39
  self.speed = speed
40
- if not tpm:
40
+ if not tpm or len(tpm) != self.numMchs:
41
41
  if distribution == "uniform":
42
42
  tpm = np.random.uniform(10, 100, self.numMchs)
43
43
  elif distribution == "normal":
44
44
  tpm = [max(10, data) for data in np.random.normal(50, 20, self.numMchs)]
45
45
  else:
46
46
  tpm = expon(loc=10, scale=20).rvs(self.numMchs)
47
-
47
+
48
48
  energyPer, timePer = self._particionate_speed_space(speed)
49
49
  self._generate_standar_operation_cost(distribution)
50
50
 
@@ -67,7 +67,6 @@ class JSP:
67
67
 
68
68
  self._jobToMachine(release_date_tasks, timePer, distribution)
69
69
  self.generate_maxmin_objective_values()
70
- self.vectorization()
71
70
 
72
71
  def _particionate_speed_space(self, speed):
73
72
  energyPer = np.linspace(0.5, 3, speed) if speed > 1 else [1]
@@ -117,6 +116,7 @@ class JSP:
117
116
  return expon(loc=duration, scale=duration/2).rvs()
118
117
 
119
118
  def savePythonFile(self, path):
119
+ os.makedirs(os.path.dirname(path), exist_ok=True)
120
120
  with open(path, 'wb') as f:
121
121
  pickle.dump(self, f)
122
122
 
@@ -135,19 +135,19 @@ class JSP:
135
135
  for job in range(self.numJobs):
136
136
  new = {
137
137
  "jobId": job,
138
- "operations": {}
138
+ "operations": {},
139
139
  }
140
+
140
141
  for machine in self.Orden[job]:
141
142
  machine = int(machine)
142
- new["operations"][machine] = {
143
- "speed-scaling": [
144
- {
145
- "procTime": int(proc),
146
- "energyCons": int(energy)
147
- }
148
- for proc, energy in zip(self.ProcessingTime[job, machine], self.EnergyConsumption[job, machine])
149
- ]
150
- }
143
+ new["operations"][machine] = {"speed-scaling" :
144
+ [
145
+ {"procTime" : int(proc),
146
+ "energyCons" : int(energy)
147
+ }
148
+ for proc, energy in zip(self.ProcessingTime[job, machine],self.EnergyConsumption[job, machine])
149
+ ]
150
+ }
151
151
  if self.rddd == 2:
152
152
  new["operations"][machine]["release-date"] = int(self.ReleaseDueDate[job][machine][0])
153
153
  new["operations"][machine]["due-date"] = int(self.ReleaseDueDate[job][machine][1])
@@ -160,8 +160,28 @@ class JSP:
160
160
  self.JSP["timeEnergy"].append(new)
161
161
 
162
162
  os.makedirs(os.path.dirname(path), exist_ok=True)
163
- with open(path, 'w+' ) as f:
164
- json.dump(self.JSP, f, indent=4)
163
+ with open(path, 'w+') as f:
164
+ # Generate the JSON with standard indentation first
165
+ json_str = json.dumps(self.JSP, indent=4)
166
+
167
+ # Selective compression: only arrays of plain numbers
168
+ def compress_simple_arrays(match):
169
+ # Compress only if the array contains nothing but numbers and commas
170
+ content = match.group(1)
171
+ if re.match(r'^(\s*\d+\s*,)*\s*\d+\s*$', content):
172
+ # Strip spaces and line breaks
173
+ return '[' + re.sub(r'\s+', '', content) + ']'
174
+ return match.group(0) # Keep as-is if the array is not simple
175
+
176
+ # Find arrays that can be compressed
177
+ json_str = re.sub(
178
+ r'\[([\s\S]*?)\]',
179
+ compress_simple_arrays,
180
+ json_str,
181
+ flags=re.DOTALL
182
+ )
183
+
184
+ f.write(json_str)
165
185
 
166
186
  def saveDznFile(self, InputDir, OutputDir):
167
187
  indexProblema = OutputDir.split("/")[-2]
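The selective compression added to `saveJsonFile` above keeps the JSON readable while flattening purely numeric arrays onto one line. A standalone sketch of the same idea, not a verbatim excerpt of the package:

```python
import json
import re

def compress_simple_arrays(match):
    # Collapse the array onto one line only if it contains nothing but integers and commas.
    content = match.group(1)
    if re.match(r'^(\s*\d+\s*,)*\s*\d+\s*$', content):
        return '[' + re.sub(r'\s+', '', content) + ']'
    return match.group(0)  # leave nested / non-numeric arrays untouched

data = {"speed": 1, "time": [8, 17, 3, 7], "nested": [{"procTime": 8}]}
json_str = json.dumps(data, indent=4)
json_str = re.sub(r'\[([\s\S]*?)\]', compress_simple_arrays, json_str)
print(json_str)  # "time" becomes [8,17,3,7]; the dict inside "nested" keeps its indentation
```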
@@ -182,26 +202,27 @@ class JSP:
182
202
  "machines": data.numMchs,
183
203
  "jobs": data.numJobs,
184
204
  "Speed": s,
185
- "time": list(time.flatten()),
186
- "energy": list(energy.flatten())
205
+ "time": time.flatten(),
206
+ "energy": energy.flatten(),
187
207
  }
208
+
188
209
  if t == 1:
189
- replace_data["releaseDate"] = [data.ReleaseDueDate[job, 0] for job in range(data.numJobs)]
190
- replace_data["dueDate"] = [data.ReleaseDueDate[job, 1] for job in range(data.numJobs)]
210
+ replace_data["releaseDate"] = str([int(data.ReleaseDueDate[job, 0]) for job in range(data.numJobs)]).replace(", ", " ")
211
+ replace_data["dueDate"] = str([int(data.ReleaseDueDate[job, 1]) for job in range(data.numJobs)]).replace(", ", " ")
191
212
  elif t == 2:
192
- replace_data["releaseDate"] = list(data.ReleaseDueDate[:, :, 0].flatten())
193
- replace_data["dueDate"] = list(data.ReleaseDueDate[:, :, 1].flatten())
213
+ replace_data["releaseDate"] = data.ReleaseDueDate[:, :, 0].flatten()
214
+ replace_data["dueDate"] = data.ReleaseDueDate[:, :, 1].flatten()
194
215
 
195
216
  for job in range(data.numJobs):
196
217
  for i, prioridad in enumerate(range(data.numMchs)):
197
218
  precedence[job, data.Orden[job, prioridad]] = i
198
- replace_data["precedence"] = list(precedence.flatten())
199
219
 
200
- new_object = data.change_rddd_type(t).select_speeds(list(range(s0, sf, sp)))
201
- with open(f"./Minizinc/Types/RD/type{t}.dzn", "r", encoding="utf-8") as file:
220
+ replace_data["precedence"] = precedence.flatten()
221
+
222
+ with open(f"./Minizinc/Types/RD/JSP/type{t}.dzn", "r", encoding="utf-8") as file:
202
223
  filedata = file.read()
203
- for kk, v in replace_data.items():
204
- filedata = filedata.replace("{" + kk + "}", str(v))
224
+ for k, v in replace_data.items():
225
+ filedata = filedata.replace("{" + k + "}", str(v))
205
226
 
206
227
  os.makedirs(f"{OutputDir}/", exist_ok=True)
207
228
 
@@ -248,7 +269,6 @@ class JSP:
248
269
  f.write(f"{machine} ")
249
270
  f.write("\n")
250
271
 
251
-
252
272
  def select_speeds(self, speeds):
253
273
  if self.speed == len(speeds):
254
274
  return self
@@ -278,16 +298,16 @@ class JSP:
278
298
  return new_object
279
299
 
280
300
  def generate_maxmin_objective_values(self):
281
- self.max_makespan = sum([max(self.ProcessingTime[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
301
+ max_makespan = sum([max(self.ProcessingTime[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
282
302
  self.min_makespan = max([sum([min(self.ProcessingTime[job, machine, :]) for machine in range(self.numMchs)]) for job in range(self.numJobs)])
283
- self.max_min_makespan = self.max_makespan - self.min_makespan
284
- self.max_energy = sum([max(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
303
+ self.max_min_makespan = max_makespan - self.min_makespan
304
+ max_energy = sum([max(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
285
305
  self.min_energy = sum([min(self.EnergyConsumption[job, machine, :]) for job in range(self.numJobs) for machine in range(self.numMchs)])
286
- self.max_min_energy = self.max_energy - self.min_energy
306
+ self.max_min_energy = max_energy - self.min_energy
287
307
  if self.rddd == 1:
288
- self.max_tardiness = sum([max(0, self.max_makespan - self.ReleaseDueDate[job, 1]) for job in range(self.numJobs)])
308
+ self.max_tardiness = sum([max(0, max_makespan - self.ReleaseDueDate[job, 1]) for job in range(self.numJobs)])
289
309
  elif self.rddd == 2:
290
- self.max_tardiness = np.sum([max(0, np.int64(self.max_makespan - self.ReleaseDueDate[job, machine, 1])) for job in range(self.numJobs) for machine in range(self.numMchs)])
310
+ self.max_tardiness = np.sum([max(0, np.int64(max_makespan - self.ReleaseDueDate[job, machine, 1])) for job in range(self.numJobs) for machine in range(self.numMchs)])
291
311
 
292
312
  def norm_makespan(self, makespan):
293
313
  return (makespan - self.min_makespan) / self.max_min_makespan
@@ -389,86 +409,7 @@ class JSP:
389
409
 
390
410
  def generate_schedule_image(self, schedule):
391
411
  pass
392
-
393
- def vectorization(self):
394
- vectorization = {}
395
- # Basic features
396
- vectorization["jobs"] = self.numJobs
397
- vectorization["machines"] = self.numMchs
398
- vectorization["rddd"] = self.rddd
399
- vectorization["speed"] = self.speed
400
- vectorization["max_makespan"] = self.max_makespan
401
- vectorization["min_makespan"] = self.min_makespan
402
- vectorization["max_sum_energy"] = self.max_energy
403
- vectorization["min_sum_energy"] = self.min_energy
404
- vectorization["max_tardiness"] = self.max_tardiness if self.rddd != 0 else 0
405
- vectorization["min_window"] = 0
406
- vectorization["max_window"] = 0
407
- vectorization["mean_window"] = 0
408
- vectorization["overlap"] = 0
409
-
410
- # Complex features
411
- if self.rddd == 0:
412
- vectorization["min_window"] = -1
413
- vectorization["max_window"] = -1
414
- vectorization["mean_window"] = -1
415
- vectorization["overlap"] = -1
416
- else:
417
- if self.rddd == 1:
418
- # Time window of each job
419
- for job in range(self.numJobs):
420
- tproc_min = np.sum(np.min(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
421
- tproc_max = np.sum(np.max(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
422
- tproc_mean = np.sum(np.mean(self.ProcessingTime[job,machine,:]) for machine in range(self.numMchs))
423
- window = self.ReleaseDueDate[job,1] - self.ReleaseDueDate[job,0]
424
- vectorization["min_window"] += window / tproc_max
425
- vectorization["max_window"] += window / tproc_min
426
- vectorization["mean_window"] += window / tproc_mean
427
- vectorization["min_window"] = vectorization["min_window"] / self.numJobs
428
- vectorization["max_window"] = vectorization["max_window"] / self.numJobs
429
- vectorization["mean_window"] = vectorization["mean_window"] / self.numJobs
430
- # Overlap between jobs
431
- for job in range(self.numJobs):
432
- for job2 in range(job + 1, self.numJobs):
433
- diff = min(self.ReleaseDueDate[job,1],self.ReleaseDueDate[job2,1])-max(self.ReleaseDueDate[job,0], self.ReleaseDueDate[job2,0])
434
- if diff > 0:
435
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job,1] - self.ReleaseDueDate[job,0])
436
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job2,1] - self.ReleaseDueDate[job2,0])
437
- vectorization["overlap"] = vectorization["overlap"] / (self.numJobs * (self.numJobs - 1))
438
- else:
439
- # Time window of each operation
440
- for job in range(self.numJobs):
441
- for machine in range(self.numMchs):
442
- tproc_min = np.min(self.ProcessingTime[job,machine,:])
443
- tproc_max = np.max(self.ProcessingTime[job,machine,:])
444
- tproc_mean = np.mean(self.ProcessingTime[job,machine,:])
445
- window = self.ReleaseDueDate[job,machine,1] - self.ReleaseDueDate[job,machine,0]
446
- vectorization["min_window"] += window / tproc_max
447
- vectorization["max_window"] += window / tproc_min
448
- vectorization["mean_window"] += window / tproc_mean
449
- vectorization["min_window"] = vectorization["min_window"] / (self.numJobs * self.numMchs)
450
- vectorization["max_window"] = vectorization["max_window"] / (self.numJobs * self.numMchs)
451
- vectorization["mean_window"] = vectorization["mean_window"] / (self.numJobs * self.numMchs)
452
- # Overlap between operations
453
- for job1 in range(self.numJobs):
454
- for machine1 in range(self.numMchs):
455
- for job2 in range(job1 + 1, self.numJobs):
456
- diff = min(self.ReleaseDueDate[job1,machine1,1],self.ReleaseDueDate[job2,machine1,1])-max(self.ReleaseDueDate[job1,machine1,0], self.ReleaseDueDate[job2,machine1,0])
457
- if diff > 0:
458
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job1,machine1,1] - self.ReleaseDueDate[job1,machine1,0])
459
- vectorization["overlap"] += diff / (self.ReleaseDueDate[job2,machine1,1] - self.ReleaseDueDate[job2,machine1,0])
460
- vectorization["overlap"] = vectorization["overlap"] / (self.numJobs * (self.numJobs - 1) * self.numMchs)
461
- # Data statistics
462
- vectorization["max_processing_time_value"] = np.max(self.ProcessingTime)
463
- vectorization["min_processing_time_value"] = np.min(self.ProcessingTime)
464
- vectorization["mean_processing_time_value"] = np.mean(self.ProcessingTime)
465
-
466
- vectorization["max_energy_value"] = np.max(self.ProcessingTime)
467
- vectorization["min_energy_value"] = np.min(self.ProcessingTime)
468
- vectorization["mean_energy_value"] = np.mean(self.ProcessingTime)
469
- self.features = vectorization
470
- return vectorization
471
-
412
+
472
413
  def disjuntive_graph(self):
473
414
  vertex = list(range(self.numJobs * self.numMchs + 2))
474
415
  A = {v: [] for v in vertex}
@@ -524,31 +465,258 @@ class JSP:
524
465
  # jsp.fill_random_values(speed=3, rddd=2, distribution="uniform", seed=1234)
525
466
  # jsp.saveTaillardStandardFile("./output_taillard.txt")
526
467
 
468
+
469
+
470
+ #################################################################################
471
+ # #
472
+ # FJSP #
473
+ # #
474
+ #################################################################################
475
+
476
+
477
+ class FJSP(JSP):
478
+ def __init__(self, jobs, machines, ProcessingTime=np.array([]), EnergyConsumption=np.array([]), ReleaseDateDueDate=np.array([]), Orden=np.array([]), AvailableMachines = np.array([])) -> None:
479
+
480
+ super().__init__(jobs, machines, ProcessingTime, EnergyConsumption, ReleaseDateDueDate, Orden)
481
+ self.available_machines = AvailableMachines
482
+
483
+ def fill_random_values(self, speed, rddd, distribution, seed, tpm=[]):
484
+
485
+ super().fill_random_values(speed, rddd, distribution, seed, tpm)
486
+
487
+ self.available_machines = np.random.choice([0, 1], size=(self.numJobs, self.numMchs))
488
+
489
+ # Ensure every job (row) has at least one available machine
490
+ for job in range(self.numJobs):
491
+ if np.sum(self.available_machines[job, :]) == 0:
492
+ columna_aleatoria = np.random.randint(0, self.numMchs)
493
+ self.available_machines[job, columna_aleatoria] = 1
494
+
495
+ # def savePythonFile(self, path):
496
+ # with open(path, 'wb') as f:
497
+ # pickle.dump(self, f)
498
+
499
+ def saveJsonFile(self, path):
500
+ self.JSP = {
501
+ "nbJobs": list(range(self.numJobs)),
502
+ "nbMchs": list(range(self.numMchs)),
503
+ "speed": self.speed,
504
+ "timeEnergy": [],
505
+ "minMakespan": int(self.min_makespan),
506
+ "minEnergy": int(self.min_energy),
507
+ "maxMinMakespan": int(self.max_min_makespan),
508
+ "maxMinEnergy": int(self.max_min_energy)
509
+ }
510
+
511
+ for job in range(self.numJobs):
512
+ new = {
513
+ "jobId": job,
514
+ "operations": {},
515
+ "available_machines": self.available_machines[job, : ].tolist()
516
+ }
517
+
518
+ #new["available_machines_prueba"] = self.available_machines[job, self.Orden[job] ].tolist() #for i in self.Orden[job]]
519
+
520
+ for machine in self.Orden[job]:
521
+ machine = int(machine)
522
+ new["operations"][machine] = {"speed-scaling" :
523
+ [
524
+ {"procTime" : int(proc),
525
+ "energyCons" : int(energy)
526
+ }
527
+ for proc, energy in zip(self.ProcessingTime[job, machine],self.EnergyConsumption[job, machine])
528
+ ]
529
+ }
530
+ if self.rddd == 2:
531
+ new["operations"][machine]["release-date"] = int(self.ReleaseDueDate[job][machine][0])
532
+ new["operations"][machine]["due-date"] = int(self.ReleaseDueDate[job][machine][1])
533
+ if self.rddd == 1:
534
+ new["release-date"] = int(self.ReleaseDueDate[job][0])
535
+ new["due-date"] = int(self.ReleaseDueDate[job][1])
536
+ if self.rddd == 2:
537
+ new["release-date"] = int(min(self.ReleaseDueDate[job, :, 0]))
538
+ new["due-date"] = int(max(self.ReleaseDueDate[job, :, 1]))
539
+ self.JSP["timeEnergy"].append(new)
540
+
541
+ os.makedirs(os.path.dirname(path), exist_ok=True)
542
+ with open(path, 'w+') as f:
543
+ # Generate the JSON with standard indentation first
544
+ json_str = json.dumps(self.JSP, indent=4)
545
+
546
+ # Selective compression: only arrays of plain numbers
547
+ def compress_simple_arrays(match):
548
+ # Compress only if the array contains nothing but numbers and commas
549
+ content = match.group(1)
550
+ if re.match(r'^(\s*\d+\s*,)*\s*\d+\s*$', content):
551
+ # Strip spaces and line breaks
552
+ return '[' + re.sub(r'\s+', '', content) + ']'
553
+ return match.group(0) # Keep as-is if the array is not simple
554
+
555
+ # Find arrays that can be compressed
556
+ json_str = re.sub(
557
+ r'\[([\s\S]*?)\]',
558
+ compress_simple_arrays,
559
+ json_str,
560
+ flags=re.DOTALL
561
+ )
562
+
563
+ f.write(json_str)
564
+
565
+ def saveDznFile(self, InputDir, OutputDir):
566
+ indexProblema = OutputDir.split("/")[-2]
567
+ OutputDir = "/".join(OutputDir.split("/")[:-2])
568
+ # indexProblema = os.path.basename(os.path.normpath(OutputDir))
569
+ with open(f"{InputDir}", 'rb') as f:
570
+ data: FJSP = pickle.load(f)
571
+ # print(self.speed)
572
+ # for t in [0, 1, 2]:
573
+ t = data.rddd
574
+ for s in range(1,self.speed+1):
575
+ s0, sf, sp = [0,s,1]
576
+ time = data.ProcessingTime[:, :, s0:sf:sp]
577
+ energy = data.EnergyConsumption[:, :, s0:sf:sp]
578
+ precedence = np.full((data.numJobs, data.numMchs), 0)
579
+
580
+ replace_data = {
581
+ "machines": data.numMchs,
582
+ "jobs": data.numJobs,
583
+ "Speed": s,
584
+ "time": str(time.flatten()).replace(" ", ", "),
585
+ "energy": str(energy.flatten()).replace(" ", ", ")
586
+ }
587
+ if t == 1:
588
+ replace_data["releaseDate"] = str([int(data.ReleaseDueDate[job, 0]) for job in range(data.numJobs)])
589
+ replace_data["dueDate"] = str([int(data.ReleaseDueDate[job, 1]) for job in range(data.numJobs)])
590
+ elif t == 2:
591
+ replace_data["releaseDate"] = data.ReleaseDueDate[:, :, 0].flatten()
592
+ replace_data["dueDate"] = data.ReleaseDueDate[:, :, 1].flatten()
593
+
594
+ for job in range(data.numJobs):
595
+ for i, prioridad in enumerate(range(data.numMchs)):
596
+ precedence[job, data.Orden[job, prioridad]] = i
597
+ replace_data["precedence"] = str(precedence.flatten()).replace(" ", ", ")
598
+
599
+ replace_data["available_machines"] = str(data.available_machines.flatten()).replace(" ", ", ")
600
+
601
+ with open(f"./Minizinc/Types/RD/FJSP/type{t}.dzn", "r", encoding="utf-8") as file:
602
+ filedata = file.read()
603
+ for kk, v in replace_data.items():
604
+ filedata = filedata.replace("{" + kk + "}", str(v))
605
+
606
+ os.makedirs(f"{OutputDir}/", exist_ok=True)
607
+
608
+ with open(f"{OutputDir}/{indexProblema}-{t}-{s}.dzn", "w+", encoding="utf-8") as new:
609
+ new.write(filedata)
610
+ # print(f"{OutputDir}/{indexProblema}")
611
+ # with open(f"{OutputDir}/{indexProblema}", "wb") as new:
612
+ # pickle.dump(new_object, new)
613
+
614
+ def saveTaillardStandardFile(self, path):
615
+ os.makedirs("/".join(path.split("/")[:-1]),exist_ok=True)
616
+ with open(path, 'w+') as f:
617
+ # Write the header with the number of jobs and machines
618
+ f.write(f"Number of jobs: {self.numJobs}\n")
619
+ f.write(f"Number of machines: {self.numMchs}\n\n")
620
+
621
+ # Write the processing-time matrix
622
+ f.write("Processing times:\n")
623
+ for job in range(self.numJobs):
624
+ # Collect every processing time in this row
625
+ tiempos = []
626
+ for machine_index in range(self.numMchs):
627
+ machine = self.Orden[job, machine_index]
628
+ processing_time = self.ProcessingTime[job, machine, 0]
629
+ tiempos.append(str(processing_time))
630
+
631
+ # Join the times with commas and write them out
632
+ linea = ", ".join(tiempos)
633
+ f.write(linea + "\n")
634
+
635
+ f.write("\n")
636
+
637
+ # Write the energy-consumption matrix
638
+ f.write("Energy consumption:\n")
639
+ for job in range(self.numJobs):
640
+ consumos = []
641
+ for machine_index in range(self.numMchs):
642
+ machine = self.Orden[job, machine_index]
643
+ energy_consumption = self.EnergyConsumption[job, machine, 0]
644
+ consumos.append(str(energy_consumption))
645
+ f.write(", ".join(consumos) + "\n")
646
+
647
+ f.write("\n")
648
+
649
+ # Write the machine order for each job
650
+ f.write("Machine order:\n")
651
+ for job in range(self.numJobs):
652
+ maquinas = []
653
+ for machine_index in range(self.numMchs):
654
+ machine = self.Orden[job, machine_index]
655
+ maquinas.append(str(machine))
656
+ f.write(", ".join(maquinas) + "\n")
657
+
658
+ f.write("\n")
659
+
660
+ f.write("Available machines:\n")
661
+ for job in range(self.numJobs):
662
+ disponibles = []
663
+ for machine_index in range(self.numMchs):
664
+ disponibles.append(str(self.available_machines[job, machine_index]))
665
+ f.write(", ".join(disponibles) + "\n")
666
+
667
+
668
+
527
669
  class Generator:
528
- def __init__( self,json = False, dzn = False, taillard = False, savepath="./"):
670
+ def __init__(self, json = False, dzn = False, taillard = False, savepath = "./", single_folder_output = False):
529
671
  self.json = json
530
672
  self.dzn = dzn
531
673
  self.taillard = taillard
532
674
  self.savepath = savepath
675
+ self.single_folder_output = single_folder_output
533
676
 
534
- def generate_new_instance(self, jobs = 10, machines = 4,speed = 1, ReleaseDateDueDate = 0, distribution = "uniform" , seed = 0, tpm=[]):
535
- jsp = JSP(jobs=jobs, machines=machines)
536
- jsp.fill_random_values(speed = speed, rddd = ReleaseDateDueDate, distribution = distribution, seed = seed,tpm = tpm)
537
- if not (self.json or self.dzn or self.taillard): return jsp
538
-
539
- j = str(jobs)
540
- m = str(machines)
541
- jm_path = str(j)+"_"+str(m)+"/"
542
-
543
- i = seed
544
-
545
- if self.json:
546
- jsp.saveJsonFile(f"{self.savepath}/JSON/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.json")
547
- if self.dzn:
548
- pkl_path = f"{self.savepath}/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.pkl"
549
- jsp.savePythonFile(pkl_path)
550
- jsp.saveDznFile(pkl_path,f"{self.savepath}/DZN/"+jm_path)#f"{j}x{m}_{i}")
551
- os.remove(pkl_path)
552
- if self.taillard:
553
- jsp.saveTaillardStandardFile(f"{self.savepath}/TAILLARD/"+jm_path.split("/")[0]+f"_{j}x{m}_{i}.txt")
554
- return jsp
677
+ def generate_new_instance(self, jobs = 10, machines = 4, speed = 1, ReleaseDateDueDate = 0, distribution = "uniform" , seed = 0, tpm=[], instance_type = "JSP", size = 1):
678
+
679
+ match instance_type:
680
+ case "JSP":
681
+ jsp_instance = JSP(jobs = jobs, machines = machines)
682
+ case "FJSP":
683
+ jsp_instance = FJSP(jobs = jobs, machines = machines)
684
+
685
+ for index in range(1, size + 1):
686
+
687
+ jsp_instance.fill_random_values(speed = speed, rddd = ReleaseDateDueDate, distribution = distribution, seed = seed,tpm = tpm)
688
+
689
+ # Build the output path from the `savepath` template and the current parameters
690
+ problem_path = self.savepath.format(size = size, jobs =jobs, machines = machines, release_due_date = ReleaseDateDueDate,
691
+ speed_scaling = speed, distribution = distribution, seed=seed)
692
+
693
+ if not (self.json or self.dzn or self.taillard): return jsp_instance
694
+
695
+ j = str(jobs)
696
+ m = str(machines)
697
+ jm_path = str(j)+"_"+str(m)+"/"
698
+
699
+ i = seed
700
+
701
+ if self.single_folder_output:
702
+ if self.json:
703
+ jsp_instance.saveJsonFile(problem_path + jm_path.split("/")[0] + f"_{j}x{m}_{i}.json")
704
+ if self.dzn:
705
+ pkl_path = f"{problem_path}/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.pkl"
706
+ jsp_instance.savePythonFile(pkl_path)
707
+ jsp_instance.saveDznFile(pkl_path, problem_path + jm_path)#f"{j}x{m}_{i}")
708
+ os.remove(pkl_path)
709
+ if self.taillard:
710
+ jsp_instance.saveTaillardStandardFile(problem_path + jm_path.split("/")[0] + f"_{j}x{m}_{i}.txt")
711
+ else:
712
+ if self.json:
713
+ jsp_instance.saveJsonFile(f"{problem_path}/JSON/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.json")
714
+ if self.dzn:
715
+ pkl_path = f"{problem_path}/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.pkl"
716
+ jsp_instance.savePythonFile(pkl_path)
717
+ jsp_instance.saveDznFile(pkl_path,f"{problem_path}/DZN/" + jm_path)#f"{j}x{m}_{i}")
718
+ os.remove(pkl_path)
719
+ if self.taillard:
720
+ jsp_instance.saveTaillardStandardFile(f"{problem_path}/TAILLARD/" + jm_path.split("/")[0] + f"_{j}x{m}_{i}.txt")
721
+
722
+ return jsp_instance
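In `FJSP.fill_random_values`, the random 0/1 `available_machines` matrix could leave a job with no eligible machine, so the repair loop above switches one machine on for each empty row. A self-contained sketch of that guarantee (the helper name is illustrative, not part of the package):

```python
import numpy as np

def ensure_one_machine_per_job(available_machines: np.ndarray) -> np.ndarray:
    # available_machines has shape (numJobs, numMchs) with 0/1 entries.
    # If a job's row is all zeros, switch on one machine chosen at random.
    for job in range(available_machines.shape[0]):
        if available_machines[job, :].sum() == 0:
            machine = np.random.randint(0, available_machines.shape[1])
            available_machines[job, machine] = 1
    return available_machines

matrix = np.random.choice([0, 1], size=(3, 3))
print(ensure_one_machine_per_job(matrix))
```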
IGJSP/main.py CHANGED
@@ -18,10 +18,7 @@ try:
18
18
  parser.add_argument('-M','--machines', type=json.loads, default='[4]')
19
19
  # Seed
20
20
  parser.add_argument('-s','--seeds', type= json.loads, default='[0]')
21
- # Problem type
22
- # 1 - JSP, 2 - FlowJSP, 3 - FlexibleJSP, 4 - OpenJSP
23
- # parser.add_argument('-T','--type', type=int, default=1)
24
- # Difficulty levels
21
+
25
22
  # Speed Scaling
26
23
  parser.add_argument('-S', '--speed-scaling', type=int, default=1)
27
24
 
@@ -29,7 +26,7 @@ try:
29
26
  # 0 -> Infinite time horizon (no release/due dates)
30
27
  # 1 -> Time window per job
31
28
  # 2 -> Time window per task of each job
32
- parser.add_argument('-RDDD', '--release-due', type=int, default=0)
29
+ parser.add_argument('-RDDD', '--release-due', type=int, default=0, choices=[0, 1, 2])
33
30
  # Time
34
31
  # parser.add_argument('-Ti', '--time', type=int, default=0)
35
32
  # Path
@@ -48,7 +45,16 @@ try:
48
45
  # Taillard save
49
46
  parser.add_argument('-t','--taillard', type=bool, default=False)
50
47
 
48
+ # Instance type (JSP or FJSP)
49
+ parser.add_argument('-T', '--type', type=int, default=1, choices=[1, 2])
50
+
51
51
  args = parser.parse_args()
52
+
53
+ type_dict = {
54
+ 1: "JSP",
55
+ 2: "FJSP"
56
+ }
57
+
52
58
  np.random.seed(args.seeds)
53
59
 
54
60
  start = time.time()
@@ -64,7 +70,8 @@ try:
64
70
  args.seeds = args.seeds+ list(p_s[:args.quantity-len(args.seeds)])
65
71
  for i in trange(args.quantity,desc='Quantity',leave=False):
66
72
  generator.savepath = args.path+"/instancesGenerated_"+str(i)
67
- generator.generate_new_instance(jobs=j, machines=m,ReleaseDateDueDate =np.array(args.release_due),speed = args.speed_scaling,distribution = args.distribution, seed = args.seeds[i])
73
+ generator.generate_new_instance(jobs=j, machines=m, ReleaseDateDueDate=np.array(args.release_due), speed = args.speed_scaling,
74
+ distribution=args.distribution, seed=args.seeds[i], instance_type=type_dict[args.type])
68
75
 
69
76
  except Exception as e:
70
77
  raise
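The new `-T/--type` argument selects between JSP and FJSP through `type_dict`. A hedged sketch of the equivalent programmatic call; the argument values are examples, not CLI defaults:

```python
from IGJSP.generador import Generator

# On the command line, -T 2 maps to "FJSP" through type_dict; -T 1 (the default) maps to "JSP".
type_dict = {1: "JSP", 2: "FJSP"}

generator = Generator(json=True, savepath="./output/instancesGenerated_0")
generator.generate_new_instance(
    jobs=10, machines=4,
    ReleaseDateDueDate=1, speed=2,
    distribution="uniform", seed=0,
    instance_type=type_dict[2],
)
```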
@@ -0,0 +1,304 @@
1
+ Metadata-Version: 2.4
2
+ Name: IGJSP
3
+ Version: 1.0.0
4
+ Summary: Instance generator for JSP
5
+ Project-URL: Homepage, https://gps.blogs.upv.es/
6
+ Author-email: GPS-UPV <gps@dsic.upv.es>
7
+ License-File: LICENSE
8
+ Classifier: License :: OSI Approved :: MIT License
9
+ Classifier: Operating System :: OS Independent
10
+ Classifier: Programming Language :: Python :: 3
11
+ Requires-Python: >=3.9
12
+ Requires-Dist: numpy
13
+ Requires-Dist: scipy
14
+ Requires-Dist: tqdm
15
+ Description-Content-Type: text/markdown
16
+
17
+ # Instance Generator for JSP & FJSP (Energy‑aware)
18
+
19
+ ## Description
20
+
21
+ Instance generator for the **Job Shop Scheduling Problem (JSP)** and the **Flexible Job Shop Scheduling Problem (FJSP)** with speed‑scaling and optional release/due dates. The generator produces instances in **JSON**, **MiniZinc DZN**, and **Taillard-like text** formats, and it is designed for reproducible experiments via a random seed.
22
+
23
+ ### Key features
24
+
25
+ - Supports **JSP** and **FJSP** (`instance_type="JSP"` or `instance_type="FJSP"`).
26
+ - **Energy‑aware speed scaling**: each operation can be executed at one of several speeds; processing time and energy consumption are linked per speed.
27
+ - **Release/Due date modes** (`ReleaseDateDueDate`): `0` (none), `1` (per job), `2` (per operation).
28
+ - Multiple output formats: **JSON**, **DZN** (MiniZinc templates), **Taillard-style** text.
29
+ - **Distributions** for data generation: `uniform`, `normal`, `exponential`.
30
+ - Reproducibility via **seed**.
31
+ - **FJSP** adds a per‑job binary vector of **available machines**.
32
+
33
+ > **About value ranges & scaling**
34
+ >
35
+ > - With the **uniform** distribution, base operation costs are sampled within **[10, 100]**.
36
+ > - Initial *release times* are sampled from **[0, 100]** (in steps of 10) and then normalized to start at 0.
37
+ > - **Energy consumption** values are normalized into **[1, 100]** by construction.
38
+ > - **Processing times** are derived from base costs and speed scaling and **are not capped** at 100 (they can exceed 100), especially with `normal`/`exponential` draws.
39
+ >
40
+ > As a result, energy values and the initial release‑date seeds are within 0–100; if you need strict 0–100 ranges for *all* fields, set an external rescaling on the produced arrays or constrain generation to `distribution="uniform"` and adjust your post‑processing accordingly.
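For context on the speed dimension mentioned above: the energy factor per speed level comes from a simple linear partition, as visible in `_particionate_speed_space` in `generador.py` from this diff; the matching time factors are derived analogously in the generator but are not shown here. A minimal sketch:

```python
import numpy as np

def speed_energy_factors(speed: int):
    # Mirrors the energy-per-speed partition visible in _particionate_speed_space:
    # higher speed settings scale energy consumption up (factors from 0.5 to 3).
    return np.linspace(0.5, 3, speed) if speed > 1 else [1]

print(speed_energy_factors(1))  # [1]
print(speed_energy_factors(3))  # [0.5  1.75 3.  ]
```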
41
+
42
+ ---
43
+
44
+ ## Python API
45
+
46
+ ### Generator initialization
47
+
48
+ ```python
49
+ from IGJSP.generador import Generator
50
+
51
+ gen = Generator(
52
+ json=False, # write JSON files
53
+ dzn=False, # write DZN files (MiniZinc)
54
+ taillard=False, # write Taillard-like txt
55
+ savepath="./output", # base output directory/template
56
+ single_folder_output=False # put artifacts in a single folder
57
+ )
58
+ ```
59
+
60
+ ### Instance creation
61
+
62
+ ```python
63
+ obj = gen.generate_new_instance(
64
+ jobs=10, machines=4,
65
+ speed=1, # number of speed levels
66
+ ReleaseDateDueDate=0, # 0 (none), 1 (per job), 2 (per operation)
67
+ distribution="uniform", # 'uniform' | 'normal' | 'exponential'
68
+ seed=1,
69
+ tpm=[], # optional per-machine time scale
70
+ instance_type="JSP", # 'JSP' (default) or 'FJSP'
71
+ size=1 # how many instances to emit (looped)
72
+ )
73
+ ```
74
+
75
+ If all three output flags (`json`, `dzn`, `taillard`) are `False`, the function returns the in‑memory instance object (`JSP` or `FJSP`). Otherwise, it writes files under `savepath` and returns the last instance created.
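As a quick sanity check of the in-memory object, the main arrays can be inspected directly. A hedged sketch: the attribute names (`numJobs`, `numMchs`, `ProcessingTime`, `EnergyConsumption`, `Orden`) are taken from `generador.py` in this diff, and the shapes in the comments are the `(jobs, machines, speeds)` layout implied by its indexing.

```python
from IGJSP.generador import Generator

gen = Generator()  # all output flags default to False, so nothing is written to disk
jsp = gen.generate_new_instance(jobs=4, machines=2, speed=2, seed=53)

print(jsp.numJobs, jsp.numMchs)       # 4 2
print(jsp.ProcessingTime.shape)       # expected (jobs, machines, speeds) -> (4, 2, 2)
print(jsp.EnergyConsumption.shape)    # expected (4, 2, 2)
print(jsp.Orden[0])                   # machine visiting order of job 0
```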
76
+
77
+ ---
78
+
79
+ ## Generating a JSP problem instance
80
+
81
+ To generate an instance of the problem, use the `Generator` class (module `IGJSP.generador`). Initialize it and then call `generate_new_instance` with the parameters below.
82
+
83
+ ### Parameters (generation)
84
+
85
+ 1. **Jobs:** `jobs` — number of jobs. Default: `10`
86
+ 2. **Machines:** `machines` — number of machines. Default: `4`
87
+ 3. **Release and Due Date:** `ReleaseDateDueDate`
88
+ - `0`: neither jobs nor operations have release/due times (default)
89
+ - `1`: each job has a release and due date
90
+ - `2`: each operation has a release and due date
91
+ 4. **Speeds:** `speed` — number of speed levels. Default: `1`
92
+ 5. **Distribution:** `distribution` — `uniform`, `normal`, or `exponential`. Default: `uniform`
93
+ 6. **Seed:** `seed` — random seed for reproducibility. Default: `0`
94
+
95
+ ### Parameters (output)
96
+
97
+ - **JSON:** `json` (bool) — write JSON file(s). Default: `False`
98
+ - **DZN:** `dzn` (bool) — write MiniZinc DZN file(s). Default: `False`
99
+ - **Taillard:** `taillard` (bool) — write Taillard-like text file. Default: `False`
100
+ - **Save Path:** `savepath` (str) — base path/template for outputs. Default: `./`
101
+ - **Single folder:** `single_folder_output` (bool) — whether to write all artifacts into a single folder. Default: `False`
102
+
103
+ ### Example (JSP)
104
+
105
+ ```python
106
+ from IGJSP.generador import Generator
107
+ generator = Generator(json=True, savepath="output")
108
+ generator.generate_new_instance(
109
+ jobs=4, machines=2,
110
+ ReleaseDateDueDate=2,
111
+ distribution="exponential",
112
+ seed=53
113
+ )
114
+ ```
115
+
116
+ ### Example of JSON generated (JSP)
117
+
118
+ ```json
119
+ {
120
+ "nbJobs": [
121
+ 0,
122
+ 1
123
+ ],
124
+ "nbMchs": [
125
+ 0,
126
+ 1,
127
+ 2,
128
+ 3
129
+ ],
130
+ "speed": 1,
131
+ "timeEnergy": [
132
+ {
133
+ "jobId": 0,
134
+ "operations": {
135
+ "0": {
136
+ "speed-scaling": [
137
+ {
138
+ "procTime": 8,
139
+ "energyCons": 92
140
+ }
141
+ ],
142
+ "release-date": 30,
143
+ "due-date": 41
144
+ },
145
+ "2": {
146
+ "speed-scaling": [
147
+ {
148
+ "procTime": 17,
149
+ "energyCons": 84
150
+ }
151
+ ],
152
+ "release-date": 41,
153
+ "due-date": 77
154
+ },
155
+ "3": {
156
+ "speed-scaling": [
157
+ {
158
+ "procTime": 3,
159
+ "energyCons": 97
160
+ }
161
+ ],
162
+ "release-date": 77,
163
+ "due-date": 80
164
+ },
165
+ "1": {
166
+ "speed-scaling": [
167
+ {
168
+ "procTime": 7,
169
+ "energyCons": 93
170
+ }
171
+ ],
172
+ "release-date": 80,
173
+ "due-date": 88
174
+ }
175
+ },
176
+ "release-date": 30,
177
+ "due-date": 88
178
+ },
179
+ {
180
+ "jobId": 1,
181
+ "operations": {
182
+ "1": {
183
+ "speed-scaling": [
184
+ {
185
+ "procTime": 4,
186
+ "energyCons": 96
187
+ }
188
+ ],
189
+ "release-date": 0,
190
+ "due-date": 5
191
+ },
192
+ "3": {
193
+ "speed-scaling": [
194
+ {
195
+ "procTime": 3,
196
+ "energyCons": 97
197
+ }
198
+ ],
199
+ "release-date": 5,
200
+ "due-date": 9
201
+ },
202
+ "2": {
203
+ "speed-scaling": [
204
+ {
205
+ "procTime": 1,
206
+ "energyCons": 99
207
+ }
208
+ ],
209
+ "release-date": 9,
210
+ "due-date": 10
211
+ },
212
+ "0": {
213
+ "speed-scaling": [
214
+ {
215
+ "procTime": 6,
216
+ "energyCons": 94
217
+ }
218
+ ],
219
+ "release-date": 10,
220
+ "due-date": 17
221
+ }
222
+ },
223
+ "release-date": 0,
224
+ "due-date": 17
225
+ }
226
+ ],
227
+ "minMakespan": 35,
228
+ "minEnergy": 752,
229
+ "maxMinMakespan": 14,
230
+ "maxMinEnergy": 0
231
+ }
232
+ ```
233
+
234
+ ---
235
+
236
+ ## Generating an FJSP problem instance
237
+
238
+ Set `instance_type="FJSP"` to enable flexible routing. In the JSON output, each job includes an `available_machines` binary vector of length `nbMchs`, indicating where the job's operations can be processed (`1` = available, `0` = not available).
239
+
240
+ ### Example (FJSP)
241
+
242
+ ```python
243
+ from IGJSP.generador import Generator
244
+ generator = Generator(json=True, savepath="output")
245
+ generator.generate_new_instance(
246
+ jobs=3, machines=3,
247
+ speed=1,
248
+ ReleaseDateDueDate=0,
249
+ distribution="uniform",
250
+ seed=7,
251
+ instance_type="FJSP"
252
+ )
253
+ ```
254
+
255
+ ### Example of JSON generated (FJSP)
256
+
257
+ Abridged example to illustrate the additional `available_machines` field:
258
+
259
+ ```json
260
+ {
261
+ "nbJobs": [0,1,2],
262
+ "nbMchs": [0,1,2],
263
+ "speed": 1,
264
+ "timeEnergy": [
265
+ {
266
+ "jobId": 0,
267
+ "available_machines": [1,0,1],
268
+ "operations": {
269
+ "0": { "speed-scaling": [ { "procTime": 12, "energyCons": 90 } ] },
270
+ "2": { "speed-scaling": [ { "procTime": 18, "energyCons": 84 } ] },
271
+ "1": { "speed-scaling": [ { "procTime": 11, "energyCons": 89 } ] }
272
+ }
273
+ },
274
+ {
275
+ "jobId": 1,
276
+ "available_machines": [1,1,0],
277
+ "operations": {
278
+ "2": { "speed-scaling": [ { "procTime": 7, "energyCons": 93 } ] },
279
+ "0": { "speed-scaling": [ { "procTime": 5, "energyCons": 95 } ] },
280
+ "1": { "speed-scaling": [ { "procTime": 13, "energyCons": 88 } ] }
281
+ }
282
+ }
283
+ ],
284
+ "minMakespan": 123,
285
+ "minEnergy": 456,
286
+ "maxMinMakespan": 78,
287
+ "maxMinEnergy": 90
288
+ }
289
+ ```
290
+
291
+ ---
292
+
293
+ ## Notes on outputs
294
+
295
+ - **JSON**: Contains `nbJobs`, `nbMchs`, `speed`, and a `timeEnergy` list with per‑job `operations`. For `ReleaseDateDueDate=1` (per job) or `2` (per operation), `release-date`/`due-date` fields are added accordingly. A short reading sketch follows this list.
296
+ - **DZN**: The generator writes `.dzn` files using templates bundled with the package (under `IGJSP/Minizinc/Types/RD/JSP/` and `IGJSP/Minizinc/Types/RD/FJSP/`), parameterized by the selected RD mode and speed levels.
297
+ - **Taillard-like**: Writes textual matrices for processing times, energy consumption, and the job‑specific machine order; the FJSP variant also appends an `Available machines:` section (binary rows per job).
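As referenced in the JSON bullet above, a short sketch for reading a generated JSON file back; the path below is illustrative (it matches the JSP example earlier in this README, jobs=4, machines=2, seed=53):

```python
import json

# Path is illustrative; use the file written under <savepath>/JSON/.
with open("output/JSON/4_2_4x2_53.json") as f:
    instance = json.load(f)

for job in instance["timeEnergy"]:
    for machine, op in job["operations"].items():
        fastest = min(level["procTime"] for level in op["speed-scaling"])
        print(f'job {job["jobId"]} on machine {machine}: fastest procTime = {fastest}')
```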
298
+
299
+ ---
300
+
301
+ ## Reproducibility & scaling tips
302
+
303
+ - Use a fixed `seed` to reproduce instances exactly.
304
+ - For tighter value ranges (e.g., unit testing), prefer `distribution="uniform"` and post‑scale arrays if you require strict bounds (e.g., map processing times to `[1,100]` after generation). Energy values are already normalized to `[1,100]` by design.
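A minimal sketch of the post-scaling suggested above, mapping an array onto [1, 100] with a min-max transform; this helper is not part of the package:

```python
import numpy as np

def rescale_to_range(values: np.ndarray, low: float = 1.0, high: float = 100.0) -> np.ndarray:
    # Min-max rescaling; falls back to the lower bound when all values are equal.
    vmin, vmax = values.min(), values.max()
    if vmax == vmin:
        return np.full_like(values, low, dtype=float)
    return low + (values - vmin) * (high - low) / (vmax - vmin)

times = np.array([8.0, 17.0, 3.0, 120.0])
print(rescale_to_range(times))  # values mapped into [1, 100]: 3 -> 1.0, 120 -> 100.0
```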
@@ -0,0 +1,18 @@
1
+ IGJSP/generador.py,sha256=AuiRTZ-tfRVt8U0-EECznq85NtDZsLyZLCAIjM1U8-8,34124
2
+ IGJSP/main.py,sha256=Sia5Ss8O3HWBdshvPLJKUMaZIoQPHy6x8yzvojojPFo,2838
3
+ IGJSP/Minizinc/Models/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
4
+ IGJSP/Minizinc/Models/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
5
+ IGJSP/Minizinc/Models/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
6
+ IGJSP/Minizinc/Types/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
7
+ IGJSP/Minizinc/Types/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
8
+ IGJSP/Minizinc/Types/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
9
+ IGJSP/Minizinc/Types/RD/FJSP/type0.dzn,sha256=lhHhITCHzWrChnmOsyVMPoNGdIJ9S09bg9JfmwgY16Q,330
10
+ IGJSP/Minizinc/Types/RD/FJSP/type1.dzn,sha256=bUGYq03ZlQId5xdDwEy5s2Y_iE0LkVZO3camiGKva2A,400
11
+ IGJSP/Minizinc/Types/RD/FJSP/type2.dzn,sha256=Wz1MnkSL5GUPsbh1eq0leoaQRImkNqQqkXh9fWoGhRM,446
12
+ IGJSP/Minizinc/Types/RD/JSP/type0.dzn,sha256=wNuPQkXBXPSpPaPz2WFhp4pGDgfSimtg4I93UfwC01Q,263
13
+ IGJSP/Minizinc/Types/RD/JSP/type1.dzn,sha256=Xbt9StzCgEqqh_HS9tWGrTVtu-OEnf5Yq5Ty91AkzoM,333
14
+ IGJSP/Minizinc/Types/RD/JSP/type2.dzn,sha256=L2nc7bPJEhyuaEwgw0ZCpC52CpVJILQU_WQdKn8GUZs,379
15
+ igjsp-1.0.0.dist-info/METADATA,sha256=zU57DaxL2XUGadX8d5Bxlyfo8uUxgD1X_f43gKDMSns,10609
16
+ igjsp-1.0.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
17
+ igjsp-1.0.0.dist-info/licenses/LICENSE,sha256=f7RDRO-z_nMoooAya7NAb8sXtrHR6WnttYtyUc9fB-c,1116
18
+ igjsp-1.0.0.dist-info/RECORD,,
@@ -1,8 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
@@ -1,10 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
9
- releaseDate = {releaseDate};
10
- dueDate = {dueDate};
@@ -1,10 +0,0 @@
1
- %SETS
2
- JOBS = 1..{jobs};
3
- MACHINES = 1..{machines};
4
- SPEED = {Speed};
5
-
6
- time = array3d(JOBS,MACHINES,1..SPEED,{time});
7
- energy = array3d(JOBS,MACHINES,1..SPEED,{energy});
8
- precedence = array2d(JOBS,MACHINES,{precedence});
9
- releaseDate = array2d(JOBS,MACHINES,{releaseDate});
10
- dueDate = array2d(JOBS,MACHINES,{dueDate});
@@ -1,216 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: IGJSP
3
- Version: 0.0.9
4
- Summary: Instance generator for JSP
5
- Project-URL: Homepage, https://gps.blogs.upv.es/
6
- Author-email: GPS-UPV <gps@dsic.upv.es>
7
- License-File: LICENSE
8
- Classifier: License :: OSI Approved :: MIT License
9
- Classifier: Operating System :: OS Independent
10
- Classifier: Programming Language :: Python :: 3
11
- Requires-Python: >=3.9
12
- Requires-Dist: numpy
13
- Requires-Dist: scipy
14
- Requires-Dist: tqdm
15
- Description-Content-Type: text/markdown
16
-
17
- # Instance Generator Job Shop Scheduling
18
-
19
- ## Description
20
- Benchmark generator for the Job Shop Problem (BG-JSP)
21
-
22
- ## Generating a JSP problem instance
23
-
24
- To generate an instance of the problem, we will use the Generator class, located in the Generador module.
25
- To do this, we initialize the generator, giving it the following parameters:
26
-
27
- 1. **JSON:`json`**
28
- - **Description**: Parameter that indicates if the generated instance will be stored in JSON format.
29
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
30
- - **Example of possible values**: `True`, `False`
31
- - **Default value**: `False`
32
-
33
- 2. **DZN:`dzn`**
34
- - **Description**: Parameter that indicates if the generated instance will be stored in DZN format.
35
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
36
- - **Example of possible values**: `True`, `False`
37
- - **Default value**: `False`
38
-
39
- 3. **Taillard:`taillard`**
40
- - **Description**: Parameter that indicates if the generated instance will be stored in taillard format.
41
- - **Possible values**: Boolean value. Only the values True or False can be obtained.
42
- - **Example of possible values**: `True`, `False`
43
- - **Default value**: `False`
44
-
45
- 4. **Save Path:`savepath`**
46
- - **Description**: Path where the problem instance file will be generated.
47
- - **Possible values**: String.
48
- - **Example of possible values**: `./problems`, `./instances`
49
- - **Default value**: `./output`
50
-
51
-
52
- Once the generator has been initialized, we proceed to generate different instances of the JSP problem with different values for this initialization; for that we use the following function using the following parameters to customize the generated instances:
53
-
54
- 1. **Jobs:`jobs`**
55
- - **Description**: Number of jobs that will have the problem generated.
56
- - **Possible values**: Integer value.
57
- - **Example of possible values**: `3`, `4`.
58
- - **Default value**: `10`
59
-
60
- 2. **Machines:`machines`**
61
- - **Description**: Number of machines that will have the problem generated.
62
- - **Possible values**: Integer value.
63
- - **Example of possible values**: `6`, `2`.
64
- - **Default value**: `4`
65
-
66
- 3. **Release and Due Date:`ReleaseDateDueDate`**
67
- - **Description**: Sets whether each job or operation has a release time and a due-date limit.
68
- - **Possible values**:
69
- - `0`: Neither the works nor the operations of each of them will have an instant release or time limit for completion.
70
- - `1`: The work will have an instant of release and instant of completion limit.
71
- - `2`: The operations of each job will have a release time and a limiting end time.
72
- - **Example of possible values**: `1`, `2`
73
- - **Default value**: `0`
74
-
75
- 4. **Speeds:`speed`**
76
- - **Description**: Number of speeds that will be available to perform each task.
77
- - **Possible values**: Integer value.
78
- - **Example of possible values**: `3`, `5`
79
- - **Default value**: `1`
80
-
81
- 5. **Distribution:`distribution`**
82
- - **Description**: Type of distribution to be followed for data generation.
83
- - **Possible values**: You can only set one of the following values: `uniform` `normal` `exponential.`
84
- - **Example of possible values**: `uniform`, `exponential`
85
- - **Default value**: `normal`
86
-
87
- 6. **Seed:`seed`**
88
- - **Description**: Base number for data generation.
89
- - **Possible values**: Integer value.
90
- - **Example of possible values**: `84`, `32`
91
- - **Default value**: `1`
92
-
93
- ## Example of JSON generated
94
-
95
- This JSON shows how the data generated from a JSP problem with 2 machines and 4 jobs will look. For this generation, we have the following code:
96
- ``` python
97
- from IGJSP.generador import Generator
98
- generator = Generator(json=True,savepath="output")
99
- generator.generate_new_instance(jobs=4,machines=2,ReleaseDateDueDate=2,distribution="exponential",seed=53)
100
- ```
101
-
102
- ```json
103
- {
104
- "nbJobs": [
105
- 0,
106
- 1
107
- ],
108
- "nbMchs": [
109
- 0,
110
- 1,
111
- 2,
112
- 3
113
- ],
114
- "speed": 1,
115
- "timeEnergy": [
116
- {
117
- "jobId": 0,
118
- "operations": {
119
- "0": {
120
- "speed-scaling": [
121
- {
122
- "procTime": 8,
123
- "energyCons": 92
124
- }
125
- ],
126
- "release-date": 30,
127
- "due-date": 41
128
- },
129
- "2": {
130
- "speed-scaling": [
131
- {
132
- "procTime": 17,
133
- "energyCons": 84
134
- }
135
- ],
136
- "release-date": 41,
137
- "due-date": 77
138
- },
139
- "3": {
140
- "speed-scaling": [
141
- {
142
- "procTime": 3,
143
- "energyCons": 97
144
- }
145
- ],
146
- "release-date": 77,
147
- "due-date": 80
148
- },
149
- "1": {
150
- "speed-scaling": [
151
- {
152
- "procTime": 7,
153
- "energyCons": 93
154
- }
155
- ],
156
- "release-date": 80,
157
- "due-date": 88
158
- }
159
- },
160
- "release-date": 30,
161
- "due-date": 88
162
- },
163
- {
164
- "jobId": 1,
165
- "operations": {
166
- "1": {
167
- "speed-scaling": [
168
- {
169
- "procTime": 4,
170
- "energyCons": 96
171
- }
172
- ],
173
- "release-date": 0,
174
- "due-date": 5
175
- },
176
- "3": {
177
- "speed-scaling": [
178
- {
179
- "procTime": 3,
180
- "energyCons": 97
181
- }
182
- ],
183
- "release-date": 5,
184
- "due-date": 9
185
- },
186
- "2": {
187
- "speed-scaling": [
188
- {
189
- "procTime": 1,
190
- "energyCons": 99
191
- }
192
- ],
193
- "release-date": 9,
194
- "due-date": 10
195
- },
196
- "0": {
197
- "speed-scaling": [
198
- {
199
- "procTime": 6,
200
- "energyCons": 94
201
- }
202
- ],
203
- "release-date": 10,
204
- "due-date": 17
205
- }
206
- },
207
- "release-date": 0,
208
- "due-date": 17
209
- }
210
- ],
211
- "minMakespan": 35,
212
- "minEnergy": 752,
213
- "maxMinMakespan": 14,
214
- "maxMinEnergy": 0
215
- }
216
- ```
@@ -1,15 +0,0 @@
1
- IGJSP/generador.py,sha256=Cwuhy7U15OOzYJ-hpFvGutaiB1Q2PMtW6KaUt4GmoOo,27517
2
- IGJSP/main.py,sha256=qnpAdI9Nymfbb2t674EHKCZr1vG80tGyVT5RKMiGvZM,2727
3
- IGJSP/Minizinc/Models/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
4
- IGJSP/Minizinc/Models/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
5
- IGJSP/Minizinc/Models/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
6
- IGJSP/Minizinc/Types/RD/JSP0.mzn,sha256=cfN_E3RQ6nBulGfaOOYTd-zAgA5SI6E2saDlYtKCflg,2282
7
- IGJSP/Minizinc/Types/RD/JSP1.mzn,sha256=5B8cyw2WyKR8yEL1fFd0TaCAVhjPoxEJRJDPPEjJGEk,2840
8
- IGJSP/Minizinc/Types/RD/JSP2.mzn,sha256=ATqpg-Ow_VzjQJ_hZSSXiTXmwmqNqFsq81TZgcnjTks,2941
9
- IGJSP/Minizinc/Types/RD/type0.dzn,sha256=alo54TrPd2svp-YeOq8rNhb42Aj5w4N1y1WYRxZ-aWM,225
10
- IGJSP/Minizinc/Types/RD/type1.dzn,sha256=r3f1uOoDnscjpXHbpvOWPGhGN0tmwkJ3NYIRP-NGk5M,298
11
- IGJSP/Minizinc/Types/RD/type2.dzn,sha256=ddJxLqR_WlUpycHhA_QulxiTx3o9neIzNESEt59VUx0,344
12
- igjsp-0.0.9.dist-info/METADATA,sha256=astGQyCgjsoLf8uA-25Ezh1M0qTidVN9LHZenflRWJU,7406
13
- igjsp-0.0.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
14
- igjsp-0.0.9.dist-info/licenses/LICENSE,sha256=f7RDRO-z_nMoooAya7NAb8sXtrHR6WnttYtyUc9fB-c,1116
15
- igjsp-0.0.9.dist-info/RECORD,,
File without changes