drap-0.0.4.post7-py3-none-any.whl → drap-0.0.4.post9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
drap/utils.py CHANGED
@@ -1,6 +1,8 @@
1
- from PyQt5.QtWidgets import QApplication, QMainWindow, QLabel, QVBoxLayout, QWidget, QPushButton, QFileDialog, QMessageBox, QLineEdit, QHBoxLayout, QGroupBox, QCheckBox
1
+ from PyQt5.QtWidgets import (QApplication, QProgressDialog, QMainWindow, QLabel, QVBoxLayout, QWidget, QPushButton, QFileDialog,
2
+ QMessageBox, QLineEdit, QHBoxLayout, QGroupBox, QCheckBox, QSlider, QDialog, QDialogButtonBox,
3
+ QComboBox)
2
4
  from PyQt5.QtGui import QPixmap, QPainter, QPen, QImage, QMouseEvent, QColor
3
- from PyQt5.QtCore import Qt, QPoint, QRect, QFileInfo
5
+ from PyQt5.QtCore import Qt, QPoint, QRect, QFileInfo, QTimer, QEvent
4
6
  from fabio.edfimage import EdfImage
5
7
  import tkinter as tk
6
8
  from tkinter import filedialog
@@ -24,38 +26,87 @@ import re
24
26
  import argparse
25
27
  import cv2
26
28
  import matplotlib
29
+ import subprocess
30
+ import shutil
31
+ import tempfile
32
+ from typing import Iterable, Set, Tuple, Optional, Callable, List
33
+ from textwrap import dedent
27
34
 
28
35
  import matplotlib.pyplot as plt
29
36
 
30
37
  matplotlib.use('Agg')
31
38
 
32
39
 
33
- from PyQt5.QtWidgets import QApplication
34
- import sys
35
40
 
36
41
 
42
+ # Map codec → recommended file extension
43
+ CODEC_EXTENSIONS = {
44
+ "mp4v": ".mp4",
45
+ "avc1": ".mp4",
46
+ "H264": ".mp4",
47
+ "XVID": ".avi",
48
+ "MJPG": ".avi",
49
+ "DIVX": ".avi",
50
+ "WMV1": ".avi",
51
+ "WMV2": ".avi",
52
+ }
53
+
54
+ class CodecDialog(QDialog):
55
+ def __init__(self, available_codecs, parent=None):
56
+ super().__init__(parent)
57
+ self.setWindowTitle("Escolher Codec de Vídeo")
58
+ self.setMinimumWidth(300)
59
+
60
+ layout = QVBoxLayout()
61
+
62
+ # Instruction text
63
+ layout.addWidget(QLabel("Selecione o codec e a extensão para salvar o vídeo:"))
64
+
65
+ # Combo box for codecs
66
+ self.codec_combo = QComboBox()
67
+ for codec in available_codecs:
68
+ ext = CODEC_EXTENSIONS.get(codec, ".avi")
69
+ self.codec_combo.addItem(f"{codec} → {ext}", (codec, ext))
70
+ layout.addWidget(self.codec_combo)
71
+
72
+ # OK/Cancel buttons
73
+ buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
74
+ buttons.accepted.connect(self.accept)
75
+ buttons.rejected.connect(self.reject)
76
+ layout.addWidget(buttons)
37
77
 
78
+ self.setLayout(layout)
79
+
80
+ def get_selection(self):
81
+ """Retorna (codec, extensão) escolhido pelo usuário"""
82
+ return self.codec_combo.currentData()
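For reference, a minimal sketch of how CodecDialog is meant to be driven; the codec list, output name, fps, and frame size below are illustrative assumptions, not values taken from this diff:

    import cv2
    from PyQt5.QtWidgets import QApplication, QDialog

    app = QApplication([])
    dlg = CodecDialog(["mp4v", "XVID", "MJPG"])          # assumed codec list
    if dlg.exec_() == QDialog.Accepted:
        codec, ext = dlg.get_selection()                 # e.g. ("mp4v", ".mp4")
        fourcc = cv2.VideoWriter_fourcc(*codec)
        writer = cv2.VideoWriter("output" + ext, fourcc, 30.0, (640, 480))  # assumed fps/size
        if not writer.isOpened():
            print(f"Codec {codec} is not available in this OpenCV build.")
        writer.release()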
38
83
 
39
84
 
40
85
  class ImageCropper(QMainWindow):
86
+
87
+
41
88
  def __init__(self):
89
+
42
90
  super().__init__()
43
- self.initUI()
44
-
45
- # Atributos para desenho
46
91
  self.drawing = False
47
92
  self.rect_start = QPoint()
48
93
  self.current_rect = QRect()
49
- self.image = None
50
- self.pixmap = None
51
94
  self.original_image = None
95
+ self.image = None
96
+ self.pixmap = None
52
97
  self.result_image = None
98
+ self.ret = None
99
+
100
+
101
+ self.initUI()
102
+
103
+
53
104
 
54
105
 
55
106
  def initUI(self):
56
107
 
57
- # self.test = True
58
- self.test = False
108
+ self.test = True
109
+ # self.test = False
59
110
  # print("teste")
60
111
 
61
112
 
@@ -72,22 +123,99 @@ class ImageCropper(QMainWindow):
72
123
  self.image_layout = QVBoxLayout()
73
124
  self.main_layout.addLayout(self.image_layout)
74
125
 
75
- # Layout to controls
76
- self.controls_layout = QVBoxLayout()
77
- self.main_layout.addLayout(self.controls_layout)
78
126
 
79
127
  # Create a QLabel to display the image
80
128
  self.image_label = QLabel(self)
129
+ self.image_label.setAlignment(Qt.AlignCenter) # center the pixmap
130
+ self.image_label.setMinimumSize(320, 240)
81
131
  self.image_layout.addWidget(self.image_label)
132
+ self.image_label.setMouseTracking(True)
133
+
134
+
135
+ # --- Video Player Controls ---
136
+ # Seek bar / time display layout (below the video)
137
+ self.video_bar_time_layout = QVBoxLayout()
138
+ self.image_layout.addLayout(self.video_bar_time_layout)
139
+
140
+ # Video player controls layout (below the video)
141
+ self.video_controls_layout = QVBoxLayout()
142
+ self.image_layout.addLayout(self.video_controls_layout)
143
+
144
+ # progress bar
145
+ self.video_slider = QSlider(Qt.Horizontal)
146
+ self.video_bar_time_layout.addWidget(self.video_slider)
147
+ self.video_slider.sliderMoved.connect(self.seek_video)
148
+
149
+ # show time
150
+ self.time_label = QLabel('00:00 / 00:00')
151
+ self.video_bar_time_layout.addWidget(self.time_label)
152
+
153
+ self.mouse_label = QLabel('Mouse: (0,0)')
154
+ self.video_bar_time_layout.addWidget(self.mouse_label)
155
+
156
+ # Timer for video playback
157
+ self.video_timer = QTimer()
158
+ self.video_timer.timeout.connect(self.next_frame)
159
+
160
+ # Video playback state variables
161
+ self.video = None # cv2.VideoCapture
162
+ self.fps = 0
163
+ self.total_frames = 0
164
+ self.current_frame = 0
165
+ self.playing = False
166
+
167
+ # Button row: Play, Pause, Stop
168
+ self.video_buttons_layout = QHBoxLayout()
169
+ self.video_controls_layout.addLayout(self.video_buttons_layout)
170
+
171
+ # Create a button to load the video
172
+ self.load_button = QPushButton('Load Video', self)
173
+ self.video_buttons_layout.addWidget(self.load_button)
174
+ self.load_button.clicked.connect(self.load_image)
175
+
176
+ # Control buttons
177
+ self.play_button = QPushButton('Play')
178
+ self.pause_button = QPushButton('Pause')
179
+ self.stop_button = QPushButton('Stop')
180
+ self.export_button = QPushButton('Export Video', self)
181
+
182
+
183
+ self.video_buttons_layout.addWidget(self.play_button)
184
+ self.video_buttons_layout.addWidget(self.pause_button)
185
+ self.video_buttons_layout.addWidget(self.stop_button)
186
+ self.video_buttons_layout.addWidget(self.export_button)
187
+
188
+ self.play_button.clicked.connect(self.play_video)
189
+ self.pause_button.clicked.connect(self.pause_video)
190
+ self.stop_button.clicked.connect(self.stop_video)
191
+ self.export_button.clicked.connect(self.export_video_dialog)
192
+
193
+
194
+ # Speed slider (0.25x to 2.0x)
195
+ self.speed_label = QLabel("Speed: 1.0x")
196
+ self.video_controls_layout.addWidget(self.speed_label)
197
+ self.speed_slider = QSlider(Qt.Horizontal)
198
+ self.speed_slider.setMinimum(25) # 0.25x
199
+ self.speed_slider.setMaximum(200) # 2.0x
200
+ self.speed_slider.setValue(100) # 1.0x
201
+ self.speed_slider.setTickInterval(25)
202
+ self.speed_slider.setTickPosition(QSlider.TicksBelow)
203
+ self.video_controls_layout.addWidget(self.speed_slider)
204
+
205
+ self.speed_slider.valueChanged.connect(self.update_speed)
206
+
207
+
208
+
209
+
210
+ # Layout for controls
211
+ self.controls_layout = QVBoxLayout()
212
+ self.main_layout.addLayout(self.controls_layout)
82
213
 
83
214
  # Create a QLabel to display the resulting image
84
215
  self.result_label = QLabel(self)
85
216
  self.image_layout.addWidget(self.result_label)
86
217
 
87
- # Create a button to load the image
88
- self.load_button = QPushButton('Load Video', self)
89
- self.controls_layout.addWidget(self.load_button)
90
- self.load_button.clicked.connect(self.load_image)
218
+
91
219
 
92
220
 
93
221
  # Create a button to crop the image
@@ -187,9 +315,7 @@ class ImageCropper(QMainWindow):
187
315
  self.check_option = QCheckBox("Print a PDF with images", self)
188
316
  self.check_option.setChecked(False) # desmarcado por padrão
189
317
  self.controls_layout.addWidget(self.check_option)
190
-
191
-
192
-
318
+
193
319
  # Hook mouse events
194
320
  self.image_label.installEventFilter(self)
195
321
 
@@ -198,7 +324,7 @@ class ImageCropper(QMainWindow):
198
324
  self.load_image()
199
325
  self.int_input1.setText("45.")
200
326
  self.int_input2.setText("10")
201
- self.int_input3.setText("5000")
327
+ self.int_input3.setText("1000")
202
328
  self.int_input4.setText("1.0")
203
329
 
204
330
  self.show()
@@ -206,8 +332,12 @@ class ImageCropper(QMainWindow):
206
332
  def load_image(self):
207
333
 
208
334
 
335
+ if hasattr(self, "video") and self.video and self.video.isOpened():
336
+ self.video.release()
337
+
338
+
209
339
  if self.test:
210
- self.file_path = "/media/standard02/Linux_sync_2020_02/standart/Documents/programming/python/files/15-SY-50cm/water-without-absolute-intensity2.flv"
340
+ self.file_path = "/home/standard02/Documents/programming/python/bolhas/test/2024-07-10-water-without-absolute-intensity.flv"
211
341
  # self.file_path, _ = QFileDialog.getOpenFileName(self, 'Open Video', '', 'Videos (*.avi *.mp4 *.mov *.mkv *.wmv *.flv *.mpg *.mpeg *.3gp *.ogv .webm)')
212
342
  else:
213
343
  self.file_path, _ = QFileDialog.getOpenFileName(self, 'Open Video', '', 'Videos (*.avi *.mp4 *.mov *.mkv *.wmv *.flv *.mpg *.mpeg *.3gp *.ogv .webm)')
@@ -217,54 +347,553 @@ class ImageCropper(QMainWindow):
217
347
  return
218
348
 
219
349
  self.file_path = os.path.normpath(str(Path(self.file_path).expanduser().resolve()))
350
+ self.video = cv2.VideoCapture(self.file_path)
351
+ self.ret = None
352
+
220
353
 
354
+ if not self.video.isOpened():
355
+ QMessageBox.critical(self, 'Error', f'Could not open video:\n{self.file_path}')
356
+ return
357
+
221
358
  # self.file_path = os.path.normpath(self.file_path)
222
359
  # self.file_path = QFileInfo(self.file_path).fileName();
223
360
  # self.file_path = Path(self.file_path);
224
361
  # self.file_path = self.file_path.resolve();
225
362
  # self.file_path = os.path.normpath(self.file_path);
226
-
227
- video = cv2.VideoCapture(self.file_path);
363
+
364
+ rval, frame = self.video.read();
228
365
 
229
- if not video.isOpened():
230
- QMessageBox.critical(self, 'Error', f'Could not open video:\n{self.file_path}')
366
+ if not rval or frame is None:
367
+ QMessageBox.critical(self, 'Error', 'Could not read first frame from the video.')
368
+ self.video.release()
231
369
  return
370
+
371
+ # Convert the OpenCV frame (BGR → RGB) to QImage/QPixmap
372
+ rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
373
+ h, w, ch = rgb.shape
374
+ bytes_per_line = ch * w
375
+ qimg = QImage(rgb.data, w, h, bytes_per_line, QImage.Format_RGB888).copy()
376
+
377
+ # Update image attributes
378
+ self.original_image = qimg
379
+ self.image = qimg.copy()
380
+ self.pixmap = QPixmap.fromImage(self.image)
381
+ self.image_label.setPixmap(self.pixmap) #show image
382
+ # self.image_label.setScaledContents(True)
383
+ self.current_rect = QRect()
384
+ self.update_image()
385
+
386
+ # Reset playback state variables
387
+ self.fps = int(self.video.get(cv2.CAP_PROP_FPS))
388
+ self.total_frames = int(self.video.get(cv2.CAP_PROP_FRAME_COUNT))
389
+ self.duration_sec = self.total_frames / self.fps if self.fps else 0
390
+ self.current_frame = 0
391
+ self.video.set(cv2.CAP_PROP_POS_FRAMES, 0)
392
+ self.video_slider.setMaximum(max(0, self.total_frames - 1))
393
+ self.playing = False
232
394
 
233
- rval, frame = video.read();
395
+ # Update UI
396
+ # self.update_time_label()
397
+ # self.display_frame()
398
+ # self.image = self.original_image.copy()
399
+ # self.display_cv2_frame(frame)
400
+
401
+
402
+
403
+ def play_video(self):
234
404
 
235
- if not rval or frame is None:
236
- QMessageBox.critical(self, 'Error', 'Could not read first frame from the video.')
237
- video.release()
405
+ if self.video is None:
238
406
  return
407
+ self.playing = True
408
+ # self.video_timer.start(int(1000 / self.fps)) # fires once per frame
409
+ speed_factor = self.speed_slider.value() / 100.0
410
+ interval = max(1, int(1000 / self.fps / speed_factor))
411
+ self.video_timer.start(interval)
412
+
413
+
414
+ def pause_video(self):
239
415
 
240
- file_path = 'data/';
241
- file_path = Path(file_path);
242
- file_path = file_path.resolve();
243
- os.makedirs(file_path, exist_ok=True)
244
- file_path = os.path.normpath(file_path);
245
- file_path = os.path.join(file_path, 'sample_frame.jpg');
246
- file_path = os.path.normpath(file_path);
416
+ self.playing = False
417
+ self.video_timer.stop()
418
+
419
+
420
+ def stop_video(self):
421
+
422
+ if self.video is None:
423
+ return
424
+ self.pause_video()
425
+ self.current_frame = 0
426
+ self.video.set(cv2.CAP_PROP_POS_FRAMES, 0)
427
+ self.display_frame()
428
+ self.video_slider.setValue(0)
429
+ self.update_time_label()
430
+
431
+
432
+ def keyPressEvent(self, event):
433
+
434
+ if event.key() == Qt.Key_Space:
435
+ if self.playing:
436
+ self.pause_video()
437
+ else:
438
+ self.play_video()
439
+
440
+
441
+ def next_frame(self):
442
+
443
+ if self.video is None or not self.playing:
444
+ return
445
+
446
+ self.video.set(cv2.CAP_PROP_POS_FRAMES, self.current_frame)
447
+ ret, frame = self.video.read()
448
+
449
+ if not ret:
450
+ self.pause_video()
451
+ return
452
+
453
+ self.display_cv2_frame(frame)
454
+ self.video_slider.setValue(self.current_frame)
455
+ self.update_time_label()
456
+
457
+ self.current_frame += 1
458
+
459
+ if self.current_frame >= self.total_frames:
460
+ self.stop_video()
461
+
462
+
463
+
464
+
465
+ def display_frame(self):
466
+
467
+
468
+ if self.video is None:
469
+ return
470
+ self.video.set(cv2.CAP_PROP_POS_FRAMES, self.current_frame)
471
+ ret, frame = self.video.read()
472
+ if ret:
473
+ self.display_cv2_frame(frame)
474
+
475
+ def display_cv2_frame(self, frame):
476
+
477
+ rgb_image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
478
+ h, w, ch = rgb_image.shape
479
+ bytes_per_line = ch * w
480
+ q_image = QImage(rgb_image.data, w, h, bytes_per_line, QImage.Format_RGB888).copy()
481
+
482
+
483
+ # Keep the current image
484
+ self.original_image = q_image
485
+ self.image = q_image.copy()
486
+ self.pixmap = QPixmap.fromImage(self.image)
487
+ self.image_label.setPixmap(self.pixmap)
488
+
489
+ self.update_image()
490
+
491
+
492
+
493
+
494
+ def seek_video(self, frame_number):
495
+
496
+ if self.video is None:
497
+ return
498
+ self.pause_video()
499
+ self.current_frame = frame_number
500
+ self.video.set(cv2.CAP_PROP_POS_FRAMES, frame_number)
501
+ self.display_frame()
502
+ self.update_time_label()
503
+
504
+ def update_time_label(self):
505
+
506
+ current_time = self.current_frame / self.fps if self.fps else 0
507
+ total_time = self.total_frames / self.fps if self.fps else 0
508
+ time_str = f"{self.format_time(current_time)} ({self.current_frame} frame) / {self.format_time(total_time)} min"
509
+ self.time_label.setText(time_str)
510
+
511
+ def format_time(self, seconds):
512
+
513
+ m, s = divmod(int(seconds), 60)
514
+ return f"{m:02d}:{s:02d}"
515
+
516
+ def update_speed(self):
517
+
518
+ speed_factor = self.speed_slider.value() / 100.0
519
+ self.speed_label.setText(f"Speed: {speed_factor:.2f}x")
520
+ if self.playing:
521
+ interval = max(1, int(1000 / self.fps / speed_factor))
522
+ self.video_timer.setInterval(interval)
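The timer interval above follows directly from the fps and the slider percentage; a quick worked example with illustrative numbers:

    # Hypothetical values: a 25 fps video played at 2.0x must redraw every 20 ms.
    fps = 25
    slider_value = 200                      # slider stores percent: 200 -> 2.0x
    speed_factor = slider_value / 100.0     # 2.0
    interval = max(1, int(1000 / fps / speed_factor))
    print(interval)                         # 20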
523
+
524
+
525
+ def export_video_dialog(self):
526
+
527
+ if self.video is None:
528
+ print(f"Error: input file not found: {self.video}", file=sys.stderr)
529
+ return
530
+
531
+ available_codecs = self.detect_codecs()
532
+ dlg = CodecDialog(available_codecs)
533
+ if dlg.exec_() == QDialog.Accepted:
534
+ codec, ext = dlg.get_selection()
535
+ # print(f"✅ Codec escolhido: {codec}, Extensão: {ext}")
536
+
537
+
538
+ # fourcc = cv2.VideoWriter_fourcc(*codec)
539
+ # out = cv2.VideoWriter("saida" + ext, fourcc, fps, (width, height))
540
+
541
+ # Dialog to choose where to save the video
542
+ save_path, _ = QFileDialog.getSaveFileName(self, "Save Video As", "", f"Video Files {codec}")
543
+ base, ext = os.path.splitext(self.file_path)
544
+ out_ext = os.path.splitext(save_path)[1].lower()
545
+
546
+ if not save_path:
547
+ return
548
+
549
+ if not out_ext:
550
+ out_ext = '.mp4'
551
+ save_path = (save_path + out_ext)
552
+
553
+
554
+ # Widgets for choosing export parameters
555
+ dialog = QDialog(self)
556
+ dialog.setWindowTitle("Export Video Settings")
557
+ layout = QVBoxLayout(dialog)
558
+
559
+ # Start frame
560
+ start_label = QLabel("Start Frame:")
561
+ start_input = QLineEdit(str(self.current_frame))
562
+ layout.addWidget(start_label)
563
+ layout.addWidget(start_input)
564
+
565
+ # End frame
566
+ end_label = QLabel("End Frame:")
567
+ end_input = QLineEdit(str(self.total_frames - 1))
568
+ layout.addWidget(end_label)
569
+ layout.addWidget(end_input)
570
+
571
+ # Keep-decider: keep only every Nth frame (field labelled "Output FPS")
572
+ keep_decider_label = QLabel("Output FPS:")
573
+ keep_decider_input = QLineEdit(str(self.fps))
574
+ layout.addWidget(keep_decider_label)
575
+ layout.addWidget(keep_decider_input)
576
+
577
+
578
+ # Buttons
579
+ button_box = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
580
+ layout.addWidget(button_box)
581
+
582
+
583
+ button_box.accepted.connect(lambda: self.export_video(
584
+ self.file_path,
585
+ save_path,
586
+ codec,
587
+ int(start_input.text()),
588
+ int(end_input.text()),
589
+ int(keep_decider_input.text()),
590
+ self.ret,
591
+ None
592
+ ))
593
+ # crop_rect=(100, 50, 400, 300)
594
+ button_box.accepted.connect(dialog.accept)
595
+ button_box.rejected.connect(dialog.reject)
596
+
597
+ dialog.exec_()
598
+
599
+
600
+
601
+ def export_video(self, in_path, out_path, codec, cut_first=0, cut_last=0, keep_decider=None, crop_rect=None, to_drop=None):
602
+
603
+ video_in = cv2.VideoCapture(in_path)
604
+
605
+
606
+ if not video_in.isOpened():
607
+ raise RuntimeError(f"Not was possible open {in_path}")
608
+
609
+ total_frames = int(video_in.get(cv2.CAP_PROP_FRAME_COUNT))
610
+ fps_in = int(video_in.get(cv2.CAP_PROP_FPS))
611
+ if fps_in <= 0:
612
+ raise ValueError(f"Invalid FPS: {fps_in}")
613
+
614
+
615
+ # crop limits
616
+ start = max(0, cut_first)
617
+ end = cut_last if cut_last > 0 else total_frames
618
+ end = max(start, end) - 1
619
+ # print(start, end, total_frames)
620
+
621
+
622
+ w_in = int(video_in.get(cv2.CAP_PROP_FRAME_WIDTH))
623
+ h_in = int(video_in.get(cv2.CAP_PROP_FRAME_HEIGHT))
624
+ display_w = self.image_label.width()
625
+ display_h = self.image_label.height()
626
+ scale_x = w_in / display_w
627
+ scale_y = h_in / display_h
628
+
629
+
630
+ if crop_rect is not None:
631
+ x, y, w_out, h_out = crop_rect
632
+ x = int(x * scale_x)
633
+ y = int(y * scale_y)
634
+ w_out = int(w_out * scale_x)
635
+ h_out = int(h_out * scale_y)
636
+
637
+ # x = max(0, min(x, w_in - 1))
638
+ # y = max(0, min(y, h_in - 1))
639
+ # w_out = min(w_out, w_in - x)
640
+ # h_out = min(h_out, h_in - y)
641
+ else:
642
+ w_out = w_in
643
+ h_out = h_in
644
+
645
+ size = (abs(w_out), abs(h_out))
646
+
647
+
648
+
649
+ kept = 0
650
+ for idx in range(start, end):
651
+ if to_drop and idx in to_drop:
652
+ continue
653
+ if keep_decider and idx % keep_decider != 0:
654
+ continue
655
+ kept += 1
656
+
657
+ if kept == 0:
658
+ raise RuntimeError("No frames selected for export!")
659
+
660
+ total_considered = end - start
661
+ fps_out = fps_in * (kept / total_considered)
662
+ # print(f"FPS adjusted: {fps_in:.2f} → {fps_out:.2f} (keeped {kept}/{total_considered})")
663
+
664
+
665
+ if size[0] <= 0 or size[1] <= 0:
666
+ raise ValueError(f"Invalid size: {size}")
667
+
668
+ # temporary file (backup)
669
+ out_ext = os.path.splitext(out_path)[1] or ".mp4"
670
+ os.makedirs(os.path.dirname(os.path.abspath(out_path)), exist_ok=True)
671
+ with tempfile.NamedTemporaryFile(prefix="cut_", suffix=out_ext, delete=False,
672
+ dir=os.path.dirname(os.path.abspath(out_path))) as tmp:
673
+ tmp_out_path = tmp.name
674
+
675
+ # Initialize the VideoWriter with the chosen codec
676
+ fourcc = cv2.VideoWriter_fourcc(*codec)
677
+ writer = cv2.VideoWriter(tmp_out_path, fourcc, fps_out, size)
678
+
679
+ if not writer.isOpened():
680
+ raise RuntimeError("Failed to open VideoWriter with codec {codec}")
681
+
682
+ progress = QProgressDialog("Exporting video...", "Cancel", 0, kept, self)
683
+ progress.setWindowTitle("Please wait")
684
+ progress.setWindowModality(Qt.WindowModal)
685
+ progress.setMinimumDuration(0)
686
+ progress.setValue(0)
687
+
688
+
689
+
690
+ # Loop over frames
691
+ idx = 0
692
+ written = 0
693
+ current_kept = 0
694
+
695
+ while True:
696
+
697
+ ret, frame = video_in.read()
698
+ if not ret:
699
+ break
700
+
701
+ if idx < start: # crop first N frames
702
+ idx += 1
703
+ continue
704
+ if idx >= end: # crop last N frames
705
+ break
706
+ if to_drop and idx in to_drop: # manually dropped frames
707
+ idx += 1
708
+ continue
709
+ if keep_decider and idx % keep_decider != 0: # keep only every Nth frame
710
+ idx += 1
711
+ continue
712
+
713
+ # Crop if a rectangle was given
714
+ if crop_rect is not None:
715
+ frame = frame[y:y+h_out, x:x+w_out]
716
+
717
+
718
+
719
+
720
+ # save frame
721
+ writer.write(frame)
722
+ written += 1
723
+ idx += 1
724
+ current_kept += 1
725
+
726
+ progress.setValue(current_kept)
727
+ QApplication.processEvents()
728
+ if progress.wasCanceled():
729
+ print("Export canceled by user.")
730
+ break
731
+
732
+
733
+ video_in.release()
734
+ writer.release()
735
+
736
+ if not progress.wasCanceled():
737
+ # replace the final output file
738
+ shutil.move(tmp_out_path, out_path)
739
+ # print(f"Video exported in {out_path}, ({written} saves frames)")
740
+ progress.setValue(kept)
741
+ else:
742
+ os.remove(tmp_out_path)
743
+
744
+ def draw_square(self, event, x, y, flags, param, imagem):
745
+
746
+
747
+
748
+ if event == cv2.EVENT_LBUTTONDOWN:
749
+ # First click → store the starting point
750
+ self.vertices = [(x, y)]
751
+ self.drawing = True
752
+
753
+ elif event == cv2.EVENT_MOUSEMOVE and self.drawing:
754
+ # While dragging, show the live rectangle
755
+ img_copy = param.copy()
756
+ cv2.rectangle(img_copy, self.vertices[0], (x, y), (255, 0, 0), 2)
757
+ cv2.imshow("Video", img_copy)
758
+
759
+ elif event == cv2.EVENT_LBUTTONUP:
760
+ # Second click → close the rectangle
761
+ self.vertices.append((x, y))
762
+ self.drawing = False
763
+ cv2.rectangle(param, self.vertices[0], self.vertices[1], (255, 0, 0), 2)
764
+ cv_imshow_safe("Video", param)
765
+
766
+ # print(f"Rectangle of {self.vertices[0]} up to {self.vertices[1]}")
767
+
768
+
769
+
770
+ def label_pos_to_image_pos(self, pos: QPoint):
771
+
772
+
773
+ if self.pixmap is None or self.image is None:
774
+ return None
775
+
776
+ label_size = self.image_label.size()
777
+ pm = self.pixmap
778
+ pm_size = pm.size()
779
+
780
+
781
+ scaled_pm = pm.scaled(label_size, Qt.KeepAspectRatio, Qt.SmoothTransformation)
782
+
783
+ x_off = (label_size.width() - scaled_pm.width()) // 2
784
+ y_off = (label_size.height() - scaled_pm.height()) // 2
785
+
786
+
787
+ sx = pos.x() - x_off
788
+ sy = pos.y() - y_off
789
+ if sx < 0 or sy < 0 or sx >= scaled_pm.width() or sy >= scaled_pm.height():
790
+ return None # outside the image
791
+
792
+
793
+ img_w = pm.width()
794
+ img_h = pm.height()
795
+ ix = int(sx * img_w / scaled_pm.width())
796
+ iy = int(sy * img_h / scaled_pm.height())
797
+
798
+ return QPoint(ix, iy)
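A quick numeric check of the label-to-image mapping above, using hypothetical sizes and plain arithmetic (no Qt objects needed):

    # A 1280x720 frame shown centered in a 640x480 label.
    # Qt.KeepAspectRatio scales it to 640x360, leaving 60 px bands top and bottom.
    label_w, label_h = 640, 480
    img_w, img_h = 1280, 720
    scale = min(label_w / img_w, label_h / img_h)                # 0.5
    scaled_w, scaled_h = int(img_w * scale), int(img_h * scale)  # 640 x 360
    x_off = (label_w - scaled_w) // 2                            # 0
    y_off = (label_h - scaled_h) // 2                            # 60
    sx, sy = 320 - x_off, 240 - y_off                            # click at label (320, 240)
    ix = int(sx * img_w / scaled_w)                              # 640
    iy = int(sy * img_h / scaled_h)                              # 360 -> image center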
799
+
800
+
801
+
802
+ def detect_codecs(self):
803
+
804
+ codecs = self.list_ffmpeg_codecs()
805
+ if codecs:
806
+ return codecs
807
+ else:
808
+ return self.test_opencv_codecs(["mp4v", "XVID", "MJPG", "H264", "avc1", "DIVX"])
809
+
810
+
811
+ def list_ffmpeg_codecs(self):
812
+ try:
813
+ result = subprocess.run(["ffmpeg", "-codecs"],
814
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
815
+ text=True)
816
+ codecs = []
817
+ for line in result.stdout.splitlines():
818
+ if line.startswith(" "): # linhas úteis
819
+ parts = line.split()
820
+ if len(parts) >= 2:
821
+ codecs.append(parts[1])
822
+ return codecs
823
+ except FileNotFoundError:
824
+ # print("⚠️ FFmpeg não encontrado no sistema.")
825
+ return []
826
+
827
+ def test_opencv_codecs(self,codecs, output_dir="test_codecs"):
828
+
829
+ os.makedirs(output_dir, exist_ok=True)
830
+ fps = 10
831
+ frame_size = (320, 240)
832
+ frame = np.zeros((frame_size[1], frame_size[0], 3), dtype=np.uint8)
833
+
834
+ available = []
835
+ for codec in codecs:
836
+ filename = os.path.join(output_dir, f"test_{codec}.avi")
837
+ fourcc = cv2.VideoWriter_fourcc(*codec)
838
+ writer = cv2.VideoWriter(filename, fourcc, fps, frame_size)
839
+ if writer.isOpened():
840
+ writer.write(frame)
841
+ writer.release()
842
+ if os.path.exists(filename) and os.path.getsize(filename) > 0:
843
+ available.append(codec)
844
+ return available
845
+
846
+
847
+
848
+
849
+
850
+
851
+
852
+
853
+
854
+
855
+
856
+
857
+
858
+
859
+
860
+
861
+
862
+
863
+
864
+
865
+
866
+
867
+
868
+
869
+
870
+
871
+
872
+
873
+
874
+
875
+
876
+
877
+
878
+
879
+
880
+
881
+
882
+
883
+
884
+
885
+
886
+
887
+
888
+
889
+
890
+
891
+
892
+
893
+
894
+
895
+
247
896
 
248
- ok = cv2.imwrite(file_path,frame);
249
- if not ok:
250
- QMessageBox.critical(self, 'Error', f'Failed to save first frame to:\n{file_path}')
251
- video.release()
252
- return
253
-
254
- if file_path:
255
- self.pixmap = QPixmap(file_path)
256
- if self.pixmap.isNull():
257
- QMessageBox.critical(self, 'Error', f'Failed to load image into QPixmap:\n{file_path}')
258
- video.release()
259
- return
260
- self.original_image = self.pixmap.toImage()
261
- self.image = self.pixmap.toImage()
262
- self.image_label.setPixmap(self.pixmap)
263
- self.image_label.setScaledContents(True)
264
- self.fps = video.get(cv2.CAP_PROP_FPS);
265
- self.total_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
266
- self.max_frames = self.total_frames;
267
- video.release();
268
897
 
269
898
  def sort_images_edf_time(self):
270
899
 
@@ -308,14 +937,22 @@ class ImageCropper(QMainWindow):
308
937
  set_file_1.read_setfiles_edf(option = 0);
309
938
 
310
939
  def contate_sort_images_edf_size_drop(self):
311
-
312
- directory = QFileDialog.getExistingDirectory(self, 'Choose (Samples) Directory with EDF Images');
940
+
941
+
942
+ if self.test:
943
+ directory = "/home/standard02/Documents/programming/python/bolhas/data/15-SY-30cm/edf-15-SY"
944
+ else:
945
+ directory = QFileDialog.getExistingDirectory(self, 'Choose (Samples) Directory with EDF Images');
946
+
313
947
  directory = directory + "/";
314
948
  directory = Path(directory);
315
949
  directory = directory.resolve();
316
950
  directory = os.path.normpath(directory);
317
951
 
318
- file_path, _ = QFileDialog.getOpenFileName(self, 'Choose (Samples) file with polynomio data', directory, 'Data files (*.dat *.txt *.csv)');
952
+ if self.test:
953
+ file_path = '/home/standard02/Documents/programming/python/bolhas/PyPI/drap/data/15SY-30cm_Video_time_size.csv'
954
+ else:
955
+ file_path, _ = QFileDialog.getOpenFileName(self, 'Choose (Samples) file with polynomio data', directory, 'Data files (*.dat *.txt *.csv)');
319
956
 
320
957
  file_path = Path(file_path);
321
958
  file_path = file_path.resolve();
@@ -327,15 +964,23 @@ class ImageCropper(QMainWindow):
327
964
  set_file_1 = conc_scat_video(5,path = directory, input_file = file_path);
328
965
  set_file_1.read_setfiles_edf(option = 2);
329
966
 
330
- directory = QFileDialog.getExistingDirectory(self, 'Choose (Background) Directory with EDF Images');
967
+
968
+ if self.test:
969
+ directory = "/home/standard02/Documents/programming/python/bolhas/data/15-SY-30cm/edf-files-buffer"
970
+ else:
971
+ directory = QFileDialog.getExistingDirectory(self, 'Choose (Background) Directory with EDF Images');
331
972
  directory = directory + "/";
332
973
 
333
974
 
334
975
  directory = Path(directory);
335
976
  directory = directory.resolve();
336
977
  directory = os.path.normpath(directory);
337
-
338
- file_path, _ = QFileDialog.getOpenFileName(self, 'Choose (Background) file with polynomio data', directory, 'Data files (*.dat *.txt *.csv)');
978
+
979
+
980
+ if self.test:
981
+ file_path = "/home/standard02/Documents/programming/python/bolhas/PyPI/drap/data/water-without-absolute-intensity-30cm_Video_time_size.csv"
982
+ else:
983
+ file_path, _ = QFileDialog.getOpenFileName(self, 'Choose (Background) file with polynomio data', directory, 'Data files (*.dat *.txt *.csv)');
339
984
 
340
985
  file_path = Path(file_path);
341
986
  file_path = file_path.resolve();
@@ -349,42 +994,51 @@ class ImageCropper(QMainWindow):
349
994
  concatene_files_scat_back(set_file_1, set_file_2);
350
995
 
351
996
  def eventFilter(self, obj, event):
352
- if obj == self.image_label:
353
- if event.type() == QMouseEvent.MouseButtonPress:
354
- if event.button() == Qt.LeftButton:
997
+
998
+
999
+ if obj == self.image_label and (self.original_image is not None):
1000
+
1001
+ if event.type() == QEvent.MouseButtonPress and event.button() == Qt.LeftButton:
1002
+ mapped = self.label_pos_to_image_pos(event.pos())
1003
+ if mapped is not None:
355
1004
  self.drawing = True
356
- self.rect_start = event.pos()
357
- elif event.type() == QMouseEvent.MouseMove:
1005
+ self.rect_start = mapped
1006
+ self.current_rect = QRect(self.rect_start, self.rect_start)
1007
+ self.update_image()
1008
+
1009
+ elif event.type() == QEvent.MouseMove and self.drawing:
1010
+ mapped = self.label_pos_to_image_pos(event.pos())
1011
+ if mapped is not None:
1012
+ self.current_rect = QRect(self.rect_start, mapped).normalized()
1013
+ self.update_image()
1014
+
1015
+ elif event.type() == QEvent.MouseButtonRelease and event.button() == Qt.LeftButton:
358
1016
  if self.drawing:
359
- self.current_rect = QRect(self.rect_start, event.pos()).normalized()
360
- self.update_image()
361
- elif event.type() == QMouseEvent.MouseButtonRelease:
362
- if event.button() == Qt.LeftButton:
363
1017
  self.drawing = False
364
- if not self.current_rect.isNull():
365
- self.update_image()
1018
+ mapped = self.label_pos_to_image_pos(event.pos())
1019
+ if mapped is not None:
1020
+ self.current_rect = QRect(self.rect_start, mapped).normalized()
1021
+ self.update_image()
1022
+ # x, y, w, h = self.current_rect.getRect()
1023
+ # self.crop_rect = (x, y, w, h)
1024
+
366
1025
  return super().eventFilter(obj, event)
367
1026
 
1027
+
368
1028
  def crop_image(self):
369
1029
 
370
- if self.current_rect.isNull() or not self.original_image:
371
- QMessageBox.warning(self, 'Warning', 'No rectangle drawn for cropping. Please draw rectangle first.')
1030
+ if not hasattr(self, "original_image") or self.original_image is None:
1031
+ QMessageBox.warning(self, "Warning", "No image loaded.")
1032
+ return
1033
+ if not hasattr(self, "current_rect") or self.current_rect.isNull():
1034
+ QMessageBox.warning(self, "Warning", "No rectangle drawn.")
372
1035
  return
373
1036
 
374
- # Crop the image
375
1037
  cropped_image = self.original_image.copy(self.current_rect)
376
-
377
- # Saving the cropped image
378
- save_path = 'data/';
379
- save_path = Path(save_path);
380
- save_path = save_path.resolve();
381
- save_path = os.path.normpath(save_path);
382
- save_path = os.path.join(save_path, 'image_croped.png');
383
- save_path = os.path.normpath(save_path);
384
- os.makedirs(os.path.dirname(save_path), exist_ok=True)
385
- if save_path:
386
- cropped_image.save(save_path)
387
- self.save_rectangle_coordinates(self.current_rect)
1038
+ save_path = Path("data/image_cropped.png").resolve()
1039
+ os.makedirs(save_path.parent, exist_ok=True)
1040
+ cropped_image.save(str(save_path))
1041
+ self.save_rectangle_coordinates(self.current_rect)
388
1042
 
389
1043
 
390
1044
  def show_image_info(self):
@@ -403,6 +1057,15 @@ class ImageCropper(QMainWindow):
403
1057
  f"Time Total (s): {round(self.total_frames/self.fps)} ")
404
1058
 
405
1059
  QMessageBox.information(self, 'Information of Image', info)
1060
+
1061
+
1062
+ def show_error_message(self, title, message):
1063
+
1064
+ msg = QMessageBox(self)
1065
+ msg.setIcon(QMessageBox.Critical)
1066
+ msg.setWindowTitle(title)
1067
+ msg.setText(message)
1068
+ msg.exec_()
406
1069
 
407
1070
  def calcule_size_drop(self):
408
1071
 
@@ -424,7 +1087,7 @@ class ImageCropper(QMainWindow):
424
1087
  for output_field, number in zip([self.int_output1, self.int_output2, self.int_output3], numbers):
425
1088
  output_field.setText(f'Number: {number}')
426
1089
 
427
- set_file_1 = conc_scat_video(3, file_video =self.file_path, px_mm = float(numbers[0]) , step = int(numbers[1]), time_limit = int(numbers[2]), Co = float(numbers[3]) , retangulo = self.ret, print_pdf = print_pdf);
1090
+ set_file_1 = conc_scat_video(3, file_video =self.file_path, px_mm = float(numbers[0]) , step = int(numbers[1]), time_limit = int(numbers[2]), Co = float(numbers[3]), retangulo = self.ret, print_pdf = print_pdf);
428
1091
  result_image = set_file_1.read_video();
429
1092
  if result_image:
430
1093
  self.result_image = QPixmap(result_image)
@@ -437,25 +1100,59 @@ class ImageCropper(QMainWindow):
437
1100
  return;
438
1101
 
439
1102
 
440
- def update_image(self):
1103
+ def update_image(self, frame=None):
441
1104
 
442
- if self.original_image and self.pixmap:
443
- self.image = self.original_image.copy() # Restore the original image
444
- painter = QPainter(self.image)
445
- painter.setPen(QPen(Qt.red, 2, Qt.SolidLine))
446
- if not self.current_rect.isNull():
447
- painter.drawRect(self.current_rect)
448
- painter.end()
449
- self.pixmap = QPixmap.fromImage(self.image)
450
- self.image_label.setPixmap(self.pixmap)
1105
+ if frame is not None:
1106
+ self.image = cv2_to_qimage(frame)
1107
+ self.original_image = self.image.copy()
1108
+ elif self.image is None and self.original_image is not None:
1109
+ self.image = self.original_image.copy()
1110
+ elif self.original_image is not None:
1111
+ self.image = self.original_image.copy()
1112
+
1113
+ else:
1114
+ return
1115
+
1116
+ painter = QPainter(self.image)
1117
+ painter.setPen(QPen(Qt.red, 2, Qt.SolidLine))
1118
+
1119
+
1120
+ if hasattr(self, "current_rect") and not self.current_rect.isNull():
1121
+ painter.drawRect(self.current_rect)
1122
+ painter.end()
1123
+
1124
+ self.pixmap = QPixmap.fromImage(self.image)
1125
+ self.image_label.setPixmap(self.pixmap)
1126
+
1127
+
1128
+
1129
+ # if self.original_image and self.pixmap:
1130
+ #
1131
+ # self.image = self.original_image.copy() # Restore the original image
1132
+ # painter = QPainter(self.image)
1133
+ # painter.setPen(QPen(Qt.red, 2, Qt.SolidLine))
1134
+ #
1135
+ # if not self.current_rect.isNull():
1136
+ # painter.drawRect(self.current_rect)
1137
+ #
1138
+ # painter.end()
1139
+ # self.pixmap = QPixmap.fromImage(self.image)
1140
+ # self.image_label.setPixmap(self.pixmap)
451
1141
 
452
1142
  def save_rectangle_coordinates(self, rect):
453
1143
 
454
1144
  # Save vertex coordinates to a file
455
- x1, y1 = rect.topLeft().x(), rect.topLeft().y()
456
- x2, y2 = rect.bottomRight().x(), rect.bottomRight().y()
457
- self.ret = [x1, x2, y1,y2]
458
- coordinates = f"Vértices do Retângulo: ({x1}, {y1}), ({x2}, {y2})"
1145
+ # x1, y1 = rect.topLeft().x(), rect.topLeft().y()
1146
+ # x2, y2 = rect.bottomRight().x(), rect.bottomRight().y()
1147
+ # self.ret = [x1, x2, y1,y2]
1148
+
1149
+
1150
+ x = self.current_rect.x()
1151
+ y = self.current_rect.y()
1152
+ w = self.current_rect.width()
1153
+ h = self.current_rect.height()
1154
+ self.ret = [x, y, w,h]
1155
+ coordinates = f"Vértice do Retângulo: ({x}, {y}), width ({w}, height {h})"
459
1156
 
460
1157
  # Save the coordinates to a text file
461
1158
  save_path = "data/";
@@ -659,12 +1356,12 @@ def concatene_files_scat_back(set_file_1, set_file_2):
659
1356
 
660
1357
 
661
1358
  factor = 0.15;
662
- max_area = max(_date_1["area_big"] for _date_1 in temp_info_files_edf)
1359
+ # max_area = max(_date_1["area_big"] for _date_1 in temp_info_files_edf)
663
1360
  temp_name_file = numpy.array(list_scat_back[:,0]);
664
1361
  for _date_1 in temp_info_files_edf:
665
1362
  area_avg_1 = (_date_1['area_big'] + _date_1['area_small']) / 2. ;
666
1363
 
667
- min_diff = max_area;
1364
+ min_diff = float('inf');
668
1365
 
669
1366
  for i_file_back in range(0, len(list_back_size_avg_drop)):
670
1367
  if abs(area_avg_1 - float(list_back_size_avg_drop[i_file_back,1])) <= (min_diff):
@@ -768,6 +1465,15 @@ def cv_destroy_all_windows_safe():
768
1465
  pass # headless
769
1466
 
770
1467
 
1468
+ def cv2_to_qimage(frame):
1469
+
1470
+ """Converte frame do OpenCV (BGR) para QImage (RGB)."""
1471
+ rgb_image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
1472
+ h, w, ch = rgb_image.shape
1473
+ bytes_per_line = ch * w
1474
+
1475
+ return QImage(rgb_image.data, w, h, bytes_per_line, QImage.Format_RGB888).copy()
1476
+
771
1477
  def get_dir_paths( **kwargs):
772
1478
 
773
1479
  # Create a hidden Tkinter window
@@ -820,10 +1526,10 @@ def get_info_video():
820
1526
 
821
1527
  questions = [
822
1528
  "Type the interval between frames to get the drop size: ",
823
- "Type the START pixel value, in the image in X, to select the drop region: ",
824
- "Type the END pixel value, in the image in X, to select the drop region: ",
825
- "Type the START pixel value, in the image in Y, to select the drop region: ",
826
- "Type the END pixel value, in the image in Y, to select the drop region: ",
1529
+ "Type the START pixel value (left bottom), in the image in X EDGE, to select the drop region: ",
1530
+ "Type the START pixel value (left bottom), in the image in Y EDGE, to select the drop region: ",
1531
+ "Type the WIDTH value, in the image, to select the drop region: ",
1532
+ "Type the HEIGHT value, in the image, to select the drop region: ",
827
1533
  "Type the value of pixel by millimeters: ",
828
1534
  "Type the maximum video analysis time (s): "
829
1535
  ]
@@ -913,14 +1619,14 @@ class conc_scat_video:
913
1619
  self.Co = (float(str(line[line.index(':')+1:])));
914
1620
  if line.find('step_1:') != -1:
915
1621
  self.step = round(float(str(line[line.index(':')+1:])));
916
- if line.find('start pixel x_1:') != -1:
1622
+ if line.find('left bottom pixel x_1:') != -1:
917
1623
  self.start_x = round(float(str(line[line.index(':')+1:])));
918
- if line.find('end pixel x_1:') != -1:
919
- self.end_x = round(float(str(line[line.index(':')+1:])));
920
- if line.find('start pixel y_1:') != -1:
1624
+ if line.find('left bottom pixel y_1:') != -1:
921
1625
  self.start_y = round(float(str(line[line.index(':')+1:])));
922
- if line.find('end pixel y_1:') != -1:
923
- self.end_y = round(float(str(line[line.index(':')+1:])));
1626
+ if line.find('width_1:') != -1:
1627
+ self.width = round(float(str(line[line.index(':')+1:])));
1628
+ if line.find('height_1:') != -1:
1629
+ self.height = round(float(str(line[line.index(':')+1:])));
924
1630
  if line.find('pixel/mm_1:') != -1:
925
1631
  self.px_mm = (float(str(line[line.index(':')+1:])));
926
1632
  self.px_mm_inv = 1. / self.px_mm
@@ -958,14 +1664,14 @@ class conc_scat_video:
958
1664
  self.Co = (float(str(line[line.index(':')+1:])));
959
1665
  if line.find('step_2:') != -1:
960
1666
  self.step = round(float(str(line[line.index(':')+1:])));
961
- if line.find('start pixel x_2:') != -1:
1667
+ if line.find('left bottom pixel x_2:') != -1:
962
1668
  self.start_x = round(float(str(line[line.index(':')+1:])));
963
- if line.find('end pixel x_2:') != -1:
964
- self.end_x = round(float(str(line[line.index(':')+1:])));
965
- if line.find('start pixel y_2:') != -1:
1669
+ if line.find('left bottom pixel y_2:') != -1:
966
1670
  self.start_y = round(float(str(line[line.index(':')+1:])));
967
- if line.find('end pixel y_2:') != -1:
968
- self.end_y = round(float(str(line[line.index(':')+1:])));
1671
+ if line.find('width_2:') != -1:
1672
+ self.width = round(float(str(line[line.index(':')+1:])));
1673
+ if line.find('height_2:') != -1:
1674
+ self.height = round(float(str(line[line.index(':')+1:])));
969
1675
  if line.find('pixel/mm_2:') != -1:
970
1676
  self.px_mm = (float(str(line[line.index(':')+1:])));
971
1677
  self.px_mm_inv = 1. / self.px_mm
@@ -1013,9 +1719,9 @@ class conc_scat_video:
1013
1719
  if 'retangulo' in kwargs:
1014
1720
  ret = kwargs['retangulo'];
1015
1721
  self.start_x = ret[0];
1016
- self.end_x = ret[1];
1017
- self.start_y = ret[2];
1018
- self.end_y= ret[3];
1722
+ self.start_y = ret[1];
1723
+ self.width = ret[2];
1724
+ self.height = ret[3];
1019
1725
  if 'print_pdf' in kwargs:
1020
1726
  self.print_pdf = kwargs['print_pdf'];
1021
1727
  else:
@@ -1180,11 +1886,12 @@ class conc_scat_video:
1180
1886
  save_data_edf(self.info_files_edf, os.path.join(path_dir_imgs, name_file), option);
1181
1887
 
1182
1888
 
1183
-
1184
1889
  def read_video(self):
1185
1890
 
1186
1891
 
1187
1892
  # self.video_c = os.path.getctime(self.file_video);
1893
+ # print(self.file_video)
1894
+
1188
1895
 
1189
1896
  self.video_m = os.path.getmtime(self.file_video);
1190
1897
 
@@ -1241,14 +1948,16 @@ class conc_scat_video:
1241
1948
  # crop image to restrict background
1242
1949
  new_start_x = self.start_x;
1243
1950
  new_start_y = self.start_y;
1244
- new_end_x = self.end_x;
1245
- new_end_y = self.end_y;
1246
- ref_width = abs(self.end_x - self.start_x);
1247
- ref_height = abs(self.end_y - self.start_y);
1951
+ ref_end_x = new_end_x = self.start_x + self.width;
1952
+ ref_end_y = new_end_y = self.start_y + self.height;
1953
+ ref_width = abs(self.width);
1954
+ ref_height = abs(self.height);
1248
1955
 
1249
1956
  amplie = False;
1250
1957
  factor = 1;
1251
1958
  start_time = timelib.time()
1959
+
1960
+ progress = ProgressHandler(self, label="Reading frames...", maximum=total_frames)
1252
1961
 
1253
1962
  while has_frame: # take frame just end of video
1254
1963
 
@@ -1279,18 +1988,18 @@ class conc_scat_video:
1279
1988
  _w = temp_size_window[data_i-window:data_i,0]; _h = temp_size_window[data_i-window:data_i,1];
1280
1989
  avg_w = numpy.mean(_w) ;
1281
1990
  avg_h = numpy.mean(_h) ;
1282
- if avg_w < 0.15* abs(self.end_x - self.start_x): avg_w = 0.15 * abs(self.end_x - self.start_x);
1283
- if avg_h < 0.15* abs(self.end_y - self.start_y): avg_h =0.15 *abs(self.end_y - self.start_y);
1991
+ if avg_w < 0.15* abs(ref_end_x - self.start_x): avg_w = 0.15 * abs(ref_end_x - self.start_x);
1992
+ if avg_h < 0.15* abs(ref_end_y - self.start_y): avg_h =0.15 *abs(ref_end_y - self.start_y);
1284
1993
  factor_exp = 0.15;
1285
1994
  new_start_x = int(( x_center - avg_w/2) - (factor_exp * avg_w));
1286
1995
  new_end_x = int(( x_center + avg_w/2) + (factor_exp * avg_w));
1287
1996
  if new_start_x < self.start_x: new_start_x = self.start_x;
1288
- if new_end_x > self.end_x: new_end_x = self.end_x;
1997
+ if new_end_x > ref_end_x: new_end_x = ref_end_x;
1289
1998
  ref_width = abs(new_end_x - new_start_x);
1290
1999
  new_start_y = int(( y_center - avg_h/2) - (factor_exp * avg_h));
1291
2000
  new_end_y = int(( y_center + avg_h/2) + (factor_exp * avg_h));
1292
2001
  if new_start_y < self.start_y: new_start_y = self.start_y;
1293
- if new_end_y > self.end_y: new_end_y = self.end_y;
2002
+ if new_end_y > ref_end_y: new_end_y = ref_end_y;
1294
2003
  ref_height = abs(new_end_y - new_start_y);
1295
2004
  amplie = True;
1296
2005
 
@@ -1299,7 +2008,9 @@ class conc_scat_video:
1299
2008
  # cv2.imwrite("teste.png",frame); #exit();
1300
2009
  #crop image
1301
2010
  imagem = frame[new_start_y:new_end_y, new_start_x:new_end_x];
1302
- # cv2.imwrite("teste1.png",imagem); #exit();
2011
+ # cv2.imwrite("teste1.png",imagem); #exit();
2012
+ # cv2.imshow("teste1",imagem)
2013
+
1303
2014
 
1304
2015
  img_h, img_w = imagem.shape[:2];
1305
2016
  if data_i >= 1 or amplie:
@@ -1307,10 +2018,13 @@ class conc_scat_video:
1307
2018
  factor = 12;
1308
2019
  new_w = int(img_w * factor)
1309
2020
  new_h = int(img_h * factor)
2021
+
1310
2022
 
1311
2023
  if new_w <= 1 or new_h <= 1:
1312
- print(f"Error, check the video; it seems probably there is no droplet image starting from {int(time)} s.")
1313
- break
2024
+ message = f"Error, check the video; it seems probably there is no droplet image starting from {int(time)} s."
2025
+ show_message(self, "Check the video", message, details=None, level="error")
2026
+ # print(f"Error, check the video; it seems probably there is no droplet image starting from {int(time)} s.")
2027
+ return None
1314
2028
 
1315
2029
 
1316
2030
 
@@ -1465,13 +2179,20 @@ class conc_scat_video:
1465
2179
 
1466
2180
  frame_count += 1
1467
2181
  elapsed_time = timelib.time() - start_time;
1468
- print(f"Iteration {frame_count + 1}/{(self.time_limit*fps)}, Elapsed time: {elapsed_time:.2f} seconds", end='\r')
2182
+
2183
+ # print(f"Iteration {frame_count + 1}/{(self.time_limit*fps)}, Elapsed time: {elapsed_time:.2f} seconds", end='\r')
2184
+ progress.update(frame_count, elapsed_time)
2185
+ if progress.was_canceled():
2186
+ progress.finish()
2187
+ print("Process canceled by user.")
2188
+ return None
1469
2189
 
1470
2190
  has_frame, frame = video.read()
1471
2191
 
1472
-
2192
+ progress.finish()
1473
2193
  file_data_imgs.close();
1474
-
2194
+
2195
+
1475
2196
  new_data_time_size = delete_value_extrem(data_time_size);
1476
2197
  self.coef_pol_w = numpy.polyfit(new_data_time_size[:, 0],new_data_time_size[:, 1],12);
1477
2198
  self.coef_pol_h = numpy.polyfit(new_data_time_size[:, 0],new_data_time_size[:, 2],12);
@@ -1498,11 +2219,76 @@ class conc_scat_video:
1498
2219
 
1499
2220
  if self.print_pdf:
1500
2221
  self.print_frames_pdf(path_dir_imgs, file_image_str)
1501
-
2222
+
1502
2223
 
1503
2224
  return file_out
1504
2225
 
1505
2226
 
2227
+
2228
+ class ProgressHandler:
2229
+
2230
+ def __init__(self, parent=None, label="Processing...", maximum=100):
2231
+
2232
+ self.parent = parent
2233
+ self.maximum = maximum
2234
+ self.current = 0
2235
+ self.use_gui = False
2236
+ self.progress = None
2237
+
2238
+ # Detect whether a GUI is active
2239
+ app = QApplication.instance()
2240
+ if app is not None:
2241
+ try:
2242
+ # Try to create the dialog even without a QWidget parent
2243
+ if isinstance(parent, QWidget):
2244
+ self.progress = QProgressDialog(label, "Cancel", 0, maximum, parent)
2245
+ else:
2246
+ self.progress = QProgressDialog(label, "Cancel", 0, maximum)
2247
+ self.progress.setWindowTitle("Please wait")
2248
+ self.progress.setWindowModality(Qt.WindowModal)
2249
+ self.progress.setMinimumDuration(0)
2250
+ self.progress.setValue(0)
2251
+ self.use_gui = True
2252
+ except Exception as e:
2253
+ print(f"[ProgressHandler] ⚠️ Falling back to terminal mode: {e}")
2254
+ self.use_gui = False
2255
+ else:
2256
+ self.use_gui = False
2257
+
2258
+ def update(self, value, elapsed=None):
2259
+ """Atualiza o progresso (GUI ou terminal)"""
2260
+ self.current = value
2261
+ if self.use_gui and self.progress:
2262
+ self.progress.setValue(value)
2263
+ QApplication.processEvents()
2264
+ else:
2265
+ if elapsed is not None:
2266
+ print(
2267
+ f"Iteration {value}/{self.maximum}, Elapsed time: {elapsed:.2f} seconds",
2268
+ end="\r"
2269
+ )
2270
+ else:
2271
+ print(f"Progress: {value}/{self.maximum}", end="\r")
2272
+
2273
+ def was_canceled(self):
2274
+ """Verifica se o usuário cancelou (apenas GUI)"""
2275
+ if self.use_gui and self.progress:
2276
+ return self.progress.wasCanceled()
2277
+ return False
2278
+
2279
+ def finish(self):
2280
+ """Finaliza o progresso"""
2281
+ if self.use_gui and self.progress:
2282
+ self.progress.setValue(self.maximum)
2283
+ QApplication.processEvents() # 🔹 força atualização final
2284
+ self.progress.close() # 🔹 fecha explicitamente o diálogo
2285
+ QApplication.processEvents() # 🔹 garante que o fechamento seja processado
2286
+ else:
2287
+ print()
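A minimal sketch of how ProgressHandler is driven from a processing loop; the frame count and sleep are placeholders for real per-frame work:

    import time as timelib

    total = 50
    progress = ProgressHandler(None, label="Demo...", maximum=total)
    t0 = timelib.time()
    for i in range(total):
        timelib.sleep(0.01)                         # stand-in for per-frame processing
        progress.update(i + 1, timelib.time() - t0)
        if progress.was_canceled():                 # only meaningful when the GUI is up
            break
    progress.finish()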
2288
+
2289
+
2290
+
2291
+
1506
2292
  def menu():
1507
2293
  print("\n Options:")
1508
2294
  print("1. Video analysis")
@@ -1513,25 +2299,6 @@ def menu():
1513
2299
 
1514
2300
 
1515
2301
 
1516
- def draw_square(event, x, y, flags, param, imagem):
1517
-
1518
-
1519
- vertices = []
1520
-
1521
- imagem = cv2.imread('sample.jpg')
1522
-
1523
- if event == cv2.EVENT_LBUTTONDOWN:
1524
- vertices.append((x, y))
1525
-
1526
- if len(vertices) == 2:
1527
- # Draw the square on the original image
1528
- cv2.rectangle(imagem, vertices[0], vertices[1], (255, 0, 0), 5) # Blue with thickness 2
1529
- cv_imshow_safe("Imagem", imagem) # cv2.imshow('Imagem', imagem)
1530
- vertices.clear()
1531
-
1532
-
1533
- for i, vertice in enumerate(vertices):
1534
- print(f"Vértice {i + 1}: {vertice}")
1535
2302
 
1536
2303
  def save_data_video(data_in, coef_w, coef_h, coef_area, coef_conc, output_file):
1537
2304
 
@@ -1570,6 +2337,42 @@ def save_data_edf(data_in, output_file, option):
1570
2337
  str_ = f"{i_data['file']}, {i_data['date']}, {float(i_data['start_time']):.2f} \n";
1571
2338
  file_op.write(str_);
1572
2339
  file_op.close()
2340
+
2341
+
2342
+ def show_message(self, title, message, details=None, level="error"):
2343
+
2344
+ import traceback
2345
+ import sys
2346
+ from PyQt5.QtWidgets import QMessageBox, QApplication
2347
+
2348
+ app = QApplication.instance() # check whether a GUI is running
2349
+
2350
+ if app is not None:
2351
+
2352
+ msg = QMessageBox(self if hasattr(self, "windowTitle") else None)
2353
+ if level.lower() == "error":
2354
+ msg.setIcon(QMessageBox.Critical)
2355
+ elif level.lower() == "warning":
2356
+ msg.setIcon(QMessageBox.Warning)
2357
+ else:
2358
+ msg.setIcon(QMessageBox.Information)
2359
+
2360
+ msg.setWindowTitle(title)
2361
+ msg.setText(message)
2362
+ if details:
2363
+ msg.setDetailedText(details)
2364
+ msg.exec_()
2365
+ else:
2366
+
2367
+ print(f"\n{'='*60}")
2368
+ print(f"[{level.upper()}] {title}")
2369
+ print(f"→ {message}")
2370
+ if details:
2371
+ print("-" * 60)
2372
+ print(details)
2373
+ print("-" * 60)
2374
+ print(f"{'='*60}\n")
2375
+
1573
2376
 
1574
2377
  def read_file_video(input_file):
1575
2378
 
@@ -1627,4 +2430,108 @@ def calcule_surface_spheroide(edge_1, edge_2):
1627
2430
  e = np.sqrt(1.0 - (edge_1*edge_1)/(edge_2*edge_2)) # 0 < e < 1
1628
2431
  return 2.0 * np.pi * edge_1*edge_1 * (1.0 + (edge_2/(edge_1*e)) * np.arcsin(e))
1629
2432
 
1630
-
2433
+
2434
+
2435
+ def _int_to_fourcc(v: int) -> str:
2436
+ if not v:
2437
+ return ""
2438
+ chars = []
2439
+ for i in range(4):
2440
+ chars.append(chr((v >> (8 * i)) & 0xFF))
2441
+ s = "".join(chars)
2442
+ if not s.isprintable():
2443
+ return ""
2444
+ return s
2445
+
2446
+
2447
+ def _default_fourcc_candidates_for_ext(ext: str) -> List[str]:
2448
+ ext = ext.lower()
2449
+ # Reasonable candidates given typical OpenCV/FFmpeg builds (no guarantee)
2450
+ if ext in (".mp4", ".m4v", ".mov"):
2451
+ return ["mp4v", "avc1", "h264"] # mp4v is most portable in OpenCV wheels
2452
+ if ext == ".avi":
2453
+ return ["MJPG", "XVID", "mp4v"]
2454
+ if ext == ".mkv":
2455
+ return ["mp4v", "MJPG", "XVID"]
2456
+ # Very uncommon/unsupported for writing via OpenCV:
2457
+ if ext == ".flv":
2458
+ return [] # force user to change container
2459
+ return ["mp4v"]
2460
+
2461
+
2462
+ def _pick_writer_fourcc(cap: cv2.VideoCapture, out_path: str, user_codec: Optional[str]) -> List[str]:
2463
+ ext = os.path.splitext(out_path)[1].lower()
2464
+ # If user forces a codec, try it first
2465
+ candidates: List[str] = []
2466
+ if user_codec:
2467
+ candidates.append(user_codec)
2468
+
2469
+ # Try to reuse detected codec (rarely usable for writing, but try)
2470
+ detected = _int_to_fourcc(int(cap.get(cv2.CAP_PROP_FOURCC)))
2471
+ if detected and detected.strip("\x00").strip():
2472
+ candidates.append(detected)
2473
+
2474
+ # Add common candidates for the chosen extension
2475
+ candidates += _default_fourcc_candidates_for_ext(ext)
2476
+
2477
+ # Finally, add a few generic fallbacks
2478
+ for fallback in ("mp4v", "MJPG", "XVID", "avc1"):
2479
+ if fallback not in candidates:
2480
+ candidates.append(fallback)
2481
+
2482
+ # Remove empties/dupes while preserving order
2483
+ seen = set()
2484
+ out = []
2485
+ for c in candidates:
2486
+ c = (c or "").strip()
2487
+ if not c:
2488
+ continue
2489
+ if c not in seen:
2490
+ seen.add(c)
2491
+ out.append(c)
2492
+ return out
2493
+
2494
+
2495
+ def parse_drop_spec(spec: str, total_frames: int) -> Set[int]:
2496
+ if not spec:
2497
+ return set()
2498
+ result: Set[int] = set()
2499
+ for chunk in spec.split(","):
2500
+ chunk = chunk.strip()
2501
+ if not chunk:
2502
+ continue
2503
+ if "-" in chunk:
2504
+ a, b = chunk.split("-", 1)
2505
+ a = a.strip()
2506
+ b = b.strip()
2507
+ if not a.isdigit() or not b.isdigit():
2508
+ raise ValueError(f"Invalid range '{chunk}' in --drop spec")
2509
+ start = int(a)
2510
+ end = int(b)
2511
+ if start > end:
2512
+ start, end = end, start
2513
+ for i in range(start, end + 1):
2514
+ if 0 <= i < total_frames:
2515
+ result.add(i)
2516
+ else:
2517
+ if not chunk.isdigit():
2518
+ raise ValueError(f"Invalid index '{chunk}' in --drop spec")
2519
+ i = int(chunk)
2520
+ if 0 <= i < total_frames:
2521
+ result.add(i)
2522
+ return result
2523
+
2524
+ def _open_writer_any(tmp_out_path: str, fps: float, size: Tuple[int, int], candidates: List[str]) -> Tuple[Optional[cv2.VideoWriter], Optional[str]]:
2525
+
2526
+
2527
+ for c in candidates:
2528
+ try:
2529
+ fourcc = cv2.VideoWriter_fourcc(*c)
2530
+ w = cv2.VideoWriter(tmp_out_path, fourcc, fps, size)
2531
+ if w.isOpened():
2532
+ return w, c
2533
+ # release and try next
2534
+ w.release()
2535
+ except Exception:
2536
+ pass
2537
+ return None, None
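Finally, a short sketch of how these helpers are expected to cooperate when opening an output writer; the paths are placeholders and the input must exist for the probe to return sensible values:

    import cv2

    cap = cv2.VideoCapture("input.avi")          # placeholder input path
    out_path = "output.mp4"                      # placeholder output path
    fps = cap.get(cv2.CAP_PROP_FPS) or 25.0
    size = (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)),
            int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))

    candidates = _pick_writer_fourcc(cap, out_path, user_codec=None)
    writer, used = _open_writer_any(out_path, fps, size, candidates)
    if writer is None:
        raise RuntimeError(f"No usable codec among {candidates}")
    print(f"Writing with fourcc '{used}'")
    writer.release()
    cap.release()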