bplusplus 1.2.0.tar.gz → 1.2.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bplusplus might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: bplusplus
-Version: 1.2.0
+Version: 1.2.1
 Summary: A simple method to create AI models for biodiversity, with collect and prepare pipeline
 License: MIT
 Author: Titus Venverloo
@@ -15,6 +15,9 @@ Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: prettytable (==3.7.0)
 Requires-Dist: pygbif (>=0.6.4,<0.7.0)
 Requires-Dist: requests (==2.25.1)
+Requires-Dist: scikit-learn (>=1.6.1,<2.0.0)
+Requires-Dist: tabulate (>=0.9.0,<0.10.0)
+Requires-Dist: torch (==2.5.0)
 Requires-Dist: ultralytics (==8.0.195)
 Requires-Dist: validators (>=0.33.0,<0.34.0)
 Description-Content-Type: text/markdown
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "bplusplus"
-version = "1.2.0"
+version = "1.2.1"
 description = "A simple method to create AI models for biodiversity, with collect and prepare pipeline"
 authors = ["Titus Venverloo <tvenver@mit.edu>", "Deniz Aydemir <deniz@aydemir.us>", "Orlando Closs <orlando.closs@wur.nl>", "Ase Hatveit <aase@mit.edu>"]
 license = "MIT"
@@ -13,6 +13,9 @@ ultralytics = "8.0.195"
 pygbif = "^0.6.4"
 validators = "^0.33.0"
 prettytable = "3.7.0"
+scikit-learn = "^1.6.1"
+tabulate = "^0.9.0"
+torch = "2.5.0"

 [tool.poetry.group.dev.dependencies]
 jupyter = "^1.0.0"
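The new runtime dependencies pin torch to exactly 2.5.0, while scikit-learn and tabulate use caret ranges. A minimal sanity check of an installed environment against those pins might look like the following; the script is purely illustrative and is not part of bplusplus.

# Illustrative check of the 1.2.1 dependency pins shown above; not part of bplusplus.
from importlib.metadata import PackageNotFoundError, version

expected = {
    "torch": "2.5.0",                 # exact pin introduced in 1.2.1
    "scikit-learn": ">=1.6.1,<2.0.0",
    "tabulate": ">=0.9.0,<0.10.0",
}
for name, spec in expected.items():
    try:
        print(f"{name}: installed {version(name)}, declared {spec}")
    except PackageNotFoundError:
        print(f"{name}: not installed, declared {spec}")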
@@ -2,12 +2,13 @@ import os
 import random
 import threading
 from enum import Enum
-from typing import Any, Optional, List, Dict
-from tqdm import tqdm
-import random
+from typing import Any, Dict, List, Optional
+
 import pygbif
 import requests
 import validators
+from tqdm import tqdm
+

 #this lists currently supported groupings, more can be added with proper testing
 class Group(str, Enum):
@@ -70,6 +71,10 @@ def __single_collect(group_by_key: Group, search_parameters: dict[str, Any], ima

 # threaded_collect: paralellize the collection of images
 def __threaded_collect(images_per_group: int, output_directory: str, num_threads: int, groups: list[str]):
+    # Handle edge case where num_threads is greater than number of groups
+    if num_threads >= len(groups):
+        num_threads = len(groups)
+
     # Divide the species list into num_threads parts
     chunk_size = len(groups) // num_threads
     species_chunks = [
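The added guard clamps num_threads to the number of groups before the list is chunked; judging from the context lines, a larger thread count would otherwise make chunk_size zero and break the chunking. A standalone sketch of that behaviour follows; the helper mirrors the names in the hunk but is illustrative, not the package's actual code.

# Illustrative sketch of why the clamp matters; not bplusplus code.
def chunk_groups(groups: list[str], num_threads: int) -> list[list[str]]:
    # Clamp as in 1.2.1: with more threads than groups, chunk_size would be 0
    # and the range() step below would raise ValueError.
    if num_threads >= len(groups):
        num_threads = len(groups)
    chunk_size = len(groups) // num_threads
    return [groups[i:i + chunk_size] for i in range(0, len(groups), chunk_size)]

print(chunk_groups(["Apis", "Bombus"], num_threads=8))  # [['Apis'], ['Bombus']]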
@@ -1,38 +1,38 @@
 import os
 import random
-from typing import Any, Optional
-import requests
-import tempfile
-from .collect import Group, collect
-from pathlib import Path
-from ultralytics import YOLO
 import shutil
-from PIL import Image, ImageDraw, ImageFont
+import tempfile
 from collections import defaultdict
-from prettytable import PrettyTable
+from pathlib import Path
+from typing import Any, Optional
+
 import matplotlib.pyplot as plt
+import numpy as np
 import requests
-from tqdm import tqdm
-import yaml
 import torch
+import yaml
+from PIL import Image, ImageDraw, ImageFont
+from prettytable import PrettyTable
 from torch import serialization
-from ultralytics.nn.tasks import DetectionModel
-from torch.nn.modules.container import Sequential
-from ultralytics.nn.modules.conv import Conv
-from torch.nn.modules.conv import Conv2d
+from torch.nn import Module, ModuleDict, ModuleList
+from torch.nn.modules.activation import LeakyReLU, ReLU, SiLU
 # Add more modules to prevent further errors
 from torch.nn.modules.batchnorm import BatchNorm2d
-from torch.nn.modules.activation import SiLU, ReLU, LeakyReLU
-from torch.nn.modules.pooling import MaxPool2d
-from torch.nn.modules.linear import Linear
+from torch.nn.modules.container import Sequential
+from torch.nn.modules.conv import Conv2d
 from torch.nn.modules.dropout import Dropout
+from torch.nn.modules.linear import Linear
+from torch.nn.modules.pooling import MaxPool2d
 from torch.nn.modules.upsampling import Upsample
-from torch.nn import Module, ModuleList, ModuleDict
-from ultralytics.nn.modules import (
-    Bottleneck, C2f, SPPF, Detect, Concat
-)
+from tqdm import tqdm
+from ultralytics import YOLO
+from ultralytics.nn.modules import SPPF, Bottleneck, C2f, Concat, Detect
 from ultralytics.nn.modules.block import DFL
-import numpy as np
+from ultralytics.nn.modules.conv import Conv
+from ultralytics.nn.tasks import DetectionModel
+
+from .collect import Group, collect
+

 def prepare(input_directory: str, output_directory: str, one_stage: bool = False, with_background: bool = False, size_filter: bool = False, sizes: list = None):
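The reordered import block pulls in torch.serialization alongside a long list of torch.nn and ultralytics classes ("Add more modules to prevent further errors"), which is the usual pattern for allow-listing classes so a YOLO checkpoint can be loaded with torch.load(..., weights_only=True). The diff does not show the registration call itself, so the following is only a sketch of how such imports are typically used.

# Hedged sketch: how classes like those imported above are commonly allow-listed
# for weights_only loading. Whether bplusplus registers exactly this set is not
# visible in this diff.
from torch import serialization
from torch.nn import Module, ModuleDict, ModuleList
from torch.nn.modules.container import Sequential
from torch.nn.modules.conv import Conv2d
from ultralytics.nn.modules import SPPF, Bottleneck, C2f, Concat, Detect
from ultralytics.nn.modules.conv import Conv
from ultralytics.nn.tasks import DetectionModel

serialization.add_safe_globals([
    Module, ModuleDict, ModuleList, Sequential, Conv2d,
    SPPF, Bottleneck, C2f, Concat, Detect, Conv, DetectionModel,
])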
 
@@ -130,7 +130,8 @@ def prepare(input_directory: str, output_directory: str, one_stage: bool = False
         group_by_key=Group.scientificName,
         search_parameters=search,
         images_per_group=bg_images,
-        output_directory=temp_dir_path
+        output_directory=temp_dir_path,
+        num_threads=3
     )

     __delete_corrupted_images(temp_dir_path / "Plantae")
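The background-collection call now passes num_threads=3 explicitly. A hedged usage sketch with the same keywords follows; only the parameter names visible in this hunk are taken from the diff, while the import path and the shape of the search dictionary are assumptions.

# Illustrative call mirroring the keyword arguments visible in the hunk above.
# The exact public signature of collect and the structure of search_parameters
# are assumptions, not taken from this diff.
from pathlib import Path

from bplusplus import Group, collect  # assumes package-level re-exports

collect(
    group_by_key=Group.scientificName,
    search_parameters={"scientificName": ["Apis mellifera"]},
    images_per_group=50,
    output_directory=Path("data/background"),
    num_threads=3,
)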
File without changes