mlx-cluster 0.0.2__tar.gz → 0.0.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/CMakeLists.txt +0 -1
  2. {mlx_cluster-0.0.2/mlx_cluster.egg-info → mlx_cluster-0.0.3}/PKG-INFO +4 -3
  3. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster/_ext.cpython-311-darwin.so +0 -0
  4. mlx_cluster-0.0.3/mlx_cluster/libmlx.dylib +0 -0
  5. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster/libmlx_cluster.dylib +0 -0
  6. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3/mlx_cluster.egg-info}/PKG-INFO +4 -3
  7. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster.egg-info/SOURCES.txt +1 -0
  8. mlx_cluster-0.0.3/mlx_cluster.egg-info/requires.txt +7 -0
  9. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/pyproject.toml +9 -4
  10. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/RandomWalk.cpp +1 -0
  11. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/setup.py +9 -2
  12. mlx_cluster-0.0.3/tests/test_random_walk.py +35 -0
  13. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/tests/test_rejection_sampling.py +3 -18
  14. mlx_cluster-0.0.2/mlx_cluster.egg-info/requires.txt +0 -6
  15. mlx_cluster-0.0.2/tests/test_random_walk.py +0 -58
  16. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/LICENSE +0 -0
  17. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/MANIFEST.in +0 -0
  18. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/README.md +0 -0
  19. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/bindings.cpp +0 -0
  20. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster/__init__.py +0 -0
  21. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster/mlx_cluster.metallib +0 -0
  22. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster.egg-info/dependency_links.txt +0 -0
  23. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster.egg-info/not-zip-safe +0 -0
  24. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster.egg-info/top_level.txt +0 -0
  25. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/BiasedRandomWalk.cpp +0 -0
  26. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/BiasedRandomWalk.h +0 -0
  27. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/RandomWalk.h +0 -0
  28. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/random_walk.metal +0 -0
  29. {mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/setup.cfg +0 -0
{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/CMakeLists.txt
@@ -9,7 +9,6 @@ set(CMAKE_POSITION_INDEPENDENT_CODE ON)
 option(BUILD_SHARED_LIBS "Build extensions as a shared library" ON)
 
 # ----- Dependencies required ----
-find_package(fmt REQUIRED)
 find_package(MLX CONFIG REQUIRED)
 find_package(Python 3.8 COMPONENTS Interpreter Development.Module REQUIRED)
 execute_process(

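The 0.0.3 build drops the fmt dependency from CMake (and, further down, from the pyproject.toml build requirements). A quick way to sanity-check a built wheel after such a change is an import smoke test; the sketch below assumes the package is installed and uses only the two functions the bundled tests import:

    # Import smoke test (sketch): confirms the prebuilt extension loads without fmt.
    # random_walk and rejection_sampling are the two entry points used by the tests below.
    import mlx_cluster
    from mlx_cluster import random_walk, rejection_sampling

    print("loaded", mlx_cluster.__name__, "->", random_walk, rejection_sampling)
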
{mlx_cluster-0.0.2/mlx_cluster.egg-info → mlx_cluster-0.0.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlx_cluster
-Version: 0.0.2
+Version: 0.0.3
 Summary: C++ and Metal extensions for MLX CTC Loss
 Author-email: Vinay Pandya <vinayharshadpandya27@gmail.com>
 Project-URL: Homepage, https://github.com/vinayhpandya/mlx_cluster
@@ -15,8 +15,9 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Provides-Extra: dev
 Provides-Extra: test
-Requires-Dist: torch_geometric; extra == "test"
-Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest==7.4.4; extra == "test"
+Requires-Dist: torch; extra == "test"
+Requires-Dist: mlx_graphs; extra == "test"
 
 # mlx_cluster
 

{mlx_cluster-0.0.2 → mlx_cluster-0.0.3/mlx_cluster.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlx_cluster
-Version: 0.0.2
+Version: 0.0.3
 Summary: C++ and Metal extensions for MLX CTC Loss
 Author-email: Vinay Pandya <vinayharshadpandya27@gmail.com>
 Project-URL: Homepage, https://github.com/vinayhpandya/mlx_cluster
@@ -15,8 +15,9 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Provides-Extra: dev
 Provides-Extra: test
-Requires-Dist: torch_geometric; extra == "test"
-Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest==7.4.4; extra == "test"
+Requires-Dist: torch; extra == "test"
+Requires-Dist: mlx_graphs; extra == "test"
 
 # mlx_cluster
 

{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/mlx_cluster.egg-info/SOURCES.txt
@@ -7,6 +7,7 @@ pyproject.toml
 setup.py
 mlx_cluster/__init__.py
 mlx_cluster/_ext.cpython-311-darwin.so
+mlx_cluster/libmlx.dylib
 mlx_cluster/libmlx_cluster.dylib
 mlx_cluster/mlx_cluster.metallib
 mlx_cluster.egg-info/PKG-INFO

mlx_cluster-0.0.3/mlx_cluster.egg-info/requires.txt
@@ -0,0 +1,7 @@
+
+[dev]
+
+[test]
+pytest==7.4.4
+torch
+mlx_graphs

{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "mlx_cluster"
-version = "0.0.2"
+version = "0.0.3"
 authors = [
   { name = "Vinay Pandya", email = "vinayharshadpandya27@gmail.com" },
 ]
@@ -17,7 +17,11 @@ classifiers = [
 
 [project.optional-dependencies]
 dev = []
-
+test = [
+    "pytest==7.4.4",
+    "torch",
+    "mlx_graphs",
+]
 [project.urls]
 Homepage = "https://github.com/vinayhpandya/mlx_cluster"
 Issues = "https://github.com/vinayhpandya/mlx_cluster/Issues"
@@ -27,8 +31,9 @@ Issues = "https://github.com/vinayhpandya/mlx_cluster/Issues"
 requires = [
     "setuptools>=42",
     "cmake>=3.24",
-    "mlx==0.15.*",
+    "mlx>=0.17.0",
     "nanobind@git+https://github.com/wjakob/nanobind.git@2f04eac452a6d9142dedb957701bdb20125561e4",
-    "fmt"
 ]
+
+
 build-backend = "setuptools.build_meta"

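Besides adding the test extra, the build requirement on mlx moves from 0.15.* to >=0.17.0. A small check of a local environment against the new constraints might look like this sketch (it assumes the packaging library is available, as it ships alongside modern pip and setuptools):

    # Sketch: verify an installed environment against the 0.0.3 constraints.
    from importlib.metadata import version
    from packaging.version import Version  # assumed available alongside pip/setuptools

    assert Version(version("mlx")) >= Version("0.17.0"), "mlx older than the new build floor"
    assert Version(version("pytest")) == Version("7.4.4"), "test extra pins pytest==7.4.4"
    print("mlx", version("mlx"), "and pytest", version("pytest"), "match the 0.0.3 spec")
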
{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/random_walks/RandomWalk.cpp
@@ -37,6 +37,7 @@ namespace mlx::core {
   auto* col_values = col.data<int64_t>();
   auto* rand_values = rand.data<float>();
 
+  std::cout<<"After evaluating outputs"<<std::endl;
   for (int64_t n = 0; n < numel; n++) {
     int64_t n_cur = start_values[n];
     n_out_ptr[n * (walk_length_ + 1)] = n_cur;

{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/setup.py
@@ -4,13 +4,20 @@ from mlx import extension
 if __name__ == "__main__":
     setup(
         name="mlx_cluster",
-        version="0.0.2",
+        version="0.0.3",
         description="Sample C++ and Metal extensions for MLX primitives.",
         ext_modules=[extension.CMakeExtension("mlx_cluster._ext")],
         cmdclass={"build_ext": extension.CMakeBuild},
         packages=["mlx_cluster"],
         package_data={"mlx_cluster": ["*.so", "*.dylib", "*.metallib"]},
-        extras_require={"dev": [], "test": ["torch_geometric", "pytest"]},
+        extras_require={
+            "dev": [],
+            "test": [
+                "mlx_graphs",
+                "torch",
+                "pytest",
+            ],
+        },
         zip_safe=False,
         python_requires=">=3.8",
     )

mlx_cluster-0.0.3/tests/test_random_walk.py
@@ -0,0 +1,35 @@
+import mlx.core as mx
+import numpy as np
+import time
+
+# Torch dataset
+from torch.utils.data import DataLoader
+
+loader = DataLoader(range(2708), batch_size=2000)
+start_indices = next(iter(loader))
+
+from mlx_graphs.datasets import PlanetoidDataset
+from mlx_graphs.utils.sorting import sort_edge_index
+from torch.utils.data import DataLoader
+from mlx_cluster import random_walk
+
+cora_dataset = PlanetoidDataset(name="cora", base_dir="~")
+# For some reason int_64t and int_32t are not compatible
+edge_index = cora_dataset.graphs[0].edge_index.astype(mx.int64)
+# Convert edge index into a CSR matrix
+sorted_edge_index = sort_edge_index(edge_index=edge_index)
+row_mlx = sorted_edge_index[0][0]
+col_mlx = sorted_edge_index[0][1]
+_, counts_mlx = np.unique(np.array(row_mlx, copy=False), return_counts=True)
+cum_sum_mlx = counts_mlx.cumsum()
+row_ptr_mlx = mx.concatenate([mx.array([0]), mx.array(cum_sum_mlx)])
+start_indices = mx.array(start_indices.numpy())
+
+rand_data = mx.random.uniform(shape=[start_indices.shape[0], 5])
+start_time = time.time()
+
+node_sequence = random_walk(
+    row_ptr_mlx, col_mlx, start_indices, rand_data, 5, stream=mx.cpu
+)
+print("Time taken to complete 1000 random walks : ", time.time() - start_time)
+print("MLX random walks are", node_sequence)

{mlx_cluster-0.0.2 → mlx_cluster-0.0.3}/tests/test_rejection_sampling.py
@@ -2,19 +2,10 @@ import mlx.core as mx
 import numpy as np
 import time
 
-# Torch dataset
-import torch
-import torch_geometric.datasets as pyg_datasets
-from torch_geometric.utils import sort_edge_index
-from torch_geometric.utils.num_nodes import maybe_num_nodes
-from torch_geometric.utils.sparse import index2ptr
+# Torch dataloader
 from torch.utils.data import DataLoader
 
-torch_planetoid = pyg_datasets.Planetoid(root="data/Cora", name="Cora")
-edge_index_torch = torch_planetoid.edge_index
-num_nodes = maybe_num_nodes(edge_index=edge_index_torch)
-row, col = sort_edge_index(edge_index=edge_index_torch, num_nodes=num_nodes)
-row_ptr, col = index2ptr(row, num_nodes), col
+
 loader = DataLoader(range(2708), batch_size=2000)
 start_indices = next(iter(loader))
 # random_walks = torch.ops.torch_cluster.random_walk(
@@ -23,7 +14,6 @@ start_indices = next(iter(loader))
 
 from mlx_graphs.datasets import PlanetoidDataset
 from mlx_graphs.utils.sorting import sort_edge_index
-from torch.utils.data import DataLoader
 from mlx_cluster import rejection_sampling
 
 cora_dataset = PlanetoidDataset(name="cora", base_dir="~")
@@ -35,15 +25,10 @@ _, counts_mlx = np.unique(np.array(row_mlx, copy=False), return_counts=True)
 cum_sum_mlx = counts_mlx.cumsum()
 row_ptr_mlx = mx.concatenate([mx.array([0]), mx.array(cum_sum_mlx)])
 start_indices = mx.array(start_indices.numpy())
-print("row pointer datatype", row_ptr_mlx.dtype)
-print("col datatype", col_mlx.dtype)
-print("start pointer datatype", start_indices.dtype)
-assert mx.array_equal(row_ptr_mlx, mx.array(row_ptr.numpy())), "Arrays not equal"
-assert mx.array_equal(col_mlx, mx.array(col.numpy())), "Col arrays are not equal"
 rand_data = mx.random.uniform(shape=[start_indices.shape[0], 5])
 start_time = time.time()
 node_sequence = rejection_sampling(
     row_ptr_mlx, col_mlx, start_indices, 5, 1.0, 3.0, stream=mx.cpu
 )
-# print("Time taken to complete 1000 random walks : ", time.time() - start_time)
+print("Time taken to complete random walks : ", time.time() - start_time)
 print(node_sequence)

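Both tests build their CSR inputs the same way: sort the edge index by source node, count each source's occurrences with np.unique, take the cumulative sum, and prepend a zero to get row_ptr. The standalone sketch below (a hypothetical helper, not part of the package) shows that conversion on a toy edge index; note that, as in the tests, source nodes with no outgoing edges are not accounted for:

    # Hypothetical helper illustrating the CSR conversion used by both tests:
    # a source-sorted edge index (row, col) becomes (row_ptr, col).
    import mlx.core as mx
    import numpy as np


    def edge_index_to_csr(row, col):
        # counts[i] is the out-degree of the i-th distinct source node.
        _, counts = np.unique(np.array(row, copy=False), return_counts=True)
        row_ptr = mx.concatenate([mx.array([0]), mx.array(counts.cumsum())])
        return row_ptr, col


    # Toy graph: edges 0->1, 0->2, 1->2, 2->0, already sorted by source.
    row = mx.array([0, 0, 1, 2], dtype=mx.int64)
    col = mx.array([1, 2, 2, 0], dtype=mx.int64)

    row_ptr, col = edge_index_to_csr(row, col)
    print(row_ptr)  # [0, 2, 3, 4]: node 0 owns col[0:2], node 1 col[2:3], node 2 col[3:4]
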
mlx_cluster-0.0.2/mlx_cluster.egg-info/requires.txt
@@ -1,6 +0,0 @@
-
-[dev]
-
-[test]
-torch_geometric
-pytest

mlx_cluster-0.0.2/tests/test_random_walk.py
@@ -1,58 +0,0 @@
-import mlx.core as mx
-import numpy as np
-import time
-
-# Torch dataset
-import torch
-import torch_geometric.datasets as pyg_datasets
-from torch_geometric.utils import sort_edge_index
-from torch_geometric.utils.num_nodes import maybe_num_nodes
-from torch_geometric.utils.sparse import index2ptr
-from torch.utils.data import DataLoader
-
-torch_planetoid = pyg_datasets.Planetoid(root="data/Cora", name="Cora")
-edge_index_torch = torch_planetoid.edge_index
-num_nodes = maybe_num_nodes(edge_index=edge_index_torch)
-row, col = sort_edge_index(edge_index=edge_index_torch, num_nodes=num_nodes)
-row_ptr, col = index2ptr(row, num_nodes), col
-loader = DataLoader(range(2708), batch_size=2000)
-start_indices = next(iter(loader))
-print(edge_index_torch.dtype)
-print(row_ptr.dtype)
-print(col.dtype)
-print(start_indices.dtype)
-random_walks = torch.ops.torch_cluster.random_walk(
-    row_ptr, col, start_indices, 5, 1.0, 1.0
-)
-
-from mlx_graphs.datasets import PlanetoidDataset
-from mlx_graphs.utils.sorting import sort_edge_index
-from torch.utils.data import DataLoader
-from mlx_cluster import random_walk
-
-cora_dataset = PlanetoidDataset(name="cora", base_dir="~")
-edge_index = cora_dataset.graphs[0].edge_index.astype(mx.int64)
-sorted_edge_index = sort_edge_index(edge_index=edge_index)
-print(edge_index.dtype)
-row_mlx = sorted_edge_index[0][0]
-col_mlx = sorted_edge_index[0][1]
-_, counts_mlx = np.unique(np.array(row_mlx, copy=False), return_counts=True)
-cum_sum_mlx = counts_mlx.cumsum()
-row_ptr_mlx = mx.concatenate([mx.array([0]), mx.array(cum_sum_mlx)])
-start_indices = mx.array(start_indices.numpy())
-print("Start indices data type is ", start_indices.dtype)
-print("Col mlx data type is ", col_mlx.dtype)
-print("Row mlx data type is ", row_ptr_mlx.dtype)
-assert mx.array_equal(row_ptr_mlx, mx.array(row_ptr.numpy())), "Arrays not equal"
-assert mx.array_equal(col_mlx, mx.array(col.numpy())), "Col arrays are not equal"
-rand_data = mx.random.uniform(shape=[start_indices.shape[0], 5])
-start_time = time.time()
-print("Start indices data type is ", start_indices.dtype)
-print("Col mlx data type is ", col_mlx.dtype)
-print("Row mlx data type is ", row_ptr_mlx.dtype)
-node_sequence = random_walk(
-    row_ptr_mlx, col_mlx, start_indices, rand_data, 5, stream=mx.gpu
-)
-# print("Time taken to complete 1000 random walks : ", time.time() - start_time)
-print("Torch random walks are", random_walks[0])
-print("MLX random walks are", node_sequence)
