@anysphere/file-service 0.0.0-e3fdf62d → 0.0.0-eb8b99bf

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/Cargo.toml CHANGED
@@ -17,6 +17,9 @@ tempfile = "3.8.0"
17
17
  anyhow = "1.0.75"
18
18
  tonic = "0.9.2"
19
19
  prost = "0.11.9"
20
+ tracing = "0.1.37"
21
+ tracing-subscriber = "0.3.17"
22
+ tracing-appender = "0.2.2"
20
23
 
21
24
  [build-dependencies]
22
25
  napi-build = "2.0.1"
package/index.d.ts CHANGED
@@ -6,11 +6,15 @@
6
6
  export class MerkleClient {
7
7
  constructor(rootDirectory: string)
8
8
  init(): Promise<void>
9
+ computeMerkleTree(): Promise<void>
9
10
  updateFile(filePath: string): Promise<void>
10
11
  deleteFile(filePath: string): Promise<void>
11
- getSubtreeHash(path: string): Promise<string>
12
+ getSubtreeHash(relativePath: string): Promise<string>
12
13
  getNumEmbeddableFiles(): Promise<number>
13
14
  getAllFiles(): Promise<Array<string>>
15
+ getAllDirFilesToEmbed(absoluteFilePath: string): Promise<Array<string>>
14
16
  getNextFileToEmbed(): Promise<Array<string>>
17
+ getSpline(absoluteFilePath: string): Promise<Array<string>>
15
18
  getHashesForFiles(files: Array<string>): Promise<Array<string>>
19
+ updateRootDirectory(rootDirectory: string): void
16
20
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@anysphere/file-service",
3
- "version": "0.0.0-e3fdf62d",
3
+ "version": "0.0.0-eb8b99bf",
4
4
  "main": "index.js",
5
5
  "types": "index.d.ts",
6
6
  "napi": {
@@ -9,7 +9,8 @@
9
9
  "additional": [
10
10
  "aarch64-apple-darwin",
11
11
  "aarch64-pc-windows-msvc",
12
- "universal-apple-darwin"
12
+ "universal-apple-darwin",
13
+ "aarch64-unknown-linux-gnu"
13
14
  ]
14
15
  }
15
16
  },
@@ -35,11 +36,12 @@
35
36
  "version": "napi version"
36
37
  },
37
38
  "optionalDependencies": {
38
- "@anysphere/file-service-win32-x64-msvc": "0.0.0-e3fdf62d",
39
- "@anysphere/file-service-darwin-x64": "0.0.0-e3fdf62d",
40
- "@anysphere/file-service-linux-x64-gnu": "0.0.0-e3fdf62d",
41
- "@anysphere/file-service-darwin-arm64": "0.0.0-e3fdf62d",
42
- "@anysphere/file-service-win32-arm64-msvc": "0.0.0-e3fdf62d",
43
- "@anysphere/file-service-darwin-universal": "0.0.0-e3fdf62d"
39
+ "@anysphere/file-service-win32-x64-msvc": "0.0.0-eb8b99bf",
40
+ "@anysphere/file-service-darwin-x64": "0.0.0-eb8b99bf",
41
+ "@anysphere/file-service-linux-x64-gnu": "0.0.0-eb8b99bf",
42
+ "@anysphere/file-service-darwin-arm64": "0.0.0-eb8b99bf",
43
+ "@anysphere/file-service-win32-arm64-msvc": "0.0.0-eb8b99bf",
44
+ "@anysphere/file-service-darwin-universal": "0.0.0-eb8b99bf",
45
+ "@anysphere/file-service-linux-arm64-gnu": "0.0.0-eb8b99bf"
44
46
  }
45
47
  }
package/src/file_utils.rs CHANGED
@@ -12,9 +12,8 @@ pub fn is_in_bad_dir(file_path: &Path) -> Result<bool, Error> {
12
12
  let item_path = file_path
13
13
  .to_str()
14
14
  .ok_or(anyhow::anyhow!("Failed to convert path to string"))?;
15
- let is_bad_dir = (item_path.contains("node_modules")
16
- || item_path.contains(".git"))
17
- && !(item_path.ends_with(".git") || item_path.ends_with("node_modules"));
15
+ let is_bad_dir =
16
+ item_path.contains("node_modules") || item_path.contains(".git");
18
17
  Ok(is_bad_dir)
19
18
  }
20
19
 
@@ -38,14 +37,14 @@ pub fn is_good_file(file_path: &Path) -> Result<(), Error> {
38
37
 
39
38
  match file_name {
40
39
  "package-lock.json" | "pnpm-lock.yaml" | "yarn.lock" | "composer.lock"
41
- | "Gemfile.lock" => {
40
+ | "Gemfile.lock" | "bun.lockb" => {
42
41
  return Err(anyhow::anyhow!("File is just a lock file"));
43
42
  }
44
43
  _ => {}
45
44
  }
46
45
 
47
46
  match extension {
48
- "lock" | "bak" | "tmp" | "bin" | "exe" | "dll" | "so" => {
47
+ "lock" | "bak" | "tmp" | "bin" | "exe" | "dll" | "so" | "lockb" => {
49
48
  return Err(anyhow::anyhow!("File is just a lock file"));
50
49
  }
51
50
  _ => {}
package/src/git_utils.rs CHANGED
@@ -1,10 +1,10 @@
1
1
  use std::collections::HashSet;
2
- use std::path::PathBuf;
3
2
  use std::process::Command;
4
3
 
5
4
  pub fn list_ignored_files(
6
5
  workspace_root_path: &str,
7
- ) -> Result<HashSet<PathBuf>, Box<dyn std::error::Error>> {
6
+ should_return_absolute_paths: bool,
7
+ ) -> Result<HashSet<String>, Box<dyn std::error::Error>> {
8
8
  let mut gitignored_files = HashSet::new();
9
9
 
10
10
  let commands = vec![
@@ -15,12 +15,66 @@ pub fn list_ignored_files(
15
15
  "--ignored",
16
16
  "--exclude-standard",
17
17
  ],
18
+ // FIXME(sualeh): this is super sketchy and might totally break in like a bazillion ways. i dont like it.
19
+ vec![
20
+ "sh",
21
+ "-c",
22
+ "git submodule foreach --quiet 'git ls-files --others --ignored --exclude-standard | sed \"s|^|$path/|\"'",
23
+ ],
24
+ ];
25
+
26
+ for command in commands {
27
+ let output = Command::new(&command[0])
28
+ .args(&command[1..])
29
+ .current_dir(workspace_root_path)
30
+ .output()?;
31
+
32
+ if output.status.success() {
33
+ let files = String::from_utf8(output.stdout)?
34
+ .lines()
35
+ .filter(|line| !line.is_empty())
36
+ .map(|line| {
37
+ if should_return_absolute_paths {
38
+ let mut path = std::path::PathBuf::from(workspace_root_path);
39
+ path.push(line);
40
+
41
+ match path.canonicalize() {
42
+ Ok(canonical_path) => {
43
+ canonical_path.to_string_lossy().into_owned()
44
+ }
45
+ Err(_) => String::from(line),
46
+ }
47
+ } else {
48
+ String::from(line)
49
+ }
50
+ })
51
+ .collect::<HashSet<_>>();
52
+
53
+ gitignored_files.extend(files);
54
+ }
55
+ }
56
+
57
+ Ok(gitignored_files)
58
+ }
59
+
60
+ pub fn list_ignored_files_with_absolute_paths(
61
+ workspace_root_path: &str,
62
+ ) -> Result<HashSet<String>, Box<dyn std::error::Error>> {
63
+ let mut gitignored_files = HashSet::new();
64
+
65
+ let commands = vec![
18
66
  vec![
19
67
  "git",
20
- "submodule",
21
- "foreach",
22
- "--quiet",
23
- "git ls-files --others --ignored --exclude-standard | sed 's|^|$path/|'",
68
+ "ls-files",
69
+ "--others",
70
+ "--ignored",
71
+ "--exclude-standard",
72
+ ],
73
+ // FIXME(sualeh): this is super sketchy and might totally break in like a bazillion ways. i dont like it.
74
+ vec![
75
+ "sh",
76
+ "-c",
77
+ "git submodule foreach --quiet 'git ls-files --others --ignored --exclude-standard | sed \"s|^|$path/|\"'",
24
78
  ],
25
79
  ];
26
80
 
@@ -34,7 +88,7 @@ pub fn list_ignored_files(
34
88
  let files = String::from_utf8(output.stdout)?
35
89
  .lines()
36
90
  .filter(|line| !line.is_empty())
37
- .map(|line| PathBuf::from(line))
91
+ .map(|line| String::from(line))
38
92
  .collect::<HashSet<_>>();
39
93
 
40
94
  gitignored_files.extend(files);
@@ -57,6 +111,18 @@ pub async fn is_git_ignored(
57
111
  Ok(output.status.success())
58
112
  }
59
113
 
114
+ pub async fn is_git_directory(
115
+ workspace_root_path: &str,
116
+ ) -> Result<bool, anyhow::Error> {
117
+ let output = tokio::process::Command::new("git")
118
+ .args(&["rev-parse", "--is-inside-work-tree"])
119
+ .current_dir(workspace_root_path)
120
+ .output()
121
+ .await?;
122
+
123
+ Ok(output.status.success())
124
+ }
125
+
60
126
  #[cfg(test)]
61
127
  mod tests {
62
128
  use super::*;
@@ -66,7 +132,8 @@ mod tests {
66
132
  #[test]
67
133
  fn test_no_ignored_files() {
68
134
  let dir = tempfile::tempdir().unwrap();
69
- let gitignored_files = list_ignored_files(dir.path().to_str().unwrap()).unwrap();
135
+ let gitignored_files =
136
+ list_ignored_files(dir.path().to_str().unwrap(), false).unwrap();
70
137
  Command::new("git")
71
138
  .args(&["init"])
72
139
  .current_dir(dir.path())
@@ -92,13 +159,14 @@ mod tests {
92
159
  .current_dir(dir.path())
93
160
  .output()
94
161
  .unwrap();
95
- let gitignored_files = list_ignored_files(dir.path().to_str().unwrap()).unwrap();
162
+ let gitignored_files =
163
+ list_ignored_files(dir.path().to_str().unwrap(), false).unwrap();
96
164
  println!(
97
165
  "ignored files for test_one_ignored_file: {:?}",
98
166
  gitignored_files
99
167
  );
100
168
  // assert_eq!(gitignored_files.len(), 1);
101
- assert!(gitignored_files.contains(&PathBuf::from("ignored.txt")));
169
+ assert!(gitignored_files.contains(&String::from("ignored.txt")));
102
170
  }
103
171
 
104
172
  #[test]
@@ -121,19 +189,83 @@ mod tests {
121
189
  .current_dir(dir.path())
122
190
  .output()
123
191
  .unwrap();
124
- let gitignored_files = list_ignored_files(dir.path().to_str().unwrap()).unwrap();
192
+ let gitignored_files =
193
+ list_ignored_files(dir.path().to_str().unwrap(), false).unwrap();
125
194
  println!(
126
195
  "ignored files for test_multiple_ignored_files: {:?}",
127
196
  gitignored_files
128
197
  );
129
198
  // assert_eq!(gitignored_files.len(), 2);
130
- assert!(gitignored_files.contains(&PathBuf::from("ignored1.txt")));
131
- assert!(gitignored_files.contains(&PathBuf::from("ignored2.txt")));
199
+ assert!(gitignored_files.contains(&String::from("ignored1.txt")));
200
+ assert!(gitignored_files.contains(&String::from("ignored2.txt")));
201
+ }
202
+
203
+ #[test]
204
+ fn test_git_submodule_ignored_files() {
205
+ let dir = tempfile::tempdir().unwrap();
206
+ let submodule_path = dir.path().join("submodule");
207
+ std::fs::create_dir(&submodule_path).unwrap();
208
+
209
+ let o = Command::new("git")
210
+ .args(&["init"])
211
+ .current_dir(&submodule_path)
212
+ .output()
213
+ .unwrap();
214
+ println!("git init output: {:?}", o);
215
+
216
+ let file_path = submodule_path.join("ignored.txt");
217
+ let mut file = File::create(&file_path).unwrap();
218
+ writeln!(file, "This is an ignored file.").unwrap();
219
+
220
+ let file2 = submodule_path.join("ignored2.txt");
221
+ let mut file = File::create(&file2).unwrap();
222
+ writeln!(file, "This is another ignored file.").unwrap();
223
+
224
+ let gitignore_path = submodule_path.join(".gitignore");
225
+ let mut gitignore = File::create(&gitignore_path).unwrap();
226
+ writeln!(gitignore, "*.txt").unwrap();
227
+
228
+ let o = Command::new("git")
229
+ .args(&["init"])
230
+ .current_dir(dir.path())
231
+ .output()
232
+ .unwrap();
233
+ println!("git init output: {:?}", o);
234
+
235
+ // make a commit in the submodule
236
+ let o = Command::new("git")
237
+ .args(&["add", "."])
238
+ .current_dir(&submodule_path)
239
+ .output()
240
+ .unwrap();
241
+ println!("git add output: {:?}", o);
242
+ let o = Command::new("git")
243
+ .args(&["commit", "-m", "initial commit"])
244
+ .current_dir(&submodule_path)
245
+ .output()
246
+ .unwrap();
247
+ println!("git commit output: {:?}", o);
248
+
249
+ let o = Command::new("git")
250
+ .args(&["submodule", "add", "./submodule"])
251
+ .current_dir(dir.path())
252
+ .output()
253
+ .unwrap();
254
+ println!("git submodule add output: {:?}", o);
255
+
256
+ let gitignored_files =
257
+ list_ignored_files(dir.path().to_str().unwrap(), false).unwrap();
258
+ println!(
259
+ "ignored files for test_git_submodule_ignored_files: {:?}",
260
+ gitignored_files
261
+ );
262
+ assert!(gitignored_files.contains(&String::from("submodule/ignored.txt")));
263
+ assert!(gitignored_files.contains(&String::from("submodule/ignored2.txt")));
132
264
  }
133
265
 
134
266
  #[test]
135
267
  fn test_multiple_ignored_files_in_current_dir() {
136
- let gitignored_files = list_ignored_files(".").unwrap();
268
+ let gitignored_files = list_ignored_files(".", false).unwrap();
137
269
  assert!(gitignored_files.len() > 1);
138
270
 
139
271
  // print a sample of the ignored files
@@ -147,7 +279,6 @@ mod tests {
147
279
  }
148
280
  }
149
281
 
150
-
151
282
  #[tokio::test]
152
283
  async fn test_file_not_ignored() {
153
284
  let dir = tempfile::tempdir().unwrap();
@@ -160,7 +291,10 @@ mod tests {
160
291
  .current_dir(dir.path())
161
292
  .output()
162
293
  .unwrap();
163
- let is_ignored = is_git_ignored(dir.path().to_str().unwrap(), "not_ignored.txt").await.unwrap();
294
+ let is_ignored =
295
+ is_git_ignored(dir.path().to_str().unwrap(), "not_ignored.txt")
296
+ .await
297
+ .unwrap();
164
298
  assert_eq!(is_ignored, false);
165
299
  }
166
300
 
@@ -180,7 +314,10 @@ mod tests {
180
314
  .current_dir(dir.path())
181
315
  .output()
182
316
  .unwrap();
183
- let is_ignored = is_git_ignored(dir.path().to_str().unwrap(), "ignored.txt").await.unwrap();
317
+ let is_ignored =
318
+ is_git_ignored(dir.path().to_str().unwrap(), "ignored.txt")
319
+ .await
320
+ .unwrap();
184
321
  assert_eq!(is_ignored, true);
185
322
  }
186
323
 
@@ -200,8 +337,10 @@ mod tests {
200
337
  .current_dir(dir.path())
201
338
  .output()
202
339
  .unwrap();
203
- let is_ignored = is_git_ignored(dir.path().to_str().unwrap(), "ignored.txt").await.unwrap();
340
+ let is_ignored =
341
+ is_git_ignored(dir.path().to_str().unwrap(), "ignored.txt")
342
+ .await
343
+ .unwrap();
204
344
  assert_eq!(is_ignored, true);
205
345
  }
206
-
207
346
  }
package/src/lib.rs CHANGED
@@ -1,4 +1,5 @@
1
1
  #![deny(clippy::all)]
2
+ #![deny(unsafe_op_in_unsafe_fn)]
2
3
  pub mod file_utils;
3
4
  pub mod git_utils;
4
5
  pub mod merkle_tree;
@@ -6,6 +7,9 @@ pub mod merkle_tree;
6
7
  use std::vec;
7
8
 
8
9
  use merkle_tree::{LocalConstruction, MerkleTree};
10
+ use tracing::{info, Level};
11
+ use tracing_appender::rolling::{RollingFileAppender, Rotation};
12
+ use tracing_subscriber::fmt;
9
13
 
10
14
  #[macro_use]
11
15
  extern crate napi_derive;
@@ -14,15 +18,35 @@ extern crate napi_derive;
14
18
  pub struct MerkleClient {
15
19
  tree: MerkleTree,
16
20
  root_directory: String,
21
+ _guard: tracing_appender::non_blocking::WorkerGuard,
22
+ }
23
+
24
+ pub fn init_logger() -> tracing_appender::non_blocking::WorkerGuard {
25
+ let file_appender =
26
+ RollingFileAppender::new(Rotation::NEVER, "./", "rust_log.txt");
27
+ let (non_blocking, _guard) = tracing_appender::non_blocking(file_appender);
28
+ let subscriber = fmt::Subscriber::builder()
29
+ .with_max_level(Level::TRACE)
30
+ .with_writer(non_blocking)
31
+ .with_ansi(false)
32
+ .with_line_number(true)
33
+ .finish();
34
+
35
+ let _ = tracing::subscriber::set_global_default(subscriber);
36
+
37
+ _guard
17
38
  }
18
39
 
19
40
  #[napi]
20
41
  impl MerkleClient {
21
42
  #[napi(constructor)]
22
43
  pub fn new(root_directory: String) -> MerkleClient {
44
+ let _guard = init_logger();
45
+
23
46
  MerkleClient {
24
47
  tree: MerkleTree::empty_tree(),
25
48
  root_directory,
49
+ _guard,
26
50
  }
27
51
  }
28
52
 
@@ -31,7 +55,10 @@ impl MerkleClient {
31
55
  // 1. compute the merkle tree
32
56
  // 2. update the backend
33
57
  // 3. sync with the remote
34
- self.compute_merkle_tree().await?;
58
+ info!("Merkle tree compute started!");
59
+ unsafe {
60
+ self.compute_merkle_tree().await?;
61
+ }
35
62
 
36
63
  Ok(())
37
64
  }
@@ -40,13 +67,24 @@ impl MerkleClient {
40
67
  unimplemented!("Interrupt is not implemented yet");
41
68
  }
42
69
 
43
- // #[napi]
70
+ #[napi]
44
71
  pub async unsafe fn compute_merkle_tree(
45
72
  &mut self,
46
73
  ) -> Result<(), napi::Error> {
47
74
  let t =
48
75
  MerkleTree::construct_merkle_tree(self.root_directory.clone()).await;
49
76
 
77
+ let files = self.tree.get_all_files().await;
78
+
79
+ match files {
80
+ Ok(files) => {
81
+ info!("files: {:?}", files);
82
+ }
83
+ Err(e) => {
84
+ info!("Error in get_all_files: {:?}", e);
85
+ }
86
+ }
87
+
50
88
  match t {
51
89
  Ok(tree) => {
52
90
  self.tree = tree;
@@ -72,9 +110,13 @@ impl MerkleClient {
72
110
  #[napi]
73
111
  pub async fn get_subtree_hash(
74
112
  &self,
75
- path: String,
113
+ relative_path: String,
76
114
  ) -> Result<String, napi::Error> {
77
- let hash = self.tree.get_subtree_hash(path).await;
115
+ let absolute_path =
116
+ std::path::Path::new(&self.root_directory).join(relative_path);
117
+ let canonical_path = absolute_path.canonicalize().unwrap();
118
+
119
+ let hash = self.tree.get_subtree_hash(canonical_path).await;
78
120
 
79
121
  match hash {
80
122
  Ok(hash) => Ok(hash),
@@ -98,6 +140,28 @@ impl MerkleClient {
98
140
  }
99
141
  }
100
142
 
143
+ pub async fn get_num_embeddable_files_in_subtree(
144
+ &self,
145
+ relative_path: String,
146
+ ) -> Result<i32, napi::Error> {
147
+ let absolute_path = std::path::Path::new(&self.root_directory)
148
+ .join(relative_path)
149
+ .canonicalize()?;
150
+
151
+ let num = self
152
+ .tree
153
+ .get_num_embeddable_files_in_subtree(absolute_path)
154
+ .await;
155
+
156
+ match num {
157
+ Ok(num) => Ok(num),
158
+ Err(e) => Err(napi::Error::new(
159
+ napi::Status::Unknown,
160
+ format!("Error in get_num_embeddable_files_in_subtree: {:?}", e),
161
+ )),
162
+ }
163
+ }
164
+
101
165
  #[napi]
102
166
  pub async fn get_all_files(&self) -> Result<Vec<String>, napi::Error> {
103
167
  let files = self.tree.get_all_files().await;
@@ -111,6 +175,26 @@ impl MerkleClient {
111
175
  }
112
176
  }
113
177
 
178
+ #[napi]
179
+ pub async fn get_all_dir_files_to_embed(
180
+ &self,
181
+ absolute_file_path: String,
182
+ ) -> Result<Vec<String>, napi::Error> {
183
+ let absolute_path_str = absolute_file_path.as_str();
184
+ let files = self
185
+ .tree
186
+ .get_all_dir_files_to_embed(absolute_path_str)
187
+ .await;
188
+
189
+ match files {
190
+ Ok(files) => Ok(files),
191
+ Err(e) => Err(napi::Error::new(
192
+ napi::Status::Unknown,
193
+ format!("Error in get_all_dir_files_to_embed: {:?}", e),
194
+ )),
195
+ }
196
+ }
197
+
114
198
  #[napi]
115
199
  pub async unsafe fn get_next_file_to_embed(
116
200
  &mut self,
@@ -125,7 +209,6 @@ impl MerkleClient {
125
209
 
126
210
  let ret = vec![file];
127
211
  let ret = ret.into_iter().chain(path.into_iter()).collect::<Vec<_>>();
128
-
129
212
  Ok(ret)
130
213
  }
131
214
  Err(e) => Err(napi::Error::new(
@@ -135,6 +218,24 @@ impl MerkleClient {
135
218
  }
136
219
  }
137
220
 
221
+ // FIXME(sualeh): get_spline
222
+ #[napi]
223
+ pub async fn get_spline(
224
+ &self,
225
+ absolute_file_path: String,
226
+ ) -> Result<Vec<String>, napi::Error> {
227
+ let absolute_path_str = absolute_file_path.as_str();
228
+ let spline = self.tree.get_spline(absolute_path_str).await;
229
+
230
+ match spline {
231
+ Ok(spline) => Ok(spline),
232
+ Err(e) => Err(napi::Error::new(
233
+ napi::Status::Unknown,
234
+ format!("Error in get_spline: {:?}", e),
235
+ )),
236
+ }
237
+ }
238
+
138
239
  #[napi]
139
240
  pub async fn get_hashes_for_files(
140
241
  &self,
@@ -151,7 +252,7 @@ impl MerkleClient {
151
252
  }
152
253
  }
153
254
 
154
- // #[napi]
255
+ #[napi]
155
256
  pub fn update_root_directory(&mut self, root_directory: String) {
156
257
  self.root_directory = root_directory;
157
258
  }
@@ -1,10 +1,11 @@
1
+ use crate::git_utils;
1
2
  use crate::merkle_tree::{
2
3
  File, MerkleNode, MerkleNodePtr, NodeType, PinnedFuture,
3
4
  };
4
5
 
5
6
  use super::{LocalConstruction, MerkleTree};
6
- use std::path::PathBuf;
7
- use std::{collections::HashMap, path::Path, sync::Arc};
7
+ use std::collections::{BTreeMap, HashSet};
8
+ use std::path::{Path, PathBuf};
8
9
  use tonic::async_trait;
9
10
 
10
11
  #[async_trait]
@@ -28,32 +29,49 @@ impl LocalConstruction for MerkleTree {
28
29
  /// 3. construct merkle tree
29
30
  /// 4. return merkle tree
30
31
  async fn construct_merkle_tree(
31
- root_directory: String,
32
+ absolute_path_to_root_directory: String,
32
33
  ) -> Result<MerkleTree, anyhow::Error> {
33
- let path = PathBuf::from(root_directory.clone());
34
+ let path = PathBuf::from(absolute_path_to_root_directory.clone());
34
35
  if !path.exists() {
35
36
  // FIXME: we should report this via a good logger.
36
37
  panic!("Root directory does not exist!");
37
38
  }
38
39
 
39
- let root_node = MerkleNode::new(path, None).await;
40
+ // 1. get all the gitignored files
41
+ let git_ignored_files = match git_utils::list_ignored_files(
42
+ absolute_path_to_root_directory.as_str(),
43
+ true,
44
+ ) {
45
+ Ok(git_ignored) => git_ignored,
46
+ Err(_e) => HashSet::new(),
47
+ };
48
+
49
+ let root_node = MerkleNode::new(
50
+ path,
51
+ None,
52
+ &git_ignored_files,
53
+ absolute_path_to_root_directory.as_str(),
54
+ )
55
+ .await;
40
56
  let mut mt = MerkleTree {
41
57
  root: root_node,
42
- files: HashMap::new(),
43
- root_path: root_directory,
58
+ files: BTreeMap::new(),
59
+ root_path: absolute_path_to_root_directory,
44
60
  cursor: None,
61
+ git_ignored_files,
45
62
  };
46
63
 
47
64
  // we now iterate over all the nodes and add them to the hashmap
48
65
  // TODO(later): i can make this parallel.
49
66
  fn add_nodes_to_hashmap<'a>(
50
67
  node: &'a MerkleNodePtr,
51
- files: &'a mut HashMap<String, File>,
68
+ files: &'a mut BTreeMap<String, File>,
52
69
  ) -> PinnedFuture<'a, ()> {
53
70
  Box::pin(async move {
54
71
  let node_reader = node.read().await;
55
72
  match &node_reader.node_type {
56
73
  NodeType::Branch(n) => {
74
+ tracing::info!("Branch: {:?}", n.0);
57
75
  let children = &n.1;
58
76
  files.insert(n.0.clone(), File { node: node.clone() });
59
77
  for child in children {
@@ -62,6 +80,7 @@ impl LocalConstruction for MerkleTree {
62
80
  }
63
81
  NodeType::File(file_name) => {
64
82
  let f = File { node: node.clone() };
83
+ tracing::info!("File: {:?}", file_name);
65
84
  files.insert(file_name.clone(), f);
66
85
  }
67
86
  NodeType::ErrorNode(_) => {
@@ -73,6 +92,9 @@ impl LocalConstruction for MerkleTree {
73
92
 
74
93
  add_nodes_to_hashmap(&mt.root, &mut mt.files).await;
75
94
 
95
+ tracing::info!("Merkle tree compute finished!");
96
+ // tracing::info!("Merkle tree: {:?}", mt.files);
97
+
76
98
  Ok(mt)
77
99
  }
78
100
 
@@ -1,9 +1,13 @@
1
+ use crate::git_utils;
2
+
1
3
  use super::file_utils;
2
4
  use sha2::Digest;
5
+ use std::collections::{BTreeMap, HashSet};
3
6
  use std::path::PathBuf;
4
- use std::{collections::HashMap, fs, path::Path, sync::Arc};
7
+ use std::{fs, path::Path, sync::Arc};
5
8
  use tokio::sync::RwLock;
6
9
  use tonic::async_trait;
10
+ use tracing::info;
7
11
  pub mod local_construction;
8
12
  pub mod test;
9
13
 
@@ -12,8 +16,9 @@ pub type MerkleNodePtr = Arc<RwLock<MerkleNode>>;
12
16
  pub struct MerkleTree {
13
17
  root_path: String,
14
18
  root: MerkleNodePtr,
15
- files: HashMap<String, File>,
16
- cursor: Option<MerkleNodePtr>,
19
+ files: BTreeMap<String, File>,
20
+ cursor: Option<usize>,
21
+ git_ignored_files: HashSet<String>,
17
22
  }
18
23
 
19
24
  #[derive(Debug)]
@@ -87,21 +92,31 @@ impl MerkleTree {
87
92
  pub fn empty_tree() -> MerkleTree {
88
93
  MerkleTree {
89
94
  root: Arc::new(RwLock::new(MerkleNode::empty_node(None, None))),
90
- files: HashMap::new(),
95
+ files: BTreeMap::new(),
91
96
  root_path: "".to_string(),
92
97
  cursor: None,
98
+ git_ignored_files: HashSet::new(),
93
99
  }
94
100
  }
95
101
 
96
102
  pub async fn get_subtree_hash(
97
103
  &self,
98
- path: String,
104
+ absolute_path: PathBuf,
99
105
  ) -> Result<String, anyhow::Error> {
100
- let path = PathBuf::from(path);
101
- let node = match self.files.get(path.to_str().unwrap()) {
106
+ let abs_string = match absolute_path.to_str() {
107
+ Some(s) => s.to_string(),
108
+ None => {
109
+ return Err(anyhow::anyhow!(
110
+ "get_subtree_hash: Failed to convert path to string"
111
+ ))
112
+ }
113
+ };
114
+
115
+ let node = match self.files.get(&abs_string) {
102
116
  Some(file) => file.node.clone(),
103
117
  None => {
104
- return Err(anyhow::anyhow!("Could not find file in tree!"));
118
+ let all_files: Vec<String> = self.files.keys().cloned().collect();
119
+ return Err(anyhow::anyhow!("Could not find file in tree! Looking for: {}. All files: {:?}", abs_string, all_files));
105
120
  }
106
121
  };
107
122
 
@@ -132,6 +147,43 @@ impl MerkleTree {
132
147
  Ok(count)
133
148
  }
134
149
 
150
+ pub async fn get_num_embeddable_files_in_subtree(
151
+ &self,
152
+ absolute_path: PathBuf,
153
+ ) -> Result<i32, anyhow::Error> {
154
+ let mut count = 0;
155
+
156
+ let absolute_path = match absolute_path.to_str() {
157
+ Some(s) => s.to_string(),
158
+ None => {
159
+ return Err(anyhow::anyhow!(
160
+ "get_num_embeddable_files_in_subtree: Failed to convert path to string"
161
+ ))
162
+ }
163
+ };
164
+
165
+ // TODO(sualeh): worth keeping this list sorted. its now a btree
166
+
167
+ for (_, file) in &self.files {
168
+ let file_reader = file.node.read().await;
169
+ match &file_reader.node_type {
170
+ NodeType::File(file_name) => {
171
+ if file_name.contains(&absolute_path) {
172
+ count += 1;
173
+ }
174
+ }
175
+ NodeType::Branch(_) => {
176
+ continue;
177
+ }
178
+ NodeType::ErrorNode(_) => {
179
+ continue;
180
+ }
181
+ }
182
+ }
183
+
184
+ Ok(count)
185
+ }
186
+
135
187
  pub async fn get_all_files(&self) -> Result<Vec<String>, anyhow::Error> {
136
188
  let mut files = Vec::new();
137
189
 
@@ -188,83 +240,119 @@ impl MerkleTree {
188
240
  pub async fn get_next_file_to_embed(
189
241
  &mut self,
190
242
  ) -> Result<(String, Vec<String>), anyhow::Error> {
191
- // the plan is to do an in-order traversal of the tree.
192
-
193
- // first the edge case to deal with:
194
- // cursor == None
195
- if self.cursor.is_none() {
196
- // If the root is a file, return its name.
197
- if let NodeType::File(file_path) = &self.root.read().await.node_type {
198
- return Ok((file_path.clone(), vec![]));
199
- }
200
-
201
- // if the path is not empty, we can iterate till we find the first child.
202
- let mut potential_first_child = self.root.clone();
203
- let mut is_branch = true;
204
- let mut path = Vec::new();
205
-
206
- while is_branch {
207
- let node = {
208
- let potential_first_child_reader = potential_first_child.read().await;
209
- match &potential_first_child_reader.node_type {
210
- NodeType::Branch(branch) => branch.clone(),
211
- NodeType::File(_) => {
212
- return Err(anyhow::anyhow!(
213
- "get_next_file_to_embed: This should not happen! the branch happened to be file."
214
- ));
215
- }
216
- NodeType::ErrorNode(_) => {
217
- return Err(anyhow::anyhow!("Cursor is an error node!"));
218
- }
219
- }
220
- };
243
+ // if the cursor is none, set it to 0
244
+ let cursor = match self.cursor {
245
+ Some(cursor) => cursor,
246
+ None => {
247
+ self.cursor = Some(0);
248
+ 0
249
+ }
250
+ };
221
251
 
222
- let current_node_name = &node.0;
223
- let child_list = &node.1;
252
+ // get the thing at the cursor. while we dont find a file, we keep incrementing the cursor.
253
+ let mut cursor = cursor;
254
+ loop {
255
+ // O(log n)
256
+ let file = match self.files.values().nth(cursor) {
257
+ Some(file) => file,
258
+ None => {
259
+ return Err(anyhow::anyhow!("Could not find file to embed!"));
260
+ }
261
+ };
224
262
 
225
- if let Some(c) = child_list.first() {
226
- let c_reader = c.read().await;
263
+ let file_reader = file.node.read().await;
264
+ match &file_reader.node_type {
265
+ NodeType::File(f) => {
266
+ // update the cursor.
267
+ self.cursor = Some(cursor + 1);
268
+ let spline = self.get_spline(f).await?;
269
+ return Ok((f.clone(), spline));
270
+ }
271
+ NodeType::Branch(_) => {
272
+ cursor += 1;
273
+ continue;
274
+ }
275
+ NodeType::ErrorNode(_) => {
276
+ cursor += 1;
277
+ continue;
278
+ }
279
+ }
280
+ }
281
+ }
227
282
 
228
- match &c_reader.node_type {
229
- NodeType::File(file_path) => {
230
- // must set the cursor!
231
- self.cursor = Some(c.clone());
283
+ pub async fn get_all_dir_files_to_embed(
284
+ &self,
285
+ absolute_path: &str,
286
+ ) -> Result<Vec<String>, anyhow::Error> {
287
+ let mut files = Vec::new();
232
288
 
233
- return Ok((file_path.clone(), path));
234
- }
235
- NodeType::Branch(_) => {
236
- potential_first_child = c.clone();
237
- is_branch = true;
289
+ for (file_path, f) in &self.files {
290
+ if !file_path.contains(absolute_path) {
291
+ continue;
292
+ }
238
293
 
239
- // add the path to the current node.
240
- path.push(current_node_name.clone());
241
- }
242
- NodeType::ErrorNode(_) => {
243
- return Err(anyhow::anyhow!("Cursor is an error node!"));
244
- }
245
- }
246
- } else {
247
- // If the root has no children, return an error.
248
- return Err(anyhow::anyhow!("Root has no children!"));
294
+ match f.node.read().await.node_type {
295
+ NodeType::File(_) => {
296
+ files.push(file_path.clone());
297
+ }
298
+ NodeType::Branch(_) => {
299
+ continue;
300
+ }
301
+ NodeType::ErrorNode(_) => {
302
+ continue;
249
303
  }
250
304
  }
251
305
  }
252
306
 
253
- // THE DEFAULT CASE:
254
- // we already have a cursor at a file.
255
-
256
- // UNWRAP checked and fine. see the none case above.
257
- let cursor_name = self.cursor.as_ref().unwrap();
258
- let cursor_reader = cursor_name.read().await;
307
+ Ok(files)
308
+ }
259
309
 
260
- // invariant: you must be a file!!
310
+ // TODO(sualeh): i need tests for this!!
311
+ pub async fn get_spline(
312
+ &self,
313
+ absolute_path: &str,
314
+ ) -> Result<Vec<String>, anyhow::Error> {
315
+ info!("get_spline called with absolute_path: {}", absolute_path);
316
+ let mut files = Vec::new();
261
317
 
262
- // everytime we get to a child list, we will add all the children to a fifo, and then pull from it as long as we need it.
318
+ let current_node = match self.files.get(absolute_path) {
319
+ Some(node) => {
320
+ info!("Found node for absolute_path: {}", absolute_path);
321
+ node.node.clone()
322
+ }
323
+ None => {
324
+ info!("File not found for absolute_path: {}", absolute_path);
325
+ return Err(anyhow::anyhow!("File not found: {}", absolute_path));
326
+ }
327
+ };
263
328
 
264
- // algorithm:
265
- // 1.
329
+ let mut stack = Vec::new();
330
+ stack.push(current_node);
331
+
332
+ while let Some(node) = stack.pop() {
333
+ let parent = node.read().await.parent.clone();
334
+ if let Some(parent) = parent {
335
+ info!("Adding parent hash to files vector");
336
+ {
337
+ let parent_node = parent.read().await;
338
+ match &parent_node.node_type {
339
+ NodeType::File(file_name) => {
340
+ files.push(file_name.clone());
341
+ }
342
+ NodeType::Branch((branch_name, _)) => {
343
+ files.push(branch_name.clone());
344
+ }
345
+ _ => {
346
+ continue;
347
+ }
348
+ }
349
+ }
266
350
 
267
- Err(anyhow::anyhow!("Could not find file to embed!"))
351
+ stack.push(parent);
352
+ }
353
+ }
354
+ info!("Returning files vector with {} elements", files.len());
355
+ Ok(files)
268
356
  }
269
357
 
270
358
  /// creates a new node and attaches it to the current tree.
@@ -302,12 +390,18 @@ impl MerkleTree {
302
390
  // 1. the path is empty. this means that the ancestor is the root.
303
391
  // 2. the path is non-empty. that means there exist a non-empty element btwn till the root.
304
392
 
393
+ let absolute_root_path = self.root_path.clone();
305
394
  let new_node = match path.len() {
306
395
  0 => {
307
396
  // this means that the ancestor is the root.
308
397
  // we need to create a new node and attach it to the ancestor.
309
- let new_node =
310
- MerkleNode::new(file_path.clone(), Some(ancestor.clone())).await;
398
+ let new_node = MerkleNode::new(
399
+ file_path.clone(),
400
+ Some(ancestor.clone()),
401
+ &self.git_ignored_files,
402
+ &absolute_root_path.as_str(),
403
+ )
404
+ .await;
311
405
  ancestor.write().await.attach_child(new_node.clone()).await;
312
406
  new_node
313
407
  }
@@ -318,9 +412,13 @@ impl MerkleTree {
318
412
  // UNSURE: not sure this is the correct thing to do but it is the fastest.
319
413
  // get the last thing that is not in the tree.
320
414
  let first_child_path = path.last().unwrap();
321
- let first_child =
322
- MerkleNode::new(first_child_path.clone(), Some(ancestor.clone()))
323
- .await;
415
+ let first_child = MerkleNode::new(
416
+ first_child_path.clone(),
417
+ Some(ancestor.clone()),
418
+ &self.git_ignored_files,
419
+ &absolute_root_path.as_str(),
420
+ )
421
+ .await;
324
422
 
325
423
  // TODO(sualeh): we should do an assertion check that the entire vec is contained here.
326
424
 
@@ -597,18 +695,62 @@ use std::future::Future;
597
695
  use std::pin::Pin;
598
696
 
599
697
/// Boxed, pinned future used so `construct_node` can recurse in async code
/// (a plain `async fn` cannot directly call itself recursively).
type PinnedFuture<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>;
698
/// File paths reported as git-ignored, checked during file-node construction.
/// NOTE(review): whether these are absolute or relative paths is unverified —
/// the containment check in `construct_file_node` flags this itself; confirm.
type IgnoredFiles = HashSet<String>;
600
699
 
601
700
  impl MerkleNode {
602
701
  /// please be careful using this.
603
702
  async fn __new_unchecked(
604
703
  file_or_directory: String,
605
704
  parent: ParentPtr,
705
+ ignored_files: &IgnoredFiles,
706
+ absolute_root_path: &str,
606
707
  ) -> MerkleNodePtr {
607
- MerkleNode::construct_node(Path::new(&file_or_directory), parent).await
708
+ // check if the root is a git directory.
709
+ let is_git_repo =
710
+ match git_utils::is_git_directory(absolute_root_path).await {
711
+ Ok(is_git_repo) => is_git_repo,
712
+ Err(e) => false,
713
+ };
714
+ let bypass_git = !is_git_repo;
715
+
716
+ MerkleNode::construct_node(
717
+ Path::new(&file_or_directory),
718
+ parent,
719
+ ignored_files,
720
+ absolute_root_path,
721
+ bypass_git,
722
+ )
723
+ .await
608
724
  }
609
725
 
610
- async fn new(file_or_directory: PathBuf, parent: ParentPtr) -> MerkleNodePtr {
611
- MerkleNode::construct_node(Path::new(&file_or_directory), parent).await
726
+ async fn new(
727
+ absolute_file_or_directory: PathBuf,
728
+ parent: ParentPtr,
729
+ ignored_files: &IgnoredFiles,
730
+ absolute_root_path: &str,
731
+ ) -> MerkleNodePtr {
732
+ // check if the root is a git directory.
733
+ let is_git_repo =
734
+ match git_utils::is_git_directory(absolute_root_path).await {
735
+ Ok(is_git_repo) => is_git_repo,
736
+ Err(_e) => false,
737
+ };
738
+ let bypass_git = !is_git_repo;
739
+
740
+ info!(
741
+ "constructing node for absolute_file_or_directory: {:?}",
742
+ absolute_file_or_directory
743
+ );
744
+ info!("bypass_git: {}, is_git_repo: {}", bypass_git, is_git_repo);
745
+
746
+ MerkleNode::construct_node(
747
+ Path::new(&absolute_file_or_directory),
748
+ parent,
749
+ ignored_files,
750
+ absolute_root_path,
751
+ bypass_git,
752
+ )
753
+ .await
612
754
  }
613
755
 
614
756
  /// NOT added to the tree by default.
@@ -619,38 +761,68 @@ impl MerkleNode {
619
761
  // let file_hash = self.files.get_mut(&file_path).unwrap();
620
762
 
621
763
  fn construct_node<'a>(
622
- file_or_directory: &'a Path,
764
+ absolute_file_or_directory: &'a Path,
623
765
  parent: ParentPtr,
766
+ ignored_files: &'a IgnoredFiles,
767
+ absolute_root_path: &'a str,
768
+ bypass_git: bool,
624
769
  ) -> PinnedFuture<'a, MerkleNodePtr> {
625
770
  Box::pin(async move {
626
771
  // check if it is a file
627
- let path_str = file_or_directory.to_str().unwrap().to_string();
628
- if file_or_directory.is_file() {
772
+ let path_str = absolute_file_or_directory.to_str().unwrap().to_string();
773
+ if absolute_file_or_directory.is_file() {
629
774
  return Arc::new(RwLock::new(
630
775
  MerkleNode::construct_file_node_or_error_node(
631
- file_or_directory,
776
+ absolute_file_or_directory,
632
777
  parent,
778
+ ignored_files,
633
779
  )
634
780
  .await,
635
781
  ));
636
782
  }
637
783
 
638
784
  // check if the directory fails the bad dir test.
639
- let is_bad_dir = file_utils::is_in_bad_dir(file_or_directory);
785
+ let is_bad_dir = file_utils::is_in_bad_dir(absolute_file_or_directory);
640
786
  if is_bad_dir.is_err() || is_bad_dir.unwrap_or(false) {
641
787
  // println!("skipping directory: {}", path_str);
642
788
  return Arc::new(RwLock::new(MerkleNode::empty_node(
643
- Some(file_or_directory),
789
+ Some(absolute_file_or_directory),
644
790
  Some("Directory is in bad dir!".to_string()),
645
791
  )));
646
792
  }
647
793
 
648
- let entries = fs::read_dir(file_or_directory);
794
+ // check if the directory is git ignored
795
+ let is_git_ignored =
796
+ match git_utils::is_git_ignored(absolute_root_path, path_str.as_str())
797
+ .await
798
+ {
799
+ Ok(is_git_ignored) => is_git_ignored,
800
+ Err(e) => {
801
+ return Arc::new(RwLock::new(MerkleNode::empty_node(
802
+ Some(absolute_file_or_directory),
803
+ Some(e.to_string()),
804
+ )));
805
+ }
806
+ };
807
+
808
+ if is_git_ignored && !bypass_git {
809
+ // println!("skipping directory: {}", path_str);
810
+ tracing::info!(
811
+ "skipping directory because its git ignored: {}",
812
+ path_str
813
+ );
814
+ return Arc::new(RwLock::new(MerkleNode::empty_node(
815
+ Some(absolute_file_or_directory),
816
+ Some("Directory is git ignored!".to_string()),
817
+ )));
818
+ }
819
+
820
+ let entries = fs::read_dir(absolute_file_or_directory);
649
821
  match entries {
650
822
  Ok(_) => (),
651
823
  Err(e) => {
652
824
  return Arc::new(RwLock::new(MerkleNode::empty_node(
653
- Some(file_or_directory),
825
+ Some(absolute_file_or_directory),
654
826
  Some(e.to_string()),
655
827
  )));
656
828
  }
@@ -670,13 +842,19 @@ impl MerkleNode {
670
842
  match entry {
671
843
  Ok(entry) => {
672
844
  children.push(
673
- MerkleNode::construct_node(&entry.path(), Some(node.clone()))
674
- .await,
845
+ MerkleNode::construct_node(
846
+ &entry.path(),
847
+ Some(node.clone()),
848
+ ignored_files,
849
+ absolute_root_path,
850
+ bypass_git,
851
+ )
852
+ .await,
675
853
  );
676
854
  }
677
855
  Err(e) => {
678
856
  children.push(Arc::new(RwLock::new(MerkleNode::empty_node(
679
- Some(file_or_directory),
857
+ Some(absolute_file_or_directory),
680
858
  Some(e.to_string()),
681
859
  ))));
682
860
  }
@@ -696,23 +874,33 @@ impl MerkleNode {
696
874
  }
697
875
 
698
876
  async fn construct_file_node(
699
- file_path: &Path,
877
+ absolute_file_path: &Path,
700
878
  parent: ParentPtr,
879
+ ignored_files: &IgnoredFiles,
701
880
  ) -> Result<MerkleNode, String> {
702
- let file_str = file_path
881
+ let file_str = absolute_file_path
703
882
  .to_str()
704
883
  .ok_or("Could not convert file path to string!")?
705
884
  .to_string();
706
885
  // first see if it passes the
707
- match file_utils::is_good_file(file_path) {
886
+ match file_utils::is_good_file(absolute_file_path) {
708
887
  Ok(_) => {}
709
888
  Err(e) => {
710
889
  return Err(format!("File failed runtime checks! {}", e.to_string()));
711
890
  }
712
891
  }
713
892
 
893
+ // check if the file is in the git ignore buffer.
894
+ // this is a bug right because we are not checking absoluteness here.
895
+ match ignored_files.contains(&file_str) {
896
+ true => {
897
+ return Err(format!("File is in git ignore buffer!"));
898
+ }
899
+ false => {}
900
+ }
901
+
714
902
  // read the file_content to a buffer
715
- let file_content = match tokio::fs::read(file_path).await {
903
+ let file_content = match tokio::fs::read(absolute_file_path).await {
716
904
  Ok(content) => content,
717
905
  Err(e) => {
718
906
  return Err(format!("Could not read file! {}", e.to_string()));
@@ -720,7 +908,11 @@ impl MerkleNode {
720
908
  };
721
909
 
722
910
  // check if the file passes runtime checks.
723
- match file_utils::is_good_file_runtime_check(file_path, &file_content).await
911
+ match file_utils::is_good_file_runtime_check(
912
+ absolute_file_path,
913
+ &file_content,
914
+ )
915
+ .await
724
916
  {
725
917
  Ok(_) => {}
726
918
  Err(e) => {
@@ -751,15 +943,22 @@ impl MerkleNode {
751
943
  }
752
944
 
753
945
  async fn construct_file_node_or_error_node(
754
- file_path: &Path,
946
+ absolute_file_path: &Path,
755
947
  parent: ParentPtr,
948
+ ignored_files: &IgnoredFiles,
756
949
  ) -> MerkleNode {
757
- let node = match MerkleNode::construct_file_node(file_path, parent).await {
950
+ let node = match MerkleNode::construct_file_node(
951
+ absolute_file_path,
952
+ parent,
953
+ ignored_files,
954
+ )
955
+ .await
956
+ {
758
957
  Ok(node) => node,
759
958
  Err(e) => {
760
959
  // println!("constructing error node. error: {}", e);
761
960
  // println!("file_path: {:?}", file_path);
762
- MerkleNode::empty_node(Some(file_path), Some(e))
961
+ MerkleNode::empty_node(Some(absolute_file_path), Some(e))
763
962
  }
764
963
  };
765
964
 
package/src/test.rs ADDED
@@ -0,0 +1,5 @@
1
// Placeholder test module for the file service: compiled only under
// `cargo test`, with no test cases defined yet.
#[cfg(test)]
mod tests {
    // Pull the crate items under test into scope for future tests.
    use super::super::*;
    use std::path::PathBuf;
}