gqlite 1.3.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. checksums.yaml +4 -4
  2. data/ext/Cargo.toml +4 -3
  3. data/ext/gqlitedb/Cargo.toml +7 -4
  4. data/ext/gqlitedb/src/aggregators/arithmetic.rs +1 -0
  5. data/ext/gqlitedb/src/compiler/expression_analyser.rs +2 -0
  6. data/ext/gqlitedb/src/compiler.rs +31 -19
  7. data/ext/gqlitedb/src/connection.rs +42 -7
  8. data/ext/gqlitedb/src/error.rs +3 -0
  9. data/ext/gqlitedb/src/functions/containers.rs +1 -1
  10. data/ext/gqlitedb/src/functions/path.rs +1 -1
  11. data/ext/gqlitedb/src/functions/scalar.rs +23 -0
  12. data/ext/gqlitedb/src/functions.rs +8 -0
  13. data/ext/gqlitedb/src/interpreter/evaluators.rs +10 -10
  14. data/ext/gqlitedb/src/interpreter/instructions.rs +3 -3
  15. data/ext/gqlitedb/src/lib.rs +1 -3
  16. data/ext/gqlitedb/src/parser/ast.rs +1 -0
  17. data/ext/gqlitedb/src/parser/gql.pest +3 -1
  18. data/ext/gqlitedb/src/parser/parser_impl.rs +8 -0
  19. data/ext/gqlitedb/src/prelude.rs +3 -0
  20. data/ext/gqlitedb/src/store/{pgql.rs → pgrx.rs} +2 -0
  21. data/ext/gqlitedb/src/store/postgres.rs +0 -0
  22. data/ext/gqlitedb/src/store/sqlbase/sqlmetadata.rs +117 -0
  23. data/ext/gqlitedb/src/store/sqlbase/sqlqueries.rs +62 -0
  24. data/ext/gqlitedb/src/store/sqlbase/sqlstore.rs +55 -0
  25. data/ext/gqlitedb/src/store/sqlbase/sqlvalue.rs +189 -0
  26. data/ext/gqlitedb/src/store/sqlbase.rs +456 -0
  27. data/ext/gqlitedb/src/store/sqlite.rs +271 -573
  28. data/ext/gqlitedb/src/store.rs +7 -5
  29. data/ext/gqlitedb/src/tests/templates/programs.rs +10 -10
  30. data/ext/gqlitedb/src/utils.rs +25 -0
  31. data/ext/gqlitedb/src/value/compare.rs +6 -0
  32. data/ext/gqlitedb/src/value.rs +18 -2
  33. data/ext/gqlitedb/templates/sql/sqlite/edge_select.sql +18 -18
  34. data/ext/gqlitedb/templates/sql/sqlite/edge_update.sql +3 -3
  35. data/ext/gqlitedb/templates/sql/sqlite/node_select.sql +6 -6
  36. data/ext/gqlitedb/templates/sql/sqlite/node_update.sql +3 -3
  37. data/ext/gqliterb/src/lib.rs +30 -2
  38. data/ext/graphcore/Cargo.toml +3 -2
  39. data/ext/graphcore/src/error.rs +2 -0
  40. data/ext/graphcore/src/lib.rs +2 -1
  41. data/ext/graphcore/src/prelude.rs +1 -1
  42. data/ext/graphcore/src/table.rs +1 -1
  43. data/ext/graphcore/src/timestamp.rs +104 -0
  44. data/ext/graphcore/src/value.rs +106 -23
  45. metadata +10 -7
  46. data/ext/gqlitedb/gqlite_bench_data/README.MD +0 -6
  47. data/ext/gqlitedb/gqlite_bench_data/scripts/generate_smaller_pokec.rb +0 -85
  48. data/ext/gqlitedb/gqlite_bench_data/scripts/to_efficient_pokec.rb +0 -34
  49. data/ext/graphcore/release.toml +0 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: c8b19f14f03ff7f92bf78f132e0c26f800e2d9ce3be108557bb9d23c11e0e8d1
4
- data.tar.gz: aad435a97e100195806af926765c90f6c57d1232c51afe771d2b89f00ffa2ea2
3
+ metadata.gz: 1b7eba40745af114ff57ca358b2e03c67a7adb9717ad7c7b81ddd7444503fbb1
4
+ data.tar.gz: 3ae6e7c7b038a2caeaf728ff7ce98c49266160d181f478c8259b98d7d8157ba5
5
5
  SHA512:
6
- metadata.gz: fe9d202a5b338e34141df0240867a68b4f794067dc506d173264a66decd973029dc04f64da9f0a1f6192da8cab652e8c71e8c63a1432c4110259d971d64d3697
7
- data.tar.gz: 6f90979086a3be82858d38b62bc23a84ab8a05266ca048541d59c87d5f43757219e88c8a2d7c158762e19533e17720f2d5b583d24d8614a6bbef9196374b6004
6
+ metadata.gz: 6674d35ca4d0ec02090ef1521f9f8bb2a4025a050ed312e9c20d50033c8a3472a6a31bdb73dfe6fa9ec99612518c5f5edd72d3ea9d8ead7885c39ed106ac6056
7
+ data.tar.gz: e200b7d2a4203cf803a65bcd6eef82a990fc8c1fa2e2846148673109731353898dacd7b388fe23c9c5b8e5eeae2b56768912408f6f916805111d322c0a83d5ac
data/ext/Cargo.toml CHANGED
@@ -3,17 +3,18 @@ resolver = "2"
3
3
  members = ["gqliterb", "gqlitedb", "graphcore"]
4
4
 
5
5
  [workspace.package]
6
- version = "0.6.0"
6
+ version = "0.7.0"
7
7
  edition = "2021"
8
8
  license = "MIT"
9
9
  homepage = "https://gqlite.org"
10
10
  repository = "https://gitlab.com/gqlite/gqlite"
11
11
 
12
12
  [workspace.dependencies]
13
- graphcore = { version = "0.2.0", path = "graphcore" }
14
- gqlitedb = { version = "0.6.0", path = "gqlitedb" }
13
+ graphcore = { version = "0.7.0", path = "graphcore" }
14
+ gqlitedb = { version = "0.7.0", path = "gqlitedb" }
15
15
 
16
16
  askama = { version = "0.14" }
17
+ ccutils = { version = "0.4" }
17
18
  itertools = "0.14"
18
19
  serde = "1"
19
20
  thiserror = "2"
@@ -16,12 +16,13 @@ default = ["redb", "capi", "sqlite"]
16
16
  _backtrace = []
17
17
  capi = []
18
18
  redb = ["dep:redb", "dep:redb2"]
19
- _pgql = ["dep:pgrx"]
19
+ _pgrx = ["dep:pgrx"]
20
20
  _pg13 = ["pgrx/pg13"]
21
21
  _pg14 = ["pgrx/pg14"]
22
22
  _pg15 = ["pgrx/pg15"]
23
23
  _pg16 = ["pgrx/pg16"]
24
24
  _pg17 = ["pgrx/pg17"]
25
+ postgres = ["dep:postgres"]
25
26
  sqlite = ["dep:rusqlite", "dep:askama"]
26
27
  _value_private = []
27
28
  bundled = ["rusqlite/bundled"]
@@ -30,12 +31,13 @@ bundled = ["rusqlite/bundled"]
30
31
  graphcore = { workspace = true }
31
32
 
32
33
  askama = { workspace = true, optional = true }
33
- ccutils = { version = "0.4", features = ["alias", "pool", "sync"] }
34
+ ccutils = { workspace = true, features = ["alias", "pool", "sync"] }
34
35
  ciborium = "0.2"
35
36
  itertools = { workspace = true }
36
- pgrx = { version = "0.16", optional = true }
37
37
  pest = "2"
38
38
  pest_derive = "2"
39
+ pgrx = { version = "0.16", optional = true }
40
+ postgres = { version = "0.19", optional = true }
39
41
  rand = "0.9"
40
42
  redb = { version = "3", optional = true }
41
43
  redb2 = { version = "2", optional = true, package = "redb" }
@@ -49,9 +51,10 @@ thiserror = { workspace = true }
49
51
  uuid = { workspace = true }
50
52
 
51
53
  [dev-dependencies]
52
- ccutils = { version = "0.4", features = ["alias", "temporary"] }
54
+ ccutils = { workspace = true, features = ["alias", "temporary"] }
53
55
  divan = "0.1"
54
56
  iai-callgrind = { version = "0.16" }
57
+ pgtemp = "0.6"
55
58
  regex = "1"
56
59
 
57
60
  # web:
@@ -46,6 +46,7 @@ where
46
46
  | Value::Edge(..)
47
47
  | Value::Array(..)
48
48
  | Value::String(..)
49
+ | Value::TimeStamp(..)
49
50
  | Value::Map(..)
50
51
  | Value::Path(..) => Err(RunTimeError::InvalidBinaryOperands)?,
51
52
  Value::Null =>
@@ -21,6 +21,7 @@ pub(crate) enum ExpressionType
21
21
  Float,
22
22
  Path,
23
23
  String,
24
+ TimeStamp,
24
25
  Variant,
25
26
  }
26
27
 
@@ -419,6 +420,7 @@ impl<'b> Analyser<'b>
419
420
  value::Value::Map(_) => ExpressionType::Map,
420
421
  value::Value::Path(_) => ExpressionType::Path,
421
422
  value::Value::String(_) => ExpressionType::String,
423
+ value::Value::TimeStamp(_) => ExpressionType::TimeStamp,
422
424
  },
423
425
  true,
424
426
  false,
@@ -19,6 +19,14 @@ macro_rules! compile_binary_op {
19
19
  };
20
20
  }
21
21
 
22
+ struct CompiledReturnWith
23
+ {
24
+ variables: Vec<(String, RWExpression)>,
25
+ filter: Instructions,
26
+ modifiers: Modifiers,
27
+ variables_sizes: VariablesSizes,
28
+ }
29
+
22
30
  struct Compiler
23
31
  {
24
32
  function_manager: functions::Manager,
@@ -701,19 +709,13 @@ impl Compiler
701
709
  })
702
710
  }
703
711
 
704
- #[allow(clippy::type_complexity)]
705
712
  fn compile_return_with(
706
713
  &mut self,
707
714
  all: bool,
708
715
  expressions: &[ast::NamedExpression],
709
716
  where_expression: &Option<ast::Expression>,
710
717
  modifiers: &ast::Modifiers,
711
- ) -> Result<(
712
- Vec<(String, RWExpression)>,
713
- Instructions,
714
- Modifiers,
715
- VariablesSizes,
716
- )>
718
+ ) -> Result<CompiledReturnWith>
717
719
  {
718
720
  let mut variables = Vec::<(ast::VariableIdentifier, RWExpression)>::new();
719
721
  let mut filter = Default::default();
@@ -775,7 +777,7 @@ impl Compiler
775
777
  }
776
778
 
777
779
  let modifiers = self.compile_modifiers(modifiers)?;
778
- let variables_size = self.variables_size();
780
+ let variables_sizes = self.variables_size();
779
781
  self
780
782
  .variables_manager
781
783
  .keep_variables(variables.iter().map(|(n, _)| n))?;
@@ -784,7 +786,12 @@ impl Compiler
784
786
  .into_iter()
785
787
  .map(|(var_id, e)| (var_id.take_name(), e))
786
788
  .collect();
787
- Ok((variables, filter, modifiers, variables_size))
789
+ Ok(CompiledReturnWith {
790
+ variables,
791
+ filter,
792
+ modifiers,
793
+ variables_sizes,
794
+ })
788
795
  }
789
796
 
790
797
  fn compile_match_patterns(
@@ -935,6 +942,7 @@ pub(crate) fn compile(
935
942
  {
936
943
  ast::Statement::CreateGraph(create_graph) => Ok(Block::CreateGraph {
937
944
  name: create_graph.name.to_owned(),
945
+ if_not_exists: create_graph.if_not_exists,
938
946
  }),
939
947
  ast::Statement::DropGraph(drop_graph) => Ok(Block::DropGraph {
940
948
  name: drop_graph.name.to_owned(),
@@ -951,17 +959,17 @@ pub(crate) fn compile(
951
959
  ),
952
960
  ast::Statement::Return(return_statement) =>
953
961
  {
954
- let (variables, filter, modifiers, variables_size) = compiler.compile_return_with(
962
+ let compiled_return_with = compiler.compile_return_with(
955
963
  return_statement.all,
956
964
  &return_statement.expressions,
957
965
  &return_statement.where_expression,
958
966
  &return_statement.modifiers,
959
967
  )?;
960
968
  Ok(Block::Return {
961
- variables,
962
- filter,
963
- modifiers,
964
- variables_size,
969
+ variables: compiled_return_with.variables,
970
+ filter: compiled_return_with.filter,
971
+ modifiers: compiled_return_with.modifiers,
972
+ variables_sizes: compiled_return_with.variables_sizes,
965
973
  })
966
974
  }
967
975
  ast::Statement::Call(call) =>
@@ -978,17 +986,21 @@ pub(crate) fn compile(
978
986
  }
979
987
  ast::Statement::With(with) =>
980
988
  {
981
- let (variables, filter, modifiers, variables_size) = compiler.compile_return_with(
989
+ let compiled_return_with = compiler.compile_return_with(
982
990
  with.all,
983
991
  &with.expressions,
984
992
  &with.where_expression,
985
993
  &with.modifiers,
986
994
  )?;
987
995
  Ok(Block::With {
988
- variables: variables.into_iter().map(|(_, v)| v).collect(),
989
- filter,
990
- modifiers,
991
- variables_size,
996
+ variables: compiled_return_with
997
+ .variables
998
+ .into_iter()
999
+ .map(|(_, v)| v)
1000
+ .collect(),
1001
+ filter: compiled_return_with.filter,
1002
+ modifiers: compiled_return_with.modifiers,
1003
+ variables_sizes: compiled_return_with.variables_sizes,
992
1004
  })
993
1005
  }
994
1006
  ast::Statement::Unwind(unwind) =>
@@ -8,12 +8,15 @@ pub enum Backend
8
8
  {
9
9
  /// Select the first available backend.
10
10
  Automatic,
11
- /// SQLite backend.
12
- #[cfg(feature = "sqlite")]
13
- SQLite,
11
+ /// Postgres backend.
12
+ #[cfg(feature = "postgres")]
13
+ Postgres,
14
14
  /// Redb backend.
15
15
  #[cfg(feature = "redb")]
16
16
  Redb,
17
+ /// SQLite backend.
18
+ #[cfg(feature = "sqlite")]
19
+ SQLite,
17
20
  }
18
21
 
19
22
  /// Builder with high-level API for creating connection.
@@ -52,16 +55,21 @@ impl ConnectionBuilder
52
55
  {
53
56
  self.map.insert(key, "automatic".into());
54
57
  }
55
- #[cfg(feature = "sqlite")]
56
- Backend::SQLite =>
58
+ #[cfg(feature = "postgres")]
59
+ Backend::Postgres =>
57
60
  {
58
- self.map.insert(key, "sqlite".into());
61
+ self.map.insert(key, "postgres".into());
59
62
  }
60
63
  #[cfg(feature = "redb")]
61
64
  Backend::Redb =>
62
65
  {
63
66
  self.map.insert(key, "redb".into());
64
67
  }
68
+ #[cfg(feature = "sqlite")]
69
+ Backend::SQLite =>
70
+ {
71
+ self.map.insert(key, "sqlite".into());
72
+ }
65
73
  }
66
74
  self
67
75
  }
@@ -204,7 +212,7 @@ impl Connection
204
212
 
205
213
  sq_r.map_err(|rb_e| {
206
214
  StoreError::OpeningError {
207
- errors: error::vec_to_error(&vec![sq_e, rb_e]),
215
+ errors: error::vec_to_error(&[sq_e, rb_e]),
208
216
  }
209
217
  .into()
210
218
  })
@@ -251,6 +259,33 @@ impl Connection
251
259
  .boxed(),
252
260
  })
253
261
  }
262
+ #[cfg(feature = "postgres")]
263
+ "postgres" =>
264
+ {
265
+ let mut config = postgres::Config::new();
266
+ if let Some(host) = options.get("host")
267
+ {
268
+ let host: &String = host.try_into_ref()?;
269
+ config.host(host);
270
+ }
271
+ if let Some(user) = options.get("user")
272
+ {
273
+ let user: &String = user.try_into_ref()?;
274
+ config.user(user);
275
+ }
276
+ if let Some(password) = options.get("password")
277
+ {
278
+ let password: &String = password.try_into_ref()?;
279
+ config.password(password);
280
+ }
281
+ let store = store::postgres::Store::connect(config)?;
282
+ Ok(Connection {
283
+ connection: ConnectionImpl {
284
+ store,
285
+ function_manager: functions::Manager::new(),
286
+ },
287
+ })
288
+ }
254
289
  _ => Err(StoreError::UnknownBackend { backend }.into()),
255
290
  }
256
291
  }
@@ -313,6 +313,8 @@ pub enum InternalError
313
313
  Poison(String),
314
314
  #[error("IOError: {0}.")]
315
315
  IOError(#[from] std::io::Error),
316
+ #[error("Utf8Error: {0}.")]
317
+ Utf8Error(#[from] std::str::Utf8Error),
316
318
  }
317
319
 
318
320
  /// Error in the store backend.
@@ -541,6 +543,7 @@ error_as_internal! {ciborium::ser::Error<std::io::Error>}
541
543
  error_as_internal! {ciborium::de::Error<std::io::Error>}
542
544
  error_as_internal! {serde_json::Error}
543
545
  error_as_internal! {std::num::ParseFloatError}
546
+ error_as_internal! {std::str::Utf8Error}
544
547
 
545
548
  #[cfg(feature = "redb")]
546
549
  mod _trait_impl_redb
@@ -74,7 +74,7 @@ impl super::FunctionTrait for Size
74
74
  {
75
75
  let container = arguments
76
76
  .first()
77
- .ok_or_else(|| RunTimeError::InvalidNumberOfArguments {
77
+ .ok_or(RunTimeError::InvalidNumberOfArguments {
78
78
  function_name: "size",
79
79
  got: arguments.len(),
80
80
  expected: 1,
@@ -10,7 +10,7 @@ impl super::FunctionTrait for Length
10
10
  {
11
11
  let container = arguments
12
12
  .first()
13
- .ok_or_else(|| RunTimeError::InvalidNumberOfArguments {
13
+ .ok_or(RunTimeError::InvalidNumberOfArguments {
14
14
  function_name: "length",
15
15
  got: arguments.len(),
16
16
  expected: 1,
@@ -32,6 +32,29 @@ impl super::FunctionTrait for Coalesce
32
32
 
33
33
  super::declare_function!(coalesce, Coalesce, custom_trait);
34
34
 
35
+ #[derive(Debug, Default)]
36
+ pub(super) struct Id {}
37
+
38
+ impl Id
39
+ {
40
+ fn call_impl(value: &value::Value) -> FResult<graph::Key>
41
+ {
42
+ match value
43
+ {
44
+ value::Value::Node(n) => Ok(n.key()),
45
+ value::Value::Edge(e) => Ok(e.key()),
46
+ _ => Err(RunTimeError::InvalidArgument {
47
+ function_name: "id",
48
+ index: 0,
49
+ expected_type: "node or edge",
50
+ value: format!("{:?}", value),
51
+ }),
52
+ }
53
+ }
54
+ }
55
+
56
+ super::declare_function!(id, Id, call_impl(crate::value::Value) -> graph::Key);
57
+
35
58
  #[derive(Debug, Default)]
36
59
  pub(super) struct ToInteger {}
37
60
 
@@ -59,6 +59,13 @@ impl FunctionTypeTrait for f64
59
59
  }
60
60
  }
61
61
 
62
+ impl FunctionTypeTrait for graph::Key
63
+ {
64
+ fn result_type() -> ExpressionType
65
+ {
66
+ ExpressionType::Key
67
+ }
68
+ }
62
69
  impl<T> FunctionTypeTrait for Vec<T>
63
70
  {
64
71
  fn result_type() -> ExpressionType
@@ -146,6 +153,7 @@ impl Manager
146
153
  path::Nodes::create(),
147
154
  path::Edges::create(),
148
155
  scalar::Coalesce::create(),
156
+ scalar::Id::create(),
149
157
  scalar::Properties::create(),
150
158
  scalar::ToInteger::create(),
151
159
  string::ToString::create(),
@@ -950,7 +950,6 @@ fn compute_return_with_table(
950
950
  for row in input_table.into_row_iter()
951
951
  {
952
952
  // a) compute non-aggregated columns
953
- let mut row = row.extended(variables_sizes.total_size())?;
954
953
  let out_row = variables
955
954
  .iter()
956
955
  .map(|rw_expr| {
@@ -959,9 +958,7 @@ fn compute_return_with_table(
959
958
  assert_eq!(rw_expr.aggregations.len(), 0);
960
959
  let mut stack = Stack::default();
961
960
  eval_instructions(&mut stack, &row, &rw_expr.instructions, parameters)?;
962
- let value: value::Value = stack.try_pop_into()?;
963
- row.set(rw_expr.col_id, value.to_owned())?;
964
- Ok(value)
961
+ stack.try_pop_into()
965
962
  }
966
963
  else
967
964
  {
@@ -1039,12 +1036,12 @@ fn compute_return_with_table(
1039
1036
  output_table = input_table
1040
1037
  .into_row_iter()
1041
1038
  .map(|row| {
1042
- let mut out_row = row.extended(variables_sizes.total_size())?;
1039
+ let mut out_row = row.clone().extended(variables_sizes.total_size())?;
1043
1040
  for rw_expr in variables.iter()
1044
1041
  {
1045
1042
  assert_eq!(rw_expr.aggregations.len(), 0);
1046
1043
  let mut stack = Stack::default();
1047
- eval_instructions(&mut stack, &out_row, &rw_expr.instructions, parameters)?;
1044
+ eval_instructions(&mut stack, &row, &rw_expr.instructions, parameters)?;
1048
1045
  let value: value::Value = stack.try_pop_into()?;
1049
1046
  out_row.set(rw_expr.col_id, value.to_owned())?;
1050
1047
  }
@@ -1215,10 +1212,13 @@ pub(crate) fn eval_program<TStore: store::Store>(
1215
1212
  }
1216
1213
  match block
1217
1214
  {
1218
- instructions::Block::CreateGraph { name } =>
1215
+ instructions::Block::CreateGraph {
1216
+ name,
1217
+ if_not_exists,
1218
+ } =>
1219
1219
  {
1220
1220
  store
1221
- .create_graph(&mut tx, name, false)
1221
+ .create_graph(&mut tx, name, *if_not_exists)
1222
1222
  .map_err(|e|
1223
1223
  error::map_error!(e, Error::StoreError(StoreError::DuplicatedGraph { graph_name }) => RunTimeError::DuplicatedGraph {
1224
1224
  graph_name: graph_name.clone(),
@@ -1429,7 +1429,7 @@ pub(crate) fn eval_program<TStore: store::Store>(
1429
1429
  variables,
1430
1430
  filter,
1431
1431
  modifiers,
1432
- variables_size,
1432
+ variables_sizes: variables_size,
1433
1433
  } =>
1434
1434
  {
1435
1435
  let (names, variables): (Vec<_>, Vec<_>) = variables.iter().map(|(s, e)| (s, e)).unzip();
@@ -1454,7 +1454,7 @@ pub(crate) fn eval_program<TStore: store::Store>(
1454
1454
  variables,
1455
1455
  filter,
1456
1456
  modifiers,
1457
- variables_size,
1457
+ variables_sizes: variables_size,
1458
1458
  } =>
1459
1459
  {
1460
1460
  input_table = compute_return_with_table(
@@ -207,7 +207,7 @@ pub(crate) enum Block
207
207
  {
208
208
  CreateGraph
209
209
  {
210
- name: String
210
+ name: String, if_not_exists: bool
211
211
  },
212
212
  DropGraph
213
213
  {
@@ -234,7 +234,7 @@ pub(crate) enum Block
234
234
  variables: Vec<(String, RWExpression)>,
235
235
  filter: Instructions,
236
236
  modifiers: Modifiers,
237
- variables_size: VariablesSizes,
237
+ variables_sizes: VariablesSizes,
238
238
  },
239
239
  Call
240
240
  {
@@ -247,7 +247,7 @@ pub(crate) enum Block
247
247
  variables: Vec<RWExpression>,
248
248
  filter: Instructions,
249
249
  modifiers: Modifiers,
250
- variables_size: VariablesSizes,
250
+ variables_sizes: VariablesSizes,
251
251
  },
252
252
  Unwind
253
253
  {
@@ -7,9 +7,7 @@
7
7
  //! for an example of use.
8
8
 
9
9
  #![warn(missing_docs)]
10
- #![deny(warnings)]
11
10
  #![allow(clippy::result_large_err)]
12
- #![allow(clippy::unnecessary_lazy_evaluations)]
13
11
 
14
12
  mod aggregators;
15
13
  #[cfg(feature = "capi")]
@@ -37,7 +35,7 @@ pub use {
37
35
  error::{CompileTimeError, Error, RunTimeError, StoreError},
38
36
  graph::{labels, Edge, Node, Path},
39
37
  query_result::QueryResult,
40
- value::{array, value_map, Value, ValueMap, ValueTryIntoRef},
38
+ value::{array, value_map, TimeStamp, Value, ValueMap, ValueTryIntoRef},
41
39
  };
42
40
 
43
41
  pub use graphcore::{table, Table};
@@ -129,6 +129,7 @@ pub(crate) type Queries = Vec<Statements>;
129
129
  pub(crate) struct CreateGraph
130
130
  {
131
131
  pub(crate) name: String,
132
+ pub(crate) if_not_exists: bool,
132
133
  }
133
134
 
134
135
  #[derive(Debug)]
@@ -63,7 +63,8 @@ where_modifier = { "WHERE" ~ expression }
63
63
 
64
64
  // Statements
65
65
  statement = {
66
- create_graph_statement
66
+ create_graph_if_not_exists_statement
67
+ | create_graph_statement
67
68
  | drop_graph_if_exists_statement
68
69
  | drop_graph_statement
69
70
  | use_graph_statement
@@ -83,6 +84,7 @@ statement = {
83
84
  star = { "*" }
84
85
 
85
86
  create_graph_statement = { "CREATE" ~ "GRAPH" ~ ident }
87
+ create_graph_if_not_exists_statement = { "CREATE" ~ "GRAPH" ~ "IF" ~ "NOT" ~ "EXISTS" ~ ident }
86
88
  drop_graph_statement = { "DROP" ~ "GRAPH" ~ ident }
87
89
  drop_graph_if_exists_statement = { "DROP" ~ "GRAPH" ~ "IF" ~ "EXISTS" ~ ident }
88
90
  use_graph_statement = { "USE" ~ ident }
@@ -909,7 +909,15 @@ impl AstBuilder
909
909
  {
910
910
  Rule::create_graph_statement => Ok(ast::Statement::CreateGraph(ast::CreateGraph {
911
911
  name: self.build_ident(&mut pair.into_inner())?,
912
+ if_not_exists: false,
912
913
  })),
914
+ Rule::create_graph_if_not_exists_statement =>
915
+ {
916
+ Ok(ast::Statement::CreateGraph(ast::CreateGraph {
917
+ name: self.build_ident(&mut pair.into_inner())?,
918
+ if_not_exists: true,
919
+ }))
920
+ }
913
921
  Rule::drop_graph_statement => Ok(ast::Statement::DropGraph(ast::DropGraph {
914
922
  name: self.build_ident(&mut pair.into_inner())?,
915
923
  if_exists: false,
@@ -7,3 +7,6 @@ pub(crate) use crate::{
7
7
  };
8
8
 
9
9
  pub(crate) use error::export::Error as ErrorType;
10
+
11
+ #[cfg(any(feature = "sqlite", feature = "postgres", feature = "_pgrx"))]
12
+ pub(crate) use store::sqlbase::{self, Row as _, SqlMetaDataStore as _, SqlStore as _};
@@ -1,5 +1,7 @@
1
1
  use crate::{graph, Result};
2
2
 
3
+ impl SqlParams for &[&(dyn tokio_postgres::types::ToSql + Sync)] {}
4
+
3
5
  // _____ _ _
4
6
  // |_ _| __ __ _ _ __ ___ __ _ ___| |_(_) ___ _ __
5
7
  // | || '__/ _` | '_ \/ __|/ _` |/ __| __| |/ _ \| '_ \
File without changes