bentopy 0.2.0a10__cp313-cp313-manylinux_2_34_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. bentopy-0.2.0a10.data/scripts/bentopy-init +0 -0
  2. bentopy-0.2.0a10.data/scripts/bentopy-pack +0 -0
  3. bentopy-0.2.0a10.data/scripts/bentopy-render +0 -0
  4. bentopy-0.2.0a10.data/scripts/bentopy-solvate +0 -0
  5. bentopy-0.2.0a10.dist-info/METADATA +358 -0
  6. bentopy-0.2.0a10.dist-info/RECORD +58 -0
  7. bentopy-0.2.0a10.dist-info/WHEEL +5 -0
  8. bentopy-0.2.0a10.dist-info/entry_points.txt +4 -0
  9. bentopy-0.2.0a10.dist-info/licenses/LICENSE.txt +13 -0
  10. bentopy-0.2.0a10.dist-info/top_level.txt +8 -0
  11. check/check.py +128 -0
  12. core/config/bent/lexer.rs +338 -0
  13. core/config/bent/parser.rs +1180 -0
  14. core/config/bent/writer.rs +205 -0
  15. core/config/bent.rs +149 -0
  16. core/config/compartment_combinations.rs +300 -0
  17. core/config/legacy.rs +768 -0
  18. core/config.rs +362 -0
  19. core/mod.rs +4 -0
  20. core/placement.rs +100 -0
  21. core/utilities.rs +1 -0
  22. core/version.rs +32 -0
  23. init/example.bent +74 -0
  24. init/main.rs +235 -0
  25. mask/config.py +153 -0
  26. mask/mask.py +308 -0
  27. mask/utilities.py +38 -0
  28. merge/merge.py +175 -0
  29. pack/args.rs +77 -0
  30. pack/main.rs +121 -0
  31. pack/mask.rs +940 -0
  32. pack/session.rs +176 -0
  33. pack/state/combinations.rs +31 -0
  34. pack/state/compartment.rs +44 -0
  35. pack/state/mask.rs +196 -0
  36. pack/state/pack.rs +187 -0
  37. pack/state/segment.rs +72 -0
  38. pack/state/space.rs +98 -0
  39. pack/state.rs +440 -0
  40. pack/structure.rs +185 -0
  41. pack/voxelize.rs +85 -0
  42. render/args.rs +109 -0
  43. render/limits.rs +73 -0
  44. render/main.rs +12 -0
  45. render/render.rs +393 -0
  46. render/structure.rs +264 -0
  47. solvate/args.rs +324 -0
  48. solvate/convert.rs +25 -0
  49. solvate/cookies.rs +185 -0
  50. solvate/main.rs +177 -0
  51. solvate/placement.rs +380 -0
  52. solvate/solvate.rs +244 -0
  53. solvate/structure.rs +160 -0
  54. solvate/substitute.rs +113 -0
  55. solvate/water/martini.rs +409 -0
  56. solvate/water/models.rs +150 -0
  57. solvate/water/tip3p.rs +658 -0
  58. solvate/water.rs +115 -0
@@ -0,0 +1,205 @@
1
+ use std::io::{Result, Write};
2
+ use std::path::Path;
3
+
4
+ use crate::core::config::{
5
+ Compartment, Config, Constraint, General, Mask, Point, RearrangeMethod, Rule, Segment, Shape,
6
+ Space,
7
+ };
8
+
9
+ pub fn write<W: Write>(config: &Config, w: &mut W) -> Result<()> {
10
+ let Config {
11
+ general,
12
+ space,
13
+ includes,
14
+ constraints,
15
+ compartments,
16
+ segments,
17
+ } = config;
18
+
19
+ // Write a comment header about this file.
20
+ writeln!(w, "# Bentopy {}", crate::core::version::VERSION)?;
21
+ writeln!(w)?;
22
+
23
+ {
24
+ // General section.
25
+ let General {
26
+ title,
27
+ seed,
28
+ bead_radius,
29
+ max_tries_mult,
30
+ max_tries_rot_div,
31
+ rearrange_method,
32
+ } = general;
33
+
34
+ section(w, "general")?;
35
+ if let Some(val) = title {
36
+ field(w, "title", format_string(val))?
37
+ }
38
+ if let Some(val) = seed {
39
+ field(w, "seed", val)?
40
+ }
41
+ if let Some(val) = bead_radius {
42
+ // FIXME: This is a bit cursed, I guess. It's just to make sure we don't print ugly
43
+ // insignificant digits.
44
+ field(w, "bead-radius", *val as f32)?
45
+ }
46
+ if let Some(val) = max_tries_mult {
47
+ field(w, "max-tries-mult", val)?
48
+ }
49
+ if let Some(val) = max_tries_rot_div {
50
+ field(w, "max-tries-rot-div", val)?
51
+ }
52
+ if let Some(val) = rearrange_method {
53
+ field(w, "rearrange", rm(val))?
54
+ }
55
+
56
+ writeln!(w)?; // Spacing newline.
57
+ }
58
+
59
+ {
60
+ // Space section.
61
+ let Space {
62
+ dimensions,
63
+ resolution,
64
+ periodic,
65
+ } = space;
66
+
67
+ section(w, "space")?;
68
+ if let Some(val) = dimensions {
69
+ field(w, "dimensions", point(val))?
70
+ }
71
+ if let Some(val) = resolution {
72
+ field(w, "resolution", val)?
73
+ }
74
+ if let Some(val) = periodic {
75
+ field(w, "periodic", val)?
76
+ }
77
+
78
+ writeln!(w)?; // Spacing newline.
79
+ }
80
+
81
+ if !includes.is_empty() {
82
+ // Includes section.
83
+ section(w, "includes")?;
84
+ for include in includes {
85
+ include_entry(w, include)?;
86
+ }
87
+ writeln!(w)?; // Spacing newline.
88
+ }
89
+
90
+ // Compartments section.
91
+ section(w, "compartments")?;
92
+ for compartment in compartments {
93
+ compartment_entry(w, compartment)?;
94
+ }
95
+ writeln!(w)?; // Spacing newline.
96
+
97
+ if !constraints.is_empty() {
98
+ // Constraints section.
99
+ section(w, "constraints")?;
100
+ for constraint in constraints {
101
+ constraint_entry(w, constraint)?;
102
+ }
103
+ writeln!(w)?; // Spacing newline.
104
+ }
105
+
106
+ // Segments section.
107
+ section(w, "segments")?;
108
+ for segment in segments {
109
+ segment_entry(w, segment)?;
110
+ }
111
+
112
+ Ok(())
113
+ }
114
+
115
/// Write a section header line of the form `[ name ]`.
fn section<W: Write>(w: &mut W, header: &'static str) -> Result<()> {
    writeln!(w, "[ {} ]", header)
}
118
+
119
/// Write a single `name<TAB>value` field line.
fn field<W: Write, D: std::fmt::Display>(w: &mut W, name: &'static str, value: D) -> Result<()> {
    writeln!(w, "{}\t{}", name, value)
}
122
+
123
+ // TODO: This could be done with a std::fmt::Display impl on RearrangeMethod.
124
+ fn rm(val: &RearrangeMethod) -> &'static str {
125
+ match val {
126
+ RearrangeMethod::Moment => "moment",
127
+ RearrangeMethod::Volume => "volume",
128
+ RearrangeMethod::BoundingSphere => "bounding-sphere",
129
+ RearrangeMethod::None => "none",
130
+ }
131
+ }
132
+
133
+ fn point(point: &Point) -> String {
134
+ let [x, y, z] = point;
135
+ format!("{x}, {y}, {z}")
136
+ }
137
+
138
/// Wrap a string value in double quotes for the bent format.
fn format_string(s: &str) -> String {
    format!(r#""{s}""#)
}

/// Quote a path for the bent format.
///
/// # Panics
///
/// Panics when the path is not valid UTF-8, since the text format can only
/// represent UTF-8 strings.
// TODO: This really makes me think we should move it to Vec<String>.
fn format_path(path: &Path) -> String {
    format_string(
        path.to_str()
            .expect("path must be valid UTF-8 to be written to a bent file"),
    )
}
146
+
147
/// Render items as a comma-separated list, e.g. `a, b, c`.
fn list<T: std::fmt::Display>(items: &[T]) -> String {
    let rendered: Vec<String> = items.iter().map(|item| item.to_string()).collect();
    rendered.join(", ")
}
154
+
155
+ fn include_entry<W: Write>(w: &mut W, include: &Path) -> Result<()> {
156
+ writeln!(w, "{}", format_path(include))
157
+ }
158
+
159
+ fn compartment_entry<W: Write>(w: &mut W, compartment: &Compartment) -> Result<()> {
160
+ let Compartment { id, mask } = compartment;
161
+ let mask = match mask {
162
+ Mask::All => "is all".to_string(),
163
+ Mask::Voxels(path) => format!("from {}", format_path(path)),
164
+ Mask::Shape(Shape::Sphere { center, radius }) => {
165
+ format!("as sphere at {center} with radius {radius}")
166
+ }
167
+ Mask::Shape(Shape::Cuboid { start, end }) => {
168
+ format!("as cuboid from {start} to {end}")
169
+ }
170
+ Mask::Limits(expr) => format!("where {expr}"),
171
+ Mask::Within { distance, id } => format!("within {distance} of {id}"),
172
+ Mask::Combination(expr) => format!("combines {expr}"),
173
+ };
174
+ writeln!(w, "{id} {mask}")
175
+ }
176
+
177
+ fn constraint_entry<W: Write>(w: &mut W, constraint: &Constraint) -> Result<()> {
178
+ let Constraint { id, rule } = constraint;
179
+ let rule = match rule {
180
+ Rule::RotationAxes(axes) => format!("rotates {}", list(&axes.list())),
181
+ };
182
+ writeln!(w, "{id} {rule}")
183
+ }
184
+
185
+ fn segment_entry<W: Write>(w: &mut W, segment: &Segment) -> Result<()> {
186
+ let Segment {
187
+ name,
188
+ tag,
189
+ quantity,
190
+ path,
191
+ compartment_ids,
192
+ rules,
193
+ } = segment;
194
+ let id = match tag {
195
+ Some(tag) => format!("{name}:{tag}"),
196
+ None => name.to_string(),
197
+ };
198
+ let path = format_path(path);
199
+ let compartment_ids = list(compartment_ids);
200
+ let rules = match rules[..] {
201
+ [] => Default::default(),
202
+ _ => format!(" satisfies {}", list(rules)),
203
+ };
204
+ writeln!(w, "{id} {quantity} from {path} in {compartment_ids}{rules}")
205
+ }
core/config/bent.rs ADDED
@@ -0,0 +1,149 @@
1
+ use chumsky::Parser;
2
+ use chumsky::error::Rich;
3
+ use chumsky::input::{BorrowInput, Input};
4
+ use chumsky::span::SimpleSpan;
5
+
6
+ use lexer::{Spanned, Token};
7
+
8
+ use crate::core::config::Config;
9
+
10
+ mod lexer;
11
+ mod parser;
12
+ mod writer;
13
+
14
+ pub use writer::write;
15
+
16
+ pub(crate) fn make_input<'src, 'tokens>(
17
+ eoi: SimpleSpan,
18
+ toks: &'tokens [Spanned<Token<'src>>],
19
+ ) -> impl BorrowInput<'tokens, Token = Token<'src>, Span = SimpleSpan> {
20
+ toks.map(eoi, |(t, s)| (t, s))
21
+ }
22
+
23
+ mod report {
24
+ use super::*;
25
+
26
+ use ariadne::{Color, Fmt, Label, Report, ReportKind, Source};
27
+
28
+ // Most of this is taken and modified from the mini_ml.rs example in the chumsky github page.
29
+ // https://github.com/zesterer/chumsky/blob/0.11/examples/mini_ml.rs
30
+
31
+ /// Report errors from a parsing step.
32
+ pub(crate) fn error(path: &str, src: &str, err: &Rich<impl std::fmt::Display>) -> String {
33
+ let msg = err.reason().to_string();
34
+ let label = (
35
+ err.found()
36
+ .map(|c| c.to_string())
37
+ .unwrap_or_else(|| "end of input".to_string()),
38
+ *err.span(),
39
+ );
40
+ let extra_labels = err
41
+ .contexts()
42
+ .map(|(l, s)| (format!("while parsing this {l}"), *s));
43
+
44
+ let cfg = ariadne::Config::new()
45
+ .with_index_type(ariadne::IndexType::Char)
46
+ .with_label_attach(ariadne::LabelAttach::Middle);
47
+ Report::build(ReportKind::Error, (path, label.1.into_range()))
48
+ .with_config(cfg)
49
+ .with_message(&msg)
50
+ .with_label(
51
+ Label::new((path, label.1.into_range()))
52
+ .with_message(label.0)
53
+ .with_color(Color::Red),
54
+ )
55
+ .with_labels(extra_labels.into_iter().map(|label| {
56
+ Label::new((path, label.1.into_range()))
57
+ .with_message(label.0)
58
+ .with_color(Color::Yellow)
59
+ }))
60
+ .finish()
61
+ .eprint((path, Source::from(src)))
62
+ .unwrap();
63
+ msg
64
+ }
65
+
66
+ /// Report the result of a parsing step by showing the spans of source code associated with
67
+ /// each item.
68
+ pub(crate) fn result<T: std::fmt::Display>(path: &str, src: &str, items: &[Spanned<T>]) {
69
+ let cfg = ariadne::Config::new()
70
+ .with_compact(true)
71
+ .with_index_type(ariadne::IndexType::Char)
72
+ .with_label_attach(ariadne::LabelAttach::Middle);
73
+ for (item, span) in items {
74
+ Report::build(
75
+ ReportKind::Custom("Result", Color::Green),
76
+ (path, span.into_range()),
77
+ )
78
+ .with_config(cfg)
79
+ .with_message(format!("found the following item: {item}"))
80
+ .with_label(
81
+ Label::new((path, span.into_range()))
82
+ .with_message(item.to_string())
83
+ .with_color(Color::Green),
84
+ )
85
+ .finish()
86
+ .eprint((path, Source::from(src)))
87
+ .unwrap();
88
+ }
89
+ }
90
+
91
+ /// Report placeholders in an input file.
92
+ pub(crate) fn placeholders<'s>(path: &str, src: &'s str, items: &[Spanned<Token<'s>>]) {
93
+ let cfg = ariadne::Config::new()
94
+ .with_compact(true)
95
+ .with_index_type(ariadne::IndexType::Char)
96
+ .with_label_attach(ariadne::LabelAttach::Middle);
97
+ for (item, name, span) in items.iter().filter_map(|(item, span)| match item {
98
+ Token::Placeholder(name) => Some((item, name, span)),
99
+ _ => None,
100
+ }) {
101
+ Report::build(
102
+ ReportKind::Custom("Placeholder", Color::Cyan),
103
+ (path, span.into_range()),
104
+ )
105
+ .with_config(cfg)
106
+ .with_message(format!(
107
+ "replace {} with a valid value",
108
+ format!("<{name}>").fg(Color::Cyan)
109
+ ))
110
+ .with_label(
111
+ Label::new((path, span.into_range()))
112
+ .with_message(item.to_string())
113
+ .with_color(Color::Cyan),
114
+ )
115
+ .finish()
116
+ .eprint((path, Source::from(src)))
117
+ .unwrap();
118
+ }
119
+ }
120
+ }
121
+
122
+ pub fn parse_config(path: &str, src: &str) -> anyhow::Result<Config> {
123
+ let tokens = lexer::lexer().parse(src).into_result().map_err(|errs| {
124
+ // Display a nice report.
125
+ let summary = report::error(path, src, &errs[0]);
126
+ // Communicate the error upstream.
127
+ anyhow::anyhow!("could not lex {path:?}: {summary}")
128
+ })?;
129
+
130
+ if std::env::var("BENTOPY_SHOW_TOKENS").is_ok_and(|v| v.parse::<bool>().unwrap_or_default()) {
131
+ eprintln!("Printing tokens.");
132
+ report::result(path, src, &tokens);
133
+ }
134
+
135
+ // Report any placeholders in the stream of tokens.
136
+ report::placeholders(path, src, &tokens);
137
+
138
+ let tokens = make_input((0..src.len()).into(), &tokens);
139
+ let config = parser::parser()
140
+ .parse(tokens)
141
+ .into_result()
142
+ .map_err(|errs| {
143
+ // Display a nice report.
144
+ let summary = report::error(path, src, &errs[0]);
145
+ // Communicate the error upstream.
146
+ anyhow::anyhow!("could not parse {path:?}: {summary}")
147
+ })?;
148
+ Ok(config)
149
+ }
@@ -0,0 +1,300 @@
1
+ use std::str::FromStr;
2
+
3
+ use super::CompartmentID as Id;
4
+
5
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
6
+ enum Token {
7
+ Id(Id),
8
+ Not, // `!`
9
+ Union, // `union(`
10
+ Intersect, // `intersect(`
11
+ Close, // `)`
12
+ }
13
+
14
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
15
+ pub enum Expression {
16
+ Id(Id),
17
+ Not(Box<Self>),
18
+ Union(Vec<Self>),
19
+ Intersect(Vec<Self>),
20
+ }
21
+
22
/// Errors that can occur while tokenizing or parsing a combination expression.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParseExpressionError {
    /// The token stream ended while more tokens were expected.
    UnexpectedEnd,
    /// A closing parenthesis appeared where it does not belong.
    UnexpectedClose,
    /// Tokens remained after the root expression was fully parsed.
    TrailingTokens,
}

impl std::error::Error for ParseExpressionError {}
impl std::fmt::Display for ParseExpressionError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ParseExpressionError::UnexpectedEnd => {
                "encountered the end of the tokens, but expected more"
            }
            // Fixed typo in this message ("encounterd" -> "encountered").
            ParseExpressionError::UnexpectedClose => {
                "encountered an unexpected closing parenthesis"
            }
            ParseExpressionError::TrailingTokens => {
                "some tokens remained after parsing the root expression"
            }
        }
        .fmt(f)
    }
}
44
+
45
/// Strip any leading whitespace from `input`.
fn consume_whitespace(input: &str) -> &str {
    input.trim_start()
}
48
+
49
+ /// Tokenize a single ID.
50
+ fn tokenize_id<'s>(mut input: &'s str, tokens: &mut Vec<Token>) -> &'s str {
51
+ // Either, the end of the token is the end of `input`, whitespace, or a closing parenthesis.
52
+ let end = input
53
+ .find(|ch: char| ch.is_whitespace() || ch == ')')
54
+ .unwrap_or(input.len());
55
+ let id;
56
+ (id, input) = input.split_at(end);
57
+ // Make sure we're not accidentally emitting an empty id. This can happen if we're at the end
58
+ // of the input string. That is usually wrong, but we'll catch that during the parse.
59
+ if !id.is_empty() {
60
+ tokens.push(Token::Id(id.to_string())); // Emit.
61
+ }
62
+ input
63
+ }
64
+
65
+ /// Tokenize the next operation, if there is one.
66
+ fn tokenize_operation<'s>(
67
+ mut input: &'s str,
68
+ tokens: &mut Vec<Token>,
69
+ ) -> Result<Option<&'s str>, ParseExpressionError> {
70
+ input = consume_whitespace(input);
71
+
72
+ if input.starts_with('!') {
73
+ tokens.push(Token::Not); // Emit.
74
+ input = &input[1..];
75
+
76
+ // We now expect another expression.
77
+ return Ok(Some(tokenize_expr(input, tokens)?));
78
+ }
79
+
80
+ if input.starts_with("union(") {
81
+ tokens.push(Token::Union); // Emit.
82
+ input = &input[6..];
83
+
84
+ // We now expect a list of operands.
85
+ return Ok(Some(tokenize_operands(input, tokens)?));
86
+ }
87
+
88
+ if input.starts_with("intersect(") {
89
+ tokens.push(Token::Intersect); // Emit.
90
+ input = &input[10..];
91
+
92
+ // We now expect a list of operands.
93
+ return Ok(Some(tokenize_operands(input, tokens)?));
94
+ }
95
+
96
+ // Syntax errors.
97
+ if input.starts_with(')') {
98
+ return Err(ParseExpressionError::UnexpectedClose);
99
+ }
100
+
101
+ Ok(None)
102
+ }
103
+
104
+ /// Tokenize the operands of an operation until a [`Token::Close`] is found the same tree level.
105
+ fn tokenize_operands<'s>(
106
+ mut input: &'s str,
107
+ tokens: &mut Vec<Token>,
108
+ ) -> Result<&'s str, ParseExpressionError> {
109
+ while !input.is_empty() {
110
+ // We now expect another expression.
111
+ input = tokenize_expr(input, tokens)?;
112
+
113
+ // If we have reached the end of this body, we are done.
114
+ input = consume_whitespace(input);
115
+ if input.starts_with(')') {
116
+ tokens.push(Token::Close); // Emit.
117
+ input = &input[1..];
118
+ break;
119
+ }
120
+ }
121
+
122
+ Ok(input)
123
+ }
124
+
125
+ /// Tokenize the next expression.
126
+ fn tokenize_expr<'s>(
127
+ mut input: &'s str,
128
+ tokens: &mut Vec<Token>,
129
+ ) -> Result<&'s str, ParseExpressionError> {
130
+ input = consume_whitespace(input);
131
+ match tokenize_operation(input, tokens)? {
132
+ Some(tail) => Ok(tail),
133
+ None => Ok(tokenize_id(input, tokens)),
134
+ }
135
+ }
136
+
137
+ /// Tokenize a compartment combinations definition input string.
138
+ fn tokenize(mut input: &str) -> Result<Vec<Token>, ParseExpressionError> {
139
+ let mut tokens = Vec::new();
140
+ while !input.is_empty() {
141
+ input = tokenize_expr(input, &mut tokens)?;
142
+ }
143
+ Ok(tokens)
144
+ }
145
+
146
+ /// Returns a list of operands until the [`Token::Close`] if this level in the tree is encountered.
147
+ fn parse_operands_until_close(
148
+ tokens: &mut &[Token],
149
+ ) -> Result<Vec<Expression>, ParseExpressionError> {
150
+ let mut operands = Vec::new();
151
+ while let Some(item) = parse_next_item(tokens)? {
152
+ operands.push(item);
153
+ }
154
+ Ok(operands)
155
+ }
156
+
157
+ /// Returns the next item or `None` when the [`Token::Close`] for this level in the three is hit.
158
+ fn parse_next_item(tokens: &mut &[Token]) -> Result<Option<Expression>, ParseExpressionError> {
159
+ let token;
160
+ (token, *tokens) = tokens
161
+ .split_first()
162
+ .ok_or(ParseExpressionError::UnexpectedEnd)?;
163
+ let expr = match token {
164
+ Token::Id(id) => Expression::Id(id.to_owned()),
165
+ Token::Not => Expression::Not(Box::new(
166
+ parse_next_item(tokens)?.ok_or(ParseExpressionError::UnexpectedClose)?,
167
+ )),
168
+ Token::Union => Expression::Union(parse_operands_until_close(tokens)?),
169
+ Token::Intersect => Expression::Intersect(parse_operands_until_close(tokens)?),
170
+ Token::Close => return Ok(None),
171
+ };
172
+
173
+ Ok(Some(expr))
174
+ }
175
+
176
+ fn parse(tokens: &mut &[Token]) -> Result<Expression, ParseExpressionError> {
177
+ parse_next_item(tokens)?.ok_or(ParseExpressionError::UnexpectedClose)
178
+ }
179
+
180
+ impl FromStr for Expression {
181
+ type Err = ParseExpressionError;
182
+
183
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
184
+ let tokens = tokenize(s)?;
185
+ let tokens = &mut tokens.as_slice();
186
+ let expr = parse(tokens)?;
187
+ if tokens.is_empty() {
188
+ // No trailing tokens, as desired.
189
+ Ok(expr)
190
+ } else {
191
+ Err(Self::Err::TrailingTokens)
192
+ }
193
+ }
194
+ }
195
+
196
#[cfg(test)]
mod tests {
    use super::*;

    /// Token stream for `union(first intersect(second third) !union(fourth fifth))`.
    fn example_tokens() -> Vec<Token> {
        vec![
            Token::Union,
            Token::Id("first".to_string()),
            Token::Intersect,
            Token::Id("second".to_string()),
            Token::Id("third".to_string()),
            Token::Close,
            Token::Not,
            Token::Union,
            Token::Id("fourth".to_string()),
            Token::Id("fifth".to_string()),
            Token::Close,
            Token::Close,
        ]
    }

    /// Expression tree for the same example input.
    fn example_expression() -> Expression {
        Expression::Union(vec![
            Expression::Id("first".to_string()),
            Expression::Intersect(vec![
                Expression::Id("second".to_string()),
                Expression::Id("third".to_string()),
            ]),
            Expression::Not(Box::new(Expression::Union(vec![
                Expression::Id("fourth".to_string()),
                Expression::Id("fifth".to_string()),
            ]))),
        ])
    }

    #[test]
    fn tokens() {
        let expected = Ok(example_tokens());

        // The tokenizer must be insensitive to the amount and kind of whitespace.
        let clean = "union(first intersect(second third) !union(fourth fifth))";
        let extra_spaces =
            "   union( first   intersect( second third)    ! union(    fourth fifth )   )  ";
        let messy = "
        union( first
        intersect(

        second third) !
        union( fourth

        fifth



        )
        )
        ";

        assert_eq!(tokenize(clean), expected);
        assert_eq!(tokenize(extra_spaces), expected);
        assert_eq!(tokenize(messy), expected);
    }

    #[test]
    fn parsing() {
        let tokens = example_tokens();
        assert_eq!(parse(&mut tokens.as_slice()), Ok(example_expression()));
    }

    #[test]
    fn parse_from_string() {
        let s = "union(first intersect(second third) !union(fourth fifth))";
        assert_eq!(Expression::from_str(s), Ok(example_expression()));
    }

    #[test]
    fn parse_not() {
        let expected = Ok(Expression::Not(Box::new(Expression::Id("a".to_string()))));

        // A stray closing parenthesis after the negated ID must be rejected.
        assert_eq!(
            Expression::from_str("!a)"),
            Err(ParseExpressionError::UnexpectedClose)
        );
        assert_eq!(Expression::from_str("!a "), expected);
        assert_eq!(Expression::from_str("!a"), expected);
    }
}