omlish 0.0.0.dev104__py3-none-any.whl → 0.0.0.dev105__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
-__version__ = '0.0.0.dev104'
-__revision__ = '1ffe2eb6d42217c0ee8b049c6c3b04fd970f9e66'
+__version__ = '0.0.0.dev105'
+__revision__ = '933f83fc2d561338d613829ed857297f339dcccd'
 
 
 #

omlish/fnpipes.py CHANGED
@@ -6,6 +6,9 @@ T = ta.TypeVar('T')
 U = ta.TypeVar('U')
 
 
+##
+
+
 class Fn(abc.ABC, ta.Generic[T]):
     @abc.abstractmethod
     def __call__(self, *args: ta.Any, **kwargs: ta.Any) -> T:
@@ -24,6 +27,9 @@ class Fn(abc.ABC, ta.Generic[T]):
         return self.apply(fn)
 
 
+##
+
+
 class Bind(Fn[T]):
     def __init__(self, fn: ta.Callable[..., T], *args: ta.Any, **kwargs: ta.Any) -> None:
         super().__init__()
@@ -54,6 +60,12 @@ class Bind(Fn[T]):
         return self._fn(*fa, **fkw)
 
 
+bind = Bind
+
+
+##
+
+
 class Pipe(Fn[T]):
     def __init__(self, lfns: ta.Sequence[ta.Callable], rfn: ta.Callable[..., T]) -> None:
         super().__init__()
@@ -66,6 +78,14 @@ class Pipe(Fn[T]):
         return o
 
 
+def pipe(*fns: ta.Callable) -> Pipe:
+    *lfns, rfn = fns
+    return Pipe(lfns, rfn)
+
+
+##
+
+
 class Apply(Fn[T]):
     def __init__(self, *fns: ta.Callable[[T], ta.Any]) -> None:
         super().__init__()
@@ -77,6 +97,4 @@ class Apply(Fn[T]):
         return o
 
 
-bind = Bind
-pipe = Pipe
 apply = Apply
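
Note: the module-level `bind` and `pipe` helpers added above are what the rewritten JSON CLI later in this diff composes its pipelines from. A minimal sketch of how they combine, inferred from that usage (`bind` appears to partially apply leading arguments, like `functools.partial`, and `pipe` to apply its functions left to right); the `parse`/`render` stage functions here are hypothetical:

    import typing as ta

    from omlish import fnpipes as fp
    from omlish import lang


    def parse(b: bytes) -> ta.Iterable[str]:
        # hypothetical stage: bytes -> [str]
        return [b.decode('utf-8')]


    def render(s: str) -> ta.Iterable[str]:
        # hypothetical stage: str -> [str]
        return [s.upper()]


    # bytes -> [str]: run parse, then flatmap render over its results
    pipeline = fp.pipe(parse, fp.bind(lang.flatmap, render))

    print(list(pipeline(b'hi')))  # expected: ['HI']
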
@@ -3,6 +3,8 @@ from .consts import ( # noqa
     COMPACT_SEPARATORS,
     PRETTY_INDENT,
     PRETTY_KWARGS,
+    PRETTY_SEPARATORS,
+    Separators,
 )
 
 from .encoding import ( # noqa

@@ -5,7 +5,7 @@ TODO:
 
 ==
 
-jq Command options:
+jq Command options)
   -n, --null-input use `null` as the single input value;
   -R, --raw-input read each line as string instead of JSON;
   -s, --slurp read all inputs into an array and use it as the single input value;
@@ -38,47 +38,36 @@ jq Command options:
   -- terminates argument processing;
 """
 import argparse
-import codecs
 import contextlib
+import dataclasses as dc
 import io
-import json
 import os
 import subprocess
 import sys
 import typing as ta
 
 from .... import check
+from .... import fnpipes as fp
 from .... import lang
-from .... import term
-from ....lite.io import DelimitingBuffer
-from ..render import JsonRenderer
-from ..stream.build import JsonObjectBuilder
-from ..stream.lex import JsonStreamLexer
-from ..stream.parse import JsonStreamParser
-from ..stream.render import StreamJsonRenderer
 from .formats import FORMATS_BY_NAME
+from .formats import Format
 from .formats import Formats
+from .parsing import DelimitingParser
+from .parsing import EagerParser
+from .parsing import StreamBuilder
+from .parsing import StreamParser
+from .processing import ProcessingOptions
+from .processing import Processor
+from .rendering import EagerRenderer
+from .rendering import RenderingOptions
+from .rendering import StreamRenderer
 
 
-if ta.TYPE_CHECKING:
-    from ....specs import jmespath
-else:
-    jmespath = lang.proxy_import('....specs.jmespath', __package__)
+T = ta.TypeVar('T')
+U = ta.TypeVar('U')
 
 
-##
-
-
-def term_color(o: ta.Any, state: JsonRenderer.State) -> tuple[str, str]:
-    if state is JsonRenderer.State.KEY:
-        return term.SGR(term.SGRs.FG.BRIGHT_BLUE), term.SGR(term.SGRs.RESET)
-    elif isinstance(o, str):
-        return term.SGR(term.SGRs.FG.GREEN), term.SGR(term.SGRs.RESET)
-    else:
-        return '', ''
-
-
-def _main() -> None:
+def _build_args_parser() -> argparse.ArgumentParser:
     parser = argparse.ArgumentParser()
 
     parser.add_argument('file', nargs='?')
@@ -93,17 +82,52 @@ def _main() -> None:
     parser.add_argument('-f', '--format')
 
     parser.add_argument('-x', '--jmespath-expr')
+    parser.add_argument('-F', '--flat', action='store_true')
 
     parser.add_argument('-z', '--compact', action='store_true')
     parser.add_argument('-p', '--pretty', action='store_true')
     parser.add_argument('-i', '--indent')
     parser.add_argument('-s', '--sort-keys', action='store_true')
-
+    parser.add_argument('-R', '--raw', action='store_true')
+    parser.add_argument('-U', '--unicode', action='store_true')
     parser.add_argument('-c', '--color', action='store_true')
 
     parser.add_argument('-L', '--less', action='store_true')
 
-    args = parser.parse_args()
+    return parser
+
+
+def _parse_args(args: ta.Any = None) -> ta.Any:
+    return _build_args_parser().parse_args(args)
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class RunConfiguration:
+    format: Format
+    processing: ProcessingOptions
+    rendering: RenderingOptions
+
+
+def _process_args(args: ta.Any) -> RunConfiguration:
+    fmt_name = args.format
+    if fmt_name is None:
+        if args.file is not None:
+            ext = args.file.rpartition('.')[2]
+            if ext in FORMATS_BY_NAME:
+                fmt_name = ext
+    if fmt_name is None:
+        fmt_name = 'json'
+    format = FORMATS_BY_NAME[fmt_name]  # noqa
+
+    if args.stream:
+        check.arg(format is Formats.JSON.value)
+
+    #
+
+    processing = ProcessingOptions(
+        jmespath_expr=args.jmespath_expr,
+        flat=args.flat,
+    )
 
     #
 
@@ -120,48 +144,30 @@ def _main() -> None:
         except ValueError:
             indent = args.indent
 
-    kw: dict[str, ta.Any] = dict(
+    rendering = RenderingOptions(
         indent=indent,
         separators=separators,
         sort_keys=args.sort_keys,
+        raw=args.raw,
+        unicode=args.unicode,
+        color=args.color,
     )
 
-    if args.jmespath_expr is not None:
-        jp_expr = jmespath.compile(args.jmespath_expr)
-    else:
-        jp_expr = None
+    #
 
-    def render_one(v: ta.Any) -> str:
-        if jp_expr is not None:
-            v = jp_expr.search(v)
+    return RunConfiguration(
+        format=format,
+        processing=processing,
+        rendering=rendering,
+    )
 
-        if args.color:
-            return JsonRenderer.render_str(
-                v,
-                **kw,
-                style=term_color,
-            )
 
-        else:
-            return json.dumps(
-                v,
-                **kw,
-            )
+def _main() -> None:
+    args = _parse_args()
 
     #
 
-    fmt_name = args.format
-    if fmt_name is None:
-        if args.file is not None:
-            ext = args.file.rpartition('.')[2]
-            if ext in FORMATS_BY_NAME:
-                fmt_name = ext
-    if fmt_name is None:
-        fmt_name = 'json'
-    fmt = FORMATS_BY_NAME[fmt_name]
-
-    if args.stream:
-        check.arg(fmt is Formats.JSON.value)
+    cfg = _process_args(args)
 
     #
 
@@ -172,20 +178,31 @@ def _main() -> None:
         else:
             in_file = es.enter_context(open(args.file, 'rb'))
 
+        def yield_input() -> ta.Generator[bytes, None, None]:
+            fd = check.isinstance(in_file.fileno(), int)
+
+            while True:
+                buf = os.read(fd, args.read_buffer_size)
+
+                yield buf
+
+                if not buf:
+                    break
+
         #
 
         if args.less:
             less = subprocess.Popen(
                 [
                     'less',
-                    *(['-R'] if args.color else []),
+                    *(['-R'] if cfg.rendering.color else []),
                 ],
                 stdin=subprocess.PIPE,
                 encoding='utf-8',
             )
             out = check.not_none(less.stdin)
 
-            def close_less():
+            def close_less() -> None:
                 out.close()
                 less.wait()
 
@@ -196,67 +213,86 @@ def _main() -> None:
 
         #
 
-        if args.stream:
-            fd = in_file.fileno()
-            decoder = codecs.getincrementaldecoder('utf-8')()
+        parser: ta.Any
+        renderer: ta.Any
 
+        if args.stream:
             with contextlib.ExitStack() as es2:
-                lex = es2.enter_context(JsonStreamLexer())
-                parse = es2.enter_context(JsonStreamParser())
+                parser = es2.enter_context(StreamParser())
 
-                if args.stream_build:
-                    build = es2.enter_context(JsonObjectBuilder())
-                    renderer = None
+                def flush_output(
+                        fn: ta.Callable[[T], ta.Iterable[U]],
+                        i: T,
+                ) -> ta.Generator[U, None, None]:
+                    n = 0
+                    for o in fn(i):
+                        yield o
+                        n += 1
+                    if n:
+                        out.flush()
 
-                else:
-                    renderer = StreamJsonRenderer(
-                        out,
-                        style=term_color if args.color else None,
-                        delimiter='\n',
-                        **kw,
-                    )
-                    build = None
-
-                while True:
-                    buf = os.read(fd, args.read_buffer_size)
+                pipeline: ta.Any
 
-                    for s in decoder.decode(buf, not buf):
-                        n = 0
-                        for c in s:
-                            for t in lex(c):
-                                for e in parse(t):
-                                    if renderer is not None:
-                                        renderer.render((e,))
+                if args.stream_build:
+                    builder: StreamBuilder = es2.enter_context(StreamBuilder())
+                    processor = Processor(cfg.processing)
+                    renderer = EagerRenderer(cfg.rendering)
+                    trailing_newline = False
+
+                    def append_newlines(
+                            fn: ta.Callable[[T], ta.Iterable[str]],
+                            i: T,
+                    ) -> ta.Generator[str, None, None]:
+                        yield from fn(i)
+                        yield '\n'
+
+                    pipeline = lambda v: (renderer.render(v),)  # Any -> [str]  # noqa
+                    pipeline = fp.bind(append_newlines, pipeline)  # Any -> [str]
+                    pipeline = fp.bind(lang.flatmap, pipeline)  # [Any] -> [str]
+                    pipeline = fp.pipe(fp.bind(lang.flatmap, processor.process), pipeline)  # [Any] -> [str]
+                    pipeline = fp.pipe(fp.bind(lang.flatmap, builder.build), pipeline)  # [JsonStreamParserEvent] -> [str]  # noqa
+                    pipeline = fp.pipe(parser.parse, pipeline)  # bytes -> [str]
 
-                                    if build is not None:
-                                        for v in build(e):
-                                            print(render_one(v), file=out)
+                else:
+                    renderer = StreamRenderer(cfg.rendering)
+                    trailing_newline = True
 
-                                    n += 1
+                    pipeline = renderer.render  # JsonStreamParserEvent -> [str]
+                    pipeline = fp.bind(lang.flatmap, pipeline)  # [JsonStreamParserEvent] -> [str]
+                    pipeline = fp.pipe(parser.parse, pipeline)  # bytes -> [str]
 
-                        if n:
-                            out.flush()
+                pipeline = fp.bind(flush_output, pipeline)  # bytes -> [str]
 
-                    if not buf:
-                        break
+                for buf in yield_input():
+                    for s in pipeline(buf):
+                        print(s, file=out, end='')
 
-                if renderer is not None:
-                    out.write('\n')
+                if trailing_newline:
+                    print(file=out)
 
         elif args.lines:
-            fd = in_file.fileno()
-            db = DelimitingBuffer()
+            parser = DelimitingParser(cfg.format)
+            processor = Processor(cfg.processing)
+            renderer = EagerRenderer(cfg.rendering)
 
-            while buf := os.read(fd, args.read_buffer_size):
-                for chunk in db.feed(buf):
-                    s = check.isinstance(chunk, bytes).decode('utf-8')
-                    v = fmt.load(io.StringIO(s))
-                    print(render_one(v), file=out)
+            for buf in yield_input():
+                if buf:
+                    for v in parser.parse(buf):
+                        for e in processor.process(v):
+                            s = renderer.render(e)
+                            print(s, file=out)
 
         else:
-            with io.TextIOWrapper(in_file) as tw:
-                v = fmt.load(tw)
-                print(render_one(v), file=out)
+            parser = EagerParser(cfg.format)
+            processor = Processor(cfg.processing)
+            renderer = EagerRenderer(cfg.rendering)
+
+            with io.TextIOWrapper(in_file) as tf:
+                v = parser.parse(tf)
+
+                for e in processor.process(v):
+                    s = renderer.render(e)
+                    print(s, file=out)
 
 
 if __name__ == '__main__':
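
Note: in the `--stream --stream-build` branch above, the pipeline is composed inside-out with `fp.bind`/`fp.pipe`; reading the `# ... -> [str]` annotations bottom-up, the resulting `pipeline(buf)` should be roughly equivalent to the explicit nesting below. This is a sketch for clarity only, with the collaborating objects passed in explicitly rather than closed over:

    import typing as ta


    def stream_build_pipeline(
            buf: bytes,
            parser,     # StreamParser, from the new parsing module below
            builder,    # StreamBuilder
            processor,  # Processor
            renderer,   # EagerRenderer
            out,        # output text stream
    ) -> ta.Iterator[str]:
        n = 0
        for event in parser.parse(buf):                      # bytes -> JSON stream events
            for value in builder.build(event):               # events -> completed values
                for processed in processor.process(value):   # jmespath / --flat handling
                    yield renderer.render(processed)         # value -> rendered str
                    yield '\n'                               # append_newlines
                    n += 2
        if n:
            out.flush()                                      # flush_output
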
@@ -0,0 +1,82 @@
+import codecs
+import io
+import typing as ta
+
+from .... import check
+from .... import lang
+from ....lite.io import DelimitingBuffer
+from ..stream.build import JsonObjectBuilder
+from ..stream.lex import JsonStreamLexer
+from ..stream.parse import JsonStreamParser
+from ..stream.parse import JsonStreamParserEvent
+from .formats import Format
+
+
+##
+
+
+class EagerParser:
+    def __init__(self, fmt: Format) -> None:
+        super().__init__()
+
+        self._fmt = fmt
+
+    def parse(self, f: ta.TextIO) -> ta.Generator[ta.Any, None, None]:
+        return self._fmt.load(f)
+
+
+##
+
+
+class DelimitingParser:
+    def __init__(
+            self,
+            fmt: Format,
+            *,
+            delimiters: ta.Iterable[int] = b'\n',
+    ) -> None:
+        super().__init__()
+
+        self._fmt = fmt
+
+        self._db = DelimitingBuffer(delimiters)
+
+    def parse(self, b: bytes) -> ta.Generator[ta.Any, None, None]:
+        for chunk in self._db.feed(b):
+            s = check.isinstance(chunk, bytes).decode('utf-8')
+            v = self._fmt.load(io.StringIO(s))
+            yield v
+
+
+##
+
+
+class StreamBuilder(lang.ExitStacked):
+    _builder: JsonObjectBuilder | None = None
+
+    def __enter__(self) -> ta.Self:
+        super().__enter__()
+        self._builder = self._enter_context(JsonObjectBuilder())
+        return self
+
+    def build(self, e: JsonStreamParserEvent) -> ta.Generator[ta.Any, None, None]:
+        yield from check.not_none(self._builder)(e)
+
+
+class StreamParser(lang.ExitStacked):
+    _decoder: codecs.IncrementalDecoder
+    _lex: JsonStreamLexer
+    _parse: JsonStreamParser
+
+    def __enter__(self) -> ta.Self:
+        super().__enter__()
+        self._decoder = codecs.getincrementaldecoder('utf-8')()
+        self._lex = self._enter_context(JsonStreamLexer())
+        self._parse = self._enter_context(JsonStreamParser())
+        return self
+
+    def parse(self, b: bytes) -> ta.Generator[JsonStreamParserEvent, None, None]:
+        for s in self._decoder.decode(b, not b):
+            for c in s:
+                for t in self._lex(c):
+                    yield from self._parse(t)
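
Note: a minimal usage sketch for the parser classes above; `Formats.JSON.value` is the format object the CLI diff pulls from the existing `.formats` module, and the input bytes are illustrative only:

    # Line-delimited input, one document per line:
    dp = DelimitingParser(Formats.JSON.value)
    for v in dp.parse(b'{"a": 1}\n{"b": 2}\n'):
        print(v)  # {'a': 1}, then {'b': 2}

    # Incremental streaming: feed arbitrary byte chunks, finish with b'' to flush:
    with StreamParser() as sp, StreamBuilder() as sb:
        for chunk in (b'{"a": ', b'1}', b''):
            for event in sp.parse(chunk):
                for value in sb.build(event):
                    print(value)  # {'a': 1}
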
@@ -0,0 +1,44 @@
+import dataclasses as dc
+import typing as ta
+
+from .... import lang
+
+
+if ta.TYPE_CHECKING:
+    from ....specs import jmespath
+else:
+    jmespath = lang.proxy_import('....specs.jmespath', __package__)
+
+
+##
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class ProcessingOptions:
+    jmespath_expr: ta.Any | None = None
+    flat: bool = False
+
+
+class Processor:
+    def __init__(self, opts: ProcessingOptions) -> None:
+        super().__init__()
+
+        self._opts = opts
+
+        jmespath_expr = opts.jmespath_expr
+        if isinstance(jmespath_expr, str):
+            jmespath_expr = jmespath.compile(jmespath_expr)
+        self._jmespath_expr: ta.Any | None = jmespath_expr
+
+    def process(self, v: ta.Any) -> ta.Iterable[ta.Any]:
+        if self._jmespath_expr is not None:
+            v = self._jmespath_expr.search(v)
+
+        if self._opts.flat:
+            if isinstance(v, str):
+                raise TypeError(f'Flat output must be arrays, got {type(v)}', v)
+
+            yield from v
+
+        else:
+            yield v
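
Note: the `flat` option here backs the new `-F`/`--flat` CLI flag, and a string `jmespath_expr` backs `-x`. A small sketch of the behavior as defined above:

    p = Processor(ProcessingOptions(flat=True))
    print(list(p.process([1, 2, 3])))   # [1, 2, 3] -- elements yielded individually

    p = Processor(ProcessingOptions())
    print(list(p.process([1, 2, 3])))   # [[1, 2, 3]] -- the value yielded whole

    # A string jmespath_expr is compiled in __init__ and applied before flattening:
    p = Processor(ProcessingOptions(jmespath_expr='[].a', flat=True))
    print(list(p.process([{'a': 1}, {'a': 2}])))  # [1, 2]
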
@@ -0,0 +1,92 @@
+import dataclasses as dc
+import json
+import typing as ta
+
+from .... import lang
+from .... import term
+from ..render import JsonRenderer
+from ..stream.parse import JsonStreamParserEvent
+from ..stream.render import StreamJsonRenderer
+
+
+##
+
+
+@dc.dataclass(frozen=True, kw_only=True)
+class RenderingOptions:
+    indent: int | str | None = None
+    separators: tuple[str, str] | None = None
+    sort_keys: bool = False
+    raw: bool = False
+    unicode: bool = False
+    color: bool = False
+
+
+def make_render_kwargs(opts: RenderingOptions) -> ta.Mapping[str, ta.Any]:
+    return dict(
+        indent=opts.indent,
+        separators=opts.separators,
+        sort_keys=opts.sort_keys,
+        ensure_ascii=not opts.unicode,
+    )
+
+
+class Renderer(lang.Abstract):
+    def __init__(self, opts: RenderingOptions) -> None:
+        super().__init__()
+        self._opts = opts
+        self._kw = make_render_kwargs(opts)
+
+
+##
+
+
+def term_color(o: ta.Any, state: JsonRenderer.State) -> tuple[str, str]:
+    if state is JsonRenderer.State.KEY:
+        return term.SGR(term.SGRs.FG.BRIGHT_BLUE), term.SGR(term.SGRs.RESET)
+    elif isinstance(o, str):
+        return term.SGR(term.SGRs.FG.GREEN), term.SGR(term.SGRs.RESET)
+    else:
+        return '', ''
+
+
+##
+
+
+class EagerRenderer(Renderer):
+    def render(self, v: ta.Any) -> str:
+        if self._opts.raw:
+            if not isinstance(v, str):
+                raise TypeError(f'Raw output must be strings, got {type(v)}', v)
+
+            return v
+
+        elif self._opts.color:
+            return JsonRenderer.render_str(
+                v,
+                **self._kw,
+                style=term_color,
+            )
+
+        else:
+            return json.dumps(
+                v,
+                **self._kw,
+            )
+
+
+##
+
+
+class StreamRenderer(Renderer):
+    def __init__(self, opts: RenderingOptions) -> None:
+        super().__init__(opts)
+
+        self._renderer = StreamJsonRenderer(
+            style=term_color if self._opts.color else None,
+            delimiter='\n',
+            **self._kw,
+        )
+
+    def render(self, e: JsonStreamParserEvent) -> ta.Generator[str, None, None]:
+        return self._renderer.render((e,))
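
Note: `raw` and `unicode` back the new `-R` and `-U` CLI flags. A brief sketch of `EagerRenderer` under the options defined above (output described in comments is what `json.dumps` would produce):

    r = EagerRenderer(RenderingOptions(indent=2, sort_keys=True))
    print(r.render({'b': 1, 'a': 'é'}))  # pretty-printed, keys sorted, 'é' escaped as \u00e9

    r = EagerRenderer(RenderingOptions(unicode=True))
    print(r.render({'a': 'é'}))          # ensure_ascii=False, so 'é' passes through

    r = EagerRenderer(RenderingOptions(raw=True))
    print(r.render('already a string'))  # returned as-is; a non-str value raises TypeError
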
@@ -4,8 +4,18 @@ import typing as ta
 ##
 
 
+class Separators(ta.NamedTuple):
+    comma: str
+    colon: str
+
+
+##
+
+
 PRETTY_INDENT = 2
 
+PRETTY_SEPARATORS = Separators(', ', ': ')
+
 PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
     indent=PRETTY_INDENT,
 )
@@ -14,7 +24,7 @@ PRETTY_KWARGS: ta.Mapping[str, ta.Any] = dict(
 ##
 
 
-COMPACT_SEPARATORS = (',', ':')
+COMPACT_SEPARATORS = Separators(',', ':')
 
 COMPACT_KWARGS: ta.Mapping[str, ta.Any] = dict(
     indent=None,
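
Note: since `Separators` is a `ta.NamedTuple`, the constants remain plain `(item_separator, key_separator)` tuples and can still be passed straight to `json.dumps`; a quick check of the values defined above:

    import json

    print(json.dumps({'a': [1, 2]}, separators=COMPACT_SEPARATORS))  # {"a":[1,2]}
    print(json.dumps({'a': [1, 2]}, separators=PRETTY_SEPARATORS))   # {"a": [1, 2]}
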