ethspecify 0.2.2__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ethspecify/cli.py CHANGED
@@ -3,7 +3,7 @@ import json
 import os
 import sys

-from .core import grep, replace_spec_tags, get_pyspec, get_latest_fork, get_spec_item_history
+from .core import grep, replace_spec_tags, get_pyspec, get_latest_fork, get_spec_item_history, load_config, run_checks


 def process(args):
@@ -13,9 +13,12 @@ def process(args):
         print(f"Error: The directory {repr(project_dir)} does not exist.")
         return 1

+    # Load config once from the project directory
+    config = load_config(project_dir)
+
     for f in grep(project_dir, r"<spec\b.*?>", args.exclude):
         print(f"Processing file: {f}")
-        replace_spec_tags(f)
+        replace_spec_tags(f, config)

     return 0

@@ -66,6 +69,64 @@ def _list_tags_with_history(args, preset):
     return 0


+def check(args):
+    """Run checks to validate spec references."""
+    project_dir = os.path.abspath(os.path.expanduser(args.path))
+    if not os.path.isdir(project_dir):
+        print(f"Error: The directory {repr(project_dir)} does not exist.")
+        return 1
+
+    # Load config
+    config = load_config(project_dir)
+
+    # Run checks
+    success, results = run_checks(project_dir, config)
+
+    # Collect all missing items and errors
+    all_missing = []
+    all_errors = []
+    total_coverage = {"found": 0, "expected": 0}
+    total_source_files = {"valid": 0, "total": 0}
+
+    for section_name, section_results in results.items():
+        # Determine the type prefix from section name
+        if "Config Variables" in section_name:
+            type_prefix = "config_var"
+        elif "Preset Variables" in section_name:
+            type_prefix = "preset_var"
+        elif "Ssz Objects" in section_name:
+            type_prefix = "ssz_object"
+        elif "Dataclasses" in section_name:
+            type_prefix = "dataclass"
+        else:
+            type_prefix = section_name.lower().replace(" ", "_")
+
+        # Collect source file errors
+        source = section_results['source_files']
+        total_source_files["valid"] += source["valid"]
+        total_source_files["total"] += source["total"]
+        all_errors.extend(source["errors"])
+
+        # Collect missing items with type prefix
+        coverage = section_results['coverage']
+        total_coverage["found"] += coverage["found"]
+        total_coverage["expected"] += coverage["expected"]
+        for missing in coverage['missing']:
+            all_missing.append(f"MISSING: {type_prefix}.{missing}")
+
+    # Display only errors and missing items
+    for error in all_errors:
+        print(error)
+
+    for missing in sorted(all_missing):
+        print(missing)
+
+    if all_errors or all_missing:
+        return 1
+    else:
+        total_refs = total_coverage['expected']
+        print(f"All specification references ({total_refs}) are valid.")
+        return 0


 def list_forks(args):
@@ -139,6 +200,16 @@ def main():
         default=None,
     )

+    # Parser for 'check' command
+    check_parser = subparsers.add_parser("check", help="Check spec reference coverage and validity")
+    check_parser.set_defaults(func=check)
+    check_parser.add_argument(
+        "--path",
+        type=str,
+        help="Directory containing YAML files to check",
+        default=".",
+    )
+
     # Parser for 'list-forks' command
     list_forks_parser = subparsers.add_parser("list-forks", help="List available forks")
     list_forks_parser.set_defaults(func=list_forks)
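The new `check` subcommand only takes a `--path` argument (defaulting to the current directory) and exits non-zero when any reference is missing or invalid. A minimal sketch of driving it programmatically, assuming the package is installed and the target directory contains a `.ethspecify.yml` (this mirrors the `ethspecify check --path .` console invocation, but is not part of the package itself):

```python
import argparse

from ethspecify.cli import check

# Build the same namespace the argparse subparser would produce for `--path .`.
args = argparse.Namespace(path=".")
exit_code = check(args)  # 0 if all spec references are valid, 1 otherwise
print(f"check finished with exit code {exit_code}")
```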
ethspecify/core.py CHANGED
@@ -1,5 +1,6 @@
 import difflib
 import functools
+import glob
 import hashlib
 import io
 import os
@@ -7,6 +8,50 @@ import re
 import requests
 import textwrap
 import tokenize
+import yaml
+
+
+def load_config(directory=None):
+    """
+    Load configuration from .ethspecify.yml file in the specified directory.
+    Returns a dict with configuration values, or empty dict if no config file found.
+    """
+    if directory is None:
+        directory = os.getcwd()
+
+    config_path = os.path.join(directory, '.ethspecify.yml')
+
+    if os.path.exists(config_path):
+        try:
+            with open(config_path, 'r') as f:
+                config = yaml.safe_load(f)
+                return config if config else {}
+        except (yaml.YAMLError, IOError) as e:
+            print(f"Warning: Error reading .ethspecify.yml file: {e}")
+            return {}
+
+    return {}
+
+
+def is_excepted(item_name, fork, exceptions):
+    """
+    Check if an item#fork combination is in the exception list.
+    Exceptions can be:
+    - Just the item name (applies to all forks)
+    - item#fork (specific fork)
+    """
+    if not exceptions:
+        return False
+
+    # Check for exact match with fork
+    if f"{item_name}#{fork}" in exceptions:
+        return True
+
+    # Check for item name only (all forks)
+    if item_name in exceptions:
+        return True
+
+    return False


 def strip_comments(code):
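As a quick illustration of the exception format the new `is_excepted` helper accepts (a bare item name matches every fork, while an `item#fork` entry matches only that fork), here is a minimal sketch; the item and fork names are made up, and the import path assumes the package layout shown in this diff:

```python
from ethspecify.core import is_excepted

# Hypothetical exception list: a bare name covers every fork,
# while "item#fork" covers only that specific fork.
exceptions = ["get_custom_thing", "process_attestation#electra"]

assert is_excepted("get_custom_thing", "deneb", exceptions)         # name-only entry matches any fork
assert is_excepted("process_attestation", "electra", exceptions)    # exact item#fork match
assert not is_excepted("process_attestation", "deneb", exceptions)  # other forks are not excepted
```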
@@ -348,7 +393,10 @@ def _trace_item_history(item_name, category, all_forks, pyspec, preset):

     return history_forks

-def parse_common_attributes(attributes):
+def parse_common_attributes(attributes, config=None):
+    if config is None:
+        config = {}
+
     try:
         preset = attributes["preset"]
     except KeyError:
@@ -357,7 +405,7 @@ def parse_common_attributes(attributes):
     try:
         version = attributes["version"]
     except KeyError:
-        version = "nightly"
+        version = config.get("version", "nightly")

     try:
         fork = attributes["fork"]
@@ -367,12 +415,12 @@ def parse_common_attributes(attributes):
     try:
         style = attributes["style"]
     except KeyError:
-        style = "hash"
+        style = config.get("style", "hash")

     return preset, fork, style, version

-def get_spec_item(attributes):
-    preset, fork, style, version = parse_common_attributes(attributes)
+def get_spec_item(attributes, config=None):
+    preset, fork, style, version = parse_common_attributes(attributes, config)
     spec = get_spec(attributes, preset, fork, version)

     if style == "full" or style == "hash":
@@ -423,10 +471,14 @@ def extract_attributes(tag):
     return dict(attr_pattern.findall(tag))


-def replace_spec_tags(file_path):
+def replace_spec_tags(file_path, config=None):
     with open(file_path, 'r') as file:
         content = file.read()

+    # Use provided config or load from file's directory as fallback
+    if config is None:
+        config = load_config(os.path.dirname(file_path))
+
     # Define regex to match self-closing tags and long (paired) tags separately
     pattern = re.compile(
         r'(?P<self><spec\b[^>]*\/>)|(?P<long><spec\b[^>]*>[\s\S]*?</spec>)',
@@ -467,7 +519,7 @@ def replace_spec_tags(file_path):

         attributes = extract_attributes(original_tag_text)
         print(f"spec tag: {attributes}")
-        preset, fork, style, version = parse_common_attributes(attributes)
+        preset, fork, style, version = parse_common_attributes(attributes, config)
         spec = get_spec(attributes, preset, fork, version)
         hash_value = hashlib.sha256(spec.encode('utf-8')).hexdigest()[:8]

@@ -478,7 +530,7 @@ def replace_spec_tags(file_path):
         else:
             # For full/diff styles, rebuild as a long (paired) tag.
             new_opening = rebuild_opening_tag(attributes, hash_value)
-            spec_content = get_spec_item(attributes)
+            spec_content = get_spec_item(attributes, config)
             prefix = content[:match.start()].splitlines()[-1]
             prefixed_spec = "\n".join(
                 f"{prefix}{line}" if line.rstrip() else prefix.rstrip()
@@ -494,3 +546,459 @@ def replace_spec_tags(file_path):
     # Write the updated content back to the file
     with open(file_path, 'w') as file:
         file.write(updated_content)
+
+
+def check_source_files(yaml_file, project_root, exceptions=None):
+    """
+    Check that source files referenced in a YAML file exist and contain expected search strings.
+    Returns (valid_count, total_count, errors)
+    """
+    if exceptions is None:
+        exceptions = []
+    if not os.path.exists(yaml_file):
+        return 0, 0, [f"YAML file not found: {yaml_file}"]
+
+    errors = []
+    total_count = 0
+
+    try:
+        with open(yaml_file, 'r') as f:
+            content_str = f.read()
+
+        # Try to fix common YAML issues with unquoted search strings
+        # Replace unquoted search values ending with colons
+        content_str = re.sub(r'(\s+search:\s+)([^"\n]+:)(\s*$)', r'\1"\2"\3', content_str, flags=re.MULTILINE)
+
+        try:
+            content = yaml.safe_load(content_str)
+        except yaml.YAMLError:
+            # Fall back to FullLoader if safe_load fails
+            content = yaml.load(content_str, Loader=yaml.FullLoader)
+    except (yaml.YAMLError, IOError) as e:
+        return 0, 0, [f"YAML parsing error in {yaml_file}: {e}"]
+
+    if not content:
+        return 0, 0, []
+
+    # Handle both array of objects and single object formats
+    items = content if isinstance(content, list) else [content]
+
+    for item in items:
+        if not isinstance(item, dict) or 'sources' not in item:
+            continue
+
+        # Extract spec reference information from the item
+        spec_ref = None
+        if 'spec' in item and isinstance(item['spec'], str):
+            # Try to extract spec reference from spec content
+            spec_content = item['spec']
+            # Look for any spec tag attribute and fork
+            spec_tag_match = re.search(r'<spec\s+([^>]+)>', spec_content)
+            if spec_tag_match:
+                tag_attrs = spec_tag_match.group(1)
+                # Extract fork
+                fork_match = re.search(r'fork="([^"]+)"', tag_attrs)
+                # Extract the main attribute (not hash or fork)
+                attr_matches = re.findall(r'(\w+)="([^"]+)"', tag_attrs)
+
+                if fork_match:
+                    fork = fork_match.group(1)
+                    # Find the first non-meta attribute
+                    for attr_name, attr_value in attr_matches:
+                        if attr_name not in ['fork', 'hash', 'preset', 'version', 'style']:
+                            # Map attribute names to type prefixes
+                            type_map = {
+                                'fn': 'functions',
+                                'function': 'functions',
+                                'constant_var': 'constants',
+                                'config_var': 'configs',
+                                'preset_var': 'presets',
+                                'ssz_object': 'ssz_objects',
+                                'dataclass': 'dataclasses',
+                                'custom_type': 'custom_types'
+                            }
+                            type_prefix = type_map.get(attr_name, attr_name)
+                            spec_ref = f"{type_prefix}.{attr_value}#{fork}"
+                            break
+
+        # Fallback to just the name if spec extraction failed
+        if not spec_ref and 'name' in item:
+            spec_ref = item['name']
+
+        # Check if sources list is empty
+        if not item['sources']:
+            if spec_ref:
+                # Extract item name and fork from spec_ref for exception checking
+                if '#' in spec_ref and '.' in spec_ref:
+                    # Format: "functions.item_name#fork"
+                    _, item_with_fork = spec_ref.split('.', 1)
+                    if '#' in item_with_fork:
+                        item_name, fork = item_with_fork.split('#', 1)
+                        # Check if this item is in exceptions
+                        if is_excepted(item_name, fork, exceptions):
+                            total_count += 1
+                            continue
+
+                errors.append(f"EMPTY SOURCES: {spec_ref}")
+            else:
+                # Fallback if we can't extract spec reference
+                item_name = item.get('name', 'unknown')
+                errors.append(f"EMPTY SOURCES: No sources defined ({item_name})")
+            total_count += 1
+            continue
+
+        for source in item['sources']:
+            # All sources now use the standardized dict format with file and optional search
+            if not isinstance(source, dict) or 'file' not in source:
+                continue
+
+            file_path = source['file']
+            search_string = source.get('search')
+            is_regex = source.get('regex', False)
+
+            total_count += 1
+
+            # Parse line range from file path if present (#L123 or #L123-L456)
+            line_range = None
+            if '#L' in file_path:
+                base_path, line_part = file_path.split('#L', 1)
+                file_path = base_path
+                # Format is always #L123 or #L123-L456, so just remove all 'L' characters
+                line_range = line_part.replace('L', '')
+
+            full_path = os.path.join(project_root, file_path)
+
+            # Create error prefix with spec reference if available
+            ref_prefix = f"{spec_ref} | " if spec_ref else ""
+
+            # Check if file exists
+            if not os.path.exists(full_path):
+                errors.append(f"MISSING FILE: {ref_prefix}{file_path}")
+                continue
+
+            # Check line range if specified
+            if line_range:
+                try:
+                    with open(full_path, 'r', encoding='utf-8') as f:
+                        lines = f.readlines()
+                        total_lines = len(lines)
+
+                    # Parse line range
+                    if '-' in line_range:
+                        # Range like "123-456"
+                        start_str, end_str = line_range.split('-', 1)
+                        start_line = int(start_str)
+                        end_line = int(end_str)
+
+                        if start_line < 1 or end_line < 1 or start_line > end_line:
+                            errors.append(f"INVALID LINE RANGE: {ref_prefix}#{line_range} - invalid range in {file_path}")
+                            continue
+                        elif end_line > total_lines:
+                            errors.append(f"INVALID LINE RANGE: {ref_prefix}#{line_range} - line {end_line} exceeds file length ({total_lines}) in {file_path}")
+                            continue
+                    else:
+                        # Single line like "123"
+                        line_num = int(line_range)
+                        if line_num < 1:
+                            errors.append(f"INVALID LINE RANGE: {ref_prefix}#{line_range} - invalid line number in {file_path}")
+                            continue
+                        elif line_num > total_lines:
+                            errors.append(f"INVALID LINE RANGE: {ref_prefix}#{line_range} - line {line_num} exceeds file length ({total_lines}) in {file_path}")
+                            continue
+
+                except ValueError:
+                    errors.append(f"INVALID LINE RANGE: {ref_prefix}#{line_range} - invalid line format in {file_path}")
+                    continue
+                except (IOError, UnicodeDecodeError):
+                    errors.append(f"ERROR READING: {ref_prefix}{file_path}")
+                    continue

+            # Check search string if provided
+            if search_string:
+                try:
+                    with open(full_path, 'r', encoding='utf-8') as f:
+                        content = f.read()
+
+                    if is_regex:
+                        # Use regex search
+                        try:
+                            pattern = re.compile(search_string, re.MULTILINE)
+                            matches = list(pattern.finditer(content))
+                            count = len(matches)
+                            search_type = "REGEX"
+                        except re.error as e:
+                            errors.append(f"INVALID REGEX: {ref_prefix}'{search_string}' in {file_path} - {e}")
+                            continue
+                    else:
+                        # Use literal string search
+                        count = content.count(search_string)
+                        search_type = "SEARCH"
+
+                    if count == 0:
+                        errors.append(f"{search_type} NOT FOUND: {ref_prefix}'{search_string}' in {file_path}")
+                    elif count > 1:
+                        errors.append(f"AMBIGUOUS {search_type}: {ref_prefix}'{search_string}' found {count} times in {file_path}")
+                except (IOError, UnicodeDecodeError):
+                    errors.append(f"ERROR READING: {ref_prefix}{file_path}")
+
+    valid_count = total_count - len(errors)
+    return valid_count, total_count, errors
+
+
+def extract_spec_tags_from_yaml(yaml_file, tag_type=None):
+    """
+    Extract spec tags from a YAML file and return (tag_types_found, item#fork pairs).
+    If tag_type is provided, only extract tags of that type.
+    """
+    if not os.path.exists(yaml_file):
+        return set(), set()
+
+    pairs = set()
+    tag_types_found = set()
+
+    # Known tag type attributes
+    tag_attributes = ['fn', 'function', 'constant_var', 'config_var', 'preset_var',
+                      'ssz_object', 'dataclass', 'custom_type']
+
+    try:
+        with open(yaml_file, 'r') as f:
+            content_str = f.read()
+
+        # Try to fix common YAML issues with unquoted search strings
+        # Replace unquoted search values ending with colons
+        content_str = re.sub(r'(\s+search:\s+)([^"\n]+:)(\s*$)', r'\1"\2"\3', content_str, flags=re.MULTILINE)
+
+        try:
+            content = yaml.safe_load(content_str)
+        except yaml.YAMLError:
+            # Fall back to FullLoader if safe_load fails
+            content = yaml.load(content_str, Loader=yaml.FullLoader)
+
+        if not content:
+            return tag_types_found, pairs
+
+        # Handle both array of objects and single object formats
+        items = content if isinstance(content, list) else [content]
+
+        for item in items:
+            if not isinstance(item, dict) or 'spec' not in item:
+                continue
+
+            spec_content = item['spec']
+            if not isinstance(spec_content, str):
+                continue
+
+            # Find all spec tags in the content
+            spec_tag_pattern = r'<spec\s+([^>]+)>'
+            spec_matches = re.findall(spec_tag_pattern, spec_content)
+
+            for tag_attrs_str in spec_matches:
+                # Extract all attributes from the tag
+                attrs = dict(re.findall(r'(\w+)="([^"]+)"', tag_attrs_str))
+
+                # Find which tag type this is
+                found_tag_type = None
+                item_name = None
+
+                for attr in tag_attributes:
+                    if attr in attrs:
+                        found_tag_type = attr
+                        item_name = attrs[attr]
+                        # Normalize function to fn
+                        if found_tag_type == 'function':
+                            found_tag_type = 'fn'
+                        break
+
+                if found_tag_type and 'fork' in attrs:
+                    tag_types_found.add(found_tag_type)
+
+                    # If tag_type filter is specified, only add matching types
+                    if tag_type is None or tag_type == found_tag_type:
+                        pairs.add(f"{item_name}#{attrs['fork']}")
+
+    except (IOError, UnicodeDecodeError, yaml.YAMLError):
+        pass
+
+    return tag_types_found, pairs
+
+
+def check_coverage(yaml_file, tag_type, exceptions, preset="mainnet"):
+    """
+    Check that all spec items from ethspecify have corresponding tags in the YAML file.
+    Returns (found_count, total_count, missing_items)
+    """
+    # Map tag types to history keys
+    history_key_map = {
+        'ssz_object': 'ssz_objects',
+        'config_var': 'config_vars',
+        'preset_var': 'preset_vars',
+        'dataclass': 'dataclasses',
+        'fn': 'functions',
+        'constant_var': 'constant_vars',
+        'custom_type': 'custom_types'
+    }
+
+    # Get expected items from ethspecify
+    history = get_spec_item_history(preset)
+    expected_pairs = set()
+
+    history_key = history_key_map.get(tag_type, tag_type)
+    if history_key in history:
+        for item_name, forks in history[history_key].items():
+            for fork in forks:
+                expected_pairs.add(f"{item_name}#{fork}")
+
+    # Get actual pairs from YAML file
+    _, actual_pairs = extract_spec_tags_from_yaml(yaml_file, tag_type)
+
+    # Find missing items (excluding exceptions)
+    missing_items = []
+    total_count = len(expected_pairs)
+
+    for item_fork in expected_pairs:
+        item_name, fork = item_fork.split('#', 1)
+
+        if is_excepted(item_name, fork, exceptions):
+            continue
+
+        if item_fork not in actual_pairs:
+            missing_items.append(item_fork)
+
+    found_count = total_count - len(missing_items)
+    return found_count, total_count, missing_items
+
+
+def run_checks(project_dir, config):
+    """
+    Run all checks based on the configuration.
+    Returns (success, results)
+    """
+    results = {}
+    overall_success = True
+
+    # Get specrefs config
+    specrefs_config = config.get('specrefs', {})
+
+    # Handle both old format (specrefs as array) and new format (specrefs as dict)
+    if isinstance(specrefs_config, list):
+        # Old format: specrefs: [file1, file2, ...]
+        specrefs_files = specrefs_config
+        exceptions = config.get('exceptions', {})
+    else:
+        # New format: specrefs: { files: [...], exceptions: {...} }
+        specrefs_files = specrefs_config.get('files', [])
+        exceptions = specrefs_config.get('exceptions', {})
+
+    if not specrefs_files:
+        print("Error: No specrefs files specified in .ethspecify.yml")
+        print("Please add a 'specrefs:' section with 'files:' listing the files to check")
+        return False, {}
+
+    # Map tag types to exception keys (support both singular and plural)
+    exception_key_map = {
+        'ssz_object': ['ssz_objects', 'ssz_object'],
+        'config_var': ['configs', 'config_variables', 'config_var'],
+        'preset_var': ['presets', 'preset_variables', 'preset_var'],
+        'dataclass': ['dataclasses', 'dataclass'],
+        'fn': ['functions', 'fn'],
+        'constant_var': ['constants', 'constant_variables', 'constant_var'],
+        'custom_type': ['custom_types', 'custom_type']
+    }
+
+    # Use explicit file list only
+    for filename in specrefs_files:
+        yaml_path = os.path.join(project_dir, filename)
+
+        if not os.path.exists(yaml_path):
+            print(f"Error: File {filename} defined in config but not found")
+            overall_success = False
+            continue
+
+        # Detect tag types in the file
+        tag_types_found, _ = extract_spec_tags_from_yaml(yaml_path)
+
+        # Check for preset indicators in filename
+        preset = "mainnet"  # default preset
+        if 'minimal' in filename.lower():
+            preset = "minimal"
+
+        # Process each tag type found in the file
+        if not tag_types_found:
+            # No spec tags found, still check source files
+            valid_count, total_count, source_errors = check_source_files(yaml_path, os.path.dirname(project_dir), [])
+
+            # Store results using filename as section name
+            section_name = filename.replace('.yml', '').replace('-', ' ').title()
+            if preset != "mainnet":
+                section_name += f" ({preset.title()})"
+
+            results[section_name] = {
+                'source_files': {
+                    'valid': valid_count,
+                    'total': total_count,
+                    'errors': source_errors
+                },
+                'coverage': {
+                    'found': 0,
+                    'expected': 0,
+                    'missing': []
+                }
+            }
+
+            if source_errors:
+                overall_success = False
+        else:
+            # Process each tag type separately for better reporting
+            all_missing_items = []
+            total_found = 0
+            total_expected = 0
+
+            for tag_type in tag_types_found:
+                # Get the appropriate exceptions for this tag type
+                section_exceptions = []
+                if tag_type in exception_key_map:
+                    for key in exception_key_map[tag_type]:
+                        if key in exceptions:
+                            section_exceptions = exceptions[key]
+                            break
+
+                # Check coverage for this specific tag type
+                found_count, expected_count, missing_items = check_coverage(yaml_path, tag_type, section_exceptions, preset)
+                total_found += found_count
+                total_expected += expected_count
+                all_missing_items.extend(missing_items)
+
+            # Check source files (only once per file, not per tag type)
+            # Use the union of all exceptions for source file checking
+            all_exceptions = []
+            for tag_type in tag_types_found:
+                if tag_type in exception_key_map:
+                    for key in exception_key_map[tag_type]:
+                        if key in exceptions:
+                            all_exceptions.extend(exceptions[key])
+
+            valid_count, total_count, source_errors = check_source_files(yaml_path, os.path.dirname(project_dir), all_exceptions)
+
+            # Store results using filename as section name
+            section_name = filename.replace('.yml', '').replace('-', ' ').title()
+            if preset != "mainnet":
+                section_name += f" ({preset.title()})"
+
+            results[section_name] = {
+                'source_files': {
+                    'valid': valid_count,
+                    'total': total_count,
+                    'errors': source_errors
+                },
+                'coverage': {
+                    'found': total_found,
+                    'expected': total_expected,
+                    'missing': all_missing_items
+                }
+            }
+
+            # Update overall success
+            if source_errors or all_missing_items:
+                overall_success = False
+
+    return overall_success, results
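For reference, here is a sketch of the configuration shape that `load_config` and `run_checks` understand after this change. The file names and exception entries are purely illustrative (not taken from the package); the top-level `version`/`style` keys are the new tag defaults read by `parse_common_attributes`, and a file name containing `minimal` selects the minimal preset:

```python
import yaml

# Illustrative .ethspecify.yml content, parsed the same way load_config does.
example_config = yaml.safe_load("""
version: nightly   # default for <spec> tags that omit version=
style: hash        # default for <spec> tags that omit style=
specrefs:          # new dict format; a plain list of files is also accepted
  files:
    - specrefs/functions.yml
    - specrefs/ssz-objects-minimal.yml   # 'minimal' in the name selects the minimal preset
  exceptions:
    functions:
      - compute_fork_digest              # excepted for every fork
      - process_attestation#electra      # excepted for this fork only
""")

specrefs = example_config["specrefs"]
print(specrefs["files"])
print(specrefs["exceptions"]["functions"])
```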
ethspecify-0.2.2.dist-info/METADATA → ethspecify-0.2.4.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ethspecify
-Version: 0.2.2
+Version: 0.2.4
 Summary: A utility for processing Ethereum specification tags.
 Home-page: https://github.com/jtraglia/ethspecify
 Author: Justin Traglia
@@ -12,6 +12,7 @@ Requires-Python: >=3.6
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: requests==2.32.3
+Requires-Dist: PyYAML>=6.0
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
ethspecify-0.2.4.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+ethspecify/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ethspecify/cli.py,sha256=SZ47-lgfeDHmzXCE-rx8ydM66N9NfNAA2GDxoC4DE7E,7641
+ethspecify/core.py,sha256=kOu8avxvFnDt8uRuIK4bbl0zjIciXk_8jvlE1cu23J0,37884
+ethspecify-0.2.4.dist-info/licenses/LICENSE,sha256=Awxsr73mm9YMBVhBYnzeI7bNdRd-bH6RDtO5ItG0DaM,1071
+ethspecify-0.2.4.dist-info/METADATA,sha256=DzopppX63lH3ykj6H1nq5mNbRV1mA1XkhAX8bzJhkVw,9212
+ethspecify-0.2.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ethspecify-0.2.4.dist-info/entry_points.txt,sha256=09viGkCg9J3h0c9BFRN-BKaJUEaIc4JyULNgBP5EL_g,51
+ethspecify-0.2.4.dist-info/top_level.txt,sha256=0klaMvlVyOkXW09fwZTijJpdybITEp2c9zQKV5v30VM,11
+ethspecify-0.2.4.dist-info/RECORD,,
@@ -1,9 +0,0 @@
1
- ethspecify/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- ethspecify/cli.py,sha256=wkJyDd55peeJjNe1vF8suje3Mc6ggEa3O6hvfGUZSD4,5162
3
- ethspecify/core.py,sha256=7H1mwkQ2YBxJbO-RCc-7RCy6cJoxFnWwfe-H9McUwaU,17881
4
- ethspecify-0.2.2.dist-info/licenses/LICENSE,sha256=Awxsr73mm9YMBVhBYnzeI7bNdRd-bH6RDtO5ItG0DaM,1071
5
- ethspecify-0.2.2.dist-info/METADATA,sha256=NT3bYROThPVfEI6LNWMOCjh12b-6GNrgY5s6uhjm1io,9185
6
- ethspecify-0.2.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
7
- ethspecify-0.2.2.dist-info/entry_points.txt,sha256=09viGkCg9J3h0c9BFRN-BKaJUEaIc4JyULNgBP5EL_g,51
8
- ethspecify-0.2.2.dist-info/top_level.txt,sha256=0klaMvlVyOkXW09fwZTijJpdybITEp2c9zQKV5v30VM,11
9
- ethspecify-0.2.2.dist-info/RECORD,,