python2mobile 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- examples/example_ecommerce_app.py +189 -0
- examples/example_todo_app.py +159 -0
- p2m/__init__.py +31 -0
- p2m/cli.py +470 -0
- p2m/config.py +205 -0
- p2m/core/__init__.py +18 -0
- p2m/core/api.py +191 -0
- p2m/core/ast_walker.py +171 -0
- p2m/core/database.py +192 -0
- p2m/core/events.py +56 -0
- p2m/core/render_engine.py +597 -0
- p2m/core/runtime.py +128 -0
- p2m/core/state.py +51 -0
- p2m/core/validator.py +284 -0
- p2m/devserver/__init__.py +9 -0
- p2m/devserver/server.py +84 -0
- p2m/i18n/__init__.py +7 -0
- p2m/i18n/translator.py +74 -0
- p2m/imagine/__init__.py +35 -0
- p2m/imagine/agent.py +463 -0
- p2m/imagine/legacy.py +217 -0
- p2m/llm/__init__.py +20 -0
- p2m/llm/anthropic_provider.py +78 -0
- p2m/llm/base.py +42 -0
- p2m/llm/compatible_provider.py +120 -0
- p2m/llm/factory.py +72 -0
- p2m/llm/ollama_provider.py +89 -0
- p2m/llm/openai_provider.py +79 -0
- p2m/testing/__init__.py +41 -0
- p2m/ui/__init__.py +43 -0
- p2m/ui/components.py +301 -0
- python2mobile-1.0.1.dist-info/METADATA +238 -0
- python2mobile-1.0.1.dist-info/RECORD +50 -0
- python2mobile-1.0.1.dist-info/WHEEL +5 -0
- python2mobile-1.0.1.dist-info/entry_points.txt +2 -0
- python2mobile-1.0.1.dist-info/top_level.txt +3 -0
- tests/test_basic_engine.py +281 -0
- tests/test_build_generation.py +603 -0
- tests/test_build_test_gate.py +150 -0
- tests/test_carousel_modal.py +84 -0
- tests/test_config_system.py +272 -0
- tests/test_i18n.py +101 -0
- tests/test_ifood_app_integration.py +172 -0
- tests/test_imagine_cli.py +133 -0
- tests/test_imagine_command.py +341 -0
- tests/test_llm_providers.py +321 -0
- tests/test_new_apps_integration.py +588 -0
- tests/test_ollama_functional.py +329 -0
- tests/test_real_world_apps.py +228 -0
- tests/test_run_integration.py +776 -0
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Tests for the `p2m build` test-gate step.
|
|
3
|
+
|
|
4
|
+
Verifies that:
|
|
5
|
+
- build aborts when tests/ contains failing tests
|
|
6
|
+
- build proceeds when tests/ passes (or is absent)
|
|
7
|
+
- --skip-tests bypasses the gate
|
|
8
|
+
- --help exposes the new flag
|
|
9
|
+
|
|
10
|
+
All subprocess tests run in a minimal temp project dir to avoid touching
|
|
11
|
+
the framework's own test suite.
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import subprocess
|
|
15
|
+
import sys
|
|
16
|
+
import textwrap
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
import pytest
|
|
20
|
+
|
|
21
|
+
# Minimal but complete p2m.toml for the scaffolded temp projects. Caching is
# disabled so no build state leaks between test runs.
_TOML = textwrap.dedent("""\
[project]
name = "test_proj"
version = "0.1.0"
entry = "main.py"
[build]
target = ["android"]
generator = "flutter"
llm_provider = "openai"
llm_model = "gpt-4o"
output_dir = "./build"
cache = false
[devserver]
port = 3000
hot_reload = true
mobile_frame = true
[style]
system = "tailwind"
""")

# Smallest valid p2m app entry point: one Column holding one Text, plus main().
_MAIN_PY = textwrap.dedent("""\
from p2m.core import Render
from p2m.ui import Column, Text

def create_view():
    root = Column()
    root.add(Text("Hello"))
    return root.build()

def main():
    Render.execute(create_view)
""")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _make_project(tmp_path: Path, test_content: "str | None" = None) -> Path:
    """Scaffold a minimal p2m project inside *tmp_path*.

    Writes ``p2m.toml`` and ``main.py``. When *test_content* is given, also
    creates a ``tests/`` package containing a single ``test_app.py`` with
    that content, so the build's test gate has something to run.

    Returns *tmp_path* for chaining.
    """
    # NOTE: the annotation was `str = None`, which misstates the contract —
    # None is an accepted (and default) value, so the type is `str | None`.
    (tmp_path / "p2m.toml").write_text(_TOML)
    (tmp_path / "main.py").write_text(_MAIN_PY)
    if test_content is not None:
        tests_dir = tmp_path / "tests"
        tests_dir.mkdir()
        # An __init__.py makes tests/ an importable package for the gate.
        (tests_dir / "__init__.py").write_text("")
        (tests_dir / "test_app.py").write_text(test_content)
    return tmp_path
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def _build(tmp_path: Path, *extra_args):
|
|
67
|
+
"""Invoke `python -m p2m.cli build` in *tmp_path* and return CompletedProcess."""
|
|
68
|
+
return subprocess.run(
|
|
69
|
+
[sys.executable, "-m", "p2m.cli", "build", *extra_args],
|
|
70
|
+
capture_output=True, text=True, cwd=str(tmp_path),
|
|
71
|
+
)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
# ── Help flag ─────────────────────────────────────────────────────────────────
|
|
75
|
+
|
|
76
|
+
class TestBuildHelp:
    """The `p2m build --help` screen must advertise both skip flags."""

    @staticmethod
    def _help_proc():
        # Single place that shells out to the installed console script.
        return subprocess.run(
            ["p2m", "build", "--help"], capture_output=True, text=True
        )

    def test_skip_tests_flag_in_help(self):
        proc = self._help_proc()
        assert proc.returncode == 0
        assert "--skip-tests" in proc.stdout

    def test_skip_validation_still_present(self):
        proc = self._help_proc()
        assert "--skip-validation" in proc.stdout
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
# ── Test gate behaviour ───────────────────────────────────────────────────────
|
|
92
|
+
|
|
93
|
+
class TestBuildTestGate:
    """End-to-end behaviour of the unit-test gate in `p2m build`."""

    def test_failing_tests_abort_build(self, tmp_path):
        """A failing tests/ suite must abort the build with a non-zero exit."""
        _make_project(tmp_path, test_content="def test_fail():\n assert False\n")
        proc = _build(tmp_path, "--skip-validation", "--target", "flutter")
        assert proc.returncode != 0
        combined = proc.stdout + proc.stderr
        assert "Tests failed" in combined

    def test_failing_tests_message_mentions_skip_tests(self, tmp_path):
        """The abort message should point the user at --skip-tests."""
        _make_project(tmp_path, test_content="def test_fail():\n assert False\n")
        proc = _build(tmp_path, "--skip-validation", "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "--skip-tests" in combined

    def test_passing_tests_allow_build(self, tmp_path):
        """A green tests/ suite lets the build continue past the gate."""
        _make_project(tmp_path, test_content="def test_ok():\n assert True\n")
        proc = _build(tmp_path, "--skip-validation", "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "Tests failed" not in combined
        assert "All tests passed" in combined

    def test_no_tests_dir_warns_and_continues(self, tmp_path):
        """Without a tests/ directory the gate warns and does not block."""
        _make_project(tmp_path, test_content=None)  # no tests/ dir
        proc = _build(tmp_path, "--skip-validation", "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "No tests/" in combined or "skipping tests" in combined

    def test_skip_tests_bypasses_failing_tests(self, tmp_path):
        """--skip-tests bypasses the gate even when tests/ would fail."""
        _make_project(tmp_path, test_content="def test_fail():\n assert False\n")
        proc = _build(tmp_path, "--skip-validation", "--skip-tests", "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "Tests failed" not in combined
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
# ── Ordering ─────────────────────────────────────────────────────────────────
|
|
128
|
+
|
|
129
|
+
class TestBuildOrder:
    """Relative ordering of the validation step and the test gate."""

    def test_validation_runs_before_tests(self, tmp_path):
        """Syntax error in main.py → validation fires before tests run."""
        (tmp_path / "p2m.toml").write_text(_TOML)
        (tmp_path / "main.py").write_text("def create_view(\n")  # syntax error
        tests_dir = tmp_path / "tests"
        tests_dir.mkdir()
        (tests_dir / "__init__.py").write_text("")
        (tests_dir / "test_app.py").write_text("def test_fail():\n assert False\n")

        proc = _build(tmp_path, "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "Validation failed" in combined
        # The gate must not have started: its banner is absent.
        assert "Running unit tests" not in combined

    def test_tests_run_after_validation(self, tmp_path):
        """With passing tests, 'Running unit tests' appears after validation passes."""
        _make_project(tmp_path, test_content="def test_ok():\n assert True\n")
        proc = _build(tmp_path, "--skip-validation", "--target", "flutter")
        combined = proc.stdout + proc.stderr
        assert "Running unit tests" in combined
        assert "All tests passed" in combined
|
|
@@ -0,0 +1,84 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Tests for Carousel component and Modal display:none fix.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import pytest
|
|
6
|
+
from p2m.ui import Carousel, Modal, Text, Column
|
|
7
|
+
from p2m.core.render_engine import RenderEngine
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def _html(component):
    """Render *component*'s built tree to an HTML string via a fresh RenderEngine."""
    engine = RenderEngine()
    tree = component.build()
    return engine.render_content(tree)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TestCarousel:
    """Rendering behaviour of the Carousel component."""

    def test_carousel_build_type(self):
        """build() reports the component type as 'Carousel'."""
        c = Carousel()
        tree = c.build()
        assert tree["type"] == "Carousel"

    def test_carousel_in_tag_map(self):
        """Carousel maps to <div> in the render engine."""
        html = _html(Carousel(class_="px-4"))
        assert "<div" in html

    def test_carousel_renders_horizontal_scroll(self):
        """Horizontal scrolling comes from inline overflow-x:auto."""
        html = _html(Carousel())
        assert "overflow-x:auto" in html

    def test_carousel_has_flex_row(self):
        """Items are laid out in a row via flex-direction:row."""
        html = _html(Carousel())
        assert "flex-direction:row" in html

    def test_carousel_children_rendered(self):
        """Added children appear in the rendered HTML."""
        c = Carousel()
        c.add(Text("Item A"))
        c.add(Text("Item B"))
        html = _html(c)
        assert "Item A" in html
        assert "Item B" in html

    def test_carousel_class_applied(self):
        """Tailwind-style utility classes are translated to inline CSS."""
        html = _html(Carousel(class_="px-4 gap-2"))
        # px-4 → padding-left:1rem; padding-right:1rem;
        assert "padding-left:1rem" in html

    def test_carousel_webkit_scrolling(self):
        """iOS momentum scrolling via -webkit-overflow-scrolling:touch."""
        html = _html(Carousel())
        assert "-webkit-overflow-scrolling:touch" in html
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class TestModalDisplayNone:
    """Modal visibility must be driven by one merged style= attribute."""

    def test_modal_visible_no_display_none(self):
        """A visible modal must not carry display:none."""
        html = _html(Modal(visible=True, class_="fixed inset-0"))
        assert "display:none" not in html

    def test_modal_hidden_has_display_none(self):
        """A hidden modal is suppressed via display:none."""
        html = _html(Modal(visible=False))
        assert "display:none" in html

    def test_modal_no_duplicate_style_attr(self):
        """Regression: Modal(visible=False) must not produce two style= attrs."""
        html = _html(Modal(visible=False, class_="fixed inset-0"))
        assert html.count("style=") == 1

    def test_modal_visible_with_class_styles(self):
        """visible=True + class should still render class styles."""
        html = _html(Modal(visible=True, class_="fixed inset-0"))
        # fixed → position:fixed; inset-0 → top:0; right:0; bottom:0; left:0;
        assert "position:fixed" in html

    def test_modal_hidden_with_class_and_display_none(self):
        """visible=False merges class styles with display:none in one style=."""
        html = _html(Modal(visible=False, class_="fixed inset-0"))
        assert "position:fixed" in html
        assert "display:none" in html
        assert html.count("style=") == 1

    def test_modal_children_hidden(self):
        """Children are still in the DOM (just hidden via CSS)."""
        m = Modal(visible=False)
        m.add(Text("Secret content"))
        html = _html(m)
        assert "Secret content" in html
        assert "display:none" in html
|
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Test suite for P2M configuration system
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import sys
|
|
6
|
+
import tempfile
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
# Add project to path
|
|
10
|
+
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
11
|
+
|
|
12
|
+
from p2m.config import Config, ProjectConfig, BuildConfig, DevServerConfig, StyleConfig, LLMConfig
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def test_default_config():
    """Default Config() exposes the documented project/build/devserver values.

    Returns True on success, False on failure (standalone-runner protocol
    used by run_all_tests).
    """
    print("\n🧪 Test 1: Default Configuration")

    try:
        config = Config()

        assert config.project is not None
        assert config.project.name == "MyApp"
        assert config.project.version == "0.1.0"
        assert config.project.entry == "main.py"

        assert config.build is not None
        assert config.build.generator == "flutter"
        assert config.build.llm_provider == "openai"

        assert config.devserver is not None
        assert config.devserver.port == 3000
        # Identity check instead of non-idiomatic `== True` (PEP 8 / E712).
        assert config.devserver.hot_reload is True

        print("✅ Default config test passed")
        return True
    except Exception as e:
        print(f"❌ Default config test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def test_config_save_and_load():
    """Round-trip a Config through p2m.toml and verify the fields survive."""
    print("\n🧪 Test 2: Config Save and Load")

    try:
        with tempfile.TemporaryDirectory() as tmpdir:
            toml_file = Path(tmpdir) / "p2m.toml"

            # Write a customised config to disk.
            original = Config()
            original.project.name = "TestApp"
            original.project.version = "1.0.0"
            original.build.generator = "react-native"
            original.save(str(toml_file))

            assert toml_file.exists()

            # Read it back and compare field by field.
            reloaded = Config(str(toml_file))
            assert reloaded.project.name == "TestApp"
            assert reloaded.project.version == "1.0.0"
            assert reloaded.build.generator == "react-native"

        print("✅ Config save and load test passed")
        return True
    except Exception as exc:
        print(f"❌ Config save and load test failed: {exc}")
        import traceback
        traceback.print_exc()
        return False
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def test_llm_config():
    """Config.get_llm_config() reflects the default provider and model."""
    print("\n🧪 Test 3: LLM Configuration")

    try:
        cfg = Config()
        llm_cfg = cfg.get_llm_config()

        assert llm_cfg is not None
        assert llm_cfg.provider == "openai"
        assert llm_cfg.model == "gpt-4o"

        print("✅ LLM config test passed")
        return True
    except Exception as exc:
        print(f"❌ LLM config test failed: {exc}")
        import traceback
        traceback.print_exc()
        return False
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def test_project_config_dataclass():
    """ProjectConfig stores its constructor arguments verbatim."""
    print("\n🧪 Test 4: ProjectConfig Dataclass")

    try:
        proj = ProjectConfig(name="MyProject", version="0.2.0", entry="app.py")

        assert proj.name == "MyProject"
        assert proj.version == "0.2.0"
        assert proj.entry == "app.py"

        print("✅ ProjectConfig dataclass test passed")
        return True
    except Exception as exc:
        print(f"❌ ProjectConfig dataclass test failed: {exc}")
        import traceback
        traceback.print_exc()
        return False
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def test_build_config_dataclass():
    """BuildConfig keeps targets, generator, LLM settings and cache flag.

    Returns True on success, False on failure (standalone-runner protocol).
    """
    print("\n🧪 Test 5: BuildConfig Dataclass")

    try:
        build = BuildConfig(
            target=["android", "ios", "web"],
            generator="react-native",
            llm_provider="anthropic",
            llm_model="claude-3-opus-20240229",
            output_dir="./dist",
            cache=True
        )

        assert build.target == ["android", "ios", "web"]
        assert build.generator == "react-native"
        assert build.llm_provider == "anthropic"
        # Identity check instead of non-idiomatic `== True` (PEP 8 / E712).
        assert build.cache is True

        print("✅ BuildConfig dataclass test passed")
        return True
    except Exception as e:
        print(f"❌ BuildConfig dataclass test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def test_devserver_config_dataclass():
    """DevServerConfig keeps port and the two boolean toggles.

    Returns True on success, False on failure (standalone-runner protocol).
    """
    print("\n🧪 Test 6: DevServerConfig Dataclass")

    try:
        devserver = DevServerConfig(
            port=8000,
            hot_reload=True,
            mobile_frame=False
        )

        assert devserver.port == 8000
        # Identity checks instead of non-idiomatic `== True` / `== False`
        # (PEP 8 / E712).
        assert devserver.hot_reload is True
        assert devserver.mobile_frame is False

        print("✅ DevServerConfig dataclass test passed")
        return True
    except Exception as e:
        print(f"❌ DevServerConfig dataclass test failed: {e}")
        import traceback
        traceback.print_exc()
        return False
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def test_style_config_dataclass():
    """StyleConfig keeps the chosen styling system."""
    print("\n🧪 Test 7: StyleConfig Dataclass")

    try:
        style_cfg = StyleConfig(system="tailwind")
        assert style_cfg.system == "tailwind"

        print("✅ StyleConfig dataclass test passed")
        return True
    except Exception as exc:
        print(f"❌ StyleConfig dataclass test failed: {exc}")
        import traceback
        traceback.print_exc()
        return False
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def test_llm_config_dataclass():
    """LLMConfig keeps every provider field, including the optional header."""
    print("\n🧪 Test 8: LLMConfig Dataclass")

    try:
        cfg = LLMConfig(
            provider="openai-compatible",
            api_key="test-key",
            model="custom-model",
            base_url="https://api.example.com/v1",
            x_api_key="optional-header"
        )

        assert cfg.provider == "openai-compatible"
        assert cfg.api_key == "test-key"
        assert cfg.model == "custom-model"
        assert cfg.base_url == "https://api.example.com/v1"
        assert cfg.x_api_key == "optional-header"

        print("✅ LLMConfig dataclass test passed")
        return True
    except Exception as exc:
        print(f"❌ LLMConfig dataclass test failed: {exc}")
        import traceback
        traceback.print_exc()
        return False
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def run_all_tests():
    """Execute every config-system test and print a pass/fail summary.

    Returns True when all tests passed, False otherwise.
    """
    rule = "=" * 60

    def banner(title):
        # Shared three-line section header.
        print("\n" + rule)
        print(title)
        print(rule)

    banner("🧪 P2M Configuration System Test Suite")

    suite = [
        test_default_config,
        test_config_save_and_load,
        test_llm_config,
        test_project_config_dataclass,
        test_build_config_dataclass,
        test_devserver_config_dataclass,
        test_style_config_dataclass,
        test_llm_config_dataclass,
    ]

    outcomes = []
    for fn in suite:
        try:
            outcomes.append((fn.__name__, fn()))
        except Exception as err:
            outcomes.append((fn.__name__, False))
            print(f"❌ {fn.__name__} failed: {err}")

    banner("📊 Test Summary")

    passed = sum(1 for _, ok in outcomes if ok)
    total = len(outcomes)

    for name, ok in outcomes:
        status = "✅ PASS" if ok else "❌ FAIL"
        print(f"{status} - {name}")

    print(f"\n📈 Results: {passed}/{total} tests passed")

    if passed == total:
        print("\n🎉 All tests passed!")
    else:
        print(f"\n⚠️ {total - passed} test(s) failed")

    return passed == total
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
if __name__ == "__main__":
    # Standalone entry point: exit status 0 only when every test passed.
    success = run_all_tests()
    sys.exit(0 if success else 1)
|
tests/test_i18n.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Tests for p2m.i18n — configure, set_locale, get_locale, t.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import pytest
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@pytest.fixture()
def locale_dir(tmp_path):
    """Build a throwaway locales directory containing pt.json and en.json."""
    catalogs = {
        "pt": {"greeting": "Olá, {name}!", "key_only": "Valor", "shared": "Português"},
        "en": {"greeting": "Hello, {name}!", "key_only": "Value", "shared": "English"},
    }
    for code, messages in catalogs.items():
        (tmp_path / f"{code}.json").write_text(json.dumps(messages), encoding="utf-8")
    return tmp_path
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@pytest.fixture(autouse=True)
def reset_i18n():
    """Reset i18n module state between tests."""
    # The translator keeps module-level globals; clear them both before and
    # after every test so locale/catalog state never leaks between cases.
    import p2m.i18n.translator as _t
    _t._locale = "en"
    _t._translations = {}
    _t._locales_dir = None
    yield
    _t._locale = "en"
    _t._translations = {}
    _t._locales_dir = None
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class TestConfigure:
    """configure() loads the catalog directory and applies the default locale."""

    def test_configure_and_translate(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.t("key_only") == "Valor"

    def test_configure_sets_locale(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.get_locale() == "pt"

    def test_configure_default_en(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="en")
        assert i18n.t("key_only") == "Value"
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class TestFormatKwargs:
    """t() interpolates str.format-style keyword arguments."""

    def test_format_kwargs(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.t("greeting", name="João") == "Olá, João!"

    def test_format_kwargs_en(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        i18n.set_locale("en")
        assert i18n.t("greeting", name="Alice") == "Hello, Alice!"
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class TestMissingKey:
    """Unknown keys and missing locale files fall back to returning the key."""

    def test_missing_key_returns_key(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.t("nonexistent_key") == "nonexistent_key"

    def test_missing_locale_file_returns_key(self, tmp_path):
        from p2m import i18n
        i18n.configure(str(tmp_path), default_locale="xx")
        assert i18n.t("some_key") == "some_key"
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class TestSetLocale:
    """set_locale() switches the active catalog at runtime."""

    def test_set_locale_switches_language(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.t("shared") == "Português"
        i18n.set_locale("en")
        assert i18n.t("shared") == "English"

    def test_set_locale_back(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="en")
        i18n.set_locale("pt")
        i18n.set_locale("en")
        assert i18n.t("shared") == "English"
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
class TestGetLocale:
    """get_locale() reports the currently active locale code."""

    def test_get_locale_default(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        assert i18n.get_locale() == "pt"

    def test_get_locale_after_switch(self, locale_dir):
        from p2m import i18n
        i18n.configure(str(locale_dir), default_locale="pt")
        i18n.set_locale("en")
        assert i18n.get_locale() == "en"
|