diagram-to-iac 1.0.2-py3-none-any.whl → 1.0.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
diagram_to_iac/tools/llm_utils/router.py

@@ -9,6 +9,10 @@ from .base_driver import BaseLLMDriver
  from .openai_driver import OpenAIDriver
  from .anthropic_driver import AnthropicDriver
  from .gemini_driver import GoogleDriver
+ from .grok_driver import GrokDriver
+
+ # Import ConfigLoader for centralized configuration
+ from ...core.config_loader import ConfigLoader

  try:
  from langchain_core.messages import HumanMessage
@@ -28,11 +32,15 @@ class LLMRouter:
  self.config = self._load_model_policy(config_path)
  self._provider_cache = {}

- # Initialize drivers
+ # Initialize ConfigLoader for accessing centralized configuration
+ self._config_loader = ConfigLoader()
+
+ # Initialize drivers (including new Grok driver)
  self._drivers = {
  "openai": OpenAIDriver(),
  "anthropic": AnthropicDriver(),
- "google": GoogleDriver()
+ "google": GoogleDriver(),
+ "grok": GrokDriver()
  }

  def _load_model_policy(self, config_path: Optional[str] = None) -> Dict[str, Any]:
@@ -77,6 +85,8 @@ class LLMRouter:
  return 'anthropic'
  elif any(pattern in model_lower for pattern in ['gemini', 'google']):
  return 'google'
+ elif any(pattern in model_lower for pattern in ['grok', 'x.ai']):
+ return 'grok'
  else:
  return 'openai' # Default fallback

@@ -85,7 +95,8 @@ class LLMRouter:
  key_mapping = {
  'openai': 'OPENAI_API_KEY',
  'anthropic': 'ANTHROPIC_API_KEY',
- 'google': 'GOOGLE_API_KEY'
+ 'google': 'GOOGLE_API_KEY',
+ 'grok': 'GROK_API_KEY'
  }

  required_key = key_mapping.get(provider)
@@ -93,23 +104,154 @@ class LLMRouter:
  return False
  return True

+ def _get_available_providers(self) -> List[str]:
+ """Get list of providers with available API keys."""
+ available = []
+ for provider in self._drivers.keys():
+ if self._check_api_key(provider):
+ available.append(provider)
+ return available
+
+ def _get_provider_selection_config(self) -> Dict[str, Any]:
+ """Get provider selection configuration from centralized config."""
+ try:
+ app_config = self._config_loader.get_config()
+ return app_config.get('ai', {}).get('provider_selection', {})
+ except Exception:
+ # Fallback to default behavior if config loading fails
+ return {
+ 'strategy': 'auto',
+ 'preferred_order': ['openai', 'anthropic', 'google', 'grok'],
+ 'fallback': {'enabled': True, 'retry_attempts': 2}
+ }
+
+ def _select_best_provider(self, requested_provider: Optional[str] = None,
+ requested_model: Optional[str] = None) -> tuple[str, str]:
+ """
+ Intelligently select the best available provider and model.
+
+ Args:
+ requested_provider: Explicitly requested provider (takes precedence)
+ requested_model: Explicitly requested model (used for provider detection)
+
+ Returns:
+ tuple: (selected_provider, selected_model)
+ """
+ config = self._get_provider_selection_config()
+ strategy = config.get('strategy', 'auto')
+
+ # If provider explicitly requested, try that first
+ if requested_provider and self._check_api_key(requested_provider):
+ fallback_model = self._get_fallback_model_for_provider(requested_provider)
+ return requested_provider, requested_model or fallback_model
+
+ # If model specified, detect its provider and check availability
+ if requested_model:
+ detected_provider = self._detect_provider(requested_model)
+ if self._check_api_key(detected_provider):
+ return detected_provider, requested_model
+
+ # Get available providers
+ available_providers = self._get_available_providers()
+ if not available_providers:
+ raise ValueError(
+ "No AI providers available. Please set at least one API key: "
+ "OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY, or GROK_API_KEY"
+ )
+
+ # Apply selection strategy
+ if strategy == 'auto' or strategy == 'prefer_cost' or strategy == 'prefer_performance':
+ preferred_order = config.get('preferred_order', ['openai', 'anthropic', 'google', 'grok'])
+
+ # Filter to only available providers, maintaining preference order
+ for provider in preferred_order:
+ if provider in available_providers:
+ model = self._get_optimal_model_for_provider(provider, strategy, config)
+ return provider, model
+
+ # Fallback to first available provider if strategy selection fails
+ first_provider = available_providers[0]
+ model = self._get_fallback_model_for_provider(first_provider)
+ return first_provider, model
+
+ def _get_optimal_model_for_provider(self, provider: str, strategy: str, config: Dict[str, Any]) -> str:
+ """Get the optimal model for a provider based on selection strategy."""
+ if strategy == 'prefer_cost':
+ cost_models = config.get('cost_optimization', {}).get('prefer_models', [])
+ for model in cost_models:
+ if self._detect_provider(model) == provider:
+ return model
+ elif strategy == 'prefer_performance':
+ perf_models = config.get('performance_optimization', {}).get('prefer_models', [])
+ for model in perf_models:
+ if self._detect_provider(model) == provider:
+ return model
+
+ # Fallback to provider's default model
+ return self._get_fallback_model_for_provider(provider)
+
+ def _get_fallback_model_for_provider(self, provider: str) -> str:
+ """Get default/fallback model for a specific provider."""
+ fallback_models = {
+ 'openai': 'gpt-4o-mini',
+ 'anthropic': 'claude-3-haiku',
+ 'google': 'gemini-pro',
+ 'grok': 'grok-1.5'
+ }
+ return fallback_models.get(provider, 'gpt-4o-mini')
+
  def get_llm_for_agent(self, agent_name: str) -> BaseChatModel:
  """
- Get an LLM instance configured for a specific agent.
- Uses agent-specific configuration from model_policy.yaml.
+ Get an LLM instance configured for a specific agent with intelligent provider selection.
+ Uses agent-specific configuration from model_policy.yaml and falls back to available providers.
  """
  config = self._resolve_model_config(agent_name)

- # Check if API key is available for the provider
- if not self._check_api_key(config['provider']):
- raise ValueError(f"API key not found for provider: {config['provider']}")
-
- return self._create_llm_instance(config)
+ # Try intelligent provider selection with fallback
+ try:
+ # Check if configured provider is available
+ if self._check_api_key(config['provider']):
+ return self._create_llm_instance(config)
+ else:
+ # Provider not available, use intelligent selection
+ print(f"Warning: Configured provider '{config['provider']}' not available for agent '{agent_name}'. Using intelligent fallback.")
+
+ selected_provider, selected_model = self._select_best_provider(
+ requested_model=config.get('model')
+ )
+
+ # Update config with selected provider and model
+ fallback_config = config.copy()
+ fallback_config['provider'] = selected_provider
+ fallback_config['model'] = selected_model
+
+ return self._create_llm_instance(fallback_config)
+
+ except Exception as e:
+ # Last resort fallback
+ available_providers = self._get_available_providers()
+ if not available_providers:
+ raise ValueError(
+ f"No AI providers available for agent '{agent_name}'. "
+ f"Please set at least one API key: OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY, or GROK_API_KEY"
+ ) from e
+
+ # Use first available provider with its default model
+ fallback_provider = available_providers[0]
+ fallback_model = self._get_fallback_model_for_provider(fallback_provider)
+
+ fallback_config = config.copy()
+ fallback_config['provider'] = fallback_provider
+ fallback_config['model'] = fallback_model
+
+ print(f"Warning: Fallback to {fallback_provider}/{fallback_model} for agent '{agent_name}' due to error: {e}")
+ return self._create_llm_instance(fallback_config)

  def get_llm(self, model_name: str = None, temperature: float = None, agent_name: str = None) -> BaseChatModel:
  """
- Initializes and returns an LLM instance using model_policy.yaml configuration.
+ Initializes and returns an LLM instance with intelligent provider selection.
  Uses provided parameters or falls back to agent-specific or global defaults.
+ Automatically selects best available provider if configured provider is unavailable.
  """
  # If agent_name is provided but other params are None, use agent-specific config
  if agent_name and model_name is None and temperature is None:
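The helpers added in the hunk above read an 'ai' -> 'provider_selection' block from the centralized configuration exposed by ConfigLoader. That configuration file itself is not part of this diff; the Python sketch below only mirrors the keys the new code looks up ('strategy', 'preferred_order', 'fallback', and the 'prefer_models' lists), and its concrete values are illustrative assumptions, not taken from the package.

# Hypothetical shape of the 'ai' -> 'provider_selection' settings consumed by
# _get_provider_selection_config() and _select_best_provider(); key names come
# from the code above, the values below are invented for illustration only.
provider_selection = {
    "strategy": "auto",  # the code also recognizes "prefer_cost" and "prefer_performance"
    "preferred_order": ["openai", "anthropic", "google", "grok"],
    "fallback": {"enabled": True, "retry_attempts": 2},
    # consulted only when strategy is "prefer_cost" / "prefer_performance"
    "cost_optimization": {"prefer_models": ["gpt-4o-mini", "claude-3-haiku"]},
    "performance_optimization": {"prefer_models": ["gpt-4o", "gemini-pro"]},
}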
@@ -120,26 +262,53 @@ class LLMRouter:
  model_name, temperature, agent_name
  )

- # Detect provider for the model
- provider = self._detect_provider(effective_model_name)
-
- # Check API key availability
- if not self._check_api_key(provider):
- # Fallback to default provider if API key is missing
+ # Use intelligent provider selection
+ try:
+ # Try to detect provider for the requested model
+ initial_provider = self._detect_provider(effective_model_name)
+
+ # Use intelligent selection to find best available option
+ selected_provider, selected_model = self._select_best_provider(
+ requested_provider=initial_provider if self._check_api_key(initial_provider) else None,
+ requested_model=effective_model_name
+ )
+
+ # Create configuration dict
+ config = {
+ 'model': selected_model,
+ 'temperature': effective_temperature,
+ 'provider': selected_provider
+ }
+
+ # Create and return the appropriate LLM instance
+ return self._create_llm_instance(config)
+
+ except Exception as e:
+ # Last resort: use fallback configuration from policy
+ print(f"Warning: Intelligent provider selection failed: {e}. Using fallback configuration.")
+
  fallback_config = self.config.get('default', {})
- effective_model_name = fallback_config.get('model', 'gpt-4o-mini')
- effective_temperature = fallback_config.get('temperature', 0.0)
- provider = fallback_config.get('provider', 'openai')
-
- # Create configuration dict
- config = {
- 'model': effective_model_name,
- 'temperature': effective_temperature,
- 'provider': provider
- }
-
- # Create and return the appropriate LLM instance
- return self._create_llm_instance(config)
+ fallback_provider = fallback_config.get('provider', 'openai')
+
+ # If default provider is not available, try any available provider
+ if not self._check_api_key(fallback_provider):
+ available_providers = self._get_available_providers()
+ if available_providers:
+ fallback_provider = available_providers[0]
+ effective_model_name = self._get_fallback_model_for_provider(fallback_provider)
+ else:
+ raise ValueError(
+ "No AI providers available. Please set at least one API key: "
+ "OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY, or GROK_API_KEY"
+ ) from e
+
+ config = {
+ 'model': effective_model_name,
+ 'temperature': effective_temperature,
+ 'provider': fallback_provider
+ }
+
+ return self._create_llm_instance(config)

  def _resolve_model_config(self, agent_name: str) -> Dict[str, Any]:
  """
@@ -230,6 +399,7 @@ class LLMRouter:
  info = {}
  for provider, driver in self._drivers.items():
  info[provider] = {
+ "available": self._check_api_key(provider),
  "models": driver.get_supported_models(),
  "capabilities": {
  model: driver.get_model_capabilities(model)
@@ -237,6 +407,26 @@ class LLMRouter:
  }
  }
  return info
+
+ def get_provider_status(self) -> Dict[str, Any]:
+ """Get status information about all providers and intelligent selection."""
+ available_providers = self._get_available_providers()
+ config = self._get_provider_selection_config()
+
+ return {
+ "available_providers": available_providers,
+ "total_providers": len(self._drivers),
+ "selection_strategy": config.get('strategy', 'auto'),
+ "preferred_order": config.get('preferred_order', []),
+ "provider_details": {
+ provider: {
+ "available": self._check_api_key(provider),
+ "api_key_env": f"{provider.upper()}_API_KEY",
+ "default_model": self._get_fallback_model_for_provider(provider)
+ }
+ for provider in self._drivers.keys()
+ }
+ }


  # Create global router instance
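Taken together, these router changes mean a missing API key no longer hard-fails: the router reports provider availability and falls back to the best available provider. Below is a minimal usage sketch, assuming the import path shown in the RECORD (diagram_to_iac.tools.llm_utils.router), that LLMRouter() can be constructed with its defaults, and that at least one provider key is set in the environment; it is illustrative and not taken from the package.

# Minimal sketch (assumptions noted above): inspect provider availability and
# request an LLM; if the requested model's provider has no API key, the new
# selection logic falls back to an available provider instead of raising.
import os

from diagram_to_iac.tools.llm_utils.router import LLMRouter

# A real key is needed for actual requests; the placeholder only demonstrates
# that one provider key is enough for the router to consider itself usable.
os.environ.setdefault("OPENAI_API_KEY", "sk-placeholder")

router = LLMRouter()

status = router.get_provider_status()   # new in 1.0.4
print(status["available_providers"])    # e.g. ['openai']
print(status["selection_strategy"])     # 'auto' unless overridden in config

# 'grok-1.5' maps to the 'grok' provider; without GROK_API_KEY the router now
# picks the best available provider rather than raising a ValueError.
llm = router.get_llm(model_name="grok-1.5", temperature=0.0)
print(type(llm).__name__)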
diagram_to_iac-1.0.2.dist-info/METADATA → diagram_to_iac-1.0.4.dist-info/METADATA

@@ -1,21 +1,22 @@
  Metadata-Version: 2.4
  Name: diagram-to-iac
- Version: 1.0.2
+ Version: 1.0.4
  Summary: Convert architecture diagrams into IaC modules
  Author-email: vindpro <admin@vindpro.com>
  Description-Content-Type: text/markdown
- Requires-Dist: anthropic==0.54.0
+ Requires-Dist: anthropic==0.55.0
  Requires-Dist: google_api_python_client==2.173.0
  Requires-Dist: langchain_anthropic==0.3.15
  Requires-Dist: langchain-core<1.0.0,>=0.3.62
  Requires-Dist: langchain_google_genai==2.1.5
- Requires-Dist: langchain_openai==0.3.24
+ Requires-Dist: langchain_openai==0.3.25
  Requires-Dist: langgraph==0.4.8
- Requires-Dist: openai==1.88.0
+ Requires-Dist: openai==1.91.0
  Requires-Dist: protobuf>=5.27.0
  Requires-Dist: pydantic==2.11.7
  Requires-Dist: PyYAML==6.0.2
  Requires-Dist: Requests==2.32.4
+ Requires-Dist: typing_extensions==4.14.0
  Requires-Dist: GitPython<4.0,>=3.1

  # diagram-to-iac
diagram_to_iac-1.0.2.dist-info/RECORD → diagram_to_iac-1.0.4.dist-info/RECORD

@@ -3,13 +3,13 @@ diagram_to_iac/cli.py,sha256=uumG1frF42eCkdLIZxyxQB1x6lDwtG-qKL4vcHnLLXY,400
  diagram_to_iac/r2d.py,sha256=I7XSuUtu8TdvAhK4tCMLc3U_3ZtP7DJGfq168aeI3Mk,13208
  diagram_to_iac/actions/__init__.py,sha256=P1CjjY4FYUA0Tcx8FQNLYYSI9fhv8yKd_TmRGtmhW50,229
  diagram_to_iac/actions/git_entry.py,sha256=mhY6gYquUPVvyvnTC2S90z_uXEe1asqWLoi1989aB_Q,5403
- diagram_to_iac/actions/supervisor_entry.py,sha256=vWhFn-4M0jQnrUQUSCb0I_YNxzGsKiBV0QNzkIkQfNE,3586
+ diagram_to_iac/actions/supervisor_entry.py,sha256=Nm6YIBbEzpL1huza3a0ThA0hdimptRd5rndLUsOMSH4,9282
  diagram_to_iac/actions/terraform_agent_entry.py,sha256=gKkX4fIRdBDZpwPQO_v2t1SSO0SQuzaxQ0StKegGK8U,6852
  diagram_to_iac/agents/__init__.py,sha256=GHInKSPq56ZPYSKsyti6_wk82dhn2hOqfxNHkZZOj_0,735
  diagram_to_iac/agents/demonstrator_langgraph/__init__.py,sha256=nghMYMEEarfkR0V6AH1fDCV-mXBLnmFP2sO4OPxJ4cI,371
  diagram_to_iac/agents/demonstrator_langgraph/agent.py,sha256=9ZH2H5iAB2DfMhCr-OzImVZlwoeXIP8RKl6_VG47W2I,35349
  diagram_to_iac/agents/git_langgraph/__init__.py,sha256=x6nCnOu-Vcl-qVqW1swhdaE_sQqUSvEUUtWk4eePBUo,295
- diagram_to_iac/agents/git_langgraph/agent.py,sha256=bG3GHub62Rm8q9XrkLMN4kLBH6BTiLYrxQ9heNGsoCY,47640
+ diagram_to_iac/agents/git_langgraph/agent.py,sha256=_lGwyTTgTsS4ZtaQLCceGJa5zeLvux_Hqda1-fqAgXg,49288
  diagram_to_iac/agents/git_langgraph/pr.py,sha256=qXopN5XAF1DIac5vbH-QasihkuAiWmC9JY8pLYlm-sQ,8601
  diagram_to_iac/agents/hello_langgraph/__init__.py,sha256=lviuDAPJezmpaXR-H7JxfIT9wvg1xO2t6JLyeKSSx0Y,266
  diagram_to_iac/agents/hello_langgraph/agent.py,sha256=R49yfFGxqMPBBu36ztDH9lBE_-s7VFyRB33gnNSXxek,33777
@@ -22,8 +22,9 @@ diagram_to_iac/agents/shell_langgraph/__init__.py,sha256=teAx1L87McCj9_24NUdET3O
  diagram_to_iac/agents/shell_langgraph/agent.py,sha256=dZWzjVQ9oX_BtNHQ1Zrzy2oQpuY1e5BS51-SGcWpoSw,4341
  diagram_to_iac/agents/shell_langgraph/detector.py,sha256=wLw0uDP_V2m1z6SRk7QNCzoUMYCfXwu3DNg8EWue9yk,1493
  diagram_to_iac/agents/supervisor_langgraph/__init__.py,sha256=iLN60d20cqoXOLyuLvJkiwrzapE84em222Tnyndq2dc,385
- diagram_to_iac/agents/supervisor_langgraph/agent.py,sha256=NdqYFyVw1bc4UK-IQOmcc1ZEIQVVakg1z5OC_uN_kqc,84790
+ diagram_to_iac/agents/supervisor_langgraph/agent.py,sha256=1qfgYSQQx1JNgN9ORFCl35NrklRbN7ZcUdP5AbGBsGo,101342
  diagram_to_iac/agents/supervisor_langgraph/demonstrator.py,sha256=OT-bElEyLZBedzcc5DtZnp1yhjYVjx4jRzt52f5SoSU,803
+ diagram_to_iac/agents/supervisor_langgraph/github_listener.py,sha256=Ko9dOnS9CUqbjTogEyhEmVhkiaW8OiwLzX6k18lSrac,16377
  diagram_to_iac/agents/supervisor_langgraph/guards.py,sha256=XzBgjXnwbOgLkGm7AqXX4tQdGBerq_6pKvduKPqIwF0,720
  diagram_to_iac/agents/supervisor_langgraph/pat_loop.py,sha256=feY8ZPGQxqkUuHOMSdpilGDUjOvaky8xImLuVe98hrw,1566
  diagram_to_iac/agents/supervisor_langgraph/router.py,sha256=7hZXXEmtvG__w7UAaOhoPaHdubUv-oMKbQdMTMXk-qY,276
@@ -37,7 +38,9 @@ diagram_to_iac/core/enhanced_memory.py,sha256=Ga5wtI45zEcbwL_F1YqJaXBRpWK0iJPa69
  diagram_to_iac/core/errors.py,sha256=gZwZocnIcBlS4YccIBdjG8XztRCtMe4Cu6KWxLzebDM,115
  diagram_to_iac/core/issue_tracker.py,sha256=0eo289hn94yCoFCkLaYiDOIJBjk33i2dk6eLeYe_9YE,1659
  diagram_to_iac/core/memory.py,sha256=P9URX8m2nab65ZPF36uf6Z9hEXQGXrjrXa8dPXG7pm8,4444
+ diagram_to_iac/core/registry.py,sha256=AM2fv9lzrNvFfkyt7VMxQ5SWIOWhdBu4_3Aaspdokj8,25758
  diagram_to_iac/services/__init__.py,sha256=I5R8g7vYX4tCldRf1Jf9vEhm5mylc-MfFicqLnY6a3E,238
+ diagram_to_iac/services/commenter.py,sha256=iXvHXOeih64FbE34PuGPk6fhI4RmC62ZSVtFwmMqiOA,22146
  diagram_to_iac/services/observability.py,sha256=yxbnjMc4TO1SM8RZZMHf2E8uVOLpxFhiTjsTkymDi6Y,1856
  diagram_to_iac/services/step_summary.py,sha256=g3MuMZ51IDubI0oWcF7qMvseNgDS6D90AsKK_1s5xDQ,2808
  diagram_to_iac/tools/__init__.py,sha256=F2pcKhoPP5KDeQIGcqKXD1J30KFKc9qxMw1jxzrs9qY,434
@@ -49,17 +52,18 @@ diagram_to_iac/tools/git/git.py,sha256=0NYz9NqQWf-5YTX7R3nBPyLmzvih-jhd0gYY8KZDm
  diagram_to_iac/tools/hello/__init__.py,sha256=f6GpkiQxvuGaRMm34yQilGACxUI4c5edJQTDjZtskjQ,891
  diagram_to_iac/tools/hello/cal_utils.py,sha256=B-0iOJHNL1IgYPlWUdrAwEf1r9LUKBAnGyx1xQz05ZE,1507
  diagram_to_iac/tools/hello/text_utils.py,sha256=ZaVQYw6GVqaq9EDTQfG3gTAudeN8CuFUUb7IETZhUCA,3952
- diagram_to_iac/tools/llm_utils/__init__.py,sha256=IQ6cQprJtV4j5s_RVnt94rCGXfuvZ0PiTH6Y0gK242o,440
+ diagram_to_iac/tools/llm_utils/__init__.py,sha256=xkSbnB3_eqFX7UDZPl9s4blh5IKPzRh52sHdwmJ4VMM,494
  diagram_to_iac/tools/llm_utils/anthropic_driver.py,sha256=tb8HVGB6Ng9ZwImRJtSy2X0965ZE3Vm5g8HbMfcLyBY,3674
  diagram_to_iac/tools/llm_utils/base_driver.py,sha256=sDUxk6_iNn3WU_HyRz2hW3YGTn8_7aucqEUnGTj2PeU,2503
  diagram_to_iac/tools/llm_utils/gemini_driver.py,sha256=VO1mJ3o10oSFo5hTBs6h8TJsXyAuah4FRr6Ua-9aNYc,3794
+ diagram_to_iac/tools/llm_utils/grok_driver.py,sha256=hcq4m6ZEgjVsLXaaGlW5SWHEqyjY4KUDy88xSZFUa6Y,2955
  diagram_to_iac/tools/llm_utils/openai_driver.py,sha256=ZqzXEYEutwqRw3qWx-GH85Mj2afxK4NlhCOMq_MabqQ,3962
- diagram_to_iac/tools/llm_utils/router.py,sha256=WHGanstQjUlo2SmDFKGAL6xtIb3xXWlgZ5CIzHWN8I8,12906
+ diagram_to_iac/tools/llm_utils/router.py,sha256=hl-y1CCvRoBWSpKpkDI_SSyi9YIT2ZA6y6awn7_ErkM,22117
  diagram_to_iac/tools/shell/__init__.py,sha256=6UZjBcnbPabA6Qy7t4j-dCi3S2sE6sB2bTE9PIL98bA,292
  diagram_to_iac/tools/shell/shell.py,sha256=ZOJ7Vo3l_R2Gm6Ml2FL0RX__-C_JOsUrLJVvBMwAy9E,21122
  diagram_to_iac/tools/tf/terraform.py,sha256=j1boWRo6JKpNGf1OwnWoWboO0gMYTizCOHDSxozoFZw,37343
- diagram_to_iac-1.0.2.dist-info/METADATA,sha256=t9VPmIiCYqB66upyuPB5hpx6h5zOxch96bdLClkndUw,9019
- diagram_to_iac-1.0.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- diagram_to_iac-1.0.2.dist-info/entry_points.txt,sha256=DfGCnmgWWGHtQpqU8VqcUWs5k_be-bfO67z1vOuTitA,277
- diagram_to_iac-1.0.2.dist-info/top_level.txt,sha256=k1cV0YODiCUU46qlmbQaquMcbMXhNm05NZLxsinDUBA,15
- diagram_to_iac-1.0.2.dist-info/RECORD,,
+ diagram_to_iac-1.0.4.dist-info/METADATA,sha256=T5CbWJpZcJzrnRlEPH47EtIxumc12u0IBcrB4yLGCCc,9060
+ diagram_to_iac-1.0.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ diagram_to_iac-1.0.4.dist-info/entry_points.txt,sha256=DfGCnmgWWGHtQpqU8VqcUWs5k_be-bfO67z1vOuTitA,277
+ diagram_to_iac-1.0.4.dist-info/top_level.txt,sha256=k1cV0YODiCUU46qlmbQaquMcbMXhNm05NZLxsinDUBA,15
+ diagram_to_iac-1.0.4.dist-info/RECORD,,