ag2 0.6.0b1__py3-none-any.whl → 0.6.1__py3-none-any.whl
This diff reflects the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
Potentially problematic release: this version of ag2 might be problematic.
- {ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/METADATA +55 -55
- ag2-0.6.1.dist-info/RECORD +6 -0
- ag2-0.6.0b1.dist-info/RECORD +0 -6
- {ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/LICENSE +0 -0
- {ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/NOTICE.md +0 -0
- {ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/WHEEL +0 -0
- {ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ag2
-Version: 0.6.0b1
+Version: 0.6.1
 Summary: Alias package for pyautogen
 Home-page: https://github.com/ag2ai/ag2
 Author: Chi Wang & Qingyun Wu
@@ -14,77 +14,77 @@ Requires-Python: >=3.9,<3.14
 Description-Content-Type: text/markdown
 License-File: LICENSE
 License-File: NOTICE.md
-Requires-Dist: pyautogen (==0.6.0b1)
+Requires-Dist: pyautogen (==0.6.1)
 Provides-Extra: anthropic
-Requires-Dist: pyautogen[anthropic] (==0.6.0b1) ; extra == 'anthropic'
+Requires-Dist: pyautogen[anthropic] (==0.6.1) ; extra == 'anthropic'
 Provides-Extra: autobuild
-Requires-Dist: pyautogen[autobuild] (==0.6.0b1) ; extra == 'autobuild'
+Requires-Dist: pyautogen[autobuild] (==0.6.1) ; extra == 'autobuild'
 Provides-Extra: bedrock
-Requires-Dist: pyautogen[bedrock] (==0.6.0b1) ; extra == 'bedrock'
+Requires-Dist: pyautogen[bedrock] (==0.6.1) ; extra == 'bedrock'
 Provides-Extra: blendsearch
-Requires-Dist: pyautogen[blendsearch] (==0.6.0b1) ; extra == 'blendsearch'
+Requires-Dist: pyautogen[blendsearch] (==0.6.1) ; extra == 'blendsearch'
 Provides-Extra: captainagent
-Requires-Dist: pyautogen[captainagent] (==0.6.0b1) ; extra == 'captainagent'
+Requires-Dist: pyautogen[captainagent] (==0.6.1) ; extra == 'captainagent'
 Provides-Extra: cerebras
-Requires-Dist: pyautogen[cerebras] (==0.6.0b1) ; extra == 'cerebras'
+Requires-Dist: pyautogen[cerebras] (==0.6.1) ; extra == 'cerebras'
 Provides-Extra: cohere
-Requires-Dist: pyautogen[cohere] (==0.6.0b1) ; extra == 'cohere'
+Requires-Dist: pyautogen[cohere] (==0.6.1) ; extra == 'cohere'
 Provides-Extra: cosmosdb
-Requires-Dist: pyautogen[cosmosdb] (==0.6.0b1) ; extra == 'cosmosdb'
+Requires-Dist: pyautogen[cosmosdb] (==0.6.1) ; extra == 'cosmosdb'
 Provides-Extra: gemini
-Requires-Dist: pyautogen[gemini] (==0.6.0b1) ; extra == 'gemini'
+Requires-Dist: pyautogen[gemini] (==0.6.1) ; extra == 'gemini'
 Provides-Extra: graph
-Requires-Dist: pyautogen[graph] (==0.6.0b1) ; extra == 'graph'
+Requires-Dist: pyautogen[graph] (==0.6.1) ; extra == 'graph'
 Provides-Extra: graph-rag-falkor-db
-Requires-Dist: pyautogen[graph-rag-falkor-db] (==0.6.0b1) ; extra == 'graph-rag-falkor-db'
+Requires-Dist: pyautogen[graph-rag-falkor-db] (==0.6.1) ; extra == 'graph-rag-falkor-db'
 Provides-Extra: groq
-Requires-Dist: pyautogen[groq] (==0.6.0b1) ; extra == 'groq'
+Requires-Dist: pyautogen[groq] (==0.6.1) ; extra == 'groq'
 Provides-Extra: interop
-Requires-Dist: pyautogen[interop] (==0.6.0b1) ; extra == 'interop'
+Requires-Dist: pyautogen[interop] (==0.6.1) ; extra == 'interop'
 Provides-Extra: interop-crewai
-Requires-Dist: pyautogen[interop-crewai] (==0.6.0b1) ; extra == 'interop-crewai'
+Requires-Dist: pyautogen[interop-crewai] (==0.6.1) ; extra == 'interop-crewai'
 Provides-Extra: interop-langchain
-Requires-Dist: pyautogen[interop-langchain] (==0.6.0b1) ; extra == 'interop-langchain'
+Requires-Dist: pyautogen[interop-langchain] (==0.6.1) ; extra == 'interop-langchain'
 Provides-Extra: interop-pydantic-ai
-Requires-Dist: pyautogen[interop-pydantic-ai] (==0.6.0b1) ; extra == 'interop-pydantic-ai'
+Requires-Dist: pyautogen[interop-pydantic-ai] (==0.6.1) ; extra == 'interop-pydantic-ai'
 Provides-Extra: jupyter-executor
-Requires-Dist: pyautogen[jupyter-executor] (==0.6.0b1) ; extra == 'jupyter-executor'
+Requires-Dist: pyautogen[jupyter-executor] (==0.6.1) ; extra == 'jupyter-executor'
 Provides-Extra: lmm
-Requires-Dist: pyautogen[lmm] (==0.6.0b1) ; extra == 'lmm'
+Requires-Dist: pyautogen[lmm] (==0.6.1) ; extra == 'lmm'
 Provides-Extra: long-context
-Requires-Dist: pyautogen[long-context] (==0.6.0b1) ; extra == 'long-context'
+Requires-Dist: pyautogen[long-context] (==0.6.1) ; extra == 'long-context'
 Provides-Extra: mathchat
-Requires-Dist: pyautogen[mathchat] (==0.6.0b1) ; extra == 'mathchat'
+Requires-Dist: pyautogen[mathchat] (==0.6.1) ; extra == 'mathchat'
 Provides-Extra: mistral
-Requires-Dist: pyautogen[mistral] (==0.6.0b1) ; extra == 'mistral'
+Requires-Dist: pyautogen[mistral] (==0.6.1) ; extra == 'mistral'
 Provides-Extra: neo4j
-Requires-Dist: pyautogen[neo4j] (==0.6.0b1) ; extra == 'neo4j'
+Requires-Dist: pyautogen[neo4j] (==0.6.1) ; extra == 'neo4j'
 Provides-Extra: ollama
-Requires-Dist: pyautogen[ollama] (==0.6.0b1) ; extra == 'ollama'
+Requires-Dist: pyautogen[ollama] (==0.6.1) ; extra == 'ollama'
 Provides-Extra: redis
-Requires-Dist: pyautogen[redis] (==0.6.0b1) ; extra == 'redis'
+Requires-Dist: pyautogen[redis] (==0.6.1) ; extra == 'redis'
 Provides-Extra: retrievechat
-Requires-Dist: pyautogen[retrievechat] (==0.6.0b1) ; extra == 'retrievechat'
+Requires-Dist: pyautogen[retrievechat] (==0.6.1) ; extra == 'retrievechat'
 Provides-Extra: retrievechat-mongodb
-Requires-Dist: pyautogen[retrievechat-mongodb] (==0.6.0b1) ; extra == 'retrievechat-mongodb'
+Requires-Dist: pyautogen[retrievechat-mongodb] (==0.6.1) ; extra == 'retrievechat-mongodb'
 Provides-Extra: retrievechat-pgvector
-Requires-Dist: pyautogen[retrievechat-pgvector] (==0.6.0b1) ; extra == 'retrievechat-pgvector'
+Requires-Dist: pyautogen[retrievechat-pgvector] (==0.6.1) ; extra == 'retrievechat-pgvector'
 Provides-Extra: retrievechat-qdrant
-Requires-Dist: pyautogen[retrievechat-qdrant] (==0.6.0b1) ; extra == 'retrievechat-qdrant'
+Requires-Dist: pyautogen[retrievechat-qdrant] (==0.6.1) ; extra == 'retrievechat-qdrant'
 Provides-Extra: teachable
-Requires-Dist: pyautogen[teachable] (==0.6.0b1) ; extra == 'teachable'
+Requires-Dist: pyautogen[teachable] (==0.6.1) ; extra == 'teachable'
 Provides-Extra: test
-Requires-Dist: pyautogen[test] (==0.6.0b1) ; extra == 'test'
+Requires-Dist: pyautogen[test] (==0.6.1) ; extra == 'test'
 Provides-Extra: together
-Requires-Dist: pyautogen[together] (==0.6.0b1) ; extra == 'together'
+Requires-Dist: pyautogen[together] (==0.6.1) ; extra == 'together'
 Provides-Extra: twilio
-Requires-Dist: pyautogen[twilio] (==0.6.0b1) ; extra == 'twilio'
+Requires-Dist: pyautogen[twilio] (==0.6.1) ; extra == 'twilio'
 Provides-Extra: types
-Requires-Dist: pyautogen[types] (==0.6.0b1) ; extra == 'types'
+Requires-Dist: pyautogen[types] (==0.6.1) ; extra == 'types'
 Provides-Extra: websockets
-Requires-Dist: pyautogen[websockets] (==0.6.0b1) ; extra == 'websockets'
+Requires-Dist: pyautogen[websockets] (==0.6.1) ; extra == 'websockets'
 Provides-Extra: websurfer
-Requires-Dist: pyautogen[websurfer] (==0.6.0b1) ; extra == 'websurfer'
+Requires-Dist: pyautogen[websurfer] (==0.6.1) ; extra == 'websurfer'
 
 <a name="readme-top"></a>
 
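Every changed line in the hunk above is the same mechanical edit: the pyautogen pin behind the ag2 alias (and each of its extras) moves from 0.6.0b1 to 0.6.1. As a quick sanity check, a minimal sketch (not part of the diff) for inspecting the installed pin, assuming ag2 0.6.1 is installed in the current environment:

```python
# Hypothetical check of the alias package's pinned dependency after `pip install ag2==0.6.1`.
from importlib.metadata import requires, version

print(version("ag2"))  # expected: 0.6.1
# Keep only the base requirement; the extras carry an `; extra == '...'` marker.
base_pins = [r for r in requires("ag2") if r.startswith("pyautogen") and "extra" not in r]
print(base_pins)  # expected to show the base pin, e.g. pyautogen (==0.6.1)
```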
@@ -131,11 +131,11 @@ We adopt the Apache 2.0 license from v0.3. This enhances our commitment to open-
 
 :tada: May 11, 2024: [AutoGen: Enabling Next-Gen LLM Applications via Multi-Agent Conversation](https://openreview.net/pdf?id=uAjxFFing2) received the best paper award at the [ICLR 2024 LLM Agents Workshop](https://llmagents.github.io/).
 
-<!-- :tada: Apr 26, 2024: [AutoGen.NET](https://
+<!-- :tada: Apr 26, 2024: [AutoGen.NET](https://docs.ag2.ai/ag2-for-net/) is available for .NET developers! -->
 
 :tada: Apr 17, 2024: Andrew Ng cited AutoGen in [The Batch newsletter](https://www.deeplearning.ai/the-batch/issue-245/) and [What's next for AI agentic workflows](https://youtu.be/sal78ACtGTc?si=JduUzN_1kDnMq0vF) at Sequoia Capital's AI Ascent (Mar 26).
 
-:tada: Mar 3, 2024: What's new in AutoGen? 📰[Blog](https://
+:tada: Mar 3, 2024: What's new in AutoGen? 📰[Blog](https://docs.ag2.ai/blog/2024-03-03-AutoGen-Update); 📺[Youtube](https://www.youtube.com/watch?v=j_mtwQiaLGU).
 
 <!-- :tada: Mar 1, 2024: the first AutoGen multi-agent experiment on the challenging [GAIA](https://huggingface.co/spaces/gaia-benchmark/leaderboard) benchmark achieved the No. 1 accuracy in all the three levels. -->
 
@@ -143,9 +143,9 @@ We adopt the Apache 2.0 license from v0.3. This enhances our commitment to open-
 
 :tada: Dec 31, 2023: [AutoGen: Enabling Next-Gen LLM Applications via Multi-Agent Conversation Framework](https://arxiv.org/abs/2308.08155) is selected by [TheSequence: My Five Favorite AI Papers of 2023](https://thesequence.substack.com/p/my-five-favorite-ai-papers-of-2023).
 
-<!-- :fire: Nov 24: pyautogen [v0.2](https://github.com/ag2ai/ag2/releases/tag/v0.2.0) is released with many updates and new features compared to v0.1.1. It switches to using openai-python v1. Please read the [migration guide](https://
+<!-- :fire: Nov 24: pyautogen [v0.2](https://github.com/ag2ai/ag2/releases/tag/v0.2.0) is released with many updates and new features compared to v0.1.1. It switches to using openai-python v1. Please read the [migration guide](https://docs.ag2.ai/docs/installation/Installation). -->
 
-<!-- :fire: Nov 11: OpenAI's Assistants are available in AutoGen and interoperatable with other AutoGen agents! Checkout our [blogpost](https://
+<!-- :fire: Nov 11: OpenAI's Assistants are available in AutoGen and interoperatable with other AutoGen agents! Checkout our [blogpost](https://docs.ag2.ai/blog/2023-11-13-OAI-assistants) for details and examples. -->
 
 :tada: Nov 8, 2023: AutoGen is selected into [Open100: Top 100 Open Source achievements](https://www.benchcouncil.org/evaluation/opencs/annual.html) 35 days after spinoff from [FLAML](https://github.com/microsoft/FLAML).
 
@@ -162,7 +162,7 @@ We adopt the Apache 2.0 license from v0.3. This enhances our commitment to open-
 <!--
 :fire: FLAML is highlighted in OpenAI's [cookbook](https://github.com/openai/openai-cookbook#related-resources-from-around-the-web).
 
-:fire: [autogen](https://
+:fire: [autogen](https://docs.ag2.ai/) is released with support for ChatGPT and GPT-4, based on [Cost-Effective Hyperparameter Optimization for Large Language Model Generation Inference](https://arxiv.org/abs/2303.04673).
 
 :fire: FLAML supports Code-First AutoML & Tuning – Private Preview in [Microsoft Fabric Data Science](https://learn.microsoft.com/en-us/fabric/data-science/). -->
 
@@ -205,11 +205,11 @@ The easiest way to start playing is
 </a>
 </p>
 
-## [Installation](https://
+## [Installation](https://docs.ag2.ai/docs/installation/Installation)
 
 ### Option 1. Install and Run AG2 in Docker
 
-Find detailed instructions for users [here](https://
+Find detailed instructions for users [here](https://docs.ag2.ai/docs/installation/Docker#step-1-install-docker), and for developers [here](https://docs.ag2.ai/docs/contributor-guide/docker).
 
 ### Option 2. Install AG2 Locally
 
@@ -226,13 +226,13 @@ Minimal dependencies are installed without extra options. You can install extra
 pip install "autogen[blendsearch]"
 ``` -->
 
-Find more options in [Installation](https://
+Find more options in [Installation](https://docs.ag2.ai/docs/Installation#option-2-install-autogen-locally-using-virtual-environment).
 
 <!-- Each of the [`notebook examples`](https://github.com/ag2ai/ag2/tree/main/notebook) may require a specific option to be installed. -->
 
-Even if you are installing and running AG2 locally outside of docker, the recommendation and default behavior of agents is to perform [code execution](https://
+Even if you are installing and running AG2 locally outside of docker, the recommendation and default behavior of agents is to perform [code execution](https://docs.ag2.ai/docs/FAQ#if-you-want-to-run-code-execution-in-docker) in docker. Find more instructions and how to change the default behaviour [here](https://docs.ag2.ai/docs/FAQ#if-you-want-to-run-code-execution-locally).
 
-For LLM inference configurations, check the [FAQs](https://
+For LLM inference configurations, check the [FAQs](https://docs.ag2.ai/docs/FAQ#set-your-api-endpoints).
 
 <p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
 <a href="#readme-top" style="text-decoration: none; color: blue; font-weight: bold;">
@@ -242,7 +242,7 @@ For LLM inference configurations, check the [FAQs](https://ag2ai.github.io/ag2/d
 
 ## Multi-Agent Conversation Framework
 
-AG2 enables the next-gen LLM applications with a generic [multi-agent conversation](https://
+AG2 enables the next-gen LLM applications with a generic [multi-agent conversation](https://docs.ag2.ai/docs/Use-Cases/agent_chat) framework. It offers customizable and conversable agents that integrate LLMs, tools, and humans.
 By automating chat among multiple capable agents, one can easily make them collectively perform tasks autonomously or with human feedback, including tasks that require using tools via code.
 
 Features of this use case include:
@@ -256,7 +256,7 @@ For [example](https://github.com/ag2ai/ag2/blob/main/test/twoagent.py),
 ```python
 from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
 # Load LLM inference endpoints from an env variable or a file
-# See https://
+# See https://docs.ag2.ai/docs/FAQ#set-your-api-endpoints
 # and OAI_CONFIG_LIST_sample
 config_list = config_list_from_json(env_or_file="OAI_CONFIG_LIST")
 # You can also set config_list directly as a list, for example, config_list = [{'model': 'gpt-4o', 'api_key': '<your OpenAI API key here>'},]
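The only change in the hunk above is the documentation URL inside a comment of the README's two-agent example. For context, a self-contained sketch of how that example typically continues (agent names, the `use_docker` choice, and the task message follow the upstream sample and are illustrative, not part of this diff):

```python
# Illustrative continuation of the README's two-agent example.
from autogen import AssistantAgent, UserProxyAgent, config_list_from_json

# Load LLM endpoints from the OAI_CONFIG_LIST env variable or file, as in the README.
config_list = config_list_from_json(env_or_file="OAI_CONFIG_LIST")

# An LLM-backed assistant plus a user proxy that can run the code it receives.
assistant = AssistantAgent("assistant", llm_config={"config_list": config_list})
user_proxy = UserProxyAgent(
    "user_proxy",
    code_execution_config={"work_dir": "coding", "use_docker": False},  # True runs code in Docker (the recommended default)
)

# Kick off an automated chat between the two agents.
user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")
```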
@@ -279,7 +279,7 @@ The figure below shows an example conversation flow with AG2.
 
 
 Alternatively, the [sample code](https://github.com/ag2ai/build-with-ag2/blob/main/samples/simple_chat.py) here allows a user to chat with an AG2 agent in ChatGPT style.
-Please find more [code examples](https://
+Please find more [code examples](https://docs.ag2.ai/docs/Examples#automated-multi-agent-chat) for this feature.
 
 <p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
 <a href="#readme-top" style="text-decoration: none; color: blue; font-weight: bold;">
@@ -289,7 +289,7 @@ Please find more [code examples](https://ag2ai.github.io/ag2/docs/Examples#autom
 
 ## Enhanced LLM Inferences
 
-AG2 also helps maximize the utility out of the expensive LLMs such as gpt-4o. It offers [enhanced LLM inference](https://
+AG2 also helps maximize the utility out of the expensive LLMs such as gpt-4o. It offers [enhanced LLM inference](https://docs.ag2.ai/docs/Use-Cases/enhanced_inference#api-unification) with powerful functionalities like caching, error handling, multi-config inference and templating.
 
 <!-- For example, you can optimize generations by LLM with your own tuning data, success metrics, and budgets.
 
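The changed line above only swaps the documentation URL; the feature list it links to (caching, error handling, multi-config inference, templating) stays the same. A minimal sketch of multi-config inference with caching, assuming the `OpenAIWrapper` and `Cache` APIs exposed by recent pyautogen releases (the API names and arguments here are assumptions, not taken from this diff):

```python
# Assumed usage: configs in config_list are tried in order on failure; responses are cached on disk.
import autogen

config_list = autogen.config_list_from_json(env_or_file="OAI_CONFIG_LIST")
client = autogen.OpenAIWrapper(config_list=config_list)

with autogen.Cache.disk() as cache:  # repeated identical calls can be served from the cache
    response = client.create(
        messages=[{"role": "user", "content": "Summarize AG2 in one sentence."}],
        cache=cache,
    )
print(client.extract_text_or_completion_object(response))
```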
@@ -308,7 +308,7 @@ config, analysis = autogen.Completion.tune(
 response = autogen.Completion.create(context=test_instance, **config)
 ```
 
-Please find more [code examples](https://
+Please find more [code examples](https://docs.ag2.ai/docs/Examples#tune-gpt-models) for this feature. -->
 
 <p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
 <a href="#readme-top" style="text-decoration: none; color: blue; font-weight: bold;">
@@ -318,15 +318,15 @@ Please find more [code examples](https://ag2ai.github.io/ag2/docs/Examples#tune-
 
 ## Documentation
 
-You can find detailed documentation about AG2 [here](https://
+You can find detailed documentation about AG2 [here](https://docs.ag2.ai/).
 
 In addition, you can find:
 
-- [Research](https://
+- [Research](https://docs.ag2.ai/docs/Research), [blogposts](https://docs.ag2.ai/blog) around AG2, and [Transparency FAQs](https://github.com/ag2ai/ag2/blob/main/TRANSPARENCY_FAQS.md)
 
 - [Discord](https://discord.gg/pAbnFJrkgZ)
 
-- [Contributing guide](https://
+- [Contributing guide](https://docs.ag2.ai/docs/contributor-guide/contributing)
 
 <p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
 <a href="#readme-top" style="text-decoration: none; color: blue; font-weight: bold;">
ag2-0.6.1.dist-info/RECORD
ADDED
@@ -0,0 +1,6 @@
+ag2-0.6.1.dist-info/LICENSE,sha256=GEFQVNayAR-S_rQD5l8hPdgvgyktVdy4Bx5-v90IfRI,11384
+ag2-0.6.1.dist-info/METADATA,sha256=SG8183f-KizmFyrRbzu1o5V12j6qTCuUqBIL6aajTPw,22025
+ag2-0.6.1.dist-info/NOTICE.md,sha256=ucvou1bE6i2s40qyuU9RL0TeIVG01VhXoQ59EngtEz4,1317
+ag2-0.6.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ag2-0.6.1.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
+ag2-0.6.1.dist-info/RECORD,,
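Each RECORD entry above has the form `path,sha256=<digest>,<size>`, where the digest is an unpadded URL-safe base64 encoding of the file's SHA-256 hash. A small sketch of how such an entry could be recomputed from an unpacked wheel (the file path is a placeholder):

```python
# Recompute a RECORD-style line for one file from the unpacked wheel (illustrative path).
import base64
import hashlib
from pathlib import Path

path = Path("ag2-0.6.1.dist-info/METADATA")  # placeholder: adjust to where the wheel is unpacked
data = path.read_bytes()
digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
print(f"{path},sha256={digest},{len(data)}")  # should reproduce the corresponding RECORD line
```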
ag2-0.6.0b1.dist-info/RECORD
DELETED
@@ -1,6 +0,0 @@
-ag2-0.6.0b1.dist-info/LICENSE,sha256=GEFQVNayAR-S_rQD5l8hPdgvgyktVdy4Bx5-v90IfRI,11384
-ag2-0.6.0b1.dist-info/METADATA,sha256=Kk-ji5gptOFJVFUS5xs1uLzGkZk7XRaQa3MnvaYn_m0,22209
-ag2-0.6.0b1.dist-info/NOTICE.md,sha256=ucvou1bE6i2s40qyuU9RL0TeIVG01VhXoQ59EngtEz4,1317
-ag2-0.6.0b1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
-ag2-0.6.0b1.dist-info/top_level.txt,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-ag2-0.6.0b1.dist-info/RECORD,,
{ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/LICENSE: file without changes
{ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/NOTICE.md: file without changes
{ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/WHEEL: file without changes
{ag2-0.6.0b1.dist-info → ag2-0.6.1.dist-info}/top_level.txt: file without changes