olca 0.2.64__tar.gz → 0.2.66__tar.gz

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: olca
-Version: 0.2.64
+Version: 0.2.66
 Summary: A Python package for experimental usage of Langchain and Human-in-the-Loop
 Home-page: https://github.com/jgwill/olca
 Author: Jean GUillaume ISabelle
@@ -76,7 +76,7 @@ def serialize_response_to_json_file(resp, filename):
         f.write(json_str)
 
 def serialize_response_to_markdown(o):
-    output=o["output"]["output"]
+    output=o["output"]
     string_md="# Output\n"
     #string_md+=f"## Model\n{o['model']}\n"
     #string_md+=f"## Prompt\n{o['prompt']['prompt']}\n"
@@ -99,16 +99,16 @@ def main():
     resp = ask_agent(input_request,tool_hub_tag=tool_hub_tag,chatbot_model=args.chatbot_model)
     outdir=os.path.join(os.getcwd(),"output")
     os.makedirs(outdir, exist_ok=True)
-    out_filename = f"{args.prefix}output-{tlid.get_minutes()}.json"
+    out_filename = f"{args.prefix}{tlid.get_minutes()}.json"
     outfile=os.path.join(outdir,out_filename)
     o={}
     prompt_dict = {
-        "prompt": str(prompt)
+        "promptdata": str(prompt)
     }
     o["model"]=args.chatbot_model
     o["prompt"]=prompt_dict
     o["input"]=input_request
-    o["output"]=resp
+    o["output"]=resp["output"]
     serialize_response_to_json_file(o, outfile)
     serialize_response_to_markdown_file(o, outfile.replace(".json",".md"))
     VERBOSE_RESULT=False
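
Read together, the two hunks above make the stored record and its serializers consistent: main() now keeps only resp["output"] under o["output"], so serialize_response_to_markdown reads one level of nesting instead of two; the prompt text moves to the key "promptdata"; and the output filename drops the redundant "output-" segment (the files already land in the output/ directory). A minimal sketch of the record written to the JSON/Markdown files after this change, assuming ask_agent returns a dict with an "output" key; the model name, prompt, and input values below are placeholders, not values from the package:

    # Sketch only: shape of the record built in main() as of 0.2.66 (placeholder values).
    resp = {"input": "summarize README.md", "output": "...agent answer text..."}  # assumed ask_agent() return shape

    o = {
        "model": "gpt-4o-mini",            # args.chatbot_model (placeholder)
        "prompt": {"promptdata": "..."},   # key renamed from "prompt"
        "input": "summarize README.md",    # input_request
        "output": resp["output"],          # only the answer text; the whole resp dict is no longer stored
    }

    # serialize_response_to_markdown(o) can now read the text with a single lookup:
    output = o["output"]                   # was o["output"]["output"] in 0.2.64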
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: olca
-Version: 0.2.64
+Version: 0.2.66
 Summary: A Python package for experimental usage of Langchain and Human-in-the-Loop
 Home-page: https://github.com/jgwill/olca
 Author: Jean GUillaume ISabelle
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "olca"
-version = "0.2.64"
+version = "0.2.66"
 
 description = "A Python package for experimental usage of Langchain and Human-in-the-Loop"
 readme = "README.md"
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='olca',
-    version = "0.2.64",
+    version = "0.2.66",
     author='Jean GUillaume ISabelle',
     author_email='jgi@jgwill.com',
     description='A Python package for experimenting with Langchain agent and interactivity in Terminal modalities.',