PraisonAI 2.0.66.tar.gz → 2.0.68.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (88)
  1. {praisonai-2.0.66 → praisonai-2.0.68}/PKG-INFO +1 -1
  2. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/deploy.py +1 -1
  3. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/train.py +198 -18
  4. {praisonai-2.0.66 → praisonai-2.0.68}/pyproject.toml +2 -2
  5. {praisonai-2.0.66 → praisonai-2.0.68}/LICENSE +0 -0
  6. {praisonai-2.0.66 → praisonai-2.0.68}/README.md +0 -0
  7. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/__init__.py +0 -0
  8. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/__main__.py +0 -0
  9. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/agents_generator.py +0 -0
  10. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/api/call.py +0 -0
  11. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/auto.py +0 -0
  12. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/chainlit_ui.py +0 -0
  13. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/cli.py +0 -0
  14. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/inbuilt_tools/__init__.py +0 -0
  15. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/inbuilt_tools/autogen_tools.py +0 -0
  16. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/inc/__init__.py +0 -0
  17. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/inc/config.py +0 -0
  18. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/inc/models.py +0 -0
  19. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/android-chrome-192x192.png +0 -0
  20. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/android-chrome-512x512.png +0 -0
  21. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/apple-touch-icon.png +0 -0
  22. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/fantasy.svg +0 -0
  23. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/favicon-16x16.png +0 -0
  24. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/favicon-32x32.png +0 -0
  25. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/favicon.ico +0 -0
  26. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/game.svg +0 -0
  27. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/logo_dark.png +0 -0
  28. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/logo_light.png +0 -0
  29. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/movie.svg +0 -0
  30. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/praison-ai-agents-architecture-dark.png +0 -0
  31. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/praison-ai-agents-architecture.png +0 -0
  32. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/public/thriller.svg +0 -0
  33. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/__init__.py +0 -0
  34. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/build.py +0 -0
  35. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/config.yaml +0 -0
  36. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/post_install.py +0 -0
  37. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/setup_conda_env.py +0 -0
  38. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup/setup_conda_env.sh +0 -0
  39. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/setup.py +0 -0
  40. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/test.py +0 -0
  41. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/README.md +0 -0
  42. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/agents.py +0 -0
  43. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/callbacks.py +0 -0
  44. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/chat.py +0 -0
  45. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/code.py +0 -0
  46. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/colab.py +0 -0
  47. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/colab_chainlit.py +0 -0
  48. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/components/aicoder.py +0 -0
  49. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/config.toml +0 -0
  50. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/bn.json +0 -0
  51. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/en-US.json +0 -0
  52. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/gu.json +0 -0
  53. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/he-IL.json +0 -0
  54. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/hi.json +0 -0
  55. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/kn.json +0 -0
  56. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/ml.json +0 -0
  57. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/mr.json +0 -0
  58. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/ta.json +0 -0
  59. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/te.json +0 -0
  60. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/.chainlit/translations/zh-CN.json +0 -0
  61. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/chainlit.md +0 -0
  62. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/bn.json +0 -0
  63. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/en-US.json +0 -0
  64. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/gu.json +0 -0
  65. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/he-IL.json +0 -0
  66. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/hi.json +0 -0
  67. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/kn.json +0 -0
  68. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/ml.json +0 -0
  69. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/mr.json +0 -0
  70. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/ta.json +0 -0
  71. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/te.json +0 -0
  72. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/config/translations/zh-CN.json +0 -0
  73. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/context.py +0 -0
  74. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/db.py +0 -0
  75. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/fantasy.svg +0 -0
  76. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/game.svg +0 -0
  77. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/logo_dark.png +0 -0
  78. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/logo_light.png +0 -0
  79. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/movie.svg +0 -0
  80. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/praison.css +0 -0
  81. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/public/thriller.svg +0 -0
  82. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/realtime.py +0 -0
  83. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/realtimeclient/__init__.py +0 -0
  84. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/realtimeclient/realtimedocs.txt +0 -0
  85. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/realtimeclient/tools.py +0 -0
  86. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/sql_alchemy.py +0 -0
  87. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/ui/tools.md +0 -0
  88. {praisonai-2.0.66 → praisonai-2.0.68}/praisonai/version.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: PraisonAI
- Version: 2.0.66
+ Version: 2.0.68
  Summary: PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration.
  Author: Mervin Praison
  Requires-Python: >=3.10,<3.13
praisonai/deploy.py
@@ -56,7 +56,7 @@ class CloudDeployer:
  file.write("FROM python:3.11-slim\n")
  file.write("WORKDIR /app\n")
  file.write("COPY . .\n")
- file.write("RUN pip install flask praisonai==2.0.66 gunicorn markdown\n")
+ file.write("RUN pip install flask praisonai==2.0.68 gunicorn markdown\n")
  file.write("EXPOSE 8080\n")
  file.write('CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n')
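As context for the one-line change above: the file.write calls visible in this hunk assemble the Dockerfile used for deployment, and the bump pins the image to the new release. The Python snippet below is illustrative only (it is not part of the package); it just collects the writes shown in the hunk into one string, and since the hunk shows only part of the method, the real generated file may contain additional lines.

# Illustrative only: the Dockerfile text produced by the file.write calls
# shown in this hunk after the version bump. Lines written earlier in the
# method are not visible in the diff and are therefore omitted here.
dockerfile = (
    "FROM python:3.11-slim\n"
    "WORKDIR /app\n"
    "COPY . .\n"
    "RUN pip install flask praisonai==2.0.68 gunicorn markdown\n"
    "EXPOSE 8080\n"
    'CMD ["gunicorn", "-b", "0.0.0.0:8080", "api:app"]\n'
)
print(dockerfile)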
praisonai/train.py
@@ -312,26 +312,206 @@ class TrainModel:
 
  def prepare_modelfile_content(self):
  output_model = self.config["hf_model_name"]
- gguf_path = f"{output_model}/unsloth.Q4_K_M.gguf"
- if not os.path.exists(gguf_path):
- self.model, self.hf_tokenizer = self.load_model()
- self.save_model_gguf()
- return f"""FROM {output_model}/unsloth.Q4_K_M.gguf
-
- TEMPLATE \"\"\"Below are some instructions that describe some tasks. Write responses that appropriately complete each request.{{{{ if .Prompt }}}}
-
- ### Instruction:
- {{{{ .Prompt }}}}
+ model_name = self.config["model_name"].lower()
+ # Mapping from model name keywords to their default TEMPLATE and stop tokens (and optional SYSTEM/num_ctx)
+ mapping = {
+ "llama": {
+ "template": """<|start_header_id|>system<|end_header_id|>
+ Cutting Knowledge Date: December 2023
+ {{ if .System }}{{ .System }}
+ {{- end }}
+ {{- if .Tools }}When you receive a tool call response, use the output to format an answer to the orginal user question.
+ You are a helpful assistant with tool calling capabilities.
+ {{- end }}<|eot_id|>
+ {{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1 }}
+ {{- if eq .Role "user" }}<|start_header_id|>user<|end_header_id|>
+ {{- if and $.Tools $last }}
+ Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.
+ Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}. Do not use variables.
+ {{ range $.Tools }}
+ {{- . }}
+ {{ end }}
+ {{ .Content }}<|eot_id|>
+ {{- else }}
+ {{ .Content }}<|eot_id|>
+ {{- end }}{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
+ {{ end }}
+ {{- else if eq .Role "assistant" }}<|start_header_id|>assistant<|end_header_id|>
+ {{- if .ToolCalls }}
+ {{ range .ToolCalls }}
+ {"name": "{{ .Function.Name }}", "parameters": {{ .Function.Arguments }}}{{ end }}
+ {{- else }}
+ {{ .Content }}
+ {{- end }}{{ if not $last }}<|eot_id|>{{ end }}
+ {{- else if eq .Role "tool" }}<|start_header_id|>ipython<|end_header_id|>
+ {{ .Content }}<|eot_id|>{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
+ {{ end }}
+ {{- end }}
+ {{- end }}""",
+ "stop_tokens": ["<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>"]
+ },
+ "qwen": {
+ "template": """{{- if .Suffix }}<|fim_prefix|>{{ .Prompt }}<|fim_suffix|>{{ .Suffix }}<|fim_middle|>
+ {{- else if .Messages }}
+ {{- if or .System .Tools }}<|im_start|>system
+ {{- if .System }}
+ {{ .System }}
+ {{- end }}
+ {{- if .Tools }}
+ # Tools
+ You may call one or more functions to assist with the user query.
+ You are provided with function signatures within <tools></tools> XML tags:
+ <tools>
+ {{- range .Tools }}
+ {"type": "function", "function": {{ .Function }}}
+ {{- end }}
+ </tools>
+ For each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:
+ <tool_call>
+ {"name": <function-name>, "arguments": <args-json-object>}
+ </tool_call>
+ {{- end }}<|im_end|>
+ {{ end }}
+ {{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1 -}}
+ {{- if eq .Role "user" }}<|im_start|>user
+ {{ .Content }}<|im_end|>
+ {{ else if eq .Role "assistant" }}<|im_start|>assistant
+ {{ if .Content }}{{ .Content }}
+ {{- else if .ToolCalls }}<tool_call>
+ {{ range .ToolCalls }}{"name": "{{ .Function.Name }}", "arguments": {{ .Function.Arguments }}}
+ {{ end }}</tool_call>
+ {{- end }}{{ if not $last }}<|im_end|>
+ {{ end }}
+ {{- else if eq .Role "tool" }}<|im_start|>user
+ <tool_response>
+ {{ .Content }}
+ </tool_response><|im_end|>
+ {{ end }}
+ {{- if and (ne .Role "assistant") $last }}<|im_start|>assistant
+ {{ end }}
+ {{- end }}
+ {{- else }}
+ {{- if .System }}<|im_start|>system
+ {{ .System }}<|im_end|>
+ {{ end }}{{ if .Prompt }}<|im_start|>user
+ {{ .Prompt }}<|im_end|>
+ {{ end }}<|im_start|>assistant
+ {{ end }}{{ .Response }}{{ if .Response }}<|im_end|>{{ end }}""",
+ "system": "You are Qwen, created by Alibaba Cloud. You are a helpful assistant.",
+ "num_ctx": 32768,
+ "stop_tokens": ["<|endoftext|>"]
+ },
+ "mistral": {
+ "template": "[INST] {{ if .System }}{{ .System }} {{ end }}{{ .Prompt }} [/INST]",
+ "stop_tokens": ["[INST]", "[/INST]"]
+ },
+ "phi": {
+ "template": """{{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1 -}}
+ <|im_start|>{{ .Role }}<|im_sep|>
+ {{ .Content }}{{ if not $last }}<|im_end|>
+ {{ end }}
+ {{- if and (ne .Role "assistant") $last }}<|im_end|>
+ <|im_start|>assistant<|im_sep|>
+ {{ end }}
+ {{- end }}""",
+ "stop_tokens": ["<|im_start|>", "<|im_end|>", "<|im_sep|>"]
+ },
+ "deepseek": {
+ "template": """{{- if .System }}{{ .System }}{{ end }}
+ {{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1}}
+ {{- if eq .Role "user" }}<|User|>{{ .Content }}
+ {{- else if eq .Role "assistant" }}<|Assistant|>{{ .Content }}{{- if not $last }}<|end▁of▁sentence|>{{- end }}
+ {{- end }}
+ {{- if and $last (ne .Role "assistant") }}<|Assistant|>{{- end }}
+ {{- end }}""",
+ "stop_tokens": ["<|begin▁of▁sentence|>", "<|end▁of▁sentence|>", "<|User|>", "<|Assistant|>"]
+ },
+ "llava": {
+ "template": """{{- if .Suffix }}<|fim_prefix|>{{ .Prompt }}<|fim_suffix|>{{ .Suffix }}<|fim_middle|>
+ {{- else if .Messages }}
+ {{- if or .System .Tools }}<|im_start|>system
+ {{- if .System }}
+ {{ .System }}
+ {{- end }}
+ {{- if .Tools }}
+ # Tools
+ You may call one or more functions to assist with the user query.
+ You are provided with function signatures within <tools></tools> XML tags:
+ <tools>
+ {{- range .Tools }}
+ {"type": "function", "function": {{ .Function }}}
+ {{- end }}
+ </tools>
+ For each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:
+ <tool_call>
+ {"name": <function-name>, "arguments": <args-json-object>}
+ </tool_call>
+ {{- end }}<|im_end|>
+ {{ end }}
+ {{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1 -}}
+ {{- if eq .Role "user" }}<|im_start|>user
+ {{ .Content }}<|im_end|>
+ {{ else if eq .Role "assistant" }}<|im_start|>assistant
+ {{ if .Content }}{{ .Content }}
+ {{- else if .ToolCalls }}<tool_call>
+ {{ range .ToolCalls }}{"name": "{{ .Function.Name }}", "arguments": {{ .Function.Arguments }}}
+ {{ end }}</tool_call>
+ {{- end }}{{ if not $last }}<|im_end|>
+ {{ end }}
+ {{- else if eq .Role "tool" }}<|im_start|>user
+ <tool_response>
+ {{ .Content }}
+ </tool_response><|im_end|>
+ {{ end }}
+ {{- if and (ne .Role "assistant") $last }}<|im_start|>assistant
+ {{ end }}
+ {{- end }}
+ {{- else }}
+ {{- if .System }}<|im_start|>system
+ {{ .System }}<|im_end|>
+ {{ end }}{{ if .Prompt }}<|im_start|>user
+ {{ .Prompt }}<|im_end|>
+ {{ end }}<|im_start|>assistant
+ {{ end }}{{ .Response }}{{ if .Response }}<|im_end|>{{ end }}""",
+ "stop_tokens": ["</s>", "USER:", "ASSSISTANT:"]
+ }
+ }
+ # Select mapping by checking if any key is in the model_name.
+ chosen = None
+ for key, settings in mapping.items():
+ if key in model_name:
+ chosen = settings
+ break
+ if chosen is None:
+ # Fallback default
+ chosen = {
+ "template": """{{ if .System }}<|start_header_id|>system<|end_header_id|>
+ {{ .System }}<|eot_id|>{{ end }}{{ if .Prompt }}<|start_header_id|>user<|end_header_id|>
+ {{ .Prompt }}<|eot_id|>{{ end }}<|start_header_id|>assistant<|end_header_id|>
+ {{ .Response }}<|eot_id|>""",
+ "stop_tokens": ["<|start_header_id|>", "<|end_header_id|>", "<|eot_id|>"]
+ }
+ # Build the stop parameter lines.
+ stop_params = "\n".join([f"PARAMETER stop {token}" for token in chosen["stop_tokens"]])
+ # Optionally include a SYSTEM line and num_ctx if defined in the mapping.
+ system_line = ""
+ if "system" in chosen:
+ system_line = f"SYSTEM {chosen['system']}\n"
+ num_ctx_line = ""
+ if "num_ctx" in chosen:
+ num_ctx_line = f"PARAMETER num_ctx {chosen['num_ctx']}\n"
+ # Assemble and return the modelfile content.
+ return f"""FROM {output_model}
+ TEMPLATE \"\"\"{chosen['template']}\"\"\"
+ {system_line}{num_ctx_line}{stop_params}
+ """
 
- {{{{ end }}}}### Response:
- {{{{ .Response }}}}\"\"\"
 
- PARAMETER stop ""
- PARAMETER stop ""
- PARAMETER stop ""
- PARAMETER stop ""
- PARAMETER stop "<|reserved_special_token_"
- """
 
  def create_and_push_ollama_model(self):
  modelfile_content = self.prepare_modelfile_content()
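In short, the new prepare_modelfile_content no longer builds the GGUF file inline; it selects an Ollama Modelfile TEMPLATE, stop tokens, and optional SYSTEM/num_ctx values by keyword-matching the configured model name, with a llama-style fallback when nothing matches. The sketch below is not the package's code: build_modelfile and the example model names are illustrative, the config keys hf_model_name and model_name mirror the ones used in the hunk, and only the "mistral" entry is reproduced so the output stays short.

# Minimal standalone sketch of the selection and assembly logic introduced
# above, reduced to the "mistral" entry. The real method also covers llama,
# qwen, phi, deepseek and llava, and falls back to a llama-style template.
def build_modelfile(hf_model_name: str, model_name: str) -> str:
    mapping = {
        "mistral": {
            "template": "[INST] {{ if .System }}{{ .System }} {{ end }}{{ .Prompt }} [/INST]",
            "stop_tokens": ["[INST]", "[/INST]"],
        },
    }
    # Pick the first entry whose keyword occurs in the lowercased model name.
    chosen = next((v for k, v in mapping.items() if k in model_name.lower()), None)
    if chosen is None:
        raise ValueError("unknown model family (the package would use its fallback template)")
    stop_params = "\n".join(f"PARAMETER stop {t}" for t in chosen["stop_tokens"])
    system_line = f"SYSTEM {chosen['system']}\n" if "system" in chosen else ""
    num_ctx_line = f"PARAMETER num_ctx {chosen['num_ctx']}\n" if "num_ctx" in chosen else ""
    return (
        f"FROM {hf_model_name}\n"
        f'TEMPLATE """{chosen["template"]}"""\n'
        f"{system_line}{num_ctx_line}{stop_params}\n"
    )

print(build_modelfile("someuser/finetuned-mistral", "mistral-7b-instruct"))
# FROM someuser/finetuned-mistral
# TEMPLATE """[INST] {{ if .System }}{{ .System }} {{ end }}{{ .Prompt }} [/INST]"""
# PARAMETER stop [INST]
# PARAMETER stop [/INST]

Saved to a file named Modelfile, text of this shape could be registered locally with `ollama create <model-name> -f Modelfile`; how create_and_push_ollama_model actually invokes Ollama is outside the visible hunk.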
pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "PraisonAI"
- version = "2.0.66"
+ version = "2.0.68"
  description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human-agent collaboration."
  readme = "README.md"
  license = ""
@@ -84,7 +84,7 @@ autogen = ["pyautogen>=0.2.19", "praisonai-tools>=0.0.7", "crewai"]
 
  [tool.poetry]
  name = "PraisonAI"
- version = "2.0.66"
+ version = "2.0.68"
  description = "PraisonAI is an AI Agents Framework with Self Reflection. PraisonAI application combines PraisonAI Agents, AutoGen, and CrewAI into a low-code solution for building and managing multi-agent LLM systems, focusing on simplicity, customisation, and efficient human–agent collaboration."
  authors = ["Mervin Praison"]
  license = ""