edsl 0.1.36.dev5__py3-none-any.whl → 0.1.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. edsl/__init__.py +1 -0
  2. edsl/__version__.py +1 -1
  3. edsl/agents/Agent.py +92 -41
  4. edsl/agents/AgentList.py +15 -2
  5. edsl/agents/InvigilatorBase.py +15 -25
  6. edsl/agents/PromptConstructor.py +149 -108
  7. edsl/agents/descriptors.py +17 -4
  8. edsl/conjure/AgentConstructionMixin.py +11 -3
  9. edsl/conversation/Conversation.py +66 -14
  10. edsl/conversation/chips.py +95 -0
  11. edsl/coop/coop.py +148 -39
  12. edsl/data/Cache.py +1 -1
  13. edsl/data/RemoteCacheSync.py +25 -12
  14. edsl/exceptions/BaseException.py +21 -0
  15. edsl/exceptions/__init__.py +7 -3
  16. edsl/exceptions/agents.py +17 -19
  17. edsl/exceptions/results.py +11 -8
  18. edsl/exceptions/scenarios.py +22 -0
  19. edsl/exceptions/surveys.py +13 -10
  20. edsl/inference_services/AwsBedrock.py +7 -2
  21. edsl/inference_services/InferenceServicesCollection.py +42 -13
  22. edsl/inference_services/models_available_cache.py +25 -1
  23. edsl/jobs/Jobs.py +306 -71
  24. edsl/jobs/interviews/Interview.py +24 -14
  25. edsl/jobs/interviews/InterviewExceptionCollection.py +1 -1
  26. edsl/jobs/interviews/InterviewExceptionEntry.py +17 -13
  27. edsl/jobs/interviews/ReportErrors.py +2 -2
  28. edsl/jobs/runners/JobsRunnerAsyncio.py +10 -9
  29. edsl/jobs/tasks/TaskHistory.py +1 -0
  30. edsl/language_models/KeyLookup.py +30 -0
  31. edsl/language_models/LanguageModel.py +47 -59
  32. edsl/language_models/__init__.py +1 -0
  33. edsl/prompts/Prompt.py +11 -12
  34. edsl/questions/QuestionBase.py +53 -13
  35. edsl/questions/QuestionBasePromptsMixin.py +1 -33
  36. edsl/questions/QuestionFreeText.py +1 -0
  37. edsl/questions/QuestionFunctional.py +2 -2
  38. edsl/questions/descriptors.py +23 -28
  39. edsl/results/DatasetExportMixin.py +25 -1
  40. edsl/results/Result.py +27 -10
  41. edsl/results/Results.py +34 -121
  42. edsl/results/ResultsDBMixin.py +1 -1
  43. edsl/results/Selector.py +18 -1
  44. edsl/scenarios/FileStore.py +20 -5
  45. edsl/scenarios/Scenario.py +52 -13
  46. edsl/scenarios/ScenarioHtmlMixin.py +7 -2
  47. edsl/scenarios/ScenarioList.py +12 -1
  48. edsl/scenarios/__init__.py +2 -0
  49. edsl/surveys/Rule.py +10 -4
  50. edsl/surveys/Survey.py +100 -77
  51. edsl/utilities/utilities.py +18 -0
  52. {edsl-0.1.36.dev5.dist-info → edsl-0.1.37.dist-info}/METADATA +1 -1
  53. {edsl-0.1.36.dev5.dist-info → edsl-0.1.37.dist-info}/RECORD +55 -51
  54. {edsl-0.1.36.dev5.dist-info → edsl-0.1.37.dist-info}/LICENSE +0 -0
  55. {edsl-0.1.36.dev5.dist-info → edsl-0.1.37.dist-info}/WHEEL +0 -0
edsl/inference_services/InferenceServicesCollection.py
@@ -16,25 +16,48 @@ class InferenceServicesCollection:
 
     @staticmethod
     def _get_service_available(service, warn: bool = False) -> list[str]:
-        from_api = True
         try:
             service_models = service.available()
-        except Exception as e:
+        except Exception:
             if warn:
                 warnings.warn(
                     f"""Error getting models for {service._inference_service_}.
                     Check that you have properly stored your Expected Parrot API key and activated remote inference, or stored your own API keys for the language models that you want to use.
                     See https://docs.expectedparrot.com/en/latest/api_keys.html for instructions on storing API keys.
-                    Relying on cache.""",
+                    Relying on Coop.""",
                     UserWarning,
                 )
-            from edsl.inference_services.models_available_cache import models_available
 
-            service_models = models_available.get(service._inference_service_, [])
-            # cache results
-            service._models_list_cache = service_models
-            from_api = False
-        return service_models  # , from_api
+            # Use the list of models on Coop as a fallback
+            try:
+                from edsl import Coop
+
+                c = Coop()
+                models_from_coop = c.fetch_models()
+                service_models = models_from_coop.get(service._inference_service_, [])
+
+                # cache results
+                service._models_list_cache = service_models
+
+            # Finally, use the available models cache from the Python file
+            except Exception:
+                if warn:
+                    warnings.warn(
+                        f"""Error getting models for {service._inference_service_}.
+                        Relying on EDSL cache.""",
+                        UserWarning,
+                    )
+
+                from edsl.inference_services.models_available_cache import (
+                    models_available,
+                )
+
+                service_models = models_available.get(service._inference_service_, [])
+
+                # cache results
+                service._models_list_cache = service_models
+
+        return service_models
 
     def available(self):
         total_models = []
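Net effect of this hunk: model discovery now degrades through three tiers — the service's own available() call, then the model list published on Coop via fetch_models(), and finally the models_available dictionary bundled in models_available_cache.py — caching whichever list it obtains on the service object. A minimal standalone sketch of that ordering follows; the helper names and sample data are illustrative stand-ins, not EDSL APIs.

# Illustrative sketch of the three-tier fallback shown in the hunk above.
# get_models_with_fallback, ask_service, ask_coop, and bundled_cache are
# hypothetical stand-ins, not part of EDSL.
def get_models_with_fallback(service_name, ask_service, ask_coop, bundled_cache):
    try:
        return ask_service()                            # tier 1: live service API (service.available())
    except Exception:
        try:
            return ask_coop().get(service_name, [])     # tier 2: Coop().fetch_models()
        except Exception:
            return bundled_cache.get(service_name, [])  # tier 3: bundled models_available cache


def failing_service():
    raise RuntimeError("no API key configured")         # simulate tier 1 failing


models = get_models_with_fallback(
    "google",
    ask_service=failing_service,
    ask_coop=lambda: {"google": ["gemini-1.5-pro", "gemini-1.5-flash"]},
    bundled_cache={"google": ["gemini-pro"]},
)
print(models)  # ['gemini-1.5-pro', 'gemini-1.5-flash']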
@@ -56,13 +79,19 @@ class InferenceServicesCollection:
         self.services.append(service)
 
     def create_model_factory(self, model_name: str, service_name=None, index=None):
+        from edsl.inference_services.TestService import TestService
+
+        if model_name == "test":
+            return TestService.create_model(model_name)
+
+        if service_name:
+            for service in self.services:
+                if service_name == service._inference_service_:
+                    return service.create_model(model_name)
+
         for service in self.services:
             if model_name in self._get_service_available(service):
                 if service_name is None or service_name == service._inference_service_:
                     return service.create_model(model_name)
 
-        # if model_name == "test":
-        #     from edsl.language_models import LanguageModel
-        #     return LanguageModel(test = True)
-
         raise Exception(f"Model {model_name} not found in any of the services")
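Net effect of this hunk: create_model_factory now short-circuits the built-in "test" model to TestService, then honors an explicitly requested service_name before falling back to scanning each registered service's model list. A simplified, self-contained sketch of that resolution order follows; the names below are illustrative stand-ins, not the actual EDSL objects.

# Simplified illustration of the lookup order; "services" and "known_models"
# are hypothetical stand-ins for self.services and _get_service_available().
def resolve_service(model_name, service_name, services, known_models):
    if model_name == "test":                  # 1) built-in test model wins outright
        return "test_service"
    if service_name:                          # 2) an explicit service name is honored next
        for service in services:
            if service == service_name:
                return service
    for service in services:                  # 3) otherwise scan each service's model list
        if model_name in known_models.get(service, []):
            return service
    raise Exception(f"Model {model_name} not found in any of the services")


services = ["openai", "google"]
known_models = {"openai": ["gpt-4o"], "google": ["gemini-1.5-pro"]}
print(resolve_service("gemini-1.5-pro", None, services, known_models))  # google
print(resolve_service("test", None, services, known_models))            # test_service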
edsl/inference_services/models_available_cache.py
@@ -65,7 +65,31 @@ models_available = {
         "meta-llama/Meta-Llama-3-70B-Instruct",
         "openchat/openchat_3.5",
     ],
-    "google": ["gemini-pro"],
+    "google": [
+        "gemini-1.0-pro",
+        "gemini-1.0-pro-001",
+        "gemini-1.0-pro-latest",
+        "gemini-1.0-pro-vision-latest",
+        "gemini-1.5-flash",
+        "gemini-1.5-flash-001",
+        "gemini-1.5-flash-001-tuning",
+        "gemini-1.5-flash-002",
+        "gemini-1.5-flash-8b",
+        "gemini-1.5-flash-8b-001",
+        "gemini-1.5-flash-8b-exp-0827",
+        "gemini-1.5-flash-8b-exp-0924",
+        "gemini-1.5-flash-8b-latest",
+        "gemini-1.5-flash-exp-0827",
+        "gemini-1.5-flash-latest",
+        "gemini-1.5-pro",
+        "gemini-1.5-pro-001",
+        "gemini-1.5-pro-002",
+        "gemini-1.5-pro-exp-0801",
+        "gemini-1.5-pro-exp-0827",
+        "gemini-1.5-pro-latest",
+        "gemini-pro",
+        "gemini-pro-vision",
+    ],
     "bedrock": [
         "amazon.titan-tg1-large",
         "amazon.titan-text-lite-v1",