sopel-ai 1.0.14__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
sopel_ai/__init__.py CHANGED
@@ -1,7 +1,10 @@
  # See: https://raw.githubcontent.com/pr3d4t0r/sopel_ai/master/LICENSE.txt


- __VERSION__ = '1.0.14'
+ import importlib.metadata
+
+
+ __VERSION__ = importlib.metadata.version('sopel-ai')
  """
  @public
  """
sopel_ai/config.py CHANGED
@@ -1,6 +1,7 @@
  # See: https://raw.githubcontent.com/pr3d4t0r/sopel_ai/master/LICENSE.txt

  from sopel import config
+ from sopel_ai.core import DEFAULT_API_KEY
  from sopel_ai.core import DEFAULT_LLM
  from sopel_ai.core import DEFAULT_LLM_PROVIDER
  from sopel_ai.core import DEFAULT_LLM_SERVICE
@@ -9,6 +10,7 @@ from sopel_ai.core import DEFAULT_LOG_LEVEL

  class SopelAISection(config.types.StaticSection):
  llm_engine = config.types.ValidatedAttribute('llm_engine', str, default = DEFAULT_LLM)
+ llm_key = config.types.ValidatedAttribute('llm_key', str, default = DEFAULT_API_KEY, is_secret = True)
  llm_provider = config.types.ValidatedAttribute('llm_provider', str, default = DEFAULT_LLM_PROVIDER)
  llm_service = config.types.ValidatedAttribute('llm_service', str, default = DEFAULT_LLM_SERVICE)
  logLevel = config.types.ValidatedAttribute('logLevel', str, default = DEFAULT_LOG_LEVEL)
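For context, a minimal sketch of how a Sopel plugin typically declares and reads a section like the one above. The section and attribute names come from the hunk; the `setup()` wiring is an assumption about usage, not this plugin's exact code:

```python
from sopel import config


class SopelAISection(config.types.StaticSection):
    # is_secret keeps the key masked when Sopel displays or prompts for settings.
    llm_key = config.types.ValidatedAttribute('llm_key', str, is_secret = True)


def setup(bot):
    # Registers the [sopel_ai] section; afterwards command handlers can read
    # bot.config.sopel_ai.llm_key for the configured API key.
    bot.config.define_section('sopel_ai', SopelAISection)
```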
sopel_ai/core.py CHANGED
@@ -1,11 +1,10 @@
  # See: https://raw.githubcontent.com/pr3d4t0r/sopel_ai/master/LICENSE.txt

- from perplexipy import PERPLEXITY_API_KEY
  from perplexipy import PERPLEXITY_API_URL
  from perplexipy import PERPLEXITY_DEFAULT_MODEL
  from perplexipy import PerplexityClient
  from sopel_ai import __VERSION__
- from sopel_ai.errors import M0tokoError
+ from sopel_ai.errors import SopelAIError
  from tinydb import Query
  from tinydb import TinyDB

@@ -14,6 +13,7 @@ from tinydb import TinyDB

  # +++ constants +++

+ DEFAULT_API_KEY = 'pplx-3a45enterthekeyhere'
  DEFAULT_LLM = PERPLEXITY_DEFAULT_MODEL
  DEFAULT_LLM_PROVIDER = 'PerplexityAI'
  DEFAULT_LLM_SERVICE = PERPLEXITY_API_URL
@@ -58,15 +58,15 @@ def _checkDB(fileName: str) -> TinyDB:
  return _database


- def _checkClientInstance() -> None:
+ def _checkClientInstance(key: str) -> None:
  global _client

  if not _client:
- _client = PerplexityClient(key = PERPLEXITY_API_KEY, endpoint = PERPLEXITY_API_URL)
+ _client = PerplexityClient(key = key, endpoint = PERPLEXITY_API_URL)
  _client.model = PERPLEXITY_DEFAULT_MODEL


- def runQuery(query: str, nick: str = None, fileNameDB: str = None, responseLength: int = MAX_RESPONSE_LENGTH) -> str:
+ def runQuery(query: str, nick: str = None, fileNameDB: str = None, responseLength: int = MAX_RESPONSE_LENGTH, key: str = None) -> str:
  """
  Run a query against the LLM engine using the PerplexipyClient, and return the
  query result in a string.
@@ -87,25 +87,36 @@ def runQuery(query: str, nick: str = None, fileNameDB: str = None, responseLengt
  responseLength
  The maximum response length requested from the AI provider. See `MAX_RESPONSE_LENGTH`.

+ key
+ The LLM service provider API key.
+
  Returns
  -------
  A string with the response if the service found a reasonable and convenient
  one, or the text of an Error and the possible cause, as reported by the
  Python run-time.

+ Raises
+ ------
+ `SopelAIError` if the `key` is empty or if the query is invalid. The string
+ message in the error reflects the cause.
+
  ---
  """
+ if not key:
+ raise SopelAIError('key argument cannot be empty - set the LLM service API key')
+
  _checkDB(fileNameDB)
- model = getModelForUser(nick, fileNameDB)
+ model = getModelForUser(nick, fileNameDB, key)
  if not nick or model == DEFAULT_LLM:
- _checkClientInstance()
+ _checkClientInstance(key)
  client = _client
  else:
  client = _clientCache[nick]

  try:
  if not query:
- raise M0tokoError('query parameter cannot be empty')
+ raise SopelAIError('query parameter cannot be empty')
  query = 'Brief answer in %s characters or less to: "%s". Include one URL in the response and strip off all Markdown and hashtags.' % (responseLength, query)
  result = client.query(query).replace('\n', '')
  except Exception as e:
@@ -114,7 +125,7 @@ def runQuery(query: str, nick: str = None, fileNameDB: str = None, responseLengt
  return result


- def modelsList() -> list:
+ def modelsList(key: str = None) -> list:
  """
  Returns a list of all available models so that they can be used for
  requesting a specific one in another command.
@@ -125,26 +136,37 @@ def modelsList() -> list:
  order depends on what the underlying API reports, and it's unlikely to
  change between calls.

- Other M0toko functions will use the index to refer to a model in the
+ Other SopelAI functions will use the index to refer to a model in the
  collection.

+ Raises
+ ------
+ `SopelAIError` if the `key` is empty or if the query is invalid. The string
+ message in the error reflects the cause.
+
  ---
  """
- _checkClientInstance()
+ if not key:
+ raise SopelAIError('key argument cannot be empty - set the LLM service API key')
+
+ _checkClientInstance(key)

  return sorted(list(_client.models.keys()))


- def versionInfo() -> str:
- _checkClientInstance()
+ def versionInfo(key: str = None) -> str:
+ if not key:
+ raise SopelAIError('key argument cannot be empty - set the LLM service API key')
+
+ _checkClientInstance(key)
  return 'sopel_ai v%s using %s' % (__VERSION__, '.'.join([_client.__class__.__module__, _client.__class__.__name__]))


- def setModelForUser(modelID: int, nick: str, fileNameDB: str) -> str:
+ def setModelForUser(modelID: int, nick: str, fileNameDB: str, key = None) -> str:
  """
  Set the model associated with `modelID` for processing requests from `nick`.
  The `modelID` is the index into the `models` object returned by
- `motoko.modelsList()`, from zero.
+ `sopel_ai.modelsList()`, from zero.

  Arguments
  ---------
@@ -158,6 +180,9 @@ def setModelForUser(modelID: int, nick: str, fileNameDB: str) -> str:
  fileNameDB
  The path to the database in the file system. Can be absolute or relative.

+ key
+ The LLM service provider API key.
+
  The function assumes that `nick` represents a valid user /nick because Sopel
  enforces that the exists and is registered in the server.

@@ -167,14 +192,14 @@ def setModelForUser(modelID: int, nick: str, fileNameDB: str) -> str:

  Raises
  ------
- `motoko.errors.M0tokoError` if the arguments are invalid or out of range.
+ `sopel_ai.errors.SopelAIError` if the arguments are invalid or out of range.

  ---
  """
  _checkDB(fileNameDB)
- models= modelsList()
+ models= modelsList(key)
  if modelID not in range(len(models)):
- raise M0tokoError('modelID outside of available models index range')
+ raise SopelAIError('modelID outside of available models index range')

  Preference = Query()

@@ -186,7 +211,7 @@ def setModelForUser(modelID: int, nick: str, fileNameDB: str) -> str:
  return models[modelID]


- def getModelForUser(nick: str, fileNameDB: str) -> str:
+ def getModelForUser(nick: str, fileNameDB: str, key = None) -> str:
  """
  Get the model name for the user with `nick`.

@@ -198,19 +223,29 @@ def getModelForUser(nick: str, fileNameDB: str) -> str:
  fileNameDB
  The path to the database in the file system. Can be absolute or relative.

+ key
+ The LLM service provider API key.
+
  Returns
  -------
  A string representing the model name, if one exists in the database
- associated with the user, `motoko.DEFAULT_LLM` otherwise.
+ associated with the user, `sopel_ai.DEFAULT_LLM` otherwise.
+
+ Raises
+ ------
+ `sopel_ai.errors.SopelAIError` if the arguments are invalid or out of range.

  ---
  """
+ if not key:
+ raise SopelAIError('key argument cannot be empty - set the LLM service API key')
+
  _checkDB(fileNameDB)
  Preference = Query()
  preference = _database.search(Preference.nick == nick)
  if preference:
  model = preference[0]['model']
- client = PerplexityClient(key = PERPLEXITY_API_KEY, endpoint = PERPLEXITY_API_URL)
+ client = PerplexityClient(key = key, endpoint = PERPLEXITY_API_URL)
  client.model = model
  _clientCache[nick] = client
  return model
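Taken together, these hunks move API-key resolution from PerplexiPy's PERPLEXITY_API_KEY environment lookup to an explicit `key` argument on every public call. A minimal usage sketch under that assumption; the key value and database path below are placeholders, not values from the package:

```python
from sopel_ai import core
from sopel_ai.errors import SopelAIError

API_KEY = 'pplx-0000000000000000'      # placeholder - use a real PerplexityAI key
DB_FILE = './sopel_ai-userprefs.json'  # placeholder TinyDB path

try:
    # Every public call now receives the key explicitly instead of relying on
    # the PERPLEXITY_API_KEY environment variable.
    print(core.versionInfo(key = API_KEY))
    print(core.modelsList(key = API_KEY))
    print(core.runQuery('What is Sopel?', nick = 'someNick', fileNameDB = DB_FILE, key = API_KEY))
except SopelAIError as e:
    # Raised when the key or the query is empty.
    print('configuration error: %s' % e)
```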
sopel_ai/errors.py CHANGED
@@ -1,7 +1,7 @@
  # See: https://raw.githubusercontent.com/pr3d4t0r/sopel_ai/master/LICENSE.txt


- class M0tokoError(Exception):
+ class SopelAIError(Exception):
  def __init__(self, exceptionInfo):
  super().__init__(exceptionInfo)

sopel_ai/plugin.py CHANGED
@@ -1,17 +1,16 @@
  # See: https://raw.githubcontent.com/pr3d4t0r/sopel_ai/master/LICENSE.txt
  """
- This module is intended for interfacing with Sopel and there are no
- user-callable objects, functions defined in it. If in doubt, user the Force and
- read the Source.
+ Sopel AI interactive models service
  """

+ from sopel import formatting
+ from sopel import plugin
  from sopel.bot import Sopel
  from sopel.bot import SopelWrapper
  from sopel.config import Config
- from sopel import formatting
- from sopel import plugin
  from sopel.trigger import Trigger
  from sopel_ai.config import SopelAISection
+ from sopel_ai.core import DEFAULT_API_KEY
  from sopel_ai.core import DEFAULT_LLM
  from sopel_ai.core import DEFAULT_LLM_PROVIDER
  from sopel_ai.core import DEFAULT_LLM_SERVICE
@@ -26,6 +25,12 @@ from sopel_ai.core import versionInfo

  import os

+ """
+ This module is intended for interfacing with Sopel and there are no
+ user-callable objects, functions defined in it. If in doubt, user the Force and
+ read the Source.
+ """
+

  # +++ constants +++

@@ -49,6 +54,7 @@ def configure(config: Config) -> None:
  """
  config.define_section('sopel_ai', SopelAISection)
  config.sopel_ai.configure_setting('llm_engine', 'Set the LLM engine', default = DEFAULT_LLM)
+ config.sopel_ai.configure_setting('llm_key', 'Set the API key', default = DEFAULT_API_KEY)
  config.sopel_ai.configure_setting('llm_provider', 'Set the LLM provider name', default = DEFAULT_LLM_PROVIDER)
  config.sopel_ai.configure_setting('llm_service', 'Set the LLM service URL', default = DEFAULT_LLM_SERVICE)
  config.sopel_ai.configure_setting('logLevel', 'Set the log level', default = DEFAULT_LOG_LEVEL)
@@ -66,7 +72,7 @@ def _queryCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  return

  # TODO: Log this
- bot.reply(runQuery(trigger.group(2), trigger.nick, fileNameDB = _USER_DB_FILE))
+ bot.reply(runQuery(trigger.group(2), trigger.nick, fileNameDB = _USER_DB_FILE, key = bot.config.sopel_ai.llm_key))


  @plugin.commands('qpm', 'llmqpm')
@@ -80,7 +86,12 @@ def _queryCommandPrivateMessage(bot: SopelWrapper, trigger: Trigger) -> None:
  bot.reply('No search term. Usage: {}qpm Some question about anything'.format(bot.config.core.help_prefix))
  return

- bot.say(runQuery(trigger.group(2), trigger.nick), trigger.nick, fileNameDB = _USER_DB_FILE, responseLength = _PRIVATE_MESSAGE_RESPONSE_LENGTH)
+ bot.say(runQuery(
+ trigger.group(2),
+ trigger.nick,
+ fileNameDB = _USER_DB_FILE,
+ responseLength = _PRIVATE_MESSAGE_RESPONSE_LENGTH,
+ key = bot.config.sopel_ai.llm_key), trigger.nick)


  @plugin.commands('mver')
@@ -89,7 +100,7 @@ def _queryCommandPrivateMessage(bot: SopelWrapper, trigger: Trigger) -> None:
  @plugin.require_account(message = 'You must be a registered to use this command.', reply = True)
  @plugin.thread(True)
  def _versionCommand(bot: SopelWrapper, trigger: Trigger) -> None:
- bot.reply(versionInfo())
+ bot.reply(versionInfo(key = bot.config.sopel_ai.llm_key))


  @plugin.commands('models')
@@ -98,7 +109,7 @@ def _versionCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  @plugin.require_account(message = 'You must be a registered to use this command.', reply = True)
  @plugin.thread(True)
  def _modelsCommand(bot: SopelWrapper, trigger: Trigger) -> None:
- models = sorted(modelsList())
+ models = sorted(modelsList(key = bot.config.sopel_ai.llm_key))
  s = ''
  for index in range(len(models)):
  s += '[%d] %s; ' % (index+1, models[index])
@@ -115,13 +126,13 @@ def _setModelCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  modelID = int(trigger.group(2))
  except:
  modelID = -1
- models = modelsList()
+ models = modelsList(bot.config.sopel_ai.llm_key)
  if modelID not in range(1, len(models)+1):
  message = 'Invalid model ID; must be in range %s. Usage: {}setmodel n, where n ::= integer' % ('1 - %d' % len(models))
  bot.reply(message.format(bot.config.core.help_prefix))
  else:
  effectiveModelID = modelID-1
- effectiveModel = setModelForUser(effectiveModelID, trigger.nick, _USER_DB_FILE)
+ effectiveModel = setModelForUser(effectiveModelID, trigger.nick, _USER_DB_FILE, key = bot.config.sopel_ai.llm_key)
  bot.reply('All your future interactions will use the %s model.' % effectiveModel)


@@ -131,7 +142,7 @@ def _setModelCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  @plugin.require_account(message = 'You must be a registered to use this command.', reply = True)
  @plugin.thread(True)
  def _getModelCommand(bot: SopelWrapper, trigger: Trigger) -> None:
- bot.reply(getModelForUser(trigger.nick, _USER_DB_FILE))
+ bot.reply(getModelForUser(trigger.nick, _USER_DB_FILE, key = bot.config.sopel_ai.llm_key))


  @plugin.commands('mymodel')
@@ -146,12 +157,12 @@ def _myModelCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  _setModelCommand(bot, trigger)


- @plugin.commands('bug', 'feature', 'req')
- @plugin.example('.bug|.feature|.req Displays the URL for opening a GitHub issues request')
+ @plugin.commands('reqai')
+ @plugin.example('.reqai Displays the URL for opening a GitHub features/bug/issues request for sopel_ai')
  @plugin.output_prefix(_PLUGIN_OUTPUT_PREFIX)
  @plugin.require_account(message = 'You must be a registered to use this command.', reply = True)
  @plugin.thread(True)
  def _reqCommand(bot: SopelWrapper, trigger: Trigger) -> None:
  locator = formatting.bold(GITHUB_NEW_ISSUE_URL)
- bot.reply('SopelAI version %s. Enter your bug report or feature request at this URL: %s' % (__VERSION__, locator))
+ bot.reply('SopelAI version %s. Enter your feature request or bug report at this URL: %s' % (__VERSION__, locator))

@@ -1,8 +1,9 @@
  Metadata-Version: 2.1
  Name: sopel-ai
- Version: 1.0.14
+ Version: 1.1.1
  Summary: Sopel AI - an LLM enhanced chat bot plug-in
  Author-email: The SopelAI team <sopel_ai@cime.net>
+ License: BSD-3-Clause
  Project-URL: Homepage, https://github.com/pr3d4t0r/sopel_ai
  Project-URL: Bug Tracker, https://github.com/pr3d4t0r/sopel_ai/issues
  Keywords: ai,bot,irc,llm,plugin,sopel
@@ -23,7 +24,7 @@ Requires-Dist: perplexipy
  Requires-Dist: sopel (>=7.1)
  Requires-Dist: tinydb

- % sopel_ai(1) Version 1.0.14 chatbot plugin
+ % sopel_ai(1) Version 1.1.1 chatbot plugin

  Name
  ====
@@ -105,8 +106,8 @@ The bot produces a numbered list of supported models by issuing:
  `.models`

  Users are welcome to change the default model to one of those listed by issuing
- the `.mymodel` command followed by the item number for the desired model from the
- list:
+ the `.mymodel` command followed by the item number for the desired model from
+ the list:

  `.mymodel 1`

@@ -114,6 +115,15 @@ Users may request private instead of in-channel responses:

  `.qpm Quote the Three Laws of Robotics and give me examples.`

+ Responses generated by making `.q` queries are expected to be short or are
+ trunked at 480 characters. They are intended to appear in-channel and to be as
+ brief as possible.
+
+ Responses generated from a `.qpm` query are expected to be long and detailed,
+ with a 16 KB length limit, span multpile messages (due to ircv3 limitations),
+ and `sopel_ai` presents them to the user in private message, regardless of
+ whether they were issued from a channel or a direct message.
+
  Users can query the bot plugin and AI provider using:

  `.mver`
@@ -127,13 +137,65 @@ support other providers.

  API Key
  =======
- All AI services providers require an API key for access. This version of
- Sopel AI uses one environment variable and two mechanisms for resolving it:
+ All AI services providers require an API key for access. The API key is
+ configured via:
+
+ `sopel config`
+
+ Or edit this section in the Sopel configuration file:
+
+ ```ini
+ [sopel_ai]
+ .
+ .
+ llm_key = pplx-3a45enteryourkeykere
+ ```
+
+
+ Docker
+ ======
+ Sopel AI is dockerized and available from Docker Hub as pr3d4t0r/sopel_ai. The
+ version tag is the same as the latest version number for Sopel AI.
+
+ The examples in this section assume execution from the local file system. Adapt
+ as needed to run in a Kubernets cluster or other deployment method.
+
+
+ ### First time

- `export PERPLEXITY_API_KEY="pplx-2a45baaf"`
+ The Sopel + AI configuration file must be created:
+
+ ```bash
+ docker run -ti -v ${HOME}/sopel_ai_data:/home/sopel_ai \
+ pr3d4t0r/sopel_ai:latest \
+ sopel configure
+ ```
+
+ The API key and other relevant configuration data must be provided at this time.
+ `$HOME/sopel_ai_data` is volume mapped to the container's `/home/sopel_ai/.sopel
+ directory. Ensure that your host has write permissions in the shared volume.
+
+ The `pr3d4t0r/sopel_ai:latest` image is used if no version is specified. The
+ image update policy is left to the sysops and is not automatic.
+
+ Once `$HOME/sopel_ai_data` exists it's possible to copy the contents of a
+ different `~/.sopel` directory to it and use is as the configuration and Sopel
+ AI database store.
+
+
+ ### Starting Sopel AI
+
+ A Docker Compose file is provided as an example of how to start the service,
+ <a href='./dockerized/docker-compose.yaml' target='_blank'>docker-file.yaml</a>. With this Docker Compose
+ file in the current directory, start the service with:
+
+ ```bash
+ docker-compose up [-d] sopel_ai
+
+ ```

- Or use the `.env` file to store this and other secrets. The underlying
- PerplexiPy module uses `dotenv` package for secrets resolution.
+ The `-d` parameter daemonizes the service. Without it, the service will start
+ and display its output in the current console.


  License
@@ -0,0 +1,11 @@
+ sopel_ai/__init__.py,sha256=UXSuXZj_p860rLAiewSe5nbKg-qbIEcXq6jkqYmaMnI,2531
+ sopel_ai/config.py,sha256=y0vbvfeDjjfLUOvTJ3W56mAGVpJxeizrXJbCaFW4ZXk,867
+ sopel_ai/core.py,sha256=r5lnH8lcXenc2Ln3Qha2hoAggWUP5ZyZniZrdl9IIsQ,7240
+ sopel_ai/errors.py,sha256=XMVgFk4Rw64Z0UO3ZInp-N6LP0GRG8NFIuXKnhu5rLo,192
+ sopel_ai/plugin.py,sha256=51mYp6B_mUtgS6mL-IMhK8Qiz_Rw_qAwmTLdML-qQ2o,6800
+ sopel_ai-1.1.1.dist-info/LICENSE.txt,sha256=I8aHapysmbM9F3y-rUfp011GQoosNO5L8pzl7IKgPnE,1531
+ sopel_ai-1.1.1.dist-info/METADATA,sha256=9_KZvSmAhclhUN0B1SxFnO03tTxzJcfQrN8cVA1olYQ,6200
+ sopel_ai-1.1.1.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+ sopel_ai-1.1.1.dist-info/entry_points.txt,sha256=7Juxcn6L4j6F83TjkviiTwiyXLM4gZxAAXFQDR2G_m4,43
+ sopel_ai-1.1.1.dist-info/top_level.txt,sha256=kpNMzNEGbhCXkyn7oc3uQPmrX1J6qLxn59IcZBpwSYg,9
+ sopel_ai-1.1.1.dist-info/RECORD,,
@@ -1,11 +0,0 @@
- sopel_ai/__init__.py,sha256=qH1q1GP7whFC-A28pYlcg2YIb9WGv7TFnUykJqXp6Lc,2473
- sopel_ai/config.py,sha256=Kj8yacVbjjC4FRz7ser16Hf4fS9_XwvXK3f5dPSpZ0I,718
- sopel_ai/core.py,sha256=Jy7zyzc_mbvs_9nUeetH-hlAAus087zfXW9_h3Vsv4Q,6181
- sopel_ai/errors.py,sha256=fgc9mjYJMnq1AxFXOwiknFwnJBJTWRNkZIHzRKS9m5g,191
- sopel_ai/plugin.py,sha256=tCsTsqQsuBC4orcEZCQu2d2iS09NvVPBpook0fV7w5k,6306
- sopel_ai-1.0.14.dist-info/LICENSE.txt,sha256=I8aHapysmbM9F3y-rUfp011GQoosNO5L8pzl7IKgPnE,1531
- sopel_ai-1.0.14.dist-info/METADATA,sha256=_OcL-mDwQVViDahpFK7gcI7fZMnaKuiTWUYg-f62ENo,4324
- sopel_ai-1.0.14.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
- sopel_ai-1.0.14.dist-info/entry_points.txt,sha256=7Juxcn6L4j6F83TjkviiTwiyXLM4gZxAAXFQDR2G_m4,43
- sopel_ai-1.0.14.dist-info/top_level.txt,sha256=kpNMzNEGbhCXkyn7oc3uQPmrX1J6qLxn59IcZBpwSYg,9
- sopel_ai-1.0.14.dist-info/RECORD,,