codemie-test-harness 0.1.136__py3-none-any.whl → 0.1.137__py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of codemie-test-harness might be problematic.
- codemie_test_harness/tests/__init__.py +1 -0
- codemie_test_harness/tests/conftest.py +17 -0
- codemie_test_harness/tests/service/test_assistant_service.py +349 -379
- codemie_test_harness/tests/service/test_datasource_service.py +276 -292
- codemie_test_harness/tests/service/test_integration_service.py +133 -122
- codemie_test_harness/tests/service/test_llm_service.py +16 -17
- codemie_test_harness/tests/service/test_task_service.py +108 -120
- codemie_test_harness/tests/service/test_user_service.py +36 -19
- codemie_test_harness/tests/service/test_workflow_execution_service.py +142 -169
- codemie_test_harness/tests/service/test_workflow_service.py +145 -144
- codemie_test_harness/tests/utils/assistant_utils.py +39 -4
- codemie_test_harness/tests/utils/llm_utils.py +9 -0
- codemie_test_harness/tests/utils/search_utils.py +11 -5
- codemie_test_harness/tests/utils/user_utils.py +9 -0
- codemie_test_harness/tests/utils/workflow_utils.py +34 -6
- {codemie_test_harness-0.1.136.dist-info → codemie_test_harness-0.1.137.dist-info}/METADATA +2 -2
- {codemie_test_harness-0.1.136.dist-info → codemie_test_harness-0.1.137.dist-info}/RECORD +19 -17
- {codemie_test_harness-0.1.136.dist-info → codemie_test_harness-0.1.137.dist-info}/WHEEL +0 -0
- {codemie_test_harness-0.1.136.dist-info → codemie_test_harness-0.1.137.dist-info}/entry_points.txt +0 -0
--- a/codemie_test_harness/tests/service/test_datasource_service.py
+++ b/codemie_test_harness/tests/service/test_datasource_service.py
@@ -1,23 +1,21 @@
-"""Integration tests for LLMService."""
-
 import os
-from time import sleep
-
 import pytest
+from hamcrest import (
+    assert_that,
+    is_not,
+    instance_of,
+    has_property,
+    has_length,
+    greater_than,
+    is_,
+    is_in,
+    all_of,
+    equal_to,
+)
 
-from codemie_sdk import CodeMieClient
 from codemie_sdk.models.datasource import (
     DataSourceType,
     DataSourceStatus,
-    CodeDataSourceRequest,
-    CodeDataSourceType,
-    UpdateCodeDataSourceRequest,
-    ConfluenceDataSourceRequest,
-    UpdateConfluenceDataSourceRequest,
-    UpdateJiraDataSourceRequest,
-    JiraDataSourceRequest,
-    GoogleDataSourceRequest,
-    UpdateGoogleDataSourceRequest,
     Jira,
     Confluence,
     Code,
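The first two hunks replace bare `assert` statements (and the per-type SDK request imports) with PyHamcrest matchers. For orientation, `assert_that(actual, matcher, reason)` raises `AssertionError` with a descriptive message when the matcher fails; a minimal, self-contained example of the style used throughout the new tests (the values are invented for illustration):

    from hamcrest import assert_that, all_of, equal_to, greater_than, has_length, instance_of

    models = ["code", "confluence"]  # hypothetical data, not from the diff
    assert_that(models, all_of(instance_of(list), has_length(greater_than(0))))
    assert_that(models[0], equal_to("code"), "first entry should be 'code'")

In the hunks that follow, a bare `-` marks a removed line whose text the rendered diff did not preserve.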
@@ -25,24 +23,12 @@ from codemie_sdk.models.datasource import (
 from codemie_sdk.models.integration import (
     CredentialTypes,
     CredentialValues,
-    Integration,
     IntegrationType,
 )
 from codemie_test_harness.tests import PROJECT
 from codemie_test_harness.tests.utils.base_utils import get_random_name
 
 
-@pytest.fixture
-def project_name():
-    """Return project name for tests."""
-    return PROJECT
-
-
-@pytest.fixture
-def datasource_name():
-    return get_random_name()
-
-
 @pytest.fixture
 def integration_config():
     """Configuration for different integration types."""
@@ -68,72 +54,33 @@ def integration_config():
 class TestDatasourceBase:
     """Base class for datasource tests with common utility methods."""
 
-    @staticmethod
-    def create_integration(
-        client: CodeMieClient,
-        project_name: str,
-        cred_type: CredentialTypes,
-        config: dict,
-    ) -> Integration:
-        """Create integration with given credentials."""
-        integration_alias = get_random_name()
-        credential_values = [
-            CredentialValues(key=k, value=v) for k, v in config[cred_type].items()
-        ]
-
-        integration_request = Integration(
-            project_name=project_name,
-            credential_type=cred_type,
-            credential_values=credential_values,
-            alias=integration_alias,
-            setting_type=IntegrationType.USER,
-        )
-
-        client.integrations.create(integration_request)
-        sleep(5)
-        return client.integrations.get_by_alias(integration_alias)
-
-    @staticmethod
-    def cleanup_datasource(client: CodeMieClient, datasource_id: str):
-        """Clean up datasource and verify deletion."""
-        try:
-            client.datasources.delete(datasource_id)
-            sleep(5)
-            with pytest.raises(Exception) as exc_info:
-                client.datasources.get(datasource_id)
-            assert "503" in str(exc_info.value)
-        except Exception as e:
-            pytest.fail(f"Failed to clean up datasource: {str(e)}")
-
     @staticmethod
     def verify_datasource_exists(
-
+        search_utils,
         name: str,
         project_name: str,
         datasource_type: DataSourceType,
     ):
         """Verify datasource exists with given parameters."""
-
-        datasources = client.datasources.list(
+        datasources = search_utils.list_data_sources(
             datasource_types=datasource_type, projects=project_name
         )
         datasource = next((ds for ds in datasources if ds.name == name), None)
-
-
-
+        assert_that(datasource, is_not(None))
+        assert_that(datasource.project_name, equal_to(project_name))
+        assert_that(datasource.type, equal_to(datasource_type))
         return datasource
 
     @staticmethod
     def verify_datasource_updated(
-
+        datasource_utils, datasource_id: str, expected_values: dict
     ):
         """
         Verify datasource was updated with expected values.
         Handles both root level fields and nested objects (Jira, Confluence).
         """
-
-        updated_datasource
-        assert updated_datasource is not None
+        updated_datasource = datasource_utils.get_datasource(datasource_id)
+        assert_that(updated_datasource, is_not(None))
 
         field_mapping = {
             "jql": ("jira", Jira, "jql"),
@@ -148,15 +95,19 @@ class TestDatasourceBase:
                 nested_obj = getattr(updated_datasource, attr, None)
                 if nested_obj is not None and isinstance(nested_obj, expected_class):
                     actual_value = getattr(nested_obj, sub_attr, None)
-
-
+                    assert_that(
+                        actual_value,
+                        equal_to(value),
+                        f"Expected {key} to be {value}, got {actual_value}",
                     )
                 else:
                     pytest.fail(f"Unhandled field in verification: {key}")
             else:
                 actual_value = getattr(updated_datasource, key, None)
-
-
+                assert_that(
+                    actual_value,
+                    equal_to(value),
+                    f"Expected {key} to be {value}, got {actual_value}",
                 )
 
         return updated_datasource
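In the two hunks above, `field_mapping` encodes where each updatable field lives: the key maps to an attribute name on the datasource, the nested model class expected there, and the sub-attribute to read. A short sketch of the lookup this implies, using the one mapping entry visible in the diff:

    # Resolving "jql" through field_mapping (mirrors the hunks above).
    field_mapping = {"jql": ("jira", Jira, "jql")}
    attr, expected_class, sub_attr = field_mapping["jql"]
    nested_obj = getattr(updated_datasource, attr, None)    # the Jira sub-model
    if isinstance(nested_obj, expected_class):
        actual_value = getattr(nested_obj, sub_attr, None)  # the JQL string itself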
@@ -165,13 +116,16 @@ class TestDatasourceBase:
 class TestDatasources(TestDatasourceBase):
     """Tests for datasource operations."""
 
-    def test_list_datasources(self,
-        """Test successful retrieval of available datasources models."""
+    def test_list_datasources(self, search_utils):
         datasource_types = [DataSourceType.CODE, DataSourceType.CONFLUENCE]
-
-
-
-
+        datasource_models = search_utils.list_data_sources(
+            datasource_types=datasource_types
+        )
+        assert_that(
+            datasource_models, all_of(instance_of(list), has_length(greater_than(0)))
+        )
+        for model in datasource_models:
+            assert_that(model.type, is_in(datasource_types))
 
     @pytest.mark.parametrize(
         "datasource_type",
@@ -183,12 +137,15 @@ class TestDatasources(TestDatasourceBase):
             DataSourceType.GOOGLE,
         ],
     )
-    def
-
-
-
-
-
+    def test_list_datasources_filtered_by_type(self, search_utils, datasource_type):
+        datasource_models = search_utils.list_data_sources(
+            datasource_types=datasource_type
+        )
+        assert_that(
+            datasource_models, all_of(instance_of(list), has_length(greater_than(0)))
+        )
+        for model in datasource_models:
+            assert_that(model.type, is_in(datasource_type))
 
     @pytest.mark.parametrize(
         "status",
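These list tests call `search_utils.list_data_sources` with `datasource_types`, and later hunks also pass `projects`, `status`, and `per_page`; since the old code called `client.datasources.list` directly, the new helper is presumably a thin wrapper over that SDK method. A sketch under that assumption (the wrapper body is not part of this diff; only the keyword names are taken from the call sites):

    # Hypothetical search_utils helper; the kwargs mirror the test call sites.
    def list_data_sources(client, datasource_types=None, projects=None,
                          status=None, per_page=None):
        return client.datasources.list(
            datasource_types=datasource_types,
            projects=projects,
            status=status,
            per_page=per_page,
        )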
@@ -198,194 +155,179 @@ class TestDatasources(TestDatasourceBase):
|
|
|
198
155
|
DataSourceStatus.FAILED,
|
|
199
156
|
],
|
|
200
157
|
)
|
|
201
|
-
def
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
158
|
+
def test_list_datasources_filtered_by_status(self, search_utils, status):
|
|
159
|
+
datasource_models = search_utils.list_data_sources(status=status)
|
|
160
|
+
assert_that(datasource_models, all_of(instance_of(list)))
|
|
161
|
+
for model in datasource_models:
|
|
162
|
+
assert_that(
|
|
163
|
+
model.status,
|
|
164
|
+
equal_to(status),
|
|
165
|
+
f"Datasource {model.name} has status {model.status}, expected {status}",
|
|
166
|
+
)
|
|
207
167
|
|
|
208
168
|
def test_create_update_code_datasource(
|
|
209
|
-
self,
|
|
169
|
+
self,
|
|
170
|
+
integration_utils,
|
|
171
|
+
llm_utils,
|
|
172
|
+
search_utils,
|
|
173
|
+
integration_config,
|
|
174
|
+
datasource_utils,
|
|
210
175
|
):
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
176
|
+
integration = integration_utils.create_integration(
|
|
177
|
+
credential_type=CredentialTypes.GIT,
|
|
178
|
+
credential_values=[
|
|
179
|
+
CredentialValues(key=k, value=v)
|
|
180
|
+
for k, v in integration_config[CredentialTypes.GIT].items()
|
|
181
|
+
],
|
|
182
|
+
project_name=PROJECT,
|
|
214
183
|
)
|
|
215
|
-
embeddings_models =
|
|
216
|
-
|
|
184
|
+
embeddings_models = llm_utils.list_embedding_llm_models()
|
|
185
|
+
assert_that(embeddings_models, has_length(greater_than(0)))
|
|
217
186
|
embeddings_model = embeddings_models[0]
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
datasource = self.verify_datasource_exists(
|
|
234
|
-
client, datasource_name, project_name, DataSourceType.CODE
|
|
235
|
-
)
|
|
187
|
+
create_request_params = {
|
|
188
|
+
"name": integration.alias,
|
|
189
|
+
"project_name": PROJECT,
|
|
190
|
+
"description": "Code datasource description",
|
|
191
|
+
"link": os.getenv("GITLAB_PROJECT"),
|
|
192
|
+
"branch": "main",
|
|
193
|
+
"index_type": DataSourceType.CODE,
|
|
194
|
+
"embeddings_model": embeddings_model.base_name,
|
|
195
|
+
"setting_id": integration.id,
|
|
196
|
+
}
|
|
197
|
+
created = datasource_utils.create_code_datasource(**create_request_params)
|
|
198
|
+
assert_that(created, is_not(None))
|
|
199
|
+
datasource = self.verify_datasource_exists(
|
|
200
|
+
search_utils, integration.alias, PROJECT, DataSourceType.CODE
|
|
201
|
+
)
|
|
236
202
|
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
203
|
+
update_request_params = {
|
|
204
|
+
"link": os.getenv("GITHUB_PROJECT"),
|
|
205
|
+
"branch": "master",
|
|
206
|
+
"name": integration.alias,
|
|
207
|
+
"project_name": PROJECT,
|
|
208
|
+
"description": "Updated datasource description",
|
|
209
|
+
}
|
|
244
210
|
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
self.verify_datasource_updated(client, datasource.id, update_request_params)
|
|
253
|
-
finally:
|
|
254
|
-
# Cleanup
|
|
255
|
-
if datasource:
|
|
256
|
-
self.cleanup_datasource(client, datasource.id)
|
|
257
|
-
if integration:
|
|
258
|
-
client.integrations.delete(integration.id)
|
|
211
|
+
updated = datasource_utils.update_code_datasource(
|
|
212
|
+
datasource.id, **update_request_params
|
|
213
|
+
)
|
|
214
|
+
assert_that(updated, is_not(None))
|
|
215
|
+
self.verify_datasource_updated(
|
|
216
|
+
datasource_utils, datasource.id, update_request_params
|
|
217
|
+
)
|
|
259
218
|
|
|
260
219
|
def test_create_update_confluence_datasource(
|
|
261
|
-
self,
|
|
220
|
+
self,
|
|
221
|
+
search_utils,
|
|
222
|
+
integration_utils,
|
|
223
|
+
integration_config,
|
|
224
|
+
datasource_utils,
|
|
262
225
|
):
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
226
|
+
integration = integration_utils.create_user_integration(
|
|
227
|
+
credential_type=CredentialTypes.CONFLUENCE,
|
|
228
|
+
credentials=[
|
|
229
|
+
CredentialValues(key=k, value=v)
|
|
230
|
+
for k, v in integration_config[CredentialTypes.CONFLUENCE].items()
|
|
231
|
+
],
|
|
232
|
+
project_name=PROJECT,
|
|
233
|
+
)
|
|
234
|
+
create_request_params = {
|
|
235
|
+
"name": integration.alias,
|
|
236
|
+
"project_name": PROJECT,
|
|
237
|
+
"description": "Datasource for KB space",
|
|
238
|
+
"cql": os.getenv("CQL"),
|
|
239
|
+
"setting_id": integration.id,
|
|
240
|
+
}
|
|
241
|
+
created = datasource_utils.create_confluence_datasource(**create_request_params)
|
|
242
|
+
assert_that(created, is_not(None))
|
|
243
|
+
datasource = self.verify_datasource_exists(
|
|
244
|
+
search_utils, integration.alias, PROJECT, DataSourceType.CONFLUENCE
|
|
266
245
|
)
|
|
267
|
-
datasource = None
|
|
268
|
-
try:
|
|
269
|
-
create_request_params = {
|
|
270
|
-
"name": datasource_name,
|
|
271
|
-
"project_name": project_name,
|
|
272
|
-
"description": "Datasource for KB space",
|
|
273
|
-
"cql": os.getenv("CQL"),
|
|
274
|
-
"setting_id": integration.id,
|
|
275
|
-
}
|
|
276
|
-
create_datasource_request = ConfluenceDataSourceRequest(
|
|
277
|
-
**create_request_params
|
|
278
|
-
)
|
|
279
|
-
created = client.datasources.create(create_datasource_request)
|
|
280
|
-
assert created is not None
|
|
281
|
-
datasource = self.verify_datasource_exists(
|
|
282
|
-
client, datasource_name, project_name, DataSourceType.CONFLUENCE
|
|
283
|
-
)
|
|
284
246
|
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
}
|
|
247
|
+
update_request_params = {
|
|
248
|
+
"name": integration.alias,
|
|
249
|
+
"description": "Updated datasource description for KB space",
|
|
250
|
+
"cql": "SPACE = MY_KB",
|
|
251
|
+
}
|
|
291
252
|
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
self.verify_datasource_updated(client, datasource.id, update_request_params)
|
|
300
|
-
finally:
|
|
301
|
-
# Cleanup
|
|
302
|
-
if datasource:
|
|
303
|
-
self.cleanup_datasource(client, datasource.id)
|
|
304
|
-
if integration:
|
|
305
|
-
client.integrations.delete(integration.id)
|
|
253
|
+
updated = datasource_utils.update_confluence_datasource(
|
|
254
|
+
datasource.id, **update_request_params
|
|
255
|
+
)
|
|
256
|
+
assert_that(updated, is_not(None))
|
|
257
|
+
self.verify_datasource_updated(
|
|
258
|
+
datasource_utils, datasource.id, update_request_params
|
|
259
|
+
)
|
|
306
260
|
|
|
307
261
|
def test_create_update_jira_datasource(
|
|
308
|
-
self,
|
|
262
|
+
self,
|
|
263
|
+
search_utils,
|
|
264
|
+
integration_utils,
|
|
265
|
+
integration_config,
|
|
266
|
+
datasource_utils,
|
|
309
267
|
):
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
268
|
+
integration = integration_utils.create_integration(
|
|
269
|
+
credential_type=CredentialTypes.JIRA,
|
|
270
|
+
credential_values=[
|
|
271
|
+
CredentialValues(key=k, value=v)
|
|
272
|
+
for k, v in integration_config[CredentialTypes.JIRA].items()
|
|
273
|
+
],
|
|
274
|
+
setting_type=IntegrationType.USER,
|
|
275
|
+
project_name=PROJECT,
|
|
276
|
+
)
|
|
277
|
+
create_request_params = {
|
|
278
|
+
"name": integration.alias,
|
|
279
|
+
"project_name": PROJECT,
|
|
280
|
+
"description": "Jira datasource description",
|
|
281
|
+
"jql": os.getenv("JQL"),
|
|
282
|
+
"setting_id": integration.id,
|
|
283
|
+
}
|
|
284
|
+
created = datasource_utils.create_jira_datasource(**create_request_params)
|
|
285
|
+
assert_that(created, is_not(None))
|
|
286
|
+
datasource = self.verify_datasource_exists(
|
|
287
|
+
search_utils, integration.alias, PROJECT, DataSourceType.JIRA
|
|
313
288
|
)
|
|
314
|
-
datasource = None
|
|
315
|
-
try:
|
|
316
|
-
create_request_params = {
|
|
317
|
-
"name": datasource_name,
|
|
318
|
-
"project_name": project_name,
|
|
319
|
-
"description": "Jira datasource description",
|
|
320
|
-
"jql": os.getenv("JQL"),
|
|
321
|
-
"setting_id": integration.id,
|
|
322
|
-
}
|
|
323
|
-
create_datasource_request = JiraDataSourceRequest(**create_request_params)
|
|
324
|
-
created = client.datasources.create(create_datasource_request)
|
|
325
|
-
assert created is not None
|
|
326
|
-
datasource = self.verify_datasource_exists(
|
|
327
|
-
client, datasource_name, project_name, DataSourceType.JIRA
|
|
328
|
-
)
|
|
329
289
|
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
290
|
+
update_request_params = {
|
|
291
|
+
"name": integration.alias,
|
|
292
|
+
"project_name": PROJECT,
|
|
293
|
+
"description": "Updated Jira datasource description",
|
|
294
|
+
"jql": os.getenv("JQL"),
|
|
295
|
+
}
|
|
336
296
|
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
self.verify_datasource_updated(client, datasource.id, update_request_params)
|
|
345
|
-
finally:
|
|
346
|
-
# Cleanup
|
|
347
|
-
if datasource:
|
|
348
|
-
self.cleanup_datasource(client, datasource.id)
|
|
349
|
-
if integration:
|
|
350
|
-
client.integrations.delete(integration.id)
|
|
297
|
+
updated = datasource_utils.update_jira_datasource(
|
|
298
|
+
datasource.id, **update_request_params
|
|
299
|
+
)
|
|
300
|
+
assert_that(updated, is_not(None))
|
|
301
|
+
self.verify_datasource_updated(
|
|
302
|
+
datasource_utils, datasource.id, update_request_params
|
|
303
|
+
)
|
|
351
304
|
|
|
352
|
-
def test_create_update_google_datasource(
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
create_datasource_request = GoogleDataSourceRequest(**create_request_params)
|
|
365
|
-
created = client.datasources.create(create_datasource_request)
|
|
366
|
-
assert created is not None
|
|
367
|
-
datasource = self.verify_datasource_exists(
|
|
368
|
-
client, datasource_name, project_name, DataSourceType.GOOGLE
|
|
369
|
-
)
|
|
305
|
+
def test_create_update_google_datasource(self, search_utils, datasource_utils):
|
|
306
|
+
create_request_params = {
|
|
307
|
+
"name": get_random_name(),
|
|
308
|
+
"project_name": PROJECT,
|
|
309
|
+
"description": "Google datasource description",
|
|
310
|
+
"google_doc": "https://docs.google.com/document/d/19EXgnFCgJontz0ToCAH6zMGwBTdhi5X97P9JIby4wHs/edit?tab=t.0",
|
|
311
|
+
}
|
|
312
|
+
created = datasource_utils.create_google_doc_datasource(**create_request_params)
|
|
313
|
+
assert_that(created, is_not(None))
|
|
314
|
+
datasource = self.verify_datasource_exists(
|
|
315
|
+
search_utils, created.name, PROJECT, DataSourceType.GOOGLE
|
|
316
|
+
)
|
|
370
317
|
|
|
371
|
-
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
|
|
375
|
-
|
|
318
|
+
update_request_params = {
|
|
319
|
+
"name": created.name,
|
|
320
|
+
"project_name": PROJECT,
|
|
321
|
+
"description": "Updated Google datasource description",
|
|
322
|
+
}
|
|
376
323
|
|
|
377
|
-
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
self.verify_datasource_updated(client, datasource.id, update_request_params)
|
|
385
|
-
finally:
|
|
386
|
-
# Cleanup
|
|
387
|
-
if datasource:
|
|
388
|
-
self.cleanup_datasource(client, datasource.id)
|
|
324
|
+
updated = datasource_utils.update_google_doc_datasource(
|
|
325
|
+
datasource.id, **update_request_params
|
|
326
|
+
)
|
|
327
|
+
assert_that(updated, is_not(None))
|
|
328
|
+
self.verify_datasource_updated(
|
|
329
|
+
datasource_utils, datasource.id, update_request_params
|
|
330
|
+
)
|
|
389
331
|
|
|
390
332
|
@pytest.mark.parametrize(
|
|
391
333
|
"datasource_type",
|
|
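Two notes on the hunks around here. First, the old try/finally blocks that deleted each datasource and integration by hand (`cleanup_datasource`, `client.integrations.delete`) are removed with no replacement visible in this diff, so cleanup presumably moved into fixture teardown; a typical pytest shape for that (an assumption, the fixture bodies are not shown here):

    @pytest.fixture
    def datasource_utils(client):
        utils = DatasourceUtils(client)  # hypothetical helper class
        yield utils                      # hand the helper to the test
        utils.cleanup()                  # hypothetical teardown deleting created datasources

Second, the hunk below leans on two forms of PyHamcrest's property matcher: `has_property(name)` only checks that the attribute exists, while `has_property(name, value)` wraps a plain value in `equal_to`, so `has_property("confluence", None)` asserts the attribute is present and equal to `None`. For example:

    from hamcrest import assert_that, has_property, is_not

    class FakeDatasource:  # toy object for illustration only
        code = "repo"
        confluence = None

    ds = FakeDatasource()
    assert_that(ds, has_property("code"))              # attribute present
    assert_that(ds, has_property("confluence", None))  # present and equal to None
    assert_that(ds, is_not(has_property("jira")))      # attribute absent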
@@ -397,54 +339,96 @@ class TestDatasources(TestDatasourceBase):
             DataSourceType.GOOGLE,
         ],
     )
-    def test_get_datasource_by_id(
-
-
+    def test_get_datasource_by_id(
+        self, datasource_utils, datasource_type, search_utils
+    ):
+        datasources = search_utils.list_data_sources(
             datasource_types=datasource_type, per_page=50
         )
-
-        assert len(datasources) > 0
+        assert_that(datasources, all_of(instance_of(list), has_length(greater_than(0))))
 
         original_datasource = datasources[0]
         datasource_id = original_datasource.id
-        retrieved_datasource =
+        retrieved_datasource = datasource_utils.get_datasource(datasource_id)
 
         # Compare full objects (they should be identical)
-
-
-
-
-
-
-
+        assert_that(
+            retrieved_datasource,
+            all_of(
+                is_not(None),
+            ),
+        )
+        assert_that(retrieved_datasource.id, equal_to(original_datasource.id))
+        assert_that(retrieved_datasource.name, equal_to(original_datasource.name))
+        assert_that(
+            retrieved_datasource.project_name,
+            equal_to(original_datasource.project_name),
+        )
+        assert_that(
+            retrieved_datasource.created_by,
+            equal_to(original_datasource.created_by),
         )
-
-
-
-
-
+        assert_that(
+            retrieved_datasource.shared_with_project,
+            equal_to(original_datasource.shared_with_project),
+        )
+        assert_that(
+            retrieved_datasource.created_date,
+            equal_to(original_datasource.created_date),
+        )
+        assert_that(
+            retrieved_datasource.error_message,
+            equal_to(original_datasource.error_message),
+        )
+        assert_that(
+            retrieved_datasource.processing_info.processed_documents_count,
+            is_not(None),
         )
 
         if datasource_type == DataSourceType.CODE:
-
-
-
-
-
-
+            assert_that(original_datasource.description, is_(None))
+            assert_that(
+                retrieved_datasource,
+                all_of(
+                    has_property("confluence", None),
+                    has_property("jira", None),
+                    has_property("tokens_usage", is_not(None)),
+                    has_property("code"),
+                ),
+            )
+            assert_that(
+                retrieved_datasource.code.link,
+                equal_to(original_datasource.code.link),
+            )
+            assert_that(retrieved_datasource.code.branch, is_not(None))
         elif datasource_type == DataSourceType.CONFLUENCE:
-
-
-
-
-
+            assert_that(
+                retrieved_datasource,
+                all_of(
+                    has_property("code", None),
+                    has_property("jira", None),
+                    has_property("tokens_usage", is_not(None)),
+                    has_property("confluence", is_not(None)),
+                ),
+            )
+            assert_that(retrieved_datasource.confluence.cql, is_not(None))
         elif datasource_type == DataSourceType.JIRA:
-
-
-
-
+            assert_that(
+                retrieved_datasource,
+                all_of(
+                    has_property("code", None),
+                    has_property("confluence", None),
+                    has_property("jira", is_not(None)),
+                ),
+            )
+            assert_that(retrieved_datasource.jira.jql, is_not(None))
         elif datasource_type == DataSourceType.GOOGLE:
-
-
-
-
+            assert_that(
+                retrieved_datasource,
+                all_of(
+                    has_property("code", None),
+                    has_property("confluence", None),
+                    has_property("jira", None),
+                    has_property("google_doc_link", is_not(None)),
+                ),
+            )