ldap-ui 0.9.15__py3-none-any.whl → 0.10.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- ldap_ui/__init__.py +1 -1
- ldap_ui/app.py +63 -212
- ldap_ui/entities.py +62 -0
- ldap_ui/ldap_api.py +419 -352
- ldap_ui/ldap_helpers.py +74 -34
- ldap_ui/schema.py +8 -10
- ldap_ui/statics/assets/index-BxCLA1wZ.js +19 -0
- ldap_ui/statics/assets/index-BxCLA1wZ.js.gz +0 -0
- ldap_ui/statics/assets/{index-qocMa2qY.css → index-CusJ2HRh.css} +1 -1
- ldap_ui/statics/assets/index-CusJ2HRh.css.gz +0 -0
- ldap_ui/statics/index.html +2 -2
- {ldap_ui-0.9.15.dist-info → ldap_ui-0.10.1.dist-info}/METADATA +8 -9
- ldap_ui-0.10.1.dist-info/RECORD +25 -0
- {ldap_ui-0.9.15.dist-info → ldap_ui-0.10.1.dist-info}/WHEEL +1 -1
- ldap_ui/statics/assets/index-CZWuB-hf.js +0 -18
- ldap_ui/statics/assets/index-CZWuB-hf.js.gz +0 -0
- ldap_ui/statics/assets/index-qocMa2qY.css.gz +0 -0
- ldap_ui-0.9.15.dist-info/RECORD +0 -24
- {ldap_ui-0.9.15.dist-info → ldap_ui-0.10.1.dist-info}/entry_points.txt +0 -0
- {ldap_ui-0.9.15.dist-info → ldap_ui-0.10.1.dist-info}/licenses/LICENSE.txt +0 -0
- {ldap_ui-0.9.15.dist-info → ldap_ui-0.10.1.dist-info}/top_level.txt +0 -0
ldap_ui/ldap_api.py
CHANGED
@@ -1,51 +1,67 @@
 """
 ReST endpoints for LDAP access.
 
-Directory operations are
-
+Directory operations are exposed to the frontend
+by a hand-knit ReST API, responses are usually converted to JSON.
 
 Asynchronous LDAP operations are used as much as possible.
 """
 
 import base64
 import io
+from enum import StrEnum
 from http import HTTPStatus
-from typing import
+from typing import Annotated, cast
 
 import ldif
+from fastapi import (
+    APIRouter,
+    Body,
+    Depends,
+    File,
+    HTTPException,
+    Response,
+    UploadFile,
+)
+from fastapi.responses import PlainTextResponse
+from fastapi.security import HTTPBasic, HTTPBasicCredentials
 from ldap import (
-    INVALID_CREDENTIALS,  #
-    SCOPE_BASE,  #
-    SCOPE_ONELEVEL,  #
-    SCOPE_SUBTREE,  #
+    INVALID_CREDENTIALS,  # type: ignore
+    SCOPE_BASE,  # type: ignore
+    SCOPE_ONELEVEL,  # type: ignore
+    SCOPE_SUBTREE,  # type: ignore
 )
 from ldap.ldapobject import LDAPObject
 from ldap.modlist import addModlist, modifyModlist
 from ldap.schema import SubSchema
 from ldap.schema.models import AttributeType, LDAPSyntax, ObjectClass
-from pydantic import BaseModel, Field, TypeAdapter
-from starlette.datastructures import UploadFile
-from starlette.exceptions import HTTPException
-from starlette.requests import Request
-from starlette.responses import JSONResponse, PlainTextResponse, Response
-from starlette.routing import Router
 
 from . import settings
+from .entities import (
+    Attributes,
+    ChangedAttributes,
+    ChangePasswordRequest,
+    Entry,
+    Meta,
+    Range,
+    SearchResult,
+    TreeItem,
+)
 from .ldap_helpers import (
     WITH_OPERATIONAL_ATTRS,
+    LdapEntry,
+    anonymous_user_search,
     empty,
     get_entry_by_dn,
+    get_schema,
     ldap_connect,
-
+    results,
     unique,
 )
 from .schema import ObjectClass as OC
-from .schema import frontend_schema
-
-__all__ = ("api",)
-
+from .schema import Schema, frontend_schema
 
-NO_CONTENT = Response(status_code=HTTPStatus.NO_CONTENT
+NO_CONTENT = Response(status_code=HTTPStatus.NO_CONTENT)
 
 # Special fields
 PHOTOS = ("jpegPhoto", "thumbnailPhoto")
@@ -55,266 +71,379 @@ PASSWORDS = ("userPassword",)
 OCTET_STRING = "1.3.6.1.4.1.1466.115.121.1.40"
 INTEGER = "1.3.6.1.4.1.1466.115.121.1.27"
 
-
-api = Router()
+api = APIRouter(prefix="/api")
 
 
-
-
-
-
+async def get_root_dse(connection: LDAPObject):
+    "Auto-detect base DN and LDAP schema from root DSE"
+    result = await unique(
+        connection,
+        connection.search(
+            "",
+            SCOPE_BASE,
+            attrlist=WITH_OPERATIONAL_ATTRS,
+        ),
+    )
+    if not settings.BASE_DN:
+        base_dns = result.attr("namingContexts")
+        assert len(base_dns) == 1, f"No unique base DN: {base_dns}"
+        settings.BASE_DN = base_dns[0]
 
+    if not settings.SCHEMA_DN:
+        schema_dns = result.attr("subschemaSubentry")
+        assert schema_dns, "Cannot determine LDAP schema"
+        settings.SCHEMA_DN = schema_dns[0]
 
-class TreeItem(BaseModel):
-    dn: str
-    structuralObjectClass: str
-    hasSubordinates: bool
-    level: int
 
+async def authenticated(
+    credentials: Annotated[HTTPBasicCredentials, Depends(HTTPBasic())],
+    connection: Annotated[LDAPObject, Depends(ldap_connect)],
+) -> LDAPObject:
+    "Authenticate against the directory"
 
-
-
-    "List directory entries"
+    if not settings.BASE_DN or not settings.SCHEMA_DN:
+        await get_root_dse(connection)
 
-
-
-
-    if basedn == "base":
-        scope = SCOPE_BASE
-        basedn = settings.BASE_DN
+    # Hard-wired credentials
+    dn = settings.GET_BIND_DN()
+    password = settings.GET_BIND_PASSWORD()
 
-
-
-
-
-
-
-
-
-
+    # Search for basic auth user
+    if not dn:
+        password = credentials.password
+        dn = settings.GET_BIND_PATTERN(
+            credentials.username
+        ) or await anonymous_user_search(connection, credentials.username)
+
+    if dn:  # Log in
+        await empty(connection, connection.simple_bind(dn, password))
+        return connection
+
+    raise INVALID_CREDENTIALS([{"desc": f"Invalid credentials for DN: {dn}"}])
+
+
+AuthenticatedConnection = Annotated[LDAPObject, Depends(authenticated)]
+
+
+class Tag(StrEnum):
+    EDITING = "Editing"
+    MISC = "Misc"
+    NAVIGATION = "Navigation"
+
+
+@api.get(
+    "/tree/base",
+    tags=[Tag.NAVIGATION],
+    operation_id="get_base_entry",
+    include_in_schema=False,  # Overlaps with next endpoint
+)
+async def get_base_entry(connection: AuthenticatedConnection) -> list[TreeItem]:
+    "Get the directory base entry"
+
+    assert settings.BASE_DN, "An LDAP base DN is required!"
+    result = await unique(
+        connection,
+        connection.search(
+            settings.BASE_DN, SCOPE_BASE, attrlist=WITH_OPERATIONAL_ATTRS
+        ),
     )
+    return [_tree_item(result, settings.BASE_DN)]
 
 
-
-
-
-
-
-
-
-
-            attrs["objectClass"],
-        ),
-    )
+@api.get("/tree/{basedn:path}", tags=[Tag.NAVIGATION], operation_id="get_tree")
+async def get_tree(basedn: str, connection: AuthenticatedConnection) -> list[TreeItem]:
+    "List directory entries below a DN"
+
+    return [
+        _tree_item(entry, basedn)
+        async for entry in results(
+            connection,
+            connection.search(basedn, SCOPE_ONELEVEL, attrlist=WITH_OPERATIONAL_ATTRS),
         )
-
+    ]
+
 
+def _tree_item(entry: LdapEntry, base_dn: str) -> TreeItem:
     return TreeItem(
-        dn=dn,
-        structuralObjectClass=structuralObjectClass
-        hasSubordinates=
-
-        else bool(attrs.get("numSubordinates")),
-        level=len(dn.split(",")) - level,
+        dn=entry.dn,
+        structuralObjectClass=entry.attr("structuralObjectClass")[0],
+        hasSubordinates=entry.hasSubordinates,
+        level=_level(entry.dn) - _level(base_dn),
     )
 
 
-
-    dn
-
-
-
-
+def _level(dn: str) -> int:
+    return len(dn.split(","))
+
+
+@api.get("/entry/{dn:path}", tags=[Tag.EDITING], operation_id="get_entry")
+async def get_entry(dn: str, connection: AuthenticatedConnection) -> Entry:
+    "Retrieve a directory entry by DN"
+    return _entry(
+        await get_entry_by_dn(connection, dn),
+        await get_schema(connection),
+    )
+
 
+def _entry(entry: LdapEntry, schema: SubSchema) -> Entry:
+    "Decode an LDAP entry for transmission"
 
-
-    attrs
-
+    meta = _meta(entry, schema)
+    attrs = {
+        k: ["*****"]  # 23 suppress userPassword
+        if k == "userPassword"
+        else [base64.b64encode(val).decode() for val in entry.attrs[k]]
+        if k in meta.binary
+        else entry.attr(k)
+        for k in sorted(entry.attrs)
+    }
+    return Entry(attrs=attrs, meta=meta)
 
 
-def
-    "
+def _meta(entry: LdapEntry, schema: SubSchema) -> Meta:
+    "Classify entry attributes"
 
-
-
-
-
-
-
-
+    object_classes = set(entry.attr("objectClass"))
+    must_attrs, _may_attrs = schema.attribute_types(object_classes)
+    required = [
+        schema.get_obj(AttributeType, a).names[0]  # type: ignore
+        for a in must_attrs
+    ]
+    structural = [
+        oc.names[0]  # type: ignore
+        for oc in map(lambda o: schema.get_obj(ObjectClass, o), object_classes)
+        if oc.kind == OC.Kind.structural  # type: ignore
     ]
     aux = set(
-        schema.get_obj(ObjectClass, a).names[0]  #
-        for a in schema.get_applicable_aux_classes(
+        schema.get_obj(ObjectClass, a).names[0]  # type: ignore
+        for a in schema.get_applicable_aux_classes(structural[0])
     )
 
-
-
-
-
-
-
-    for attr in attrs:
-        obj = schema.get_obj(AttributeType, attr)
-
-        # Octet strings are not used consistently.
-        # Try to decode as text and treat as binary on failure
-        if not obj.syntax or obj.syntax == OCTET_STRING:  # pyright: ignore[reportOptionalMemberAccess]
-            try:
-                for val in attrs[attr]:
-                    assert val.decode().isprintable()
-            except:  # noqa: E722
-                binary.add(attr)
-
-        else:  # Check human-readable flag in schema
-            syntax = schema.get_obj(LDAPSyntax, obj.syntax)  # pyright: ignore[reportOptionalMemberAccess]
-            if syntax.not_human_readable:  # pyright: ignore[reportOptionalMemberAccess]
-                binary.add(attr)
-
-    return Entry(
-        attrs={
-            k: [
-                base64.b64encode(val).decode() if k in binary else val for val in values
-            ]
-            for k, values in attrs.items()
-        },
-        meta=Meta(
-            dn=dn,
-            required=[schema.get_obj(AttributeType, a).names[0] for a in must_attrs],  # pyright: ignore[reportOptionalMemberAccess]
-            aux=sorted(aux - ocs),
-            binary=sorted(binary),
-            autoFilled=[],
-        ),
+    return Meta(
+        dn=entry.dn,
+        required=required,
+        aux=sorted(aux - object_classes),
+        binary=sorted(_binary_attributes(entry, schema)),
+        autoFilled=[],
     )
 
 
-
+def _binary_attributes(entry: LdapEntry, schema: SubSchema) -> set[str]:
+    return set(attr for attr in entry.attrs if _is_binary(entry, attr, schema))
 
 
-
-
-    "Edit directory entries"
+def _is_binary(entry: LdapEntry, attr: str, schema: SubSchema) -> bool:
+    "Guess whether an attribute has binary content"
 
-
-
+    # Octet strings are not used consistently in schemata.
+    # Try to decode as text and treat as binary on failure
+    attr_type = schema.get_obj(AttributeType, attr)
+    if not attr_type.syntax or attr_type.syntax == OCTET_STRING:  # type: ignore
+        try:
+            return any(not val.isprintable() for val in entry.attr(attr))
+        except UnicodeDecodeError:
+            return True
+
+    # Check human-readable flag
+    return schema.get_obj(LDAPSyntax, attr_type.syntax).not_human_readable  # type: ignore
 
-    if request.method == "GET":
-        return JSONResponse(
-            _entry(
-                request.app.state.schema, await get_entry_by_dn(connection, dn)
-            ).model_dump()
-        )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+@api.delete(
+    "/entry/{dn:path}",
+    status_code=HTTPStatus.NO_CONTENT,
+    tags=[Tag.EDITING],
+    operation_id="delete_entry",
+)
+async def delete_entry(dn: str, connection: AuthenticatedConnection) -> None:
+    for entry_dn in sorted(
+        [
+            entry.dn
+            async for entry in results(
+                connection,
+                connection.search(dn, SCOPE_SUBTREE),
+            )
+        ],
+        key=len,
+        reverse=True,
+    ):
+        await empty(connection, connection.delete(entry_dn))
+
+
+@api.post("/entry/{dn:path}", tags=[Tag.EDITING], operation_id="post_entry")
+async def post_entry(
+    dn: str, attributes: Attributes, connection: AuthenticatedConnection
+) -> ChangedAttributes:
+    entry = await get_entry_by_dn(connection, dn)
+    schema = await get_schema(connection)
+
+    expected = {
+        attr: _nonempty_byte_strings(attributes, attr)
+        for attr in attributes
+        if attr not in PASSWORDS
+        and not _is_binary(
+            entry, attr, schema
+        )  # FIXME Handle binary attributes properly
     }
 
-
-
-
-
-
+    actual = {attr: v for attr, v in entry.attrs.items() if attr in expected}
+    modlist = modifyModlist(actual, expected)
+    if modlist:  # Apply changes and send changed keys back
+        await empty(connection, connection.modify(dn, modlist))
+    return ChangedAttributes(changed=list(sorted(set(m[1] for m in modlist))))
+
 
-
-
-        return JSONResponse({"changed": sorted(set(m[1] for m in modlist))})
+def _nonempty_byte_strings(attributes: Attributes, attr: str) -> list[bytes]:
+    return [s.encode() for s in filter(None, attributes[attr])]
 
-    if request.method == "PUT":
-        # Create new object
-        modlist = addModlist(req)
-        if modlist:
-            await empty(connection, connection.add(dn, modlist))
-        return JSONResponse({"changed": ["dn"]})  # Dummy
 
-
+@api.put("/entry/{dn:path}", tags=[Tag.EDITING], operation_id="put_entry")
+async def put_entry(
+    dn: str, attributes: Attributes, connection: AuthenticatedConnection
+) -> ChangedAttributes:
+    modlist = addModlist(
+        {
+            attr: _nonempty_byte_strings(attributes, attr)
+            for attr in attributes
+            if attr not in PHOTOS
+        }
+    )
+    if modlist:
+        await empty(connection, connection.add(dn, modlist))
+    return ChangedAttributes(changed=["dn"])  # Dummy
 
 
-@api.
-
-
+@api.post(
+    "/rename/{dn:path}",
+    status_code=HTTPStatus.NO_CONTENT,
+    tags=[Tag.EDITING],
+    operation_id="post_rename_entry",
+)
+async def rename_entry(
+    dn: str, rdn: Annotated[str, Body()], connection: AuthenticatedConnection
+) -> None:
+    "Rename an entry"
+    await empty(connection, connection.rename(dn, rdn, delold=0))
 
-    attr = request.path_params["attr"]
-    index = request.path_params["index"]
-    dn = request.path_params["dn"]
-    connection = request.state.ldap
 
-
+@api.get(
+    "/blob/{attr}/{index}/{dn:path}",
+    tags=[Tag.EDITING],
+    operation_id="get_blob",
+    include_in_schema=False,  # Not used in UI, images are transferred inline
+)
+async def get_blob(
+    attr: str, index: int, dn: str, connection: AuthenticatedConnection
+) -> Response:
+    "Retrieve a binary attribute"
 
-
-    if attr not in attrs or len(attrs[attr]) <= index:
-        raise HTTPException(
-            HTTPStatus.NOT_FOUND.value, f"Attribute {attr} not found for DN {dn}"
-        )
+    entry = await get_entry_by_dn(connection, dn)
 
-
-
-
-        headers={
-            "Content-Disposition": f'attachment; filename="{attr}-{index:d}.bin"'
-        },
+    if attr not in entry.attrs or len(entry.attrs[attr]) <= index:
+        raise HTTPException(
+            HTTPStatus.NOT_FOUND, f"Attribute {attr} not found for DN {dn}"
         )
 
-
-
-
-
-
-    if attr in attrs:
-        await empty(
-            connection,
-            connection.modify(
-                dn, [(1, attr, None), (0, attr, attrs[attr] + [data])]
-            ),
-        )
-    else:
-        await empty(connection, connection.modify(dn, [(0, attr, [data])]))
-    return NO_CONTENT
-
-    if request.method == "DELETE":
-        if attr not in attrs or len(attrs[attr]) <= index:
-            raise HTTPException(
-                HTTPStatus.NOT_FOUND.value, f"Attribute {attr} not found for DN {dn}"
-            )
-        await empty(connection, connection.modify(dn, [(1, attr, None)]))
-        data = attrs[attr][:index] + attrs[attr][index + 1 :]
-        if data:
-            await empty(connection, connection.modify(dn, [(0, attr, data)]))
-        return NO_CONTENT
+    return Response(
+        entry.attrs[attr][index],
+        media_type="application/octet-stream",
+        headers={"Content-Disposition": f'attachment; filename="{attr}-{index:d}.bin"'},
+    )
 
-
+
+@api.put(
+    "/blob/{attr}/{index}/{dn:path}",
+    status_code=HTTPStatus.NO_CONTENT,
+    tags=[Tag.EDITING],
+    operation_id="put_blob",
+)
+async def put_blob(
+    attr: str,
+    index: int,
+    dn: str,
+    blob: Annotated[UploadFile, File()],
+    connection: AuthenticatedConnection,
+) -> None:
+    "Upload a binary attribute"
+    entry = await get_entry_by_dn(connection, dn)
+    data = await blob.read(cast(int, blob.size))
+    if attr in entry.attrs:
+        await empty(
+            connection,
+            connection.modify(
+                dn, [(1, attr, None), (0, attr, entry.attrs[attr] + [data])]
+            ),
+        )
+    else:
+        await empty(connection, connection.modify(dn, [(0, attr, [data])]))
 
 
-@api.
-
+@api.delete(
+    "/blob/{attr}/{index}/{dn:path}",
+    status_code=HTTPStatus.NO_CONTENT,
+    tags=[Tag.EDITING],
+    operation_id="delete_blob",
+)
+async def delete_blob(
+    attr: str, index: int, dn: str, connection: AuthenticatedConnection
+) -> None:
+    "Remove a binary attribute"
+    entry = await get_entry_by_dn(connection, dn)
+    if attr not in entry.attrs or len(entry.attrs[attr]) <= index:
+        raise HTTPException(
+            HTTPStatus.NOT_FOUND, f"Attribute {attr} not found for DN {dn}"
+        )
+    await empty(connection, connection.modify(dn, [(1, attr, None)]))
+    data = entry.attrs[attr][:index] + entry.attrs[attr][index + 1 :]
+    if data:
+        await empty(connection, connection.modify(dn, [(0, attr, data)]))
+
+
+@api.post(
+    "/check-password/{dn:path}", tags=[Tag.EDITING], operation_id="post_check_password"
+)
+async def check_password(
+    dn: str, check: Annotated[str, Body()], connection: AuthenticatedConnection
+) -> bool:
+    "Verify a password"
+
+    try:
+        connection.simple_bind_s(dn, check)
+        return True
+    except INVALID_CREDENTIALS:
+        return False
+
+
+@api.post(
+    "/change-password/{dn:path}",
+    tags=[Tag.EDITING],
+    operation_id="post_change_password",
+    status_code=HTTPStatus.NO_CONTENT,
+)
+async def change_password(
+    dn: str, args: ChangePasswordRequest, connection: AuthenticatedConnection
+) -> None:
+    "Update passwords"
+    if args.new1:
+        await empty(
+            connection,
+            connection.passwd(dn, args.old or None, args.new1),
+        )
+    else:
+        await empty(connection, connection.modify(dn, [(1, "userPassword", None)]))
+
+
+@api.get(
+    "/ldif/{dn:path}",
+    include_in_schema=False,  # Used as a link target, no API call
+)
+async def export_ldif(dn: str, connection: AuthenticatedConnection) -> Response:
     "Dump an entry as LDIF"
 
-    dn = request.path_params["dn"]
     out = io.StringIO()
     writer = ldif.LDIFWriter(out)
-    connection = request.state.ldap
 
-    async for
-        writer.unparse(dn, attrs)
+    async for entry in results(connection, connection.search(dn, SCOPE_SUBTREE)):
+        writer.unparse(dn, entry.attrs)
 
     file_name = dn.split(",")[0].split("=")[1]
     return PlainTextResponse(
@@ -329,96 +458,35 @@ class LDIFReader(ldif.LDIFParser):
         self.count = 0
         self.con = con
 
-    def handle(self, dn: str, entry:
+    def handle(self, dn: str, entry: Attributes):
         self.con.add_s(dn, addModlist(entry))
         self.count += 1
 
 
-@api.
-
-
-
+@api.post(
+    "/ldif",
+    tags=[Tag.EDITING],
+    operation_id="post_ldif",
+    status_code=HTTPStatus.NO_CONTENT,
+)
+async def upload_ldif(
+    ldif: Annotated[str, Body()], connection: AuthenticatedConnection
+) -> None:
     "Import LDIF"
 
-    reader = LDIFReader(
+    reader = LDIFReader(ldif.encode(), connection)
     try:
         reader.parse()
-        return NO_CONTENT
     except ValueError as e:
-
-
-
-Rdn = TypeAdapter(str)
-
-
-@api.route("/rename/{dn:path}", methods=["POST"])
-async def rename(request: Request) -> Response:
-    "Rename an entry"
-
-    dn = request.path_params["dn"]
-    rdn = Rdn.validate_json(await request.body())
-    connection = request.state.ldap
-    await empty(connection, connection.rename(dn, rdn, delold=0))
-    return NO_CONTENT
-
-
-class ChangePasswordRequest(BaseModel):
-    old: str
-    new1: str
-
-
-class CheckPasswordRequest(BaseModel):
-    check: str = Field(min_length=1)
-
+        raise HTTPException(HTTPStatus.UNPROCESSABLE_ENTITY, e.args[0])
 
-PasswordRequest = TypeAdapter(Union[ChangePasswordRequest, CheckPasswordRequest])
 
-
-
-async def passwd(request: Request) -> Response:
-    "Update passwords"
-
-    dn = request.path_params["dn"]
-    args = PasswordRequest.validate_json(await request.body())
-
-    if type(args) is CheckPasswordRequest:
-        with ldap_connect() as con:
-            try:
-                con.simple_bind_s(dn, args.check)
-                return JSONResponse(True)
-            except INVALID_CREDENTIALS:
-                return JSONResponse(False)
-
-    elif type(args) is ChangePasswordRequest:
-        connection = request.state.ldap
-        if args.new1:
-            await empty(
-                connection,
-                connection.passwd(dn, args.old or None, args.new1),
-            )
-            _dn, attrs = await get_entry_by_dn(connection, dn)
-            return JSONResponse(attrs["userPassword"][0].decode())
-
-        else:
-            await empty(connection, connection.modify(dn, [(1, "userPassword", None)]))
-            return NO_CONTENT
-
-    raise HTTPException(HTTPStatus.UNPROCESSABLE_ENTITY)
-
-
-def _cn(entry: dict) -> Optional[str]:
-    "Try to extract a CN"
-    if "cn" in entry and entry["cn"]:
-        return entry["cn"][0].decode()
-
-
-@api.route("/search/{query:path}")
-async def search(request: Request) -> JSONResponse:
+@api.get("/search/{query:path}", tags=[Tag.NAVIGATION], operation_id="search")
+async def search(query: str, connection: AuthenticatedConnection) -> list[SearchResult]:
    "Search the directory"
 
-    query = request.path_params["query"]
     if len(query) < settings.SEARCH_QUERY_MIN:
-        return
+        return []
 
     if "=" in query:  # Search specific attributes
         if "(" not in query:
@@ -428,50 +496,58 @@ async def search(request: Request) -> JSONResponse:
 
     # Collect results
     res = []
-
-    async for dn, attrs in result(
+    async for entry in results(
         connection, connection.search(settings.BASE_DN, SCOPE_SUBTREE, query)
     ):
-        res.append(
+        res.append(
+            SearchResult(
+                dn=entry.dn,
+                name=entry.attr("cn")[0] if "cn" in entry.attrs else entry.dn,
+            )
+        )
         if len(res) >= settings.SEARCH_MAX:
             break
-    return
+    return res
+
 
+@api.get("/whoami", tags=[Tag.MISC], operation_id="get_who_am_i")
+async def whoami(connection: AuthenticatedConnection) -> str:
+    "DN of the current user"
+    return connection.whoami_s().replace("dn:", "")
 
-
-
+
+@api.get("/subtree/{root_dn:path}", tags=[Tag.MISC], operation_id="get_subtree")
+async def list_subtree(
+    root_dn: str, connection: AuthenticatedConnection
+) -> list[TreeItem]:
     "List the subtree below a DN"
 
-
-
-
-
-
-
-
-
-
-
-
-
-        ],
-        key=lambda item: tuple(reversed(item["dn"].lower().split(","))),
-    )
+    return sorted(
+        [
+            _tree_item(entry, root_dn)
+            async for entry in results(
+                connection,
+                connection.search(
+                    root_dn, SCOPE_SUBTREE, attrlist=WITH_OPERATIONAL_ATTRS
+                ),
+            )
+            if root_dn != entry.dn
+        ],
+        key=lambda item: tuple(reversed(item.dn.lower().split(","))),
     )
 
 
-@api.
-async def attribute_range(
+@api.get("/range/{attribute}", tags=[Tag.MISC], operation_id="get_range")
+async def attribute_range(attribute: str, connection: AuthenticatedConnection) -> Range:
     "List all values for a numeric attribute of an objectClass like uidNumber or gidNumber"
 
-
-
-    obj = request.app.state.schema.get_obj(AttributeType, attribute)
+    schema = await get_schema(connection)
+    obj = schema.get_obj(AttributeType, attribute)
 
     values = set(
         [
-            int(attrs[attribute][0])
-            async for
+            int(entry.attrs[attribute][0])
+            async for entry in results(
                 connection,
                 connection.search(
                     settings.BASE_DN,
@@ -486,34 +562,25 @@ async def attribute_range(request: Request) -> JSONResponse:
 
     if not values:
         raise HTTPException(
-            HTTPStatus.NOT_FOUND
+            HTTPStatus.NOT_FOUND, f"No values found for attribute {attribute}"
         )
 
     minimum, maximum = min(values), max(values)
-    return
-
-
-
-            "next": min(set(range(minimum, maximum + 2)) - values),
-        }
+    return Range(
+        min=minimum,
+        max=maximum,
+        next=min(set(range(minimum, maximum + 2)) - values),
     )
 
 
-@api.
-
+@api.get(
+    "/schema",
+    tags=[Tag.MISC],
+    operation_id="get_schema",
+    response_model_exclude_none=True,
+    response_model_exclude_unset=True,
+)
+async def ldap_schema(connection: AuthenticatedConnection) -> Schema:
     "Dump the LDAP schema as JSON"
-
-
-    # See: https://hub.packtpub.com/python-ldap-applications-part-4-ldap-schema/
-    _dn, sub_schema = await unique(
-        connection,
-        connection.search(
-            settings.SCHEMA_DN,
-            SCOPE_BASE,
-            attrlist=WITH_OPERATIONAL_ATTRS,
-        ),
-    )
-    request.app.state.schema = SubSchema(sub_schema, check_uniqueness=2)
-
-    schema = frontend_schema(request.app.state.schema)
-    return JSONResponse(schema.model_dump())
+    assert settings.SCHEMA_DN, "An LDAP schema DN is required!"
+    return frontend_schema(await get_schema(connection))
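
The diff above replaces the old Starlette Router and per-request state with a FastAPI APIRouter mounted under /api, threading the LDAP connection through a Depends(authenticated) dependency backed by HTTP Basic credentials. A minimal sketch of that dependency-injection pattern, assuming a plain FastAPI app; the stand-in authenticated() body, the example DN template, and the test credentials are illustrative assumptions, not code from the package:

from typing import Annotated

from fastapi import APIRouter, Depends, FastAPI
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from fastapi.testclient import TestClient

api = APIRouter(prefix="/api")


async def authenticated(
    credentials: Annotated[HTTPBasicCredentials, Depends(HTTPBasic())],
) -> str:
    # Stand-in for the LDAP bind performed in ldap_api.authenticated();
    # the DN template below is a hypothetical example, not package code.
    return f"cn={credentials.username},dc=example,dc=org"


@api.get("/whoami")
async def whoami(dn: Annotated[str, Depends(authenticated)]) -> str:
    # Every endpoint that depends on authenticated() gets the resolved value
    # injected per request, replacing the old request.state.ldap lookups.
    return dn


app = FastAPI()
app.include_router(api)

if __name__ == "__main__":
    client = TestClient(app)
    # Basic-auth credentials are supplied per request, as the UI does.
    print(client.get("/api/whoami", auth=("admin", "secret")).json())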