@peerbit/log 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/LICENSE +23 -0
  2. package/README.md +11 -0
  3. package/lib/esm/change.d.ts +5 -0
  4. package/lib/esm/change.js +2 -0
  5. package/lib/esm/change.js.map +1 -0
  6. package/lib/esm/clock.d.ts +87 -0
  7. package/lib/esm/clock.js +260 -0
  8. package/lib/esm/clock.js.map +1 -0
  9. package/lib/esm/difference.d.ts +1 -0
  10. package/lib/esm/difference.js +20 -0
  11. package/lib/esm/difference.js.map +1 -0
  12. package/lib/esm/encoding.d.ts +7 -0
  13. package/lib/esm/encoding.js +20 -0
  14. package/lib/esm/encoding.js.map +1 -0
  15. package/lib/esm/entry-index.d.ts +21 -0
  16. package/lib/esm/entry-index.js +63 -0
  17. package/lib/esm/entry-index.js.map +1 -0
  18. package/lib/esm/entry-with-refs.d.ts +5 -0
  19. package/lib/esm/entry-with-refs.js +2 -0
  20. package/lib/esm/entry-with-refs.js.map +1 -0
  21. package/lib/esm/entry.d.ts +179 -0
  22. package/lib/esm/entry.js +591 -0
  23. package/lib/esm/entry.js.map +1 -0
  24. package/lib/esm/find-uniques.d.ts +1 -0
  25. package/lib/esm/find-uniques.js +12 -0
  26. package/lib/esm/find-uniques.js.map +1 -0
  27. package/lib/esm/heads-cache.d.ts +64 -0
  28. package/lib/esm/heads-cache.js +317 -0
  29. package/lib/esm/heads-cache.js.map +1 -0
  30. package/lib/esm/heads.d.ts +63 -0
  31. package/lib/esm/heads.js +143 -0
  32. package/lib/esm/heads.js.map +1 -0
  33. package/lib/esm/hrtime.d.ts +5 -0
  34. package/lib/esm/hrtime.js +71 -0
  35. package/lib/esm/hrtime.js.map +1 -0
  36. package/lib/esm/index.d.ts +11 -0
  37. package/lib/esm/index.js +11 -0
  38. package/lib/esm/index.js.map +1 -0
  39. package/lib/esm/is-defined.d.ts +1 -0
  40. package/lib/esm/is-defined.js +2 -0
  41. package/lib/esm/is-defined.js.map +1 -0
  42. package/lib/esm/log-errors.d.ts +5 -0
  43. package/lib/esm/log-errors.js +6 -0
  44. package/lib/esm/log-errors.js.map +1 -0
  45. package/lib/esm/log-sorting.d.ts +44 -0
  46. package/lib/esm/log-sorting.js +86 -0
  47. package/lib/esm/log-sorting.js.map +1 -0
  48. package/lib/esm/log.d.ts +205 -0
  49. package/lib/esm/log.js +1004 -0
  50. package/lib/esm/log.js.map +1 -0
  51. package/lib/esm/logger.d.ts +2 -0
  52. package/lib/esm/logger.js +4 -0
  53. package/lib/esm/logger.js.map +1 -0
  54. package/lib/esm/package.json +3 -0
  55. package/lib/esm/snapshot.d.ts +22 -0
  56. package/lib/esm/snapshot.js +83 -0
  57. package/lib/esm/snapshot.js.map +1 -0
  58. package/lib/esm/trim.d.ts +49 -0
  59. package/lib/esm/trim.js +203 -0
  60. package/lib/esm/trim.js.map +1 -0
  61. package/lib/esm/types.d.ts +6 -0
  62. package/lib/esm/types.js +23 -0
  63. package/lib/esm/types.js.map +1 -0
  64. package/lib/esm/utils.d.ts +2 -0
  65. package/lib/esm/utils.js +3 -0
  66. package/lib/esm/utils.js.map +1 -0
  67. package/lib/esm/values.d.ts +33 -0
  68. package/lib/esm/values.js +141 -0
  69. package/lib/esm/values.js.map +1 -0
  70. package/package.json +79 -0
  71. package/src/change.ts +2 -0
  72. package/src/clock.ts +280 -0
  73. package/src/difference.ts +22 -0
  74. package/src/encoding.ts +27 -0
  75. package/src/entry-index.ts +78 -0
  76. package/src/entry-with-refs.ts +6 -0
  77. package/src/entry.ts +749 -0
  78. package/src/find-uniques.ts +14 -0
  79. package/src/heads-cache.ts +400 -0
  80. package/src/heads.ts +208 -0
  81. package/src/hrtime.ts +78 -0
  82. package/src/index.ts +11 -0
  83. package/src/is-defined.ts +1 -0
  84. package/src/log-errors.ts +9 -0
  85. package/src/log-sorting.ts +108 -0
  86. package/src/log.ts +1262 -0
  87. package/src/logger.ts +3 -0
  88. package/src/snapshot.ts +103 -0
  89. package/src/trim.ts +269 -0
  90. package/src/types.ts +12 -0
  91. package/src/utils.ts +2 -0
  92. package/src/values.ts +193 -0
package/src/logger.ts ADDED
@@ -0,0 +1,3 @@
1
+ import pino from "pino";
2
+ const logger = pino();
3
+ export { logger };
@@ -0,0 +1,103 @@
1
+ import { Entry } from "./entry.js";
2
+ import { Blocks } from "@peerbit/blocks-interface";
3
+ import {
4
+ BinaryReader,
5
+ BinaryWriter,
6
+ deserialize,
7
+ field,
8
+ fixedArray,
9
+ serialize,
10
+ variant,
11
+ vec,
12
+ } from "@dao-xyz/borsh";
13
+ import { waitForAsync } from "@peerbit/time";
14
+ import LocalStore from "@peerbit/lazy-level";
15
+ import { logger } from "./logger.js";
16
+
17
/**
 * Borsh-serializable snapshot of a log: its id, current head hashes and
 * (a subset of) its entries. Stored as a single block in the blockstore.
 */
@variant(0)
export class Snapshot {
	// 32-byte id of the log the snapshot was taken from.
	@field({ type: fixedArray("u8", 32) })
	id: Uint8Array;

	// Hashes of the log heads at the time the snapshot was taken.
	@field({ type: vec("string") })
	heads: string[];

	@field({ type: "u64" })
	size: bigint; // we do a size field, because the "true" log size can be larger than the size of values provided below

	// Entries included in the snapshot (may be fewer than `size`, see above).
	@field({ type: vec(Entry) })
	values: Entry<any>[];

	constructor(props?: {
		id: Uint8Array;
		heads: string[];
		size: bigint;
		values: Entry<any>[];
	}) {
		// props is optional so deserialization can construct an empty instance first.
		if (props) {
			this.heads = props.heads;
			this.id = props.id;
			this.size = props.size;
			this.values = props.values;
		}
	}
}
45
+
46
+ export const save = async <T>(
47
+ snapshotPath: string,
48
+ blockstore: Blocks,
49
+ cache: LocalStore,
50
+ log: {
51
+ id: Uint8Array;
52
+ getHeads: () => Promise<string[]>;
53
+ getValues: () => Promise<Entry<T>[]> | Entry<T>[];
54
+ }
55
+ ): Promise<string> => {
56
+ const values = await log.getValues();
57
+ const buf = serialize(
58
+ new Snapshot({
59
+ id: log.id,
60
+ heads: await log.getHeads(),
61
+ size: BigInt(values.length),
62
+ values: values,
63
+ })
64
+ );
65
+
66
+ const snapshot = await blockstore.put(buf);
67
+ const writer = new BinaryWriter();
68
+ writer.string(snapshot);
69
+ await cache.put(snapshotPath, writer.finalize());
70
+
71
+ await waitForAsync(() => cache.get(snapshotPath).then((bytes) => !!bytes), {
72
+ delayInterval: 200,
73
+ timeout: 10 * 1000,
74
+ });
75
+
76
+ logger.debug(`Saved snapshot: ${snapshot}`);
77
+ return snapshot;
78
+ };
79
+
80
+ export const load = async (
81
+ hash: string,
82
+ blockstore: Blocks
83
+ ): Promise<Snapshot> => {
84
+ const block = await blockstore.get(hash);
85
+ if (!block) {
86
+ throw new Error("Missing snapshot for CID: " + hash);
87
+ }
88
+ return deserialize(block, Snapshot);
89
+ };
90
+
91
+ export const loadFromCache = async (
92
+ path: string,
93
+ blockstore: Blocks,
94
+ cache: LocalStore
95
+ ) => {
96
+ const snapshotOrCID = await cache.get(path);
97
+ if (!snapshotOrCID) {
98
+ throw new Error("Missing snapshot CID from local store");
99
+ }
100
+ const reader = new BinaryReader(snapshotOrCID);
101
+ const snapshotCIDString = reader.string();
102
+ return load(snapshotCIDString, blockstore);
103
+ };
package/src/trim.ts ADDED
@@ -0,0 +1,269 @@
1
+ import { Cache } from "@peerbit/cache";
2
+ import PQueue from "p-queue";
3
+ import { Entry } from "./entry.js";
4
+ import { EntryNode, Values } from "./values.js";
5
+
6
+ const trimOptionsEqual = (a: TrimOptions, b: TrimOptions) => {
7
+ if (a.type === b.type) {
8
+ if (a.type === "length" && b.type === "length") {
9
+ return (
10
+ a.from === b.from &&
11
+ a.to === b.to &&
12
+ a.filter?.canTrim === b.filter?.canTrim
13
+ );
14
+ }
15
+ if (a.type === "bytelength" && b.type === "bytelength") {
16
+ return (
17
+ a.from === b.from &&
18
+ a.to === b.to &&
19
+ a.filter?.canTrim === b.filter?.canTrim
20
+ );
21
+ }
22
+
23
+ if (a.type === "time" && b.type === "time") {
24
+ return a.maxAge === b.maxAge && a.filter?.canTrim === b.filter?.canTrim;
25
+ }
26
+ }
27
+ return false;
28
+ };
29
+
30
+ const trimOptionsStricter = (from: TrimOptions, to: TrimOptions) => {
31
+ if (from.type !== to.type || from.filter?.canTrim !== to.filter?.canTrim) {
32
+ // TODO also check ttl?
33
+ return true; // we don't really know
34
+ }
35
+
36
+ if (
37
+ (from.type === "bytelength" || from.type === "length") &&
38
+ (to.type === "bytelength" || to.type === "length")
39
+ ) {
40
+ if (from.to > to.to) {
41
+ return true;
42
+ }
43
+ const fromFrom = from.from || from.to;
44
+ const fromTo = to.from || to.to;
45
+ return fromFrom > fromTo;
46
+ } else {
47
+ if (from.type === "time" && to.type === "time") {
48
+ return from.maxAge > to.maxAge;
49
+ }
50
+ }
51
+
52
+ throw new Error("Unexpected");
53
+ };
54
+
55
/** Trim down to at most `to` entries; trimming only starts once the log holds at least `from` entries (defaults to `to`). */
export type TrimToLengthOption = { type: "length"; to: number; from?: number };
/** Trim down to at most `to` payload bytes; trimming only starts at `from` bytes (defaults to `to`). */
export type TrimToByteLengthOption = {
	type: "bytelength";
	to: number;
	from?: number;
};

/** Trim entries whose clock timestamp is older than `maxAge`. */
export type TrimToTime = {
	type: "time";
	maxAge: number; // ms
};

export type TrimCondition =
	| TrimToByteLengthOption
	| TrimToLengthOption
	| TrimToTime;
/**
 * Optional per-gid veto on trimming. `cacheId` provides a seed that, when
 * stable between calls, lets Trim cache its progress across invocations.
 */
export type TrimCanAppendOption = {
	filter?: {
		canTrim: (gid: string) => Promise<boolean> | boolean;
		cacheId?: () => string | number;
	};
};
export type TrimOptions = TrimCanAppendOption & TrimCondition;
78
+
79
// Minimal view of the log that Trim depends on: the ordered value list and
// a way to delete a node (returning the deleted entry, if any).
interface Log<T> {
	values: () => Values<T>;
	deleteNode: (node: EntryNode) => Promise<Entry<T> | undefined>;
}
83
+ export class Trim<T> {
84
+ private _trim?: TrimOptions;
85
+ private _canTrimCacheLastNode: EntryNode | undefined | null;
86
+ private _trimLastHead: EntryNode | undefined | null;
87
+ private _trimLastTail: EntryNode | undefined | null;
88
+ private _trimLastOptions: TrimOptions;
89
+ private _trimLastSeed: string | number | undefined;
90
+ private _canTrimCacheHashBreakpoint: Cache<boolean>;
91
+ private _log: Log<T>;
92
+ private _queue: PQueue;
93
+ constructor(log: Log<T>, options?: TrimOptions) {
94
+ this._log = log;
95
+ this._trim = options;
96
+ this._canTrimCacheHashBreakpoint = new Cache({ max: 1e5 });
97
+ this._queue = new PQueue({ concurrency: 1 });
98
+ }
99
+
100
+ deleteFromCache(entry: Entry<T>) {
101
+ if (this._canTrimCacheLastNode?.value.hash === entry.hash) {
102
+ this._canTrimCacheLastNode = this._canTrimCacheLastNode.prev;
103
+ }
104
+ }
105
+
106
+ get options() {
107
+ return this._trim;
108
+ }
109
+
110
+ private async trimTask(
111
+ option: TrimOptions | undefined = this._trim
112
+ ): Promise<Entry<T>[]> {
113
+ if (!option) {
114
+ return [];
115
+ }
116
+ /// TODO Make this method less ugly
117
+ const deleted: Entry<T>[] = [];
118
+
119
+ let done: () => Promise<boolean> | boolean;
120
+ const values = this._log.values();
121
+ if (option.type === "length") {
122
+ const to = option.to;
123
+ const from = option.from ?? to;
124
+ if (values.length < from) {
125
+ return [];
126
+ }
127
+ done = () => values.length <= to;
128
+ } else if (option.type == "bytelength") {
129
+ // prune to max sum payload sizes in bytes
130
+ const byteLengthFrom = option.from ?? option.to;
131
+
132
+ if (values.byteLength < byteLengthFrom) {
133
+ return [];
134
+ }
135
+ done = () => values.byteLength <= option.to;
136
+ } else if (option.type == "time") {
137
+ const s0 = BigInt(+new Date() * 1e6);
138
+ const maxAge = option.maxAge * 1e6;
139
+ done = async () => {
140
+ if (!values.tail) {
141
+ return true;
142
+ }
143
+
144
+ const nodeValue = await values.getEntry(values.tail);
145
+
146
+ if (!nodeValue) {
147
+ return true;
148
+ }
149
+
150
+ return s0 - nodeValue.metadata.clock.timestamp.wallTime < maxAge;
151
+ };
152
+ } else {
153
+ return [];
154
+ }
155
+
156
+ const tail = values.tail;
157
+
158
+ if (
159
+ this._trimLastOptions &&
160
+ trimOptionsStricter(this._trimLastOptions, option)
161
+ ) {
162
+ this._canTrimCacheHashBreakpoint.clear();
163
+ }
164
+
165
+ const seed = option.filter?.cacheId?.();
166
+ const cacheProgress = seed != null;
167
+
168
+ let changed = false;
169
+ if (seed !== this._trimLastSeed || !cacheProgress) {
170
+ // Reset caches
171
+ this._canTrimCacheHashBreakpoint.clear();
172
+ this._canTrimCacheLastNode = undefined;
173
+ changed = true;
174
+ } else {
175
+ const trimOptionsChanged =
176
+ !this._trimLastOptions ||
177
+ !trimOptionsEqual(this._trimLastOptions, option);
178
+
179
+ const changed =
180
+ this._trimLastHead !== values.head ||
181
+ this._trimLastTail !== values.tail ||
182
+ trimOptionsChanged;
183
+ if (!changed) {
184
+ return [];
185
+ }
186
+ }
187
+
188
+ let node: EntryNode | undefined | null = this._canTrimCacheLastNode || tail; // TODO should we do this._canTrimCacheLastNode?.prev instead ?
189
+ let lastNode: EntryNode | undefined | null = node;
190
+ let looped = false;
191
+ const startNode = node;
192
+ let canTrimByGid: Map<string, boolean> | undefined = undefined;
193
+
194
+ // TODO only go through heads?
195
+ while (
196
+ node &&
197
+ !(await done()) &&
198
+ values.length > 0 &&
199
+ node &&
200
+ (!looped || node !== startNode)
201
+ ) {
202
+ let deleteAble: boolean | undefined = true;
203
+ if (option.filter?.canTrim) {
204
+ canTrimByGid = canTrimByGid || new Map();
205
+ deleteAble = canTrimByGid.get(node.value.gid);
206
+ if (deleteAble === undefined) {
207
+ deleteAble = await option.filter?.canTrim(node.value.gid);
208
+ canTrimByGid.set(node.value.gid, deleteAble);
209
+ }
210
+
211
+ if (!deleteAble && cacheProgress) {
212
+ // ignore it
213
+ this._canTrimCacheHashBreakpoint.add(node.value.hash, true);
214
+ }
215
+ }
216
+
217
+ // Delete, and update current node
218
+ if (deleteAble) {
219
+ // Do this before deleteNode, else prev/next might be gone!
220
+ const prev = node.prev;
221
+ const next = node.next;
222
+
223
+ const entry = await this._log.deleteNode(node);
224
+ if (entry) {
225
+ deleted.push(entry);
226
+ }
227
+
228
+ node = prev;
229
+ // If we don't do this, we might, next time start to iterate from a node that does not exist
230
+ // we do prev 'or' next because next time we want to start as close as possible to where we left of
231
+ lastNode = prev || next;
232
+ } else {
233
+ lastNode = node;
234
+ node = node?.prev;
235
+ }
236
+
237
+ if (!node) {
238
+ if (!looped && changed && !cacheProgress) {
239
+ node = tail;
240
+ looped = true;
241
+ } else {
242
+ break;
243
+ }
244
+ }
245
+ }
246
+
247
+ // remember the node where we started last time from
248
+ this._canTrimCacheLastNode = node || lastNode;
249
+ this._trimLastHead = values.head;
250
+ this._trimLastTail = values.tail;
251
+ this._trimLastOptions = option;
252
+ this._trimLastSeed = seed;
253
+
254
+ return deleted;
255
+ }
256
+ /**
257
+ * @param options
258
+ * @returns deleted entries
259
+ */
260
+ async trim(
261
+ option: TrimOptions | undefined = this._trim
262
+ ): Promise<Entry<T>[]> {
263
+ const result = await this._queue.add(() => this.trimTask(option));
264
+ if (result instanceof Object) {
265
+ return result;
266
+ }
267
+ throw new Error("Something when wrong when trimming");
268
+ }
269
+ }
package/src/types.ts ADDED
@@ -0,0 +1,12 @@
1
+ import { field, vec } from "@dao-xyz/borsh";
2
+
3
// Serializable wrapper around a plain string array, used where a borsh
// (de)serializable class is required instead of a bare string[].
export class StringArray {
	@field({ type: vec("string") })
	arr: string[];

	constructor(properties?: { arr: string[] }) {
		// properties is optional so deserialization can construct an empty instance.
		if (properties) {
			this.arr = properties.arr;
		}
	}
}
package/src/utils.ts ADDED
@@ -0,0 +1,2 @@
1
+ export const max = <T>(...args: T[]) => args.reduce((m, e) => (e > m ? e : m));
2
+ export const min = <T>(...args: T[]) => args.reduce((m, e) => (e < m ? e : m));
package/src/values.ts ADDED
@@ -0,0 +1,193 @@
1
+ import { Entry } from "./entry";
2
+ import { ISortFunction } from "./log-sorting";
3
+ import yallist from "yallist";
4
+ import { EntryIndex } from "./entry-index";
5
+
6
// Resolves an entry by hash, possibly asynchronously.
// NOTE(review): appears unused within this file — confirm before removing.
type Storage<T> = (
	hash: string
) => Promise<Entry<T> | undefined> | Entry<T> | undefined;

// Lightweight per-node payload kept in the linked list; the full Entry is
// resolved on demand through the EntryIndex.
interface Value {
	hash: string;
	gid: string;
	byteLength: number;
}

export type EntryNode = yallist.Node<Value>;
17
+
18
/**
 * Ordered collection of log entry references backed by a doubly linked list.
 * Stores only (hash, gid, byteLength) per node and resolves full entries via
 * the EntryIndex; also tracks the total payload byte length.
 */
export class Values<T> {
	/**
	 * Keep track of sorted elements in descending sort order (i.e. newest elements)
	 */
	private _values: yallist<Value>;
	private _sortFn: ISortFunction;
	private _byteLength: number;
	private _entryIndex: EntryIndex<T>;

	constructor(
		entryIndex: EntryIndex<T>,
		sortFn: ISortFunction,
		entries: Entry<T>[] = []
	) {
		// slice() before sort so the caller's array is not mutated;
		// reverse() because the list is kept in descending order (head = newest).
		this._values = yallist.create(
			entries
				.slice()
				.sort(sortFn)
				.reverse()
				.map((x) => {
					if (!x.hash) throw new Error("Unexpected");
					return {
						hash: x.hash,
						byteLength: x._payload.byteLength,
						gid: x.gid,
					};
				})
		);
		this._byteLength = 0;
		entries.forEach((entry) => {
			this._byteLength += entry._payload.byteLength;
		});
		this._sortFn = sortFn;
		this._entryIndex = entryIndex;
	}

	// Resolves all entries (oldest first); entries missing from the index are dropped.
	toArray(): Promise<Entry<T>[]> {
		return Promise.all(
			this._values.toArrayReverse().map((x) => this._entryIndex.get(x.hash))
		).then((arr) => arr.filter((x) => !!x)) as Promise<Entry<T>[]>; // we do reverse because we assume the log is only meaningful if we read it from start to end
	}

	get head() {
		return this._values.head;
	}
	get tail() {
		return this._values.tail;
	}
	get length() {
		return this._values.length;
	}

	// De-duplicates concurrent put() calls for the same hash.
	private _putPromise: Map<string, Promise<any>> = new Map();

	// Inserts an entry at its sorted position; concurrent puts of the same
	// hash share one in-flight promise.
	async put(value: Entry<T>) {
		let promise = this._putPromise.get(value.hash);
		if (promise) {
			return promise;
		}
		promise = this._put(value).then((v) => {
			this._putPromise.delete(value.hash);
			return v;
		});
		this._putPromise.set(value.hash, promise);
		return promise;
	}

	// Walks from the head to find the insertion point (assumes new entries
	// usually belong near the head); no-op if the hash already exists.
	async _put(value: Entry<T>) {
		// assume we want to insert at head (or somewhere close)
		let walker = this._values.head;
		let last: EntryNode | undefined = undefined;
		while (walker) {
			const walkerValue = await this.getEntry(walker);
			if (!walkerValue) {
				throw new Error("Missing walker value");
			}
			if (walkerValue.hash === value.hash) {
				return; // already exist!
			}

			// Stop once the new value sorts after the current node.
			if (this._sortFn(walkerValue, value) < 0) {
				break;
			}
			last = walker;
			walker = walker.next;
			continue;
		}

		this._byteLength += value._payload.byteLength;
		if (!value.hash) {
			throw new Error("Unexpected");
		}

		_insertAfter(this._values, last, {
			byteLength: value._payload.byteLength,
			gid: value.gid,
			hash: value.hash,
		});
	}

	// Removes the node for the given entry/hash, walking from the tail
	// (deletions typically target the oldest end). Throws if not found.
	async delete(value: Entry<T> | string) {
		const hash = typeof value === "string" ? value : value.hash;
		// Assume we want to delete at tail (or somewhere close)

		let walker = this._values.tail;
		while (walker) {
			const walkerValue = await this.getEntry(walker);

			if (!walkerValue) {
				throw new Error("Missing walker value");
			}

			if (walkerValue.hash === hash) {
				this._values.removeNode(walker);
				this._byteLength -= walkerValue._payload.byteLength;
				return;
			}
			walker = walker.prev; // prev will be undefined if you do removeNode(walker)
		}
		throw new Error(
			"Failed to delete, entry does not exist" +
				" ??? " +
				this.length +
				" ??? " +
				hash
		);
	}

	// Removes a known node directly (no walk) and updates the byte total.
	deleteNode(node: EntryNode) {
		this._values.removeNode(node);
		this._byteLength -= node.value.byteLength;
		return;
	}

	// Removes and returns the tail (oldest) node's value, updating the byte total.
	pop() {
		const value = this._values.pop();
		if (value) {
			this._byteLength -= value.byteLength;
		}
		return value;
	}

	get byteLength() {
		return this._byteLength;
	}

	// Resolves the full entry for a node via the entry index.
	async getEntry(node: EntryNode) {
		return this._entryIndex.get(node.value.hash);
	}
}
166
+
167
// Inserts `value` after `node`, or at the head when `node` is undefined,
// by constructing a yallist Node directly and patching the list's
// head/tail/length fields — presumably because yallist lacks a public
// insert-at-position API (TODO confirm).
function _insertAfter(
	self: yallist<any>,
	node: EntryNode | undefined,
	value: Value
) {
	const inserted = !node
		? new yallist.Node(
				value,
				null as any,
				self.head as EntryNode | undefined,
				self
		  )
		: new yallist.Node(value, node, node.next as EntryNode | undefined, self);

	// is tail
	if (inserted.next === null) {
		self.tail = inserted;
	}

	// is head
	if (inserted.prev === null) {
		self.head = inserted;
	}

	self.length++;
	return inserted;
}