@teambit/objects 0.0.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/artifacts/__bit_junit.xml +68 -0
- package/artifacts/preview/teambit_scope_objects-preview.js +1 -0
- package/dist/fixtures/version-model-extended.json +48 -0
- package/dist/fixtures/version-model-object.json +87 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.js +371 -0
- package/dist/index.js.map +1 -0
- package/dist/models/dependencies-graph.d.ts +45 -0
- package/dist/models/dependencies-graph.js +106 -0
- package/dist/models/dependencies-graph.js.map +1 -0
- package/dist/models/detach-heads.d.ts +25 -0
- package/dist/models/detach-heads.js +84 -0
- package/dist/models/detach-heads.js.map +1 -0
- package/dist/models/export-metadata.d.ts +24 -0
- package/dist/models/export-metadata.js +76 -0
- package/dist/models/export-metadata.js.map +1 -0
- package/dist/models/index.d.ts +10 -0
- package/dist/models/index.js +125 -0
- package/dist/models/index.js.map +1 -0
- package/dist/models/lane-history.d.ts +40 -0
- package/dist/models/lane-history.js +117 -0
- package/dist/models/lane-history.js.map +1 -0
- package/dist/models/lane.d.ts +124 -0
- package/dist/models/lane.js +463 -0
- package/dist/models/lane.js.map +1 -0
- package/dist/models/model-component.d.ts +317 -0
- package/dist/models/model-component.js +1365 -0
- package/dist/models/model-component.js.map +1 -0
- package/dist/models/model-component.spec.d.ts +1 -0
- package/dist/models/model-component.spec.js +71 -0
- package/dist/models/model-component.spec.js.map +1 -0
- package/dist/models/scopeMeta.d.ts +20 -0
- package/dist/models/scopeMeta.js +71 -0
- package/dist/models/scopeMeta.js.map +1 -0
- package/dist/models/source.d.ts +10 -0
- package/dist/models/source.js +43 -0
- package/dist/models/source.js.map +1 -0
- package/dist/models/symlink.d.ts +30 -0
- package/dist/models/symlink.js +91 -0
- package/dist/models/symlink.js.map +1 -0
- package/dist/models/version-history.d.ts +59 -0
- package/dist/models/version-history.js +285 -0
- package/dist/models/version-history.js.map +1 -0
- package/dist/models/version.d.ts +279 -0
- package/dist/models/version.js +777 -0
- package/dist/models/version.js.map +1 -0
- package/dist/models/version.spec.d.ts +1 -0
- package/dist/models/version.spec.js +340 -0
- package/dist/models/version.spec.js.map +1 -0
- package/dist/objects/bit-object-list.d.ts +24 -0
- package/dist/objects/bit-object-list.js +65 -0
- package/dist/objects/bit-object-list.js.map +1 -0
- package/dist/objects/index.d.ts +5 -0
- package/dist/objects/index.js +60 -0
- package/dist/objects/index.js.map +1 -0
- package/dist/objects/object-list-to-graph.d.ts +13 -0
- package/dist/objects/object-list-to-graph.js +93 -0
- package/dist/objects/object-list-to-graph.js.map +1 -0
- package/dist/objects/object-list.d.ts +52 -0
- package/dist/objects/object-list.js +369 -0
- package/dist/objects/object-list.js.map +1 -0
- package/dist/objects/object.d.ts +35 -0
- package/dist/objects/object.js +190 -0
- package/dist/objects/object.js.map +1 -0
- package/dist/objects/objects-readable-generator.d.ts +31 -0
- package/dist/objects/objects-readable-generator.js +192 -0
- package/dist/objects/objects-readable-generator.js.map +1 -0
- package/dist/objects/raw-object.d.ts +23 -0
- package/dist/objects/raw-object.js +155 -0
- package/dist/objects/raw-object.js.map +1 -0
- package/dist/objects/ref.d.ts +14 -0
- package/dist/objects/ref.js +45 -0
- package/dist/objects/ref.js.map +1 -0
- package/dist/objects/repository-hooks.d.ts +4 -0
- package/dist/objects/repository-hooks.js +56 -0
- package/dist/objects/repository-hooks.js.map +1 -0
- package/dist/objects/repository.d.ts +148 -0
- package/dist/objects/repository.js +842 -0
- package/dist/objects/repository.js.map +1 -0
- package/dist/objects/scope-index.d.ts +73 -0
- package/dist/objects/scope-index.js +251 -0
- package/dist/objects/scope-index.js.map +1 -0
- package/dist/objects/scope-index.spec.d.ts +1 -0
- package/dist/objects/scope-index.spec.js +152 -0
- package/dist/objects/scope-index.spec.js.map +1 -0
- package/dist/objects.aspect.d.ts +2 -0
- package/dist/objects.aspect.js +18 -0
- package/dist/objects.aspect.js.map +1 -0
- package/dist/objects.main.runtime.d.ts +7 -0
- package/dist/objects.main.runtime.js +36 -0
- package/dist/objects.main.runtime.js.map +1 -0
- package/dist/preview-1736824735631.js +7 -0
- package/fixtures/version-model-extended.json +48 -0
- package/fixtures/version-model-object.json +87 -0
- package/models/dependencies-graph.ts +119 -0
- package/models/detach-heads.ts +79 -0
- package/models/export-metadata.ts +57 -0
- package/models/index.ts +11 -0
- package/models/lane-history.ts +106 -0
- package/models/lane.ts +367 -0
- package/models/model-component.spec.ts +55 -0
- package/models/model-component.ts +1367 -0
- package/models/scopeMeta.ts +60 -0
- package/models/source.ts +32 -0
- package/models/symlink.ts +66 -0
- package/models/version-history.ts +266 -0
- package/models/version.spec.ts +288 -0
- package/models/version.ts +818 -0
- package/objects/bit-object-list.ts +59 -0
- package/objects/index.ts +6 -0
- package/objects/object-list-to-graph.ts +69 -0
- package/objects/object-list.ts +313 -0
- package/objects/object.ts +153 -0
- package/objects/objects-readable-generator.ts +167 -0
- package/objects/raw-object.ts +142 -0
- package/objects/ref.ts +45 -0
- package/objects/repository-hooks.ts +42 -0
- package/objects/repository.ts +753 -0
- package/objects/scope-index.spec.ts +95 -0
- package/objects/scope-index.ts +192 -0
- package/package.json +98 -0
- package/types/asset.d.ts +41 -0
- package/types/style.d.ts +42 -0
package/objects/bit-object-list.ts
ADDED
@@ -0,0 +1,59 @@
+import BitObject from './object';
+import { ExportMetadata, Lane, LaneHistory, ModelComponent, Version, VersionHistory } from '../models';
+
+export class BitObjectList {
+  constructor(private objects: BitObject[]) {}
+
+  getComponents(): ModelComponent[] {
+    return this.objects.filter((object) => object instanceof ModelComponent) as ModelComponent[];
+  }
+
+  getVersions(): Version[] {
+    return this.objects.filter((object) => object instanceof Version) as Version[];
+  }
+
+  getLanes(): Lane[] {
+    return this.objects.filter((object) => object instanceof Lane) as Lane[];
+  }
+
+  getVersionHistories(): VersionHistory[] {
+    return this.objects.filter((object) => object instanceof VersionHistory) as VersionHistory[];
+  }
+
+  getLaneHistories(): LaneHistory[] {
+    return this.objects.filter((object) => object instanceof LaneHistory) as LaneHistory[];
+  }
+
+  getAll(): BitObject[] {
+    return this.objects;
+  }
+
+  excludeTypes(types: string[]): BitObject[] {
+    return this.objects.filter((object) => !types.includes(object.getType()));
+  }
+
+  getExportMetadata(): ExportMetadata | undefined {
+    return this.objects.find((object) => object instanceof ExportMetadata) as ExportMetadata | undefined;
+  }
+
+  /**
+   * object that needs merge operation before saving them into the scope, such as ModelComponent
+   */
+  getObjectsRequireMerge() {
+    const typeRequireMerge = this.objectTypesRequireMerge();
+    return this.objects.filter((object) => typeRequireMerge.some((ObjClass) => object instanceof ObjClass));
+  }
+
+  /**
+   * object that don't need merge operation and can be saved immediately into the scope.
+   * such as Source or Version
+   */
+  getObjectsNotRequireMerge() {
+    const typeRequireMerge = this.objectTypesRequireMerge();
+    return this.objects.filter((object) => typeRequireMerge.every((ObjClass) => !(object instanceof ObjClass)));
+  }
+
+  private objectTypesRequireMerge() {
+    return [ModelComponent, Lane, VersionHistory, LaneHistory];
+  }
+}
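For orientation, a minimal usage sketch (not part of the package): it assumes a `BitObjectList` was already built elsewhere, e.g. via `ObjectList.toBitObjects()`, and uses only the accessors shown above to split a batch of scope objects into those that need a merge versus those that can be written straight to the scope. The `summarize` helper name is hypothetical.

```ts
import { BitObjectList } from './bit-object-list';

// Hypothetical helper: count what a fetched batch of scope objects contains and
// how it splits between merge-requiring objects (ModelComponent, Lane,
// VersionHistory, LaneHistory) and objects that can be saved immediately.
function summarize(list: BitObjectList) {
  return {
    components: list.getComponents().length,
    versions: list.getVersions().length,
    lanes: list.getLanes().length,
    requireMerge: list.getObjectsRequireMerge().length,
    saveImmediately: list.getObjectsNotRequireMerge().length,
  };
}

// usage (objects typically come from ObjectList.toBitObjects()):
// const list = new BitObjectList(objects);
// console.log(summarize(list));
```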
package/objects/object-list-to-graph.ts
ADDED
@@ -0,0 +1,69 @@
+import { Graph, Node, Edge } from '@teambit/graph.cleargraph';
+import { uniqBy } from 'lodash';
+import { ComponentID } from '@teambit/component-id';
+import type { ObjectList } from './object-list';
+import { BitObjectList } from './bit-object-list';
+import { getAllVersionsInfo } from '@teambit/component.snap-distance';
+import { Dependency } from '@teambit/graph';
+
+type BitIdNode = Node<ComponentID>;
+type DependencyEdge = Edge<Dependency>;
+
+export class IdGraph extends Graph<ComponentID, Dependency> {
+  constructor(nodes: BitIdNode[] = [], edges: DependencyEdge[] = []) {
+    super(nodes, edges);
+  }
+}
+
+export async function objectListToGraph(objectList: ObjectList): Promise<IdGraph> {
+  const bitObjectsList = await objectList.toBitObjects();
+
+  return bitObjectListToGraph(bitObjectsList);
+}
+
+export async function bitObjectListToGraph(bitObjectsList: BitObjectList): Promise<IdGraph> {
+  const exportMetadata = bitObjectsList.getExportMetadata();
+  const components = bitObjectsList.getComponents();
+  const versions = bitObjectsList.getVersions();
+  const nodes: BitIdNode[] = [];
+  const edges: DependencyEdge[] = [];
+  await Promise.all(
+    components.map(async (component) => {
+      const compFromMetadata = exportMetadata?.exportVersions.find((c) =>
+        c.id.isEqualWithoutVersion(component.toComponentId())
+      );
+      const startFrom = compFromMetadata?.head;
+      const versionsInfo = await getAllVersionsInfo({
+        modelComponent: component,
+        versionObjects: versions,
+        startFrom,
+        throws: false,
+      });
+      versionsInfo.forEach((versionInfo) => {
+        const id = component.toComponentId().changeVersion(versionInfo.tag || versionInfo.ref.toString());
+        const idStr = id.toString();
+        nodes.push(new Node(idStr, id));
+        if (!versionInfo.version) {
+          return;
+        }
+        const { dependencies, devDependencies, peerDependencies, extensionDependencies } =
+          versionInfo.version.depsIdsGroupedByType;
+        const addDep = (depId: ComponentID, edge: Dependency) => {
+          const depIdStr = depId.toString();
+          nodes.push(new Node(depIdStr, depId));
+          edges.push(new Edge(idStr, depIdStr, edge));
+        };
+        const runTime = new Dependency('runtime');
+        const dev = new Dependency('dev');
+        const peer = new Dependency('peer');
+        dependencies.forEach((depId) => addDep(depId, runTime));
+        [...devDependencies, ...extensionDependencies].forEach((depId) => addDep(depId, dev));
+        peerDependencies.forEach((depId) => addDep(depId, peer));
+      });
+    })
+  );
+  const uniqNodes = uniqBy(nodes, 'id');
+  const idGraph = new IdGraph(uniqNodes, edges);

+  return idGraph;
+}
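A hedged sketch of how the modules above compose (the `graphFromTar` wrapper below is illustrative, not part of the package): a tar stream fetched from a remote is parsed into an `ObjectList`, converted to parsed `BitObject`s, and then turned into an `IdGraph` whose edges carry `runtime`/`dev`/`peer` dependency types.

```ts
import { ObjectList } from './object-list';
import { bitObjectListToGraph } from './object-list-to-graph';

// Illustrative wrapper: fetched tar -> ObjectList -> parsed BitObjects -> id graph.
async function graphFromTar(packStream: NodeJS.ReadableStream) {
  const objectList = await ObjectList.fromTar(packStream);
  const bitObjects = await objectList.toBitObjects();
  return bitObjectListToGraph(bitObjects); // IdGraph of component versions and their deps
}
```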
package/objects/object-list.ts
ADDED
@@ -0,0 +1,313 @@
+import tarStream from 'tar-stream';
+import { pMapPool } from '@teambit/toolbox.promise.map-pool';
+import { compact } from 'lodash';
+import { Readable, PassThrough, pipeline } from 'stream';
+import BitObject from './object';
+import { BitObjectList } from './bit-object-list';
+import Ref from './ref';
+import { logger } from '@teambit/legacy.logger';
+import { concurrentIOLimit } from '@teambit/harmony.modules.concurrency';
+import { ExportMetadata } from '../models';
+import { UnknownObjectType } from '@teambit/legacy.scope';
+
+/**
+ * when error occurred during streaming between HTTP server and client, there is no good way to
+ * indicate this other than sending a new file with a special name and the error message.
+ */
+const TAR_STREAM_ERROR_FILENAME = '.BIT.ERROR';
+/**
+ * schema 1.0.0 - added the start and end file with basic info
+ */
+const OBJECT_LIST_CURRENT_SCHEMA = '1.0.0';
+const TAR_STREAM_START_FILENAME = '.BIT.START';
+const TAR_STREAM_END_FILENAME = '.BIT.END';
+
+type StartFile = {
+  schema: string;
+  scopeName: string;
+};
+type EndFile = {
+  numOfFiles: number;
+  scopeName: string;
+};
+
+export type ObjectItem = {
+  ref: Ref;
+  buffer: Buffer; // zlib deflated BitObject
+  type?: string; // for future use. e.g. to be able to export only Component/Version types but not Source/Artifact, etc.
+  scope?: string; // used for the export process
+};
+
+export const FETCH_FORMAT_OBJECT_LIST = 'ObjectList';
+
+/**
+ * Stream.Readable that operates with objectMode, while each 'data' event emits one ObjectItem object.
+ */
+export type ObjectItemsStream = Readable;
+
+export class ObjectList {
+  constructor(public objects: ObjectItem[] = []) {}
+
+  count() {
+    return this.objects.length;
+  }
+
+  static mergeMultipleInstances(objectLists: ObjectList[]): ObjectList {
+    const objectList = new ObjectList();
+    objectLists.forEach((objList) => objectList.mergeObjectList(objList));
+    return objectList;
+  }
+  mergeObjectList(objectList: ObjectList) {
+    this.addIfNotExist(objectList.objects);
+  }
+  static fromJsonString(jsonStr: string): ObjectList {
+    const jsonParsed = JSON.parse(jsonStr);
+    if (!Array.isArray(jsonParsed)) {
+      throw new Error(`fromJsonString expect an array, got ${typeof jsonParsed}`);
+    }
+    jsonParsed.forEach((obj) => {
+      obj.ref = new Ref(obj.ref.hash);
+      obj.buffer = Buffer.from(obj.buffer);
+    });
+    return new ObjectList(jsonParsed);
+  }
+  toJsonString(): string {
+    return JSON.stringify(this.objects);
+  }
+  toTar(): NodeJS.ReadableStream {
+    const pack = tarStream.pack();
+    this.objects.forEach((obj) => {
+      pack.entry({ name: ObjectList.combineScopeAndHash(obj) }, obj.buffer);
+    });
+    pack.finalize();
+    return pack;
+  }
+  toReadableStream(): ObjectItemsStream {
+    return Readable.from(this.objects);
+  }
+  static async fromTar(packStream: NodeJS.ReadableStream): Promise<ObjectList> {
+    const extract = tarStream.extract();
+    const objectItems: ObjectItem[] = await new Promise((resolve, reject) => {
+      const objects: ObjectItem[] = [];
+      extract.on('entry', (header, stream, next) => {
+        const data: Buffer[] = [];
+        stream.on('data', (chunk) => {
+          data.push(chunk);
+        });
+        stream.on('end', () => {
+          objects.push({
+            ...ObjectList.extractScopeAndHash(header.name),
+            buffer: Buffer.concat(data as unknown as Uint8Array[]),
+          });
+          next(); // ready for next entry
+        });
+        stream.on('error', (err) => reject(err));
+
+        stream.resume(); // just auto drain the stream
+      });
+
+      extract.on('finish', () => {
+        resolve(objects);
+      });
+
+      packStream.pipe(extract);
+    });
+    return new ObjectList(objectItems);
+  }
+
+  static fromTarToObjectStream(packStream: NodeJS.ReadableStream): ObjectItemsStream {
+    const passThrough = new PassThrough({ objectMode: true });
+    const extract = tarStream.extract();
+    let startData: StartFile | undefined;
+    let endData: EndFile | undefined;
+    extract.on('entry', (header, stream, next) => {
+      const data: Buffer[] = [];
+      stream.on('data', (chunk) => {
+        data.push(chunk);
+      });
+      stream.on('end', () => {
+        const allData = Buffer.concat(data as unknown as Uint8Array[]);
+        if (header.name === TAR_STREAM_ERROR_FILENAME) {
+          passThrough.emit('error', new Error(allData.toString()));
+          return;
+        }
+        if (header.name === TAR_STREAM_START_FILENAME) {
+          startData = JSON.parse(allData.toString());
+          logger.debug('fromTarToObjectStream, start getting data', startData);
+          next();
+          return;
+        }
+        if (header.name === TAR_STREAM_END_FILENAME) {
+          endData = JSON.parse(allData.toString());
+          logger.debug('fromTarToObjectStream, finished getting data', endData);
+          next();
+          return;
+        }
+        passThrough.write({ ...ObjectList.extractScopeAndHash(header.name), buffer: allData });
+        next(); // ready for next entry
+      });
+      stream.on('error', (err) => {
+        passThrough.emit('error', err);
+      });
+
+      stream.resume(); // just auto drain the stream
+    });
+
+    // not sure if needed
+    extract.on('error', (err) => {
+      passThrough.emit('error', err);
+    });
+
+    extract.on('finish', () => {
+      if (startData?.schema === OBJECT_LIST_CURRENT_SCHEMA && !endData) {
+        // wasn't able to find a better way to indicate whether the server aborted the request
+        // see https://github.com/node-fetch/node-fetch/issues/1117
+        passThrough.emit(
+          'error',
+          new Error(`server terminated the stream unexpectedly (metadata: ${JSON.stringify(startData)})`)
+        );
+      }
+      passThrough.end();
+    });
+    pipeline(packStream, extract, (err) => {
+      if (err) {
+        logger.error('fromTarToObjectStream, pipeline', err);
+        passThrough.emit('error', err);
+      } else {
+        logger.debug('fromTarToObjectStream, pipeline is completed');
+      }
+    });
+
+    return passThrough;
+  }
+
+  static fromObjectStreamToTar(readable: Readable, scopeName: string) {
+    const pack = tarStream.pack();
+    const startFile: StartFile = { schema: OBJECT_LIST_CURRENT_SCHEMA, scopeName };
+    logger.debug('fromObjectStreamToTar, start sending data', startFile);
+    pack.entry({ name: TAR_STREAM_START_FILENAME }, JSON.stringify(startFile));
+    let numOfFiles = 0;
+    readable.on('data', (obj: ObjectItem) => {
+      numOfFiles += 1;
+      pack.entry({ name: ObjectList.combineScopeAndHash(obj) }, obj.buffer);
+    });
+    readable.on('end', () => {
+      const endFile: EndFile = { numOfFiles, scopeName };
+      logger.debug('fromObjectStreamToTar, finished sending data', endFile);
+      pack.entry({ name: TAR_STREAM_END_FILENAME }, JSON.stringify(endFile));
+      pack.finalize();
+    });
+    readable.on('error', (err) => {
+      const errorMessage = err.message || `unexpected error (${err.name})`;
+      logger.error(`ObjectList.fromObjectStreamToTar, streaming an error as a file`, err);
+      pack.entry({ name: TAR_STREAM_ERROR_FILENAME }, errorMessage);
+      pack.finalize();
+    });
+    return pack;
+  }
+
+  static async fromReadableStream(readable: ObjectItemsStream): Promise<ObjectList> {
+    const objectItems: ObjectItem[] = [];
+    for await (const obj of readable) {
+      objectItems.push(obj);
+    }
+    return new ObjectList(objectItems);
+  }
+
+  /**
+   * the opposite of this.combineScopeAndHash
+   */
+  static extractScopeAndHash(name: string): { scope?: string; ref: Ref } {
+    const nameSplit = name.split('/');
+    const hasScope = nameSplit.length > 1;
+    return {
+      scope: hasScope ? nameSplit[0] : undefined,
+      ref: new Ref(hasScope ? nameSplit[1] : nameSplit[0]),
+    };
+  }
+  /**
+   * the opposite of this.extractScopeAndHash
+   */
+  static combineScopeAndHash(objectItem: ObjectItem): string {
+    const scope = objectItem.scope ? `${objectItem.scope}/` : '';
+    return `${scope}${objectItem.ref.hash}`;
+  }
+
+  addIfNotExist(objectItems: ObjectItem[]) {
+    objectItems.forEach((objectItem) => {
+      const exists = this.objects.find(
+        (object) => object.ref.isEqual(objectItem.ref) && object.scope === objectItem.scope
+      );
+      if (!exists) {
+        this.objects.push(objectItem);
+      }
+    });
+  }
+
+  async toBitObjects(throwForUnknownTypes = false): Promise<BitObjectList> {
+    const concurrency = concurrentIOLimit();
+    const bitObjects = await pMapPool(
+      this.objects,
+      async (object) => {
+        try {
+          return await BitObject.parseObject(object.buffer);
+        } catch (err) {
+          if (throwForUnknownTypes || !(err instanceof UnknownObjectType)) {
+            throw err;
+          }
+          logger.error(
+            `toBitObjects, unable to parse object of type ${err.type}, ignoring it. please update your bit version`
+          );
+          return null;
+        }
+      },
+      { concurrency }
+    );
+    return new BitObjectList(compact(bitObjects));
+  }
+
+  static async fromBitObjects(bitObjects: BitObject[]): Promise<ObjectList> {
+    const concurrency = concurrentIOLimit();
+    const objectItems = await pMapPool(
+      bitObjects,
+      async (obj) => ({
+        ref: obj.hash(),
+        buffer: await obj.compress(),
+        type: obj.getType(),
+      }),
+      { concurrency }
+    );
+    return new ObjectList(objectItems);
+  }
+
+  addScopeName(scopeName: string) {
+    this.objects.forEach((object) => {
+      object.scope = scopeName;
+    });
+  }
+
+  splitByScopeName(): { [scopeName: string]: ObjectList } {
+    const objectListPerScope: { [scopeName: string]: ObjectList } = {};
+    this.objects.forEach((obj) => {
+      if (obj.type === ExportMetadata.name) {
+        return; // no scope for this type. it's general for all export data from all scopes
+      }
+      if (!obj.scope) {
+        throw new Error(`ObjectList: unable to split by scopeName, the scopeName is missing for ${obj.ref.hash}`);
+      }
+      if (objectListPerScope[obj.scope]) {
+        objectListPerScope[obj.scope].addIfNotExist([obj]);
+      } else {
+        objectListPerScope[obj.scope] = new ObjectList([obj]);
+      }
+    });
+    return objectListPerScope;
+  }
+
+  /**
+   * helps debugging
+   */
+  toConsoleLog() {
+    console.log(this.objects.map((o) => o.ref.hash).join('\n')); // eslint-disable-line no-console
+  }
+}
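A minimal round-trip sketch under the API above (the `roundTrip` helper is illustrative only, not part of the package): objects are packed into the schema-1.0.0 tar format, including the `.BIT.START`/`.BIT.END` marker entries, streamed back through the object-mode reader, and collected into a new `ObjectList`.

```ts
import BitObject from './object';
import { ObjectList } from './object-list';

// Illustrative round-trip: BitObjects -> ObjectList -> tar stream (with
// .BIT.START/.BIT.END entries) -> object-mode item stream -> ObjectList again.
async function roundTrip(objects: BitObject[], scopeName: string): Promise<ObjectList> {
  const objectList = await ObjectList.fromBitObjects(objects);
  const tar = ObjectList.fromObjectStreamToTar(objectList.toReadableStream(), scopeName);
  const itemStream = ObjectList.fromTarToObjectStream(tar);
  return ObjectList.fromReadableStream(itemStream);
}
```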
package/objects/object.ts
ADDED
@@ -0,0 +1,153 @@
+import { inflateSync } from 'zlib';
+
+import { NULL_BYTE, SPACE_DELIMITER } from '@teambit/legacy.constants';
+import { deflate, inflate } from '@teambit/legacy.utils';
+import { sha1 } from '@teambit/toolbox.crypto.sha1';
+import { UnknownObjectType, typesObj as types } from '@teambit/legacy.scope';
+import { ObjectItem } from './object-list';
+import Ref from './ref';
+import Repository from './repository';
+
+function parse(buffer: Buffer): BitObject {
+  const { type, hash, contents } = extractHeaderAndContent(buffer);
+  if (!types[type]) throw new UnknownObjectType(type);
+  return types[type].parse(contents, hash);
+}
+
+function extractHeaderAndContent(buffer: Buffer): { type: string; hash: string; contents: Buffer } {
+  const firstNullByteLocation = buffer.indexOf(NULL_BYTE);
+  const headers = buffer.slice(0, firstNullByteLocation).toString();
+  const [type, hash] = headers.split(SPACE_DELIMITER);
+  const contents = buffer.slice(firstNullByteLocation + 1, buffer.length);
+  return { type, hash, contents };
+}
+
+export default class BitObject {
+  validateBeforePersist = true; // validate the object before persisting
+  id(): string | Buffer {
+    throw new Error('id() was not implemented...');
+  }
+
+  // eslint-disable-next-line @typescript-eslint/no-unused-vars
+  toBuffer(pretty?: boolean): Buffer {
+    throw new Error('toBuffer() was not implemented...');
+  }
+
+  refs(): Ref[] {
+    return [];
+  }
+
+  getType(): string {
+    return this.constructor.name;
+  }
+
+  getHeader(buffer: Buffer): string {
+    return `${this.getType()} ${this.hash().toString()} ${buffer.toString().length}${NULL_BYTE}`;
+  }
+
+  async collectRefs(repo: Repository): Promise<Ref[]> {
+    const refsCollection = [];
+    const objectType = this.getType();
+    const objectId = objectType === 'Component' ? `Component ${this.id()}` : objectType;
+
+    async function addRefs(object: BitObject) {
+      const refs = object.refs();
+      let objs;
+      try {
+        objs = await Promise.all(refs.map((ref) => ref.load(repo, true)));
+      } catch (err: any) {
+        if (err.code === 'ENOENT') {
+          throw new Error(`failed finding an object file required by ${object.constructor.name} object, originated from ${objectId}
+path: ${err.path}`);
+        }
+        throw err;
+      }
+
+      // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
+      refsCollection.push(...refs);
+      await Promise.all(objs.map((obj) => addRefs(obj)));
+    }
+
+    await addRefs(this);
+    return refsCollection;
+  }
+
+  async collectRaw(repo: Repository): Promise<ObjectItem[]> {
+    const refs = await this.collectRefs(repo);
+    return repo.loadManyRaw(refs);
+  }
+
+  asRaw(repo: Repository): Promise<Buffer> {
+    return repo.loadRaw(this.hash());
+  }
+
+  collect(repo: Repository): BitObject[] {
+    const objects: BitObject[] = [];
+
+    function addRefs(object: BitObject) {
+      const objs = object.refs().map((ref) => {
+        return ref.loadSync(repo);
+      });
+
+      objects.push(...objs);
+      objs.forEach((obj) => addRefs(obj));
+    }
+
+    addRefs(this);
+    return objects;
+  }
+
+  /**
+   * indexing method
+   */
+  hash(): Ref {
+    // console.log(`sha ${sha1(this.id())}, id ${this.id()}`); // uncomment when debugging hash issues
+    return new Ref(BitObject.makeHash(this.id()));
+  }
+
+  compress(): Promise<Buffer> {
+    return deflate(this.serialize());
+  }
+
+  serialize(): Buffer {
+    const buffer = this.toBuffer();
+    return Buffer.concat([Buffer.from(this.getHeader(buffer)), buffer] as unknown as Uint8Array[]);
+  }
+
+  /**
+   * see `this.parseSync` for the sync version
+   */
+  static async parseObject(fileContents: Buffer, filePath?: string): Promise<BitObject> {
+    const buffer = await inflate(fileContents, filePath);
+    return parse(buffer);
+  }
+
+  /**
+   * same as `parseObject`, however, if the type is not one of the given "typeNames", it returns null.
+   * the performance improvement is huge compare to "parseObject", as it doesn't parse the object if not needed.
+   */
+  static async parseObjectOnlyIfType(
+    fileContents: Buffer,
+    typeNames: string[],
+    filePath?: string
+  ): Promise<BitObject | null> {
+    const buffer = await inflate(fileContents, filePath);
+    const { type } = extractHeaderAndContent(buffer);
+    if (typeNames.includes(type)) return parse(buffer);
+    return null;
+  }
+
+  /**
+   * prefer using `this.parseObject()`, unless it must be sync.
+   */
+  static parseSync(fileContents: Buffer): BitObject {
+    // @ts-ignore todo: fix after merging #9359
+    const buffer = inflateSync(fileContents);
+    return parse(buffer);
+  }
+
+  static makeHash(str: string | Buffer): string {
+    // @ts-ignore AUTO-ADDED-AFTER-MIGRATION-PLEASE-FIX!
+    return sha1(str);
+  }
+}
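As a hedged illustration of the persistence format implemented above (the helper below is not part of the package and only works for concrete subclasses that implement `id()` and `toBuffer()`): `serialize()` prepends the `"<Type> <hash> <length>\0"` header to the object payload, `compress()` deflates the result, and `parseObject()` inflates it, reads the header, and dispatches to the matching model's `parse`.

```ts
import BitObject from './object';

// Illustrative helper: persist any concrete BitObject subclass and restore it.
async function persistAndRestore(obj: BitObject): Promise<BitObject> {
  const compressed = await obj.compress(); // deflate("<Type> <hash> <length>\0" + payload)
  return BitObject.parseObject(compressed); // inflate, split header, dispatch by type
}
```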