@opencloning/utils 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +17 -0
- package/package.json +22 -0
- package/src/config/urlWhitelist.js +19 -0
- package/src/utils/ambiguous_dna_bases.json +14 -0
- package/src/utils/enzyme_utils.js +20 -0
- package/src/utils/error2String.js +18 -0
- package/src/utils/fileParsers.js +69 -0
- package/src/utils/getHttpClient.js +20 -0
- package/src/utils/getStructuredBases.js +109 -0
- package/src/utils/ncbiRequests.js +102 -0
- package/src/utils/network.js +184 -0
- package/src/utils/network.test.js +149 -0
- package/src/utils/other.js +24 -0
- package/src/utils/readNwrite.js +295 -0
- package/src/utils/selectedRegionUtils.js +25 -0
- package/src/utils/selectedRegionUtils.test.js +14 -0
- package/src/utils/sequenceDisplay.js +38 -0
- package/src/utils/sequenceManipulation.js +220 -0
- package/src/utils/sequenceManipulation.test.js +38 -0
- package/src/utils/sequencingFileExtensions.js +8 -0
- package/src/utils/sourceFunctions.js +48 -0
- package/src/utils/thunks.js +46 -0
- package/src/utils/transformCoords.js +71 -0
- package/src/utils/transformCoords.test.js +58 -0
- package/vitest.config.js +17 -0
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
import { mockSources, mockSequences, mockPrimers, mockFiles } from '../../../../tests/mockNetworkData';
import { collectParentSequencesAndSources, getSubState } from './network';

// Lookup helpers shared by the assertions below (removes the unused
// `sequencesToExport`/`sourcesToExport` locals and the repeated `.find` calls).
const sourceById = (id) => mockSources.find((s) => s.id === id);
const primerById = (id) => mockPrimers.find((p) => p.id === id);

describe('collectParentSequencesAndSources', () => {
  it('should collect all parent sequences and sources recursively', () => {
    const { parentSequences, parentSources } = collectParentSequencesAndSources(
      sourceById(1),
      mockSources,
      mockSequences,
    );

    expect(parentSequences).toEqual([
      { id: 2, name: 'Seq2' },
      { id: 3, name: 'Seq3' },
      { id: 4, name: 'Seq4' },
      { id: 5, name: 'Seq5' },
    ]);

    expect(parentSources).toEqual([2, 3, 4, 5].map(sourceById));
  });

  it('should stop collecting when stopAtDatabaseId is true and a source with database_id is found', () => {
    const { parentSequences, parentSources } = collectParentSequencesAndSources(
      sourceById(1),
      mockSources,
      mockSequences,
      true,
    );

    expect(parentSequences).toEqual([
      { id: 2, name: 'Seq2' },
      { id: 3, name: 'Seq3' },
    ]);

    expect(parentSources).toEqual([2, 3].map(sourceById));
  });

  it('should handle sources with no input', () => {
    const { parentSequences, parentSources } = collectParentSequencesAndSources(
      sourceById(4),
      mockSources,
      mockSequences,
    );

    expect(parentSequences).toEqual([]);
    expect(parentSources).toEqual([]);
  });
});

describe('getSubState', () => {
  it('should throw an error if the sequence is not found', () => {
    const state = {
      cloning: {
        sequences: [],
        sources: [],
      },
    };

    expect(() => getSubState(state, 1)).toThrow('Sequence with id 1 not found');
  });

  it('should throw an error if the source is not found', () => {
    const state = {
      cloning: {
        sequences: [{ id: 1 }],
        sources: [],
      },
    };

    expect(() => getSubState(state, 1)).toThrow('Source with id 1 not found');
  });

  it('should return the correct substate with used primers and files only', () => {
    const state = {
      cloning: {
        sequences: mockSequences,
        sources: mockSources,
        primers: mockPrimers,
        files: mockFiles,
      },
    };

    const substate = getSubState(state, 1);

    // Unlike the sequencesToExport, the substate includes the sequence with id and its source
    expect(substate.sequences).toEqual([
      { id: 1, name: 'Seq1' },
      { id: 2, name: 'Seq2' },
      { id: 3, name: 'Seq3' },
      { id: 4, name: 'Seq4' },
      { id: 5, name: 'Seq5' },
    ]);

    expect(substate.sources).toEqual([1, 2, 3, 4, 5].map(sourceById));

    expect(substate.primers).toEqual([7, 8, 9, 10].map(primerById));
    expect(substate.files).toEqual(mockFiles.slice(0, 3));
  });

  it('should work with database_id', () => {
    const state = {
      cloning: {
        sequences: mockSequences,
        sources: mockSources,
        primers: mockPrimers,
      },
    };

    const substate = getSubState(state, 1, true);

    expect(substate.sequences).toEqual([
      { id: 1, name: 'Seq1' },
      { id: 2, name: 'Seq2' },
      { id: 3, name: 'Seq3' },
    ]);

    expect(substate.sources).toEqual([1, 2, 3].map(sourceById));

    expect(substate.primers).toEqual([7, 8].map(primerById));
  });
});
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// From https://github.com/sindresorhus/escape-string-regexp/blob/main/index.js
/**
 * Escape a string so it can be embedded literally inside a RegExp pattern.
 *
 * @param {string} string - The text to escape.
 * @returns {string} The escaped pattern fragment.
 * @throws {TypeError} If the argument is not a string.
 */
export function escapeStringRegexp(string) {
  if (typeof string !== 'string') {
    throw new TypeError('Expected a string');
  }

  // Escape characters with special meaning either inside or outside character sets.
  // `\xnn` is used for `-` because a plain backslash escape would be rejected
  // by the stricter grammar of Unicode-mode patterns.
  const escapedSpecials = string.replace(/[|\\{}()[\]^$+*?.]/g, '\\$&');
  return escapedSpecials.replace(/-/g, '\\x2d');
}
|
|
13
|
+
|
|
14
|
+
/**
 * Read the current page's query string into a plain object.
 *
 * @returns {Object<string, string>} One entry per query parameter; for
 *   repeated keys the last occurrence wins (Object.fromEntries semantics).
 */
export function getUrlParameters() {
  const searchParams = new URLSearchParams(window.location.search);
  // URLSearchParams is itself iterable over [key, value] pairs.
  return Object.fromEntries(searchParams);
}
|
|
19
|
+
|
|
20
|
+
/**
 * Format a sequence span in GenBank location notation.
 *
 * @param {number} start - Start coordinate.
 * @param {number} end - End coordinate.
 * @param {number} strand - Strand indicator; -1 means the reverse strand.
 * @returns {string} `start..end`, wrapped in `complement(...)` for strand -1.
 */
export function formatSequenceLocationString(start, end, strand) {
  const range = `${start}..${end}`;
  return strand === -1 ? `complement(${range})` : range;
}
|
|
@@ -0,0 +1,295 @@
|
|
|
1
|
+
import { elementToSVG, inlineResources } from 'dom-to-svg';
|
|
2
|
+
import { ab1ToJson, anyToJson, genbankToJson, jsonToFasta, jsonToGenbank } from '@teselagen/bio-parsers';
|
|
3
|
+
import {
|
|
4
|
+
BlobWriter,
|
|
5
|
+
ZipWriter,
|
|
6
|
+
TextReader,
|
|
7
|
+
BlobReader,
|
|
8
|
+
ZipReader,
|
|
9
|
+
configure,
|
|
10
|
+
} from '@zip.js/zip.js';
|
|
11
|
+
import { tidyUpSequenceData } from '@teselagen/sequence-utils';
|
|
12
|
+
import { isEqual } from 'lodash-es';
|
|
13
|
+
|
|
14
|
+
// zip.js module-level setup: disable web workers so the zip read/write
// helpers below run in-process (works in environments without worker support).
configure({
  useWebWorkers: false,
});
|
|
17
|
+
|
|
18
|
+
/**
 * Decode a base64 string into a Blob of the raw bytes.
 *
 * @param {string} base64 - Base64-encoded payload (no data-URL prefix).
 * @returns {Blob} Blob wrapping the decoded bytes.
 */
export function base64ToBlob(base64) {
  const binaryString = atob(base64);
  // Map each char of the decoded binary string to its byte value.
  const bytes = Uint8Array.from(binaryString, (char) => char.charCodeAt(0));
  return new Blob([bytes]);
}
|
|
26
|
+
|
|
27
|
+
/**
 * Trigger a browser download of a Blob under the given file name.
 *
 * Works by creating a temporary object URL and a hidden anchor, clicking it,
 * then cleaning both up.
 *
 * @param {Blob} blob - Content to download.
 * @param {string} fileName - Suggested name for the downloaded file.
 */
export const downloadBlob = (blob, fileName) => {
  const href = URL.createObjectURL(blob);
  const link = document.createElement('a');
  link.href = href;
  link.download = fileName;
  document.body.appendChild(link);
  link.click();
  document.body.removeChild(link);
  // Release the object URL; the original omitted this, leaking one URL
  // (and the Blob it pins in memory) per download.
  URL.revokeObjectURL(href);
};
|
|
36
|
+
|
|
37
|
+
/**
 * Read a File/Blob and resolve with its content encoded as base64.
 *
 * @param {Blob} file - The file to read.
 * @returns {Promise<string>} Base64 content (data-URL prefix stripped).
 */
export const file2base64 = (file) => new Promise((resolve) => {
  const reader = new FileReader();
  reader.onload = () => {
    // reader.result is a data URL ("data:<mime>;base64,<payload>");
    // keep only the payload after the comma.
    const [, base64String] = reader.result.split(',');
    resolve(base64String);
  };
  reader.readAsDataURL(file);
});
|
|
45
|
+
|
|
46
|
+
/**
 * Download a text string as a file.
 *
 * @param {string} text - File content.
 * @param {string} fileName - Suggested download name.
 * @param {string} [type='text/plain'] - MIME type for the Blob.
 */
export const downloadTextFile = (text, fileName, type = 'text/plain') => {
  downloadBlob(new Blob([text], { type }), fileName);
};
|
|
50
|
+
|
|
51
|
+
/**
 * Build a zip archive Blob from a list of entries.
 *
 * @param {Array<{name: string, reader: object}>} files - Entries to add; each
 *   has a zip-entry name and a zip.js reader for its content.
 * @returns {Promise<Blob>} The finished zip archive.
 */
export async function getZipFileBlob(files) {
  const zipWriter = new ZipWriter(new BlobWriter('application/zip'));
  // Entries are added concurrently; zip.js serializes the actual writes.
  await Promise.all(files.map(({ name, reader }) => zipWriter.add(name, reader)));
  return zipWriter.close();
}
|
|
56
|
+
|
|
57
|
+
/**
 * Read a submitted File/Blob as UTF-8 text.
 *
 * @param {Blob} file - The file to read.
 * @returns {Promise<string>} The file's text content.
 * @throws {Error} 'Error reading text file' (with the FileReader's error
 *   attached as `cause`) if reading fails.
 */
export async function readSubmittedTextFile(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (event) => {
      resolve(event.target.result);
    };
    reader.onerror = () => {
      // Keep the underlying DOMException (reader.error) instead of
      // discarding it, so callers can diagnose the real failure.
      reject(new Error('Error reading text file', { cause: reader.error }));
    };
    reader.readAsText(file, 'UTF-8');
  });
}
|
|
69
|
+
|
|
70
|
+
/**
 * Shape the in-memory cloning state into the snake_case JSON export format.
 *
 * @param {object} cloningState - State with sequences, sources, description,
 *   primers and an appInfo object carrying the version strings.
 * @returns {object} Export payload with *_version keys flattened from appInfo.
 */
export function formatStateForJsonExport(cloningState) {
  const { sequences, sources, description, primers, appInfo } = cloningState;
  return {
    sequences,
    sources,
    description,
    primers,
    backend_version: appInfo.backendVersion,
    schema_version: appInfo.schemaVersion,
    frontend_version: appInfo.frontendVersion,
  };
}
|
|
75
|
+
|
|
76
|
+
// Serialize a value as 2-space-indented JSON with a trailing newline.
export function prettyPrintJson(json) {
  return `${JSON.stringify(json, null, 2)}\n`;
}
|
|
77
|
+
|
|
78
|
+
/**
 * Download the cloning state as a pretty-printed JSON file.
 *
 * @param {object} cloningState - State passed to formatStateForJsonExport.
 * @param {string} [fileName='cloning_strategy.json'] - Download name.
 */
export const downloadStateAsJson = async (cloningState, fileName = 'cloning_strategy.json') => {
  const exported = formatStateForJsonExport(cloningState);
  downloadTextFile(prettyPrintJson(exported), fileName, 'application/json');
};
|
|
82
|
+
|
|
83
|
+
/**
 * Download the cloning state plus its verification files as one zip archive.
 *
 * The strategy JSON goes in as `cloning_strategy.json`; each verification
 * file is read back from sessionStorage (stored there as base64).
 *
 * @param {object} cloningState - State with a `files` array of file records.
 * @param {string} [zipFileName='cloning_strategy.zip'] - Download name.
 * @throws {Error} If a referenced file is missing from sessionStorage.
 */
export const downloadStateAsZip = async (cloningState, zipFileName = 'cloning_strategy.zip') => {
  const output = formatStateForJsonExport(cloningState);
  output.files = cloningState.files;

  // Build a zip entry for one verification file from its sessionStorage copy.
  const makeVerificationEntry = (file) => {
    const fileName = getVerificationFileName(file);
    const base64Content = sessionStorage.getItem(fileName);
    if (!base64Content) {
      const nameOnly = fileName.replace(/verification-\d+-/, '');
      throw new Error(`File ${nameOnly} not found in session storage`);
    }
    return { name: fileName, reader: new BlobReader(base64ToBlob(base64Content)) };
  };

  const files2write = [
    { name: 'cloning_strategy.json', reader: new TextReader(prettyPrintJson(output)) },
    ...cloningState.files.map(makeVerificationEntry),
  ];

  const blob = await getZipFileBlob(files2write);
  downloadBlob(blob, zipFileName);
};
|
|
101
|
+
|
|
102
|
+
/**
 * Download sequence data in the format implied by the file extension.
 *
 * Supported extensions: `.gb` (GenBank) and `.fasta`; anything else is a
 * silent no-op, as is an undefined sequence.
 *
 * @param {string} fileName - Target name; its extension picks the serializer.
 * @param {object|undefined} sequenceData - Teselagen-style sequence JSON.
 */
export const downloadSequence = (fileName, sequenceData) => {
  if (sequenceData === undefined) {
    return;
  }
  const serializers = [
    ['.gb', jsonToGenbank],
    ['.fasta', jsonToFasta],
  ];
  const match = serializers.find(([extension]) => fileName.endsWith(extension));
  if (match) {
    const [, serialize] = match;
    downloadTextFile(serialize(sequenceData), fileName);
  }
};
|
|
112
|
+
|
|
113
|
+
/**
 * Render the cloning-strategy view (the `div.open-cloning` element) to an
 * SVG file and download it.
 *
 * Works on a detached off-screen clone of the container so the visible UI
 * is never mutated; interactive chrome (MUI icons, "add" buttons, new-source
 * boxes) is stripped from the clone before conversion.
 *
 * @param {string} fileName - Name for the downloaded SVG file.
 */
export const downloadCloningStrategyAsSvg = async (fileName) => {
  const container = document.querySelector('div.open-cloning');
  // Clone the container to avoid modifying the original
  const containerCopy = container.cloneNode(true);
  containerCopy.id = 'temp-div-svg-print';

  // Make sure the entire element is displayed (no scrollbars/clipping),
  // while keeping it off-screen so the user never sees it.
  containerCopy.style.overflow = 'visible';
  containerCopy.style.width = 'fit-content';
  containerCopy.style.height = 'fit-content';
  containerCopy.style.position = 'absolute';
  containerCopy.style.left = '-9999px';

  // Remove all MUI icons from the copy before converting to SVG
  const muiIcons = containerCopy.querySelectorAll('.MuiSvgIcon-root');
  muiIcons.forEach((icon) => icon.remove());

  // Remove all "Add" buttons
  const addButtons = containerCopy.querySelectorAll('.hang-from-node');
  addButtons.forEach((button) => button.remove());

  // Remove all "New source box"
  const newSourceBoxes = containerCopy.querySelectorAll('.new_source_box');
  newSourceBoxes.forEach((box) => box.remove());

  // The clone must be attached to the live DOM for dom-to-svg to compute
  // styles/layout; it is removed again immediately after conversion.
  container.appendChild(containerCopy);
  const node2print = document.getElementById('temp-div-svg-print');

  const svgDocument = elementToSVG(node2print);
  container.removeChild(node2print);

  // Embed external resources (images, fonts) so the SVG is self-contained.
  await inlineResources(svgDocument.documentElement);
  const svgString = new XMLSerializer().serializeToString(svgDocument);
  downloadTextFile(svgString, fileName);
};
|
|
148
|
+
|
|
149
|
+
/**
 * Load a cloning history from a user-submitted `.json` file or `.zip` bundle.
 *
 * For a zip, the single JSON entry in the archive root is the strategy and
 * every root entry named `verification-<id>-*` is a verification file; the
 * set of verification files must match the strategy's `files` list exactly.
 * For a plain JSON file, any `files` entries are dropped (no data for them).
 *
 * @param {File} file - The submitted .zip or .json file.
 * @returns {Promise<{cloningStrategy: object, verificationFiles: File[]}>}
 * @throws {Error} On bad zip layout, invalid JSON, mismatched verification
 *   files, or a strategy missing the primers/sequences/sources arrays.
 */
export async function loadHistoryFile(file) {
  const isZipFile = file.name.endsWith('.zip');
  const isJsonFile = file.name.endsWith('.json');

  let cloningStrategyFile;
  let verificationFiles = [];

  if (isZipFile) {
    const zipReader = new ZipReader(new BlobReader(file));
    // Only in the root directory (e.g. Mac sometimes add __MACOSX/.. to the zip)
    const entries = (await zipReader.getEntries()).filter((entry) => !entry.filename.includes('/'));
    const jsonFilesInZip = entries.filter((entry) => entry.filename.endsWith('.json'));

    if (jsonFilesInZip.length !== 1) {
      throw new Error('Zip file must contain exactly one JSON file.');
    }
    cloningStrategyFile = await jsonFilesInZip[0].getData(new BlobWriter());
    // Wrap each verification entry back into a File so downstream code can
    // treat zip-sourced and directly-submitted files the same way.
    verificationFiles = await Promise.all(entries
      .filter((entry) => /verification-\d+-.*/.test(entry.filename))
      .map(async (entry) => {
        const blob = await entry.getData(new BlobWriter());
        return new File([blob], entry.filename, { type: blob.type });
      }));
  } else if (isJsonFile) {
    cloningStrategyFile = file;
  }

  // NOTE(review): for any other extension cloningStrategyFile stays
  // undefined, so the read below fails and surfaces as 'Invalid JSON file.'
  let cloningStrategy;
  try {
    cloningStrategy = JSON.parse(await readSubmittedTextFile(cloningStrategyFile));
  } catch (error) {
    throw new Error('Invalid JSON file.');
  }
  const newCloningStrategy = { ...cloningStrategy };

  // Drop the files if loading only json
  if (isJsonFile) {
    newCloningStrategy.files = [];
  }

  // Check files
  if (isZipFile) {
    if (!newCloningStrategy.files) {
      newCloningStrategy.files = [];
    }
    // Missing files in zip
    const stateFileNames = newCloningStrategy.files.map((f) => getVerificationFileName(f));
    const verificationFileNames = verificationFiles.map((f) => f.name);

    const missingFile = stateFileNames.find((name) => !verificationFileNames.includes(name));
    if (missingFile) {
      throw new Error(`File ${missingFile} not found in zip.`);
    }

    // Excess file in zip
    const excessFile = verificationFileNames.find((name) => !stateFileNames.includes(name));
    if (excessFile) {
      throw new Error(`File ${excessFile} found in zip but not in cloning strategy.`);
    }
  }

  // Validate the cloning strategy
  if (newCloningStrategy.primers === undefined || newCloningStrategy.sequences === undefined || newCloningStrategy.sources === undefined) {
    throw new Error('JSON file should contain at least keys: primers, sequences and sources');
  }
  // They should be arrays
  if (!Array.isArray(newCloningStrategy.primers)) {
    throw new Error('primers should be an array');
  }
  if (!Array.isArray(newCloningStrategy.sequences)) {
    throw new Error('sequences should be an array');
  }
  if (!Array.isArray(newCloningStrategy.sources)) {
    throw new Error('sources should be an array');
  }

  return { cloningStrategy: newCloningStrategy, verificationFiles };
}
|
|
227
|
+
|
|
228
|
+
/**
 * Store verification files in sessionStorage as base64, keyed by file name.
 *
 * @param {File[]} files - Files named `verification-<id>-<name>`.
 * @param {number} [idShift=0] - Amount added to the numeric id embedded in
 *   each file name before it is used as the storage key.
 */
export const loadFilesToSessionStorage = async (files, idShift = 0) => {
  const shiftId = (match, num) => `verification-${parseInt(num, 10) + idShift}-`;
  await Promise.all(files.map(async (file) => {
    const fileContent = await file2base64(file);
    const storageKey = file.name.replace(/verification-(\d+)-/, shiftId);
    sessionStorage.setItem(storageKey, fileContent);
  }));
};
|
|
235
|
+
|
|
236
|
+
/**
 * Canonical name for a verification file record (used as its sessionStorage
 * key and zip entry name).
 *
 * @param {{sequence_id: number, file_name: string}} file - File record.
 * @returns {string} `verification-<sequence_id>-<file_name>`.
 */
export function getVerificationFileName({ sequence_id, file_name }) {
  return ['verification', sequence_id, file_name].join('-');
}
|
|
239
|
+
|
|
240
|
+
/**
 * Parse a sequence record's GenBank content into tidy Teselagen JSON,
 * carrying over the record's id.
 *
 * @param {{id: *, file_content: string}} sequence - Record with GenBank text.
 * @returns {object} Tidied Teselagen sequence data.
 */
export function convertToTeselaJson(sequence) {
  // TODO: This might have been fixed in more recent versions of the library
  // For some reason, as it is it does not read circular or linear properly from certain files
  const [firstResult] = genbankToJson(sequence.file_content);
  const { parsedSequence } = firstResult;
  parsedSequence.id = sequence.id;
  return tidyUpSequenceData(parsedSequence);
}
|
|
253
|
+
|
|
254
|
+
/**
 * Parse a base64-encoded trace/sequence file into Teselagen JSON.
 *
 * @param {string} ab1Base64 - Base64-encoded file content.
 * @param {string|null} [fileName=null] - Original file name; `.ab1` routes to
 *   the dedicated ab1 parser, anything else (including null) to anyToJson.
 * @returns {Promise<object>} The first parsed sequence.
 * @throws {Error} Wrapping the underlying parse error (kept as `cause`).
 */
export async function getTeselaJsonFromBase64(ab1Base64, fileName = null) {
  try {
    const blob = base64ToBlob(ab1Base64);
    // `?.` guards the documented `fileName = null` default: previously
    // `null.endsWith(...)` threw a TypeError before any parsing happened.
    const results = await (fileName?.endsWith('.ab1') ? ab1ToJson(blob) : anyToJson(blob, { fileName }));
    return results[0].parsedSequence;
  } catch (error) {
    const fileNameError = fileName || 'file';
    throw new Error(`Error parsing ${fileNameError}: ${error.message}`, { cause: error });
  }
}
|
|
264
|
+
|
|
265
|
+
/**
 * Rewrite a kit template's relative source-image paths to absolute URLs on
 * the OpenCloning submissions asset host.
 *
 * @param {object} data - Template data with a `sources` array; each source
 *   may carry an `image` tuple of [relativePath, caption].
 * @param {string} url - Template URL; the kit directory is its third-from-last
 *   path segment.
 * @returns {object} Copy of `data` with absolute image URLs.
 */
export function formatTemplate(data, url) {
  const rootGithubUrl = 'https://assets.opencloning.org/OpenCloning-submission/submissions';
  const segments = url.split('/');
  const kitUrl = segments[segments.length - 3];

  // Sources without an image (or with a null path) pass through untouched.
  const withAbsoluteImage = (source) => {
    if (!source.image || source.image[0] === null) {
      return source;
    }
    return {
      ...source,
      image: [`${rootGithubUrl}/${kitUrl}/${source.image[0]}`, source.image[1]],
    };
  };

  return { ...data, sources: data.sources.map(withAbsoluteImage) };
}
|
|
275
|
+
|
|
276
|
+
/**
 * Rename verification File objects after validation may have renamed their
 * backing records.
 *
 * `originalFiles` and `validatedFiles` are parallel arrays (same record at
 * the same index, before/after validation). Each verification File is matched
 * to its pre-validation record by name and, if the validated record yields a
 * different canonical name, re-wrapped in a new File under that name.
 *
 * @param {File[]} verificationFiles - Files named per getVerificationFileName.
 * @param {object[]} originalFiles - File records before validation.
 * @param {object[]} validatedFiles - The same records after validation.
 * @returns {File[]} New array; entries are renamed copies or the originals.
 */
export function updateVerificationFileNames(verificationFiles, originalFiles, validatedFiles) {
  // Fast path: validation changed no names, so only a shallow copy is needed.
  if (isEqual(originalFiles.map((f) => getVerificationFileName(f)), validatedFiles.map((f) => getVerificationFileName(f)))) {
    return [...verificationFiles];
  }
  return verificationFiles.map((file) => {
    const preValidationName = file.name;
    // Index into originalFiles doubles as the index into validatedFiles.
    const newFileIndex = originalFiles.findIndex((f) => getVerificationFileName(f) === preValidationName);

    if (newFileIndex !== -1) {
      const postValidationName = getVerificationFileName(validatedFiles[newFileIndex]);
      if (preValidationName !== postValidationName) {
        // Create a new File object with the updated name
        return new File([file], postValidationName, { type: file.type });
      }
    }

    // Return the original file if no update is needed
    return file;
  });
}
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/**
 * Human-readable description of a selected region for display in a TextField.
 *
 * @param {object|null} selectedRegion - { selectionLayer, caretPosition } or
 *   a falsy value when nothing is selected.
 * @returns {string} "start - end" (1-based) for a range selection,
 *   "insertion at N" for a caret, or a single space when there is no
 *   selection (so the TextField label keeps floating above the field).
 */
export function selectedRegion2String(selectedRegion) {
  if (!selectedRegion) {
    return ' ';
  }
  const { selectionLayer, caretPosition } = selectedRegion;
  if (caretPosition !== -1) {
    return `insertion at ${caretPosition}`;
  }
  return `${selectionLayer.start + 1} - ${selectionLayer.end + 1}`;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Convert a selected region to a GenBank-style location string.
 *
 * @param {object} region - { selectionLayer: {start, end} (0-based,
 *   inclusive), caretPosition } with caretPosition === -1 for a range.
 * @param {number} size - Sequence length, used for origin-spanning ranges.
 * @returns {string} "start..end" (1-based), "join(a..size,1..b)" when the
 *   selection wraps the origin, or "n^n+1" for a caret between bases.
 */
export function selectedRegion2SequenceLocation({ selectionLayer, caretPosition }, size) {
  if (caretPosition !== -1) {
    // Insertion point between two bases.
    return `${caretPosition}^${caretPosition + 1}`;
  }
  const zeroBasedStart = selectionLayer.start;
  const oneBasedEnd = selectionLayer.end + 1;
  if (oneBasedEnd > zeroBasedStart) {
    return `${zeroBasedStart + 1}..${oneBasedEnd}`;
  }
  // Selection wraps the origin of a circular sequence.
  return `join(${zeroBasedStart + 1}..${size},1..${oneBasedEnd})`;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { selectedRegion2SequenceLocation } from './selectedRegionUtils';

describe('selectedRegion2SequenceLocation', () => {
  it('should return the correct sequence location', () => {
    // [region, sequence size, expected location string]
    const cases = [
      // Normal features
      [{ selectionLayer: { start: 0, end: 0 }, caretPosition: -1 }, 10, '1..1'],
      [{ selectionLayer: { start: 0, end: 1 }, caretPosition: -1 }, 10, '1..2'],
      // Position between bases
      [{ selectionLayer: { start: -1, end: -1 }, caretPosition: 0 }, 10, '0^1'],
      // Origin-spanning feature
      [{ selectionLayer: { start: 7, end: 0 }, caretPosition: -1 }, 8, 'join(8..8,1..1)'],
      [{ selectionLayer: { start: 6, end: 1 }, caretPosition: -1 }, 8, 'join(7..8,1..2)'],
    ];
    cases.forEach(([region, size, expected]) => {
      expect(selectedRegion2SequenceLocation(region, size)).toBe(expected);
    });
  });
});
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import { getReverseComplementSequenceString } from '@teselagen/sequence-utils';
|
|
2
|
+
|
|
3
|
+
/**
 * Build a three-line text rendering of a double-stranded sequence with
 * sticky ends: top (watson) strand, base-pairing bars, and bottom (crick)
 * strand reversed to read left-to-right.
 *
 * @param {string} sequenceString - The watson strand sequence.
 * @param {number} overhangCrick3prime - Left-side overhang; negative trims
 *   the crick strand, positive blanks the watson strand.
 * @param {number} overhangWatson3prime - Right-side overhang; negative trims
 *   the watson strand, positive blanks the crick strand.
 * @returns {{watson: string, crick: string, middle: string}} Display lines
 *   (uppercased and elided with '...' when the body exceeds 10 bp).
 */
export function formatSequenceForOverhangDisplay(sequenceString, overhangCrick3prime, overhangWatson3prime) {
  let watson = sequenceString;
  let crick = getReverseComplementSequenceString(sequenceString);
  // If necessary, we trim the left side
  if (overhangCrick3prime < 0) {
    crick = crick.substring(0, crick.length + overhangCrick3prime) + ' '.repeat(-overhangCrick3prime);
  } else if (overhangCrick3prime > 0) {
    watson = ' '.repeat(overhangCrick3prime) + watson.substring(overhangCrick3prime, watson.length);
  }
  // Same for the right side
  if (overhangWatson3prime < 0) {
    watson = watson.substring(0, watson.length + overhangWatson3prime) + ' '.repeat(-overhangWatson3prime);
  } else if (overhangWatson3prime > 0) {
    crick = ' '.repeat(overhangWatson3prime) + crick.substring(overhangWatson3prime, crick.length);
  }
  // Invert the crick strand so both strands read left-to-right
  crick = crick.split('').reverse().join('');
  // Base-pairing bars: '|' wherever both strands have a base at that column.
  let middle = '';
  for (let i = 0; i < watson.length; i += 1) {
    middle += watson[i] !== ' ' && crick[i] !== ' ' ? '|' : ' ';
  }
  // We want to show up to 10 bp inside the body of the molecule
  const lengthLimit = 10 + Math.abs(overhangCrick3prime) + Math.abs(overhangWatson3prime);
  const trimRepresentation = (rep, edge) => {
    const edgeLeft = rep.substring(0, Math.abs(overhangCrick3prime) + edge);
    // Fixed typo: was `rep.legth` (undefined), which only worked because
    // substring treats an undefined end as the end of the string.
    const edgeRight = rep.substring(rep.length - Math.abs(overhangWatson3prime) - edge, rep.length);
    return `${edgeLeft}...${edgeRight}`;
  };

  if (watson.length >= lengthLimit) {
    watson = trimRepresentation(watson, 5).toUpperCase();
    crick = trimRepresentation(crick, 5).toUpperCase();
    middle = trimRepresentation(middle, 5).toUpperCase();
  }
  return { watson, crick, middle };
}
|