@basemaps/cli 6.28.1 → 6.29.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +21 -0
- package/README.md +46 -13
- package/build/cli/bin.d.ts +2 -0
- package/build/cli/bin.d.ts.map +1 -0
- package/build/cli/bin.js +3 -0
- package/build/cli/cogify/__tests__/batch.job.test.js +48 -22
- package/build/cli/cogify/action.cog.d.ts +2 -1
- package/build/cli/cogify/action.cog.d.ts.map +1 -1
- package/build/cli/cogify/action.cog.js +49 -42
- package/build/cli/cogify/action.job.d.ts +1 -1
- package/build/cli/cogify/action.job.d.ts.map +1 -1
- package/build/cli/cogify/action.job.js +1 -1
- package/build/cli/cogify/batch.job.d.ts +6 -4
- package/build/cli/cogify/batch.job.d.ts.map +1 -1
- package/build/cli/cogify/batch.job.js +63 -49
- package/build/cli/cogify/imagery.config.d.ts.map +1 -1
- package/build/cli/cogify/imagery.config.js +0 -2
- package/build/cli/config/action.bundle.d.ts +11 -0
- package/build/cli/config/action.bundle.d.ts.map +1 -0
- package/build/cli/config/action.bundle.js +36 -0
- package/build/cli/config/action.import.d.ts +14 -0
- package/build/cli/config/action.import.d.ts.map +1 -0
- package/build/cli/config/action.import.js +80 -0
- package/build/cli/config/config.diff.d.ts +10 -0
- package/build/cli/config/config.diff.d.ts.map +1 -0
- package/build/cli/config/config.diff.js +47 -0
- package/build/cli/config/config.update.d.ts +23 -0
- package/build/cli/config/config.update.d.ts.map +1 -0
- package/build/cli/config/config.update.js +71 -0
- package/build/cli/{cogify/index.d.ts → index.d.ts} +1 -1
- package/build/cli/index.d.ts.map +1 -0
- package/build/cli/index.js +21 -0
- package/build/cli/screenshot/action.screenshot.d.ts +61 -0
- package/build/cli/screenshot/action.screenshot.d.ts.map +1 -0
- package/build/cli/screenshot/action.screenshot.js +121 -0
- package/build/cog/job.factory.d.ts.map +1 -1
- package/build/cog/job.factory.js +12 -4
- package/package.json +15 -9
- package/build/cli/cogify/index.d.ts.map +0 -1
- package/build/cli/cogify/index.js +0 -16
- package/cogify.js +0 -3
package/CHANGELOG.md
CHANGED

@@ -3,6 +3,27 @@
 All notable changes to this project will be documented in this file.
 See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
 
+# [6.29.0](https://github.com/linz/basemaps/compare/v6.28.1...v6.29.0) (2022-06-27)
+
+
+### Bug Fixes
+
+* **cli:** ensure cli can run inside of docker ([#2273](https://github.com/linz/basemaps/issues/2273)) ([8184167](https://github.com/linz/basemaps/commit/81841674efba2f86d9a39d01af62fccb1fe6f70f))
+* **cli:** ensure errors are thrown ([#2248](https://github.com/linz/basemaps/issues/2248)) ([c0923fe](https://github.com/linz/basemaps/commit/c0923fe137ce36c610c6e13332292d5c7f573c16))
+* **cli:** give each internal job a fresh copy of all data ([#2250](https://github.com/linz/basemaps/issues/2250)) ([52d59de](https://github.com/linz/basemaps/commit/52d59def25358e5a0f1fae5506edee6d8b17dbad))
+* **cli:** look into batch to get exact list of tiffs being processed ([#2249](https://github.com/linz/basemaps/issues/2249)) ([69b722e](https://github.com/linz/basemaps/commit/69b722ea3190488231baf3b7023ce83e60e432c1))
+* **config:** include missing zod dependency ([#2245](https://github.com/linz/basemaps/issues/2245)) ([94914dc](https://github.com/linz/basemaps/commit/94914dc728b60c51d6382c4460a1bd4e233f00c5))
+
+
+### Features
+
+* **cli:** merge all cli from config-cli package and server package into cli package. ([#2260](https://github.com/linz/basemaps/issues/2260)) ([73afd61](https://github.com/linz/basemaps/commit/73afd610740f69e67504a0610c94a407a6ba4091))
+* **config:** create a hash of config bundles and use bundle created timestamp for records ([#2274](https://github.com/linz/basemaps/issues/2274)) ([bd9c7bb](https://github.com/linz/basemaps/commit/bd9c7bbf3f651417b60ba6ad2ca655f89f1f5cd9))
+
+
+
+
+
 ## [6.28.1](https://github.com/linz/basemaps/compare/v6.28.0...v6.28.1) (2022-06-07)
 
 **Note:** Version bump only for package @basemaps/cli
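The "ensure errors are thrown" fix ([#2248](https://github.com/linz/basemaps/issues/2248)) is easiest to follow in the `action.cog.js` hunk later in this diff, where the catch block now re-throws after recording the failure. A minimal TypeScript sketch of that handle-then-rethrow shape (`markJobFailed` is a hypothetical stand-in for the job-status update, not an API from this package):

```ts
// Handle the failure (update job status, log it) but still propagate the
// error, so the process exits non-zero and AWS Batch marks the job failed.
async function runJob(work: () => Promise<void>, markJobFailed: (e: unknown) => Promise<void>): Promise<void> {
  try {
    await work();
  } catch (e) {
    await markJobFailed(e); // record the failure state first
    throw e; // ensure the error is thrown rather than swallowed
  }
}
```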
package/README.md
CHANGED

@@ -1,35 +1,69 @@
-# @basemaps/
+# @basemaps/cli
 
-Create
+This package is to control the configuration in the LINZ basemaps product.
 
 ## Install
 
 This script requires docker to be installed
 
-To install
+To install
 
 ```bash
-npm i @basemaps/
+npm i @basemaps/cli
 ```
 
-## Usage
+## Usage -- Bundle
 
-
+Bundle config files into config bundle json from a given config path.
 
 ```bash
-
+./bin/bmc.js bundle --config config/ --output config.json
 ```
 
-
+## Usage -- Import
+
+Import all configs from a bundled config.json into dynamo db from a given config path
 
 ```bash
-
+./bin/bmc.js import --config config.json --commit
 ```
 
-
+## Usage -- Screenshots
+
+Dump the screenshots from basemaps production
 
 ```bash
-
+./bin/bmc.js screenshot
+```
+
+Dump the screenshots from different host
+
+```bash
+./bin/bmc.js screenshot --host HOST
+
+```
+
+Dump the screenshots with config file
+
+```bash
+./bin/bmc.js screenshot --config s3://..../config.json.gz
+
+```
+
+## Usage -- cogify
+
+Create a collection of cloud optimized geotiff's from a collection of geotiff, that is optimized to be used in `@basemaps/tiler`
+
+Create a list of COG's to create
+
+```bash
+./bin/bmc.js -V job --source ./source_folder/ --output ./source_folder/cogify/
+```
+
+Build a specific COG
+
+```bash
+./bin/bmc.js -V cog --job ./cogs/01DYREBEEFFXEPBAYBED2TMAFJ/job.json --name 1-2-3 --commit
 ```
 
 ## Advanced Job creation

@@ -39,7 +73,7 @@ Useful configuration options for `cogify job`
 ### Min Tile zoom `--min-zoom :number`
 
 using the argument `--min-zoom` will configure the highest COG tile that can be created
-for example, `--min-zoom 10` means tiles of zoom `0 - 10`
+for example, `--min-zoom 10` means tiles of zoom `0 - 10` can be created but `11` cannot.
 
 This is useful to control the size of COGS across varying imagery sets World level z0-5 vs region z12-15 vs specific area 20+
 

@@ -59,7 +93,6 @@ Outputs two GeoJSON files to provide a representation of the source and target imagery
 
 The best way to build a cog collection is to create multiple JOBS with `--geojson` enabled while adjusting `--max-cogs` and `--min-zoom` when a good covering is found the job can then be submitted to AWS for processing.
 
-
 Using a large area of interest as shown in the imagery set below and is showing three different sets of parameters, The shaded green area is the original source imagery
 
 ![AdvancedJobCreation](./static/advanced.job.creation.png)
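The bundle and import commands documented above compose into a single publish flow. A hedged Node/TypeScript sketch using only the flags shown in this README; running `import` once without `--commit` as a dry run is an assumption here, borrowed from the `--commit` convention the cogify commands use:

```ts
import { execFileSync } from 'node:child_process';

const bmc = './bin/bmc.js'; // path from the examples above

// Bundle the config directory into a single config.json.
execFileSync(bmc, ['bundle', '--config', 'config/', '--output', 'config.json'], { stdio: 'inherit' });
// Assumed dry run first (no --commit), then the real import into dynamo db.
execFileSync(bmc, ['import', '--config', 'config.json'], { stdio: 'inherit' });
execFileSync(bmc, ['import', '--config', 'config.json', '--commit'], { stdio: 'inherit' });
```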
package/build/cli/bin.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../src/cli/bin.ts"],"names":[],"mappings":""}
package/build/cli/cogify/__tests__/batch.job.test.js
CHANGED

@@ -1,3 +1,4 @@
+import { LogConfig } from '@basemaps/shared';
 import o from 'ospec';
 import { BatchJob, extractResolutionFromName } from '../batch.job.js';
 o.spec('action.batch', () => {
@@ -12,9 +13,10 @@ o.spec('action.batch', () => {
     });
     o('should create valid jobNames', () => {
         const fakeJob = { id: '01FHRPYJ5FV1XAARZAC4T4K6MC', name: 'geographx_nz_texture_shade_2012_8-0m' };
-        o(BatchJob.id(fakeJob, ['0'])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-9af5e139bbb3e502-0');
+        o(BatchJob.id(fakeJob, ['0'])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-9af5e139bbb3e502-1x-0');
         fakeJob.name = 'ōtorohanga_urban_2021_0.1m_RGB';
-        o(BatchJob.id(fakeJob, ['0'])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-5294acface81c107-0');
+        o(BatchJob.id(fakeJob, ['0'])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-5294acface81c107-1x-0');
+        o(BatchJob.id(fakeJob)).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-5294acface81c107-');
     });
     o('should truncate job names to 128 characters', () => {
         const fakeJob = { id: '01FHRPYJ5FV1XAARZAC4T4K6MC', name: 'geographx_nz_texture_shade_2012_8-0m' };
@@ -22,9 +24,9 @@ o.spec('action.batch', () => {
             'this is a really long file name',
             'it should over flow 128 characters',
             'so it should be truncated at some point.tiff',
-        ])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-9af5e139bbb3e502-it should over flow 128 characters_so it should be truncated at some point.
+        ])).equals('01FHRPYJ5FV1XAARZAC4T4K6MC-9af5e139bbb3e502-3x-it should over flow 128 characters_so it should be truncated at some point.tiff_t');
     });
-    o('
+    o.spec('ChunkJobs', () => {
         const fakeGsd = 0.9;
         const ChunkJobSmall = 4097;
         const ChunkJobMiddle = 8193;
@@ -47,23 +49,47 @@ o.spec('action.batch', () => {
             { name: '1-2-14', width: ChunkJobMiddle * fakeGsd - 1 }, // Middle Job 70
         ];
         const fakeJob = { id: '01FHRPYJ5FV1XAARZAC4T4K6MC', output: { files: fakeFiles, gsd: fakeGsd } };
-        o(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        o('should prepare valid chunk jobs', async () => {
+            o(BatchJob.getJobs(fakeJob, new Set(), LogConfig.get())).deepEquals([
+                [fakeFiles[2].name],
+                [fakeFiles[5].name],
+                [
+                    fakeFiles[0].name,
+                    fakeFiles[1].name,
+                    fakeFiles[3].name,
+                    fakeFiles[4].name,
+                    fakeFiles[6].name,
+                    fakeFiles[7].name,
+                    fakeFiles[8].name,
+                    fakeFiles[9].name,
+                    fakeFiles[10].name,
+                    fakeFiles[11].name,
+                ],
+                [fakeFiles[13].name],
+                [fakeFiles[12].name, fakeFiles[14].name], // Second Chunk
+            ]);
+        });
+        o('should skip tiffs that exist', () => {
+            const existing = new Set([
+                `${fakeFiles[0].name}.tiff`,
+                `${fakeFiles[1].name}.tiff`,
+                `${fakeFiles[13].name}.tiff`,
+            ]);
+            o(BatchJob.getJobs(fakeJob, existing, LogConfig.get())).deepEquals([
+                [fakeFiles[2].name],
+                [fakeFiles[5].name],
+                [
+                    fakeFiles[3].name,
+                    fakeFiles[4].name,
+                    fakeFiles[6].name,
+                    fakeFiles[7].name,
+                    fakeFiles[8].name,
+                    fakeFiles[9].name,
+                    fakeFiles[10].name,
+                    fakeFiles[11].name,
+                ],
+                [fakeFiles[12].name, fakeFiles[14].name], // Second Chunk
+            ]);
+        });
     });
 });
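The new expectations in these tests come from the job-id format introduced in `batch.job.js` further down in this diff: a `<count>x-` marker now sits between the hashed imagery name and the joined file names, and calling `BatchJob.id` without file names returns just the prefix (used to match running Batch jobs). A standalone TypeScript sketch of that naming rule:

```ts
import { createHash } from 'node:crypto';

// Mirrors BatchJob.id from this release. Imagery names are uncontrolled,
// so a 16-hex-character sha256 slice stands in for the raw name.
function batchJobId(job: { id: string; name: string }, fileNames?: string[]): string {
  const jobName = createHash('sha256').update(job.name).digest('hex').slice(0, 16);
  if (fileNames == null) return `${job.id}-${jobName}-`; // prefix only
  fileNames.sort((a, b) => a.localeCompare(b));
  // "<n>x-" records how many tiffs this batch job covers; the result is
  // sliced to 128 characters, the AWS Batch job-name limit.
  return `${job.id}-${jobName}-${fileNames.length}x-${fileNames.join('_')}`.slice(0, 128);
}

// batchJobId({ id: '01FHRPYJ5FV1XAARZAC4T4K6MC', name: 'geographx_nz_texture_shade_2012_8-0m' }, ['0'])
// => '01FHRPYJ5FV1XAARZAC4T4K6MC-9af5e139bbb3e502-1x-0'
```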
package/build/cli/cogify/action.cog.d.ts
CHANGED

@@ -2,7 +2,7 @@ import { LogType } from '@basemaps/shared';
 import { CommandLineAction } from '@rushstack/ts-command-line';
 import { CogStacJob } from '../../cog/cog.stac.job.js';
 import { CogJob } from '../../cog/types.js';
-export declare class ActionCogCreate extends CommandLineAction {
+export declare class CommandCogCreate extends CommandLineAction {
     private job?;
     private name?;
     private commit?;
@@ -10,6 +10,7 @@ export declare class ActionCogCreate extends CommandLineAction {
     constructor();
     getNames(job: CogJob): Set<string> | null;
     onExecute(): Promise<void>;
+    processTiff(job: CogStacJob, tiffName: string, tmpFolder: string, isCommit: boolean, logger: LogType): Promise<void>;
     /** Check to see how many tiffs are remaining in the job */
     checkJobStatus(job: CogStacJob, logger: LogType): Promise<void>;
     protected onDefineParameters(): void;
package/build/cli/cogify/action.cog.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"action.cog.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/action.cog.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiD,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE1F,OAAO,EACL,iBAAiB,EAKlB,MAAM,4BAA4B,CAAC;AAIpC,OAAO,EAAE,UAAU,EAAE,MAAM,2BAA2B,CAAC;AAGvD,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAQ5C,qBAAa,
+{"version":3,"file":"action.cog.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/action.cog.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiD,OAAO,EAAE,MAAM,kBAAkB,CAAC;AAE1F,OAAO,EACL,iBAAiB,EAKlB,MAAM,4BAA4B,CAAC;AAIpC,OAAO,EAAE,UAAU,EAAE,MAAM,2BAA2B,CAAC;AAGvD,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAQ5C,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD,OAAO,CAAC,GAAG,CAAC,CAA6B;IACzC,OAAO,CAAC,IAAI,CAAC,CAAiC;IAC9C,OAAO,CAAC,MAAM,CAAC,CAA2B;IAC1C,OAAO,CAAC,QAAQ,CAAC,CAAkC;;IAUnD,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,CAAC,GAAG,IAAI;IA0CnC,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IAkD1B,WAAW,CACf,GAAG,EAAE,UAAU,EACf,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,OAAO,EACjB,MAAM,EAAE,OAAO,GACd,OAAO,CAAC,IAAI,CAAC;IA2ChB,2DAA2D;IACrD,cAAc,CAAC,GAAG,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAqCrE,SAAS,CAAC,kBAAkB,IAAI,IAAI;CA4BrC"}
package/build/cli/cogify/action.cog.js
CHANGED

@@ -12,7 +12,7 @@ import path from 'path';
 import { insertConfigImagery, insertConfigTileSet } from './imagery.config.js';
 import { JobStatus } from '@basemaps/config';
 import { prepareUrl } from '../util.js';
-export class ActionCogCreate extends CommandLineAction {
+export class CommandCogCreate extends CommandLineAction {
     constructor() {
         super({
             actionName: 'cog',
@@ -55,12 +55,12 @@ export class ActionCogCreate extends CommandLineAction {
         return output;
     }
     async onExecute() {
-        var _a, _b, _c
-        const
-        if (
+        var _a, _b, _c;
+        const jobLocation = (_a = this.job) === null || _a === void 0 ? void 0 : _a.value;
+        if (jobLocation == null)
             throw new Error('Missing job name');
-        const job = await CogStacJob.load(jobFn);
         const isCommit = (_c = (_b = this.commit) === null || _b === void 0 ? void 0 : _b.value) !== null && _c !== void 0 ? _c : false;
+        const job = await CogStacJob.load(jobLocation);
         const logger = LogConfig.get().child({
             correlationId: job.id,
             imageryName: job.name,
@@ -69,49 +69,15 @@ export class ActionCogCreate extends CommandLineAction {
         LogConfig.set(logger);
         logger.info('CogCreate:Start');
         const gdalVersion = await Gdal.version(logger);
-        logger.info({
+        logger.info({ gdalVersion }, 'CogCreate:GdalVersion');
         const names = this.getNames(job);
         if (names == null || names.size === 0)
             return;
         const tmpFolder = await makeTempFolder(`basemaps-${job.id}-${CliId}`);
         try {
             for (const name of names) {
-                const
-
-                fsa.configure(job.output.location);
-                const outputExists = await fsa.exists(targetPath);
-                logger.info({ targetPath, outputExists }, 'CogCreate:CheckExists');
-                // Output file exists don't try and overwrite it
-                if (outputExists) {
-                    logger.warn({ targetPath }, 'CogCreate:OutputExists');
-                    await this.checkJobStatus(job, logger);
-                    continue;
-                }
-                let cutlineJson;
-                if (job.output.cutline != null) {
-                    const cutlinePath = job.getJobPath('cutline.geojson.gz');
-                    logger.info({ path: cutlinePath }, 'CogCreate:UsingCutLine');
-                    cutlineJson = await Cutline.loadCutline(cutlinePath);
-                }
-                else {
-                    logger.warn('NoCutLine');
-                }
-                const cutline = new Cutline(job.tileMatrix, cutlineJson, (_d = job.output.cutline) === null || _d === void 0 ? void 0 : _d.blend, job.output.oneCogCovering);
-                const tmpVrtPath = await CogVrt.buildVrt(tiffFolder, job, cutline, name, logger);
-                if (tmpVrtPath == null) {
-                    logger.warn({ name }, 'CogCreate:NoMatchingSourceImagery');
-                    return;
-                }
-                const tmpTiff = fsa.join(tiffFolder, `${name}.tiff`);
-                await buildCogForName(job, name, tmpVrtPath, tmpTiff, logger, isCommit);
-                logger.info({ target: targetPath }, 'CogCreate:StoreTiff');
-                if (isCommit) {
-                    await fsa.write(targetPath, createReadStream(tmpTiff));
-                    await this.checkJobStatus(job, logger);
-                }
-                else {
-                    logger.warn({ name }, 'DryRun:Done');
-                }
+                const tiffJob = await CogStacJob.load(jobLocation);
+                await this.processTiff(tiffJob, name, tmpFolder, isCommit, logger.child({ tiffName: name }));
             }
         }
         catch (e) {
@@ -129,12 +95,53 @@ export class ActionCogCreate extends CommandLineAction {
             else
                 throw new Error('Unable update the Processing Job status:' + jobFailed.id);
             }
+            // Ensure the error is thrown
+            throw e;
         }
         finally {
             // Cleanup!
             await fs.rm(tmpFolder, { recursive: true });
         }
     }
+    async processTiff(job, tiffName, tmpFolder, isCommit, logger) {
+        var _a;
+        const tiffFolder = await makeTiffFolder(tmpFolder, tiffName);
+        const targetPath = job.getJobPath(`${tiffName}.tiff`);
+        fsa.configure(job.output.location);
+        const outputExists = await fsa.exists(targetPath);
+        logger.info({ targetPath, outputExists }, 'CogCreate:CheckExists');
+        // Output file exists don't try and overwrite it
+        if (outputExists) {
+            logger.warn({ targetPath }, 'CogCreate:OutputExists');
+            await this.checkJobStatus(job, logger);
+            return;
+        }
+        let cutlineJson;
+        if (job.output.cutline != null) {
+            const cutlinePath = job.getJobPath('cutline.geojson.gz');
+            logger.info({ path: cutlinePath }, 'CogCreate:UsingCutLine');
+            cutlineJson = await Cutline.loadCutline(cutlinePath);
+        }
+        else {
+            logger.warn('CutLine:Skip');
+        }
+        const cutline = new Cutline(job.tileMatrix, cutlineJson, (_a = job.output.cutline) === null || _a === void 0 ? void 0 : _a.blend, job.output.oneCogCovering);
+        const tmpVrtPath = await CogVrt.buildVrt(tiffFolder, job, cutline, tiffName, logger);
+        if (tmpVrtPath == null) {
+            logger.warn('CogCreate:NoMatchingSourceImagery');
+            return;
+        }
+        const tmpTiff = fsa.join(tiffFolder, `${tiffName}.tiff`);
+        await buildCogForName(job, tiffName, tmpVrtPath, tmpTiff, logger, isCommit);
+        logger.info({ target: targetPath }, 'CogCreate:StoreTiff');
+        if (isCommit) {
+            await fsa.write(targetPath, createReadStream(tmpTiff));
+            await this.checkJobStatus(job, logger);
+        }
+        else {
+            logger.warn('DryRun:Done');
+        }
+    }
     /** Check to see how many tiffs are remaining in the job */
     async checkJobStatus(job, logger) {
         const basePath = job.getJobPath();
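The hunk above is where two changelog entries meet: the per-tiff work moves into `processTiff`, and each iteration now loads a fresh `CogStacJob` from the job location so state mutated while processing one tiff cannot leak into the next ([#2250](https://github.com/linz/basemaps/issues/2250)). A condensed TypeScript sketch of the loop's new shape (signatures simplified for illustration):

```ts
// Each tiff gets its own freshly loaded job object; only the loader and the
// per-tiff worker are shown, everything else is elided.
async function processAll<Job>(
  jobLocation: string,
  names: Iterable<string>,
  load: (location: string) => Promise<Job>,
  processTiff: (job: Job, tiffName: string) => Promise<void>,
): Promise<void> {
  for (const name of names) {
    const tiffJob = await load(jobLocation); // fresh copy per tiff (#2250)
    await processTiff(tiffJob, name);
  }
}
```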
package/build/cli/cogify/action.job.d.ts
CHANGED

@@ -6,7 +6,7 @@ export declare class CLiInputData {
     externalId: CommandLineStringParameter;
     constructor(parent: CommandLineAction, prefix: string);
 }
-export declare class
+export declare class CommandJobCreate extends CommandLineAction {
     private source;
     private output;
     private maxConcurrency;
package/build/cli/cogify/action.job.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"action.job.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/action.job.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAuB,MAAM,kBAAkB,CAAC;AACnE,OAAO,EACL,iBAAiB,EAGjB,0BAA0B,EAC3B,MAAM,4BAA4B,CAAC;AAMpC,qBAAa,YAAY;IACvB,IAAI,EAAE,0BAA0B,CAAC;IACjC,OAAO,EAAE,0BAA0B,CAAC;IACpC,UAAU,EAAE,0BAA0B,CAAC;gBAE3B,MAAM,EAAE,iBAAiB,EAAE,MAAM,EAAE,MAAM;CAsBtD;AAED,qBAAa,
+{"version":3,"file":"action.job.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/action.job.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAuB,MAAM,kBAAkB,CAAC;AACnE,OAAO,EACL,iBAAiB,EAGjB,0BAA0B,EAC3B,MAAM,4BAA4B,CAAC;AAMpC,qBAAa,YAAY;IACvB,IAAI,EAAE,0BAA0B,CAAC;IACjC,OAAO,EAAE,0BAA0B,CAAC;IACpC,UAAU,EAAE,0BAA0B,CAAC;gBAE3B,MAAM,EAAE,iBAAiB,EAAE,MAAM,EAAE,MAAM;CAsBtD;AAED,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,cAAc,CAA8B;IACpD,OAAO,CAAC,OAAO,CAA6B;IAC5C,OAAO,CAAC,YAAY,CAA8B;IAClD,OAAO,CAAC,UAAU,CAA6B;IAC/C,OAAO,CAAC,mBAAmB,CAA6B;IACxD,OAAO,CAAC,WAAW,CAA2B;IAC9C,OAAO,CAAC,OAAO,CAA8B;IAC7C,OAAO,CAAC,gBAAgB,CAA8B;IACtD,OAAO,CAAC,UAAU,CAA6B;IAC/C,OAAO,CAAC,MAAM,CAA2B;IACzC,OAAO,CAAC,QAAQ,CAA6B;IAC7C,OAAO,CAAC,YAAY,CAA6B;;IAUjD,QAAQ,CAAC,MAAM,EAAE,YAAY,GAAG,UAAU;IAkBpC,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;IA0DhC,SAAS,CAAC,kBAAkB,IAAI,IAAI;CAyFrC"}
package/build/cli/cogify/batch.job.d.ts
CHANGED

@@ -9,6 +9,8 @@ import { CogJob } from '../../cog/types.js';
  */
 export declare function extractResolutionFromName(name: string): number;
 export declare class BatchJob {
+    static _batch: Batch;
+    static get batch(): Batch;
     /**
      * Create a id for a job
      *
@@ -17,8 +19,8 @@ export declare class BatchJob {
      * @param fileNames output filename
      * @returns job id
      */
-    static id(job: CogJob, fileNames
-    static batchOne(jobPath: string, job: CogJob,
+    static id(job: CogJob, fileNames?: string[]): string;
+    static batchOne(jobPath: string, job: CogJob, names: string[], isCommit: boolean): Promise<{
         jobName: string;
         jobId: string;
         memory: number;
@@ -27,12 +29,12 @@ export declare class BatchJob {
     * List all the current jobs in batch and their statuses
     * @returns a map of JobName to if their status is "ok" (not failed)
     */
-    static getCurrentJobList(
+    static getCurrentJobList(job: CogJob, logger: LogType): Promise<Set<string>>;
    static batchJob(job: CogJob, commit: boolean | undefined, logger: LogType): Promise<void>;
    /**
     * Prepare the jobs from job files, and chunk the small images into single
     * @returns List of jobs including single job and chunk jobs.
     */
-    static getJobs(job: CogJob
+    static getJobs(job: CogJob, existing: Set<string>, log: LogType): string[][];
 }
 //# sourceMappingURL=batch.job.d.ts.map
package/build/cli/cogify/batch.job.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"batch.job.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/batch.job.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,OAAO,EAAc,MAAM,kBAAkB,CAAC;AAC5E,OAAO,KAAK,MAAM,0BAA0B,CAAC;
+{"version":3,"file":"batch.job.d.ts","sourceRoot":"","sources":["../../../src/cli/cogify/batch.job.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,OAAO,EAAc,MAAM,kBAAkB,CAAC;AAC5E,OAAO,KAAK,MAAM,0BAA0B,CAAC;AAG7C,OAAO,EAAE,MAAM,EAAE,MAAM,oBAAoB,CAAC;AAa5C;;;;;GAKG;AACH,wBAAgB,yBAAyB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAI9D;AAED,qBAAa,QAAQ;IACnB,MAAM,CAAC,MAAM,EAAE,KAAK,CAAC;IACrB,MAAM,KAAK,KAAK,IAAI,KAAK,CAKxB;IACD;;;;;;;OAOG;IACH,MAAM,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,EAAE,GAAG,MAAM;WAQvC,QAAQ,CACnB,OAAO,EAAE,MAAM,EACf,GAAG,EAAE,MAAM,EACX,KAAK,EAAE,MAAM,EAAE,EACf,QAAQ,EAAE,OAAO,GAChB,OAAO,CAAC;QAAE,OAAO,EAAE,MAAM,CAAC;QAAC,KAAK,EAAE,MAAM,CAAC;QAAC,MAAM,EAAE,MAAM,CAAA;KAAE,CAAC;IAmC9D;;;OAGG;WACU,iBAAiB,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;WA0CrE,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,MAAM,qBAAQ,EAAE,MAAM,EAAE,OAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IA+ClF;;;OAGG;IACH,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,MAAM,EAAE,QAAQ,EAAE,GAAG,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE,OAAO,GAAG,MAAM,EAAE,EAAE;CAkC7E"}
package/build/cli/cogify/batch.job.js
CHANGED

@@ -2,6 +2,7 @@ import { TileMatrixSet } from '@basemaps/geo';
 import { Env, fsa, LogConfig, Projection } from '@basemaps/shared';
 import Batch from 'aws-sdk/clients/batch.js';
 import { createHash } from 'crypto';
+import { basename } from 'path';
 const JobQueue = 'CogBatchJobQueue';
 const JobDefinition = 'CogBatchJob';
 const ChunkJobMax = 1000;
@@ -24,6 +25,14 @@ export function extractResolutionFromName(name) {
     return parseFloat(matches[1].replace('-', '.')) * 1000;
 }
 export class BatchJob {
+    static get batch() {
+        var _a, _b;
+        if (this._batch)
+            return this._batch;
+        const region = (_b = (_a = Env.get('AWS_REGION')) !== null && _a !== void 0 ? _a : Env.get('AWS_DEFAULT_REGION')) !== null && _b !== void 0 ? _b : 'ap-southeast-2';
+        this._batch = new Batch({ region });
+        return this._batch;
+    }
     /**
      * Create a id for a job
      *
@@ -35,10 +44,12 @@ export class BatchJob {
     static id(job, fileNames) {
         // Job names are uncontrolled so hash the name and grab a small slice to use as a identifier
         const jobName = createHash('sha256').update(job.name).digest('hex').slice(0, 16);
+        if (fileNames == null)
+            return `${job.id}-${jobName}-`;
         fileNames.sort((a, b) => a.localeCompare(b));
-        return `${job.id}-${jobName}-${fileNames.join('_')}`.slice(0, 128);
+        return `${job.id}-${jobName}-${fileNames.length}x-${fileNames.join('_')}`.slice(0, 128);
     }
-    static async batchOne(jobPath, job,
+    static async batchOne(jobPath, job, names, isCommit) {
         const jobName = BatchJob.id(job, names);
         let memory = 3900;
         if (names.length === 1) {
@@ -55,7 +66,7 @@ export class BatchJob {
         let commandStr = ['-V', 'cog', '--job', jobPath, '--commit'];
         for (const name of names)
             commandStr = commandStr.concat(['--name', name]);
-        const batchJob = await batch
+        const batchJob = await this.batch
             .submitJob({
             jobName,
             jobQueue: JobQueue,
@@ -73,77 +84,75 @@ export class BatchJob {
      * List all the current jobs in batch and their statuses
      * @returns a map of JobName to if their status is "ok" (not failed)
      */
-    static async getCurrentJobList(
+    static async getCurrentJobList(job, logger) {
+        var _a;
+        const jobPrefix = BatchJob.id(job);
         // For some reason AWS only lets us query one status at a time.
-        const allStatuses = ['SUBMITTED', 'PENDING', 'RUNNABLE', 'STARTING', 'RUNNING'
-
-        const
+        const allStatuses = ['SUBMITTED', 'PENDING', 'RUNNABLE', 'STARTING', 'RUNNING' /* 'SUCCEEDED' */];
+        // Succeeded is not needed as we check to see if the output file exists, if it succeeds and the output file doesn't exist then something has gone wrong
+        const allJobs = await Promise.all(allStatuses.map((jobStatus) => this.batch.listJobs({ jobQueue: JobQueue, jobStatus }).promise()));
+        const jobIds = new Set();
+        // Find all the relevant jobs that start with our job prefix
         for (const status of allJobs) {
             for (const job of status.jobSummaryList) {
-                if (job.
-
-
-
-
+                if (!job.jobName.startsWith(jobPrefix))
+                    continue;
+                jobIds.add(job.jobId);
+            }
+        }
+        // Inspect all the jobs for what files are being "processed"
+        const tiffs = new Set();
+        let allJobIds = [...jobIds];
+        while (allJobIds.length > 0) {
+            logger.info({ jobCount: allJobIds.length }, 'JobFetch');
+            const jobList = allJobIds.slice(0, 100);
+            allJobIds = allJobIds.slice(100);
+            const describedJobs = await this.batch.describeJobs({ jobs: jobList }).promise();
+            if (describedJobs.jobs == null)
+                continue;
+            for (const job of describedJobs.jobs) {
+                const jobCommand = (_a = job.container) === null || _a === void 0 ? void 0 : _a.command;
+                if (jobCommand == null)
+                    continue;
+                // Extract the tiff names from the job command
+                for (let i = 0; i < jobCommand.length; i++) {
+                    if (jobCommand[i] === '--name')
+                        tiffs.add(jobCommand[i + 1]);
                }
            }
        }
-        return
+        return tiffs;
    }
    static async batchJob(job, commit = false, logger) {
-        var _a;
        const jobPath = job.getJobPath('job.json');
        if (!jobPath.startsWith('s3://')) {
            throw new Error(`AWS Batch collection.json have to be in S3, jobPath:${jobPath}`);
        }
        LogConfig.set(logger.child({ correlationId: job.id, imageryName: job.name }));
-        const region = (_a = Env.get('AWS_DEFAULT_REGION')) !== null && _a !== void 0 ? _a : 'ap-southeast-2';
-        const batch = new Batch({ region });
        fsa.configure(job.output.location);
-        const runningJobs = await BatchJob.getCurrentJobList(batch);
-        // Prepare chunk job and individual jobs based on imagery size.
-        const jobs = await this.getJobs(job);
        // Get all the existing output tiffs
-        const
-        const existTiffs = [];
+        const existTiffs = new Set();
        for await (const fileName of fsa.list(job.getJobPath())) {
            if (fileName.endsWith('.tiff'))
-                existTiffs.
+                existTiffs.add(basename(fileName));
        }
-        const
-        for (const
-
-
-
-
-            logger.info({ jobName }, 'JobRunning');
-            continue;
-        }
-        // Check existence of all the output tiffs.
-        let allExists = true;
-        for (const name of names) {
-            if (!existTiffs.includes(job.getJobPath(`${name}.tiff`)))
-                allExists = false;
-        }
-        if (allExists) {
-            logger.info({ targetPath, names }, 'FileExists');
-            continue;
-        }
-        // Ready to submit
-        toSubmit.push(names);
-    }
-    if (toSubmit.length === 0) {
+        const runningJobs = await this.getCurrentJobList(job, logger);
+        for (const tiffName of runningJobs)
+            existTiffs.add(`${tiffName}.tiff`);
+        // Prepare chunk job and individual jobs based on imagery size.
+        const jobs = await this.getJobs(job, existTiffs, logger);
+        if (jobs.length === 0) {
            logger.info('NoJobs');
            return;
        }
        logger.info({
            jobTotal: job.output.files.length,
-            jobLeft:
+            jobLeft: jobs.length,
            jobQueue: JobQueue,
            jobDefinition: JobDefinition,
        }, 'JobSubmit');
-        for (const names of
-            const jobStatus = await BatchJob.batchOne(jobPath, job,
+        for (const names of jobs) {
+            const jobStatus = await BatchJob.batchOne(jobPath, job, names, commit);
            logger.info(jobStatus, 'JobSubmitted');
        }
        if (!commit) {
@@ -155,11 +164,16 @@ export class BatchJob {
     * Prepare the jobs from job files, and chunk the small images into single
     * @returns List of jobs including single job and chunk jobs.
     */
-    static
+    static getJobs(job, existing, log) {
        const jobs = [];
        let chunkJob = [];
        let chunkUnit = 0; // Calculate the chunkUnit based on the size
        for (const file of job.output.files) {
+            const outputFile = `${file.name}.tiff`;
+            if (existing.has(outputFile)) {
+                log.debug({ fileName: outputFile }, 'Skip:Exists');
+                continue;
+            }
            const imageSize = file.width / job.output.gsd;
            if (imageSize > 16385) {
                jobs.push([file.name]);