-
Notifications
You must be signed in to change notification settings - Fork 18
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(cogify): Update cogify to support the topo raster processes. BM-…
…1116 (#3388) ### Motivation As a Basemaps user, I want to consume the NZTopo 50 & 250 Maps [as a tile service]. ### Modifications This work comprises a series of additions and modifications to the `basemaps/cogify` package. These changes allow us to generate [STAC] files for an NZTopo Map Series imagery collection. They also allow us to generate standardised and cloud-optimised GeoTIFFs from the map sheet files of such collections. We have adjusted the `basemaps/cogify` package to handle the following collections stored in the AWS S3 [TopoReleaseArchive] directory: ``` s3://topographic-upload/TopoReleaseArchive/NZTopo50_GeoTif_Gridless/ s3://topographic-upload/TopoReleaseArchive/NZTopo250_GeoTif_Gridless/ ``` There is also a [new Argo workflow in development][pr-argo-workflow] that depends on this work as part of its process flow. The workflow's purpose is to automate the standardisation of an NZTopo Map Series imagery collection. ### Generating STAC files This work defines a new process for generating a nested directory structure of STAC files for an NZTopo Map Series imagery collection. The process operates as follows: 1. Loads the collection of GeoTiff images into memory 2. Extracts the map code, version, and metadata (e.g. EPSG) from each image 3. Identifies the latest version of each map sheet by code 4. Groups the images by EPSG 5. Generates a StacItem file for each image, and a StacCollection file for each EPSG grouping 6. Structures the groups of StacItem and StacCollection files into a directory tree 7. Saves the files as they are structured to a target location #### Outputs The command groups the images by EPSG and then structures the generated StacItem and StacCollection files, as illustrated: | Schema | Example | | - | - | | ![][schema] | ![][example] | The command then saves the generated tree of folders and files into the target location directory.
### Processing Map Sheet files This work extends the `create` CLI command of the `basemaps/cogify` package. This extension allows the command to recognise and process the map sheet files of an NZTopo Map Series imagery collection by way of particular properties added to the accompanying STAC files. [as a tile service]: #3365 (comment) [STAC]: https://stacspec.org/en [TopoReleaseArchive]: https://linz-topographic-upload.s3.ap-southeast-2.amazonaws.com/topographic/TopoReleaseArchive/ [pr-argo-workflow]: linz/topo-workflows#932 [schema]: https://github.com/user-attachments/assets/2f4eddd7-eb2c-4796-8ab7-b3d9eca9140f [example]: https://github.com/user-attachments/assets/8f111ba6-befd-44b5-9566-db07a07089ed --------- Co-authored-by: Blayne Chard <bchard@linz.govt.nz> Co-authored-by: Tawera Manaena <TManaena@linz.govt.nz>
- Loading branch information
1 parent
4bb91bd
commit 4366df6
Showing
18 changed files
with
1,059 additions
and
67 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,37 @@ | ||
import { strictEqual, throws } from 'node:assert'; | ||
import { describe, it } from 'node:test'; | ||
|
||
import { extractMapCodeAndVersion } from '../topo/extract.js'; | ||
|
||
describe('extractMapCodeAndVersion', () => { | ||
const FakeDomain = 's3://topographic/fake-domain'; | ||
const validFiles = [ | ||
{ input: `${FakeDomain}/MB07_GeoTifv1-00.tif`, expected: { mapCode: 'MB07', version: 'v1-00' } }, | ||
{ input: `${FakeDomain}/MB07_GRIDLESS_GeoTifv1-00.tif`, expected: { mapCode: 'MB07', version: 'v1-00' } }, | ||
{ input: `${FakeDomain}/MB07_TIFFv1-00.tif`, expected: { mapCode: 'MB07', version: 'v1-00' } }, | ||
{ input: `${FakeDomain}/MB07_TIFF_600v1-00.tif`, expected: { mapCode: 'MB07', version: 'v1-00' } }, | ||
{ | ||
input: `${FakeDomain}/AX32ptsAX31AY31AY32_GeoTifv1-00.tif`, | ||
expected: { mapCode: 'AX32ptsAX31AY31AY32', version: 'v1-00' }, | ||
}, | ||
{ | ||
input: `${FakeDomain}/AZ36ptsAZ35BA35BA36_GeoTifv1-00.tif`, | ||
expected: { mapCode: 'AZ36ptsAZ35BA35BA36', version: 'v1-00' }, | ||
}, | ||
]; | ||
const invalidFiles = [`${FakeDomain}/MB07_GeoTif1-00.tif`, `${FakeDomain}/MB07_TIFF_600v1.tif`]; | ||
|
||
it('should parse the correct MapSheet Names', () => { | ||
for (const file of validFiles) { | ||
const output = extractMapCodeAndVersion(new URL(file.input)); | ||
strictEqual(output.mapCode, file.expected.mapCode, 'Map code does not match'); | ||
strictEqual(output.version, file.expected.version, 'Version does not match'); | ||
} | ||
}); | ||
|
||
it('should not able to parse a version from file', () => { | ||
for (const file of invalidFiles) { | ||
throws(() => extractMapCodeAndVersion(new URL(file)), new Error(`Version not found in the file name: "${file}"`)); | ||
} | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
import assert from 'node:assert'; | ||
import { beforeEach, describe, it } from 'node:test'; | ||
|
||
import { fsa, FsMemory, LogConfig } from '@basemaps/shared'; | ||
import { TestTiff } from '@basemaps/test'; | ||
import { StacCollection } from 'stac-ts'; | ||
|
||
import { TopoStacCreationCommand } from '../cli.topo.js'; | ||
|
||
describe('cli.topo', () => { | ||
const fsMemory = new FsMemory(); | ||
|
||
beforeEach(async () => { | ||
LogConfig.get().level = 'silent'; | ||
fsa.register('memory://', fsMemory); | ||
fsMemory.files.clear(); | ||
|
||
await fsa.write(new URL('memory://source/CJ10_GRIDLESS_GeoTifv1-00.tif'), fsa.readStream(TestTiff.Nztm2000)); | ||
await fsa.write(new URL('memory://source/CJ10_GRIDLESS_GeoTifv1-01.tif'), fsa.readStream(TestTiff.Nztm2000)); | ||
}); | ||
|
||
const baseArgs = { | ||
paths: [new URL('memory://source/')], | ||
target: new URL('memory://target/'), | ||
mapSeries: 'topo50', | ||
latestOnly: false, | ||
title: undefined, | ||
output: undefined, | ||
|
||
// extra logging arguments | ||
verbose: false, | ||
extraVerbose: false, | ||
}; | ||
|
||
it('should generate a covering', async () => { | ||
const ret = await TopoStacCreationCommand.handler({ ...baseArgs }).catch((e) => String(e)); | ||
assert.equal(ret, undefined); // no errors returned | ||
|
||
const files = [...fsMemory.files.keys()]; | ||
files.sort(); | ||
|
||
assert.deepEqual(files, [ | ||
'memory://source/CJ10_GRIDLESS_GeoTifv1-00.tif', | ||
'memory://source/CJ10_GRIDLESS_GeoTifv1-01.tif', | ||
'memory://target/topo50/gridless_600dpi/2193/CJ10_v1-00.json', | ||
'memory://target/topo50/gridless_600dpi/2193/CJ10_v1-01.json', | ||
'memory://target/topo50/gridless_600dpi/2193/collection.json', | ||
'memory://target/topo50_latest/gridless_600dpi/2193/CJ10.json', | ||
'memory://target/topo50_latest/gridless_600dpi/2193/collection.json', | ||
]); | ||
|
||
const collectionJson = await fsa.readJson<StacCollection>( | ||
new URL('memory://target/topo50/gridless_600dpi/2193/collection.json'), | ||
); | ||
assert.equal(collectionJson['description'], 'Topographic maps of New Zealand'); | ||
assert.equal(collectionJson['linz:slug'], 'topo50-new-zealand-mainland'); | ||
assert.equal(collectionJson['linz:region'], 'new-zealand'); | ||
|
||
const latestItemUrl = new URL('memory://target/topo50_latest/gridless_600dpi/2193/CJ10.json'); | ||
const latestVersion = await fsa.readJson<StacCollection>(latestItemUrl); | ||
|
||
// Latest file should be derived_from the source file | ||
const derived = latestVersion.links.filter((f) => f.rel === 'derived_from'); | ||
assert.equal(derived.length, 1); | ||
|
||
const derivedFile = new URL(derived[0].href, latestItemUrl); | ||
assert.equal(derivedFile.href, 'memory://target/topo50/gridless_600dpi/2193/CJ10_v1-01.json'); | ||
}); | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.