Feat/project metadata piece importer #836

Merged: 3 commits, Feb 17, 2022

Changes from all commits
@@ -32,12 +32,12 @@ export class ArchiveReaderAdapter implements ArchiveReader {
 
     const exportConfigOrError = await extractFile(
       readableOrError.right,
-      new RegExp(ClonePieceRelativePaths[ClonePiece.ExportConfig].config),
+      ClonePieceRelativePaths[ClonePiece.ExportConfig].config,
     );
     if (isLeft(exportConfigOrError)) return left(archiveCorrupted);
     const exportConfig = JSON.parse(exportConfigOrError.right);
 
-    const resourceId = new ResourceId(exportConfig.resourceId);
+    const resourceId = ResourceId.create();
     const resourceKind = exportConfig.resourceKind;
 
     const validResourceKind = Object.values(ResourceKind).includes(
@@ -1,4 +1,4 @@
-import { Module } from '@nestjs/common';
+import { Logger, Module, Scope } from '@nestjs/common';
 import { FileRepositoryModule } from '@marxan/files-repository';
 import { TypeOrmModule } from '@nestjs/typeorm';
 import { geoprocessingConnections } from '@marxan-geoprocessing/ormconfig';
@@ -9,6 +9,9 @@ import { ProjectMetadataPieceImporter } from './project-metadata.piece-importer'
     FileRepositoryModule,
     TypeOrmModule.forFeature([], geoprocessingConnections.apiDB),
   ],
-  providers: [ProjectMetadataPieceImporter],
+  providers: [
+    ProjectMetadataPieceImporter,
+    { provide: Logger, useClass: Logger, scope: Scope.TRANSIENT },
+  ],
 })
 export class PiecesImportersModule {}
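
As a side note on the provider registered above, here is a minimal sketch (not part of this PR; the importer and module names are hypothetical) of why the Logger is bound with Scope.TRANSIENT: each consumer receives its own Logger instance, so one importer's setContext call does not relabel log lines emitted by another. This assumes a NestJS version where Logger#setContext is available, which the real importer below relies on.

import { Injectable, Logger, Module, Scope } from '@nestjs/common';

@Injectable()
class SomePieceImporter {
  constructor(private readonly logger: Logger) {
    // Each importer labels its own Logger instance.
    this.logger.setContext(SomePieceImporter.name);
  }

  doWork(): void {
    this.logger.log('working'); // logged with the [SomePieceImporter] context
  }
}

@Injectable()
class OtherPieceImporter {
  constructor(private readonly logger: Logger) {
    this.logger.setContext(OtherPieceImporter.name);
  }

  doWork(): void {
    this.logger.log('working'); // logged with the [OtherPieceImporter] context
  }
}

@Module({
  providers: [
    SomePieceImporter,
    OtherPieceImporter,
    // With a singleton Logger the last setContext call would win for everyone;
    // the transient scope gives each consumer its own instance instead.
    { provide: Logger, useClass: Logger, scope: Scope.TRANSIENT },
  ],
})
class ExampleImportersModule {}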
@@ -1,15 +1,15 @@
-import { Injectable } from '@nestjs/common';
-import { InjectEntityManager } from '@nestjs/typeorm';
-import { EntityManager } from 'typeorm';
-
-import { geoprocessingConnections } from '@marxan-geoprocessing/ormconfig';
 import { ClonePiece, ImportJobInput, ImportJobOutput } from '@marxan/cloning';
 import { ProjectMetadataContent } from '@marxan/cloning/infrastructure/clone-piece-data/project-metadata';
 import { FileRepository } from '@marxan/files-repository';
 
+import { geoprocessingConnections } from '@marxan-geoprocessing/ormconfig';
+
 import { extractFile } from '@marxan/utils';
+import { Injectable, Logger } from '@nestjs/common';
+import { InjectEntityManager } from '@nestjs/typeorm';
 import { isLeft } from 'fp-ts/lib/Either';
+import { EntityManager } from 'typeorm';
 import {
-  PieceImportProvider,
   ImportPieceProcessor,
+  PieceImportProvider,
 } from '../pieces/import-piece-processor';
 
 @Injectable()
@@ -19,13 +19,79 @@ export class ProjectMetadataPieceImporter implements ImportPieceProcessor {
     private readonly fileRepository: FileRepository,
     @InjectEntityManager(geoprocessingConnections.apiDB)
     private readonly entityManager: EntityManager,
-  ) {}
+    private readonly logger: Logger,
+  ) {
+    this.logger.setContext(ProjectMetadataPieceImporter.name);
+  }
 
   isSupported(piece: ClonePiece): boolean {
     return piece === ClonePiece.ProjectMetadata;
   }
 
+  private async getRandomOrganizationId(): Promise<string> {
+    const [{ id }]: [{ id: string }] = await this.entityManager.query(`
+      SELECT id FROM organizations LIMIT 1
+    `);
+    return id;
+  }
+
   async run(input: ImportJobInput): Promise<ImportJobOutput> {
-    throw new Error('Missing implementation');
+    const { uris, resourceId, piece } = input;
+
+    if (uris.length !== 1) {
+      const errorMessage = `uris array has an unexpected amount of elements: ${uris.length}`;
+      this.logger.error(errorMessage);
+      throw new Error(errorMessage);
+    }
+    const [projectMetadataLocation] = uris;
+
+    const readableOrError = await this.fileRepository.get(
+      projectMetadataLocation.uri,
+    );
+    if (isLeft(readableOrError)) {
+      const errorMessage = `File with piece data for ${piece}/${resourceId} is not available at ${projectMetadataLocation.uri}`;
+      this.logger.error(errorMessage);
+      throw new Error(errorMessage);
+    }
+
+    const stringProjectMetadataOrError = await extractFile(
+      readableOrError.right,
+      projectMetadataLocation.relativePath,
+    );
+    if (isLeft(stringProjectMetadataOrError)) {
+      const errorMessage = `Project metadata file extraction failed: ${projectMetadataLocation.relativePath}`;
+      this.logger.error(errorMessage);
+      throw new Error(errorMessage);
+    }
+
+    // TODO As we don't handle organizations for the time being,
+    // the imported/cloned project is homed arbitrarily within an
+    // existing organization. Once proper handling of organizations
+    // is added, users may be able to specify within which organization
+    // an imported/cloned project should be created.
+    const organizationId = await this.getRandomOrganizationId();
+    const projectMetadata: ProjectMetadataContent = JSON.parse(
+      stringProjectMetadataOrError.right,
+    );
+
+    await this.entityManager.query(
+      `
+        INSERT INTO projects(id, name, description, organization_id)
+        VALUES ($1, $2, $3, $4)
+      `,
+      [
+        resourceId,
+        projectMetadata.name,
+        projectMetadata.description,
+        organizationId,
+      ],
+    );
+
+    return {
+      importId: input.importId,
+      componentId: input.componentId,
+      resourceId: input.resourceId,
+      piece: input.piece,
+    };
   }
 }
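
For orientation, a hedged sketch of the input the new run method consumes. The field names (importId, componentId, resourceId, piece, and uris entries with uri and relativePath) are taken from what the code above reads; the concrete values, the relative path, and the importer variable are made-up placeholders, and the real ImportJobInput type may carry additional properties.

// Hypothetical example values; only the fields the importer reads are shown,
// so the cast papers over any extra properties the real type may require.
const input = {
  importId: 'import-id',
  componentId: 'component-id',
  resourceId: 'new-project-id',
  piece: ClonePiece.ProjectMetadata,
  uris: [
    {
      uri: 'storage://exports/some-archive.zip',
      relativePath: 'project-metadata.json',
    },
  ],
} as unknown as ImportJobInput;

// importer is assumed to be an already constructed ProjectMetadataPieceImporter.
// run() extracts the metadata JSON from the archive, inserts a row into the
// apiDB projects table under an arbitrarily picked organization, and echoes
// the identifiers back as ImportJobOutput.
const output = await importer.run(input);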
api/libs/utils/src/zip-file-extractor.ts (2 additions, 2 deletions)
@@ -6,11 +6,11 @@ export const extractFileFailed = Symbol('Extract file failed');
 
 export async function extractFile(
   readable: Readable,
-  fileName: RegExp,
+  fileRelativePath: string,
 ): Promise<Either<typeof extractFileFailed, string>> {
   return new Promise<Either<typeof extractFileFailed, string>>((resolve) => {
     readable
-      .pipe(unzipper.ParseOne(fileName))
+      .pipe(unzipper.ParseOne(new RegExp(fileRelativePath)))
       .on('entry', async (entry: unzipper.Entry) => {
         const buffer = await entry.buffer();
         resolve(right(buffer.toString()));
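
To close, a minimal usage sketch of the revised extractFile signature, not taken from the PR: the archive path and the in-zip relative path are placeholders. Note that the function still builds a RegExp from the string internally for unzipper.ParseOne, so regex metacharacters such as '.' in the path are interpreted as patterns rather than literals.

import { createReadStream } from 'fs';
import { isLeft } from 'fp-ts/lib/Either';
import { extractFile } from '@marxan/utils';

// Reads a single file out of a zip archive using the new string-based API.
async function readProjectMetadata(archivePath: string): Promise<string> {
  const readable = createReadStream(archivePath);
  // The second argument is now the file's relative path inside the archive;
  // extractFile wraps it in a RegExp before handing it to unzipper.ParseOne.
  const contentOrError = await extractFile(readable, 'project-metadata.json');
  if (isLeft(contentOrError)) {
    throw new Error(`Could not extract project metadata from ${archivePath}`);
  }
  return contentOrError.right;
}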