Skip to content

Commit

Permalink
Add ProjectCostSurfacesPieceImporter for project cloning
Browse files Browse the repository at this point in the history
  • Loading branch information
yulia-bel committed Sep 22, 2023
1 parent 8ed51e1 commit a7b1036
Show file tree
Hide file tree
Showing 3 changed files with 203 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ type ProjectCostSurfacesSelectResult = {
type CostSurfaceDataSelectResult = {
cost_surface_id: string;
cost: number;
puid: number;
project_pu_id: number;
};

@Injectable()
Expand Down Expand Up @@ -67,6 +67,7 @@ export class ProjectCostSurfacesPieceExporter implements ExportPieceProcessor {

const costSurfacesIds = costSurfaces.map((costSurface) => costSurface.id);
let costSurfaceData: CostSurfaceDataSelectResult[] = [];
let projectPusMap: Record<string, number> = {};
if (costSurfacesIds.length > 0) {
costSurfaceData = await this.geoprocessingEntityManager
.createQueryBuilder()
Expand All @@ -76,12 +77,18 @@ export class ProjectCostSurfacesPieceExporter implements ExportPieceProcessor {
costSurfacesIds,
})
.execute();

projectPusMap = await this.getProjectPusMap(input.resourceId);
}

const fileContent: ProjectCostSurfacesContent = {
costSurfaces: costSurfaces.map(({ id, ...costSurface }) => ({
...costSurface,
data: costSurfaceData.filter((data) => data.cost_surface_id === id),
data: costSurfaceData.filter((data : CostSurfaceDataSelectResult) => data.cost_surface_id === id)
.map(({ cost_surface_id, project_pu_id, ...data }) => {
const puid = projectPusMap[project_pu_id];
return { puid, ...data };
}),
})),
};

Expand All @@ -106,4 +113,25 @@ export class ProjectCostSurfacesPieceExporter implements ExportPieceProcessor {
uris: [new ComponentLocation(outputFile.right, relativePath)],
};
}

/**
 * Builds a lookup from geoDB projects_pu row id (uuid) to the planning
 * unit's human-readable puid, for every planning unit of the given project.
 *
 * Used by the exporter to replace internal projects_pu ids in cost surface
 * data with puids, which are stable across project clones.
 *
 * @param projectId apiDB/geoDB project id whose planning units are looked up.
 * @returns map of projects_pu id (uuid) -> puid (number).
 */
private async getProjectPusMap(
  projectId: string,
): Promise<Record<string, number>> {
  const projectPus: {
    id: string;
    puid: number;
  }[] = await this.geoprocessingEntityManager
    .createQueryBuilder()
    .select(['id', 'puid'])
    .from(ProjectsPuEntity, 'ppus')
    .where('ppus.project_id = :projectId', { projectId })
    .execute();

  // Keyed by projects_pu id, valued by puid — the previous name
  // (projectPuIdByPuid) described the exact inverse of what it holds.
  const puidByProjectPuId: Record<string, number> = {};
  projectPus.forEach(({ puid, id }) => {
    puidByProjectPuId[id] = puid;
  });

  return puidByProjectPuId;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,9 @@ import { ScenarioMetadataPieceImporter } from './scenario-metadata.piece-importe
import { ScenarioPlanningUnitsDataPieceImporter } from './scenario-planning-units-data.piece-importer';
import { ScenarioProtectedAreasPieceImporter } from './scenario-protected-areas.piece-importer';
import { ScenarioRunResultsPieceImporter } from './scenario-run-results.piece-importer';
import {
ProjectCostSurfacesPieceImporter
} from "@marxan-geoprocessing/import/pieces-importers/project-cost-surfaces.piece-importer";

@Module({
imports: [
Expand All @@ -37,6 +40,7 @@ import { ScenarioRunResultsPieceImporter } from './scenario-run-results.piece-im
PlanningUnitsGridPieceImporter,
ProjectCustomProtectedAreasPieceImporter,
ProjectCustomFeaturesPieceImporter,
ProjectCostSurfacesPieceImporter,
ScenarioProtectedAreasPieceImporter,
ScenarioPlanningUnitsDataPieceImporter,
ScenarioRunResultsPieceImporter,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
import { geoprocessingConnections } from '@marxan-geoprocessing/ormconfig';
import { ClonePiece, ImportJobInput, ImportJobOutput } from '@marxan/cloning';
import { ResourceKind } from '@marxan/cloning/domain';
import { ProjectCustomFeaturesContent } from '@marxan/cloning/infrastructure/clone-piece-data/project-custom-features';
import { CloningFilesRepository } from '@marxan/cloning-files-repository';
import { GeoFeatureGeometry } from '@marxan/geofeatures';
import { readableToBuffer } from '@marxan/utils';
import { Injectable, Logger } from '@nestjs/common';
import { InjectEntityManager } from '@nestjs/typeorm';
import { isLeft } from 'fp-ts/lib/Either';
import { EntityManager } from 'typeorm';
import { v4 } from 'uuid';
import {
ImportPieceProcessor,
PieceImportProvider,
} from '../pieces/import-piece-processor';
import { chunk } from 'lodash';
import { ProjectsPuEntity } from '@marxan-jobs/planning-unit-geometry';
import { CHUNK_SIZE_FOR_BATCH_GEODB_OPERATIONS } from '@marxan-geoprocessing/utils/chunk-size-for-batch-geodb-operations';
import { ProjectCostSurfacesContent } from "@marxan/cloning/infrastructure/clone-piece-data/project-cost-surfaces";
import { CostSurfacePuDataEntity } from "@marxan/cost-surfaces";

@Injectable()
@PieceImportProvider()
export class ProjectCostSurfacesPieceImporter
  implements ImportPieceProcessor {
  private readonly logger: Logger = new Logger(
    ProjectCostSurfacesPieceImporter.name,
  );

  constructor(
    private readonly fileRepository: CloningFilesRepository,
    @InjectEntityManager(geoprocessingConnections.apiDB)
    private readonly apiEntityManager: EntityManager,
    @InjectEntityManager(geoprocessingConnections.default)
    private readonly geoprocessingEntityManager: EntityManager,
  ) {}

  isSupported(piece: ClonePiece, kind: ResourceKind): boolean {
    return (
      piece === ClonePiece.ProjectCostSurfaces && kind === ResourceKind.Project
    );
  }

  /**
   * Imports the "project cost surfaces" clone piece: reads the exported JSON
   * file for the piece, inserts the cost surface rows into the apiDB
   * `cost_surfaces` table (under fresh ids, attached to the cloned project)
   * and their per-planning-unit cost rows into the geoDB, translating each
   * exported puid back into the cloned project's projects_pu id.
   *
   * @throws Error when the uris array is malformed or the piece file is
   *         missing from the cloning files repository.
   */
  async run(input: ImportJobInput): Promise<ImportJobOutput> {
    const { uris, pieceResourceId, projectId, piece } = input;

    // Built up-front instead of `{} as ImportJobOutput` so we never return a
    // value that merely asserts its type.
    const returnValue: ImportJobOutput = {
      importId: input.importId,
      componentId: input.componentId,
      pieceResourceId,
      projectId,
      piece: input.piece,
    };

    try {
      if (uris.length !== 1) {
        const errorMessage = `uris array has an unexpected amount of elements: ${uris.length}`;
        this.logger.error(errorMessage);
        throw new Error(errorMessage);
      }
      const [projectCostSurfacesLocation] = uris;

      const readableOrError = await this.fileRepository.get(
        projectCostSurfacesLocation.uri,
      );
      if (isLeft(readableOrError)) {
        const errorMessage = `File with piece data for ${piece}/${pieceResourceId} is not available at ${projectCostSurfacesLocation.uri}`;
        this.logger.error(errorMessage);
        throw new Error(errorMessage);
      }

      const buffer = await readableToBuffer(readableOrError.right);

      const { costSurfaces }: ProjectCostSurfacesContent = JSON.parse(
        buffer.toString(),
      );

      if (!costSurfaces.length) return returnValue;

      // puid (stable across clones) -> projects_pu id in the cloned project.
      const projectPusMap = await this.getProjectPusMap(projectId);

      // Assign a fresh id to each cost surface while KEEPING its `data`
      // alongside, so the per-pu rows can be linked after the apiDB insert.
      // (Previously `data` was stripped out before being read again below,
      // which made `costSurface.data` undefined at runtime.)
      const costSurfacesWithIds = costSurfaces.map((costSurface) => ({
        ...costSurface,
        id: v4(),
      }));

      await this.apiEntityManager.transaction(async (apiEm) => {
        await Promise.all(
          costSurfacesWithIds.map(({ data, ...costSurface }) =>
            apiEm
              .createQueryBuilder()
              .insert()
              .into('cost_surfaces')
              .values({ ...costSurface, project_id: projectId })
              .execute(),
          ),
        );

        const costSurfaceDataInsertValues = costSurfacesWithIds.flatMap(
          ({ id, data }) =>
            data.map(({ puid, ...rest }: any) => ({
              ...rest,
              cost_surface_id: id,
              // The exporter writes each row as { puid, cost } — the field is
              // `puid`, not `projectPuPuid` as the previous code destructured
              // (which was always undefined). Map it back to the cloned
              // project's projects_pu id.
              // NOTE(review): target column assumed to be projects_pu_id on
              // CostSurfacePuDataEntity (the mapped value is a projects_pu
              // uuid, not a puid) — confirm against the entity definition.
              projects_pu_id: projectPusMap[puid],
            })),
        );

        await Promise.all(
          chunk(
            costSurfaceDataInsertValues,
            CHUNK_SIZE_FOR_BATCH_GEODB_OPERATIONS,
          ).map((values) =>
            this.geoprocessingEntityManager
              .createQueryBuilder()
              .insert()
              .into(CostSurfacePuDataEntity)
              .values(values)
              .execute(),
          ),
        );
      });
    } catch (e) {
      this.logger.error(e);
      throw e;
    }

    return returnValue;
  }

  /**
   * Builds a lookup from human-readable puid to the geoDB projects_pu row id
   * (uuid) for every planning unit of the given (cloned) project — the
   * inverse of the mapping the exporter applied when writing the piece file.
   *
   * @param projectId geoDB project id whose planning units are looked up.
   * @returns map of puid (number) -> projects_pu id (uuid).
   */
  private async getProjectPusMap(
    projectId: string,
  ): Promise<Record<number, string>> {
    const projectPus: {
      id: string;
      puid: number;
    }[] = await this.geoprocessingEntityManager
      .createQueryBuilder()
      .select(['id', 'puid'])
      .from(ProjectsPuEntity, 'ppus')
      .where('ppus.project_id = :projectId', { projectId })
      .execute();

    const projectPuIdByPuid: Record<number, string> = {};
    projectPus.forEach(({ puid, id }) => {
      projectPuIdByPuid[puid] = id;
    });

    return projectPuIdByPuid;
  }
}

0 comments on commit a7b1036

Please sign in to comment.