diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 91c9a9168..46fa9cfd6 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -1,7 +1,6 @@ name: pull-request on: pull_request: - branches: [main, staging, release/**] jobs: lintTest: runs-on: ubuntu-latest diff --git a/app/Console/Commands/ImportTreeSpeciesAssociations.php b/app/Console/Commands/ImportTreeSpeciesAssociations.php new file mode 100644 index 000000000..095f24268 --- /dev/null +++ b/app/Console/Commands/ImportTreeSpeciesAssociations.php @@ -0,0 +1,103 @@ +executeAbortableScript(function () { + $process = new Process(['wc', '-l', $this->argument('file')]); + $process->run(); + $this->assert($process->isSuccessful(), "WC failed {$process->getErrorOutput()}"); + + $lines = ((int)explode(' ', $process->getOutput())[0]) - 1; + + $fileHandle = fopen($this->argument('file'), 'r'); + $this->parseHeaders(fgetcsv($fileHandle)); + + $this->withProgressBar($lines, function ($progressBar) use ($fileHandle) { + $abortExceptions = []; + while ($csvRow = fgetcsv($fileHandle)) { + $treeSpeciesUuid = $csvRow[$this->treeSpeciesUuidColumn]; + $taxonId = $csvRow[$this->taxonIdColumn]; + + if ($taxonId != 'NA') { + try { + $research = TreeSpeciesResearch::find($taxonId); + $this->assert($research != null, "Taxon ID not found: $taxonId", ExceptionLevel::Warning); + + TreeSpecies::isUuid($treeSpeciesUuid)->update([ + 'taxon_id' => $taxonId, + 'name' => $research->scientific_name, + ]); + } catch (AbortException $e) { + $abortExceptions[] = $e; + } + } + + $progressBar->advance(); + } + + $progressBar->finish(); + + if (! empty($abortExceptions)) { + $this->warn("Errors and warnings encountered during parsing CSV Rows:\n"); + foreach ($abortExceptions as $error) { + $this->logException($error); + } + } + }); + + fclose($fileHandle); + }); + } + + protected function parseHeaders(array $headerRow): void + { + foreach ($headerRow as $index => $header) { + $header = trim($header, "\xEF\xBB\xBF\""); + if ($header == 'tree_species_uuid') { + $this->treeSpeciesUuidColumn = $index; + } elseif ($header == 'taxon_id') { + $this->taxonIdColumn = $index; + } + } + + $this->assert( + is_numeric($this->treeSpeciesUuidColumn) && is_numeric($this->taxonIdColumn), + 'Not all required columns were found' + ); + } +} diff --git a/app/Console/Commands/OneOff/AssociateExactMatchTrees.php b/app/Console/Commands/OneOff/AssociateExactMatchTrees.php new file mode 100644 index 000000000..71e92d71b --- /dev/null +++ b/app/Console/Commands/OneOff/AssociateExactMatchTrees.php @@ -0,0 +1,43 @@ +join('tree_species_research', 'v2_tree_species.name', '=', 'tree_species_research.scientific_name') + ->where('v2_tree_species.taxon_id', null); + $this->withProgressBar((clone $query)->count(), function ($progressBar) use ($query) { + $query->chunkById(100, function ($trees) use ($progressBar) { + foreach ($trees as $tree) { + TreeSpecies::where('id', $tree->id)->update(['taxon_id' => $tree->taxon_id]); + $progressBar->advance(); + } + }); + }); + }); + } +} diff --git a/app/Console/Commands/OneOff/PopulateTreeSpeciesResearch.php b/app/Console/Commands/OneOff/PopulateTreeSpeciesResearch.php new file mode 100644 index 000000000..a05253fba --- /dev/null +++ b/app/Console/Commands/OneOff/PopulateTreeSpeciesResearch.php @@ -0,0 +1,132 @@ + 'taxon_id', + 'scientificName' => 'scientific_name', + 'family' => 'family', + 'genus' => 'genus', + 'specificEpithet' => 'specific_epithet', + 'infraspecificEpithet' => 
'infraspecific_epithet', + ]; + + // Populated by parseHeaders(), a mapping of DB column name to the index in each row where that data is expected to exist + protected $columns = []; + + /** + * Execute the console command. + */ + public function handle() + { + $this->executeAbortableScript(function () { + $process = new Process(['wc', '-l', $this->argument('file')]); + $process->run(); + $this->assert($process->isSuccessful(), "WC failed {$process->getErrorOutput()}"); + + $lines = ((int)explode(' ', $process->getOutput())[0]) - 1; + + $fileHandle = fopen($this->argument('file'), 'r'); + $this->parseHeaders(fgetcsv($fileHandle)); + + $this->withProgressBar($lines, function ($progressBar) use ($fileHandle) { + $abortExceptions = []; + $bulkInsert = []; + while ($csvRow = fgetcsv($fileHandle)) { + $data = []; + foreach ($this->columns as $column => $index) { + $data[$column] = $csvRow[$index] == 'NA' ? null : $csvRow[$index]; + } + + // These don't get set automatically with bulk insert + $now = Carbon::now(); + $data['created_at'] = $now; + $data['updated_at'] = $now; + + try { + $existing = TreeSpeciesResearch::where('scientific_name', $data['scientific_name'])->first(); + $this->assert( + $existing == null, + 'Scientific name already exists, skipping: ' . json_encode([ + 'existing_id' => $existing?->taxon_id, + 'new_id' => $data['taxon_id'], + 'scientific_name' => $data['scientific_name'], + 'infraspecific_epithet' => $data['infraspecific_epithet'], + ], JSON_PRETTY_PRINT), + ExceptionLevel::Warning + ); + + $bulkInsert[] = $data; + if (count($bulkInsert) >= 1000) { + TreeSpeciesResearch::insert($bulkInsert); + $bulkInsert = []; + } + } catch (AbortException $e) { + $abortExceptions[] = $e; + } + $progressBar->advance(); + } + + // Insert any remaining rows that didn't fill a complete batch + if (! empty($bulkInsert)) { + TreeSpeciesResearch::insert($bulkInsert); + } + + $progressBar->finish(); + + if (! empty($abortExceptions)) { + $this->warn("Errors and warnings encountered during parsing CSV Rows:\n"); + foreach ($abortExceptions as $error) { + $this->logException($error); + } + } + }); + + fclose($fileHandle); + }); + } + + /** + * @throws AbortException + */ + protected function parseHeaders(array $headerRow): void + { + foreach ($headerRow as $index => $header) { + // Excel puts some garbage at the beginning of the file that we need to filter out.
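+ // Note: the "garbage" is the UTF-8 byte-order mark (0xEF 0xBB 0xBF); the trim() below strips it, along with any stray double quotes, before matching header names.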
+ $header = trim($header, "\xEF\xBB\xBF\""); + + if (array_key_exists($header, self::COLUMN_MAPPING)) { + $this->columns[self::COLUMN_MAPPING[$header]] = $index; + } + } + + $this->assert( + count(self::COLUMN_MAPPING) === count($this->columns), + 'Not all required columns were found' + ); + } +} diff --git a/app/Console/Commands/OneOff/UpdateTreeCollections.php b/app/Console/Commands/OneOff/UpdateTreeCollections.php new file mode 100644 index 000000000..d97c206db --- /dev/null +++ b/app/Console/Commands/OneOff/UpdateTreeCollections.php @@ -0,0 +1,83 @@ +info('Updating collections in v2_tree_species'); + TreeSpecies::withoutTimestamps(function () { + TreeSpecies::withTrashed()->where('speciesable_type', ProjectPitch::class) + ->update(['collection' => TreeSpecies::COLLECTION_PLANTED]); + TreeSpecies::withTrashed()->where('speciesable_type', Project::class)->where('collection', 'primary') + ->update(['collection' => TreeSpecies::COLLECTION_PLANTED]); + TreeSpecies::withTrashed()->where('speciesable_type', Organisation::class) + ->update(['collection' => TreeSpecies::COLLECTION_HISTORICAL]); + TreeSpecies::withTrashed()->where('speciesable_type', SiteReport::class)->where('collection', null) + ->update(['collection' => TreeSpecies::COLLECTION_NON_TREE]); + }); + + $this->info('Updating collections in v2_update_requests content'); + // This is kind of a hassle; fortunately, the only model type above that has bad data embedded in update requests + // is Project + UpdateRequest::withoutTimestamps(function () { + $updateRequests = UpdateRequest::where('updaterequestable_type', Project::class) + ->where('content', 'LIKE', '%"collection":"primary"%') + ->get(); + foreach ($updateRequests as $updateRequest) { + $content = $updateRequest->content; + foreach (array_keys($content) as $key) { + $collections = data_get($content, "$key.*.collection"); + if (is_array($collections) && in_array('primary', $collections)) { + data_set($content, "$key.*.collection", TreeSpecies::COLLECTION_PLANTED); + } + } + + $updateRequest->update(['content' => $content]); + } + }); + + $this->info('Updating form fields'); + FormQuestion::withoutTimestamps(function () { + $relationSets = data_get(config('wri.linked-fields.models'), '*.relations'); + foreach ($relationSets as $relations) { + foreach ($relations as $linkedFieldKey => $properties) { + if ($properties['input_type'] != 'treeSpecies') { + continue; + } + + FormQuestion::withTrashed() + ->where('linked_field_key', $linkedFieldKey) + ->update(['collection' => $properties['collection']]); + } + } + }); + } +} diff --git a/app/Console/Commands/RecalculatePolygonAreas.php b/app/Console/Commands/RecalculatePolygonAreas.php new file mode 100644 index 000000000..508c3f879 --- /dev/null +++ b/app/Console/Commands/RecalculatePolygonAreas.php @@ -0,0 +1,94 @@ +option('only-active')) { + $query->active(); + } + + $sitePolygons = $query->cursor(); + + $processedCount = 0; + $errorCount = 0; + + $this->info('Starting polygon area recalculation...'); + $progressBar = $this->output->createProgressBar(); + $progressBar->start(); + + foreach ($sitePolygons as $sitePolygon) { + try { + $polygonGeometry = PolygonGeometry::where('uuid', $sitePolygon->poly_id) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + if (! 
$polygonGeometry) { + $this->error("No geometry found for poly_id: {$sitePolygon->poly_id}"); + $errorCount++; + + continue; + } + $geometry = json_decode($polygonGeometry->geojsonGeometry, true); + + $calculatedArea = $areaService->getArea($geometry); + + $sitePolygon->calc_area = $calculatedArea; + $sitePolygon->save(); + + $processedCount++; + $progressBar->advance(); + } catch (\Exception $e) { + $this->error("Error processing polygon {$sitePolygon->id}: " . $e->getMessage()); + $errorCount++; + } + } + + DB::commit(); + + $progressBar->finish(); + $this->info("\n\nRecalculation complete!"); + $this->info("Processed: {$processedCount} polygons"); + $this->info("Errors: {$errorCount}"); + + } catch (\Exception $e) { + DB::rollBack(); + $this->error('Recalculation failed: ' . $e->getMessage()); + + return self::FAILURE; + } + + return self::SUCCESS; + } +} diff --git a/app/Console/Commands/UpdateValuesForIndicatorsCommand.php b/app/Console/Commands/UpdateValuesForIndicatorsCommand.php new file mode 100644 index 000000000..897ac3a37 --- /dev/null +++ b/app/Console/Commands/UpdateValuesForIndicatorsCommand.php @@ -0,0 +1,215 @@ + [ + 'sql' => 'SELECT umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year', + 'query_url' => '/dataset/umd_tree_cover_loss/latest/query', + 'indicator' => 'umd_tree_cover_loss', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'treeCoverLossFires' => [ + 'sql' => 'SELECT umd_tree_cover_loss_from_fires__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss_from_fires__year', + 'query_url' => '/dataset/umd_tree_cover_loss_from_fires/latest/query', + 'indicator' => 'umd_tree_cover_loss_from_fires', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'restorationByEcoRegion' => [ + 'indicator' => 'wwf_terrestrial_ecoregions', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByStrategy' => [ + 'indicator' => 'restoration_practice', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByLandUse' => [ + 'indicator' => 'target_system', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + ]; + + foreach ($slugMappings as $slug => $slugMapping) { + $uuids = []; + $processedCount = 0; + $errorCount = 0; + $this->info('Processing ' . $slug . '...'); + $progressBar = $this->output->createProgressBar(); + $progressBar->start(); + $data = $slugMapping['model']::with('sitePolygon') + ->where('indicator_slug', $slug) + ->select('id', 'site_polygon_id')->get(); + $uuids = $data->map(function ($item) { + return $item->sitePolygon ? $item->sitePolygon->poly_id : null; + })->filter()->toArray(); + + foreach ($uuids as $uuid) { + try { + $polygonGeometry = $this->getGeometry($uuid); + $registerExist = DB::table($slugMappings[$slug]['table_name'].' as i') + ->where('i.site_polygon_id', $polygonGeometry['site_polygon_id']) + ->where('i.indicator_slug', $slug) + ->where('i.year_of_analysis', Carbon::now()->year) + ->exists(); + + if (! 
$registerExist) { + continue; + } + + if (str_contains($slug, 'restorationBy')) { + $geojson = GeometryHelper::getPolygonGeojson($uuid); + $indicatorRestorationResponse = App::make(PythonService::class)->IndicatorPolygon($geojson, $slugMappings[$slug]['indicator'], getenv('GFW_SECRET_KEY')); + + if ($slug == 'restorationByEcoRegion') { + $value = json_encode($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } else { + $value = $this->formatKeysValues($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } + $data = [ + 'value' => $value, + ]; + $slugMappings[$slug]['model']::where('site_polygon_id', $polygonGeometry['site_polygon_id']) + ->where('indicator_slug', $slug) + ->where('year_of_analysis', Carbon::now()->year) + ->update($data); + + $processedCount++; + $progressBar->advance(); + + continue; + } + + $response = $this->sendApiRequestIndicator(getenv('GFW_SECRET_KEY'), $slugMappings[$slug]['query_url'], $slugMappings[$slug]['sql'], $polygonGeometry['geo']); + if (str_contains($slug, 'treeCoverLoss')) { + $processedTreeCoverLossValue = $this->processTreeCoverLossValue($response->json()['data'], $slugMappings[$slug]['indicator']); + } + + if ($response->successful()) { + if (str_contains($slug, 'treeCoverLoss')) { + $data = $this->generateTreeCoverLossData($processedTreeCoverLossValue); + } else { + $data = [ + 'value' => json_encode($response->json()['data']), + ]; + } + + $slugMappings[$slug]['model']::where('site_polygon_id', $polygonGeometry['site_polygon_id']) + ->where('indicator_slug', $slug) + ->where('year_of_analysis', Carbon::now()->year) + ->update($data); + $processedCount++; + $progressBar->advance(); + } else { + Log::error('A problem occurred during the analysis of the geometry for the polygon: ' . $uuid); + } + } catch (\Exception $e) { + Log::error('Error in the analysis: ' . $e->getMessage()); + $errorCount++; + } + } + $progressBar->finish(); + $this->info("\n\n{$slug} updated successfully."); + $this->info("Processed: {$processedCount} polygons"); + $this->info("Errors: {$errorCount}"); + } + + return 0; + } + + public function getGeometry($polygonUuid) + { + $geojson = GeometryHelper::getMonitoredPolygonsGeojson($polygonUuid); + $geoJsonObject = json_decode($geojson['geometry']->geojsonGeometry, true); + + return [ + 'geo' => [ + 'type' => 'Polygon', + 'coordinates' => $geoJsonObject['coordinates'], + ], + 'site_polygon_id' => $geojson['site_polygon_id'], + ]; + } + + public function sendApiRequestIndicator($secret_key, $query_url, $query_sql, $geometry) + { + return Http::withHeaders([ + 'content-type' => 'application/json', + 'x-api-key' => $secret_key, + ])->post('https://data-api.globalforestwatch.org' . $query_url, [ + 'sql' => $query_sql, + 'geometry' => $geometry, + ]); + } + + public function processTreeCoverLossValue($data, $indicator) + { + $processedTreeCoverLossValue = []; + foreach ($data as $i) { + $processedTreeCoverLossValue[$i[$indicator . 
'__year']] = $i['area__ha']; + } + + return $processedTreeCoverLossValue; + } + + public function generateTreeCoverLossData($processedTreeCoverLossValue) + { + $yearsOfAnalysis = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]; + $responseData = []; + foreach ($yearsOfAnalysis as $year) { + if (isset($processedTreeCoverLossValue[$year])) { + $responseData[$year] = $processedTreeCoverLossValue[$year]; + } else { + $responseData[$year] = 0.0; + } + } + + return [ + 'value' => json_encode($responseData), + ]; + } + + public function formatKeysValues($data) + { + $formattedData = []; + foreach ($data as $key => $value) { + $formattedKey = strtolower(str_replace(' ', '-', $key)); + $formattedData[$formattedKey] = $value; + } + + return json_encode($formattedData); + } +} diff --git a/app/Exports/V2/OrganisationsExport.php b/app/Exports/V2/OrganisationsExport.php index ea5f44982..ac79004ad 100644 --- a/app/Exports/V2/OrganisationsExport.php +++ b/app/Exports/V2/OrganisationsExport.php @@ -181,9 +181,9 @@ private function addFileCollectionHeadings(array $headings): array private function buildTreeSpecies(Organisation $organisation): string { $list = []; - $treeSpecies = $organisation->treeSpecies()->select('name', 'amount')->get(); - foreach ($treeSpecies as $treeSpecies) { - $list[] = $treeSpecies->name . '(' . $treeSpecies->amount . ')'; + $treeSpecies = $organisation->treeSpeciesHistorical()->select('name', 'amount')->get(); + foreach ($treeSpecies as $instance) { + $list[] = $instance->name . '(' . $instance->amount . ')'; } return '[ ' . implode(',', $list) . ' ]'; diff --git a/app/Helpers/GeometryHelper.php b/app/Helpers/GeometryHelper.php index c0ca8e1e1..a105bbf35 100755 --- a/app/Helpers/GeometryHelper.php +++ b/app/Helpers/GeometryHelper.php @@ -369,4 +369,41 @@ public static function getSitePolygonsOfPolygons(array $polygonUuids) { return SitePolygon::whereIn('poly_id', $polygonUuids)->where('is_active', true)->get()->pluck('uuid'); } + + public static function getMonitoredPolygonsGeojson($polygonUuid) + { + $polygonGeometry = PolygonGeometry::where('uuid', $polygonUuid) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + + return [ + 'geometry' => $polygonGeometry, + 'site_polygon_id' => $polygonGeometry->sitePolygon->id, + ]; + } + + public static function getPolygonGeojson($uuid) + { + $polygonGeometry = PolygonGeometry::where('uuid', $uuid) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + $geometry = json_decode($polygonGeometry->geojsonGeometry, true); + $polygonData = $polygonGeometry->sitePolygon; + + return [ + 'type' => 'Feature', + 'properties' => [ + 'poly_id' => $polygonData->poly_id, + 'poly_name' => $polygonData->poly_name ?? '', + 'plantstart' => $polygonData->plantstart ?? '', + 'plantend' => $polygonData->plantend ?? '', + 'practice' => $polygonData->practice ?? '', + 'target_sys' => $polygonData->target_sys ?? '', + 'distr' => $polygonData->distr ?? '', + 'num_trees' => $polygonData->num_trees ?? '', + 'site_id' => $polygonData->site_id ?? 
'', + ], + 'geometry' => $geometry, + ]; + } } diff --git a/app/Helpers/PolygonGeometryHelper.php b/app/Helpers/PolygonGeometryHelper.php index 44c5085e3..7f6bc28d8 100644 --- a/app/Helpers/PolygonGeometryHelper.php +++ b/app/Helpers/PolygonGeometryHelper.php @@ -5,7 +5,7 @@ use App\Models\V2\Projects\Project; use App\Models\V2\Sites\Site; use App\Models\V2\Sites\SitePolygon; -use Illuminate\Support\Facades\DB; +use App\Services\AreaCalculationService; use Illuminate\Support\Facades\Log; class PolygonGeometryHelper @@ -16,12 +16,8 @@ public static function updateEstAreainSitePolygon($polygonGeometry, $geometry) $sitePolygon = SitePolygon::where('poly_id', $polygonGeometry->uuid)->first(); if ($sitePolygon) { - $geojson = json_encode($geometry); - $areaSqDegrees = DB::selectOne("SELECT ST_Area(ST_GeomFromGeoJSON('$geojson')) AS area")->area; - $latitude = DB::selectOne("SELECT ST_Y(ST_Centroid(ST_GeomFromGeoJSON('$geojson'))) AS latitude")->latitude; - $unitLatitude = 111320; - $areaSqMeters = $areaSqDegrees * pow($unitLatitude * cos(deg2rad($latitude)), 2); - $areaHectares = $areaSqMeters / 10000; + $areaCalculationService = app(AreaCalculationService::class); + $areaHectares = $areaCalculationService->getArea((array) $geometry->geometry); $sitePolygon->calc_area = $areaHectares; $sitePolygon->save(); diff --git a/app/Helpers/RestorationByEcoregionHelper.php b/app/Helpers/RestorationByEcoregionHelper.php new file mode 100755 index 000000000..26b25e9b1 --- /dev/null +++ b/app/Helpers/RestorationByEcoregionHelper.php @@ -0,0 +1,107 @@ + [ + 'Southeast Australia temperate forests', + 'Tocantins/Pindare moist forests', + 'Tapajós-Xingu moist forests', + 'Mato Grosso seasonal forests', + 'Mato Grosso seasonal forests, Xingu-Tocantins-Araguaia moist forests', + 'Bahia coastal forests', + 'Southern Miombo woodlands', + 'Palawan rain forests', + ], + 'afrotropical' => [ + 'Atlantic mixed forests', + 'Northern Acacia-Commiphora bushlands and thickets', + 'Southern Rift montane forest-grassland mosaic', + 'Sierra Madre de Chiapas moist forests', + 'Iberian sclerophyllous and semi-deciduous forests', + 'Northwest Iberian montane forests', + 'Northwestern Congolian lowland forests', + 'Albertine Rift montane forests', + 'Sahelian Acacia savanna', + 'Northern Congolian forest-savanna mosaic', + 'Nigerian lowland forests', + 'West Sudanian savanna', + 'Northern Congolian forest-savanna mosaic, Northwestern Congolian lowland forests', + 'Eastern Guinean forests', + 'Victoria Basin forest-savanna mosaic', + 'Guinean forest-savanna mosaic', + 'East Sudanian savanna', + 'Central Zambezian Miombo woodlands', + 'Ethiopian montane grasslands and woodlands', + 'Central African mangroves', + 'Southern Acacia-Commiphora bushlands and thickets', + 'East African montane forests', + 'Eastern Arc forests', + 'Guinean mangroves', + 'Eastern Zimbabwe montane forest-grassland mosaic', + 'Somali Acacia-Commiphora bushlands and thickets', + 'Ethiopian montane forests', + 'Inner Niger Delta flooded savanna', + 'Western Guinean lowland forests', + 'Eastern Miombo woodlands', + 'Ethiopian montane forests, Ethiopian montane grasslands and woodlands', + 'Cross-Sanaga-Bioko coastal forests', + 'Zambezian and Mopane woodlands', + 'Madagascar lowland forests', + 'Madagascar subhumid forests', + 'Southern Congolian forest-savanna mosaic', + 'East African montane forests', + 'East African montane forests, Northern Acacia-Commiphora bushlands and thickets', + 'Albertine Rift montane forests, Lake', + ], + 'paleartic' => [ + 
'Southwest Iberian Mediterranean sclerophyllous and mixed forests', + 'Narmada Valley dry deciduous forests', + 'East African montane moorlands', + 'Cameroonian Highlands forests', + 'Celtic broadleaf forests', + 'Atlantic Coast restingas', + ], + 'neotropical' => [ + 'Sinú Valley dry forests', + 'Santa Marta montane forests', + 'Petén-Veracruz moist forests', + 'Central American Atlantic moist forests', + 'Petén-Veracruz moist forests, Central American Atlantic moist forests', + 'Central American montane forests', + 'Central American Atlantic moist forests, Central American montane forests', + 'Cross-Niger transition forests', + 'Atlantic Coast restingas', + ], + + ]; + $formatedValue = []; + foreach ($categoriesFromEcoRegion as $category => $values) { + $formatedValue[$category] = 0; + foreach ($value as $key => $val) { + if (in_array($key, $values)) { + $formatedValue[$category] = round((float) $val, 3); + + break; + } + } + } + + $result = array_filter($formatedValue, function ($val) { + return $val !== 0; + }); + + if (empty($result)) { + return $result; + } + if ($isExport) { + return $result; + } else { + return ['data' => $result]; + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php b/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php new file mode 100644 index 000000000..750ca56c4 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetIndicatorPolygonStatusController.php @@ -0,0 +1,51 @@ +where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'status', + 'is_active', + ]) + ->where('is_active', 1) + ->get() + ->groupBy('status') + ->map(function ($group) { + return $group->count(); + }); + $statuses = ['draft', 'submitted', 'needs-more-information', 'approved']; + $statusesByCount = []; + + foreach ($statuses as $status) { + if (! isset($sitePolygonGroupByStatus[$status])) { + $statusesByCount[$status] = 0; + } else { + $statusesByCount[$status] = $sitePolygonGroupByStatus[$status]; + } + } + + return response()->json($statusesByCount); + } catch (\Exception $e) { + Log::info($e); + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php new file mode 100644 index 000000000..73961c480 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisController.php @@ -0,0 +1,132 @@ + [ + 'relation_name' => 'treeCoverLossIndicator', + 'extra_columns' => '', + ], + 'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + ], + 'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + ], + ]; + if (! 
isset($slugMappings[$slug])) { + return response()->json([]); + } + + try { + return SitePolygon::whereHas($slugMappings[$slug]['relation_name'], function ($query) use ($slug) { + $query->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')); + }) + ->whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'poly_name', + 'status', + 'plantstart', + 'site_id', + 'is_active', + 'poly_id', + 'calc_area', + ]) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->select([ + 'indicator_slug', + 'year_of_analysis', + 'value', + 'created_at', + ]) + ->first(); + $results = [ + 'id' => $polygon->id, + 'poly_name' => $polygon->poly_name ?? '-', + 'poly_id' => $polygon->poly_id, + 'site_id' => $polygon->site_id, + 'status' => $polygon->status, + 'plantstart' => $polygon->plantstart ?? '-', + 'site_name' => $polygon->site->name ?? '-', + 'size' => round($polygon->calc_area ?? 0, 1), + 'indicator_slug' => $indicator->indicator_slug, + 'year_of_analysis' => $indicator->year_of_analysis, + 'created_at' => $indicator->created_at, + 'base_line' => $indicator->created_at, + 'data' => [], + ]; + if (str_contains($slug, 'treeCoverLoss')) { + $valueYears = json_decode($indicator->value, true); + $results['data']['2015'] = round((float) $valueYears['2015'], 1); + $results['data']['2016'] = round((float) $valueYears['2016'], 1); + $results['data']['2017'] = round((float) $valueYears['2017'], 1); + $results['data']['2018'] = round((float) $valueYears['2018'], 1); + $results['data']['2019'] = round((float) $valueYears['2019'], 1); + $results['data']['2020'] = round((float) $valueYears['2020'], 1); + $results['data']['2021'] = round((float) $valueYears['2021'], 1); + $results['data']['2022'] = round((float) $valueYears['2022'], 1); + $results['data']['2023'] = round((float) $valueYears['2023'], 1); + $results['data']['2024'] = round((float) $valueYears['2024'], 1); + } + + if ($slug == 'restorationByEcoRegion') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, RestorationByEcoregionHelper::getCategoryEcoRegion($values)); + } + + if ($slug == 'restorationByLandUse' || $slug == 'restorationByStrategy') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, $this->processValuesHectares($values)); + } + + return $results; + }); + } catch (\Exception $e) { + Log::info($e); + } + } + + public function processValuesHectares($values) + { + $separateKeys = []; + foreach ($values as $key => $value) { + $array = explode(',', str_replace('-', '_', $key)); + $arrayTrim = array_map('trim', $array); + foreach ($arrayTrim as $item) { + $separateKeys[$item] = round((float) $value, 1); + } + } + + return ['data' => $separateKeys]; + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php new file mode 100644 index 000000000..bc5f0cf92 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/GetPolygonsIndicatorAnalysisVerifyController.php @@ -0,0 +1,76 @@ + [ + 'relation_name' => 'treeCoverLossIndicator', + 'indicator_title' => 'Tree Cover Loss', + ], + 
'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'indicator_title' => 'Tree Cover Loss from Fire', + ], + 'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By Strategy', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By Target Land Use System', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + 'indicator_title' => 'Hectares Under Restoration By WWF EcoRegion', + ], + ]; + if (! isset($slugMappings[$slug])) { + return response()->json([]); + } + + try { + $polygonUuids = SitePolygon::whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select(['id', 'poly_id', 'is_active']) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')) + ->where('site_polygon_id', $polygon->id) + ->first(); + if (! $indicator) { + return $polygon->poly_id; + } + + return null; + }) + ->filter(); + if ($polygonUuids->isEmpty()) { + return response()->json(['message' => 'All polygons have already been analyzed for ' . $slugMappings[$slug]['indicator_title']], 200); + } else { + return response()->json($polygonUuids); + + } + } catch (\Exception $e) { + Log::info($e); + } + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php b/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php new file mode 100644 index 000000000..8c28aba56 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/IndicatorEntitySlugExportController.php @@ -0,0 +1,200 @@ +json(['message' => 'Indicator not found'], 404); + } + + return $this->exportCsv($entity, $slug); + } + + public function exportCsv($entity, $slug) + { + $defaultHeaders = [ + 'poly_name' => 'Polygon Name', + 'size' => 'Size (ha)', + 'site_name' => 'Site Name', + 'status' => 'Status', + 'plantstart' => 'Plant Start Date', + ]; + $treeCoverLossHeaders = [ + ...$defaultHeaders, + '2015' => '2015', + '2016' => '2016', + '2017' => '2017', + '2018' => '2018', + '2019' => '2019', + '2020' => '2020', + '2021' => '2021', + '2022' => '2022', + '2023' => '2023', + '2024' => '2024', + ]; + $restorationByEcoRegionHeaders = [ + ...$defaultHeaders, + 'created_at' => 'Baseline', + 'australasian' => 'Australasian', + 'afrotropical' => 'Afrotropical', + 'palearctic' => 'Palearctic', + ]; + $restorationByStrategyHeaders = [ + ...$defaultHeaders, + 'created_at' => 'Baseline', + 'tree_planting' => 'Tree Planting', + 'assisted_natural_regeneration' => 'Assisted Natural Regeneration', + 'direct_seeding' => 'Direct Seeding', + ]; + $restorationByLandUseHeaders = [ + ...$defaultHeaders, + 'created_at' => 'Baseline', + 'agroforest' => 'Agroforest', + 'natural_forest' => 'Natural Forest', + 'mangrove' => 'Mangrove', + ]; + $slugMappings = [ + 'treeCoverLoss' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'columns' => $treeCoverLossHeaders, + 'indicator_title' => 'Tree Cover Loss', + ], + 'treeCoverLossFires' => [ + 'relation_name' => 'treeCoverLossIndicator', + 'columns' => $treeCoverLossHeaders, + 'indicator_title' => 'Tree Cover Loss from Fire', + ], +
'restorationByStrategy' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByStrategyHeaders, + 'indicator_title' => 'Hectares Under Restoration By Strategy', + ], + 'restorationByLandUse' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByLandUseHeaders, + 'indicator_title' => 'Hectares Under Restoration By Target Land Use System', + ], + 'restorationByEcoRegion' => [ + 'relation_name' => 'hectaresIndicator', + 'columns' => $restorationByEcoRegionHeaders, + 'indicator_title' => 'Hectares Under Restoration By WWF EcoRegion', + ], + ]; + + $sitePolygonsIndicator = SitePolygon::whereHas($slugMappings[$slug]['relation_name'], function ($query) use ($slug) { + $query->where('indicator_slug', $slug) + ->where('year_of_analysis', date('Y')); + }) + ->whereHas('site', function ($query) use ($entity) { + if (get_class($entity) == Site::class) { + $query->where('uuid', $entity->uuid); + } elseif (get_class($entity) == Project::class) { + $query->where('project_id', $entity->project->id); + } + }) + ->select([ + 'id', + 'poly_name', + 'status', + 'plantstart', + 'site_id', + 'is_active', + 'poly_id', + 'calc_area', + ]) + ->where('is_active', 1) + ->get() + ->map(function ($polygon) use ($slugMappings, $slug) { + $indicator = $polygon->{$slugMappings[$slug]['relation_name']}() + ->where('indicator_slug', $slug) + ->select([ + 'indicator_slug', + 'year_of_analysis', + 'value', + 'created_at', + ]) + ->first(); + $results = [ + 'poly_name' => $polygon->poly_name, + 'status' => $polygon->status, + 'plantstart' => $polygon->plantstart, + 'site_name' => $polygon->site->name ?? '', + 'size' => $polygon->calc_area ?? 0, + 'created_at' => $indicator->created_at, + ]; + if (str_contains($slug, 'treeCoverLoss')) { + $valueYears = json_decode($indicator->value, true); + $results['2015'] = $valueYears['2015']; + $results['2016'] = $valueYears['2016']; + $results['2017'] = (float) $valueYears['2017']; + $results['2018'] = $valueYears['2018']; + $results['2019'] = $valueYears['2019']; + $results['2020'] = $valueYears['2020']; + $results['2021'] = $valueYears['2021']; + $results['2022'] = $valueYears['2022']; + $results['2023'] = $valueYears['2023']; + $results['2024'] = $valueYears['2024']; + } + if ($slug == 'restorationByEcoRegion') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, RestorationByEcoregionHelper::getCategoryEcoRegion($values, true)); + } + if ($slug == 'restorationByLandUse' || $slug == 'restorationByStrategy') { + $values = json_decode($indicator->value, true); + $results = array_merge($results, $this->processValuesHectares($values)); + } + + return $results; + }); + + $filteredIndicators = []; + foreach ($sitePolygonsIndicator as $polygon) { + $filteredIndicator = []; + foreach ($slugMappings[$slug]['columns'] as $key => $label) { + $filteredIndicator[$key] = $polygon[$key] ?? ''; + } + $filteredIndicators[] = $filteredIndicator; + } + + $csv = Writer::createFromString(''); + + $csv->insertOne(array_values($slugMappings[$slug]['columns'])); + + foreach ($filteredIndicators as $filteredIndicator) { + $csv->insertOne(array_values($filteredIndicator)); + } + + $csvContent = $csv->toString(); + + return response($csvContent, 200, [ + 'Content-Type' => 'text/csv', + 'Content-Disposition' => 'attachment; filename=indicator' . $slugMappings[$slug]['indicator_title'] . 
'.csv', + ]); + + } + + public function processValuesHectares($values) + { + $separateKeys = []; + foreach ($values as $key => $value) { + $array = explode(',', str_replace('-', '_', $key)); + $arrayTrim = array_map('trim', $array); + foreach ($arrayTrim as $item) { + $separateKeys[$item] = round((float) $value, 3); + } + } + + return $separateKeys; + } +} diff --git a/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php b/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php new file mode 100644 index 000000000..08307b034 --- /dev/null +++ b/app/Http/Controllers/V2/MonitoredData/RunIndicatorAnalysisController.php @@ -0,0 +1,33 @@ +all(); + $delayedJob = DelayedJob::create(); + $job = new RunIndicatorAnalysisJob( + $delayedJob->id, + $requestData, + $slug + ); + dispatch($job); + + return (new DelayedJobResource($delayedJob))->additional(['message' => 'Analysis for '.$slug.' is being processed']); + } catch (\Exception $e) { + Log::error('Error during analysis for ' . $slug . ' : ' . $e->getMessage()); + + return response()->json(['error' => 'An error occurred during analysis for ' . $slug], 500); + } + } +} diff --git a/app/Http/Controllers/V2/Projects/CreateProjectWithFormController.php b/app/Http/Controllers/V2/Projects/CreateProjectWithFormController.php index ca6495a3a..186fb3210 100644 --- a/app/Http/Controllers/V2/Projects/CreateProjectWithFormController.php +++ b/app/Http/Controllers/V2/Projects/CreateProjectWithFormController.php @@ -101,7 +101,7 @@ public function __invoke(Request $request): EntityWithSchemaResource foreach ($projectPitch->treeSpecies()->get() as $treeSpecies) { $project->treeSpecies()->create([ - 'collection' => $treeSpecies->collection ?? TreeSpecies::COLLECTION_PRIMARY, + 'collection' => $treeSpecies->collection ?? 
TreeSpecies::COLLECTION_PLANTED, 'name' => $treeSpecies->name, 'amount' => $treeSpecies->amount, ]); diff --git a/app/Http/Controllers/V2/Terrafund/TerrafundClipGeometryController.php b/app/Http/Controllers/V2/Terrafund/TerrafundClipGeometryController.php index 2e7d5ead2..3408bf0c7 100644 --- a/app/Http/Controllers/V2/Terrafund/TerrafundClipGeometryController.php +++ b/app/Http/Controllers/V2/Terrafund/TerrafundClipGeometryController.php @@ -5,7 +5,7 @@ use App\Helpers\GeometryHelper; use App\Http\Resources\DelayedJobResource; use App\Jobs\FixPolygonOverlapJob; -use App\Models\DelayedJob; +use App\Models\DelayedJobProgress; use App\Models\V2\Sites\CriteriaSite; use App\Models\V2\Sites\Site; use App\Models\V2\Sites\SitePolygon; @@ -24,8 +24,19 @@ public function clipOverlappingPolygonsBySite(string $uuid) ini_set('max_execution_time', self::MAX_EXECUTION_TIME); ini_set('memory_limit', '-1'); $user = Auth::user(); + $site = Site::isUuid($uuid)->first(); $polygonUuids = GeometryHelper::getSitePolygonsUuids($uuid)->toArray(); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJobProgress::create([ + 'processed_content' => 0, + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $site->id, + 'entity_type' => get_class($site), + 'entity_name' => $site->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Fix', + ]); $job = new FixPolygonOverlapJob($delayedJob->id, $polygonUuids, $user->id); dispatch($job); @@ -38,11 +49,56 @@ public function clipOverlappingPolygonsOfProjectBySite(string $uuid) ini_set('max_execution_time', self::MAX_EXECUTION_TIME); ini_set('memory_limit', '-1'); $user = Auth::user(); - $sitePolygon = Site::isUuid($uuid)->first(); - $projectId = $sitePolygon->project_id ?? null; + $site = Site::isUuid($uuid)->first(); + $projectId = $site->project_id ?? null; + + if (! $projectId) { + return response()->json(['error' => 'Project not found for the given site UUID.'], 404); + } + $polygonUuids = GeometryHelper::getProjectPolygonsUuids($projectId); - $delayedJob = DelayedJob::create(); - $job = new FixPolygonOverlapJob($delayedJob->id, $polygonUuids, $user->id); + + if (empty($polygonUuids)) { + return response()->json(['message' => 'No polygons found for the project.'], 204); + } + + $allPolygonUuids = []; + foreach ($polygonUuids as $uuid) { + $polygonOverlappingExtraInfo = CriteriaSite::forCriteria(PolygonService::OVERLAPPING_CRITERIA_ID) + ->where('polygon_id', $uuid) + ->first() + ->extra_info ?? null; + + if ($polygonOverlappingExtraInfo) { + $decodedInfo = json_decode($polygonOverlappingExtraInfo, true); + $polygonUuidsOverlapping = array_map(function ($item) { + return $item['poly_uuid'] ?? 
null; + }, $decodedInfo); + $polygonUuidsFiltered = array_filter($polygonUuidsOverlapping); + + array_unshift($polygonUuidsFiltered, $uuid); + $allPolygonUuids = array_merge($allPolygonUuids, $polygonUuidsFiltered); + } + } + + $uniquePolygonUuids = array_unique($allPolygonUuids); + + if (empty($uniquePolygonUuids)) { + return response()->json(['message' => 'No overlapping polygons found for the project.'], 204); + } + + $delayedJob = DelayedJobProgress::create([ + 'processed_content' => 0, + 'metadata' => [ + 'entity_id' => $site->id, + 'entity_type' => get_class($site), + 'entity_name' => $site->name, + ], + 'created_by' => $user->id, + 'is_acknowledged' => false, + 'name' => 'Polygon Fix', + ]); + $job = new FixPolygonOverlapJob($delayedJob->id, $uniquePolygonUuids, $user->id); dispatch($job); return new DelayedJobResource($delayedJob); @@ -53,6 +109,11 @@ public function clipOverlappingPolygons(Request $request) ini_set('max_execution_time', self::MAX_EXECUTION_TIME); ini_set('memory_limit', '-1'); $uuids = $request->input('uuids'); + $uuid = $request->input('entity_uuid'); + $type = $request->input('entity_type'); + if ($type === 'sites') { + $entity = Site::where('uuid', $uuid)->firstOrFail(); + } Log::info('Clipping polygons', ['uuids' => $uuids]); if (empty($uuids) || ! is_array($uuids)) { return response()->json(['error' => 'Invalid or missing UUIDs'], 400); @@ -89,7 +150,17 @@ public function clipOverlappingPolygons(Request $request) $delayedJob = null; if (! empty($uniquePolygonUuids)) { $user = Auth::user(); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJobProgress::create([ + 'processed_content' => 0, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'created_by' => $user->id, + 'is_acknowledged' => false, + 'name' => 'Polygon Fix', + ]); $job = new FixPolygonOverlapJob($delayedJob->id, $polygonUuids, $user->id); dispatch($job); } diff --git a/app/Http/Controllers/V2/Terrafund/TerrafundCreateGeometryController.php b/app/Http/Controllers/V2/Terrafund/TerrafundCreateGeometryController.php index 262687b1c..a3b4bf6d8 100755 --- a/app/Http/Controllers/V2/Terrafund/TerrafundCreateGeometryController.php +++ b/app/Http/Controllers/V2/Terrafund/TerrafundCreateGeometryController.php @@ -8,6 +8,7 @@ use App\Jobs\InsertGeojsonToDBJob; use App\Jobs\RunSitePolygonsValidationJob; use App\Models\DelayedJob; +use App\Models\DelayedJobProgress; use App\Models\V2\PolygonGeometry; use App\Models\V2\Sites\Site; use App\Models\V2\Sites\SitePolygon; @@ -234,9 +235,21 @@ function ($attribute, $value, $fail) { return response()->json($polygonLoaded->original, 200); } + $user = Auth::user(); + $entity = Site::where('uuid', $site_id)->firstOrFail(); + $redis_key = 'kml_file_' . uniqid(); Redis::set($redis_key, $geojsonContent, 'EX', 7200); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJob::create([ + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Upload', + ]); $job = new InsertGeojsonToDBJob( $redis_key, @@ -393,10 +406,21 @@ public function uploadShapefile(Request $request) return response()->json($polygonLoaded->original, 200); } + $user = Auth::user(); + $entity = Site::where('uuid', $site_id)->firstOrFail(); $redis_key = 'shapefile_file_' . 
uniqid(); Redis::set($redis_key, $geojsonContent, 'EX', 7200); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJob::create([ + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Upload', + ]); $job = new InsertGeojsonToDBJob( $redis_key, @@ -613,10 +637,21 @@ public function uploadGeoJSONFile(Request $request) return response()->json($polygonLoaded->original, 200); } + $user = Auth::user(); + $entity = Site::where('uuid', $site_id)->firstOrFail(); $redis_key = 'geojson_file_' . uniqid(); Redis::set($redis_key, $geojson_content, 'EX', 7200); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJob::create([ + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Upload', + ]); $job = new InsertGeojsonToDBJob( $redis_key, @@ -1218,8 +1253,21 @@ public function runSiteValidationPolygon(Request $request) try { $uuid = $request->input('uuid'); + $user = Auth::user(); + $entity = Site::where('uuid', $uuid)->firstOrFail(); $sitePolygonsUuids = GeometryHelper::getSitePolygonsUuids($uuid)->toArray(); - $delayedJob = DelayedJob::create(); + $delayedJob = DelayedJobProgress::create([ + 'total_content' => count($sitePolygonsUuids), + 'processed_content' => 0, + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Validation', + ]); $job = new RunSitePolygonsValidationJob($delayedJob->id, $sitePolygonsUuids); dispatch($job); @@ -1235,7 +1283,24 @@ public function runPolygonsValidation(Request $request) { try { $uuids = $request->input('uuids'); - $delayedJob = DelayedJob::create(); + $uuid = $request->input('entity_uuid'); + $type = $request->input('entity_type'); + if ($type === 'sites') { + $entity = Site::where('uuid', $uuid)->firstOrFail(); + } + $user = Auth::user(); + $delayedJob = DelayedJobProgress::create([ + 'total_content' => count($uuids), + 'processed_content' => 0, + 'created_by' => $user->id, + 'metadata' => [ + 'entity_id' => $entity->id, + 'entity_type' => get_class($entity), + 'entity_name' => $entity->name, + ], + 'is_acknowledged' => false, + 'name' => 'Polygon Validation', + ]); $job = new RunSitePolygonsValidationJob($delayedJob->id, $uuids); dispatch($job); diff --git a/app/Http/Controllers/V2/Terrafund/TerrafundEditGeometryController.php b/app/Http/Controllers/V2/Terrafund/TerrafundEditGeometryController.php index 26ff7ac17..469efa40b 100644 --- a/app/Http/Controllers/V2/Terrafund/TerrafundEditGeometryController.php +++ b/app/Http/Controllers/V2/Terrafund/TerrafundEditGeometryController.php @@ -9,6 +9,7 @@ use App\Models\V2\Projects\ProjectPolygon; use App\Models\V2\Sites\SitePolygon; use App\Models\V2\User; +use App\Services\AreaCalculationService; use App\Services\PolygonService; use App\Services\SiteService; use Illuminate\Http\Request; @@ -329,10 +330,12 @@ public function createSitePolygon(string $uuid, string $siteUuid, Request $reque if (! 
$polygonGeometry) { return response()->json(['message' => 'No polygon geometry found for the given UUID.'], 404); } - $areaSqDegrees = DB::selectOne('SELECT ST_Area(geom) AS area FROM polygon_geometry WHERE uuid = :uuid', ['uuid' => $uuid])->area; - $latitude = DB::selectOne('SELECT ST_Y(ST_Centroid(geom)) AS latitude FROM polygon_geometry WHERE uuid = :uuid', ['uuid' => $uuid])->latitude; - $areaSqMeters = $areaSqDegrees * pow(111320 * cos(deg2rad($latitude)), 2); - $areaHectares = $areaSqMeters / 10000; + $polygonGeom = PolygonGeometry::where('uuid', $uuid) + ->select('uuid', DB::raw('ST_AsGeoJSON(geom) AS geojsonGeometry')) + ->first(); + $geometry = json_decode($polygonGeom->geojsonGeometry, true); + $areaCalculationService = app(AreaCalculationService::class); + $areaHectares = $areaCalculationService->getArea($geometry); $sitePolygon = new SitePolygon([ 'poly_name' => $validatedData['poly_name'], 'plantstart' => $validatedData['plantstart'], diff --git a/app/Http/Resources/DelayedJobProgressResource.php b/app/Http/Resources/DelayedJobProgressResource.php new file mode 100644 index 000000000..59c1dccfe --- /dev/null +++ b/app/Http/Resources/DelayedJobProgressResource.php @@ -0,0 +1,25 @@ + + */ + public function toArray(Request $request): array + { + return [ + 'message' => $this->message ?? 'Job dispatched', + 'job_uuid' => $this->uuid, + 'proccessed_content' => $this->processed_content, + 'total_content' => $this->total_content, + 'progress_message' => $this->progress_message, + ]; + } +} diff --git a/app/Http/Resources/V2/Dashboard/ProjectProfileDetailsResource.php b/app/Http/Resources/V2/Dashboard/ProjectProfileDetailsResource.php index b20ca295e..c4e18b813 100644 --- a/app/Http/Resources/V2/Dashboard/ProjectProfileDetailsResource.php +++ b/app/Http/Resources/V2/Dashboard/ProjectProfileDetailsResource.php @@ -2,19 +2,18 @@ namespace App\Http\Resources\V2\Dashboard; +use App\Models\Traits\HasProjectCoverImage; use App\Models\V2\Forms\FormOptionListOption; use Illuminate\Http\Resources\Json\JsonResource; class ProjectProfileDetailsResource extends JsonResource { - /** - * Transform the resource into an array. - * - * @param \Illuminate\Http\Request $request - * @return array - */ + use HasProjectCoverImage; + public function toArray($request) { + $coverImage = $this->getProjectCoverImage($this->resource); + $data = [ 'name' => $this->name, 'descriptionObjetive' => $this->objectives, @@ -26,9 +25,16 @@ public function toArray($request) 'targetLandUse' => $this->land_use_types, 'landTenure' => $this->land_tenure_project_area, 'framework' => $this->framework_key, + 'cover_image' => $coverImage ? 
[ + 'id' => $coverImage->id, + 'url' => $coverImage->getUrl(), + 'thumbnail' => $coverImage->getUrl('thumbnail'), + 'is_cover' => $coverImage->is_cover, + 'mime_type' => $coverImage->mime_type, + ] : null, ]; - return $this->appendFilesToResource($data); + return $data; } public function getCountryLabel($slug) diff --git a/app/Http/Resources/V2/Organisation/MonitoringOrganisationResource.php b/app/Http/Resources/V2/Organisation/MonitoringOrganisationResource.php index 98dd9cd4d..d97c78119 100644 --- a/app/Http/Resources/V2/Organisation/MonitoringOrganisationResource.php +++ b/app/Http/Resources/V2/Organisation/MonitoringOrganisationResource.php @@ -40,7 +40,7 @@ public function toArray($request) 'founding_date' => $this->founding_date, 'description' => $this->description, - 'tree_species' => TreeSpeciesResource::collection($this->treeSpecies), + 'tree_species' => TreeSpeciesResource::collection($this->treeSpeciesHistorical), 'web_url' => $this->web_url, 'facebook_url' => $this->facebook_url, diff --git a/app/Http/Resources/V2/Organisation/OrganisationResource.php b/app/Http/Resources/V2/Organisation/OrganisationResource.php index fa1ac03a7..71cf87ef0 100644 --- a/app/Http/Resources/V2/Organisation/OrganisationResource.php +++ b/app/Http/Resources/V2/Organisation/OrganisationResource.php @@ -39,8 +39,7 @@ public function toArray($request) 'founding_date' => $this->founding_date, 'description' => $this->description, - 'tree_species' => TreeSpeciesResource::collection($this->treeSpecies), - 'tree_species_restored' => TreeSpeciesResource::collection($this->treeSpeciesRestored), + 'tree_species_historical' => TreeSpeciesResource::collection($this->treeSpeciesHistorical), 'project_pitches' => ProjectPitchResource::collection($this->projectPitches), 'leadership_team' => LeadershipTeamResource::collection($this->leadershipTeam), 'core_team_leaders' => CoreTeamLeaderResource::collection($this->coreTeamLeaders), diff --git a/app/Http/Resources/V2/ProjectReports/ProjectReportResource.php b/app/Http/Resources/V2/ProjectReports/ProjectReportResource.php index c39000056..2dd10fb54 100644 --- a/app/Http/Resources/V2/ProjectReports/ProjectReportResource.php +++ b/app/Http/Resources/V2/ProjectReports/ProjectReportResource.php @@ -122,6 +122,16 @@ public function toArray($request) 'workdays_convergence_total' => $this->workdays_convergence_total, 'non_tree_total' => $this->non_tree_total, 'total_community_partners' => $this->total_community_partners, + 'business_milestones' => $this->business_milestones, + 'ft_other' => $this->ft_other, + 'pt_other' => $this->pt_other, + 'volunteer_other' => $this->volunteer_other, + 'beneficiaries_other' => $this->beneficiaries_other, + 'beneficiaries_training_women' => $this->beneficiaries_training_women, + 'beneficiaries_training_men' => $this->beneficiaries_training_men, + 'beneficiaries_training_other' => $this->beneficiaries_training_other, + 'beneficiaries_training_youth' => $this->beneficiaries_training_youth, + 'beneficiaries_training_non_youth' => $this->beneficiaries_training_non_youth, ]; return $this->appendFilesToResource($data); diff --git a/app/Http/Resources/V2/SiteReports/SiteReportResource.php b/app/Http/Resources/V2/SiteReports/SiteReportResource.php index e79590041..9e865c21e 100644 --- a/app/Http/Resources/V2/SiteReports/SiteReportResource.php +++ b/app/Http/Resources/V2/SiteReports/SiteReportResource.php @@ -65,6 +65,12 @@ public function toArray($request) 'created_by' => $this->handleCreatedBy(), 'regeneration_description' => 
$this->regeneration_description, 'total_non_tree_species_planted_count' => $this->total_non_tree_species_planted_count, + 'total_tree_replanting_count' => $this->total_tree_replanting_count, + + 'pct_survival_to_date' => $this->pct_survival_to_date, + 'survival_calculation' => $this->survival_calculation, + 'survival_description' => $this->survival_description, + 'maintenance_activities' => $this->maintenance_activities, ]; return $this->appendFilesToResource($data); diff --git a/app/Http/Resources/V2/TreeSpecies/TreeSpeciesResource.php b/app/Http/Resources/V2/TreeSpecies/TreeSpeciesResource.php index 7ce666bf2..12cd4daaa 100644 --- a/app/Http/Resources/V2/TreeSpecies/TreeSpeciesResource.php +++ b/app/Http/Resources/V2/TreeSpecies/TreeSpeciesResource.php @@ -18,6 +18,7 @@ public function toArray($request) 'amount' => $this->amount, 'type' => $this->type, 'collection' => $this->collection, + 'taxon_id' => $this->taxon_id, ]; } } diff --git a/app/Jobs/FixPolygonOverlapJob.php b/app/Jobs/FixPolygonOverlapJob.php index 6196c339e..ab1e2f238 100644 --- a/app/Jobs/FixPolygonOverlapJob.php +++ b/app/Jobs/FixPolygonOverlapJob.php @@ -3,7 +3,10 @@ namespace App\Jobs; use App\Http\Middleware\SetAuthenticatedUserForJob; +use App\Mail\PolygonOperationsComplete; use App\Models\DelayedJob; +use App\Models\DelayedJobProgress; +use App\Models\V2\Sites\Site; use App\Services\PolygonService; use Exception; use Illuminate\Bus\Queueable; @@ -15,6 +18,7 @@ use Illuminate\Support\Facades\App; use Illuminate\Support\Facades\Auth; use Illuminate\Support\Facades\Log; +use Illuminate\Support\Facades\Mail; use Throwable; class FixPolygonOverlapJob implements ShouldQueue @@ -63,15 +67,28 @@ public function handle(): void { try { - $delayedJob = DelayedJob::findOrFail($this->delayed_job_id); + $delayedJob = DelayedJobProgress::findOrFail($this->delayed_job_id); $user = Auth::user(); + $metadata = $delayedJob->metadata; + $entityId = $metadata['entity_id'] ?? null; + $site = Site::findOrFail($entityId); + $userForMail = $delayedJob->creator; if ($user) { - $polygonsClipped = App::make(PolygonService::class)->processClippedPolygons($this->polygonUuids); + $polygonsClipped = App::make(PolygonService::class)->processClippedPolygons($this->polygonUuids, $this->delayed_job_id); $delayedJob->update([ - 'status' => DelayedJob::STATUS_SUCCEEDED, + 'status' => DelayedJobProgress::STATUS_SUCCEEDED, 'payload' => json_encode(['updated_polygons' => $polygonsClipped]), 'status_code' => Response::HTTP_OK, + 'progress' => 100, ]); + + Mail::to($user->email_address) + ->send(new PolygonOperationsComplete( + $site, + 'Fix', + $userForMail, + now() + )); } } catch (Exception $e) { Log::error('Error in Fix Polygon Overlap Job: ' . $e->getMessage()); @@ -82,7 +99,7 @@ public function handle(): void 'status_code' => Response::HTTP_INTERNAL_SERVER_ERROR, ]); } catch (Throwable $e) { - Log::error('Throwable Error in RunSitePolygonsValidationJob: ' . $e->getMessage()); + Log::error('Throwable Error in Fix overlap job: ' . 
$e->getMessage()); DelayedJob::where('uuid', $this->delayed_job_id)->update([ 'status' => DelayedJob::STATUS_FAILED, diff --git a/app/Jobs/InsertGeojsonToDBJob.php b/app/Jobs/InsertGeojsonToDBJob.php index a07752d5b..8809163e8 100755 --- a/app/Jobs/InsertGeojsonToDBJob.php +++ b/app/Jobs/InsertGeojsonToDBJob.php @@ -2,19 +2,22 @@ namespace App\Jobs; +use App\Mail\PolygonOperationsComplete; use App\Models\DelayedJob; +use App\Models\V2\Sites\Site; use App\Services\PolygonService; use App\Services\SiteService; use Exception; use Illuminate\Bus\Queueable; -use Illuminate\Contracts\Queue\ShouldQueue; +use Illuminate\Contracts\Queue\ShouldQueue; use Illuminate\Foundation\Bus\Dispatchable; use Illuminate\Http\Response; use Illuminate\Queue\InteractsWithQueue; use Illuminate\Queue\SerializesModels; use Illuminate\Support\Facades\App; use Illuminate\Support\Facades\Log; +use Illuminate\Support\Facades\Mail; use Illuminate\Support\Facades\Redis; class InsertGeojsonToDBJob implements ShouldQueue @@ -51,6 +54,11 @@ public function __construct(string $redis_key, string $delayed_job_id, ?string $ public function handle(PolygonService $service) { $delayedJob = DelayedJob::findOrFail($this->delayed_job_id); + $user = $delayedJob->creator; + $metadata = $delayedJob->metadata; + $entityId = $metadata['entity_id'] ?? null; + + $site = Site::findOrFail($entityId); try { $geojsonContent = Redis::get($this->redis_key); @@ -86,6 +94,14 @@ public function handle(PolygonService $service) 'status_code' => Response::HTTP_OK, ]); + Mail::to($user->email_address) + ->send(new PolygonOperationsComplete( + $site, + 'Upload', + $user, + now() + )); + } catch (Exception $e) { Log::error('Error in InsertGeojsonToDBJob: ' . $e->getMessage()); $delayedJob->update([ diff --git a/app/Jobs/RunIndicatorAnalysisJob.php b/app/Jobs/RunIndicatorAnalysisJob.php new file mode 100644 index 000000000..b7aa76efa --- /dev/null +++ b/app/Jobs/RunIndicatorAnalysisJob.php @@ -0,0 +1,60 @@ +delayed_job_id = $delayed_job_id; + $this->request = $request; + $this->slug = $slug; + } + + public function handle(RunIndicatorAnalysisService $runIndicatorAnalysisService) + { + try { + $delayedJob = DelayedJob::findOrFail($this->delayed_job_id); + $runIndicatorAnalysisService->run($this->request, $this->slug); + + $delayedJob->update([ + 'status' => DelayedJob::STATUS_SUCCEEDED, + 'payload' => ['message' => 'Analysis completed'], + 'status_code' => Response::HTTP_OK, + ]); + + } catch (Exception $e) { + Log::error('Error in the analysis: ' . 
$e->getMessage()); + + DelayedJob::where('id', $this->delayed_job_id)->update([ + 'status' => DelayedJob::STATUS_FAILED, + 'payload' => json_encode(['error' => $e->getMessage()]), + 'status_code' => Response::HTTP_INTERNAL_SERVER_ERROR, + ]); + } + } +} diff --git a/app/Jobs/RunSitePolygonsValidationJob.php b/app/Jobs/RunSitePolygonsValidationJob.php index 96fe5e6f5..bcd1b43a2 100644 --- a/app/Jobs/RunSitePolygonsValidationJob.php +++ b/app/Jobs/RunSitePolygonsValidationJob.php @@ -2,7 +2,10 @@ namespace App\Jobs; +use App\Mail\PolygonOperationsComplete; use App\Models\DelayedJob; +use App\Models\DelayedJobProgress; +use App\Models\V2\Sites\Site; use App\Services\PolygonValidationService; use Exception; use Illuminate\Bus\Queueable; @@ -13,6 +16,7 @@ use Illuminate\Queue\InteractsWithQueue; use Illuminate\Queue\SerializesModels; use Illuminate\Support\Facades\Log; +use Illuminate\Support\Facades\Mail; class RunSitePolygonsValidationJob implements ShouldQueue { @@ -48,7 +52,22 @@ public function __construct(string $delayed_job_id, array $sitePolygonsUuids) public function handle(PolygonValidationService $validationService) { try { - $delayedJob = DelayedJob::findOrFail($this->delayed_job_id); + $delayedJob = DelayedJobProgress::findOrFail($this->delayed_job_id); + $user = $delayedJob->creator; + $metadata = $delayedJob->metadata; + + $entityId = $metadata['entity_id'] ?? null; + + if ($entityId) { + $site = Site::findOrFail($entityId); + } else { + Log::error('entityId is null, unable to find site'); + } + + if (! $site) { + throw new Exception('Site not found for the given site UUID.'); + } + foreach ($this->sitePolygonsUuids as $polygonUuid) { $request = new Request(['uuid' => $polygonUuid]); $validationService->validateOverlapping($request); @@ -60,14 +79,29 @@ public function handle(PolygonValidationService $validationService) $validationService->getGeometryType($request); $validationService->validateEstimatedArea($request); $validationService->validateDataInDB($request); + + $delayedJob->increment('processed_content'); + $delayedJob->processMessage(); + $delayedJob->save(); } $delayedJob->update([ - 'status' => DelayedJob::STATUS_SUCCEEDED, + 'status' => DelayedJobProgress::STATUS_SUCCEEDED, 'payload' => ['message' => 'Validation completed for all site polygons'], 'status_code' => Response::HTTP_OK, + 'progress' => 100, ]); + Log::info('site available? ' . $site); + + Mail::to($user->email_address) + ->send(new PolygonOperationsComplete( + $site, + 'Check', + $user, + now() + )); + } catch (Exception $e) { Log::error('Error in RunSitePolygonsValidationJob: ' . $e->getMessage()); @@ -77,5 +111,6 @@ public function handle(PolygonValidationService $validationService) 'status_code' => Response::HTTP_INTERNAL_SERVER_ERROR, ]); } + } } diff --git a/app/Jobs/SendDailyDigestNotificationsJob.php b/app/Jobs/SendDailyDigestNotificationsJob.php index 9fc198189..d663243ea 100644 --- a/app/Jobs/SendDailyDigestNotificationsJob.php +++ b/app/Jobs/SendDailyDigestNotificationsJob.php @@ -37,12 +37,15 @@ public function __construct(Task $task) */ public function handle(): void { + if (! $this->task->project) { + return; + } $users = $this->task->project->users()->get(); $users = $this->skipRecipients($users); $usersGroupedByLocale = $users->groupBy('locale'); $taskDueAt = Carbon::parse($this->task->due_at); - if (! $this->verifyIfReportsAreApproved($this->task) && Carbon::now()->diffInDays($taskDueAt) == 7) { + if (! 
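/*
In the validation job above, $site is only assigned inside the if ($entityId) branch, so the following if (! $site) guard can read an undefined variable when the job metadata carries no entity_id. A slightly more defensive shape, offered only as a sketch and not as the code in this PR, initialises the variable up front:

    $entityId = $delayedJob->metadata['entity_id'] ?? null;
    $site = $entityId ? Site::find($entityId) : null;

    if ($site === null) {
        throw new Exception('Site not found for delayed job ' . $this->delayed_job_id);
    }

Using find() plus an explicit throw keeps the failure message specific while still being handled by the job's existing catch block.
*/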
$this->verifyIfReportsAreApproved($this->task) && Carbon::now()->diffInDays($taskDueAt) <= 7) { foreach ($usersGroupedByLocale as $locale => $users) { $groupedLocale['locale'] = $locale; Mail::to($users->pluck('email_address')->toArray())->queue(new TaskDigestMail($groupedLocale, $this->task)); diff --git a/app/Mail/PolygonOperationsComplete.php b/app/Mail/PolygonOperationsComplete.php new file mode 100644 index 000000000..b08ec4abf --- /dev/null +++ b/app/Mail/PolygonOperationsComplete.php @@ -0,0 +1,40 @@ +site = $site; + $this->operation = $operation; + $this->completedAt = $completedAt; + + $this->setSubjectKey('polygon-validation.subject') + ->setTitleKey('polygon-validation.title') + ->setBodyKey('polygon-validation.body') + ->setParams([ + '{operation}' => e($operation), + '{operationUpper}' => strtoupper(e($operation)), + '{siteName}' => e($site->name), + '{completedTime}' => $completedAt->format('H:i'), + ]) + ->setCta('polygon-validation.cta'); + + if ($user->hasRole('project-developer')) { + $this->link = '/site/' . $site->uuid; + } else { + $this->link = '/admin#/site/' . $site->uuid . '/show'; + } + + $this->transactional = true; + } +} diff --git a/app/Models/DelayedJob.php b/app/Models/DelayedJob.php index 4ae833449..25b599e34 100644 --- a/app/Models/DelayedJob.php +++ b/app/Models/DelayedJob.php @@ -3,6 +3,7 @@ namespace App\Models; use App\Models\Traits\HasUuid; +use App\Models\V2\User; use Illuminate\Database\Eloquent\Factories\HasFactory; use Illuminate\Database\Eloquent\Model; @@ -17,14 +18,15 @@ class DelayedJob extends Model protected $table = 'delayed_jobs'; - protected $fillable = [ - 'uuid', - 'status', - 'status_code', - 'payload', - ]; + protected $fillable = ['uuid', 'status', 'status_code', 'payload', 'metadata', 'created_by', 'is_acknowledged', 'name']; protected $casts = [ 'uuid' => 'string', + 'metadata' => 'json', ]; + + public function creator() + { + return $this->belongsTo(User::class, 'created_by'); + } } diff --git a/app/Models/DelayedJobProgress.php b/app/Models/DelayedJobProgress.php new file mode 100644 index 000000000..8dcd57867 --- /dev/null +++ b/app/Models/DelayedJobProgress.php @@ -0,0 +1,36 @@ +fillable = array_merge($this->fillable, [ + 'processed_content', + 'total_content', + 'progress_message', + ]); + + $this->casts = array_merge($this->casts, [ + 'processed_content' => 'integer', + 'total_content' => 'integer', + 'progress_message' => 'string', + ]); + } + + public function processMessage(): string + { + $progress = 0; + if ($this->total_content > 0) { + $progress = (int)(($this->processed_content / $this->total_content) * 100); + } else { + $progress = 0; + } + + return $this->progress_message = 'Running '. $this->processed_content .' out of ' + .$this->total_content. 
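/*
processMessage() here derives an integer percentage from processed_content / total_content (falling back to 0 when total_content is 0) and stores a human-readable progress string. A small worked example with illustrative values:

    $job = new DelayedJobProgress();
    $job->processed_content = 30;
    $job->total_content = 120;
    $job->processMessage();   // progress_message === 'Running 30 out of 120 polygons (25%)'

    $job->total_content = 0;
    $job->processMessage();   // division guard: the percentage falls back to 0
*/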
' polygons ('.$progress.'%)' ; + } +} diff --git a/app/Models/Site.php b/app/Models/Site.php index b50b01d87..12d4ef4ef 100644 --- a/app/Models/Site.php +++ b/app/Models/Site.php @@ -156,4 +156,9 @@ public function getTotalWorkdaysAttribute(): int { return $this->total_paid_workdays + $this->total_volunteer_workdays; } + + public function delayedJobs() + { + return $this->morphMany(DelayedJob::class, 'entity'); + } } diff --git a/app/Models/Traits/HasProjectCoverImage.php b/app/Models/Traits/HasProjectCoverImage.php new file mode 100644 index 000000000..b6656e9f1 --- /dev/null +++ b/app/Models/Traits/HasProjectCoverImage.php @@ -0,0 +1,57 @@ + get_class($project), 'ids' => [$project->id]], + ['type' => Site::class, 'ids' => $project->sites->pluck('id')->toArray()], + ['type' => Nursery::class, 'ids' => $project->nurseries->pluck('id')->toArray()], + ['type' => ProjectReport::class, 'ids' => $project->reports->pluck('id')->toArray()], + ['type' => SiteReport::class, 'ids' => $project->siteReports->pluck('id')->toArray()], + ['type' => NurseryReport::class, 'ids' => $project->nurseryReports->pluck('id')->toArray()], + ]; + + $coverMedia = Media::where(function ($query) use ($models) { + foreach ($models as $model) { + $query->orWhere(function ($query) use ($model) { + $query->where('model_type', $model['type']) + ->whereIn('model_id', $model['ids']); + }); + } + }) + ->where('is_cover', true) + ->first(); + + if ($coverMedia) { + return $coverMedia; + } + + // If no cover image found, the latest image is sent + return Media::where(function ($query) use ($models) { + foreach ($models as $model) { + $query->orWhere(function ($query) use ($model) { + $query->where('model_type', $model['type']) + ->whereIn('model_id', $model['ids']); + }); + } + }) + ->where(function ($query) { + $query->where('mime_type', 'like', 'image/jpeg') + ->orWhere('mime_type', 'like', 'image/png'); + }) + ->latest() + ->first(); + } +} diff --git a/app/Models/Traits/UsesLinkedFields.php b/app/Models/Traits/UsesLinkedFields.php index 68be03d2a..818fcf3ac 100644 --- a/app/Models/Traits/UsesLinkedFields.php +++ b/app/Models/Traits/UsesLinkedFields.php @@ -293,8 +293,10 @@ private function syncRelation(string $property, string $inputType, $data, bool $ if ($model != null) { $model->update($entry); } else { - // protection against updating a deleted entry - unset($entry['uuid']); + // protection against clashing with a deleted entry + if (! 
empty($entry['uuid']) && $entity->$property()->onlyTrashed()->where('uuid', $entry['uuid'])->exists()) { + unset($entry['uuid']); + } $entity->$property()->create($entry); } } diff --git a/app/Models/V2/MonitoredData/IndicatorHectares.php b/app/Models/V2/MonitoredData/IndicatorHectares.php new file mode 100644 index 000000000..55deef5f4 --- /dev/null +++ b/app/Models/V2/MonitoredData/IndicatorHectares.php @@ -0,0 +1,29 @@ +belongsTo(SitePolygon::class, 'site_polygon_id', 'id'); + } +} diff --git a/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php b/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php new file mode 100644 index 000000000..599103c18 --- /dev/null +++ b/app/Models/V2/MonitoredData/IndicatorTreeCoverLoss.php @@ -0,0 +1,29 @@ +belongsTo(SitePolygon::class, 'site_polygon_id', 'id'); + } +} diff --git a/app/Models/V2/Organisation.php b/app/Models/V2/Organisation.php index 6bd2593c0..2415cb920 100644 --- a/app/Models/V2/Organisation.php +++ b/app/Models/V2/Organisation.php @@ -225,16 +225,10 @@ public static function search($query) ->where('organisations.name', 'like', "%$query%"); } - public function treeSpecies(): MorphMany + public function treeSpeciesHistorical(): MorphMany { return $this->morphMany(TreeSpecies::class, 'speciesable') - ->whereNull('collection'); - } - - public function treeSpeciesRestored(): MorphMany - { - return $this->morphMany(TreeSpecies::class, 'speciesable') - ->where('collection', TreeSpecies::COLLECTION_RESTORED); + ->where('collection', TreeSpecies::COLLECTION_HISTORICAL); } public function owners(): HasMany diff --git a/app/Models/V2/Projects/ProjectReport.php b/app/Models/V2/Projects/ProjectReport.php index aa44ec361..2ed0c631c 100644 --- a/app/Models/V2/Projects/ProjectReport.php +++ b/app/Models/V2/Projects/ProjectReport.php @@ -160,7 +160,16 @@ class ProjectReport extends Model implements MediaModel, AuditableContract, Repo 'beneficiaries_scstobc_farmers', 'beneficiaries_scstobc', 'total_unique_restoration_partners', - + 'business_milestones', + 'ft_other', + 'pt_other', + 'volunteer_other', + 'beneficiaries_other', + 'beneficiaries_training_women', + 'beneficiaries_training_men', + 'beneficiaries_training_other', + 'beneficiaries_training_youth', + 'beneficiaries_training_non_youth', // virtual (see HasWorkdays trait) 'other_workdays_description', // virtual (see HasRestorationPartners trait) diff --git a/app/Models/V2/Sites/SitePolygon.php b/app/Models/V2/Sites/SitePolygon.php index 9f60a7715..a41191fd3 100644 --- a/app/Models/V2/Sites/SitePolygon.php +++ b/app/Models/V2/Sites/SitePolygon.php @@ -5,6 +5,8 @@ use App\Models\Traits\HasUuid; use App\Models\V2\AuditableModel; use App\Models\V2\AuditStatus\AuditStatus; +use App\Models\V2\MonitoredData\IndicatorHectares; +use App\Models\V2\MonitoredData\IndicatorTreeCoverLoss; use App\Models\V2\PointGeometry; use App\Models\V2\PolygonGeometry; use App\Models\V2\Projects\Project; @@ -98,6 +100,16 @@ public function auditStatuses(): MorphMany return $this->morphMany(AuditStatus::class, 'auditable'); } + public function hectaresIndicator() + { + return $this->hasMany(IndicatorHectares::class, 'site_polygon_id'); + } + + public function treeCoverLossIndicator() + { + return $this->hasMany(IndicatorTreeCoverLoss::class, 'site_polygon_id'); + } + public function getAuditableNameAttribute(): string { return $this->poly_name ?? 
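/*
With the two hasMany relations added above, indicator rows produced by the analysis service later in this PR can be read straight off a SitePolygon. A sketch, illustrative only; the lookup by poly_id and the 'treeCoverLoss' slug mirror how other code in this PR stores the data:

    $sitePolygon = SitePolygon::where('poly_id', $polygonUuid)->first();

    $loss = $sitePolygon->treeCoverLossIndicator()
        ->where('indicator_slug', 'treeCoverLoss')
        ->where('year_of_analysis', now()->year)
        ->first();

    $lossByYear = $loss ? json_decode($loss->value, true) : [];   // e.g. ['2015' => 0.0, ..., '2024' => 0.0]
*/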
''; diff --git a/app/Models/V2/Sites/SiteReport.php b/app/Models/V2/Sites/SiteReport.php index 439fdfabd..97f0a78ce 100644 --- a/app/Models/V2/Sites/SiteReport.php +++ b/app/Models/V2/Sites/SiteReport.php @@ -100,7 +100,10 @@ class SiteReport extends Model implements MediaModel, AuditableContract, ReportM 'water_structures', 'site_community_partners_description', 'site_community_partners_income_increase_description', - + 'pct_survival_to_date', + 'survival_calculation', + 'survival_description', + 'maintenance_activities', // virtual (see HasWorkdays trait) 'other_workdays_description', ]; @@ -239,6 +242,11 @@ public function nonTreeSpecies() return $this->morphMany(TreeSpecies::class, 'speciesable')->where('collection', 'non-tree'); } + public function replantingTreeSpecies() + { + return $this->morphMany(TreeSpecies::class, 'speciesable')->where('collection', 'replanting'); + } + public function seedings(): MorphMany { return $this->morphMany(Seeding::class, 'seedable'); @@ -299,6 +307,11 @@ public function getTotalNonTreeSpeciesPlantedCountAttribute(): int return $this->nonTreeSpecies()->visible()->sum('amount'); } + public function getTotalTreeReplantingCountAttribute(): int + { + return $this->replantingTreeSpecies()->visible()->sum('amount'); + } + public function getTotalSeedsPlantedCountAttribute(): int { return $this->seedings()->visible()->sum('amount'); diff --git a/app/Models/V2/TreeSpecies/TreeSpecies.php b/app/Models/V2/TreeSpecies/TreeSpecies.php index 200740ee3..56b2827f5 100644 --- a/app/Models/V2/TreeSpecies/TreeSpecies.php +++ b/app/Models/V2/TreeSpecies/TreeSpecies.php @@ -32,31 +32,30 @@ class TreeSpecies extends Model implements EntityRelationModel public $table = 'v2_tree_species'; protected $fillable = [ + 'uuid', 'name', 'amount', 'speciesable_type', 'speciesable_id', 'collection', 'hidden', - - 'old_id', - 'old_model', + 'taxon_id', ]; public const COLLECTION_DIRECT_SEEDING = 'direct-seeding'; public const COLLECTION_PLANTED = 'tree-planted'; public const COLLECTION_NON_TREE = 'non-tree'; + public const COLLECTION_REPLANTING = 'replanting'; public const COLLECTION_NURSERY = 'nursery-seedling'; - public const COLLECTION_RESTORED = 'restored'; - public const COLLECTION_PRIMARY = 'primary'; + public const COLLECTION_HISTORICAL = 'historical-tree-species'; public static $collections = [ self::COLLECTION_DIRECT_SEEDING => 'Direct Seeding', self::COLLECTION_PLANTED => 'Planted', self::COLLECTION_NON_TREE => 'Non Tree', + self::COLLECTION_REPLANTING => 'Replanting', self::COLLECTION_NURSERY => 'Nursery Seedling', - self::COLLECTION_RESTORED => 'Restored', - self::COLLECTION_PRIMARY => 'Primary', + self::COLLECTION_HISTORICAL => 'Historical Tree Species', ]; public static function createResourceCollection(EntityModel $entity): JsonResource @@ -84,6 +83,11 @@ public function speciesable() return $this->morphTo(); } + public function taxonomicSpecies() + { + return $this->belongsTo(TreeSpeciesResearch::class, 'taxon_id'); + } + public function getRouteKeyName() { return 'uuid'; diff --git a/app/Models/V2/TreeSpecies/TreeSpeciesResearch.php b/app/Models/V2/TreeSpecies/TreeSpeciesResearch.php new file mode 100644 index 000000000..499b5a726 --- /dev/null +++ b/app/Models/V2/TreeSpecies/TreeSpeciesResearch.php @@ -0,0 +1,27 @@ + 'Feature', + 'geometry' => $geometry, + 'crs' => ['type' => 'name', 'properties' => ['name' => 'EPSG:4326']], + ]); + + $inputGeojson = tempnam(sys_get_temp_dir(), 'input_') . '.geojson'; + $outputGeojson = tempnam(sys_get_temp_dir(), 'output_') . 
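/*
The TreeSpecies changes above add a taxon_id column and a taxonomicSpecies() relation to the new TreeSpeciesResearch table, which the import and backfill commands earlier in this PR populate. A sketch of reading the canonical taxonomy once a record has been matched (variable names are illustrative):

    $tree = TreeSpecies::where('uuid', $treeSpeciesUuid)->first();

    if ($tree?->taxonomicSpecies) {
        $scientificName = $tree->taxonomicSpecies->scientific_name;
        $family = $tree->taxonomicSpecies->family;
    }
*/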
'.geojson'; + + try { + file_put_contents($inputGeojson, $geojson); + + $process = new Process([ + 'python3', + base_path() . '/resources/python/polygon-area/app.py', + $inputGeojson, + $outputGeojson, + ]); + + $process->run(); + + if (! $process->isSuccessful()) { + Log::error('Area calculation failed: ' . $process->getErrorOutput()); + + throw new \RuntimeException('Area calculation failed: ' . $process->getErrorOutput()); + } + + $result = json_decode(file_get_contents($outputGeojson), true); + + return $result['area_hectares']; + + } catch (\Exception $e) { + Log::error('Error calculating area: ' . $e->getMessage()); + + throw $e; + } finally { + @unlink($inputGeojson); + @unlink($outputGeojson); + } + } + + public function getGeomAndArea(array $geometry): array + { + $geojson = json_encode([ + 'type' => 'Feature', + 'geometry' => $geometry, + 'crs' => ['type' => 'name', 'properties' => ['name' => 'EPSG:4326']], + ]); + + $geom = DB::raw("ST_GeomFromGeoJSON('$geojson')"); + $areaHectares = $this->calculateArea($geometry); + + return ['geom' => $geom, 'area' => $areaHectares]; + } + + public function getArea(array $geometry): float + { + if ($geometry['type'] === 'MultiPolygon') { + $totalArea = 0; + foreach ($geometry['coordinates'] as $polygon) { + $polygonGeometry = [ + 'type' => 'Polygon', + 'coordinates' => $polygon, + ]; + $totalArea += $this->calculateArea($polygonGeometry); + } + + return $totalArea; + } + + return $this->calculateArea($geometry); + } +} diff --git a/app/Services/PolygonService.php b/app/Services/PolygonService.php index c6712bb77..67b04218e 100755 --- a/app/Services/PolygonService.php +++ b/app/Services/PolygonService.php @@ -5,6 +5,7 @@ use App\Helpers\CreateVersionPolygonGeometryHelper; use App\Helpers\GeometryHelper; use App\Helpers\PolygonGeometryHelper; +use App\Models\DelayedJobProgress; use App\Models\V2\PointGeometry; use App\Models\V2\PolygonGeometry; use App\Models\V2\ProjectPitch; @@ -283,20 +284,9 @@ protected function getGeom(array $geometry) protected function getGeomAndArea(array $geometry): array { - // Convert geometry to GeoJSON string - $geojson = json_encode(['type' => 'Feature', 'geometry' => $geometry, 'crs' => ['type' => 'name', 'properties' => ['name' => 'EPSG:4326']]]); - - // Get GeoJSON data in the database - $geom = DB::raw("ST_GeomFromGeoJSON('$geojson')"); - $areaSqDegrees = DB::selectOne("SELECT ST_Area(ST_GeomFromGeoJSON('$geojson')) AS area")->area; - $latitude = DB::selectOne("SELECT ST_Y(ST_Centroid(ST_GeomFromGeoJSON('$geojson'))) AS latitude")->latitude; - // 111320 is the length of one degree of latitude in meters at the equator - $unitLatitude = 111320; - $areaSqMeters = $areaSqDegrees * pow($unitLatitude * cos(deg2rad($latitude)), 2); + $areaCalculationService = app(AreaCalculationService::class); - $areaHectares = $areaSqMeters / 10000; - - return ['geom' => $geom, 'area' => $areaHectares]; + return $areaCalculationService->getGeomAndArea($geometry); } protected function insertSinglePolygon(array $geometry): array @@ -551,24 +541,34 @@ public function insertGeojsonToDBFromContent(string $geojsonData, ?string $entit } catch (Exception $e) { $errorMessage = $e->getMessage(); - $decodedErrorMessage = json_decode($errorMessage, true); + $decodedError = json_decode($errorMessage, true); + if (json_last_error() === JSON_ERROR_NONE) { - return ['error' => $decodedErrorMessage]; + Log::error('Validation error', ['error' => $decodedError]); + + return [ + 'error' => json_encode($decodedError), + ]; } else { - Log::info('Error 
inserting geojson to DB', ['error' => $errorMessage]); + Log::error('Validation error', ['error' => $errorMessage]); - return ['error' => $errorMessage]; + return [ + 'error' => $errorMessage, + ]; } } } - public function processClippedPolygons(array $polygonUuids) + public function processClippedPolygons(array $polygonUuids, $delayed_job_id = null) { $geojson = GeometryHelper::getPolygonsGeojson($polygonUuids); $clippedPolygons = App::make(PythonService::class)->clipPolygons($geojson); $uuids = []; + $delayedJob = DelayedJobProgress::findOrFail($delayed_job_id); + + Log::info('test now selected plygons'); if (isset($clippedPolygons['type']) && $clippedPolygons['type'] === 'FeatureCollection' && isset($clippedPolygons['features'])) { foreach ($clippedPolygons['features'] as $feature) { if (isset($feature['properties']['poly_id'])) { @@ -591,8 +591,13 @@ public function processClippedPolygons(array $polygonUuids) } if (! empty($uuids)) { + $delayedJob->total_content = count($newPolygonUuids); + $delayedJob->save(); foreach ($newPolygonUuids as $polygonUuid) { App::make(PolygonValidationService::class)->runValidationPolygon($polygonUuid); + $delayedJob->increment('processed_content'); + $delayedJob->processMessage(); + $delayedJob->save(); } } diff --git a/app/Services/PythonService.php b/app/Services/PythonService.php index 46146e6cc..d425cf2af 100644 --- a/app/Services/PythonService.php +++ b/app/Services/PythonService.php @@ -85,6 +85,50 @@ public function clipPolygons($geojson): ?array return $result; } + public function IndicatorPolygon($geojson, $indicator_name, $api_key) + { + $inputGeojson = $this->getTemporaryFile('input.geojson'); + $outputGeojson = $this->getTemporaryFile('output.geojson'); + + $writeHandle = fopen($inputGeojson, 'w'); + + try { + fwrite($writeHandle, json_encode($geojson)); + } finally { + fclose($writeHandle); + } + + $process = new Process(['python3', base_path() . '/resources/python/polygon-indicator/app.py', $inputGeojson, $outputGeojson, $indicator_name, $api_key]); + + $stdout = ''; + $stderr = ''; + + $process->run(function ($type, $buffer) use (&$stdout, &$stderr) { + if (Process::ERR === $type) { + $stderr .= $buffer; + } else { + $stdout .= $buffer; + } + }); + + if (! $process->isSuccessful()) { + Log::error('Error running indicator script: ' . $stderr); + + return null; + } + + if (! empty($stderr)) { + Log::warning('Python script warnings/errors: ' . 
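/*
IndicatorPolygon() here shells out to resources/python/polygon-indicator/app.py with an indicator name and an API key, returning the decoded output (or null on failure). The call below mirrors how RunIndicatorAnalysisService later in this PR uses it for the restoration-by-ecoregion slug; treat the surrounding variables as illustrative.

    $geojson = GeometryHelper::getPolygonGeojson($polygonUuid);

    $result = App::make(PythonService::class)
        ->IndicatorPolygon($geojson, 'wwf_terrestrial_ecoregions', getenv('GFW_SECRET_KEY'));

    if ($result !== null) {
        $areaByEcoRegion = $result['area']['wwf_terrestrial_ecoregions'] ?? [];
    }
*/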
$stderr); + } + + $result = json_decode(file_get_contents($outputGeojson), true); + + unlink($inputGeojson); + unlink($outputGeojson); + + return $result; + } + protected function getTemporaryFile(string $prefix): string { return tempnam(sys_get_temp_dir(), $prefix); diff --git a/app/Services/RunIndicatorAnalysisService.php b/app/Services/RunIndicatorAnalysisService.php new file mode 100644 index 000000000..19608be1f --- /dev/null +++ b/app/Services/RunIndicatorAnalysisService.php @@ -0,0 +1,234 @@ + [ + 'sql' => 'SELECT umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year', + 'query_url' => '/dataset/umd_tree_cover_loss/latest/query', + 'indicator' => 'umd_tree_cover_loss', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'treeCoverLossFires' => [ + 'sql' => 'SELECT umd_tree_cover_loss_from_fires__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss_from_fires__year', + 'query_url' => '/dataset/umd_tree_cover_loss_from_fires/latest/query', + 'indicator' => 'umd_tree_cover_loss_from_fires', + 'model' => IndicatorTreeCoverLoss::class, + 'table_name' => 'indicator_output_tree_cover_loss', + ], + 'restorationByEcoRegion' => [ + 'indicator' => 'wwf_terrestrial_ecoregions', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByStrategy' => [ + 'indicator' => 'restoration_practice', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + 'restorationByLandUse' => [ + 'indicator' => 'target_system', + 'model' => IndicatorHectares::class, + 'table_name' => 'indicator_output_hectares', + ], + ]; + + if (! isset($slugMappings[$slug])) { + return response()->json(['message' => 'Slug Not Found'], 400); + } + foreach ($request['uuids'] as $uuid) { + $polygonGeometry = $this->getGeometry($uuid); + $registerExist = DB::table($slugMappings[$slug]['table_name'].' 
as i') + ->where('i.site_polygon_id', $polygonGeometry['site_polygon_id']) + ->where('i.indicator_slug', $slug) + ->where('i.year_of_analysis', Carbon::now()->year) + ->exists(); + + if ($registerExist) { + continue; + } + + if (str_contains($slug, 'restorationBy')) { + $geojson = GeometryHelper::getPolygonGeojson($uuid); + $indicatorRestorationResponse = App::make(PythonService::class)->IndicatorPolygon($geojson, $slugMappings[$slug]['indicator'], getenv('GFW_SECRET_KEY')); + + if ($slug == 'restorationByEcoRegion') { + $value = json_encode($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } else { + $value = $this->formatKeysValues($indicatorRestorationResponse['area'][$slugMappings[$slug]['indicator']]); + } + $data = [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => $value, + ]; + $slugMappings[$slug]['model']::create($data); + + continue; + } + + $response = $this->sendApiRequestIndicator(getenv('GFW_SECRET_KEY'), $slugMappings[$slug]['query_url'], $slugMappings[$slug]['sql'], $polygonGeometry['geo']); + if (str_contains($slug, 'treeCoverLoss')) { + $processedTreeCoverLossValue = $this->processTreeCoverLossValue($response->json()['data'], $slugMappings[$slug]['indicator']); + } + + if ($response->successful()) { + if (str_contains($slug, 'treeCoverLoss')) { + $data = $this->generateTreeCoverLossData($processedTreeCoverLossValue, $slug, $polygonGeometry); + } else { + $data = [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => json_encode($response->json()['data']), + ]; + } + + $slugMappings[$slug]['model']::create($data); + } else { + Log::error('A problem occurred during the analysis of the geometry for the polygon: ' . $uuid); + } + } + + return response()->json(['message' => 'Analysis completed']); + } catch (\Exception $e) { + Log::info($e); + + return response()->json([ + 'message' => 'An error occurred during the analysis', + 'error' => $e->getMessage(), + ], 500); + } + } + + public function getGeometry($polygonUuid) + { + $geojson = GeometryHelper::getMonitoredPolygonsGeojson($polygonUuid); + $geoJsonObject = json_decode($geojson['geometry']->geojsonGeometry, true); + + return [ + 'geo' => [ + 'type' => 'Polygon', + 'coordinates' => $geoJsonObject['coordinates'], + ], + 'site_polygon_id' => $geojson['site_polygon_id'], + ]; + } + + public function sendApiRequestIndicator($secret_key, $query_url, $query_sql, $geometry) + { + $response = Http::withHeaders([ + 'content-type' => 'application/json', + 'x-api-key' => $secret_key, + ])->post('https://data-api.globalforestwatch.org' . $query_url, [ + 'sql' => $query_sql, + 'geometry' => $geometry, + ]); + + if ($response->successful()) { + $gfwDataFile = tempnam(sys_get_temp_dir(), 'gfw_') . '.json'; + $geometryFile = tempnam(sys_get_temp_dir(), 'geom_') . '.json'; + $outputFile = tempnam(sys_get_temp_dir(), 'output_') . '.json'; + + try { + file_put_contents($gfwDataFile, json_encode($response->json())); + file_put_contents($geometryFile, json_encode($geometry)); + + $process = new Process([ + 'python3', + base_path() . '/resources/python/gfw-area-adjustment/app.py', + $gfwDataFile, + $geometryFile, + $outputFile, + ]); + + $process->run(); + + if (! $process->isSuccessful()) { + Log::error('Area adjustment failed: ' . 
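/*
sendApiRequestIndicator() here posts the slug's SQL plus the polygon geometry to the GFW data API and, when the call succeeds, re-runs the numbers through resources/python/gfw-area-adjustment/app.py before handing back a response object. The snippet below only illustrates how the analysis loop above consumes it for a tree-cover-loss query; the SQL and query URL are the ones defined in $slugMappings.

    $response = $this->sendApiRequestIndicator(
        getenv('GFW_SECRET_KEY'),
        '/dataset/umd_tree_cover_loss/latest/query',
        'SELECT umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year',
        $polygonGeometry['geo']
    );

    if ($response->successful()) {
        $rows = $response->json()['data'];   // e.g. [['umd_tree_cover_loss__year' => 2021, 'area__ha' => 0.8], ...]
    }
*/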
$process->getErrorOutput()); + + return $response; + } + + $adjustedData = json_decode(file_get_contents($outputFile), true); + + return new \Illuminate\Http\Client\Response( + new \GuzzleHttp\Psr7\Response( + 200, + ['Content-Type' => 'application/json'], + json_encode($adjustedData) + ) + ); + + } catch (\Exception $e) { + Log::error('Error adjusting areas: ' . $e->getMessage()); + + return $response; + } finally { + @unlink($gfwDataFile); + @unlink($geometryFile); + @unlink($outputFile); + } + } + + return $response; + } + + public function processTreeCoverLossValue($data, $indicator) + { + $processedTreeCoverLossValue = []; + foreach ($data as $i) { + $processedTreeCoverLossValue[$i[$indicator . '__year']] = $i['area__ha']; + } + + return $processedTreeCoverLossValue; + } + + public function generateTreeCoverLossData($processedTreeCoverLossValue, $slug, $polygonGeometry) + { + $yearsOfAnalysis = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]; + $responseData = []; + foreach ($yearsOfAnalysis as $year) { + if (isset($processedTreeCoverLossValue[$year])) { + $responseData[$year] = $processedTreeCoverLossValue[$year]; + } else { + $responseData[$year] = 0.0; + } + } + + return [ + 'indicator_slug' => $slug, + 'site_polygon_id' => $polygonGeometry['site_polygon_id'], + 'year_of_analysis' => Carbon::now()->year, + 'value' => json_encode($responseData), + ]; + } + + public function formatKeysValues($data) + { + $formattedData = []; + foreach ($data as $key => $value) { + $formattedKey = strtolower(str_replace(' ', '-', $key)); + $formattedData[$formattedKey] = $value; + } + + return json_encode($formattedData); + } +} diff --git a/app/Validators/Extensions/Polygons/NotOverlapping.php b/app/Validators/Extensions/Polygons/NotOverlapping.php index d8d039696..de9862bcc 100644 --- a/app/Validators/Extensions/Polygons/NotOverlapping.php +++ b/app/Validators/Extensions/Polygons/NotOverlapping.php @@ -24,19 +24,21 @@ public static function passes($attribute, $value, $parameters, $validator): bool public static function getIntersectionData(string $polygonUuid): array { $sitePolygon = SitePolygon::forPolygonGeometry($polygonUuid)->first(); - if ($sitePolygon === null) { - return [ - 'valid' => false, - 'error' => 'Site polygon not found for the given polygon ID', - 'status' => 404, - ]; + if (! 
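/*
The helpers above reshape the GFW response: processTreeCoverLossValue() keys hectare losses by year, generateTreeCoverLossData() fills the 2015-2024 range with 0.0 for missing years, and formatKeysValues() slugifies map keys. A worked example with illustrative numbers:

    $rows = [
        ['umd_tree_cover_loss__year' => 2019, 'area__ha' => 1.25],
        ['umd_tree_cover_loss__year' => 2021, 'area__ha' => 0.40],
    ];

    $byYear = $this->processTreeCoverLossValue($rows, 'umd_tree_cover_loss');
    // [2019 => 1.25, 2021 => 0.40]

    $data = $this->generateTreeCoverLossData($byYear, 'treeCoverLoss', $polygonGeometry);
    // $data['value'] covers every year 2015-2024, with 0.0 where GFW reported nothing

    $this->formatKeysValues(['Tropical Dry Forest' => 3.2]);
    // '{"tropical-dry-forest":3.2}'
*/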
$sitePolygon) { + return ['valid' => false, 'error' => 'Site polygon not found', 'status' => 404]; } $relatedPolyIds = $sitePolygon->project->sitePolygons() ->where('poly_id', '!=', $polygonUuid) ->pluck('poly_id'); - $intersects = PolygonGeometry::join('site_polygon', 'polygon_geometry.uuid', '=', 'site_polygon.poly_id') + + $bboxFilteredPolyIds = PolygonGeometry::join('site_polygon', 'polygon_geometry.uuid', '=', 'site_polygon.poly_id') ->whereIn('polygon_geometry.uuid', $relatedPolyIds) + ->whereRaw('ST_Intersects(ST_Envelope(polygon_geometry.geom), (SELECT ST_Envelope(geom) FROM polygon_geometry WHERE uuid = ?))', [$polygonUuid]) + ->pluck('polygon_geometry.uuid'); + + $intersects = PolygonGeometry::join('site_polygon', 'polygon_geometry.uuid', '=', 'site_polygon.poly_id') + ->whereIn('polygon_geometry.uuid', $bboxFilteredPolyIds) ->select([ 'polygon_geometry.uuid', 'site_polygon.poly_name', @@ -50,28 +52,27 @@ public static function getIntersectionData(string $polygonUuid): array $mainPolygonArea = PolygonGeometry::where('uuid', $polygonUuid) ->value(DB::raw('ST_Area(geom)')); - $extra_info = []; - foreach ($intersects as $intersect) { - if ($intersect->intersects) { + + $extra_info = $intersects + ->filter(fn ($intersect) => $intersect->intersects) + ->map(function ($intersect) use ($mainPolygonArea) { $minArea = min($mainPolygonArea, $intersect->area); - if ($minArea > 0) { - $percentage = ($intersect->intersection_area / $minArea) * 100; - $percentage = round($percentage, 2); - } else { - $percentage = 100; - } - $extra_info[] = [ + $percentage = $minArea > 0 + ? round(($intersect->intersection_area / $minArea) * 100, 2) + : 100; + + return [ 'poly_uuid' => $intersect->uuid, 'poly_name' => $intersect->poly_name, 'percentage' => $percentage, 'intersectSmaller' => ($intersect->area < $mainPolygonArea), ]; - } - } - + }) + ->values() + ->toArray(); return [ - 'valid' => ! 
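/*
The rewrite above first narrows candidate neighbours with a cheap ST_Envelope bounding-box test (alongside the new spatial index migration in this PR) and only runs the exact ST_Intersects and intersection-area work on that subset; the overlap percentage is the intersection area over the smaller of the two polygon areas. A worked example with illustrative ST_Area outputs:

    $mainPolygonArea = 10.0;
    $neighbourArea = 4.0;
    $intersectionArea = 1.0;

    $percentage = round(($intersectionArea / min($mainPolygonArea, $neighbourArea)) * 100, 2);   // 25.0
    $intersectSmaller = $neighbourArea < $mainPolygonArea;                                        // true
*/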
$intersects->contains('intersects', 1), + 'valid' => empty($extra_info), 'uuid' => $polygonUuid, 'project_id' => $sitePolygon->project_id, 'extra_info' => $extra_info, diff --git a/config/wri/linked-fields.php b/config/wri/linked-fields.php index 56c75c231..096350e57 100644 --- a/config/wri/linked-fields.php +++ b/config/wri/linked-fields.php @@ -151,6 +151,7 @@ 'label' => 'Tree Species', 'resource' => 'App\Http\Resources\V2\TreeSpecies\TreeSpeciesResource', 'input_type' => 'treeSpecies', + 'collection' => 'historical-tree-species' ], 'org-leadership-team' => [ 'property' => 'leadershipTeam', @@ -165,11 +166,11 @@ 'input_type' => 'coreTeamLeaders', ], 'org-tree-species-restored' => [ - 'property' => 'treeSpeciesRestored', + 'property' => 'treeSpeciesHistorical', 'label' => 'Tree species restored in landscape', 'resource' => 'App\Http\Resources\V2\TreeSpecies\TreeSpeciesResource', 'input_type' => 'treeSpecies', - 'collection' => 'restored' + 'collection' => 'historical-tree-species' ], 'org-ownership-stake' => [ 'property' => 'ownershipStake', @@ -277,7 +278,7 @@ 'label' => 'Tree Species', 'resource' => App\Http\Resources\V2\TreeSpecies\TreeSpeciesResource::class, 'input_type' => 'treeSpecies', - 'collection' => 'primary', + 'collection' => 'tree-planted', ], ], ], @@ -364,7 +365,7 @@ 'label' => 'Tree Species', 'resource' => 'App\Http\Resources\V2\TreeSpecies\TreeSpeciesResource', 'input_type' => 'treeSpecies', - 'collection' => 'primary', + 'collection' => 'tree-planted', ], ], ], @@ -459,6 +460,17 @@ 'pro-rep-indirect-beneficiaries-description' => ['property' => 'indirect_beneficiaries_description', 'label' => 'Indirect Beneficiaries Description', 'input_type' => 'long-text'], 'pro-rep-other-restoration-partners-description' => ['property' => 'other_restoration_partners_description', 'label' => 'Other Restoration Partners Description', 'input_type' => 'long-text'], 'pro-rep-total-unique-restoration-partners' => ['property' => 'total_unique_restoration_partners', 'label' => 'Total Unique Restoration Partners', 'input_type' => 'number'], + // New fields TM-1542 + 'pro-rep-business-milestones' => ['property' => 'business_milestones', 'label' => 'Business Milestones', 'input_type' => 'long-text'], + 'pro-rep-ft-other' => ['property' => 'ft_other', 'label' => 'Full Time Other Gender', 'input_type' => 'number'], + 'pro-rep-pt-other' => ['property' => 'pt_other', 'label' => 'Part Time Other Gender', 'input_type' => 'number'], + 'pro-rep-volunteer_other' => ['property' => 'volunteer_other', 'label' => 'Volunteer Other Gender', 'input_type' => 'number'], + 'pro-rep-beneficiaries-other' => ['property' => 'beneficiaries_other', 'label' => 'Other Gender Beneficiary', 'input_type' => 'number'], + 'pro-rep-beneficiaries-training-women' => ['property' => 'beneficiaries_training_women', 'label' => 'Women Trained', 'input_type' => 'number'], + 'pro-rep-beneficiaries-training-men' => ['property' => 'beneficiaries_training_men', 'label' => 'Men Trained', 'input_type' => 'number'], + 'pro-rep-beneficiaries-training-other' => ['property' => 'beneficiaries_training_other', 'label' => 'Other Gender Trained', 'input_type' => 'number'], + 'pro-rep-beneficiaries-training-youth' => ['property' => 'beneficiaries_training_youth', 'label' => 'Youth Trained', 'input_type' => 'number'], + 'pro-rep-beneficiaries-training-non-youth' => ['property' => 'beneficiaries_training_non_youth', 'label' => 'Non Youth Trained', 'input_type' => 'number'], ], 'relations' => [ 'pro-rep-rel-tree-species' => [ @@ -784,6 +796,11 @@ 
'site-rep-other-workdays-description' => ['property' => 'other_workdays_description', 'label' => 'Other Activities Description', 'input_type' => 'long-text'], 'site-rep-num-trees-regenerating' => ['property' => 'num_trees_regenerating', 'label' => 'Estimate Number of Trees Restored via ANR', 'input_type' => 'number'], 'site-rep-regeneration-description' => ['property' => 'regeneration_description', 'label' => 'Description of ANR Activities', 'input_type' => 'long-text'], + // New fields TM-1542 + 'site-rep-pct-survival-to-date' => ['property' => 'pct_survival_to_date', 'label' => 'Survival Rate', 'input_type' => 'number-percentage'], + 'site-rep-survival-calculation' => ['property' => 'survival_calculation', 'label' => 'Description of Survival Rate Calculation', 'input_type' => 'long-text'], + 'site-rep-survival-description' => ['property' => 'survival_description', 'label' => 'Explanation of Survival Rate', 'input_type' => 'long-text'], + 'site-rep-maintenance-activities' => ['property' => 'maintenance_activities', 'label' => 'Maintenance Activities', 'input_type' => 'long-text'], ], 'file-collections' => [ 'site-rep-col-media' => ['property' => 'media', 'label' => 'Media', 'input_type' => 'file', 'multichoice' => true], @@ -796,6 +813,13 @@ 'site-rep-col-site-submission' => ['property' => 'site_submission', 'label' => 'Site submission', 'input_type' => 'file', 'multichoice' => true], ], 'relations' => [ + 'site-rep-rel-replanting-tree-species' => [ + 'property' => 'replantingTreeSpecies', + 'label' => 'Replanting Species + Count', + 'resource' => 'App\Http\Resources\V2\TreeSpecies\TreeSpeciesResource', + 'input_type' => 'treeSpecies', + 'collection' => 'replanting', + ], 'site-rep-rel-tree-species' => [ 'property' => 'treeSpecies', 'label' => 'Tree Species', diff --git a/database/factories/V2/PolygonGeometryFactory.php b/database/factories/V2/PolygonGeometryFactory.php index bea9177a1..5c75a788c 100644 --- a/database/factories/V2/PolygonGeometryFactory.php +++ b/database/factories/V2/PolygonGeometryFactory.php @@ -16,11 +16,14 @@ public function definition() public function geojson(string|array $geojson) { - $geom = DB::raw("ST_GeomFromGeoJSON('$geojson')"); + if (is_array($geojson)) { + $geojson = json_encode($geojson); + } + $geomExpression = DB::raw("ST_GeomFromGeoJSON('$geojson')"); - return $this->state(function (array $attributes) use ($geom) { + return $this->state(function (array $attributes) use ($geomExpression) { return [ - 'geom' => $geom, + 'geom' => $geomExpression, ]; }); } diff --git a/database/factories/V2/Sites/SitePolygonFactory.php b/database/factories/V2/Sites/SitePolygonFactory.php index 1e8415ca5..13d595f0c 100644 --- a/database/factories/V2/Sites/SitePolygonFactory.php +++ b/database/factories/V2/Sites/SitePolygonFactory.php @@ -10,8 +10,24 @@ class SitePolygonFactory extends Factory { public function definition() { + $geojson = [ + 'type' => 'Polygon', + 'coordinates' => [ + [ + [0, 0], + [1, 0], + [1, 1], + [0, 1], + [0, 0], + ], + ], + ]; + return [ - 'poly_id' => PolygonGeometry::factory()->create()->uuid, + 'poly_id' => PolygonGeometry::factory() + ->geojson($geojson) + ->create() + ->uuid, 'site_id' => Site::factory()->create()->uuid, 'calc_area' => $this->faker->numberBetween(2.0, 50.0), ]; diff --git a/database/factories/V2/TreeSpecies/TreeSpeciesFactory.php b/database/factories/V2/TreeSpecies/TreeSpeciesFactory.php index 93a96649d..fb678b5f1 100644 --- a/database/factories/V2/TreeSpecies/TreeSpeciesFactory.php +++ 
b/database/factories/V2/TreeSpecies/TreeSpeciesFactory.php @@ -18,7 +18,6 @@ public function definition() 'speciesable_id' => Project::factory()->create(), 'name' => $this->faker->word(), 'amount' => $this->faker->numberBetween(0, 2147483647), - 'type' => 'tree', 'collection' => $this->faker->randomElement(TreeSpecies::$collections), ]; } diff --git a/database/migrations/2024_11_23_005836_create_tree_species_research.php b/database/migrations/2024_11_23_005836_create_tree_species_research.php new file mode 100644 index 000000000..237e0543d --- /dev/null +++ b/database/migrations/2024_11_23_005836_create_tree_species_research.php @@ -0,0 +1,34 @@ +string('taxon_id')->primary(); + + $table->string('scientific_name'); + $table->string('family'); + $table->string('genus'); + $table->string('specific_epithet'); + + $table->timestamps(); + $table->softDeletes(); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::dropIfExists('tree_species_research'); + } +}; diff --git a/database/migrations/2024_11_25_212923_connect_v2_tree_species_research.php b/database/migrations/2024_11_25_212923_connect_v2_tree_species_research.php new file mode 100644 index 000000000..f422e63d0 --- /dev/null +++ b/database/migrations/2024_11_25_212923_connect_v2_tree_species_research.php @@ -0,0 +1,53 @@ +dropColumn('old_model'); + } + if (Schema::hasColumn('v2_tree_species', 'old_id')) { + $table->dropColumn('old_id'); + } + if (Schema::hasColumn('v2_tree_species', 'type')) { + $table->dropColumn('type'); + } + + $table->string('taxon_id')->nullable(); + $table->index('taxon_id'); + }); + + Schema::table('v2_seedings', function (Blueprint $table): void { + if (Schema::hasColumn('v2_seedings', 'old_model')) { + $table->dropColumn('old_model'); + } + if (Schema::hasColumn('v2_seedings', 'old_id')) { + $table->dropColumn('old_id'); + } + + $table->string('taxon_id')->nullable(); + $table->index('taxon_id'); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('v2_tree_species', function (Blueprint $table): void { + $table->dropColumn('taxon_id'); + }); + Schema::table('v2_seedings', function (Blueprint $table): void { + $table->dropColumn('taxon_id'); + }); + } +}; diff --git a/database/migrations/2024_11_26_154102_add_spatial_index_to_polygon_geometry.php b/database/migrations/2024_11_26_154102_add_spatial_index_to_polygon_geometry.php new file mode 100644 index 000000000..1a2e14336 --- /dev/null +++ b/database/migrations/2024_11_26_154102_add_spatial_index_to_polygon_geometry.php @@ -0,0 +1,30 @@ +unsignedInteger('processed_content')->nullable()->after('payload'); + $table->unsignedInteger('total_content')->nullable()->after('processed_content'); + $table->string('proccess_message')->nullable()->after('total_content'); + }); + } + + /** + * Reverse the migrations. 
+ * + * @return void + */ + public function down() + { + Schema::table('delayed_jobs', function (Blueprint $table) { + $table->dropColumn(['processed_content', 'total_content', 'proccess_message']); + }); + } +}; diff --git a/database/migrations/2024_11_28_200341_add_index_to_site_polygon_poly_id.php b/database/migrations/2024_11_28_200341_add_index_to_site_polygon_poly_id.php new file mode 100644 index 000000000..e072dfafc --- /dev/null +++ b/database/migrations/2024_11_28_200341_add_index_to_site_polygon_poly_id.php @@ -0,0 +1,21 @@ +index('poly_id', 'idx_site_polygon_poly_id'); + }); + } + + public function down(): void + { + Schema::table('site_polygon', function (Blueprint $table) { + $table->dropIndex('idx_site_polygon_poly_id'); + }); + } +}; diff --git a/database/migrations/2024_12_04_151101_add_entity_morph_and_created_by.php b/database/migrations/2024_12_04_151101_add_entity_morph_and_created_by.php new file mode 100644 index 000000000..af98d3991 --- /dev/null +++ b/database/migrations/2024_12_04_151101_add_entity_morph_and_created_by.php @@ -0,0 +1,31 @@ +nullableMorphs('entity'); + $table->string('created_by')->nullable(); + $table->boolean('is_cleared')->default(false); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('delayed_jobs', function (Blueprint $table) { + $table->dropMorphs('entity'); + $table->dropColumn('created_by'); + $table->dropColumn('is_cleared'); + }); + } +}; diff --git a/database/migrations/2024_12_06_182649_add_report_fields_for_project_and_site_report.php b/database/migrations/2024_12_06_182649_add_report_fields_for_project_and_site_report.php new file mode 100644 index 000000000..26499a5e6 --- /dev/null +++ b/database/migrations/2024_12_06_182649_add_report_fields_for_project_and_site_report.php @@ -0,0 +1,61 @@ +longText('business_milestones')->nullable(); + $table->unsignedInteger('ft_other')->nullable(); + $table->unsignedInteger('pt_other')->nullable(); + $table->unsignedInteger('volunteer_other')->nullable(); + $table->unsignedInteger('beneficiaries_other')->nullable(); + $table->unsignedInteger('beneficiaries_training_women')->nullable(); + $table->unsignedInteger('beneficiaries_training_men')->nullable(); + $table->unsignedInteger('beneficiaries_training_other')->nullable(); + $table->unsignedInteger('beneficiaries_training_youth')->nullable(); + $table->unsignedInteger('beneficiaries_training_non_youth')->nullable(); + }); + + Schema::table('v2_site_reports', function (Blueprint $table) { + $table->unsignedInteger('pct_survival_to_date')->nullable(); + $table->longText('survival_calculation')->nullable(); + $table->longText('survival_description')->nullable(); + $table->longText('maintenance_activities')->nullable(); + }); + } + + /** + * Reverse the migrations.
+ */ + public function down(): void + { + // + Schema::table('v2_project_reports', function (Blueprint $table) { + $table->dropColumn('business_milestones'); + $table->dropColumn('ft_other'); + $table->dropColumn('pt_other'); + $table->dropColumn('volunteer_other'); + $table->dropColumn('beneficiaries_other'); + $table->dropColumn('beneficiaries_training_women'); + $table->dropColumn('beneficiaries_training_men'); + $table->dropColumn('beneficiaries_training_other'); + $table->dropColumn('beneficiaries_training_youth'); + $table->dropColumn('beneficiaries_training_non_youth'); + }); + + Schema::table('v2_site_reports', function (Blueprint $table) { + $table->dropColumn('pct_survival_to_date'); + $table->dropColumn('survival_calculation'); + $table->dropColumn('survival_description'); + $table->dropColumn('maintenance_activities'); + }); + } +}; diff --git a/database/migrations/2024_12_06_190555_modify_columns_names_in_delayed_jobs_table.php b/database/migrations/2024_12_06_190555_modify_columns_names_in_delayed_jobs_table.php new file mode 100644 index 000000000..b13e0e2e9 --- /dev/null +++ b/database/migrations/2024_12_06_190555_modify_columns_names_in_delayed_jobs_table.php @@ -0,0 +1,27 @@ +renameColumn('proccess_message', 'progress_message'); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('delayed_jobs', function (Blueprint $table) { + $table->renameColumn('progress_message', 'proccess_message'); + }); + } +}; diff --git a/database/migrations/2024_12_09_212253_change_is_cleared_values.php b/database/migrations/2024_12_09_212253_change_is_cleared_values.php new file mode 100644 index 000000000..972664bb5 --- /dev/null +++ b/database/migrations/2024_12_09_212253_change_is_cleared_values.php @@ -0,0 +1,41 @@ +unsignedBigInteger('created_by')->nullable()->change(); + + $table->dropColumn('is_cleared'); + + $table->boolean('is_acknowledged')->default(true); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('delayed_jobs', function (Blueprint $table) { + if (Schema::hasColumn('delayed_jobs', 'createdBy')) { + $table->string('createdBy')->nullable()->change(); + } + + if (Schema::hasColumn('delayed_jobs', 'is_acknowledged')) { + $table->dropColumn('is_acknowledged'); + } + + if (! Schema::hasColumn('delayed_jobs', 'is_cleared')) { + $table->boolean('is_cleared')->default(false); + } + }); + } +}; diff --git a/database/migrations/2024_12_10_220655_add_infraspecific_epithet_to_tree_species_research.php b/database/migrations/2024_12_10_220655_add_infraspecific_epithet_to_tree_species_research.php new file mode 100644 index 000000000..4d993dcf9 --- /dev/null +++ b/database/migrations/2024_12_10_220655_add_infraspecific_epithet_to_tree_species_research.php @@ -0,0 +1,29 @@ +string('infraspecific_epithet'); + $table->unique('scientific_name'); + }); + } + + /** + * Reverse the migrations. 
+ */ + public function down(): void + { + Schema::table('tree_species_research', function (Blueprint $table) { + $table->dropColumn('infraspecific_epithet'); + $table->dropIndex('tree_species_research_scientific_name_unique'); + }); + } +}; diff --git a/database/migrations/2024_12_16_200032_allow_null_infraspecific_epithet.php b/database/migrations/2024_12_16_200032_allow_null_infraspecific_epithet.php new file mode 100644 index 000000000..162d74f23 --- /dev/null +++ b/database/migrations/2024_12_16_200032_allow_null_infraspecific_epithet.php @@ -0,0 +1,27 @@ +string('infraspecific_epithet')->nullable()->change(); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('tree_species_research', function (Blueprint $table) { + $table->string('infraspecific_epithet')->nullable(false)->change(); + }); + } +}; diff --git a/database/migrations/2024_12_16_212214_allow_null_specitic_epithet.php b/database/migrations/2024_12_16_212214_allow_null_specitic_epithet.php new file mode 100644 index 000000000..e950418b3 --- /dev/null +++ b/database/migrations/2024_12_16_212214_allow_null_specitic_epithet.php @@ -0,0 +1,27 @@ +string('specific_epithet')->nullable()->change(); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('tree_species_research', function (Blueprint $table) { + $table->string('specific_epithet')->nullable(false)->change(); + }); + } +}; diff --git a/database/migrations/2024_12_17_150134_add_name_to_jobs.php b/database/migrations/2024_12_17_150134_add_name_to_jobs.php new file mode 100644 index 000000000..edc2c5c17 --- /dev/null +++ b/database/migrations/2024_12_17_150134_add_name_to_jobs.php @@ -0,0 +1,27 @@ +string('name')->nullable()->after('id'); + }); + } + + /** + * Reverse the migrations. + */ + public function down(): void + { + Schema::table('delayed_jobs', function (Blueprint $table) { + $table->dropColumn('name'); + }); + } +}; diff --git a/database/migrations/2024_12_20_153701_add_metadata_and_remove_entity_columns.php b/database/migrations/2024_12_20_153701_add_metadata_and_remove_entity_columns.php new file mode 100644 index 000000000..22ab00098 --- /dev/null +++ b/database/migrations/2024_12_20_153701_add_metadata_and_remove_entity_columns.php @@ -0,0 +1,31 @@ +json('metadata')->nullable()->after('payload')->comment('Stores additional information for the delayed job.'); + + $table->dropColumn(['entity_id', 'entity_type']); + }); + } + + /** + * Reverse the migrations. 
+ */ + public function down(): void + { + Schema::table('delayed_jobs', function (Blueprint $table) { + $table->dropColumn('metadata'); + $table->unsignedBigInteger('entity_id')->nullable()->after('name'); + $table->string('entity_type')->nullable()->after('entityId'); + }); + } +}; diff --git a/database/seeders/LocalizationKeysTableSeeder.php b/database/seeders/LocalizationKeysTableSeeder.php index 1676b6c68..8f1f68c3d 100644 --- a/database/seeders/LocalizationKeysTableSeeder.php +++ b/database/seeders/LocalizationKeysTableSeeder.php @@ -179,6 +179,12 @@ public function run(): void 'TerraMatch Support'); $this->createLocalizationKey('send-login-details.cta', 'Set Password'); + // polygon-operations-complete + $this->createLocalizationKey('polygon-validation.subject', 'Your TerraMatch Polygon {operation} is Complete'); + $this->createLocalizationKey('polygon-validation.title', 'YOUR POLYGON {operationUpper} IS COMPLETE'); + $this->createLocalizationKey('polygon-validation.body', 'Your {operation} for Site {siteName} completed at {completedTime} GMT.'); + $this->createLocalizationKey('polygon-validation.cta', 'OPEN SITE'); + // satellite-map-created $this->createLocalizationKey('satellite-map-created.subject', 'Remote Sensing Map Received'); $this->createLocalizationKey('satellite-map-created.title', 'Remote Sensing Map Received'); diff --git a/docker/php.Dockerfile b/docker/php.Dockerfile index 56eaa8143..46ab3fab7 100644 --- a/docker/php.Dockerfile +++ b/docker/php.Dockerfile @@ -1,7 +1,11 @@ ## PHP FROM php:8.2-apache AS php -RUN apt-get update -RUN apt-get install -y \ + +# Set GDAL version +ENV GDAL_VERSION=3.4.3 + +# Install basic dependencies +RUN apt-get update && apt-get install -y \ libxml2-dev \ libonig-dev \ libpng-dev \ @@ -10,10 +14,36 @@ RUN apt-get install -y \ libmagickwand-dev \ mariadb-client \ libzip-dev \ - gdal-bin \ - libgdal-dev \ python3.11-venv \ - exiftool + python3.11-dev \ + exiftool \ + build-essential \ + wget \ + cmake \ + sqlite3 \ + libsqlite3-dev \ + libspatialite-dev \ + libpq-dev \ + libcurl4-gnutls-dev \ + libproj-dev \ + libgeos-dev \ + && rm -rf /var/lib/apt/lists/* + +# Install GDAL 3.4.3 from source +RUN wget https://github.com/OSGeo/gdal/releases/download/v${GDAL_VERSION}/gdal-${GDAL_VERSION}.tar.gz \ + && tar xzf gdal-${GDAL_VERSION}.tar.gz \ + && cd gdal-${GDAL_VERSION} \ + && ./configure \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + && cd .. 
\ + && rm -rf gdal-${GDAL_VERSION} gdal-${GDAL_VERSION}.tar.gz + +# Set GDAL environment variables +ENV CPLUS_INCLUDE_PATH=/usr/include/gdal +ENV C_INCLUDE_PATH=/usr/include/gdal +ENV LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH RUN docker-php-ext-configure gd --with-freetype --with-jpeg RUN docker-php-ext-install \ @@ -40,12 +70,16 @@ RUN a2enmod rewrite COPY docker/000-default.conf /etc/apache2/sites-available/000-default.conf COPY docker/php.ini /usr/local/etc/php/php.ini -## Python -RUN python3 -m venv /opt/python -COPY resources/python/polygon-voronoi/requirements.txt /root/voronoi-requirements.txt +# Python virtual environment setup +RUN python3.11 -m venv /opt/python ENV PATH="/opt/python/bin:${PATH}" + +# Install Python dependencies in the correct order +COPY resources/python/polygon-voronoi/requirements.txt /root/voronoi-requirements.txt +RUN pip3 install --upgrade pip wheel setuptools RUN pip3 install -r /root/voronoi-requirements.txt + RUN chmod -R a+rx /opt/python USER www-data ENV PATH="/opt/python/bin:${PATH}" -USER root +USER root \ No newline at end of file diff --git a/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml b/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml new file mode 100644 index 000000000..47807cbc0 --- /dev/null +++ b/openapi-src/V2/definitions/IndicatorPolygonsStatus.yml @@ -0,0 +1,12 @@ + +type: object +properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer + \ No newline at end of file diff --git a/openapi-src/V2/definitions/IndicatorPost.yml b/openapi-src/V2/definitions/IndicatorPost.yml new file mode 100644 index 000000000..630ba2b58 --- /dev/null +++ b/openapi-src/V2/definitions/IndicatorPost.yml @@ -0,0 +1,6 @@ +type: object +properties: + uuids: + type: array + items: + type: string \ No newline at end of file diff --git a/openapi-src/V2/definitions/Indicators.yml b/openapi-src/V2/definitions/Indicators.yml new file mode 100644 index 000000000..a880fc31f --- /dev/null +++ b/openapi-src/V2/definitions/Indicators.yml @@ -0,0 +1,45 @@ + +type: object +properties: + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: + type: interger + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + 2015: + type: number + 2016: + type: number + 2017: + type: number + 2018: + type: number + 2019: + type: number + 2020: + type: number + 2021: + type: number + 2022: + type: number + 2023: + type: number + 2024: + type: number \ No newline at end of file diff --git a/openapi-src/V2/definitions/_index.yml b/openapi-src/V2/definitions/_index.yml index 3e84461b9..e67305c62 100644 --- a/openapi-src/V2/definitions/_index.yml +++ b/openapi-src/V2/definitions/_index.yml @@ -396,3 +396,9 @@ UserCreateComplete: $ref: './UserCreateComplete.yml' V2AdminProjectUpdate: $ref: './V2AdminProjectUpdate.yml' +IndicatorPost: + $ref: './IndicatorPost.yml' +Indicators: + $ref: './Indicators.yml' +IndicatorPolygonsStatus: + $ref: './IndicatorPolygonsStatus.yml' diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml new file mode 100644 index 000000000..2b956dfb5 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-entity-slug-export.yml @@ -0,0 +1,32 @@ +operationId: 
get-v2-indicators-entity-slug-export +summary: Export CSV document of indicators for a specific entity and slug +tags: + - Export + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +produces: + - text/plain +responses: + '200': + description: OK + schema: + type: file + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml new file mode 100644 index 000000000..ea1a12d4c --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml @@ -0,0 +1,31 @@ +operationId: get-v2-indicators-polygon-indicator-analysis +summary: get all indicators from the polygon indicator analysis +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +responses: + '200': + description: OK + schema: + type: array + items: + $ref: '../../definitions/_index.yml#/Indicators' + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml new file mode 100644 index 000000000..636048775 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-status.yml @@ -0,0 +1,26 @@ +operationId: get-v2-indicators-polygon-indicator-status +summary: get all indicators from the polygon indicator status +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true +responses: + '200': + description: OK + schema: + type: array + items: + $ref: '../../definitions/_index.yml#/IndicatorPolygonsStatus' + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml new file mode 100644 index 000000000..7fab4e236 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/get-v2-indicators-polygon-indicator-verify.yml @@ -0,0 +1,29 @@ +operationId: get-v2-indicators-polygon-indicator-verify +summary: get all indicators from the polygon indicator verify +tags: + - V2 Indicators +parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. 
+ required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true +responses: + '200': + description: OK + schema: + type: array + '400': + description: Bad request + '500': + description: Internal server error \ No newline at end of file diff --git a/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml b/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml new file mode 100644 index 000000000..c28391ca2 --- /dev/null +++ b/openapi-src/V2/paths/MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml @@ -0,0 +1,26 @@ +operationId: post-v2-indicators-polygon-indicator-analysis +summary: Create a new indicator in the polygon indicator analysis +tags: + - V2 Indicators +parameters: + - in: path + name: slug + type: string + description: Optional. Filter counts and metrics by slug. + required: true + - name: body + in: body + required: true + schema: + $ref: '../../definitions/_index.yml#/IndicatorPost' +responses: + '201': + description: Created + schema: + type: object + properties: + uuids: + type: array + items: + type: string + description: A list of processed polygons diff --git a/openapi-src/V2/paths/Terrafund/post-v2-terrafund-clip-polygons-polygons.yml b/openapi-src/V2/paths/Terrafund/post-v2-terrafund-clip-polygons-polygons.yml index 09424689e..500460d55 100644 --- a/openapi-src/V2/paths/Terrafund/post-v2-terrafund-clip-polygons-polygons.yml +++ b/openapi-src/V2/paths/Terrafund/post-v2-terrafund-clip-polygons-polygons.yml @@ -12,6 +12,12 @@ parameters: items: type: string description: UUIDs of the polygon geometries to be fixed + entity_type: + type: string + description: The entity type of the polygon geometries to be fixed + entity_uuid: + type: string + description: The entity ID of the polygon geometries to be fixed responses: '200': description: A list of processed and unprocessed polygons diff --git a/openapi-src/V2/paths/Terrafund/post-v2-terrafund-validation-polygons.yml b/openapi-src/V2/paths/Terrafund/post-v2-terrafund-validation-polygons.yml index c63331051..f4582f40f 100644 --- a/openapi-src/V2/paths/Terrafund/post-v2-terrafund-validation-polygons.yml +++ b/openapi-src/V2/paths/Terrafund/post-v2-terrafund-validation-polygons.yml @@ -11,6 +11,10 @@ parameters: items: type: string description: UUIDs of the polygon geometries to be validated + entity_uuid: + type: string + entity_type: + type: string responses: '200': description: Successful response diff --git a/openapi-src/V2/paths/_index.yml b/openapi-src/V2/paths/_index.yml index fa643036d..e24924bf6 100644 --- a/openapi-src/V2/paths/_index.yml +++ b/openapi-src/V2/paths/_index.yml @@ -2821,3 +2821,18 @@ /v2/terrafund/validation/polygons: post: $ref: './Terrafund/post-v2-terrafund-validation-polygons.yml' +/v2/indicators/{entity}/{uuid}/{slug}: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-analysis.yml' +/v2/indicators/{slug}: + post: + $ref: './MonitoredData/post-v2-indicators-polygon-indicator-analysis.yml' +/v2/indicators/{entity}/{uuid}: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-status.yml' +/v2/indicators/{entity}/{uuid}/{slug}/verify: + get: + $ref: './MonitoredData/get-v2-indicators-polygon-indicator-verify.yml' +/v2/indicators/{entity}/{uuid}/{slug}/export: + get: + $ref: './MonitoredData/get-v2-indicators-entity-slug-export.yml' diff --git a/resources/docs/swagger-v2.yml b/resources/docs/swagger-v2.yml index 6ffff9c2a..b1773a994 100644 
--- a/resources/docs/swagger-v2.yml +++ b/resources/docs/swagger-v2.yml @@ -44396,6 +44396,69 @@ definitions: properties: is_test: type: boolean + IndicatorPost: + type: object + properties: + uuids: + type: array + items: + type: string + Indicators: + type: object + properties: + '2015': + type: number + '2016': + type: number + '2017': + type: number + '2018': + type: number + '2019': + type: number + '2020': + type: number + '2021': + type: number + '2022': + type: number + '2023': + type: number + '2024': + type: number + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: + type: integer + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + IndicatorPolygonsStatus: + type: object + properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer paths: '/v2/tree-species/{entity}/{UUID}': get: @@ -99171,6 +99234,12 @@ paths: items: type: string description: UUIDs of the polygon geometries to be fixed + entity_type: + type: string + description: The entity type of the polygon geometries to be fixed + entity_uuid: + type: string + description: The entity ID of the polygon geometries to be fixed responses: '200': description: A list of processed and unprocessed polygons @@ -99220,6 +99289,10 @@ paths: items: type: string description: UUIDs of the polygon geometries to be validated + entity_uuid: + type: string + entity_type: + type: string responses: '200': description: Successful response @@ -99229,3 +99302,214 @@ paths: message: type: string description: A message indicating the completion of validation for all site polygons. + '/v2/indicators/{entity}/{uuid}/{slug}': + get: + operationId: get-v2-indicators-polygon-indicator-analysis + summary: get all indicators from the polygon indicator analysis + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true + responses: + '200': + description: OK + schema: + type: array + items: + type: object + properties: + '2015': + type: number + '2016': + type: number + '2017': + type: number + '2018': + type: number + '2019': + type: number + '2020': + type: number + '2021': + type: number + '2022': + type: number + '2023': + type: number + '2024': + type: number + id: + type: integer + poly_name: + type: string + status: + type: string + plantstart: + type: string + format: date + site_name: + type: string + size: + type: integer + created_at: + type: string + format: date + indicator_slug: + type: string + year_of_analysis: + type: integer + value: + type: object + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{slug}': + post: + operationId: post-v2-indicators-polygon-indicator-analysis + summary: Create a new indicator in the polygon indicator analysis + tags: + - V2 Indicators + parameters: + - in: path + name: slug + type: string + description: Optional. Filter counts and metrics by slug. 
+ required: true + - name: body + in: body + required: true + schema: + type: object + properties: + uuids: + type: array + items: + type: string + responses: + '201': + description: Created + schema: + type: object + properties: + uuids: + type: array + items: + type: string + description: A list of processed polygons + '/v2/indicators/{entity}/{uuid}': + get: + operationId: get-v2-indicators-polygon-indicator-status + summary: get all indicators from the polygon indicator status + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + responses: + '200': + description: OK + schema: + type: array + items: + type: object + properties: + draft: + type: integer + submitted: + type: integer + approved: + type: integer + needs-more-information: + type: integer + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{entity}/{uuid}/{slug}/verify': + get: + operationId: get-v2-indicators-polygon-indicator-verify + summary: get all indicators from the polygon indicator verify + tags: + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. + required: true + responses: + '200': + description: OK + schema: + type: array + '400': + description: Bad request + '500': + description: Internal server error + '/v2/indicators/{entity}/{uuid}/{slug}/export': + get: + operationId: get-v2-indicators-entity-slug-export + summary: Export CSV document of indicators for a specific entity and slug + tags: + - Export + - V2 Indicators + parameters: + - in: path + name: entity + type: string + description: Filter counts and metrics by entity. + required: true + - in: path + name: uuid + type: string + description: Filter counts and metrics by entity uuid. + required: true + - in: path + name: slug + type: string + description: Filter counts and metrics by slug. 
+ required: true + produces: + - text/plain + responses: + '200': + description: OK + schema: + type: file + '400': + description: Bad request + '500': + description: Internal server error diff --git a/resources/python/gfw-area-adjustment/app.py b/resources/python/gfw-area-adjustment/app.py new file mode 100644 index 000000000..b586a60d5 --- /dev/null +++ b/resources/python/gfw-area-adjustment/app.py @@ -0,0 +1,101 @@ +import sys +import json +import geopandas as gpd +from shapely.geometry import shape, Polygon +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +def calculate_correction_factor(reference_geometry): + """ + Calculate the correction factor by comparing WGS84 and ESRI:54009 areas + All areas are converted to hectares (1 hectare = 10000 square meters) + """ + try: + gdf_wgs84 = gpd.GeoDataFrame(geometry=[reference_geometry], crs="EPSG:4326") + + gdf_projected = gdf_wgs84.to_crs('ESRI:54009') + area_projected_ha = gdf_projected.area[0] / 10000 + + if area_projected_ha < 0.0001: + logger.warning("Area is very small, using correction factor of 1") + return 1.0 + + area_geodesic_ha = gdf_wgs84.geometry.to_crs('+proj=cea').area[0] / 10000 + + correction_factor = area_projected_ha / area_geodesic_ha if area_geodesic_ha > 0 else 1 + + if correction_factor > 10 or correction_factor < 0.1: + logger.warning(f"Extreme correction factor detected: {correction_factor}. Using 1.0 instead.") + return 1.0 + + return correction_factor + + except Exception as e: + logger.error(f"Error calculating correction factor: {str(e)}") + raise + +def adjust_gfw_data(gfw_data, reference_geometry): + """ + Adjust GFW area values using the correction factor from reference geometry + """ + try: + if isinstance(reference_geometry, str): + reference_geometry = json.loads(reference_geometry) + + if 'type' in reference_geometry and reference_geometry['type'] == 'Feature': + geometry = shape(reference_geometry['geometry']) + elif 'type' in reference_geometry and reference_geometry['type'] == 'FeatureCollection': + geometry = shape(reference_geometry['features'][0]['geometry']) + else: + geometry = shape(reference_geometry) + + correction_factor = calculate_correction_factor(geometry) + + if isinstance(gfw_data, str): + gfw_data = json.loads(gfw_data) + + adjusted_data = { + "data": [], + "status": gfw_data.get("status", "success") + } + + for entry in gfw_data.get("data", []): + adjusted_entry = entry.copy() + if entry["area__ha"] > 0.0001: + adjusted_entry["area__ha"] = round(entry["area__ha"] * correction_factor, 5) + adjusted_data["data"].append(adjusted_entry) + + return adjusted_data + + except Exception as e: + logger.error(f"Error adjusting GFW data: {str(e)}") + raise + +def main(): + try: + if len(sys.argv) != 4: + raise ValueError("Script requires GFW data, reference geometry, and output file paths as arguments") + + gfw_data_file = sys.argv[1] + reference_geometry_file = sys.argv[2] + output_file = sys.argv[3] + + with open(gfw_data_file, 'r') as f: + gfw_data = json.load(f) + + with open(reference_geometry_file, 'r') as f: + reference_geometry = json.load(f) + + result = adjust_gfw_data(gfw_data, reference_geometry) + + with open(output_file, 'w') as f: + json.dump(result, f) + + except Exception as e: + logger.error(f"Error processing data: {str(e)}") + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/resources/python/polygon-area/app.py b/resources/python/polygon-area/app.py new file mode 100644 index 
000000000..4af6e65b8 --- /dev/null +++ b/resources/python/polygon-area/app.py @@ -0,0 +1,60 @@ +import sys +import json +import geopandas as gpd +from shapely.geometry import shape +import logging + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +def calculate_area(geometry): + """ + Calculate area in hectares for a given geometry + """ + try: + gdf = gpd.GeoDataFrame(geometry=[geometry], crs="EPSG:4326") + gdf_projected = gdf.to_crs('ESRI:54009') + + area_hectares = gdf_projected.geometry.area[0] / 10000 + + return area_hectares + except Exception as e: + logger.error(f"Error calculating area: {str(e)}") + raise + +def main(): + try: + if len(sys.argv) != 3: + raise ValueError("Script requires input and output file paths as arguments") + + input_file = sys.argv[1] + output_file = sys.argv[2] + + with open(input_file, 'r') as f: + geojson_data = json.load(f) + + if 'type' in geojson_data and geojson_data['type'] == 'Feature': + geometry = shape(geojson_data['geometry']) + elif 'type' in geojson_data and geojson_data['type'] == 'FeatureCollection': + geometry = shape(geojson_data['features'][0]['geometry']) + else: + geometry = shape(geojson_data) + + area = calculate_area(geometry) + + result = { + 'area_hectares': area, + 'original_geometry': geojson_data + } + + with open(output_file, 'w') as f: + json.dump(result, f) + + print(area) + + except Exception as e: + logger.error(f"Error processing geometry: {str(e)}") + sys.exit(1) + +if __name__ == "__main__": + main() diff --git a/resources/python/polygon-indicator/app.py b/resources/python/polygon-indicator/app.py new file mode 100644 index 000000000..e0bf3b8ec --- /dev/null +++ b/resources/python/polygon-indicator/app.py @@ -0,0 +1,82 @@ +import json +import sys + +import requests +import yaml +import gfw_api as gfw +import tree_cover_indicator as ttc +from fiona.model import Geometry, Feature, Properties + + +def generate_indicator(feature, indicator_name, params, session): + if params["indicators"][indicator_name]["data_source"] == "gfw": + supported_layers = gfw.get_supported_gfw_layer() + if indicator_name not in supported_layers.keys(): + raise gfw.UnsupportedGFWLayer(indicator_name) + polygon_gfw_data = gfw.get_gfw_data( + feature.geometry, session, indicator_name, params + ) + if params["indicators"][indicator_name]["area_source"] == "gfw": + key_label = params["indicators"][indicator_name]["key_label"] + key_value = params["indicators"][indicator_name]["key_value"] + polygon_data = { + row[key_label]: row[key_value] for row in polygon_gfw_data["data"] + } + else: + key_label = params["indicators"][indicator_name]["key_label"] + polygon_data = { + row[key_label]: ttc.calculate_area(feature) + for row in polygon_gfw_data["data"] + } + elif params["indicators"][indicator_name]["data_source"] == "polygon": + polygon_data = { + feature.properties[ + params["indicators"][indicator_name]["polygon_key"] + ]: ttc.calculate_area(feature) + } + + if params["indicators"][indicator_name]["zero_fill"]: + values = {} + for year in range( + params["indicators"][indicator_name]["start_year"], + params["indicators"][indicator_name]["end_year"] + 1, + ): + values[year] = polygon_data.get(year, 0.0) + indicator_results = {indicator_name: values} + else: + indicator_results = {indicator_name: polygon_data} + return indicator_results + +def main(): + input_geojson = sys.argv[1] + output_geojson = sys.argv[2] + indicator_name = sys.argv[3] + api_key = sys.argv[4] + + with open(input_geojson, "r") as f: + geojson_data = 
json.load(f) + + config_path = "resources/python/polygon-indicator/config.yaml" + with open(config_path) as conf_file: + config = yaml.safe_load(conf_file) + + with requests.Session() as session: + session.headers = { + "content-type": "application/json", + "x-api-key": f"{api_key}", + } + + fiona_feature = Feature( + geometry=Geometry( + type=geojson_data["geometry"]["type"], + coordinates=geojson_data["geometry"]["coordinates"] + ), + properties=Properties(**geojson_data["properties"]) + ) + result = generate_indicator(fiona_feature, indicator_name, config, session) + + with open(output_geojson, 'w') as f: + json.dump({'area': result}, f) + +if __name__ == "__main__": + main() diff --git a/resources/python/polygon-indicator/config.yaml b/resources/python/polygon-indicator/config.yaml new file mode 100644 index 000000000..e75b39874 --- /dev/null +++ b/resources/python/polygon-indicator/config.yaml @@ -0,0 +1,164 @@ +base: + config_path: "secrets.yaml" + temp_path: "temp/" + log_level: DEBUG + data_path: "data/" + +polygons: + intervention_key: IntervType + area_key: Area_ha + +gfw_api: + base_url: https://data-api.globalforestwatch.org + +s3: + tile_bucket: tof-output + tile_year: 2020 + tile_prefix: /tiles/ + lookup_bucket: tof-output + lookup_prefix: tile-databases + lookup_filelist: + - "tile-databases/america-africa-europe-corrected.csv" + - "tile-databases/asia.csv" + +gdal: + outfile: temp/tiles/mosaic + outfile_suffix: 2020 + +indicators: + umd_tree_cover_loss: + data_source: gfw + start_year: 2018 + end_year: 2024 + query_url: /dataset/umd_tree_cover_loss/latest/query + sql: "SELECT umd_tree_cover_loss__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss__year" + key_label: umd_tree_cover_loss__year + key_value: area__ha + area_source: gfw + zero_fill: True + umd_tree_cover_loss_from_fires: + data_source: gfw + start_year: 2018 + end_year: 2024 + query_url: /dataset/umd_tree_cover_loss_from_fires/latest/query + sql: "SELECT umd_tree_cover_loss_from_fires__year, SUM(area__ha) FROM results GROUP BY umd_tree_cover_loss_from_fires__year" + key_label: umd_tree_cover_loss_from_fires__year + key_value: area__ha + area_source: gfw + zero_fill: True + wwf_terrestrial_ecoregions: + data_source: gfw + query_url: /dataset/wwf_terrestrial_ecoregions/latest/query + sql: "SELECT eco_name FROM results" + key_label: eco_name + area_source: polygon + zero_fill: False + wri_tropical_tree_cover: + data_source: tiles + data_year: 2020 + query_url: /dataset/wri_tropical_tree_cover/latest/query + sql: "SELECT AVG(wri_tropical_tree_cover__percent) FROM results" + key_label: wri_tropical_tree_cover__percent + download_tile_lookup: False + small_sites_area_thresh: 0.5 + shift_value: 0.0001081081 + lulc: + input_path: C3S-LC-L4-LCCS-Map-300m-P1Y-2020-v2.1.1.tif + temp_output_prefix: temp-lulc- + target_crs: 4326 + default_initial_crs: "EPSG:4326" + reproj_crs: 3857 + buffer_size: 500 + gdf_project_key: "Project" + zonal_stats: "count majority" + small_sites_error: + zeroToNine: + min: 0.0 + max: 10.0 + error: 3.6386 + tenToThirtyNine: + min: 10.0 + max: 40.0 + error: 16.68 + fortyTo1Hundred: + min: 40.0 + max: 100.0 + error: 23.468 + land_cover_data: + source: geotiff + query_url: /dataset/esa_land_cover_2015/latest/query + sql: SELECT esa_land_cover_2015__class, esa_land_cover_2015__uint16, Count(*) FROM results GROUP BY esa_land_cover_2015__uint16, esa_land_cover_2015__class ORDER BY count DESC LIMIT 1 + key_label: count + admin_boundary_data: + source: gfw + query_url: 
/dataset/wri_tropical_tree_cover/latest/query + sql: SELECT gadm_administrative_boundaries__adm0, Count(*) FROM results GROUP BY gadm_administrative_boundaries__adm0 ORDER BY count DESC LIMIT 1 + key_label: gadm_administrative_boundaries__adm0 + lulc_ci_data: + source: csv + path: ttc_supporting_data/lulc_ci.csv + subregion_ci_data: + source: geojson + path: ttc_supporting_data/subregions_conf.geojson + esa_lulc_conversions: + urban: + - 190 + grassland: + - 130 + cropland: + - 10 + - 11 + - 12 + - 20 + agriculture: + - 10 + - 11 + - 12 + - 20 + forest: + - 50 + - 60 + - 61 + - 62 + - 70 + - 71 + - 80 + - 81 + - 82 + - 90 + - 160 + - 170 + mosaic vegetation: + - 30 + - 40 + - 100 + - 110 + mosaic: + - 30 + - 40 + - 100 + - 110 + shrub/scrub/otherland: + - 120 + - 121 + - 122 + - 140 + - 150 + - 151 + - 152 + - 153 + - 200 + - 201 + - 202 + - 0 + - 220 + target_system: + data_source: polygon + area_source: polygon + polygon_key: target_sys + zero_fill: False + restoration_practice: + data_source: polygon + area_source: polygon + polygon_key: practice + zero_fill: False diff --git a/resources/python/polygon-indicator/gfw_api.py b/resources/python/polygon-indicator/gfw_api.py new file mode 100644 index 000000000..7b0ac8a38 --- /dev/null +++ b/resources/python/polygon-indicator/gfw_api.py @@ -0,0 +1,26 @@ +import shapely + +def get_gfw_data(polygon, session, indicator_type, params): + url = f'{params["gfw_api"]["base_url"]}{params["indicators"][indicator_type]["query_url"]}' + sql = params["indicators"][indicator_type]["sql"] + payload = {"sql": sql, "geometry": shapely.geometry.mapping(polygon)} + with session.post(url, json=payload) as response: + if not response.ok: + raise RuntimeError(f"{response.status_code}") + response_data = response.json() + return response_data + + +class UnsupportedGFWLayer(Exception): + def __init__(self, estimator_name): + self.msg = f"Unsupported GFW layer {estimator_name}" + super().__init__(self.msg) + + +def get_supported_gfw_layer(): + return { + "umd_tree_cover_loss": "umd_tree_cover_loss", + "umd_tree_cover_loss_from_fires": "umd_tree_cover_loss_from_fires", + "wwf_terrestrial_ecoregions": "wwf_terrestrial_ecoregions", + "wri_tropical_tree_cover": "wri_tropical_tree_cover", + } diff --git a/resources/python/polygon-indicator/requirements.txt b/resources/python/polygon-indicator/requirements.txt new file mode 100755 index 000000000..342e081ff --- /dev/null +++ b/resources/python/polygon-indicator/requirements.txt @@ -0,0 +1,11 @@ +shapely==2.0.6 +geopandas==1.0.1 +pandas==2.1.3 +requests==2.32.3 +fiona==1.10.1 +exactextract==0.2.0 +rasterio==1.4.1 +gdal==3.4.1 +pyyaml==6.0.2 +rasterstats==0.20.0 +boto3==1.35.43 \ No newline at end of file diff --git a/resources/python/polygon-indicator/tree_cover_indicator.py b/resources/python/polygon-indicator/tree_cover_indicator.py new file mode 100644 index 000000000..7ec3b08f6 --- /dev/null +++ b/resources/python/polygon-indicator/tree_cover_indicator.py @@ -0,0 +1,162 @@ +import shapely +import pandas as pd +# from rasterstats import zonal_stats +import geopandas as gpd +import os +from shapely.geometry import Polygon, shape, box +from shapely.ops import transform +from exactextract import exact_extract +import rasterio +from pathlib import Path + +import ttc_s3_utils as s3_utils +import ttc_error_utils as error + + +def get_gfw_data(geometry, session, dataset, params): + url = f'{params["gfw_api"]["base_url"]}{params["indicators"]["wri_tropical_tree_cover"][dataset]["query_url"]}' + sql = 
params["indicators"]["wri_tropical_tree_cover"][dataset]["sql"] + payload = {"sql": sql, "geometry": shapely.geometry.mapping(geometry)} + response = session.post(url, json=payload) + response.raise_for_status() + return response.json() + + +def calculate_area(feature): + geometry = shape(feature["geometry"]) + gdf = gpd.GeoDataFrame(geometry=[geometry], crs="EPSG:4326") + gdf_projected = gdf.to_crs('ESRI:54009') + + area_ha = gdf_projected.geometry.area[0] / 10000 + return area_ha + + +def calculate_tree_cover(feature, project_name, params, logger): + try: + logger.debug("Calculating area...") + area_ha = calculate_area(feature) + logger.debug(f"Area calculated successfully: {area_ha}") + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + temp_path = Path("temp.geojson") + temp_path.write_text(shapely.to_geojson(shape(feature["geometry"]))) + with rasterio.open(mosaic_file) as mosaic: + # Use exactextract to compute the mean + result = exact_extract( + mosaic, + temp_path, + "mean(min_coverage_frac=0.05, coverage_weight=fraction)", + ) + expected_ttc = result[0]["properties"]["mean"] + + logger.debug(f"Expected tree cover calculated successfully: {expected_ttc}") + + logger.debug("Calculating shift error...") + shift_error = error.calculate_shift_error( + feature, expected_ttc, project_name, params, logger + ) + logger.debug(f"Shift error calculated successfully: {shift_error}") + + logger.debug("Calculating LULC error...") + + lulc_lower_error, lulc_upper_error = error.calculate_lulc_error( + feature, project_name, expected_ttc, params, logger + ) + if lulc_lower_error == float("inf") or lulc_lower_error == float("-inf"): + lulc_lower_error = 0 + if lulc_upper_error == float("inf") or lulc_upper_error == float("-inf"): + lulc_upper_error = 0 + + logger.debug( + f"LULC error calculated successfully: {lulc_lower_error}, {lulc_upper_error}" + ) + + logger.debug("Calculating subregion error...") + subregion, subregion_lower_error, subregion_upper_error = ( + error.calculate_subregion_error(feature, expected_ttc, params, logger) + ) + + logger.debug( + f"Subregion error calculated successfully: {subregion}, {subregion_lower_error}, {subregion_upper_error}" + ) + + logger.debug("Calculating small site error...") + small_site_error = error.get_small_site_error_value( + area_ha, expected_ttc, params, logger + ) + logger.debug(f"Small site error: {small_site_error}") + logger.debug(f"Small site error calculated successfully: {small_site_error}") + + logger.debug("Integrating errors...") + plus, minus, plus_minus_average = error.combine_errors( + expected_ttc, + shift_error, + small_site_error, + lulc_lower_error, + lulc_upper_error, + subregion_lower_error, + subregion_upper_error, + ) + + tree_cover_result = { + "TTC": expected_ttc, + "error_plus": plus, + "error_minus": minus, + "plus_minus_average": plus_minus_average, + "small_site_error": small_site_error, + "lulc_lower_error": lulc_lower_error, + "lulc_upper_error": lulc_upper_error, + "shift_error": shift_error, + "subregion_lower_error": subregion_lower_error, + "subregion_upper_error": subregion_upper_error, + 'area_HA': area_ha + } + + logger.debug(f"Tree cover result calculated successfully: {tree_cover_result}") + return tree_cover_result + except Exception as e: + logger.error(f"Failed to calculate tree cover result: {e}", exc_info=True) + return None + + +def process_features_by_project(project_gdf, project_name, logger, params): + logger.info(f"Checking for TTC mosaic for 
{project_name}") + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + if os.path.exists(mosaic_file): + logger.debug("TTC mosaic file found") + else: + global_lookup = s3_utils.build_tile_lookup(params) + logger.debug("Global tile lookup generated") + tile_file_list = s3_utils.download_tiles( + project_gdf, global_lookup, "project", params + ) + logger.debug("Tiles downloaded") + s3_utils.make_mosaic(tile_file_list, project_name, params) + logger.debug("Tile mosaic generated") + logger.debug(f"Mosaic file at: {mosaic_file}") + error.prep_lulc_data(project_gdf, project_name, logger, params) + poly_list = project_gdf["poly_name"].unique() + logger.info( + f"Calculating tre cover for {len(poly_list)} polygons in {project_name}" + ) + project_poly_list = [] + poly_count = 0 + for poly in poly_list: + poly_data = project_gdf[project_gdf["poly_name"] == poly] + ttc_result = calculate_tree_cover( + poly_data.iloc[0], project_name, params, logger + ) + poly_count += 1 + logger.info(f"TTC result: {str(ttc_result)} for polygon {poly_count}") + poly_name = poly_data["poly_name"] + logger.debug(f"poly_name: {poly_name}") + if ttc_result is None: + ttc_result = {} + ttc_result["poly_name"] = poly_data["poly_name"] + ttc_result["Project"] = poly_data["Project"] + project_poly_list.append(pd.DataFrame.from_dict(ttc_result)) + all_poly_df = pd.DataFrame(pd.concat(project_poly_list, ignore_index=True)) + data_path = str(params["base"]["data_path"]) + all_poly_df.to_csv( + f"{data_path}ttc_output/ttc_from_tiles_{project_name}.csv", index=False + ) + logger.info(f"Tree cover data calculated for {project_name}") diff --git a/resources/python/polygon-indicator/ttc_error_utils.py b/resources/python/polygon-indicator/ttc_error_utils.py new file mode 100644 index 000000000..eeaa55dfe --- /dev/null +++ b/resources/python/polygon-indicator/ttc_error_utils.py @@ -0,0 +1,332 @@ +import os +import shapely +import fiona +import rasterio +from rasterstats import zonal_stats +import pandas as pd +from osgeo import gdal +from shapely.geometry import Polygon, shape, box + +# from shapely import concave_hull +from shapely.ops import transform, unary_union + + +# general +def calculate_percent_error(obs, exp): + if exp != 0: + return (obs - exp) / exp + else: + return 0 + + +def force_2d(geometry): + return transform(lambda x, y, z=None: (x, y), geometry) + + +def combine_errors( + expected_ttc, + shift_error, + small_site_error, + lulc_lower_error, + lulc_upper_error, + subregion_lower_error, + subregion_upper_error, +): + shift_error_half = shift_error / 2 + small_site_error_half = small_site_error / 2 + lower_error = ( + lulc_lower_error**2 + + subregion_lower_error**2 + + shift_error_half**2 + + small_site_error_half**2 + ) ** 0.5 + upper_error = ( + lulc_upper_error**2 + + subregion_upper_error**2 + + shift_error_half**2 + + small_site_error_half**2 + ) ** 0.5 + minus = expected_ttc * lower_error + plus = expected_ttc * upper_error + plus_minus_average = (minus + plus) / 2 + return plus, minus, plus_minus_average + + +# shift error +def shift_geometry(feature, NS, EW, params): + offset = params["indicators"]["wri_tropical_tree_cover"]["shift_value"] + # Convert feature to 2D + geometry = shape(feature["geometry"]) + geometry_2d = force_2d(geometry) + + if geometry_2d.geom_type == "Polygon": + coords = list(geometry_2d.exterior.coords) + elif geometry_2d.geom_type == "MultiPolygon": + coords = list( + ( + unary_union( + ( + gpd.GeoDataFrame( + {"geometry": 
geometry_2d, "test": [1]} + ).explode(ignore_index=True) + )["geometry"] + ) + ).geometry.exterior.coords + ) + + # Shift coordinates + new_coords = [] + for i, (lat, lon) in enumerate(coords): + if NS == "N": + lat = lat + offset + if NS == "S": + lat = lat - offset + if EW == "E": + lon = lon + offset + if EW == "W": + lon = lon - offset + new_coords.append((lat, lon)) + new_polygon = Polygon(new_coords) + return new_polygon + + +def calculate_shift_error(feature, expected_ttc, project_name, params, logger): + shift_list = [ + ["N", 0], + ["S", 0], + [0, "E"], + [0, "W"], + ["N", "E"], + ["N", "W"], + ["S", "E"], + ["S", "W"], + ] + shift_ttc = [] + mosaic_file = f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif" + for i in shift_list: + shift_feature = shift_geometry(feature, i[0], i[1], params) + logger.debug(f"shift calculated for {str(i)}") + if params["indicators"]["wri_tropical_tree_cover"]["data_source"] == "tiles": + with rasterio.open(mosaic_file) as src: + affine = src.transform + array = src.read(1) + shift_data = zonal_stats( + shift_feature, array, affine=affine, stats="mean", all_touched=True + )[0]["mean"] + logger.debug(f"Shift TTC: {shift_data}") + shift_ttc.append(shift_data) + shift_error = [calculate_percent_error(i, expected_ttc) for i in shift_ttc] + sq_shift_error = [i**2 for i in shift_error] + all_shift_error = (sum(sq_shift_error) / 8) ** 0.5 + return all_shift_error + + +# LULC error +def find_lulc_label(lulc_int, conversion_dict): + for key, value in conversion_dict.items(): + if lulc_int in value: + return key + return None + + +def find_lulc_error_data(lulc_label, lulc_error_table): + lulc_error_table["category"] = lulc_error_table["category"].str.lower() + # print("Lulc error table category:", lulc_error_table["category"]) + return lulc_error_table[lulc_error_table["category"] == lulc_label] + + +def prep_lulc_data(features_gdf, project_name, logger, params): + target_crs = params["indicators"]["wri_tropical_tree_cover"]["lulc"]["target_crs"] + initial_crs = features_gdf.crs.srs + if initial_crs is None: + initial_crs = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "default_initial_crs" + ] + features_gdf.set_crs(initial_crs, inplace=True) + reproj = features_gdf.to_crs( + crs=params["indicators"]["wri_tropical_tree_cover"]["lulc"]["reproj_crs"] + ) + buffer = reproj.buffer( + params["indicators"]["wri_tropical_tree_cover"]["lulc"]["buffer_size"], + cap_style=3, + ) + buffer = buffer.to_crs(crs=target_crs) + xmin, ymin, xmax, ymax = buffer.total_bounds + logger.debug(f"xmin: {xmin}, ymin: {ymin}, xmax: {xmax}, ymax: {ymax}") + + data_path = params["base"]["data_path"] + temp_path = params["base"]["temp_path"] + lulc_global_name = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "input_path" + ] + lulc_prefix = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "temp_output_prefix" + ] + + global_lulc_file = f"{data_path}{lulc_global_name}" + + ds = gdal.Open(global_lulc_file) + output_file = f"{temp_path}{lulc_prefix}{project_name}.tif" + + translateoptions = gdal.TranslateOptions(projWin=[xmin, ymax, xmax, ymin]) + ds = gdal.Translate(output_file, ds, options=translateoptions) + logger.debug(f"Temp LULC file for {project_name} generated at {output_file}") + + warpoptions = gdal.WarpOptions(warpOptions=["CENTER_LONG 0"]) + ds = gdal.Warp(output_file, output_file, options=warpoptions) + + +def get_lulc_by_polygon(feature, project_name, logger, params): + geometry = 
shape(feature["geometry"]) + temp_path = params["base"]["temp_path"] + lulc_prefix = params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "temp_output_prefix" + ] + input_file = f"{temp_path}{lulc_prefix}{project_name}.tif" + if os.path.exists(input_file): + lulc = zonal_stats( + geometry, + input_file, + all_touched=True, + stats=params["indicators"]["wri_tropical_tree_cover"]["lulc"][ + "zonal_stats" + ], + nodata=255, + ) + logger.debug( + f"Zonal stats count: {lulc[0]['count']}, zonal stats majority: {lulc[0]['majority']}" + ) + return lulc[0]["count"], lulc[0]["majority"] + else: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), input_file) + + +def calculate_lulc_error(feature, project_name, expected_ttc, params, logger): + logger.debug(f"Calculating LULC data for {project_name}") + logger.debug(f"expected_ttc: {expected_ttc}") + if expected_ttc == 0: + lulc_lower_error = 0 + lulc_upper_error = 0 + return float(lulc_lower_error), float(lulc_upper_error) + else: + lulc_count, lulc_majority = get_lulc_by_polygon( + feature, project_name, logger, params + ) + if lulc_count > 0: + + logger.debug("getting lulc error table") + lulc_error_table = pd.read_csv( + params["indicators"]["wri_tropical_tree_cover"]["lulc_ci_data"]["path"] + ) + lulc_conversion_dict = params["indicators"]["wri_tropical_tree_cover"][ + "esa_lulc_conversions" + ] + lulc_int = int(lulc_majority) + logger.debug(f"lulc_int: {lulc_int}") + lulc_label = find_lulc_label(lulc_int, lulc_conversion_dict) + logger.debug(f"lulc_label: {lulc_label}") + lulc_error_table = find_lulc_error_data(lulc_label, lulc_error_table) + + upper_error = ( + lulc_error_table["r_upper_95"] - lulc_error_table["p_lower_95"] + ) + lower_error = ( + lulc_error_table["p_upper_95"] - lulc_error_table["r_lower_95"] + ) + logger.debug(f"lulc_upper_error: {upper_error}") + logger.debug(f"lulc_lower_error: {lower_error}") + + observed_lower_lulc = expected_ttc + lower_error + observed_upper_lulc = expected_ttc + upper_error + lulc_lower_error = (observed_lower_lulc - expected_ttc) / expected_ttc + lulc_upper_error = (observed_upper_lulc - expected_ttc) / expected_ttc + + logger.debug(f"lulc_lower_error: {lulc_lower_error}") + logger.debug(f"lulc_upper_error: {lulc_upper_error}") + + return float(lulc_lower_error), float(lulc_upper_error) + else: + logger.error("Missing LULC data") + + +# subregion error +def calculate_subregion_error(feature, expected_ttc, params, logger): + with fiona.open( + params["indicators"]["wri_tropical_tree_cover"]["subregion_ci_data"]["path"], + "r", + ) as shpin: + subregion_features = list(shpin) + subregion_polys = [shape(poly["geometry"]) for poly in subregion_features] + centroid = shape(feature["geometry"]).centroid + intersect_list = [ + feat + for feat, poly in zip(subregion_features, subregion_polys) + if poly.intersects(centroid) + ] + logger.debug(f"Intersection list length: {len(intersect_list)}") + + if intersect_list: + intersect_feature = intersect_list[0] + category = intersect_feature["properties"]["category"] + p_lower_95 = intersect_feature["properties"]["p_lower_95"] + r_lower_95 = intersect_feature["properties"]["r_lower_95"] + p_upper_95 = intersect_feature["properties"]["p_upper_95"] + r_upper_95 = intersect_feature["properties"]["r_upper_95"] + upper_error = r_upper_95 - p_lower_95 + lower_error = p_upper_95 - r_lower_95 + observed_lower_subregion = expected_ttc + lower_error + observed_upper_subregion = expected_ttc + upper_error + subregion_lower_error = calculate_percent_error( + 
observed_lower_subregion, expected_ttc + ) + subregion_upper_error = calculate_percent_error( + observed_upper_subregion, expected_ttc + ) + return category, subregion_lower_error, subregion_upper_error + else: + logger.debug("No subregion intersection found") + return None, 0, 0 + + +# small site error +def get_small_site_error_value(area, expected_ttc, params, logger): + small_sites_error = params["indicators"]["wri_tropical_tree_cover"][ + "small_sites_error" + ] + if ( + area + <= params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"] + ): + logger.debug( + f'Polygon area of {area}ha is below threshold of {params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"]}' + ) + logger.debug(f"Expected TTC: {expected_ttc}") + if ( + float(small_sites_error["zeroToNine"]["min"]) + <= expected_ttc + <= float(small_sites_error["zeroToNine"]["max"]) + ): + logger.debug(f"Small sites error is 0 - 9") + return float(small_sites_error["zeroToNine"]["error"]) / expected_ttc + + elif ( + float(small_sites_error["tenToThirtyNine"]["min"]) + < expected_ttc + <= float(small_sites_error["tenToThirtyNine"]["max"]) + ): + logger.debug(f"Small sites error is 10 - 39") + return float(small_sites_error["tenToThirtyNine"]["error"]) / expected_ttc + elif ( + float(small_sites_error["fortyTo1Hundred"]["min"]) + < expected_ttc + <= float(small_sites_error["fortyTo1Hundred"]["max"]) + ): + logger.debug(f"Small sites error is 40 - 100") + return float(small_sites_error["fortyTo1Hundred"]["error"]) / expected_ttc + else: + logger.debug("Small sites error not found") + else: + logger.debug( + f'Polygon area of {area}ha is above threshold of {params["indicators"]["wri_tropical_tree_cover"]["small_sites_area_thresh"]}' + ) + return 0.0 diff --git a/resources/python/polygon-indicator/ttc_s3_utils.py b/resources/python/polygon-indicator/ttc_s3_utils.py new file mode 100644 index 000000000..0624af65f --- /dev/null +++ b/resources/python/polygon-indicator/ttc_s3_utils.py @@ -0,0 +1,108 @@ +import pandas as pd +import os +from shapely.geometry import shape +from boto3 import client +from botocore.exceptions import ClientError +from osgeo import gdal + +pd.reset_option("mode.chained_assignment") + + +def build_tile_lookup(params): + bucket_name = params["s3"]["lookup_bucket"] + lookup_file_list = params["s3"]["lookup_filelist"] + outpath = params["base"]["temp_path"] + if params["indicators"]["wri_tropical_tree_cover"]["download_tile_lookup"]: + conn = client("s3") + for file in lookup_file_list: + conn.download_file( + Bucket=bucket_name, + Key=file, + Filename=(outpath + os.path.basename(file)), + ) + df_list = [] + for file in lookup_file_list: + filename = outpath + os.path.basename(file) + df = pd.read_csv(filename, index_col=None, header=0) + df_list.append(df) + global_lookup = pd.concat(df_list, axis=0, ignore_index=True) + return global_lookup + + +def pre_filter_tiles_feature(feature, global_lookup): + centroid = shape(feature.geometry).centroid + poly_x = centroid.x + poly_y = centroid.y + pre_filter = global_lookup[ + (abs(global_lookup["X"] - poly_x) < 0.1) + & (abs(global_lookup["Y"] - poly_y) < 0.1) + ] + pre_filter["X_tile"] = pd.to_numeric(pre_filter["X_tile"], downcast="integer") + pre_filter["Y_tile"] = pd.to_numeric(pre_filter["Y_tile"], downcast="integer") + return pre_filter + + +def pre_filter_tiles_project(project_gdf, global_lookup): + bounds = project_gdf.total_bounds + pre_filter = global_lookup[ + (global_lookup["X"] > (bounds[0] - 0.05)) + & (global_lookup["X"] 
< (bounds[2] + 0.05)) + & (global_lookup["Y"] > (bounds[1] - 0.05)) + & (global_lookup["Y"] < (bounds[3] + 0.05)) + ] + pre_filter["X_tile"] = pd.to_numeric(pre_filter["X_tile"], downcast="integer") + pre_filter["Y_tile"] = pd.to_numeric(pre_filter["Y_tile"], downcast="integer") + return pre_filter + + +def build_bucket_path(x_tile, y_tile, config): + filename = f"{config['indicators']['wri_tropical_tree_cover']['data_year']}/tiles/{x_tile}/{y_tile}/{x_tile}X{y_tile}Y_FINAL.tif" + return filename + + +def download_tiles(feature, global_lookup, type, params): + conn = client("s3") + if type == "polygon": + pre_filtered_lookup = pre_filter_tiles_feature(feature, global_lookup) + elif type == "project": + pre_filtered_lookup = pre_filter_tiles_project(feature, global_lookup) + tile_file_list = list( + pre_filtered_lookup.apply( + lambda row: build_bucket_path(row["X_tile"], row["Y_tile"], params), axis=1 + ) + ) + bucket_name = params["s3"]["tile_bucket"] + outpath = params["base"]["temp_path"] + directory = f"{outpath}tiles/" + if not os.path.exists(directory): + os.makedirs(directory) + tile_list = [] + for file in tile_file_list: + try: + conn.download_file( + Bucket=bucket_name, + Key=file, + Filename=(directory + os.path.basename(file)), + ) + tile_list.append(directory + os.path.basename(file)) + except ClientError as e: + print(e) + return tile_list + + +def make_mosaic(file_list, project_name, params): + gdal.BuildVRT( + f"{str(params['gdal']['outfile'])}.vrt", + file_list, + options=gdal.BuildVRTOptions(srcNodata=255, VRTNodata=255), + ) + ds = gdal.Open(f"{str(params['gdal']['outfile'])}.vrt") + translateoptions = gdal.TranslateOptions( + gdal.ParseCommandLine("-ot Byte -co COMPRESS=LZW -a_nodata 255 -co BIGTIFF=YES") + ) + ds = gdal.Translate( + f"{str(params['gdal']['outfile'])}-{project_name}-{str(params['gdal']['outfile_suffix'])}.tif", + ds, + options=translateoptions, + ) + os.remove(f"{str(params['gdal']['outfile'])}.vrt") diff --git a/resources/python/polygon-voronoi/requirements.txt b/resources/python/polygon-voronoi/requirements.txt index 0d1ba9bb3..5885dc848 100755 --- a/resources/python/polygon-voronoi/requirements.txt +++ b/resources/python/polygon-voronoi/requirements.txt @@ -1,3 +1,12 @@ pyproj==3.4.1 numpy==1.26.4 shapely==2.0.1 +fiona==1.10.1 +pandas==2.1.3 +geopandas==1.0.1 +rasterio==1.4.1 +exactextract==0.2.0 +rasterstats==0.20.0 +pyyaml==6.0.2 +requests==2.32.3 +boto3==1.35.43 \ No newline at end of file diff --git a/routes/api_v2.php b/routes/api_v2.php index 1df197c28..199aae7b1 100644 --- a/routes/api_v2.php +++ b/routes/api_v2.php @@ -105,6 +105,11 @@ use App\Http\Controllers\V2\LeadershipTeam\StoreLeadershipTeamController; use App\Http\Controllers\V2\LeadershipTeam\UpdateLeadershipTeamController; use App\Http\Controllers\V2\MediaController; +use App\Http\Controllers\V2\MonitoredData\GetIndicatorPolygonStatusController; +use App\Http\Controllers\V2\MonitoredData\GetPolygonsIndicatorAnalysisController; +use App\Http\Controllers\V2\MonitoredData\GetPolygonsIndicatorAnalysisVerifyController; +use App\Http\Controllers\V2\MonitoredData\IndicatorEntitySlugExportController; +use App\Http\Controllers\V2\MonitoredData\RunIndicatorAnalysisController; use App\Http\Controllers\V2\Nurseries\AdminIndexNurseriesController; use App\Http\Controllers\V2\Nurseries\AdminNurseriesMultiController; use App\Http\Controllers\V2\Nurseries\CreateNurseryWithFormController; @@ -759,6 +764,16 @@ function () { Route::get('/indicator/hectares-restoration', 
GetHectaresRestoredController::class); }); +Route::prefix('indicators')->group(function () { + Route::post('/{slug}', RunIndicatorAnalysisController::class); + ModelInterfaceBindingMiddleware::with(EntityModel::class, function () { + Route::get('/{entity}/{slug}', GetPolygonsIndicatorAnalysisController::class); + Route::get('/{entity}/{slug}/verify', GetPolygonsIndicatorAnalysisVerifyController::class); + Route::get('/{entity}/{slug}/export', IndicatorEntitySlugExportController::class); + Route::get('/{entity}', GetIndicatorPolygonStatusController::class); + }); +}); + Route::prefix('project-pipeline')->group(function () { Route::get('/', GetProjectPipelineController::class); Route::get('/{id}', GetProjectPipelineController::class);
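
A minimal usage sketch of the new indicator endpoints wired up above (POST /v2/indicators/{slug} with an IndicatorPost body, and GET /v2/indicators/{entity}/{uuid}/{slug} returning Indicators rows). This is illustrative only and not part of the diff: the host, token, entity, uuid, and slug values below are placeholders, and the auth scheme is assumed to match the API's existing bearer-token routes.

import requests

API_BASE = "https://example.org/api"  # placeholder host, not part of this change
HEADERS = {
    "Authorization": "Bearer <token>",  # placeholder credential
    "Accept": "application/json",
}

# Queue analysis for a set of site polygons under one indicator slug
# (body shape follows the IndicatorPost definition: {"uuids": [...]}).
requests.post(
    f"{API_BASE}/v2/indicators/<indicator-slug>",
    json={"uuids": ["<polygon-uuid-1>", "<polygon-uuid-2>"]},
    headers=HEADERS,
)

# Fetch the computed indicator rows for an entity; each item should follow
# the Indicators definition (poly_name, status, size, year_of_analysis, value, ...).
rows = requests.get(
    f"{API_BASE}/v2/indicators/<entity>/<entity-uuid>/<indicator-slug>",
    headers=HEADERS,
).json()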