Skip to content

Commit

Permalink
Merge pull request #184 from geosolutions-it/issue-182-release-1.1.0
Browse files Browse the repository at this point in the history
Issue 182 release 1.1.0
  • Loading branch information
chpicone authored Dec 16, 2020
2 parents a296b55 + c7fedf9 commit bd13dae
Show file tree
Hide file tree
Showing 30 changed files with 906 additions and 161 deletions.
2 changes: 2 additions & 0 deletions app/context_processors.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import ast
import os


def export_vars(request):
    """Context processor exposing selected environment settings to templates.

    Returns a dict with:
      - ``URL_PATH_PREFIX``: raw string from the environment (default ``""``).
      - ``FREEZE_FEATURE_TOGGLE``: the env value parsed as a Python literal
        (default ``"False"`` -> ``False``).
        NOTE(review): ``ast.literal_eval`` raises on values such as
        ``"false"`` or ``"yes"`` -- presumably only ``"True"``/``"False"``
        are ever set; confirm against deployment config.
    """
    return {
        "URL_PATH_PREFIX": os.getenv("URL_PATH_PREFIX", ""),
        "FREEZE_FEATURE_TOGGLE": ast.literal_eval(
            os.getenv("FREEZE_FEATURE_TOGGLE", "False")
        ),
    }
26 changes: 26 additions & 0 deletions app/scheduler/migrations/0014_freezelayer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Generated by Django 3.1.2 on 2020-12-09 12:19

import datetime
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Auto-generated migration: creates the scheduler_freezelayer table.

    Mirrors the FreezeLayer model in app/scheduler/models.py -- one row
    per layer being frozen, tied to the scheduler Task performing it.
    """

    # Must run after the ImportedLayer.status column was added.
    dependencies = [
        ('scheduler', '0013_importedlayer_status'),
    ]

    operations = [
        migrations.CreateModel(
            name='FreezeLayer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Defaults to naive local time at row creation (datetime.now).
                ('freeze_start_timestamp', models.DateTimeField(default=datetime.datetime.now)),
                # Stays NULL until the freeze of this layer completes.
                ('freeze_end_timestamp', models.DateTimeField(null=True)),
                ('layer_name', models.CharField(max_length=250)),
                # Lifecycle status string; new rows start as QUEUED.
                ('status', models.CharField(default='QUEUED', max_length=20)),
                # Cascade: deleting the parent task removes its freeze-layer rows.
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scheduler.task')),
            ],
        ),
    ]
23 changes: 23 additions & 0 deletions app/scheduler/migrations/0015_freeze.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Generated by Django 3.1.2 on 2020-12-10 09:06

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Auto-generated migration: creates the scheduler_freeze table.

    Mirrors the Freeze model in app/scheduler/models.py -- one row per
    freeze operation, keyed by reference year and linked to its Task.
    """

    # Must run after the FreezeLayer table was created.
    dependencies = [
        ('scheduler', '0014_freezelayer'),
    ]

    operations = [
        migrations.CreateModel(
            name='Freeze',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Year the frozen snapshot refers to; required (no null=True).
                ('ref_year', models.IntegerField()),
                # Optional free-text notes about the freeze.
                ('notes', models.TextField(blank=True, null=True)),
                # Cascade: deleting the parent task removes this freeze row.
                ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scheduler.task')),
            ],
        ),
    ]
26 changes: 26 additions & 0 deletions app/scheduler/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,32 @@ def to_dict(self):
}


class Freeze(models.Model):
    """A data-freeze operation for a given reference year.

    Linked to the scheduler Task that performed it (see migration
    0015_freeze for the generated table).
    """

    # Year the frozen snapshot refers to; required at both DB and form level.
    ref_year = models.IntegerField(blank=False, null=False)
    # Optional free-text notes about the freeze.
    notes = models.TextField(blank=True, null=True)
    # Scheduler task that executed the freeze; cascade-deleted with it.
    task = models.ForeignKey(Task, on_delete=models.CASCADE)

    def __str__(self):
        # Display the freeze by its reference year.
        return str(self.ref_year)


class FreezeLayer(models.Model):
    """Per-layer progress record for a freeze Task.

    One row per layer being frozen; serialized via to_dict() for status
    reporting (see migration 0014_freezelayer for the generated table).
    """

    # Parent scheduler task; cascade-deleted with it.
    task = models.ForeignKey(Task, on_delete=models.CASCADE)
    # Set when the freeze of this layer starts.
    # NOTE(review): datetime.datetime.now is naive local time -- confirm
    # timezone handling (django.utils.timezone.now is the aware alternative).
    freeze_start_timestamp = models.DateTimeField(default=datetime.datetime.now)
    # Stays NULL until the freeze of this layer finishes.
    freeze_end_timestamp = models.DateTimeField(null=True)
    # Name of the layer being frozen.
    layer_name = models.CharField(max_length=250, null=False)
    # Lifecycle status string; new rows start as TaskStatus.QUEUED.
    status = models.CharField(max_length=20, null=False, default=TaskStatus.QUEUED)

    def to_dict(self):
        """Return a JSON-friendly dict of this row (timestamps/uuid as strings)."""
        return {
            "task": str(self.task.uuid),
            "freeze_start_timestamp": str(self.freeze_start_timestamp),
            "freeze_end_timestamp": str(self.freeze_end_timestamp),
            "layer_name": self.layer_name,
            "status": self.status
        }


class AllDomains(models.Model):
"""
"all_domains" table model used for a quick *.csv data loading
Expand Down
14 changes: 13 additions & 1 deletion app/scheduler/serializers.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from app.scheduler.models import Task, ImportedLayer
from app.scheduler.models import Task, ImportedLayer, FreezeLayer
from rest_framework import serializers


Expand All @@ -8,12 +8,24 @@ class Meta:
fields = [u'id', u'uuid', u'status', u'style_class', u'status_icon', u'progress']


class FreezeSerializer(serializers.ModelSerializer):
    """Serializer used for freeze-task status/progress responses.

    NOTE(review): this serializes the generic Task model with the same
    field list as TaskSerializer, not the Freeze model -- presumably
    intentional (freeze jobs are Tasks), but confirm against its callers.
    """
    class Meta:
        model = Task
        fields = [u'id', u'uuid', u'status', u'style_class', u'status_icon', u'progress']


class ImportedLayerSerializer(serializers.ModelSerializer):
    """Read serializer for per-layer import progress rows (ImportedLayer)."""
    class Meta:
        model = ImportedLayer
        fields = [u'import_start_timestamp', u'import_end_timestamp', u'layer_name', u'status']


class FreezeLayerSerializer(serializers.ModelSerializer):
    """Read serializer for per-layer freeze progress rows (FreezeLayer).

    Fix: the field list previously named import_start_timestamp /
    import_end_timestamp (copied from ImportedLayerSerializer), but the
    FreezeLayer model declares freeze_start_timestamp /
    freeze_end_timestamp, so DRF would raise ImproperlyConfigured when
    building the serializer fields.
    """
    class Meta:
        model = FreezeLayer
        fields = [u'freeze_start_timestamp', u'freeze_end_timestamp', u'layer_name', u'status']


class ProcessSerializer(serializers.ModelSerializer):
class Meta:
model = Task
Expand Down
10 changes: 7 additions & 3 deletions app/scheduler/tasks/base_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

from django.db.models import ObjectDoesNotExist

from app.scheduler.models import Task, TaskStatus, ImportedLayer
from app.scheduler.models import Task, TaskStatus, ImportedLayer, FreezeLayer
from app.scheduler.logging import Tee


Expand Down Expand Up @@ -137,13 +137,17 @@ def perform(self, task_id: int) -> None:
task.save()
finally:
'''
Final check of the ImportedLayer.
If at least 1 import process is failed, the whole task is considered unsuccessful
Final check of the ImportedLayer/FreezeLayer.
If at least 1 import/freeze process is failed, the whole task is considered unsuccessful
'''
import_layer = ImportedLayer.objects.filter(task_id__id=task.id)
freeze_layer = FreezeLayer.objects.filter(task_id__id=task.id)
if len(import_layer) > 0:
imported_results = all(list(map(lambda x: x.status == 'SUCCESS', import_layer)))
task.status = TaskStatus.SUCCESS if imported_results else TaskStatus.FAILED
elif len(freeze_layer) > 0:
freeze_result = all(list(map(lambda x: x.status == 'SUCCESS', freeze_layer)))
task.status = TaskStatus.SUCCESS if freeze_result else TaskStatus.FAILED

task.progress = 100
task.end_date = timezone.now()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,15 @@


class ShpExportConfig(BaseExportConfig):
def __init__(self):
def __init__(self, ref_year=None):
super().__init__()
self.ref_year = ref_year

with open(settings.SHAPEFILE_EXPORT_CONFIG.substitute(), "r") as ecf:
sett = settings.SHAPEFILE_EXPORT_CONFIG.substitute()
if self.ref_year is not None:
sett = settings.SHAPEFILE_EXPORT_CONFIG.substitute({"year": self.ref_year})

with open(sett, "r") as ecf:
config = json.load(ecf)

shapes_config_files = config.get("shp_files_configs", None)
Expand All @@ -44,8 +49,12 @@ def __init__(self):
f"Sheet config path may not be absolute: {shape_config_path}."
)
else:
parent_folder = Path(settings.EXPORT_CONF_FILE.substitute()).parent
if ref_year is not None:
parent_folder = Path(settings.EXPORT_CONF_FILE.substitute({"year": self.ref_year})).parent

shape_config_path = Path(
Path(settings.EXPORT_CONF_FILE.substitute()).parent,
parent_folder,
shape_config_path,
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,15 @@


class XlsExportConfig(BaseExportConfig):
def __init__(self):
def __init__(self, year=None):
super().__init__()
self.year = year

with open(settings.EXPORT_CONF_FILE.substitute(), "r") as ecf:
sett = settings.EXPORT_CONF_FILE.substitute()
if year is not None:
sett = settings.EXPORT_CONF_FILE.substitute({"year": year})

with open(sett, "r") as ecf:
config = json.load(ecf)

sheets_config_files = config.get("xls_sheet_configs", None)
Expand All @@ -74,8 +79,12 @@ def __init__(self):
f"Sheet config path may not be absolute: {sheet_config_path}."
)
else:
parent_folder = Path(settings.EXPORT_CONF_FILE.substitute()).parent
if year is not None:
parent_folder = Path(settings.EXPORT_CONF_FILE.substitute({"year": year})).parent

sheet_config_path = Path(
Path(settings.EXPORT_CONF_FILE.substitute()).parent,
parent_folder,
sheet_config_path,
)

Expand Down
3 changes: 2 additions & 1 deletion app/scheduler/tasks/export_definitions/export/export_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
class ExportBase:

def __init__(
self, export_dir: pathlib.Path, orm_task: Task, max_progress: int = 100
self, export_dir: pathlib.Path, orm_task: Task, max_progress: int = 100, ref_year=None
):
"""
Initialization function of data export
Expand All @@ -27,6 +27,7 @@ def __init__(
self.max_progress = max_progress
self.starting_progress = orm_task.progress
self.logger = None
self.ref_year = ref_year

# make sure target location exists
self.export_dir.parent.mkdir(parents=True, exist_ok=True)
Expand Down
4 changes: 2 additions & 2 deletions app/scheduler/tasks/export_definitions/export/export_shp.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,8 @@ def run(self):
qgs, processing, gdal_utils, is_windows = initQgis()

# parse export configuration
config = ShpExportConfig()

config = ShpExportConfig(self.ref_year)
print(f"Exporting Shape for {len(config)} config files")
# calculate total number of steps
total_shapes_number = len(config)
step = 1
Expand Down
53 changes: 34 additions & 19 deletions app/scheduler/tasks/export_definitions/export/export_xls.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,17 +34,21 @@ def run(self):
)

# parse export configuration
config = XlsExportConfig()
config = XlsExportConfig(self.ref_year)

# calculate total number of steps
print(f"Exporting XLS for {len(config)} config files")
total_sheet_number = len(config)
step = 1

# fetch all_domains info
all_domains = Domains()

# load seed *.xlsx file
excel_wb = openpyxl.load_workbook(settings.EXPORT_XLS_SEED_FILE.substitute())
seed_path = settings.EXPORT_XLS_SEED_FILE.substitute()
if self.ref_year is not None:
seed_path = settings.EXPORT_XLS_SEED_FILE.substitute({"year": self.ref_year})
excel_wb = openpyxl.load_workbook(seed_path)

for sheet in config:

Expand Down Expand Up @@ -134,25 +138,31 @@ def run(self):

for column in sheet["columns"]:
for validator in column.get("validators", []):
if not validator["validator"].validate(sheet_row):
message = validator.get("warning", "")
column_letter = coord_id_mapping.get(
str(column["id"]), None
)

if message:
message = (
message.replace("{SHEET}", sheet["sheet"])
.replace("{ROW}", str(first_empty_row))
.replace("{FIELD}", column_letter)
)
self.logger.error(message)
else:
self.logger.error(
f"Validation failed for cell '{column_letter}{first_empty_row}' "
f"in the '{sheet['sheet']}' sheet."
try:
if not validator["validator"].validate(sheet_row, self.ref_year):
message = validator.get("warning", "")
column_letter = coord_id_mapping.get(
str(column["id"]), None
)

if message:
message = (
message.replace("{SHEET}", sheet["sheet"])
.replace("{ROW}", str(first_empty_row))
.replace("{FIELD}", column_letter)
)
self.logger.error(message)
else:
self.logger.error(
f"Validation failed for cell '{column_letter}{first_empty_row}' "
f"in the '{sheet['sheet']}' sheet."
)
except Exception as e:
self.logger.error(
f"Error occurred during validation of column with "
f"ID '{column['id']}' in row '{first_empty_row}' in sheet '{sheet['sheet']}':\n"
f"{type(e).__name__}: {e}.\n"
)
# insert sheet_row into excel
for column_id, value in sheet_row.items():
column_letter = coord_id_mapping.get(str(column_id))
Expand All @@ -177,6 +187,11 @@ def run(self):
step += 1
self.update_progress(step, total_sheet_number)

# update the information in the sheet "DATI" before save it

excel_wb["DATI"]["B5"] = today.date()
excel_wb["DATI"]["B8"] = self.ref_year
excel_wb["DATI"]["B10"] = today.date()
# save updated *.xlsx seed file in the target location
excel_wb.save(target_xls_file)

Expand Down
18 changes: 11 additions & 7 deletions app/scheduler/tasks/export_definitions/validations.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import ast
import re
from datetime import datetime

import schema
from .exceptions import ExportConfigError
Expand All @@ -18,7 +19,7 @@ def __init__(self, args=None):
self.schema.validate(args)
self.args = args

def validate(self, value):
def validate(self, value, ref_year):
"""
Apply the transformation
"""
Expand Down Expand Up @@ -86,19 +87,19 @@ class IfValidation(BaseValidation):
)
re_pattern = re.compile('{\W*(\w+)\W*}')

def validate(self, row: Dict, ref_year: int = None):
    """Return True when every configured condition group passes for *row*.

    Each entry of args["cond"] may carry an "and" list and/or an "or"
    list of conditions; evaluation of a group is delegated to
    _validate_condition.  All groups are evaluated eagerly (no
    short-circuit), preserving the original evaluation order.
    """
    outcomes = [
        self._validate_condition(
            group.get("and", []), group.get("or", []), row, ref_year
        )
        for group in self.args["cond"]
    ]
    return all(outcomes)

def _validate_condition(self, and_conditions, or_conditions, row):
and_result = list(self._validate_list(and_conditions, row))
or_result = list(self._validate_list(or_conditions, row))
def _validate_condition(self, and_conditions, or_conditions, row, ref_year):
and_result = list(self._validate_list(and_conditions, row, ref_year))
or_result = list(self._validate_list(or_conditions, row, ref_year))

if len(and_result) > 0 and len(or_result) > 0:
return all([and_result, any(or_result)])
Expand All @@ -107,7 +108,7 @@ def _validate_condition(self, and_conditions, or_conditions, row):
elif len(and_result) > 0 and len(or_result) == 0:
return all(and_result)

def _validate_list(self, conditions, row):
def _validate_list(self, conditions, row, ref_year):
field_value = row.get(self.args["field"], None)
for cond in conditions:
if "lookup" in cond:
Expand All @@ -121,6 +122,9 @@ def _validate_list(self, conditions, row):
if field_value is None:
return False

if cond['value'] == "{REF_YEAR}":
cond['value'] = ref_year or datetime.utcnow().year

operator = COMPARISON_OPERATORS_MAPPING.get(cond["operator"], None)
yield operator(field_value, cond["value"])

Expand Down
Loading

0 comments on commit bd13dae

Please sign in to comment.