From 584583e0af8cca428578aa7a9d6f56c6ca6c3278 Mon Sep 17 00:00:00 2001 From: Glauber Costa Vila-Verde Date: Fri, 4 Dec 2020 17:47:38 -0300 Subject: [PATCH] v2.2 (#1271) * Update Django to v2.2.17 (#1256) * Update Django and other related libs. Django 2.1.15 -> 2.2.17 #1255 * Update of other dependencies * Integrate with DESaccess (#1258) * Minor correction in the target catalogs query using postgresql * Start of integration with Descut service. * Created methods to Submit and Check the status of the Descut job. and the creation of the method to download the files has started. * Created methods to download the results, match between targets and images. the method of registering images is still incomplete. * Finalized the methods of registering images. Correction of target queries. creation of methods that retrieve the list of objects before submitting. * Partial implementation of the cutout workflow. * Integration with DESaccess cutout has been implemented. workflow created for submitting and downloading cutout jobs. #1257 * Created methods that allow integration with DESaccess Tile Finder. docstrings of the methods related to Cutout have been added. #1257 * Commenting the 'teste' endpoint Co-authored-by: Matheus Allein * Target Cutout and Integration with DESaccess (#1267) * Minor correction in the target catalogs query using postgresql * Start of integration with Descut service. * Created methods to Submit and Check the status of the Descut job. and the creation of the method to download the files has started. * Created methods to download the results, match between targets and images. the method of registering images is still incomplete. * Finalized the methods of registering images. Correction of target queries. creation of methods that retrieve the list of objects before submitting. * Partial implementation of the cutout workflow. * Integration with DESaccess cutout has been implemented. workflow created for submitting and downloading cutout jobs. 
#1257 * Created methods that allow integration with DESaccess Tile Finder. docstrings of the methods related to Cutout have been added. #1257 * Cutout Jobs submission form has been changed to be compatible with the new version of DESaccess. new color image options have been added. #1257 * Alteration of the Mosaic panel has started to allow the selection of more than one type of color image. Cutout Jobs loading and selection has been improved, now the job list will always be updated with all available jobs and the most recent job will be selected by default. #1257 * Refactoring the visualization of the cutouts, it is now possible to choose the color image used in the mosaic. * minor fix in default settings #1257 * The discard_fits_files function has been implemented to avoid downloading fits files that have not been requested by the user. #1257 * Download Target List compatible with DESacces. (#1269) * Minor fix in cutout. * A correction was made to the download feature. to be compatible with the new version of DESaccess. * Change labels Mosaic -> Cutout * Minor fix in cutout (#1270) * Fixing contrast bug and removing features from tile inspection (#1259) * Fixing contrast bug and removing features from tile inspection * Changing virtual list library * Fixing reload logic based on new virtual list * Fixing release change not loading datasets * The name of the configuration variable has been changed. 
* Enabling clear search and positioning visiomatic on search by position Co-authored-by: glaubervila * Feat/eyeballing tile download (#1268) * Fetching tile info and displaying on download dialog * Fetching tile info and displaying on download dialog * Fetching tile info and displaying on download dialog * Adding error handler for the case of not having data on tile * Fixing release change not loading datasets Co-authored-by: Matheus Allein --- api/catalog/tests.py | 2 + api/coadd/views.py | 141 +- api/common/desaccess.py | 341 +++++ api/common/views.py | 73 +- api/dri/settings/defaults.py | 50 +- api/dri/settings/local_vars.py.template | 70 +- api/dri/urls.py | 38 +- api/lib/CatalogDB.py | 121 +- api/lib/sqlalchemy_wrapper.py | 8 +- api/product/__init__.py | 1 + api/product/admin.py | 18 +- api/product/apps.py | 4 + api/product/descutoutservice.py | 1175 ++++++++--------- api/product/export.py | 18 +- .../migrations/0013_auto_20201110_1507.py | 21 + .../0014_remove_cutout_ctt_thumbname.py | 17 + .../migrations/0015_cutout_ctt_img_format.py | 18 + .../migrations/0016_cutout_ctt_jobid.py | 18 + .../migrations/0017_auto_20201111_1926.py | 67 + .../migrations/0018_auto_20201111_1947.py | 28 + api/product/migrations/0019_desjob.py | 26 + .../migrations/0020_desjob_djb_message.py | 18 + .../migrations/0021_cutout_cjb_des_job.py | 19 + .../migrations/0022_auto_20201117_2155.py | 23 + .../migrations/0023_auto_20201119_1817.py | 22 + api/product/models.py | 120 +- api/product/serializers.py | 80 +- api/product/signals.py | 38 +- api/product/tasks.py | 239 +--- .../templates/cutout_notification_finish.html | 351 ++--- api/product/views.py | 10 +- api/product_classifier/views.py | 5 +- api/requirements.txt | 80 +- api/validation/views.py | 1 - frontend/eyeballing/package.json | 25 +- frontend/eyeballing/public/index.html | 2 +- frontend/eyeballing/src/api/Api.js | 15 +- frontend/eyeballing/src/components/Counter.js | 102 +- .../eyeballing/src/components/SearchField.js | 76 
+- .../eyeballing/src/components/TileTable.js | 33 +- .../src/components/download/index.js | 135 ++ .../src/components/download/styles.js | 34 + .../src/components/visiomatic/Visiomatic.js | 44 +- frontend/eyeballing/src/home.js | 630 ++++++--- .../eyeballing/src/theme/MaterialTheme.js | 32 +- .../pages/Home/partials/Interfaces/index.js | 14 +- frontend/target/app/Application.js | 4 +- frontend/target/app/model/Cutout.js | 38 +- frontend/target/app/model/CutoutJob.js | 85 +- frontend/target/app/store/CutoutJobs.js | 2 + frontend/target/app/store/Cutouts.js | 20 +- .../app/view/objects/CutoutJobDetailWindow.js | 34 +- .../target/app/view/objects/DownloadWindow.js | 37 +- frontend/target/app/view/objects/Mosaic.js | 138 +- .../app/view/objects/ObjectsController.js | 239 +++- .../target/app/view/objects/ObjectsModel.js | 21 +- frontend/target/app/view/objects/Panel.js | 44 +- .../app/view/settings/CutoutJobController.js | 161 +-- .../target/app/view/settings/CutoutJobForm.js | 241 ++-- .../app/view/settings/CutoutJobModel.js | 14 +- 60 files changed, 3327 insertions(+), 2154 deletions(-) create mode 100644 api/common/desaccess.py create mode 100644 api/product/migrations/0013_auto_20201110_1507.py create mode 100644 api/product/migrations/0014_remove_cutout_ctt_thumbname.py create mode 100644 api/product/migrations/0015_cutout_ctt_img_format.py create mode 100644 api/product/migrations/0016_cutout_ctt_jobid.py create mode 100644 api/product/migrations/0017_auto_20201111_1926.py create mode 100644 api/product/migrations/0018_auto_20201111_1947.py create mode 100644 api/product/migrations/0019_desjob.py create mode 100644 api/product/migrations/0020_desjob_djb_message.py create mode 100644 api/product/migrations/0021_cutout_cjb_des_job.py create mode 100644 api/product/migrations/0022_auto_20201117_2155.py create mode 100644 api/product/migrations/0023_auto_20201119_1817.py create mode 100644 frontend/eyeballing/src/components/download/index.js create mode 100644 
frontend/eyeballing/src/components/download/styles.js diff --git a/api/catalog/tests.py b/api/catalog/tests.py index a0aa64489..b2e8806c7 100644 --- a/api/catalog/tests.py +++ b/api/catalog/tests.py @@ -11,6 +11,8 @@ class CommentsAPITestCase(APITestCase): + databases = '__all__' + def setUp(self): self.user = User.objects.create_user("dri", "dri@linea.org", "dri") self.client.login(username='dri', password='dri') diff --git a/api/coadd/views.py b/api/coadd/views.py index 316a74205..7b88c6bec 100644 --- a/api/coadd/views.py +++ b/api/coadd/views.py @@ -1,24 +1,23 @@ import copy +import os +from urllib.parse import urljoin import django_filters +from common.models import Filter +from django.conf import settings +from django_filters.rest_framework import DjangoFilterBackend, OrderingFilter from lib.sqlalchemy_wrapper import DBBase -from rest_framework import filters -from rest_framework import viewsets -from rest_framework.decorators import api_view, list_route +from rest_framework import filters, viewsets +from rest_framework.decorators import api_view +from rest_framework.decorators import action from rest_framework.response import Response -from django_filters.rest_framework import DjangoFilterBackend -from django_filters.rest_framework import OrderingFilter -from .models import Release, Tag, Tile, Dataset, Survey -from .serializers import ReleaseSerializer, TagSerializer, TileSerializer, DatasetSerializer, \ - SurveySerializer, DatasetFootprintSerializer -from django.conf import settings -import os -from urllib.parse import urljoin - -from common.models import Filter +from .models import Dataset, Release, Survey, Tag, Tile +from .serializers import (DatasetFootprintSerializer, DatasetSerializer, + ReleaseSerializer, SurveySerializer, TagSerializer, + TileSerializer) -# Create your views here. 
+from common.desaccess import DesAccessApi class ReleaseViewSet(viewsets.ModelViewSet): @@ -66,6 +65,42 @@ class TileViewSet(viewsets.ModelViewSet): ordering_fields = ('tli_tilename', 'tli_ra', 'tli_dec',) + @action(detail=True) + def desaccess_tile_info(self, request, pk=None): + """Search DESaccess for tilename and return a list of tile files. + + Returns: + dict: returns a dict with the image and catalog urls. + """ + tile = self.get_object() + + tilename = tile.tli_tilename + + desapi = DesAccessApi() + tileinfo = desapi.tile_by_name(tilename) + + return Response(tileinfo) + + @action(detail=False, methods=['post']) + def desaccess_get_download_url(self, request): + """creates an authenticated url for a file served by DESaccess. + + Args: + file_url (str): URL of the file to be downloaded. + + Returns: + str: Authenticated URL, note that this url has a time limit to be used. must be generated at the time the download is requested. + """ + + params = request.data + file_url = params['file_url'] + + desapi = DesAccessApi() + + download_url = desapi.file_url_to_download(file_url) + + return Response(dict({"download_url": download_url})) + class DatasetFilter(django_filters.FilterSet): tag__in = django_filters.CharFilter(method='filter_tag__in') @@ -73,11 +108,10 @@ class DatasetFilter(django_filters.FilterSet): position = django_filters.CharFilter(method='filter_position') release = django_filters.CharFilter(method='filter_release') inspected = django_filters.CharFilter(method='filter_inspected') - class Meta: model = Dataset - fields = ['id', 'tag', 'tile', 'tag__in', 'tli_tilename', 'release',] + fields = ['id', 'tag', 'tile', 'tag__in', 'tli_tilename', 'release', ] order_by = True def filter_release(self, queryset, name, value): @@ -145,6 +179,79 @@ class DatasetViewSet(viewsets.ModelViewSet): ordering = ('tile__tli_tilename',) + @action(detail=True) + def desaccess_tile_info(self, request, pk=None): + """Search DESaccess for tilename and return a list of tile 
files already filtered by the dataset release. + + Returns: + dict: returns a dict with the image and catalog urls, both organized by band and with the file url. + """ + dataset = self.get_object() + + # Requested to associate these internal releases + # to the DESAccess releases: + associated_releases = { + 'y6a2_coadd': 'y6a1_coadd', + 'y3a1_coadd': 'y3a2_coadd', + 'y1_supplemental_dfull': 'y1a1_coadd', + 'y1_supplemental_d10': 'y1a1_coadd', + 'y1_supplemental_d04': 'y1a1_coadd', + 'y1_wide_survey': 'y1a1_coadd', + } + + tilename = dataset.tile.tli_tilename + rls_name = dataset.tag.tag_release.rls_name + + # Associate the internal release to the release of DESAccess: + if rls_name in associated_releases.keys(): + rls_name = associated_releases[rls_name] + + desapi = DesAccessApi() + + tileinfo = desapi.tile_by_name(tilename) + + for release in tileinfo["releases"]: + # Compara o release pelo internal name, nas nossas tabelas o release tem _coadd no nome. por isso é necessário fazer um split. + if release["release"] == rls_name.split("_")[0].lower(): + + result = dict({ + "tilename": tileinfo["tilename"], + "ra_cent": tileinfo["ra_cent"], + "dec_cent": tileinfo["dec_cent"], + "racmin": tileinfo["racmin"], + "racmax": tileinfo["racmax"], + "deccmin": tileinfo["deccmin"], + "deccmax": tileinfo["deccmax"], + "images": {}, + "catalogs": {}, + }) + + for band in release["bands"]: + result["images"][band.lower()] = release["bands"][band]["image"] + result["catalogs"][band.lower()] = release["bands"][band]["catalog"] + + return Response(result) + + @action(detail=False, methods=['post']) + def desaccess_get_download_url(self, request): + """creates an authenticated url for a file served by DESaccess. + + Args: + file_url (str): URL of the file to be downloaded. + + Returns: + str: Authenticated URL, note that this url has a time limit to be used. must be generated at the time the download is requested. 
+ """ + + params = request.data + file_url = params['file_url'] + + desapi = DesAccessApi() + + download_url = desapi.file_url_to_download(file_url) + + return Response(dict({"download_url": download_url})) + class DatasetFootprintViewSet(viewsets.ModelViewSet): queryset = Dataset.objects.select_related().all() @@ -172,7 +279,7 @@ class SurveyViewSet(viewsets.ModelViewSet): ordering_fields = ('srv_filter__lambda_min',) -@api_view(['GET']) +@ api_view(['GET']) def get_fits_by_tilename(request): if request.method == 'GET': diff --git a/api/common/desaccess.py b/api/common/desaccess.py new file mode 100644 index 000000000..feaaa5525 --- /dev/null +++ b/api/common/desaccess.py @@ -0,0 +1,341 @@ + +import logging +from tarfile import filemode + +import requests +from django.conf import settings +from requests.packages.urllib3.exceptions import InsecureRequestWarning + +requests.packages.urllib3.disable_warnings(InsecureRequestWarning) + + +class DesAccessApi: + """This class implements the necessary methods for integration with the DESaccess service. + + it is necessary to have the configuration parameters DESACCESS_API in Settings. + DESACCESS_API = { + # URL Principal do Serviço. + 'API_URL': 'https://deslabs.ncsa.illinois.edu/desaccess/api', + + # URL para download dos resultados do cutout job. + 'FILES_URL': 'https://deslabs.ncsa.illinois.edu/files-desaccess', + + # Usuario Oracle do NCSA com acesso ao desaccess. + 'USERNAME': None, + 'PASSWORD': None, + + # Database Oracle que será usado para authenticar as credenciais. must be either 'dessci' or 'desoper', usar mesmo database usado em NCSA_AUTHENTICATION_DB + 'DATABASE': 'dessci', + + # Lista dos Releases disponiveis no serviço do descut. OBS: está lista de releases é utilizada pela interface no formulário de submissão. + 'AVAILABLE_RELEASES': ['Y6A1', 'Y3A2', 'Y1A1', 'SVA1'], + + # Max de cutouts que o Descut aceita por job. 
default is 20000 + 'MAX_OBJECTS': 20000 + } + + DESaccess API Reference: https://deslabs.ncsa.illinois.edu/desaccess/docs/api/ + """ + + def __init__(self): + + # instance default do logger, esse log pode ser substituido utilizando o metodo setLogger. + self.logger = logging.getLogger("django") + + # fazer os request sem verificar o certificado SSL / HTTPS + self.verify_ssl = False + + def setLogger(self, logger): + """Changes the logger used by this class. + it is useful when you want to keep the request logs in the same log of a process for example. + + Args: + logger (object): Logger instance where the messages will be written. + """ + self.logger = logger + + def login(self, ): + """Authenticate using DES account to obtain an access token. + + Raises: + Exception: Lança uma exception caso o login falhe. + + Returns: + str: auth token example: eyJ0eXAiOiJK...V1QiLCJhbGciOiJI + """ + config = settings.DESACCESS_API + + url = "{}/login".format(config['API_URL']) + # self.logger.debug("Login URL: [%s]" % url) + + # Dados para a Autenticação. + data = {'username': config['USERNAME'], 'password': config['PASSWORD'], 'database': config['DATABASE']} + + # Login to obtain an auth token + r = requests.post(url, data, verify=self.verify_ssl) + + self.logger.debug("Login Status: [%s]" % r.json()['status']) + + if r.json()['status'] != 'ok': + raise Exception(r.json()['message']) + + return r.json()['token'] + + def submit_cutout_job(self, data): + """Submits a CUTOUT job and returns the complete server response which includes the job ID. + Complete Documentation, submit and return schema: https://deslabs.ncsa.illinois.edu/desaccess/docs/api/#/Jobs/cutout + + Args: + data (dict): Cutout job specification, all parameters can be checked in api documentation. + + Raises: + Exception: throws an exception if the returned status is different from ok. 
+ + Returns: + str: New job UUID (universally unique identifier) example: 25f4d3bacae04a59aee7847af82c5012 + """ + self.logger.info("Submiting Descut Job") + self.logger.debug(data) + + config = settings.DESACCESS_API + url = "{}/job/cutout".format(config['API_URL']) + + # Submit job + r = requests.put( + url, + data=data, + headers={'Authorization': 'Bearer {}'.format(self.login())}, + verify=self.verify_ssl + ) + + response = r.json() + self.logger.debug("Response: %s" % response) + self.logger.debug("Response Status: [%s]" % response["status"]) + + if response["status"] == "ok": + # Retorna o jobid + self.logger.debug("Descut Job Submited with Jobid: [%s]" % response["jobid"]) + + return response["jobid"] + else: + msg = "Error submitting job: %s" % response["message"] + raise Exception(msg) + + def check_job_status(self, jobid): + """Request status of existing job(s) + Complete Documentation and return schema: https://deslabs.ncsa.illinois.edu/desaccess/docs/api/#/Jobs/status + + Args: + jobid (str): Either a specific job ID + + Raises: + Exception: Retorna uma Execption caso o Job tenha terminado com falha no lado do Desaccess. + + Returns: + dict: Job status and information. or None when job still running. 
+ + """ + + self.logger.info("Check Status for Descut Jobid: [%s]" % jobid) + + config = settings.DESACCESS_API + url = "{}/job/status".format(config['API_URL']) + + r = requests.post( + url, + data={ + "job-id": jobid + }, + headers={'Authorization': 'Bearer {}'.format(self.login())}, + verify=self.verify_ssl + ) + + response = r.json() + # self.logger.debug("Response Status: [%s]" % response["status"]) + + if response["status"] == "ok": + # A requisição foi bem sucedida + job = response["jobs"][0] + + # Verificar o status_job: + if job["job_status"] == "success": + self.logger.info("DES Job finished and ready for download and registration.") + + return job + + elif job["job_status"] == "failure": + # Neste caso o Job falhou na execução do lado do Descut, retorna erro e finaliza o job. + + self.logger.error("DES Job Finished with Descut Error: %s" % job["job_status_message"]) + raise Exception(job["job_status_message"]) + + else: + # Job ainda não terminou com sucesso e nem falhou pode estar rodando ainda, + # não fazer nada neste caso só logar uma mensagem de debug. + self.logger.debug("Des Job status: [%s]" % job["job_status"]) + return None + + else: + # Como esta requisição vai ser usada diversas vezes caso ela falhe apenas é escrito no log. + # é aceitavel que uma das requisiçoes tenha algum problema como timeout ou outro problema de rede. + msg = "Error checking DES Job status: %s" % response["message"] + self.logger.error(msg) + + def delete_job(self, jobid): + """Delete a job and all associated files + + Args: + jobid (str): Universally unique identifier of job to be deleted + + Raises: + Exception: Returns an exception, if the desacess returns a status other than ok. + """ + + self.logger.info("Removing Job on Desaccess. 
Jobid: [%s]" % jobid) + + config = settings.DESACCESS_API + url = "{}/job/delete".format(config['API_URL']) + + # Delete job + r = requests.delete( + url, + data=dict({ + "job-id": jobid + }), + headers={'Authorization': 'Bearer {}'.format(self.login())}, + verify=self.verify_ssl + ) + response = r.json() + # self.logger.debug("Response: %s" % response) + self.logger.debug("Response Status: [%s]" % response["status"]) + + if response["status"] == "ok": + # Retorna o jobid + self.logger.debug("Descut Job Deleted with Jobid: [%s]" % jobid) + + else: + msg = "Error Deleting job: %s" % response["message"] + raise Exception(msg) + + def get_cutout_tar_filename(self, jobid): + """Retona o nome do arquivo tar.gz criado no job de cutout. + + Args: + jobid (str): a specific job ID + + Returns: + str: tar.gz filename + """ + filename = "{}.tar.gz".format(jobid) + return filename + + def get_cutout_files_url(self, jobid): + """Retorna a url para o arquivo tar.gz com resultados do cutout job. + + Args: + jobid (str): a specific job ID + + Returns: + str: Url para download do tar.gz + """ + config = settings.DESACCESS_API + + # Baixar o tar.gz com todos os dados + filename = self.get_cutout_tar_filename(jobid) + + url = "{}/{}/cutout/{}".format(config["FILES_URL"], config["USERNAME"], filename) + return url + + def tile_by_coord(self, ra, dec): + """Searches for a tile's information from a coordinate. + NOTE: the urls of the files are not authorized for download. + it is necessary to use the file_url_to_download method to generate an authenticated url. + + Args: + ra (float): RA Coordinate in float. + dec (float): Dec Coordinate in float. + + Raises: + Exception: Returns an exception, if the desacess returns a status other than ok. + + Returns: + dict: Returns a dict with the keys "releases" and "results" both have the same information but are organized differently. + releases groups the tile files by release band and then image and catalog. 
+ results is an array each element is a release, and then information from the images and catalogs. + """ + + self.logger.info("Get Tile Info by RA: [%s] Dec: [%s]" % (ra, dec)) + + config = settings.DESACCESS_API + url = "{}/tiles/info/coords".format(config['API_URL']) + + r = requests.post( + url, + data={ + "coords": "{},{}".format(ra, dec) + }, + headers={'Authorization': 'Bearer {}'.format(self.login())}, + verify=self.verify_ssl + ) + + response = r.json() + self.logger.debug("Response Status: [%s]" % response["status"]) + + if response["status"] == "ok": + return response + else: + msg = "Error Retrieving Tile Info: %s" % response["message"] + raise Exception(msg) + + def tile_by_name(self, name): + """Search for tilename and return all files related to tile. + NOTE: the urls of the files are not authorized for download. it is necessary to use the file_url_to_download method to generate an authenticated url. + + Args: + name (str): Tile name. example: DES2359+0001 + + Raises: + Exception: Returns an exception, if the desacess returns a status other than ok. + + Returns: + dict: Returns a dict with the keys "releases" and "results" both have the same information but are organized differently. + releases groups the tile files by release band and then image and catalog. + results is an array each element is a release, and then information from the images and catalogs. 
+ """ + + self.logger.info("Get Tile Info by Name: [%s]" % name) + + config = settings.DESACCESS_API + url = "{}/tiles/info/name".format(config['API_URL']) + + r = requests.post( + url, + data={ + "name": name + }, + headers={'Authorization': 'Bearer {}'.format(self.login())}, + verify=self.verify_ssl + ) + + response = r.json() + self.logger.debug("Response Status: [%s]" % response["status"]) + + if response["status"] == "ok": + return response + else: + msg = "Error Retrieving Tile Info: %s" % response["message"] + raise Exception(msg) + + def file_url_to_download(self, file_url): + """Generates a URL to download a tile file. this url already has authorization parameters. + + Args: + file_url (str): Url for a file can be an image or catalog. + + Returns: + str: Authenticated url to Download a file. + """ + + auth_url = "{}?token={}".format(file_url, self.login()) + + return auth_url diff --git a/api/common/views.py b/api/common/views.py index 1c5156e63..f74137d12 100755 --- a/api/common/views.py +++ b/api/common/views.py @@ -220,7 +220,6 @@ def get_setting(request): orinal_name = name if name.find("__") > -1: arr = name.split("__") - print(arr) key = arr[0] name = arr[1].replace('__', '') @@ -303,8 +302,8 @@ def galaxy_cluster(request): @api_view(['GET']) def available_database(request): """ - Retorna os databases configurados como sendo DBs de Catalogo. - não inclui o database administrativo. + Retorna os databases configurados como sendo DBs de Catalogo. + não inclui o database administrativo. 
""" if request.method == 'GET': dbs = list([]) @@ -332,41 +331,39 @@ def teste(request): if request.method == 'GET': # import logging - - # log = logging.getLogger('import_target_csv') - - # log.info("------------------ TESTE ----------------") - - # from product.importproduct import ImportTargetListCSV - # it = ImportTargetListCSV() - # upload_data = {'mime': 'csv', 'type': 'catalog', 'class': 'objects', 'name': 'testeupload', 'displayName': 'testeupload', 'releases': ['y6a1_coadd'], 'isPublic': False, 'description': '', 'csvData': '31.12232, -6.20153\n29.92641, -5.96732\n40.09991 , -8.43430\n-179.4548 , -9.43430'} - # upload_data_header = {'mime': 'csv', 'type': 'catalog', 'class': 'objects', 'name': 'testeupload', 'displayName': 'testeupload', 'releases': ['y6a1_coadd'], 'isPublic': False, 'description': '', 'csvData': 'ra, dec, name\n31.12232, -6.20153, galaxy1\n29.92641, -5.96732, galaxy2\n40.09991 , -8.43430, galaxy3'} - # it.start_import(request.user.id, upload_data) - - # Exemplo de teste para desenvolvimento do User Query - # from userquery.create_table_as import CreateTableAs - # from userquery.models import Job, Table - # job = Job.objects.get(pk=4) - # job.job_status = 'st' - # job.save() - - # try: - # t = Table.objects.get(table_name='teste_uq') - # t.delete() - # except Exception as e: - # pass - - # ct = CreateTableAs( - # job_id=job.id, - # user_id=job.owner.id, - # table_name='teste_uq', - # table_display_name='Teste User Query', - # release_id=24, - # release_name='y6a1_coadd', - # associate_target_viewer=True, - # task_id='101010', - # schema=None + # from datetime import datetime + # import shutil + # import os + # from product.models import CutOutJob, Product + # from product.descutoutservice import DesCutoutService + # from django.utils.timezone import utc + # import pandas as pd + + # log = logging.getLogger('descutoutservice') + + # log.debug("------ TESTE DESCUT ------") + + # dc = DesCutoutService() + + # job = CutOutJob.objects.create( + 
# cjb_product=Product.objects.get(pk=2), + # owner=request.user, + # cjb_display_name="Teste Cutout", + # cjb_tag="Y6A1", + # cjb_xsize=1, + # cjb_ysize=1, + # cjb_make_fits=True, + # cjb_fits_colors="grizy", + # cjb_make_stiff=True, + # cjb_stiff_colors="gri;rig;zgi", + # cjb_make_lupton=True, + # cjb_lupton_colors="gri;rig;zgi", + # cjb_status="st", + # cjb_label_position="inside", + # cjb_label_properties="meta_id,ra,dec", + # cjb_label_colors="2eadf5", + # cjb_label_font_size="10" # ) - # ct.do_all() + # job.save() return Response(dict({'status': "success"})) diff --git a/api/dri/settings/defaults.py b/api/dri/settings/defaults.py index e7f89fe25..9ce1563c2 100644 --- a/api/dri/settings/defaults.py +++ b/api/dri/settings/defaults.py @@ -355,37 +355,25 @@ USER_QUERY_EXECUTION_TIMEOUT = 300 # Limite de linhas de uma query USER_QUERY_MAX_ROWS = 100000 -# DES Cutout Service: -# DESCUT Colaboracao -DES_CUTOUT_SERVICE = { - # 1 para a versao do Descut Colaboracao 2 para versao Descut Public - 'API_VERSION': 1, - 'HOST': 'https://descut.cosmology.illinois.edu', - 'USER': '', - 'PASSWORD': '', - # Path onde ficaram os arquivos de cutout, esse parametro sera usado em conjunto com DATA_DIR para criar o path - # absoluto para os arquivos. - 'CUTOUT_DIR': 'targets/cutouts', - # Url base que sera usada para exibir as imagens geradas esse parametro deve ser mapeado no dri.conf no apache - 'CUTOUT_SOURCE': '/data', - # Tempo de delay para a task check_jobs em minutos - 'CUTOUT_TASK_CHECK_JOBS_DELAY': 1, - # Lista dos Releases que podem ser usados para cutout em lowercase. 
use [] para permitir todos - 'AVAILABLE_RELEASES': [], - # Quantidade limit de objetos a ser passada para o descutout - 'MAX_OBJECTS': 300, - # Token de authenticacao utilizado apenas para o DescutPublico para colaboracao usar None - 'TOKEN': None, - # Esta opcao deve ser False para o DescutPublico e True para Colaboracao - 'DELETE_JOB_AFTER_DOWNLOAD': True, - # Url para gerar o token, para o publico usar None. - 'API_GET_TOKEN': '/api/token/', - # Url para a API reponsavel por criar os jobs - 'API_CREATE_JOBS': '/api/jobs/', - # Url para a API responsavel por retornar o status dos jobs - 'API_CHECK_JOBS': '/api/jobs/', - # No DescutPublico e necessario passar um email para onde seram enviadas as notificacoes do descut. - 'EMAIL': '' + +# DES ACCESS API +# Usada para o Cutout de targets e Download das imagens de Tiles. +# API Reference: https://deslabs.ncsa.illinois.edu/desaccess/docs/api/ +# Essas configs são usadas pelas classes common.desaccess e product.descutoutservice +DESACCESS_API = { + # URL Principal do Serviço. + 'API_URL': 'https://deslabs.ncsa.illinois.edu/desaccess/api', + # URL para download dos resultados do cutout job. + 'FILES_URL': 'https://deslabs.ncsa.illinois.edu/files-desaccess', + # Usuario Oracle do NCSA com acesso ao desaccess. + 'USERNAME': None, + 'PASSWORD': None, + # Database Oracle que será usado para authenticar as credenciais. must be either 'dessci' or 'desoper' usar mesmo database usado em NCSA_AUTHENTICATION_DB + 'DATABASE': 'dessci', + # Lista dos Releases disponiveis no serviço do descut. OBS: está lista de releases é utilizada pela interface no formulário de submissão. + 'AVAILABLE_RELEASES': ['Y6A1', 'Y3A2', 'Y1A1', 'SVA1'], + # Max de cutouts que o Descut aceita por job. 
default is 20000 + 'MAX_OBJECTS': 20000 } # Others app config: # Tempo limite em horas para que um produto fique disponivel, apos este tempo diff --git a/api/dri/settings/local_vars.py.template b/api/dri/settings/local_vars.py.template index dc9e5eeef..d4c1a27d9 100644 --- a/api/dri/settings/local_vars.py.template +++ b/api/dri/settings/local_vars.py.template @@ -11,17 +11,17 @@ DEBUG = False # the hostname and port number of the current Server BASE_HOST = "http://localhost" -# A list of strings representing the host/domain names that this Django site can serve. +# A list of strings representing the host/domain names that this Django site can serve. ALLOWED_HOSTS = ['*'] # Database -# Esta variavel deve ser preechida no local_vars.py deve conter obrigatóriamente -# 2 bancos de dados denominados de default e catalog. +# Esta variavel deve ser preechida no local_vars.py deve conter obrigatóriamente +# 2 bancos de dados denominados de default e catalog. # - N bancos de dados podem ser cadastradados. # - O mesmo banco pode ser cadastrado mais de uma vez com alias diferentes, as vezes é necessário para usar schemas diferentes do mesmo banco, exemplo dos bancos oracle do NCSA. # - Pode ser cadastrados bancos de dados de tipos diferentes exemplo Oracle e Postgresql. 
-# - para o Oracle o nome da chave que representa o banco não pode ter mais de 30 caracteres e nem o database name, +# - para o Oracle o nome da chave que representa o banco não pode ter mais de 30 caracteres e nem o database name, # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { @@ -33,7 +33,7 @@ DATABASES = { 'PORT': 5432, 'OPTIONS': { 'options': '-c search_path=dri_admin,public' - } + } }, 'catalog': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', @@ -139,38 +139,36 @@ SCHEMA_SAVE_AS = None USER_QUERY_EXECUTION_TIMEOUT = 300 # Limite de linhas de uma query USER_QUERY_MAX_ROWS = 100000 -# DES Cutout Service: -# DESCUT Colaboracao -DES_CUTOUT_SERVICE = { - # 1 para a versao do Descut Colaboracao 2 para versao Descut Public - 'API_VERSION': 1, - 'HOST': 'https://descut.cosmology.illinois.edu', - 'USER': '', - 'PASSWORD': '', - # Path onde ficaram os arquivos de cutout, esse parametro sera usado em conjunto com DATA_DIR para criar o path - # absoluto para os arquivos. - 'CUTOUT_DIR': 'targets/cutouts', - # Url base que sera usada para exibir as imagens geradas esse parametro deve ser mapeado no dri.conf no apache - 'CUTOUT_SOURCE': '/data', - # Tempo de delay para a task check_jobs em minutos - 'CUTOUT_TASK_CHECK_JOBS_DELAY': 1, - # Lista dos Releases que podem ser usados para cutout em lowercase. use [] para permitir todos - 'AVAILABLE_RELEASES': [], - # Quantidade limit de objetos a ser passada para o descutout - 'MAX_OBJECTS': 300, - # Token de authenticacao utilizado apenas para o DescutPublico para colaboracao usar None - 'TOKEN': None, - # Esta opcao deve ser False para o DescutPublico e True para Colaboracao - 'DELETE_JOB_AFTER_DOWNLOAD': True, - # Url para gerar o token, para o publico usar None. 
- 'API_GET_TOKEN': '/api/token/', - # Url para a API reponsavel por criar os jobs - 'API_CREATE_JOBS': '/api/jobs/', - # Url para a API responsavel por retornar o status dos jobs - 'API_CHECK_JOBS': '/api/jobs/', - # No DescutPublico e necessario passar um email para onde seram enviadas as notificacoes do descut. - 'EMAIL': '' +# TILE VIEWER/TILE INSPECTION (antigo eyeballing): +# Esta config muda o comportamento da ferramenta Tile Viewer. +# Quando Ligada habilita as funcionalidades de validação e inspeção das tiles +# Comentários, avaliações (Good, Bad), avaiação por posição e relatórios. +# Quando Desligada a ferramente se torna Tile Viewer, ficam só as +# funcões de navegação entre tiles, visualização e download. +# True: para Tile Inspection (essa opção deve estar ligada somente no ambiente da colaboração NCSA.) +# False: para Tile Viewer (essa é a opção default) +TILE_VIEWER_INSPECTION_ENABLED=False + +# DES ACCESS API +# Usada para o Cutout de targets e Download das imagens de Tiles. +# Essas configs são usadas pelas classes common.desaccess e product.descutoutservice +# API Reference: https://deslabs.ncsa.illinois.edu/desaccess/docs/api/ +DESACCESS_API = { + # URL Principal do Serviço. + 'API_URL': 'https://deslabs.ncsa.illinois.edu/desaccess/api', + # URL para download dos resultados do cutout job. + 'FILES_URL': 'https://deslabs.ncsa.illinois.edu/files-desaccess', + # Usuario Oracle do NCSA com acesso ao desaccess. + 'USERNAME': None, + 'PASSWORD': None, + # Database Oracle que será usado para authenticar as credenciais. must be either 'dessci' or 'desoper', usar mesmo database usado em NCSA_AUTHENTICATION_DB + 'DATABASE': 'dessci', + # Lista dos Releases disponiveis no serviço do descut. OBS: está lista de releases é utilizada pela interface no formulário de submissão. + 'AVAILABLE_RELEASES': ['Y6A1', 'Y3A2', 'Y1A1', 'SVA1'], + # Max de cutouts que o Descut aceita por job. 
default is 20000 + 'MAX_OBJECTS': 20000 } + # Others app config: # Tempo limite em horas para que um produto fique disponivel, apos este tempo # o produto sera removido pelo garbage colector e sua tabela sera dropada. Use None para desabilitar. diff --git a/api/dri/urls.py b/api/dri/urls.py index 0ace36bfc..71d63a5ff 100644 --- a/api/dri/urls.py +++ b/api/dri/urls.py @@ -34,18 +34,18 @@ router = routers.DefaultRouter() -router.register(r'logged', common_views.LoggedUserViewSet, base_name='logged') -router.register(r'users_same_group', common_views.UsersInSameGroupViewSet, base_name='users_same_group') +router.register(r'logged', common_views.LoggedUserViewSet, basename='logged') +router.register(r'users_same_group', common_views.UsersInSameGroupViewSet, basename='users_same_group') router.register(r'releases', coadd_views.ReleaseViewSet) router.register(r'tags', coadd_views.TagViewSet) router.register(r'tiles', coadd_views.TileViewSet) -router.register(r'dataset', coadd_views.DatasetViewSet, base_name='dataset') -router.register(r'footprints', coadd_views.DatasetFootprintViewSet, base_name='footprints') +router.register(r'dataset', coadd_views.DatasetViewSet, basename='dataset') +router.register(r'footprints', coadd_views.DatasetFootprintViewSet, basename='footprints') router.register(r'surveys', coadd_views.SurveyViewSet) -router.register(r'productclass', product_classifier_views.ProductClassViewSet, base_name='productclass') -router.register(r'productgroup', product_classifier_views.ProductGroupViewSet, base_name='productgroup') +router.register(r'productclass', product_classifier_views.ProductClassViewSet, basename='productclass') +router.register(r'productgroup', product_classifier_views.ProductGroupViewSet, basename='productgroup') router.register(r'productclasscontent', product_classifier_views.ProductClassContentViewSet) router.register(r'product', product_views.ProductViewSet) @@ -58,7 +58,7 @@ router.register(r'productcontent', 
product_views.ProductContentViewSet) router.register(r'productassociation', product_views.ProductContentAssociationViewSet) router.register(r'association', product_views.ProductAssociationViewSet) -router.register(r'AllProducts', product_views.AllProductViewSet, base_name='allproducts') +router.register(r'AllProducts', product_views.AllProductViewSet, basename='allproducts') router.register(r'productsetting', product_views.ProductSettingViewSet) router.register(r'currentsetting', product_views.CurrentSettingViewSet) router.register(r'contentsetting', product_views.ProductContentSettingViewSet) @@ -70,9 +70,9 @@ router.register(r'filterset', product_views.FiltersetViewSet) router.register(r'filtercondition', product_views.FilterConditionViewSet) router.register(r'bookmarked', product_views.BookmarkedViewSet) -router.register(r'productexport', product_views.ExportViewSet, base_name='export_product') -router.register(r'productsaveas', product_views.SaveAsViewSet, base_name='product_save_as') -router.register(r'import_target_list', product_views.ImportTargetListViewSet, base_name='import_target_list') +router.register(r'productexport', product_views.ExportViewSet, basename='export_product') +router.register(r'productsaveas', product_views.SaveAsViewSet, basename='product_save_as') +router.register(r'import_target_list', product_views.ImportTargetListViewSet, basename='import_target_list') router.register(r'feature', validation_views.FeatureViewSet) router.register(r'flagged', validation_views.FlaggedViewSet) @@ -84,18 +84,18 @@ router.register(r'site', product_register_views.SiteViewSet) router.register(r'importexternalprocess', product_register_views.ExternalProcessImportViewSet, - base_name='importprocess') + basename='importprocess') router.register(r'importauthorization', product_register_views.AuthorizationViewSet) router.register(r'application', interfaces_views.ApplicationViewSet) router.register(r'tutorial', interfaces_views.TutorialViewSet) # API Relacionadas 
ao Banco de Dados de Catalogo -router.register(r'target', catalog_views.TargetViewSet, base_name='target') +router.register(r'target', catalog_views.TargetViewSet, basename='target') router.register(r'objectsrating', catalog_views.RatingViewSet) router.register(r'objectsreject', catalog_views.RejectViewSet) router.register(r'objectscomments', catalog_views.CommentsViewSet) -router.register(r'catalogobjects', catalog_views.CatalogObjectsViewSet, base_name='catalog_objects') +router.register(r'catalogobjects', catalog_views.CatalogObjectsViewSet, basename='catalog_objects') # Comment API router.register(r'comment/position', comment_views.PositionViewSet) @@ -106,11 +106,11 @@ router.register(r'userquery_sample', userquery_views.SampleViewSet) router.register(r'userquery_job', userquery_views.JobViewSet) router.register(r'userquery_table', userquery_views.TableViewSet) -router.register(r'userquery_validate', userquery_views.QueryValidate, base_name='validate_query') -router.register(r'userquery_preview', userquery_views.QueryPreview, base_name='preview_query') -router.register(r'userquery_property', userquery_views.TableProperties, base_name='table') -router.register(r'userquery_target', userquery_views.TargetViewerRegister, base_name='target_viewer_register') -router.register(r'userquery_download', userquery_views.TableDownload, base_name='table_download') +router.register(r'userquery_validate', userquery_views.QueryValidate, basename='validate_query') +router.register(r'userquery_preview', userquery_views.QueryPreview, basename='preview_query') +router.register(r'userquery_property', userquery_views.TableProperties, basename='table') +router.register(r'userquery_target', userquery_views.TargetViewerRegister, basename='target_viewer_register') +router.register(r'userquery_download', userquery_views.TableDownload, basename='table_download') # Aladin API router.register(r'aladin/image', aladin_views.ImageViewSet) @@ -127,7 +127,7 @@ 
url(r'^dri/api/get_fits_by_tilename', coadd_views.get_fits_by_tilename), url(r'^dri/api/vizier/', product_views.vizier_cds), url(r'^dri/api/send_statistic_email/', common_views.send_statistic_email), - url(r'^dri/api/available_database/', common_views.available_database), + url(r'^dri/api/available_database/', common_views.available_database), url(r'^dri/api/teste/', common_views.teste), diff --git a/api/lib/CatalogDB.py b/api/lib/CatalogDB.py index 8cd376973..0547efc3f 100644 --- a/api/lib/CatalogDB.py +++ b/api/lib/CatalogDB.py @@ -1,15 +1,16 @@ import json +import logging import math import warnings import sqlalchemy from sqlalchemy import Column, cast, desc from sqlalchemy import exc as sa_exc +from sqlalchemy import func from sqlalchemy.sql import and_, or_, select from sqlalchemy.sql.expression import between, literal_column from lib.sqlalchemy_wrapper import DBBase -import logging class CatalogDB(DBBase): @@ -328,6 +329,15 @@ def get_condition_square(self, lowerleft, upperright, property_ra, property_dec) return and_(raCondition, decCondition).self_group() + def count(self): + with self.engine.connect() as con: + + stm = select([func.count()]).select_from(self.table) + self.log.debug("SQL: [%s]" % self.statement_to_str(stm)) + + result = con.execute(stm) + return result.fetchone()[0] + class CatalogObjectsDBHelper(CatalogTable): def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, start=None, url_filters=None, @@ -343,7 +353,7 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st stm = select(self.query_columns).select_from(self.table) filters = list() - coordinates_filter = "" + coordinates_filter = None for condition in self.filters: if condition.get("op") == "coordinates": @@ -359,7 +369,10 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st base_filters = and_(*self.do_filter(self.table, filters)) - stm = stm.where(and_(base_filters, coordinates_filter)) + if 
coordinates_filter: + stm = stm.where(and_(base_filters, coordinates_filter)) + else: + stm = stm.where(base_filters) # Ordenacao if self.ordering is not None: @@ -382,7 +395,7 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st if self.start: stm = stm.offset(literal_column(str(self.start))) - print(str(stm)) + self.log.debug("SQL: [%s]" % self.statement_to_str(stm)) return stm @@ -451,36 +464,67 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st # Cria os Joins stm_join = self.table - # Join com Catalog_Rating - stm_join = stm_join.join(catalog_rating, - and_( - # Product ID - catalog_rating.c.catalog_id == self.product.pk, - # User ID - catalog_rating.c.owner == self.user.pk, - # Object ID - self.get_column_obj(self.table, property_id) == catalog_rating.c.object_id, - # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog rating object_id é string - # 15/09/2020 - este cast para string gerou um bug no Oracle - # cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_rating.c.object_id, - ), - isouter=True) - - stm_join = stm_join.join(catalog_reject, - and_( - # Product ID - catalog_reject.c.catalog_id == self.product.pk, - # User ID - catalog_reject.c.owner == self.user.pk, - # Object Id OR Reject is NULL - or_(self.get_column_obj(self.table, property_id) == catalog_reject.c.object_id, - catalog_reject.c.id.is_(None)) - # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog reject object_id é string - # 15/09/2020 - este cast para string gerou um bug no Oracle - # or_(cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_reject.c.object_id, - # catalog_reject.c.id.is_(None)) - ), - isouter=True) + # Identificar qual é o banco de dados a query é diferente entre Oracle e Postgresql + if self.get_engine() == "oracle": + # Join com Catalog_Rating + stm_join = stm_join.join(catalog_rating, + and_( + # 
Product ID + catalog_rating.c.catalog_id == self.product.pk, + # User ID + catalog_rating.c.owner == self.user.pk, + # Object ID + self.get_column_obj(self.table, property_id) == catalog_rating.c.object_id, + # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog rating object_id é string + # 15/09/2020 - este cast para string gerou um bug no Oracle + # cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_rating.c.object_id, + ), + isouter=True) + + stm_join = stm_join.join(catalog_reject, + and_( + # Product ID + catalog_reject.c.catalog_id == self.product.pk, + # User ID + catalog_reject.c.owner == self.user.pk, + # Object Id OR Reject is NULL + or_(self.get_column_obj(self.table, property_id) == catalog_reject.c.object_id, + catalog_reject.c.id.is_(None)) + # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog reject object_id é string + # 15/09/2020 - este cast para string gerou um bug no Oracle + # or_(cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_reject.c.object_id, + # catalog_reject.c.id.is_(None)) + ), + isouter=True) + + elif self.get_engine() == "postgresql_psycopg2" or self.get_engine() == "sqlite3": + # Join com Catalog_Rating + stm_join = stm_join.join(catalog_rating, + and_( + # Product ID + catalog_rating.c.catalog_id == self.product.pk, + # User ID + catalog_rating.c.owner == self.user.pk, + # Object ID + # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog rating object_id é string + cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_rating.c.object_id, + ), + isouter=True) + + stm_join = stm_join.join(catalog_reject, + and_( + # Product ID + catalog_reject.c.catalog_id == self.product.pk, + # User ID + catalog_reject.c.owner == self.user.pk, + # Object Id OR Reject is NULL + # Fazer o Cast da coluna objeto id do catalogo para String, por que na catalog reject object_id é string + 
or_(cast(self.get_column_obj(self.table, property_id), sqlalchemy.String) == catalog_reject.c.object_id, + catalog_reject.c.id.is_(None)) + ), + isouter=True) + else: + raise Exception("Catalog, rating and reject query was not implemented for this database engine.") query_columns = list() @@ -513,9 +557,9 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st # Filtros filters = list() - rating_filters = "" - reject_filters = "" - coordinate_filters = "" + rating_filters = and_() + reject_filters = and_() + coordinate_filters = and_() # Targets podem ter filtros especias checar a existencia deles if self.filters is not None and len(self.filters) > 0: @@ -551,6 +595,7 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st base_filters = and_(*self.do_filter(self.table, filters)) stm = stm.where(and_(base_filters, coordinate_filters, rating_filters, reject_filters)) + # stm = stm.where(and_(rating_filters, reject_filters)) # Ordenacao if self.ordering is not None: @@ -580,6 +625,6 @@ def create_stm(self, columns=list(), filters=None, ordering=None, limit=None, st if self.start: stm = stm.offset(literal_column(str(self.start))) - self.log.info("Target Query: [ %s ]" % str(stm)) + self.log.info("Target Query: [ %s ]" % self.statement_to_str(stm)) return stm diff --git a/api/lib/sqlalchemy_wrapper.py b/api/lib/sqlalchemy_wrapper.py index 940f4a47e..2d787ed4a 100644 --- a/api/lib/sqlalchemy_wrapper.py +++ b/api/lib/sqlalchemy_wrapper.py @@ -20,7 +20,6 @@ from lib.db_sqlite import DBSqlite - class DBBase: # Django database engines available_engines = list(['sqlite3', 'oracle', 'postgresql_psycopg2']) @@ -168,9 +167,12 @@ def get_count(self, table, schema=None): with warnings.catch_warnings(): warnings.simplefilter("ignore", category=sa_exc.SAWarning) - table = Table(table, self.metadata, - autoload=True, schema=schema) + table = Table(table, self.metadata, autoload=True, schema=schema) + stm = 
select([func.count()]).select_from(table) + + self.log.debug("SQL: [%s]" % self.statement_to_str(stm)) + result = con.execute(stm) return result.fetchone()[0] diff --git a/api/product/__init__.py b/api/product/__init__.py index e69de29bb..d58fec616 100644 --- a/api/product/__init__.py +++ b/api/product/__init__.py @@ -0,0 +1 @@ +default_app_config = 'product.apps.ProductConfig' diff --git a/api/product/admin.py b/api/product/admin.py index 2babbe9b5..0dce8144d 100644 --- a/api/product/admin.py +++ b/api/product/admin.py @@ -54,19 +54,25 @@ class MapAdmin(admin.ModelAdmin): class CutOutJobAdmin(admin.ModelAdmin): list_display = ( - 'id', 'cjb_product', 'cjb_display_name', 'cjb_status', 'cjb_job_type', 'cjb_tag', 'cjb_job_id', 'owner', + 'id', 'cjb_product', 'cjb_display_name', 'cjb_status', 'cjb_tag', 'owner', ) list_display_links = ('id',) search_fields = ('cjb_display_name',) -class CutoutAdmin(admin.ModelAdmin): +class DesjobAdmin(admin.ModelAdmin): list_display = ( - 'id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', 'ctt_filter', 'ctt_thumbname', - 'ctt_file_name', 'ctt_file_path', 'ctt_file_type', 'ctt_file_size', 'ctt_download_start_time', - 'ctt_download_finish_time', + 'id', 'djb_cutout_job', 'djb_jobid', 'djb_status', 'djb_start_time', 'djb_finish_time', 'djb_message', ) list_display_links = ('id',) + search_fields = ('djb_jobid',) + + +class CutoutAdmin(admin.ModelAdmin): + list_display = ( + 'id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', 'ctt_img_format', 'ctt_filter', + 'ctt_file_name', 'ctt_file_path', 'ctt_file_type', 'ctt_file_size', ) + list_display_links = ('id',) search_fields = ('id',) @@ -126,6 +132,7 @@ class FiltersetdAdmin(admin.ModelAdmin): class FilterConditionAdmin(admin.ModelAdmin): list_display = ('id', 'filterset', 'fcd_property', 'fcd_property_name', 'fcd_operation', 'fcd_value') + class BookmarkedAdmin(admin.ModelAdmin): list_display = ('id', 'product', 'owner', 'is_starred') @@ 
-138,6 +145,7 @@ class BookmarkedAdmin(admin.ModelAdmin): admin.site.register(Catalog, CatalogAdmin) admin.site.register(Map, MapAdmin) admin.site.register(CutOutJob, CutOutJobAdmin) +admin.site.register(Desjob, DesjobAdmin) admin.site.register(Cutout, CutoutAdmin) admin.site.register(Mask, MaskAdmin) admin.site.register(ProductContent, ProductContentAdmin) diff --git a/api/product/apps.py b/api/product/apps.py index 56d7d268d..b2bb854e7 100644 --- a/api/product/apps.py +++ b/api/product/apps.py @@ -3,3 +3,7 @@ class ProductConfig(AppConfig): name = 'product' + verbose_name = ('product') + + def ready(self): + import product.signals diff --git a/api/product/descutoutservice.py b/api/product/descutoutservice.py index afbac1abe..8c903dcee 100644 --- a/api/product/descutoutservice.py +++ b/api/product/descutoutservice.py @@ -1,798 +1,735 @@ -import csv -import datetime +import io +import json import logging +import math import os -from pprint import pformat +import shutil +import tarfile +import traceback +from datetime import datetime, timedelta +from pathlib import Path import humanize -import requests +import pandas as pd +from common.desaccess import DesAccessApi from common.download import Download +from common.models import Filter from common.notify import Notify from django.conf import settings from django.db.models import Sum from django.template.loader import render_to_string from django.utils import timezone +from django.utils.timezone import utc from lib.CatalogDB import CatalogObjectsDBHelper + from product.association import Association -from product.models import Catalog -from product.models import CutOutJob -from product.models import Cutout +from product.models import Catalog, Cutout, CutOutJob, Desjob class DesCutoutService: + """This Allows the execution of a Cutout job using the DESaccess API. + has methods for submitting, monitoring, downloading and registering jobs and their results. 
+ + Some job steps are asynchronous and are controlled by Celery tasks. + + the process starts with the creation of a Cutoutjob model. that starts with status start. + A celery daemon searches from time to time for jobs with this status and submits that job to DESacces. + Another celery daemon looks for jobs that have been submitted, and when they finish running, the results are downloaded and recorded. + + """ db = None def __init__(self): # Get an instance of a logger self.logger = logging.getLogger("descutoutservice") - self.logger.info("Start!") - - self.logger.info("Retrieving settings for des cutout service") - - try: - params = settings.DES_CUTOUT_SERVICE - # self.logger.debug(params) + # fazer os request sem verificar o certificado SSL / HTTPS + self.verify_ssl = False - self.api_version = params["API_VERSION"] + # Cria uma instancia da classe DesAccesApi que é responsavel pela integração com o serviço DESaccess. + self.desapi = DesAccessApi() + # Configura para usar o mesmo log desta classe, + # assim as mensagens de log referente as requisições vão ficar visiveis neste log. + self.desapi.setLogger(self.logger) - self.host = params["HOST"] - self.user = params["USER"] - self.password = params["PASSWORD"] - self.token = params["TOKEN"] + def get_cutoutjobs_by_id(self, id): + """Returns a CutoutJob model by its id. - self.email = params["EMAIL"] + Args: + id (int): CutoutJob model primary key - self.check_jobs_task_delay = params["CUTOUT_TASK_CHECK_JOBS_DELAY"] + Returns: + product.models.CutOutJob: Instance of CutOutJob. + """ + try: + return CutOutJob.objects.get(pk=int(id)) + except CutOutJob.DoesNotExist as e: + self.on_error(id, e) - # Diretorio raiz onde ficaram as imagens do cutout - self.data_dir = settings.DATA_DIR - self.cutout_dir = params["CUTOUT_DIR"] + def get_job_path(self, id): + """Returns the path where the job's local files are located. 
- # Limit de Objetos que podem ser enviados ao descut - self.cutout_max_objects = params["MAX_OBJECTS"] + Args: + id (int): CutoutJob model primary key - # Deletar os jobs no DESCUT depois de baixar as imagens - self.delete_job_after_download = params["DELETE_JOB_AFTER_DOWNLOAD"] + Returns: + str: Path where are the files for this job. + """ + basepath = os.path.join(settings.DATA_DIR, 'cutouts') + job_path = os.path.join(basepath, str(id)) + return job_path - self.host_token = None - if params["API_GET_TOKEN"] is not None: - self.host_token = self.host + params["API_GET_TOKEN"] + def create_job_path(self, id): + """Creates a directory for the cutout job. - self.host_create_jobs = self.host + params["API_CREATE_JOBS"] + Args: + id (int): CutoutJob model primary key - self.host_check_jobs = self.host + params["API_CHECK_JOBS"] + Raises: + error: Failed to create the directory + Returns: + str: Path where are the files for this job. + """ + job_path = self.get_job_path(id) + self.logger.debug("Trying to create the job directory") - except Exception as e: - msg = ("Error in the Cutouts parameters in the settings. " - "Check the DES_CUTOUT_SERVICE section if it is configured correctly. ERROR: %s" % e) - raise Exception(msg) + try: - # Tipos de arquivos recebidos que nao sao imagens - self.not_images = ["log", "csv", "stifflog"] + Path(job_path).mkdir(parents=True, exist_ok=True) + self.logger.info("Directory successfully created [%s]" % job_path) + return job_path - # Nome do arquivo de resultados - self.result_file = "result_file.txt" + except OSError as error: + self.logger.error("Failed to create the directory [%s]" % job_path) + raise error - # fazer os request sem verificar o certificado SSL / HTTPS - self.verify_ssl = False + def get_summary_path(self, id, jobid): + """Returns the path to a DES job's summary file. 
- self.logger.debug("host_token: %s" % self.host_token) - self.logger.debug("host_create_jobs: %s" % self.host_create_jobs) - self.logger.debug("host_check_jobs: %s" % self.host_check_jobs) + Args: + id (int): CutoutJob model primary key + jobid (str): Universally unique identifier of DES job - def generate_token(self): - """ - Returns a token to create other requests - Returns: str(token) + Returns: + str: Path to summary.json file """ - self.logger.info("Generating a new Authentication token") - - if self.host_token is not None: - - # Create Authetication Token - req = requests.post( - self.host_token, - data={ - "username": self.user, - "password": self.password - }, - verify=self.verify_ssl) + summary_path = os.path.join(self.get_job_path(id), str(jobid), 'summary.json') + return summary_path - try: - self.logger.debug(req.text) - - return req.json()["token"] - except Exception as e: - text = req.json() - msg = ("Token generation error %s - %s" % (req.status_code, text["message"])) + def start_job_by_id(self, id): + """This method submits a local job to DESaccess and creates a desjob. + it also makes the selection of the parameters that will be used by the desjob to create the cutouts. - self.logger.critical(msg) + Depending on the number of positions that will be sent more than one DES job can be created. - raise Exception(msg) + At the end of this function the job will be in running status. + a daemon will look for jobs in this status and check when the DES job has finished running. 
- else: - return self.token + Args: + id (int): CutoutJob model primary key - def check_token_status(self, token): """ - Check Token status: Check the expiration time for a token - Returns: bool() - """ - self.logger.info("Check the expiration time for a token") - - if self.host_token is not None: - - req = requests.get( - self.host_token + "?token=" + token, verify=self.verify_ssl) - - if req.json()["status"].lower() == "ok": - return True - else: - return False - else: - return True + self.logger.info("Des Cutout Starting Job by ID %s" % id) - def create_job(self, token, data): - """ - Submit a Job to service - :param token: - :param data: { - "token" : "aaa...", # required - "ra" : str(ra), # required - "dec" : str(dec), # required - "job_type" : "coadd", # required "coadd" or "single" - "comment" : "String" # required Adicionado em 09/2017 - "xsize" : str(xs), # optional (default : 1.0) - "ysize" : str(ys), # optional (default : 1.0) - "tag" : "Y3A1_COADD", # optional for "coadd" jobs (default: Y3A1_COADD, see Coadd Help page for more options) - "band" : "g,r,i", # optional for "single" epochs jobs (default: all bands) - "no_blacklist" : "false", # optional for "single" epochs jobs (default: "false"). 
return or not blacklisted exposures - "list_only" : "false", # required for DR1 public version (default : "false") "true": will not generate pngs (faster) - "email" : "myemail@mmm.com" # optional will send email when job is finished - "username" : "Username" # Required for DR1 public version - - } - """ - self.logger.info("Sending request to create a new job in the Service") + # Recupera o Model CutoutJob pelo id + try: + job = self.get_cutoutjobs_by_id(id) - data["token"] = token + self.logger.debug("CutoutJob Name: %s" % job.cjb_display_name) - self.logger.debug("Host Jobs: %s" % self.host_create_jobs) + # Notificacao por email de Inicio do Job + CutoutJobNotify().create_email_message(job) - req = requests.post( - self.host_create_jobs, - data=data, - verify=self.verify_ssl - ) + # Alterar o status para Before Submit + job.cjb_status = 'bs' + job.save() - self.logger.debug(req) + # Criar um diretório para os arquivos do Job. + job.cjb_cutouts_path = self.create_job_path(id) + job.save() - try: - if req.json()["status"] == "ok": - self.logger.debug(req.text) + # Parametros de submissão + data = dict({}) - return req.json() + # Tamanho dos Cutouts + if job.cjb_xsize: + data.update({"xsize": job.cjb_xsize}) + if job.cjb_ysize: + data.update({"ysize": job.cjb_ysize}) + # Geração de Imagens Fits + # Enviar o parametro que indica para o DESaccess que não é para incluir os fits + # utilizados na geração das imagens coloridas, incluir somente a cores solicitadas no colors_fits. + # discard_fits_files (bool) Discard FITS files that are only created in order to produce requested RGB images. + # FITS files that are explicitly requested are retained. 
+ if job.cjb_make_fits: + data.update({ + "make_fits": True, + "colors_fits": job.cjb_fits_colors, + "discard_fits_files": True + }) else: - self.logger.warning(req.text) - msg = ("Create Job Error: " % req.json()["message"]) - raise Exception(msg) - - except Exception as e: - - self.logger.error(req.text) + data.update({ + "make_fits": False, + "discard_fits_files": True + }) - msg = ("Request Create Job error %s - %s" % (req.status_code, req.text)) + # Geração de Imagens Coloridas com Stiff + if job.cjb_make_stiff: + data.update({ + "make_rgb_stiff": True, + "rgb_stiff_colors": job.cjb_stiff_colors + }) - raise Exception(msg) + # Geração de Imagens Coloridas com lupton + if job.cjb_make_lupton: + data.update({ + "make_rgb_lupton": True, + "rgb_lupton_colors": job.cjb_lupton_colors + }) - def get_job_results(self, token, jobid): - """ - Get Job Results : Mainly returns a list of links to files + # Seleção do Release + data.update({ + "release": job.cjb_tag + }) - return - links (string): quando o job termina com sucesso - None: quando o job ainda nao terminou - False: quando o job retorna com status failure - """ + # Preparar a lista de objetos para submissão - self.logger.info("Get Results for job %s" % jobid) + # Recuperar da settings a quantidade maxima de rows por job + config = settings.DESACCESS_API + max_objects = config["MAX_OBJECTS"] - # TODO Diferenca entre Colaboracao e DR1 Public talvez transformar em metodos diferentes. - if self.api_version == 1: + # Checar o tamanho do lista e dividir em varios jobs caso ultrapasse o limit. + count = self.get_catalog_count(job.cjb_product_id) - req = requests.get( - self.host_check_jobs + "?token=" + token + "&jobid=" + jobid, verify=self.verify_ssl) + # Quantidade de Paginas ou jobs que serao necessários. 
+ pages_count = math.ceil(float(count) / max_objects) + # self.logger.debug("Pages: [%s]" % pages_count) - self.logger.debug(req.text) + # Fazer a query dos objetos dividindo em paginas pelo tamanho maximo de objetos que o Desaccess aceita. + for page in range(1, pages_count + 1): + # self.logger.debug("Current Page: [%s]" % page) - data = req.json() + # Calculo do Offset para a paginação. + offset = ((page - 1) * max_objects) + # self.logger.debug("Offset: [%s]" % offset) - if data["status"] != "error" and data["job_status"] == "SUCCESS": + rows = self.get_catalog_objects(job.cjb_product.pk, limit=max_objects, offset=offset) - if "links" in data and data["links"] is not None: - self.logger.info("This job %s is finished and is ready to be downloaded" % jobid) + df = pd.DataFrame(rows) - return data["links"] - else: - # Nao retornou a lista de resultado - self.logger.warning("Descut returned success, but not the list of download links.") - return None + # Cria uma string no formato csv para ser enviada para Desaccess. + s_positions = io.StringIO() + df.to_csv(s_positions, columns=["meta_ra", "meta_dec"], header=["RA", "DEC"], index=False) - elif data["status"] != "error" and data["job_status"] == "PENDING": - # O job ainda nao terminou no servidor - self.logger.info("This job %s is still running" % jobid) - return None + data.update({ + "positions": s_positions.getvalue(), + }) - else: - return False + # Submeter o Job e guardar o id retornado pelo Descut + jobid = self.desapi.submit_cutout_job(data) - else: + # Cria uma instancia do Desjob + record = Desjob.objects.create( + djb_cutout_job=job, + djb_jobid=jobid) + record.save() - data = { - "token": token, - "jobid": jobid - } - req = requests.post( - self.host_check_jobs, - data=data, - verify=self.verify_ssl, - ) + self.logger.info("New Desjob was created. 
Desjob: [%s] Jobid: [%s]" % (record.pk, jobid)) - self.logger.debug(req.text) + # Criar um arquivo csv com as posições enviadas para este job este aquivo sera usado para associar os resultados. + target_csv = os.path.join(self.get_job_path(job.id), "{}_targets.csv".format(jobid)) + df.to_csv( + target_csv, + sep=";", + header=True, + index=False + ) + self.logger.debug("Csv file with the positions of this jobid was created. CSV: [%s]" % target_csv) - data = req.json() + # Cutout Job enviado e aguardando termino na API + # Alterar o status para Running + job.cjb_status = 'rn' + job.save() - if data["status"] != "error" and data["job_status"] == "SUCCESS": + self.logger.info("Status changed to Running") - if "files" in data and data["files"] is not None: - self.logger.info("This job %s is finished and is ready to be downloaded" % jobid) + # Apartir daqui o job está rodando no NCSA. + # Uma daemon vai ficar checando o andamento do Job usando o metodo check_job_by_id. - return data["files"] - else: - # Nao retornou a lista de resultado - self.logger.warning("Descut returned success, but not the list of download links.") - return None + except CutOutJob.DoesNotExist as e: + self.on_error(id, e) + raise e - elif data["status"] != "error" and data["job_status"] == "PENDING": - # O job ainda nao terminou no servidor - self.logger.info("This job %s is still running" % jobid) - return None + except Exception as e: + self.on_error(id, e) + raise e - else: - return False + def check_job_by_id(self, id): + """This method checks the status of all DES Jobs related to this Cutout Job. + When all DES jobs finish Cutout Job status is changed to Before Download, + at this point another daemon is looking for jobs with this status to download and register files. 
- def delete_job_results(self, token, jobid): + Args: + id (int): CutoutJob model primary key """ - Delete Jobs: Delete Job by its Id + self.logger.info("Des Cutout Check Job by ID %s" % id) - """ - self.logger.info("Deleting job %s in DesCutout service" % jobid) + try: + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) - if self.api_version == 1 and self.delete_job_after_download is True: - req = requests.delete( - self.host_check_jobs + "?token=" + token + "&jobid=" + jobid, verify=self.verify_ssl) + # Para cada Desjob associado ao CutoutJob com status None. + desjobs = job.desjob_set.filter(djb_status=None) - data = req.json() - self.logger.debug(data) + for desjob in desjobs: + try: + # Verifica se o status do job no Descut + job_summary = self.desapi.check_job_status(desjob.djb_jobid) + + # Se o retorno do Check for None signica que o job ainda não foi finalizado. + if job_summary is None: + return + + # Alterar o Status do Desjob + desjob.djb_status = job_summary["job_status"] + desjob.djb_message = job_summary["job_status_message"] + desjob.djb_start_time = job_summary["job_time_start"] + desjob.djb_finish_time = job_summary["job_time_complete"] + + desjob.save() + + except Exception as e: + self.on_error(id, e) + + # Verifica se todos os Desjobs tiverem acabado de executar muda o status. + desjobs = job.desjob_set.filter(djb_status=None) + if len(desjobs) == 0: + self.logger.info("All DES Jobs for this Cutout Job have been executed.") + # Alterar o status para Before Download + job.cjb_status = "bd" + job.save() - if data["status"] != "error" and data["status"] == "ok": - self.logger.info("Deleted job!") + except Exception as e: + self.on_error(id, e) - return True - else: - return False - else: - return True + def download_by_id(self, id): + """Starts the download phase of the results. + Each DES Job associated with the Cutout Job is downloaded sequentially. 
+ at the end of downloading all results, Cutout Job is successfully completed. - def parse_result_url(self, url): - """ - Divide uma url retornada pelo Des Cutout Service em um objeto Args: - url: str() - - Returns: - + id (int): CutoutJob model primary key """ - arq = dict({ - "thumbname": None, - "filename": None, - "file_type": None, - # "ra_sex": None, - # "dec_sex": None, - # "ra": None, - # "dec": None, - "filter": None, - "url": url.strip() - }) - - # filename = ultima parte da url - aurl = url.split("/") - filename = aurl[len(aurl) - 1] - arq.update({"filename": filename.strip()}) - - # file_type extensao do arquivo - file_type = filename.split(".")[len(filename.split(".")) - 1] - arq.update({"file_type": file_type.strip()}) - - if file_type not in self.not_images: - # Filtro da Imagem. - try: - filter = filename.split("_")[1].split(".")[0] - arq.update({"filter": filter.strip()}) + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) - # thumbname = filename split _ - thumbname = filename.split("_")[0] - arq.update({"thumbname": thumbname.strip()}) + # Alterar o status para Downloading + job.cjb_status = "dw" + job.save() - except: - # NAO TEM BANDA - # TODO descobrir um jeito de saber quais as bandas usadas para imagem colorida + for desjob in job.desjob_set.filter(djb_status="success"): + self.download_by_jobid(job.id, desjob.djb_jobid) - thumbname = filename[0:21] - arq.update({"thumbname": thumbname.strip()}) + # Depois de baixar todos os desjobs finalizar o CutoutJob + self.on_success(id) - return arq + def download_by_jobid(self, id, jobid): + """Performs the download of the results of a DES Job. + the result is a tar.gz file with all the files. + this tar.gz is extracted. + a position can generate more than one file, and all files are registered in the Cutout model. 
- def start_job(self, job): + Args: + id (int): CutoutJob model primary key + jobid (str): Universally unique identifier of DES job - product_id = job.cjb_product_id + """ + self.logger.info("Starting download Cutouts ID:[%s] Jobid [ %s ]" % (id, jobid)) - # Se o Estatus for Starting - if job.cjb_status == "st": + try: + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) - # Criando o token de acesso - token = self.generate_token() - self.logger.debug("Token: %s" % token) + # Recupera o Model Desjob + desjob = job.desjob_set.get(djb_jobid=jobid) - # Muda o Status para Before Submit - self.change_cutoutjob_status(job, "bs") + # Baixar o tar.gz com todos os dados + filename = self.desapi.get_cutout_tar_filename(jobid) + url = self.desapi.get_cutout_files_url(jobid) + job_path = self.get_job_path(id) - # Recupera os objetos do catalogo - self.logger.info("Retrieving the objects to be sent") + tar_filepath = Download().download_file_from_url(url, job_path, filename) - objects = self.get_catalog_objects(job) + if os.path.exists(tar_filepath): + self.logger.info("Files was successfully downloaded!") + self.logger.debug("Filepath: [%s]" % tar_filepath) - self.logger.info("There are %s objects to send" % objects.get("count")) + # Extrair o tar.gz + self.extract_file(tar_filepath, job_path) - # Comment, este comentario e visivel so na interface do descut - comment = "Science Server Cutout Job Id: %s Product ID: %s" % (job.pk, product_id) + # Verificar se ao extrarir o arquivo criou o diretório + image_path = os.path.join(job_path, str(jobid)) + if not os.path.exists(image_path): + raise Exception("Failed to extract the tar.gz file: [%s]" % filename) - data = dict({ - "job_type": job.cjb_job_type, - "ra": objects.get("ra"), - "dec": objects.get("dec"), - "comment": comment, - }) + # Arquivo já foi extraido, apagar o tar.gz + os.unlink(tar_filepath) + self.logger.info("File was extracted and tar.gz was deleted.") - # Params Obrigatorios para DR1 public 
version - if self.api_version == 2: - data.update({ - "username": self.user, - "password": self.password, - "list_only": "false", - "email": self.email, - "jobname": comment - }) + # Altera o status do Desjob para downloaded + desjob.djb_status = "downloaded" + desjob.save() - if job.cjb_xsize: - data.update({"xsize": job.cjb_xsize}) - if job.cjb_ysize: - data.update({"ysize": job.cjb_ysize}) + # Inciar o registro das imagens geradas + self.register_cutouts_by_jobid(id, jobid) - if job.cjb_job_type == "single": - if job.cjb_band: - data.update({"band": job.cjb_band}) - if job.cjb_Blacklist: - data.update({"no_blacklist": "true"}) - else: - data.update({"no_blacklist": "false"}) else: - if job.cjb_tag: - data.update({"tag": job.cjb_tag}) - - self.logger.debug("Data to be send coordinates:") - self.logger.debug(pformat(data)) + raise Exception("%s file not downloaded" % filename) - # Submit a Job - try: - result = self.create_job(token, data) + except Exception as e: + msg = "Error downloading Des job files: %s" % e + self.on_error(id, msg) - self.logger.info("Updating CutoutJob to keep job id returned") + def register_cutouts_by_jobid(self, id, jobid): + """Register all files in a DES Job. + each file will have a record in the Cutout model. + for each file an association with a position will be made. - # Diferencas entre DR1 e Colaboracao - jobid = None - try: - jobid = result["job"] + Args: + id (int): CutoutJob model primary key + jobid (str): Universally unique identifier of DES job + """ + self.logger.info("Starting Registration of Cutouts ID:[%s] Jobid [ %s ]" % (id, jobid)) - except: - jobid = result["jobid"] + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) + + # Recupera o Model Desjob + desjob = job.desjob_set.get(djb_jobid=jobid) + + # Ler o Targets.csv, arquivo com a lista de todas as coordenadas que foram enviadas. 
+ targets_file = os.path.join(self.get_job_path(job.id), "{}_targets.csv".format(jobid)) + df_targets = pd.read_csv( + targets_file, + sep=";", + index_col="meta_id", + dtype=dict({ + "meta_ra": "str", + "meta_dec": "str", + })) + + # self.logger.debug(df_targets.head()) + + # Ler o Summary.json + summary_file = self.get_summary_path(id, jobid) + self.logger.debug("Summary File: [%s]" % summary_file) + with open(summary_file) as fp: + summary = json.load(fp) + + # Array com os dados do cutout, cada coordenada de targets gera um elemento neste array. + # cada elemento tem todas as configurações usadas, as coordendas e um array FILES com os arquivos gerados para esta coordenada. + cutouts = summary["cutouts"] + + total_images = 0 + # Para cada cutout, procurar no targets qual é o id relacionado a esta coordenada. + # esse Id sera usado para registrar os arquivos desta coordenada com um target especifico. + for cutout in cutouts: + try: + # Procura no dataframe targets um registro que tenhas as coordenadas Ra e Dec iguais as do cutout. + # Por algum motivo a comparação entre as coordenas só funcionou usando String, + # usando float, não encontra todos os valores mesmo sendo visualmente identicos. + result = df_targets.loc[(df_targets["meta_ra"] == str(cutout["RA"])) & (df_targets["meta_dec"] == str(cutout["DEC"]))] - self.logger.debug("Job ID: %s" % jobid) + result.reset_index(inplace=True) + result = result.to_dict('records') - job.cjb_job_id = str(jobid) - job.save() + if len(result) == 1: + target = result[0] + self.logger.info("Cutout RA: [%s] Dec: [%s] Target Id: [%s]" % (cutout["RA"], cutout["DEC"], target["meta_id"])) - # Changing the CutoutJob Status for Running - self.change_cutoutjob_status(job, "rn") + # TODO: Para cada Arquivo de imagem, criar um registro no Model Cutouts + records = self.register_images(job, desjob, cutout, target["meta_id"]) - self.logger.info("Done! 
The new job was created successfully") + total_images += len(records) + else: + self.logger.warning("Cutout RA: [%s] Dec: [%s] Was not associated with a target! Match: %s" % (cutout["RA"], cutout["DEC"], result)) except Exception as e: - # Changing the CutoutJob Status for Error - self.change_cutoutjob_status(job, "er") + self.on_error(jobid, e) - raise e - else: - msg = ( - "This cutoutjob %s can not be started because the current status '%s' is different from 'starting'" % ( - job.pk, job.cjb_status)) - raise Exception(msg) + self.logger.info("Total of [%s] images were registered" % total_images) - def start_job_by_id(self, id): - self.logger.info("Des Cutout Start Job by ID %s" % id) + def register_images(self, job, desjob, cutout, object_id): + """Creates a record in the Cutout model. + For each position/object register all image files. - # Recupera o Model CutoutJob pelo id - try: - cutoutjob = self.get_cutoutjobs_by_id(id) + Args: + job (product.models.CutOutJob): Cutout Job instance. + desjob (product.models.DesJob): Des Job instance. + cutout (dict): File information already associated with position. + object_id (str): Object Id referring to the position. 
- self.logger.debug("CutoutJob Name: %s" % cutoutjob.cjb_display_name) + Returns: + array: Array of Cutout model instances + """ - # Notificacao por email - CutoutJobNotify().create_email_message(cutoutjob) + jobid = desjob.djb_jobid + records = [] - self.start_job(cutoutjob) + try: + for filename in cutout['FILES']: - except CutOutJob.DoesNotExist as e: - self.logger.critical(e) - raise e + filename = filename.strip() - except Exception as e: - self.logger.critical(e) - raise e + name, extension = os.path.splitext(filename) + extension = extension.strip(".") - def start_jobs(self): - self.logger.info("Des Cutout Start Jobs with status is 'starting'") + img_filter = name.split("_")[1] + band = self.get_band_model(img_filter) - # Recuperar a lista de jobs com o status "st" - cutoutjobs = self.get_cutoutjobs_by_status("st") + img_format = None + if extension == "fits": + img_format = "fits" + else: + img_format = name.split('_')[2] - self.logger.info("There are %s CutoutJobs to start" % len(cutoutjobs)) + job_path = self.get_job_path(job.id) - for job in cutoutjobs: - # TODO chamar o metodo start_job - pass + file_path = os.path.join(job_path, jobid, cutout['TILENAME'], name.split("_")[0]) - def delete_job(self, cutoutjob): + record = Cutout.objects.create( + cjb_cutout_job=job, + cjb_des_job=desjob, + ctt_jobid=jobid, + ctt_file_name=filename, + ctt_file_type=extension, + ctt_filter=band, + ctt_object_id=object_id, + ctt_object_ra=cutout['RA'], + ctt_object_dec=cutout['DEC'], + ctt_img_format=img_format, + ctt_file_path=file_path, + ctt_file_size=os.path.getsize(os.path.join(file_path, filename)), + ) - if cutoutjob.cjb_job_id is not None: - token = self.generate_token() + records.append(record) - self.delete_job_results(token, cutoutjob.cjb_job_id) + Cutout.objects.bulk_create(records, ignore_conflicts=True) - def get_cutoutjobs_by_status(self, status): + return records - # Pegar todos os CutoutJobs com status = st (Start) - return 
CutOutJob.objects.filter(cjb_status=str(status)) + except Exception as e: + raise Exception("Failed to register images. %s" % e) - def get_cutoutjobs_by_id(self, id): - return CutOutJob.objects.get(pk=int(id)) + def get_band_model(self, band): + """Returns the common.Filter model for a band by name. - def change_cutoutjob_status(self, cutoutjob, status): - self.logger.info("Changing the CutoutJob Status %s for %s" % (cutoutjob.cjb_status, status)) - cutoutjob.cjb_status = status - cutoutjob.save() + Args: + band (str): band name example "g" - def check_jobs(self): - """ - Verifica todos os jobs com status running + Returns: + common.models.Filter: An instance of Filter model. """ + try: + record = Filter.objects.get(filter=band) + except Filter.DoesNotExist: + record = Filter.objects.create( + project="DES", + filter=band, + ) + record.save() + self.logger.info("A record was created for the %s band in the model common.Filter" % band) - # Pegar todos os CutoutJobs com status running - jobs = CutOutJob.objects.filter(cjb_status="rn") - - if jobs.count() > 0: - self.logger.info("Check %s Jobs with status running" % jobs.count()) - - # Faz um for para cara job - for job in jobs: - self.logger.info("Get Status for job %s" % job.pk) - - # Cria um Token - token = self.generate_token() - - # Consulta o Job no servico - list_files = self.get_job_results(token, job.cjb_job_id) - - if list_files is False: - # Changing the CutoutJob Status for Error in the DesCutout side. - self.change_cutoutjob_status(job, "je") - break - - if list_files is not None: - # Path onde ficaram os arquivos de cutout - cutoutdir = self.get_cutout_dir(job) - - # Guardar o Arquivo de resultado com os links a serem baixados - result_file = self.save_result_links_file(job, list_files) - - job.cjb_results_file = result_file.split(self.data_dir)[1].strip("/") - - # Baixar o Arquivo Matched que sera usado para associar os arquivos baixados com os objetos. 
- matched = None - for link in list_files: - arq = self.parse_result_url(link) - if arq.get("file_type") == "csv" and arq.get("filename").find("matched") > -1: - matched = arq - break - - if matched is not None: - matched_file = Download().download_file_from_url( - matched.get("url"), - cutoutdir, - matched.get("filename")) + return record - # Criar um arquivo associando os arquivos ao seu objeto - objects = self.get_objects_from_file(job) + def get_catalog_count(self, product_id): + """Executes a query in the catalog and returns the total number of records. - with open(matched_file, "r") as matched_csv: - matched_reader = csv.DictReader(matched_csv) + Args: + product_id (int): Primary key of the Product model that represents the catalog that will be made the query. - for row in matched_reader: - key = self.get_object_position_key(row.get("RA"), row.get("DEC")) + Returns: + int: Total catalog lines. + """ + catalog = Catalog.objects.select_related().get(product_ptr_id=product_id) - for obj in objects: - if key == obj.get("key"): - obj.update({"thumbname": row.get("THUMBNAME")}) - break + # Instancia da classe de Banco de dados utilizada para query em tabelas de catalogos. + catalog_db = CatalogObjectsDBHelper( + catalog.tbl_name, + schema=catalog.tbl_schema, + database=catalog.tbl_database + ) - matched_csv.close() - job.cjb_matched_file = matched_file.split(self.data_dir)[1].strip("/") + # Seta o log na instancia da catalog_db para que as querys executadas aparareçam no log de cutout. 
+ catalog_db.setLogger(self.logger) - # Escrever o novo arquivo de objetos com o nome do arquivo - with open(os.path.join(cutoutdir, "objects.csv"), "w") as new_objects_csv: - fieldnames = ["key", "id", "ra_original", "ra", "dec", "thumbname"] - writer = csv.DictWriter(new_objects_csv, fieldnames=fieldnames) - writer.writeheader() - for obj in objects: - print("Escrevendo o novo objeto") - self.logger.debug(obj) - writer.writerow(obj) + count = catalog_db.count() - new_objects_csv.close() + self.logger.info("Total number of objects in the catalog: [%s]" % count) - job.cjb_cutouts_path = cutoutdir.split(self.data_dir)[1].strip("/") + return count - # Changing the CutoutJob Status for Before Download - self.change_cutoutjob_status(job, "bd") + def get_catalog_objects(self, product_id, limit=None, offset=None): + """Executes a query in the catalog and returns an array of objects already using association for the id, ra and dec columns. - def get_cutout_dir(self, cutout_job=None, product=None, jobid=None): - """ - Criar um Diretorio agrupando os jobs de cutouts por produtos - //* + This query can be paged using the limit and offset parameters. Args: - cutout_job: instancia do model CutoutJob - OR - product: chave pk do model Product em conjunto com - jobid: chave pk do model CutoutJob + product_id (int): Primary key of the Product model that represents the catalog that will be made the query. + limit (int, optional): Maximum number of rows in this query. Defaults to None. + offset (int, optional): From which result the query will be executed. Defaults to None. 
""" - try: - if cutout_job is not None: - cutout_dir = os.path.join( - self.data_dir, - self.cutout_dir, - str(cutout_job.cjb_product_id), - str(cutout_job.id)) - else: - cutout_dir = os.path.join( - self.data_dir, - self.cutout_dir, - str(product), - str(jobid)) - - os.makedirs(cutout_dir) - return cutout_dir - - except OSError: - # Cutout path already exists - return cutout_dir - - def save_result_links_file(self, cutoutjob, links): - self.logger.info("Save result links to a file") - - cutoutdir = self.get_cutout_dir(cutoutjob) - f = os.path.join(cutoutdir, self.result_file) - with open(f, "w") as result: - for l in links: - result.write(l + "\n") - - result.close() - - self.logger.debug("Result File %s" % f) - return f - - def get_catalog_objects(self, job): - product_id = job.cjb_product_id - cutoutdir = self.get_cutout_dir(job) catalog = Catalog.objects.select_related().get(product_ptr_id=product_id) + self.logger.info("Executing the query in the catalog table. Table: [%s]" % (catalog.tbl_name)) + # colunas associadas ao produto associations = Association().get_associations_by_product_id(product_id) + self.logger.debug("Associations: [%s]" % associations) # Criar uma lista de colunas baseda nas associacoes isso para limitar a query de nao usar * columns = Association().get_properties_associated(product_id) + self.logger.debug("Columns: [%s]" % columns) + # Instancia da classe de Banco de dados utilizada para query em tabelas de catalogos. catalog_db = CatalogObjectsDBHelper( catalog.tbl_name, schema=catalog.tbl_schema, database=catalog.tbl_database ) + # Seta o log na instancia da catalog_db para que as querys executadas aparareçam no log de cutout. + catalog_db.setLogger(self.logger) + + # Lista com os resultados da query. 
+ records = list() + # Executa a query rows, count = catalog_db.query( columns=columns, - limit=self.cutout_max_objects + limit=limit, + start=offset ) - # Criar um arquivo que servira de index para a associar os objetos as imagens - - # Lista de Ra e dec que serao passadas como parametro - lra = list() - ldec = list() + # Para cada linha alterar os nomes de colunas utilizando as informações de associação. + # O resultado é um array records onde cada record tem sempre os mesmos atributos (meta_id, meta_ra, meta_dec) + # independente dos nomes originais das colunas. + for row in rows: + ra = row[associations["pos.eq.ra;meta.main"]] + dec = row[associations["pos.eq.dec;meta.main"]] + + record = dict({ + "meta_id": row[associations["meta.id;meta.main"]], + "meta_ra": float("{:.6f}".format(ra)), + "meta_dec": float("{:.6f}".format(dec)) + }) + records.append(record) - with open(os.path.join(cutoutdir, "objects.csv"), "w") as objects_csv: - fieldnames = ["key", "id", "ra_original", "ra", "dec"] - writer = csv.DictWriter(objects_csv, fieldnames=fieldnames) - writer.writeheader() + del rows - for row in rows: - ra_original = float(row.get(associations.get("pos.eq.ra;meta.main"))) - ra = ra_original - dec = float(row.get(associations.get("pos.eq.dec;meta.main"))) + return records - if ra < 0 and ra > -180: - ra = ra + 360 + def extract_file(self, filepath, path): + """Extract a tar.gz file to a directory. - obj = dict({ - "id": row.get(associations.get("meta.id;meta.main")), - "ra_original": ra_original, - "ra": ra, - "dec": dec, - "key": str(self.get_object_position_key(ra, dec)) - }) + Args: + filepath (str): path to the tar.gz file to be extracted. + path (str): path where the file will be extracted. + """ + tar = tarfile.open(filepath) + tar.extractall(path) + tar.close() - writer.writerow(obj) + def purge_cutoutjob_dir(self, id): + """Removes all files from a Cutout Job from the local directory. + this method must be executed every time a Cutout Job model is deleted. 
+ for this, this method is linked to the model using Signal. - lra.append(ra) - ldec.append(dec) + Args: + id (int): CutoutJob model primary key + """ + try: + # Recupera o Model CutoutJob pelo id + jobpath = self.get_job_path(id) + self.logger.info("Removing Cutout Job Dir. ID: [%s]" % id) - objects_csv.close() + shutil.rmtree(jobpath) - return dict({ - "ra": str(lra), - "dec": str(ldec), - "count": len(rows) - }) + self.logger.info("Removed Dir [ %s ]" % jobpath) - def get_objects_from_file(self, cutoutjob): - cutoutdir = self.get_cutout_dir(cutoutjob) - objects = list() - with open(os.path.join(cutoutdir, "objects.csv"), "r") as objects_csv: - objects_reader = csv.DictReader(objects_csv) - for object in objects_reader: - objects.append(object) - objects_csv.close() + except Exception as e: + self.logger.error(e) - return objects + def on_success(self, id): + """This method is performed at the end of the Cutout Job. + changes status to success, saves information about job completion. + and execute the method that erases DES Job in the DESaccess service. 
- def get_object_position_key(self, ra, dec): - """ - Monta uma chave usando ra e dec do objeto Args: - ra: float() com 3 casas decimais - dec: float() com 3 casas decimais - - Returns: string() ra+dec ou ra-dec + id (int): CutoutJob model primary key """ - ra = float("{:6.3f}".format(float(ra))) - dec = float("{:6.3f}".format(float(dec))) - # montar uma chave usando ra dec - key = str(ra) - if float(dec) > 0: - key += "+" + str(dec) - else: - key += str(dec) - - return key - - def test_api_help(self): - print("-------------- test_api_help --------------") - token = self.generate_token() - - ra = [10.0, 20.0, 30.0] - dec = [40.0, 50.0, 60.0] - xs = [1.0, 2.0, 3.0, 4.0] - ys = [2.0] - - # create body of request - body = { - "token": token, # required - "ra": str(ra), # required - "dec": str(dec), # required - "job_type": "coadd", # required "coadd" or "single" - "xsize": str(xs), # optional (default : 1.0) - "ysize": str(ys), # optional (default : 1.0) - "band": "g,r,i", # optional for "single" epochs jobs (default: all bands) - "no_blacklist": "false", - # optional for "single" epochs jobs (default: "false"). 
return or not blacklisted exposures - "list_only": "false", # optional (default : "false") "true": will not generate pngs (faster) - "email": "false" # optional will send email when job is finished - } - - req = requests.post("http://descut.cosmology.illinois.edu/api/jobs/", data=body, verify=self.verify_ssl) - - # create body for files if needed - # body_files = {"csvfile": open("mydata.csv", "rb")} # To load csv file as part of request - # To include files - # req = requests.post("http://descut.cosmology.illinois.edu/api/jobs/", data=body, files=body_files) - - print(req) - print(req.text) - print(req.json()["job"]) - - def create_cutout_model(self, - cutoutjob, filename, thumbname, type, filter=None, object_id=None, object_ra=None, - object_dec=None, file_path=None, file_size=None, start=None, finish=None): - - # Tratamento do file_path para remover o path absoluto guardando apenas o path configurado no settings cutoutdir - if file_path is not None: - file_path = file_path.split(self.cutout_dir)[1] - file_path = os.path.join(self.cutout_dir, file_path.strip('/')) - - # Tratar Ra e Dec para 5 casas decimais - if object_ra is not None: - object_ra = float('%.5f' % float(object_ra)) - - if object_dec is not None: - object_dec = float('%.5f' % float(object_dec)) + self.logger.debug("Finishing the CutoutJob. ID: [%s]" % id) try: + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) + + # Apagar Os Jobs no Descut + for desjob in job.desjob_set.all(): + self.desapi.delete_job(desjob.djb_jobid) + # Muda o Status do Desjob para deleted. + desjob.djb_status = 'deleted' + desjob.save() + + # Guardar o tamanho total e a quantidade das imagens geradas. 
+ job.cjb_file_size = job.cutout_set.aggregate(sum_size=Sum('ctt_file_size')).get("sum_size") + job.cjb_files = job.cutout_set.count() + # Muda o Status para Done + job.cjb_status = "ok" + job.cjb_finish_time = datetime.utcnow().replace(tzinfo=utc) + job.save() + + # Notificacao por email de Termino do Job + CutoutJobNotify().create_email_message(job) + + self.logger.info("Cutout Job Finish. ID: [%s]" % id) + except Exception as e: + self.on_error(id, e) - cutout, created = Cutout.objects.update_or_create( - cjb_cutout_job=cutoutjob, - ctt_file_name=filename, - ctt_file_type=type, - ctt_filter=filter, - ctt_object_id=object_id, - ctt_object_ra=object_ra, - ctt_object_dec=object_dec, - defaults={ - "ctt_file_size": file_size, - "ctt_file_path": file_path, - "ctt_thumbname": thumbname, - "ctt_download_start_time": start, - "ctt_download_finish_time": finish - } - ) + def on_error(self, id, error): + """This method is executed only in case of a Cutout job failure. + Change the status to error and save the error message and notify the user of the failure. - self.logger.debug("Cutout ID %s Registred" % cutout.pk) - return cutout + Args: + id (int): CutoutJob model primary key + error (str): Error or exception message that caused the failure. 
+ """ + trace = traceback.format_exc() + self.logger.error(trace) + self.logger.error(error) - except Exception as e: - self.logger.error(e) + # Recupera o Model CutoutJob pelo id + job = self.get_cutoutjobs_by_id(id) - # Changing the CutoutJob Status for Error - self.change_cutoutjob_status(cutoutjob, "er") + # Alterar o status do Job + job.cjb_status = "er" + # Alterar a data de termino + job.cjb_finish_time = datetime.utcnow().replace(tzinfo=utc) + # Alterar o campo de erro + job.cjb_error = "{} ERROR: [{}]".format(str(trace), error) + job.save() - raise (e) + # Notificacao por email de Inicio do Job + CutoutJobNotify().create_email_message(job) +# ----------------------------------------------< CUTOUT NOTIFICATION >-------------------------------------------------- class CutoutJobNotify: def __init__(self): # Get an instance of a logger @@ -802,6 +739,7 @@ def create_email_message(self, cutoutjob): if cutoutjob.owner.email: to_email = cutoutjob.owner.email + message = None if cutoutjob.cjb_status == 'st': subject = "Mosaic in progress" @@ -826,8 +764,8 @@ def create_email_message(self, cutoutjob): self.generate_failure_ticket(cutoutjob) if message: + self.logger.info("Sending Notification Email. 
Subject: [%s]" % subject) Notify().send_email(subject, message, to_email) - else: self.logger.info("It was not possible to notify the user, for not having the email registered.") @@ -842,36 +780,28 @@ def generate_success_email(self, cutoutjob): if cutoutjob.cjb_tag: tag = cutoutjob.cjb_tag.upper() - if cutoutjob.cutout_set.count(): - sum_sizes = cutoutjob.cutout_set.aggregate(sum_size=Sum('ctt_file_size')) - files_size = humanize.naturalsize(sum_sizes.get("sum_size")) + if cutoutjob.cjb_files > 0: + files_size = humanize.naturalsize(cutoutjob.cjb_file_size) tdelta = finish - start seconds = tdelta.total_seconds() - execution_time = str(datetime.timedelta(seconds=seconds)).split('.')[0] - execution_time_humanized = humanize.naturaldelta(datetime.timedelta(seconds=seconds)) - - image_formats = cutoutjob.cjb_image_formats - if image_formats is None: - image_formats = 'png' + execution_time = str(timedelta(seconds=seconds)).split('.')[0] + execution_time_humanized = humanize.naturaldelta(timedelta(seconds=seconds)) context = dict({ "username": cutoutjob.owner.username, "target_display_name": cutoutjob.cjb_product.prd_display_name, "cutoutjob_display_name": cutoutjob.cjb_display_name, - "cutoutjob_type:": cutoutjob.cjb_job_type, "cutoutjob_tag": tag, "cutoutjob_xsize": int((float(cutoutjob.cjb_xsize) * 60)), # converter para arcsec "cutoutjob_ysize": int((float(cutoutjob.cjb_ysize) * 60)), - "cutoutjob_image_formats": image_formats, "n_objects": cutoutjob.cjb_product.table.catalog.ctl_num_objects, - "n_files": cutoutjob.cutout_set.count(), + "n_files": cutoutjob.cjb_files, "files_size": files_size, "start": str(start.strftime("%Y-%m-%d %H:%M")), "finish": str(finish.strftime("%Y-%m-%d %H:%M")), "execution_time": execution_time, "execution_time_humanized": execution_time_humanized - }) return render_to_string("cutout_notification_finish.html", context) @@ -898,7 +828,7 @@ def generate_failure_email(self, cutoutjob): finish = timezone.now() tdelta = finish - start 
seconds = tdelta.total_seconds() - execution_time_humanized = humanize.naturaldelta(datetime.timedelta(seconds=seconds)) + execution_time_humanized = humanize.naturaldelta(timedelta(seconds=seconds)) context = dict({ "username": cutoutjob.owner.username, @@ -918,12 +848,11 @@ def generate_failure_ticket(self, cutoutjob): subject = "%s Mosaic Failed" % cutoutjob.pk message = ("email: %s\nusername: %s\ncutoutjob: %s - %s\ntarget: %s - %s" % (cutoutjob.owner.username, - cutoutjob.owner.email, - cutoutjob.pk, - cutoutjob.cjb_display_name, - cutoutjob.cjb_product.pk, - cutoutjob.cjb_product.prd_display_name)) - + cutoutjob.owner.email, + cutoutjob.pk, + cutoutjob.cjb_display_name, + cutoutjob.cjb_product.pk, + cutoutjob.cjb_product.prd_display_name)) Notify().send_email_failure_helpdesk(subject, message) diff --git a/api/product/export.py b/api/product/export.py index 87fcb20fb..8b8b05f26 100644 --- a/api/product/export.py +++ b/api/product/export.py @@ -28,7 +28,6 @@ def __init__(self): self.exclude_columns = ['meta_reject', 'meta_reject_id', 'meta_rating', 'meta_rating_id'] - def create_export_dir(self, name): """ Cria um diretorio onde vao ficar os aquivos gerados. 
@@ -84,7 +83,6 @@ def get_columns(self, row): self.logger.info("Retrieving the columns for the headers") columns = list() - for property in row: cname = str(property.lower().strip()) @@ -105,12 +103,13 @@ def table_to_csv_by_id(self, product_id, table, export_dir, user_id, schema=None self.logger.info("Export table \"%s\" to csv" % table) + # TODO: Pode ser melhorado o processo usando Pandas.Dataframe.to_csv() name = ("%s.csv" % table) filename = os.path.join(export_dir, name) self.logger.debug("Filename: %s" % filename) - rows, count = self.get_catalog_objects(product_id, table, user_id, schema, database, filters=filters) + rows, count = self.get_catalog_objects(product_id, table, user_id, schema, database, filters=filters) self.logger.debug("Row Count: %s" % count) @@ -155,6 +154,8 @@ def table_to_csv(self, table, schema, export_dir, columns=None): self.logger.info("Export table \"%s\" to csv" % table) + # TODO: Pode ser melhorado o processo usando Pandas.Dataframe.to_csv() + name = ("%s.csv" % table) filename = os.path.join(export_dir, name) @@ -241,7 +242,6 @@ def get_catalog_objects(self, product_id, table, user_id, schema=None, database= self.logger.debug("User: %s" % user.username) - # Recuperar o produto try: product = Product.objects.get(pk=int(product_id)) @@ -250,7 +250,6 @@ def get_catalog_objects(self, product_id, table, user_id, schema=None, database= except Product.DoesNotExist as e: self.logger.error("Product matching query does not exist. Product Id: %s" % product_id) - # colunas associadas ao produto associations = Association().get_associations_by_product_id(product_id=product_id) @@ -282,17 +281,15 @@ def product_cutouts(self, name, path_origin, path_destination, format="zip"): :param path_destination: path absoluto do diretorio onde sera criado o zip. 
:param format: por enquanto apenas o formato .zip """ - self.logger.info("Export cutouts of Job %s" % name) + self.logger.info("Export cutouts of Job [%s]" % name) self.logger.debug("Cutout Job path: %s" % path_origin) - origin_path = os.path.join(settings.DATA_DIR, path_origin.strip("/")) - data_dir_tmp = settings.DATA_TMP_DIR destination_path = path_destination - self.logger.debug("Origin Path: %s" % origin_path) + self.logger.debug("Origin Path: %s" % path_origin) self.logger.debug("Destination Path: %s" % destination_path) not_images_extensions = list([".txt", ".csv", ".log"]) @@ -305,7 +302,7 @@ def product_cutouts(self, name, path_origin, path_destination, format="zip"): self.logger.debug("Zip File: %s" % filename) with zipfile.ZipFile(filename, 'w') as ziphandle: - for root, dirs, files in os.walk(origin_path): + for root, dirs, files in os.walk(path_origin): for file in files: origin_file = os.path.join(root, file) fname, extension = os.path.splitext(origin_file) @@ -436,4 +433,3 @@ def notify_user_export_failure(self, user, product): else: self.logger.info("It was not possible to notify the user, for not having the email registered.") - diff --git a/api/product/migrations/0013_auto_20201110_1507.py b/api/product/migrations/0013_auto_20201110_1507.py new file mode 100644 index 000000000..a8a954bd5 --- /dev/null +++ b/api/product/migrations/0013_auto_20201110_1507.py @@ -0,0 +1,21 @@ +# Generated by Django 2.1.5 on 2020-11-10 15:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0012_auto_20200903_1924'), + ] + + operations = [ + migrations.RemoveField( + model_name='cutout', + name='ctt_download_finish_time', + ), + migrations.RemoveField( + model_name='cutout', + name='ctt_download_start_time', + ), + ] diff --git a/api/product/migrations/0014_remove_cutout_ctt_thumbname.py b/api/product/migrations/0014_remove_cutout_ctt_thumbname.py new file mode 100644 index 000000000..3471a19f7 --- 
/dev/null +++ b/api/product/migrations/0014_remove_cutout_ctt_thumbname.py @@ -0,0 +1,17 @@ +# Generated by Django 2.1.5 on 2020-11-10 15:24 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0013_auto_20201110_1507'), + ] + + operations = [ + migrations.RemoveField( + model_name='cutout', + name='ctt_thumbname', + ), + ] diff --git a/api/product/migrations/0015_cutout_ctt_img_format.py b/api/product/migrations/0015_cutout_ctt_img_format.py new file mode 100644 index 000000000..0894d7e3d --- /dev/null +++ b/api/product/migrations/0015_cutout_ctt_img_format.py @@ -0,0 +1,18 @@ +# Generated by Django 2.1.5 on 2020-11-10 15:37 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0014_remove_cutout_ctt_thumbname'), + ] + + operations = [ + migrations.AddField( + model_name='cutout', + name='ctt_img_format', + field=models.TextField(blank=True, default=None, help_text='Image file format can be fits, stiff, or lupton.', max_length=10, null=True, verbose_name='Image Format'), + ), + ] diff --git a/api/product/migrations/0016_cutout_ctt_jobid.py b/api/product/migrations/0016_cutout_ctt_jobid.py new file mode 100644 index 000000000..de0e0790f --- /dev/null +++ b/api/product/migrations/0016_cutout_ctt_jobid.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.17 on 2020-11-11 18:06 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0015_cutout_ctt_img_format'), + ] + + operations = [ + migrations.AddField( + model_name='cutout', + name='ctt_jobid', + field=models.CharField(blank=True, help_text='Descut job id that generated the image.', max_length=1024, null=True, verbose_name='DES Job ID'), + ), + ] diff --git a/api/product/migrations/0017_auto_20201111_1926.py b/api/product/migrations/0017_auto_20201111_1926.py new file mode 100644 index 000000000..f45df912d --- /dev/null 
+++ b/api/product/migrations/0017_auto_20201111_1926.py @@ -0,0 +1,67 @@ +# Generated by Django 2.2.17 on 2020-11-11 19:26 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0016_cutout_ctt_jobid'), + ] + + operations = [ + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_Blacklist', + ), + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_band', + ), + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_image_formats', + ), + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_job_type', + ), + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_matched_file', + ), + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_results_file', + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_fits_colors', + field=models.CharField(blank=True, default='grizy', help_text="Color bands to output (string value containing characters from the set 'grizy').", max_length=10, null=True, verbose_name='Fits Colors'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_lupton_colors', + field=models.CharField(blank=True, default='gri;rig;zgi', help_text='Sets of color band triplets, delineated by semi-colons, denoting by letter ordering the bands to use for Red, Green, Blue in the generated RGB images in Lupton format. 
Example: gri;zgi will produce two RGB images with Red/Green/Blue using bands G/R/I and Z/G/I, respectively.', max_length=20, null=True, verbose_name='Lupton Colors'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_make_fits', + field=models.BooleanField(default=False, help_text='Generate cutout data files in FITS format', verbose_name='Make Fits'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_make_lupton', + field=models.BooleanField(default=False, help_text='Generate cutout data files in RGB color using the Lupton method', verbose_name='Make Lupton'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_make_stiff', + field=models.BooleanField(default=False, help_text='Generate cutout data files in RGB color using STIFF format', verbose_name='Make Stiff'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_stiff_colors', + field=models.CharField(blank=True, default='gri;rig;zgi', help_text='Sets of color band triplets, delineated by semi-colons, denoting by letter ordering the bands to use for Red, Green, Blue in the generated RGB images in STIFF format. 
Example: gri;zgi will produce two RGB images with Red/Green/Blue using bands G/R/I and Z/G/I, respectively.', max_length=20, null=True, verbose_name='Stiff Colors'), + ), + ] diff --git a/api/product/migrations/0018_auto_20201111_1947.py b/api/product/migrations/0018_auto_20201111_1947.py new file mode 100644 index 000000000..eb3277a53 --- /dev/null +++ b/api/product/migrations/0018_auto_20201111_1947.py @@ -0,0 +1,28 @@ +# Generated by Django 2.2.17 on 2020-11-11 19:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0017_auto_20201111_1926'), + ] + + operations = [ + migrations.AddField( + model_name='cutoutjob', + name='cjb_file_size', + field=models.PositiveIntegerField(blank=True, default=None, help_text='Total size of files generated in this job', null=True, verbose_name='File Size'), + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_files', + field=models.PositiveIntegerField(blank=True, default=None, help_text='Total of files generated in this job', null=True, verbose_name='Files'), + ), + migrations.AlterField( + model_name='cutoutjob', + name='cjb_make_stiff', + field=models.BooleanField(default=True, help_text='Generate cutout data files in RGB color using STIFF format', verbose_name='Make Stiff'), + ), + ] diff --git a/api/product/migrations/0019_desjob.py b/api/product/migrations/0019_desjob.py new file mode 100644 index 000000000..ce93b0f1f --- /dev/null +++ b/api/product/migrations/0019_desjob.py @@ -0,0 +1,26 @@ +# Generated by Django 2.2.17 on 2020-11-17 20:04 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0018_auto_20201111_1947'), + ] + + operations = [ + migrations.CreateModel( + name='Desjob', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('djb_jobid', 
models.CharField(blank=True, db_index=True, help_text='Descut job id that generated the image.', max_length=1024, null=True, verbose_name='DES Job ID')), + ('djb_status', models.CharField(blank=True, max_length=20, null=True, verbose_name='DES Job Status')), + ('djb_cutout_job', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='product.CutOutJob', verbose_name='Cutout Job')), + ], + options={ + 'unique_together': {('djb_cutout_job', 'djb_jobid')}, + }, + ), + ] diff --git a/api/product/migrations/0020_desjob_djb_message.py b/api/product/migrations/0020_desjob_djb_message.py new file mode 100644 index 000000000..576b5f297 --- /dev/null +++ b/api/product/migrations/0020_desjob_djb_message.py @@ -0,0 +1,18 @@ +# Generated by Django 2.2.17 on 2020-11-17 20:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0019_desjob'), + ] + + operations = [ + migrations.AddField( + model_name='desjob', + name='djb_message', + field=models.TextField(blank=True, null=True, verbose_name='DES Job Message'), + ), + ] diff --git a/api/product/migrations/0021_cutout_cjb_des_job.py b/api/product/migrations/0021_cutout_cjb_des_job.py new file mode 100644 index 000000000..faf38ff62 --- /dev/null +++ b/api/product/migrations/0021_cutout_cjb_des_job.py @@ -0,0 +1,19 @@ +# Generated by Django 2.2.17 on 2020-11-17 20:08 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0020_desjob_djb_message'), + ] + + operations = [ + migrations.AddField( + model_name='cutout', + name='cjb_des_job', + field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to='product.Desjob', verbose_name='Des Job'), + ), + ] diff --git a/api/product/migrations/0022_auto_20201117_2155.py b/api/product/migrations/0022_auto_20201117_2155.py new file mode 100644 index 000000000..f89031fc0 --- 
/dev/null +++ b/api/product/migrations/0022_auto_20201117_2155.py @@ -0,0 +1,23 @@ +# Generated by Django 2.2.17 on 2020-11-17 21:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0021_cutout_cjb_des_job'), + ] + + operations = [ + migrations.AddField( + model_name='desjob', + name='djb_finish_time', + field=models.DateTimeField(blank=True, null=True, verbose_name='Finish'), + ), + migrations.AddField( + model_name='desjob', + name='djb_start_time', + field=models.DateTimeField(auto_now_add=True, null=True, verbose_name='Start'), + ), + ] diff --git a/api/product/migrations/0023_auto_20201119_1817.py b/api/product/migrations/0023_auto_20201119_1817.py new file mode 100644 index 000000000..0edb1afb3 --- /dev/null +++ b/api/product/migrations/0023_auto_20201119_1817.py @@ -0,0 +1,22 @@ +# Generated by Django 2.2.17 on 2020-11-19 18:17 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('product', '0022_auto_20201117_2155'), + ] + + operations = [ + migrations.RemoveField( + model_name='cutoutjob', + name='cjb_job_id', + ), + migrations.AddField( + model_name='cutoutjob', + name='cjb_error', + field=models.TextField(blank=True, null=True, verbose_name='Error Message'), + ), + ] diff --git a/api/product/models.py b/api/product/models.py index 8d5e5e402..d246cae13 100644 --- a/api/product/models.py +++ b/api/product/models.py @@ -1,3 +1,5 @@ +from operator import index +from django.db.models.signals import post_save, pre_delete from coadd.models import Release, Tag from django.db import models from product_classifier.models import ProductClass @@ -108,8 +110,8 @@ def __str__(self): class Catalog(Table): ctl_num_objects = models.PositiveIntegerField( - verbose_name='Num of objects', - null=True, + verbose_name='Num of objects', + null=True, blank=True) def __str__(self): @@ -167,7 +169,7 @@ class CurrentSetting(models.Model): 
ProductSetting, on_delete=models.CASCADE, verbose_name='Setting') owner = models.ForeignKey( settings.AUTH_USER_MODEL, - on_delete=models.CASCADE, null=True, blank=True, verbose_name='Owner') + on_delete=models.CASCADE, null=True, blank=True, verbose_name='Owner') def __str__(self): return str(self.pk) @@ -234,12 +236,7 @@ def __str__(self): # ------------------------------ Cutouts ------------------------------ class CutOutJob(models.Model): """ - Este Model possui um Signal conectado a ele - toda vez que este model disparar o evento post_save - o metodo start_des_cutout_job do arquivo .signals sera executado. - este metodo ira enviar o job para o servico do DesCutout - - OBS: este signal esta no final do arquivo. para evitar erros de import. + Este Model possui um Signal conectado a ele. """ status_job = ( # Jobs que ainda nao foram enviados @@ -272,23 +269,35 @@ class CutOutJob(models.Model): cjb_display_name = models.CharField( max_length=40, verbose_name='Name') + cjb_tag = models.CharField( + max_length=60, verbose_name='Release Tag', null=True, blank=True) + cjb_xsize = models.CharField( max_length=5, verbose_name='Xsize', help_text='xsize in arcmin, default is 1.0', default='1.0') cjb_ysize = models.CharField( max_length=5, verbose_name='ysize', help_text='ysize in arcmin, default is 1.0', default='1.0') - cjb_job_type = models.CharField( - max_length=10, verbose_name='Job Type', choices=(('coadd', 'Coadd Images'), ('single', 'Single Epoch'))) + cjb_make_fits = models.BooleanField( + verbose_name='Make Fits', default=False, help_text='Generate cutout data files in FITS format') - cjb_tag = models.CharField( - max_length=60, verbose_name='Release Tag', null=True, blank=True) + cjb_fits_colors = models.CharField( + max_length=10, verbose_name='Fits Colors', null=True, blank=True, default="grizy", help_text="Color bands to output (string value containing characters from the set 'grizy').") - cjb_band = models.CharField( - max_length=20, verbose_name='Filters', 
null=True, blank=True) + # OBS: A criação de imagens Stiff estão ligadas por default. + cjb_make_stiff = models.BooleanField( + verbose_name='Make Stiff', default=True, help_text='Generate cutout data files in RGB color using STIFF format') - cjb_Blacklist = models.BooleanField( - verbose_name='Blacklist', default=False, help_text='Exclude blacklisted ccds') + cjb_stiff_colors = models.CharField( + max_length=20, verbose_name='Stiff Colors', null=True, blank=True, default="gri;rig;zgi", + help_text="Sets of color band triplets, delineated by semi-colons, denoting by letter ordering the bands to use for Red, Green, Blue in the generated RGB images in STIFF format. Example: gri;zgi will produce two RGB images with Red/Green/Blue using bands G/R/I and Z/G/I, respectively.") + + cjb_make_lupton = models.BooleanField( + verbose_name='Make Lupton', default=False, help_text='Generate cutout data files in RGB color using the Lupton method') + + cjb_lupton_colors = models.CharField( + max_length=20, verbose_name='Lupton Colors', null=True, blank=True, default="gri;rig;zgi", + help_text="Sets of color band triplets, delineated by semi-colons, denoting by letter ordering the bands to use for Red, Green, Blue in the generated RGB images in Lupton format. 
Example: gri;zgi will produce two RGB images with Red/Green/Blue using bands G/R/I and Z/G/I, respectively.") cjb_status = models.CharField( max_length=25, @@ -297,9 +306,6 @@ class CutOutJob(models.Model): verbose_name='Status' ) - cjb_job_id = models.CharField( - max_length=1024, verbose_name='Job ID', null=True, blank=True) - # Fields Referentes as labels que serao aplicadas ao cutout cjb_label_position = models.CharField( max_length=10, verbose_name='Label Position', choices=(('inside', 'Inside'), ('outside', 'Outside')), @@ -320,14 +326,6 @@ class CutOutJob(models.Model): max_length=4096, verbose_name='Cutout Paths', null=True, blank=True, default=None, help_text="Path of the directory where the cutouts of this job are.") - cjb_results_file = models.TextField( - max_length=4096, verbose_name='Result File', - null=True, blank=True, default=None, help_text="File that contains the links returned by the DesCutouts service") - - cjb_matched_file = models.TextField( - max_length=4096, verbose_name='Matched File', - null=True, blank=True, default=None, help_text="File containing the relations between ra, dec with the image") - cjb_start_time = models.DateTimeField( auto_now_add=True, null=True, blank=True, verbose_name='Start') @@ -337,17 +335,50 @@ class CutOutJob(models.Model): cjb_description = models.CharField( max_length=1024, verbose_name='Description', null=True, blank=True) - cjb_image_formats = models.CharField( - max_length=10, verbose_name='Image Formats', null=True, blank=True, - help_text="list of image extensions that will be downloaded from the descut. 
example \'png,fits\'" - ) + cjb_files = models.PositiveIntegerField( + verbose_name='Files', null=True, blank=True, default=None, help_text='Total of files generated in this job') + + cjb_file_size = models.PositiveIntegerField( + verbose_name='File Size', null=True, blank=True, default=None, help_text='Total size of files generated in this job') + + cjb_error = models.TextField( + verbose_name='Error Message', null=True, blank=True) def __str__(self): return str(self.cjb_display_name) + +class Desjob(models.Model): + djb_cutout_job = models.ForeignKey( + CutOutJob, on_delete=models.CASCADE, verbose_name='Cutout Job', default=None) + + djb_jobid = models.CharField( + max_length=1024, verbose_name='DES Job ID', db_index=True, null=True, blank=True, help_text="Descut job id that generated the image.") + + djb_status = models.CharField( + max_length=20, verbose_name='DES Job Status', null=True, blank=True) + + djb_message = models.TextField( + verbose_name='DES Job Message', null=True, blank=True) + + djb_start_time = models.DateTimeField( + auto_now_add=True, null=True, blank=True, verbose_name='Start') + + djb_finish_time = models.DateTimeField( + auto_now_add=False, null=True, blank=True, verbose_name='Finish') + + class Meta: + unique_together = ('djb_cutout_job', 'djb_jobid') + + def __str__(self): + return str(self.pk) + + class Cutout(models.Model): cjb_cutout_job = models.ForeignKey( CutOutJob, on_delete=models.CASCADE, verbose_name='Cutout Job', default=None) + cjb_des_job = models.ForeignKey( + Desjob, on_delete=models.CASCADE, verbose_name='Des Job', default=None) ctt_object_id = models.CharField( max_length=5, verbose_name='Object ID', null=True, blank=True, help_text='The association is used to know which column will be considered as id.') @@ -357,10 +388,10 @@ class Cutout(models.Model): ctt_object_dec = models.CharField( max_length=10, verbose_name='Dec', null=True, blank=True, help_text='Dec in degrees, the association will be used to identify the 
column') + ctt_img_format = models.TextField( + max_length=10, verbose_name='Image Format', null=True, blank=True, default=None, help_text="Image file format can be fits, stiff, or lupton.") ctt_filter = models.ForeignKey( 'common.Filter', verbose_name='Filter', on_delete=models.CASCADE, null=True, blank=True, default=None) - ctt_thumbname = models.CharField( - max_length=255, verbose_name='Thumbname', null=True, blank=True, default=None) ctt_file_path = models.TextField( max_length=4096, verbose_name='File Path', null=True, blank=True, default=None) ctt_file_name = models.CharField( @@ -369,10 +400,8 @@ class Cutout(models.Model): max_length=5, verbose_name='File Extension', null=True, blank=True, default=None) ctt_file_size = models.PositiveIntegerField( verbose_name='File Size', null=True, blank=True, default=None, help_text='File Size in bytes') - ctt_download_start_time = models.DateTimeField( - auto_now_add=True, null=True, blank=True, verbose_name='Download Start') - ctt_download_finish_time = models.DateTimeField( - auto_now_add=False, null=True, blank=True, verbose_name='Download finish') + ctt_jobid = models.CharField( + max_length=1024, verbose_name='DES Job ID', null=True, blank=True, help_text="Descut job id that generated the image.") class Meta: unique_together = ('cjb_cutout_job', 'ctt_file_name') @@ -382,10 +411,7 @@ class Meta: ] def __str__(self): - return str(self.pk) - - # ctt_original_url = models.CharField( - # max_length=5, verbose_name='Url to download the file on the cutouts server') + return "{} - {}".format(str(self.pk), self.ctt_file_name) # ------------------------------ Permissoes por Produtos ------------------------------ @@ -480,11 +506,9 @@ def __str__(self): return str(self.pk) - # -------------------------------- Signals -------------------------------------- # Esses signals connect devem ficar no final do arquivo para nao dar problema de import. 
-from django.db.models.signals import post_save, pre_delete -from .signals import start_des_cutout_job, drop_product_table - -post_save.connect(start_des_cutout_job, sender=CutOutJob) -pre_delete.connect(drop_product_table, sender=Product) \ No newline at end of file +# pylint: disable = E402 +# from .signals import start_des_cutout_job, drop_product_table +# post_save.connect(start_des_cutout_job, sender=CutOutJob) +# pre_delete.connect(drop_product_table, sender=Product) diff --git a/api/product/serializers.py b/api/product/serializers.py index 492705b19..6660c7c3d 100644 --- a/api/product/serializers.py +++ b/api/product/serializers.py @@ -343,10 +343,13 @@ class CutoutJobSerializer(serializers.HyperlinkedModelSerializer): owner = serializers.SerializerMethodField() execution_time = serializers.SerializerMethodField() - count_files = serializers.SerializerMethodField() - file_sizes = serializers.SerializerMethodField() + h_file_sizes = serializers.SerializerMethodField() is_owner = serializers.SerializerMethodField() + status_name = serializers.CharField( + source='get_cjb_status_display', read_only=True + ) + class Meta: model = CutOutJob @@ -355,13 +358,16 @@ class Meta: 'cjb_product', 'cjb_display_name', 'cjb_status', - 'cjb_job_id', + 'status_name', + 'cjb_tag', 'cjb_xsize', 'cjb_ysize', - 'cjb_job_type', - 'cjb_tag', - 'cjb_band', - 'cjb_Blacklist', + 'cjb_make_fits', + 'cjb_fits_colors', + 'cjb_make_stiff', + 'cjb_stiff_colors', + 'cjb_make_lupton', + 'cjb_lupton_colors', 'cjb_label_position', 'cjb_label_properties', 'cjb_label_colors', @@ -369,11 +375,12 @@ class Meta: 'cjb_start_time', 'cjb_finish_time', 'cjb_description', - 'cjb_image_formats', + 'cjb_files', + 'cjb_file_size', + 'cjb_error', 'owner', 'execution_time', - 'count_files', - 'file_sizes', + 'h_file_sizes', 'is_owner' ) @@ -390,18 +397,9 @@ def get_execution_time(self, obj): except: return None - def get_count_files(self, obj): + def get_h_file_sizes(self, obj): try: - return 
obj.cutout_set.count() - - except: - return None - - def get_file_sizes(self, obj): - try: - sum_sizes = obj.cutout_set.aggregate(sum_size=Sum('ctt_file_size')) - return humanize.naturalsize(sum_sizes.get("sum_size")) - + return humanize.naturalsize(obj.cjb_file_size) except: return None @@ -417,8 +415,11 @@ class CutoutSerializer(serializers.HyperlinkedModelSerializer): cjb_cutout_job = serializers.PrimaryKeyRelatedField( queryset=CutOutJob.objects.all(), many=False) + cjb_des_job = serializers.PrimaryKeyRelatedField( + queryset=Desjob.objects.all(), many=False) + + ctt_img_color = serializers.CharField(source='ctt_filter.filter') ctt_file_source = serializers.SerializerMethodField() - timestamp = serializers.SerializerMethodField() class Meta: model = Cutout @@ -426,42 +427,37 @@ class Meta: fields = ( 'id', 'cjb_cutout_job', + 'cjb_des_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', - 'ctt_filter', - 'ctt_thumbname', + 'ctt_img_format', + 'ctt_img_color', + # 'ctt_filter', # 'ctt_file_path', 'ctt_file_name', 'ctt_file_type', 'ctt_file_size', - 'ctt_download_start_time', - 'ctt_download_finish_time', + # 'ctt_jobid', 'ctt_file_source', - 'timestamp' ) def get_ctt_file_source(self, obj): try: - cutout_source = settings.DES_CUTOUT_SERVICE['CUTOUT_SOURCE'] + # Exemplo do source para o arquivo de imagem. 
+ # http://localhost/data/cutouts/18/7bd2a79749974decab360f401310bf60/DES0305-3415/DESJ030506.1606-341532.4000/DESJ030506.1606-341532.4000_gri_stiff.png - if obj.ctt_file_path is not None: + # Recuperar o Host + host = settings.BASE_HOST - source = os.path.join(cutout_source, obj.ctt_file_path) - - return source - else: - return None - - except KeyError as e: - raise Exception("The CUTOUT_SOURCE parameter has not been configured, " - " add this attribute to the DES_CUTOUT_SERVICE section.") + # Substituir o path de Archive por /data que é o alias + base_source = host + obj.ctt_file_path.replace("/archive", "/data") + # Adicionar o filename + source = "{}/{}?_dc={}".format(base_source, obj.ctt_file_name, time.time()) + return source except Exception as e: - raise (e) - - def get_timestamp(self, obj): - return time.time() + return None class MaskSerializer(serializers.HyperlinkedModelSerializer): diff --git a/api/product/signals.py b/api/product/signals.py index 0c0840763..d15670d15 100644 --- a/api/product/signals.py +++ b/api/product/signals.py @@ -1,49 +1,17 @@ from django.db.models.signals import post_save, post_delete, pre_delete from django.dispatch import receiver from product.models import CutOutJob, Product -from product.tasks import start_des_cutout_job_by_id -from product.tasks import download_cutoutjob from product.tasks import purge_cutoutjob_dir -from product.tasks import notify_user_by_email from lib.sqlalchemy_wrapper import DBBase -@receiver(post_save, sender=CutOutJob) -def start_des_cutout_job(sender, instance, created, **kwargs): - """ - Toda vez que um model CutOutJob for criado sera disparado um job para o servico DESCutout - Utilizando uma Task com Cellery - """ - if created: - start_des_cutout_job_by_id.delay(instance.pk) - - else: - # Se e um update da um cutoutjob e o status e Before Download - if instance.cjb_status == 'bd': - # Disparar a task que vai fazer o downaload - download_cutoutjob.delay(instance.pk) - - elif instance.cjb_status == 
'dl': - # Quando um Model Cutout Job for marcado como deletado - purge_cutoutjob_dir.delay(instance.pk) - - elif instance.cjb_status == 'ok': - notify_user_by_email.delay(instance.pk) - - elif instance.cjb_status == 'je': - notify_user_by_email.delay(instance.pk) - - elif instance.cjb_status == 'er': - notify_user_by_email.delay(instance.pk) - - @receiver(post_delete, sender=CutOutJob) def purge_cutout_job_dir(sender, instance, using, **kwargs): """ Toda Vez que um CutoutJob for deletado deve remover o diretorio com as imagens """ - purge_cutoutjob_dir.delay(instance.pk, instance.cjb_product.pk) + purge_cutoutjob_dir.delay(instance.pk) @receiver(pre_delete, sender=Product) @@ -80,3 +48,7 @@ def drop_product_table(sender, instance, using, **kwargs): except Exception as e: # Tenta dropar a tabela se nao conseguir nao faz nada. pass + + +# post_save.connect(start_des_cutout_job, sender=CutOutJob) +# pre_delete.connect(drop_product_table, sender=Product) diff --git a/api/product/tasks.py b/api/product/tasks.py index ae7142c4c..154b6aa8f 100644 --- a/api/product/tasks.py +++ b/api/product/tasks.py @@ -4,22 +4,17 @@ import shutil from smtplib import SMTPException -from celery import chord -from celery import shared_task -from celery import task +from celery import chord, shared_task, task from celery.decorators import periodic_task from celery.task.schedules import crontab -from common.download import Download from django.conf import settings from django.contrib.auth.models import User -from django.utils import timezone -from product.descutoutservice import DesCutoutService, CutoutJobNotify + +from product.descutoutservice import CutoutJobNotify, DesCutoutService from product.export import Export -from product.models import CutOutJob -from product.models import FilterCondition -from product.models import Product -from product.saveas import SaveAs from product.importproduct import ImportTargetListCSV +from product.models import CutOutJob, FilterCondition, Product +from 
product.saveas import SaveAs from product.serializers import FConditionSerializer descutout = DesCutoutService() @@ -29,22 +24,28 @@ importtargetlistcsv = ImportTargetListCSV() - -@task(name="start_des_cutout_job_by_id") -def start_des_cutout_job_by_id(cutoutjob_id): +@periodic_task( + # Tempo de delay para a task check_jobs em minutos + run_every=(crontab(minute='*/%s' % 1)), + name="start_des_cutout_job", + ignore_result=True +) +def start_des_cutout_job(): """ - Esta Task vai instanciar a Classe DesCutoutService, - executar o methodo start_job_by_id - esse job vai enviar o job para o servico do des. - - :param cutoutjob_id: Chave pk do model product.CutOutModel + Recupera todos os Cutoutjob com status start. + instancia a Classe DesCutoutService, + executa o methodo start_job_by_id. + esse metodo vai enviar o job para o servico do des. """ - descutout.start_job_by_id(int(cutoutjob_id)) + # Para cada job com status Start executa o metodo de submissão + for job in CutOutJob.objects.filter(cjb_status="st"): + descutout.start_job_by_id(job.pk) @periodic_task( # run_every=(crontab(minute='*/1')), - run_every=(crontab(minute='*/%s' % descutout.check_jobs_task_delay)), + # Tempo de delay para a task check_jobs em minutos + run_every=(crontab(minute='*/%s' % 1)), # run_every=10.0, name="check_jobs_running", ignore_result=True @@ -52,158 +53,41 @@ def start_des_cutout_job_by_id(cutoutjob_id): def check_jobs_running(): """ Recupera todos os cutoutjobs com status Running - e verifica no servico DESCutout o status do job + e verifica no servico DESaccess o status do job e os marca com status """ - descutout.check_jobs() - - -@task(name="download_cutoutjob") -def download_cutoutjob(id): - logger = descutout.logger - - logger.info("Start downloading Cutout Job [ %s ]" % id) - - cutoutjob = descutout.get_cutoutjobs_by_id(id) - - # Changing the CutoutJob Status for Downloading - descutout.change_cutoutjob_status(cutoutjob, "dw") - - cutoutdir = 
descutout.get_cutout_dir(cutoutjob) - - allarqs = list() - - image_formats = cutoutjob.cjb_image_formats - if image_formats is None: - image_formats = 'png' - - formats = image_formats.split(',') - - logger.info("Only download the files with these formats: [ %s ]" % image_formats) + # Pegar todos os CutoutJobs com status running + jobs = CutOutJob.objects.filter(cjb_status="rn") + # Faz um for para cara job + for job in jobs: + descutout.check_job_by_id(job.pk) - # Deixar na memoria a lista de objetos ja associada com os nomes dos arquivos - objects = descutout.get_objects_from_file(cutoutjob) - # Recuperar o arquivo de Results - result_file_path = os.path.join(descutout.data_dir, cutoutjob.cjb_results_file) - logger.debug("Result File Path: %s" % result_file_path) - with open(result_file_path, 'r') as result_file: - lines = result_file.readlines() - for url in lines: - arq = descutout.parse_result_url(url) - - # Verifica se o formato do arquivo esta na lista de formatos a serem baixados - # os formatos ficam no campo cjb_image_formats - if arq.get('file_type').lower() in formats: - allarqs.append(arq) - - object_id = None - object_ra = None - object_dec = None - file_size = None - finish = None - - logger.info("Downloading [ %s ]" % arq.get('filename')) - - for obj in objects: - if arq.get("thumbname") == obj.get("thumbname"): - object_id = obj.get("id") - object_ra = obj.get("ra") - object_dec = obj.get("dec") - - start = timezone.now() - file_path = Download().download_file_from_url( - arq.get('url'), - cutoutdir, - arq.get('filename'), - ignore_errors=True - ) - - if file_path is not None: - file_size = os.path.getsize(file_path) - finish = timezone.now() - - cutout = descutout.create_cutout_model( - cutoutjob, - filename=arq.get('filename'), - thumbname=arq.get('thumbname'), - type=arq.get('file_type'), - filter=None, - object_id=object_id, - object_ra=object_ra, - object_dec=object_dec, - file_path=file_path, - file_size=file_size, - start=start, - 
finish=finish) - - result_file.close() - - # Deletar o job no Servico - descutout.delete_job(cutoutjob) - - # Adicionar o tempo de termino - cutoutjob.cjb_finish_time = timezone.now() - cutoutjob.save() - - # Changing the CutoutJob Status for Done - descutout.change_cutoutjob_status(cutoutjob, "ok") - - -@task(name="purge_cutoutjob_dir") -def purge_cutoutjob_dir(cutoutjob_id, product=None): - """ - :param cutoutjob_id: Chave pk do model product.CutOutModel +@periodic_task( + # Tempo de delay para a task check_jobs em minutos + run_every=(crontab(minute='*/%s' % 1)), + name="download_cutoutjob", + ignore_result=True +) +def download_cutoutjob(): + """Recupera todos os cutoutjobs com status before download. + executa o metodo download_by_id. este metodo vai fazer o download dos resultados. + e finalizar o job. """ - logger = descutout.logger - - logger.info("Purge a Cutout Job [ %s ]" % cutoutjob_id) - - cutoutjob = None - - try: - if product is None: - cutoutjob = descutout.get_cutoutjobs_by_id(cutoutjob_id) - cutout_dir = descutout.get_cutout_dir(cutoutjob) - - else: - cutout_dir = descutout.get_cutout_dir(product=product, jobid=cutoutjob_id) - - logger.debug(cutout_dir) - - shutil.rmtree(cutout_dir) - # shutil.rmtree(cutout_dir, ignore_errors=True) - - logger.info("Removed Dir [ %s ]" % cutout_dir) - - logger.info("Deleting a Cutout Job [ %s ]" % cutoutjob_id) - - if cutoutjob is not None: - cutoutjob.delete() - - logger.info("Purge Done!") - - except Exception as e: - raise e - - -@task(name="notify_user_by_email") -def notify_user_by_email(cutoutjob_id): - logger = descutout.logger - - logger.info("Notify user about Cutout Job [ %s ]" % cutoutjob_id) - - cutoutjob = descutout.get_cutoutjobs_by_id(cutoutjob_id) + # Para cada job com status Before Download executa o metodo de + for job in CutOutJob.objects.filter(cjb_status="bd"): + descutout.download_by_id(job.pk) - user = cutoutjob.owner - logger.debug("User: %s" % user.username) - - try: - 
cutoutJobNotify.create_email_message(cutoutjob) +@task(name="purge_cutoutjob_dir") +def purge_cutoutjob_dir(cutoutjob_id): + """Remove um diretório de cutout job do armazenamento local. + esta task é disparada toda vez que um model CutouJob é deletado. usando signal. - except SMTPException as e: - logger.error(e) + Args: + cutoutjob_id (int): CutoutJob model primary key + """ + descutout.purge_cutoutjob_dir(cutoutjob_id) # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Export Product Tasks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%# @@ -221,7 +105,8 @@ def export_target_by_filter(product_id, filetypes, user_id, filter_id=None, cuto logger = export.logger - logger.info("Starting Export Task for the product %s" % product_id) + logger.info("---------------------------------------------") + logger.info("Starting Export Task for the product [%s]" % product_id) logger.debug("User: %s" % user_id) logger.debug("Product: %s" % product_id) logger.debug("Filetypes: %s" % ", ".join(filetypes)) @@ -230,7 +115,6 @@ def export_target_by_filter(product_id, filetypes, user_id, filter_id=None, cuto # Criar o Diretorio de export try: - # Recuperar o Model Product product = Product.objects.select_related().get(pk=int(product_id)) @@ -240,15 +124,10 @@ def export_target_by_filter(product_id, filetypes, user_id, filter_id=None, cuto user = User.objects.get(pk=int(user_id)) - # Chords Task http://docs.celeryproject.org/en/latest/userguide/canvas.html#chords - header = list() - try: - # Notificação de inicio export.notify_user_export_start(user, product) - # Criar o diretorio de export export_dir = export.create_export_dir(name=product.prd_name) @@ -281,7 +160,6 @@ def export_target_by_filter(product_id, filetypes, user_id, filter_id=None, cuto logger.info("Finished Task target_to_csv") - elif filetype == "fits": # Task To Fits logger.info("Starting Task target_to_fits") @@ -311,21 +189,18 @@ def export_target_by_filter(product_id, filetypes, user_id, filter_id=None, cuto logger.info("Finished 
Task target_to_fits") - # Cutouts if cutoutjob_id not in [None, "", False, "false", "False", 0]: export_cutoutjob(cutoutjob_id, export_dir) logger.debug("Teste: %s" % cutoutjob_id) - # Cria um arquivo zip com todos os arquivos gerados pelo export. url = export.create_zip(export_dir) # Notifica o Usuario sobre o Download. export.notify_user_export_success(user.pk, product.prd_display_name, url) - except Exception as e: logger.error(e) @@ -409,18 +284,9 @@ def export_cutoutjob(cutoutjob_id, export_dir): path = cutoutjob.cjb_cutouts_path - # Mantendo compatibilidade com Jobs anteriores ao path ser guardado - # TODO: Pode ser removido se todos os cutouts com o campo cjb_cutouts_path forem removidos - if path is None or path == "": - logger.warning( - "CutoutJob does not have the path field, the path will be generated using the result_file field.") - path = cutoutjob.cjb_results_file - path = os.path.dirname(path) - path = path.split(settings.DATA_DIR)[1] - export.product_cutouts( name=cutoutjob.cjb_display_name, - path_origin=path.strip("/"), + path_origin=path, path_destination=export_dir ) @@ -456,7 +322,6 @@ def product_save_as(user_id, product_id, name, filter_id=None, description=None) saveas.create_table_by_product_id(user_id, product_id, name, filter_id, description) - # %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Product Import Target List CSV %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% # @task(name="import_target_list") @shared_task @@ -464,5 +329,3 @@ def import_target_list(user_id, data): if data.get('mime') == 'csv': importtargetlistcsv.start_import(user_id, data) - - diff --git a/api/product/templates/cutout_notification_finish.html b/api/product/templates/cutout_notification_finish.html index 7a85a1773..af87b356a 100644 --- a/api/product/templates/cutout_notification_finish.html +++ b/api/product/templates/cutout_notification_finish.html @@ -1,194 +1,197 @@ + - - - - - - -
- - - - - - - - - + + + + +
-
- + + + + + + + + + + +
+ + + + + + + + + - - - - - - - - - - - - -
+
+ -

- LIneA Science Server

+

+ LIneA Science Server

- -
-
-
- - - - - - - + - + - + - - - - - - - - - - - -
- Dear {{username}}, + + +
- + - + - + - -
+ + + + + + + + + + + + + + + + + +
+ Dear {{username}}, +
+ + - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + +
-
- -
-
- Mosaic has Finished - -
- -
- {{ execution_time_humanized }} -
- - - - - - - - - - - - - - - - - - - - {% if files_size %} - - - - - {% endif %} - - - - - - - {% if cutoutjob_tag %} - - - - - {% endif %} - - - - - - - - - - - - - - - - - - -
Input Target Catalog:{{ target_display_name }}
Name:{{ cutoutjob_display_name }}
Number of objects:{{ n_objects }}
Files:{{ n_files }}
Size:{{ files_size }}
Image Formats:{{ cutoutjob_image_formats }}
Release TAG:{{ cutoutjob_tag }}
Cutout Size:{{ cutoutjob_xsize }} x {{ cutoutjob_ysize }} Arcsec
Start:{{ start }}
End:{{ finish }}
Execution Time:{{ execution_time }}
-
-   -
+
+ +
+
+ Mosaic has + Finished + +
+ +
+ {{ execution_time_humanized }} +
+ + + + + + + + + + + + + + + + + + + + {% if files_size %} + + + + + {% endif %} + + {% if cutoutjob_tag %} + + + + + {% endif %} + + + + + + + + + + + + + + + + + + +
Input Target Catalog:{{ target_display_name }}
Name:{{ cutoutjob_display_name }}
Number of objects:{{ n_objects }}
Files:{{ n_files }}
Size:{{ files_size }}
Release TAG:{{ cutoutjob_tag }}
Cutout Size:{{ cutoutjob_xsize }} x {{ cutoutjob_ysize }} Arcsec
Start:{{ start }}
End:{{ finish }}
Execution Time:{{ execution_time }}
+
+   + +
+
+
+ Powered by LIneA | Dark Energy Survey | NCSA +
- -
- Powered by LIneA | Dark Energy Survey | NCSA -
-
+ +
+
- + \ No newline at end of file diff --git a/api/product/views.py b/api/product/views.py index 44d1639b9..0df127a6a 100644 --- a/api/product/views.py +++ b/api/product/views.py @@ -13,7 +13,7 @@ from rest_framework import viewsets from rest_framework.authentication import SessionAuthentication, BasicAuthentication from rest_framework.authentication import TokenAuthentication -from rest_framework.decorators import list_route +from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response @@ -123,7 +123,7 @@ class CatalogViewSet(viewsets.ModelViewSet, mixins.UpdateModelMixin): ordering_fields = ('id', 'prd_name', 'prd_display_name', 'prd_class') - @list_route() + @action(detail=False) def get_class_tree_by_group(self, request): """ Este metodo retorna uma tree, com todos os produtos de um grupo. estes produtos esto @@ -330,7 +330,7 @@ class ProductContentViewSet(viewsets.ModelViewSet): ordering_fields = ('id', 'pcc_column_name',) - @list_route() + @action(detail=False) def get_display_content(self, request): pcn_product_id = request.query_params.get('pcn_product_id', None) @@ -494,7 +494,7 @@ class ProductAssociationViewSet(viewsets.ModelViewSet): ordering_fields = ('id',) - @list_route() + @action(detail=False) def get_ucds_by_product(self, request): product_id = request.query_params.get('product_id', None) @@ -663,7 +663,7 @@ class CutoutViewSet(viewsets.ModelViewSet): serializer_class = CutoutSerializer - filter_fields = ('id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_filter',) + filter_fields = ('id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_img_format', 'ctt_filter',) ordering_fields = ('id',) diff --git a/api/product_classifier/views.py b/api/product_classifier/views.py index 38540ebc3..db35d1f43 100644 --- a/api/product_classifier/views.py +++ b/api/product_classifier/views.py @@ -1,6 +1,6 @@ from rest_framework import viewsets from rest_framework.response import Response -from 
rest_framework.decorators import list_route +from rest_framework.decorators import action from rest_framework import filters from django.db.models import Q import django_filters @@ -8,6 +8,7 @@ from .serializers import ProductClassSerializer, ProductGroupSerializer, ProductClassContentSerializer from django_filters.rest_framework import DjangoFilterBackend + class ProductClassViewSet(viewsets.ModelViewSet): queryset = ProductClass.objects.all() @@ -31,7 +32,7 @@ class ProductGroupViewSet(viewsets.ModelViewSet): ordering_fields = ('pgr_name', 'pgr_display_name') - @list_route() + @action(detail=False) def get_group(self, request): queryset = ProductGroup.objects.select_related().filter(is_catalog=True) result = { diff --git a/api/requirements.txt b/api/requirements.txt index c1ab8c45f..8f1c61f74 100644 --- a/api/requirements.txt +++ b/api/requirements.txt @@ -1,40 +1,56 @@ -amqp==2.1.4 -astropy==2.0.1 -billiard==3.5.0.5 -cached-property==1.3.0 -celery==4.1.1 -coverage==4.5.2 -cx-Oracle==7.0.0 -defusedxml==0.5.0 -Django==2.1.5 -django-allauth==0.32.0 -django-auth-ldap==2.1.1 -django-celery-results==1.0.1 -django-cors-headers==2.4.0 -django-filter==2.1.0 +amqp==2.6.0 +astropy==4.1 +attrs==20.3.0 +billiard==3.6.3.0 +cached-property==1.5.2 +celery==4.4.7 +certifi==2020.11.8 +cffi==1.14.3 +chardet==3.0.4 +coverage==5.3 +cryptography==3.2.1 +cx-Oracle==8.0.1 +defusedxml==0.6.0 +Django==2.2.17 +django-allauth==0.43.0 +django-auth-ldap==2.2.0 +django-celery-results==1.2.1 +django-cors-headers==3.5.0 +django-filter==2.4.0 django-settings-export==1.2.1 -django-url-filter==0.3.12 -djangorestframework==3.9.1 -enum-compat==0.0.2 -humanize==0.5.1 -kombu==4.2.2.post1 -Markdown==2.6.5 -model-mommy==1.6.0 -numpy==1.19.2 +django-url-filter==0.3.15 +djangorestframework==3.12.2 +enum-compat==0.0.3 +humanize==3.1.0 +idna==2.10 +importlib-metadata==2.0.0 +iniconfig==1.1.1 +kombu==4.6.11 +Markdown==3.3.3 +model-mommy==2.0.0 +numpy==1.19.4 oauthlib==3.0.1 -pandas==1.1.3 
-psycopg2-binary==2.8.4 -py==1.7.0 +packaging==20.4 +pandas==1.1.4 +pluggy==0.13.1 +psycopg2-binary==2.8.6 +py==1.9.0 pyasn1==0.4.8 pyasn1-modules==0.2.8 -pytest==3.1.3 +pycparser==2.20 +PyJWT==1.7.1 +pyparsing==2.4.7 +pytest==6.1.2 python-dateutil==2.8.1 python-ldap==3.2.0 python3-openid==3.1.0 pytz==2018.9 -requests==2.10.0 -requests-oauthlib==1.2.0 -six==1.10.0 -SQLAlchemy==1.1.15 -sqlparse==0.1.19 -vine==1.2.0 +requests==2.24.0 +requests-oauthlib==1.3.0 +six==1.15.0 +SQLAlchemy==1.3.20 +sqlparse==0.4.1 +toml==0.10.2 +urllib3==1.25.11 +vine==1.3.0 +zipp==3.4.0 diff --git a/api/validation/views.py b/api/validation/views.py index bce490761..81b1cfe15 100644 --- a/api/validation/views.py +++ b/api/validation/views.py @@ -10,7 +10,6 @@ from rest_framework import filters from rest_framework import viewsets from django_filters.rest_framework import DjangoFilterBackend -from rest_framework.decorators import list_route from rest_framework.response import Response logger = logging.getLogger(__name__) diff --git a/frontend/eyeballing/package.json b/frontend/eyeballing/package.json index dac5d8d6f..4f86cca54 100644 --- a/frontend/eyeballing/package.json +++ b/frontend/eyeballing/package.json @@ -3,23 +3,24 @@ "version": "0.1.0", "private": true, "dependencies": { - "@devexpress/dx-react-core": "^1.11.1", - "@devexpress/dx-react-grid": "^1.11.1", - "@devexpress/dx-react-grid-material-ui": "^1.11.1", - "@material-ui/core": "^4.1.0", - "@material-ui/icons": "^4.0.1", - "axios": "^0.19.0", + "@devexpress/dx-react-core": "^1.11.2", + "@devexpress/dx-react-grid": "^1.11.2", + "@devexpress/dx-react-grid-material-ui": "^1.11.2", + "@material-ui/core": "^4.11.2", + "@material-ui/icons": "^4.11.2", + "@material-ui/lab": "^4.0.0-alpha.56", + "axios": "^0.21.0", "dateformat": "^3.0.3", "dotenv": "^6.2.0", "lodash": "^4.17.11", "prop-types": "^15.7.2", - "react": "^16.8.3", - "react-dom": "^16.8.3", + "react": "^17.0.1", + "react-dom": "^17.0.1", + "react-infinite-scroll-component": 
"^5.1.0", "react-player": "^1.14.2", - "react-router-dom": "^5.0.1", - "react-virtuoso": "^0.12.1", + "react-router-dom": "^5.2.0", "react-window": "^1.8.2", - "typeface-roboto": "^0.0.54" + "typeface-roboto": "^1.1.13" }, "devDependencies": { "@babel/core": "^7.3.4", @@ -61,4 +62,4 @@ "not ie <= 11", "not op_mini all" ] -} \ No newline at end of file +} diff --git a/frontend/eyeballing/public/index.html b/frontend/eyeballing/public/index.html index b72b3a1d7..6361f2cd4 100755 --- a/frontend/eyeballing/public/index.html +++ b/frontend/eyeballing/public/index.html @@ -45,7 +45,7 @@ - Tile Inspection + Tile Viewer diff --git a/frontend/eyeballing/src/api/Api.js b/frontend/eyeballing/src/api/Api.js index 02cd8ee7b..3e99fafe9 100644 --- a/frontend/eyeballing/src/api/Api.js +++ b/frontend/eyeballing/src/api/Api.js @@ -44,13 +44,14 @@ axios.interceptors.response.use( }, ); + class DriApi { loggedUser = async () => { const res = await axios.get('/logged/get_logged/'); const user = await res.data; window.gtag('config', 'GA_MEASUREMENT_ID', { - 'user_id': user.id, + user_id: user.id, }); return user; @@ -159,6 +160,18 @@ class DriApi { }).then(res => res.data); getTutorial = () => axios.get('/tutorial/', { params: { app_name: 'tile_inspection' } }).then(res => res.data); + + getDatasetInfo = id => axios.get(`/dataset/${id}/desaccess_tile_info/`).then(res => res.data) + + getTokenizedDatasetUrl = url => axios.post('/dataset/desaccess_get_download_url/', { file_url: url }).then(res => res.data.download_url) + + getTileInfo = id => axios.get(`/tiles/${id}/desaccess_tile_info/`).then(res => res.data) + + getTokenizedTileUrl = url => axios.post('/tiles/desaccess_get_download_url/', { file_url: url }).then(res => res.data.download_url) + + getTileByName = name => axios.get('/tiles/', { params: { search: name } }).then(res => res.data) + + getTileInspectionOption = () => axios.get('/get_setting/', { params: { name: 'TILE_VIEWER_INSPECTION_ENABLED' } }).then(res => res.data) } 
export default DriApi; diff --git a/frontend/eyeballing/src/components/Counter.js b/frontend/eyeballing/src/components/Counter.js index e4128aeeb..293bc8a05 100644 --- a/frontend/eyeballing/src/components/Counter.js +++ b/frontend/eyeballing/src/components/Counter.js @@ -45,59 +45,65 @@ export default function Counter(props) { {props.counts.tiles ? props.counts.tiles : 0} - - Good: - - - {props.counts.true ? props.counts.true : 0} - + {props.hasInspection ? ( + <> + + Good: + + + {props.counts.true ? props.counts.true : 0} + - - Bad: - - - {props.counts.false ? props.counts.false : 0} - + + Bad: + + + {props.counts.false ? props.counts.false : 0} + - - Not: - - - {props.counts.null ? props.counts.null : 0} - + + Not: + + + {props.counts.null ? props.counts.null : 0} + + + + ) : null} ); } Counter.propTypes = { counts: PropTypes.object.isRequired, + hasInspection: PropTypes.bool.isRequired, }; diff --git a/frontend/eyeballing/src/components/SearchField.js b/frontend/eyeballing/src/components/SearchField.js index 5e7feb6f5..3b6dbd67a 100644 --- a/frontend/eyeballing/src/components/SearchField.js +++ b/frontend/eyeballing/src/components/SearchField.js @@ -1,10 +1,14 @@ import React from 'react'; import PropTypes from 'prop-types'; -import { withStyles, fade } from '@material-ui/core/styles'; +import { makeStyles, fade } from '@material-ui/core/styles'; import InputBase from '@material-ui/core/InputBase'; +import IconButton from '@material-ui/core/IconButton'; import SearchIcon from '@material-ui/icons/Search'; +import CloseIcon from '@material-ui/icons/Close'; +import CircularProgress from '@material-ui/core/CircularProgress'; -const styles = theme => ({ + +const useStyles = makeStyles(theme => ({ search: { position: 'relative', borderRadius: theme.shape.borderRadius, @@ -19,47 +23,61 @@ const styles = theme => ({ }, inputRoot: { color: 'inherit', - float: 'left', + width: '100%', + }, + inputInput: { + padding: theme.spacing(1, 1, 1, 0), + // vertical padding + font 
size from searchIcon + paddingLeft: `calc(1em + ${theme.spacing(2)}px)`, + transition: theme.transitions.create('width'), width: '100%', }, searchIcon: { - width: theme.spacing(5), + padding: 0, height: '100%', position: 'absolute', pointerEvents: 'none', display: 'flex', alignItems: 'center', justifyContent: 'center', - right: 0, }, - - inputInput: { - padding: theme.spacing(1, 1, 1, 0), - transition: theme.transitions.create('width'), - width: '100%', - [theme.breakpoints.up('md')]: { - width: 200, + clearIcon: { + padding: '0 4px', + height: '100%', + position: 'absolute', + display: 'flex', + alignItems: 'center', + justifyContent: 'center', + right: 0, + top: 0, + zIndex: 1, + cursor: 'pointer', + '&:hover': { + backgroundColor: 'transparent', }, }, -}); +})); function SearchField(props) { - const { classes } = props; + const classes = useStyles(); + const { + searchRef, handleInputSearch, disabled, + } = props; - - function handleInputSearch(event) { - const value = event.target.value.toUpperCase(); - props.handleInputSearch(value); - } + const handleClearSearch = () => { + searchRef.current.value = ''; + handleInputSearch(); + }; return (
- + {disabled ? : }
+ {searchRef.current && searchRef.current.value.length > 0 && ( + + + + + )}
); } SearchField.propTypes = { - inputSearchValue: PropTypes.string.isRequired, handleInputSearch: PropTypes.func.isRequired, + searchRef: PropTypes.shape({ + current: PropTypes.oneOfType([ + PropTypes.instanceOf(Element), + PropTypes.string, + ]), + }).isRequired, + disabled: PropTypes.bool.isRequired, }; -export default withStyles(styles)(SearchField); +export default SearchField; diff --git a/frontend/eyeballing/src/components/TileTable.js b/frontend/eyeballing/src/components/TileTable.js index e2a7a4f39..d8b376b00 100644 --- a/frontend/eyeballing/src/components/TileTable.js +++ b/frontend/eyeballing/src/components/TileTable.js @@ -107,7 +107,7 @@ function TileTable({ backLink, currentRelease }) { const columns = [ { name: 'dts_dataset__tile__tli_tilename', title: 'Tile', getCellValue: row => row.tilename }, - { name: 'dts_dataset__inspected__isp_value', title: 'Status', getCellValue: row => row.isp_value }, + { name: 'dts_dataset__inspected__isp_value', title: 'Status', getCellValue: row => renderInspectionValue(row.isp_value) }, { name: 'owner__username', title: 'Owner', getCellValue: row => row.owner }, { name: 'dts_date', title: 'Date', getCellValue: row => row.dts_date }, { name: 'dts_comment', title: 'Comments', getCellValue: row => row.dts_comment }, @@ -121,10 +121,10 @@ function TileTable({ backLink, currentRelease }) { { columnName: 'dts_comment', width: 'auto' }, ]; - function renderInspectionValue(rowData) { - if (rowData.isp_value !== null) { + function renderInspectionValue(status) { + if (status !== null) { return ( - rowData.isp_value === true ? ( + status === true ? ( ) : ( @@ -141,26 +141,25 @@ function TileTable({ backLink, currentRelease }) { setRows([]); } - async function loadData() { - const comments = await api.comments({ + function loadData() { + api.comments({ release: currentRelease, sorting, search, dts_type: filterComment, offset: currentPage === 0 ? 
0 : currentPage * 9, limit: 10, - }); - - if (comments.results && comments.results.length > 0) { - setRows(comments.results.map(row => ({ - ...row, - isp_value: renderInspectionValue(row), - }))); - setTotalCount(comments.count); + }).then((comments) => { + if (comments.results && comments.results.length > 0) { + setRows(comments.results); + setTotalCount(comments.count); + setLoading(false); + } else { + clearData(); + } + }).finally(() => { setLoading(false); - } else { - clearData(); - } + }); } async function loadDownloadData() { diff --git a/frontend/eyeballing/src/components/download/index.js b/frontend/eyeballing/src/components/download/index.js new file mode 100644 index 000000000..706e77ec5 --- /dev/null +++ b/frontend/eyeballing/src/components/download/index.js @@ -0,0 +1,135 @@ +import React, { useState } from 'react'; +import PropTypes from 'prop-types'; +import { + Dialog, + DialogTitle, + DialogContent, + IconButton, + Grid, + Table, + TableHead, + TableBody, + TableCell, + TableRow, + CircularProgress, + Typography, +} from '@material-ui/core'; +import { + Close as CloseIcon, + GetApp as DownloadIcon, +} from '@material-ui/icons'; +import useStyles from './styles'; +import DriApi from '../../api/Api'; + +function DownloadDialog({ + open, + handleClose, + tilename, + images, + catalogs, + error, +}) { + const classes = useStyles(); + const [isAuthenticating, setIsAuthenticating] = useState(''); + + const api = new DriApi(); + + const handleItemClick = (url) => { + setIsAuthenticating(url); + api.getTokenizedDatasetUrl(url) + .then((res) => { + window.open(res, '_blank'); + setIsAuthenticating(''); + }); + }; + + return ( + + {`Download - ${tilename}`} + + + + + + + {!error ? ( + + + + Filename + Band + Image + Catalog + + + + {Object.keys(images).map(key => ( + + + {`${tilename}_${key}.fits.gz`} + + + {key === 'y' ? key.toUpperCase() : key} + + + handleItemClick(images[key])}> + {isAuthenticating === images[key] + ? 
+ : } + + + + handleItemClick(catalogs[key])}> + {isAuthenticating === catalogs[key] + ? + : } + + + + ))} + +
+ ) : ( + Oops! No download was found for this tile. + )} +
+
+
+
+ ); +} + +DownloadDialog.propTypes = { + open: PropTypes.bool.isRequired, + handleClose: PropTypes.func.isRequired, + tilename: PropTypes.string.isRequired, + images: PropTypes.shape({ + g: PropTypes.string, + r: PropTypes.string, + i: PropTypes.string, + z: PropTypes.string, + y: PropTypes.string, + }), + catalogs: PropTypes.shape({ + g: PropTypes.string, + r: PropTypes.string, + i: PropTypes.string, + z: PropTypes.string, + y: PropTypes.string, + }), + error: PropTypes.bool, +}; + +DownloadDialog.defaultProps = { + error: false, + images: null, + catalogs: null, +}; + +export default DownloadDialog; diff --git a/frontend/eyeballing/src/components/download/styles.js b/frontend/eyeballing/src/components/download/styles.js new file mode 100644 index 000000000..cd97ec15c --- /dev/null +++ b/frontend/eyeballing/src/components/download/styles.js @@ -0,0 +1,34 @@ +import { makeStyles } from '@material-ui/core/styles'; + +const useStyles = makeStyles(theme => ({ + closeButton: { + position: 'absolute', + right: theme.spacing(1), + top: 4, + color: theme.palette.grey[500], + }, + closeIcon: { + fontSize: '1rem', + }, + zIndex: { + zIndex: '2001 !important', // Because the z-index of the .leaflet-top.leaflet.left is 2000. + }, + dialogTitle: { + borderBottom: '1px solid rgb(227, 230, 240)', + backgroundColor: 'rgb(248, 249, 252)', + color: '#34465d', + padding: `${theme.spacing(1)}px ${theme.spacing(2)}px`, + marginBottom: theme.spacing(1), + }, + dialogContent: { + padding: theme.spacing(2), + }, + checkboxGroup: { + marginTop: theme.spacing(2), + }, + cardContent: { + paddingBottom: `${theme.spacing(2)}px !important`, // To override the .MuiCardContent-root:last-child. 
+ }, +})); + +export default useStyles; diff --git a/frontend/eyeballing/src/components/visiomatic/Visiomatic.js b/frontend/eyeballing/src/components/visiomatic/Visiomatic.js index 84e7e3d45..ff88bdd66 100644 --- a/frontend/eyeballing/src/components/visiomatic/Visiomatic.js +++ b/frontend/eyeballing/src/components/visiomatic/Visiomatic.js @@ -65,6 +65,7 @@ class VisiomaticPanel extends Component { currentDataset: PropTypes.number, points: PropTypes.array, reloadData: PropTypes.func, + hasInspection: PropTypes.bool.isRequired, }; constructor(props) { @@ -236,8 +237,11 @@ class VisiomaticPanel extends Component { map.on('layeradd', this.onLayerAdd, this); map.on('layerremove', this.onLayerRemove, this); - map.on('contextmenu', this.onContextMenuOpen, this); - map.on('overlaycatalog', this.overlayCatalog, this); + + if (this.props.hasInspection) { + map.on('contextmenu', this.onContextMenuOpen, this); + map.on('overlaycatalog', this.overlayCatalog, this); + } this.map = map; // this.changeImage(); } @@ -262,12 +266,16 @@ class VisiomaticPanel extends Component { this.changeImage(); } - if (prevProps.points !== this.props.points) { + if (this.props.hasInspection && prevProps.points !== this.props.points) { this.overlayCatalog(); if (prevProps.points.length > 0 && this.props.points.length > 0) { this.setView(); } } + + if (this.props.hasInspection && prevProps.contrast !== this.props.contrast) { + this.changeImage(); + } } @@ -317,7 +325,7 @@ class VisiomaticPanel extends Component { // TODO: Deve ser removido solucao temporaria url = url.replace('http://', 'https://'); - const colorRanges = this.getColorRanges(); + const currentColorRanges = this.getColorRanges(); this.layer = this.libL.tileLayer .iip(url, { @@ -333,7 +341,7 @@ class VisiomaticPanel extends Component { colorSat: 2.0, quality: 100, channelLabelMatch: '[ugrizY]', - minMaxValues: colorRanges.minMaxValues, + minMaxValues: currentColorRanges.minMaxValues, // minMaxValues: [ // // g // [-0.390453905, 1000], 
@@ -375,7 +383,7 @@ class VisiomaticPanel extends Component { m = 0; } const str = `${(h < 10 ? '0' : '') + h.toString()}:${m < 10 ? '0' : ''}${m.toString() - }:${sf < 10.0 ? '0' : ''}${sf.toFixed(3)}`; + }:${sf < 10.0 ? '0' : ''}${sf.toFixed(3)}`; const lat = Math.abs(latlng.lat); @@ -399,6 +407,8 @@ class VisiomaticPanel extends Component { } render() { + const { hasInspection } = this.props; + // Ajuste no Tamanho do container return ( <> @@ -411,16 +421,18 @@ class VisiomaticPanel extends Component { // height: '100%', }} /> - + {hasInspection ? ( + + ) : null} ); } diff --git a/frontend/eyeballing/src/home.js b/frontend/eyeballing/src/home.js index b125ec69e..96a57ce82 100644 --- a/frontend/eyeballing/src/home.js +++ b/frontend/eyeballing/src/home.js @@ -1,5 +1,5 @@ import React, { - useState, useEffect, useRef, useCallback, + useState, useEffect, useRef, } from 'react'; import { makeStyles } from '@material-ui/core/styles'; import { Grid, Link as MaterialLink } from '@material-ui/core'; @@ -18,7 +18,6 @@ import { import ArrowBack from '@material-ui/icons/ArrowBack'; import Typography from '@material-ui/core/Typography'; import Tooltip from '@material-ui/core/Tooltip'; -import { Virtuoso } from 'react-virtuoso'; import ListItem from '@material-ui/core/ListItem'; import ListItemText from '@material-ui/core/ListItemText'; import ListItemSecondaryAction from '@material-ui/core/ListItemSecondaryAction'; @@ -26,6 +25,10 @@ import ThumbUpIcon from '@material-ui/icons/ThumbUp'; import ThumbDownIcon from '@material-ui/icons/ThumbDown'; import Comment from '@material-ui/icons/Comment'; import Divider from '@material-ui/core/Divider'; +import Download from '@material-ui/icons/GetApp'; +import Backdrop from '@material-ui/core/Backdrop'; +import CircularProgress from '@material-ui/core/CircularProgress'; +import InfiniteScroll from 'react-infinite-scroll-component'; import TileTable from './components/TileTable'; import SnackBar from './components/SnackBar'; import 
ChooseFilterDialog from './components/ChooseFilterDialog'; @@ -36,6 +39,7 @@ import SearchField from './components/SearchField'; import VisiomaticPanel from './components/visiomatic/Visiomatic'; import Footer from './components/Footer'; import Header from './components/Header'; +import DownloadDialog from './components/download'; import DriApi from './api/Api'; @@ -86,11 +90,6 @@ const useStyles = makeStyles(theme => ({ backLinkIcon: { borderRadius: 0, }, - // rootDatasetList: { - // width: '100%', - // backgroundColor: theme.palette.background.paper, - // listStyleType: 'none', - // }, okButton: { color: theme.typography.successColor, }, @@ -109,6 +108,31 @@ const useStyles = makeStyles(theme => ({ cardActionCounter: { padding: '15px 8px 8px 8px', }, + backdrop: { + zIndex: 2001, // Because the z-index of the .leaflet-top.leaflet.left is 2000. + color: '#fff', + }, + tileListContainer: { + [theme.breakpoints.between('sm', 'lg')]: { + width: 382, + }, + [theme.breakpoints.down('sm')]: { + width: '100%', + }, + }, + visiomaticContainer: { + [theme.breakpoints.between('sm', 'lg')]: { + width: 'calc(100% - 382px)', + }, + [theme.breakpoints.down('sm')]: { + width: '100%', + }, + }, + tileButton: { + [theme.breakpoints.down('xl')]: { + padding: theme.spacing(1), + }, + }, })); function Home() { @@ -117,7 +141,8 @@ function Home() { const [currentRelease, setCurrentRelease] = useState(''); const [datasets, setDatasets] = useState([]); const [currentDataset, setCurrentDataset] = useState({}); - const [loading, setLoading] = useState(true); + const [loadingAllTiles, setLoadingAllTiles] = useState(true); + const [loadingList, setLoadingList] = useState(true); const [showComment, setShowComment] = useState(false); const [comments, setComments] = useState([]); const [menuContrastOpen, setMenuContrastOpen] = useState(false); @@ -129,46 +154,86 @@ function Home() { }); const [showFilterDialog, setShowFilterDialog] = useState(false); const [filterInspect, setFilterInspect] = 
useState(''); - const [inputSearchValue, setInputSearchValue] = useState(''); const [openSnackBar, setOpenSnackBar] = useState(false); const [totalCount, setTotalCount] = useState(0); const [commentsWithFeature, setCommentsWithFeature] = useState([]); const datasetLoading = useRef(false); const [tutorial, setTutorial] = useState([]); + const [downloadInfo, setDownloadInfo] = useState({ visible: false }); + const [backdropOpen, setBackdropOpen] = useState(false); + const [hasInspection, setHasInspection] = useState(false); + const [allTiles, setAllTiles] = useState([]); + const [searchEnabled, setSearchEnabled] = useState(false); + const [visiomaticCenter, setVisiomaticCenter] = useState([]); + const [fov, setFov] = useState(2); + const searchRef = useRef(''); const api = new DriApi(); const classes = useStyles(); - const onChangeRelease = (value) => { - setLoading(true); - setCurrentRelease(value); - setDatasets([]); - setCurrentDataset({}); - }; - useEffect(() => { + api.getTileInspectionOption().then(res => setHasInspection(res.TILE_VIEWER_INSPECTION_ENABLED)); api.loggedUser().then(res => setUsername(res.username)); api.allReleases().then((res) => { setReleases(res); - setCurrentRelease(res.length > 0 ? res[0].id : ''); + + // Getting first item of all available releases: + let release = res.length > 0 ? res[0].id : ''; + + // Filter by releases with the default flag equal to true + const releaseDefault = res.filter(row => row.rls_default); + + // If there's any release with the default flag on, + // set the first item as the current release. 
+ if (releaseDefault.length > 0) { + release = releaseDefault[0].id; + } + + setCurrentRelease(release); }); api.getTutorial().then(res => setTutorial(res)); }, []); - const loadMoreDatasets = useCallback((e) => { + useEffect(() => { + if (loadingAllTiles === true && currentRelease !== '') { + api.datasetsByRelease({ release: currentRelease }).then((res) => { + if (hasInspection) { + // Totais de Tiles boas, ruim e não inspecionadas + const goodTiles = countBy(res, el => el.isp_value); + goodTiles.tiles = res.length; + setCounts(goodTiles); + } else { + setCounts({ tiles: res.length }); + } + if (allTiles.length === 0) { + setAllTiles(res); + } + setLoadingAllTiles(false); + }); + } + }, [hasInspection, currentRelease, filterInspect, loadingAllTiles]); + + const loadMoreDatasets = () => { + if (searchRef.current && searchRef.current.value.split(',').length > 1) { + return; + } + + const offset = datasets.length; + + const filters = [{ property: 'inspected', value: filterInspect, }]; - if (datasetLoading.current) { - return; - } - datasetLoading.current = true; api.datasetsByRelease({ - release: currentRelease, filters, search: inputSearchValue, offset: e || 0, limit: 20, + release: currentRelease, + filters, + search: searchRef.current && searchRef.current.value, + offset, + limit: 20, }) .then((data) => { const datasetConcat = datasets.concat(data.results); @@ -183,40 +248,50 @@ function Home() { if (data.count > 20) { datasetLoading.current = false; } + setLoadingList(false); }); - }, [datasets, currentRelease]); + }; useEffect(() => { - if (loading === true && currentRelease !== '') { - api.datasetsByRelease({ release: currentRelease }).then((res) => { - // Totais de Tiles boas, ruim e não inspecionadas - const goodTiles = countBy(res, el => el.isp_value); - goodTiles.tiles = res.length; - setCounts(goodTiles); - setLoading(false); - }); - datasetLoading.current = false; - loadMoreDatasets(0); + if (loadingList && currentRelease !== '') { + loadMoreDatasets(); } 
- }, [currentRelease, filterInspect, loading]); + }, [currentRelease, loadingList]); - useEffect(() => { - if (loading === true && currentRelease !== '') loadMoreDatasets(0); - }, [totalCount]); - - const loadData = () => { - if (currentRelease !== '') { - setDatasets([]); - // setCurrentDataset({}); - setCounts({}); - setTotalCount(0); - setLoading(true); - datasetLoading.current = false; - } + const reloadList = () => { + setDatasets([]); + setTotalCount(0); + datasetLoading.current = false; + setLoadingList(true); + }; + + const reloadAllTiles = () => { + setLoadingAllTiles(true); + setCounts({}); }; const onSelectDataset = dataset => setCurrentDataset(dataset); + useEffect(() => { + if (Object.keys(currentDataset).length > 0) { + const searchSplit = searchRef.current.value.split(','); + + if (searchSplit.length === 2) { + setVisiomaticCenter([ + searchSplit[0], + searchSplit[1], + ]); + setFov(0.3); + } else { + setVisiomaticCenter([ + currentDataset.tli_ra, + currentDataset.tli_dec, + ]); + setFov(2); + } + } + }, [currentDataset]); + const handleClickSnackBar = () => setOpenSnackBar(!openSnackBar); const handleComment = (dataset) => { @@ -237,22 +312,24 @@ function Home() { }; useEffect(() => { - getDatasetCommentsByType(); - }, [currentDataset]); + if (hasInspection) { + getDatasetCommentsByType(); + } + }, [currentDataset, hasInspection]); const onComment = (dataset, comment) => { if (comment.id !== null) { // update api.updateComment(comment.id, comment.inputValue, null, null).then(() => { - loadData(); handleComment(dataset); + reloadList(); }); } else { const dts_type = comment.dts_type || '0'; api.createDatasetComment(dataset.id, comment.inputValue, dts_type, null, null).then(() => { - loadData(); if (showComment === true) { handleComment(dataset); + reloadList(); } }); } @@ -300,22 +377,20 @@ function Home() { if (dataset.inspected !== null) { if (valueRef !== null) { api.updateInspectValue(dataset.inspected, valueRef).then(() => { - setLoading(true); 
handleClickSnackBar(); }); } else { api.deleteInspect(dataset.inspected).then(() => { - setLoading(true); handleClickSnackBar(); }); } } else { api.createinspect(dataset.id, valueRef).then(() => { - setLoading(true); handleClickSnackBar(); }); } - loadData(); + reloadList(); + reloadAllTiles(); }; const handleMenuContrastOpen = () => setMenuContrastOpen(true); @@ -328,79 +403,173 @@ function Home() { const handleMenuFilterOpen = () => setShowFilterDialog(true); const handleMenuFilterClose = (value) => { - setFilterInspect(value); - setShowFilterDialog(false); - setTotalCount(0); - loadData(); + if (value !== filterInspect) { + setFilterInspect(value); + setShowFilterDialog(false); + setTotalCount(0); + reloadList(); + } }; + const filterByRaDec = (ra, dec) => { + /** + * e necessario converter os cantos da tile em ra para -180 e 180 + * para que as tiles que ficam perto do 0 nao deem erro. + * + */ - const handleInputSearch = (value) => { - setTotalCount(0); - setInputSearchValue(value); + const result = []; + + allTiles.forEach((tile) => { + if (ra > 180) { + ra -= 360; + } + + let urall = tile.tli_urall; + let uraur = tile.tli_uraur; + + if (urall > 180) { + urall -= 360; + } + + if (uraur > 180) { + uraur -= 360; + } + + // tli_urall < ra + // AND tli_udecll < dec + // AND tli_uraur > ra + // AND tli_udecur > dec + if (urall < ra && tile.tli_udecll < dec && uraur > ra && tile.tli_udecur > dec) { + result.push(tile); + return false; + } + }); + + return result; + }; + + const handleInputSearch = () => { + const searchSplit = searchRef.current.value.split(','); + + if (searchSplit.length === 2) { + const datasetByPosition = filterByRaDec( + parseFloat(searchSplit[0]), + parseFloat(searchSplit[1]), + ); + + if (datasetByPosition.length > 0) { + datasetLoading.current = true; + setDatasets(datasetByPosition); + setTotalCount(datasetByPosition.length); + datasetLoading.current = false; + } + } else { + reloadList(); + } }; const handleDelete = commentId => 
api.deleteComment(commentId).then(() => { handleComment(currentDataset); - loadData(); + reloadList(); }); - useEffect(() => { - loadData(); - }, [inputSearchValue]); - - - const Row = (i) => { - if (datasets.length > 0 && datasets[i]) { - return ( - { - onSelectDataset(datasets[i]); - }} - divider - selected={datasets[i].id === currentDataset.id} - > - 0 ? classes.datasetWithComment : null} - onClick={(e) => { - e.stopPropagation(); - e.preventDefault(); - handleComment(datasets[i]); - }} - > - {`${datasets[i].comments} comments`} - - )} - /> + const handleDownloadClick = (dataset) => { + setBackdropOpen(true); + api.getDatasetInfo(dataset.id) + .then((res) => { + setDownloadInfo({ + visible: true, + tilename: dataset.tli_tilename, + images: res.images, + catalogs: res.catalogs, + }); + + setBackdropOpen(false); + }).catch(() => { + setDownloadInfo({ + visible: true, + tilename: dataset.tli_tilename, + error: true, + }); + setBackdropOpen(false); + }); + }; - - qualifyDataset(datasets[i], 'ok')}> - {datasets[i].isp_value ? ( + const onChangeRelease = (value) => { + setLoadingAllTiles(true); + setCurrentRelease(value); + reloadList(); + reloadAllTiles(); + }; + + useEffect(() => { + if (allTiles.length > 0) { + setSearchEnabled(true); + } else { + setSearchEnabled(false); + } + }, [allTiles]); + + const Rows = () => datasets.map(dataset => ( + { + onSelectDataset(dataset); + }} + divider + selected={dataset.id === currentDataset.id} + > + 0 ? classes.datasetWithComment : null} + onClick={(e) => { + e.stopPropagation(); + e.preventDefault(); + handleComment(dataset); + }} + > + {`${dataset.comments} comments`} + + ) : null} + /> + + {hasInspection ? ( + <> + qualifyDataset(dataset, 'ok')}> + {dataset.isp_value ? ( ) : ( )} - qualifyDataset(datasets[i], 'notok')}> - {datasets[i].isp_value === false ? ( + qualifyDataset(dataset, 'notok')}> + {dataset.isp_value === false ? 
( ) : ( )} - handleComment(datasets[i])}> + handleComment(dataset)}> - - - ); - } + + ) : null} + handleDownloadClick(dataset)}> + + + + + )); + + const handleDownloadClose = () => { + setDownloadInfo({ + visible: false, + }); }; const header = 64; @@ -412,7 +581,7 @@ function Home() { return (
- + - +
- - - - - - - - - - - - - {}} className={classes.menuButton}> - - - - + {hasInspection ? ( + <> + + + + + + + + + + + + + + + + + + + ) : null} - loadMoreDatasets(e)} - footer={() => ( -
- Loading... -
- )} - /> + > + + Loading... : null} + scrollableTarget="datasetList" + > + {Rows()} + +
- {loading ? ( - - ) : ( - - - - )} - + <> + {loadingAllTiles ? ( + + ) : ( + + + + )} +
- + - + {currentRelease !== '' ? ( + + ) : null} - setShowComment(false)} - handleSubmit={onComment} - handleDelete={handleDelete} - /> + {hasInspection ? ( + setShowComment(false)} + handleSubmit={onComment} + handleDelete={handleDelete} + /> + ) : null} - + {downloadInfo.visible && ( + + )} + + + + {hasInspection ? : null} )} /> - ( - - - - - Back - - - + {hasInspection ? ( + ( + + + + + Back + + + + )} + /> )} - /> - )} - /> + /> + ) : null}