Skip to content

Commit

Permalink
Feature/integrate postgres (#1230)
Browse files Browse the repository at this point in the history
* Added psycopg2 library for integration with postgresql. Added a dump with an example of a catalog to be used as a test. DBBase class changes started. #1225

* connection to postgresql established using SqlAlchemy #1225

* It is now possible to make a query in the catalog database using Postgresql but it is not yet using q3c. initial product class data has also been updated. #1225

* Methods were created to read the attributes of the tables, such as number of rows, columns and size in bytes. exclusive userquery methods for creating tables were created. Added log for userquery #1225

* Improvements in installation documentation for development. #1225

* Minor fix in instalation.md #1225

* The error in importing targets in csv format was fixed, the error was caused by the use of Schemas in Postgresql. an error in the Targets query was also fixed. Postgresql does not accept comparison between columns of different types, in this case the object_id columns, to solve the problem, Cast was made in the column type at the time of joining with other tables. #1225

* A prototype was created to overlay the xray contours.

* The Userquery tool is now integrated with Postgresql.

* Explorer fix for coadd objects.

* Fixed an error in the number of lines inserted in a target upload.

* Fixed error in the number of catalog columns in the list of Targets.

* Foi feita a modicação que permite o target viewer importar tabelas de outros databases, antes era fixo ao database dessci.

* The .env configuration file was removed now all confs are in local_vars.py

* Added sample sql for coadd and galaxy cluster tables

* Changes in Jenkins pipeline for push a new image with latest tag.
  • Loading branch information
glaubervila authored Apr 15, 2020
1 parent 023a233 commit cdb3501
Show file tree
Hide file tree
Showing 48 changed files with 32,404 additions and 26,128 deletions.
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
docker-compose.yml

# Diretorios de instalacao
/api/env
/env
/data
/db
Expand Down Expand Up @@ -37,3 +38,7 @@ __pycache__
/api/db
*.coverage
.env
# Diretórios do Postgresql
/pg_data
/pg_backup

File renamed without changes.
29 changes: 29 additions & 0 deletions Docs/exemplo_importacao_coadd_object.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
{
"products": [{
"display_name": "Y3A2 Coadd",
"fields": [],
"releases": ["y3a2_coadd"],
"table": "coadd_objects",
"schema": "y3a2_coadd",
"database": "catalog",
"type": "catalog",
"class": "coadd_objects",
"name": "y3a2_coadd",
"association": [{
"ucd": "meta.id;meta.main",
"property": "coadd_object_id"
},
{
"ucd": "pos.eq.ra;meta.main",
"property": "ra"
},
{
"ucd": "pos.eq.dec;meta.main",
"property": "dec"
}
]
}],
"ticket":"UKW628",
"register_username": "gverde"
}

109 changes: 109 additions & 0 deletions Docs/exemplo_importacao_galaxy_cluster.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
{
"process": {
"owner_username": "gverde",
"process_name": "wazp",
"process_id": "5239",
"process_start_date": "2019-06-11T04:59:19Z",
"process_end_date": "2019-06-11T07:08:54Z",
"process_description": "Sample Catalog for Galaxy Clusters",
"process_comment": "Sample Catalog for Galaxy Clusters",
"releases": ["y1_wide_survey"],
"products": [{
"display_name": "Galaxy Clusters Sample",
"releases": ["y1_wide_survey"],
"fields": ["y1a1_coadd_spt"],
"table": "galaxy_clusters_sample",
"schema": "dri_catalog",
"database": "catalog",
"type": "catalog",
"class": "galaxy_clusters",
"name": "galaxy_clusters_sample",
"association": [{
"ucd": "meta.id;meta.main",
"property": "id"
},
{
"ucd": "pos.eq.ra;meta.main",
"property": "ra"
},
{
"ucd": "pos.eq.dec;meta.main",
"property": "dec"
},
{
"ucd": "src.redshift.phot",
"property": "zp"
},
{
"ucd": "stat.snr",
"property": "snr"
},
{
"ucd": "phys.size.radius",
"property": "radius_mpc"
},
{
"ucd": "phys.angSize;src",
"property": "radius_amin"
},
{
"ucd": "src.class.richness",
"property": "ngals"
}
]
},{
"display_name": "Clusters Members Sample",
"releases": ["y1_wide_survey"],
"fields": ["y1a1_coadd_spt"],
"table": "cluster_members_sample",
"schema": "dri_catalog",
"database": "catalog",
"type": "catalog",
"class": "cluster_members",
"name": "cluster_members_sample",
"association": [{
"ucd": "meta.id;meta.main",
"property": "seqnr"
},
{
"ucd": "pos.eq.ra;meta.main",
"property": "ra"
},
{
"ucd": "pos.eq.dec;meta.main",
"property": "dec"
},
{
"ucd": "src.redshift.phot",
"property": "zp"
},
{
"ucd": "phot.mag;meta.main;em.opt.g",
"property": "mag_g"
},
{
"ucd": "phot.mag;meta.main;em.opt.r",
"property": "mag_r"
},
{
"ucd": "phot.mag;meta.main;em.opt.i",
"property": "mag_i"
},
{
"ucd": "phot.mag;meta.main;em.opt.z",
"property": "mag_z"
},
{
"ucd": "phot.mag;meta.main;em.opt.Y",
"property": "mag_y"
},
{
"ucd": "meta.id.cross",
"property": "id_cluster"
}
]
}]
},
"ticket":"UKW628",
"register_username": "gverde"
}
98 changes: 87 additions & 11 deletions Instalation.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ git clone https://github.com/linea-it/dri.git dri

```
cd dri
cp env_template .env
cp docker-compose-development.yml docker-compose.yml
docker-compose build
```
Expand Down Expand Up @@ -43,39 +42,110 @@ dri/settings/
in this file the variable debug should always be False and it is necessary to add the host allowed in ALLOWED_HOSTS
parameter and allowed hosts CORS in variable CORS_ORIGIN_WHITELIST.

Database settings must be made individually in each of these files;
if you are a developer and do not want to use sqlite as the default database, change only the development settings file.
## Setting Database
### Postgresql
Considering a new installation in a development environment with the postgresql + q3c database.

### Setting Database Params
Database settings must be made only in local_vars.py,

This step is needed only to use oracle database. The default sqlite is pre-configured in the repository files
Assuming the database used is postgresql + q3c in a development environment, the configuration of the DATABASES attribute in local_vars.py is as follows.
```
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'database',
'PORT': 5432,
'OPTIONS': {
'options': '-c search_path=dri_admin,public'
}
},
'catalog': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
'USER': 'postgres',
'PASSWORD': 'postgres',
'HOST': 'database',
'PORT': 5432,
'OPTIONS': {
'options': '-c search_path=dri_catalog,public'
},
},
}
```

Starting the database container alone for the first time will create the pg_data and pg_backup directories and create the database user based on the POSTGRES_DB and POSTGRES_PASSWORD environment variables. Both default to 'postgres', and the user created is also 'postgres'.

### Run All Services
```
docker-compose up
# starts the database container
docker-compose up database
```
It is necessary to create 2 schemas: one for the administrative tables and the other for catalog tables.
The catalog schema holds the tables created by the users.

```
# Creates the administrative schema, in this example it is called dri_admin
docker exec -it $(docker ps -q -f name=dri_database) psql -h localhost -U postgres -d postgres -c "CREATE SCHEMA dri_admin;"
# Changes the permission for the schema, considering that the user is postgres.
docker exec -it $(docker ps -q -f name=dri_database) psql -h localhost -U postgres -d postgres -c "ALTER SCHEMA dri_admin OWNER TO postgres;"
# Same thing for the dri_catalog schema
docker exec -it $(docker ps -q -f name=dri_database) psql -h localhost -U postgres -d postgres -c "CREATE SCHEMA dri_catalog;"
docker exec -it $(docker ps -q -f name=dri_database) psql -h localhost -U postgres -d postgres -c "ALTER SCHEMA dri_catalog OWNER TO postgres;"
```

## Setup Backend
With the configuration file local_vars.py in place, it's time to start the backend.

The first time the backend is executed, administrative tables and basic catalog tables will be created.
Django takes care of this part, there is no need to do anything, the commands are in the entrypoint.sh that is executed every time the backend container is turned on.

```
# Starts only the containers needed for the backend.
docker-compose up backend
```

Now that the backend is on, it is necessary to load the initial data and create an admin user.

### Create default Super User in django

```
docker exec -it $(docker ps -q -f name=backend) python manage.py createsuperuser
docker exec -it $(docker ps -q -f name=dri_backend) python manage.py createsuperuser
```

### Load Initial Data
For admin database
```
docker exec -it $(docker ps -q -f name=dri_backend) python manage.py loaddata initial_data.json
```
For catalog database


### Example catalog, outside the DRI catalog database.
The step below is **optional**, do not perform this part unless you know what you are doing
```
docker exec -it $(docker ps -q -f name=dri_database) psql -h localhost -U postgres -d postgres -f /data/gaia_dump.sql
```
In this example, the catalog database is the same as the administrative database.
This example creates a gaia schema with a gaia_dr2 table containing a small subset of objects.



## Run and Stop All Services

```
docker-compose up
```
or
```
docker-compose stop
```


### Useful Commands
## Useful Commands

Returns the ID of a container by filtering by name
```
Expand All @@ -102,6 +172,12 @@ Dump a schema from database postgres
docker exec -it $(docker ps -q -f name=dri_database) pg_dump -h localhost -U postgres -n 'gaia' postgres > /data/gaia_dump.sql
```

Dump data using Django
```
docker exec -it $(docker ps -q -f name=dri_backend) python manage.py dumpdata product_classifier --indent 2 > product_classifier.json
```
Neste exemplo product_classifier é o Django App com todos os models. product_classifier.json é o arquivo com o dump.

### Rabbit + Celery
Descobrir o IP do container rabbit
```
Expand Down
24 changes: 17 additions & 7 deletions Jenkinsfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ pipeline {
registryCredential = 'Dockerhub'
dockerImageBack = ''
dockerImageFront = ''
GIT_COMMIT_SHORT = sh(
script: "printf \$(git rev-parse --short ${GIT_COMMIT})",
returnStdout: true
)
}
agent any

Expand All @@ -15,15 +19,15 @@ pipeline {
dir('frontend') {
sh "cp nginx-deploy.conf nginx-proxy.conf"
script {
dockerImageFront = docker.build registry + ":FRONT$GIT_COMMIT"
dockerImageFront = docker.build registry + ":frontend_$GIT_COMMIT_SHORT"
}
}
},
backend: {
dir('api') {
sh "cp dri/settings/jenkins.py dri/settings/local_vars.py"
script {
dockerImageBack = docker.build registry + ":BACK$GIT_COMMIT"
dockerImageBack = docker.build registry + ":backend_$GIT_COMMIT_SHORT"
}
}
}
Expand All @@ -32,7 +36,7 @@ pipeline {
}
stage('Test Backend') {
steps {
sh "docker run $registry:BACK$GIT_COMMIT coverage run --source=. --omit='*migrations' manage.py test --verbosity=2"
sh "docker run $registry:backend_$GIT_COMMIT_SHORT coverage run --source=. --omit='*migrations' manage.py test --verbosity=2"
}
}
stage('Push Images') {
Expand All @@ -46,14 +50,20 @@ pipeline {
frontend: {
dir('frontend') {
script {
docker.withRegistry( '', registryCredential ) {dockerImageFront.push()}
docker.withRegistry( '', registryCredential ) {
dockerImageFront.push()
dockerImageFront.push("frontend_latest")
}
}
}
},
backend: {
dir('api') {
script {
docker.withRegistry( '', registryCredential ) {dockerImageBack.push()}
docker.withRegistry( '', registryCredential ) {
dockerImageBack.push()
dockerImageBack.push("backend_latest")
}
}
}
}
Expand All @@ -63,8 +73,8 @@ pipeline {
}
post {
always {
sh "docker rmi $registry:FRONT$GIT_COMMIT --force"
sh "docker rmi $registry:BACK$GIT_COMMIT --force"
sh "docker rmi $registry:frontend_$GIT_COMMIT_SHORT --force"
sh "docker rmi $registry:backend_$GIT_COMMIT_SHORT --force"
}
}
}
3 changes: 2 additions & 1 deletion api/.vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
{
"python.pythonPath": "env_dri/bin/python3.6"
"python.pythonPath": "/home/glauber/projetos/linea/dri/api/env/bin/python3",
"git.ignoreLimitWarning": true
}
Loading

0 comments on commit cdb3501

Please sign in to comment.