Skip to content

Commit

Permalink
Change solution for tracking logs (#308)
Browse files Browse the repository at this point in the history
* Change tracking logs method.

* Change version to generate dev package.

* Change path name in S3
  • Loading branch information
moromimay authored Apr 8, 2021
1 parent 6d2a8f9 commit d2c5d39
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 22 deletions.
45 changes: 25 additions & 20 deletions butterfree/_cli/migrate.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,18 @@
import datetime
import importlib
import inspect
import os
import pkgutil
import sys
from typing import Set

import boto3
import setuptools
import typer
from botocore.exceptions import ClientError

from butterfree.clients import SparkClient
from butterfree.configs import environment
from butterfree.configs.logger import __logger
from butterfree.extract.readers import FileReader
from butterfree.migrations.database_migration import ALLOWED_DATABASE
from butterfree.pipelines import FeatureSetPipeline

Expand Down Expand Up @@ -106,30 +107,34 @@ class Migrate:
pipelines: list of Feature Set Pipelines to use to migration.
"""

def __init__(self, pipelines: Set[FeatureSetPipeline]) -> None:
    """Instantiate the migration runner.

    Args:
        pipelines: Feature Set Pipelines whose database migrations
            will be constructed and applied by :meth:`run`.
    """
    # NOTE(review): the displayed diff interleaved the old signature
    # (which also took a SparkClient) with this one; the SparkClient
    # dependency was removed when log shipping moved to boto3.
    self.pipelines = pipelines

def _send_logs_to_s3(self, file_local: bool) -> None:
    """Upload the local migration log file to S3.

    Uploads ``../logging.json`` (written during the migration run) to the
    bucket named by the ``FEATURE_STORE_S3_BUCKET`` environment variable,
    under a date-partitioned key. After a successful upload the local file
    is deleted unless ``file_local`` is True.

    Args:
        file_local: when True, keep the local log file after uploading.

    Raises:
        botocore.exceptions.ClientError: if the S3 upload fails.
    """
    s3_client = boto3.client("s3")

    file_name = "../logging.json"
    timestamp = datetime.datetime.now()
    # Key layout: logs/migrate/<YYYY-MM-DD>/logging-<HH:MM:SS>.json.
    # NOTE(review): ':' is legal in S3 keys but awkward for some tooling;
    # kept as-is for compatibility with existing log paths. Timestamp is
    # naive local time — confirm whether UTC is expected.
    object_name = (
        f"logs/migrate/"
        f"{timestamp.strftime('%Y-%m-%d')}"
        f"/logging-{timestamp.strftime('%H:%M:%S')}.json"
    )
    bucket = environment.get_variable("FEATURE_STORE_S3_BUCKET")

    # upload_file raises ClientError on failure; let it propagate directly.
    # (The previous try/except ClientError only re-raised — a no-op.)
    s3_client.upload_file(
        file_name,
        bucket,
        object_name,
        ExtraArgs={"ACL": "bucket-owner-full-control"},
    )

    if not file_local and os.path.exists(file_name):
        os.remove(file_name)

def run(self, generate_logs: bool = False) -> None:
"""Construct and apply the migrations."""
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,5 @@ parameters-validation>=1.1.5,<2.0
pyspark==3.*
typer>=0.3,<0.4
setuptools>=41,<42
typing-extensions==3.7.4.3
boto3==1.17.*
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from setuptools import find_packages, setup

__package_name__ = "butterfree"
__version__ = "1.2.0.dev8"
__repository_url__ = "https://github.com/quintoandar/butterfree"

with open("requirements.txt") as f:
Expand Down

0 comments on commit d2c5d39

Please sign in to comment.