Skip to content

Commit

Permalink
Add logging to bucketfs (#101)
Browse files Browse the repository at this point in the history
  • Loading branch information
Nicoretti authored Mar 13, 2024
1 parent 9988c81 commit a53a796
Show file tree
Hide file tree
Showing 7 changed files with 240 additions and 152 deletions.
22 changes: 22 additions & 0 deletions doc/changes/unreleased.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,27 @@
# Unreleased

## Added
- Added logging support

**Overview**

The bucketfs logger can be referenced via `exasol.bucketfs`

```python
import logging
# Get the logger for 'exasol.bucketfs'
logger = logging.getLogger('exasol.bucketfs')
```

For most use cases it should be sufficient to configure just the root logger
in order to receive the logs emitted by bucketfs.

```python
import logging

logging.basicConfig(level=logging.INFO)
```


## Internal
- Relock dependencies
- Update abatilo/actions-poetry from `v2.1.4` to `v3.0.0`
Expand Down
26 changes: 26 additions & 0 deletions doc/examples/logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
# Basic logging:
# Configuring the root logger is sufficient to surface log records
# emitted by the 'exasol.bucketfs' logger.
import logging
from exasol.bucketfs import Service

logging.basicConfig(level=logging.INFO)

# Advanced Logging
import logging
from exasol.bucketfs import Service

# Attention:
# It is essential to configure the root logger at the beginning of your script.
# This ensures that log messages, including those from bucketfs, are handled correctly.
# Without proper configuration, log messages might not appear as expected.
logging.basicConfig(level=logging.INFO)

# Explicitly configure the bucketfs logger if you need to.
#
# 1. Get a reference to the bucketfs logger
bucketfs_logger = logging.getLogger('exasol.bucketfs')

# 2. Configure the bucketfs logger as needed
# Note:
# By default the bucketfs logger is set to NOTSET (https://docs.python.org/3.11/library/logging.html#logging.NOTSET)
# which should be sufficient in most cases where the root logger is configured appropriately.
bucketfs_logger.setLevel(logging.DEBUG)
...
7 changes: 7 additions & 0 deletions doc/user_guide/advanced.rst
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,10 @@ Delete files from Bucket
:start-after: # Expert/Mapped bucket API


Configure logging
+++++++++++++++++

.. literalinclude:: /examples/logger.py
:language: python3
:start-after: # Advanced Logging

7 changes: 7 additions & 0 deletions doc/user_guide/basics.rst
Original file line number Diff line number Diff line change
Expand Up @@ -74,3 +74,10 @@ Delete files from Bucket
:end-before: # Expert/Mapped bucket API


Configure logging
+++++++++++++++++

.. literalinclude:: /examples/logger.py
:language: python3
:end-before: # Advanced Logging

16 changes: 13 additions & 3 deletions exasol/bucketfs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
from __future__ import annotations

import hashlib
import logging
from collections import defaultdict
from pathlib import Path
from typing import (
Expand All @@ -55,7 +56,6 @@
Iterator,
Mapping,
MutableMapping,
Union,
)
from urllib.parse import urlparse

Expand All @@ -67,12 +67,15 @@
"Service",
"Bucket",
"MappedBucket",
"BucketFsError",
"as_bytes",
"as_string",
"as_file",
"as_hash",
]

_logger = logging.getLogger("exasol.bucketfs")


class BucketFsError(Exception):
"""Error occurred while interacting with the bucket fs service."""
Expand Down Expand Up @@ -120,7 +123,7 @@ class Service:
def __init__(
self,
url: str,
credentials: Mapping[str, Mapping[str, str]] = None,
credentials: Mapping[str, Mapping[str, str]] | None = None,
verify: bool | str = True,
):
"""Create a new Service instance.
Expand Down Expand Up @@ -149,6 +152,7 @@ def buckets(self) -> MutableMapping[str, Bucket]:
url = _build_url(service_url=self._url)
response = requests.get(url, verify=self._verify)
try:
_logger.info(f"Retrieving bucket list from {url}")
response.raise_for_status()
except HTTPError as ex:
raise BucketFsError(
Expand All @@ -169,7 +173,7 @@ def buckets(self) -> MutableMapping[str, Bucket]:
def __str__(self) -> str:
return f"Service<{self._url}>"

def __iter__(self) -> Iterator[Bucket]:
def __iter__(self) -> Iterator[str]:
yield from self.buckets

def __getitem__(self, item: str) -> Bucket:
Expand Down Expand Up @@ -222,6 +226,7 @@ def _auth(self) -> HTTPBasicAuth:
@property
def files(self) -> Iterable[str]:
url = _build_url(service_url=self._service, bucket=self.name)
_logger.info(f"Retrieving bucket listing for {self.name}.")
response = requests.get(url, auth=self._auth, verify=self._verify)
try:
response.raise_for_status()
Expand All @@ -245,6 +250,7 @@ def upload(
data: raw content of the file.
"""
url = _build_url(service_url=self._service, bucket=self.name, path=path)
_logger.info(f"Uploading {path} to bucket {self.name}.")
response = requests.put(url, data=data, auth=self._auth, verify=self._verify)
try:
response.raise_for_status()
Expand All @@ -262,6 +268,7 @@ def delete(self, path) -> None:
A BucketFsError if the operation couldn't be executed successfully.
"""
url = _build_url(service_url=self._service, bucket=self.name, path=path)
_logger.info(f"Deleting {path} from bucket {self.name}.")
response = requests.delete(url, auth=self._auth, verify=self._verify)
try:
response.raise_for_status()
Expand All @@ -280,6 +287,9 @@ def download(self, path: str, chunk_size: int = 8192) -> Iterable[ByteString]:
An iterable of binary chunks representing the downloaded file.
"""
url = _build_url(service_url=self._service, bucket=self.name, path=path)
_logger.info(
f"Downloading {path} using a chunk size of {chunk_size} bytes from bucket {self.name}."
)
with requests.get(
url, stream=True, auth=self._auth, verify=self._verify
) as response:
Expand Down
Loading

0 comments on commit a53a796

Please sign in to comment.