replace flake8 with ruff
jtherrmann committed Jan 2, 2025
1 parent 43f1eed commit f0b6969
Showing 12 changed files with 77 additions and 56 deletions.
16 changes: 2 additions & 14 deletions .github/workflows/static_analysis.yml
@@ -4,20 +4,8 @@ on: push

 jobs:

-  flake8:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/setup-python@v5
-        with:
-          python-version: 3.12
-
-      - run: |
-          python -m pip install --upgrade pip
-          python -m pip install flake8 flake8-import-order flake8-builtins  # FIXME add flake8-blind-except
-      - run: flake8 --max-line-length=120 --import-order-style=pycharm --statistics --application-import-names metadata_construction verify
+  call-ruff-workflow:
+    uses: ASFHyP3/actions/.github/workflows/[email protected]

   cfn-lint:

     [Check warning (Code scanning / CodeQL, Medium): Workflow does not contain permissions. Actions Job or Workflow does not set permissions.]

     runs-on: ubuntu-latest
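The inline pip-and-flake8 steps are replaced by a call to ASFHyP3's reusable ruff workflow. Assuming that reusable workflow invokes the standard ruff entry points (an assumption; see ASFHyP3/actions for the exact steps), the equivalent local checks would be roughly:

    python -m pip install ruff
    ruff check .           # lint, applying the rules selected in pyproject.toml
    ruff format --check .  # report formatting differences without rewriting files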
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
 and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [2.0.2]
+### Changed
+- Replaced `flake8` with `ruff`.
+
 ## [2.0.1]
 ### Changed
 - Upgraded all Lambda functions to Python 3.12
1 change: 1 addition & 0 deletions cmr-token/src/cmr_token.py
@@ -6,6 +6,7 @@
 import boto3
 import requests_pkcs12

+
 log = getLogger()
 log.setLevel('INFO')
 s3 = boto3.client('s3')
2 changes: 1 addition & 1 deletion ingest/src/ingest.py
@@ -51,7 +51,7 @@ def lambda_handler(event, context):
         'Browse': {
             'Bucket': config['browse_bucket'],
             'Key': browse_output_key,
-        }
+        },
     }
     log.info('Done processing %s', event['ProductName'])
     return output
5 changes: 3 additions & 2 deletions invoke/src/invoke.py
@@ -39,8 +39,9 @@ def invoke_ingest(config):
             log.warning('Processed %s of %s messages. Exiting.', messages_processed, config['max_messages_to_process'])
             break

-        messages = queue.receive_messages(MaxNumberOfMessages=config['max_messages_per_receive'],
-                                          WaitTimeSeconds=config['wait_time_in_seconds'])
+        messages = queue.receive_messages(
+            MaxNumberOfMessages=config['max_messages_per_receive'], WaitTimeSeconds=config['wait_time_in_seconds']
+        )
         if not messages:
             log.info('No messages found. Exiting.')
             break
49 changes: 22 additions & 27 deletions metadata-construction/src/metadata_construction.py
@@ -5,6 +5,7 @@

 import boto3

+
 log = getLogger()
 log.setLevel('INFO')
 CONFIG = json.loads(os.getenv('CONFIG'))

@@ -51,7 +52,7 @@ def get_sds_metadata(obj):
 def format_polygon(polygon):
     coordinates = []
     for long, lat in reversed(polygon):
-        coordinates.append({"Latitude": lat, "Longitude": long})
+        coordinates.append({'Latitude': lat, 'Longitude': long})
     return coordinates


@@ -111,38 +112,32 @@ def render_granule_metadata(sds_metadata, config, product, browse) -> dict:
                 'Type': 'Update',
             },
         ],
-        "DataGranule": {
-            "ArchiveAndDistributionInformation": [
-                {
-                    "Name": os.path.basename(product['Key']),
-                    "SizeInBytes": get_s3_file_size(product)
-                }
+        'DataGranule': {
+            'ArchiveAndDistributionInformation': [
+                {'Name': os.path.basename(product['Key']), 'SizeInBytes': get_s3_file_size(product)}
             ],
-            "DayNightFlag": "Unspecified",
-            "ProductionDateTime": sds_metadata['creation_timestamp']
+            'DayNightFlag': 'Unspecified',
+            'ProductionDateTime': sds_metadata['creation_timestamp'],
         },
-        "Platforms": [
-            {"ShortName": platform} for platform in sorted(set(sds_metadata['metadata']['platform']))
-        ],
-        "OrbitCalculatedSpatialDomains": [
-            {"OrbitNumber": orbit} for orbit in sds_metadata['metadata']['orbit_number']
-        ],
-        "InputGranules": sds_metadata['metadata']['reference_scenes'] + sds_metadata['metadata']['secondary_scenes'],
-        "AdditionalAttributes": [
-            {"Name": "ASCENDING_DESCENDING", "Values": [sds_metadata['metadata']['orbit_direction']]},
-            {"Name": "BEAM_MODE", "Values": [sds_metadata['metadata']['beam_mode']]},
-            {"Name": "POLARIZATION", "Values": [sds_metadata['metadata']['polarization']]},
-            {"Name": "PERPENDICULAR_BASELINE", "Values": [str(sds_metadata['metadata']['perpendicular_baseline'])]},
-            {"Name": "VERSION", "Values": [sds_metadata['metadata']['version']]},
-            {"Name": "FRAME_NUMBER", "Values": [str(sds_metadata['metadata']['frame_number'])]},
-            {"Name": "PATH_NUMBER", "Values": [str(sds_metadata['metadata']['track_number'])]},
-            {"Name": "TEMPORAL_BASELINE_DAYS", "Values": [str(sds_metadata['metadata']['temporal_baseline_days'])]}
-        ]
+        'Platforms': [{'ShortName': platform} for platform in sorted(set(sds_metadata['metadata']['platform']))],
+        'OrbitCalculatedSpatialDomains': [{'OrbitNumber': orbit} for orbit in sds_metadata['metadata']['orbit_number']],
+        'InputGranules': sds_metadata['metadata']['reference_scenes'] + sds_metadata['metadata']['secondary_scenes'],
+        'AdditionalAttributes': [
+            {'Name': 'ASCENDING_DESCENDING', 'Values': [sds_metadata['metadata']['orbit_direction']]},
+            {'Name': 'BEAM_MODE', 'Values': [sds_metadata['metadata']['beam_mode']]},
+            {'Name': 'POLARIZATION', 'Values': [sds_metadata['metadata']['polarization']]},
+            {'Name': 'PERPENDICULAR_BASELINE', 'Values': [str(sds_metadata['metadata']['perpendicular_baseline'])]},
+            {'Name': 'VERSION', 'Values': [sds_metadata['metadata']['version']]},
+            {'Name': 'FRAME_NUMBER', 'Values': [str(sds_metadata['metadata']['frame_number'])]},
+            {'Name': 'PATH_NUMBER', 'Values': [str(sds_metadata['metadata']['track_number'])]},
+            {'Name': 'TEMPORAL_BASELINE_DAYS', 'Values': [str(sds_metadata['metadata']['temporal_baseline_days'])]},
+        ],
     }

     if 'weather_model' in sds_metadata['metadata']:
-        umm['AdditionalAttributes'].append({"Name": "WEATHER_MODEL",
-                                            "Values": sds_metadata['metadata']['weather_model']})
+        umm['AdditionalAttributes'].append(
+            {'Name': 'WEATHER_MODEL', 'Values': sds_metadata['metadata']['weather_model']}
+        )

     return umm
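Nearly all of this file's diff is mechanical `ruff format` output: with quote-style = "single", double-quoted strings become single-quoted; constructs that fit within line-length = 120 are collapsed onto one line; and any construct left multi-line keeps a trailing comma. A small sketch of that behavior on hypothetical input (not from this repo):

    # Input:
    platform_names = [
        {"ShortName": p} for p in sorted(platform_set)
    ]

    # After `ruff format`: collapsed (it fits within 120 characters) and re-quoted.
    platform_names = [{'ShortName': p} for p in sorted(platform_set)]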
1 change: 1 addition & 0 deletions metadata-to-cmr/src/cmr.py
@@ -9,6 +9,7 @@

 import boto3
 import requests

+
 log = getLogger()

8 changes: 6 additions & 2 deletions metadata-to-cmr/src/daemon.py
@@ -5,6 +5,7 @@

 import boto3
 from botocore.client import Config
 from botocore.exceptions import ClientError
+
 from cmr import get_session, process_task


@@ -44,8 +45,11 @@ def daemon_loop(config, get_remaining_time_in_millis_fcn):
     sfn_client = get_sfn_client(config['sfn_connect_timeout'])
     while True:
         if get_remaining_time_in_millis_fcn() < config['max_task_time_in_millis']:
-            log.info('Remaining time %s less than max task time %s. Exiting.', get_remaining_time_in_millis_fcn(),
-                     config['max_task_time_in_millis'])
+            log.info(
+                'Remaining time %s less than max task time %s. Exiting.',
+                get_remaining_time_in_millis_fcn(),
+                config['max_task_time_in_millis'],
+            )
             break

         task = get_task(sfn_client, config['activity'])
30 changes: 30 additions & 0 deletions pyproject.toml
@@ -0,0 +1,30 @@
+[project]
+requires-python = "==3.12"
+
+[tool.ruff]
+line-length = 120
+# The directories to consider when resolving first- vs. third-party imports.
+# See: https://docs.astral.sh/ruff/settings/#src
+src = ["**/src", "tests"]
+
+[tool.ruff.format]
+indent-style = "space"
+quote-style = "single"
+
+[tool.ruff.lint]
+extend-select = [
+    "I",   # isort: https://docs.astral.sh/ruff/rules/#isort-i
+    "UP",  # pyupgrade: https://docs.astral.sh/ruff/rules/#pyupgrade-up
+
+    # TODO: uncomment the following extensions and address their warnings:
+    #"D",    # pydocstyle: https://docs.astral.sh/ruff/rules/#pydocstyle-d
+    #"ANN",  # annotations: https://docs.astral.sh/ruff/rules/#flake8-annotations-ann
+    #"PTH",  # use-pathlib-pth: https://docs.astral.sh/ruff/rules/#flake8-use-pathlib-pth
+]
+
+[tool.ruff.lint.pydocstyle]
+convention = "google"
+
+[tool.ruff.lint.isort]
+case-sensitive = true
+lines-after-imports = 2
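These settings account for most of the mechanical edits in this commit: quote-style = "single" converts double-quoted strings, lines-after-imports = 2 adds the second blank line after each module's imports, src marks code under **/src (such as the cmr module imported by daemon.py) as first-party for import grouping, and the UP (pyupgrade) rules rewrite redundant idioms like open(filename, 'r'). A minimal before/after sketch of the I and UP rules on a hypothetical module (not from this repo):

    # Before: I001 flags the unsorted, ungrouped imports; UP015 flags the redundant 'r' mode.
    import requests
    import json

    def load(filename):
        with open(filename, 'r') as f:
            return json.load(f)

    # After `ruff check --fix` and `ruff format`: imports are sorted and grouped
    # (stdlib, then third-party), two blank lines follow the import block, and the
    # default 'r' mode is dropped.
    import json

    import requests


    def load(filename):
        with open(filename) as f:
            return json.load(f)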
13 changes: 5 additions & 8 deletions tests/test_metadata_construction.py
@@ -19,7 +19,7 @@ def test_get_file_content_from_s3(s3_stubber):
     s3_stubber.add_response(
         method='get_object',
         expected_params={'Bucket': 'myBucket', 'Key': 'myKey'},
-        service_response={'Body': io.StringIO('myContent')}
+        service_response={'Body': io.StringIO('myContent')},
     )
     assert metadata_construction.get_file_content_from_s3('myBucket', 'myKey') == 'myContent'

@@ -32,18 +32,15 @@ def test_write_to_file(tmp_path):


 def test_get_s3_file_size(s3_stubber):
-    obj = {
-        'Bucket': 'myBucket',
-        'Key': 'myKey'
-    }
+    obj = {'Bucket': 'myBucket', 'Key': 'myKey'}
     s3_stubber.add_response(method='head_object', expected_params=obj, service_response={'ContentLength': 123})
     assert metadata_construction.get_s3_file_size(obj) == 123


 def test_get_sds_metadata(test_data_dir, s3_stubber):
     obj = {
         'Bucket': 'ingest-test-aux',
-        'Key': 'S1-GUNW-D-R-123-tops-20240212_20240107-032647-00038E_00036N-PP-2e78-v3_0_0'
+        'Key': 'S1-GUNW-D-R-123-tops-20240212_20240107-032647-00038E_00036N-PP-2e78-v3_0_0',
     }

     sds_metadata_file = test_data_dir / 'granule1' / 'sds_metadata.json'

@@ -55,7 +52,7 @@ def test_get_sds_metadata(test_data_dir, s3_stubber):


 def test_create_granule_metadata_in_s3_g1(test_data_dir, mocker):
-    sds_metadata =json.loads((test_data_dir / 'granule1'/ 'sds_metadata.json').read_text())
+    sds_metadata = json.loads((test_data_dir / 'granule1' / 'sds_metadata.json').read_text())
     inputs = json.loads((test_data_dir / 'granule1' / 'inputs.json').read_text())
     config = json.loads((test_data_dir / 'granule1' / 'config.json').read_text())

@@ -80,7 +77,7 @@ def test_create_granule_metadata_in_s3_g1(test_data_dir, mocker):


 def test_create_granule_metadata_in_s3_g2(test_data_dir, mocker):
-    sds_metadata =json.loads((test_data_dir / 'granule2'/ 'sds_metadata.json').read_text())
+    sds_metadata = json.loads((test_data_dir / 'granule2' / 'sds_metadata.json').read_text())
     inputs = json.loads((test_data_dir / 'granule2' / 'inputs.json').read_text())
     config = json.loads((test_data_dir / 'granule2' / 'config.json').read_text())

2 changes: 1 addition & 1 deletion tests/test_verify.py
@@ -17,7 +17,7 @@ def test_get_file_content_from_s3(s3_stubber):
     s3_stubber.add_response(
         method='get_object',
         expected_params={'Bucket': 'myBucket', 'Key': 'myKey'},
-        service_response={'Body': io.StringIO('myContent')}
+        service_response={'Body': io.StringIO('myContent')},
     )
     assert verify.get_file_content_from_s3('myBucket', 'myKey') == 'myContent'

2 changes: 1 addition & 1 deletion verify/src/verify.py
@@ -35,7 +35,7 @@ def get_file_content_from_s3(bucket, key):


 def get_json_from_file(filename):
-    with open(filename, 'r') as f:
+    with open(filename) as f:
         content = f.read()
     return json.loads(content)

