aligning local dev
MacBook di Paolo authored and committed on Mar 7, 2024
1 parent 06c5070 commit b4c12e4
Showing 8 changed files with 12 additions and 5 deletions.
Binary file modified: .DS_Store (binary content not shown)
Binary file added: docs/.DS_Store (binary content not shown)
6 changes: 3 additions & 3 deletions docs/source/conf.py
@@ -87,7 +87,7 @@
 "proj",
 "cartopy",
 "scikit-learn",
-"cairo"#,
+"cairo",
 #"ipykernel",
 #"tensorflow"
 ]
@@ -122,9 +122,9 @@
 # the built documents.
 #
 # The short X.Y version.
-version = "0.1.0"
+version = "0.1.1"
 # The full version, including alpha/beta/rc tags.
-release = "0.1.0"
+release = "0.1.1"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
1 change: 1 addition & 0 deletions environment.yml
@@ -18,6 +18,7 @@ dependencies:
 - scipy
 - birdy
 - tensorflow
+- numpy=1.26.0
 # tests
 - pytest
 - pandoc
3 changes: 2 additions & 1 deletion requirements.txt
@@ -10,4 +10,5 @@ aiohttp
 birdy
 tensorflow
 scipy
-ipykernel
+ipykernel
+numpy
5 changes: 5 additions & 0 deletions requirements_dev.txt
@@ -14,3 +14,8 @@ bump2version
 Click
 cruft
 # Changing dependencies above this comment will create merge conflicts when updating the cookiecutter template with cruft. Add extra requirements below this line.
+birdy
+tensorflow
+scipy
+ipykernel
+numpy
Binary file modified: shearwater/.DS_Store (binary content not shown)
2 changes: 1 addition & 1 deletion shearwater/processes/wps_cyclone.py
@@ -99,7 +99,7 @@ def _handler(request, response):
 end_date = request.inputs['end_day'][0].data
 # area = request.inputs['area'][0].data

-data1 = pd.read_csv("/home/b/b382633/shearwater/data/test_dailymeans_Sindian_1.csv")
+data1 = pd.read_csv("/home/b/b382633/shearwater/data/test_dailymeans_Sindian_1.csv")  # to be updated with data repository
 data2 = pd.read_csv("/home/b/b382633/shearwater/data/test_dailymeans_Sindian_2.csv")
 data = pd.concat((data1, data2), ignore_index=True)
 data = data.loc[(data.time >= start_date) & (data.time <= end_date)]
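For context on the data-handling idiom touched by this hunk, the snippet below is a minimal, self-contained sketch of the same load-concatenate-filter pattern in pandas. The inline DataFrames stand in for the hard-coded CSV files, and the column name, date strings, and values are hypothetical placeholders rather than the service's actual data.

import pandas as pd

# Hypothetical stand-ins for the two CSV chunks read in the handler above;
# in the real handler these come from pd.read_csv on files in the data directory.
data1 = pd.DataFrame({"time": ["2024-01-01", "2024-01-02"], "value": [1.0, 2.0]})
data2 = pd.DataFrame({"time": ["2024-01-03", "2024-01-04"], "value": [3.0, 4.0]})

# Concatenate the chunks into one frame, discarding the original indices.
data = pd.concat((data1, data2), ignore_index=True)

# Filter rows to the requested window; ISO-8601 date strings compare
# correctly as plain strings, which is what the filter relies on.
start_date, end_date = "2024-01-02", "2024-01-03"
data = data.loc[(data.time >= start_date) & (data.time <= end_date)]
print(data)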
