Skip to content
Snippets Groups Projects
Commit 64b2984d authored by Rufai Omowunmi Balogun's avatar Rufai Omowunmi Balogun
Browse files

rebase merge request

parent da2c904a
No related branches found
No related tags found
2 merge requests!3Dev-clean up,!1update test for sm_climatology module
File added
...@@ -30,7 +30,7 @@ classifiers =
 [options]
 include_package_data = True
 package_dir =
-    = src
+    = smodex
 packages = find:
 python_requires = >=3.7
 install_requires =
...@@ -49,7 +49,7 @@ docs =
 # smodex = smodex.__main__:main
 [options.packages.find]
-where = src
+where = smodex
 [options.package_data]
 * = *.yaml, *.ini
# Package root: re-export the smodex submodules so callers can do
# `from smodex import sm_anomaly` etc.
from smodex import sm_anomaly
from smodex import sm_climatology
from smodex import sm_downloader
from smodex import version
from smodex import visual_sma_ts
# Explicit public API of the package.
__all__ = ["sm_anomaly", "sm_climatology", "sm_downloader", "visual_sma_ts", "version"]
"""
Soil Moisture Downloader: Configured to download datasets from the Climate Data Store
Downloads hourly soil moisture datasets for full year
"""
import argparse
import json
import logging
import os
import sys
import numpy as np
import pandas as pd
import cdsapi
# Module-level logger that emits INFO-and-above records to stdout.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
# Climate Data Store API client, created at import time.
# NOTE(review): this requires valid ~/.cdsapirc credentials merely to import
# the module — consider constructing it lazily inside the downloader. TODO confirm.
c = cdsapi.Client()
class SMDownload:
    """Soil Moisture Downloader.

    Downloads hourly volumetric soil-moisture datasets from a remote API
    (currently only ERA5 via the Climate Data Store) for every calendar
    year spanned by ``start_date``..``end_date``.

    Args:
        start_date: first date of the range, e.g. ``"1990-01-01"``.
        end_date: last date of the range, e.g. ``"2030-12-31"``.
        api: name of the download portal; only ``"era5"`` is implemented.
        area: bounding box ``(north, west, south, east)`` in degrees.
        depth: volumetric soil-moisture layer indices (ERA5 defines 1-4).
        download_path: directory where NetCDF files are written.
    """

    def __init__(
        self,
        start_date: str,
        end_date: str,
        api: str,
        area: tuple = (50.775, 2.775, 42.275, 18.025),
        depth: tuple = (1, 2, 3, 4),
        download_path: str = "./sm_downloaded/",
    ) -> None:
        self.start_date = start_date
        self.end_date = end_date
        self.api = api
        self.download_path = download_path
        self.area = area
        self.depth = depth

    def era5_sm_downloader(self, year) -> None:
        """Download one full year of ERA5 soil moisture as a NetCDF file.

        Requests all configured soil layers at 6-hourly resolution
        (00/06/12/18 UTC) and writes ``ERA5_SM_<year>.nc`` under
        ``self.download_path``.
        """
        variables = [f"volumetric_soil_water_layer_{dep}" for dep in self.depth]
        c.retrieve(
            "reanalysis-era5-single-levels",
            {
                "product_type": "reanalysis",
                "variable": variables,
                "year": int(year),
                # All 12 months and all 31 day slots; previously these were
                # 43 hand-typed string literals.
                "month": [f"{month:02d}" for month in range(1, 13)],
                "day": [f"{day:02d}" for day in range(1, 32)],
                # 6-hourly sampling; switch to a 24-entry list for hourly data.
                "time": [
                    "00:00",
                    "06:00",
                    "12:00",
                    "18:00",
                ],
                "area": self.area,
                "format": "netcdf",
            },
            self.download_path + f"ERA5_SM_{year}.nc",
        )

    def downloader(self) -> None:
        """Download data for every unique year in the configured date range.

        Creates ``self.download_path`` on demand. Years are derived from a
        daily date range, so partial years at either end are included.
        """
        # TODO: Extend to other APIs.
        date_ranges = pd.date_range(start=self.start_date, end=self.end_date).year
        years = np.unique(date_ranges)
        for yr in years:
            if self.api == "era5":
                logger.info(f"Initiating downloading of ERA5 Soil Moisture for {yr}")
                # exist_ok avoids the check-then-create race of the old
                # os.path.exists() guard.
                os.makedirs(self.download_path, exist_ok=True)
                self.era5_sm_downloader(year=yr)
                logger.info(f"Downloaded ERA5 Soil Moisture for {yr}")
            else:
                # Previously an unknown API name was skipped silently.
                logger.warning("Unsupported API %r; skipping year %s", self.api, yr)
if __name__ == "__main__":
    # Command-line entry point: parse the date range, API name and options,
    # then run the download loop.
    parser = argparse.ArgumentParser(
        description="Downloads soil moisture \
            datasets from start date to end date"
    )
    parser.add_argument(
        "start_date",
        type=str,
        help="initial date to start \
            downloading from e.g. 1990-01-01",
    )
    parser.add_argument(
        "end_date",
        type=str,
        help="end date to stop \
            downloading datasets from e.g. 2030-12-31",
    )
    parser.add_argument("api", type=str, help="download portal API e.g. era5, lpdaac, etc.")
    parser.add_argument(
        "-a",
        "--area",
        type=json.loads,
        # Match SMDownload's default: without this, omitting -a passed None
        # positionally and clobbered the class default.
        default=[50.775, 2.775, 42.275, 18.025],
        help="bounding box area for downloading \
                datasets e.g. [50.775, 2.775, 42.275, 18.025]",
    )
    parser.add_argument(
        "-d",
        "--depth",
        type=json.loads,
        # Match SMDownload's default (see --area above).
        default=[1, 2, 3, 4],
        help="volumetric \
            soil moisture depths e.g. [1, 2, 3, 4]",
    )
    parser.add_argument(
        "path",
        type=str,
        help="directory to save the \
            downloaded datasets e.g. /sm_downloaded/",
    )
    args = parser.parse_args()
    # Named sm_download instead of rebinding (shadowing) the SMDownload class,
    # which is what the old `# type: ignore` comments were papering over.
    sm_download = SMDownload(
        args.start_date, args.end_date, args.api, args.area, args.depth, args.path
    )
    sm_download.downloader()
...@@ -152,6 +152,9 @@ def test_get_climatology_stack():
    # captured = capsys.readouterr()
    # assert "Soil Moisture Stack created" in captured.out
    # assert "Climatology computation complete" in captured.out
if __name__ == "__main__":
    pytest.main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment