diff options
author | Jonas Gunz <himself@jonasgunz.de> | 2024-10-14 02:23:10 +0200 |
---|---|---|
committer | Jonas Gunz <himself@jonasgunz.de> | 2024-10-14 02:23:10 +0200 |
commit | 663edde231af9ad4a782c5438027ef7f839806ea (patch) | |
tree | 3e0d6745e5b1c633a76e404a56b734caddcf6b37 /metchart/aggregator | |
parent | 9e5f1d17a4b0165bd4a3218ca24a159012ea49bd (diff) | |
download | meteo_toolbox-663edde231af9ad4a782c5438027ef7f839806ea.tar.gz |
package with python
Diffstat (limited to 'metchart/aggregator')
-rw-r--r-- | metchart/aggregator/__init__.py | 0 | ||||
-rwxr-xr-x | metchart/aggregator/dwd_icon.py | 149 | ||||
-rw-r--r-- | metchart/aggregator/misc.py | 23 | ||||
-rwxr-xr-x | metchart/aggregator/wyoming_sounding.py | 108 |
4 files changed, 280 insertions, 0 deletions
# diff --git a/metchart/aggregator/__init__.py  (new, empty file)

# diff --git a/metchart/aggregator/dwd_icon.py  (new, mode 100755)
#!/usr/bin/env python3
"""Aggregator that downloads DWD ICON open-data GRIB files for one model run,
merges them into a single GRIB2 file with grib_copy, and loads the result as
an xarray Dataset."""

import requests
import datetime
import pytz  # kept for backward compatibility; no longer used directly below
import os

from multiprocessing import cpu_count
from multiprocessing.pool import ThreadPool

import subprocess

import xarray as xr

from . import misc

# Root of the DWD open-data NWP server.
BASE = 'https://opendata.dwd.de/weather/nwp'


def get_current_run():
    """Return ``(run, date)`` strings ('HH', 'YYYYMMDD') of the newest
    model run that should be fully uploaded.

    ICON runs every 6 hours (00/06/12/18 UTC).
    """
    # we allow up to 3h of slack for DWD to upload the latest run
    now = datetime.datetime.now(datetime.timezone.utc)
    corrected = now - datetime.timedelta(hours=3)

    run = int(corrected.hour / 6) * 6

    return (f'{run:02d}', corrected.strftime('%Y%m%d'))


def download_url(args):
    """ThreadPool worker: download a single ``(url, dest)`` pair.

    Errors are reported but never raised, so one failed field does not
    abort the whole batch.
    """
    url, dest = args
    try:
        r = requests.get(url)
        # Fail loudly on 404/5xx instead of silently writing an HTML
        # error page into the .bz2 destination.
        r.raise_for_status()
        with open(dest, 'wb') as f:
            f.write(r.content)
        print(f'Downloaded {dest}')
    except Exception as e:
        print(f'Failed to download {dest}:\n', e)


def unpack_bz2(dest):
    """Decompress *dest* in place with bzip2 (``-f`` overwrites existing output)."""
    # capture_output so res.stderr below is the actual message, not None
    res = subprocess.run(['bzip2', '-df', dest], capture_output=True, text=True)
    if res.returncode != 0:
        print(f'There was an error unpacking {dest}:', res.stderr)


def download_dwd_gribs(
    date, run, target, output, model, steps, model_long,
    pressure_level_parameters, parameter_caps_in_filename,
    single_level_parameters, pressure_levels
):
    """Download all requested fields of one model run and merge them into *target*.

    date/run             -- as returned by get_current_run()
    target               -- path of the merged output GRIB2 file
    output               -- working directory for the individual downloads
    model / model_long   -- short and long DWD model identifiers (e.g. 'icon-eu')
    steps                -- forecast lead times in hours
    pressure_level_parameters / pressure_levels / single_level_parameters
                         -- which fields to fetch
    parameter_caps_in_filename -- DWD uses upper-case parameter names in some
                                  models' file names
    """
    misc.create_output_dir(output)

    to_download = []

    for step in steps:
        step_str = f'{step:03d}'

        for parameter in pressure_level_parameters:
            parameter2 = parameter.upper() if parameter_caps_in_filename else parameter

            for level in pressure_levels:
                filename = f'{model_long}_regular-lat-lon_pressure-level_{date}{run}_{step_str}_{level}_{parameter2}.grib2.bz2'
                # NOTE(review): the scraped source showed a '(unknown)'
                # placeholder here; the URL must end in the file name built
                # above — TODO confirm against the upstream repository.
                URL = f'{BASE}/{model}/grib/{run}/{parameter}/{filename}'

                to_download.append((URL, os.path.join(output, filename)))

        for parameter in single_level_parameters:
            parameter2 = parameter.upper() if parameter_caps_in_filename else parameter
            filename = f'{model_long}_regular-lat-lon_single-level_{date}{run}_{step_str}_{parameter2}.grib2.bz2'
            URL = f'{BASE}/{model}/grib/{run}/{parameter}/{filename}'

            to_download.append((URL, os.path.join(output, filename)))

    # Fetch and decompress in parallel; imap_unordered is drained for its
    # side effects only.
    for _ in ThreadPool(cpu_count()).imap_unordered(download_url, to_download):
        pass

    print('Done Downloading. Uncompressing...')

    for _ in ThreadPool(cpu_count()).imap_unordered(unpack_bz2, [dest for _, dest in to_download]):
        pass

    downloaded_gribs = [dest.removesuffix('.bz2') for _, dest in to_download]

    # grib_copy concatenates the individual messages into one file.
    res = subprocess.run(['grib_copy'] + downloaded_gribs + [target],
                         capture_output=True, text=True)
    if res.returncode != 0:
        print('grib_copy failed with: ', res.stderr)

    # Remove the individual gribs; no need to shell out to rm for this.
    for grib in downloaded_gribs:
        try:
            os.unlink(grib)
        except FileNotFoundError:
            pass


def clean_output_dir(directory, target):
    """Delete every regular file in *directory* except *target*.

    Used to purge stale runs once a new merged GRIB exists.
    """
    to_delete = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
    if target in to_delete:
        del to_delete[to_delete.index(target)]

    for f in to_delete:
        os.unlink(os.path.join(directory, f))


def load_data(name, output, description=None, clean=False, force_filename=None, **kwargs):
    """Return the current model run as an xarray Dataset, downloading it
    only if no cached GRIB exists.

    name           -- prefix for the cached file name
    output         -- cache/working directory
    description    -- optional string stored as ds.attrs['_description']
    clean          -- remove all other files from *output* afterwards
    force_filename -- skip downloading and load this file instead
    **kwargs       -- forwarded to download_dwd_gribs()
    """
    target = force_filename

    if target is None:
        run, date = get_current_run()
        filename = f'{name}_{date}_{run}.grib2'
        target = os.path.join(output, filename)

        if not os.path.exists(target):
            download_dwd_gribs(date, run, target, output, **kwargs)
        else:
            print(f'{target} already exists. Using the cached version.')

    if clean:
        # basename(target) also works when force_filename was given;
        # previously this referenced 'filename', which is unbound in
        # that case (NameError).
        clean_output_dir(output, os.path.basename(target))

    # we drop heightAboveGround to allow 2m and 10m values to be merged
    # down to one dataset
    ds = xr.load_dataset(target, engine='cfgrib', drop_variables='heightAboveGround')

    if description is not None:
        ds.attrs['_description'] = description
    return ds


debug_config = {
    'output': 'dwd_icon-eu',
    'model': 'icon-eu',
    'model_long': 'icon-eu_europe',
    'clean': True,
    'parameter_caps_in_filename': True,
    'pressure_level_parameters': [
        't',
        'relhum',
        'u',
        'v',
        'fi',
        'clc'
    ],
    'single_level_parameters': [
        'pmsl',
        't_2m',
        'relhum_2m'
    ],
    'pressure_levels': [1000, 950, 925, 900, 875, 850, 825, 800, 775, 700, 600, 500, 400, 300, 250, 200, 150, 100],
    'steps': [0, 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48]
}

if __name__ == '__main__':
    load_data('test_icon_eu', **debug_config)


# diff --git a/metchart/aggregator/misc.py  (new, mode 100644)
"""Small helpers shared by the aggregators."""
import os
import numpy as np
import datetime


def np_time_convert(dt64, func=datetime.datetime.utcfromtimestamp):
    """Convert a numpy datetime64/timedelta64 to a Python object via *func*.

    *func* receives the value as seconds since the Unix epoch (a float).
    NOTE(review): datetime.utcfromtimestamp is deprecated since Python 3.12;
    kept because callers rely on the naive-UTC datetime it returns.
    """
    unix_epoch = np.datetime64(0, 's')
    one_second = np.timedelta64(1, 's')
    seconds_since_epoch = (dt64 - unix_epoch) / one_second

    return func(seconds_since_epoch)


def np_time_convert_offset(init, step):
    """Return init (datetime64) plus step (timedelta64) as a Python datetime."""
    return np_time_convert(init) + np_time_convert(step, func=lambda x: datetime.timedelta(seconds=x))


def np_time_list_convert_offset(init, steps):
    """Apply np_time_convert_offset to every step in *steps*."""
    return list(map(lambda x: np_time_convert_offset(init, x), steps))


def create_output_dir(path, clear=False):
    """Create *path* (and parents) if missing; clearing is not implemented."""
    if not os.path.exists(path):
        os.makedirs(path)
    elif clear:
        raise Exception('clear not implemented')

# diff --git a/metchart/aggregator/wyoming_sounding.py  (new, mode 100755) ...
# ... b/metchart/aggregator/wyoming_sounding.py  (new, mode 100755)
#!/usr/bin/env python3
"""Aggregator that downloads a University of Wyoming radiosonde sounding
(CSV) and converts it into an xarray Dataset shaped like the DWD GRIB
datasets, so downstream plotters can treat both sources the same way."""
import os
import datetime
import requests

import csv

import xarray as xr

import numpy as np

from metpy.units import units
import metpy.calc as mpcalc

# misc.py lives in this same package (metchart/aggregator/misc.py, see the
# sibling dwd_icon.py); 'from .. import misc' pointed one level too high.
from . import misc


def get_current_run():
    """Return ``(hour, date)`` of the most recent sounding to fetch.

    Soundings are taken at fixed synoptic times; yesterday's 23:00 run is
    guaranteed to exist.
    """
    date = (datetime.date.today() - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
    # TODO we also want noon
    hour = '23:00:00'
    return (hour, date)


def download_wyoming_csv(station, date, hour, target):
    """Download the sounding for *station* at *date*/*hour* as CSV into *target*.

    Raises on any HTTP error status so a failed download is never cached.
    """
    url = f'http://weather.uwyo.edu/cgi-bin/bufrraob.py?datetime={date}%20{hour}&id={station}&type=TEXT:CSV'
    result = requests.get(url)

    if result.status_code >= 400:
        raise Exception('Failed to Download sounding csv!')

    with open(target, 'w') as f:
        f.write(result.text)


def load_wyoming_csv(filepath, hour, date):
    """Parse a Wyoming sounding CSV into an xarray Dataset.

    Column indices follow the uwyo.edu CSV layout: 3 = pressure [hPa],
    5 = T [degC], 6 = Td [degC], 8 = mixing ratio, 11 = wind direction [deg],
    12 = wind speed [kt] — TODO confirm against a live file.
    """
    p = []
    T = []
    Td = []
    wind_speed = []
    wind_dir = []
    r = []

    with open(filepath, 'r', newline='') as f:
        reader = csv.reader(f)
        next(reader)  # Skip header
        for row in reader:
            # skip any line with empty values
            if any(len(s.strip()) == 0 for s in row):
                continue

            if float(row[3]) in p:  # Skip double p entries
                continue

            p.append(float(row[3]))
            T.append(float(row[5]))
            Td.append(float(row[6]))
            r.append(float(row[8]))
            wind_speed.append(float(row[12]))
            wind_dir.append(float(row[11]))

    T = T * units.degC
    Td = Td * units.degC
    wind_speed = wind_speed * units.knots
    wind_dir = wind_dir * units.degrees
    u, v = mpcalc.wind_components(wind_speed, wind_dir)

    time = np.datetime64(f'{date}T{hour}')

    # recreate the structure a DWD GRIB produces
    return xr.Dataset(
        {
            "t": (["step", "isobaricInhPa"], [T.to(units.kelvin).magnitude]),
            "td": (["step", "isobaricInhPa"], [Td.to(units.kelvin).magnitude]),
            "r": (["step", "isobaricInhPa"], [r]),
            "u": (["step", "isobaricInhPa"], [u.to('m/s').magnitude]),
            "v": (["step", "isobaricInhPa"], [v.to('m/s').magnitude]),
        },
        coords={
            "isobaricInhPa": p,
            # a single analysis "step" of zero, matching GRIB conventions
            "step": [np.timedelta64(0, 'ns')],
            "valid_time": (['step'], [time]),
            "time": time,
        },
        attrs={
            "source": "uwyo.edu",
        }
    )


def load_data(name, output, station):
    """Return the latest sounding for *station* as a Dataset, using a cached
    CSV in *output* when one already exists."""
    hour, date = get_current_run()
    misc.create_output_dir(output)

    target = os.path.join(output, f'{name}_{date}_{hour}.csv')

    if not os.path.exists(target):
        download_wyoming_csv(station, date, hour, target)
    else:
        # typo 'alreasy' fixed
        print(f'{target} already exists. Using the cached version.')

    return load_wyoming_csv(target, hour, date)


config_debug = {
    'output': 'wyoming_test',
    'station': '10548'
}

if __name__ == '__main__':
    ds = load_data('test_wyoming_sounding', **config_debug)
    print(ds)
    print(ds.coords['step'])