aboutsummaryrefslogtreecommitdiff
path: root/metchart
diff options
context:
space:
mode:
Diffstat (limited to 'metchart')
-rw-r--r--metchart/aggregator/__init__.py0
-rwxr-xr-xmetchart/aggregator/dwd_icon.py149
-rw-r--r--metchart/aggregator/misc.py23
-rwxr-xr-xmetchart/aggregator/wyoming_sounding.py108
-rw-r--r--metchart/misc.py23
-rw-r--r--metchart/modifier/__init__.py0
-rw-r--r--metchart/modifier/merge.py4
-rw-r--r--metchart/plotter/__init__.py0
-rw-r--r--metchart/plotter/debug_data.py4
-rwxr-xr-xmetchart/plotter/horizontal.py123
-rwxr-xr-xmetchart/plotter/meteogram.py164
-rwxr-xr-xmetchart/plotter/vertical_from_grib.py101
-rwxr-xr-xmetchart/run.py110
-rw-r--r--metchart/skewt.py118
14 files changed, 927 insertions, 0 deletions
diff --git a/metchart/aggregator/__init__.py b/metchart/aggregator/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/metchart/aggregator/__init__.py
diff --git a/metchart/aggregator/dwd_icon.py b/metchart/aggregator/dwd_icon.py
new file mode 100755
index 0000000..ed5c149
--- /dev/null
+++ b/metchart/aggregator/dwd_icon.py
@@ -0,0 +1,149 @@
#!/usr/bin/env python3

# Standard library
import datetime
import os
import subprocess
from multiprocessing import cpu_count
from multiprocessing.pool import ThreadPool

# Third party (duplicate `import requests` removed)
import pytz
import requests
import xarray as xr

# Local
from . import misc

# Root of DWD's open-data NWP download tree.
BASE='https://opendata.dwd.de/weather/nwp'
+
def get_current_run():
    """Return the most recent ICON model run as ``(run, date)``.

    ``run`` is the zero-padded run hour ('00', '06', '12' or '18') and
    ``date`` is the run day formatted as ``YYYYMMDD``.

    We allow up to 3h of slack for DWD to upload the latest run, so the
    current time is shifted back by 3 hours before flooring to the most
    recent 6-hourly run.
    """
    # NOTE: the unused `tz = pytz.timezone('UTC')` local was removed;
    # datetime.timezone.utc is used directly below.
    now = datetime.datetime.now(datetime.timezone.utc)
    corrected = now - datetime.timedelta(hours=3)

    # Runs are published every 6 hours; floor to the latest one.
    run = int(corrected.hour / 6) * 6

    return (f'{run:02d}', corrected.strftime('%Y%m%d'))
+
def download_url(args):
    """Download a single URL to a destination path.

    *args* is an ``(url, dest)`` tuple so this function can be mapped over
    a ``ThreadPool``.  All failures are reported on stdout rather than
    raised, so one bad file does not abort the whole batch.

    Fix: the ``requests.get`` call is now inside the try block (a network
    error used to escape and kill the worker), and HTTP error responses
    are detected instead of being written out as grib files.
    """
    url, dest = args
    try:
        r = requests.get(url)
        r.raise_for_status()
        with open(dest, 'wb') as f:
            f.write(r.content)
        print(f'Downloaded {dest}')
    except Exception as e:
        print(f'Failed to download {dest}:\n', e)
+
def unpack_bz2(dest):
    """Decompress *dest* (a ``.bz2`` file) next to itself and delete the
    archive, mirroring ``bzip2 -df``.

    Fix: uses the stdlib ``bz2`` module instead of shelling out to the
    external ``bzip2`` binary — the old ``subprocess.run`` call never
    captured stderr, so ``res.stderr`` was always ``None`` in the error
    message, and the binary may not exist on all hosts.
    """
    import bz2
    out_path = dest.removesuffix('.bz2')
    try:
        with bz2.open(dest, 'rb') as src, open(out_path, 'wb') as dst:
            dst.write(src.read())
        # bzip2 -d removes the archive after successful decompression.
        os.unlink(dest)
    except OSError as e:
        print(f'There was an error unpacking {dest}:', e)
+
def download_dwd_gribs(
    date, run, target, output, model, steps, model_long,
    pressure_level_parameters, parameter_caps_in_filename,
    single_level_parameters, pressure_levels
):
    """Download every configured GRIB archive for one model run and merge
    the unpacked files into a single grib file at *target*.

    Builds one URL per (step, parameter[, level]) combination, fetches
    them in parallel, unpacks the bz2 archives and combines the result
    with ``grib_copy``.

    Fix: the download URL was missing its final path component (it ended
    at the parameter directory); the archive *filename* is appended again.
    """
    misc.create_output_dir(output)

    to_download = []

    for step in steps:
        step_str = f'{step:03d}'

        for parameter in pressure_level_parameters:
            # DWD uppercases the parameter in some models' file names.
            parameter2 = parameter.upper() if parameter_caps_in_filename else parameter

            for level in pressure_levels:
                filename = f'{model_long}_regular-lat-lon_pressure-level_{date}{run}_{step_str}_{level}_{parameter2}.grib2.bz2'
                URL = f'{BASE}/{model}/grib/{run}/{parameter}/{filename}'

                to_download.append((URL, os.path.join(output, filename)))

        for parameter in single_level_parameters:
            parameter2 = parameter.upper() if parameter_caps_in_filename else parameter
            filename = f'{model_long}_regular-lat-lon_single-level_{date}{run}_{step_str}_{parameter2}.grib2.bz2'
            URL = f'{BASE}/{model}/grib/{run}/{parameter}/{filename}'

            to_download.append((URL, os.path.join(output, filename)))

    # Iterating imap_unordered drains it, i.e. blocks until all workers
    # are finished.
    for _ in ThreadPool(cpu_count()).imap_unordered(download_url, to_download):
        pass

    print('Done Downloading. Uncompressing...')

    for _ in ThreadPool(cpu_count()).imap_unordered(unpack_bz2, [dest for _, dest in to_download]):
        pass

    downloaded_gribs = [dest.removesuffix('.bz2') for _, dest in to_download]

    res = subprocess.run(['grib_copy'] + downloaded_gribs + [target])
    if res.returncode != 0:
        print('grib_copy failed with: ', res.stderr)

    res = subprocess.run(['rm', '-f'] + downloaded_gribs)
    if res.returncode != 0:
        print('rm failed with: ', res.stderr)
+
def clean_output_dir(directory, target):
    """Remove every regular file in *directory* except *target*.

    Sub-directories are left untouched.
    """
    for entry in os.listdir(directory):
        full_path = os.path.join(directory, entry)
        if entry != target and os.path.isfile(full_path):
            os.unlink(full_path)
+
def load_data(name, output, description = None, clean = False, force_filename = None, **kwargs):
    """Return the current model run as an xarray Dataset.

    The run is downloaded into *output* unless a matching grib file is
    already cached there, or *force_filename* points at an existing file.
    Remaining keyword arguments are forwarded to ``download_dwd_gribs``.

    Fix: with ``clean=True`` and ``force_filename`` set, the old code
    referenced the unbound local ``filename`` (NameError).  The file to
    keep is now derived from *target* itself.
    """
    target = force_filename

    if target is None:
        run, date = get_current_run()
        filename = f'{name}_{date}_{run}.grib2'
        target = os.path.join(output, filename)

        if not os.path.exists(target):
            download_dwd_gribs(date, run, target, output, **kwargs)
        else:
            print(f'{target} already exists. Using the cached version.')

    if clean:
        clean_output_dir(output, os.path.basename(target))

    # we drop heightAboveGround to allow 2m and 10m values to be merged down to one dataset
    ds = xr.load_dataset(target, engine='cfgrib', drop_variables='heightAboveGround')

    if description is not None:
        ds.attrs['_description'] = description
    return ds
+
+
# Example configuration for a standalone run of this aggregator
# (mirrors what run.py would pass in from config.yaml).
debug_config = {
    'output':'dwd_icon-eu',
    'model':'icon-eu',
    'model_long':'icon-eu_europe',
    'clean': True,
    # ICON-EU file names carry the parameter in upper case.
    'parameter_caps_in_filename':True,
    # Parameters fetched once per pressure level below.
    'pressure_level_parameters': [
        't',
        'relhum',
        'u',
        'v',
        'fi',
        'clc'
    ],
    # Near-surface (single level) parameters.
    'single_level_parameters': [
        'pmsl',
        't_2m',
        'relhum_2m'
    ],
    # Pressure levels in hPa.
    'pressure_levels':[ 1000, 950, 925, 900, 875, 850, 825, 800, 775, 700, 600, 500, 400, 300, 250, 200, 150, 100 ],
    # Forecast steps in hours.
    'steps':[0, 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48]
}

if __name__ == '__main__':
    load_data('test_icon_eu', **debug_config)
+
diff --git a/metchart/aggregator/misc.py b/metchart/aggregator/misc.py
new file mode 100644
index 0000000..6594d0f
--- /dev/null
+++ b/metchart/aggregator/misc.py
@@ -0,0 +1,23 @@
+import os
+import numpy as np
+import datetime
+
+def np_time_convert(dt64, func=datetime.datetime.utcfromtimestamp):
+ unix_epoch = np.datetime64(0, 's')
+ one_second = np.timedelta64(1, 's')
+ seconds_since_epoch = (dt64 - unix_epoch) / one_second
+
+ return func(seconds_since_epoch)
+
def np_time_convert_offset(init, step):
    """Return the absolute valid time: model *init* time plus forecast *step*."""
    base = np_time_convert(init)
    offset = np_time_convert(step, func=lambda s: datetime.timedelta(seconds=s))
    return base + offset
+
def np_time_list_convert_offset(init, steps):
    """Convert every forecast step in *steps* into an absolute datetime."""
    return [np_time_convert_offset(init, step) for step in steps]
+
def create_output_dir(path, clear=False):
    """Ensure the output directory *path* exists.

    Creates it (including parents) when missing.  Clearing an existing
    directory is not implemented and raises.
    """
    if os.path.exists(path):
        if clear:
            raise Exception('clear not implemented')
    else:
        os.makedirs(path)
+
diff --git a/metchart/aggregator/wyoming_sounding.py b/metchart/aggregator/wyoming_sounding.py
new file mode 100755
index 0000000..617d462
--- /dev/null
+++ b/metchart/aggregator/wyoming_sounding.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+import os
+import datetime
+import requests
+
+import csv
+
+import xarray as xr
+
+import numpy as np
+
+from metpy.units import units
+import metpy.calc as mpcalc
+
+from .. import misc
+
def get_current_run():
    """Return ``(hour, date)`` of the sounding to fetch: 23:00 UTC of
    yesterday, with the date formatted as ``YYYY-MM-DD``."""
    yesterday = datetime.date.today() - datetime.timedelta(days=1)
    # TODO we also want noon
    return ('23:00:00', yesterday.strftime('%Y-%m-%d'))
+
def download_wyoming_csv(station, date, hour, target):
    """Fetch one sounding as CSV from the University of Wyoming archive
    and write it to *target*.  Raises on an HTTP error status."""
    url = (
        'http://weather.uwyo.edu/cgi-bin/bufrraob.py'
        f'?datetime={date}%20{hour}&id={station}&type=TEXT:CSV'
    )
    result = requests.get(url)

    if result.status_code >= 400:
        raise Exception('Failed to Download sounding csv!')

    with open(target, 'w') as f:
        f.write(result.text)
+
def load_wyoming_csv(filepath, hour, date):
    """Parse a Wyoming sounding CSV into an xarray Dataset.

    The returned dataset mimics the layout a DWD GRIB read via cfgrib
    produces (variables t/td/r/u/v over an ``isobaricInhPa`` coordinate,
    with a single zero ``step``) so downstream plotters can treat both
    sources alike.
    """
    p = []            # pressure [hPa]
    T = []            # temperature [degC]
    Td = []           # dewpoint [degC]
    wind_speed = []   # [knots]
    wind_dir = []     # [degrees]
    r = []            # relative humidity

    with open(filepath,'r', newline='') as f:
        reader = csv.reader(f)
        next(reader) # Skip header
        for row in reader:
            if sum(map(lambda s : len(s.strip()) == 0, row)):
                # skip any line with empty values
                continue

            if float(row[3]) in p: # Skip double p entries
                continue

            # Column layout assumed from the Wyoming TEXT:CSV export:
            # 3=pressure, 5=temperature, 6=dewpoint, 8=rel. humidity,
            # 11=wind direction, 12=wind speed -- TODO confirm against a
            # sample file from the archive.
            p.append(float(row[3]))
            T.append(float(row[5]))
            Td.append(float(row[6]))
            r.append(float(row[8]))
            wind_speed.append(float(row[12]))
            wind_dir.append(float(row[11]))

    # Attach units so metpy can convert below.
    T = T * units.degC
    Td = Td * units.degC
    wind_speed = wind_speed * units.knots
    wind_dir = wind_dir * units.degrees
    u, v = mpcalc.wind_components(wind_speed, wind_dir)

    time = np.datetime64(f'{date}T{hour}')

    # recreate the structure a DWD GRIB produces
    return xr.Dataset(
        {
            "t": (["step", "isobaricInhPa"], [T.to(units.kelvin).magnitude]),
            "td": (["step", "isobaricInhPa"], [Td.to(units.kelvin).magnitude]),
            "r": (["step", "isobaricInhPa"], [r]),
            "u": (["step", "isobaricInhPa"], [u.to('m/s').magnitude]),
            "v": (["step", "isobaricInhPa"], [v.to('m/s').magnitude]),
        },
        coords={
            "isobaricInhPa": p,
            "step": [np.timedelta64(0, 'ns')],
            "valid_time": (['step'], [time]),
            "time": time,
        },
        attrs={
            "source": "uwyo.edu",
        }
    )
+
def load_data(name, output, station):
    """Return yesterday's 23UTC sounding for *station* as a Dataset,
    downloading into *output* only when no cached copy exists.

    Fix: corrected the typo 'alreasy' in the cache-hit message.
    """
    hour, date = get_current_run()
    misc.create_output_dir(output)

    target = os.path.join(output, f'{name}_{date}_{hour}.csv')

    if not os.path.exists(target):
        download_wyoming_csv(station, date, hour, target)
    else:
        print(f'{target} already exists. Using the cached version.')

    return load_wyoming_csv(target, hour, date)
+
# Example configuration for a standalone run of this aggregator.
config_debug = {
    'output': 'wyoming_test',
    # Station id as used by the Wyoming archive's `id=` URL parameter.
    'station': '10548'
}

if __name__ == '__main__':
    ds = load_data('test_wyoming_sounding', **config_debug)
    print(ds)
    print(ds.coords['step'])
diff --git a/metchart/misc.py b/metchart/misc.py
new file mode 100644
index 0000000..6594d0f
--- /dev/null
+++ b/metchart/misc.py
@@ -0,0 +1,23 @@
+import os
+import numpy as np
+import datetime
+
def np_time_convert(dt64, func=datetime.datetime.utcfromtimestamp):
    """Convert a numpy datetime64 (or timedelta64) to a Python object.

    The value is first expressed as (possibly fractional) seconds since
    the unix epoch and then passed through *func*; the default yields a
    naive UTC ``datetime``.
    NOTE: this module is duplicated in metchart/aggregator/misc.py.
    """
    unix_epoch = np.datetime64(0, 's')
    one_second = np.timedelta64(1, 's')
    seconds_since_epoch = (dt64 - unix_epoch) / one_second

    return func(seconds_since_epoch)
+
def np_time_convert_offset(init, step):
    # Absolute valid time: init time plus forecast step (the step is run
    # through np_time_convert with a timedelta factory).
    return np_time_convert(init) + np_time_convert(step, func=lambda x: datetime.timedelta(seconds=x))
+
def np_time_list_convert_offset(init, steps):
    # List variant of np_time_convert_offset over several forecast steps.
    return list(map(lambda x: np_time_convert_offset(init, x), steps))
+
def create_output_dir(path, clear=False):
    """Create *path* (and parents) if missing.

    Clearing an existing directory is not implemented and raises.
    """
    if not os.path.exists(path):
        os.makedirs(path)
    elif clear:
        raise Exception('clear not implemented')
+
diff --git a/metchart/modifier/__init__.py b/metchart/modifier/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/metchart/modifier/__init__.py
diff --git a/metchart/modifier/merge.py b/metchart/modifier/merge.py
new file mode 100644
index 0000000..1fb7fda
--- /dev/null
+++ b/metchart/modifier/merge.py
@@ -0,0 +1,4 @@
+import xarray as xr
+
def run(data):
    """Modifier entry point: merge the list of datasets in *data* into one."""
    return xr.merge(data)
diff --git a/metchart/plotter/__init__.py b/metchart/plotter/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/metchart/plotter/__init__.py
diff --git a/metchart/plotter/debug_data.py b/metchart/plotter/debug_data.py
new file mode 100644
index 0000000..560d13c
--- /dev/null
+++ b/metchart/plotter/debug_data.py
@@ -0,0 +1,4 @@
def run(data, **kwargs):
    """Diagnostic plotter: dump the dataset to stdout.

    Extra keyword arguments are accepted (and ignored) so any plotter
    config can be pointed at this module.  Produces no index entries.
    """
    print(data)
    return []
diff --git a/metchart/plotter/horizontal.py b/metchart/plotter/horizontal.py
new file mode 100755
index 0000000..7b35def
--- /dev/null
+++ b/metchart/plotter/horizontal.py
@@ -0,0 +1,123 @@
#!/usr/bin/env python3
import os
import json

import xarray as xr

import numpy as np
import matplotlib.pyplot as plt
from metpy.plots import MapPanel, PanelContainer, RasterPlot, ContourPlot

# Fix: misc lives at the package root (metchart/misc.py); there is no
# metchart/plotter/misc.py, so the old `from . import misc` failed.
# meteogram.py already imports it this way.
from .. import misc
+
# Example configuration for a standalone run of this plotter.
config = {
    'source': 'dwd_icon-eu/combined.grib2',
    'plots': [
        {
            # Plot name; also used as the index id and in file names.
            'name':'r_t-750',
            # None = let metpy pick the map extent.
            'area': None,
            # Layers are drawn in order; see _layer() for the fields.
            'layers': [
                {
                    'layertype': 'raster',
                    'field': 'r',
                    'level': 750,
                },
                {
                    'layertype': 'contour',
                    'field': 't',
                    'level': 750,
                    'contours': 5,
                    'clabels': True
                },
            ]
        },
    ]
}
+
def run(data, plots, output='.'):
    """Plotter entry point: render every configured map and return the
    index entries describing the generated files."""
    misc.create_output_dir(output)
    return [_plot(data, output, **plot) for plot in plots]
+
def _plot(data, output, name, layers, area = None):
    """Render one map image per forecast step for a single plot config.

    Writes a ``<name>.index.json`` listing all images and returns the
    plotter-level index entry for this plot.
    """
    index = []

    for step in data.coords['step']:
        this_step = data.sel(step=step)

        map_layers = []

        for layer in layers:
            map_layers.append(_layer(this_step, **layer))

        # Human-readable init/valid times for the title and file name.
        valid = misc.np_time_convert(step.valid_time.values)
        init = misc.np_time_convert(step.time.values)

        valid_str = valid.strftime('%d %b %Y - %HUTC')
        init_str = init.strftime('%d %b %Y - %HUTC')
        # Forecast offset in whole hours, zero-padded to two digits.
        hours_since_init_str = str(int(this_step.step.values / np.timedelta64(1,'h'))).zfill(2)
        init_for_filename = init.strftime('%Y-%m-%d-%HUTC')

        panel = MapPanel()
        if area is not None:
            panel.area = area
        panel.projection = 'mer'
        panel.layers = ['coastline', 'borders']
        panel.plots = map_layers
        panel.left_title = f'{name} VALID: {valid_str} (INIT +{hours_since_init_str}) INIT: {init_str}'
        if '_description' in data.attrs:
            panel.right_title = data.attrs['_description']

        pc = PanelContainer()
        pc.size = (12.8, 9.6)
        #pc.figure.layout='constrained'
        pc.panels = [panel]
        pc.draw()
        #pc.show()
        outname = f'{name}_{init_for_filename}+{hours_since_init_str}.png'
        pc.save(os.path.join(output, outname))
        # Free the figures each step, otherwise matplotlib keeps them all.
        plt.close('all')

        index.append(
            {
                'file': outname,
                'init': init_str,
                'valid': valid_str,
                'valid_offset': hours_since_init_str,
                'display_name': hours_since_init_str,
                'id': name
            }
        )

    with open(os.path.join(output, f'{name}.index.json'), 'w') as f:
        f.write(json.dumps(index, indent=4))

    return { 'name': name, 'indexfile': f'{name}.index.json', 'list_title': 'INIT+' }
+
def _layer(data, layertype, **kwargs):
    """Build a single metpy map layer from a config entry.

    *layertype* selects the plot class ('raster' or 'contour'); any extra
    keyword arguments override the per-type defaults and are passed to
    the plot class.  Unknown layer types raise ``KeyError``.
    """
    registry = {
        'raster': (RasterPlot, {'colorbar': 'vertical'}),
        'contour': (ContourPlot, {}),
    }

    plot_cls, defaults = registry[layertype]
    layer = plot_cls(**(defaults | kwargs))
    layer.data = data

    return layer
+
if __name__ == '__main__':
    # Standalone execution using the example config above.
    run(**config)
diff --git a/metchart/plotter/meteogram.py b/metchart/plotter/meteogram.py
new file mode 100755
index 0000000..8335247
--- /dev/null
+++ b/metchart/plotter/meteogram.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+import os
+import json
+
+import numpy as np
+
+import matplotlib.pyplot as plt
+
+import metpy.calc as mpcalc
+
+from .. import misc
+
+HEIGHT = 13
+
def run(data, plots, output='.', name='meteogram'):
    """Plotter entry point: render one meteogram per configured location
    and write a JSON index describing the generated images."""
    misc.create_output_dir(output)
    index = [_plot(data, output, **plot) for plot in plots]

    index_file = f'{name}.index.json'
    with open(os.path.join(output, index_file), 'w') as f:
        json.dump(index, f, indent=4)

    return [{ 'name': name, 'indexfile': index_file, 'list_title': 'Location' }]
+
+def _get_next_subplot(size, counter=0):
+ ret = (counter + 1, counter + size)
+ counter += size
+ return counter, ret
+
def _add_cloudcov(ax, data):
    """Panel: cloud cover shading with temperature contours and wind barbs
    over all pressure levels.  Uses the 'clcov' colormap registered in
    run.main()."""
    ax.set_ylabel('Pressure level [hPa]')

    clc = ax.contourf(data.valid_time, data.isobaricInhPa, data.ccl.transpose(), cmap='clcov', vmin=0, vmax=100, levels=9)

    # use Format parameter for n/8
    plt.colorbar(clc, label='cloudcov', extendfrac=None, ticks=[100*n/8 for n in range(9)], format=lambda x,_: f'{int(x/12.5)}/8', pad=0.0, fraction=0.015)

    cf = ax.contour(data.valid_time, data.isobaricInhPa, data.t.metpy.convert_units('degC').transpose())
    ax.clabel(cf, inline=True, fontsize=10)

    ax.barbs(
        data.valid_time,
        data.isobaricInhPa,
        data.u.metpy.convert_units('kt').transpose(),
        data.v.metpy.convert_units('kt').transpose()
    )

    # Pressure decreases with height; flip so the ground is at the bottom.
    ax.invert_yaxis()
+
def _add_temp_dewpoint(ax, data):
    """Panel: 2m temperature, 2m dewpoint (derived from t2m/r2) and the
    850hPa temperature."""
    ### Temp + Dewpoint
    ax.plot(data.valid_time, data.t2m.metpy.convert_units('degC').transpose(), color='red', label='Temperature (2m)')
    ax.plot(data.valid_time, mpcalc.dewpoint_from_relative_humidity(data.t2m, data.r2).transpose(), color='blue', label='Dewpoint (2m)')
    ax.plot(data.valid_time, data.sel(isobaricInhPa=850.0).t.metpy.convert_units('degC').transpose(), color='grey', label='Temperature (850hPa)')
    ax.set_ylabel('Temperature [degC]')
    ax.legend(loc='lower right')
+
def _add_mslp(ax, data):
    """Panel: mean sea level pressure.

    Fix: the line label used to read 'Temperature (2m)' — a copy/paste
    slip from _add_temp_dewpoint.
    """
    ax.plot(data.valid_time, data.prmsl.metpy.convert_units('hPa').transpose(), color='black', label='Mean Sea Level Pressure')
    ax.set_ylabel('Mean Sea Level Pressure [hPa]')
+
def _add_convective_clouds(ax, data):
    """Panel: convective cloud extent as bars from base (HBAS_CON) to top
    (HTOP_CON), drawn on a twin axis over the MSLP panel."""
    # TODO: ADD HBAS_CON, HTOP_CON
    # If none: -500m
    ax.set_ylim(0, 14)
    ax.set_ylabel('Convective Clouds Height [km]')
    # Bar per step: bottom at cloud base, height = top - base.
    ax.bar(data.valid_time, alpha=0.5,
        bottom=data.HBAS_CON.metpy.convert_units('km').transpose(),
        height=(data.HTOP_CON.metpy.convert_units('km')-data.HBAS_CON.metpy.convert_units('km')).transpose(),
        align='edge', width=np.timedelta64(3, 'h'))
+
def _add_precip(ax, data):
    """Panel: 3-hourly precipitation bars plus snow depth on a twin axis.

    ``tp`` is accumulated since init, so the per-interval amount is its
    diff along ``step`` (hence one fewer bar than timestamps).
    """
    ax.set_ylabel('Total precipitation [mm]')
    ax.set_ylim(0, 30)
    ax.bar(data.valid_time[:-1], data.tp.diff('step').transpose(), width=np.timedelta64(3, 'h'),
        align='edge', alpha=0.7, color='green')

    ax_p = ax.twinx()
    ax_p.set_ylabel('Snow depth [m]')
    ax_p.set_ylim(bottom=0)
    ax_p.plot(data.valid_time, data.sde.transpose(), color='blue')
+
def _add_surface_wind(ax, data):
    """Panel: 10m wind speed and gusts, with wind barbs drawn on an
    invisible twin axis along a constant height."""
    ax.plot(data.valid_time, mpcalc.wind_speed(data.u10.transpose(), data.v10.transpose()), color='black', label='Wind (10m)')
    ax.plot(data.valid_time, data.fg10.transpose(), color='red', label='Gust (10m)')

    # Barbs only; the twin axis is hidden so its scale does not show.
    ax_b = ax.twinx()
    ax_b.barbs(
        data.valid_time,
        [1 for _ in data.valid_time],
        data.u10.metpy.convert_units('kt').transpose(),
        data.v10.metpy.convert_units('kt').transpose()
    )
    ax_b.axis('off')

    ax.set_ylabel('Wind Speed [m/s]')
    ax.legend(loc='lower right')
+
def _plot(data, output, name, lat, lon):
    """Render the full meteogram for one location and return its index
    entry.

    The figure is a stack of panels on a HEIGHT-row grid; row spans are
    handed out sequentially by _get_next_subplot and all panels share the
    time axis of the first one.
    """
    # Select the grid point closest to the requested coordinates.
    data = data.sel(latitude=lat, longitude = lon, method='nearest')

    fig = plt.figure(figsize=(12, 12), layout="constrained")

    sp_cnt, spec = _get_next_subplot(4)
    ax = fig.add_subplot(HEIGHT,1,spec)
    _add_cloudcov(ax, data)

    sp_cnt, spec2 = _get_next_subplot(2,sp_cnt)
    ax2 = fig.add_subplot(HEIGHT,1,spec2,sharex=ax)
    _add_temp_dewpoint(ax2, data)

    sp_cnt, spec3 = _get_next_subplot(2,sp_cnt)
    ax3 = fig.add_subplot(HEIGHT,1,spec3,sharex=ax)
    #ax3.legend(loc='lower right')
    _add_mslp(ax3, data)

    # Convective clouds share the MSLP panel via a twin axis.
    ax4 = ax3.twinx()
    _add_convective_clouds(ax4, data)

    sp_cnt, spec4 = _get_next_subplot(2,sp_cnt)
    ax5 = fig.add_subplot(HEIGHT,1,spec4,sharex=ax)
    _add_precip(ax5, data)

    sp_cnt, spec5 = _get_next_subplot(2,sp_cnt)
    ax6 = fig.add_subplot(HEIGHT,1,spec5,sharex=ax)
    _add_surface_wind(ax6, data)

    ### Info Lines
    sp_cnt, spec5 = _get_next_subplot(1,sp_cnt)
    ax_text = fig.add_subplot(HEIGHT, 1, spec5)

    info_lines = []
    init = misc.np_time_convert(data.time.values)
    init_str = init.strftime('%d %b %Y - %HUTC')
    init_for_filename = init.strftime('%Y-%m-%d-%HUTC')

    info_lines.append(f'{name}')
    info_lines.append(f"INIT : {init_str}")
    info_lines.append(f"LAT {lat} LON {lon}")

    if '_description' in data.attrs:
        info_lines.append(data.attrs['_description'])

    ax_text.text(0, 0, '\n'.join(info_lines), ha='left', va='center',
        size=10, fontfamily='monospace')
    ax_text.axis("off")

    ### Output

    outname = f'{name}_{init_for_filename}.png'
    plt.savefig(os.path.join(output, outname))
    plt.close('all')

    return (
        {
            'file': outname,
            'init': init_str,
            'valid': init_str,
            'valid_offset': '00',
            'display_name': name,
            'id': name
        })
diff --git a/metchart/plotter/vertical_from_grib.py b/metchart/plotter/vertical_from_grib.py
new file mode 100755
index 0000000..d341389
--- /dev/null
+++ b/metchart/plotter/vertical_from_grib.py
@@ -0,0 +1,101 @@
#!/usr/bin/env python3
import os

import datetime
import json

import matplotlib.pyplot as plt

import xarray as xr
from metpy.units import units
import metpy.calc as mpcalc
import numpy as np

# Fix: skewt and misc both live at the package root (metchart/skewt.py,
# metchart/misc.py), so a plain `import skewt` / `from . import misc`
# would fail from within metchart/plotter/.
from .. import skewt
from .. import misc
+
# Example configuration for a standalone run of this plotter.
config = {
    'source':'dwd_icon-eu/combined.grib2',
    'plots':[
        {
            'lat':47.9626,
            'lon':11.9964,
            'name':'Antersberg',
            # Parcel analysis to overlay; see skewt.Skewt.addAnalysis.
            'analysis':'lcl'
        },
    ]
}
+
def run(data, plots, output='.'):
    """Plotter entry point: render a Skew-T series for every configured
    location and return their index entries."""
    misc.create_output_dir(output)
    return [_plot(data, output, **plot) for plot in plots]
+
def _plot(data, output, name, lat=None, lon=None, analysis=None):
    """Render one Skew-T image per forecast step for a location.

    When *lat*/*lon* are given, the nearest grid point is used; otherwise
    the data is plotted as-is (e.g. a sounding that is already a single
    column).  Writes ``skewt_<name>.index.json`` and returns the
    plotter-level index entry.
    """
    # Only these variables are needed for the Skew-T.
    for_temp = data[['r', 't', 'u', 'v']]

    if not (lat is None and lon is None):
        for_temp = for_temp.sel(latitude=lat, longitude=lon, method='nearest')

    index = []

    for step in for_temp.coords['step']:
        this_step = for_temp.sel(step=step)

        # Re-attach units for metpy from the grib conventions.
        p = this_step.coords['isobaricInhPa'].values * units.hPa
        T = this_step.t.values * units.K
        relHum = this_step.r.values * units.percent
        Td = mpcalc.dewpoint_from_relative_humidity(T, relHum)
        u = this_step.u.values * (units.m / units.s)
        v = this_step.v.values * (units.m / units.s)

        valid = misc.np_time_convert(step.valid_time.values)
        init = misc.np_time_convert(step.time.values)

        valid_str = valid.strftime('%d %b %Y - %HUTC')
        init_str = init.strftime('%d %b %Y - %HUTC')
        # Forecast offset in whole hours, zero-padded to two digits.
        hours_since_init_str = str(int(this_step.step.values / np.timedelta64(1,'h'))).zfill(2)

        skt = skewt.Skewt(p=p, T=T, Td=Td)
        skt.addWindUV(u, v)
        skt.addInfo(f'{name} INIT+' + hours_since_init_str)
        skt.addInfo(f"VALID: {valid_str}")
        skt.addInfo(f"INIT : {init_str}")
        skt.addInfo(f"LAT {lat} LON {lon}")

        if analysis is not None:
            skt.addAnalysis(shade=True, analysis=analysis)

        if '_description' in data.attrs:
            skt.addInfo(data.attrs['_description'])

        init_for_filename = init.strftime('%Y-%m-%d-%HUTC')

        outname = f'skewt_{name}_{init_for_filename}+{hours_since_init_str}.png'
        skt.plot(filename=os.path.join(output, outname))

        plt.close('all')

        index.append(
            {
                'file': outname,
                'init': init_str,
                'valid': valid_str,
                'valid_offset': hours_since_init_str,
                'display_name': hours_since_init_str,
                'id': name
            }
        )

    with open(os.path.join(output, f'skewt_{name}.index.json'), 'w') as f:
        f.write(json.dumps(index, indent=4))

    return {'name': name, 'indexfile': f'skewt_{name}.index.json', 'list_title': 'Location'}
+
if __name__ == '__main__':
    # Standalone execution using the example config above.
    run(**config)
diff --git a/metchart/run.py b/metchart/run.py
new file mode 100755
index 0000000..17a0a97
--- /dev/null
+++ b/metchart/run.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+
+import sys
+import yaml
+import json
+import matplotlib.pyplot as plt
+import matplotlib as mpl
+from matplotlib.colors import LinearSegmentedColormap
+
+from metpy.units import units
+
def create_aggregators(cfg):
    """Instantiate every configured aggregator.

    Each entry of *cfg* names a module (key 'module') whose
    ``load_data`` is called with the remaining options plus the
    aggregator's name.  Returns ``{aggregator_name: dataset}``.
    Note: the 'module' key is removed from the config entries in place.
    """
    ret = {}
    for name, options in cfg.items():
        module = __import__(options.pop('module'), fromlist=[None])
        ret[name] = module.load_data(name=name, **options)
    return ret
+
def create_modifiers(aggregators, cfg):
    """Instantiate every configured modifier.

    Each entry of *cfg* names a module (key 'module') whose ``run`` is
    called with the remaining options; an optional 'aggregator' key (a
    name or list of names) is resolved against *aggregators* and passed
    as ``data``.  Returns ``{modifier_name: result}``.

    Improvements: ``type(x) == list`` replaced with ``isinstance``,
    duplicated del/append handling collapsed via ``pop`` and a list
    comprehension.  The config entries are still mutated in place,
    matching the original behaviour.
    """
    ret = {}
    for name, options in cfg.items():
        modname = options.pop('module')

        if 'aggregator' in options:
            source = options.pop('aggregator')
            if isinstance(source, list):
                options['data'] = [aggregators[ag] for ag in source]
            else:
                options['data'] = aggregators[source]

        pymod = __import__(modname, fromlist=[None])
        ret[name] = pymod.run(**options)

    return ret
+
def main():
    """Entry point: load the YAML config, run aggregators, modifiers and
    plotters in order, and write the combined plot index as JSON."""
    # Headless backend; everything is written to files.
    mpl.use('agg')

    # Define custom gpm and gpdm units. The default gpm in metpy is aliased to meter.
    # We need the correct definition
    units.define('_gpm = 9.80665 * J/kg')
    units.define('_gpdm = 10 * _gpm')

    # Define custom colormap (used by the meteogram cloud cover panel).
    clcov_cmap = {
        'red': (
            (0.0, 0.0, 0.0),
            (0.1, 0.9, 0.9),
            (1.0, 0.3, 0.3),
        ),
        'green': (
            (0.0, 0.5, 0.5),
            (0.1, 0.9, 0.9),
            (1.0, 0.3, 0.3),
        ),
        'blue': (
            (0.0, 0.9, 0.9),
            (0.1, 0.9, 0.9),
            (1.0, 0.3, 0.3),
        ),
    }

    mpl.colormaps.register(LinearSegmentedColormap('clcov', clcov_cmap))

    # Config file defaults to ./config.yaml; an alternative path may be
    # passed as the first CLI argument.
    FILE = 'config.yaml'
    if len(sys.argv) > 1:
        FILE = sys.argv[1]

    conf = None
    with open(FILE, 'r') as f:
        conf = yaml.safe_load(f)

    aggregators = create_aggregators(conf['aggregator'])

    # Modifier outputs are addressable by plotters just like aggregators.
    if 'modifier' in conf:
        aggregators.update(create_modifiers(aggregators, conf['modifier']))

    index = []

    for plotter in conf['plotter']:
        modname = plotter['module']
        del plotter['module']

        if 'aggregator' in plotter:
            plotter['data'] = aggregators[plotter['aggregator']]
            del plotter['aggregator']

        mod = __import__(modname, fromlist=[None])
        index.extend(mod.run(**plotter))

        plt.close('all')

    # Combined index over all plotters, consumed by the web frontend.
    with open(conf['index'], 'w') as f:
        f.write(json.dumps(index, indent=4))

if __name__ == '__main__':
    main()
diff --git a/metchart/skewt.py b/metchart/skewt.py
new file mode 100644
index 0000000..e674d09
--- /dev/null
+++ b/metchart/skewt.py
@@ -0,0 +1,118 @@
+import matplotlib.gridspec as gridspec
+import matplotlib.pyplot as plt
+
+import numpy as np
+
+import metpy.calc as mpcalc
+from metpy.cbook import get_test_data
+from metpy.plots import add_metpy_logo, Hodograph, SkewT
+from metpy.units import units
+
class Skewt:
    """Skew-T/log-p diagram with optional hodograph, parcel analysis and
    a free-text info box, rendered with metpy."""

    def __init__(self, p, T, Td, max_barbs=20, title=None):
        """Create the figure and plot the temperature/dewpoint profiles.

        p, T, Td are pint quantities (pressure, temperature, dewpoint).
        At most *max_barbs* wind barbs are drawn later; the profile is
        subsampled accordingly.
        """
        self._p = p
        self._T = T
        self._Td = Td

        self._info_lines = []

        # Subsampling stride so that at most max_barbs barbs are drawn.
        self.barb_div = int(max(len(p)/max_barbs,1))


        # Create a new figure. The dimensions here give a good aspect ratio
        self._fig = plt.figure(figsize=(9, 9))
        plt.rcParams["font.family"] = "monospace"
        #self._fig = plt.figure()

        if title is not None:
            plt.suptitle(title, x=0, y=0, va='bottom', ha='left')

        # Grid for plots: the Skew-T takes the left two thirds, the right
        # column holds the hodograph and info box.
        self._gs = gridspec.GridSpec(3, 3)
        self._skew = SkewT(self._fig, rotation=45, subplot=self._gs[:, :2])

        # Plot the data using normal plotting functions, in this case using
        # log scaling in Y, as dictated by the typical meteorological plot
        self._skew.plot(p, T, 'r')
        self._skew.plot(p, Td, 'b')

        plt.xlabel('$T$ $[^o C]$')
        plt.ylabel('$p$ $[hPa]$')

    def addWindUV(self, u, v):
        """Add wind barbs to the Skew-T and a hodograph panel from the
        u/v component profiles (pint quantities)."""
        self._u = u
        self._v = v

        ax = self._fig.add_subplot(self._gs[0, -1])
        # NOTE(review): component_range is max(u + v), the elementwise
        # component sum -- possibly mpcalc.wind_speed(u, v) was intended;
        # confirm before changing.
        h = Hodograph(ax, component_range=max(u + v).magnitude)
        h.add_grid(increment=20)
        h.plot_colormapped(u, v, self._p)
        plt.tight_layout()
        plt.xlabel('$m/s$')
        plt.ylabel('$m/s$')

        self._skew.plot_barbs(self._p[::self.barb_div], u[::self.barb_div], v[::self.barb_div])

    def addInfo(self, line):
        """Queue a text line for the info box drawn by plot()."""
        self._info_lines.append(line)

    def addAnalysis(self, analysis='ccl', shade=False):
        """Overlay a parcel analysis ('ccl' or 'lcl'): parcel profile,
        level marker and CAPE/CIN values (optionally shaded)."""
        f = {'ccl': self._cclAnalysis, 'lcl': self._lclAnalysis}

        lvl, parcel = f[analysis]()

        self._skew.plot(self._p, parcel, 'y')
        # lvl is a (pressure, temperature) pair.
        self._skew.plot(lvl[0], lvl[1], 'o', markerfacecolor='red', linewidth=1)

        # TODO why exception on cape_cin()?
        # ValueError: zero-size array to reduction operation minimum which has no identity
        # https://github.com/Unidata/MetPy/pull/3132
        try:
            cape, cin = mpcalc.cape_cin(self._p, self._T, self._Td, parcel, which_el='top')
            self.addInfo(f'CAPE {int(cape.magnitude)} $J/kg$ CIN {int(cin.magnitude)} $J/kg$')
        except ValueError:
            print('CAPE/CIN Failed with ValueError')
            self.addInfo('CAPE #### CIN ####')

        if shade:
            self._skew.shade_cape(self._p,self._T,parcel)
            self._skew.shade_cin(self._p,self._T,parcel)

    def _cclAnalysis(self):
        """Convective condensation level analysis: returns the CCL point
        and the profile of a parcel lifted dry-adiabatically from the
        CCL-derived ground temperature."""
        #p = np.arange(max(self._p).magnitude, min(self._p).magnitude, -50) * units.hPa

        ccl = mpcalc.ccl(self._p,self._T,self._Td)
        ccl_ground=mpcalc.dry_lapse(self._p[:1], ccl[1], ccl[0])
        ccl_ground_parcel= mpcalc.parcel_profile(self._p, ccl_ground[0], self._Td[0])

        return (ccl, ccl_ground_parcel)

    def _lclAnalysis(self):
        """Lifting condensation level analysis: returns the LCL point and
        the surface parcel profile."""
        ground_parcel= mpcalc.parcel_profile(self._p, self._T[0], self._Td[0])
        lcl = mpcalc.lcl(self._p[0],self._T[0],self._Td[0])

        return (lcl, ground_parcel)

    def _buildInfoBox(self):
        """Render the queued info lines into the middle-right grid cell."""
        ax = self._fig.add_subplot(self._gs[1,-1])
        ax.text(0, 0, '\n'.join(self._info_lines), ha='left', va='center',
            size=10, fontfamily='monospace')
        ax.axis("off")

    def plot(self, filename=None):
        """Finalize the diagram (info box, reference lines, axis limits)
        and save it to *filename*, or show it interactively when None."""
        self._buildInfoBox()

        # Add the relevant special lines
        #self._skew.ax.set_ylim(max(self._p), min(self._p))
        self._skew.ax.set_ylim(1000, 100)
        self._skew.plot_dry_adiabats(linewidth=1)
        self._skew.plot_moist_adiabats(linewidth=1)
        self._skew.plot_mixing_lines(linewidth=1)

        # Good bounds for aspect ratio
        self._skew.ax.set_xlim(-30, 40)

        if filename is not None:
            plt.savefig(filename)
        else:
            plt.show()