# -*- coding: utf-8 -*-
# Created 2015-05-19
"""Module 'nobotouls.inout' defines routines for managing and plotting
raw nobo station measurement files with the help of an sqlite
database and matplotlib routines.
"""
# to ignore numpy error reports in pylint:
# pylint: disable=e1101
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from matplotlib.dates import DayLocator, HourLocator, DateFormatter
from matplotlib.ticker import AutoMinorLocator
from mpl_toolkits.basemap import Basemap
from numpy import ma
from scipy.interpolate import interp1d
import csv
import datetime
import glob
import matplotlib
import os
import pickle
import sqlite3
import sys
import matplotlib.dates as mpldat
import matplotlib.pyplot as plt
import numpy as np
import os.path as op
try:
    # read version number from package file
    from pkg_resources import require, Requirement, resource_filename
    __version__ = require("nobotouls")[0].version
except:  # if an error occurs set the version manually
    __version__ = '0.0.5.dev20161112'
__copyright__ = '(c) D.Thaler - foehn@posteo.org'
__license__ = 'MIT License - http://opensource.org/licenses/MIT'
try:
from osgeo import gdal
except ImportError:
print("Warning: 'gdal.osgeo' cannot be imported",
file=sys.stderr)
print("so you cannot use program 'nobowindmap2' ",
file=sys.stderr)
print("Continuing ... ", file=sys.stderr)
# Define config-directories
USER_CONFIG_DIRECTORY = \
op.join(op.expanduser('~'),
'.nobotouls') # : user config directory
try:
SOURCE_CONFIG_DIRECTORY = \
resource_filename(Requirement.parse("nobotouls"),
"config") # : source config directory
except:
SOURCE_CONFIG_DIRECTORY = op.join('../config')
# f o r r a i n d a t a
RAINPERCOUNT = 0.2 # : rain amount in mm per rain sensor tick
RAINTIMEDELTA = 10.0 # : time interval for noborain data in minutes
# fieldnames names, dicts, etc ....
RAINRAWFN2INTFN = {'Anz.': 'num',
'Datum': 'dat', 'Zeit': 'tim',
'Event': 'event',
'Ext. Line Event': 'event',
'Koppler abgetrennt': 'coupldisconn',
'Koppler verbunden': 'couplconn',
'Host verbunden': 'hostconn',
'Dateiende': 'eofile'}\
# : ext. fieldname beginnings versus internal field names
RAINRAWFIELDNAMES = RAINRAWFN2INTFN.keys()
RAININTERNALFIELDNAMES0 = RAINRAWFN2INTFN.values()
# extended internal field name list (adds station and combined date-time fields)
RAININTERNALFIELDNAMES = RAININTERNALFIELDNAMES0 + ['stat', 'dattim',
'timez', 'dattiml']
PROTOK = 'Protokolliert'
# noborain sqlite3-stuff
noboRAINTABLE = 'noborain'
sqlct = "CREATE TABLE 'noborain' "
sqlct = sqlct + "('stat' NOT NULL ,'dattim' NOT NULL , 'dattiml' ,"
sqlct = sqlct + "'timez' ,'rr' ,"
sqlct = sqlct + "'obs' BOOL DEFAULT (0), PRIMARY KEY ('stat', 'dattim'))"
SQLITE_CREATENOBORAINTABLE = sqlct
# : path 2 database if not defined otherwise
NOBO_DATABASE_FILE = "./resources/aichfeld_db.sqlite"
# : path 2 raw nobo files (if not defined otherwise)
nobo_RAW_DATA_DIR = "./"
A4_H = 29.7 / 2.54
A4_W = 21.0 / 2.54
TFONTSIZE = 20
LFONTSIZE = 14
PLOTWITH = \
u"Plotted with nobotouls Vers." + __version__ + \
u" - http://nobotouls.foehnwall.at, \u00a9 D.Thaler 2015-2016"
PLWX = 0.27
PLWY = 0.06
PLWCOLOR = (0, 0.6, 0.5)
PLWFONTSIZE = 9
# Thresholds for wind plots
LOW_WIND = 2.5 # less than: low wind
NO_WIND = 0.1 # less than: no wind
MS2KT = 1.94384449 # : 1 m/s to knots
# fieldnames names, dicts, etc ....
RAWFN2INTFN = {'Anz.': 'num', 'Datum': 'dat', 'Zeit': 'tim',
'Temp.': 'tl', 'RH': 'rh', 'Druck': 'ps',
'Windgeschwindigkeit': 'ff', 'Böengeschwindigkeit': 'fx',
'Windrichtung': 'dd'}\
# : ext. fieldname beginnings versus internal field names
RAWFIELDNAMES = RAWFN2INTFN.keys()
INTERNALFIELDNAMES0 = RAWFN2INTFN.values()
# extended internal field name list (adds station and combined date-time fields)
INTERNALFIELDNAMES = INTERNALFIELDNAMES0 + ['stat', 'dattim',
'timez', 'dattiml']
PARAMLIM_DICT = {'ps': 'ps_lim', 'tl': 'tl_lim', 'rh': 'rh_lim',
'dd': 'dd_lim', 'ff': 'ff_lim', 'fx': 'ff_lim'} \
# : parameter to parameter limits - dictionary
def version(prtyp='Program', print_it=True):
    """Builds (and optionally prints) a program/version/copyright info string."""
prn = op.basename(sys.argv[0])
vs1 = prtyp + " " + prn
vs2 = 'Version ' + __version__
vs3 = __copyright__
vs4 = __license__
vs = vs1 + '\n' + vs2 + 2 * '\n' + vs3 + 2 * '\n' + vs4
if print_it:
print(vs1)
print(vs2)
print(vs3)
print(vs4)
print()
return vs
def show_dict(dictio, header=""):
    """Displays the content of the dictionary 'dictio' with an optional
    header line 'header'."""
print(79 * '-')
print(header)
for k, d in enumerate(dictio):
print(k, ': ', d, ': ', dictio[d])
return None
def deg2dms(deg):
    """Converts decimal degrees to deg, minutes, seconds"""
x, d = np.modf(deg)
x, m = np.modf(x * 60)
s = x * 60
return d, m, s
def dms2deg(d, m, s):
    """Converts deg, minutes, seconds to decimal degrees"""
return (d + m / 60. + s / 3600.)
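# Hedged usage sketch (added for illustration, not part of the original
# module): round-tripping between decimal degrees and deg/min/sec with the
# two helpers above.
#
#     d, m, s = deg2dms(47.5)       # -> (47.0, 30.0, 0.0)
#     dms2deg(47, 30, 0)            # -> 47.5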
def signum(x):
    """Signum function missing from the python math lib"""
import math
return int(math.copysign(1, x))
def time2floor(mptime, mptimedelta):
    """Take the next lower time of mptime that is an (almost) integer multiple
    of mptimedelta"""
return np.floor(mptime / mptimedelta) * mptimedelta
def time2ceil(mptime, mptimedelta):
    """Take the next higher time of mptime that is an (almost) integer multiple
    of mptimedelta"""
return np.ceil(mptime / mptimedelta) * mptimedelta
def fine_mpl_dattims(coarse_mpltimes, mpldattimdelta):
    """Turn a coarse, irregularly gridded matplotlib time array into a regular
    array so that the lower and upper limits of the coarse array fit
    completely into the new equally spaced fine array."""
mp1 = time2floor(coarse_mpltimes[0], mpldattimdelta)
mp2 = time2ceil(coarse_mpltimes[-1], mpldattimdelta)
# print('MindatX: ', mpldat.num2date(mp1))
# print('Mindat: ', mpldat.num2date(coarse_mpltimes[0]))
# print('MaxDat: ', mpldat.num2date(coarse_mpltimes[-1]))
# print('MaxdatX: ', mpldat.num2date(mp2))
fine = np.arange(mp1,
mp2 + mpldattimdelta / 100., # make sure mp2 is in fine!
mpldattimdelta)
return fine
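# Hedged illustration (assumed values, not from the original module): with a
# grid spacing of 0.25 matplotlib days (6 hours), the coarse interval
# [5.37, 5.9] is widened to [5.25, 6.0] and filled with equally spaced steps.
#
#     time2floor(5.37, 0.25)               # -> 5.25
#     time2ceil(5.9, 0.25)                 # -> 6.0
#     fine_mpl_dattims([5.37, 5.9], 0.25)  # -> array([5.25, 5.5, 5.75, 6.0])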
def interpolate_mpldattim_floats(coarse_mpltimes, mpldattimdelta,
                                 coarse_floats):
    """Interpolate an irregular and coarse time series
    to a finely gridded array.
    * coarse_mpltimes .. coarse times array in matplotlib.date-objects
    * mpldattimdelta ... time delta (to construct the fine date-time array)
    * coarse_floats ... coarse float values
    (the first time element of coarse_mpltimes is rounded down to an integer
    multiple of mpldattimdelta, the last time element is rounded up to an
    integer multiple of mpldattimdelta)
    Returns:
    * fine matplotlib dattims (floats)
    * fine float array
    """
f = interp1d(coarse_mpltimes, coarse_floats,
kind='linear', bounds_error=False)
finempldattims = fine_mpl_dattims(coarse_mpltimes, mpldattimdelta)
finefloatarray = f(finempldattims)
return finempldattims, finefloatarray
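# Hedged usage sketch with made-up numbers: linearly interpolate coarse values
# onto the regular grid produced by fine_mpl_dattims.  Points of the fine grid
# outside the coarse time range become NaN (bounds_error=False).
#
#     t_fine, v_fine = interpolate_mpldattim_floats([0.0, 0.5, 2.0],
#                                                   0.5,
#                                                   [10., 20., 30.])
#     # t_fine -> array([0. , 0.5, 1. , 1.5, 2. ])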
def yymoddhhmi2pydatetime(idattim, fmt='%Y%m%d%H%M'):
    """ Converts a single integer date-time, e.g. 201505241200, to
    a python datetime object.
    Probably unnecessarily complicated, but just to avoid side
    effects for earlier calls ... sigh ..."""
# keep the old complicated stuff ...
result, mi = divmod(idattim, 100)
result, hh = divmod(result, 100)
result, dd = divmod(result, 100)
yy, mo = divmod(result, 100)
try:
pydattim = datetime.datetime(yy, mo, dd, hh, mi)
except (TypeError, ValueError) as e:
try:
# Should be sufficient for everything
s = str(idattim)
pydattim = datetime.datetime.strptime(s, fmt)
except (TypeError, ValueError) as e:
pydattim = None
print("Warning in 'yymoddhhmi2pydatetime': invalid date-time: ",
idattim, file=sys.stderr)
print(e, file=sys.stderr)
return pydattim
def pydatetime2yymoddhhmi(dattim, fmt='%Y%m%d%H%M'):
    """Converts a single python datetime object to an integer
    date-time in the form yyyymoddhhmi, e.g. 201505241200,
    or to an integer defined by fmt (ATTENTION!)"""
dtstr = dattim.strftime(fmt)
return int(dtstr)
# return dattim.year * 100000000 + dattim.month * 1000000 +\
# dattim.day * 10000 + dattim.hour * 100 + dattim.minute
def addtime2yymoddhhmi(idate, days=0, hours=0, minutes=0, seconds=0):
    """Add a time increment to an integer date-time"""
    pydattim = yymoddhhmi2pydatetime(idate)
    pytimedelta = datetime.timedelta(days=days, hours=hours,
                                     minutes=minutes,
                                     seconds=seconds)
pydattimnew = pydattim + pytimedelta
return pydatetime2yymoddhhmi(pydattimnew)
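# Hedged usage sketch of the integer date-time helpers above (values are
# illustrative only):
#
#     yymoddhhmi2pydatetime(201505241200)    # -> datetime(2015, 5, 24, 12, 0)
#     pydatetime2yymoddhhmi(datetime.datetime(2015, 5, 24, 12, 0))
#                                             # -> 201505241200
#     addtime2yymoddhhmi(201505241200, hours=6)   # -> 201505241800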
def scan_nobo_timezone(timehead, seconds=False):
    """Get the timezone as a timedelta object from the nobo-file header.
    Returns a timedelta object and an integer hhmm; if
    seconds=True, the integer is hhmmss. Defaults to False."""
idx = timehead.index('GMT')
ts = timehead[idx + 3:]
dh = int(ts[:3])
dm = signum(dh) * int(ts[4:])
pytimedelta = datetime.timedelta(hours=dh, minutes=dm)
if seconds:
itimedelta = dh * 10000 + dm * 100
else:
itimedelta = dh * 100 + dm
return pytimedelta, itimedelta
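# Hedged example (the header string below is an assumed typical nobo time
# column caption): 'GMT+01:00' is parsed into a one hour timedelta and the
# integer offset 100 (hhmm).
#
#     scan_nobo_timezone('Zeit, GMT+01:00')
#     # -> (datetime.timedelta(0, 3600), 100)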
def create_station_table(nobodb):
    """Create an empty nobo station table in the nobo-sqlite3-db """
sql1 = 'CREATE TABLE "nobostations"'
sql2 = ' ("stat" PRIMARY KEY NOT NULL UNIQUE , '
sql3 = '"longname" , "lon" DEFAULT 0.0, "lat" DEFAULT 0.0, '
sql4 = '"elev" DEFAULT -99., "active" BOOL DEFAULT true)'
SQLITE_CREATENOBOSTATIONTABLE = sql1 + sql2 + sql3 + sql4
dbfile = nobodb
conn = sqlite3.connect(dbfile)
c = conn.cursor()
try:
c.execute(SQLITE_CREATENOBOSTATIONTABLE)
print('Creation command: ')
print(SQLITE_CREATENOBOSTATIONTABLE)
except sqlite3.OperationalError as e:
return e.message
conn.commit()
conn.close()
return "table 'nobostations' created"
def create_rain_table(nobodb):
"""Create a noborain table in nobo-sqlite3-db """
dbfile = nobodb
conn = sqlite3.connect(dbfile)
c = conn.cursor()
try:
c.execute(SQLITE_CREATENOBORAINTABLE)
print('Creation command: ')
print(SQLITE_CREATENOBORAINTABLE)
except sqlite3.OperationalError as e:
return e.message
conn.commit()
conn.close()
return "table 'noborain' created"
def insert_aichfeld_rain_db(dbfile, raindata, replace=False):
    """Insert new records into the db rain table; in case of a conflict the
    new record is ignored, or replaces the old one if replace=True.
    * dbfile ... path to nobo database file
    * raindata ... raindata of one station in a dictionary defined by
      noborain2timedeltarain.
    * returns True, otherwise it crashes.
    """
insertignore = "INSERT OR IGNORE "
insertreplace = "INSERT OR REPLACE "
insertss = "INTO noborain VALUES (?, ?, ?, ?, ?, ?)"
if replace:
insertcommand = insertreplace + insertss
else:
insertcommand = insertignore + insertss
conn = sqlite3.connect(dbfile)
c = conn.cursor()
stat = raindata['stat']
dattim = raindata['dattim']
dattiml = raindata['dattiml']
timez = raindata['timez']
rr = raindata['rr']
obs = 1 # observed (0 not observed)
# print('insert_ignore_..rain..: ', end='')
for k, _ in enumerate(dattim):
insertl = (stat[k],
int(dattim[k]),
int(dattiml[k]),
int(timez[k]),
round(rr[k], 1),
obs)
# print(insertcommand, insertl)
c.execute(insertcommand, insertl)
    print(79 * '-')
    print(k + 1, "records inserted into db ", dbfile,
          " into table 'noborain'")
conn.commit()
conn.close()
return True
def read_nobo_file(input_csv):
    """Reads a single nobo-station file from the Aichfeld project and
    returns its header and data for further processing.
    * input_csv ... raw nobo input file (more or less US-csv format)
    Returns (with redundancies):
    * station ... short nobo station name
    * headers ... header line with original long nobo field names
    * data ... a dictionary with short internal field names
      indicating arrays with field variables, e.g. data['tl']:
      * 'stat' ... nobo station name
      * 'dattim' ... date and times as integer datetime in UTC(!)
      * 'timez' ... timezone read from nobo file (LMT = UTC + timez)
      * 'dattiml'... date and time for the local time zone as integer
      * 'tl' ... air temperature C
      * 'rh' ... relative humidity %
      * 'ps' ... station pressure hPa
      * 'ff' ... 10 min mean wind speed m/s
      * 'fx' ... 10 min max wind speed m/s (gusts)
      * 'dd' ... wind direction degrees 0..360
    """
    SSTR = 'Plot-Titel: '  # search string for name extraction
lstr = len(SSTR) # length of that string
print("\nReading file: ", input_csv)
with open(input_csv, 'rb') as f:
reader = csv.reader(f, delimiter=';')
# title line
title = reader.next()[0]
print(title)
idx = title.find(SSTR)
station = title[(idx + lstr):-1]
# header line with collumn captions
headers = np.array(reader.next())
# scan header and create dictionary headers <-> index
fn_of_idx, idx_of_fn = scan_nobo_header(headers)
timezone, itz = scan_nobo_timezone(headers[idx_of_fn['tim']])
data = {}
for f in INTERNALFIELDNAMES:
data[f] = [] # initialize data field
for row in reader:
for k in fn_of_idx.keys():
fn = fn_of_idx[k]
data[fn].append(row[k])
dd1 = row[idx_of_fn['dat']] + ' ' + row[idx_of_fn['tim']]
dd1i = pydatetime2yymoddhhmi(
datetime.datetime.strptime(dd1, '%Y.%m.%d %H:%M:%S'))
data['dattiml'].append(dd1i)
dd2i = pydatetime2yymoddhhmi(datetime.datetime.strptime(
dd1, '%Y.%m.%d %H:%M:%S') - timezone)
data['dattim'].append(dd2i)
data['timez'].append(itz)
data['stat'].append(station)
# fill all empty dictionary fields (eg. 'tl' if no temp. is measured)
# with 'None' - values
datalen = len(data['stat'])
for f in INTERNALFIELDNAMES:
if len(data[f]) == 0:
for idx in range(datalen):
data[f].append(None)
return station, headers, data
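# Hedged usage sketch (file name and station are placeholders): read a raw
# station file and store it via insert_ignore_aichfeld_db /
# insert_replace_aichfeld_db (both defined further below).
#
#     station, headers, data = read_nobo_file('raw/JUD_2015.txt')
#     insert_ignore_aichfeld_db(NOBO_DATABASE_FILE, data)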
def read_noborain_file(input_csv):
    """Reads a single nobo-rain-station file from the Aichfeld project and
    returns its header and data for further processing.
    * input_csv ... raw nobo input file (more or less US-csv format)
    Returns (with redundancies):
    * station ... short nobo station name
    * headers ... header line with original long nobo field names
    * data ... a dictionary with short internal field names
      indicating arrays with field variables:
      * 'stat' ... nobo station name
      * 'dattim' ... date and times as integer datetime in UTC(!)
      * 'timez' ... timezone read from nobo file (LMT = UTC + timez)
      * 'dattiml'... date and time for the local time zone as integer
      * 'event' ... rain counter event (one tick = RAINPERCOUNT mm)
      * 'coupldisconn' ... coupler disconnected
      * 'couplconn' ... coupler connected
      * 'hostconn' ... host connected
      * 'eofile' ... end of file
    """
    SSTR = 'Plot-Titel: '  # search string for name extraction
lstr = len(SSTR) # length of that string
with open(input_csv, 'rb') as f:
reader = csv.reader(f, delimiter=';')
# title line
title = reader.next()[0]
# print("\n", title)
idx = title.find(SSTR)
station = title[(idx + lstr):-1]
# header line with collumn captions
headers = np.array(reader.next())
# scan header and create dictionary headers <-> index
fn_of_idx, idx_of_fn = scan_noborain_header(headers)
timezone, itz = scan_nobo_timezone(headers[idx_of_fn['tim']],
seconds=True)
data = {}
for f in RAININTERNALFIELDNAMES:
data[f] = [] # initialize data field
for row in reader:
for k in fn_of_idx.keys():
fn = fn_of_idx[k]
data[fn].append(row[k])
dd1 = row[idx_of_fn['dat']] + ' ' + row[idx_of_fn['tim']]
dd1i = pydatetime2yymoddhhmi(
datetime.datetime.strptime(dd1, '%Y.%m.%d %H:%M:%S'),
fmt='%Y%m%d%H%M%S')
data['dattiml'].append(dd1i)
dd2i = pydatetime2yymoddhhmi(datetime.datetime.strptime(
dd1, '%Y.%m.%d %H:%M:%S') - timezone,
fmt='%Y%m%d%H%M%S')
data['dattim'].append(dd2i)
data['timez'].append(itz)
data['stat'].append(station)
datalen = len(data['stat'])
for f in RAININTERNALFIELDNAMES:
if len(data[f]) == 0:
for idx in range(datalen):
data[f].append(None)
return station, headers, data
def noborain2timedeltarain(rainsums, mpldattimdelta):
    """Resamples the cumulative rain sums (as returned by noborainsums) onto
    a regular time grid with spacing mpldattimdelta (matplotlib date units)
    and derives the rain amount per interval ('rr')."""
    show_dict(rainsums, 'noborain2timedeltarain->rainsums')
c_dattims = rainsums['mpldattim']
c_sums = rainsums['sum']
f_dattims = fine_mpl_dattims(c_dattims, mpldattimdelta)
f_sums = np.zeros(len(f_dattims))
f_timez = np.empty_like(f_dattims)
f_idattims = np.empty_like(f_dattims)
# f_idattimsl = np.empty_like(f_dattims)
f_stat = []
stat = rainsums['stat'][0]
timezone = 200 # 02:00 (just in case, should be set further)
sumold = 0.
    # fill fine arrays with data that are already available
for k, d in enumerate(f_dattims):
pydt = mpldat.num2date(d)
f_idattims[k] = int(pydt.strftime("%Y%m%d%H%M"))
f_stat.append(stat)
for fk, fd in enumerate(f_dattims):
for ck, cd in enumerate(c_dattims):
timezone = int(rainsums['timez'][ck] / 100)
if cd > fd and cd <= (fd + mpldattimdelta):
f_sums[fk] = c_sums[ck]
sumold = f_sums[fk]
else:
f_sums[fk] = sumold
f_timez[fk] = timezone
f_interv = np.diff(f_sums)
f_interv = np.insert(f_interv, 0, f_interv[-1])
f_idattimsl = f_idattims + f_timez
hr2td = {'stat': f_stat,
'dattim': f_idattims, 'mpldattim': f_dattims,
'dattiml': f_idattimsl, 'timez': f_timez,
'sum': f_sums, 'rr': f_interv}
return hr2td
def noborainsums(rawraindict):
    """Construct and fill a dictionary with date-times and rain sums from a
    raw nobo rain dictionary.
    rawraindict ... dictionary with raw nobo rain data as read from csv
                    with read_noborain_file
    returns:
    rainsums ... dict. with arrays 'stat', 'dattim', 'dattiml', 'timez',
                 'pydattim', 'mpldattim' and the cumulative 'sum'
    """
data = rawraindict
dattim = data['dattim']
pydattim = []
for _, d in enumerate(dattim):
pd = yymoddhhmi2pydatetime(d, "%Y%m%d%H%M%S")
pydattim.append(pd)
# print('pydattim: ', pydattim)
rsdata = {'stat': [], 'dattim': [], 'dattiml': [], 'timez': [],
'pydattim': [], 'mpldattim': [], 'sum': []}
protokolliert = False # define it before 1st assigned
for l in range(len(dattim)):
if data['coupldisconn'][l].strip() == PROTOK:
protokolliert = True
rsdata['stat'].append(data['stat'][l])
rsdata['dattim'].append(data['dattim'][l])
rsdata['dattiml'].append(data['dattiml'][l])
rsdata['timez'].append(data['timez'][l])
rsdata['pydattim'].append(pydattim[l])
rsdata['mpldattim'].append(mpldat.date2num(pydattim[l]))
rsum = 0.0
rsdata['sum'].append(rsum)
continue # start next loop
if data['couplconn'][l] == PROTOK:
protokolliert = False
# print('Protokoliert: ', protokolliert)
if protokolliert:
rsdata['stat'].append(data['stat'][l])
rsdata['dattim'].append(data['dattim'][l])
rsdata['dattiml'].append(data['dattiml'][l])
rsdata['timez'].append(data['timez'][l])
rsdata['pydattim'].append(pydattim[l])
rsdata['mpldattim'].append(mpldat.date2num(pydattim[l]))
rsum = rsum + RAINPERCOUNT
rsdata['sum'].append(rsum)
rainsums = rsdata
return rainsums
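# Hedged sketch of the rain processing chain (paths are placeholders).
# RAINTIMEDELTA is given in minutes, so it is converted to matplotlib date
# units (days) before resampling.
#
#     station, headers, raw = read_noborain_file('raw/rain_JUD.txt')
#     sums = noborainsums(raw)
#     tdrain = noborain2timedeltarain(sums, RAINTIMEDELTA / (24. * 60.))
#     insert_aichfeld_rain_db(NOBO_DATABASE_FILE, tdrain)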
def plot_raw_nobo(station, data,
istartdattim=190001010000,
ienddattim=210012311159,
outfiledir='./',
outfiletype='png',
forced_outfile_name=None,
dpi=75,
tl_lim=(-5., +35.),
ps_lim=(925., 975.),
rh_lim=(0., 100.),
ff_lim=(0., 25.),
dd_lim=(0., 360.)
):
"""Plots a meteogramm of a single nobo station.
Inut-params:
* station ... station name (string)
* istartdattim ... start date-time as integer yyyymoddhhmi
* ienddattim ... end date-time as integer yyyymoddhhmi
* outfiledir ... directory where outputfile is to be written
* outputfiletype ... 'png', 'pdf', 'jpg' or whatever is allowed by
matplotlib (defaults to png)
* foreced_outfile_name ... optional complete path of output image file
* dpi ... dots per inch (defaults to 75)
* Tuples for limiting y-axes:
* tl_lim ... (tmin, tmax) for temp. C
* ps_lim ... (pmin, pmax) for press. hPa
* rh_lim ... (rhmin, rhmax) for rel. humidity %
* ff_lim ... (ffmin, ffmax) for wind speed (m/s)
* dd_lim ... (ddmin, ddmax) for wind direction (degree)
Output:
* Image-Outputfile either with name either of input parameter
'forced_outfile_name' or calculated in program
'outfiledir + station + startdate + enddate + outfiletype'
"""
# Min-max date regardless of available data!
dmin = yymoddhhmi2pydatetime(istartdattim)
dmax = yymoddhhmi2pydatetime(ienddattim)
# convert in matplot dates format
k = 0
pydattim = []
for k in range(len(data['dattim'])):
idt = data['dattim'][k]
pydattim.append(yymoddhhmi2pydatetime(idt))
pydattim = np.array(pydattim)
mpldate = matplotlib.dates.date2num(pydattim)
mpldat_lim = matplotlib.dates.date2num((dmin, dmax))
# construct headline
timefmt = '%Y-%m-%d %H:%M'
headl = station + "\n" + dmin.strftime(timefmt) + \
" - " + dmax.strftime(timefmt)
# define image outputfile name
if forced_outfile_name is None:
# calculated outfile =
# outfiledir + station + startdate + enddate + outfiletype
timefmt2 = '%Y%m%d%H%M'
fname = station + "_" + dmin.strftime(timefmt2) + \
"_" + dmax.strftime(timefmt2) + "." + outfiletype
outfile = op.join(outfiledir, fname)
else:
outfile = forced_outfile_name
# time locators and time formats
hours = HourLocator(np.linspace(0, 24, 5))
hoursformat = DateFormatter("%H")
hoursm = HourLocator(np.arange(0, 24, 1), interval=3)
days = DayLocator()
# daysformat = DateFormatter('%Y-%m-%d')
daysformat = DateFormatter('%d.%b')
# Plotting
plt.ioff() # turn off interactive plotting (make sure)
# determine width of figure (5 days = A4_Width)
dayrange = (mpldat_lim[1] - mpldat_lim[0])
width_per_day = A4_W / 5.
fig, (thax, dfax, ppax) = \
plt.subplots(3, 1, figsize=(width_per_day * dayrange, A4_H))
fig.suptitle(headl, fontsize=TFONTSIZE)
# temperature and rel. humidity
thax.plot_date(mpldate, data['tl'], fmt='-', c='r', lw=2)
thax.xaxis.set_major_locator(hours)
thax.xaxis.set_major_formatter(hoursformat)
thax.xaxis.set_minor_locator(hoursm)
thax.set_ylabel('Temp. [C]', color='r', fontsize=LFONTSIZE)
thax.set_xlim(mpldat_lim)
thax2 = thax.twinx()
thax2.plot_date(mpldate, data['rh'], fmt='-', c='g')
thax2.set_ylabel(u'rel. humid. [%]', color='g', fontsize=LFONTSIZE)
for tl in thax2.get_yticklabels():
tl.set_color('g')
thax2.set_ylim(rh_lim)
thax2.set_yticks(np.linspace(0, 100, 11))
thax2.set_xlim(mpldat_lim)
# upper x-scale with day-ticks and -labels
ax2 = thax.twiny()
ax2.xaxis.set_major_locator(days)
ax2.xaxis.set_major_formatter(daysformat)
    # dummy plot necessary !?!
ax2.plot(mpldate, data['tl'], c='r')
ax2.set_xlim(mpldat_lim)
thax.set_ylim(tl_lim) # Works only here. A miracle!
thax.grid()
# wind
dfax.plot_date(mpldate, data['ff'], fmt='-', c='b', lw=2)
dfax.plot_date(mpldate, data['fx'], fmt='-', c='r', lw=1)
dfax.minorticks_on()
dfax.xaxis.set_major_locator(hours)
dfax.xaxis.set_major_formatter(hoursformat)
dfax.xaxis.set_minor_locator(hoursm)
dfax.set_ylabel('Wind speed [m/s]', color='b', fontsize=LFONTSIZE)
dfax.set_ylim(ff_lim)
dfax.set_xlim(mpldat_lim)
dfax2 = dfax.twinx()
dfax2.plot_date(mpldate, data['dd'], fmt='.', c='g')
dfax2.set_ylabel('Direction [deg]', color='g', fontsize=LFONTSIZE)
for tl in dfax2.get_yticklabels():
tl.set_color('g')
dfax2.set_ylim(dd_lim)
dfax2.set_xlim(mpldat_lim)
dfax2.set_yticks(np.linspace(0, 360, 9))
dfax.grid()
# pressure
ppax.plot_date(mpldate, data['ps'], fmt='-', c='k', lw=2)
ppax.set_ylabel('Station pressure [hPa]', fontsize=LFONTSIZE)
ppax.minorticks_on()
ppax.xaxis.set_major_locator(hours)
ppax.xaxis.set_major_formatter(hoursformat)
ppax.xaxis.set_minor_locator(hoursm)
ppax.set_ylim(ps_lim)
ppax.set_xlim(mpldat_lim)
ppax.set_xlabel('Time [UTC]', fontsize=LFONTSIZE)
ppax.grid()
# output picture
if outfiletype == "screen":
print("No plotfile saved", outfile)
fig.show()
else:
print("Plot saved to ", outfile)
fig.savefig(outfile, dpi=dpi)
fig.clear()
plt.close(fig)
return outfile
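# Hedged usage sketch (station name, period and paths are placeholders):
# retrieve one station from the database (retrieve_aichfeld_db is defined
# further below) and plot the meteogram for a two day window.
#
#     data = retrieve_aichfeld_db(NOBO_DATABASE_FILE, 'JUD',
#                                 201505240000, 201505252350)
#     plot_raw_nobo('JUD', data,
#                   istartdattim=201505240000,
#                   ienddattim=201505252350,
#                   outfiledir='./plots')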
def plot_noborain(station, data,
istartdattim=190001010000,
ienddattim=210012311159,
outfiledir='./',
outfiletype='png',
forced_outfile_name=None,
dpi=75,
rr_lim=(0., +30.),
):
"""Plots a rain-meteogramm of a single nobo station.
Inut-params:
* station ... station name (string)
* istartdattim ... start date-time as integer yyyymoddhhmi
* ienddattim ... end date-time as integer yyyymoddhhmi
* outfiledir ... directory where outputfile is to be written
* outputfiletype ... 'png', 'pdf', 'jpg' or whatever is allowed by
matplotlib (defaults to png)
* foreced_outfile_name ... optional complete path of output image file
* dpi ... dots per inch (defaults to 75)
* Tuples for limiting y-axes:
* rr_lim ... (tmin, tmax) for temp. C
Output:
* Image-Outputfile either with name either of input parameter
'forced_outfile_name' or calculated in program
'outfiledir + station + startdate + enddate + outfiletype'
"""
# Min-max date regardless of available data!
dmin = yymoddhhmi2pydatetime(istartdattim)
dmax = yymoddhhmi2pydatetime(ienddattim)
# convert in matplot dates format
k = 0
pydattim = []
for k in range(len(data['dattim'])):
idt = data['dattim'][k]
pydattim.append(yymoddhhmi2pydatetime(idt))
pydattim = np.array(pydattim)
mpldate = matplotlib.dates.date2num(pydattim)
mpldat_lim = matplotlib.dates.date2num((dmin, dmax))
# construct headline
timefmt = '%Y-%m-%d %H:%M'
headl = station + "\n" + dmin.strftime(timefmt) + \
" - " + dmax.strftime(timefmt)
# define image outputfile name
if forced_outfile_name is None:
# calculated outfile =
# outfiledir + station + _rain_ + startdate + enddate + outfiletype
timefmt2 = '%Y%m%d%H%M'
fname = station + "_rain_" + dmin.strftime(timefmt2) + \
"_" + dmax.strftime(timefmt2) + "." + outfiletype
outfile = op.join(outfiledir, fname)
else:
outfile = forced_outfile_name
# time locators and time formats
hours = HourLocator(np.linspace(0, 24, 5))
hoursformat = DateFormatter("%H")
hoursm = HourLocator(np.arange(0, 24, 1), interval=3)
days = DayLocator()
# daysformat = DateFormatter('%Y-%m-%d')
daysformat = DateFormatter('%d.%b')
# Plotting
plt.ioff() # turn off interactive plotting (make sure)
# determine width of figure (5 days = A4_Width)
dayrange = (mpldat_lim[1] - mpldat_lim[0])
width_per_day = A4_W / 5.
fig, rrax = \
plt.subplots(1, 1, figsize=(width_per_day * dayrange, A4_H))
fig.suptitle(headl, fontsize=TFONTSIZE)
# r-sum (rr)
rrax.plot_date(mpldate, data['rr'], fmt='o', c='g', lw=1)
rrax.xaxis.set_major_locator(hours)
rrax.xaxis.set_major_formatter(hoursformat)
rrax.xaxis.set_minor_locator(hoursm)
rrax.set_ylabel('rr [mm]', color='g', fontsize=LFONTSIZE)
rrax.set_xlim(mpldat_lim)
# upper x-scale with day-ticks and -labels
ax2 = rrax.twiny()
ax2.xaxis.set_major_locator(days)
ax2.xaxis.set_major_formatter(daysformat)
    # dummy plot necessary !?!
    ax2.plot(mpldate, data['rr'], 'o', c='g')
ax2.set_xlim(mpldat_lim)
rrax.set_ylim(rr_lim) # Works only here. A miracle!
rrax.yaxis.set_minor_locator(AutoMinorLocator(10))
rrax.grid()
# output picture
if outfiletype == "screen":
print("No plotfile saved", outfile)
fig.show()
else:
print("Plot saved to ", outfile)
fig.savefig(outfile, dpi=dpi)
fig.clear()
plt.close(fig)
return outfile
def plot_single_param(stationlist, dbfile=NOBO_DATABASE_FILE,
istartdattim=190001010000,
ienddattim=210012311159,
parameter='ps',
param_lim=(920., 960.),
outfiledir='./',
outfiletype='png',
forced_outfile_name=None,
dpi=75
):
"""Plots a graph of a single nobo station
with a single parameter.
Inut-params:
* stationlist ... list with station names (string), max 16!
* istartdattim ... start date-time as integer yyyymoddhhmi
* ienddattim ... end date-time as integer yyyymoddhhmi
* parameter ... 'tl', 'rh', 'ps', 'dd', 'ff', 'fx'to plot
* param_lim ... (param_min, param_max) for parameter
* outfiledir ... directory where outputfile is to be written
* outputfiletype ... 'png', 'pdf', 'jpg' or whatever is allowed by
matplotlib (defaults to png)
* foreced_outfile_name ... optional complete path of output image file
* dpi ... dots per inch (defaults to 75)
Output:
* Image-Outputfile with name either of input parameter
'forced_outfile_name' or calculated in program
'outfiledir + station + startdate + enddate + outfiletype'
"""
PLOTSTYLES = ['k-', 'b-', 'r-', 'g-',
'k--', 'b--', 'r--', 'g--',
'k-.', 'b-.', 'r-.', 'g-.',
'k:', 'b:', 'r:', 'g:']
# Min-max date regardless of available data!
dmin = yymoddhhmi2pydatetime(istartdattim)
dmax = yymoddhhmi2pydatetime(ienddattim)
# convert in matplot dates format
mpldat_lim = matplotlib.dates.date2num((dmin, dmax))
# construct headline
timefmt = '%Y-%m-%d %H:%M'
headl = parameter + ": " + dmin.strftime(timefmt) + \
" - " + dmax.strftime(timefmt)
# define image outputfile name
if forced_outfile_name is None:
# calculated outfile =
# outfiledir + station + startdate + enddate + outfiletype
timefmt2 = '%Y%m%d%H%M'
fname = parameter + "_" + dmin.strftime(timefmt2) + \
"_" + dmax.strftime(timefmt2) + "." + outfiletype
outfile = op.join(outfiledir, fname)
else:
outfile = forced_outfile_name
# time locators and time formats
hours = HourLocator(np.linspace(0, 24, 3))
hoursformat = DateFormatter("%H")
hoursm = HourLocator(np.arange(0, 24, 1), interval=3)
days = DayLocator()
# daysformat = DateFormatter('%Y-%m-%d')
daysformat = DateFormatter('%d.%b')
# Plotting
plt.ioff() # turn off interactive plotting (make sure)
# determine width of figure (5 days = A4_Width)
dayrange = (mpldat_lim[1] - mpldat_lim[0])
width_per_day = A4_W / 5.
fig, ax = \
plt.subplots(1, 1, figsize=(width_per_day * dayrange, A4_H / 2.))
fig.suptitle(headl, fontsize=TFONTSIZE)
kk = 0
for stat in stationlist:
print('process station: ', stat)
data = retrieve_aichfeld_db(dbfile, stat,
istartdattim, ienddattim)
# print(istartdattim, ienddattim, data)
        # determine time-data-array
pydattim = []
for k in range(len(data['dattim'])):
idt = data['dattim'][k]
pydattim.append(yymoddhhmi2pydatetime(idt))
pydattim = np.array(pydattim)
mpldate = matplotlib.dates.date2num(pydattim)
plst = PLOTSTYLES[kk]
ax.plot_date(mpldate, data[parameter], plst, label=stat)
kk = kk + 1
        # cycle index through plotstyles (prevents an index error)
        if kk >= len(PLOTSTYLES):
            kk = kk - len(PLOTSTYLES)
ax.minorticks_on()
ax.xaxis.set_major_locator(hours)
ax.xaxis.set_major_formatter(hoursformat)
ax.xaxis.set_minor_locator(hoursm)
ax.set_ylabel(parameter, fontsize=LFONTSIZE)
ax.set_xlim(mpldat_lim)
ax.set_ylim(param_lim)
# upper x-scale with day-ticks and -labels
ax2 = ax.twiny()
ax2.xaxis.set_major_locator(days)
ax2.xaxis.set_major_formatter(daysformat)
    # dummy plot necessary ???
# ax2.plot(mpldate, data['tl'], c='r')
ax2.set_xlim(mpldat_lim)
# Legend at 'best' location
leg = ax.legend(loc='best')
# Alpha-transparency for legend
leg.get_frame().set_alpha(0.7)
ax.grid()
# output picture
if outfiletype == "screen":
print("No plotfile saved", outfile)
fig.show()
else:
print("\nPlot saved to ", outfile)
fig.savefig(outfile, dpi=dpi)
fig.clear()
plt.close(fig)
return outfile
def create_aichfeld_db(dbfile):
""" Creates an aichfeld-project general data file in 'dbfile'.
Returns a message string with success info.
"""
conn = sqlite3.connect(dbfile)
c = conn.cursor()
try: # Create table
createcommand = \
"CREATE TABLE noboraw (" + \
"'stat' NOT NULL, " + \
"'dattim' NOT NULL, " + \
"'dattiml', " + \
"'timez', " + \
"'tl', " + \
"'rh', " + \
"'ps', " + \
"'dd', " + \
"'ff', " + \
"'fx', " + \
"PRIMARY KEY ('stat', 'dattim'))"
c.execute(createcommand)
conn.commit()
# print("Created db in file ", dbfile,
# " and successfully created table 'noboraw'")
print("Creation command: ")
print(createcommand)
ret = "Table 'noboraw' in {:} created".format(dbfile)
except sqlite3.Error as e:
print("DB ", dbfile, " with table 'noboraw' exists allready")
ret = e.message
conn.close()
return ret
def create_nobo_table(dbfile):
    """ Creates an aichfeld-project general data file in 'dbfile'.
    Returns a message string with success info.
    (A more reasonably named alias for 'create_aichfeld_db'.)
    """
return create_aichfeld_db(dbfile)
def convert_datadict_value(value):
    """Converts string values that stand for numbers to floats; other
    values are returned unchanged."""
# print('In noboinout.convert_datadict_value: ', value)
    try:
        converted = float(value)
    except (TypeError, ValueError):
        converted = value
return converted
def insert_ignore_aichfeld_db(dbfile, data):
    """Insert new records into the db; in case of a conflict the existing
    record is kept and the new one is ignored.
    * dbfile ... path to nobo database file
    * data ... data of one station in a dictionary defined by read_nobo_file.
    * returns True, otherwise it crashes.
    """
conn = sqlite3.connect(dbfile)
c = conn.cursor()
stat = data['stat']
dattim = data['dattim']
dattiml = data['dattiml']
timez = data['timez']
tl = data['tl']
rh = data['rh']
ps = data['ps']
dd = data['dd']
ff = data['ff']
fx = data['fx']
for k in range(len(dattim)):
insertl = (stat[k],
int(dattim[k]),
int(dattiml[k]),
int(timez[k]),
convert_datadict_value(tl[k]),
convert_datadict_value(rh[k]),
convert_datadict_value(ps[k]),
convert_datadict_value(dd[k]),
convert_datadict_value(ff[k]),
convert_datadict_value(fx[k])
)
insertcommand = "INSERT OR IGNORE INTO noboraw VALUES " + \
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
c.execute(insertcommand, insertl)
conn.commit()
conn.close()
return True
def insert_replace_aichfeld_db(dbfile, data):
    """Insert new records into the db; in case of a conflict the old
    record is replaced by the new one.
    * dbfile ... path to nobo database file
    * data ... data of one station in a dictionary defined by read_nobo_file.
    * returns True, otherwise it crashes.
    """
conn = sqlite3.connect(dbfile)
c = conn.cursor()
stat = data['stat']
dattim = data['dattim']
dattiml = data['dattiml']
timez = data['timez']
tl = data['tl']
rh = data['rh']
ps = data['ps']
dd = data['dd']
ff = data['ff']
fx = data['fx']
for k in range(len(dattim)):
# print('noboinout.insert_replace_aichfeld_db: dattim', dattim[k])
insertl = (stat[k],
int(dattim[k]),
int(dattiml[k]),
int(timez[k]),
convert_datadict_value(tl[k]),
convert_datadict_value(rh[k]),
convert_datadict_value(ps[k]),
convert_datadict_value(dd[k]),
convert_datadict_value(ff[k]),
convert_datadict_value(fx[k])
)
insertcommand = "INSERT OR REPLACE INTO noboraw VALUES " + \
"(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
c.execute(insertcommand, insertl)
conn.commit()
conn.close()
return True
def retrieve_single_datum(dbfile, station, itime):
    """Retrieve a single data set for one station and time from the database.
    If no record is found, None is returned."""
conn = sqlite3.connect(dbfile)
query = "SELECT * FROM noboraw " + \
"WHERE stat = ? and dattim = ?"
tvar = (station, itime)
datqu = conn.execute(query, tvar)
sdata = {}
for d in datqu:
sdata['stat'] = d[0]
sdata['dattim'] = d[1]
sdata['dattiml'] = d[2]
sdata['timez'] = d[3]
sdata['tl'] = d[4]
sdata['rh'] = d[5]
sdata['ps'] = d[6]
sdata['dd'] = d[7]
sdata['ff'] = d[8]
sdata['fx'] = d[9]
conn.commit()
conn.close()
if not sdata:
# empty dictionary evaluates to false in python
# better set it to None!
sdata = None
return sdata
def retrieve_aichfeld_db(dbfile, station, istartdatetime, ienddatetime):
    """Retrieves nobo station data from the sqlite database table noboraw.
    Input-params:
    * dbfile ... sqlite-database file
    * istartdatetime ... start time as integer (yyyymoddhhmi)
    * ienddatetime ... end time as integer (yyyymoddhhmi)
    Returns:
    * data ... dictionary with all db-fields data['stat'], ...
      'stat', 'dattim', 'dattiml', 'timez', etc ..
    """
conn = sqlite3.connect(dbfile)
query = "SELECT * FROM noboraw " + \
"WHERE stat = ? and dattim >= ? and dattim <= ?"
tvar = (station, istartdatetime, ienddatetime)
c = conn.cursor()
c.execute(query, tvar)
datqu = c.fetchall()
stat = []
dattim = []
dattiml = []
timez = []
tl = []
rh = []
ps = []
dd = []
ff = []
fx = []
for d in datqu:
stat.append(d[0])
dattim.append(d[1])
dattiml.append(d[2])
timez.append(d[3])
tl.append(d[4])
rh.append(d[5])
ps.append(d[6])
dd.append(d[7])
ff.append(d[8])
fx.append(d[9])
data = {'stat': stat, 'dattim': dattim, 'dattiml': dattiml,
'timez': timez, 'tl': tl, 'rh': rh, 'ps': ps,
'dd': dd, 'ff': ff, 'fx': fx}
conn.commit()
conn.close()
return data
def retrieve_aichfeld_raindb(dbfile, station, istartdatetime, ienddatetime):
    """Retrieves nobo station rain data from the sqlite database table noborain.
    Input-params:
    * dbfile ... sqlite-database file
    * istartdatetime ... start time as integer (yyyymoddhhmi)
    * ienddatetime ... end time as integer (yyyymoddhhmi)
    Returns:
    * data ... dictionary with all db-fields data['stat'], ...
      'stat', 'dattim', 'dattiml', 'timez', 'rr', 'obs'
    """
conn = sqlite3.connect(dbfile)
query = "SELECT * FROM noborain " + \
"WHERE stat = ? and dattim >= ? and dattim <= ?"
tvar = (station, istartdatetime, ienddatetime)
c = conn.cursor()
c.execute(query, tvar)
datqu = c.fetchall()
stat = []
dattim = []
dattiml = []
timez = []
rr = []
obs = []
# print('retreive_aichfeld_raindb')
for d in datqu:
stat.append(d[0])
dattim.append(d[1])
dattiml.append(d[2])
timez.append(d[3])
rr.append(d[4])
obs.append(d[5])
data = {'stat': stat, 'dattim': dattim, 'dattiml': dattiml,
'timez': timez, 'rr': rr, 'obs': obs}
# print(data)
conn.commit()
conn.close()
return data
def all_nobo_raw_files(nobo_raw_dir='./', pattern='*.txt'):
    """Returns a list of raw nobo files for processing.
    * nobo_raw_dir ... directory of nobo raw files
    * pattern ... file pattern for globbing (e.g. 'nobo*.csv' ...)
    * returns a list of matching file names"""
    if not op.exists(nobo_raw_dir):
        print("Warning: ", nobo_raw_dir, " does not exist.", file=sys.stderr)
        print("Continuing anyway.", file=sys.stderr)
    infilnam = op.join(nobo_raw_dir, pattern)
infiles = glob.glob(infilnam)
return infiles
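# Hedged usage sketch (directory and pattern are placeholders): collect all
# raw files of a download directory and feed them into the database.
#
#     for fn in all_nobo_raw_files('./rawdata', 'nobo*.txt'):
#         _, _, data = read_nobo_file(fn)
#         insert_replace_aichfeld_db(NOBO_DATABASE_FILE, data)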
def get_nobostation_info(dbfile, station):
"""Get station info from db-table nobostations"""
# define query strings and vars
query = "SELECT * FROM nobostations WHERE stat = ? "
tvar = (station,)
# db-stuff
conn = sqlite3.connect(dbfile)
conn.row_factory = sqlite3.Row
c = conn.cursor()
c.execute(query, tvar)
res = c.fetchone()
return (res['stat'], res['longname'],
res['lon'], res['lat'], res['elev'], res['active'])
def get_all_nobostation_names(dbfile, onlyactive=False):
    """Returns a list of all stations. Set onlyactive to True
    to return only the active stations."""
# define query strings and vars
if onlyactive:
query = "SELECT stat FROM nobostations WHERE active='true'"
else:
query = "SELECT stat FROM nobostations"
# db-stuff
conn = sqlite3.connect(dbfile)
c = conn.cursor()
c.execute(query)
res = c.fetchall()
stationlist = []
for r in res:
stationlist.append(r[0])
return stationlist
def get_all_nobostation_infos(dbfile, onlyactive=False):
    """Returns a dictionary of dictionaries in the form
    statinfo{'stat'}{'longname', 'lon', 'lat', 'elev', 'active'}"""
if onlyactive:
query = "SELECT * FROM nobostations WHERE active='true'"
else:
query = "SELECT * FROM nobostations"
pass
conn = sqlite3.connect(dbfile)
c = conn.cursor()
c.execute(query)
res = c.fetchall()
statinfo = {}
for r in res:
stat = r[0]
dd = {}
dd['longname'] = r[1]
dd['lon'] = r[2]
dd['lat'] = r[3]
dd['elev'] = r[4]
dd['active'] = r[5]
statinfo[stat] = dd
return statinfo
def make_mercator_map_and_pickle(minlon, maxlon, minlat, maxlat,
                                 picklefile, resolution='f'):
    """Creates a pickle of a Basemap object"""
print("Set up map. Wait quite a while ...\n")
m = Basemap(llcrnrlon=minlon,
llcrnrlat=minlat,
urcrnrlon=maxlon,
urcrnrlat=maxlat,
resolution=resolution,
projection='merc',
lat_ts=(minlat + maxlat) / 2.
)
print("Pickle map to ", picklefile, " for later use.")
return pickle.dump(m, open(picklefile, 'wb'), -1)
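# Hedged usage sketch (coordinates and file names are placeholders): pickle a
# map once, then let windplot (defined further below) reuse it for a series
# of wind charts.
#
#     make_mercator_map_and_pickle(14.5, 15.0, 47.1, 47.3, 'aichfeld.pickle')
#     windplot(NOBO_DATABASE_FILE, 'aichfeld.pickle', './frames',
#              datetime.datetime(2015, 5, 24, 0, 0),
#              datetime.datetime(2015, 5, 24, 6, 0),
#              delta=10, maptitle='Aichfeld wind')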
def plot_wind(bmap, lon, lat,
u=0, v=0,
barbcolor='k',
flagcolor='k',
length=7,
latlon=False,
label='wind',
fill_empty=False,
emptybarb=0.05,
nildata=False,
station=''):
"""Plot a single wind barb with u,v in map 'bmap' at lat, lon"""
x, y = bmap(float(lon), float(lat))
sizes = dict(emptybarb=emptybarb)
speed = np.sqrt(u * u + v * v)
if nildata:
bmap.plot(x, y, 'r+')
elif (speed > LOW_WIND) or (speed < NO_WIND):
bmap.barbs(
x, y, u, v, lw=2,
barbcolor=barbcolor, flagcolor=flagcolor, length=length,
latlon=latlon, label=label, fill_empty=fill_empty,
sizes=sizes)
else:
        # Speed less than LOW_WIND and greater than NO_WIND ->
        # standard length of quiver arrows
u = u / speed
v = v / speed
bmap.quiver(x, y, u, v,
lw=0.5,
pivot='tip',
units='dots',
width=1,
scale=0.03
)
plt.text(x, y, station, color='0.5', fontsize='small')
return
def moviemaker(picdir="./", moviefile='movie'):
    """Makes movies of png-pictures with mencoder
    * picdir ... directory of the pictures; all "*.png" files therein are
      used
    * moviefile ... pathname of the movie-file to be expanded with ".avi"
    """
# Shell command string
picpath = op.join(picdir, '*.png')
comstr = ("mencoder 'mf://" + picpath +
"' -mf type=png:fps=30 -ovc lavc -lavcopts " +
"vcodec=mpeg4:mbd=2:vbitrate=14000 " +
"-vf scale=1024:768 -oac copy -o " + moviefile + ".avi")
# Execute command
return os.system(comstr)
def deletefiles(globpattern):
    """Deletes files without confirmation"""
# generate a list of image files
files = glob.iglob(globpattern)
# Final disk cleanup
for fname in files:
try:
os.remove(fname)
        except OSError:
print("Cannot remove ", fname, file=sys.stderr)
return
def pydatetime_array(datetime1, datetime2, d_days=0,
                     d_hours=0, d_minutes=0., d_seconds=0.):
    """Constructs an array/a list of python datetime objects with an
    increment of d_days, d_hours, d_minutes and d_seconds (integers;
    d_minutes and d_seconds may be floats)"""
dati_arr = []
dati = datetime1
datidelta = datetime.timedelta(days=d_days, hours=d_hours,
minutes=d_minutes, seconds=d_seconds)
if datidelta > datetime.timedelta(0):
while dati <= datetime2:
dati_arr.append(dati)
dati = dati + datidelta
return dati_arr
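# Hedged example: a half-hourly sequence of python datetimes between two
# instants (values are illustrative only).
#
#     pydatetime_array(datetime.datetime(2015, 5, 24, 0, 0),
#                      datetime.datetime(2015, 5, 24, 1, 0),
#                      d_minutes=30)
#     # -> [datetime(2015, 5, 24, 0, 0), datetime(2015, 5, 24, 0, 30),
#     #     datetime(2015, 5, 24, 1, 0)]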
def ddff2uv(dd, ff):
    """Converts wind given as met. direction dd and speed ff to the x- and
    y-components u, v"""
rd = np.deg2rad(270. - dd)
u = ff * np.cos(rd)
v = ff * np.sin(rd)
return u, v
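# Hedged worked example: in meteorological convention dd is the direction the
# wind comes FROM, so a westerly wind (dd=270) gives a positive u-component
# and a northerly wind (dd=360) a negative v-component.
#
#     ddff2uv(270., 10.)   # -> (u, v) approx. (10.0, 0.0)
#     ddff2uv(360., 10.)   # -> (u, v) approx. (0.0, -10.0)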
def makepicklefilename(pickledir, coordinateedges):
    """Constructs a pickle file name depending on the coordinate edges"""
lonmin, lonmax, latmin, latmax = coordinateedges
slonex = "{:8.5f}_{:8.5f}".format(lonmin, lonmax)
slatex = "{:8.5f}_{:8.5f}".format(latmin, latmax)
fn = 'windmap_' + slonex + '_' + slatex + '.pickle'
return op.join(pickledir, fn)
def prepare_map_with_dm(dempath, basem, regridsize=50.):
    """Plotting routine for wind chart:
    Prepare a windmap with high-res DM-data"""
# Report gdal-Errors by Exceptions!
gdal.UseExceptions()
gdal.AllRegister() # necessary????
# open DEM-Data
gd = gdal.Open(dempath)
# Request DEM-characteristics
# cols = gd.RasterXSize
# rows = gd.RasterYSize
# bands = gd.RasterCount
geotransform = gd.GetGeoTransform()
# originX = geotransform[0]
# originY = geotransform[3]
pixelWidth = geotransform[1]
pixelHeight = geotransform[5]
# Output DEM-characteristics
# print("No. columns : ", cols)
# print("No. rows : ", rows)
# print("No. raster-bands : ", bands)
# print(" Origin X : ", originX)
# print(" Origin Y : ", originY)
# print("Pixel Width (deg): ", pixelWidth)
# print("Pixel Height (deg): ", pixelHeight)
# print(50 * '-')
# Prepare the map
# print("Read as Array")
array = gd.ReadAsArray()
# print("As Array Read")
    coords = geotransform  # for historical reasons
nlons = array.shape[1]
nlats = array.shape[0]
lons = coords[0] + pixelWidth * np.arange(nlons)
lats = coords[3] + pixelHeight * np.arange(nlats)[::-1] # reverse lats
# create masked array, reversing data in latitude direction (so that
# data is oriented in increasing latitude, as transform_scalar
# requires).
# print("Reverse Array")
topoin = ma.masked_values(array[::-1, :], -999.)
# print("Array reversed")
# transform DEM data to a 'regridsize' km native projection grid
# print("Transform to grid, may take a while!")
# print("In case of a program freeze due to memory problems")
# print("increase the value of the config-file variable 'reggridsize'!")
nx = int((basem.xmax - basem.xmin) / regridsize) + 1
ny = int((basem.ymax - basem.ymin) / regridsize) + 1
tp1 = basem.transform_scalar(topoin, lons, lats, nx, ny, masked=True)
# print("To grid Transformed")
# Make ocean height 0
# tp0 = np.where(tp1 > 9999, 0, tp1)
return tp1
def make_mercator_map_and_pickle_dm(minlon, maxlon, minlat, maxlat,
picklefile,
dempath=None,
resolution='c'):
"""Creates a picklefile with a basemap object and a topo file
with DM-data"""
print("Set up map. Wait a while ...")
m = Basemap(llcrnrlon=minlon,
llcrnrlat=minlat,
urcrnrlon=maxlon,
urcrnrlat=maxlat,
resolution=resolution,
projection='merc',
lat_ts=(minlat + maxlat) / 2.
)
print("Set up topography with DM-Data. Wait a while ...")
topo = prepare_map_with_dm(dempath, m)
print("Pickle map and dm to ", picklefile, " for later use.")
return pickle.dump((m, topo), open(picklefile, 'wb'), -1)
def windplot_dm(dbfile, picklefilename, outdirname,
startdatetime, enddatetime,
delta=10,
maptitle='',
convert=MS2KT,
dempath=None):
"""Main plotting routine for wind chart"""
statlist = get_all_nobostation_names(dbfile)
statinfos = get_all_nobostation_infos(dbfile)
start = startdatetime
end = enddatetime
# print('Start: ', start, ' End: ', end, ' Timedelta(min): ', delta)
timearray = pydatetime_array(start, end, d_minutes=delta)
fig = plt.figure(figsize=(10.24, 7.68)) # l,w in inches
for dt in timearray:
itime = pydatetime2yymoddhhmi(dt)
fig.clf()
m, topo = pickle.load(open(picklefilename, 'rb'))
        m.drawmapboundary(fill_color='white')
m.drawparallels(np.arange(46., 48., 0.1))
m.drawmeridians(np.arange(12., 16., 0.1))
# m.drawrivers(color='b')
m.drawcountries(color='r')
# topo = prepare_map_with_dm(dempath, m)
m.imshow(topo, cmap=plt.cm.gray, alpha=0.3) # @UndefinedVariable
for stat in statlist:
lon = statinfos[stat]['lon']
lat = statinfos[stat]['lat']
dat = retrieve_single_datum(dbfile, stat, itime)
if ((dat is None) or
(dat['ff'] is None) or
(dat['dd'] is None)): # non existing or missing data
nil = True
u = 0
v = 0
else: # existing data
nil = False
ff = convert * dat['ff']
dd = dat['dd']
u, v = ddff2uv(dd, ff)
plot_wind(m, lon, lat, u, v, nildata=nil, station=stat)
dtsr = dt.strftime("%Y-%m-%d %H:%M")
plt.title(maptitle + " - " + dtsr)
plt.figtext(PLWX, PLWY, PLOTWITH, fontsize=PLWFONTSIZE, color=PLWCOLOR)
# plt.figtext(2, 2, PLOTWITH, fontsize=PLWFONTSIZE, color='0.60')
fnm = op.join(outdirname, str(itime) + '.png')
print(fnm)
plt.savefig(fnm)
return
def windplot(dbfile, picklefilename, outdirname,
startdatetime, enddatetime,
delta=10,
maptitle='',
convert=MS2KT):
"""Main plotting routine for wind chart"""
statlist = get_all_nobostation_names(dbfile)
statinfos = get_all_nobostation_infos(dbfile)
start = startdatetime
end = enddatetime
# print('Start: ', start, ' End: ', end, ' Timedelta(min): ', delta)
timearray = pydatetime_array(start, end, d_minutes=delta)
fig = plt.figure(figsize=(10.24, 7.68)) # l,w in inches
for dt in timearray:
itime = pydatetime2yymoddhhmi(dt)
fig.clf()
m = pickle.load(open(picklefilename, 'rb'))
        m.drawmapboundary(fill_color='white')
m.drawparallels(np.arange(46., 48., 0.1))
m.drawmeridians(np.arange(12., 16., 0.1))
m.drawrivers(color='b')
m.drawcountries(color='r')
for stat in statlist:
lon = statinfos[stat]['lon']
lat = statinfos[stat]['lat']
dat = retrieve_single_datum(dbfile, stat, itime)
if ((dat is None) or
(dat['ff'] is None) or
(dat['dd'] is None)): # non existing or missing data
nil = True
u = 0
v = 0
else: # existing data
nil = False
ff = convert * dat['ff']
dd = dat['dd']
u, v = ddff2uv(dd, ff)
plot_wind(m, lon, lat, u, v, nildata=nil, station=stat)
dtsr = dt.strftime("%Y-%m-%d %H:%M")
plt.title(maptitle + " - " + dtsr)
# plt.figtext(0.60, 0.05, PLOTWITH, fontsize=PLWFONTSIZE, color='0.60')
plt.figtext(PLWX, PLWY, PLOTWITH, fontsize=PLWFONTSIZE, color=PLWCOLOR)
fnm = op.join(outdirname, str(itime) + '.png')
print(fnm)
plt.savefig(fnm)
return
if __name__ == "__main__":
version('Module')