Commit cb9f148d authored by Joakim Kjellsson

Initial commit

# esm-scripts
#!/bin/bash
#
# Diagnostics of OpenIFS run
#
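# Usage (a sketch, inferred from the positional-argument assignments below):
#   ./diag_openifs.sh <obs_file> <obs_varname> <model_file> <model_varname> \
#                     <model_offset> <model_scale> <start_date> <end_date>
# Dates are given as e.g. 1997-01-01T00:00:00; the model field is converted as
# (field + model_offset) * model_scale before it is compared with the observations.
#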
# Load modules
module load cdo/1.9.2
module load nco
obs_file=$1
obs_name=$2
model_file=$3
model_name=$4
model_off=$5
model_frac=$6
date1=$7
date2=$8
echo "dates " $date1 $date2
tmp_dir="tmp"
mkdir -p ${tmp_dir}
names=(${obs_name} ${model_name})
files=(${obs_file} ${model_file})
interp_files=()
newname=${model_name}
cdo="cdo"
for (( ii=0 ; ii<=1 ; ii++ ))
do
datafile=${files[$ii]}
varname=${names[$ii]}
echo " Make climatology and regrid ${varname} in ${datafile}"
interp_method="remapbil"
target_grid="r180x90"
# Get filename without extension
filename=$(basename -- "$datafile")
extension="${filename##*.}"
filename="${filename%.*}"
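# (illustration: for gpcp_v01r03_monthly_1996-2017.nc4 this yields
#  filename=gpcp_v01r03_monthly_1996-2017 and extension=nc4)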
# Filenames
diff_file=${tmp_dir}/${filename}_${varname}_diff.nc
clim_file=${tmp_dir}/${filename}_${varname}_clim.nc
interp_file=${tmp_dir}/${filename}_${varname}_clim_${target_grid}.nc
# Make climatology
MAKE_CLIM=1
if [ ${MAKE_CLIM} -gt 0 ]
then
echo " Make climatology to ${clim_file} "
${cdo} yseasmean -seldate,${date1},${date2} -selname,${varname} ${datafile} ${clim_file}
else
echo " Not making climatology "
${cdo} seldate,${date1},${date2} -selname,${varname} ${datafile} ${clim_file}
fi
# Interpolate data to coarse grid
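# (the obs variable is also renamed with chname to the model variable name,
#  so both regridded files end up with the same variable name)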
echo " Interpolate data to ${interp_file}"
echo "${cdo} -sellonlatbox,-180,180,-90,90 -${interp_method},${target_grid} ${clim_file} ${interp_file}"
${cdo} -sellonlatbox,-180,180,-90,90 -${interp_method},${target_grid} -chname,${obs_name},${newname} ${clim_file} ${interp_file}
if [ $ii -gt 0 ]
then
# For model data, convert units
conv_file=${interp_file}_conv
${cdo} -O addc,${model_off} ${interp_file} ${conv_file}
${cdo} -O mulc,${model_frac} ${conv_file} ${interp_file}
rm -rf ${conv_file}
fi
# Add final file to list of files to analyse
interp_files+=(${interp_file})
done
echo ${interp_files[*]}
COMPARE=1
if [ $COMPARE -gt 0 ]
then
# Compare files
echo " Compare ${interp_files[1]} and ${interp_files[0]} "
${cdo} sub ${interp_files[1]} ${interp_files[0]} ${diff_file}
fi
#!/bin/bash
module load anaconda2
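# Switches: set to 1 to run the corresponding diagnostic below, 0 to skip it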
PRECIP=0
UVEL=1
if [ $PRECIP -gt 0 ]
then
DATE1=1997-01-01T00:00:00
DATE2=2010-01-01T00:00:00
PRECIP_OBS=/gfs1/work/shkjocke/data/OBS/gpcp_v01r03_monthly_1996-2017.nc4
PRECIP_OBS_NAME=precip
PRECIP_MODEL=/gfs1/work/shkjocke/data/OIFS/T159L91/g001/NETCDF/g001_surface_diff_1m_1982-2009.nc
PRECIP_MODEL_NAME=TP
./diag_openifs.sh ${PRECIP_OBS} ${PRECIP_OBS_NAME} ${PRECIP_MODEL} ${PRECIP_MODEL_NAME} 0 4000 ${DATE1} ${DATE2}
python plot_diags.py --file=tmp/g001_surface_diff_1m_1982-2009_TP_diff.nc --vars=TP --diff
python plot_diags.py --file=tmp/g001_surface_diff_1m_1982-2009_TP_clim_r180x90.nc --vars=TP --file2=tmp/gpcp_v01r03_monthly_1996-2017_precip_clim_r180x90.nc
fi
if [ $UVEL -gt 0 ]
then
DATE1=1982-01-01T00:00:00
DATE2=2010-01-01T00:00:00
UVEL_OBS=/gfs1/work/shkjocke/data/OBS/ERAinterim/uas.reana.eraint.1979-2017.r144x73.nc
UVEL_OBS_NAME=uas
UVEL_MODEL=/gfs1/work/shkjocke/data/OIFS/T159L91/g001/NETCDF/g001_surface_1m_1982-2009.nc
UVEL_MODEL_NAME=U10M
./diag_openifs.sh ${UVEL_OBS} ${UVEL_OBS_NAME} ${UVEL_MODEL} ${UVEL_MODEL_NAME} 0 1 ${DATE1} ${DATE2}
python plot_diags.py --file=tmp/g001_surface_1m_1982-2009_U10M_diff.nc --vars=${UVEL_MODEL_NAME} --diff
fi
#!/bin/bash
#
# ifs2nc.sh
#
# Purpose
# -------
# Convert OpenIFS output to netCDF files
#
# Methods
# -------
# * Use CDO to split the ICMSH and ICMGG files
# to one file per vertical coordinate, e.g. hybrid, surface etc.
# As surface variables are always stored as GRIB1 and hybrid variables
# are always stored as GRIB2, this will not cause problems for CDO.
# It may become necessary to also split by GRIB edition at some point
# * Use CDO to convert from reduced Gaussian or spectral to regular Gaussian grid
# * Use CDO to convert the grib file to netCDF4/HDF5.
# Level of compression set by CDO_ZIP variable.
#
# History
# -------
# 2018-07-27: J. Kjellsson. First version
#
#
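# Usage (a sketch, inferred from the positional-argument assignments below):
#   ./ifs2nc.sh <first_step> <last_step> <step> <processed_dir> <oifs_output_dir> <expid>
#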
# Here we need CDO and grib
module load cdo
GRIB_API_DIR=/sw/rhel6-x64/grib_api/grib_api-1.15.0-intel14/
grib_copy=${GRIB_API_DIR}/bin/grib_copy
OIFS_OUTPUT_DIR=$5
OIFS_TMP_DIR=./TMP-IFS2NC/
OIFS_PROCESSED_DIR=$4
OIFS_EXPID=$6
OIFS_FIRSTSTEP=$1
OIFS_LASTSTEP=$2
OIFS_STEP=$3
mkdir -vp $OIFS_TMP_DIR
mkdir -vp $OIFS_PROCESSED_DIR
# Level of zipping (1-9). 0 means no zipping.
# This can reduce the data size by up to 90%.
# It adds a little bit to the processing time.
CDO_ZIP=1
echo " Process data from ${OIFS_FIRSTSTEP} to ${OIFS_LASTSTEP} "
for (( step=${OIFS_FIRSTSTEP} ; step<=${OIFS_LASTSTEP} ; step=step+${OIFS_STEP} ))
do
timestamp=$( printf "%010d" $step )
# We split the data into one file per vertical grid
# For writing in netCDF files, I think it makes no sense
# to put data on different grids into the same file.
#
# Each netCDF file will have either surface, hybrid or isobaricInhPa as vert. coord
# Other types could be depthBelowLandLayer
for typeOfLevel in surface isobaricInhPa hybrid
do
FILES_ARRAY=()
FILES_STRING=" "
for GRID in SH GG
do
ORIGINAL_FILE=ICM${GRID}${OIFS_EXPID}+${timestamp}
echo " Get data to ${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb "
$grib_copy -w typeOfLevel=$typeOfLevel ${OIFS_OUTPUT_DIR}/${ORIGINAL_FILE} ${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb
LEVEL_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb
REGULAR_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}_R.grb
NETCDF_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}_R.nc
if [ -e ${LEVEL_FILE} ]; then
if [ $GRID == "GG" ]; then
echo " Convert ${LEVEL_FILE} to ${REGULAR_FILE} (reduced Gaussian to regular Gaussian)"
cdo -O setgridtype,regular ${LEVEL_FILE} ${REGULAR_FILE}
elif [ $GRID == "SH" ]; then
echo " Convert ${LEVEL_FILE} to ${REGULAR_FILE} (spectral harmonics to regular Gaussian)"
cdo -O sp2gpl ${LEVEL_FILE} ${REGULAR_FILE}
fi
rm -f ${LEVEL_FILE}
# Using grib_to_netcdf to convert data has the advantage of retaining variable names
# but does not get the correct longitudes and latitudes. It also does not put a,b coeffs
# into files with hybrid coordinates.
#OPTS="-u time -k 3 -D NC_FLOAT"
#grib_to_netcdf ${OPTS} ${REGULAR_FILE} -o ${NETCDF_FILE}
# CDO
OPTS="-t ecmwf -f nc4c -z zip_1 copy -sellonlatbox,-180,180,-90,90"
cdo ${OPTS} ${REGULAR_FILE} ${NETCDF_FILE}
rm -f ${REGULAR_FILE}
#if [ $CDO_ZIP -gt 0 ]; then
# cdo -O -f nc4c -z zip_1 copy ${NETCDF_FILE} ${NETCDF_FILE}4
# rm ${NETCDF_FILE}; mv ${NETCDF_FILE}4 ${NETCDF_FILE}
#fi
dtime=$(cdo -s showtimestamp ${NETCDF_FILE})
dtime2=$(echo $dtime | sed -r 's/T/ /g')
DTIME=$(date -d "$dtime2" "+%F_%H%M%S")
echo " Datetime for file ${NETCDF_FILE} is ${DTIME} "
FILES_ARRAY+=(${NETCDF_FILE})
FILES_STRING=${FILES_STRING}" "${NETCDF_FILE}
else
echo " No file named ${LEVEL_FILE}. No such data in your output? "
fi
done
GLUED_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_${DTIME}.nc
echo " Glue files $FILES_STRING to one file $GLUED_FILE "
cdo -O merge $FILES_STRING $GLUED_FILE
rm -f ${FILES_STRING}
if [ "${typeOfLevel}" == "hybrid" ]; then
ZH_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_zh_${DTIME}.nc
NEW_GLUED_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_with_zh_${DTIME}.nc
echo " Calculate geopotential height from hybrid data " ${NEW_GLUED_FILE}
cdo -O geopotheight -chname,lnsp,lsp ${GLUED_FILE} ${ZH_FILE}
cdo -O merge ${GLUED_FILE} ${ZH_FILE} ${NEW_GLUED_FILE}
rm ${GLUED_FILE} ${ZH_FILE}
mv ${NEW_GLUED_FILE} ${GLUED_FILE}
PLEV_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_plevel_${DTIME}.nc
STD_LEVELS="100000,92500,85000,77500,70000,60000,50000,40000,30000,25000,20000,15000,"
STD_LEVELS=${STD_LEVELS}"10000,7000,5000,3000,2000,1000,700,500,300,200,100,50,20,10,5,2,1 "
echo " Interpolate to pressure levels ${PLEV_FILE} "
cdo -O ml2pl,${STD_LEVELS} ${GLUED_FILE} ${PLEV_FILE}
fi
done
done
#!/bin/bash
#
# ifs2nc.sh
#
# Purpose
# -------
# Convert OpenIFS output to netCDF files
#
# Methods
# -------
# * Use CDO to split the ICMSH and ICMGG files
# to one file per vertical coordinate, e.g. hybrid, surface etc.
# As surface variables are always stored as GRIB1 and hybrid variables
# are always stored as GRIB2, this will not cause problems for CDO.
# It may become necessary to also split by GRIB edition at some point
# * Use CDO to convert from reduced Gaussian or spectral to regular Gaussian grid
# * Use CDO to convert the grib file to netCDF4/HDF5.
# Level of compression set by CDO_ZIP variable.
#
# History
# -------
# 2018-07-27: J. Kjellsson. First version
#
#
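# Usage (a sketch, inferred from the positional-argument assignments below):
#   ./ifs2nc.sh <first_step> <last_step> <step> <processed_dir> <oifs_output_dir> <expid>
#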
# Here we need CDO and grib
module load cdo
module load grib-api
OIFS_OUTPUT_DIR=$5
OIFS_TMP_DIR=./TMP-IFS2NC/
OIFS_PROCESSED_DIR=$4
OIFS_EXPID=$6
OIFS_FIRSTSTEP=$1
OIFS_LASTSTEP=$2
OIFS_STEP=$3
mkdir -vp $OIFS_TMP_DIR
mkdir -vp $OIFS_PROCESSED_DIR
# Level of zipping (1-9). 0 means no zipping.
# This can reduce the data size by up to 90%.
# It adds a little bit to the processing time.
CDO_ZIP=4
echo " Process data from ${OIFS_FIRSTSTEP} to ${OIFS_LASTSTEP} "
for (( step=${OIFS_FIRSTSTEP} ; step<=${OIFS_LASTSTEP} ; step=step+${OIFS_STEP} ))
do
timestamp=$( printf "%06d" $step )
# We split the data into one file per vertical grid
# For writing in netCDF files, I think it makes no sense
# to put data on different grids into the same file.
#
# Each netCDF file will have either surface, hybrid or isobaricInhPa as vert. coord
# Other types could be depthBelowLandLayer
for typeOfLevel in surface isobaricInhPa hybrid
do
FILES_ARRAY=()
FILES_STRING=" "
for GRID in SH GG
do
ORIGINAL_FILE=ICM${GRID}${OIFS_EXPID}+${timestamp}
echo " Get data to ${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb "
grib_copy -w typeOfLevel=$typeOfLevel ${OIFS_OUTPUT_DIR}/${ORIGINAL_FILE} ${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb
LEVEL_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}.grb
REGULAR_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}_R.grb
NETCDF_FILE=${OIFS_TMP_DIR}/${ORIGINAL_FILE}_${typeOfLevel}_R.nc
if [ -e ${LEVEL_FILE} ]; then
if [ $GRID == "GG" ]; then
echo " Convert ${LEVEL_FILE} to ${REGULAR_FILE} (reduced Gaussian to regular Gaussian)"
cdo -O setgridtype,regular ${LEVEL_FILE} ${REGULAR_FILE}
elif [ $GRID == "SH" ]; then
echo " Convert ${LEVEL_FILE} to ${REGULAR_FILE} (spectral harmonics to regular Gaussian)"
cdo -O sp2gpl ${LEVEL_FILE} ${REGULAR_FILE}
fi
rm -f ${LEVEL_FILE}
# Using grib_to_netcdf to convert data has the advantage of retaining variable names
# but does not get the correct longitudes and latitudes. It also does not put a,b coeffs
# into files with hybrid coordinates.
#OPTS="-u time -k 3 -D NC_FLOAT"
#grib_to_netcdf ${OPTS} ${REGULAR_FILE} -o ${NETCDF_FILE}
# CDO
OPTS="-t ecmwf -f nc4c -z zip_${CDO_ZIP} copy -sellonlatbox,-180,180,-90,90"
cdo ${OPTS} ${REGULAR_FILE} ${NETCDF_FILE}
rm -f ${REGULAR_FILE}
#if [ $CDO_ZIP -gt 0 ]; then
# cdo -O -f nc4c -z zip_1 copy ${NETCDF_FILE} ${NETCDF_FILE}4
# rm ${NETCDF_FILE}; mv ${NETCDF_FILE}4 ${NETCDF_FILE}
#fi
dtime=$(cdo -s showtimestamp ${NETCDF_FILE})
dtime2=$(echo $dtime | sed -r 's/T/ /g')
DTIME=$(date -d "$dtime2" "+%F_%H%M%S")
echo " Datetime for file ${NETCDF_FILE} is ${DTIME} "
FILES_ARRAY+=(${NETCDF_FILE})
FILES_STRING=${FILES_STRING}" "${NETCDF_FILE}
else
echo " No file named ${LEVEL_FILE}. No such data in your output? "
fi
done
GLUED_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_${DTIME}.nc
echo " Glue files $FILES_STRING to one file $GLUED_FILE "
cdo -O merge $FILES_STRING $GLUED_FILE
rm -f ${FILES_STRING}
if [ "${typeOfLevel}" == "hybrid" ]; then
ZH_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_zh_${DTIME}.nc
NEW_GLUED_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_${typeOfLevel}_with_zh_${DTIME}.nc
echo " Calculate geopotential height from hybrid data " ${NEW_GLUED_FILE}
cdo -O geopotheight -chname,lnsp,lsp ${GLUED_FILE} ${ZH_FILE}
cdo -O merge ${GLUED_FILE} ${ZH_FILE} ${NEW_GLUED_FILE}
rm ${GLUED_FILE} ${ZH_FILE}
mv ${NEW_GLUED_FILE} ${GLUED_FILE}
PLEV_FILE=${OIFS_PROCESSED_DIR}/${OIFS_EXPID}_plevel_${DTIME}.nc
STD_LEVELS="100000,92500,85000,77500,70000,60000,50000,40000,30000,25000,20000,15000,"
STD_LEVELS=${STD_LEVELS}"10000,7000,5000,3000,2000,1000,700,500,300,200,100,50,20,10,5,2,1 "
echo " Interpolate to pressure levels ${PLEV_FILE} "
cdo -O ml2pl,${STD_LEVELS} ${GLUED_FILE} ${PLEV_FILE}
fi
done
done
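#
# Create OpenIFS initial/forcing files for a new start date by rewriting the
# GRIB dataDate key with grib_set. Usage (a sketch, inferred from the
# assignments below):
#   <this script> <expid> <old_date> <new_date>
# The dates label the <date>00 input/output directories, and <new_date> is
# written into dataDate of each copied file.
#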
expid=$1
odate=$2
ndate=$3
## Make new directory
mkdir -p ${ndate}00
for file in cdwavein sfcwindin specwavein uwavein ICMGG${expid}INIT ICMGG${expid}INIUA ICMSH${expid}INIT; do
## old file
old=${odate}00/$file
## new file
new=${ndate}00/$file
if [ -f $old ]; then
## use grib_set to make new files
grib_set -s dataDate=$ndate $old $new
echo " Made new file: " $new
else
echo " Could not find file " $old
exit
fi
done
## Copy surface file
#cp ${odate}00/ICMCL${expid}INIT ${ndate}00/ICMCL${expid}INIT
#!/bin/bash
#SBATCH -t 24:00:00
#SBATCH -N 1
#SBATCH --cpus-per-task=16
#SBATCH -p large:shared
sleep_time=2
max_jobs=16
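# Run the per-step conversions as background jobs, at most max_jobs at a time;
# the while loop below polls every sleep_time seconds until a slot is free.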
esteps=(35040 70080 105216 140256 175296)
for ((run=0 ; run<=0 ; run++ ))
do
rrun=$(printf "%03d" $(($run+1)))
estep=${esteps[$run]}
fstep=$((${esteps[$(($run-1))]}+24))
estep=43824
fstep=0
echo $fstep $estep
OIFS_FIRSTSTEP=$fstep
OIFS_LASTSTEP=$estep
OIFS_STEP=24
OIFS_OUTDIR=/scratch/usr/shkjocke/esm-experiments/foci5/outdata/oifs/${rrun}/nc4/
OIFS_INDIR=/scratch/usr/shkjocke/esm-experiments/foci5/outdata/oifs/${rrun}/
OIFS_EXPID=KCM2
mkdir -vp $OIFS_OUTDIR
echo " starting loop "
for ((STEP=${OIFS_FIRSTSTEP}; STEP<=${OIFS_LASTSTEP}; STEP=STEP+${OIFS_STEP} )) ; do
# If too many jobs run at the same time, wait
while (( $(jobs -p | wc -l) >= max_jobs )); do sleep $sleep_time; done
(
echo "convering "
./ifs2nc_10.sh $STEP $STEP $OIFS_STEP $OIFS_OUTDIR $OIFS_INDIR $OIFS_EXPID
echo " Completed $i"
) &
done
echo "Now I wait..."
wait
echo 'OPENIFS-to-netCDF conversion done' $fstep $estep
done
#!/bin/bash
python swap_sst_icmcl_batch.py $ALPS_APP_PE
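# ALPS_APP_PE is the task index set by the Cray ALPS launcher (aprun); passing it
# lets each task work on its own slice (an assumption based on the variable name).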
#
#
# Plot two fields
#
# Author: Joakim Kjellsson, Dec 2018
#
# Alexa: Play Despacito!
#
import os,sys
import argparse
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from mpl_toolkits.basemap import Basemap
def parse_args():
    # Parse input arguments
    long_line = ' --------------------------------------------- '
    ## Parse input arguments
    parser = argparse.ArgumentParser(description='Plot geospatial data')
    parser.add_argument('--file', dest='file', action='store', default='data.nc',\
                        help='Name of file (must be netCDF)')
    parser.add_argument('--file2', dest='file2', action='store', default='None',\
                        help='Name of second file (for comparison)')
    parser.add_argument('--vars', dest='vars', action='store', default='T2M',\
                        help='Comma-separated list of variables to plot')
    parser.add_argument('--vmin', dest='vmin', action='store', default='None',\
                        help='Comma-separated list of minimum for colour levels')
    parser.add_argument('--vmax', dest='vmax', action='store', default='None',\
                        help='Comma-separated list of maximum for colour levels')
    parser.add_argument('--figname', dest='figname', action='store', default='plot',\
                        help='Name of plot (no extension)')
    parser.add_argument('--fig_format', dest='fig_format', action='store', default='png',\
                        help='Format of figure (default: png)')
    parser.add_argument('--diff', action='store_true',\
                        help='Difference plot')
    args = parser.parse_args()
    return args
def read_netcdf_data(filename,varlist=[]):
    """
    Read lon, lat and time info plus the variables in varlist from a netCDF
    file and return everything in a dictionary.
    """
    data = {}
    print(' Read %s ' % (filename,))
    nc = Dataset(filename,'r')
    vars = nc.variables.keys()
    for lonname in ['longitude','lon','nav_lon']:
        if lonname in vars:
            lons = nc.variables[lonname][:]
    for latname in ['latitude','lat','nav_lat']:
        if latname in vars:
            lats = nc.variables[latname][:]
    for timename in ['time','time_counter','time_centered']:
        if timename in vars:
            id_time = nc.variables[timename]
            nt = id_time[:].shape[0]
    if lons.ndim == 2 and lats.ndim == 2:
        data['lons'] = lons
        data['lats'] = lats
    elif lons.ndim == 1 and lats.ndim == 1:
        lon2,lat2 = np.meshgrid(lons,lats)
        data['lons'] = lon2
        data['lats'] = lat2
    else:
        print(' Weird shapes of lons,lats: ',lons.shape,lats.shape)
        sys.exit()
    if 1:
        data['nt'] = nt
    if len(varlist) > 0:
        for var in varlist:
            data[var] = nc.variables[var][:]
    else:
        print(' No variables to read. Varlist = ',varlist)
        print(' nc = ',nc)
        sys.exit()
    nc.close()
    return data
def setup_map(ax,map='global'):
    """
    Set up a Basemap instance (global cylindrical projection) on the given axes.
    """
    if map == 'global':
        m = Basemap(projection='cyl',llcrnrlat=-90,urcrnrlat=90,\
                    llcrnrlon=-180,urcrnrlon=180,resolution='c',ax=ax)
        parallels = np.arange(-90.,91.,30.)
        meridians = np.arange(-180.,181.,60.)
    if 1:
        m.drawcoastlines()
    if 0:
        m.fillcontinents(color='Gainsboro')
    if 1:
        m.drawparallels(parallels)
        m.drawmeridians(meridians)
    #m.drawmapboundary(fill_color='aqua')
    #m.drawcoastlines()
    return m
def simple_cf_plot(bmap,ax,x,y,zplot,diff=False,set_levels=True,cflevels=np.arange(0,1,0.1),cbar=False,ct=False,vmin=999,vmax=999):
    if set_levels and vmin == 999 and vmax == 999:
        zmin = np.percentile(zplot,1)
        zmax = np.percentile(zplot,99)
        if diff:
            vmax = max(zmax,-zmin)
            vmin = -vmax
        else:
            vmax = zmax
            vmin = zmin
        cflevels = np.linspace(vmin,vmax,20)
        ctlevels = np.linspace(vmin,vmax,11)
    elif (not set_levels) and (vmin != 999) and (vmax != 999):
        cflevels = np.linspace(vmin,vmax,20)
    else:
        ctlevels = cflevels[1::2]
    if diff:
        cmap = plt.cm.RdBu_r
    else:
        cmap = plt.cm.viridis
    if 1:
        cf1 = bmap.contourf(x,y,zplot,levels=cflevels,cmap=cmap,extend='both')
    if ct:
        ct1 = bmap.contour(x,y,zplot,levels=ctlevels,colors='k')
    else:
        ct1 = None
    if cbar:
        cb1 = plt.colorbar(cf1,ax=ax)
    else:
        cb1 = None
    return cf1,ct1,cb1
def single_diff_plot(lons,lats,zplot):
    fig1 = plt.figure()
    ax1 = fig1.add_subplot(111)
    bm1 = setup_map(ax=ax1)
    x,y = bm1(lons,lats)
    cf, ct, cb = simple_cf_plot(bm1,ax1,x,y,zplot,diff=True,cbar=True)
    return fig1
def compare_two_fields(lon1,lat1,zplot1,lon2,lat2,zplot2):
    fig1 = plt.figure()
    ax1 = fig1.add_axes([0.025,0.2,0.45,0.3])
    ax2 = fig1.add_axes([0.525,0.2,0.45,0.3])
    cax = fig1.add_axes([0.2,0.15,0.6,0.02])
    bm1 = setup_map(ax=ax1)
    bm2 = setup_map(ax=ax2)
    x1,y1 = bm1(lon1,lat1)
    x2,y2 = bm2(lon2,lat2)
    cf1,ct1,cb1 = simple_cf_plot(bm1,ax1,x1,y1,zplot1)
    levels = cf1.levels
    cf2,ct2,cb2 = simple_cf_plot(bm2,ax2,x2,y2,zplot2,set_levels=False,cflevels=levels,cbar=False)
    cb = plt.colorbar(cf1,cax=cax,orientation='horizontal',ticks=[levels[0],levels[-1]])
    return fig1
if 1:
    args = parse_args()
    varlist = args.vars.split(',')
    vminlist = args.vmin.split(',')
    vmaxlist = args.vmax.split(',')
    print(' Plotting variables: ')
    for var in varlist:
        print(' %s ' % (var,))
    print(' From file %s ' % (args.file,))
    data = read_netcdf_data(args.file,varlist=varlist)
    lons = data['lons'][:]
    lats = data['lats'][:]
    if args.file2 != 'None':
        print(' and file2 %s ' % (args.file2,))
        data2 = read_netcdf_data(args.file2,varlist=varlist)
        lons2 = data2['lons'][:]
        lats2 = data2['lats'][:]
    if args.diff:
        for var in varlist:
            if 1:
                for jn in range(0,data['nt']):
                    zplot = data[var][jn,:,:]
                    fig1 = single_diff_plot(lons,lats,zplot)
                    figname = '%s_%s_%d.%s' % (args.figname,var,jn,args.fig_format)
                    fig1.savefig(figname,format=args.fig_format)
                    print(' Saved figure %s ' % (figname,))
    if args.file2 != 'None':
        for var in varlist:
            print('Comparing two fields, %s ' % (var,))
            if 1:
                for jn in range(0,data['nt']):
                    zplot1 = data[var][jn,:,:]
                    zplot2 = data2[var][jn,:,:]
                    fig1 = compare_two_fields(lons,lats,zplot1,lons2,lats2,zplot2)
                    figname = '%s_%s_compare_%d.%s' % (args.figname,var,jn,args.fig_format)
                    fig1.savefig(figname,format=args.fig_format)
                    print(' Saved figure %s ' % (figname,))
# load modules needed
import numpy as np