diff --git a/.github/workflows/flake8.yml b/.github/workflows/flake8.yml index e0efd15..ba5939f 100644 --- a/.github/workflows/flake8.yml +++ b/.github/workflows/flake8.yml @@ -25,4 +25,4 @@ jobs: run: | python -m pip install --upgrade pip pip install flake8 - flake8 --count --max-line-length=127 --statistics bin + flake8 --count --max-line-length=127 --statistics bin --exclude bin/compute_geopotential_on_ml.py diff --git a/README.md b/README.md index daa17ee..8d68d40 100644 --- a/README.md +++ b/README.md @@ -76,9 +76,11 @@ Scripts Setup git clone https://github.com/Open-MSS/data-retrieval.git cd data-retrieval +[SG: not sure if this is needed for this version of the data retrieval (branch speedup_with_parallel_mars_requests) 2. Install all requirements pip3 install --user -r requirements.txt +] 3. Copy the settings.example to settings.config and adjust the configuration to your liking; here, you may also overwrite the values set already in @@ -99,8 +101,17 @@ After completing both setups, you can use this script as follows: that are then automatically stored in the environment variables MSJ* will be used instead With the trigger, the job ist started when a specific forecast is available, e.g. fc00h036 for the 36h forecast. It can be started either form the ecaccess website - or by typing - ecaccess-job-submit -ni fc00h036 bin/get_ecmwf.sh (automatic daily job renewal) - or - ecaccess-job-submit -ni fc00h036 -noRenew bin/get_ecmwf.sh (job runs once) + or by a script containing + +#! 
/bin/ksh + +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_036.sh -ni fc00h036 -lt 1 +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_036.sh -ni fc12h036 -lt 1 + +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_072.sh -ni fc00h072 -lt 1 +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_072.sh -ni fc12h072 -lt 1 + +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_144.sh -ni fc00h144 -lt 1 +ecaccess-job-submit -queueName ecs data-retrieval/bin/get_ecmwf_144.sh -ni fc12h144 -lt 1 + diff --git a/batch_out/get_job_cputime_memory.sh b/batch_out/get_job_cputime_memory.sh new file mode 100644 index 0000000..713b1db --- /dev/null +++ b/batch_out/get_job_cputime_memory.sh @@ -0,0 +1,3 @@ +#!/bin/bash +read -p "Enter job-id :" jobid +sacct --format="CPUTime,MaxRSS,MaxVMSize" -j $jobid diff --git a/bin/add_ancillary.py b/bin/add_ancillary.py index e5d88cd..2d4c965 100644 --- a/bin/add_ancillary.py +++ b/bin/add_ancillary.py @@ -158,17 +158,17 @@ def my_geopotential_to_height(zh): return result -def add_tropopauses(ml, sfc): +def add_tropopauses(ml, ml_uv, sfc): """ Adds first and second thermal WMO tropopause to model. Fill value is -999. 
""" try: temp = (ml["t"].data * units(ml["t"].attrs["units"])).to("K").m - press = np.log((ml["pres"].data * units(ml["pres"].attrs["units"]) + press = np.log((ml_uv["pres"].data * units(ml_uv["pres"].attrs["units"]) ).to("hPa").m) gph = my_geopotential_to_height(ml["z"]).data.to("km").m - theta = (ml["pt"].data * units(ml["pt"].attrs["units"])).to("K").m + theta = (ml_uv["pt"].data * units(ml_uv["pt"].attrs["units"])).to("K").m except KeyError as ex: print("Some variables are missing for WMO tropopause calculation:", ex) return sfc @@ -223,21 +223,22 @@ def add_tropopauses(ml, sfc): sfc[name].attrs["units"] = VARIABLES[name][1] sfc[name].attrs["standard_name"] = VARIABLES[name][2] sfc[name].attrs["long_name"] = VARIABLES[name][3] - return sfc + sfc_tmp=sfc[["TROPOPAUSE","TROPOPAUSE_SECOND","TROPOPAUSE_PRESSURE","TROPOPAUSE_SECOND_PRESSURE","TROPOPAUSE_THETA","TROPOPAUSE_SECOND_THETA"]] + return sfc_tmp def main(): option, sfc_filename, ml_filename = parse_args(sys.argv[1:]) - sfc = xr.load_dataset(sfc_filename) - ml = xr.load_dataset(ml_filename) - + sfc = xr.open_dataset(sfc_filename) + ml = xr.open_dataset(ml_filename) + ml_uv=ml[["u","v"]] if option.pressure: print("Adding pressure...") try: sp = np.exp(sfc["lnsp"]) lev = ml["lev"].data.astype(int) - 1 - ml["pres"] = ( + ml_uv["pres"] = ( ("time", "lev", "lat", "lon"), (ml["hyam"].data[:][lev][np.newaxis, :, np.newaxis, np.newaxis] + ml["hybm"].data[:][lev][np.newaxis, :, np.newaxis, np.newaxis] * @@ -245,59 +246,63 @@ def main(): except KeyError as ex: print("Some variables miss for PRES calculation", ex) else: - ml["pres"].attrs["units"] = VARIABLES["pres"][1] - ml["pres"].attrs["standard_name"] = VARIABLES["pres"][2] + ml_uv["pres"].attrs["units"] = VARIABLES["pres"][1] + ml_uv["pres"].attrs["standard_name"] = VARIABLES["pres"][2] if option.theta or option.pv: print("Adding potential temperature...") try: - ml["pt"] = potential_temperature(ml["pres"], ml["t"]) + ml_uv["pt"] = 
potential_temperature(ml_uv["pres"], ml["t"]) except KeyError as ex: print("Some variables miss for THETA calculation", ex) else: - ml["pt"].data = ml["pt"].data.to(VARIABLES["pt"][1]).m - ml["pt"].attrs["units"] = VARIABLES["pt"][1] - ml["pt"].attrs["standard_name"] = VARIABLES["pt"][2] + ml_uv["pt"].data = ml_uv["pt"].data.to(VARIABLES["pt"][1]).m + ml_uv["pt"].attrs["units"] = VARIABLES["pt"][1] + ml_uv["pt"].attrs["standard_name"] = VARIABLES["pt"][2] if option.pv: print("Adding potential vorticity...") try: - ml = ml.metpy.assign_crs(grid_mapping_name='latitude_longitude', + ml_uv = ml_uv.metpy.assign_crs(grid_mapping_name='latitude_longitude', earth_radius=6.356766e6) - ml["pv"] = potential_vorticity_baroclinic( - ml["pt"], ml["pres"], ml["u"], ml["v"]) + #pv calculation (out of memory for large areas and high-res grid) + ml_uv["pv"] = potential_vorticity_baroclinic( + ml_uv["pt"], ml_uv["pres"], ml_uv["u"], ml_uv["v"]) except KeyError as ex: print("Some variables miss for PV calculation", ex) else: - ml["pv"].data = ml["pv"].data.to(VARIABLES["pv"][1]).m - ml["pv"].attrs["units"] = VARIABLES["pv"][1] - ml["pv"].attrs["standard_name"] = VARIABLES["pv"][2] + ml_uv["pv"].data = ml_uv["pv"].data.to(VARIABLES["pv"][1]).m + ml_uv["pv"].attrs["units"] = VARIABLES["pv"][1] + ml_uv["pv"].attrs["standard_name"] = VARIABLES["pv"][2] finally: - ml = ml.drop("metpy_crs") + ml_uv = ml_uv.drop("metpy_crs") if option.n2: print("Adding N2...") try: - ml["n2"] = brunt_vaisala_frequency_squared( - my_geopotential_to_height(ml["z"]), ml["pt"]) + ml_uv["n2"] = brunt_vaisala_frequency_squared( + my_geopotential_to_height(ml["z"]), ml_uv["pt"]) except KeyError as ex: print("Some variables miss for N2 calculation", ex) else: - ml["n2"].data = ml["n2"].data.to(VARIABLES["n2"][1]).m - ml["n2"].attrs["units"] = VARIABLES["n2"][1] - ml["n2"].attrs["standard_name"] = VARIABLES["n2"][2] + ml_uv["n2"].data = ml_uv["n2"].data.to(VARIABLES["n2"][1]).m + ml_uv["n2"].attrs["units"] = 
VARIABLES["n2"][1] + ml_uv["n2"].attrs["standard_name"] = VARIABLES["n2"][2] if option.tropopause: print("Adding first and second tropopause") - sfc = add_tropopauses(ml, sfc) + sfc_tmp = add_tropopauses(ml, ml_uv, sfc) - for xin in [ml, sfc]: + for xin in [ml_uv]: now = datetime.datetime.now().isoformat() history = now + ":" + " ".join(sys.argv) if "history" in xin.attrs: history += "\n" + xin.attrs["history"] xin.attrs["history"] = history xin.attrs["date_modified"] = now - - sfc.to_netcdf(sfc_filename, format="NETCDF4_CLASSIC") + + if option.tropopause: + sfc_tmp.to_netcdf(sfc_filename+"ancillary", format="NETCDF4_CLASSIC") # no compression, yet, as ml is still converted by ncks - ml.to_netcdf(ml_filename, format="NETCDF4_CLASSIC") + ml_uv.to_netcdf(ml_filename+"ancillary", format="NETCDF4_CLASSIC") + sfc.close() + ml.close() if __name__ == "__main__": diff --git a/bin/compute_geopotential_on_ml.py b/bin/compute_geopotential_on_ml.py index 9685be5..9ea1c75 100644 --- a/bin/compute_geopotential_on_ml.py +++ b/bin/compute_geopotential_on_ml.py @@ -1,34 +1,35 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 ''' -Copyright 2019 ECMWF. - +Copyright 2023 ECMWF. + This software is licensed under the terms of the Apache Licence Version 2.0 which can be obtained at http://www.apache.org/licenses/LICENSE-2.0 - + In applying this licence, ECMWF does not waive the privileges and immunities granted to it by virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction. 
- + ************************************************************************** Function : compute_geopotential_on_ml - + Author (date) : Cristian Simarro (09/10/2015) modified: Cristian Simarro (20/03/2017) - migrated to eccodes Xavi Abellan (03/12/2018) - compatibilty with Python 3 Xavi Abellan (27/03/2020) - Corrected steps in output file Xavi Abellan (05/08/2020) - Better handling of levels - + Xavi Abellan (31/01/2023) - Better handling of steps + Category : COMPUTATION - + OneLineDesc : Computes geopotential on model levels - + Description : Computes geopotential on model levels. Based on code from Nils Wedi, the IFS documentation: https://software.ecmwf.int/wiki/display/IFS/CY41R1+Official+IFS+Documentation part III. Dynamics and numerical procedures optimised implementation by Dominique Lucas. ported to Python by Cristian Simarro - + Parameters : tq.grib - grib file with all the levelist of t and q zlnsp.grib - grib file with levelist 1 for params @@ -37,12 +38,12 @@ to store in the output -o output (optional) - name of the output file (default='z_out.grib') - + Return Value : output (default='z_out.grib') A fieldset of geopotential on model levels - + Dependencies : None - + Example Usage : compute_geopotential_on_ml.py tq.grib zlnsp.grib ''' @@ -55,11 +56,11 @@ codes_index_add_file, codes_get_array, codes_get_values, codes_index_release, codes_release, codes_set_values, codes_write) - + R_D = 287.06 R_G = 9.80665 - - + + def parse_args(): ''' Parse program arguments using ArgumentParser''' parser = argparse.ArgumentParser( @@ -89,66 +90,63 @@ def parse_args(): args.levelist = list(range(int(args.levelist[0]), int(args.levelist[2]) + 1)) else: - args.levelist = [int(_l) for _l in args.levelist.split('/')] + args.levelist = [int(l) for l in args.levelist.split('/')] return args - - + + def main(): '''Main function''' args = parse_args() - + print('Arguments: %s' % ", ".join( ['%s: %s' % (k, v) for k, v in vars(args).items()])) - + fout = open(args.output, 
'wb') index_keys = ['date', 'time', 'shortName', 'level', 'step'] - + idx = codes_index_new_from_file(args.z_lnsp, index_keys) codes_index_add_file(idx, args.t_q) if 'u_v' in args: codes_index_add_file(idx, args.u_v) - + values = None # iterate date for date in codes_index_get(idx, 'date'): codes_index_select(idx, 'date', date) - # iterate step + # iterate time for time in codes_index_get(idx, 'time'): codes_index_select(idx, 'time', time) - values = None - # iterate step all but geopotential z which is always step 0 (an) for step in codes_index_get(idx, 'step'): - if values is None: - values = get_initial_values(idx, step=step, keep_sample=True) - if 'height' in args: - values['height'] = args.height - values['gh'] = args.height * R_G + values['z'] - if 'levelist' in args: - values['levelist'] = args.levelist codes_index_select(idx, 'step', step) - # surface pressure - try: - values['sp'] = get_surface_pressure(idx) - production_step(idx, step, values, fout) - except WrongStepError: - if step != '0': - raise - + if not values: + values = get_initial_values(idx, keep_sample=True) + if 'height' in args: + values['height'] = args.height + values['gh'] = args.height * R_G + values['z'] + if 'levelist' in args: + values['levelist'] = args.levelist + # surface pressure + try: + values['sp'] = get_surface_pressure(idx) + production_step(idx, step, values, fout) + except WrongStepError: + if step != '0': + raise + try: codes_release(values['sample']) except KeyError: pass - + codes_index_release(idx) fout.close() - - -def get_initial_values(idx, step, keep_sample=False): + + +def get_initial_values(idx, keep_sample=False): '''Get the values of surface z, pv and number of levels ''' codes_index_select(idx, 'level', 1) - codes_index_select(idx, 'step', step) codes_index_select(idx, 'shortName', 'z') gid = codes_new_from_index(idx) - + values = {} # surface geopotential values['z'] = codes_get_values(gid) @@ -160,8 +158,8 @@ def get_initial_values(idx, step, 
keep_sample=False): else: codes_release(gid) return values - - + + def check_max_level(idx, values): '''Make sure we have all the levels required''' # how many levels are we computing? @@ -171,8 +169,8 @@ def check_max_level(idx, values): (sys.argv[0], values['nlevels'], max_level), file=sys.stderr) values['nlevels'] = max_level - - + + def get_surface_pressure(idx): '''Get the surface pressure for date-time-step''' codes_index_select(idx, 'level', 1) @@ -188,8 +186,8 @@ def get_surface_pressure(idx): sfc_p = np.exp(codes_get_values(gid)) codes_release(gid) return sfc_p - - + + def get_ph_levs(values, level): '''Return the presure at a given level and the next''' a_coef = values['pv'][0:values['nlevels'] + 1] @@ -197,8 +195,8 @@ def get_ph_levs(values, level): ph_lev = a_coef[level - 1] + (b_coef[level - 1] * values['sp']) ph_levplusone = a_coef[level] + (b_coef[level] * values['sp']) return ph_lev, ph_levplusone - - + + def compute_z_level(idx, lev, values, z_h): '''Compute z at half & full level for the given level, based on t/q/sp''' # select the levelist and retrieve the vaules of t and q @@ -217,34 +215,34 @@ def compute_z_level(idx, lev, values, z_h): raise MissingLevelError('Q at level {} missing from input'.format(lev)) q_level = codes_get_values(gid) codes_release(gid) - + # compute moist temperature t_level = t_level * (1. + 0.609133 * q_level) - + # compute the pressures (on half-levels) ph_lev, ph_levplusone = get_ph_levs(values, lev) - + if lev == 1: dlog_p = np.log(ph_levplusone / 0.1) alpha = np.log(2) else: dlog_p = np.log(ph_levplusone / ph_lev) alpha = 1. 
- ((ph_lev / (ph_levplusone - ph_lev)) * dlog_p) - + t_level = t_level * R_D - + # z_f is the geopotential of this full level # integrate from previous (lower) half-level z_h to the # full level z_f = z_h + (t_level * alpha) - + # z_h is the geopotential of 'half-levels' # integrate z_h to next half level z_h = z_h + (t_level * dlog_p) - + return z_h, z_f - - + + def production_step(idx, step, values, fout): '''Compute z at half & full level for the given level, based on t/q/sp''' # We want to integrate up into the atmosphere, starting at the @@ -253,10 +251,10 @@ def production_step(idx, step, values, fout): # See the IFS documentation, part III # For speed and file I/O, we perform the computations with # numpy vectors instead of fieldsets. - + z_h = values['z'] codes_set(values['sample'], 'step', int(step)) - + for lev in sorted(values['levelist'], reverse=True): try: z_h, z_f = compute_z_level(idx, lev, values, z_h) @@ -267,17 +265,18 @@ def production_step(idx, step, values, fout): except MissingLevelError as e: print('%s [WARN] %s' % (sys.argv[0], e), file=sys.stderr) - - + + class WrongStepError(Exception): ''' Exception capturing wrong step''' pass - - + + class MissingLevelError(Exception): ''' Exception capturing missing levels in input''' pass - - + + if __name__ == '__main__': main() + diff --git a/bin/convert.sh b/bin/convert.sh index e46acec..45096af 100755 --- a/bin/convert.sh +++ b/bin/convert.sh @@ -3,121 +3,130 @@ #Author(s): Joern Ungermann, May Baer export mlfile=mss/${BASE}.ml.nc +export mlfile_u=mss/${BASE}.ml_u.nc +export mlfile_v=mss/${BASE}.ml_v.nc +export mlfile_uv=mss/${BASE}.ml_uv.nc +export mlfile_tq=mss/${BASE}.ml_tq.nc export plfile=mss/${BASE}.pl.nc +export pvfile=mss/${BASE}.pv.nc export alfile=mss/${BASE}.al.nc export tlfile=mss/${BASE}.tl.nc -export pvfile=mss/${BASE}.pv.nc export sfcfile=mss/${BASE}.sfc.nc -export tmpfile=mss/.${BASE}.tmp +export sfcfile_ancillary=mss/${BASE}.sfc_ancillary.nc +export tmpfile=mss/${BASE}.tmp -if [ 
! -f grib/${BASE}.ml.grib ]; then - echo FATAL `date` Model level file is missing - exit -fi -if [ ! -f grib/${BASE}.ml2.grib ]; then - echo FATAL `date` Model2 level file is missing +if [ ! -f grib/${BASE}.ml_tq.grib ]; then + echo FATAL `date` "Model level t,q file (grib) is missing" exit fi -if [ ! -f grib/${BASE}.sfc.grib ]; then - echo FATAL `date` Surface file is missing +if [ ! -f grib/${BASE}.ml_lnsp_z.grib ]; then + echo FATAL `date` "Model level lnsp, z file (grib) is missing" exit fi -if [ ! -f grib/${BASE}.pv.grib ]; then - echo FATAL `date` Potential Vorticity level file is missing +if [ ! -f $sfcfile ]; then + echo FATAL `date` "Surface file(nc) is missing" exit fi echo adding gph -ls -$PYTHON $BINDIR/compute_geopotential_on_ml.py grib/${BASE}.ml.grib grib/${BASE}.ml2.grib -o ${tmpfile} -ls +$PYTHON $BINDIR/compute_geopotential_on_ml.py grib/${BASE}.ml_tq.grib grib/${BASE}.ml_lnsp_z.grib -o ${tmpfile} cdo -f nc4c -t ecmwf copy ${tmpfile} ${tmpfile}_z ncatted -O \ -a standard_name,z,o,c,geopotential_height \ ${tmpfile}_z rm ${tmpfile} +#merge ml-grib files and convert with cdo to netcdf (sorting ascending for time step requried in grib_copy otherwise cdo does not recognise correct times for additional vars) +echo "merge ml-grib files (check this section if you miss additonal parameters in ml files)" +if [[ -f grib/${BASE}.ml1.grib && -f grib/${BASE}.ml2.grib && -f grib/${BASE}.ml3.grib && -f grib/${BASE}.ml4.grib && -f grib/${BASE}.ml5.grib ]]; then + grib_copy -B'step:i asc' grib/${BASE}.ml_tq.grib grib/${BASE}.ml1.grib grib/${BASE}.ml2.grib grib/${BASE}.ml3.grib grib/${BASE}.ml4.grib grib/${BASE}.ml5.grib grib/${BASE}.ml.grib +elif [[ -f grib/${BASE}.ml1.grib && -f grib/${BASE}.ml2.grib && -f grib/${BASE}.ml3.grib && -f grib/${BASE}.ml4.grib ]]; then + grib_copy -B'step:i asc' grib/${BASE}.ml_tq.grib grib/${BASE}.ml1.grib grib/${BASE}.ml2.grib grib/${BASE}.ml3.grib grib/${BASE}.ml4.grib grib/${BASE}.ml.grib +elif [[ -f grib/${BASE}.ml1.grib && -f 
grib/${BASE}.ml2.grib && -f grib/${BASE}.ml3.grib ]]; then + grib_copy -B'step:i asc' grib/${BASE}.ml_tq.grib grib/${BASE}.ml1.grib grib/${BASE}.ml2.grib grib/${BASE}.ml3.grib grib/${BASE}.ml.grib +else + grib_copy -B'step:i asc' grib/${BASE}.ml_tq.grib grib/${BASE}.ml.grib +fi echo copy ml cdo -f nc4c -t ecmwf copy grib/${BASE}.ml.grib $mlfile ncatted -O \ -a standard_name,cc,o,c,cloud_area_fraction_in_atmosphere_layer \ - -a standard_name,o3,o,c,mass_fraction_of_ozone_in_air \ -a standard_name,ciwc,o,c,specific_cloud_ice_water_content \ -a standard_name,clwc,o,c,specific_cloud_liquid_water_content \ -a units,cc,o,c,dimensionless \ -a units,time,o,c,"${time_units}" \ $mlfile -cdo merge ${tmpfile}_z ${mlfile} ${tmpfile} +if [[ x$MODEL_PARAMETERS4 == x"O3" ]]; then + ncatted -O \ + -a standard_name,o3,o,c,mass_fraction_of_ozone_in_air \ + $mlfile +fi +echo "merge ml-file and geopot. height" +cdo merge ${mlfile} ${tmpfile}_z ${tmpfile} mv ${tmpfile} $mlfile -rm ${tmpfile}_z -echo converting sfc -cdo -f nc4c -t ecmwf copy grib/${BASE}.sfc.grib $sfcfile -ncatted -O \ - -a standard_name,BLH,o,c,atmosphere_boundary_layer_thickness \ - -a standard_name,CI,o,c,sea_ice_area_fraction \ - -a standard_name,HCC,o,c,high_cloud_area_fraction \ - -a standard_name,LCC,o,c,low_cloud_area_fraction \ - -a standard_name,LSM,o,c,land_binary_mask \ - -a standard_name,MCC,o,c,medium_cloud_area_fraction \ - -a standard_name,MSL,o,c,air_pressure_at_sea_level \ - -a standard_name,SSTK,o,c,sea_surface_temperature \ - -a standard_name,U10M,o,c,surface_eastward_wind \ - -a standard_name,V10M,o,c,surface_northward_wind \ - -a units,HCC,o,c,dimensionless \ - -a units,LCC,o,c,dimensionless \ - -a units,MCC,o,c,dimensionless \ - $sfcfile -# extract lnsp and remove lev dimension. 
-grib_copy -w shortName=lnsp grib/${BASE}.ml2.grib ${tmpfile} -cdo -f nc4c -t ecmwf copy ${tmpfile} ${tmpfile}2 -ncwa -O -alev ${tmpfile}2 ${tmpfile} -ncks -7 -C -O -x -vhyai,hyam,hybi,hybm,lev ${tmpfile} ${tmpfile}2 -rm ${tmpfile} -cdo merge ${sfcfile} ${tmpfile}2 ${tmpfile} -mv ${tmpfile} $sfcfile -rm ${tmpfile}2 +echo fix up ml for wms server but keep original for later calculations +ncks -O -7 -C -x -v hyai,hyam,hybi,hybm $MODEL_REDUCTION $mlfile $tmpfile +ncatted -O -a standard_name,lev,o,c,atmosphere_hybrid_sigma_pressure_coordinate $tmpfile -echo add ancillary -$PYTHON $BINDIR/add_ancillary.py $sfcfile $mlfile $ANCILLARY +#copy files +if ecaccess-association-list | grep -q $ECTRANS_ID; then + echo "Compress ml file before transfering" + date + nccopy -u -d5 $tmpfile ${tmpfile}c + date + echo "Transfering ml (without u/v and ancillary) files to "$ECTRANS_ID + ectrans -remote $ECTRANS_ID -source ${tmpfile}c -target $mlfile -overwrite -remove + rm ${tmpfile}c +fi -echo fix up ml -ncks -O -7 -C -x -v hyai,hyam,hybi,hybm $MODEL_REDUCTION $mlfile $mlfile -ncatted -O -a standard_name,lev,o,c,atmosphere_hybrid_sigma_pressure_coordinate $mlfile +echo "merge ml file and uv file for later calculations" +cdo -O merge ${mlfile} ${mlfile_u} ${mlfile_v} ${mlfile_uv} -echo converting pv -cdo -f nc4c -t ecmwf copy grib/${BASE}.pv.grib $pvfile -ncatted -O \ - -a standard_name,lev,o,c,atmosphere_ertel_potential_vorticity_coordinate \ - -a standard_name,Z,o,c,geopotential_height \ - -a standard_name,O3,o,c,mass_fraction_of_ozone_in_air \ - -a standard_name,PRES,o,c,air_pressure \ - -a standard_name,PT,o,c,air_potential_temperature \ - -a standard_name,Q,o,c,specific_humidity \ - -a standard_name,U,o,c,eastward_wind \ - -a standard_name,V,o,c,northward_wind \ - -a units,lev,o,c,"uK m^2 kg^-1 s^-1" \ - -a units,time,o,c,"${time_units}" \ - $pvfile -ncap2 -O -s "lev/=1000" $pvfile $pvfile -ncks -O -7 -L 7 $pvfile $pvfile +echo add ancillary +#ancillary data are saved to 
${sfcfile}ancillary and ${mlfile_uv}ancillary +$PYTHON $BINDIR/add_ancillary.py $sfcfile $mlfile_uv $ANCILLARY +if [ -f ${sfcfile}ancillary ]; then + mv ${sfcfile}ancillary ${sfcfile_ancillary} +fi -if [[ x$PRES_LEVELS != x"" ]]; then +echo "fix up ml complete tmp file (i.e. mfile_uv) for press-, th-, gph-level calculations" +ncks -O -7 -C -x -v hyai,hyam,hybi,hybm $MODEL_REDUCTION ${mlfile_uv} ${mlfile_uv} +ncatted -O -a standard_name,lev,o,c,atmosphere_hybrid_sigma_pressure_coordinate ${mlfile_uv} + +if [[ x$PRES_LEVELS != x"" ]] && [[ x$PRES_FROM_MARS != x"yes" ]]; then echo "Creating pressure level file..." - $PYTHON $BINDIR/interpolate_model.py $mlfile $plfile pres hPa $PRES_LEVELS + $PYTHON $BINDIR/interpolate_model.py ${mlfile_uv} $plfile pres hPa $PRES_LEVELS ncatted -O -a standard_name,pres,o,c,atmosphere_pressure_coordinate $plfile fi if [[ x$THETA_LEVELS != x"" ]]; then echo "Creating potential temperature level file..." - $PYTHON $BINDIR/interpolate_model.py $mlfile $tlfile pt K $THETA_LEVELS + #requires pt from ${mlfile_uv}ancillary + export ptfile=mss/${BASE}.pt_tmp.nc + cdo select,name=pt ${mlfile_uv}ancillary ${ptfile} + cdo -O merge ${mlfile_uv} ${ptfile} ${mlfile_uv}tmp + $PYTHON $BINDIR/interpolate_model.py ${mlfile_uv}tmp $tlfile pt K $THETA_LEVELS ncatted -O -a standard_name,pt,o,c,atmosphere_potential_temperature_coordinate $tlfile + rm ${mlfile_uv}tmp + rm ${ptfile} fi if [[ x$GPH_LEVELS != x"" ]]; then echo "Creating altitude level file..." 
- $PYTHON $BINDIR/interpolate_model.py $mlfile $alfile z m $GPH_LEVELS + $PYTHON $BINDIR/interpolate_model.py ${mlfile_uv} $alfile z m $GPH_LEVELS ncatted -O -a standard_name,z,o,c,atmosphere_altitude_coordinate $alfile fi +#overwrite mlfile_uv with mlfile that contains u,v and ancillary +mv ${mlfile_uv}ancillary ${mlfile_uv} + +echo "fix up ml u,v,ancillary file" +ncks -O -7 -C -x -v hyai,hyam,hybi,hybm $MODEL_REDUCTION $mlfile_uv $mlfile_uv +ncatted -O -a standard_name,lev,o,c,atmosphere_hybrid_sigma_pressure_coordinate $mlfile_uv +echo "Compress ml u,v,ancillary" +date +nccopy -u -d5 $mlfile_uv ${mlfile_uv}c +mv ${mlfile_uv}c $mlfile_uv +date echo "Done, your netcdf files are located at $(pwd)/mss" diff --git a/bin/download_ecmwf.sh b/bin/download_ecmwf.sh deleted file mode 100755 index aab4f1d..0000000 --- a/bin/download_ecmwf.sh +++ /dev/null @@ -1,80 +0,0 @@ -#!/bin/bash -#Copyright (C) 2021 by Forschungszentrum Juelich GmbH -#Author(s): May Baer - - -if [ ! -f grib/${BASE}.ml.grib ]; then - mars < "$DATE" ]] +then + export init_date="${DATE}T${TIME}" +fi +export time_units="hours since ${init_date}" + +# Retrieve pl files from mars +if [ x$PRES_FROM_MARS == x"yes" ]; then + echo "Run mars request for data on pl..." + date + . $BINDIR/download_ecmwf_pl.sh & +fi + +# Retrieve ml, sfc, pv and pt files +echo "Run mars request for data ml lnsp/Z..." +date +. $BINDIR/download_ecmwf_ml_lnsp_z.sh & +echo "Run mars request for data on 2pv..." +date +. $BINDIR/download_ecmwf_pv.sh & +echo "Run mars request for data ml T/Q..." +date +. $BINDIR/download_ecmwf_ml_tq.sh & +echo "Run mars request for data ml u..." +date +. $BINDIR/download_ecmwf_ml_u.sh & +echo "Run mars request for data ml v..." +date +. $BINDIR/download_ecmwf_ml_v.sh & +echo "Run mars request for data sfc..." +date +. $BINDIR/download_ecmwf_sfc.sh & + +# Retrieve ml additional parameters + +if [[ x$MODEL_PARAMETERS1 != x"" ]]; then + echo "Run mars request for data ml parameters 1..." + date + . 
$BINDIR/download_ecmwf_ml_para1.sh & +fi +if [[ x$MODEL_PARAMETERS2 != x"" ]]; then + echo "Run mars request for data ml parameters 2..." + date + . $BINDIR/download_ecmwf_ml_para2.sh & +fi +if [[ x$MODEL_PARAMETERS3 != x"" ]]; then + echo "Run mars request for data ml parameters 3..." + date + . $BINDIR/download_ecmwf_ml_para3.sh & +fi +if [[ x$MODEL_PARAMETERS4 != x"" ]]; then + echo "Run mars request for data ml parameters 4..." + date + . $BINDIR/download_ecmwf_ml_para4.sh & +fi +if [[ x$MODEL_PARAMETERS5 != x"" ]]; then + echo "Run mars request for data ml parameters 5..." + date + . $BINDIR/download_ecmwf_ml_para5.sh & +fi + +# Convert grib to netCDF, set init time +wait +echo "Run convert.sh" +. $BINDIR/convert.sh + +if ecaccess-association-list | grep -q $ECTRANS_ID; then + echo "Transfering files to "$ECTRANS_ID + ectrans -remote $ECTRANS_ID -source $mlfile_uv -target $mlfile_uv -overwrite -remove + if [ -f $sfcfile_ancillary ]; then + ectrans -remote $ECTRANS_ID -source $sfcfile_ancillary -target $sfcfile_ancillary -overwrite -remove + fi + if [ -f $tlfile ]; then + ectrans -remote $ECTRANS_ID -source $tlfile -target $tlfile -overwrite -remove + fi + if [ -f $plfile ] && [ x$PRES_FROM_MARS != x"yes" ]; then + ectrans -remote $ECTRANS_ID -source $plfile -target $plfile -overwrite -remove + fi + if [ -f $alfile ]; then + ectrans -remote $ECTRANS_ID -source $alfile -target $alfile -overwrite -remove + fi +fi +#---------------------comet meteograms---------------- +if [[ x$COMET_MET == x"yes" ]] +then + echo "Run extract_comet_ncks.sh" + . 
$MAINDIR/extract_comet_ncks.sh +fi + +if [[ x$CLEANUP == x"yes" ]] +then + # clean up locally + for f in $mlfile $tlfile $plfile $pvfile $alfile $sfcfile $sfcfile_ancillary $mlfile_u $mlfile_v $mlfile_uv mss/${BASE}.tmp_z mss/${BASE}.tmp grib/${BASE}*.grib; + do + if [ -f $f ]; + then + rm $f + fi + done +fi diff --git a/bin/get_ecmwf_072.sh b/bin/get_ecmwf_072.sh new file mode 100755 index 0000000..177c227 --- /dev/null +++ b/bin/get_ecmwf_072.sh @@ -0,0 +1,182 @@ +#!/bin/bash + +#Copyright (C) 2021 by Forschungszentrum Juelich GmbH +#Author(s): Joern Ungermann, May Baer, Jens-Uwe Grooss + +#SBATCH --qos=et +#SBATCH --job-name=get_ecmwf +#SBATCH --output=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/get_ecmwf.%j.out +#SBATCH --error=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/get_ecmwf.%j.out +#SBATCH --chdir=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/ + + +# This script works with the cdo version installed on ECACCESS and +# in an mambaforge environment ncenv that includes cartopy (0.20.1), metpy (1.1.0) +# nco (5.0.4), netcdf4 (1.5.8), scipy (1.7.3) and xarray (0.20.2) + +# Define model domain sector, resolution and id name for ectrans in settings.config + +# defines for performance measurements +N=`date +%s%N` +export PS4='+[$(((`date +%s%N`-$N)/1000000))ms][${BASH_SOURCE}:${LINENO}]: ${FUNCNAME[0]:+${FUNCNAME[0]}(): }' +# enable line below for debugging and performance timing +# set -x + +export MAINDIR=$HOME/mss_wms/data_retrieval_for_ipa_mss_wms_atos/data-retrieval/ +export BINDIR=$MAINDIR/bin + +. ${MAINDIR}/settings.default + +if [ ! -f ${MAINDIR}/settings_072.config ]; then + echo Please copy the settings.example to settings_072.config and configure your setup! + exit 1 +fi + +. ${MAINDIR}/settings_072.config + +# get forecast date +# If used as a shell script that is run on a event trigger, +# the $MSJ* environment variables contain the corresponding time info. +# This can be done from the web interface or e.g. 
by the command +# ecaccess-job-submit -ni fc00h072 get_ecmwf.sh +# If these variables are empty, forecast times are defined in settings.config + + +if [[ $MSJ_YEAR != "" ]] +then + echo Date: $MSJ_YEAR $MSJ_MONTH $MSJ_DAY + echo BASETIME, STEP: $MSJ_BASETIME $MSJ_STEP + + export DAY=$MSJ_DAY + export MONTH=$MSJ_MONTH + export YEAR=$MSJ_YEAR + export HH=$MSJ_BASETIME + export FCSTEP=${MSJ_STEP: -3} + + case $FCSTEP in + 036) + export STEP=0/to/36/by/3 + ;; + 072) + export STEP=39/to/72/by/3 + ;; + 144) + export STEP=78/to/144/by/6 + ;; + *) + esac +fi + + +cd $WORKDIR +mkdir -p mss +mkdir -p grib + +# Set path, filenames and variables used later in the script +export DATE=${YEAR}-${MONTH}-${DAY} +export YMD=${YEAR}${MONTH}${DAY} +export TIME=${HH}:00:00 +export BASE=${DATASET}.${YMD}T${HH}.${FCSTEP} +export init_date=$(date +%Y-%m-%dT%H:%M:%S) +echo $BASE + +if [[ "$init_date" > "$DATE" ]] +then + export init_date="${DATE}T${TIME}" +fi +export time_units="hours since ${init_date}" + +# Retrieve pl files from mars +if [ x$PRES_FROM_MARS == x"yes" ]; then + echo "Run mars request for data on pl..." + date + . $BINDIR/download_ecmwf_pl.sh & +fi +# Retrieve ml, sfc, pv and pt files +echo "Run mars request for data ml lnsp/Z..." +date +. $BINDIR/download_ecmwf_ml_lnsp_z.sh & +echo "Run mars request for data on 2pv..." +date +. $BINDIR/download_ecmwf_pv.sh & +echo "Run mars request for data ml T/Q..." +date +. $BINDIR/download_ecmwf_ml_tq.sh & +echo "Run mars request for data ml u..." +date +. $BINDIR/download_ecmwf_ml_u.sh & +echo "Run mars request for data ml v..." +date +. $BINDIR/download_ecmwf_ml_v.sh & +echo "Run mars request for data sfc..." +date +. $BINDIR/download_ecmwf_sfc.sh & + +# Retrieve ml additional parameters + +if [[ x$MODEL_PARAMETERS1 != x"" ]]; then + echo "Run mars request for data ml parameters 1..." + date + . $BINDIR/download_ecmwf_ml_para1.sh & +fi +if [[ x$MODEL_PARAMETERS2 != x"" ]]; then + echo "Run mars request for data ml parameters 2..." 
+ date + . $BINDIR/download_ecmwf_ml_para2.sh & +fi +if [[ x$MODEL_PARAMETERS3 != x"" ]]; then + echo "Run mars request for data ml parameters 3..." + date + . $BINDIR/download_ecmwf_ml_para3.sh & +fi +if [[ x$MODEL_PARAMETERS4 != x"" ]]; then + echo "Run mars request for data ml parameters 4..." + date + . $BINDIR/download_ecmwf_ml_para4.sh & +fi +if [[ x$MODEL_PARAMETERS5 != x"" ]]; then + echo "Run mars request for data ml parameters 5..." + date + . $BINDIR/download_ecmwf_ml_para5.sh & +fi + + +# Convert grib to netCDF, set init time +wait +echo "Run convert.sh" +. $BINDIR/convert.sh + +if ecaccess-association-list | grep -q $ECTRANS_ID; then + echo "Transfering files to "$ECTRANS_ID + ectrans -remote $ECTRANS_ID -source $mlfile_uv -target $mlfile_uv -overwrite -remove + if [ -f $sfcfile_ancillary ]; then + ectrans -remote $ECTRANS_ID -source $sfcfile_ancillary -target $sfcfile_ancillary -overwrite -remove + fi + if [ -f $tlfile ]; then + ectrans -remote $ECTRANS_ID -source $tlfile -target $tlfile -overwrite -remove + fi + if [ -f $plfile ] && [ x$PRES_FROM_MARS != x"yes" ]; then + ectrans -remote $ECTRANS_ID -source $plfile -target $plfile -overwrite -remove + fi + if [ -f $alfile ]; then + ectrans -remote $ECTRANS_ID -source $alfile -target $alfile -overwrite -remove + fi +fi +#---------------------comet meteograms---------------- +if [[ x$COMET_MET == x"yes" ]] +then + echo "Run extract_comet_ncks.sh" + . 
$MAINDIR/extract_comet_ncks.sh +fi + +if [[ x$CLEANUP == x"yes" ]] +then + # clean up locally + for f in $mlfile $tlfile $plfile $pvfile $alfile $sfcfile $sfcfile_ancillary $mlfile_u $mlfile_v $mlfile_uv mss/${BASE}.tmp_z mss/${BASE}.tmp grib/${BASE}*.grib; + do + if [ -f $f ]; + then + rm $f + fi + done +fi diff --git a/bin/get_ecmwf_144.sh b/bin/get_ecmwf_144.sh new file mode 100755 index 0000000..509eca8 --- /dev/null +++ b/bin/get_ecmwf_144.sh @@ -0,0 +1,197 @@ +#!/bin/bash + +#Copyright (C) 2021 by Forschungszentrum Juelich GmbH +#Author(s): Joern Ungermann, May Baer, Jens-Uwe Grooss + +#SBATCH --qos=et +#SBATCH --job-name=get_ecmwf +#SBATCH --output=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/get_ecmwf.%j.out +#SBATCH --error=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/get_ecmwf.%j.out +#SBATCH --chdir=/ec/res4/scratch/ddp/mss_wms/files_for_mss/batch_out/ + + +# This script works with the cdo version installed on ECACCESS and +# in an mambaforge environment ncenv that includes cartopy (0.20.1), metpy (1.1.0) +# nco (5.0.4), netcdf4 (1.5.8), scipy (1.7.3) and xarray (0.20.2) + +# Define model domain sector, resolution and id name for ectrans in settings.config + +# defines for performance measurements +N=`date +%s%N` +export PS4='+[$(((`date +%s%N`-$N)/1000000))ms][${BASH_SOURCE}:${LINENO}]: ${FUNCNAME[0]:+${FUNCNAME[0]}(): }' +# enable line below for debugging and performance timing +# set -x + +export MAINDIR=$HOME/mss_wms/data_retrieval_for_ipa_mss_wms_atos/data-retrieval/ +export BINDIR=$MAINDIR/bin + +. ${MAINDIR}/settings.default + +if [ ! -f ${MAINDIR}/settings_144.config ]; then + echo Please copy the settings.example to settings_144.config and configure your setup! + exit 1 +fi + +. ${MAINDIR}/settings_144.config + +# get forecast date +# If used as a shell script that is run on a event trigger, +# the $MSJ* environment variables contain the corresponding time info. +# This can be done from the web interface or e.g. 
by the command +# ecaccess-job-submit -ni fc00h144 get_ecmwf.sh +# If these variables are empty, forecast times are defined in settings.config + + +if [[ $MSJ_YEAR != "" ]] +then + echo Date: $MSJ_YEAR $MSJ_MONTH $MSJ_DAY + echo BASETIME, STEP: $MSJ_BASETIME $MSJ_STEP + + export DAY=$MSJ_DAY + export MONTH=$MSJ_MONTH + export YEAR=$MSJ_YEAR + export HH=$MSJ_BASETIME + export FCSTEP=${MSJ_STEP: -3} + + case $FCSTEP in + 036) + export STEP=0/to/36/by/3 + ;; + 072) + export STEP=39/to/72/by/3 + ;; + 144) + export STEP=78/to/144/by/6 + ;; + *) + esac +fi + + +cd $WORKDIR +mkdir -p mss +mkdir -p grib + +# Set path, filenames and variables used later in the script +export DATE=${YEAR}-${MONTH}-${DAY} +export YMD=${YEAR}${MONTH}${DAY} +export TIME=${HH}:00:00 +export BASE=${DATASET}.${YMD}T${HH}.${FCSTEP} +export init_date=$(date +%Y-%m-%dT%H:%M:%S) +echo $BASE + +if [[ "$init_date" > "$DATE" ]] +then + export init_date="${DATE}T${TIME}" +fi +export time_units="hours since ${init_date}" + +# Retrieve pl files from mars +if [ x$PRES_FROM_MARS == x"yes" ]; then + echo "Run mars request for data on pl..." + date + . $BINDIR/download_ecmwf_pl.sh & +fi +# Retrieve ml, sfc, pv and pt files +echo "Run mars request for data ml lnsp/Z..." +date +. $BINDIR/download_ecmwf_ml_lnsp_z.sh & +echo "Run mars request for data on 2pv..." +date +. $BINDIR/download_ecmwf_pv.sh & +echo "Run mars request for data ml T/Q..." +date +. $BINDIR/download_ecmwf_ml_tq.sh & +echo "Run mars request for data ml u..." +date +. $BINDIR/download_ecmwf_ml_u.sh & +echo "Run mars request for data ml v..." +date +. $BINDIR/download_ecmwf_ml_v.sh & +echo "Run mars request for data sfc..." +date +. $BINDIR/download_ecmwf_sfc.sh & + +# Retrieve ml additional parameters + +if [[ x$MODEL_PARAMETERS1 != x"" ]]; then + echo "Run mars request for data ml parameters 1..." + date + . $BINDIR/download_ecmwf_ml_para1.sh & +fi +if [[ x$MODEL_PARAMETERS2 != x"" ]]; then + echo "Run mars request for data ml parameters 2..." 
+ date + . $BINDIR/download_ecmwf_ml_para2.sh & +fi +if [[ x$MODEL_PARAMETERS3 != x"" ]]; then + echo "Run mars request for data ml parameters 3..." + date + . $BINDIR/download_ecmwf_ml_para3.sh & +fi +if [[ x$MODEL_PARAMETERS4 != x"" ]]; then + echo "Run mars request for data ml parameters 4..." + date + . $BINDIR/download_ecmwf_ml_para4.sh & +fi +if [[ x$MODEL_PARAMETERS5 != x"" ]]; then + echo "Run mars request for data ml parameters 5..." + date + . $BINDIR/download_ecmwf_ml_para5.sh & +fi + + +# Convert grib to netCDF, set init time +wait +echo "Run convert.sh" +. $BINDIR/convert.sh + +if ecaccess-association-list | grep -q $ECTRANS_ID; then + echo "Transfering files to "$ECTRANS_ID + ectrans -remote $ECTRANS_ID -source $mlfile_uv -target $mlfile_uv -overwrite -remove + if [ -f $sfcfile_ancillary ]; then + ectrans -remote $ECTRANS_ID -source $sfcfile_ancillary -target $sfcfile_ancillary -overwrite -remove + fi + if [ -f $tlfile ]; then + ectrans -remote $ECTRANS_ID -source $tlfile -target $tlfile -overwrite -remove + fi + if [ -f $plfile ] && [ x$PRES_FROM_MARS != x"yes" ]; then + ectrans -remote $ECTRANS_ID -source $plfile -target $plfile -overwrite -remove + fi + if [ -f $alfile ]; then + ectrans -remote $ECTRANS_ID -source $alfile -target $alfile -overwrite -remove + fi +fi +#---------------------comet meteograms---------------- +if [[ x$COMET_MET == x"yes" ]] +then + echo "Run extract_comet_ncks.sh" + . 
$MAINDIR/extract_comet_ncks.sh + sleep 4m + fpath="/home/ms/spdescan/ddp/scratch/mss_wms/files_for_mss/mss/sites" + ppath="/home/ms/spdescan/ddp/mss_wms/data_retrieval_for_ipa_mss_wms_comet2" + a="$(find ${fpath} -name "*144.sfc.nc" | head -1)" + date + echo Run plot_comet_sfc.py + python3.8 $ppath/plot_comet_sfc.py + #for init date + initdate=$(awk -F'LL025.|.144' '{print $2}' <<< "$a") + initdate=${initdate/T/}'00' + echo Initdate ${initdate} + chmod a+r $fpath/*.png + scp -p $fpath/*.png gisi_so@lx001.pa.op.dlr.de:websites/missionsupport/classic/forecasts/forecasts2/${initdate}/ + mv $fpath/*.png $fpath/files_plotted/ + rm $fpath/*.nc +fi + +if [[ x$CLEANUP == x"yes" ]] +then + # clean up locally + for f in $mlfile $tlfile $plfile $pvfile $alfile $sfcfile $sfcfile_ancillary $mlfile_u $mlfile_v $mlfile_uv mss/${BASE}.tmp_z mss/${BASE}.tmp grib/${BASE}*.grib; + do + if [ -f $f ]; + then + rm $f + fi + done +fi diff --git a/settings.example b/settings.example index adeda84..84b2f24 100644 --- a/settings.example +++ b/settings.example @@ -1,35 +1,96 @@ +module load netcdf4 +module load nco +module load python3 +module load ecaccess +module load ecmwf-toolbox +module load cdo/new +#PREF=${CONDA_PREFIX##*/} +#if [[ $PREF != mambaforge && $PREF != ncenv ]] +#then +# PATH=$HOME/mambaforge/bin/:$PATH +#fi +#. $HOME/mambaforge/etc/profile.d/conda.sh +#conda activate ncenv + # Do not leave any whitespaces # variable=value -module load cdo -module load nco +export PYTHON=python3 +#export MODEL_REDUCTION="-d lev,0,0 -d lev,16,28,4 -d lev,32,124,2" +export MODEL_REDUCTION= +export CLEANUP=yes +#export CLEANUP=no -PREF=${CONDA_PREFIX##*/} -if [[ $PREF != mambaforge && $PREF != ncenv ]] -then - PATH=$HOME/mambaforge/bin/:$PATH -fi -. 
$HOME/mambaforge/etc/profile.d/conda.sh -conda activate ncenv +# definition of parameters and levels to read +export MODEL_LEVELS=61/to/137 +#export MODEL_LEVELS=1/to/137 +export PV_LEVELS=2000 +#export THETA_LEVELS=330/350/370/395/475 +export THETA_LEVELS= -# write data to the $SCRATCH directory with more available disk quota -export WORKDIR=$SCRATCH +# mars request seems most efficient in max. pairs of two (wind components take twice as long as other vars, use single request) +# obligatorily retrieved: LNSP/Z/U/V/T/Q +# additional parameters (adjust standardnames/units in convert.sh for nc-files if you change) +export MODEL_PARAMETERS1=W +export MODEL_PARAMETERS2=CC +export MODEL_PARAMETERS3=CLWC/CIWC +#export MODEL_PARAMETERS4=O3 +#export MODEL_PARAMETERS5=D -export PYTHON=python3 -export CLEANUP=no +#export SFC_PARAMETERS=151.128/165.128/166.128/186.128/187.128/188.128 +#export SFC_PARAMETERS=MSL/10U/10V/LCC/MCC/HCC/Z/SP +export SFC_PARAMETERS=MSL/10U/10V/LCC/MCC/HCC/BLH/CI/LSM/SSTK +#param_sfc=Z/LSM/LCC/MCC/HCC/MSL/SP/SSTK/TCC/TCW/TCWV/10U/10V/2D/2T/SKT/ASN/CI/BLH/LSP/CP/TP/CAPE/E/SD/SF/SSHF/EWSS/NSSS/IEWS/INSS/ISHF + +#export PV_PARAMETERS=3/54/129/131/132/133/203 +export PV_PARAMETERS=Z/PRES/PT/U/V/Q/O3 +#export THETA_PARAMETERS=54/60/131/132/133/155/203 +export THETA_PARAMETERS=PRESS/PV/U/V/Q/D/O3 -# configure area, grid, and transfer -export AREA=75/-15/30/42.5 -export GRID=1.0/1.0 -export ECTRANS_ID=MSS-Data-Transfer +#export PRES_LEVELS=850/500/400/300/200/150/120/100/80/65/50/40/30/20/10/5/1 +#export 
GPH_LEVELS=0/250/500/750/1000/1250/1500/1750/2000/2250/2500/2750/3000/3250/3500/3750/4000/4250/4500/4750/5000/5250/5500/5750/6000/6250/6500/6750/7000/7250/7500/7750/8000/8250/8500/8750/9000/9250/9500/9750/10000/10250/10500/10750/11000/11250/11500/11750/12000/12250/12500/12750/13000/13250/13500/13750/14000/14250/14500/14750/15000/15250/15500/15750/16000/16250/16500/16750/17000/17250/17500/17750/18000/18250/18500/18750/19000/19250/19500/19750/20000/20500/21000/21500/22000/22500/23000/23500/24000/24500/25000/25500/26000/26500/27000/27500/28000/28500/29000/29500/30000/30500/31000/31500/32000/32500/33000/33500/34000/34500/35000/35500/36000/36500/37000/37500/38000/38500/39000/39500/40000/41000/42000/43000/44000/45000/46000/47000/48000/49000/50000/51000/52000/53000/54000/55000/56000/57000/58000/59000/60000 + +export PRES_FROM_MARS=yes +export PRES_PARAMETERS=U/V/W/T/D/Q/Z/O3/PV +export PRES_LEVELS=925/850/700/500/400/300/250/150 +export GPH_LEVELS= +#export GPH_LEVELS=0/250/500/750/1000/1250/1500/1750/2000/2250/2500/2750/3000/3250/3500/3750/4000/4250/4500/4750/5000/5250/5500/5750/6000/6250/6500/6750/7000/7250/7500/7750/8000/8250/8500/8750/9000/9250/9500/9750/10000/10250/10500/10750/11000/11250/11500/11750/12000/12250/12500/12750/13000/13250/13500/13750/14000/14250/14500/14750/15000 + +#additional parameters (for add_ancillery.sh) +#export ANCILLARY="--pv --theta --tropopause --n2 --pressure" +export ANCILLARY="--pv --pressure --theta" + +#data transfer and mars request settings +export ECTRANS_ID= +export AREA=60.0/-140.0/30.0/-100.0 # sub-area of data to be extracted: N/W/S/E +#1 deg grid resolution +#export FILENAME=ecmwf_forecasts.US_LL100 +#export GRID=1.0/1.0 +#export TRUNCATION=none #options: none, auto, TXXX (e.g. T21) +#export RESOL=av #otpions: av (archived), auto, TXXX(e.g. 
T21); truncation shall replace resol but resol is still needed +#0.15 deg grid resolution: uses too much memory in PV calc if area is too large (gets killed) +export DATASET=ecmwf_forecast.US_LL015 +export GRID=0.15/0.15 +#0.25 deg grid resolution +#export DATASET=ecmwf_forecast.EUR_LL025 +#export GRID=0.25/0.25 +export TRUNCATION=auto #options: none, auto, TXXX (e.g. T21) +export RESOL=auto #options: av (archived), auto, TXXX(e.g. T21); truncation shall replace resol but resol is still needed +
+# write data to the $SCRATCH directory with more available disk quota +export WORKDIR=$SCRATCH/mss_wms/files_for_mss/ +# for operational use one can use $SCRATCHDIR which is automatically deleted after job has finished +#export WORKDIR=$SCRATCHDIR # set init and forecast times for testing # (default 36h from today 00:00) # May be overwritten by ECMWF queueing system export DAY=`date +%d` +#export DAY="27" export MONTH=`date +%m` export YEAR=`date +%Y` export HH=00 export FCSTEP=036 -export STEP=0/to/36/by/6 +export STEP=0/to/36/by/3 +