Merge pull request #1287 from mpaiao/mpaiao-pr-pft-swapper
Indexing fix in multiple python scripts
rgknox authored Dec 26, 2024
2 parents 296e1d6 + 6a44ef4 commit 3a1f17b
Showing 4 changed files with 45 additions and 18 deletions.
10 changes: 7 additions & 3 deletions tools/BatchPatchParams.py
@@ -107,13 +107,17 @@ def main():
base_cdl = xmlroot.find('base_file').text
new_cdl = xmlroot.find('new_file').text

# Append extension nc to temporary files
# (in some netcdf versions, the lack of extension causes failures)
ext_nc = ".nc"

# Convert the base cdl file into a temp nc binary
base_nc = os.popen('mktemp').read().rstrip('\n')
base_nc = os.popen('mktemp').read().rstrip('\n')+ext_nc
gencmd = "ncgen -o "+base_nc+" "+base_cdl
os.system(gencmd)

# Generate a temp output file name
new_nc = os.popen('mktemp').read().rstrip('\n')
new_nc = os.popen('mktemp').read().rstrip('\n')+ext_nc

os.system("ls "+base_nc)
os.system("ls "+new_nc)
@@ -190,7 +194,7 @@ def main():
fp_nc.close()

# Sort the new file
newer_nc = os.popen('mktemp').read().rstrip('\n')
newer_nc = os.popen('mktemp').read().rstrip('\n')+ext_nc
os.system("../tools/ncvarsort.py --fin "+new_nc+" --fout "+newer_nc+" --overwrite")


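
For context, the BatchPatchParams.py hunks reduce to the pattern below: a minimal standalone sketch of creating a temporary netCDF file with an explicit ".nc" extension. The CDL file name is only an example, and ncgen is assumed to be on the PATH.

import os

# Some netCDF builds fail on files without a recognized extension, so ".nc"
# is appended to the temporary name returned by mktemp before ncgen writes it.
ext_nc = ".nc"
base_cdl = "fates_params_default.cdl"                      # example input CDL
base_nc = os.popen('mktemp').read().rstrip('\n') + ext_nc
os.system("ncgen -o " + base_nc + " " + base_cdl)
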
7 changes: 4 additions & 3 deletions tools/FatesPFTIndexSwapper.py
@@ -203,8 +203,9 @@ def main(argv):
# Copy over the input data
# Tedious, but I have to permute through all combinations of dimension position
if( pft_dim_len == 0 ):
out_var = fp_out.createVariable(key,'d',(fp_in.variables.get(key).dimensions))
out_var.assignValue(float(fp_in.variables.get(key).data))
# Scalar: do not assume any dimensions.
out_var = fp_out.createVariable(key,'d',())
out_var[()] = in_var[()]
elif( (pft_dim_found==-1) & (prt_dim_found==-1) & (litt_dim_found==-1) & (hydro_dim_found==-1) & (landuse_dim_found==-1) ):
out_var = fp_out.createVariable(key,'d',(fp_in.variables.get(key).dimensions))
out_var[:] = in_var[:]
@@ -283,7 +284,7 @@ def main(argv):
fp_in.close()
fp_out.close()

print('Cloneing complete!')
print('Cloning complete!')
exit(0)


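
The new scalar branch can be exercised in isolation with the sketch below, which assumes scipy's netcdf_file interface (consistent with the .data/typecode style used in this tool); file and variable names are hypothetical. A variable created with an empty dimension tuple is 0-dimensional and is read and written through the [()] index instead of assignValue(); the same substitution appears in modify_fates_paramfile.py below.

from scipy.io import netcdf_file

# Write a small file containing one scalar (0-d) variable ...
fp_out = netcdf_file("params_out.nc", "w")
out_var = fp_out.createVariable("fates_dummy_scalar", "d", ())  # empty dims => scalar
out_var[()] = 42.0                                              # replaces assignValue(42.0)
fp_out.close()

# ... then read it back and copy it into a second file, as the loop above does.
fp_in = netcdf_file("params_out.nc", "r", mmap=False)
fp_copy = netcdf_file("params_copy.nc", "w")
in_var = fp_in.variables["fates_dummy_scalar"]
copy_var = fp_copy.createVariable("fates_dummy_scalar", "d", ())
copy_var[()] = in_var[()]
fp_in.close()
fp_copy.close()
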
2 changes: 1 addition & 1 deletion tools/modify_fates_paramfile.py
@@ -133,7 +133,7 @@ def main():
for i in range(var.shape[0]):
var[i] = outputval[i]
elif(ndim_file==0):
var.assignValue(outputval[0])
var[()] = outputval[()]

else:
print("Unhandled dimension size in modify_fates_paramfile.py")
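
The same [()] assignment also covers the in-place case handled here. A minimal sketch under two assumptions: scipy's netcdf_file is the underlying interface (as elsewhere in this patch), and the target is an existing scalar variable. File and variable names are hypothetical, and the real tool's file handling may differ.

from scipy.io import netcdf_file

# Open an existing parameter file in append mode and overwrite one scalar;
# mmap is disabled so the in-memory data can be modified and rewritten on close().
fp = netcdf_file("fates_params.nc", "a", mmap=False)
var = fp.variables["fates_dummy_scalar"]
var[()] = 3.14          # 0-d assignment; previously var.assignValue(3.14)
fp.close()
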
44 changes: 33 additions & 11 deletions tools/ncvarsort.py
@@ -4,11 +4,19 @@
# --input or --fin: input filename.
# --output or --fout: output filename. If missing, will assume its directly modifying the input file, and will prompt unless -O is specified

import netCDF4 as nc
#import netCDF4 as nc
import sys
import os
import argparse

# Newer versions of scipy have dropped the netcdf module and
# netcdf functions are part of the io parent module
try:
from scipy import io as nc

except ImportError:
from scipy.io import netcdf as nc

# program sorts the variables based on the provided list, and pulls them one at a time
# from an existing file and adds them to a new file in the sorted order.
# input/output based on code here: https://gist.github.com/guziy/8543562
@@ -25,7 +33,7 @@ def main():
args = parser.parse_args()
#
# open the input dataset
dsin = nc.Dataset(args.fnamein, 'r')
dsin = nc.netcdf_file(args.fnamein, 'r')
#
# make empty lists to hold the variable names in. the first of these is a list of sub-lists,
# one for each type of variable (based on dimensionality).
@@ -98,13 +106,13 @@ def main():
else:
raise ValueError('Output file already exists and overwrite flag not specified for filename: '+args.fnameout)
#
dsout = nc.Dataset(args.fnameout, "w")
dsout = nc.netcdf_file(args.fnameout, "w")
#
#Copy dimensions
for dname, the_dim in dsin.dimensions.items():
if args.debug:
if (verbose): print(dname, the_dim.size)
dsout.createDimension(dname, the_dim.size )
if (verbose): print(dname, the_dim)
dsout.createDimension(dname, int(the_dim) )
#
if (verbose): print()
#
@@ -118,16 +126,30 @@ def main():
# as well as all metadata to the new file.
for i in range(len(varnames_list_sorted)):
v_name = varnames_list_sorted[i]
varin = dsin.variables[v_name]
outVar = dsout.createVariable(v_name, varin.datatype, varin.dimensions)
varin = dsin.variables.get(v_name)
v_type = dsin.variables[v_name].typecode()
v_dims = varin.dimensions
outVar = dsout.createVariable(v_name, v_type, v_dims)

n_dims = len(v_dims)
if args.debug:
if (verbose): print(v_name)
#
outVar.setncatts({k: varin.getncattr(k) for k in varin.ncattrs()})
outVar[:] = varin[:]

# Copy attributes
for v_attr in varin._attributes:
setattr(outVar,v_attr,getattr(varin,v_attr))

if ( n_dims == 0):
outVar[()] = varin[()]
else:
outVar[:] = varin[:]
#
# copy global attributes
dsout.setncatts({k: dsin.getncattr(k) for k in dsin.ncattrs()})#

# copy global attributes
for g_attr in dsin._attributes:
setattr(dsout,g_attr,getattr(dsin,g_attr))

#
# close the output file
dsin.close()
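
Taken together, the ncvarsort.py changes amount to the copy loop below: a self-contained sketch using the same version-tolerant scipy import as the patch. File names are hypothetical, no unlimited dimension is assumed, and attribute copying relies on the _attributes dict that scipy's netcdf objects maintain.

# Newer scipy exposes the netcdf functions from the io parent module;
# older releases kept them in scipy.io.netcdf.
try:
    from scipy import io as nc
except ImportError:
    from scipy.io import netcdf as nc

dsin = nc.netcdf_file("fates_params_in.nc", "r", mmap=False)
dsout = nc.netcdf_file("fates_params_sorted.nc", "w")

# Copy dimensions; scipy maps each dimension name to its integer length.
for dname, dlen in dsin.dimensions.items():
    dsout.createDimension(dname, int(dlen))

# Copy each variable: type, dimensions, attributes, and data (scalar or array).
for v_name, varin in dsin.variables.items():
    outVar = dsout.createVariable(v_name, varin.typecode(), varin.dimensions)
    for v_attr in varin._attributes:
        setattr(outVar, v_attr, getattr(varin, v_attr))
    if len(varin.dimensions) == 0:
        outVar[()] = varin[()]
    else:
        outVar[:] = varin[:]

# Copy global attributes, then close both files (data is written on close).
for g_attr in dsin._attributes:
    setattr(dsout, g_attr, getattr(dsin, g_attr))
dsin.close()
dsout.close()
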
