Commit 423490e6 authored by ph290

update

parent 985d9d5e
@@ -18,20 +18,23 @@ num_procs = mp.cpu_count() # this will use all available processors. Note that o
# num_procs = 1
# output_directory = '/gpfs/ts0/projects/Research_Project-148395/s2p3_rv2.0/output/era5/test/' #where you want the output to go
output_directory = '/data/ph290/s2p3_rv2.0/output/era5_global/' #where you want the output to go
# output_directory = '/data/ph290/s2p3_rv2.0/output/era5_global/' #where you want the output to go
output_directory = '/data/ph290/s2p3_rv2.0/output/gbrish/' #where you want the output to go
#output_file_name = 'global_tropics_era5_test'
#meterological_file_name = 'meterological_data'
#domain_file_name = 's12_m2_s2_n2_h_map_test.dat'
#nutrient_file_name = 'initial_nitrate_test.dat'
#executable_file_name = 's2p3_rv2.0'
output_file_name = 'global_tropics_era5'
# output_file_name = 'global_tropics_era5'
# meterological_file_name = 'meterological_data'
# domain_file_name = 's12_m2_s2_n2_h_map_1801803030_0.1.dat'
# nutrient_file_name = 'initial_nitrate_1801803030_0.1.dat'
# executable_file_name = 's2p3_rv2.0'
output_file_name = 'gbrish_era5'
meterological_file_name = 'meterological_data'
domain_file_name = 's12_m2_s2_n2_h_map_1801803030_0.1.dat'
nutrient_file_name = 'initial_nitrate_1801803030_0.1.dat'
domain_file_name = 's12_m2_s2_n2_h_map_gbrish.dat'
nutrient_file_name = 'initial_nitrate_gbrish.dat'
executable_file_name = 's2p3_rv2.0'
# output_file_name = 'gbr_coarse'
# meterological_file_name = 'meterological_data'
# domain_file_name = 's12_m2_s2_n2_h_map_gbr_coarse.dat'
@@ -45,7 +48,7 @@ met_data_location = '/data/ph290/s2p3_rv2.0/met_data/era5_global/' # The locatio
met_data_temporary_location = '/mnt/ramdisk/' # The location that met data for each year will be un-tar.gzipped (extracted) into
# each grid point has to read in a new meteorology dataset from disk each year, so it may make sense to make this temporary location a RAM disk (see readme)
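# Hedged sketch, not part of the original script: one way to check that the
# temporary location really is a mounted RAM disk before the run starts.
# Assumes a Linux tmpfs prepared in advance, e.g.:
#   sudo mount -t tmpfs -o size=2g tmpfs /mnt/ramdisk
import os
if not os.path.ismount(met_data_temporary_location.rstrip('/')):
    print('warning: ' + met_data_temporary_location + ' is not a mount point; met data will be read from ordinary disk')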
start_year = 1979
start_year = 1998
# having to restart because of isca restart
end_year = 2017 # same as start year results in a 1-year run
@@ -154,7 +157,7 @@ def put_data_into_cube(df,df_domain,variable,specifying_names,standard_name,long
cube.data = np.ma.masked_where(cube.data == -999.99,cube.data)
return cube
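# Illustration only (hypothetical values) of the -999.99 masking used in put_data_into_cube above:
import numpy as np
example = np.array([26.1, -999.99, 27.3])
masked = np.ma.masked_where(example == -999.99, example)
print(masked)  # [26.1 -- 27.3]; iris should then write the masked points as the netCDF fill value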
def output_netcdf(year,column_names,df,df_domain,column_name,specifying_names,standard_name,long_name,var_name,units,run_start_date,output_cube, output_directory,output_file_name,i):
def output_netcdf(year,column_names,df,df_domain,specifying_names,standard_name,long_name,var_name,units,run_start_date,output_cube, output_directory,output_file_name,i):
column_name = column_names[i]
output_cube = put_data_into_cube(df,df_domain,column_name,specifying_names,standard_name,long_name,var_name,units,run_start_date)
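# zlib=True / complevel=2 below request netCDF-4 deflate compression (kwargs passed through to the underlying netCDF4 library; a low complevel trades a little file size for faster writes)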
iris.fileformats.netcdf.save(output_cube, output_directory+output_file_name+'_'+column_name.replace(" ", "")+'_'+str(year)+'.nc', zlib=True, complevel=2)
@@ -400,9 +403,8 @@ if parallel_processing:
func = partial(output_netcdf,year,column_names,df,df_domain,column_name,specifying_names,standard_name,long_name,var_name,units,run_start_date,output_cube, output_directory,output_file_name)
func = partial(output_netcdf,year,column_names,df,df_domain,specifying_names,standard_name,long_name,var_name,units,run_start_date,output_cube, output_directory,output_file_name)
my_log = zip(*pool.map(func, range(4,len(column_names))))
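# Standalone toy sketch (hypothetical names) of the refactor above: functools.partial
# binds the shared arguments so pool.map only supplies the varying column index i,
# matching output_netcdf's new trailing-i signature.
import multiprocessing as mp
from functools import partial

def toy_worker(prefix, names, i):  # stands in for output_netcdf
    return prefix + names[i].replace(' ', '')

if __name__ == '__main__':
    names = ['lon', 'lat', 'day', 'year', 'surface temperature']
    pool = mp.Pool(2)
    func = partial(toy_worker, 'out_', names)
    print(pool.map(func, range(4, len(names))))  # -> ['out_surfacetemperature']
    pool.close()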
"""
else:
with open(output_directory+output_file_name+'_'+str(year),'w') as fout:
for result in results:
@@ -411,7 +413,6 @@
with open(output_directory+output_file_name+'_error_'+str(year),'w') as fout:
for error in errors:
fout.write(error)
"""
pool.close()
# clean up any leftover met files