Commit 2239f84c authored by ph290

changing how the script deals with the met forcing files so that it can handle large or high-resolution grids
parent 91ed66d4
@@ -16,14 +16,15 @@ base_directory = '/gpfs/ts0/home/ph290/s2p3_rv2.0/'
num_procs = mp.cpu_count() # this will use all available processors. Note that on a multi-node machine it can only use the processors on one node
# num_procs = 1
output_directory = '/gpfs/ts0/projects/Research_Project-148395/s2p3_rv2.0/output/' #where you want the output to go
-output_file_name = 'english_chanel'
+output_file_name = 'global_tropics'
meterological_file_name = 'meterological_data'
domain_file_name = 's12_m2_s2_n2_h_map.dat'
nutrient_file_name = 'initial_nitrate.dat'
executable_file_name = 's2p3_rv2.0'
-met_data_location = base_directory+'met/spatial_data/'
-start_year = 1950
-end_year = 2017
+met_data_location = '/gpfs/ts0/projects/Research_Project-148395/s2p3_rv2.0/met_data_global_tropics/' # the location containing the tar.gz met files (in the format met_data_<year>.tar.gz)
+met_data_temporary_location = base_directory+'met/spatial_data/' # the location that each year's met data will be untarred into
+start_year = 2006
+end_year = 2006 # same as start_year results in a 1-year run
write_error_output = False
##################################################
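The comment on met_data_location above fixes the archive naming as met_data_<year>.tar.gz. A minimal pre-flight check along those lines (a sketch only; check_met_archives is a hypothetical helper, not part of the script):

import os

def check_met_archives(met_data_location, start_year, end_year):
    # return the list of expected met_data_<year>.tar.gz archives
    # that are missing from met_data_location (hypothetical helper)
    missing = []
    for year in range(start_year, end_year + 1):
        archive = os.path.join(met_data_location, 'met_data_' + str(year) + '.tar.gz')
        if not os.path.isfile(archive):
            missing.append(archive)
    return missing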
@@ -44,7 +45,7 @@ def return_domain_lon(filename,i):
    return filter(lambda a: a != '', lines[i+1].split(' '))[0:2]
-def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_location,i):
+def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_temporary_location,i):
    #modifying so that the fortran code looks for the correct met file, rather than us having to copy it into the working directory
    lon,lat = return_domain_lon(base_directory+'domain/'+domain_file_name,i)
    forcing_lat_lon = closest(lats_lons, float(lat),float(lon))
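closest() is defined elsewhere in the script and not shown in this diff; a plausible nearest-neighbour sketch, assuming lats_lons is a list of [lat, lon] pairs (as built further down), might be:

def closest(lats_lons, lat, lon):
    # pick the forcing point whose [lat, lon] pair minimises the squared
    # distance to the requested domain point (pair order is an assumption)
    return min(lats_lons, key=lambda p: (float(p[0]) - lat) ** 2 + (float(p[1]) - lon) ** 2)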
@@ -56,7 +57,7 @@ def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_
../domain/"""+domain_file_name+"""
../domain/"""+nutrient_file_name+"""
"""+unique_job_id+"""
"""+met_data_location+"""
"""+met_data_temporary_location+"""
map
"""+str(i+1)+"""
"""+str(start_year)+"""
@@ -66,7 +67,7 @@ map
../domain/"""+domain_file_name+"""
../domain/"""+nutrient_file_name+"""
"""+unique_job_id+"""
"""+met_data_location+"""
"""+met_data_temporary_location+"""
map
"""+str(i+1)+"""
EOF"""
@@ -86,7 +87,7 @@ unique_job_id = str(uuid.uuid4())
num_lines = sum(1 for line in open(base_directory+'domain/'+domain_file_name)) - 1
# num_lines = 10
-files = glob.glob(met_data_location+'*_'+str(start_year)+'.dat')
+files = glob.glob(met_data_temporary_location+'*_'+str(start_year)+'.dat')
w, h = 2, len(files)
lats_lons = [[0 for x in range(w)] for y in range(h)]
for i,file in enumerate(files):
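The body of this enumerate loop is elided in the diff; it fills lats_lons from the met file names found by the glob above. A sketch of that step, under the hypothetical assumption that each forcing file is named <lat>_<lon>_<year>.dat (the real layout is not visible here):

import os

for i, f in enumerate(files):
    parts = os.path.basename(f).split('_')
    lats_lons[i][0] = parts[0]  # latitude  (position in the name is an assumption)
    lats_lons[i][1] = parts[1]  # longitude (position in the name is an assumption)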
@@ -96,12 +97,25 @@ for i,file in enumerate(files):
for year in range(start_year,end_year+1):
+    #clean up any preexisting met files
+    try:
+        files_to_delete = glob.glob(met_data_temporary_location+'*.dat')
+        [os.remove(f) for f in files_to_delete]
+    except:
+        print 'no met files to clean up'
+    subprocess.call('tar -C '+met_data_temporary_location+' -zxvf '+met_data_location+'met_data_'+str(year)+'.tar.gz', shell=True)
+    #remove the files that have now been tar.gzipped
+    files_to_delete = glob.glob(output_directory+output_file_name+'*.dat')
+    [os.remove(f) for f in files_to_delete]
+    # extraction pattern: tar -C /myfolder -zxvf yourfile.tar.gz
    try:
        shutil.move(output_directory+output_file_name+'_'+str(year), output_directory+output_file_name+'_'+str(year)+'_previous')
    except:
        print 'no previous output file to move'
    pool = mp.Pool(processes=num_procs)
-    func = partial(run_model, domain_file_name, lats_lons, year, start_year, unique_job_id, met_data_location)
+    func = partial(run_model, domain_file_name, lats_lons, year, start_year, unique_job_id, met_data_temporary_location)
    # results,errors = pool.map(func, range(num_lines))
    results, errors = zip(*pool.map(func, range(num_lines)))
    # results = pool.map(func, range(num_lines))
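Because run_model returns a (result, error) pair for each grid point, zip(*pool.map(...)) transposes the list of pairs into one tuple of results and one of errors. A self-contained illustration of that pattern (work is a stand-in, not part of the script):

import multiprocessing as mp
from functools import partial

def work(scale, i):
    # stands in for run_model: returns a (result, error) pair
    return scale * i, None

if __name__ == '__main__':
    pool = mp.Pool(processes=2)
    results, errors = zip(*pool.map(partial(work, 10), range(4)))
    # results == (0, 10, 20, 30); errors == (None, None, None, None)
    pool.close()
    pool.join()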
@@ -112,6 +126,12 @@ for year in range(start_year,end_year+1):
    with open(output_directory+output_file_name+'_error_'+str(year),'w') as fout:
        for error in errors:
            fout.write(error)
+#clean up any leftover met files
+try:
+    files_to_delete = glob.glob(met_data_temporary_location+'*.dat')
+    [os.remove(f) for f in files_to_delete]
+except:
+    print 'no met files to clean up'
remove_files = glob.glob(base_directory+'main/*'+unique_job_id+'*')
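The extract-and-clean cycle above shells out to tar; the standard-library tarfile module can do the same without a subprocess. A sketch under the same met_data_<year>.tar.gz naming assumption (extract_met_year is a hypothetical helper):

import glob
import os
import tarfile

def extract_met_year(met_data_location, met_data_temporary_location, year):
    # drop any .dat files left over from the previous year's extraction
    for f in glob.glob(os.path.join(met_data_temporary_location, '*.dat')):
        os.remove(f)
    archive = os.path.join(met_data_location, 'met_data_' + str(year) + '.tar.gz')
    with tarfile.open(archive, 'r:gz') as tar:
        tar.extractall(met_data_temporary_location)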
@@ -125,7 +145,7 @@ for remove_file in remove_files:
# for year in range(start_year,end_year+1):
# pool = mp.Pool(processes=num_procs)
-# func = partial(run_model2, domain_file_name, lats_lons,year,start_year,unique_job_id, met_data_location)
+# func = partial(run_model2, domain_file_name, lats_lons,year,start_year,unique_job_id, met_data_temporary_location)
# # results,errors = pool.map(func, range(num_lines))
# results = pool.map(func, range(num_lines))
# with open(output_directory+output_file_name+'_'+str(year)+'_error','w') as fout: