Commit e9fcc397 authored by ph290's avatar ph290
Browse files

removing many of the separate read statements so that it only reads the domain...

removing many of the separate read statements so that it only reads the domain file etc. once per year rather than for each profile
parent 3e856863
......@@ -18,13 +18,15 @@ num_procs = mp.cpu_count() # this will use all available processors. Note that o
output_directory = '/gpfs/ts0/projects/Research_Project-148395/s2p3_rv2.0/output/' #where you want the output to go
output_file_name = 'global_tropics'
meterological_file_name = 'meterological_data'
# NOTE(review): both assignments survive from the diff; the second (global
# map) value is the one in effect at runtime.
domain_file_name = 's12_m2_s2_n2_h_map.dat'
domain_file_name = 's12_m2_s2_n2_h_map_global.dat'
nutrient_file_name = 'initial_nitrate.dat'
executable_file_name = 's2p3_rv2.0'
met_data_location = '/gpfs/ts0/projects/Research_Project-148395/s2p3_rv2.0/met_data_global_tropics/' # The location containing the tar.gz met files (in the format met_data_year.tar.gz)
met_data_temporary_location = base_directory+'met/spatial_data/' # The location that met data for each year will be un-tar.gzipped into
start_year = 2006
end_year = 2006 # same as start year results in a 1 year run
depth_min = 10 # lower bound used when filtering domain points (see depth test below)
depth_max = 100 # upper bound used when filtering domain points
write_error_output = False
##################################################
......@@ -39,16 +41,58 @@ def distance(lat1, lon1, lat2, lon2):
def closest(data, lat1,lon1):
    # Linear scan for the entry in `data` whose distance() from (lat1, lon1)
    # is smallest. Ties keep the earliest entry, matching min()'s behaviour.
    nearest = None
    nearest_dist = None
    for point in data:
        d = distance(lat1, lon1, point[0], point[1])
        if nearest_dist is None or d < nearest_dist:
            nearest = point
            nearest_dist = d
    return nearest
def return_domain_lon(filename,i):
    """Return the [lon, lat] string pair from record i of the domain file.

    The file's first line is a header, so record i lives on line i+1.
    Tokens are whitespace-separated; empty tokens from repeated spaces are
    filtered out and the first two surviving tokens (lon, lat) returned.

    Fixes vs. original: the file handle was never closed (leaked one handle
    per call), and filter(...)[0:2] relied on Python 2's list-returning
    filter; list(filter(...)) works on both Python 2 and 3.
    """
    with open(filename) as f:
        lines = f.readlines()
    return list(filter(lambda a: a != '', lines[i + 1].split(' ')))[0:2]
def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_temporary_location,i):
# def return_domain_lon(filename,i):
# f=open(filename)
# lines=f.readlines()
# return filter(lambda a: a != '', lines[i+1].split(' '))[0:2]
#
#
# Read the domain description and nutrient initial-condition files ONCE per
# run (previously re-read for every profile), caching the per-point columns
# in parallel lists indexed by retained grid point.
f=open(base_directory+'domain/'+domain_file_name)
lines=f.readlines()
f2=open(base_directory+'domain/'+nutrient_file_name)
lines2=f2.readlines()
lat_domain=[]
lon_domain=[]
alldepth=[]
# smaj1..smin5: five pairs of fixed-width columns fed verbatim to the model
# executable -- presumably tidal-constituent current components; TODO confirm
# against the Fortran reader.
smaj1=[]
smin1=[]
smaj2=[]
smin2=[]
smaj3=[]
smin3=[]
smaj4=[]
smin4=[]
smaj5=[]
smin5=[]
woa_nutrient=[]
# lines[0] is a header; parse the fixed-width columns of each data record.
for i,line in enumerate(lines[1::]):
    depth = float(line[77:84])  # depth column, also stored as alldepth below
    # keep only points whose depth falls inside the configured window
    if ((depth >= depth_min) & (depth <= depth_max) & (depth > 0.0)):
        lon_domain.append(line[0:8])
        lat_domain.append(line[8:16])
        alldepth.append(line[77:84])
        smaj1.append(line[16:22])
        smin1.append(line[22:28])
        smaj2.append(line[28:34])
        smin2.append(line[34:40])
        smaj3.append(line[40:46])
        smin3.append(line[46:52])
        smaj4.append(line[52:58])
        smin4.append(line[58:64])
        smaj5.append(line[64:70])
        smin5.append(line[70:76])
        # NOTE(review): domain rows are read with a header offset (i+1) but
        # the nutrient file is indexed by bare i -- confirm the nutrient file
        # has no header line, otherwise rows are misaligned by one.
        woa_nutrient.append(lines2[i][16:22])
def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_temporary_location,lon_domain,lat_domain,smaj1,smin1,smaj2,smin2,smaj3,smin3,smaj4,smin4,smaj5,smin5,woa_nutrient,alldepth,i):
#modifying so that the fortran code looks for the correct met file, rather than us having to copy it into the working dorectory
lon,lat = return_domain_lon(base_directory+'domain/'+domain_file_name,i)
forcing_lat_lon = closest(lats_lons, float(lat),float(lon))
# lon,lat = return_domain_lon(base_directory+'domain/'+domain_file_name,i)
forcing_lat_lon = closest(lats_lons, float(lat_domain[i]),float(lon_domain[i]))
run_command = """./"""+executable_file_name+""" << EOF
"""+str(start_year)+"""
"""+str(year)+"""
......@@ -60,6 +104,18 @@ def run_model(domain_file_name,lats_lons,year,start_year,unique_job_id,met_data_
"""+met_data_temporary_location+"""
map
"""+str(i+1)+"""
"""+str(smaj1[i])+"""
"""+str(smin1[i])+"""
"""+str(smaj2[i])+"""
"""+str(smin2[i])+"""
"""+str(smaj3[i])+"""
"""+str(smin3[i])+"""
"""+str(smaj4[i])+"""
"""+str(smin4[i])+"""
"""+str(smaj5[i])+"""
"""+str(smin5[i])+"""
"""+str(woa_nutrient[i])+"""
"""+str(alldepth[i])+"""
"""+str(start_year)+"""
"""+str(year)+"""
"""+str(forcing_lat_lon[0])+"""
......@@ -70,6 +126,18 @@ map
"""+met_data_temporary_location+"""
map
"""+str(i+1)+"""
"""+str(smaj1[i])+"""
"""+str(smin1[i])+"""
"""+str(smaj2[i])+"""
"""+str(smin2[i])+"""
"""+str(smaj3[i])+"""
"""+str(smin3[i])+"""
"""+str(smaj4[i])+"""
"""+str(smin4[i])+"""
"""+str(smaj5[i])+"""
"""+str(smin5[i])+"""
"""+str(woa_nutrient[i])+"""
"""+str(alldepth[i])+"""
EOF"""
# print run_command
proc = subprocess.Popen([run_command], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
......@@ -87,6 +155,9 @@ unique_job_id = str(uuid.uuid4())
num_lines = sum(1 for line in open(base_directory+'domain/'+domain_file_name)) - 1
# num_lines = 10
subprocess.call('tar -C '+met_data_temporary_location+' -zxvf '+met_data_location+'met_data_'+str(start_year)+'.tar.gz', shell=True)
files = glob.glob(met_data_temporary_location+'*_'+str(start_year)+'.dat')
w, h = 2, len(files) ;
lats_lons = [[0 for x in range(w)] for y in range(h)]
......@@ -96,6 +167,8 @@ for i,file in enumerate(files):
lats_lons[i][1] = float(tmp[1].split('_')[0])
for year in range(start_year,end_year+1):
#clean up and prexisting met files
try:
......@@ -104,17 +177,14 @@ for year in range(start_year,end_year+1):
except:
print 'no met files to clean up'
subprocess.call('tar -C '+met_data_temporary_location+' -zxvf '+met_data_location+'met_data_'+str(year)+'.tar.gz', shell=True)
#remove the files that have now been tar.gzped
files_to_delete = glob.glob(output_directory+' -name '+output_filename+'*.dat')
[os.remove(f) for f in files_to_delete]
try:
shutil.move(output_directory+output_file_name+'_'+str(year), output_directory+output_file_name+'_'+str(year)+'_previous')
except:
print 'no previous output file to move'
pool = mp.Pool(processes=num_procs)
func = partial(run_model, domain_file_name, lats_lons, year, start_year, unique_job_id, met_data_temporary_location)
func = partial(run_model, domain_file_name, lats_lons, year, start_year, unique_job_id, met_data_temporary_location,lon_domain,lat_domain,smaj1,smin1,smaj2,smin2,smaj3,smin3,smaj4,smin4,smaj5,smin5,woa_nutrient,alldepth)
# results,errors = pool.map(func, range(num_lines))
results, errors = zip(*pool.map(func, range(num_lines)))
results, errors = zip(*pool.map(func, range(len(lat_domain))))
# results = pool.map(func, range(num_lines))
with open(output_directory+output_file_name+'_'+str(year),'w') as fout:
for result in results:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment