anomaly = (year_euro4/clim_cube)*100
print anomaly.summary(True)
unknown / (1) (grid_latitude: 1000; grid_longitude: 1100)
anomaly.standard_name = year_euro4.standard_name
# I've also tried anomaly.rename(year_euro4.standard_name) but that didn't work either
anomaly.units = '%'
print anomaly.summary(True)
surface_downwelling_shortwave_flux_in_air / (%) (grid_latitude: 1000; grid_longitude: 1100)
iris.save(anomaly, os.path.join(save_dir, filename))
cube = iris.load_cube('path/to/file')  # load the file I just saved back in
print cube.summary(True)
unknown / (unknown) (grid_latitude: 1000; grid_longitude: 1100)
anomaly.standard_name = year_euro4.standard_name
anomaly.units = year_euro4.units
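
For reference, something I could also try (an untested sketch, the NetCDF filename is just a placeholder): copy the whole metadata across from the source cube in one go instead of only the standard_name, then override the units, and write a NetCDF copy alongside the PP file to see whether the names survive a round trip in that format.

import os
import iris

# untested sketch: copy all metadata (names, units, attributes, cell methods)
# from the source cube, then override just the units
anomaly.metadata = year_euro4.metadata
anomaly.units = '%'

# save a NetCDF copy next to the PP file and reload it to compare the summaries
nc_path = os.path.join(save_dir, 'anomaly_check.nc')  # placeholder filename
iris.save(anomaly, nc_path)
print iris.load_cube(nc_path).summary(True)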
import os
import iris
import iris.analysis
import iris.time

basepath_clim = '/data/local/ipaspald/euro4_retrieved/clim_averages'
dirnames_clim = [os.path.join(basepath_clim, dirname) for dirname in os.listdir(basepath_clim)
                 if os.path.isdir(os.path.join(basepath_clim, dirname))]
basepath_euro4 = os.path.dirname(basepath_clim)
full_euro4 = iris.load_cube(os.path.join(basepath_euro4, '*.pp'))  # load all my monthly-averaged cube data; I extract what I need from this
full_euro4.coord('time').bounds = None  # otherwise extract() fails with an error along the lines of "Cannot determine if points lie in a region of bounded datetime objects"
for clim_average in dirnames_clim:  # do each climatological period I have
    print 'Period ' + os.path.basename(clim_average)
    save_dir = os.path.join(basepath_euro4, 'anomalies', os.path.basename(clim_average))
    for year in range(1979, 2013 + 1):
        print '\tYear ' + str(year)
        filename = 'anomaly_Year{0}_vs_{1}{2}'.format(year, os.path.basename(clim_average), '.pp')
        clim_cube = iris.load_cube(os.path.join(clim_average, 'full', '*.pp'))
        constraint = iris.Constraint(time=iris.time.PartialDateTime(year=year))  # get a single year
        year_euro4 = full_euro4.extract(constraint).collapsed('time', iris.analysis.MEAN)  # collapse my monthly averages into a whole-year mean
        anomaly = (year_euro4 / clim_cube) * 100
        anomaly.standard_name = full_euro4.standard_name
        print anomaly.standard_name
        anomaly.units = 'percent'
        print anomaly.units
        iris.save(anomaly, os.path.join(save_dir, filename))
        # bc.cube_saver(anomaly, save_dir, filename)  # a func I've defined that creates save_dir if it doesn't already exist and then calls iris.save -- see the sketch after this script
        for month in range(1, 12 + 1):
            print '\t\tMonth ' + str(month)
            filename = 'anomaly_Year{0}_Month{1}_vs_{2}{3}'.format(year, month, os.path.basename(clim_average), '.pp')
            clim_cube = iris.load_cube(os.path.join(clim_average, '*month' + str(month) + '_*'))
            constraint = iris.Constraint(time=iris.time.PartialDateTime(year=year, month=month))  # get just one month
            year_month_euro4 = full_euro4.extract(constraint)
            anomaly = (year_month_euro4 / clim_cube) * 100
            anomaly.standard_name = full_euro4.standard_name
            anomaly.units = 'percent'
            iris.save(anomaly, os.path.join(save_dir, filename))
            # bc.cube_saver(anomaly, save_dir, filename)

print 'DONE'
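
For completeness, the bc.cube_saver helper mentioned in the comments above is just a small function along these lines (simplified sketch of what the comment describes: create the output directory if it doesn't already exist, then hand everything over to iris.save):

import os
import iris

def cube_saver(cube, save_dir, filename):
    # make sure the target directory exists, then defer to iris.save
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    iris.save(cube, os.path.join(save_dir, filename))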
surface_downwelling_shortwave_flux_in_air / (W m-2) (grid_latitude: 1000; grid_longitude: 1100)
     Dimension coordinates:
          grid_latitude     x    -
          grid_longitude    -    x
     Scalar coordinates:
          forecast_period: -346.0 hours, bound=(-718.0, 26.0) hours
          forecast_reference_time: 1979-07-16 21:31:40, bound=(1979-01-30 21:31:40, 1979-12-30 21:31:40)
          time: 1979-07-02 17:31:40, bound=(1979-01-16 23:31:40, 1979-12-16 11:31:40)
     Attributes:
          STASH: m01s01i235
          source: Data from Met Office Unified Model
          um_version: 8.2
     Cell methods:
          mean: time
          mean: time

surface_downwelling_shortwave_flux_in_air / (W m-2) (grid_latitude: 1000; grid_longitude: 1100)
     Dimension coordinates:
          grid_latitude     x    -
          grid_longitude    -    x
     Scalar coordinates:
          forecast_period: -131452.0 hours, bound=(-262930.0, 26.0) hours
          forecast_reference_time: 2013-12-30 15:31:40
          time: 1999-01-01 11:31:40, bound=(1984-01-02 05:31:40, 2013-12-31 17:31:40)
     Attributes:
          STASH: m01s01i235
          source: Data from Met Office Unified Model
          um_version: 8.2
     Cell methods:
          mean: time