#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 3 08:58:53 2018

@author: sonja

Seasonal (DJF) temperature forecast over Europe from autumn precursor
fields (sea ice, snow cover, SSTs, SLP, 500 hPa geopotential height):
reads netCDF inputs, builds cluster composites, projects them onto the
target year and saves correlation-skill maps.
"""
import numpy as np
from netCDF4 import Dataset
from scipy.cluster.hierarchy import linkage, fcluster
import matplotlib
matplotlib.rcParams['backend'] = "Qt4Agg"
from mpl_toolkits.basemap import Basemap
import os  # needed for loop over files
import scipy.stats
import operator
from matplotlib import pyplot as plt


def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
    '''
    Function to offset the "center" of a colormap. Useful for
    data with a negative min and positive max and you want the
    middle of the colormap's dynamic range to be at zero.

    Input
    -----
      cmap : The matplotlib colormap to be altered
      start : Offset from lowest point in the colormap's range.
          Defaults to 0.0 (no lower offset). Should be between
          0.0 and `midpoint`.
      midpoint : The new center of the colormap. Defaults to
          0.5 (no shift). Should be between 0.0 and 1.0. In
          general, this should be 1 - vmax/(vmax + abs(vmin)).
          For example if your data range from -15.0 to +5.0 and
          you want the center of the colormap at 0.0, `midpoint`
          should be set to 1 - 5/(5 + 15) or 0.75.
      stop : Offset from highest point in the colormap's range.
          Defaults to 1.0 (no upper offset). Should be between
          `midpoint` and 1.0.
    '''
    cdict = {
        'red': [],
        'green': [],
        'blue': [],
        'alpha': []
    }

    # regular index to compute the colors
    reg_index = np.linspace(start, stop, 257)

    # shifted index to match the data
    shift_index = np.hstack([
        np.linspace(0.0, midpoint, 128, endpoint=False),
        np.linspace(midpoint, 1.0, 129, endpoint=True)
    ])

    for ri, si in zip(reg_index, shift_index):
        r, g, b, a = cmap(ri)
        cdict['red'].append((si, r, r))
        cdict['green'].append((si, g, g))
        cdict['blue'].append((si, b, b))
        cdict['alpha'].append((si, a, a))

    newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
    plt.register_cmap(cmap=newcmap)
    return newcmap


def join(l, sep):
    """Join the (stringified) items of *l* with separator *sep*.

    Uses str.join, which is linear and — unlike the previous
    ``out_str[:-len(sep)]`` trick — also correct when ``sep == ''``.
    """
    return sep.join('{}'.format(el) for el in l)


class setParams(object):
    """Bounding box (degrees) and number of years to read for one variable."""

    def __init__(self, name, lat_min, lat_max, lon_min, lon_max, end_year):
        self.name = name
        self.lat_min = lat_min
        self.lat_max = lat_max
        self.lon_min = lon_min
        self.lon_max = lon_max
        self.end_year = end_year


class set_params_for_forecast(object):
    """Container for per-forecast work arrays (cluster labels, skill, factors)."""

    def __init__(self, name, beg_year, end_year, year_start,
                 tmpDataAllYearsObs2_1D, tmpData2):
        self.name = name
        self.beg_year = beg_year
        self.end_year = end_year
        self.conditionnumberArray = np.zeros(end_year - beg_year)
        self.year_start = year_start
        # historic reasons for choice of dimensions
        self.clusterarray = np.zeros((2, tmpDataAllYearsObs2_1D.shape[0] - 1))
        self.patternCorr = np.zeros(end_year - beg_year)
        # array for saving all predictions
        self.tmpDataAllYearsObs_reshape = tmpData2.copy()
        self.tmpDataAllYearsPredict = np.reshape(tmpData2, (tmpData2.shape[0], -1))
        # array for saving cross-correlations
        self.tmpDataAllYearsObs_1D_Corr = np.zeros(tmpData2.shape[1] * tmpData2.shape[2])
        # array for saving all factors (significance flags, see calculate_time_correlation)
        self.a_perce = np.zeros(tmpData2.shape[1] * tmpData2.shape[2])


class load_forecast_variable(object):
    """Load the GHCN-CAMS winter temperature field and mask it to Europe/Turkey."""

    def __init__(self, name):
        self.tempParams = setParams("temperature", 25.5, 72, -12, 45, 50)
        # load temperature data
        tempPath = '/media/sonja/Data/Temperature/scripts/GHCN_CAMS_Europe/air.mon.mean.seasmean.winter.detrend.nan.-180.timmean.nc'
        self.lats, self.lons, self.lat_grid, self.lon_grid, self.tempDataObs = \
            read_data(self.tempParams, tempPath, 'air', 'lat', 'lon', 'time')
        # calculate forecast only for Europe and Turkey
        self.mask_array = np.loadtxt("/media/sonja/Data/Temperature/scripts/GHCN_CAMS_Europe/Europe-mask.txt")
        self.tmpData2 = self.tempDataObs.copy()
        self.tmpData2[:, self.mask_array == 1] = 0
        # calculate monthly anomalies
        self.tmpDataAllYearsObs_1D = calculate_monthly_anomalies_1D(self.tmpData2)


class load_precursors_data(object):
    """Load all autumn precursor fields (SIC, SCE, SSTs, SLP, GPH500)."""

    def __init__(self, name):
        self.name = name
        # NOTE(review): params name "temperature" looks like a copy-paste
        # leftover for the sea-ice field — confirm before relying on it.
        self.sicParams = setParams("temperature", 60, 90, 0, 180, 50)
        sicPath = '/media/sonja/Data/Precipitation/Forecast_Model/1966/HadISST_ice_autumn_1x1_1966_2016_missto0_timmean_detrend.nc'
        self.latsSIC, self.lonsSIC, self.lat_gridSIC, self.lon_gridSIC, self.seaICData2 = \
            read_data(self.sicParams, sicPath, 'sic', 'latitude', 'longitude', 'time')

        self.sceParams = setParams("SCE", 20, 60, 0, 180, 51)
        scePath = '/media/sonja/Data/Precipitation/Forecast_Model/1966/monmean_sce_autumn_1966_2016_regular_timmean_detrend.nc'
        # NOTE(review): the SCE read overwrites self.lat_gridSIC/self.lon_gridSIC
        # (kept as-is because the attribute names are part of the interface,
        # but this looks like a copy-paste bug — verify downstream users).
        self.latSCE, self.lonSCE, self.lat_gridSIC, self.lon_gridSIC, self.snowCEData2 = \
            read_data(self.sceParams, scePath, 'snow_cover_extent', 'lat', 'lon', 'time')

        self.sstMediParams = setParams("SST-Medi", 25, 50, -6, 45, 50)
        sstPath_Mediterranean = '/media/sonja/Data/Precipitation/Forecast_Model/1966/HadISST_Autumn_1x1_1966_2016_missto0_sst_timmean_detrend.nc'
        self.lats_Mediterranean, self.lons_Mediterranean, self.lat_gridMediterranean, \
            self.lon_gridMediterranean, self.seastData_Mediterranean2 = \
            read_data(self.sstMediParams, sstPath_Mediterranean, 'sst', 'latitude', 'longitude', 'time')

        self.sstAtlParams = setParams("SST-Atlantic", 10, 70, -110, 20, 50)
        sstPath_North_Atl = '/media/sonja/Data/Precipitation/Forecast_Model/1966/HadISST_Autumn_1x1_1966_2016_missto0_sst_timmean_detrend.nc'
        self.lats_North_Atl, self.lons_North_Atl, self.lat_grid_North_Atl, \
            self.lon_grid_North_Atl, self.seastData_North_Atl2 = \
            read_data(self.sstAtlParams, sstPath_North_Atl, 'sst', 'latitude', 'longitude', 'time')
        # load mask data: mask Hudson Bay, Pacific Ocean, Arctic Ocean,
        # Labrador Sea, Pacific Ocean, Mediterranean Sea
        mask_array_atl = np.loadtxt("/media/sonja/Data/Temperature/scripts/GHCN_CAMS_Europe/Atl-mask-cut.txt")
        self.seastData_North_Atl2[:, mask_array_atl == 0] = 0

        self.sstSouthPacParams = setParams("SST-South-Pacific", -30, 30, 150, 290, 50)
        sstPath_South_Pac = '/media/sonja/Data/Precipitation/Forecast_Model/1966/HadISST_Autumn_1x1_1966_2016_missto0_0_360_timmean_detrend.nc'
        self.lats_South_Pac, self.lons_South_Pac, self.lat_grid_South_Pac, \
            self.lon_grid_South_Pac, self.seastData_South_Pac2 = \
            read_data(self.sstSouthPacParams, sstPath_South_Pac, 'sst', 'lat', 'lon', 'time')
        # load mask to mask Atlantic (the original applied this mask a second
        # time after the SLP read; the duplicate was a no-op and was removed)
        mask_array_pac = np.loadtxt("/media/sonja/Data/Temperature/scripts/GHCN_CAMS_Europe/Pac-mask.txt")
        self.seastData_South_Pac2[:, mask_array_pac == 0] = 0

        self.slpParams = setParams("SLP", 40, 80, 0, 360, 50)
        slpPath = '/media/sonja/Data/Precipitation/Forecast_Model/1966/slp.mnmean_1966_2016_autumn_seasmean_timmean_detrend.nc'
        self.lats_slp, self.lons_slp, self.lat_grid_slp, self.lon_grid_slp, self.sealpData2 = \
            read_data(self.slpParams, slpPath, 'slp', 'lat', 'lon', 'time')

        self.gph500Params = setParams("gph500", 0, 40, -10, 160, 50)
        gph500Path = '/media/sonja/Data/Precipitation/Forecast_Model/1966/hgt_monmean_500mb_autumn_1_1_1966_2016_seasmean_timmean_detrend_-180.nc'
        self.lats_gph500, self.lons_gph500, self.lat_grid_gph500, \
            self.lon_grid_gph500, self.geophp500Data2 = \
            read_data_level(self.gph500Params, gph500Path, 'hgt', 'lat', 'lon', 'level', 'time')


class reshape_precursors_to_1D(object):
    """Flatten each precursor field to (years, gridpoints)."""

    def __init__(self, name, precursors):
        self.snowCEData2_1D = reshape_precursor_to_1d_array(precursors.snowCEData2)
        self.seaICData2_1D = reshape_precursor_to_1d_array(precursors.seaICData2)
        self.seastData_South_Pac2_1D = reshape_precursor_to_1d_array(precursors.seastData_South_Pac2)
        self.seastData_North_Atl2_1D = reshape_precursor_to_1d_array(precursors.seastData_North_Atl2)
        self.seastData_Mediterranean2_1D = reshape_precursor_to_1d_array(precursors.seastData_Mediterranean2)
        self.sealpData2_1D = reshape_precursor_to_1d_array(precursors.sealpData2)
        self.geophp500Data2_1D = reshape_precursor_to_1d_array(precursors.geophp500Data2)


class composites_data_1D(object):
    """Build cluster composites for every precursor, excluding *year*.

    For each precursor the target year is removed (leave-one-out), the
    remaining years are standardized (except SIC — see NOTE below), and
    per-cluster composite patterns are computed.
    """

    def _composites_for(self, paramsForecast_var, precursor_1D, year, k,
                        standardize=True):
        # One leave-one-out preparation step: returns the reduced record,
        # the held-out year, and the k composite patterns.
        sub = np.delete(precursor_1D, year, axis=0)
        long_term = precursor_1D[year]
        if standardize:
            sub, long_term = calculate_standardized_precursors(sub, long_term)
        pins = np.zeros((k, sub.shape[1]))
        pins = calculate_composites(paramsForecast_var, pins, sub, k)
        return sub, long_term, pins

    def __init__(self, name, paramsForecast_var, precursors_1D, year, k):
        # ====================================================================
        # SIC Composites
        # ====================================================================
        # NOTE(review): the original code did NOT standardize the sea-ice
        # precursor, unlike all other fields — preserved here; confirm
        # whether that asymmetry is intentional.
        self.seaICData_1Dsub, self.seaceData_1D_longTerm, self.pin_arraysSIC_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.seaICData2_1D,
                                 year, k, standardize=False)
        # ====================================================================
        # SCE Composites
        # ====================================================================
        self.snowCEData2_1DSub, self.snowCEData_1D_longTerm, self.pin_arraysSCE_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.snowCEData2_1D, year, k)
        # ====================================================================
        # Tropics (South Pacific SST)
        # ====================================================================
        self.tropicsData2_1DSub, self.tropicsData_1D_longTerm, self.pin_arraystropics_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.seastData_South_Pac2_1D, year, k)
        # ====================================================================
        # Atlantic SST
        # ====================================================================
        self.atlData2_1DSub, self.atlData_1D_longTerm, self.pin_arraysatl_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.seastData_North_Atl2_1D, year, k)
        # ====================================================================
        # Mediterranean SST
        # ====================================================================
        self.mediData2_1DSub, self.mediData_1D_longTerm, self.pin_arraysmedi_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.seastData_Mediterranean2_1D, year, k)
        # ====================================================================
        # SLP
        # ====================================================================
        self.slpData2_1DSub, self.slpData_1D_longTerm, self.pin_arraysslp_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.sealpData2_1D, year, k)
        # ====================================================================
        # Geopotential height - 500mb Composites
        # ====================================================================
        self.gph500Data2_1DSub, self.gph500Data_1D_longTerm, self.pin_arraysgph500_1D = \
            self._composites_for(paramsForecast_var, precursors_1D.geophp500Data2_1D, year, k)


def read_data(setParams_instance, filePath, varname, latname, lonname, timename):
    """Read *varname* from a netCDF file, restricted to the params' box.

    Returns (lats, lons, lat_grid, lon_grid, data_obs) where lats/lons are
    2-D meshgrids and data_obs covers the first ``end_year`` time steps.
    """
    data = Dataset(filePath, 'r')
    lat = data.variables[latname][:]
    lon = data.variables[lonname][:]
    data_obs = data.variables[varname][
        0:setParams_instance.end_year,
        (lat >= setParams_instance.lat_min) & (lat <= setParams_instance.lat_max),
        (lon >= setParams_instance.lon_min) & (lon <= setParams_instance.lon_max)].squeeze()
    lat_grid = lat[(lat >= setParams_instance.lat_min) & (lat <= setParams_instance.lat_max)]
    lon_grid = lon[(lon >= setParams_instance.lon_min) & (lon <= setParams_instance.lon_max)]
    # slicing above already copied the data into memory, so the file can be closed
    data.close()
    lons, lats = np.meshgrid(lon_grid, lat_grid)
    return lats, lons, lat_grid, lon_grid, data_obs


def read_data_level(setParams_instance, filePath, varname, latname, lonname,
                    levname, timename):
    """Like read_data but for a variable with an extra (squeezed) level axis."""
    data = Dataset(filePath, 'r')
    lat = data.variables[latname][:]
    lon = data.variables[lonname][:]
    data_obs = data.variables[varname][
        0:setParams_instance.end_year, :,
        (lat >= setParams_instance.lat_min) & (lat <= setParams_instance.lat_max),
        (lon >= setParams_instance.lon_min) & (lon <= setParams_instance.lon_max)].squeeze()
    lat_grid = lat[(lat >= setParams_instance.lat_min) & (lat <= setParams_instance.lat_max)]
    lon_grid = lon[(lon >= setParams_instance.lon_min) & (lon <= setParams_instance.lon_max)]
    # slicing above already copied the data into memory, so the file can be closed
    data.close()
    lons, lats = np.meshgrid(lon_grid, lat_grid)
    return lats, lons, lat_grid, lon_grid, data_obs


def calculate_monthly_anomalies_1D(varData):
    """Flatten to (time, gridpoints) and replace NaNs with 0."""
    varData_1D = varData.copy()
    varData_1D = np.reshape(varData_1D, (varData_1D.shape[0], -1))
    # NaN != NaN, so this zeroes exactly the NaN entries
    varData_1D[varData_1D != varData_1D] = 0
    return varData_1D


def reshape_precursor_to_1d_array(var2D):
    """Flatten all spatial axes: (time, ...) -> (time, gridpoints)."""
    return np.reshape(var2D, (var2D.shape[0], -1))


def calculate_clusters(paramsForecast_var, varObs2_1D, methodName, k):
    """Hierarchically cluster the anomaly years into k clusters.

    Fills ``paramsForecast_var.clusterarray`` with (year index, cluster id)
    and returns it together with the k mean anomaly patterns.
    """
    varmean = np.mean(varObs2_1D, axis=0)
    varAnom = varObs2_1D - varmean
    Z = linkage(varAnom, methodName)
    f = fcluster(Z, k, criterion='maxclust')
    pin_arrays = np.zeros((k, varAnom.shape[1]))
    for j in range(k):
        cluster_number = j
        # fcluster labels are 1-based, hence f-1
        pin_arrays[j] = np.mean(varAnom[(f - 1) == cluster_number], axis=0)
    # get time series of cluster membership
    for i in range(0, varAnom.shape[0]):
        paramsForecast_var.clusterarray[0, i] = float(i)
        paramsForecast_var.clusterarray[1, i] = f[i] - 1
    return paramsForecast_var.clusterarray, pin_arrays


def calculate_standardized_precursors(precursors_1D, precursors_1D_one_year):
    """Remove the time mean and scale anomalies; scale the held-out year the same way.

    NOTE(review): ``sigma_var`` is the mean of squared anomalies (a
    variance-like quantity), not a standard deviation — the original
    formula is preserved; confirm the intended normalization.
    """
    varmean = np.mean(precursors_1D, axis=0)
    varAnom = precursors_1D - varmean
    # divided by grid (1d-array) and years - 1 (the year which we would
    # like to forecast)
    sigma_var = np.sum(varAnom * varAnom) / (varAnom.shape[0] * varAnom.shape[1])
    varAnom = varAnom / sigma_var
    precursors_1D_one_year = (precursors_1D_one_year - varmean)
    precursors_1D_one_year = precursors_1D_one_year / sigma_var
    return varAnom, precursors_1D_one_year


def calculate_composites(paramsForecast_var, composites, standardized_precursors, k):
    """Average the precursor years belonging to each cluster into *composites*.

    ``composites`` must come in zero-initialized with shape (k, gridpoints);
    it is filled in place and also returned.
    """
    count_array = np.zeros(k)
    for i in range(0, int(paramsForecast_var.clusterarray.shape[1])):
        cluster_id = int(paramsForecast_var.clusterarray[1, i])
        composites[cluster_id] += standardized_precursors[i]
        count_array[cluster_id] += 1
    for i in range(int(k)):
        composites[int(i)] = np.divide(composites[i], (count_array[i]))
    return composites


def projection_coefficients(alphaMatrix, pin_arrays_All_Data_1D, All_Data_1D, k):
    """Solve for projection coefficients alpha via a pseudo-inverse.

    Builds the Gram matrix of composites and the right-hand side of
    composite-onto-data projections, then solves with np.linalg.pinv.
    """
    # composite_i * composite_j (scalar product --> inner product)
    for i in range(int(k)):
        for j in range(int(k)):
            alphaMatrix[i, j] = sum(pin_arrays_All_Data_1D[i, :] * pin_arrays_All_Data_1D[j, :])
    # composite_i * y(t)
    rhs = np.zeros(k)
    for i in range(int(k)):
        rhs[i] = sum(pin_arrays_All_Data_1D[i, :] * All_Data_1D[:])
    # Singular values smaller (in modulus) than
    # 0.01 * largest_singular_value (again, in modulus) are set to zero
    pinvMatrix = np.linalg.pinv(alphaMatrix, 0.01)
    alphaAll = np.dot(pinvMatrix, rhs)
    return alphaAll


def prediction(temperature, pin_arrays_var, composites_data_temp_1D,
               listOfprecursors, k):
    """Forecast the temperature field as an alpha-weighted sum of composites."""
    # Concatenate composites arrays data
    pins, data_1D = concatenate_composites_arrays_data(composites_data_temp_1D)
    # Merge only those precursors which were selected
    pin_arrays_All_Data_1D, All_Data_1D = \
        merge_selected_precursors(listOfprecursors, pins, data_1D, temperature)
    # Calculate projection coefficients
    alphaMatrix = np.zeros((k, k))
    alphaAll = projection_coefficients(alphaMatrix, pin_arrays_All_Data_1D, All_Data_1D, k)
    forecast_var = np.zeros(temperature.tmpDataAllYearsObs_1D.shape[1])
    for i in range(int(k)):
        forecast_var += alphaAll[i] * pin_arrays_var[int(i)]
    return forecast_var


def calculate_time_correlation(var_obs_1D, paramsForecast_var):
    """Per-gridpoint Pearson correlation between forecast and observed anomalies.

    Also sets ``a_perce`` to 1 where p <= 0.05 (significant), 0 otherwise.
    """
    anomalytemp = (var_obs_1D - np.mean(var_obs_1D, axis=0))
    for i in range(paramsForecast_var.tmpDataAllYearsPredict.shape[1]):
        test = scipy.stats.pearsonr(
            paramsForecast_var.tmpDataAllYearsPredict[
                paramsForecast_var.beg_year:paramsForecast_var.end_year, i],
            anomalytemp[paramsForecast_var.beg_year:paramsForecast_var.end_year, i])
        # NOTE(review): the original `if test[0]==test[0]: ... else: ...`
        # assigned test[0] in BOTH branches (the NaN branch was apparently
        # meant to assign 0 but didn't).  The dead branch is removed here;
        # behavior is unchanged — NaN correlations still propagate and are
        # skipped later by np.nanmean.
        paramsForecast_var.tmpDataAllYearsObs_1D_Corr[i] = test[0]
        if test[1] > 0.05:
            paramsForecast_var.a_perce[i] = 0
        else:
            paramsForecast_var.a_perce[i] = 1
    return paramsForecast_var.tmpDataAllYearsObs_1D_Corr


def save_variable(filename, variable, lat_grid, lon_grid, varname):
    # ==========================================================================
    # save data - pattern correlation
    # ==========================================================================
    """Write a 2-D (lat, lon) field to a NETCDF4 file."""
    dataset = Dataset(filename, "w", format="NETCDF4")
    print(filename)
    # Create coordinate variables for 4-dimensions (the unused 'level'
    # dimension is kept so the output file structure stays identical)
    level = dataset.createDimension('level', 1)
    lat = dataset.createDimension('lat', variable.shape[0])
    lon = dataset.createDimension('lon', variable.shape[1])
    latitudes = dataset.createVariable('latitude', 'f4', ('lat'))
    longitudes = dataset.createVariable('longitude', 'f4', ('lon'))
    tmpForecastSave = dataset.createVariable(varname, 'f4', ('lat', 'lon'))
    latitudes[:] = lat_grid
    longitudes[:] = lon_grid
    tmpForecastSave[:, :] = variable
    # VERY IMPORTANT, OTHERWISE nothing is saved!!!
    dataset.close()


def save_plot(filename, variable, variable_significance, tempParams, lons, lats):
    """Plot the skill map (with significance hatching) and save it to *filename*."""
    eq_map = Basemap(llcrnrlon=tempParams.lon_min + 1.5,
                     llcrnrlat=tempParams.lat_min + 1.5,
                     urcrnrlon=tempParams.lon_max - 1.5,
                     urcrnrlat=tempParams.lat_max - 1,
                     projection='mill', resolution='l',
                     area_thresh=10000.0, lat_0=0, lon_0=0)
    eq_map.drawcoastlines()
    eq_map.drawcountries()
    eq_map.drawmapboundary()
    eq_map.drawmapboundary(fill_color='0.3')
    orig_cmap = matplotlib.cm.coolwarm
    cmap1 = (shiftedColorMap(orig_cmap, midpoint=0.5, name='shifted'))
    cmap1.set_bad('grey', 0.3)
    im1 = eq_map.pcolormesh(lons, lats, variable, cmap=cmap1, latlon=True,
                            vmin=-0.6, vmax=0.6)
    # plot significant values:
    eq_map.contourf(lons, lats, variable_significance, latlon=True,
                    colors='none', hatches=[None, '..'], extend='both',
                    alpha=0.00)
    cbar = eq_map.colorbar(im1, location='bottom')
    cbar.set_label('correlation')
    title_string = " DJF skill " + str(np.nanmean(variable).round(5))
    plt.title(title_string)
    plt.savefig(filename)
    plt.close()


def concatenate_composites_arrays_data(composites_data_temp_1D):
    """Collect all composite arrays and all held-out-year fields, in a fixed order."""
    pins = (
        composites_data_temp_1D.pin_arraysSIC_1D,
        composites_data_temp_1D.pin_arraysSCE_1D,
        composites_data_temp_1D.pin_arraystropics_1D,
        composites_data_temp_1D.pin_arraysatl_1D,
        composites_data_temp_1D.pin_arraysmedi_1D,
        composites_data_temp_1D.pin_arraysslp_1D,
        composites_data_temp_1D.pin_arraysgph500_1D,
    )
    data_1D = (
        composites_data_temp_1D.seaceData_1D_longTerm,
        composites_data_temp_1D.snowCEData_1D_longTerm,
        composites_data_temp_1D.tropicsData_1D_longTerm,
        composites_data_temp_1D.atlData_1D_longTerm,
        composites_data_temp_1D.mediData_1D_longTerm,
        composites_data_temp_1D.slpData_1D_longTerm,
        composites_data_temp_1D.gph500Data_1D_longTerm,
    )
    return pins, data_1D


# Merge only those precursors which were selected
def merge_selected_precursors(listOfprecursors, pins, data_1D, temperature):
    """Concatenate the selected precursors (composites along axis 1, data along axis 0)."""
    if len(listOfprecursors) == 1:
        pin_arrays_All_Data_1D = np.array(pins[listOfprecursors[0]])
        All_Data_1D = np.array(data_1D[listOfprecursors[0]])
    else:
        pin_arrays_All_Data_1D = np.concatenate(
            operator.itemgetter(*listOfprecursors)(pins), axis=1)
        All_Data_1D = np.concatenate(
            operator.itemgetter(*listOfprecursors)(data_1D), axis=0)
    return pin_arrays_All_Data_1D, All_Data_1D


def save_plot_and_time_correlation(listOfprecursors, paramsForecast_temp,
                                   temperature,
                                   tmpDataAllYearsObs_1D_Corr_reshape,
                                   saveArray):
    """Save the skill plot, the mean skill value and the correlation netCDF files."""
    # define outputname
    if len(listOfprecursors) == 1:
        saveString = saveArray[listOfprecursors[0]]
    else:
        saveString = join(operator.itemgetter(*listOfprecursors)(saveArray), "-")
    a_perce_reshape = np.reshape(
        paramsForecast_temp.a_perce,
        (temperature.tmpData2.shape[1], temperature.tmpData2.shape[2]))
    file_path = str(len(listOfprecursors)) + '-precursor/'
    directory = os.path.dirname(file_path)
    # create the output directory if needed (replaces the bare
    # `try: os.stat(...) except: os.mkdir(...)` which swallowed all errors)
    if not os.path.isdir(directory):
        os.mkdir(directory)
    # save plot
    save_plot(file_path + saveString + '-1967-2016.png',
              tmpDataAllYearsObs_1D_Corr_reshape, a_perce_reshape,
              temperature.tempParams, temperature.lons, temperature.lats)
    # save time correlation (with-block closes the handle; the original
    # leaked it and shadowed the `file` builtin)
    with open(saveString + '-1967-2016.txt', 'w') as out_file:
        out_file.write(str(np.nanmean(tmpDataAllYearsObs_1D_Corr_reshape).round(5)))
    # save spatial correlation
    save_variable(file_path + '/pattern_time_correlation_GHCN_CAMS_' + saveString + '-1967_2016.nc',
                  tmpDataAllYearsObs_1D_Corr_reshape,
                  temperature.lat_grid, temperature.lon_grid,
                  'time_correlation')
    # save correlation significance
    save_variable(file_path + '/pattern_time_correlation_GHCN_CAMS_' + saveString + '-1967_2016_significance.nc',
                  a_perce_reshape,
                  temperature.lat_grid, temperature.lon_grid,
                  'a_perce_reshape')