climate-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From huiky...@apache.org
Subject [2/7] climate git commit: CLIMATE-720 - Revise file structure
Date Thu, 21 Jan 2016 21:52:00 GMT
http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
deleted file mode 100644
index eb4b4c5..0000000
--- a/examples/configuration_file_examples/cordex-arctic_cloud_fraction_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_clt_MAR-SEP.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 3
-    month_end: 9
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: cld_frac
-        multiplying_factor: 100.0
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/clt*.nc                                                    
-        variable: clt     
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_clt_MAR-SEP_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
deleted file mode 100644
index 1311843..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rlds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlds*.nc                                                    
-        variable: rlds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [1,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
deleted file mode 100644
index b03738a..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rlus_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rlus_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_longwave_from_1983_to_2007.nc                           
-        variable: lw_sfc_up
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rlus*.nc                                                    
-        variable: rlus    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rlus_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml b/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
deleted file mode 100644
index 9613e46..0000000
--- a/examples/configuration_file_examples/cordex-arctic_rsds_bias_to_SRB.yaml
+++ /dev/null
@@ -1,65 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-workdir: ./
-output_netcdf_filename: cordex-arctic_rsds_JUL.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1990-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 7
-    month_end: 7
-    average_each_year: False
-
-space:
-    min_lat: 55.00 
-    max_lat: 89.5 
-    min_lon: -179.75
-    max_lon: 178.50
-
-regrid:
-    regrid_on_reference: True
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: local
-        data_name: SRB
-        path: ./data/srb_rel3.0_shortwave_from_1983_to_2007.nc                           
-        variable: sw_sfc_dn
-        multiplying_factor: 1
-
-    targets:
-        data_source: local
-        path: /home/huikyole/data/CORDEX-ARC/rsds*.nc                                                    
-        variable: rsds    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Map_plot_bias_of_multiyear_climatology
-
-plots1:
-    file_name: cordex-arctic_rsds_JUL_mean_bias_to_SRB
-    subplots_array: !!python/tuple [2,2] 
-    map_projection: npstere
-
-use_subregions: False
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml b/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
deleted file mode 100644
index 9483cae..0000000
--- a/examples/configuration_file_examples/cordex_AF_prec_subregion_annual_cycle_time_series.yaml
+++ /dev/null
@@ -1,90 +0,0 @@
-workdir: ./
-output_netcdf_filename: cordex_AF_prec_monthly_mean_1990-2007.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: True 
-    start_time: 1998-01-01
-    end_time: 2007-12-31
-    temporal_resolution: monthly
-    month_start: 1
-    month_end: 12
-    average_each_year: False
-
-space:
-    min_lat: -45.76
-    max_lat: 42.24
-    min_lon: -24.64
-    max_lon: 60.28
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.44
-    regrid_dlon: 0.44
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU  
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/AFRICA*pr.nc                                
-        variable: pr    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Timeseries_plot_subregion_annual_cycle
-
-plots1:
-    file_name: cordex_AF_prec_subregion_annual_cycle_time_series
-    subplots_array: !!python/tuple [7,3]
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01:
-      [29.0, 36.5, -10.0, 0.0]
-    R02:
-      [29, 37.5, 0, 10]
-    R03:
-      [25, 32.5, 10, 20]
-    R04:
-      [25, 32.5, 20, 33]
-    R05:
-      [12, 20.0, -19.3, -10.2]
-    R06:
-      [15, 25.0, 15, 30]
-    R07:
-      [7.3, 15,  -10, 10]
-    R08:
-      [5, 7.3,  -10, 10]
-    R09:
-      [6.9, 15, 33.9, 40]
-    R10:
-      [2.2, 11.8, 44.2, 51.8]
-    R11:
-      [0, 10, 10, 25]
-    R12:
-      [-10, 0, 10, 25]
-    R13:
-      [-15, 0, 30, 40]
-    R14:
-      [-27.9, -21.4, 13.6, 20]
-    R15:
-      [-35, -27.9, 13.6, 20]
-    R16:
-      [-35, -21.4, 20, 35.7]
-    R17:
-      [-25.8, -11.7, 43.2, 50.3]
-    R18:
-      [25, 35.0, 33, 40]
-    R19:
-      [28, 35, 45, 50]
-    R20:
-      [13, 20.0, 43, 50]
-    R21:
-      [20, 27.5, 50, 58]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/metrics_and_plots.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/metrics_and_plots.py b/examples/configuration_file_examples/metrics_and_plots.py
deleted file mode 100644
index 6e00b0f..0000000
--- a/examples/configuration_file_examples/metrics_and_plots.py
+++ /dev/null
@@ -1,243 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#Apache OCW lib immports
-import ocw.dataset as ds
-import ocw.data_source.local as local
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.evaluation import Evaluation
-import ocw.metrics as metrics
-
-# Python libraries
-import numpy as np
-import numpy.ma as ma
-import matplotlib.pyplot as plt
-from mpl_toolkits.basemap import Basemap 
-from matplotlib import rcParams
-from matplotlib.patches import Polygon
-import string
-
-def Map_plot_bias_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name, row, column, map_projection=None):
-    '''Draw maps of observed multi-year climatology and biases of models"'''
-
-    # calculate climatology of observation data
-    obs_clim = utils.calc_temporal_mean(obs_dataset)
-    # determine the metrics
-    map_of_bias = metrics.TemporalMeanBias()
-
-    # create the Evaluation object
-    bias_evaluation = Evaluation(obs_dataset, # Reference dataset for the evaluation
-                                 model_datasets, # list of target datasets for the evaluation
-                                 [map_of_bias, map_of_bias])
-    # run the evaluation (bias calculation)
-    bias_evaluation.run() 
-
-    rcm_bias = bias_evaluation.results[0]
-
-    fig = plt.figure()
-
-    lat_min = obs_dataset.lats.min()
-    lat_max = obs_dataset.lats.max()
-    lon_min = obs_dataset.lons.min()
-    lon_max = obs_dataset.lons.max()
-
-    string_list = list(string.ascii_lowercase) 
-    ax = fig.add_subplot(row,column,1)
-    if map_projection == 'npstere':
-        m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
-            resolution = 'l', fix_aspect=False)
-    else:
-        m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-            llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-    lons, lats = np.meshgrid(obs_dataset.lons, obs_dataset.lats)
-
-    x,y = m(lons, lats)
-
-    m.drawcoastlines(linewidth=1)
-    m.drawcountries(linewidth=1)
-    m.drawstates(linewidth=0.5, color='w')
-    max = m.contourf(x,y,obs_clim,levels = plotter._nice_intervals(obs_dataset.values, 10), extend='both',cmap='rainbow')
-    ax.annotate('(a) \n' + obs_name,xy=(lon_min, lat_min))
-    cax = fig.add_axes([0.02, 1.-float(1./row), 0.01, 1./row*0.6])
-    plt.colorbar(max, cax = cax) 
-    clevs = plotter._nice_intervals(rcm_bias, 11)
-    for imodel in np.arange(len(model_datasets)):
-
-        ax = fig.add_subplot(row, column,2+imodel)
-        if map_projection == 'npstere':
-            m = Basemap(ax=ax, projection ='npstere', boundinglat=lat_min, lon_0=0,
-                resolution = 'l', fix_aspect=False)
-        else:
-            m = Basemap(ax=ax, projection ='cyl', llcrnrlat = lat_min, urcrnrlat = lat_max,
-                llcrnrlon = lon_min, urcrnrlon = lon_max, resolution = 'l', fix_aspect=False)
-        m.drawcoastlines(linewidth=1)
-        m.drawcountries(linewidth=1)
-        m.drawstates(linewidth=0.5, color='w')
-        max = m.contourf(x,y,rcm_bias[imodel,:],levels = clevs, extend='both', cmap='RdBu_r')
-        ax.annotate('('+string_list[imodel+1]+')  \n '+model_names[imodel],xy=(lon_min, lat_min))
-
-    cax = fig.add_axes([0.91, 0.1, 0.015, 0.8])
-    plt.colorbar(max, cax = cax) 
-
-    plt.subplots_adjust(hspace=0.01,wspace=0.05)
-
-    fig.savefig(file_name,dpi=600,bbox_inches='tight')
-
-def Taylor_diagram_spatial_pattern_of_multiyear_climatology(obs_dataset, obs_name, model_datasets, model_names,
-                                      file_name):
-
-    # calculate climatological mean fields
-    obs_clim_dataset = ds.Dataset(obs_dataset.lats, obs_dataset.lons, obs_dataset.times, utils.calc_temporal_mean(obs_dataset))
-    model_clim_datasets = []
-    for dataset in model_datasets:
-        model_clim_datasets.append(ds.Dataset(dataset.lats, dataset.lons, dataset.times, utils.calc_temporal_mean(dataset)))
-
-    # Metrics (spatial standard deviation and pattern correlation)
-    # determine the metrics
-    taylor_diagram = metrics.SpatialPatternTaylorDiagram()
-
-    # create the Evaluation object
-    taylor_evaluation = Evaluation(obs_clim_dataset, # Climatological mean of reference dataset for the evaluation
-                                 model_clim_datasets, # list of climatological means from model datasets for the evaluation
-                                 [taylor_diagram])
-
-    # run the evaluation (bias calculation)
-    taylor_evaluation.run() 
-
-    taylor_data = taylor_evaluation.results[0]
-
-    plotter.draw_taylor_diagram(taylor_data, model_names, obs_name, file_name, pos='upper right',frameon=False)
-
-def Time_series_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle, 
-                          file_name, row, column, x_tick=['']):
-
-    nmodel, nt, nregion = model_subregion_mean.shape  
-
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-        
-    x_axis = np.arange(nt)
-    x_tick_values = x_axis
-
-    fig = plt.figure()
-    rcParams['xtick.labelsize'] = 6
-    rcParams['ytick.labelsize'] = 6
-  
-    for iregion in np.arange(nregion):
-        ax = fig.add_subplot(row, column, iregion+1) 
-        x_tick_labels = ['']
-        if iregion+1  > column*(row-1):
-            x_tick_labels = x_tick 
-        else:
-            x_tick_labels=['']
-        ax.plot(x_axis, obs_data[0, :, iregion], color='r', lw=2, label=obs_name)
-        for imodel in np.arange(nmodel):
-            ax.plot(x_axis, model_data[imodel, :, iregion], lw=0.5, label = model_names[imodel])
-        ax.set_xlim([-0.5,nt-0.5])
-        ax.set_xticks(x_tick_values)
-        ax.set_xticklabels(x_tick_labels)
-        ax.set_title('Region %02d' % (iregion+1), fontsize=8)
-    
-    ax.legend(bbox_to_anchor=(-0.2, row/2), loc='center' , prop={'size':7}, frameon=False)  
-
-    fig.subplots_adjust(hspace=0.7, wspace=0.5)
-    fig.savefig(file_name, dpi=600, bbox_inches='tight')
-
-def Portrait_diagram_subregion(obs_subregion_mean, obs_name, model_subregion_mean, model_names, seasonal_cycle,
-                               file_name, normalize=True):
-
-    nmodel, nt, nregion = model_subregion_mean.shape
-    
-    if seasonal_cycle:
-        obs_data = ma.mean(obs_subregion_mean.reshape([1,nt/12,12,nregion]), axis=1)
-        model_data = ma.mean(model_subregion_mean.reshape([nmodel,nt/12,12,nregion]), axis=1)
-        nt = 12
-    else:
-        obs_data = obs_subregion_mean
-        model_data = model_subregion_mean
-
-    subregion_metrics = ma.zeros([4, nregion, nmodel])
-
-    for imodel in np.arange(nmodel):
-        for iregion in np.arange(nregion):
-            # First metric: bias
-            subregion_metrics[0, iregion, imodel] = metrics.calc_bias(model_data[imodel, :, iregion], obs_data[0, :, iregion], average_over_time = True)
-            # Second metric: standard deviation
-            subregion_metrics[1, iregion, imodel] = metrics.calc_stddev_ratio(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Third metric: RMSE
-            subregion_metrics[2, iregion, imodel] = metrics.calc_rmse(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-            # Fourth metric: correlation
-            subregion_metrics[3, iregion, imodel] = metrics.calc_correlation(model_data[imodel, :, iregion], obs_data[0, :, iregion])
-   
-    if normalize:
-        for iregion in np.arange(nregion):
-            subregion_metrics[0, iregion, : ] = subregion_metrics[0, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-            subregion_metrics[1, iregion, : ] = subregion_metrics[1, iregion, : ]*100. 
-            subregion_metrics[2, iregion, : ] = subregion_metrics[2, iregion, : ]/ma.std(obs_data[0, :, iregion])*100. 
-
-    region_names = ['R%02d' % i for i in np.arange(nregion)+1]
-
-    for imetric, metric in enumerate(['bias','std','RMSE','corr']):
-        plotter.draw_portrait_diagram(subregion_metrics[imetric, :, :], region_names, model_names, file_name+'_'+metric, 
-                                      xlabel='model',ylabel='region')             
-
-def Map_plot_subregion(subregions, ref_dataset, directory):
-  
-    lons, lats = np.meshgrid(ref_dataset.lons, ref_dataset.lats) 
-    fig = plt.figure()
-    ax = fig.add_subplot(111)
-    m = Basemap(ax=ax, projection='cyl',llcrnrlat = lats.min(), urcrnrlat = lats.max(),
-                llcrnrlon = lons.min(), urcrnrlon = lons.max(), resolution = 'l')
-    m.drawcoastlines(linewidth=0.75)
-    m.drawcountries(linewidth=0.75)
-    m.etopo()  
-    x, y = m(lons, lats) 
-    #subregion_array = ma.masked_equal(subregion_array, 0)
-    #max=m.contourf(x, y, subregion_array, alpha=0.7, cmap='Accent')
-    for subregion in subregions:
-        draw_screen_poly(subregion[1], m, 'w') 
-        plt.annotate(subregion[0],xy=(0.5*(subregion[1][2]+subregion[1][3]), 0.5*(subregion[1][0]+subregion[1][1])), ha='center',va='center', fontsize=8) 
-    fig.savefig(directory+'map_subregion', bbox_inches='tight')
-
-def draw_screen_poly(boundary_array, m, linecolor='k'):
-
-    ''' Draw a polygon on a map
-
-    :param boundary_array: [lat_north, lat_south, lon_east, lon_west]
-    :param m   : Basemap object
-    '''
-
-    lats = [boundary_array[0], boundary_array[0], boundary_array[1], boundary_array[1]]
-    lons = [boundary_array[3], boundary_array[2], boundary_array[2], boundary_array[3]]
-    x, y = m( lons, lats )
-    xy = zip(x,y)
-    poly = Polygon( xy, facecolor='none',edgecolor=linecolor )
-    plt.gca().add_patch(poly)
-    
-    
-   
-
-    
-
-    

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml b/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
deleted file mode 100644
index c6b96cf..0000000
--- a/examples/configuration_file_examples/narccap_prec_JJA_mean_taylor_diagram_to_cru.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-workdir: ./                                      
-output_netcdf_filename: narccap_prec_JJA_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 6
-    month_end: 8
-    average_each_year: True  
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 37
-
-    targets:
-        data_source: local
-        path: ./data/prec.*ncep.monavg.nc                                                    
-        variable: prec    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Taylor_diagram_spatial_pattern_of_multiyear_climatology
-
-plots1:
-    file_name: narccap_prec_JJA_mean_taylor_diagram_to_cru
-
-use_subregions: False

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml b/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
deleted file mode 100644
index de2d98e..0000000
--- a/examples/configuration_file_examples/narccap_tas_DJF_subregion_interannual_variability_portrait_diagram.yaml
+++ /dev/null
@@ -1,75 +0,0 @@
-workdir: ./
-output_netcdf_filename: narccap_tas_DJF_mean_mean_1980-2003.nc
-
-# (RCMES will temporally subset data between month_start and month_end. If average_each_year is True (False), seasonal mean in each year is (not) calculated and used for metrics calculation.)
-time:
-    maximum_overlap_period: False
-    start_time: 1980-01-01
-    end_time: 2003-12-31
-    temporal_resolution: monthly
-    month_start: 12
-    month_end: 2
-    average_each_year: True
-
-space:
-    min_lat: 23.75
-    max_lat: 49.75
-    min_lon: -125.75
-    max_lon: -66.75
-
-regrid:
-    regrid_on_reference: False
-    regrid_dlat: 0.50
-    regrid_dlon: 0.50
-
-datasets:
-    reference:
-        data_source: rcmed
-        data_name: CRU
-        dataset_id: 10
-        parameter_id: 38
-
-    targets:
-        data_source: local
-        path: ./data/temp*ncep.monavg.nc                                                    
-        variable: temp    
-
-number_of_metrics_and_plots: 1
-
-metrics1: Portrait_diagram_subregion_interannual_variability
-
-plots1:
-    file_name: narccap_tas_DJF_subregion_interannual_variability_portrait_diagram
-
-use_subregions: True 
-
-subregions:
-#subregion name (R01, R02, R03,....) followed by an array of boundaries [south, north, west, east]
-    R01: 
-      [42.75, 49.75, -123.75, -120.25]
-    R02:
-      [42.75, 49.75, -119.75, -112.75]
-    R03:
-      [37.25, 42.25, -123.75, -117.75]
-    R04: 
-      [32.25, 37.25, -122.75, -114.75]
-    R05:
-      [31.25, 37.25, -113.75, -108.25]
-    R06:
-      [31.25, 37.25, -108.25, -99.75]
-    R07:
-      [37.25, 43.25, -110.25, -103.75]
-    R08: 
-      [45.25, 49.25, -99.75, -90.25]
-    R09: 
-      [34.75, 45.25, -99.75, -90.25]
-    R10: 
-      [29.75, 34.75, -95.75, -84.75]
-    R11: 
-      [38.25, 44.75, -89.75, -80.25]
-    R12: 
-      [38.25, 44.75, -79.75, -70.25]
-    R13: 
-      [30.75, 38.25, -83.75, -75.25]
-    R14: 
-      [24.25, 30.75, -83.75, -80.25]

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/configuration_file_examples/run_RCMES.py
----------------------------------------------------------------------
diff --git a/examples/configuration_file_examples/run_RCMES.py b/examples/configuration_file_examples/run_RCMES.py
deleted file mode 100644
index 1054446..0000000
--- a/examples/configuration_file_examples/run_RCMES.py
+++ /dev/null
@@ -1,246 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#Apache OCW lib imports
-import ocw.dataset_processor as dsp
-import ocw.data_source.local as local
-import ocw.data_source.rcmed as rcmed
-import ocw.plotter as plotter
-import ocw.utils as utils
-from ocw.dataset import Bounds
-
-import matplotlib.pyplot as plt
-from matplotlib import rcParams
-import numpy as np
-import numpy.ma as ma
-import yaml
-from glob import glob
-import operator
-from dateutil import parser
-from datetime import datetime
-import os
-import sys
-
-from metrics_and_plots import *
-
-import ssl
-if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-
-config_file = str(sys.argv[1])
-
-print 'Reading the configuration file ', config_file
-config = yaml.load(open(config_file))
-time_info = config['time']
-temporal_resolution = time_info['temporal_resolution']
-
-start_time = datetime.strptime(time_info['start_time'].strftime('%Y%m%d'),'%Y%m%d')
-end_time = datetime.strptime(time_info['end_time'].strftime('%Y%m%d'),'%Y%m%d')
-
-space_info = config['space']
-min_lat = space_info['min_lat']
-max_lat = space_info['max_lat']
-min_lon = space_info['min_lon']
-max_lon = space_info['max_lon']
-
-""" Step 1: Load the reference data """
-ref_data_info = config['datasets']['reference']
-print 'Loading observation dataset:\n',ref_data_info
-ref_name = ref_data_info['data_name']
-if ref_data_info['data_source'] == 'local':
-    ref_dataset = local.load_file(ref_data_info['path'],
-                                  ref_data_info['variable'], name=ref_name)
-elif ref_data_info['data_source'] == 'rcmed':
-      ref_dataset = rcmed.parameter_dataset(ref_data_info['dataset_id'],
-                                            ref_data_info['parameter_id'],
-                                            min_lat, max_lat, min_lon, max_lon,
-                                            start_time, end_time)
-else:
-    print ' '
-    # TO DO: support ESGF
-
-ref_dataset =  dsp.normalize_dataset_datetimes(ref_dataset, temporal_resolution)
-if 'multiplying_factor' in ref_data_info.keys():
-    ref_dataset.values = ref_dataset.values*ref_data_info['multiplying_factor']
-
-""" Step 2: Load model NetCDF Files into OCW Dataset Objects """
-model_data_info = config['datasets']['targets']
-print 'Loading model datasets:\n',model_data_info
-if model_data_info['data_source'] == 'local':
-    model_datasets, model_names = local.load_multiple_files(file_path = model_data_info['path'],
-                                                            variable_name =model_data_info['variable'])
-else:
-    print ' '
-    # TO DO: support RCMED and ESGF
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.normalize_dataset_datetimes(dataset, temporal_resolution)
-
-""" Step 3: Subset the data for temporal and spatial domain """
-# Create a Bounds object to use for subsetting
-if time_info['maximum_overlap_period']:
-    start_time, end_time = utils.get_temporal_overlap([ref_dataset]+model_datasets)
-    print 'Maximum overlap period'
-    print 'start_time:', start_time
-    print 'end_time:', end_time
-
-if temporal_resolution == 'monthly' and end_time.day !=1:
-    end_time = end_time.replace(day=1)
-if ref_data_info['data_source'] == 'rcmed':
-    min_lat = np.max([min_lat, ref_dataset.lats.min()])
-    max_lat = np.min([max_lat, ref_dataset.lats.max()])
-    min_lon = np.max([min_lon, ref_dataset.lons.min()])
-    max_lon = np.min([max_lon, ref_dataset.lons.max()])
-bounds = Bounds(min_lat, max_lat, min_lon, max_lon, start_time, end_time)
-
-if ref_dataset.lats.ndim !=2 and ref_dataset.lons.ndim !=2:
-    ref_dataset = dsp.subset(bounds,ref_dataset)
-else:
-    ref_dataset = dsp.temporal_slice(bounds.start, bounds.end, ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    if dataset.lats.ndim !=2 and dataset.lons.ndim !=2:
-        model_datasets[idata] = dsp.subset(bounds,dataset)
-    else:
-        model_datasets[idata] = dsp.temporal_slice(bounds.start, bounds.end, dataset)
-
-# Temporally subset both observation and model datasets for the user-specified season
-month_start = time_info['month_start']
-month_end = time_info['month_end']
-average_each_year = time_info['average_each_year']
-
-ref_dataset = dsp.temporal_subset(month_start, month_end,ref_dataset,average_each_year)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.temporal_subset(month_start, month_end,dataset,average_each_year)
-
-# generate grid points for regridding
-if config['regrid']['regrid_on_reference']:
-    new_lat = ref_dataset.lats
-    new_lon = ref_dataset.lons 
-else:
-    delta_lat = config['regrid']['regrid_dlat']
-    delta_lon = config['regrid']['regrid_dlon']
-    nlat = (max_lat - min_lat)/delta_lat+1
-    nlon = (max_lon - min_lon)/delta_lon+1
-    new_lat = np.linspace(min_lat, max_lat, nlat)
-    new_lon = np.linspace(min_lon, max_lon, nlon)
-
-# number of models
-nmodel = len(model_datasets)
-print 'Dataset loading completed'
-print 'Observation data:', ref_name 
-print 'Number of model datasets:',nmodel
-for model_name in model_names:
-    print model_name
-
-""" Step 4: Spatial regriding of the reference datasets """
-print 'Regridding datasets: ', config['regrid']
-if not config['regrid']['regrid_on_reference']:
-    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
-    print 'Reference dataset has been regridded'
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
-    print model_names[idata]+' has been regridded'
-
-print 'Propagating missing data information'
-ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0]
-model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:]
-
-""" Step 5: Checking and converting variable units """
-print 'Checking and converting variable units'
-ref_dataset = dsp.variable_unit_conversion(ref_dataset)
-for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.variable_unit_conversion(dataset)
-    
-
-print 'Generating multi-model ensemble'
-if len(model_datasets) >= 2.:
-    model_datasets.append(dsp.ensemble(model_datasets))
-    model_names.append('ENS')
-
-""" Step 6: Generate subregion average and standard deviation """
-if config['use_subregions']:
-    # sort the subregion by region names and make a list
-    subregions= sorted(config['subregions'].items(),key=operator.itemgetter(0))
-
-    # number of subregions
-    nsubregion = len(subregions)
-
-    print 'Calculating spatial averages and standard deviations of ',str(nsubregion),' subregions'
-
-    ref_subregion_mean, ref_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std([ref_dataset], subregions) 
-    model_subregion_mean, model_subregion_std, subregion_array = utils.calc_subregion_area_mean_and_std(model_datasets, subregions) 
-
-""" Step 7: Write a netCDF file """
-workdir = config['workdir']
-if workdir[-1] != '/':
-    workdir = workdir+'/'
-print 'Writing a netcdf file: ',workdir+config['output_netcdf_filename']
-if not os.path.exists(workdir):
-    os.system("mkdir "+workdir)
-
-if config['use_subregions']:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'],
-                                                       subregions=subregions, subregion_array = subregion_array, 
-                                                       ref_subregion_mean=ref_subregion_mean, ref_subregion_std=ref_subregion_std,
-                                                       model_subregion_mean=model_subregion_mean, model_subregion_std=model_subregion_std)
-else:
-    dsp.write_netcdf_multiple_datasets_with_subregions(ref_dataset, ref_name, model_datasets, model_names,
-                                                       path=workdir+config['output_netcdf_filename'])
-
-""" Step 8: Calculate metrics and draw plots """
-nmetrics = config['number_of_metrics_and_plots']
-if config['use_subregions']:
-    Map_plot_subregion(subregions, ref_dataset, workdir)
-
-if nmetrics > 0:
-    print 'Calculating metrics and generating plots'
-    for imetric in np.arange(nmetrics)+1:
-        metrics_name = config['metrics'+'%1d' %imetric]
-        plot_info = config['plots'+'%1d' %imetric]
-        file_name = workdir+plot_info['file_name']
-
-        print 'metrics '+str(imetric)+'/'+str(nmetrics)+': ', metrics_name
-        if metrics_name == 'Map_plot_bias_of_multiyear_climatology':
-            row, column = plot_info['subplots_array']
-            if 'map_projection' in plot_info.keys():
-                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                          file_name, row, column, map_projection=plot_info['map_projection'])
-            else:
-                Map_plot_bias_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                          file_name, row, column)
-        elif metrics_name == 'Taylor_diagram_spatial_pattern_of_multiyear_climatology':
-            Taylor_diagram_spatial_pattern_of_multiyear_climatology(ref_dataset, ref_name, model_datasets, model_names,
-                                      file_name)
-        elif config['use_subregions']:
-            if metrics_name == 'Timeseries_plot_subregion_interannual_variability' and average_each_year:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name, row, column, x_tick=['Y'+str(i+1) for i in np.arange(model_subregion_mean.shape[1])])
-            if metrics_name == 'Timeseries_plot_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                row, column = plot_info['subplots_array']
-                Time_series_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name, row, column, x_tick=['J','F','M','A','M','J','J','A','S','O','N','D'])
-            if metrics_name == 'Portrait_diagram_subregion_interannual_variability' and average_each_year:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, False,
-                                      file_name)
-            if metrics_name == 'Portrait_diagram_subregion_annual_cycle' and not average_each_year and month_start==1 and month_end==12:
-                Portrait_diagram_subregion(ref_subregion_mean, ref_name, model_subregion_mean, model_names, True,
-                                      file_name)
-        else:
-            print 'please check the currently supported metrics'
-
-

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/statistical_downscaling/MPI_tas_JJA.yaml
----------------------------------------------------------------------
diff --git a/examples/statistical_downscaling/MPI_tas_JJA.yaml b/examples/statistical_downscaling/MPI_tas_JJA.yaml
deleted file mode 100644
index 17a12a7..0000000
--- a/examples/statistical_downscaling/MPI_tas_JJA.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-case_name: MPI_tas_JJA
-
-# downscaling method (1: delta addition, 2: Delta correction, 3: quantile mapping, 4: asynchronous regression)
-downscaling_option: 4
-
-# longitude (-180 ~ 180) and latitude (-90 ~ 90) of the grid point to downscale model output [in degrees]
-location:
-    name: HoChiMinh_City 
-    grid_lat: 10.75    
-    grid_lon: 106.67   
-
-# Season (for December - February, month_start=12 & month_end =2; for June - August, month_start=6 & month_end = 8)
-month_index: !!python/tuple [6,7,8]
-
-# reference (observation) data
-reference:
-    data_source: local
-    data_name: CRU
-    path: ./data/observation/tas_cru_monthly_1981-2010.nc
-    variable: tas
-
-model:
-    data_name: MPI
-    variable: tas
-    present:
-        path: ./data/model_present/tas_Amon_MPI_decadal1980_198101-201012.nc
-    future:
-        scenario_name: RCP8.5_2041-70
-        path: ./data/model_rcp85/tas_Amon_MPI_rcp85_204101-207012.nc 

http://git-wip-us.apache.org/repos/asf/climate/blob/c6c9dd1c/examples/statistical_downscaling/run_statistical_downscaling.py
----------------------------------------------------------------------
diff --git a/examples/statistical_downscaling/run_statistical_downscaling.py b/examples/statistical_downscaling/run_statistical_downscaling.py
deleted file mode 100644
index 60c6ac2..0000000
--- a/examples/statistical_downscaling/run_statistical_downscaling.py
+++ /dev/null
@@ -1,231 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import datetime
-import yaml
-import os
-import sys
-import xlwt
-
-import numpy as np
-import numpy.ma as ma
-
-import ocw.data_source.local as local
-import ocw.dataset as ds
-import ocw.dataset_processor as dsp
-import ocw.statistical_downscaling as down
-import ocw.plotter as plotter
-
-import ssl
-
-def spatial_aggregation(target_dataset, lon_min, lon_max, lat_min, lat_max):
-    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
-    :param target_dataset: Dataset object that needs spatial subsetting
-    :type target_dataset: Open Climate Workbench Dataset Object
-    :param lon_min: minimum longitude (western boundary)
-    :type lon_min: float
-    :param lon_max: maximum longitude (eastern boundary)
-    :type lon_max: float
-    :param lat_min: minimum latitude (southern boundary)
-    :type lat_min: float
-    :param lat_max: maximum latitude (northern boundary)
-    :type lat_max: float
-    :returns: the subset of the dataset's values within the given boundaries
-    :rtype: numpy array
-    """
-
-    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
-        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
-    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
-        new_lon = target_datasets.lons
-        new_lat = target_datasets.lats
- 
-    y_index, x_index = np.where((new_lon >= lon_min) & (new_lon <= lon_max) & (new_lat >= lat_min) & (new_lat <= lat_max))[0:2]
-
-    #new_dataset = ds.Dataset(target_dataset.lats[y_index.min():y_index.max()+1],
-    #                         target_dataset.lons[x_index.min():x_index.max()+1],
-    #                         target_dataset.times,
-    #                         target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1],
-    #                         target_dataset.variable,
-    #                         target_dataset.name) 
-    return target_dataset.values[:,y_index.min():y_index.max()+1,x_index.min():x_index.max()+1]
-
-def extract_data_at_nearest_grid_point(target_dataset, longitude, latitude):
-    """ Spatially subset a dataset within the given longitude and latitude boundaryd_lon-grid_space, grid_lon+grid_space
-    :param target_dataset: Dataset object that needs spatial subsetting
-    :type target_dataset: Open Climate Workbench Dataset Object
-    :type longitude: float
-    :param longitude: longitude
-    :type latitude: float
-    :param latitude: latitude 
-    :returns: the time series of values at the grid point nearest to the given location
-    :rtype: numpy array
-    """
-
-    if target_dataset.lons.ndim == 1 and target_dataset.lats.ndim == 1:
-        new_lon, new_lat = np.meshgrid(target_dataset.lons, target_dataset.lats)
-    elif target_dataset.lons.ndim == 2 and target_dataset.lats.ndim == 2:
-        new_lon = target_datasets.lons
-        new_lat = target_datasets.lats
-    distance = (new_lon - longitude)**2. + (new_lat - latitude)**2.
-    y_index, x_index = np.where(distance == np.min(distance))[0:2]
-
-    return target_dataset.values[:,y_index[0], x_index[0]]
-
-if hasattr(ssl, '_create_unverified_context'):
-  ssl._create_default_https_context = ssl._create_unverified_context
-
-config_file = str(sys.argv[1])
-
-print 'Reading the configuration file ', config_file
-
-config = yaml.load(open(config_file))
-
-case_name = config['case_name']
-
-downscale_option_names = [' ','delta_addition','delta_correction','quantile_mapping','asynchronous_regression']
-DOWNSCALE_OPTION = config['downscaling_option']
-
-location = config['location']
-grid_lat = location['grid_lat']
-grid_lon = location['grid_lon']
-
-month_index = config['month_index']
-month_start = month_index[0]
-month_end = month_index[-1]    
-
-ref_info = config['reference']
-model_info = config['model']
-
-# Filename for the output data/plot (without file extension)
-OUTPUT = "%s_%s_%s_%s_%s" %(location['name'], ref_info['variable'], model_info['data_name'], ref_info['data_name'],model_info['future']['scenario_name'])
-
-print("Processing "+ ref_info['data_name'] + "  data")
-""" Step 1: Load Local NetCDF Files into OCW Dataset Objects """
-
-print("Loading %s into an OCW Dataset Object" % (ref_info['path'],))
-ref_dataset = local.load_file(ref_info['path'], ref_info['variable'])
-print(ref_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (ref_dataset.values.shape,))
-
-print("Loading %s into an OCW Dataset Object" % (model_info['present']['path'],))
-model_dataset_present = local.load_file(model_info['present']['path'], model_info['variable'])
-print(model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_present.values.shape,))
-dy = model_dataset_present.spatial_resolution()[0]
-dx = model_dataset_present.spatial_resolution()[1]
-
-model_dataset_future = local.load_file(model_info['future']['path'], model_info['variable'])
-print(model_info['future']['scenario_name']+':'+model_info['data_name'] +" values shape: (times, lats, lons) - %s \n" % (model_dataset_future.values.shape,))
-
-""" Step 2: Temporal subsetting """
-print("Temporal subsetting for the selected month(s)")
-ref_temporal_subset = dsp.temporal_subset(month_start, month_end, ref_dataset)
-model_temporal_subset_present = dsp.temporal_subset(month_start, month_end, model_dataset_present)
-model_temporal_subset_future = dsp.temporal_subset(month_start, month_end, model_dataset_future)
-
-""" Step 3: Spatial aggregation of observational data into the model grid """
-print("Spatial aggregation of observational data near latitude %0.2f and longitude %0.2f " % (grid_lat, grid_lon))
-# There are two options to aggregate observational data near a model grid point
-#ref_subset = spatial_aggregation(ref_temporal_subset, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-#model_subset_present = spatial_aggregation(model_temporal_subset_present, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-#model_subset_future = spatial_aggregation(model_temporal_subset_future, grid_lon-0.5*dx, grid_lon+0.5*dx, grid_lat-0.5*dy, grid_lat+0.5*dy)
-ref_subset = extract_data_at_nearest_grid_point(ref_temporal_subset, grid_lon, grid_lat)
-model_subset_present = extract_data_at_nearest_grid_point(model_temporal_subset_present, grid_lon, grid_lat)
-model_subset_future = extract_data_at_nearest_grid_point(model_temporal_subset_future, grid_lon, grid_lat)
-
-
-""" Step 4:  Create a statistical downscaling object and downscaling model output """
-# You can add other methods
-print("Creating a statistical downscaling object")
-
-downscale = down.Downscaling(ref_subset, model_subset_present, model_subset_future)
-
-print(downscale_option_names[DOWNSCALE_OPTION]+": Downscaling model output")
-
-if DOWNSCALE_OPTION == 1:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_addition()
-elif DOWNSCALE_OPTION == 2:
-    downscaled_model_present, downscaled_model_future = downscale.Delta_correction()
-elif DOWNSCALE_OPTION == 3:
-    downscaled_model_present, downscaled_model_future = downscale.Quantile_mapping()
-elif DOWNSCALE_OPTION == 4:
-    downscaled_model_present, downscaled_model_future = downscale.Asynchronous_regression()
-else:
-    sys.exit("DOWNSCALE_OPTION must be an integer between 1 and 4")
-
-
-""" Step 5: Create plots and spreadsheet """
-print("Plotting results")
-if not os.path.exists(case_name):
-    os.system("mkdir "+case_name)
-os.chdir(os.getcwd()+"/"+case_name)
-
-plotter.draw_marker_on_map(grid_lat, grid_lon, fname='downscaling_location', location_name=config['location']['name'])
-
-plotter.draw_histogram([ref_subset.ravel(), model_subset_present.ravel(), model_subset_future.ravel()], 
-                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
-                       fname=OUTPUT+'_original')
-                        
-plotter.draw_histogram([ref_subset.ravel(), downscaled_model_present, downscaled_model_future], 
-                       data_names = [ref_info['data_name'], model_info['data_name'], model_info['future']['scenario_name']],
-                       fname=OUTPUT+'_downscaled_using_'+downscale_option_names[DOWNSCALE_OPTION])
-
-print("Generating spreadsheet")
-
-workbook = xlwt.Workbook()
-sheet = workbook.add_sheet(downscale_option_names[config['downscaling_option']])
-
-sheet.write(0, 0, config['location']['name'])
-sheet.write(0, 2, 'longitude')
-sheet.write(0, 4, 'latitude')
-sheet.write(0, 6, 'month')
-
-
-sheet.write(0, 3, grid_lon)
-sheet.write(0, 5, grid_lat)
-
-
-
-for imonth,month in enumerate(month_index):
-    sheet.write(0, 7+imonth, month)
-
-sheet.write(3, 1, 'observation')
-sheet.write(4, 1, ref_info['data_name'])
-for idata, data in enumerate(ref_subset.ravel()[~ref_subset.ravel().mask]):
-    sheet.write(5+idata,1,data.item())
-
-sheet.write(3, 2, 'original')
-sheet.write(4, 2, model_info['data_name'])
-for idata, data in enumerate(model_subset_present.ravel()):
-    sheet.write(5+idata,2,data.item())
-
-sheet.write(3, 3, 'original')
-sheet.write(4, 3, model_info['future']['scenario_name'])
-for idata, data in enumerate(model_subset_future.ravel()):
-    sheet.write(5+idata,3,data.item())
-
-sheet.write(3, 4, 'downscaled')
-sheet.write(4, 4, model_info['data_name'])
-for idata, data in enumerate(downscaled_model_present):
-    sheet.write(5+idata,4,data.item())
-
-sheet.write(3, 5, 'downscaled')
-sheet.write(4, 5, model_info['future']['scenario_name'])
-for idata, data in enumerate(downscaled_model_future):
-    sheet.write(5+idata,5,data.item())
-
-workbook.save(OUTPUT+'.xls')
-


Mime
View raw message