climate-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From huiky...@apache.org
Subject [1/4] climate git commit: CLIMATE-770 - Make boundary checking optional in spatial_regrid
Date Fri, 04 Mar 2016 06:47:26 GMT
Repository: climate
Updated Branches:
  refs/heads/master ab83d6884 -> 212441de3


CLIMATE-770 - Make boundary checking optional in spatial_regrid

- A new optional key, monotonic_grids, in run_RCMES configuration files
- boundary_check becomes optional in dataset_processor.spatial_regrid


Project: http://git-wip-us.apache.org/repos/asf/climate/repo
Commit: http://git-wip-us.apache.org/repos/asf/climate/commit/15c20689
Tree: http://git-wip-us.apache.org/repos/asf/climate/tree/15c20689
Diff: http://git-wip-us.apache.org/repos/asf/climate/diff/15c20689

Branch: refs/heads/master
Commit: 15c2068997a11918daef31c843be1c93bfd26863
Parents: 4e36c1d
Author: huikyole <huikyole@argo.jpl.nasa.gov>
Authored: Tue Mar 1 18:49:34 2016 -0800
Committer: huikyole <huikyole@argo.jpl.nasa.gov>
Committed: Tue Mar 1 18:49:34 2016 -0800

----------------------------------------------------------------------
 RCMES/run_RCMES.py       | 11 ++++++++---
 ocw/dataset_processor.py |  7 +++++--
 2 files changed, 13 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/climate/blob/15c20689/RCMES/run_RCMES.py
----------------------------------------------------------------------
diff --git a/RCMES/run_RCMES.py b/RCMES/run_RCMES.py
index c8affc3..2cbc13e 100644
--- a/RCMES/run_RCMES.py
+++ b/RCMES/run_RCMES.py
@@ -65,6 +65,9 @@ if 'latitude_name' in ref_data_info.keys():
     ref_lat_name = ref_data_info['latitude_name']
 if 'longitude_name' in ref_data_info.keys():
     ref_lon_name = ref_data_info['longitude_name']
+boundary_check_obs = True
+if 'monotonic_grids' in ref_data_info.keys():
+    boundary_check_obs = ref_data_info['monotonic_grids']
 print 'Loading observation dataset:\n',ref_data_info
 ref_name = ref_data_info['data_name']
 if ref_data_info['data_source'] == 'local':
@@ -92,6 +95,9 @@ if 'latitude_name' in model_data_info.keys():
     model_lat_name = model_data_info['latitude_name']
 if 'longitude_name' in model_data_info.keys():
     model_lon_name = model_data_info['longitude_name']
+boundary_check_model = True
+if 'monotonic_grids' in model_data_info.keys():
+    boundary_check_model = model_data_info['monotonic_grids']
 print 'Loading model datasets:\n',model_data_info
 if model_data_info['data_source'] == 'local':
     model_datasets, model_names = local.load_multiple_files(file_path = model_data_info['path'],
@@ -161,12 +167,11 @@ for model_name in model_names:
 """ Step 4: Spatial regriding of the reference datasets """
 print 'Regridding datasets: ', config['regrid']
 if not config['regrid']['regrid_on_reference']:
-    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon)
+    ref_dataset = dsp.spatial_regrid(ref_dataset, new_lat, new_lon, boundary_check = boundary_check_obs)
     print 'Reference dataset has been regridded'
 for idata,dataset in enumerate(model_datasets):
-    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon)
+    model_datasets[idata] = dsp.spatial_regrid(dataset, new_lat, new_lon, boundary_check = boundary_check_model)
     print model_names[idata]+' has been regridded'
-
 print 'Propagating missing data information'
 ref_dataset = dsp.mask_missing_data([ref_dataset]+model_datasets)[0]
 model_datasets = dsp.mask_missing_data([ref_dataset]+model_datasets)[1:]

http://git-wip-us.apache.org/repos/asf/climate/blob/15c20689/ocw/dataset_processor.py
----------------------------------------------------------------------
diff --git a/ocw/dataset_processor.py b/ocw/dataset_processor.py
index 1825864..00e9546 100755
--- a/ocw/dataset_processor.py
+++ b/ocw/dataset_processor.py
@@ -164,7 +164,7 @@ def temporal_rebin_with_time_index(target_dataset, nt_average):
                              origin=target_dataset.origin)
     return new_dataset
 
-def spatial_regrid(target_dataset, new_latitudes, new_longitudes):
+def spatial_regrid(target_dataset, new_latitudes, new_longitudes, boundary_check=True):
     """ Regrid a Dataset using the new latitudes and longitudes
 
     :param target_dataset: Dataset object that needs spatially regridded
@@ -176,6 +176,9 @@ def spatial_regrid(target_dataset, new_latitudes, new_longitudes):
     :param new_longitudes: Array of longitudes
     :type new_longitudes: :class:`numpy.ndarray`
 
+    :param boundary_check: Check if the regridding domain's boundaries are outside target_dataset's domain
+    :type boundary_check: :class:`bool`
+
     :returns: A new spatially regridded Dataset
     :rtype: :class:`dataset.Dataset`
     """
@@ -232,7 +235,7 @@ def spatial_regrid(target_dataset, new_latitudes, new_longitudes):
   
     for iy in np.arange(ny_new):
         for ix in np.arange(nx_new):
-            if path.contains_point([new_lons[iy,ix], new_lats[iy,ix]]): 
+            if path.contains_point([new_lons[iy,ix], new_lats[iy,ix]]) or not boundary_check:

                 if regular_grid:
                     new_lats_indices[iy,ix] = (ny_old -1.)*(new_lats[iy,ix] - lats.min())/(lats.max() - lats.min())
                     new_lons_indices[iy,ix] = (nx_old -1.)*(new_lons[iy,ix] - lons.min())/(lons.max() - lons.min())


Mime
View raw message