# Source code for pycif.plugins.obsoperators.standard.transforms.init_default_transformations

import copy
import numpy as np
from .utils import add_default, init_regrid, init_reindex, \
    init_sparse, propagate_attribute, propagate_dates


def init_default_transformations(self, all_transforms, backup_comps, mapper):
    """Initialize default transformations based on compatibility of
    input/output formats of successive transforms.

    Walks the transform pipeline backwards and, wherever a transform's
    expected inputs do not match its precursor's outputs, inserts the
    intermediate transformations needed to reconcile them: sparse/full
    sampling, horizontal/vertical regridding, and temporal re-indexing.

    Args:
        self: the observation-operator plugin (provides ``self.datavect``)
        all_transforms: container of transform plugins; iterable order is
            given by ``all_transforms.attributes``
        backup_comps: mapping of component names to fallback component
            names used when a component is absent from the data vector
        mapper: per-transform dict describing ``precursors``,
            ``successors``, ``inputs`` and ``outputs``
    """
    # Propagate info on domain propagation
    propagate_attribute(self, all_transforms, mapper,
                        "vdomain_from_previous", only_backwards=True)
    propagate_attribute(self, all_transforms, mapper,
                        "domain_from_previous", only_backwards=True)

    # Propagate information about the tracer
    propagate_attribute(self, all_transforms, mapper, "tracer_from_previous")
    propagate_attribute(self, all_transforms, mapper, "tracer")

    # Propagate horizontal and vertical domains for reprojection
    propagate_attribute(self, all_transforms, mapper, "domain")

    # Propagate whether the data are sampled or continuous
    propagate_attribute(self, all_transforms, mapper, "sampled")

    # Propagate whether the data are sparse or not
    propagate_attribute(self, all_transforms, mapper, "sparse_data")

    # Propagate whether the data are lateral/top boundary conditions or not
    propagate_attribute(self, all_transforms, mapper, "is_lbc")
    propagate_attribute(self, all_transforms, mapper, "is_top")

    # Propagate input dates for re-indexing
    propagate_dates(all_transforms, mapper)

    # Add default transformations:
    # reprojections, sampling of full data to sparse data, etc.
    # Iterate over a deep copy because the helpers below mutate
    # all_transforms/mapper while inserting intermediate transforms.
    ref_transforms = copy.deepcopy(all_transforms.attributes)
    for transform in ref_transforms[::-1]:
        transf_plg = getattr(all_transforms, transform)
        transf_mapper = mapper[transform]
        precursors = transf_mapper["precursors"]
        successors = transf_mapper["successors"]
        ref_inputs = transf_mapper["inputs"]

        # Loop on precursors and add intermediate transforms for reprojections
        for trid in precursors:
            trid_dict = ref_inputs[trid]
            ref_precursors = copy.deepcopy(precursors[trid])
            for tr in ref_precursors:
                tr_plg = getattr(all_transforms, tr)
                tr_mapper = mapper[tr]
                tmp_dict = tr_mapper["outputs"][trid]

                # Fetch info from yml; trid is a (component, parameter) pair
                prm = trid[1]
                cmp = trid[0]
                components = self.datavect.components
                comps = components.attributes

                # Use backup_components if defined;
                # falls back to "" when no backup is defined
                cmp_in = cmp if cmp in comps else backup_comps.get(cmp, "")
                cmp_plg = getattr(components, cmp_in, None)
                param = ref_inputs[trid].get("tracer", None)

                # Converts full data to sparse data (obs typically)
                # or conversely.
                # Includes horizontal, vertical and temporal sampling,
                # so no further intermediate transform is needed here.
                if trid_dict.get("sparse_data", False) \
                        != tmp_dict.get("sparse_data", False):
                    precursor_id = init_sparse(
                        trid, tmp_dict, trid_dict, tr, transform, param,
                        all_transforms, mapper, backup_comps, precursors)
                    continue

                # Otherwise, do reprojections if necessary
                do_reproj = trid_dict.get("domain", None) is not None
                do_reindex = trid_dict.get("input_dates", None) is not None
                domain_ref = trid_dict.get("domain", None)
                domain_precurs = tmp_dict.get("domain", None)

                # Chain intermediate transforms starting from the precursor;
                # each helper returns the id to plug the next one onto
                precursor_id = tr

                # temp_first decides whether temporal re-indexing runs
                # before or after the horizontal/vertical regridding
                # (False when param has no time_interpolation attribute)
                temp_first = getattr(
                    getattr(param, "time_interpolation", None),
                    "first", False)
                if temp_first and do_reindex:
                    precursor_id = init_reindex(
                        trid, tmp_dict, trid_dict, precursor_id, transform,
                        param, all_transforms, mapper, backup_comps,
                        precursors)

                # Horizontal and vertical reprojection if needed
                if domain_ref != domain_precurs:
                    precursor_id = init_regrid(
                        trid, tmp_dict, trid_dict, precursor_id, transform,
                        param, all_transforms, mapper, backup_comps,
                        precursors)

                # Do temporal re-indexing
                if not temp_first and do_reindex:
                    precursor_id = init_reindex(
                        trid, tmp_dict, trid_dict, precursor_id, transform,
                        param, all_transforms, mapper, backup_comps,
                        precursors)