# Source code for pycif.plugins.datastreams.fluxes.flux_plugin_template.read
import numpy as np
import xarray as xr
from logging import debug
# [docs]
def read(
    self,
    name,
    varnames,
    dates,
    files,
    interpol_flx=False,
    tracer=None,
    model=None,
    ddi=None,
    **kwargs
):
    """Get fluxes from raw files and load them into pyCIF variables.

    The list of date intervals and corresponding files is directly
    provided, coming from what is returned by the :bash:`fetch` function.
    One should loop on dates and files and extract the corresponding
    temporal slice of data.

    Warning:
        Make sure to optimize the opening of files. There is a high chance
        that the same file has to be opened and closed over and over again
        to loop on the dates. If this is the case, make sure not to close
        it between each date.

    Args:
        name (str): name of the component
        varnames (list[str]): original names of variables to read; use
            ``name`` if ``varnames`` is empty (unused in this template)
        dates (list): list of the date intervals to extract
        files (list): list of the files matching dates
        interpol_flx (bool): unused in this template
        tracer: tracer plugin; provides the domain dimensions and the
            statistics (``average_value``, ``std_value``) of the random
            fluxes generated below
        model: unused in this template
        ddi: unused in this template

    Return:
        xr.DataArray: the actual data with dimensions:
        time, lev, lat, lon
    """
    # Domain dimensions used to shape the generated fluxes
    domain = tracer.domain
    nlon = domain.nlon
    nlat = domain.nlat
    nlev = domain.nlev

    # Loop over dates/files; this template generates random values
    # instead of actually reading the files
    data = []
    out_dates = []
    for dd, ff in zip(dates, files):
        debug(
            "Reading the file {} for the date interval {}".format(
                ff, dd
            )
        )
        data.append(
            np.random.normal(
                tracer.average_value, tracer.std_value,
                (nlev, nlat, nlon)))
        # Each temporal slice is stamped with the start of its interval
        out_dates.append(dd[0])

    # Robustness: with an empty `dates` list, np.array([]) would be 1-D
    # and mismatch the 4 declared dimensions; build an explicitly-shaped
    # empty array instead. For non-empty input, np.stack is identical to
    # np.array over same-shape slices.
    if data:
        values = np.stack(data)
    else:
        values = np.empty((0, nlev, nlat, nlon))

    xmod = xr.DataArray(
        values,
        coords={"time": out_dates},
        dims=("time", "lev", "lat", "lon"),
    )
    return xmod