diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index 5cb879620cb..1a049fe58d8 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -158,6 +158,25 @@ def load_dataset(filename_or_obj: T_PathFileOrDataStore, **kwargs) -> Dataset:
     See Also
     --------
     open_dataset
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> ds = xr.Dataset({"a": (("x",), np.arange(3))})
+    >>> ds.to_netcdf("example_load_ds.nc")
+
+    >>> ds_loaded = xr.load_dataset("example_load_ds.nc")
+    >>> ds_loaded
+    <xarray.Dataset> Size: 24B
+    Dimensions:  (x: 3)
+    Dimensions without coordinates: x
+    Data variables:
+        a        (x) int64 24B 0 1 2
+
+    Clean up the example file:
+    >>> ds_loaded.close()
+    >>> import os
+    >>> os.remove("example_load_ds.nc")
     """
     if "cache" in kwargs:
         raise TypeError("cache has no effect in this context")
@@ -184,6 +203,23 @@ def load_dataarray(filename_or_obj: T_PathFileOrDataStore, **kwargs) -> DataArra
     See Also
     --------
     open_dataarray
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> da = xr.DataArray(np.arange(3), dims="x", name="a")
+    >>> da.to_netcdf("example_load_da.nc")
+
+    >>> da_loaded = xr.load_dataarray("example_load_da.nc")
+    >>> da_loaded
+    <xarray.DataArray 'a' (x: 3)> Size: 24B
+    array([0, 1, 2])
+    Dimensions without coordinates: x
+
+    Clean up the example file:
+    >>> da_loaded.close()
+    >>> import os
+    >>> os.remove("example_load_da.nc")
     """
     if "cache" in kwargs:
         raise TypeError("cache has no effect in this context")
@@ -575,6 +611,35 @@ class (a subclass of ``BackendEntrypoint``) can also be used.
     See Also
     --------
     open_mfdataset
+    load_dataset
+    open_dataarray
+
+    Examples
+    --------
+    Open a dataset from a netCDF file. First, we create a dummy file for this example:
+
+    >>> import numpy as np
+    >>> ds = xr.Dataset({"a": (("x",), np.arange(3))})
+    >>> ds.to_netcdf("example.nc")
+
+    >>> ds_disk = xr.open_dataset("example.nc")
+    >>> ds_disk
+    <xarray.Dataset> Size: 24B
+    Dimensions:  (x: 3)
+    Dimensions without coordinates: x
+    Data variables:
+        a        (x) int64 24B 0 1 2
+
+    Open a dataset from a remote OPeNDAP URL:
+
+    >>> ds = xr.open_dataset(
+    ...     "http://www.esrl.noaa.gov/psd/thredds/dodsC/Datasets/ncep.reanalysis.dailyavgs/surface/air.sig995.1948.nc"
+    ... )  # doctest: +SKIP
+
+    Clean up the example file:
+    >>> ds_disk.close()
+    >>> import os
+    >>> os.remove("example.nc")
     """

     if cache is None:
@@ -808,6 +873,26 @@ class (a subclass of ``BackendEntrypoint``) can also be used.
     See also
     --------
     open_dataset
+    load_dataarray
+
+    Examples
+    --------
+    Open a DataArray from a netCDF file. First, we create a dummy file:
+
+    >>> import numpy as np
+    >>> da = xr.DataArray(np.arange(3), dims="x", name="a")
+    >>> da.to_netcdf("example_da.nc")
+
+    >>> da_disk = xr.open_dataarray("example_da.nc")
+    >>> da_disk
+    <xarray.DataArray 'a' (x: 3)> Size: 24B
+    array([0, 1, 2])
+    Dimensions without coordinates: x
+
+    Clean up the example file:
+    >>> da_disk.close()
+    >>> import os
+    >>> os.remove("example_da.nc")
     """

     dataset = open_dataset(
@@ -1291,6 +1376,31 @@ def open_groups(
     xarray.open_datatree
     xarray.open_dataset
     xarray.DataTree.from_dict
+
+    Examples
+    --------
+    Open groups from a netCDF file as a dictionary of Datasets. First, create a file with groups:
+
+    >>> import numpy as np
+    >>> ds1 = xr.Dataset({"a": (("x",), np.arange(3))})
+    >>> ds2 = xr.Dataset({"b": (("y",), np.arange(2))})
+    >>> ds1.to_netcdf("example_groups.nc", group="group1", mode="w")  # doctest: +SKIP
+    >>> ds2.to_netcdf("example_groups.nc", group="group2", mode="a")  # doctest: +SKIP
+
+    >>> groups = xr.open_groups("example_groups.nc")  # doctest: +SKIP
+    >>> sorted(groups.keys())  # doctest: +SKIP
+    ['group1', 'group2']
+    >>> groups["group1"]  # doctest: +SKIP
+    <xarray.Dataset> Size: 24B
+    Dimensions:  (x: 3)
+    Dimensions without coordinates: x
+    Data variables:
+        a        (x) int64 24B 0 1 2
+
+    Clean up the example file:
+    >>> for group_ds in groups.values(): group_ds.close()  # doctest: +SKIP
+    >>> import os  # doctest: +SKIP
+    >>> os.remove("example_groups.nc")  # doctest: +SKIP
     """
     if cache is None:
         cache = chunks is None
@@ -1554,6 +1664,25 @@ class (a subclass of ``BackendEntrypoint``) can also be used.
Examples -------- + >>> import numpy as np + >>> ds1 = xr.Dataset({"a": (("x",), np.arange(3))}, coords={"x": [0, 1, 2]}) + >>> ds2 = xr.Dataset({"a": (("x",), np.arange(3))}, coords={"x": [3, 4, 5]}) + >>> ds1.to_netcdf("example_mf_1.nc") # doctest: +SKIP + >>> ds2.to_netcdf("example_mf_2.nc") # doctest: +SKIP + >>> ds = xr.open_mfdataset( + ... ["example_mf_1.nc", "example_mf_2.nc"], combine="by_coords" + ... ) # doctest: +SKIP + >>> ds # doctest: +SKIP + ... + Dimensions: (x: 6) + Coordinates: + * x (x) ... + Data variables: + a (x) ... + >>> import os # doctest: +SKIP + >>> os.remove("example_mf_1.nc") # doctest: +SKIP + >>> os.remove("example_mf_2.nc") # doctest: +SKIP + A user might want to pass additional arguments into ``preprocess`` when applying some operation to many individual files that are being opened. One route to do this is through the use of ``functools.partial``. diff --git a/xarray/backends/writers.py b/xarray/backends/writers.py index 07745120e36..235c53402bd 100644 --- a/xarray/backends/writers.py +++ b/xarray/backends/writers.py @@ -561,13 +561,13 @@ def save_mfdataset( >>> ds = xr.Dataset( ... {"a": ("time", np.linspace(0, 1, 48))}, - ... coords={"time": pd.date_range("2010-01-01", freq="ME", periods=48)}, + ... coords={"time": pd.date_range("2010-01-01", freq="MS", periods=48)}, ... ) >>> ds Size: 768B Dimensions: (time: 48) Coordinates: - * time (time) datetime64[ns] 384B 2010-01-31 2010-02-28 ... 2013-12-31 + * time (time) datetime64[ns] 384B 2010-01-01 2010-02-01 ... 2013-12-01 Data variables: a (time) float64 384B 0.0 0.02128 0.04255 ... 0.9574 0.9787 1.0 >>> years, datasets = zip(*ds.groupby("time.year")) diff --git a/xarray/coding/frequencies.py b/xarray/coding/frequencies.py index 34f01aadeef..38de3ec0ec0 100644 --- a/xarray/coding/frequencies.py +++ b/xarray/coding/frequencies.py @@ -79,6 +79,20 @@ def infer_freq(index): If the index is not datetime-like. ValueError If there are fewer than three values or the index is not 1D. 
+ + See Also + -------- + pandas.infer_freq + + Examples + -------- + >>> import pandas as pd + >>> times = pd.date_range("2000-01-01", periods=5, freq="D") + >>> xr.infer_freq(times) + 'D' + >>> times = pd.date_range("2000-01-01", periods=5, freq="2h") + >>> xr.infer_freq(times) + '2h' """ from xarray.core.dataarray import DataArray from xarray.core.variable import Variable diff --git a/xarray/computation/computation.py b/xarray/computation/computation.py index b2b53bb9350..c59e5c9d133 100644 --- a/xarray/computation/computation.py +++ b/xarray/computation/computation.py @@ -821,6 +821,16 @@ def polyval( -------- xarray.DataArray.polyfit numpy.polynomial.polynomial.polyval + + Examples + -------- + >>> import numpy as np + >>> x = xr.DataArray(np.arange(5), dims="x") + >>> coeffs = xr.DataArray([1, 2, 3], dims="degree", coords={"degree": [0, 1, 2]}) + >>> xr.polyval(x, coeffs) + ... + ... + Dimensions without coordinates: x """ if degree_dim not in coeffs._indexes: diff --git a/xarray/conventions.py b/xarray/conventions.py index f1908b1a114..c6b0d878553 100644 --- a/xarray/conventions.py +++ b/xarray/conventions.py @@ -564,6 +564,26 @@ def decode_cf( Returns ------- decoded : Dataset + + See Also + -------- + encode_dataset_coordinates + decode_cf_variable + + Examples + -------- + >>> import numpy as np + >>> import pandas as pd + >>> ds = xr.Dataset({"temp": (("time",), np.arange(5))}) + >>> ds["time"] = pd.date_range("2000-01-01", periods=5) + >>> decoded = xr.conventions.decode_cf(ds) + >>> decoded + ... + Dimensions: (time: 5) + Coordinates: + * time (time) ... + Data variables: + temp (time) ... 
""" from xarray.backends.common import AbstractDataStore from xarray.core.dataset import Dataset diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index bce048048da..69abc42e821 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -787,6 +787,13 @@ def compute(self, **kwargs) -> Self: Dataset.load_async DataArray.compute Variable.compute + + Examples + -------- + >>> ds = xr.Dataset({"a": (("x",), [1, 2, 3])}).chunk({"x": 1}) + >>> computed = ds.compute() + >>> type(computed["a"].data) + """ new = self.copy(deep=False) return new.load(**kwargs) @@ -831,7 +838,13 @@ def persist(self, **kwargs) -> Self: See Also -------- dask.persist - """ + + Examples + -------- + >>> ds = xr.Dataset({"a": (("x",), [1, 2, 3])}).chunk({"x": 1}) + >>> persisted = ds.persist() + >>> type(persisted["a"].data) + """ new = self.copy(deep=False) return new._persist_inplace(**kwargs) @@ -6185,6 +6198,34 @@ def drop_dims( obj : Dataset The dataset without the given dimensions (or any variables containing those dimensions). + + See Also + -------- + Dataset.drop_vars + DataArray.drop_vars + + Examples + -------- + >>> ds = xr.Dataset( + ... {"a": (("x", "y"), [[1, 2], [3, 4]]), "b": (("x",), [5, 6])}, + ... coords={"x": [0, 1], "y": [0, 1]}, + ... ) + >>> ds + ... + Dimensions: (x: 2, y: 2) + Coordinates: + * x (x) ... + * y (y) ... + Data variables: + a (x, y) ... + b (x) ... + >>> ds.drop_dims("y") + ... + Dimensions: (x: 2) + Coordinates: + * x (x) ... + Data variables: + b (x) ... """ if errors not in ["raise", "ignore"]: raise ValueError('errors must be either "raise" or "ignore"') diff --git a/xarray/core/options.py b/xarray/core/options.py index 451070ce7b4..e83e0432fd0 100644 --- a/xarray/core/options.py +++ b/xarray/core/options.py @@ -350,8 +350,17 @@ def get_options(): Get options for xarray. 
See Also - ---------- + -------- set_options + Examples + -------- + >>> original_width = xr.get_options()["display_width"] + >>> with xr.set_options(display_width=original_width + 10): + ... xr.get_options()["display_width"] == original_width + 10 + ... + True + >>> xr.get_options()["display_width"] == original_width + True """ return FrozenDict(OPTIONS) diff --git a/xarray/structure/chunks.py b/xarray/structure/chunks.py index 5e45b3da9ad..509a4226a64 100644 --- a/xarray/structure/chunks.py +++ b/xarray/structure/chunks.py @@ -146,6 +146,21 @@ def unify_chunks(*objects: Dataset | DataArray) -> tuple[Dataset | DataArray, .. See Also -------- dask.array.core.unify_chunks + + Examples + -------- + >>> import numpy as np # doctest: +SKIP + >>> da1 = xr.DataArray(np.arange(6).reshape(2, 3), dims=("x", "y")).chunk( + ... {"x": 1} + ... ) # doctest: +SKIP + >>> da2 = xr.DataArray(np.arange(6).reshape(2, 3), dims=("x", "y")).chunk( + ... {"y": 1} + ... ) # doctest: +SKIP + >>> da1, da2 = xr.unify_chunks(da1, da2) # doctest: +SKIP + >>> da1.chunks # doctest: +SKIP + ((1, 1), (3,)) + >>> da2.chunks # doctest: +SKIP + ((1, 1), (3,)) """ from xarray.core.dataarray import DataArray