diff --git a/doc/_static/custom.css b/doc/_static/custom.css
index e3bc7f9ab..540973616 100644
--- a/doc/_static/custom.css
+++ b/doc/_static/custom.css
@@ -22,3 +22,9 @@ span.project-version {
border-top: 1px solid #dddddd;
padding: 1.8rem 0;
}
+
+/* Add some margins around jupyter-sphinx outputs */
+.cell_output {
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
diff --git a/doc/_templates/layout.html b/doc/_templates/layout.html
index 50fde1fad..bca5e7be9 100644
--- a/doc/_templates/layout.html
+++ b/doc/_templates/layout.html
@@ -8,7 +8,7 @@
{% block htmltitle %}
- {% if title == '' or title == 'Home' %}
+ {% if title == '' or 'no title' in title %}
{{ docstitle|striptags|e }}
{% else %}
{{ title|striptags|e }} | {{ docstitle|striptags|e }}
diff --git a/doc/changes.rst b/doc/changes.rst
index 001563c0b..9c4ec8a6c 100644
--- a/doc/changes.rst
+++ b/doc/changes.rst
@@ -10,10 +10,6 @@ Released on: 2023/05/08
DOI: https://doi.org/10.5281/zenodo.7907182
-.. warning::
-
- **Verde v1.8.0 is the last release that is compatible with Python 3.6.**
-
Deprecations:
* Deprecate the ``engine`` argument of ``Spline/SplineCV`` (`#373 `__)
@@ -74,6 +70,10 @@ Released on: 2022/03/25
DOI: https://doi.org/10.5281/zenodo.6384887
+.. warning::
+
+ **Verde v1.7.0 is the last release that is compatible with Python 3.6.**
+
Deprecation:
* Move the ``CheckerBoard`` class to ``verde.synthetic`` (`#353 `__)
diff --git a/doc/conf.py b/doc/conf.py
index c9c7b8561..dd3c9beae 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -31,11 +31,11 @@
"sphinx.ext.viewcode",
"sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
- "matplotlib.sphinxext.plot_directive",
"sphinx.ext.napoleon",
"sphinx_gallery.gen_gallery",
"sphinx_design",
"sphinx_copybutton",
+ "jupyter_sphinx",
]
# Configuration to include links to other project docs when referencing
diff --git a/doc/conventions.rst b/doc/conventions.rst
new file mode 100644
index 000000000..24fe5b459
--- /dev/null
+++ b/doc/conventions.rst
@@ -0,0 +1,30 @@
+.. _conventions:
+
+Definitions and conventions
+===========================
+
+Here are a few of the conventions and definitions we use across Verde:
+
+.. glossary::
+
+ Coordinate types
+ Coordinates can be **Cartesian or Geographic**. We generally make **no
+ assumptions** about which one you're using.
+
+ Order of coordinates
+ Coordinates are usually given as West-East and South-North. For example,
+ ``longitude, latitude`` or ``easting, northing``. All functions and
+ classes expect coordinates **in this order**. This applies to the
+ actual coordinate values, bounding regions, grid spacing, etc.
+ **Exceptions** to this rule are the ``dims`` and ``shape`` arguments.
+
+ Coordinate names
+ We **don't use names like "x" and "y"** to avoid ambiguity. Cartesian
+ coordinates are "easting" and "northing" and Geographic coordinates are
+ "longitude" and "latitude". Sometimes this doesn't make sense, like
+ when using a polar projection, but we keep the convention for the sake
+ of consistency.
+
+ Region
+ The term "region" means **the bounding box of the data**. It is ordered
+ west, east, south, north.
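+
+As a small illustrative snippet (assuming Verde is installed and imported),
+here is how these conventions look in practice::
+
+ import verde as vd
+
+ # A region is always (west, east, south, north)
+ region = (-70, -60, -45, -35)
+ # Coordinate arrays are returned in West-East, South-North order
+ longitude, latitude = vd.grid_coordinates(region=region, spacing=1)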
diff --git a/doc/index.rst b/doc/index.rst
index 80332e664..40c8f5193 100644
--- a/doc/index.rst
+++ b/doc/index.rst
@@ -1,4 +1,3 @@
-.. title:: Home
.. grid::
:gutter: 4 4 4 4
@@ -6,28 +5,24 @@
:padding: 0 0 0 0
.. grid-item::
- :columns: 12 4 12 4
+ :columns: 12 3 3 3
.. image:: ./_static/verde-logo.svg
:width: 200px
:class: sd-m-auto dark-light
.. grid-item::
- :columns: 12 8 12 8
+ :columns: 12 9 9 9
- .. grid::
- :padding: 0 0 0 0
+ .. raw:: html
- .. grid-item::
- :columns: 12 12 12 9
+
- .. raw:: html
+.. raw:: html
-
-
- .. div:: sd-fs-3
-
- Processing and gridding spatial data, machine-learning style
+
+ Processing and gridding spatial data, machine-learning style.
+
**Verde** is a Python library for processing spatial data (bathymetry,
geophysics surveys, etc) and interpolating it on regular grids (i.e.,
@@ -114,8 +109,8 @@ and more!
:hidden:
:caption: Getting Started
- tutorials/overview.rst
install.rst
+ introduction.rst
gallery/index.rst
.. toctree::
@@ -139,6 +134,7 @@ and more!
:caption: Reference documentation
api/index.rst
+ conventions.rst
citing.rst
references.rst
changes.rst
diff --git a/doc/introduction.rst b/doc/introduction.rst
new file mode 100644
index 000000000..786766c88
--- /dev/null
+++ b/doc/introduction.rst
@@ -0,0 +1,443 @@
+.. _overview:
+
+A taste of Verde
+================
+
+Verde offers a wealth of functionality for processing spatial and geophysical
+data, like **bathymetry, GPS, temperature, gravity, or anything else that is
+measured along a surface**.
+While our main focus is on gridding (interpolating on a regular grid), you'll
+also find other things like trend removal, data decimation, spatial
+cross-validation, and blocked operations.
+
+This example will show you some of what Verde can do to process data and
+generate a grid.
+
+The library
+-----------
+
+Most classes and functions are available through the :mod:`verde` top level
+package. So we can import only that and we'll have everything Verde has to offer:
+
+
+.. jupyter-execute::
+
+ import verde as vd
+
+.. note::
+
+ Throughout the documentation we'll use ``vd`` as the alias for
+ :mod:`verde`.
+
+We'll also import other modules for this example:
+
+.. jupyter-execute::
+
+ # Standard Scipy stack
+ import numpy as np
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ # For projecting data
+ import pyproj
+ # For plotting data on a map
+ import pygmt
+ # For fetching sample datasets
+ import ensaio
+
+
+.. jupyter-execute::
+ :hide-code:
+
+ # Needed so that displaying works on jupyter-sphinx and sphinx-gallery at
+ # the same time. Using PYGMT_USE_EXTERNAL_DISPLAY="false" in the Makefile
+ # for sphinx-gallery to work means that fig.show won't display anything here
+ # either.
+ pygmt.set_display(method="notebook")
+
+Loading some sample data
+------------------------
+
+For this example, we'll download some sample GPS/GNSS velocity data from across
+the Alps using :mod:`ensaio`:
+
+.. jupyter-execute::
+
+ path_to_data_file = ensaio.fetch_alps_gps(version=1)
+ print(path_to_data_file)
+
+Since our data are in `CSV `__
+format, the best way to load them is with :mod:`pandas`:
+
+.. jupyter-execute::
+
+ data = pd.read_csv(path_to_data_file)
+ data
+
+The data are the observed 3D velocity vectors of each GPS/GNSS station in
+mm/year and show the deformation of the crust that is caused by the subduction
+in the Alps.
+For this example, **we'll use only the vertical component** (but Verde can
+handle all 3 components as well).
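+
+To get a quick sense of the magnitude of the vertical velocities, we can look
+at their summary statistics (a quick aside, not needed for the rest of this
+example):
+
+.. jupyter-execute::
+
+ data.velocity_up_mmyr.describe()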
+
+Before we do anything with this data, it would be useful to extract from it the
+West, East, South, North bounding box (this is called a **region** in Verde).
+This will help us make a map and can be useful in other operations as well.
+Verde offers the :func:`verde.get_region` function for doing just that:
+
+.. jupyter-execute::
+
+ region = vd.get_region([data.longitude, data.latitude])
+ print(region)
+
+.. admonition:: Coordinate order
+ :class: tip
+
+ In Verde, coordinates are always given in the order:
+ **West-East, South-North**. All functions and classes expect coordinates in
+ this order. The **only exceptions** are the ``dims`` and ``shape`` arguments
+ that some functions take.
+
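+For instance, if we generate grid coordinates for this region with
+:func:`verde.grid_coordinates` (shown here only as an illustration), the
+first returned array is the West-East coordinate:
+
+.. jupyter-execute::
+
+ grid_longitude, grid_latitude = vd.grid_coordinates(region=region, spacing=1)
+ print(grid_longitude.min(), grid_longitude.max())
+ print(grid_latitude.min(), grid_latitude.max())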
+
+Let's plot this on a :mod:`pygmt` map so we can see it more clearly:
+
+.. jupyter-execute::
+
+ # Start a figure
+ fig = pygmt.Figure()
+ # Add a basemap with the data region, Mercator projection, default frame
+ # and ticks, color in the continents, and display national borders
+ fig.coast(
+ region=region, projection="M15c", frame="af",
+ land="#eeeeee", borders="1/#666666", area_thresh=1e4,
+ )
+ # Create a colormap for the velocity
+ pygmt.makecpt(
+ cmap="polar+h",
+ series=[data.velocity_up_mmyr.min(), data.velocity_up_mmyr.max()],
+ )
+ # Plot colored points for the velocities
+ fig.plot(
+ x=data.longitude,
+ y=data.latitude,
+ fill=data.velocity_up_mmyr,
+ style="c0.2c",
+ cmap=True,
+ pen="0.5p,black",
+ )
+ # Add a colorbar with automatic frame and ticks and a label
+ fig.colorbar(frame='af+l"vertical velocity [mm/yr]"')
+ fig.show()
+
+Decimate the data
+-----------------
+
+You may have noticed that the spacing between the points is highly variable.
+This can sometimes cause aliasing problems when gridding and also wastes
+computation when multiple points would fall on the same grid cell.
+To avoid all of this, it's customary to **block average** the data first.
+
+Block averaging means splitting the region into blocks (usually with the size
+of the desired grid spacing) and then taking the average of all points inside
+each block.
+In Verde, this is done by :class:`verde.BlockMean`:
+
+.. jupyter-execute::
+
+ # Desired grid spacing in degrees
+ spacing = 0.2
+ blockmean = vd.BlockMean(spacing=spacing)
+
+The :meth:`verde.BlockMean.filter` method applies the block average operation
+with the given spacing to some data.
+It returns for each block: the mean coordinates, the mean data value, and
+a weight (we'll get to that soon).
+
+.. jupyter-execute::
+
+ block_coordinates, block_velocity, block_weights = blockmean.filter(
+ coordinates=(data.longitude, data.latitude),
+ data=data.velocity_up_mmyr,
+ )
+ block_coordinates
+
+In this case, we have **uncertainty** data for each observation and so we can
+pass that as **input weights** to the block averaging (and compute a
+weighted average instead).
+The weights should usually be **1 over the uncertainty squared** and we need to
+let :class:`verde.BlockMean` know that our input weights are uncertainties.
+**It's always recommended to use weights if you have them!**
+
+.. jupyter-execute::
+
+ blockmean = vd.BlockMean(spacing=spacing, uncertainty=True)
+ block_coordinates, block_velocity, block_weights = blockmean.filter(
+ coordinates=(data.longitude, data.latitude),
+ data=data.velocity_up_mmyr,
+ weights=1 / data.velocity_up_error_mmyr**2,
+ )
+
+.. admonition:: What if I don't have uncertainties?
+ :class: attention
+
+ Don't worry! **Input weights are optional** in Verde and can always be
+ omitted.
+
+.. admonition:: Block weights
+
+ The weights that are returned by :meth:`verde.BlockMean.filter` can be
+ different things. See :class:`verde.BlockMean` for a detailed explanation.
+ In our case, they are 1 over the propagated uncertainty of the mean values
+ for each block.
+ These can be used in the gridding process to give less weight to the data
+ that have higher uncertainties.
+
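+Since these output weights are 1 over the squared propagated uncertainty, we
+can invert them to recover that uncertainty if we ever need it (shown here
+only as an illustration):
+
+.. jupyter-execute::
+
+ # The block weights are 1 / uncertainty**2, so invert and take the
+ # square root to get the propagated uncertainty of each block mean
+ block_uncertainty = np.sqrt(1 / block_weights)
+ print(block_uncertainty.min(), block_uncertainty.max())
+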
+Now let's plot the block-averaged data:
+
+.. jupyter-execute::
+
+ fig = pygmt.Figure()
+ fig.coast(
+ region=region, projection="M15c", frame="af",
+ land="#eeeeee", borders="1/#666666", area_thresh=1e4,
+ )
+ pygmt.makecpt(
+ cmap="polar+h", series=[block_velocity.min(), block_velocity.max()],
+ )
+ fig.plot(
+ x=block_coordinates[0],
+ y=block_coordinates[1],
+ fill=block_velocity,
+ style="c0.2c",
+ cmap=True,
+ pen="0.5p,black",
+ )
+ fig.colorbar(frame='af+l"vertical velocity [mm/yr]"')
+ fig.show()
+
+It may not seem like much happened, but if you look closely you'll see that
+data points that were closer than the spacing were combined into one.
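+
+We can check this by comparing the number of points before and after the
+block average:
+
+.. jupyter-execute::
+
+ print("Original points:", data.shape[0])
+ print("Block-averaged points:", block_velocity.size)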
+
+Project the data
+----------------
+
+In this example, we'll use Verde's Cartesian interpolators.
+So we need to project the geographic data we have to Cartesian coordinates
+first.
+We'll use :mod:`pyproj` to create a projection function and convert our
+longitude and latitude to easting and northing Mercator projection coordinates.
+
+.. jupyter-execute::
+
+ # Create a Mercator projection with latitude of true scale as the data mean
+ projection = pyproj.Proj(proj="merc", lat_ts=data.latitude.mean())
+
+ easting, northing = projection(block_coordinates[0], block_coordinates[1])
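+
+The projected coordinates are now in meters, which we can verify by checking
+their ranges:
+
+.. jupyter-execute::
+
+ print(easting.min(), easting.max())
+ print(northing.min(), northing.max())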
+
+Spline interpolation
+--------------------
+
+Since our data are relatively small (< 10k points), we can use the
+:class:`verde.Spline` class for bi-harmonic spline interpolation
+[Sandwell1987]_ to get a smooth surface that best fits the data:
+
+.. jupyter-execute::
+
+ # Generate a spline with the default arguments
+ spline = vd.Spline()
+ # Fit the spline to our decimated and projected data
+ spline.fit(
+ coordinates=(easting, northing),
+ data=block_velocity,
+ weights=block_weights,
+ )
+
+.. admonition:: Have more than 10k data points?
+ :class: seealso
+
+ You may want to use some of our other interpolators instead, like
+ :class:`~verde.KNeighbors` or :class:`~verde.Cubic`. The bi-harmonic spline
+ is very memory intensive, so avoid using it for more than 10k points unless
+ you have a lot of RAM.
+
+Now that we have a fitted spline, we can use it to **make predictions** at any
+location we want using :meth:`verde.Spline.predict`.
+For example, we can predict on the original data points to calculate the
+**residuals** and evaluate how well the spline fits our data.
+To do this, we'll have to project the original coordinates first:
+
+.. jupyter-execute::
+
+ prediction = spline.predict(projection(data.longitude, data.latitude))
+ residuals = data.velocity_up_mmyr - prediction
+
+ fig = pygmt.Figure()
+ fig.coast(
+ region=region, projection="M15c", frame="af",
+ land="#eeeeee", borders="1/#666666", area_thresh=1e4,
+ )
+ pygmt.makecpt(
+ cmap="polar+h", series=[residuals.min(), residuals.max()],
+ )
+ fig.plot(
+ x=data.longitude,
+ y=data.latitude,
+ fill=residuals,
+ style="c0.2c",
+ cmap=True,
+ pen="0.5p,black",
+ )
+ fig.colorbar(frame='af+l"fit residuals [mm/yr]"')
+ fig.show()
+
+As you can see from the colorbar, the residuals are quite small, meaning that
+the spline fits the decimated data very well.
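+
+We can also summarize the misfit with a single number, like the root mean
+square (RMS) of the residuals:
+
+.. jupyter-execute::
+
+ print(f"RMS residual: {np.sqrt(np.mean(residuals**2)):.4f} mm/yr")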
+
+Generating a grid
+-----------------
+
+To make a grid using our spline interpolation, we can use
+:meth:`verde.Spline.grid`:
+
+.. jupyter-execute::
+
+ # Set the spacing between grid nodes to 20 km (in meters, like our
+ # projected coordinates)
+ grid = spline.grid(spacing=20e3)
+ grid
+
+The generated grid is an :class:`xarray.Dataset` and is **Cartesian by
+default**.
+The grid contains some metadata and default names for the coordinates and data
+variables.
+Plotting the grid with matplotlib is as easy as:
+
+.. jupyter-execute::
+
+ # scalars is the default name Verde gives to data variables
+ grid.scalars.plot()
+
+But it's not that easy to draw borders and coastlines on top of this Cartesian
+grid.
+
+To generate a **geographic grid** with longitude and latitude, we can pass in
+the geographic region and the projection used like so:
+
+.. jupyter-execute::
+
+ # Use a spacing in degrees and customize the dimension and variable names
+ grid = spline.grid(
+ region=region,
+ spacing=spacing,
+ dims=("latitude", "longitude"),
+ data_names="velocity_up",
+ projection=projection, # Our projection function from earlier
+ )
+ grid
+
+
+Plotting a geographic grid is easier with PyGMT, which also lets us add
+coastlines and country borders:
+
+.. jupyter-execute::
+
+ fig = pygmt.Figure()
+ fig.grdimage(grid.velocity_up, cmap="polar+h", projection="M15c")
+ fig.coast(
+ frame="af", shorelines="#333333", borders="1/#666666", area_thresh=1e4,
+ )
+ fig.colorbar(frame='af+l"vertical velocity [mm/yr]"')
+ fig.plot(
+ x=data.longitude,
+ y=data.latitude,
+ fill="#333333",
+ style="c0.1c",
+ )
+ fig.show()
+
+.. admonition:: Did you notice?
+ :class: hint
+
+ The :class:`verde.Spline` was fitted only once on the input data and we then
+ used it to generate 3 separate interpolations. In general, fitting is the
+ most time-consuming part for bi-harmonic splines.
+
+Extracting a profile
+--------------------
+
+Once we have a fitted spline, we can also use it to predict data along a
+profile with the :meth:`verde.Spline.profile` method. For example, let's
+extract a profile that cuts across the Alps:
+
+.. jupyter-execute::
+
+ profile = spline.profile(
+ point1=(4, 51), # longitude, latitude of a point
+ point2=(11, 42),
+ size=200, # number of points
+ dims=("latitude", "longitude"),
+ data_names="velocity_up_mmyr",
+ projection=projection,
+ )
+ profile
+
+.. note::
+
+ We passed in a **projection** because our spline is Cartesian but we want to
+ generate a profile between 2 points specified with geographic coordinates.
+ The resulting points will be evenly spaced in the projected coordinates.
+
+The result is a :class:`pandas.DataFrame` with the coordinates, distance along
+the profile, and interpolated data values.
+Let's plot the location of the profile on our map:
+
+.. jupyter-execute::
+
+ fig = pygmt.Figure()
+ fig.grdimage(grid.velocity_up, cmap="polar+h", projection="M15c")
+ fig.coast(
+ frame="af", shorelines="#333333", borders="1/#666666", area_thresh=1e4,
+ )
+ fig.colorbar(frame='af+l"vertical velocity [mm/yr]"')
+ fig.plot(
+ x=profile.longitude,
+ y=profile.latitude,
+ pen="2p,#333333",
+ )
+ fig.show()
+
+Finally, we can plot the profile data using :mod:`matplotlib`:
+
+.. jupyter-execute::
+
+ plt.figure(figsize=(12, 6))
+ plt.plot(profile.distance, profile.velocity_up_mmyr, "-")
+ plt.title("Vertical GPS/GNSS velocity across the Alps")
+ plt.xlabel("Distance along the profile (m)")
+ plt.ylabel("Velocity (mm/yr)")
+ plt.xlim(profile.distance.min(), profile.distance.max())
+ plt.grid()
+ plt.show()
+
+Wrapping up
+-----------
+
+This covers the basics of using Verde. Most use cases will involve some
+variation of the following workflow (see the minimal sketch after the list):
+
+1. Load data (coordinates and data values)
+2. Create a gridder
+3. Fit the gridder to the data
+4. Predict new values (using :meth:`~verde.base.BaseGridder.predict`,
+ :meth:`~verde.base.BaseGridder.grid`, or
+ :meth:`~verde.base.BaseGridder.profile`)
+
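+Here's a minimal sketch of that workflow, using synthetic data from
+:class:`verde.synthetic.CheckerBoard` so that it runs on its own:
+
+.. jupyter-execute::
+
+ # 1. Load (here, generate) coordinates and data values
+ synth = vd.synthetic.CheckerBoard().scatter(size=500, random_state=0)
+ # 2. Create a gridder
+ gridder = vd.Spline()
+ # 3. Fit the gridder to the data
+ gridder.fit((synth.easting, synth.northing), synth.scalars)
+ # 4. Predict values on a regular Cartesian grid
+ checker = gridder.grid(spacing=100)
+ checker.scalars.plot()
+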
+**Now go explore the rest of the documentation and try out Verde on your own
+data!**
+
+.. admonition:: Questions or comments?
+ :class: seealso
+
+ Reach out to us through one of our `communication channels
+ `__! We love hearing from users and are
+ always looking for more people to get involved with developing Verde.
diff --git a/doc/tutorials_src/overview.py b/doc/tutorials_src/overview.py
deleted file mode 100644
index 3a67b1ff2..000000000
--- a/doc/tutorials_src/overview.py
+++ /dev/null
@@ -1,164 +0,0 @@
-# Copyright (c) 2017 The Verde Developers.
-# Distributed under the terms of the BSD 3-Clause License.
-# SPDX-License-Identifier: BSD-3-Clause
-#
-# This code is part of the Fatiando a Terra project (https://www.fatiando.org)
-#
-"""
-.. _overview:
-
-Overview
-========
-
-Verde provides classes and functions for processing spatial data, like
-bathymetry, GPS, temperature, gravity, or anything else that is measured along
-a surface. The main focus is on methods for gridding such data (interpolating
-on a regular grid). You'll also find other analysis methods that are often used
-in combination with gridding, like trend removal and blocked operations.
-
-Conventions
------------
-
-Before we get started, here are a few of the conventions we use across Verde:
-
-* Coordinates can be Cartesian or Geographic. We generally make no assumptions
- about which one you're using.
-* All functions and classes expect coordinates in the order: West-East and
- South-North. This applies to the actual coordinate values, bounding regions,
- grid spacing, etc. Exceptions to this rule are the ``dims`` and ``shape``
- arguments.
-* We don't use names like "x" and "y" to avoid ambiguity. Cartesian coordinates
- are "easting" and "northing" and Geographic coordinates are "longitude" and
- "latitude".
-* The term "region" means the bounding box of the data. It is ordered west,
- east, south, north.
-
-The library
------------
-
-Most classes and functions are available through the :mod:`verde` top level
-package. The only exception is the :mod:`verde.synthetic` module that has
-functionality for generating synthetic data. Throughout the documentation we'll
-use ``vd`` as the alias for :mod:`verde`.
-
-"""
-import matplotlib.pyplot as plt
-
-import verde as vd
-
-###############################################################################
-# .. _gridder_interface:
-#
-# The gridder interface
-# ---------------------
-#
-# All gridding and trend estimation classes in Verde share the same interface
-# (they all inherit from :class:`verde.base.BaseGridder`). Since most gridders
-# in Verde are linear models, we based our gridder interface on the
-# `scikit-learn `__ estimator interface: they all
-# implement a :meth:`~verde.base.BaseGridder.fit` method that estimates the
-# model parameters based on data and a :meth:`~verde.base.BaseGridder.predict`
-# method that calculates new data based on the estimated parameters.
-#
-# Unlike scikit-learn, our data model is not a feature matrix and a target
-# vector (e.g., ``est.fit(X, y)``) but a tuple of coordinate arrays and a data
-# vector (e.g., ``grd.fit((easting, northing), data)``). This makes more sense
-# for spatial data and is common to all classes and functions in Verde.
-#
-# As an example, let's generate some synthetic data using
-# :class:`verde.synthetic.CheckerBoard`:
-
-data = vd.synthetic.CheckerBoard().scatter(size=500, random_state=0)
-print(data.head())
-
-
-###############################################################################
-# The data are random points taken from a checkerboard function and returned to
-# us in a :class:`pandas.DataFrame`:
-
-plt.figure()
-plt.scatter(data.easting, data.northing, c=data.scalars, cmap="RdBu_r")
-plt.colorbar()
-plt.show()
-
-###############################################################################
-# Now we can use the bi-harmonic spline method [Sandwell1987]_ to fit this
-# data. First, we create a new :class:`verde.Spline`:
-
-spline = vd.Spline()
-# Printing a gridder shows the class and all of it's configuration options.
-print(spline)
-
-###############################################################################
-# Before we can use the spline, we need to fit it to our synthetic data. After
-# that, we can use the spline to predict values anywhere:
-
-spline.fit((data.easting, data.northing), data.scalars)
-
-# Generate coordinates for a regular grid with 100 m grid spacing (assuming
-# coordinates are in meters).
-grid_coords = vd.grid_coordinates(region=(0, 5000, -5000, 0), spacing=100)
-gridded_scalars = spline.predict(grid_coords)
-
-plt.figure()
-plt.pcolormesh(grid_coords[0], grid_coords[1], gridded_scalars, cmap="RdBu_r")
-plt.colorbar()
-plt.show()
-
-###############################################################################
-# We can compare our predictions with the true values for the checkerboard
-# function using the :meth:`~verde.Spline.score` method to calculate the
-# `R² coefficient of determination
-# `__.
-
-true_values = vd.synthetic.CheckerBoard().predict(grid_coords)
-print(spline.score(grid_coords, true_values))
-
-###############################################################################
-# Generating grids and profiles
-# -----------------------------
-#
-# A more convenient way of generating grids is through the
-# :meth:`~verde.base.BaseGridder.grid` method. It will automatically generate
-# coordinates and output an :class:`xarray.Dataset`.
-
-grid = spline.grid(spacing=30)
-print(grid)
-
-###############################################################################
-# :meth:`~verde.base.BaseGridder.grid` uses default names for the coordinates
-# ("easting" and "northing") and data variables ("scalars"). You can overwrite
-# these names by setting the ``dims`` and ``data_names`` arguments.
-
-grid = spline.grid(spacing=30, dims=["latitude", "longitude"], data_names="gravity")
-print(grid)
-
-plt.figure()
-grid.gravity.plot.pcolormesh()
-plt.show()
-
-###############################################################################
-# Gridders can also be used to interpolate data on a straight line between two
-# points using the :meth:`~verde.base.BaseGridder.profile` method. The profile
-# data are returned as a :class:`pandas.DataFrame`.
-
-prof = spline.profile(point1=(0, 0), point2=(5000, -5000), size=200)
-print(prof.head())
-
-plt.figure()
-plt.plot(prof.distance, prof.scalars, "-")
-plt.show()
-
-
-###############################################################################
-# Wrap up
-# -------
-#
-# This covers the basics of using Verde. Most use cases and examples in the
-# documentation will involve some variation of the following workflow:
-#
-# 1. Load data (coordinates and data values)
-# 2. Create a gridder
-# 3. Fit the gridder to the data
-# 4. Predict new values (using :meth:`~verde.base.BaseGridder.predict` or
-# :meth:`~verde.base.BaseGridder.grid`)
diff --git a/env/requirements-docs.txt b/env/requirements-docs.txt
index 333882d2b..d034e2717 100644
--- a/env/requirements-docs.txt
+++ b/env/requirements-docs.txt
@@ -3,9 +3,11 @@ sphinx-book-theme==1.1.*
sphinx-gallery==0.15.*
sphinx-copybutton==0.5.*
sphinx-design==0.5.*
+jupyter-sphinx==0.5.*
matplotlib==3.8.*
cartopy>=0.20
pyproj
pygmt==0.11.*
gmt==6.5.*
ipython
+ensaio
diff --git a/environment.yml b/environment.yml
index 298e07c1d..772379906 100644
--- a/environment.yml
+++ b/environment.yml
@@ -15,7 +15,7 @@ dependencies:
- xarray
- scikit-learn
- pooch
- - dask!=2021.03.0
+ - dask>=2022.01.0
# Optional
- pykdtree
- numba
@@ -32,10 +32,12 @@ dependencies:
- sphinx-gallery==0.15.*
- sphinx-copybutton==0.5.*
- sphinx-design==0.5.*
+ - jupyter-sphinx==0.5.*
- pyproj
- pygmt==0.11.*
- gmt==6.5.*
- ipython
+ - ensaio
# Style
- black
- pathspec