diff --git a/.gitignore b/.gitignore index 6ad1c5784..16c433dfe 100644 --- a/.gitignore +++ b/.gitignore @@ -108,3 +108,15 @@ info/ # Cython generated C code *.c +*.cpp + + +docs/source/examples/ + + +*.DS_Store + +*.zip + +*.vtu +*.vtr
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5a5f354c2..3d8202201 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -23,7 +23,7 @@ print(gstools.__version__) Open a [new issue](https://github.com/GeoStat-Framework/GSTools/issues) with your idea or suggestion and we'd love to discuss about it. - + ## Do you want to enhance GSTools or fix something? @@ -31,4 +31,6 @@ with your idea or suggestion and we'd love to discuss about it. - Add yourself to AUTHORS.md (if you want to). - We use the black code format, please use the script `black --line-length 79 gstools/` after you have written your code. - Add some tests if possible. +- Add an example showing your new feature in one of the examples sub-folders if possible. + Follow this [Sphinx-Gallery guide](https://sphinx-gallery.github.io/stable/syntax.html#embed-rst-in-your-example-python-files). - Push to your fork and submit a pull request.
diff --git a/docs/requirements.txt b/docs/requirements.txt index 3eb3c0287..fcfbc9e58 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,4 +2,7 @@ cython>=0.28.3 numpydoc # https://stackoverflow.com/a/11704396/6696397 --r ../requirements.txt \ No newline at end of file +-r ../requirements.txt +sphinx-gallery +matplotlib +pykrige
diff --git a/docs/source/conf.py b/docs/source/conf.py index 1961b10a9..ca52f6a86 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -20,8 +20,14 @@ # NOTE: # pip install sphinx_rtd_theme # is needed in order to build the documentation -import os -import sys +# import os +# import sys +import warnings +warnings.filterwarnings( + "ignore", + category=UserWarning, + message="Matplotlib is currently using agg, which is a non-GUI backend, so cannot show the figure.", +) # local module should not be added to sys path if it's installed on RTFD # see: https://stackoverflow.com/a/31882049/6696397 @@ -59,6 +65,7 @@ def setup(app): "sphinx.ext.autosummary", "sphinx.ext.napoleon", # parameters look better than with numpydoc only "numpydoc", + "sphinx_gallery.gen_gallery", ] # autosummaries from source-files @@ -247,3 +254,47 @@ def setup(app): "hankel": ("https://hankel.readthedocs.io/en/latest/", None), "emcee": ("https://emcee.readthedocs.io/en/latest/", None), } + + + +# -- Sphinx Gallery Options +from sphinx_gallery.sorting import FileNameSortKey + +sphinx_gallery_conf = { + # path to the example scripts + "examples_dirs": [ + "../../examples/00_misc/", + "../../examples/01_random_field/", + "../../examples/02_cov_model/", + "../../examples/03_variogram/", + "../../examples/04_vector_field/", + "../../examples/05_kriging/", + "../../examples/06_conditioned_fields/", + "../../examples/07_transformations/", + ], + # path where the generated gallery examples are saved + "gallery_dirs": [ + "examples/00_misc/", + "examples/01_random_field/", + "examples/02_cov_model/", + "examples/03_variogram/", + "examples/04_vector_field/", + "examples/05_kriging/", + "examples/06_conditioned_fields/", + "examples/07_transformations/", + ], + # Pattern to search for example files + "filename_pattern": r"\.py", + # Remove the "Download all examples" button from the top level gallery + "download_all_examples": False, + # Sort gallery examples by file name instead of number of lines (default) + "within_subsection_order": FileNameSortKey, + # 
directory where function-granular galleries are stored + "backreferences_dir": None, + # Modules for which function-level galleries are created. + "doc_module": "gstools", + #"image_scrapers": ('pyvista', 'matplotlib'), + # 'first_notebook_cell': ("%matplotlib inline\n" + # "from pyvista import set_plot_theme\n" + # "set_plot_theme('document')"), +}
diff --git a/docs/source/tutorial_01_srf.rst b/docs/source/tutorial_01_srf.rst deleted file mode 100644 index 9ce2164ee..000000000 --- a/docs/source/tutorial_01_srf.rst +++ /dev/null @@ -1,281 +0,0 @@ -Tutorial 1: Random Field Generation =================================== The main feature of GSTools is the spatial random field generator :any:`SRF`, which can generate random fields following a given covariance model. The generator provides a lot of nice features, which will be explained in the following. Theoretical Background ---------------------- GSTools generates spatial random fields with a given covariance model or semi-variogram. This is done by using the so-called randomization method. The spatial random field is represented by a stochastic Fourier integral and its discretised modes are evaluated at random frequencies. GSTools supports arbitrary and non-isotropic covariance models. A Very Simple Example --------------------- We are going to start with a very simple example of a spatial random field with an isotropic Gaussian covariance model and the following parameters: - variance :math:`\sigma^2=1` - correlation length :math:`\lambda=10` First, we set things up and create the axes for the field. We are going to need the :any:`SRF` class for the actual generation of the spatial random field. But :any:`SRF` also needs a covariance model and we will simply take the :any:`Gaussian` model. .. code-block:: python from gstools import SRF, Gaussian x = y = range(100) Now we create the covariance model with the parameters :math:`\sigma^2` and :math:`\lambda` and hand it over to :any:`SRF`. By specifying a seed, we make sure to create reproducible results: .. code-block:: python model = Gaussian(dim=2, var=1, len_scale=10) srf = SRF(model, seed=20170519) With these simple steps, everything is ready to create our first random field. We will create the field on a structured grid (as you might have guessed from the `x` and `y`), which makes it easier to plot. .. code-block:: python field = srf.structured([x, y]) srf.plot() Yielding .. image:: pics/srf_tut_gau_field.png :width: 600px :align: center Wow, that was pretty easy! The script can be found in :download:`gstools/examples/00_gaussian.py<../../examples/00_gaussian.py>` Creating an Ensemble of Fields ------------------------------ Creating an ensemble of random fields would also be a great idea. Let's reuse most of the previous code. .. code-block:: python import numpy as np import matplotlib.pyplot as pt from gstools import SRF, Gaussian x = y = np.arange(100) model = Gaussian(dim=2, var=1, len_scale=10) srf = SRF(model) This time, we did not provide a seed to :any:`SRF`, as the seeds will be used during the actual computation of the fields. We will create four ensemble members (for better visualisation) and save them in a list. As a first step, we will use the loop counter as the seeds. .. code-block:: python ens_no = 4 field = [] for i in range(ens_no): field.append(srf.structured([x, y], seed=i)) Now let's have a look at the results: .. 
code-block:: python fig, ax = pt.subplots(2, 2, sharex=True, sharey=True) ax = ax.flatten() for i in range(ens_no): ax[i].imshow(field[i].T, origin='lower') pt.show() Yielding .. image:: pics/srf_tut_gau_field_ens.png :width: 600px :align: center The script can be found in :download:`gstools/examples/05_srf_ensemble.py<../../examples/05_srf_ensemble.py>` Using better Seeds ^^^^^^^^^^^^^^^^^^ It is not always a good idea to use incrementing seeds. Therefore GSTools provides a seed generator :any:`MasterRNG`. The loop in which the fields are generated would then look like: .. code-block:: python from gstools.random import MasterRNG seed = MasterRNG(20170519) for i in range(ens_no): field.append(srf.structured([x, y], seed=seed())) Creating Fancier Fields ----------------------- Only using Gaussian covariance fields gets boring. Now we are going to create much rougher random fields by using an exponential covariance model and we are going to make them anisotropic. The code is very similar to the previous examples, but with a different covariance model class :any:`Exponential`. As model parameters we are using the following: - variance :math:`\sigma^2=1` - correlation length :math:`\lambda=(12, 3)^T` - rotation angle :math:`\theta=\pi/8` .. code-block:: python import numpy as np from gstools import SRF, Exponential x = y = np.arange(100) model = Exponential(dim=2, var=1, len_scale=[12., 3.], angles=np.pi/8.) srf = SRF(model, seed=20170519) srf.structured([x, y]) srf.plot() Yielding .. image:: pics/srf_tut_exp_ani_rot.png :width: 600px :align: center The anisotropy ratio could also have been set with .. code-block:: python model = Exponential(dim=2, var=1, len_scale=12., anis=3./12., angles=np.pi/8.) Using an Unstructured Grid -------------------------- For many applications, the random fields are needed on an unstructured grid. Normally, such a grid would be read in, but we can simply generate one and then create a random field at those coordinates. .. code-block:: python import numpy as np from gstools import SRF, Exponential from gstools.random import MasterRNG seed = MasterRNG(19970221) rng = np.random.RandomState(seed()) x = rng.randint(0, 100, size=10000) y = rng.randint(0, 100, size=10000) model = Exponential(dim=2, var=1, len_scale=[12., 3.], angles=np.pi/8.) srf = SRF(model, seed=20170519) srf([x, y]) srf.plot() Yielding .. image:: pics/srf_tut_unstr.png :width: 600px :align: center Comparing this image to the previous one, you can see that by using the same seed, the same field can be computed on different grids. The script can be found in :download:`gstools/examples/06_unstr_srf_export.py<../../examples/06_unstr_srf_export.py>` Exporting a Field ----------------- Using the field from the `previous example <Using an Unstructured Grid_>`__, it can simply be exported to the file ``field.vtu`` and viewed by e.g. ParaView with the following line of code .. code-block:: python srf.vtk_export("field") Or it can be visualized immediately in Python using `PyVista <https://docs.pyvista.org>`__: .. 
code-block:: python mesh = srf.to_pyvista("field") mesh.plot() The script can be found in :download:`gstools/examples/04_export.py<../../examples/04_export.py>` and in :download:`gstools/examples/06_unstr_srf_export.py<../../examples/06_unstr_srf_export.py>` Merging two Fields ------------------ We can even generate the same field realisation on different grids. Let's try to merge two unstructured rectangular fields. The first field will be generated exactly like in example `Using an Unstructured Grid`_: .. code-block:: python import numpy as np import matplotlib.pyplot as pt from gstools import SRF, Exponential from gstools.random import MasterRNG seed = MasterRNG(19970221) rng = np.random.RandomState(seed()) x = rng.randint(0, 100, size=10000) y = rng.randint(0, 100, size=10000) model = Exponential(dim=2, var=1, len_scale=[12., 3.], angles=np.pi/8.) srf = SRF(model, seed=20170519) field = srf([x, y]) But now we extend the field on the right hand side by creating a new unstructured grid and calculating a field with the same parameters and the same seed on it: .. code-block:: python # new grid seed = MasterRNG(20011012) rng = np.random.RandomState(seed()) x2 = rng.randint(99, 150, size=10000) y2 = rng.randint(20, 80, size=10000) field2 = srf((x2, y2)) pt.tricontourf(x, y, field.T) pt.tricontourf(x2, y2, field2.T) pt.axes().set_aspect('equal') pt.show() Yielding .. image:: pics/srf_tut_merge.png :width: 600px :align: center The slight mismatch where the two fields were merged is merely due to interpolation problems of the plotting routine. You can convince yourself by increasing the resolution of the grids by a factor of 10. Of course, this merging could also have been done by appending the grid points ``(x2, y2)`` to the original grid ``(x, y)`` before generating the field. But one application scenario would be to generate huge fields, which would not fit into memory anymore. The script can be found in :download:`gstools/examples/07_srf_merge.py<../../examples/07_srf_merge.py>` .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_02_cov.rst b/docs/source/tutorial_02_cov.rst deleted file mode 100644 index 76f6f823d..000000000 --- a/docs/source/tutorial_02_cov.rst +++ /dev/null @@ -1,503 +0,0 @@ -Tutorial 2: The Covariance Model ================================ One of the core-features of GSTools is the powerful :any:`CovModel` class, which allows you to easily define arbitrary covariance models by yourself. The resulting models provide a bunch of nice features to explore the covariance models. Theoretical Background ---------------------- A covariance model is used to characterize the `semi-variogram <https://en.wikipedia.org/wiki/Variogram#Semivariogram>`_, denoted by :math:`\gamma`, of a spatial random field. In GSTools, we use the following form for an isotropic and stationary field: .. math:: \gamma\left(r\right)= \sigma^2\cdot\left(1-\mathrm{cor}\left(r\right)\right)+n Where: - :math:`\mathrm{cor}(r)` is the so-called `correlation <https://en.wikipedia.org/wiki/Autocovariance#Normalization>`_ function depending on the distance :math:`r` - :math:`\sigma^2` is the variance - :math:`n` is the nugget (subscale variance) .. note:: We are not limited to isotropic models. We support anisotropy ratios for length scales in orthogonal transversal directions like: - :math:`x` (main direction) - :math:`y` (1. 
transversal direction) - - :math:`z` (2. transversal direction) - - These main directions can also be rotated, but we will come to that later. - -Example -------- - -Let us start with a short example of a self defined model (Of course, we -provide a lot of predefined models [See: :any:`gstools.covmodel`], -but they all work the same way). -Therefore we reimplement the Gaussian covariance model by defining just the -`correlation <https://en.wikipedia.org/wiki/Autocovariance#Normalization>`_ function: - -.. code-block:: python - - from gstools import CovModel - import numpy as np - # use CovModel as the base-class - class Gau(CovModel): - def correlation(self, r): - return np.exp(-(r/self.len_scale)**2) - -Now we can instantiate this model: - -.. code-block:: python - - model = Gau(dim=2, var=2., len_scale=10) - -To have a look at the variogram, let's plot it: - -.. code-block:: python - - from gstools.covmodel.plot import plot_variogram - plot_variogram(model) - -Which gives: - -.. image:: pics/cov_model_vario.png - :width: 400px - :align: center - -Parameters ----------- - -We already used some parameters, which every covariance models has. The basic ones -are: - - - **dim** : dimension of the model - - **var** : variance of the model (on top of the subscale variance) - - **len_scale** : length scale of the model - - **nugget** : nugget (subscale variance) of the model - -These are the common parameters used to characterize a covariance model and are -therefore used by every model in GSTools. You can also access and reset them: - -.. code-block:: python - - print(model.dim, model.var, model.len_scale, model.nugget, model.sill) - model.dim = 3 - model.var = 1 - model.len_scale = 15 - model.nugget = 0.1 - print(model.dim, model.var, model.len_scale, model.nugget, model.sill) - -Which gives: - -.. code-block:: python - - 2 2.0 10 0.0 2.0 - 3 1.0 15 0.1 1.1 - -.. note:: - - - The sill of the variogram is calculated by ``sill = variance + nugget`` - So we treat the variance as everything **above** the nugget, which is sometimes - called **partial sill**. - - A covariance model can also have additional parameters. - -Anisotropy ----------- - -The internally used (semi-) variogram represents the isotropic case for the model. -Nevertheless, you can provide anisotropy ratios by: - -.. code-block:: python - - model = Gau(dim=3, var=2., len_scale=10, anis=0.5) - print(model.anis) - print(model.len_scale_vec) - -Which gives: - -.. code-block:: python - - [0.5 1. ] - [10. 5. 10.] - -As you can see, we defined just one anisotropy-ratio and the second transversal -direction was filled up with ``1.`` and you can get the length-scales in each -direction by the attribute :any:`len_scale_vec`. For full control you can set -a list of anistropy ratios: ``anis=[0.5, 0.4]``. - -Alternatively you can provide a list of length-scales: - -.. code-block:: python - - model = Gau(dim=3, var=2., len_scale=[10, 5, 4]) - print(model.anis) - print(model.len_scale) - print(model.len_scale_vec) - -Which gives: - -.. code-block:: python - - [0.5 0.4] - 10 - [10. 5. 4.] - -Rotation Angles ---------------- - -The main directions of the field don't have to coincide with the spatial -directions :math:`x`, :math:`y` and :math:`z`. Therefore you can provide -rotation angles for the model: - -.. code-block:: python - - model = Gau(dim=3, var=2., len_scale=10, angles=2.5) - print(model.angles) - -Which gives: - -.. code-block:: python - - [2.5 0. 0. 
] Again, the angles were filled up with ``0.`` to match the dimension and you could also provide a list of angles. The number of angles depends on the given dimension: - in 1D: no rotation performable - in 2D: given as rotation around z-axis - in 3D: given by yaw, pitch, and roll (known as `Tait–Bryan <https://en.wikipedia.org/wiki/Euler_angles#Tait-Bryan_angles>`_ angles) Methods ------- The covariance model class :any:`CovModel` of GSTools provides a set of handy methods. Basics ^^^^^^ One of the following functions defines the main characterization of the variogram: - ``variogram`` : The variogram of the model given by .. math:: \gamma\left(r\right)= \sigma^2\cdot\left(1-\mathrm{cor}\left(r\right)\right)+n - ``covariance`` : The (auto-)covariance of the model given by .. math:: C\left(r\right)= \sigma^2\cdot\mathrm{cor}\left(r\right) - ``correlation`` : The (auto-)correlation (or normalized covariance) of the model given by .. math:: \mathrm{cor}\left(r\right) As you can see, the easiest way to define a covariance model is by giving a correlation function, as demonstrated by the above model ``Gau``. If one of the above functions is given, the others will be determined: .. code-block:: python model = Gau(dim=3, var=2., len_scale=10, nugget=0.5) print(model.variogram(10.)) print(model.covariance(10.)) print(model.correlation(10.)) Which gives: .. code-block:: python 1.7642411176571153 0.7357588823428847 0.36787944117144233 Spectral methods ^^^^^^^^^^^^^^^^ The spectrum of a covariance model is given by: .. math:: S(\mathbf{k}) = \left(\frac{1}{2\pi}\right)^n \int C(\Vert\mathbf{r}\Vert) e^{i b\mathbf{k}\cdot\mathbf{r}} d^n\mathbf{r} Since the covariance function :math:`C(r)` is radially symmetric, we can calculate this by the `Hankel transformation <https://en.wikipedia.org/wiki/Hankel_transform>`_: .. math:: S(k) = \left(\frac{1}{2\pi}\right)^n \cdot \frac{(2\pi)^{n/2}}{(bk)^{n/2-1}} \int_0^\infty r^{n/2-1} C(r) J_{n/2-1}(bkr) r dr Where :math:`k=\left\Vert\mathbf{k}\right\Vert`. Depending on the spectrum, the spectral-density is defined by: .. math:: \tilde{S}(k) = \frac{S(k)}{\sigma^2} You can access these methods by: .. code-block:: python model = Gau(dim=3, var=2., len_scale=10) print(model.spectrum(0.1)) print(model.spectral_density(0.1)) Which gives: .. code-block:: python 34.96564773852395 17.482823869261974 .. note:: The spectral-density is given by the radius of the input phase. But it is **not** a probability density function for the radius of the phase. To obtain the pdf for the phase-radius, you can use the methods :any:`spectral_rad_pdf` or :any:`ln_spectral_rad_pdf` for the logarithm. The user can also provide a cdf (cumulative distribution function) by defining a method called ``spectral_rad_cdf`` and/or a ppf (percent-point function) by ``spectral_rad_ppf``. The attributes :any:`has_cdf` and :any:`has_ppf` will check for that. Different scales ---------------- Besides the length-scale, there are many other ways of characterizing a certain scale of a covariance model. We provide two common scales with the covariance model. Integral scale ^^^^^^^^^^^^^^ The `integral scale <https://en.wikipedia.org/wiki/Integral_length_scale>`_ of a covariance model is calculated by: .. math:: I = \int_0^\infty \mathrm{cor}(r) dr You can access it by: .. 
code-block:: python model = Gau(dim=3, var=2., len_scale=10) print(model.integral_scale) print(model.integral_scale_vec) Which gives: .. code-block:: python 8.862269254527579 [8.86226925 8.86226925 8.86226925] You can also specify integral length scales like the ordinary length scale, and len_scale/anis will be recalculated: .. code-block:: python model = Gau(dim=3, var=2., integral_scale=[10, 4, 2]) print(model.anis) print(model.len_scale) print(model.len_scale_vec) print(model.integral_scale) print(model.integral_scale_vec) Which gives: .. code-block:: python [0.4 0.2] 11.283791670955127 [11.28379167 4.51351667 2.25675833] 10.000000000000002 [10. 4. 2.] Percentile scale ^^^^^^^^^^^^^^^^ Another scale characterizing the covariance model is the percentile scale. It is the distance where the normalized variogram reaches a certain percentage of its sill. .. code-block:: python model = Gau(dim=3, var=2., len_scale=10) print(model.percentile_scale(0.9)) Which gives: .. code-block:: python 15.174271293851463 .. note:: The nugget is neglected by this percentile_scale. Additional Parameters --------------------- Let's pimp our self-defined model ``Gau`` by setting the exponent as an additional parameter: .. math:: \mathrm{cor}(r) := \exp\left(-\left(\frac{r}{\ell}\right)^{\alpha}\right) This leads to the so-called **stable** covariance model and we can define it by .. code-block:: python class Stab(CovModel): def default_opt_arg(self): return {"alpha": 1.5} def correlation(self, r): return np.exp(-(r/self.len_scale)**self.alpha) As you can see, we override the method :any:`CovModel.default_opt_arg` to provide a standard value for the optional argument ``alpha`` and we can access it in the correlation function by ``self.alpha``. We strongly encourage you to define a standard value this way. Otherwise a warning will occur when creating the model. Now we can instantiate this model: .. code-block:: python model1 = Stab(dim=2, var=2., len_scale=10) model2 = Stab(dim=2, var=2., len_scale=10, alpha=0.5) print(model1) print(model2) Which gives: .. code-block:: python Stab(dim=2, var=2.0, len_scale=10, nugget=0.0, anis=[1.], angles=[0.], alpha=1.5) Stab(dim=2, var=2.0, len_scale=10, nugget=0.0, anis=[1.], angles=[0.], alpha=0.5) .. note:: You don't have to override :any:`CovModel.default_opt_arg`, but you will get a ValueError if you don't set the value on creation. Fitting variogram data ---------------------- The model class comes with a routine to fit the model parameters to given variogram data. Have a look at the following: .. code-block:: python # data x = [1.0, 3.0, 5.0, 7.0, 9.0, 11.0] y = [0.2, 0.5, 0.6, 0.8, 0.8, 0.9] # fitting model model = Stab(dim=2) # we have to provide boundaries for the parameters model.set_arg_bounds(alpha=[0, 3]) # fit the model to given data, deselect nugget results, pcov = model.fit_variogram(x, y, nugget=False) print(results) # show the fitting from matplotlib import pyplot as plt from gstools.covmodel.plot import plot_variogram plt.scatter(x, y, color="k") plot_variogram(model) plt.show() Which gives: .. code-block:: python {'var': 1.024575782651677, 'len_scale': 5.081620691462197, 'nugget': 0.0, 'alpha': 0.906705123369987} .. 
image:: pics/stab_vario_fit.png :width: 400px :align: center As you can see, we have to provide boundaries for the parameters. As a default, the following bounds are set: - additional parameters: ``[-np.inf, np.inf]`` - variance: ``[0.0, np.inf]`` - len_scale: ``[0.0, np.inf]`` - nugget: ``[0.0, np.inf]`` Also, you can deselect parameters from fitting, so their predefined values will be kept. In our case, we fixed a ``nugget`` of ``0.0``, which was set by default. You can deselect any standard or optional argument of the covariance model. The second return value ``pcov`` is the estimated covariance of ``popt`` from the used scipy routine :any:`scipy.optimize.curve_fit`. You can use the following methods to manipulate the used bounds: .. currentmodule:: gstools.covmodel.base .. autosummary:: CovModel.default_opt_arg_bounds CovModel.default_arg_bounds CovModel.set_arg_bounds CovModel.check_arg_bounds You can override the :any:`CovModel.default_opt_arg_bounds` to provide standard bounds for your additional parameters. To access the bounds you can use: .. autosummary:: CovModel.var_bounds CovModel.len_scale_bounds CovModel.nugget_bounds CovModel.opt_arg_bounds CovModel.arg_bounds Provided Covariance Models -------------------------- The following standard covariance models are provided by GSTools .. currentmodule:: gstools.covmodel.models .. autosummary:: Gaussian Exponential Matern Stable Rational Linear Circular Spherical Intersection As a special feature, we also provide truncated power law (TPL) covariance models .. currentmodule:: gstools.covmodel.tpl_models .. autosummary:: TPLGaussian TPLExponential TPLStable .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_03_vario.rst b/docs/source/tutorial_03_vario.rst deleted file mode 100644 index 6343985db..000000000 --- a/docs/source/tutorial_03_vario.rst +++ /dev/null @@ -1,356 +0,0 @@ -Tutorial 3: Variogram Estimation ================================ Estimating the spatial correlations is an important part of geostatistics. These spatial correlations can be expressed by the variogram, which can be estimated with the subpackage :any:`gstools.variogram`. The variograms can be estimated on structured and unstructured grids. Theoretical Background ---------------------- The same `(semi-)variogram <https://en.wikipedia.org/wiki/Variogram#Semivariogram>`_ as :doc:`the Covariance Model</tutorial_02_cov>` is being used by this subpackage. An Example with Actual Data --------------------------- This example is going to be a bit more extensive and we are going to do some basic data preprocessing for the actual variogram estimation. But this example will be self-contained and all data gathering and processing will be done in this example script. The complete script can be found in :download:`gstools/examples/08_variogram_estimation.py<../../examples/08_variogram_estimation.py>` *This example will only work with Python 3.* The Data ^^^^^^^^ We are going to analyse the Herten aquifer, which is situated in Southern Germany. Multiple outcrop faces were surveyed and interpolated to a 3D dataset. In these publications, you can find more information about the data: | Bayer, Peter; Comunian, Alessandro; Höyng, Dominik; Mariethoz, Gregoire (2015): Physicochemical properties and 3D geostatistical simulations of the Herten and the Descalvado aquifer analogs. 
PANGAEA, https://doi.org/10.1594/PANGAEA.844167, | Supplement to: Bayer, P et al. (2015): Three-dimensional multi-facies realizations of sedimentary reservoir and aquifer analogs. Scientific Data, 2, 150033, https://doi.org/10.1038/sdata.2015.33 | Retrieving the Data ^^^^^^^^^^^^^^^^^^^ To begin with, we need to download and extract the data. Therefore, we are going to use some built-in Python libraries. For simplicity, many values and strings will be hardcoded. .. code-block:: python import os import urllib.request import zipfile import numpy as np import matplotlib.pyplot as pt def download_herten(): # download the data, warning: it's about 250MB print('Downloading Herten data') data_filename = 'data.zip' data_url = 'http://store.pangaea.de/Publications/Bayer_et_al_2015/Herten-analog.zip' urllib.request.urlretrieve(data_url, 'data.zip') # extract the data with zipfile.ZipFile(data_filename, 'r') as zf: zf.extract(os.path.join('Herten-analog', 'sim-big_1000x1000x140', 'sim.vtk')) That was that. But we also need a script to convert the data into a format we can use. This script is also kindly provided by the authors. We can download this script in a very similar manner as the data: .. code-block:: python def download_scripts(): # download a script for file conversion print('Downloading scripts') tools_filename = 'scripts.zip' tool_url = 'http://store.pangaea.de/Publications/Bayer_et_al_2015/tools.zip' urllib.request.urlretrieve(tool_url, tools_filename) # only extract the script we need with zipfile.ZipFile(tools_filename, 'r') as zf: zf.extract(os.path.join('tools', 'vtk2gslib.py')) These two functions can now be called: .. code-block:: python download_herten() download_scripts() Preprocessing the Data ^^^^^^^^^^^^^^^^^^^^^^ First of all, we have to convert the data with the script we just downloaded .. code-block:: python # import the downloaded conversion script from tools.vtk2gslib import vtk2numpy # load the Herten aquifer with the downloaded vtk2numpy routine print('Loading data') herten, grid = vtk2numpy(os.path.join('Herten-analog', 'sim-big_1000x1000x140', 'sim.vtk')) The data only contains facies, but from the supplementary data, we know the hydraulic conductivity values of each facies, which we will simply paste here and assign them to the correct facies .. code-block:: python # conductivity values per facies from the supplementary data cond = np.array([2.50E-04, 2.30E-04, 6.10E-05, 2.60E-02, 1.30E-01, 9.50E-02, 4.30E-05, 6.00E-07, 2.30E-03, 1.40E-04,]) # assign the conductivities to the facies herten_cond = cond[herten] Next, we are going to calculate the transmissivity by integrating over the vertical axis .. code-block:: python # integrate over the vertical axis, calculate transmissivity herten_log_trans = np.log(np.sum(herten_cond, axis=2) * grid['dz']) The Herten data provides information about the grid, which was already used in the previous code block. From this information, we can create our own grid on which we can estimate the variogram. As a first step, we are going to estimate an isotropic variogram, meaning that we will take point pairs from all directions into account. An unstructured grid is a natural choice for this. Therefore, we are going to create an unstructured grid from the given, structured one. For this, we are going to write another small function .. 
code-block:: python - - def create_unstructured_grid(x_s, y_s): - x_u, y_u = np.meshgrid(x_s, y_s) - len_unstruct = len(x_s) * len(y_s) - x_u = np.reshape(x_u, len_unstruct) - y_u = np.reshape(y_u, len_unstruct) - return x_u, y_u - - # create a structured grid on which the data is defined - x_s = np.arange(grid['ox'], grid['nx']*grid['dx'], grid['dx']) - y_s = np.arange(grid['oy'], grid['ny']*grid['dy'], grid['dy']) - - # create an unstructured grid for the variogram estimation - x_u, y_u = create_unstructured_grid(x_s, y_s) - -Let's have a look at the transmissivity field of the Herten aquifer - -.. code-block:: python - - pt.imshow(herten_log_trans.T, origin='lower', aspect='equal') - pt.show() - -.. image:: pics/vario_tut_herten.png - :width: 600px - :align: center - - -Estimating the Variogram -^^^^^^^^^^^^^^^^^^^^^^^^ - -Finally, everything is ready for the variogram estimation. For the unstructured -method, we have to define the bins on which the variogram will be estimated. -Through expert knowledge (i.e. fiddling around), we assume that the main -features of the variogram will be below 10 metres distance. And because the -data has a high spatial resolution, the resolution of the bins can also be -high. The transmissivity data is still defined on a structured grid, but we can -simply flatten it with :any:`numpy.ndarray.flatten`, in order to bring it into -the right shape. It might be more memory efficient to use -``herten_log_trans.reshape(-1)``, but for better readability, we will stick to -:any:`numpy.ndarray.flatten`. Taking all data points into account would take a -very long time (expert knowledge \*wink\*), thus we will only take 2000 datapoints into account, which are sampled randomly. In order to make the exact -results reproducible, we can also set a seed. - -.. code-block:: python - - from gstools import vario_estimate_unstructured - - bins = np.linspace(0, 10, 50) - print('Estimating unstructured variogram') - bin_center, gamma = vario_estimate_unstructured( - (x_u, y_u), - herten_log_trans.flatten(), - bins, - sampling_size=2000, - sampling_seed=19920516, - ) - -The estimated variogram is calculated on the centre of the given bins, -therefore, the ``bin_center`` array is also returned. - -Fitting the Variogram -^^^^^^^^^^^^^^^^^^^^^ - -Now, we can see, if the estimated variogram can be modelled by a common -variogram model. Let's try the :any:`Exponential` model. - -.. code-block:: python - - from gstools import Exponential - - # fit an exponential model - fit_model = Exponential(dim=2) - fit_model.fit_variogram(bin_center, gamma, nugget=False) - -Finally, we can visualise some results. For quickly plotting a covariance -model, GSTools provides some helper functions. - -.. code-block:: python - - from gstools.covmodel.plot import plot_variogram - pt.plot(bin_center, gamma) - plot_variogram(fit_model, x_max=bins[-1]) - pt.show() - -.. image:: pics/vario_tut_fit_exp.png - :width: 400px - :align: center - -That looks like a pretty good fit! By printing the model, we can directly see -the fitted parameters - -.. code-block:: python - - print(fit_model) - -which gives - -.. code-block:: python - - Exponential(dim=2, var=0.020193095802479327, len_scale=1.4480057557321007, nugget=0.0, anis=[1.], angles=[0.]) - -With this data, we could start generating new ensembles of the Herten aquifer -with the :any:`SRF` class. 
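As a quick numerical check of the fit, one could also compare the empirical values with the fitted model directly. This is only a minimal sketch reusing ``bin_center``, ``gamma`` and ``fit_model`` from above; the exact numbers will vary with the random sampling of the data points:

.. code-block:: python

    # root mean squared deviation between empirical and fitted variogram
    rmse = np.sqrt(np.mean((fit_model.variogram(bin_center) - gamma) ** 2))
    print('RMSE of the fit:', rmse)
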
- Estimating the Variogram in Specific Directions ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Estimating a variogram on a structured grid gives us the possibility to only consider values in a specific direction. This could be a first test to see if the data is anisotropic. In order to speed up the calculations, we are going to use only every 10th data point, and for a comparison with the isotropic variogram calculated earlier, we only need the first 21 array items. .. code-block:: python x_s = x_s[::10][:21] y_s = y_s[::10][:21] herten_trans_log = herten_log_trans[::10, ::10] With this much smaller data set, we can immediately estimate the variogram along the x- and y-axis .. code-block:: python from gstools import vario_estimate_structured print('Estimating structured variograms') gamma_x = vario_estimate_structured(herten_trans_log, direction='x')[:21] gamma_y = vario_estimate_structured(herten_trans_log, direction='y')[:21] With these two estimated variograms, we can start fitting :any:`Exponential` covariance models .. code-block:: python fit_model_x = Exponential(dim=2) fit_model_x.fit_variogram(x_s, gamma_x, nugget=False) fit_model_y = Exponential(dim=2) fit_model_y.fit_variogram(y_s, gamma_y, nugget=False) Now, the isotropic variogram and the two variograms in x- and y-direction can be plotted together with their respective models, which will be plotted with dashed lines. .. code-block:: python line, = pt.plot(bin_center, gamma, label='estimated variogram (isotropic)') pt.plot(bin_center, fit_model.variogram(bin_center), color=line.get_color(), linestyle='--', label='exp. variogram (isotropic)') line, = pt.plot(x_s, gamma_x, label='estimated variogram in x-dir') pt.plot(x_s, fit_model_x.variogram(x_s), color=line.get_color(), linestyle='--', label='exp. variogram in x-dir') line, = pt.plot(y_s, gamma_y, label='estimated variogram in y-dir') pt.plot(y_s, fit_model_y.variogram(y_s), color=line.get_color(), linestyle='--', label='exp. variogram in y-dir') pt.legend() pt.show() Giving .. image:: pics/vario_tut_aniso_fit_exp.png :width: 400px :align: center The plot might be a bit cluttered, but at least it is pretty obvious that the Herten aquifer has no apparent anisotropies in its spatial structure. Creating a Spatial Random Field from the Herten Parameters ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ With all the hard work done, it's straightforward now to generate new *Herten realisations* .. code-block:: python from gstools import SRF srf = SRF(fit_model, seed=19770928) new_herten = srf((x_s, y_s), mesh_type='structured') pt.imshow(new_herten.T, origin='lower') pt.show() Yielding .. image:: pics/vario_tut_new_herten.png :width: 600px :align: center That's pretty neat! Executing the code given on this site will result in a lower resolution of the field, because we overwrote `x_s` and `y_s` for the directional variogram estimation. In the example script, this is not the case and you will get a high-resolution field. And Now for Some Cleanup ^^^^^^^^^^^^^^^^^^^^^^^^ In case you want all the downloaded data and scripts to be deleted, use the following commands .. 
code-block:: python from shutil import rmtree os.remove('data.zip') os.remove('scripts.zip') rmtree('Herten-analog') rmtree('tools') And in case you want to play around a little bit more with the data, you can comment out the function calls ``download_herten()`` and ``download_scripts()``, after they were called at least once, and also comment out the cleanup. This way, the data will not be downloaded with every script execution. .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_04_vec_field.rst b/docs/source/tutorial_04_vec_field.rst deleted file mode 100644 index 8d251085d..000000000 --- a/docs/source/tutorial_04_vec_field.rst +++ /dev/null @@ -1,98 +0,0 @@ -Tutorial 4: Random Vector Field Generation ========================================== In 1970, Kraichnan was the first to suggest a randomization method. For studying the diffusion of single particles in a random incompressible velocity field, he came up with a randomization method which includes a projector ensuring the incompressibility of the vector field. Theoretical Background ---------------------- Without loss of generality we assume that the mean velocity :math:`\bar{U}` is oriented towards the direction of the first basis vector :math:`\mathbf{e}_1`. Our goal is now to generate random fluctuations with a given covariance model around this mean velocity, while at the same time making sure that the velocity field remains incompressible, in other words, that :math:`\nabla \cdot \mathbf U = 0` holds. This can be done by using the randomization method we already know, but adding a projector to every mode being summed: .. math:: \mathbf{U}(\mathbf{x}) = \bar{U} \mathbf{e}_1 - \sqrt{\frac{\sigma^{2}}{N}} \sum_{i=1}^{N} \mathbf{p}(\mathbf{k}_i) \left[ Z_{1,i} \cos\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right) + Z_{2,i} \sin\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right) \right] with the projector .. math:: \mathbf{p}(\mathbf{k}_i) = \mathbf{e}_1 - \frac{\mathbf{k}_i k_1}{k^2} \; . By calculating :math:`\nabla \cdot \mathbf U`, it can be verified that the resulting field is indeed incompressible. Generating a Random Vector Field -------------------------------- As a first example we are going to generate a vector field with a Gaussian covariance model on a structured grid: .. code-block:: python import numpy as np import matplotlib.pyplot as plt from gstools import SRF, Gaussian x = np.arange(100) y = np.arange(100) model = Gaussian(dim=2, var=1, len_scale=10) srf = SRF(model, generator='VectorField') srf((x, y), mesh_type='structured', seed=19841203) srf.plot() And we get a beautiful streamflow plot: .. image:: pics/vec_srf_tut_gau.png :width: 600px :align: center Let us have a look at the influence of the covariance model. Choosing the exponential model and keeping all other parameters the same .. code-block:: python from gstools import Exponential model2 = Exponential(dim=2, var=1, len_scale=10) srf.model = model2 srf((x, y), mesh_type='structured', seed=19841203) srf.plot() we get the following result .. image:: pics/vec_srf_tut_exp.png :width: 600px :align: center and we see that the wiggles are much "rougher" than the smooth Gaussian ones. Applications ------------ One great advantage of the Kraichnan method is that, after some initializations, one can compute the velocity field at arbitrary points, online, with hardly any overhead. 
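A minimal sketch of this point-wise evaluation could look as follows, reusing the Gaussian model from above (the particle positions here are made up purely for illustration):

.. code-block:: python

    import numpy as np
    from gstools import SRF, Gaussian

    model = Gaussian(dim=2, var=1, len_scale=10)
    srf = SRF(model, generator='VectorField', seed=19841203)
    # arbitrary (e.g. particle) positions instead of a structured grid
    rng = np.random.RandomState(20011012)
    x_p = rng.uniform(0.0, 100.0, size=1000)
    y_p = rng.uniform(0.0, 100.0, size=1000)
    # unstructured evaluation yields both velocity components at once
    u, v = srf((x_p, y_p))
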
-This means that, for a Lagrangian transport simulation for example, the velocity can be evaluated at each particle position very efficiently and without any interpolation. These field interpolations are a common problem for Lagrangian methods. .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_05_kriging.rst b/docs/source/tutorial_05_kriging.rst deleted file mode 100755 index 7284b8b77..000000000 --- a/docs/source/tutorial_05_kriging.rst +++ /dev/null @@ -1,201 +0,0 @@ -Tutorial 5: Kriging =================== The subpackage :py:mod:`gstools.krige` provides routines for Gaussian process regression, also known as kriging. Kriging is a method of data interpolation based on predefined covariance models. We provide two kinds of kriging routines: * Simple: The data is interpolated with a given mean value for the kriging field. * Ordinary: The mean of the resulting field is unknown and estimated during interpolation. Theoretical Background ---------------------- The aim of kriging is to derive the value of a field at some point :math:`x_0`, when there are fixed observed values :math:`z(x_1)\ldots z(x_n)` at given points :math:`x_i`. The resulting value :math:`z_0` at :math:`x_0` is calculated as a weighted mean: .. math:: z_0 = \sum_{i=1}^n w_i \cdot z_i The weights :math:`W = (w_1,\ldots,w_n)` depend on the given covariance model and the location of the target point. The different kriging approaches provide different ways of calculating :math:`W`. Implementation -------------- The routines for kriging are almost identical to the routines for spatial random fields. First you define a covariance model, as described in :doc:`the covariance model tutorial</tutorial_02_cov>`, then you initialize the kriging class with this model: .. code-block:: python from gstools import Gaussian, krige # conditions cond_pos = ... cond_val = ... model = Gaussian(dim=1, var=0.5, len_scale=2) krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val) The resulting field instance ``krig`` has the same methods as the :any:`SRF` class. You can call it to evaluate the kriged field at different points, you can plot the latest field or you can export the field and so on. Have a look at the documentation of :any:`Simple` and :any:`Ordinary`. Simple Kriging -------------- Simple kriging assumes a known mean of the data. For simplicity we assume a mean of 0, which can be achieved by subtracting the mean from the observed values and subsequently adding it to the resulting data. The resulting equation system for :math:`W` is given by: .. math:: W = \begin{pmatrix}c(x_1,x_1) & \cdots & c(x_1,x_n) \\ \vdots & \ddots & \vdots \\ c(x_n,x_1) & \cdots & c(x_n,x_n) \end{pmatrix}^{-1} \begin{pmatrix}c(x_1,x_0) \\ \vdots \\ c(x_n,x_0) \end{pmatrix} Thereby :math:`c(x_i,x_j)` is the covariance of the given observations. Example ^^^^^^^ Here we use simple kriging in 1D (for plotting reasons) with 5 given observations/conditions. The mean of the field has to be given beforehand. .. 
code-block:: python import numpy as np from gstools import Gaussian, krige # conditions cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] # resulting grid gridx = np.linspace(0.0, 15.0, 151) # spatial random field class model = Gaussian(dim=1, var=0.5, len_scale=2) krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val) krig(gridx) ax = krig.plot() ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") ax.legend() .. image:: pics/05_simple.png :width: 600px :align: center Ordinary Kriging ---------------- Ordinary kriging will estimate an appropriate mean of the field, based on the given observations/conditions and the covariance model used. The resulting system of equations for :math:`W` is given by: .. math:: \begin{pmatrix}W\\\mu\end{pmatrix} = \begin{pmatrix} \gamma(x_1,x_1) & \cdots & \gamma(x_1,x_n) &1 \\ \vdots & \ddots & \vdots & \vdots \\ \gamma(x_n,x_1) & \cdots & \gamma(x_n,x_n) & 1 \\ 1 &\cdots& 1 & 0 \end{pmatrix}^{-1} \begin{pmatrix}\gamma(x_1,x_0) \\ \vdots \\ \gamma(x_n,x_0) \\ 1\end{pmatrix} Thereby :math:`\gamma(x_i,x_j)` is the semi-variogram of the given observations and :math:`\mu` is a Lagrange multiplier to minimize the kriging error and estimate the mean. Example ^^^^^^^ Here we use ordinary kriging in 1D (for plotting reasons) with 5 given observations/conditions. The estimated mean can be accessed by ``krig.mean``. .. code-block:: python import numpy as np from gstools import Gaussian, krige # conditions cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] # resulting grid gridx = np.linspace(0.0, 15.0, 151) # spatial random field class model = Gaussian(dim=1, var=0.5, len_scale=2) krig = krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val) krig(gridx) ax = krig.plot() ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") ax.legend() .. image:: pics/05_ordinary.png :width: 600px :align: center Interface to PyKrige -------------------- To use fancier methods like `universal kriging <https://en.wikipedia.org/wiki/Kriging#Universal>`__, we provide an interface to `PyKrige <https://github.com/bsmurphy/PyKrige>`__. You can pass a GSTools covariance model to the PyKrige routines as ``variogram_model``. To demonstrate the general workflow, we compare the ordinary kriging of PyKrige with GSTools in 2D: .. code-block:: python import numpy as np from gstools import Gaussian, krige from pykrige.ok import OrdinaryKriging from matplotlib import pyplot as plt # conditioning data data = np.array([[0.3, 1.2, 0.47], [1.9, 0.6, 0.56], [1.1, 3.2, 0.74], [3.3, 4.4, 1.47], [4.7, 3.8, 1.74]]) # grid definition for output field gridx = np.arange(0.0, 5.5, 0.1) gridy = np.arange(0.0, 6.5, 0.1) # a GSTools based covariance model cov_model = Gaussian(dim=2, len_scale=1, anis=.2, angles=-.5, var=.5, nugget=.1) # ordinary kriging with pykrige OK1 = OrdinaryKriging(data[:, 0], data[:, 1], data[:, 2], cov_model) z1, ss1 = OK1.execute('grid', gridx, gridy) plt.imshow(z1, origin="lower") plt.show() # ordinary kriging with gstools for comparison OK2 = krige.Ordinary(cov_model, [data[:, 0], data[:, 1]], data[:, 2]) OK2.structured([gridx, gridy]) OK2.plot() .. image:: pics/20_pykrige.png :width: 600px :align: center .. 
image:: pics/20_gstools.png :width: 600px :align: center .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_06_conditioning.rst b/docs/source/tutorial_06_conditioning.rst deleted file mode 100755 index 31ea93f0d..000000000 --- a/docs/source/tutorial_06_conditioning.rst +++ /dev/null @@ -1,80 +0,0 @@ -Tutorial 6: Conditioned Fields ============================== Kriged fields tend to approach the field mean outside the area of observations. To generate random fields that coincide with given observations, but are still random according to a given covariance model away from the observations' proximity, we provide the generation of conditioned random fields. Theoretical Background ---------------------- The idea behind conditioned random fields builds on kriging. First we generate a field with a kriging method, then we generate a random field, and finally we generate another kriged field to eliminate the error between the random field and the kriged field of the given observations. To do so, you can choose between ordinary and simple kriging. In case of ordinary kriging, the mean of the SRF will be overwritten by the estimated mean. The setup of the spatial random field is the same as described in :doc:`the SRF tutorial</tutorial_01_srf>`. You just need to add the conditions as described in :doc:`the kriging tutorial</tutorial_05_kriging>`: .. code-block:: python srf.set_condition(cond_pos, cond_val, "simple") or: .. code-block:: python srf.set_condition(cond_pos, cond_val, "ordinary") Example: Conditioning with Ordinary Kriging ------------------------------------------- Here we use ordinary kriging in 1D (for plotting reasons) with 5 given observations/conditions to generate an ensemble of conditioned random fields. The estimated mean can be accessed by ``srf.mean``. .. code-block:: python import numpy as np from gstools import Gaussian, SRF import matplotlib.pyplot as plt # conditions cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] gridx = np.linspace(0.0, 15.0, 151) # spatial random field class model = Gaussian(dim=1, var=0.5, len_scale=2) srf = SRF(model) srf.set_condition(cond_pos, cond_val, "ordinary") fields = [] for i in range(100): if i % 10 == 0: print(i) fields.append(srf(gridx, seed=i)) label = "Conditioned ensemble" if i == 0 else None plt.plot(gridx, fields[i], color="k", alpha=0.1, label=label) plt.plot(gridx, np.full_like(gridx, srf.mean), label="estimated mean") plt.plot(gridx, np.mean(fields, axis=0), linestyle=':', label="Ensemble mean") plt.plot(gridx, srf.krige_field, linestyle='dashed', label="kriged field") plt.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") plt.legend() plt.show() .. image:: pics/06_ensemble.png :width: 600px :align: center As you can see, the kriging field coincides with the ensemble mean of the conditioned random fields and the estimated mean is the mean of the far-field. .. raw:: latex \clearpage
diff --git a/docs/source/tutorial_07_transformations.rst b/docs/source/tutorial_07_transformations.rst deleted file mode 100755 index 6bea3b5c4..000000000 --- a/docs/source/tutorial_07_transformations.rst +++ /dev/null @@ -1,188 +0,0 @@ -Tutorial 7: Field transformations ================================= The generated fields of GSTools are ordinary Gaussian random fields. 
-In application there are several transformations to describe real world -problems in an appropriate manner. - -GStools provides a submodule :py:mod:`gstools.transform` with a range of -common transformations: - -.. currentmodule:: gstools.transform - -.. autosummary:: - binary - boxcox - zinnharvey - normal_force_moments - normal_to_lognormal - normal_to_uniform - normal_to_arcsin - normal_to_uquad - - -Implementation --------------- - -All the transformations take a field class, that holds a generated field, -as input and will manipulate this field inplace. - -Simply import the transform submodule and apply a transformation to the srf class: - -.. code-block:: python - - from gstools import transform as tf - ... - tf.normal_to_lognormal(srf) - - -In the following we will start from a simple random field following a Gaussian covariance: - -.. image:: pics/07_00_std.png - :width: 600px - :align: center - - -1. Example: log-normal fields ------------------------------ - -Here we transform a field to a log-normal distribution: - -.. code-block:: python - - from gstools import SRF, Gaussian - from gstools import transform as tf - # structured field with a size of 100x100 and a grid-size of 1x1 - x = y = range(100) - model = Gaussian(dim=2, var=1, len_scale=10) - srf = SRF(model, seed=20170519) - srf.structured([x, y]) - tf.normal_to_lognormal(srf) - srf.plot() - - -.. image:: pics/07_01_lognormal.png - :width: 600px - :align: center - - -2. Example: binary fields -------------------------- - -Here we transform a field to a binary field with only two values. -The dividing value is the mean by default and the upper and lower values -are derived to preserve the variance. - -.. code-block:: python - - from gstools import SRF, Gaussian - from gstools import transform as tf - # structured field with a size of 100x100 and a grid-size of 1x1 - x = y = range(100) - model = Gaussian(dim=2, var=1, len_scale=10) - srf = SRF(model, seed=20170519) - srf.structured([x, y]) - tf.binary(srf) - srf.plot() - - -.. image:: pics/07_02_binary.png - :width: 600px - :align: center - - -3. Example: Zinn & Harvey transformation ----------------------------------------- - -Here, we transform a field with the so called "Zinn & Harvey" transformation presented in -`Zinn & Harvey (2003) <https://www.researchgate.net/publication/282442995_zinnharvey2003>`__. -With this transformation, one could overcome the restriction that in ordinary -Gaussian random fields the mean values are the ones being the most connected. - -.. code-block:: python - - from gstools import SRF, Gaussian - from gstools import transform as tf - # structured field with a size of 100x100 and a grid-size of 1x1 - x = y = range(100) - model = Gaussian(dim=2, var=1, len_scale=10) - srf = SRF(model, seed=20170519) - srf.structured([x, y]) - tf.zinnharvey(srf, conn="high") - srf.plot() - - -.. image:: pics/07_03_zinnharvey.png - :width: 600px - :align: center - - -4. Example: bimodal fields --------------------------- - -We provide two transformations to obtain bimodal distributions: - -* `arcsin <https://en.wikipedia.org/wiki/Arcsine_distribution>`__. -* `uquad <https://en.wikipedia.org/wiki/U-quadratic_distribution>`__. - -Both transformations will preserve the mean and variance of the given field by default. - -.. 
code-block:: python - - from gstools import SRF, Gaussian - from gstools import transform as tf - # structured field with a size of 100x100 and a grid-size of 1x1 - x = y = range(100) - model = Gaussian(dim=2, var=1, len_scale=10) - srf = SRF(model, seed=20170519) - field = srf.structured([x, y]) - tf.normal_to_arcsin(srf) - srf.plot() - - -.. image:: pics/07_04_arcsin.png - :width: 600px - :align: center - - -5. Example: Combinations ------------------------- - -You can combine different transformations simply by successively applying them. - -Here, we first force the single field realization to hold the given moments, -namely mean and variance. -Then we apply the Zinn & Harvey transformation to connect the low values. -Afterwards the field is transformed to a binary field and last but not least, -we transform it to log-values. - - -.. code-block:: python - - from gstools import SRF, Gaussian - from gstools import transform as tf - # structured field with a size of 100x100 and a grid-size of 1x1 - x = y = range(100) - model = Gaussian(dim=2, var=1, len_scale=10) - srf = SRF(model, mean=-9, seed=20170519) - srf.structured([x, y]) - tf.normal_force_moments(srf) - tf.zinnharvey(srf, conn="low") - tf.binary(srf) - tf.normal_to_lognormal(srf) - srf.plot() - - -.. image:: pics/07_05_combine.png - :width: 600px - :align: center - - -The resulting field could be interpreted as a transmissivity field, where -the values of low permeability are the ones being the most connected -and only two kinds of soil exist. - - -.. raw:: latex - - \clearpage diff --git a/docs/source/tutorials.rst b/docs/source/tutorials.rst index e5b815017..5c2b6787f 100644 --- a/docs/source/tutorials.rst +++ b/docs/source/tutorials.rst @@ -1,3 +1,5 @@ +.. _tutorials: + ================= GSTools Tutorials ================= @@ -5,13 +7,16 @@ GSTools Tutorials In the following you will find several Tutorials on how to use GSTools to explore its whole beauty and power. + .. 
toctree:: + :includehidden: :maxdepth: 1 - tutorial_01_srf.rst - tutorial_02_cov.rst - tutorial_03_vario.rst - tutorial_04_vec_field.rst - tutorial_05_kriging.rst - tutorial_06_conditioning.rst - tutorial_07_transformations.rst + examples/01_random_field/index + examples/02_cov_model/index + examples/03_variogram/index + examples/04_vector_field/index + examples/05_kriging/index + examples/06_conditioned_fields/index + examples/07_transformations/index + examples/00_misc/index diff --git a/examples/00_gaussian.py b/examples/00_gaussian.py deleted file mode 100644 index 6800d18d5..000000000 --- a/examples/00_gaussian.py +++ /dev/null @@ -1,8 +0,0 @@ -from gstools import SRF, Gaussian - -# structured field with a size of 100x100 and a grid-size of 1x1 -x = y = range(100) -model = Gaussian(dim=2, var=1, len_scale=10) -srf = SRF(model) -srf.structured([x, y]) -srf.plot() diff --git a/examples/01_tpl_stable.py b/examples/00_misc/00_tpl_stable.py similarity index 95% rename from examples/01_tpl_stable.py rename to examples/00_misc/00_tpl_stable.py index 1e7fae1de..1f5502185 100644 --- a/examples/01_tpl_stable.py +++ b/examples/00_misc/00_tpl_stable.py @@ -1,3 +1,8 @@ +""" +TPL Stable +---------- +""" + import numpy as np from gstools import SRF, TPLStable diff --git a/examples/04_export.py b/examples/00_misc/01_export.py similarity index 88% rename from examples/04_export.py rename to examples/00_misc/01_export.py index bb9cb439d..24613288d 100644 --- a/examples/04_export.py +++ b/examples/00_misc/01_export.py @@ -1,3 +1,8 @@ +""" +Export +------ +""" + from gstools import SRF, Gaussian x = y = range(100) diff --git a/examples/19_check_rand_meth_sampling.py b/examples/00_misc/02_check_rand_meth_sampling.py similarity index 96% rename from examples/19_check_rand_meth_sampling.py rename to examples/00_misc/02_check_rand_meth_sampling.py index 2ea5d5ed7..1fb4dcc52 100644 --- a/examples/19_check_rand_meth_sampling.py +++ b/examples/00_misc/02_check_rand_meth_sampling.py @@ -1,3 +1,7 @@ +""" +Check Random Sampling +--------------------- +""" import numpy as np from matplotlib import pyplot as plt from mpl_toolkits.mplot3d import Axes3D @@ -30,7 +34,7 @@ def plot_rand_meth_samples(generator): z = np.outer(np.ones(np.size(u)), np.cos(v)) ax.plot_surface(x, y, z, rstride=4, cstride=4, color="b", alpha=0.1) ax.scatter(norm[0], norm[1], norm[2]) - ax.set_aspect("equal") + # ax.set_aspect("equal") elif generator.model.dim == 2: ax = fig.add_subplot(121) u = np.linspace(0, 2 * np.pi, 100) diff --git a/examples/00_misc/README.rst b/examples/00_misc/README.rst new file mode 100644 index 000000000..09f69ebc5 --- /dev/null +++ b/examples/00_misc/README.rst @@ -0,0 +1,4 @@ +Miscellaneous +============= + +A few miscellaneous examples diff --git a/examples/01_random_field/00_gaussian.py b/examples/01_random_field/00_gaussian.py new file mode 100644 index 000000000..03356bde8 --- /dev/null +++ b/examples/01_random_field/00_gaussian.py @@ -0,0 +1,38 @@ +""" +A Very Simple Example +--------------------- + +We are going to start with a very simple example of a spatial random field +with an isotropic Gaussian covariance model and following parameters: + +- variance :math:`\sigma^2=1` +- correlation length :math:`\lambda=10` + +First, we set things up and create the axes for the field. We are going to +need the :any:`SRF` class for the actual generation of the spatial random field. +But :any:`SRF` also needs a covariance model and we will simply take the :any:`Gaussian` model. 
+""" + +from gstools import SRF, Gaussian + +x = y = range(100) + +############################################################################### +# Now we create the covariance model with the parameters :math:`\sigma^2` and +# :math:`\lambda` and hand it over to :any:`SRF`. By specifying a seed, +# we make sure to create reproducible results: + +model = Gaussian(dim=2, var=1, len_scale=10) +srf = SRF(model, seed=20170519) + +############################################################################### +# With these simple steps, everything is ready to create our first random field. +# We will create the field on a structured grid (as you might have guessed from +# the `x` and `y`), which makes it easier to plot. + + +field = srf.structured([x, y]) +srf.plot() + +############################################################################### +# Wow, that was pretty easy! diff --git a/examples/01_random_field/01_srf_ensemble.py b/examples/01_random_field/01_srf_ensemble.py new file mode 100644 index 000000000..4e31a51dd --- /dev/null +++ b/examples/01_random_field/01_srf_ensemble.py @@ -0,0 +1,51 @@ +""" +Creating an Ensemble of Fields +------------------------------ + +Creating an ensemble of random fields would also be +a great idea. Let's reuse most of the previous code. +""" + +import numpy as np +import matplotlib.pyplot as pt +from gstools import SRF, Gaussian + +x = y = np.arange(100) + +model = Gaussian(dim=2, var=1, len_scale=10) +srf = SRF(model) + +############################################################################### +# This time, we did not provide a seed to :any:`SRF`, as the seeds will used +# during the actual computation of the fields. We will create four ensemble +# members, for better visualisation and save them in a list and in a first +# step, we will be using the loop counter as the seeds. + + +ens_no = 4 +field = [] +for i in range(ens_no): + field.append(srf.structured([x, y], seed=i)) + +############################################################################### +# Now let's have a look at the results: + +fig, ax = pt.subplots(2, 2, sharex=True, sharey=True) +ax = ax.flatten() +for i in range(ens_no): + ax[i].imshow(field[i].T, origin='lower') +pt.show() + +############################################################################### +# Using better Seeds +# ^^^^^^^^^^^^^^^^^^ +# +# It is not always a good idea to use incrementing seeds. Therefore GSTools +# provides a seed generator :any:`MasterRNG`. The loop, in which the fields are +# generated would then look like + + +from gstools.random import MasterRNG +seed = MasterRNG(20170519) +for i in range(ens_no): + field.append(srf.structured([x, y], seed=seed())) diff --git a/examples/01_random_field/02_fancier.py b/examples/01_random_field/02_fancier.py new file mode 100644 index 000000000..eabd05095 --- /dev/null +++ b/examples/01_random_field/02_fancier.py @@ -0,0 +1,33 @@ +""" +Creating Fancier Fields +----------------------- + +Only using Gaussian covariance fields gets boring. Now we are going to create +much rougher random fields by using an exponential covariance model and we are going to make them anisotropic. + +The code is very similar to the previous examples, but with a different +covariance model class :any:`Exponential`. 
As model parameters we a using +following + +- variance :math:`\sigma^2=1` +- correlation length :math:`\lambda=(12, 3)^T` +- rotation angle :math:`\theta=\pi/8` + +""" + + +import numpy as np +from gstools import SRF, Exponential + +x = y = np.arange(100) + +model = Exponential(dim=2, var=1, len_scale=[12., 3.], angles=np.pi/8.) +srf = SRF(model, seed=20170519) + +srf.structured([x, y]) +srf.plot() + +############################################################################### +# The anisotropy ratio could also have been set with + +model = Exponential(dim=2, var=1, len_scale=12., anis=3./12., angles=np.pi/8.) diff --git a/examples/01_random_field/03_unstr_srf_export.py b/examples/01_random_field/03_unstr_srf_export.py new file mode 100644 index 000000000..4bc1d476f --- /dev/null +++ b/examples/01_random_field/03_unstr_srf_export.py @@ -0,0 +1,37 @@ +""" +Using an Unstructured Grid +-------------------------- + +For many applications, the random fields are needed on an unstructured grid. +Normally, such a grid would be read in, but we can simply generate one and +then create a random field at those coordinates. +""" +import numpy as np +import matplotlib.pyplot as pt +from gstools import SRF, Exponential +from gstools.random import MasterRNG + +############################################################################### +# Creating our own unstructured grid +seed = MasterRNG(19970221) +rng = np.random.RandomState(seed()) +x = rng.randint(0, 100, size=10000) +y = rng.randint(0, 100, size=10000) + +model = Exponential(dim=2, var=1, len_scale=[12.0, 3.0], angles=np.pi / 8.0) + +srf = SRF(model, seed=20170519) + +field = srf((x, y)) +srf.vtk_export("field") +# Or create a PyVista dataset +# mesh = srf.to_pyvista() + +############################################################################### +pt.tricontourf(x, y, field.T) +pt.axes().set_aspect("equal") +pt.show() + +############################################################################### +# Comparing this image to the previous one, you can see that be using the same +# seed, the same field can be computed on different grids. diff --git a/examples/01_random_field/04_srf_merge.py b/examples/01_random_field/04_srf_merge.py new file mode 100644 index 000000000..94b1f7f82 --- /dev/null +++ b/examples/01_random_field/04_srf_merge.py @@ -0,0 +1,52 @@ +""" +Merging two Fields +------------------ + +We can even generate the same field realisation on different grids. Let's try +to merge two unstructured rectangular fields. 
+
+"""
+import numpy as np
+import matplotlib.pyplot as pt
+from gstools import SRF, Exponential
+from gstools.random import MasterRNG
+
+# creating our own unstructured grid
+seed = MasterRNG(19970221)
+rng = np.random.RandomState(seed())
+x = rng.randint(0, 100, size=10000)
+y = rng.randint(0, 100, size=10000)
+
+model = Exponential(dim=2, var=1, len_scale=[12.0, 3.0], angles=np.pi / 8.0)
+
+srf = SRF(model, seed=20170519)
+
+field = srf((x, y))
+
+###############################################################################
+# But now we extend the field on the right hand side by creating a new
+# unstructured grid and calculating a field with the same parameters and the
+# same seed on it:
+
+# new grid
+seed = MasterRNG(20011012)
+rng = np.random.RandomState(seed())
+x2 = rng.randint(99, 150, size=10000)
+y2 = rng.randint(20, 80, size=10000)
+
+field2 = srf((x2, y2))
+
+pt.tricontourf(x, y, field.T)
+pt.tricontourf(x2, y2, field2.T)
+pt.axes().set_aspect('equal')
+pt.show()
+
+###############################################################################
+# The slight mismatch where the two fields were merged is merely due to
+# interpolation problems of the plotting routine. You can convince yourself
+# by increasing the resolution of the grids by a factor of 10.
+#
+# Of course, this merging could also have been done by appending the grid
+# points ``(x2, y2)`` to the original grid ``(x, y)`` before generating the field.
+# But one application scenario would be to generate huge fields, which would not
+# fit into memory anymore.
diff --git a/examples/01_random_field/README.rst b/examples/01_random_field/README.rst
new file mode 100644
index 000000000..db51cd818
--- /dev/null
+++ b/examples/01_random_field/README.rst
@@ -0,0 +1,14 @@
+Tutorial 1: Random Field Generation
+===================================
+
+The main feature of GSTools is the spatial random field generator :any:`SRF`,
+which can generate random fields following a given covariance model.
+The generator provides a lot of nice features, which will be explained in
+the following.
+
+GSTools generates spatial random fields with a given covariance model or
+semi-variogram. This is done by using the so-called randomization method.
+The spatial random field is represented by a stochastic Fourier integral
+and its discretised modes are evaluated at random frequencies.
+
+GSTools supports arbitrary and non-isotropic covariance models.
diff --git a/examples/02_cov_model/00_cov_model.py b/examples/02_cov_model/00_cov_model.py
new file mode 100644
index 000000000..34c9fb1e6
--- /dev/null
+++ b/examples/02_cov_model/00_cov_model.py
@@ -0,0 +1,375 @@
+"""
+Covariance Model
+================
+
+Let us start with a short example of a self-defined model (of course, we
+provide a lot of predefined models [see: :any:`gstools.covmodel`],
+but they all work the same way).
+
+Therefore we reimplement the Gaussian covariance model by defining just the
+`correlation <https://en.wikipedia.org/wiki/Autocovariance#Normalization>`_ function:
+"""
+
+from gstools import CovModel
+import numpy as np
+
+# use CovModel as the base-class
+class Gau(CovModel):
+    def correlation(self, r):
+        return np.exp(-(r / self.len_scale) ** 2)
+
+###############################################################################
+# Now we can instantiate this model:
+model = Gau(dim=2, var=2.0, len_scale=10)
+
+###############################################################################
+# To have a look at the variogram, let's plot it:
+
+from gstools.covmodel.plot import plot_variogram
+plot_variogram(model)
+
+###############################################################################
+model.plot()
+
+###############################################################################
+# Parameters
+# ----------
+#
+# We already used some parameters, which every covariance model has. The basic
+# ones are:
+#
+# - **dim** : dimension of the model
+# - **var** : variance of the model (on top of the subscale variance)
+# - **len_scale** : length scale of the model
+# - **nugget** : nugget (subscale variance) of the model
+#
+# These are the common parameters used to characterize a covariance model and are
+# therefore used by every model in GSTools. You can also access and reset them:
+
+print(model.dim, model.var, model.len_scale, model.nugget, model.sill)
+model.dim = 3
+model.var = 1
+model.len_scale = 15
+model.nugget = 0.1
+print(model.dim, model.var, model.len_scale, model.nugget, model.sill)
+
+###############################################################################
+# .. note::
+#
+#    - The sill of the variogram is calculated by ``sill = variance + nugget``.
+#      So we treat the variance as everything **above** the nugget, which is
+#      sometimes called **partial sill**.
+#    - A covariance model can also have additional parameters.
+
+###############################################################################
+# Anisotropy
+# ----------
+#
+# The internally used (semi-) variogram represents the isotropic case for the model.
+# Nevertheless, you can provide anisotropy ratios by:
+
+model = Gau(dim=3, var=2.0, len_scale=10, anis=0.5)
+print(model.anis)
+print(model.len_scale_vec)
+
+###############################################################################
+# As you can see, we defined just one anisotropy-ratio and the second transversal
+# direction was filled up with ``1.`` and you can get the length-scales in each
+# direction by the attribute :any:`len_scale_vec`. For full control you can set
+# a list of anisotropy ratios: ``anis=[0.5, 0.4]``.
+#
+# Alternatively you can provide a list of length-scales:
+
+model = Gau(dim=3, var=2.0, len_scale=[10, 5, 4])
+print(model.anis)
+print(model.len_scale)
+print(model.len_scale_vec)
+
+###############################################################################
+# Rotation Angles
+# ---------------
+#
+# The main directions of the field don't have to coincide with the spatial
+# directions :math:`x`, :math:`y` and :math:`z`. Therefore you can provide
+# rotation angles for the model:
+
+model = Gau(dim=3, var=2.0, len_scale=10, angles=2.5)
+print(model.angles)
+
+###############################################################################
+# Again, the angles were filled up with ``0.`` to match the dimension and you
+# could also provide a list of angles. The number of angles depends on the
+# given dimension:
+#
+# - in 1D: no rotation performable
+# - in 2D: given as rotation around the z-axis
+# - in 3D: given by yaw, pitch, and roll (known as
+#   `Tait–Bryan <https://en.wikipedia.org/wiki/Euler_angles#Tait-Bryan_angles>`_
+#   angles)
+
+###############################################################################
+# Methods
+# -------
+#
+# The covariance model class :any:`CovModel` of GSTools provides a set of handy
+# methods.
+#
+# Basics
+# ^^^^^^
+#
+# One of the following functions defines the main characterization of the
+# variogram:
+#
+# - ``variogram`` : The variogram of the model given by
+#
+#   .. math::
+#      \gamma\left(r\right)=
+#      \sigma^2\cdot\left(1-\mathrm{cor}\left(r\right)\right)+n
+#
+# - ``covariance`` : The (auto-)covariance of the model given by
+#
+#   .. math::
+#      C\left(r\right)= \sigma^2\cdot\mathrm{cor}\left(r\right)
+#
+# - ``correlation`` : The (auto-)correlation (or normalized covariance)
+#   of the model given by
+#
+#   .. math::
+#      \mathrm{cor}\left(r\right)
+#
+# As you can see, the easiest way to define a covariance model is by giving a
+# correlation function, as demonstrated by the above model ``Gau``.
+# If one of the above functions is given, the others will be determined:
+
+model = Gau(dim=3, var=2.0, len_scale=10, nugget=0.5)
+print(model.variogram(10.0))
+print(model.covariance(10.0))
+print(model.correlation(10.0))
+
+###############################################################################
+# Spectral methods
+# ^^^^^^^^^^^^^^^^
+#
+# The spectrum of a covariance model is given by:
+#
+# .. math:: S(\mathbf{k}) = \left(\frac{1}{2\pi}\right)^n
+#    \int C(\Vert\mathbf{r}\Vert) e^{i b\mathbf{k}\cdot\mathbf{r}} d^n\mathbf{r}
+#
+# Since the covariance function :math:`C(r)` is radially symmetric, we can
+# calculate this by the
+# `hankel-transformation <https://en.wikipedia.org/wiki/Hankel_transform>`_:
+#
+# .. math:: S(k) = \left(\frac{1}{2\pi}\right)^n \cdot
+#    \frac{(2\pi)^{n/2}}{(bk)^{n/2-1}}
+#    \int_0^\infty r^{n/2-1} C(r) J_{n/2-1}(bkr) r dr
+#
+# Where :math:`k=\left\Vert\mathbf{k}\right\Vert`.
+#
+# Depending on the spectrum, the spectral-density is defined by:
+#
+# .. math:: \tilde{S}(k) = \frac{S(k)}{\sigma^2}
+#
+# You can access these methods by:
+
+model = Gau(dim=3, var=2.0, len_scale=10)
+print(model.spectrum(0.1))
+print(model.spectral_density(0.1))
+
+###############################################################################
+# .. note::
+#    The spectral-density is given by the radius of the input phase. But it is
+#    **not** a probability density function for the radius of the phase.
+#    To obtain the pdf for the phase-radius, you can use the methods
+#    :any:`spectral_rad_pdf` or :any:`ln_spectral_rad_pdf` for the logarithm.
+#
+#    The user can also provide a cdf (cumulative distribution function) by
+#    defining a method called ``spectral_rad_cdf`` and/or a ppf (percent-point function)
+#    by ``spectral_rad_ppf``.
+#
+#    The attributes :any:`has_cdf` and :any:`has_ppf` will check for that.
+
+###############################################################################
+# Different scales
+# ----------------
+#
+# Besides the length-scale, there are many other ways of characterizing a certain
+# scale of a covariance model. We provide two common scales with the covariance
+# model.
+#
+# Integral scale
+# ^^^^^^^^^^^^^^
+#
+# The `integral scale <https://en.wikipedia.org/wiki/Integral_length_scale>`_
+# of a covariance model is calculated by:
+#
+# .. math:: I = \int_0^\infty \mathrm{cor}(r) dr
+#
+# You can access it by:
+
+model = Gau(dim=3, var=2.0, len_scale=10)
+print(model.integral_scale)
+print(model.integral_scale_vec)
+
+###############################################################################
+# You can also specify integral length scales like the ordinary length scale,
+# and len_scale/anis will be recalculated:
+
+model = Gau(dim=3, var=2.0, integral_scale=[10, 4, 2])
+print(model.anis)
+print(model.len_scale)
+print(model.len_scale_vec)
+print(model.integral_scale)
+print(model.integral_scale_vec)
+
+###############################################################################
+# Percentile scale
+# ^^^^^^^^^^^^^^^^
+#
+# Another scale characterizing the covariance model is the percentile scale.
+# It is the distance at which the normalized variogram reaches a certain
+# percentage of its sill.
+
+model = Gau(dim=3, var=2.0, len_scale=10)
+print(model.percentile_scale(0.9))
+
+###############################################################################
+# .. note::
+#
+#    The nugget is neglected by this percentile_scale.
+#
+
+###############################################################################
+# Additional Parameters
+# ---------------------
+#
+# Let's pimp our self-defined model ``Gau`` by setting the exponent as an additional
+# parameter:
+#
+# .. math:: \mathrm{cor}(r) := \exp\left(-\left(\frac{r}{\ell}\right)^{\alpha}\right)
+#
+# This leads to the so-called **stable** covariance model and we can define it by:
+
+class Stab(CovModel):
+    def default_opt_arg(self):
+        return {"alpha": 1.5}
+
+    def correlation(self, r):
+        return np.exp(-(r / self.len_scale) ** self.alpha)
+
+###############################################################################
+# As you can see, we override the method :any:`CovModel.default_opt_arg` to provide
+# a standard value for the optional argument ``alpha`` and we can access it
+# in the correlation function by ``self.alpha``.
+#
+# Now we can instantiate this model:
+
+model1 = Stab(dim=2, var=2.0, len_scale=10)
+model2 = Stab(dim=2, var=2.0, len_scale=10, alpha=0.5)
+print(model1)
+print(model2)
+
+###############################################################################
+# .. note::
+#
+#    You don't have to override the :any:`CovModel.default_opt_arg`, but you will
+#    get a ValueError if you don't set it on creation.
+
+###############################################################################
+# Fitting variogram data
+# ----------------------
+#
+# The model class comes with a routine to fit the model-parameters to given
+# variogram data. Have a look at the following:
+
+# data
+x = [1.0, 3.0, 5.0, 7.0, 9.0, 11.0]
+y = [0.2, 0.5, 0.6, 0.8, 0.8, 0.9]
+# fitting model
+model = Stab(dim=2)
+# we have to provide boundaries for the parameters
+model.set_arg_bounds(alpha=[0, 3])
+results, pcov = model.fit_variogram(x, y, nugget=False)
+print(results)
+
+###############################################################################
+
+# show the fitting
+from matplotlib import pyplot as plt
+from gstools.covmodel.plot import plot_variogram
+plt.scatter(x, y, color="k")
+plot_variogram(model)
+plt.show()
+
+###############################################################################
+# As you can see, we have to provide boundaries for the parameters.
+# As a default, the following bounds are set: +# +# - additional parameters: ``[-np.inf, np.inf]`` +# - variance: ``[0.0, np.inf]`` +# - len_scale: ``[0.0, np.inf]`` +# - nugget: ``[0.0, np.inf]`` +# +# Also, you can deselect parameters from fitting, so their predefined values +# will be kept. In our case, we fixed a ``nugget`` of ``0.0``, which was set +# by default. You can deselect any standard or optional argument of the covariance model. +# The second return value ``pcov`` is the estimated covariance of ``popt`` from +# the used scipy routine :any:`scipy.optimize.curve_fit`. +# +# You can use the following methods to manipulate the used bounds: +# +# .. currentmodule:: gstools.covmodel.base +# +# .. autosummary:: +# CovModel.default_opt_arg_bounds +# CovModel.default_arg_bounds +# CovModel.set_arg_bounds +# CovModel.check_arg_bounds +# +# You can override the :any:`CovModel.default_opt_arg_bounds` to provide standard +# bounds for your additional parameters. +# +# To access the bounds you can use: +# +# .. autosummary:: +# CovModel.var_bounds +# CovModel.len_scale_bounds +# CovModel.nugget_bounds +# CovModel.opt_arg_bounds +# CovModel.arg_bounds +# +# Provided Covariance Models +# -------------------------- +# +# The following standard covariance models are provided by GSTools +# +# .. currentmodule:: gstools.covmodel.models +# +# .. autosummary:: +# Gaussian +# Exponential +# Matern +# Stable +# Rational +# Linear +# Circular +# Spherical +# Intersection +# +# As a special feature, we also provide truncated power law (TPL) covariance models +# +# .. currentmodule:: gstools.covmodel.tpl_models +# +# .. autosummary:: +# TPLGaussian +# TPLExponential +# TPLStable diff --git a/examples/02_cov_model/README.rst b/examples/02_cov_model/README.rst new file mode 100644 index 000000000..b01cbd9b8 --- /dev/null +++ b/examples/02_cov_model/README.rst @@ -0,0 +1,37 @@ +.. _tutorial_02_cov: + +Tutorial 2: The Covariance Model +================================ + +One of the core-features of GSTools is the powerful :any:`CovModel` +class, which allows you to easily define arbitrary covariance models by +yourself. The resulting models provide a bunch of nice features to explore the +covariance models. + + +A covariance model is used to characterize the +`semi-variogram <https://en.wikipedia.org/wiki/Variogram#Semivariogram>`_, +denoted by :math:`\gamma`, of a spatial random field. +In GSTools, we use the following form for an isotropic and stationary field: + +.. math:: + \gamma\left(r\right)= + \sigma^2\cdot\left(1-\mathrm{cor}\left(r\right)\right)+n + +Where: + - :math:`\mathrm{cor}(r)` is the so called + `correlation <https://en.wikipedia.org/wiki/Autocovariance#Normalization>`_ + function depending on the distance :math:`r` + - :math:`\sigma^2` is the variance + - :math:`n` is the nugget (subscale variance) + +.. note:: + + We are not limited to isotropic models. We support anisotropy ratios for + length scales in orthogonal transversal directions like: + + - :math:`x` (main direction) + - :math:`y` (1. transversal direction) + - :math:`z` (2. transversal direction) + + These main directions can also be rotated, but we will come to that later. 
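+
+As a minimal sketch (mirroring the example scripts of this tutorial), such a
+model can be defined by subclassing :any:`CovModel` and providing just its
+correlation function:
+
+.. code-block:: python
+
+    import numpy as np
+    from gstools import CovModel
+
+    # a Gaussian-shaped model, defined by its correlation function only
+    class Gau(CovModel):
+        def correlation(self, r):
+            return np.exp(-(r / self.len_scale) ** 2)
+
+    model = Gau(dim=2, var=2.0, len_scale=10)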
diff --git a/examples/03_cov_model.py b/examples/03_cov_model.py
deleted file mode 100644
index 9101c360c..000000000
--- a/examples/03_cov_model.py
+++ /dev/null
@@ -1,81 +0,0 @@
-from gstools import CovModel
-import numpy as np
-
-# use CovModel as the base-class
-class Gau(CovModel):
-    def correlation(self, r):
-        return np.exp(-(r / self.len_scale) ** 2)
-
-
-model = Gau(dim=2, var=2.0, len_scale=10)
-
-model.plot()
-
-print(model.dim, model.var, model.len_scale, model.nugget, model.sill)
-model.dim = 3
-model.var = 1
-model.len_scale = 15
-model.nugget = 0.1
-print(model.dim, model.var, model.len_scale, model.nugget, model.sill)
-
-model = Gau(dim=3, var=2.0, len_scale=10, anis=0.5)
-print(model.anis)
-print(model.len_scale_vec)
-
-model = Gau(dim=3, var=2.0, len_scale=[10, 5, 4])
-print(model.anis)
-print(model.len_scale)
-print(model.len_scale_vec)
-
-model = Gau(dim=3, var=2.0, len_scale=10, angles=2.5)
-print(model.angles)
-
-model = Gau(dim=3, var=2.0, len_scale=10, nugget=0.5)
-print(model.variogram(10.0))
-print(model.covariance(10.0))
-print(model.correlation(10.0))
-
-model = Gau(dim=3, var=2.0, len_scale=10)
-print(model.spectrum(0.1))
-print(model.spectral_density(0.1))
-
-model = Gau(dim=3, var=2.0, len_scale=10)
-print(model.integral_scale)
-print(model.integral_scale_vec)
-
-model = Gau(dim=3, var=2.0, integral_scale=[10, 4, 2])
-print(model.anis)
-print(model.len_scale)
-print(model.len_scale_vec)
-print(model.integral_scale)
-print(model.integral_scale_vec)
-
-model = Gau(dim=3, var=2.0, len_scale=10)
-print(model.percentile_scale(0.9))
-
-
-class Stab(CovModel):
-    def default_opt_arg(self):
-        return {"alpha": 1.5}
-
-    def correlation(self, r):
-        return np.exp(-(r / self.len_scale) ** self.alpha)
-
-
-model1 = Stab(dim=2, var=2.0, len_scale=10)
-model2 = Stab(dim=2, var=2.0, len_scale=10, alpha=0.5)
-print(model1)
-print(model2)
-
-# data
-x = [1.0, 3.0, 5.0, 7.0, 9.0, 11.0]
-y = [0.2, 0.5, 0.6, 0.8, 0.8, 0.9]
-# fitting model
-model = Stab(dim=2)
-# we have to provide boundaries for the parameters
-model.set_arg_bounds(alpha=[0, 3])
-results, pcov = model.fit_variogram(x, y, nugget=False)
-print(results)
-
-ax = model.plot()
-ax.scatter(x, y, color="k")
diff --git a/examples/02_fit_variogram.py b/examples/03_variogram/00_fit_variogram.py
similarity index 95%
rename from examples/02_fit_variogram.py
rename to examples/03_variogram/00_fit_variogram.py
index 3a36a9ba7..8df4c3494 100644
--- a/examples/02_fit_variogram.py
+++ b/examples/03_variogram/00_fit_variogram.py
@@ -1,3 +1,7 @@
+"""
+Fit Variogram
+-------------
+"""
 import numpy as np
 from gstools import SRF, Exponential, Stable, vario_estimate_unstructured
diff --git a/examples/03_variogram/01_variogram_estimation.py b/examples/03_variogram/01_variogram_estimation.py
new file mode 100644
index 000000000..4ed681ecb
--- /dev/null
+++ b/examples/03_variogram/01_variogram_estimation.py
@@ -0,0 +1,354 @@
+"""
+An Example with Actual Data
+---------------------------
+
+This example is going to be a bit more extensive and we are going to do some
+basic data preprocessing for the actual variogram estimation. But this example
+will be self-contained and all data gathering and processing will be done in
+this example script.
+
+
+*This example will only work with Python 3.*
+
+The Data
+^^^^^^^^
+
+We are going to analyse the Herten aquifer, which is situated in Southern
+Germany. Multiple outcrop faces were surveyed and interpolated to a 3D
+dataset. In these publications, you can find more information about the data:
+
+| Bayer, Peter; Comunian, Alessandro; Höyng, Dominik; Mariethoz, Gregoire (2015): Physicochemical properties and 3D geostatistical simulations of the Herten and the Descalvado aquifer analogs. PANGAEA, https://doi.org/10.1594/PANGAEA.844167,
+| Supplement to: Bayer, P et al. (2015): Three-dimensional multi-facies realizations of sedimentary reservoir and aquifer analogs. Scientific Data, 2, 150033, https://doi.org/10.1038/sdata.2015.33
+|
+
+Retrieving the Data
+^^^^^^^^^^^^^^^^^^^
+
+To begin with, we need to download and extract the data. Therefore, we are
+going to use some built-in Python libraries. For simplicity, many values and
+strings will be hardcoded.
+"""
+import os
+from shutil import rmtree
+import zipfile
+import urllib.request
+import numpy as np
+import matplotlib.pyplot as pt
+from gstools import (
+    vario_estimate_unstructured,
+    vario_estimate_structured,
+    Exponential,
+)
+from gstools.covmodel.plot import plot_variogram
+
+
+def download_herten():
+    # download the data, warning: it's about 250MB
+    print("Downloading Herten data")
+    data_filename = "data.zip"
+    data_url = "http://store.pangaea.de/Publications/Bayer_et_al_2015/Herten-analog.zip"
+    urllib.request.urlretrieve(data_url, data_filename)
+
+    with zipfile.ZipFile(data_filename, "r") as zf:
+        zf.extract(
+            os.path.join("Herten-analog", "sim-big_1000x1000x140", "sim.vtk")
+        )
+
+
+###############################################################################
+# That was that. But we also need a script to convert the data into a format we
+# can use. This script is also kindly provided by the authors. We can download
+# this script in a very similar manner as the data:
+
+def download_scripts():
+    import fileinput
+
+    # download a script for file conversion
+    print("Downloading scripts")
+    tools_filename = "scripts.zip"
+    tool_url = (
+        "http://store.pangaea.de/Publications/Bayer_et_al_2015/tools.zip"
+    )
+    urllib.request.urlretrieve(tool_url, tools_filename)
+
+    filename = os.path.join("tools", "vtk2gslib.py")
+
+    with zipfile.ZipFile(tools_filename, "r") as zf:
+        zf.extract(filename)
+
+    with fileinput.FileInput(filename, inplace=True) as fin:
+        for line in fin:
+            print(line.replace("header=-1", "header=None"), end="")
+
+
+###############################################################################
+# These two functions can now be called:
+download_herten()
+download_scripts()
+
+###############################################################################
+# Preprocessing the Data
+# ^^^^^^^^^^^^^^^^^^^^^^
+#
+# First of all, we have to convert the data with the script we just downloaded.
+
+# import the downloaded conversion script
+from tools.vtk2gslib import vtk2numpy
+
+# load the Herten aquifer with the downloaded vtk2numpy routine
+print("Loading data")
+herten, grid = vtk2numpy(
+    os.path.join("Herten-analog", "sim-big_1000x1000x140", "sim.vtk")
+)
+
+###############################################################################
+# The data only contains facies, but from the supplementary data, we know the
+# hydraulic conductivity values of each facies, which we will simply paste here
+# and assign them to the correct facies.
+
+# conductivity values per facies from the supplementary data
+cond = np.array(
+    [
+        2.50e-04,
+        2.30e-04,
+        6.10e-05,
+        2.60e-02,
+        1.30e-01,
+        9.50e-02,
+        4.30e-05,
+        6.00e-07,
+        2.30e-03,
+        1.40e-04,
+    ]
+)
+
+# assign the conductivities to the facies
+herten_cond = cond[herten]
+
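+###############################################################################
+# As a quick, optional sanity check (an aside, not part of the original
+# workflow), we can confirm that the conductivities span several orders of
+# magnitude:
+
+print(
+    "conductivity range: {:.1e} to {:.1e}".format(
+        herten_cond.min(), herten_cond.max()
+    )
+)
+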
+###############################################################################
+# Next, we are going to calculate the transmissivity by integrating over the
+# vertical axis.
+
+# integrate over the vertical axis, calculate transmissivity
+herten_log_trans = np.log(np.sum(herten_cond, axis=2) * grid["dz"])
+
+
+###############################################################################
+# The Herten data provides information about the grid, which was already used in
+# the previous code block. From this information, we can create our own grid on
+# which we can estimate the variogram. As a first step, we are going to estimate
+# an isotropic variogram, meaning that we will take point pairs from all
+# directions into account. An unstructured grid is a natural choice for this.
+# Therefore, we are going to create an unstructured grid from the given,
+# structured one. For this, we are going to write another small function:
+
+def create_unstructured_grid(x_s, y_s):
+    x_u, y_u = np.meshgrid(x_s, y_s)
+    len_unstruct = len(x_s) * len(y_s)
+    x_u = np.reshape(x_u, len_unstruct)
+    y_u = np.reshape(y_u, len_unstruct)
+    return x_u, y_u
+
+# create a structured grid on which the data is defined
+x_s = np.arange(grid["ox"], grid["nx"] * grid["dx"], grid["dx"])
+y_s = np.arange(grid["oy"], grid["ny"] * grid["dy"], grid["dy"])
+
+###############################################################################
+# Let's have a look at the transmissivity field of the Herten aquifer:
+
+pt.imshow(herten_log_trans.T, origin="lower", aspect="equal")
+pt.show()
+
+# create an unstructured grid for the variogram estimation
+x_u, y_u = create_unstructured_grid(x_s, y_s)
+
+
+###############################################################################
+# Estimating the Variogram
+# ^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# Finally, everything is ready for the variogram estimation. For the unstructured
+# method, we have to define the bins on which the variogram will be estimated.
+# Through expert knowledge (i.e. fiddling around), we assume that the main
+# features of the variogram will be below 10 metres distance. And because the
+# data has a high spatial resolution, the resolution of the bins can also be
+# high. The transmissivity data is still defined on a structured grid, but we can
+# simply flatten it with :any:`numpy.ndarray.flatten`, in order to bring it into
+# the right shape. It might be more memory efficient to use
+# ``herten_log_trans.reshape(-1)``, but for better readability, we will stick to
+# :any:`numpy.ndarray.flatten`. Taking all data points into account would take a
+# very long time (expert knowledge \*wink\*), thus we will only take 2000
+# randomly sampled data points into account. In order to make the exact
+# results reproducible, we can also set a seed.
+
+bins = np.linspace(0, 10, 50)
+print("Estimating unstructured variogram")
+bin_center, gamma = vario_estimate_unstructured(
+    (x_u, y_u),
+    herten_log_trans.reshape(-1),
+    bins,
+    sampling_size=2000,
+    sampling_seed=19920516,
+)
+
+###############################################################################
+# The estimated variogram is calculated on the centre of the given bins;
+# therefore, the ``bin_center`` array is also returned.
+
+###############################################################################
+# Fitting the Variogram
+# ^^^^^^^^^^^^^^^^^^^^^
+#
+# Now we can check whether the estimated variogram can be modelled by a common
+# variogram model. Let's try the :any:`Exponential` model.
+
+# fit an exponential model
+fit_model = Exponential(dim=2)
+fit_model.fit_variogram(bin_center, gamma, nugget=False)
+
+###############################################################################
+# Finally, we can visualise some results. For quickly plotting a covariance
+# model, GSTools provides some helper functions.
+
+pt.plot(bin_center, gamma)
+plot_variogram(fit_model, x_max=bins[-1])
+
+
+###############################################################################
+# That looks like a pretty good fit! By printing the model, we can directly see
+# the fitted parameters:
+
+print(fit_model)
+
+###############################################################################
+# With this data, we could start generating new ensembles of the Herten aquifer
+# with the :any:`SRF` class.
+
+
+###############################################################################
+# Estimating the Variogram in Specific Directions
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# Estimating a variogram on a structured grid gives us the possibility to only
+# consider values in a specific direction. This can be a first test to see if
+# the data is anisotropic.
+# In order to speed up the calculations, we are only going to use every 10th
+# data point. For a comparison with the isotropic variogram calculated earlier,
+# we only need the first 21 array items.
+
+# estimate the variogram on a structured grid
+# use only every 10th value, otherwise calculations would take very long
+x_s_skip = x_s[::10]
+y_s_skip = y_s[::10]
+herten_trans_skip = herten_log_trans[::10, ::10]
+
+###############################################################################
+# With this much smaller data set, we can immediately estimate the variogram
+# along the x- and y-axis:
+
+print("Estimating structured variograms")
+gamma_x = vario_estimate_structured(herten_trans_skip, direction="x")
+gamma_y = vario_estimate_structured(herten_trans_skip, direction="y")
+
+###############################################################################
+# With these two estimated variograms, we can start fitting :any:`Exponential`
+# covariance models:
+
+x_plot = x_s_skip[:21]
+y_plot = y_s_skip[:21]
+# fit an exponential model
+fit_model_x = Exponential(dim=2)
+fit_model_x.fit_variogram(x_plot, gamma_x[:21], nugget=False)
+fit_model_y = Exponential(dim=2)
+fit_model_y.fit_variogram(y_plot, gamma_y[:21], nugget=False)
+
+###############################################################################
+# Now, the isotropic variogram and the two variograms in x- and y-direction can
+# be plotted together with their respective models, which will be plotted with
+# dashed lines.
+
+line, = pt.plot(bin_center, gamma, label="estimated variogram (isotropic)")
+pt.plot(
+    bin_center,
+    fit_model.variogram(bin_center),
+    color=line.get_color(),
+    linestyle="--",
+    label="exp. variogram (isotropic)",
+)
+
+line, = pt.plot(x_plot, gamma_x[:21], label="estimated variogram in x-dir")
+pt.plot(
+    x_plot,
+    fit_model_x.variogram(x_plot),
+    color=line.get_color(),
+    linestyle="--",
+    label="exp. variogram in x-dir",
+)
+
+line, = pt.plot(y_plot, gamma_y[:21], label="estimated variogram in y-dir")
+pt.plot(
+    y_plot,
+    fit_model_y.variogram(y_plot),
+    color=line.get_color(),
+    linestyle="--",
+    label="exp. variogram in y-dir",
+)
+
+pt.legend()
+pt.show()
+
+###############################################################################
+# The plot might be a bit cluttered, but at least it is pretty obvious that the
+# Herten aquifer has no apparent anisotropies in its spatial structure.
+
+print("semivariogram model (isotropic):\n", fit_model)
+print("semivariogram model (in x-dir.):\n", fit_model_x)
+print("semivariogram model (in y-dir.):\n", fit_model_y)
+
+
+###############################################################################
+# Creating a Spatial Random Field from the Herten Parameters
+# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# With all the hard work done, it's straightforward now to generate new
+# *Herten realisations*:
+
+from gstools import SRF
+
+srf = SRF(fit_model, seed=19770928)
+print("Calculating SRF")
+new_herten = srf((x_s, y_s), mesh_type="structured")
+
+pt.imshow(new_herten.T, origin="lower")
+pt.show()
+
+###############################################################################
+# That's pretty neat! Executing the code given on this site will result in a
+# lower resolution of the field, because we overwrote `x_s` and `y_s` for the
+# directional variogram estimation. In the example script, this is not the case
+# and you will get a high resolution field.
+
+
+###############################################################################
+# And Now for Some Cleanup
+# ^^^^^^^^^^^^^^^^^^^^^^^^
+#
+# In case you want all the downloaded data and scripts to be deleted, use the
+# following commands:
+
+# comment all in case you want to keep the data for playing around with it
+os.remove("data.zip")
+os.remove("scripts.zip")
+rmtree("Herten-analog")
+rmtree("tools")
+
+
+###############################################################################
+# And in case you want to play around a little bit more with the data, you can
+# comment out the function calls ``download_herten()`` and
+# ``download_scripts()``, after they were called at least once, and also comment
+# out the cleanup. This way, the data will not be downloaded with every script
+# execution.
diff --git a/examples/03_variogram/README.rst b/examples/03_variogram/README.rst
new file mode 100644
index 000000000..e53e67ee4
--- /dev/null
+++ b/examples/03_variogram/README.rst
@@ -0,0 +1,12 @@
+Tutorial 3: Variogram Estimation
+================================
+
+Estimating the spatial correlations is an important part of geostatistics.
+These spatial correlations can be expressed by the variogram, which can be
+estimated with the subpackage :any:`gstools.variogram`. The variograms can be
+estimated on structured and unstructured grids.
+
+
+The same `(semi-)variogram <https://en.wikipedia.org/wiki/Variogram#Semivariogram>`_ as
+:ref:`tutorial_02_cov` is being used
+by this subpackage.
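+
+As a minimal sketch (following the estimation example of this tutorial), an
+isotropic estimation on unstructured data could look like this, where ``x``,
+``y`` and ``field`` are the given coordinates and values:
+
+.. code-block:: python
+
+    import numpy as np
+    from gstools import vario_estimate_unstructured
+
+    # bin edges for the distance classes of the variogram
+    bins = np.linspace(0, 10, 50)
+    bin_center, gamma = vario_estimate_unstructured((x, y), field, bins)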
diff --git a/examples/04_vector_field/00_vector_field.py b/examples/04_vector_field/00_vector_field.py
new file mode 100644
index 000000000..42c88bf57
--- /dev/null
+++ b/examples/04_vector_field/00_vector_field.py
@@ -0,0 +1,48 @@
+"""
+Generating a Random Vector Field
+--------------------------------
+
+As a first example we are going to generate a vector field with a Gaussian
+covariance model on a structured grid:
+"""
+import numpy as np
+import matplotlib.pyplot as plt
+from gstools import SRF, Gaussian, Exponential
+
+# the grid
+x = np.arange(100)
+y = np.arange(100)
+
+# a smooth Gaussian covariance model
+model = Gaussian(dim=2, var=1, len_scale=10)
+
+srf = SRF(model, generator="VectorField")
+srf((x, y), mesh_type="structured", seed=19841203)
+srf.plot()
+
+###############################################################################
+# Let us have a look at the influence of the covariance model. Choosing the
+# exponential model and keeping all other parameters the same:
+
+# a rougher exponential covariance model
+model2 = Exponential(dim=2, var=1, len_scale=10)
+
+srf.model = model2
+srf((x, y), mesh_type="structured", seed=19841203)
+srf.plot()
+
+###############################################################################
+# and we see that the wiggles are much "rougher" than the smooth Gaussian ones.
+
+
+###############################################################################
+# Applications
+# ------------
+#
+# One great advantage of the Kraichnan method is that, after some initialization,
+# one can compute the velocity field at arbitrary points online, with hardly any
+# overhead.
+# This means that, for example in a Lagrangian transport simulation, the velocity
+# can be evaluated at each particle position very efficiently and without any
+# interpolation. These field interpolations are a common problem for Lagrangian
+# methods.
diff --git a/examples/04_vector_field/README.rst b/examples/04_vector_field/README.rst
new file mode 100644
index 000000000..85df7ffbc
--- /dev/null
+++ b/examples/04_vector_field/README.rst
@@ -0,0 +1,33 @@
+Tutorial 4: Random Vector Field Generation
+==========================================
+
+In 1970, Kraichnan was the first to suggest a randomization method.
+For studying the diffusion of single particles in a random incompressible
+velocity field, he came up with a randomization method that includes a
+projector ensuring the incompressibility of the vector field.
+
+
+Without loss of generality we assume that the mean velocity :math:`\bar{U}` is oriented
+towards the direction of the first basis vector :math:`\mathbf{e}_1`. Our goal is now to
+generate random fluctuations with a given covariance model around this mean velocity,
+while at the same time making sure that the velocity field remains incompressible,
+or in other words, that :math:`\nabla \cdot \mathbf U = 0`.
+This can be done by using the randomization method we already know, but adding a
+projector to every mode being summed:
+
+
+.. math::
+
+   \mathbf{U}(\mathbf{x}) = \bar{U} \mathbf{e}_1 - \sqrt{\frac{\sigma^{2}}{N}}
+   \sum_{i=1}^{N} \mathbf{p}(\mathbf{k}_i) \left[ Z_{1,i}
+   \cos\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right)
+   + Z_{2,i} \sin\left( \langle \mathbf{k}_{i}, \mathbf{x} \rangle \right) \right]
+
+with the projector
+
+.. math::
+
+   \mathbf{p}(\mathbf{k}_i) = \mathbf{e}_1 - \frac{\mathbf{k}_i k_1}{k^2} \; .
+
+By calculating :math:`\nabla \cdot \mathbf U`, it can be verified that
+the resulting field is indeed incompressible.
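+
+In GSTools, this incompressible randomization method is provided by the
+``VectorField`` generator of the :any:`SRF` class. A minimal sketch (compare
+the example of this tutorial) looks like:
+
+.. code-block:: python
+
+    import numpy as np
+    from gstools import SRF, Gaussian
+
+    model = Gaussian(dim=2, var=1, len_scale=10)
+    srf = SRF(model, generator="VectorField")
+    srf((np.arange(100), np.arange(100)), mesh_type="structured", seed=19841203)
+    srf.plot()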
diff --git a/examples/05_kriging/00_simple_kriging.py b/examples/05_kriging/00_simple_kriging.py
new file mode 100755
index 000000000..c7bcdcb2a
--- /dev/null
+++ b/examples/05_kriging/00_simple_kriging.py
@@ -0,0 +1,48 @@
+r"""
+Simple Kriging
+--------------
+
+Simple kriging assumes a known mean of the data.
+For simplicity we assume a mean of 0,
+which can be achieved by subtracting the mean from the observed values and
+subsequently adding it to the resulting data.
+
+The resulting equation system for :math:`W` is given by:
+
+.. math::
+
+   W = \begin{pmatrix}c(x_1,x_1) & \cdots & c(x_1,x_n) \\
+   \vdots & \ddots & \vdots \\
+   c(x_n,x_1) & \cdots & c(x_n,x_n)
+   \end{pmatrix}^{-1}
+   \begin{pmatrix}c(x_1,x_0) \\ \vdots \\ c(x_n,x_0) \end{pmatrix}
+
+Thereby :math:`c(x_i,x_j)` is the covariance of the given observations.
+
+
+Example
+^^^^^^^
+
+Here we use simple kriging in 1D (for plotting reasons) with 5 given observations/conditions.
+The mean of the field has to be given beforehand.
+
+"""
+import numpy as np
+from gstools import Gaussian, krige
+
+# conditions
+cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7]
+cond_val = [0.47, 0.56, 0.74, 1.47, 1.74]
+# resulting grid
+gridx = np.linspace(0.0, 15.0, 151)
+# spatial random field class
+model = Gaussian(dim=1, var=0.5, len_scale=2)
+
+###############################################################################
+krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val)
+krig(gridx)
+
+###############################################################################
+ax = krig.plot()
+ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions")
+ax.legend()
diff --git a/examples/05_kriging/01_ordinary_kriging.py b/examples/05_kriging/01_ordinary_kriging.py
new file mode 100644
index 000000000..2e56c7972
--- /dev/null
+++ b/examples/05_kriging/01_ordinary_kriging.py
@@ -0,0 +1,48 @@
+r"""
+Ordinary Kriging
+----------------
+
+Ordinary kriging will estimate an appropriate mean of the field,
+based on the given observations/conditions and the covariance model used.
+
+The resulting system of equations for :math:`W` is given by:
+
+.. math::
+
+   \begin{pmatrix}W\\\mu\end{pmatrix} = \begin{pmatrix}
+   \gamma(x_1,x_1) & \cdots & \gamma(x_1,x_n) &1 \\
+   \vdots & \ddots & \vdots & \vdots \\
+   \gamma(x_n,x_1) & \cdots & \gamma(x_n,x_n) & 1 \\
+   1 &\cdots& 1 & 0
+   \end{pmatrix}^{-1}
+   \begin{pmatrix}\gamma(x_1,x_0) \\ \vdots \\ \gamma(x_n,x_0) \\ 1\end{pmatrix}
+
+Thereby :math:`\gamma(x_i,x_j)` is the semi-variogram of the given observations
+and :math:`\mu` is a Lagrange multiplier to minimize the kriging error and estimate the mean.
+
+
+Example
+^^^^^^^
+
+Here we use ordinary kriging in 1D (for plotting reasons) with 5 given observations/conditions.
+The estimated mean can be accessed by ``krig.mean``.
+""" +import numpy as np +from gstools import Gaussian, krige + +# condtions +cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] +cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] +# resulting grid +gridx = np.linspace(0.0, 15.0, 151) +# spatial random field class +model = Gaussian(dim=1, var=0.5, len_scale=2) + +############################################################################### +krig = krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val) +krig(gridx) + +############################################################################### +ax = krig.plot() +ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") +ax.legend() diff --git a/examples/20_pykrige_interface.py b/examples/05_kriging/02_pykrige_interface.py similarity index 57% rename from examples/20_pykrige_interface.py rename to examples/05_kriging/02_pykrige_interface.py index 716cacb87..5834ad5ab 100755 --- a/examples/20_pykrige_interface.py +++ b/examples/05_kriging/02_pykrige_interface.py @@ -1,5 +1,15 @@ -# -*- coding: utf-8 -*- -"""Example how to use the PyKrige routines with a GSTools CovModel.""" +""" +Interface to PyKrige +-------------------- + +To use fancier methods like `universal kriging <https://en.wikipedia.org/wiki/Kriging#Universal>`__, +we provide an interface to `PyKrige <https://github.com/bsmurphy/PyKrige>`__. + +You can pass a GSTools Covariance Model to the PyKrige routines as ``variogram_model``. + +To demonstrate the general workflow, we compare the ordinary kriging of PyKrige +with GSTools in 2D: +""" import numpy as np from gstools import Gaussian, krige from pykrige.ok import OrdinaryKriging @@ -15,18 +25,27 @@ [4.7, 3.8, 1.74], ] ) + # grid definition for output field gridx = np.arange(0.0, 5.5, 0.1) gridy = np.arange(0.0, 6.5, 0.1) + +############################################################################### + # a GSTools based covariance model cov_model = Gaussian( dim=2, len_scale=1, anis=0.2, angles=-0.5, var=0.5, nugget=0.1 ) + +############################################################################### # ordinary kriging with pykrige OK1 = OrdinaryKriging(data[:, 0], data[:, 1], data[:, 2], cov_model) z1, ss1 = OK1.execute("grid", gridx, gridy) plt.imshow(z1, origin="lower") plt.show() + +############################################################################### + # ordinary kriging with gstools for comparison OK2 = krige.Ordinary(cov_model, [data[:, 0], data[:, 1]], data[:, 2]) OK2.structured([gridx, gridy]) diff --git a/examples/12_compare_kriging.py b/examples/05_kriging/13_compare_kriging.py similarity index 70% rename from examples/12_compare_kriging.py rename to examples/05_kriging/13_compare_kriging.py index b22f05ee6..87bff2759 100755 --- a/examples/12_compare_kriging.py +++ b/examples/05_kriging/13_compare_kriging.py @@ -1,3 +1,7 @@ +""" +Compare Kriging +--------------- +""" import numpy as np from gstools import Gaussian, krige import matplotlib.pyplot as plt @@ -7,12 +11,18 @@ cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] # resulting grid gridx = np.linspace(0.0, 15.0, 151) + +############################################################################### # spatial random field class model = Gaussian(dim=1, var=0.5, len_scale=2) + +############################################################################### kr1 = krige.Simple(model=model, mean=1, cond_pos=cond_pos, cond_val=cond_val) kr2 = krige.Ordinary(model=model, cond_pos=cond_pos, cond_val=cond_val) kr1(gridx) kr2(gridx) + +############################################################################### 
 plt.plot(gridx, kr1.field, label="simple kriged field")
 plt.plot(gridx, kr2.field, label="ordinary kriged field")
 plt.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions")
diff --git a/examples/05_kriging/README.rst b/examples/05_kriging/README.rst
new file mode 100644
index 000000000..a502d6949
--- /dev/null
+++ b/examples/05_kriging/README.rst
@@ -0,0 +1,46 @@
+.. _tutorial_05_kriging:
+
+Tutorial 5: Kriging
+===================
+
+The subpackage :py:mod:`gstools.krige` provides routines for Gaussian process regression, also known as kriging.
+Kriging is a method of data interpolation based on predefined covariance models.
+
+We provide two kinds of kriging routines:
+
+* Simple: The data is interpolated with a given mean value for the kriging field.
+* Ordinary: The mean of the resulting field is unknown and estimated during interpolation.
+
+
+The aim of kriging is to derive the value of a field at some point :math:`x_0`,
+when there are fixed observed values :math:`z(x_1)\ldots z(x_n)` at given points :math:`x_i`.
+
+The resulting value :math:`z_0` at :math:`x_0` is calculated as a weighted mean:
+
+.. math::
+
+   z_0 = \sum_{i=1}^n w_i \cdot z_i
+
+The weights :math:`W = (w_1,\ldots,w_n)` depend on the given covariance model and the location of the target point.
+
+The different kriging approaches provide different ways of calculating :math:`W`.
+
+
+
+The routines for kriging are almost identical to the routines for spatial random fields.
+First you define a covariance model, as described in :ref:`tutorial_02_cov`,
+then you initialize the kriging class with this model:
+
+.. code-block:: python
+
+    from gstools import Gaussian, krige
+    # conditions
+    cond_pos = ...
+    cond_val = ...
+    model = Gaussian(dim=1, var=0.5, len_scale=2)
+    krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val)
+
+The resulting field instance ``krig`` has the same methods as the :any:`SRF` class.
+You can call it to evaluate the kriged field at different points,
+you can plot the latest field or you can export the field and so on.
+Have a look at the documentation of :any:`Simple` and :any:`Ordinary`.
diff --git a/examples/05_srf_ensemble.py b/examples/05_srf_ensemble.py
deleted file mode 100644
index 3db77e0a0..000000000
--- a/examples/05_srf_ensemble.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as pt
-from gstools import SRF, Gaussian
-from gstools.random import MasterRNG
-
-x = y = np.arange(100)
-
-model = Gaussian(dim=2, var=1, len_scale=10)
-srf = SRF(model)
-
-ens_no = 4
-field = []
-seed = MasterRNG(20170519)
-for i in range(ens_no):
-    field.append(srf((x, y), seed=seed(), mesh_type="structured"))
-
-fig, ax = pt.subplots(2, 2, sharex=True, sharey=True)
-ax = ax.flatten()
-for i in range(ens_no):
-    ax[i].imshow(field[i].T, origin="lower")
-
-pt.show()
diff --git a/examples/13_condition_ensemble.py b/examples/06_conditioned_fields/00_condition_ensemble.py
similarity index 56%
rename from examples/13_condition_ensemble.py
rename to examples/06_conditioned_fields/00_condition_ensemble.py
index 8730adcc7..c25824fe2 100644
--- a/examples/13_condition_ensemble.py
+++ b/examples/06_conditioned_fields/00_condition_ensemble.py
@@ -1,3 +1,11 @@
+"""
+Example: Conditioning with Ordinary Kriging
+-------------------------------------------
+
+Here we use ordinary kriging in 1D (for plotting reasons) with 5 given
+observations/conditions to generate an ensemble of conditioned random fields.
+The estimated mean can be accessed by ``srf.mean``.
+""" import numpy as np from gstools import Gaussian, SRF import matplotlib.pyplot as plt @@ -6,10 +14,15 @@ cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] gridx = np.linspace(0.0, 15.0, 151) + +############################################################################### + # spatial random field class model = Gaussian(dim=1, var=0.5, len_scale=2) srf = SRF(model) srf.set_condition(cond_pos, cond_val, "ordinary") + +############################################################################### fields = [] for i in range(100): if i % 10 == 0: @@ -23,3 +36,7 @@ plt.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") plt.legend() plt.show() + +############################################################################### +# As you can see, the kriging field coincides with the ensemble mean of the +# conditioned random fields and the estimated mean is the mean of the far-field. diff --git a/examples/06_conditioned_fields/README.rst b/examples/06_conditioned_fields/README.rst new file mode 100644 index 000000000..797ee5b66 --- /dev/null +++ b/examples/06_conditioned_fields/README.rst @@ -0,0 +1,31 @@ +Tutorial 6: Conditioned Fields +============================== + +Kriged fields tend to approach the field mean outside the area of observations. +To generate random fields, that coincide with given observations, but are still +random according to a given covariance model away from the observations proximity, +we provide the generation of conditioned random fields. + + +The idea behind conditioned random fields builds up on kriging. +First we generate a field with a kriging method, then we generate a random field, +and finally we generate another kriged field to eliminate the error between +the random field and the kriged field of the given observations. + +To do so, you can choose between ordinary and simple kriging. +In case of ordinary kriging, the mean of the SRF will be overwritten by the +estimated mean. + +The setup of the spatial random field is the same as described in +:ref:`tutorial_02_cov`. +You just need to add the conditions as described in :ref:`tutorial_05_kriging`: + +.. code-block:: python + + srf.set_condition(cond_pos, cond_val, "simple") + +or: + +.. 
diff --git a/examples/06_unstr_srf_export.py b/examples/06_unstr_srf_export.py
deleted file mode 100644
index 8ea844cab..000000000
--- a/examples/06_unstr_srf_export.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as pt
-from gstools import SRF, Exponential
-from gstools.random import MasterRNG
-
-# creating our own unstructured grid
-seed = MasterRNG(19970221)
-rng = np.random.RandomState(seed())
-x = rng.randint(0, 100, size=10000)
-y = rng.randint(0, 100, size=10000)
-
-model = Exponential(dim=2, var=1, len_scale=[12.0, 3.0], angles=np.pi / 8.0)
-
-srf = SRF(model, seed=20170519)
-
-field = srf((x, y))
-srf.vtk_export("field")
-# Or create a PyVista dataset
-# mesh = srf.to_pyvista()
-
-pt.tricontourf(x, y, field.T)
-pt.axes().set_aspect("equal")
-pt.show()
diff --git a/examples/07_srf_merge.py b/examples/07_srf_merge.py
deleted file mode 100644
index 3f3cdb3ef..000000000
--- a/examples/07_srf_merge.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import numpy as np
-import matplotlib.pyplot as pt
-from gstools import SRF, Exponential
-from gstools.random import MasterRNG
-
-# creating our own unstructured grid
-seed = MasterRNG(19970221)
-rng = np.random.RandomState(seed())
-x = rng.randint(0, 100, size=10000)
-y = rng.randint(0, 100, size=10000)
-
-model = Exponential(dim=2, var=1, len_scale=[12.0, 3.0], angles=np.pi / 8.0)
-
-srf = SRF(model, seed=20170519)
-
-field = srf((x, y))
-
-# new grid
-seed = MasterRNG(20011012)
-rng = np.random.RandomState(seed())
-x2 = rng.randint(99, 150, size=10000)
-y2 = rng.randint(20, 80, size=10000)
-
-field2 = srf((x2, y2))
-
-pt.tricontourf(x, y, field.T)
-pt.tricontourf(x2, y2, field2.T)
-pt.axes().set_aspect("equal")
-pt.show()
diff --git a/examples/14_transform_01.py b/examples/07_transformations/00_log_normal.py
similarity index 74%
rename from examples/14_transform_01.py
rename to examples/07_transformations/00_log_normal.py
index 28dc847fb..a9763378c 100755
--- a/examples/14_transform_01.py
+++ b/examples/07_transformations/00_log_normal.py
@@ -1,3 +1,9 @@
+"""
+log-normal fields
+-----------------
+
+Here we transform a field to a log-normal distribution:
+"""
 from gstools import SRF, Gaussian
 from gstools import transform as tf
diff --git a/examples/15_transform_02.py b/examples/07_transformations/01_binary.py
similarity index 56%
rename from examples/15_transform_02.py
rename to examples/07_transformations/01_binary.py
index 7a24cdd73..5a1d932d4 100755
--- a/examples/15_transform_02.py
+++ b/examples/07_transformations/01_binary.py
@@ -1,3 +1,11 @@
+"""
+binary fields
+-------------
+
+Here we transform a field to a binary field with only two values.
+The dividing value is the mean by default, and the upper and lower values
+are derived to preserve the variance.
+"""
 from gstools import SRF, Gaussian
 from gstools import transform as tf
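The rename above only changes the docstring, so the body of ``01_binary.py`` is
not shown here. As a sketch (not the file's verbatim content), a binary
transformation follows the same pattern as the neighboring transformation examples:

.. code-block:: python

    from gstools import SRF, Gaussian
    from gstools import transform as tf

    # structured field with a size of 100x100 and a grid-size of 1x1
    x = y = range(100)
    model = Gaussian(dim=2, var=1, len_scale=10)
    srf = SRF(model, seed=20170519)
    srf.structured([x, y])
    tf.binary(srf)  # split at the mean, variance-preserving by default
    srf.plot()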
diff --git a/examples/07_transformations/02_zinn_harvey.py b/examples/07_transformations/02_zinn_harvey.py
new file mode 100755
index 000000000..c8ed555e9
--- /dev/null
+++ b/examples/07_transformations/02_zinn_harvey.py
@@ -0,0 +1,20 @@
+"""
+Zinn & Harvey transformation
+----------------------------
+
+Here, we transform a field with the so-called "Zinn & Harvey" transformation presented in
+`Zinn & Harvey (2003) <https://www.researchgate.net/publication/282442995_zinnharvey2003>`__.
+With this transformation, one can overcome the restriction that, in ordinary
+Gaussian random fields, the mean values are the most connected ones.
+
+"""
+from gstools import SRF, Gaussian
+from gstools import transform as tf
+
+# structured field with a size of 100x100 and a grid-size of 1x1
+x = y = range(100)
+model = Gaussian(dim=2, var=1, len_scale=10)
+srf = SRF(model, seed=20170519)
+srf.structured([x, y])
+tf.zinnharvey(srf, conn="high")
+srf.plot()
diff --git a/examples/07_transformations/03_bimodal.py b/examples/07_transformations/03_bimodal.py
new file mode 100755
index 000000000..90b569ca5
--- /dev/null
+++ b/examples/07_transformations/03_bimodal.py
@@ -0,0 +1,21 @@
+"""
+bimodal fields
+---------------
+
+We provide two transformations to obtain bimodal distributions:
+
+* `arcsin <https://en.wikipedia.org/wiki/Arcsine_distribution>`__.
+* `uquad <https://en.wikipedia.org/wiki/U-quadratic_distribution>`__.
+
+Both transformations will preserve the mean and variance of the given field by default.
+"""
+from gstools import SRF, Gaussian
+from gstools import transform as tf
+
+# structured field with a size of 100x100 and a grid-size of 1x1
+x = y = range(100)
+model = Gaussian(dim=2, var=1, len_scale=10)
+srf = SRF(model, seed=20170519)
+field = srf.structured([x, y])
+tf.normal_to_arcsin(srf)
+srf.plot()
diff --git a/examples/07_transformations/04_combinations.py b/examples/07_transformations/04_combinations.py
new file mode 100755
index 000000000..f4736162a
--- /dev/null
+++ b/examples/07_transformations/04_combinations.py
@@ -0,0 +1,30 @@
+"""
+Combinations
+------------
+
+You can combine different transformations simply by applying them successively.
+
+Here, we first force the single field realization to hold the given moments,
+namely mean and variance.
+Then we apply the Zinn & Harvey transformation to connect the low values.
+Afterwards, the field is transformed to a binary field and, last but not least,
+we transform it to log-values.
+"""
+from gstools import SRF, Gaussian
+from gstools import transform as tf
+
+# structured field with a size of 100x100 and a grid-size of 1x1
+x = y = range(100)
+model = Gaussian(dim=2, var=1, len_scale=10)
+srf = SRF(model, mean=-9, seed=20170519)
+srf.structured([x, y])
+tf.normal_force_moments(srf)
+tf.zinnharvey(srf, conn="low")
+tf.binary(srf)
+tf.normal_to_lognormal(srf)
+srf.plot()
+
+###############################################################################
+# The resulting field could be interpreted as a transmissivity field, where
+# the values of low permeability are the most connected ones
+# and only two kinds of soil exist.
diff --git a/examples/07_transformations/README.rst b/examples/07_transformations/README.rst
new file mode 100644
index 000000000..03cfdfda6
--- /dev/null
+++ b/examples/07_transformations/README.rst
@@ -0,0 +1,33 @@
+Tutorial 7: Field transformations
+=================================
+
+The generated fields of GSTools are ordinary Gaussian random fields.
+In applications, there are several transformations to describe real-world
+problems in an appropriate manner.
+
+GSTools provides a submodule :py:mod:`gstools.transform` with a range of
+common transformations:
+
+.. currentmodule:: gstools.transform
+
+.. autosummary::
+    binary
+    boxcox
+    zinnharvey
+    normal_force_moments
+    normal_to_lognormal
+    normal_to_uniform
+    normal_to_arcsin
+    normal_to_uquad
+
+
+All transformations take a field class that holds a generated field
+as input and manipulate this field in place.
+
+Simply import the transform submodule and apply a transformation to the ``srf`` instance:
+
+.. code-block:: python
+
+    from gstools import transform as tf
+    ...
+    tf.normal_to_lognormal(srf)
diff --git a/examples/08_variogram_estimation.py b/examples/08_variogram_estimation.py
deleted file mode 100644
index 167638d97..000000000
--- a/examples/08_variogram_estimation.py
+++ /dev/null
@@ -1,206 +0,0 @@
-import os
-from shutil import rmtree
-import zipfile
-import urllib.request
-import numpy as np
-import matplotlib.pyplot as pt
-from gstools import (
-    vario_estimate_unstructured,
-    vario_estimate_structured,
-    Exponential,
-)
-from gstools.covmodel.plot import plot_variogram
-
-
-def download_herten():
-    # download the data, warning: its about 250MB
-    print("Downloading Herten data")
-    data_filename = "data.zip"
-    data_url = "http://store.pangaea.de/Publications/Bayer_et_al_2015/Herten-analog.zip"
-    urllib.request.urlretrieve(data_url, "data.zip")
-
-    with zipfile.ZipFile(data_filename, "r") as zf:
-        zf.extract(
-            os.path.join("Herten-analog", "sim-big_1000x1000x140", "sim.vtk")
-        )
-
-
-def download_scripts():
-    import fileinput
-
-    # download a script for file conversion
-    print("Downloading scripts")
-    tools_filename = "scripts.zip"
-    tool_url = (
-        "http://store.pangaea.de/Publications/Bayer_et_al_2015/tools.zip"
-    )
-    urllib.request.urlretrieve(tool_url, tools_filename)
-
-    filename = os.path.join("tools", "vtk2gslib.py")
-
-    with zipfile.ZipFile(tools_filename, "r") as zf:
-        zf.extract(filename)
-
-    with fileinput.FileInput(filename, inplace=True) as fin:
-        for line in fin:
-            print(line.replace("header=-1", "header=None"), end="")
-
-
-def create_unstructured_grid(x_s, y_s):
-    x_u, y_u = np.meshgrid(x_s, y_s)
-    len_unstruct = len(x_s) * len(y_s)
-    x_u = np.reshape(x_u, len_unstruct)
-    y_u = np.reshape(y_u, len_unstruct)
-    return x_u, y_u
-
-
-###############################################################################
-# data preparation ############################################################
-###############################################################################
-
-# uncomment these two function calls, in case the data was already downloaded
-# and you want to execute this script multiple times. But don't forget to
-# comment out the cleanup code at the end of this script.
-download_herten() -download_scripts() - -# import the downloaded conversion script -from tools.vtk2gslib import vtk2numpy - -# load the Herten aquifer with the downloaded vtk2numpy routine -print("Loading data") -herten, grid = vtk2numpy( - os.path.join("Herten-analog", "sim-big_1000x1000x140", "sim.vtk") -) - -# conductivity values per fazies from the supplementary data -cond = np.array( - [ - 2.50e-04, - 2.30e-04, - 6.10e-05, - 2.60e-02, - 1.30e-01, - 9.50e-02, - 4.30e-05, - 6.00e-07, - 2.30e-03, - 1.40e-04, - ] -) - -# asign the conductivities to the facies -herten_cond = cond[herten] - -# integrate over the vertical axis, calculate transmissivity -herten_log_trans = np.log(np.sum(herten_cond, axis=2) * grid["dz"]) - -# create a structured grid on which the data is defined -x_s = np.arange(grid["ox"], grid["nx"] * grid["dx"], grid["dx"]) -y_s = np.arange(grid["oy"], grid["ny"] * grid["dy"], grid["dy"]) - -pt.imshow(herten_log_trans.T, origin="lower", aspect="equal") -pt.show() - -# create an unstructured grid for the variogram estimation -x_u, y_u = create_unstructured_grid(x_s, y_s) - -############################################################################### -# estimate the variogram on an unstructured grid ############################## -############################################################################### - -bins = np.linspace(0, 10, 50) -print("Estimating unstructured variogram") -bin_center, gamma = vario_estimate_unstructured( - (x_u, y_u), - herten_log_trans.reshape(-1), - bins, - sampling_size=2000, - sampling_seed=19920516, -) - -# fit an exponential model -fit_model = Exponential(dim=2) -fit_model.fit_variogram(bin_center, gamma, nugget=False) - -pt.plot(bin_center, gamma) -plot_variogram(fit_model, x_max=bins[-1]) - -############################################################################### -# estimate the variogram on a structured grid ################################# -############################################################################### - -# estimate the variogram on a structured grid -# use only every 10th value, otherwise calculations would take very long -x_s_skip = x_s[::10] -y_s_skip = y_s[::10] -herten_trans_skip = herten_log_trans[::10, ::10] - -print("Estimating structured variograms") -gamma_x = vario_estimate_structured(herten_trans_skip, direction="x") -gamma_y = vario_estimate_structured(herten_trans_skip, direction="y") - -x_plot = x_s_skip[:21] -y_plot = y_s_skip[:21] -# fit an exponential model -fit_model_x = Exponential(dim=2) -fit_model_x.fit_variogram(x_plot, gamma_x[:21], nugget=False) -fit_model_y = Exponential(dim=2) -fit_model_y.fit_variogram(y_plot, gamma_y[:21], nugget=False) - -line, = pt.plot(bin_center, gamma, label="estimated variogram (isotropic)") -pt.plot( - bin_center, - fit_model.variogram(bin_center), - color=line.get_color(), - linestyle="--", - label="exp. variogram (isotropic)", -) - -line, = pt.plot(x_plot, gamma_x[:21], label="estimated variogram in x-dir") -pt.plot( - x_plot, - fit_model_x.variogram(x_plot), - color=line.get_color(), - linestyle="--", - label="exp. variogram in x-dir", -) - -line, = pt.plot(y_plot, gamma_y[:21], label="estimated variogram in y-dir") -pt.plot( - y_plot, - fit_model_y.variogram(y_plot), - color=line.get_color(), - linestyle="--", - label="exp. 
variogram in y-dir", -) - -pt.legend() -pt.show() - -print("semivariogram model (isotropic):\n", fit_model) -print("semivariogram model (in x-dir.):\n", fit_model_x) -print("semivariogram model (in y-dir.):\n", fit_model_y) - -############################################################################### -# creating a SRF from the Herten parameters ################################### -############################################################################### - -from gstools import SRF - -srf = SRF(fit_model, seed=19770928) -print("Calculating SRF") -new_herten = srf((x_s, y_s), mesh_type="structured") - -pt.imshow(new_herten.T, origin="lower") -pt.show() - -############################################################################### -# cleanup ##################################################################### -############################################################################### - -# comment all in case you want to keep the data for playing around with it -os.remove("data.zip") -os.remove("scripts.zip") -rmtree("Herten-analog") -rmtree("tools") diff --git a/examples/09_vector_field.py b/examples/09_vector_field.py deleted file mode 100644 index 27c9cf6b9..000000000 --- a/examples/09_vector_field.py +++ /dev/null @@ -1,21 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt -from gstools import SRF, Gaussian, Exponential - -# the grid -x = np.arange(100) -y = np.arange(100) - -# a smooth Gaussian covariance model -model = Gaussian(dim=2, var=1, len_scale=10) - -srf = SRF(model, generator="VectorField") -srf((x, y), mesh_type="structured", seed=19841203) -srf.plot() - -# a rougher exponential covariance model -model2 = Exponential(dim=2, var=1, len_scale=10) - -srf.model = model2 -srf((x, y), mesh_type="structured", seed=19841203) -srf.plot() diff --git a/examples/10_simple_kriging.py b/examples/10_simple_kriging.py deleted file mode 100755 index 48c676d66..000000000 --- a/examples/10_simple_kriging.py +++ /dev/null @@ -1,15 +0,0 @@ -import numpy as np -from gstools import Gaussian, krige - -# condtions -cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] -cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] -# resulting grid -gridx = np.linspace(0.0, 15.0, 151) -# spatial random field class -model = Gaussian(dim=1, var=0.5, len_scale=2) -krig = krige.Simple(model, mean=1, cond_pos=cond_pos, cond_val=cond_val) -krig(gridx) -ax = krig.plot() -ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") -ax.legend() diff --git a/examples/11_ordinary_kriging.py b/examples/11_ordinary_kriging.py deleted file mode 100644 index 2ea0af240..000000000 --- a/examples/11_ordinary_kriging.py +++ /dev/null @@ -1,15 +0,0 @@ -import numpy as np -from gstools import Gaussian, krige - -# condtions -cond_pos = [0.3, 1.9, 1.1, 3.3, 4.7] -cond_val = [0.47, 0.56, 0.74, 1.47, 1.74] -# resulting grid -gridx = np.linspace(0.0, 15.0, 151) -# spatial random field class -model = Gaussian(dim=1, var=0.5, len_scale=2) -krig = krige.Ordinary(model, cond_pos=cond_pos, cond_val=cond_val) -krig(gridx) -ax = krig.plot() -ax.scatter(cond_pos, cond_val, color="k", zorder=10, label="Conditions") -ax.legend() diff --git a/examples/16_transform_03.py b/examples/16_transform_03.py deleted file mode 100755 index c83f6e456..000000000 --- a/examples/16_transform_03.py +++ /dev/null @@ -1,10 +0,0 @@ -from gstools import SRF, Gaussian -from gstools import transform as tf - -# structured field with a size of 100x100 and a grid-size of 1x1 -x = y = range(100) -model = Gaussian(dim=2, var=1, len_scale=10) -srf = SRF(model, 
seed=20170519) -srf.structured([x, y]) -tf.zinnharvey(srf, conn="high") -srf.plot() diff --git a/examples/17_transform_04.py b/examples/17_transform_04.py deleted file mode 100755 index e674c2c3d..000000000 --- a/examples/17_transform_04.py +++ /dev/null @@ -1,10 +0,0 @@ -from gstools import SRF, Gaussian -from gstools import transform as tf - -# structured field with a size of 100x100 and a grid-size of 1x1 -x = y = range(100) -model = Gaussian(dim=2, var=1, len_scale=10) -srf = SRF(model, seed=20170519) -field = srf.structured([x, y]) -tf.normal_to_arcsin(srf) -srf.plot() diff --git a/examples/18_transform_05.py b/examples/18_transform_05.py deleted file mode 100755 index 9be5bdaf2..000000000 --- a/examples/18_transform_05.py +++ /dev/null @@ -1,13 +0,0 @@ -from gstools import SRF, Gaussian -from gstools import transform as tf - -# structured field with a size of 100x100 and a grid-size of 1x1 -x = y = range(100) -model = Gaussian(dim=2, var=1, len_scale=10) -srf = SRF(model, mean=-9, seed=20170519) -srf.structured([x, y]) -tf.normal_force_moments(srf) -tf.zinnharvey(srf, conn="low") -tf.binary(srf) -tf.normal_to_lognormal(srf) -srf.plot()
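The skeleton snippet in the transformations README leaves the field setup elided;
as a self-contained sketch (mirroring the pattern of the new gallery scripts, not
any file's verbatim content):

.. code-block:: python

    from gstools import SRF, Gaussian
    from gstools import transform as tf

    # structured field with a size of 100x100 and a grid-size of 1x1
    x = y = range(100)
    model = Gaussian(dim=2, var=1, len_scale=10)
    srf = SRF(model, seed=20170519)
    srf.structured([x, y])
    tf.normal_to_lognormal(srf)  # manipulates the field in place
    srf.plot()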