pyerrors.roots

import numpy as np
import scipy.optimize
from autograd import jacobian
from .obs import derived_observable


def find_root(d, func, guess=1.0, **kwargs):
    r'''Finds the root of the function func(x, d) where d is an `Obs`.

    Parameters
    ----------
    d : Obs
        Obs passed to the function.
    func : object
        Function whose root should be found. Any numpy functions have to use the autograd.numpy wrapper.
        Example:
        ```python
        import autograd.numpy as anp
        def root_func(x, d):
            return anp.exp(-x ** 2) - d
        ```
    guess : float
        Initial guess for the root search.

    Returns
    -------
    res : Obs
        `Obs` valued root of the function.
    '''
    d_val = np.vectorize(lambda x: x.value)(np.array(d))

    root = scipy.optimize.fsolve(func, guess, d_val)

    # Error propagation as detailed in arXiv:1809.01289
    dx = jacobian(func)(root[0], d_val)
    try:
        da = jacobian(lambda u, v: func(v, u))(d_val, root[0])
    except TypeError:
        raise Exception("It is required to use autograd.numpy instead of numpy within root functions, see the documentation for details.") from None
    deriv = - da / dx
    res = derived_observable(lambda x, **kwargs: (x[0] + np.finfo(np.float64).eps) / (np.array(d).reshape(-1)[0].value + np.finfo(np.float64).eps) * root[0],
                             np.array(d).reshape(-1), man_grad=np.array(deriv).reshape(-1))
    return res
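
The manual gradient handed to `derived_observable` follows from implicit differentiation of the root condition; a short sketch of the reasoning behind `deriv = - da / dx` (consistent with the error propagation described in arXiv:1809.01289):

    func(x(d), d) = 0
    =>  (∂func/∂x) · (dx/dd) + ∂func/∂d = 0
    =>  dx/dd = - (∂func/∂d) / (∂func/∂x) = - da / dx

Both Jacobians are evaluated at the root returned by `scipy.optimize.fsolve`.
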
def find_root(d, func, guess=1.0, **kwargs):

Finds the root of the function func(x, d) where d is an Obs.

Parameters
  • d (Obs): Obs passed to the function.
  • func (object): Function whose root should be found. Any numpy functions have to use the autograd.numpy wrapper. Example:

    import autograd.numpy as anp
    def root_func(x, d):
        return anp.exp(-x ** 2) - d

  • guess (float): Initial guess for the root search.

Returns
  • res (Obs): Obs valued root of the function.
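
A minimal usage sketch, assuming the usual pyerrors entry points (`pyerrors.pseudo_Obs` to construct a test observable and `gamma_method` for the error estimate); the numbers and the ensemble name are purely illustrative:

```python
import autograd.numpy as anp  # numpy operations inside the root function must come from autograd.numpy
import pyerrors as pe


def root_func(x, d):
    # Root condition: exp(-x ** 2) - d = 0
    return anp.exp(-x ** 2) - d


d = pe.pseudo_Obs(0.5, 0.01, 'ensemble1')     # illustrative Obs with value 0.5 and error 0.01
root = pe.find_root(d, root_func, guess=1.0)
root.gamma_method()                           # run the error propagation before reading off the uncertainty
print(root)                                   # Obs-valued root with propagated error
```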