import numpy as np
import scipy.sparse as sps

from ._numdiff import approx_derivative, group_columns
from ._hessian_update_strategy import HessianUpdateStrategy
from scipy.sparse.linalg import LinearOperator
from scipy._lib._array_api import atleast_nd, array_namespace


FD_METHODS = ('2-point', '3-point', 'cs')


class ScalarFunction:
    """Scalar function and its derivatives.

    This class defines a scalar function F: R^n->R and methods for
    computing or approximating its first and second derivatives.

    Parameters
    ----------
    fun : callable
        evaluates the scalar function. Must be of the form ``fun(x, *args)``,
        where ``x`` is the argument in the form of a 1-D array and ``args`` is
        a tuple of any additional fixed parameters needed to completely specify
        the function. Should return a scalar.
    x0 : array-like
        Provides an initial set of variables for evaluating fun. Array of real
        elements of size (n,), where 'n' is the number of independent
        variables.
    args : tuple, optional
        Any additional fixed parameters needed to completely specify the scalar
        function.
    grad : {callable, '2-point', '3-point', 'cs'}
        Method for computing the gradient vector.
        If it is a callable, it should be a function that returns the gradient
        vector:

            ``grad(x, *args) -> array_like, shape (n,)``

        where ``x`` is an array with shape (n,) and ``args`` is a tuple with
        the fixed parameters.
        Alternatively, the keywords {'2-point', '3-point', 'cs'} can be used
        to select a finite difference scheme for numerical estimation of the
        gradient with a relative step size. These finite difference schemes
        obey any specified `bounds`.
    hess : {callable, '2-point', '3-point', 'cs', HessianUpdateStrategy}
        Method for computing the Hessian matrix. If it is callable, it should
        return the Hessian matrix:

            ``hess(x, *args) -> {LinearOperator, spmatrix, array}, (n, n)``

        where x is a (n,) ndarray and `args` is a tuple with the fixed
        parameters. Alternatively, the keywords {'2-point', '3-point', 'cs'}
        select a finite difference scheme for numerical estimation. Or, objects
        implementing the `HessianUpdateStrategy` interface can be used to
        approximate the Hessian.
        Whenever the gradient is estimated via finite-differences, the Hessian
        cannot be estimated with options {'2-point', '3-point', 'cs'} and needs
        to be estimated using one of the quasi-Newton strategies.
    finite_diff_rel_step : None or array_like
        Relative step size to use. The absolute step size is computed as
        ``h = finite_diff_rel_step * sign(x0) * max(1, abs(x0))``, possibly
        adjusted to fit into the bounds. For ``method='3-point'`` the sign
        of `h` is ignored. If None then finite_diff_rel_step is selected
        automatically.
    finite_diff_bounds : tuple of array_like
        Lower and upper bounds on independent variables. Defaults to no bounds,
        (-np.inf, np.inf). Each bound must match the size of `x0` or be a
        scalar; in the latter case the bound will be the same for all
        variables. Use it to limit the range of function evaluation.
    epsilon : None or array_like, optional
        Absolute step size to use, possibly adjusted to fit into the bounds.
        For ``method='3-point'`` the sign of `epsilon` is ignored. Relative
        steps are used by default; absolute steps are used only if
        ``epsilon is not None``.

    Notes
    -----
    This class implements a memoization logic. There are methods `fun`,
    `grad`, `hess` and corresponding attributes `f`, `g` and `H`. The following
    things should be considered:

        1. Use only public methods `fun`, `grad` and `hess`.
        2. After one of the methods is called, the corresponding attribute
           will be set. However, a subsequent call with a different argument
           of *any* of the methods may overwrite the attribute.
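
    Examples
    --------
    A minimal usage sketch, assuming the positional argument order
    ``(fun, x0, args, grad, hess, finite_diff_rel_step, finite_diff_bounds)``
    and using `scipy.optimize.rosen` and `rosen_der` as test functions:

    >>> import numpy as np
    >>> from scipy.optimize import rosen, rosen_der
    >>> from scipy.optimize._differentiable_functions import ScalarFunction
    >>> sf = ScalarFunction(rosen, np.array([0.5, 0.5]), (), rosen_der,
    ...                     '2-point', None, (-np.inf, np.inf))
    >>> float(sf.fun(np.array([0.5, 0.5])))
    6.5
    >>> sf.nfev  # memoized: the call above reused the value from __init__
    1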
    """

    def __init__(self, fun, x0, args, grad, hess, finite_diff_rel_step,
                 finite_diff_bounds, epsilon=None):
        if not callable(grad) and grad not in FD_METHODS:
            raise ValueError(
                f"`grad` must be either callable or one of {FD_METHODS}."
            )

        if not (callable(hess) or hess in FD_METHODS
                or isinstance(hess, HessianUpdateStrategy)):
            raise ValueError(
                f"`hess` must be either callable, HessianUpdateStrategy"
                f" or one of {FD_METHODS}."
            )

        if grad in FD_METHODS and hess in FD_METHODS:
            raise ValueError("Whenever the gradient is estimated via "
                             "finite-differences, we require the Hessian to "
                             "be estimated using one of the quasi-Newton "
                             "strategies.")

        self.xp = xp = array_namespace(x0)
        _x = atleast_nd(x0, ndim=1, xp=xp)
        _dtype = xp.float64
        if xp.isdtype(_x.dtype, "real floating"):
            _dtype = _x.dtype

        # promotes to floating
        self.x = xp.astype(_x, _dtype)
        self.x_dtype = _dtype
        self.n = self.x.size
        self.nfev = 0
        self.ngev = 0
        self.nhev = 0
        self.f_updated = False
        self.g_updated = False
        self.H_updated = False

        self._lowest_x = None
        self._lowest_f = np.inf

        finite_diff_options = {}
        if grad in FD_METHODS:
            finite_diff_options["method"] = grad
            finite_diff_options["rel_step"] = finite_diff_rel_step
            finite_diff_options["abs_step"] = epsilon
            finite_diff_options["bounds"] = finite_diff_bounds
        if hess in FD_METHODS:
            finite_diff_options["method"] = hess
            finite_diff_options["rel_step"] = finite_diff_rel_step
            finite_diff_options["abs_step"] = epsilon
            finite_diff_options["as_linear_operator"] = True

        # Function evaluation
        def fun_wrapped(x):
            self.nfev += 1
            # Send a copy because the user may overwrite it.
            fx = fun(np.copy(x), *args)
            # Make sure the function returns a true scalar.
            if not np.isscalar(fx):
                try:
                    fx = np.asarray(fx).item()
                except (TypeError, ValueError) as e:
                    raise ValueError(
                        "The user-provided objective function "
                        "must return a scalar value."
                    ) from e

            if fx < self._lowest_f:
                self._lowest_x = x
                self._lowest_f = fx

            return fx

        def update_fun():
            self.f = fun_wrapped(self.x)

        self._update_fun_impl = update_fun
        self._update_fun()

        # Gradient evaluation
        if callable(grad):
            def grad_wrapped(x):
                self.ngev += 1
                return np.atleast_1d(grad(np.copy(x), *args))

            def update_grad():
                self.g = grad_wrapped(self.x)

        elif grad in FD_METHODS:
            def update_grad():
                self._update_fun()
                self.ngev += 1
                self.g = approx_derivative(fun_wrapped, self.x, f0=self.f,
                                           **finite_diff_options)

        self._update_grad_impl = update_grad
        self._update_grad()

        # Hessian evaluation
        if callable(hess):
            self.H = hess(np.copy(x0), *args)
            self.H_updated = True
            self.nhev += 1

            if sps.issparse(self.H):
                def hess_wrapped(x):
                    self.nhev += 1
                    return sps.csr_matrix(hess(np.copy(x), *args))
                self.H = sps.csr_matrix(self.H)

            elif isinstance(self.H, LinearOperator):
                def hess_wrapped(x):
                    self.nhev += 1
                    return hess(np.copy(x), *args)

            else:
                def hess_wrapped(x):
                    self.nhev += 1
                    return np.atleast_2d(np.asarray(hess(np.copy(x), *args)))
                self.H = np.atleast_2d(np.asarray(self.H))

            def update_hess():
                self.H = hess_wrapped(self.x)

        elif hess in FD_METHODS:
            def update_hess():
                self._update_grad()
                self.H = approx_derivative(grad_wrapped, self.x, f0=self.g,
                                           **finite_diff_options)
                return self.H

            update_hess()
            self.H_updated = True
        elif isinstance(hess, HessianUpdateStrategy):
            self.H = hess
            self.H.initialize(self.n, 'hess')
            self.H_updated = True
            self.x_prev = None
            self.g_prev = None

            def update_hess():
                self._update_grad()
                self.H.update(self.x - self.x_prev, self.g - self.g_prev)

        self._update_hess_impl = update_hess

        if isinstance(hess, HessianUpdateStrategy):
            def update_x(x):
                self._update_grad()
                self.x_prev = self.x
                self.g_prev = self.g
                # Ensure that self.x is a copy of x, not a reference,
                # otherwise the memoization does not work properly.
                _x = atleast_nd(x, ndim=1, xp=self.xp)
                self.x = self.xp.astype(_x, self.x_dtype)
                self.f_updated = False
                self.g_updated = False
                self.H_updated = False
                self._update_hess()
        else:
            def update_x(x):
                _x = atleast_nd(x, ndim=1, xp=self.xp)
                self.x = self.xp.astype(_x, self.x_dtype)
                self.f_updated = False
                self.g_updated = False
                self.H_updated = False

        self._update_x_impl = update_x

    def _update_fun(self):
        if not self.f_updated:
            self._update_fun_impl()
            self.f_updated = True

    def _update_grad(self):
        if not self.g_updated:
            self._update_grad_impl()
            self.g_updated = True

    def _update_hess(self):
        if not self.H_updated:
            self._update_hess_impl()
            self.H_updated = True

    def fun(self, x):
        if not np.array_equal(x, self.x):
            self._update_x_impl(x)
        self._update_fun()
        return self.f

    def grad(self, x):
        if not np.array_equal(x, self.x):
            self._update_x_impl(x)
        self._update_grad()
        return self.g

    def hess(self, x):
        if not np.array_equal(x, self.x):
            self._update_x_impl(x)
        self._update_hess()
        return self.H

    def fun_and_grad(self, x):
        if not np.array_equal(x, self.x):
            self._update_x_impl(x)
        self._update_fun()
        self._update_grad()
        return self.f, self.g


class VectorFunction:
    """Vector function and its derivatives.

    This class defines a vector function F: R^n->R^m and methods for
    computing or approximating its first and second derivatives.

    Notes
    -----
    This class implements a memoization logic. There are methods `fun`,
    `jac`, `hess` and corresponding attributes `f`, `J` and `H`. The following
    things should be considered:

        1. Use only public methods `fun`, `jac` and `hess`.
        2. After one of the methods is called, the corresponding attribute
           will be set. However, a subsequent call with a different argument
           of *any* of the methods may overwrite the attribute.
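
    Examples
    --------
    A minimal usage sketch, assuming the positional argument order used by
    ``__init__`` below and pairing a finite-difference Jacobian with a
    quasi-Newton Hessian approximation (`BFGS`):

    >>> import numpy as np
    >>> from scipy.optimize import BFGS
    >>> from scipy.optimize._differentiable_functions import VectorFunction
    >>> def fun(x):
    ...     return np.array([x[0]**2 + x[1], x[0] - x[1]**2])
    >>> vf = VectorFunction(fun, np.array([1.0, 2.0]), '2-point', BFGS(),
    ...                     None, None, (-np.inf, np.inf), None)
    >>> vf.fun(np.array([1.0, 2.0]))
    array([ 3., -3.])
    >>> vf.jac(np.array([1.0, 2.0])).shape
    (2, 2)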
    """

    def __init__(self, fun, x0, jac, hess,
                 finite_diff_rel_step, finite_diff_jac_sparsity,
                 finite_diff_bounds, sparse_jacobian):
        if not callable(jac) and jac not in FD_METHODS:
            raise ValueError(
                f"`jac` must be either callable or one of {FD_METHODS}."
            )

        if not (callable(hess) or hess in FD_METHODS
                or isinstance(hess, HessianUpdateStrategy)):
            raise ValueError(
                f"`hess` must be either callable, HessianUpdateStrategy"
                f" or one of {FD_METHODS}."
            )

        if jac in FD_METHODS and hess in FD_METHODS:
            raise ValueError("Whenever the Jacobian is estimated via "
                             "finite-differences, we require the Hessian to "
                             "be estimated using one of the quasi-Newton "
                             "strategies.")

        self.xp = xp = array_namespace(x0)
        _x = atleast_nd(x0, ndim=1, xp=xp)
        _dtype = xp.float64
        if xp.isdtype(_x.dtype, "real floating"):
            _dtype = _x.dtype

        # promotes to floating
        self.x = xp.astype(_x, _dtype)
        self.x_dtype = _dtype
        self.n = self.x.size
        self.nfev = 0
        self.njev = 0
        self.nhev = 0
        self.f_updated = False
        self.J_updated = False
        self.H_updated = False

        finite_diff_options = {}
        if jac in FD_METHODS:
            finite_diff_options["method"] = jac
            finite_diff_options["rel_step"] = finite_diff_rel_step
            if finite_diff_jac_sparsity is not None:
                sparsity_groups = group_columns(finite_diff_jac_sparsity)
                finite_diff_options["sparsity"] = (finite_diff_jac_sparsity,
                                                   sparsity_groups)
            finite_diff_options["bounds"] = finite_diff_bounds
            self.x_diff = np.copy(self.x)
        if hess in FD_METHODS:
            finite_diff_options["method"] = hess
            finite_diff_options["rel_step"] = finite_diff_rel_step
            finite_diff_options["as_linear_operator"] = True
            self.x_diff = np.copy(self.x)

        # Function evaluation
        def fun_wrapped(x):
            self.nfev += 1
            return np.atleast_1d(fun(x))

        def update_fun():
            self.f = fun_wrapped(self.x)

        self._update_fun_impl = update_fun
        update_fun()

        self.v = np.zeros_like(self.f)
        self.m = self.v.size

        # Jacobian evaluation
        if callable(jac):
            self.J = jac(self.x)
            self.J_updated = True
            self.njev += 1

            if (sparse_jacobian or
                    sparse_jacobian is None and sps.issparse(self.J)):
                def jac_wrapped(x):
                    self.njev += 1
                    return sps.csr_matrix(jac(x))
                self.J = sps.csr_matrix(self.J)
                self.sparse_jacobian = True
            elif sps.issparse(self.J):
                def jac_wrapped(x):
                    self.njev += 1
                    return jac(x).toarray()
                self.J = self.J.toarray()
                self.sparse_jacobian = False
            else:
                def jac_wrapped(x):
                    self.njev += 1
                    return np.atleast_2d(jac(x))
                self.J = np.atleast_2d(self.J)
                self.sparse_jacobian = False

            def update_jac():
                self.J = jac_wrapped(self.x)

        elif jac in FD_METHODS:
            self.J = approx_derivative(fun_wrapped, self.x, f0=self.f,
                                       **finite_diff_options)
            self.J_updated = True

            if (sparse_jacobian or
                    sparse_jacobian is None and sps.issparse(self.J)):
                def update_jac():
                    self._update_fun()
                    self.J = sps.csr_matrix(
                        approx_derivative(fun_wrapped, self.x, f0=self.f,
                                          **finite_diff_options))
                self.J = sps.csr_matrix(self.J)
                self.sparse_jacobian = True
            elif sps.issparse(self.J):
                def update_jac():
                    self._update_fun()
                    self.J = approx_derivative(fun_wrapped, self.x, f0=self.f,
                                               **finite_diff_options).toarray()
                self.J = self.J.toarray()
                self.sparse_jacobian = False
            else:
                def update_jac():
                    self._update_fun()
                    self.J = np.atleast_2d(
                        approx_derivative(fun_wrapped, self.x, f0=self.f,
                                          **finite_diff_options))
                self.J = np.atleast_2d(self.J)
                self.sparse_jacobian = False

        self._update_jac_impl = update_jac

        # Hessian evaluation
        if callable(hess):
            self.H = hess(self.x, self.v)
            self.H_updated = True
            self.nhev += 1

            if sps.issparse(self.H):
                def hess_wrapped(x, v):
                    self.nhev += 1
                    return sps.csr_matrix(hess(x, v))
                self.H = sps.csr_matrix(self.H)
            elif isinstance(self.H, LinearOperator):
                def hess_wrapped(x, v):
                    self.nhev += 1
                    return hess(x, v)
            else:
                def hess_wrapped(x, v):
                    self.nhev += 1
                    return np.atleast_2d(np.asarray(hess(x, v)))
                self.H = np.atleast_2d(np.asarray(self.H))

            def update_hess():
                self.H = hess_wrapped(self.x, self.v)
        elif hess in FD_METHODS:
            def jac_dot_v(x, v):
                return jac_wrapped(x).T.dot(v)

            def update_hess():
                self._update_jac()
                self.H = approx_derivative(jac_dot_v, self.x,
                                           f0=self.J.T.dot(self.v),
                                           args=(self.v,),
                                           **finite_diff_options)

            update_hess()
            self.H_updated = True
        elif isinstance(hess, HessianUpdateStrategy):
            self.H = hess
            self.H.initialize(self.n, 'hess')
            self.H_updated = True
            self.x_prev = None
            self.J_prev = None

            def update_hess():
                self._update_jac()
                # When v is updated before x was updated, then x_prev and
                # J_prev are None and we need this check.
                if self.x_prev is not None and self.J_prev is not None:
                    delta_x = self.x - self.x_prev
                    delta_g = self.J.T.dot(self.v) - self.J_prev.T.dot(self.v)
                    self.H.update(delta_x, delta_g)

        self._update_hess_impl = update_hess

        if isinstance(hess, HessianUpdateStrategy):
            def update_x(x):
                self._update_jac()
                self.x_prev = self.x
                self.J_prev = self.J
                _x = atleast_nd(x, ndim=1, xp=self.xp)
                self.x = self.xp.astype(_x, self.x_dtype)
                self.f_updated = False
                self.J_updated = False
                self.H_updated = False
                self._update_hess()
        else:
            def update_x(x):
                _x = atleast_nd(x, ndim=1, xp=self.xp)
                self.x = self.xp.astype(_x, self.x_dtype)
                self.f_updated = False
                self.J_updated = False
                self.H_updated = False

        self._update_x_impl = update_x

    def _update_v(self, v):
        if not np.array_equal(v, self.v):
            self.v = v
            self.H_updated = False

    def _update_x(self, x):
        if not np.array_equal(x, self.x):
            self._update_x_impl(x)

    def _update_fun(self):
        if not self.f_updated:
            self._update_fun_impl()
            self.f_updated = True

    def _update_jac(self):
        if not self.J_updated:
            self._update_jac_impl()
            self.J_updated = True

    def _update_hess(self):
        if not self.H_updated:
            self._update_hess_impl()
            self.H_updated = True

    def fun(self, x):
        self._update_x(x)
        self._update_fun()
        return self.f

    def jac(self, x):
        self._update_x(x)
        self._update_jac()
        return self.J

    def hess(self, x, v):
        # v should be updated before x.
        self._update_v(v)
        self._update_x(x)
        self._update_hess()
        return self.H


class LinearVectorFunction:
    """Linear vector function and its derivatives.

    Defines a linear function F = A x, where x is an n-dimensional vector and
    A is an m-by-n matrix. The Jacobian is constant and equals A. The Hessian
    is identically zero and is returned as a csr matrix.
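
    Examples
    --------
    A minimal usage sketch; the third constructor argument is
    `sparse_jacobian`, and passing None keeps the representation of ``A``:

    >>> import numpy as np
    >>> from scipy.optimize._differentiable_functions import (
    ...     LinearVectorFunction)
    >>> A = np.array([[1.0, 2.0], [3.0, 4.0]])
    >>> lvf = LinearVectorFunction(A, np.array([1.0, 1.0]), None)
    >>> lvf.fun(np.array([1.0, 1.0]))
    array([3., 7.])
    >>> lvf.jac(np.array([1.0, 1.0])) is lvf.J
    True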
    """

    def __init__(self, A, x0, sparse_jacobian):
        if sparse_jacobian or sparse_jacobian is None and sps.issparse(A):
            self.J = sps.csr_matrix(A)
            self.sparse_jacobian = True
        elif sps.issparse(A):
            self.J = A.toarray()
            self.sparse_jacobian = False
        else:
            # np.asarray makes sure A is ndarray and not matrix
            self.J = np.atleast_2d(np.asarray(A))
            self.sparse_jacobian = False

        self.m, self.n = self.J.shape

        self.xp = xp = array_namespace(x0)
        _x = atleast_nd(x0, ndim=1, xp=xp)
        _dtype = xp.float64
        if xp.isdtype(_x.dtype, "real floating"):
            _dtype = _x.dtype

        # promotes to floating
        self.x = xp.astype(_x, _dtype)
        self.x_dtype = _dtype

        self.f = self.J.dot(self.x)
        self.f_updated = True

        self.v = np.zeros(self.m, dtype=float)
        self.H = sps.csr_matrix((self.n, self.n))

    def _update_x(self, x):
        if not np.array_equal(x, self.x):
            _x = atleast_nd(x, ndim=1, xp=self.xp)
            self.x = self.xp.astype(_x, self.x_dtype)
            self.f_updated = False

    def fun(self, x):
        self._update_x(x)
        if not self.f_updated:
            self.f = self.J.dot(x)
            self.f_updated = True
        return self.f

    def jac(self, x):
        self._update_x(x)
        return self.J

    def hess(self, x, v):
        self._update_x(x)
        self.v = v
        return self.H


class IdentityVectorFunction(LinearVectorFunction):
    """Identity vector function and its derivatives.

    The Jacobian is the identity matrix, returned as a dense array when
    `sparse_jacobian=False` and as a csr matrix otherwise. The Hessian is
    identically zero and is returned as a csr matrix.
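
    Examples
    --------
    A minimal usage sketch; passing None for `sparse_jacobian` defaults to a
    sparse (csr) identity Jacobian:

    >>> import numpy as np
    >>> from scipy.optimize._differentiable_functions import (
    ...     IdentityVectorFunction)
    >>> ivf = IdentityVectorFunction(np.array([1.0, 2.0, 3.0]), None)
    >>> ivf.sparse_jacobian
    True
    >>> ivf.fun(np.array([1.0, 2.0, 3.0]))
    array([1., 2., 3.])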
    """

    def __init__(self, x0, sparse_jacobian):
        n = len(x0)
        if sparse_jacobian or sparse_jacobian is None:
            A = sps.eye(n, format='csr')
            sparse_jacobian = True
        else:
            A = np.eye(n)
            sparse_jacobian = False
        super().__init__(A, x0, sparse_jacobian)