How to resolve the ValueError "expected square matrix" in a constrained minimization with Python's 'trust-constr' method?

Problem description

I want to determine the coefficients of a least-squares problem under the constraints that the coefficients sum to 1 and lie between 0 and 1. The function to be minimized is implemented in 'forecast_combination', and the goal is to determine x. However, the 'trust-constr' method factorizes the Jacobian of the constraints (https://docs.scipy.org/doc/scipy/reference/optimize.minimize-trustconstr.html), and this raises the ValueError "expected square matrix". What should I change?

from scipy.optimize import LinearConstraint
from scipy.optimize import minimize
import numpy as np
import pandas as pd

def combination_jacobian(x, vDataOut, YPred1, YPred2):
    jac= np.zeros_like(x)
    jac[0] = np.sum(-2*np.multiply(YPred1, vDataOut - x[0]*YPred1 - x[1]*YPred2))
    jac[1] = np.sum(-2*np.multiply(YPred2, vDataOut - x[0]*YPred1 - x[1]*YPred2))
    print(jac)
    return jac

def combination_hessian(x, vDataOut, YPred1, YPred2):
    hess = np.zeros((len(x), len(x)))
    hess[0,0] = 2*np.sum(YPred1**2)
    hess[0,1] = 2*np.sum(YPred1*YPred2)
    hess[1,0] = 2*np.sum(YPred1*YPred2)
    hess[1,1] = 2*np.sum(YPred2**2)
    print(hess)
    return hess


def forecast_combination(x, vDataOut, YPred1, YPred2):
    return np.sum((vDataOut - x[0]*YPred1 - x[1]*YPred2)**2)

vDataOut = pd.DataFrame(np.ones((100, 1))+2).values
YPred1 = pd.DataFrame(np.ones((100, 1))+1).values
YPred2 = pd.DataFrame(np.ones((100, 1))+7).values

constrained_least_squares = minimize(
    fun=forecast_combination, x0=np.array([0.5, 0.5]), method='trust-constr',
    args=(vDataOut, YPred1, YPred2),
    jac=combination_jacobian, hess=combination_hessian,
    constraints=[LinearConstraint([[1, 1]], [1], [1])],
    bounds=([0, 0], [1, 1]),
    options={'factorization_method': None})
weights = constrained_least_squares.x

More details can be found at https://scipy.github.io/devdocs/tutorial/optimize.html#trust-region-constrained-algorithm-method-trust-constr.

Using the factorization method 'SVDFactorization' instead, no error is raised, but the returned weights are just the initial guess (so they cannot be the least-squares weights).
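For reference, that workaround only changes the options argument of the otherwise identical call (a sketch, assuming the functions and data defined above are in scope):

# Same minimize call as above, but with the constraint Jacobian factorized via SVD;
# this finishes without the ValueError, yet the returned weights stay at the
# initial guess [0.5, 0.5].
constrained_least_squares = minimize(
    fun=forecast_combination, x0=np.array([0.5, 0.5]), method='trust-constr',
    args=(vDataOut, YPred1, YPred2),
    jac=combination_jacobian, hess=combination_hessian,
    constraints=[LinearConstraint([[1, 1]], [1], [1])],
    bounds=([0, 0], [1, 1]),
    options={'factorization_method': 'SVDFactorization'})

The full traceback of the original call (with factorization_method=None) is: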

ValueError                                Traceback (most recent call last)
<ipython-input-48-cbfef6ae97b2> in <module>
----> 6 constrained_least_squares = minimize(fun = forecast_combination, x0 = np.array([0.5, 0.5]), method='trust-constr', args = (vDataOut, YPred1, YPred2), jac = combination_jacobian, hess = combination_hessian, constraints=[LinearConstraint([[1, 1]], [1], [1])],bounds=([0, 0], [1,1]), options={'factorization_method': None})


~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\optimize\_minimize.py in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options)
    620         return _minimize_trustregion_constr(fun, x0, args, jac, hess, hessp,
    621                                             bounds, constraints,
--> 622                                             callback=callback, **options)
    623     elif meth == 'dogleg':
    624         return _minimize_dogleg(fun, x0, args, jac, hess,

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\optimize\_trustregion_constr\minimize_trustregion_constr.py in _minimize_trustregion_constr(fun, x0, args, grad, hess, hessp, bounds, constraints, xtol, gtol, barrier_tol, sparse_jacobian, callback, maxiter, verbose, finite_diff_rel_step, initial_constr_penalty, initial_tr_radius, initial_barrier_parameter, initial_barrier_tolerance, factorization_method, disp)
    503             stop_criteria, state,
    504             initial_constr_penalty, initial_tr_radius,
--> 505             factorization_method)
    506 
    507     elif method == 'tr_interior_point':

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\optimize\_trustregion_constr\equality_constrained_sqp.py in equality_constrained_sqp(fun_and_constr, grad_and_jac, lagr_hess, x0, fun0, grad0, constr0, jac0, stop_criteria, state, initial_penalty, initial_trust_radius, factorization_method, trust_lb, trust_ub, scaling)
     79     S = scaling(x)
     80     # Get projections
---> 81     Z, LS, Y = projections(A, factorization_method)
     82     # Compute least-square lagrange multipliers
     83     v = -LS.dot(c)

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\optimize\_trustregion_constr\projections.py in projections(A, method, orth_tol, max_refin, tol)
    400             = svd_factorization_projections(A, m, n, orth_tol, max_refin, tol)
    401 
--> 402     Z = LinearOperator((n, n), null_space)
    403     LS = LinearOperator((m, n), least_squares)
    404     Y = LinearOperator((n, m), row_space)

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\sparse\linalg\interface.py in __init__(self, shape, matvec, rmatvec, matmat, dtype, rmatmat)
    516         self.__matmat_impl = matmat
    517 
--> 518         self._init_dtype()
    519 
    520     def _matmat(self, X):

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\sparse\linalg\interface.py in _init_dtype(self)
    173         if self.dtype is None:
    174             v = np.zeros(self.shape[-1])
--> 175             self.dtype = np.asarray(self.matvec(v)).dtype
    176 
    177     def _matmat(self, X):

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\sparse\linalg\interface.py in matvec(self, x)
    227             raise ValueError('dimension mismatch')
    228 
--> 229         y = self._matvec(x)
    230 
    231         if isinstance(x, np.matrix):

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\sparse\linalg\interface.py in _matvec(self, x)
    525 
    526     def _matvec(self, x):
--> 527         return self.__matvec_impl(x)
    528 
    529     def _rmatvec(self, x):

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\optimize\_trustregion_constr\projections.py in null_space(x)
    191         # v = P inv(R) Q.T x
    192         aux1 = Q.T.dot(x)
--> 193         aux2 = scipy.linalg.solve_triangular(R, aux1, lower=False)
    194         v = np.zeros(m)
    195         v[P] = aux2

~\AppData\Local\Continuum\anaconda3\envs\thesis\lib\site-packages\scipy\linalg\basic.py in solve_triangular(a, b, trans, lower, unit_diagonal, overwrite_b, debug, check_finite)
    336     b1 = _asarray_validated(b, check_finite=check_finite)
    337     if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
--> 338         raise ValueError('expected square matrix')
    339     if a1.shape[0] != b1.shape[0]:
    340         raise ValueError('incompatible dimensions')

ValueError: expected square matrix

Tags: python, constraints, least-squares, minimize, factorization

Solution


I do not know what the exact problem is, but I would like to suggest a major change. You are fitting x[0] and x[1], i.e. 2 parameters. Since you are forcing x[0] + x[1] = 1, the problem really has only a single parameter. You should solve for x[0] only and use x[1] = 1 - x[0]. That simplifies your task considerably.
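A minimal sketch of that reformulation, assuming the forecast_combination function and the data arrays from the question are already defined; minimize_scalar with the bounded method is one way to keep the remaining parameter between 0 and 1:

from scipy.optimize import minimize_scalar

def combination_loss(w, vDataOut, YPred1, YPred2):
    # x[0] = w and x[1] = 1 - w, so the sum-to-one constraint holds by construction
    return forecast_combination([w, 1.0 - w], vDataOut, YPred1, YPred2)

res = minimize_scalar(combination_loss, bounds=(0.0, 1.0), method='bounded',
                      args=(vDataOut, YPred1, YPred2))
weights = np.array([res.x, 1.0 - res.x])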

Edit:

I changed LinearConstraint([[1, 1]], [1], [1]) to LinearConstraint([[1, 1]], [0.99], [1.01]) and the error disappeared.
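For reference, the call with the relaxed constraint looks like this (everything else is unchanged from the question):

# Equality constraint x[0] + x[1] = 1 relaxed to a narrow interval around 1
constrained_least_squares = minimize(
    fun=forecast_combination, x0=np.array([0.5, 0.5]), method='trust-constr',
    args=(vDataOut, YPred1, YPred2),
    jac=combination_jacobian, hess=combination_hessian,
    constraints=[LinearConstraint([[1, 1]], [0.99], [1.01])],
    bounds=([0, 0], [1, 1]),
    options={'factorization_method': None})
weights = constrained_least_squares.x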

