17 """Uses the Python Interior Point Solver (PIPS) to solve QP (quadratic
18 programming) problems.
19 """
20
21 from numpy import Inf, ones, zeros, dot
22
23 from scipy.sparse import csr_matrix as sparse
24
25 from pips import pips
26
27
def qps_pips(H, c, A, l, u, xmin=None, xmax=None, x0=None, opt=None):
29 """Uses the Python Interior Point Solver (PIPS) to solve the following
30 QP (quadratic programming) problem::
31
32 min 1/2 x'*H*x + C'*x
33 x
34
35 subject to::
36
37 l <= A*x <= u (linear constraints)
38 xmin <= x <= xmax (variable bounds)
39
40 Note the calling syntax is almost identical to that of QUADPROG from
41 MathWorks' Optimization Toolbox. The main difference is that the linear
42 constraints are specified with C{A}, C{L}, C{U} instead of C{A}, C{B},
43 C{Aeq}, C{Beq}.
44
45 Example from U{http://www.uc.edu/sashtml/iml/chap8/sect12.htm}:
46
47 >>> from numpy import array, zeros, Inf
48 >>> from scipy.sparse import csr_matrix
49 >>> H = csr_matrix(array([[1003.1, 4.3, 6.3, 5.9],
50 ... [4.3, 2.2, 2.1, 3.9],
51 ... [6.3, 2.1, 3.5, 4.8],
52 ... [5.9, 3.9, 4.8, 10 ]]))
53 >>> c = zeros(4)
54 >>> A = csr_matrix(array([[1, 1, 1, 1 ],
55 ... [0.17, 0.11, 0.10, 0.18]]))
56 >>> l = array([1, 0.10])
57 >>> u = array([1, Inf])
58 >>> xmin = zeros(4)
59 >>> xmax = None
60 >>> x0 = array([1, 0, 0, 1])
61 >>> solution = qps_pips(H, c, A, l, u, xmin, xmax, x0)
62 >>> round(solution["f"], 11) == 1.09666678128
63 True
64 >>> solution["converged"]
65 True
66 >>> solution["output"]["iterations"]
67 10
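
    Solver behaviour can be tuned through the C{opt} dictionary described
    below. A minimal sketch, continuing the example above with illustrative
    (non-default) values::

        opt = {'verbose': False, 'feastol': 1e-8, 'max_it': 100}
        solution = qps_pips(H, c, A, l, u, xmin, xmax, x0, opt)

    Multipliers on the constraints are returned in C{solution["lmbda"]},
    e.g. C{solution["lmbda"]["mu_l"]} holds those on the lower limits of
    the linear constraints.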

    All parameters are optional except C{H}, C{c}, C{A} and C{l} or C{u}.
    @param H: Quadratic cost coefficients.
    @type H: csr_matrix
    @param c: vector of linear cost coefficients
    @type c: array
    @param A: Optional linear constraint matrix.
    @type A: csr_matrix
    @param l: Optional lower bounds on the linear constraints M{A*x}.
        Default values are M{-Inf}.
    @type l: array
    @param u: Optional upper bounds on the linear constraints M{A*x}.
        Default values are M{Inf}.
    @type u: array
    @param xmin: Optional lower bounds on the M{x} variables, defaults are
        M{-Inf}.
    @type xmin: array
    @param xmax: Optional upper bounds on the M{x} variables, defaults are
        M{Inf}.
    @type xmax: array
    @param x0: Starting value of optimization vector M{x}.
    @type x0: array
    @param opt: optional options dictionary with the following keys, all of
        which are also optional (default values shown in parentheses)
            - C{verbose} (False) - Controls level of progress output
              displayed
            - C{feastol} (1e-6) - termination tolerance for feasibility
              condition
            - C{gradtol} (1e-6) - termination tolerance for gradient
              condition
            - C{comptol} (1e-6) - termination tolerance for
              complementarity condition
            - C{costtol} (1e-6) - termination tolerance for cost
              condition
            - C{max_it} (150) - maximum number of iterations
            - C{step_control} (False) - set to True to enable step-size
              control
            - C{max_red} (20) - maximum number of step-size reductions if
              step-control is on
            - C{cost_mult} (1.0) - cost multiplier used to scale the
              objective function for improved conditioning. Note: The
              same value must also be passed to the Hessian evaluation
              function so that it can appropriately scale the objective
              function term in the Hessian of the Lagrangian.
    @type opt: dict

    @rtype: dict
    @return: The solution dictionary has the following keys:
        - C{x} - solution vector
        - C{f} - final objective function value
        - C{converged} - exit status
            - True = first order optimality conditions satisfied
            - False = maximum number of iterations reached
            - None = numerically failed
        - C{output} - output dictionary with keys:
            - C{iterations} - number of iterations performed
            - C{hist} - dictionary of arrays with trajectories of the
              following: feascond, gradcond, compcond, costcond, gamma,
              stepsize, obj, alphap, alphad
            - C{message} - exit message
        - C{lmbda} - dictionary containing the Lagrange and Kuhn-Tucker
          multipliers on the constraints, with keys:
            - C{eqnonlin} - nonlinear equality constraints
            - C{ineqnonlin} - nonlinear inequality constraints
            - C{mu_l} - lower (left-hand) limit on linear constraints
            - C{mu_u} - upper (right-hand) limit on linear constraints
            - C{lower} - lower bound on optimization variables
            - C{upper} - upper bound on optimization variables

    @see: L{pips}

    @author: Ray Zimmerman (PSERC Cornell)
    @author: Richard Lincoln
    """
    if isinstance(H, dict):
        p = H
    else:
        p = {'H': H, 'c': c, 'A': A, 'l': l, 'u': u}
        if xmin is not None: p['xmin'] = xmin
        if xmax is not None: p['xmax'] = xmax
        if x0 is not None: p['x0'] = x0
        if opt is not None: p['opt'] = opt

    if 'H' not in p or p['H'] is None:
        # LP case: no quadratic term was given, so the problem dimension
        # must be inferred from the constraints or the variable bounds.
        if (p['A'] is None or p['A'].nnz == 0) and \
                'xmin' not in p and 'xmax' not in p:
            print('qps_pips: LP problem must include constraints or variable bounds')
            return
        else:
            if p['A'] is not None and p['A'].nnz > 0:
                nx = p['A'].shape[1]
            elif 'xmin' in p and len(p['xmin']) > 0:
                nx = p['xmin'].shape[0]
            elif 'xmax' in p and len(p['xmax']) > 0:
                nx = p['xmax'].shape[0]
        p['H'] = sparse((nx, nx))
    else:
        nx = p['H'].shape[0]

    p['xmin'] = -Inf * ones(nx) if 'xmin' not in p else p['xmin']
    p['xmax'] = Inf * ones(nx) if 'xmax' not in p else p['xmax']

    p['c'] = zeros(nx) if 'c' not in p or p['c'] is None else p['c']

    p['x0'] = zeros(nx) if 'x0' not in p else p['x0']

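    # Wrap the constant QP data in a callback with the signature PIPS
    # expects for the objective: given x, return the objective value and
    # gradient, plus the (constant) Hessian when requested.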
    def qp_f(x, return_hessian=False):
        f = 0.5 * dot(x * p['H'], x) + dot(p['c'], x)
        df = p['H'] * x + p['c']
        if not return_hessian:
            return f, df
        d2f = p['H']
        return f, df, d2f

    p['f_fcn'] = qp_f

    sol = pips(p)

    return sol


if __name__ == "__main__":
    import doctest
    doctest.testmod()