CONOPT
Loading...
Searching...
No Matches
tutorial2str.py
Go to the documentation of this file.
6
7
8
9import os
10import sys
11
12import conoptpy
13import adolc
14
15import numpy as np
16
17sys.path.append('../../common/')
18import std
19
20
class TutModelData(conoptpy.ModelData):
    """
    Model data for the CONOPT tutorial, using ADOL-C style taping for
    automatic differentiation of the nonlinear constraint terms.
    @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
    """

    def __init__(self):
        # CES production-function coefficients used by the nonlinear
        # constraint evaluated in tapeFunction().
        self.Al = 0.16    # labour coefficient
        self.Ak = 2.0     # capital coefficient
        self.Ainp = 0.16  # input coefficient
        self.Rho = 1.0    # substitution parameter
        self.K = 4.0      # fixed capital level
        super().__init__()

    def buildModel(self):
        """
        adding the variables and constraints to the model
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        """
        # adding the variables to the model:
        # index 0: L, 1: Inp, 2: Out, 3: P (see tapeFunction)
        self.addVariable(0.1, conoptpy.CONOPT_INF, 0.5)
        self.addVariable(0.1, conoptpy.CONOPT_INF, 0.5)
        self.addVariable(0.0, conoptpy.CONOPT_INF)
        self.addVariable(0.0, conoptpy.CONOPT_INF)

        # adding the constraints to the model
        self.addConstraint(conoptpy.ConstraintType_Free, -0.1, [0, 1, 2, 3],
                           [-1, -1, 0, 0], [0, 0, 1, 1])
        self.addConstraint(conoptpy.ConstraintType_Eq, 0.0, [0, 1, 2], [0, 0, -1],
                           [1, 1, 0])
        self.addConstraint(conoptpy.ConstraintType_Eq, 4.0, [2, 3], [1, 2],
                           [0, 0])

        # setting the objective constraint
        self.setObjectiveElement(conoptpy.ObjectiveElement_Constraint, 0)

        # setting the optimisation direction
        self.setOptimizationSense(conoptpy.Sense_Maximize)

        # initialising the automatic differentiation
        # (records a tape for every constraint at the current point)
        self.initialiseAutoDiff()

        # computes and sets the Hessian structure
        self.computeHessianStructure()

    def tapeFunction(self, x, rowno):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        evaluates the nonlinear function and records a tape if necessary

        @param x current point to be evaluated
        @param rowno the index of the constraint. This is also used for the trace tag.
        """
        adolc.trace_on(rowno)
        ax = adolc.as_adouble(x)

        # marking the x variables as independent
        for item in iter(ax):
            item.declareIndependent()

        L = ax[0]
        Inp = ax[1]
        Out = ax[2]
        P = ax[3]

        ay = adolc.as_adouble(0)
        if rowno == 0:
            # objective row: revenue P * Out
            ay = P * Out
        elif rowno == 1:
            # CES aggregate of L, the fixed capital K and Inp
            hold1 = (self.Al*pow(L,(-self.Rho)) + self.Ak*pow(self.K,(-self.Rho)) + self.Ainp*pow(Inp,(-self.Rho)))
            hold2 = pow(hold1,( -1./self.Rho ))

            ay = hold2

        ay.declareDependent()
        adolc.trace_off()

    def initialiseAutoDiff(self):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        initialises the automatic differentiation
        """
        # current point: one value per model variable
        x = []
        for v in range(self.numVar()):
            x.append(self.getVariable(v).curr)

        # record one tape per constraint
        for c in range(self.numCons()):
            self.tapeFunction(x, c)

    def computeHessianStructure(self):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        uses the automatic differentiation methods to compute the Hessian
        structure
        """
        x = []
        for v in range(self.numVar()):
            x.append(self.getVariable(v).curr)

        # lower-triangular Hessian pattern collected over all constraints,
        # keyed by row index
        hessstr = {}
        for c in range(self.numCons()):
            # hesspat[0][i]: nonzero count of row i;
            # hesspat[1]: the concatenated column indices
            hesspat = adolc.hess_pat(c, x, 0)

            index = 0
            for i, n in enumerate(hesspat[0]):
                for j in range(n):
                    # keep only the lower triangle (row >= col)
                    if i >= hesspat[1][index + j]:
                        if i not in hessstr:
                            hessstr[i] = []
                        hessstr[i].append(hesspat[1][index + j])

                index += n

        # NOTE(review): an entry present in several constraints is appended
        # once per constraint; presumably setSDLagrangianStructure accepts
        # duplicates — confirm against the CONOPT API.
        rowindex = [int(r) for r in range(self.numVar()) for _ in hessstr.get(r, [])]
        colindex = [int(c) for r in range(self.numVar()) for c in hessstr.get(r, [])]

        # setting the structure of the second derivative of the Lagrangian
        self.setSDLagrangianStructure(rowindex, colindex)

    def evaluateNonlinearTerm(self, x, rowno, ignerr, thread):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        evaluates constraint rowno at x from its recorded tape, re-taping
        when the tape's branch decisions no longer hold at x.
        """
        try:
            g = adolc.function(rowno, x)[0]
        except adolc.BranchException:
            # recorded control flow is invalid at x; re-record and retry
            self.tapeFunction(x, rowno)
            g = adolc.function(rowno, x)[0]

        return g

    def evaluateNonlinearJacobian(self, x, rowno, jacnum, ignerr, thread):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        returns the gradient entries of constraint rowno selected by jacnum.
        """
        jac = []
        try:
            jacres = adolc.gradient(rowno, x)
        except adolc.BranchException:
            self.tapeFunction(x, rowno)
            jacres = adolc.gradient(rowno, x)

        for i in jacnum:
            jac.append(jacres[i])

        return jac

    def evaluateSDLagrangian(self, x, u, hessianrow, hessiancol):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2STR
        evaluates the second derivative of the Lagrangian as the u-weighted
        sum of the constraint Hessians at the structure entries.
        """
        numhessian = len(hessianrow)
        hessian = [0 for i in range(numhessian)]

        for c in range(self.numCons()):
            try:
                hessres = adolc.hessian(c, x)
            except adolc.BranchException:
                # BUG FIX: was tapeFunction(x, rowno); rowno is undefined in
                # this method — the tape being re-recorded is constraint c.
                self.tapeFunction(x, c)
                hessres = adolc.hessian(c, x)

            for i in range(numhessian):
                hessian[i] += u[c]*hessres[hessianrow[i]][hessiancol[i]]

        return hessian
188
if __name__ == "__main__":
    # tutorial name: this file's basename without the ".py" suffix
    name = os.path.basename(__file__)[:-3]

    conopt = conoptpy.Conopt(name)
    model = TutModelData()
    msghdlr = std.TutMessageHandler(name)

    model.buildModel()

    conopt.loadModel(model)
    conopt.setMessageHandler(msghdlr)

    # apply a license from the environment only when every part is present
    license_keys = ('CONOPT_LICENSE_INT_1', 'CONOPT_LICENSE_INT_2',
                    'CONOPT_LICENSE_INT_3', 'CONOPT_LICENSE_TEXT')
    license_vals = [os.environ.get(k, None) for k in license_keys]
    if all(v is not None for v in license_vals):
        conopt.setLicense(int(license_vals[0]), int(license_vals[1]),
                          int(license_vals[2]), license_vals[3])

    coi_error = conopt.solve()

    # compare the solve result against the expected objective value
    retcode = std.checkSolve(conopt, 0.572943, coi_error)

    sys.exit(retcode)
static int checkSolve(String name, int model_status, int solution_status, double objective, double expected_objective, double tol)
Definition std.java:16
buildModel(self)
adding the variables and constraints to the model
evaluateNonlinearTerm(self, x, rowno, ignerr, thread)
initialiseAutoDiff(self)
initialises the automatic differentiation
tapeFunction(self, x, rowno)
evaluates the nonlinear function and records a tape if necessary
evaluateSDLagrangian(self, x, u, hessianrow, hessiancol)
evaluateNonlinearJacobian(self, x, rowno, jacnum, ignerr, thread)
computeHessianStructure(self)
uses the automatic differentiation methods to compute the Hessian structure