CONOPT
tutorial2.py
import os
import sys

import conoptpy
import adolc

sys.path.append('../../common/')
import std

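# This tutorial builds a small nonlinear model in four variables, referred to
# below as L, Inp, Out and P.  The free objective row collects P*Out - L - Inp,
# one equality row defines Out through a CES-style aggregate of L, Inp and the
# constant K, and a second equality row imposes the linear relation
# Out + 2*P = 4.  The nonlinear terms are supplied through ADOL-C tapes rather
# than hand-coded derivative callbacks.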
class TutModelData(conoptpy.ModelData):
    def __init__(self):
        self.Al = 0.16
        self.Ak = 2.0
        self.Ainp = 0.16
        self.Rho = 1.0
        self.K = 4.0
        super().__init__()

    def buildModel(self):
        """
        adding the variables and constraints to the model
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        """
        # adding the variables to the model
        self.addVariable(0.1, conoptpy.CONOPT_INF, 0.5)
        self.addVariable(0.1, conoptpy.CONOPT_INF, 0.5)
        self.addVariable(0.0, conoptpy.CONOPT_INF)
        self.addVariable(0.0, conoptpy.CONOPT_INF)
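        # The four variables are, in order, L, Inp, Out and P, matching how
        # they are unpacked in tapeFunction below.  The addVariable arguments
        # appear to be the lower bound, the upper bound and an optional
        # starting point.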

        # adding the constraints to the model
        self.addConstraint(conoptpy.ConstraintType_Free, -0.1, [0, 1, 2, 3],
                           [-1, -1, 0, 0], [0, 0, 1, 1])
        self.addConstraint(conoptpy.ConstraintType_Eq, 0.0, [0, 1, 2], [0, 0, -1],
                           [1, 1, 0])
        self.addConstraint(conoptpy.ConstraintType_Eq, 4.0, [2, 3], [1, 2],
                           [0, 0])
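        # Each addConstraint call appears to take the constraint type, the
        # right-hand side, the variable indices appearing in the row, their
        # linear coefficients and flags marking which of those variables enter
        # the row nonlinearly.  The nonlinear parts themselves come from the
        # taped functions: row 0 adds P*Out to -L - Inp, row 1 equates Out with
        # a CES aggregate of L, K and Inp, and row 2 is purely linear
        # (Out + 2*P = 4).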

        # setting the objective constraint
        self.setObjectiveElement(conoptpy.ObjectiveElement_Constraint, 0)

        # setting the optimisation direction
        self.setOptimizationSense(conoptpy.Sense_Maximize)

        # setting the structure of the second derivative of the Lagrangian
        self.setSDLagrangianStructure([0, 1, 1, 3], [0, 0, 1, 2])
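        # The two lists give the variable index pairs of the lower-triangular
        # nonzeros of the Lagrangian Hessian: (0,0), (1,0) and (1,1) come from
        # the CES term in row 1 (second derivatives with respect to L and Inp),
        # and (3,2) is the cross derivative of the bilinear P*Out term in the
        # objective row.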

        # initialising the automatic differentiation
        self.initialiseAutoDiff()

    def tapeFunction(self, x, rowno):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        evaluates the nonlinear function and records a tape if necessary

        @param x current point to be evaluated
        @param rowno the index of the constraint. This is also used for the trace tag.
        """
        adolc.trace_on(rowno)
        ax = adolc.as_adouble(x)

        # marking the x variables as independent
        for item in iter(ax):
            item.declareIndependent()

        L = ax[0]
        Inp = ax[1]
        Out = ax[2]
        P = ax[3]

        ay = adolc.as_adouble(0)
        if rowno == 0:
            ay = P * Out
        elif rowno == 1:
            hold1 = (self.Al*pow(L, -self.Rho) + self.Ak*pow(self.K, -self.Rho)
                     + self.Ainp*pow(Inp, -self.Rho))
            hold2 = pow(hold1, -1./self.Rho)

            ay = hold2

        ay.declareDependent()
        adolc.trace_off()


    def initialiseAutoDiff(self):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        initialises the automatic differentiation
        """
        x = []
        for v in range(self.numVar()):
            x.append(self.getVariable(v).curr)

        for c in range(self.numCons()):
            self.tapeFunction(x, c)

    def evaluateNonlinearTerm(self, x, rowno, ignerr, thread):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        evaluates the nonlinear term of constraint rowno at the point x
        """
        try:
            g = adolc.function(rowno, x)[0]
        except adolc.BranchException:
            self.tapeFunction(x, rowno)
            g = adolc.function(rowno, x)[0]

        return g

    def evaluateNonlinearJacobian(self, x, rowno, jacnum, ignerr, thread):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        evaluates the Jacobian of the nonlinear term of constraint rowno at the
        point x, returning one entry per variable index listed in jacnum
        """
        jac = []
        try:
            jacres = adolc.gradient(rowno, x)
        except adolc.BranchException:
            self.tapeFunction(x, rowno)
            jacres = adolc.gradient(rowno, x)

        for i in jacnum:
            jac.append(jacres[i])

        return jac

    def evaluateSDLagrangian(self, x, u, hessianrow, hessiancol):
        """
        @ingroup PYTHON1THREAD_AD_TUTORIAL2
        evaluates the second derivatives of the Lagrangian at the point x with
        constraint multipliers u, for the sparsity pattern given by hessianrow
        and hessiancol
        """
        numhessian = len(hessianrow)
        hessian = [0 for i in range(numhessian)]

        for c in range(self.numCons()):
            try:
                hessres = adolc.hessian(c, x)
            except adolc.BranchException:
                self.tapeFunction(x, c)
                hessres = adolc.hessian(c, x)

            for i in range(numhessian):
                hessian[i] += u[c]*hessres[hessianrow[i]][hessiancol[i]]

        print(hessian)
        return hessian

if __name__ == "__main__":
    name = os.path.basename(__file__)[:-3]

    conopt = conoptpy.Conopt(name)
    model = TutModelData()
    msghdlr = std.TutMessageHandler(name)

    model.buildModel()

    conopt.loadModel(model)
    conopt.setMessageHandler(msghdlr)

    # getting the license variables
    license_int_1 = os.environ.get('CONOPT_LICENSE_INT_1', None)
    license_int_2 = os.environ.get('CONOPT_LICENSE_INT_2', None)
    license_int_3 = os.environ.get('CONOPT_LICENSE_INT_3', None)
    license_text = os.environ.get('CONOPT_LICENSE_TEXT', None)
    if license_int_1 is not None and license_int_2 is not None \
            and license_int_3 is not None and license_text is not None:
        conopt.setLicense(int(license_int_1), int(license_int_2),
                          int(license_int_3), license_text)

    coi_error = conopt.solve()

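    # std.checkSolve presumably checks the solution status and compares the
    # reported objective against the expected optimum of 0.572943 before
    # returning a code suitable for sys.exit.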
    retcode = std.checkSolve(conopt, 0.572943, coi_error)

    sys.exit(retcode)