Commit 5387de05 authored by Nicolas Mansard, committed by Nicolas Mansard

[APP] added scipy_optim.

parent d96e350e
'''
Example of use of the optimization toolbox of SciPy.
The functions optimized here are meaningless and are only given
as examples. They ***are not*** related to the robotic models.
'''
# %jupyter_snippet import
import numpy as np
from scipy.optimize import fmin_bfgs, fmin_slsqp
# %end_jupyter_snippet
# %jupyter_snippet cost
def cost(x):
    '''Cost f(x,y) = x^2 + 2y^2 - 2xy - 2x'''
    x0 = x[0]
    x1 = x[1]
    return -1 * (2 * x0 * x1 + 2 * x0 - x0**2 - 2 * x1**2)
# %end_jupyter_snippet
# %jupyter_snippet constraints
def constraint_eq(x):
    '''Constraint x^3 = y'''
    return np.array([x[0]**3 - x[1]])


def constraint_ineq(x):
    '''Constraint x>=2, y>=2'''
    return np.array([x[0] - 2, x[1] - 2])
# %end_jupyter_snippet
# %jupyter_snippet callback
class CallbackLogger:
    def __init__(self):
        self.nfeval = 1

    def __call__(self, x):
        print('===CBK=== {0:4d} {1: 3.6f} {2: 3.6f}'.format(self.nfeval, x[0], x[1]))
        self.nfeval += 1
# %end_jupyter_snippet
# %jupyter_snippet bfgs
x0 = np.array([0.0, 0.0])
# Optimize cost without any constraints in BFGS, with traces.
xopt_bfgs = fmin_bfgs(cost, x0, callback=CallbackLogger())
print('\n *** Xopt in BFGS = %s \n\n\n\n' % str(xopt_bfgs))
# %end_jupyter_snippet
# %jupyter_snippet without
# Optimize the cost without constraints using SLSQP.
xopt_lsq = fmin_slsqp(cost, [-1.0, 1.0], iprint=2, full_output=1)
print('\n *** Xopt in LSQ = %s \n\n\n\n' % str(xopt_lsq))
# %end_jupyter_snippet
# %jupyter_snippet with
# Optimize the cost with equality and inequality constraints using SLSQP.
xopt_clsq = fmin_slsqp(cost, [-1.0, 1.0], f_eqcons=constraint_eq, f_ieqcons=constraint_ineq, iprint=2, full_output=1)
print('\n *** Xopt in c-lsq = %s \n\n\n\n' % str(xopt_clsq))
# %end_jupyter_snippet
%% Cell type:markdown id: tags:
# Optimizers in SciPy
This notebook is a very brief introduction to SciPy optimizers, documenting the example appendix/scipy_optim.py.
%% Cell type:markdown id: tags:
There are several optimizers in SciPy, gathered in the module `scipy.optimize`. You can install them with `pip install scipy`.
You may find the user manual of this module at https://docs.scipy.org/doc/scipy/tutorial/optimize.html#tutorial-sqlsp.
In this series of notebooks, we mostly use BFGS, a quasi-Newton algorithm for unconstrained problems, and SLSQP, a sequential quadratic programming solver accepting both equality and inequality constraints.
We will then need the two `fmin` functions from the `scipy.optimize` module, as well as `numpy` to represent algebraic vectors.
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_import
import numpy as np
from scipy.optimize import fmin_bfgs, fmin_slsqp
```
%% Cell type:markdown id: tags:
These optimizers generally follow a similar API, taking as main arguments the cost function `f` to optimize and the initial guess `x0`, and optionally a callback function `callback` and some constraints.
The cost should be defined as a function mapping a point $x$ of the parameter space to a real value $f(x)$. Here is a simple polynomial example for $x \in \mathbb{R}^2$:
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_cost
def cost(x):
    '''Cost f(x,y) = x^2 + 2y^2 - 2xy - 2x'''
    x0 = x[0]
    x1 = x[1]
    return -1 * (2 * x0 * x1 + 2 * x0 - x0**2 - 2 * x1**2)
```
%% Cell type:markdown id: tags:
The callback takes the same argument but returns nothing: it acts only by side effect, for example printing something, displaying some information in a viewer or on a plot, or storing data in a logger. Here is, for example, a callback written as a callable object (functor), which makes it easy to adjust its behavior or store some data.
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_callback
class CallbackLogger:
    def __init__(self):
        self.nfeval = 1

    def __call__(self, x):
        print('===CBK=== {0:4d} {1: 3.6f} {2: 3.6f}'.format(self.nfeval, x[0], x[1]))
        self.nfeval += 1
```
%% Cell type:markdown id: tags:
For BFGS, that's all we need, as it does not accept any additional constraints.
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_bfgs
x0 = np.array([0.0, 0.0])
# Optimize cost without any constraints in BFGS, with traces.
xopt_bfgs = fmin_bfgs(cost, x0, callback=CallbackLogger())
print('\n *** Xopt in BFGS = %s \n\n\n\n' % str(xopt_bfgs))
```
%% Output
===CBK=== 1 1.010000 -0.000000
===CBK=== 2 2.014799 1.009848
===CBK=== 3 2.000000 1.000000
Optimization terminated successfully.
Current function value: -2.000000
Iterations: 3
Function evaluations: 12
Gradient evaluations: 4
*** Xopt in BFGS = [1.99999977 0.99999994]
%% Cell type:markdown id: tags:
In that case, the gradient of the cost is computed by BFGS with finite differences, which is not very accurate and fairly expensive (several cost evaluations per gradient evaluation). If you can provide the derivatives yourself, it will greatly improve the result. Yet, as a first draft, it is generally not too bad.
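As an illustration (not part of the original script), the gradient of the polynomial cost above can be written in closed form and passed to `fmin_bfgs` through its `fprime` argument:
%% Cell type:code id: tags:
``` python
# Illustration only: analytical gradient of the cost defined above,
# passed to fmin_bfgs through fprime (relies on `cost` from the previous cells).
import numpy as np
from scipy.optimize import fmin_bfgs


def cost_gradient(x):
    '''Gradient of f(x,y) = x^2 + 2y^2 - 2xy - 2x.'''
    return np.array([2 * x[0] - 2 * x[1] - 2,  # df/dx
                     4 * x[1] - 2 * x[0]])     # df/dy


xopt_grad = fmin_bfgs(cost, np.array([0.0, 0.0]), fprime=cost_gradient)
print('Xopt with analytical gradient =', xopt_grad)
```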
%% Cell type:markdown id: tags:
For SLSQP, you can simply do the same.
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_without
# Optimize the cost without constraints using SLSQP.
xopt_lsq = fmin_slsqp(cost, [-1.0, 1.0], iprint=2, full_output=1)
print('\n *** Xopt in LSQ = %s \n\n\n\n' % str(xopt_lsq))
```
%% Output
NIT FC OBJFUN GNORM
1 4 1.150000E+02 8.485281E+00
2 8 -1.928000E+00 1.697056E+00
3 11 -2.000000E+00 3.394113E-01
4 13 -2.000000E+00 2.980232E-08
Optimization terminated successfully (Exit mode 0)
Current function value: -2.0
Iterations: 4
Function evaluations: 13
Gradient evaluations: 4
*** Xopt in LSQ = (array([1.99999999, 1. ]), -2.0, 4, 0, 'Optimization terminated successfully')
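%% Cell type:markdown id: tags:
As the output above shows, with `full_output=1` the solver returns more than the optimum itself: `fmin_slsqp` stacks the optimum, the final cost, the number of iterations, the exit mode and the exit message in a tuple. As a side note (not part of the original script), it can be unpacked as follows.
%% Cell type:code id: tags:
``` python
# Illustration only: unpacking the tuple returned by fmin_slsqp when full_output=1
# (optimum, final cost, number of iterations, exit mode, exit message).
xopt, fopt, n_iter, exit_mode, message = xopt_lsq
print(f'x* = {xopt}, f(x*) = {fopt}, {n_iter} iterations, status: {message}')
```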
%% Cell type:markdown id: tags:
Now, SLSQP can also handle explicit constraints. Equality and inequality constraints must be given separately, as functions mapping the parameter $x$ to a vector stacking all the corresponding numerical quantities, which must be zero for equalities and nonnegative for inequalities.
We introduce here, as an example, two sets of polynomial constraints.
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_constraints
def constraint_eq(x):
    '''Constraint x^3 = y'''
    return np.array([x[0]**3 - x[1]])


def constraint_ineq(x):
    '''Constraint x>=2, y>=2'''
    return np.array([x[0] - 2, x[1] - 2])
```
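%% Cell type:markdown id: tags:
These two functions are then passed to the solver through the `f_eqcons` and `f_ieqcons` arguments, as done at the end of the example script (cell reproduced here from the script for completeness).
%% Cell type:code id: tags:
``` python
# %load appendix/generated/scipy_optim_with
# Optimize the cost with equality and inequality constraints using SLSQP.
xopt_clsq = fmin_slsqp(cost, [-1.0, 1.0], f_eqcons=constraint_eq, f_ieqcons=constraint_ineq, iprint=2, full_output=1)
print('\n *** Xopt in c-lsq = %s \n\n\n\n' % str(xopt_clsq))
```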
@@ -5,18 +5,20 @@ from pathlib import Path
 import json
 
 hashtags = ['jupyter_snippet']
 #hashtags = [ 'do_load', 'do_not_load', 'load' ]
 
-def generate(tp_number: int):
-    tp = Path() / f'tp{tp_number}'
-    ipynb = next(Path().glob(f'{tp_number}_*.ipynb'))
-    print(f'processing {ipynb} & {tp}')
+def generate_from_id(tp_id : int):
+    folder = Path() / f'tp{tp_id}'
+    ipynb = next(Path().glob(f'{tp_id}_*.ipynb'))
+    generate(ipynb,folder)
+
+def generate(ipynb, folder):
+    print(f'processing {ipynb} with scripts in {folder}')
     with ipynb.open() as f:
         data = json.load(f)
     cells_copy = data['cells'].copy()
-    generated = tp / 'generated'
+    generated = folder / 'generated'
     generated.mkdir(exist_ok=True)
-    for filename in tp.glob('*.py'):
+    for filename in folder.glob('*.py'):
         print(f'  processing {filename}')
         content = []
         hidden = False
@@ -34,6 +36,7 @@ def generate(tp_number: int):
         with dest.open('w') as f_out:
             f_out.write(''.join(content))
         for cell_number, cell in enumerate(cells_copy):
+            if len(cell['source'])==0: continue
             if cell['source'][0].endswith(f'%load {dest}'):
                 data['cells'][cell_number]['source'] = [f'# %load {dest}\n'] + content
             #if f'%do_not_load {dest}' in cell['source'][0]:
@@ -48,5 +51,8 @@ def generate(tp_number: int):
 
 
 if __name__ == '__main__':
-    for tp_number in [0,2]:
-        generate(tp_number)
+    for tp_number in [0,1,2]:
+        generate_from_id(tp_number)
+
+    for app in [ 'appendix_scipy_optimizers']:
+        generate(next(Path().glob(app+'.ipynb')),Path()/'appendix')