File: tao.py

package info (click to toggle)
petsc4py 3.24.3-1
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 3,612 kB
  • sloc: python: 13,569; ansic: 1,768; makefile: 345; f90: 313; sh: 14
file content (42 lines) | stat: -rw-r--r-- 1,404 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# The user-defined Python class implementing the gradient descent.
from petsc4py import PETSc

class myGradientDescent:
    """Python-class TAO solver implementing plain gradient descent.

    Each iteration takes a fixed-length step of 0.2 along the negative
    gradient, delegating the actual update to a UNIT TAOLineSearch.
    """

    def create(self, tao):
        """Build the constant-step (UNIT) line search used by solve()."""
        ls = PETSc.TAOLineSearch().create(comm=PETSc.COMM_SELF)
        ls.useTAORoutine(tao)
        ls.setType(PETSc.TAOLineSearch.Type.UNIT)
        ls.setInitialStepLength(0.2)
        self._ls = ls

    def solve(self, tao):
        """Run the gradient-descent loop until TAO reports convergence."""
        # Solution vector and gradient workspace owned by TAO.
        sol = tao.getSolution()
        grad = tao.getGradient()[0]

        # Scratch vector holding the steepest-descent direction.
        direction = grad.copy()

        max_its = tao.getMaximumIterations()
        for k in range(max_its):
            tao.setIterationNumber(k)

            # Steepest descent: direction = -grad f(sol).
            tao.computeGradient(sol, grad)
            grad.copy(direction)
            direction.scale(-1)

            # Fixed step via the UNIT line search: sol += 0.2 * direction.
            fval, step, status = self._ls.apply(sol, grad, direction)

            if status < 0:
                raise RuntimeError('LS failed.')

            # Record objective value and gradient norm for monitoring.
            tao.monitor(f=fval, res=grad.norm())

            # Stop as soon as TAO's convergence test is satisfied.
            if tao.checkConverged() > 0:
                break