
Commit

single precision CH exp
lehner committed Sep 20, 2024
1 parent 5115eed commit 39f54a6
Showing 1 changed file with 12 additions and 4 deletions.
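
In short: "CH exp" is the Cayley-Hamilton-based matrix exponential in lib/gpt/core/foundation/lattice/matrix/exp.py, and the commit teaches its function-and-gradient path to accept single-precision fields by promoting them to double precision, evaluating there, and converting the result back. Below is a minimal standalone sketch of that promote/compute/demote pattern in plain numpy — the function names and the eigendecomposition kernel are illustrative stand-ins, not gpt code; the actual change follows in the diff.

import numpy as np


def exp_ih_double(iQ):
    # stand-in kernel: exp(iQ) for an anti-Hermitian matrix iQ, computed via the
    # eigendecomposition of the Hermitian matrix Q = -i * iQ
    w, V = np.linalg.eigh(-1j * iQ)
    return (V * np.exp(1j * w)) @ V.conj().T


def exp_ih(iQ):
    # mirrors the control flow added in the diff: non-double input is promoted,
    # evaluated in double precision, and the result converted back
    if iQ.dtype != np.complex128:
        return exp_ih_double(iQ.astype(np.complex128)).astype(iQ.dtype)
    return exp_ih_double(iQ)


iQ = (1j * np.diag([0.3, -0.1, -0.2])).astype(np.complex64)  # toy anti-Hermitian input
U = exp_ih(iQ)  # evaluated in double precision, returned as complex64
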
lib/gpt/core/foundation/lattice/matrix/exp.py (12 additions, 4 deletions)
@@ -21,7 +21,7 @@
 import numpy as np
 
 
-c = None
+c = {}
 
 
 def cayley_hamilton_function_and_gradient_3(iQ, gradient_prime, c):
@@ -197,6 +197,14 @@ def function(i, cache=default_exp_cache):
 
 def function_and_gradient(x, dx):
     global c
-    if c is None:
-        c = g.compiler()
-    return cayley_hamilton_function_and_gradient(x, dx, c)
+
+    if x.grid.precision != g.double:
+        x_dp = g.convert(x, g.double)
+        dx_dp = g.convert(dx, g.double)
+        A, B = function_and_gradient(x_dp, dx_dp)
+        return g.convert(A, x.grid.precision), g.convert(B, x.grid.precision)
+
+    key = f"{x.otype.__name__};{x.grid}"
+    if key not in c:
+        c[key] = g.compiler()
+    return cayley_hamilton_function_and_gradient(x, dx, c[key])

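The other change in the diff, c = None becoming c = {}, turns the single cached g.compiler() object into a lazily filled dictionary keyed by f"{x.otype.__name__};{x.grid}". Presumably one compiled object per object type and grid is needed because the routine can now be entered with both the original grid and its double-precision counterpart. A small standalone sketch of that lazy per-key cache, with a placeholder Compiler class and toy key strings rather than gpt's actual types:

class Compiler:
    # placeholder for g.compiler(): construction is assumed to be expensive,
    # so it should happen at most once per (object type, grid) combination
    def __init__(self, key):
        self.key = key


cache = {}


def get_compiler(otype_name, grid_descr):
    # lazily create and reuse one compiler per key, analogous to c[key] above
    key = f"{otype_name};{grid_descr}"
    if key not in cache:
        cache[key] = Compiler(key)
    return cache[key]


# the same key compiles once and is then reused
a = get_compiler("su3_matrix", "8x8x8x8 single")  # toy names, not gpt identifiers
b = get_compiler("su3_matrix", "8x8x8x8 single")
assert a is b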