Skip to content

Commit

Permalink
switch to automatic differentiation framework for dQ and dE
Browse files Browse the repository at this point in the history
  • Loading branch information
lehner committed Jul 8, 2024
1 parent 3a5d777 commit 98e6f3c
Show file tree
Hide file tree
Showing 8 changed files with 67 additions and 581 deletions.
22 changes: 13 additions & 9 deletions lib/gpt/ad/reverse/node.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,27 +69,31 @@ def gradient(self, fields, dfields):
for i in indices:
self.arguments[i].gradient = None
self.arguments[i].with_gradient = True
for i in range(len(fields)):
assert fields[i] not in [a.value for a in self.arguments]
self.arguments[i].value @= fields[i]
self.node()
return [self.arguments[i].gradient for i in indices]


def str_traverse(node, indent=0):
    """Return a human-readable, indented string rendering of an AD graph.

    Leaf nodes (whose ``_forward`` is not callable) print as
    ``leaf(<container>)``; interior nodes print their ``_tag`` (or, if the
    tag is None, the string form of their ``_forward`` callable) followed by
    their children, each indented one additional space per recursion level.
    """
    # NOTE: the diff view duplicated the pre/post versions of three lines
    # (" "*indent vs " " * indent, indent+1 vs indent + 1); this is the
    # deduplicated, post-commit form.
    if not callable(node._forward):
        # Leaf: no forward function, just show the container description.
        return (" " * indent) + "leaf(" + str(node._container) + ")"
    else:
        pre = " " * indent
        if node._tag is not None:
            tag = node._tag
        else:
            # Fall back to the forward callable's repr when untagged.
            tag = str(node._forward)
        ret = pre + "(" + tag + "):"
        for x in node._children:
            ret = ret + "\n" + str_traverse(x, indent + 1)
        return ret


# gctr = 0


class node_base(base):
foundation = foundation

Expand All @@ -102,7 +106,7 @@ def __init__(
with_gradient=True,
infinitesimal_to_cartesian=True,
_container=None,
_tag=None
_tag=None,
):
# global gctr
# gctr+=1
Expand Down Expand Up @@ -189,8 +193,8 @@ def setter(y, z):
return x.project(getter, setter)

def project(x, getter, setter):
assert False # for future use
assert False # for future use

def _forward():
return getter(x.value)

Expand Down Expand Up @@ -282,9 +286,9 @@ def backward(self, nodes, first_gradient, initial_gradient):
raise Exception(
"Expression evaluates to a field. Gradient calculation is not unique."
)
#if isinstance(self._container[0], complex) and abs(self.value.imag) > 1e-12 * abs(
# if isinstance(self._container[0], complex) and abs(self.value.imag) > 1e-12 * abs(
# self.value.real
#):
# ):
# raise Exception(
# f"Expression does not evaluate to a real number ({self.value}). Gradient calculation is not unique."
# )
Expand Down
5 changes: 5 additions & 0 deletions lib/gpt/core/foundation/tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,8 @@ def component_multiply(a, b):
res = a.new()
res.array = numpy.multiply(a.array, b.array)
return res


def copy(a, b):
    """Copy tensor data element-wise from list *b* into list *a*.

    Each destination entry's underlying numpy ``array`` buffer is
    overwritten in place with the contents of the matching source entry;
    the destination objects themselves are reused, not replaced.
    """
    for index, destination in enumerate(a):
        destination.array[:] = b[index].array[:]
9 changes: 6 additions & 3 deletions lib/gpt/qcd/gauge/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,12 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gpt.qcd.gauge.create import random, unit
from gpt.qcd.gauge.loops import rectangle, field_strength
from gpt.qcd.gauge.loops import (
rectangle,
field_strength,
differentiable_topology,
differentiable_energy_density,
)
from gpt.qcd.gauge.topology import topological_charge_5LI
from gpt.qcd.gauge.staples import staple
from gpt.qcd.gauge.transformation import transformed
Expand All @@ -26,8 +31,6 @@
staple_sum,
energy_density,
topological_charge,
differentiable_topology,
projected_topology_gradient,
algebra_laplace,
)
import gpt.qcd.gauge.project
Expand Down
30 changes: 30 additions & 0 deletions lib/gpt/qcd/gauge/loops.py
Original file line number Diff line number Diff line change
Expand Up @@ -170,3 +170,33 @@ def field_strength(U, mu, nu):
F = g.eval(U[mu] * v + g.cshift(v * U[mu], mu, -1))
F = 0.125 * (F - g.adj(F))
return F


def differentiable_topology(aU):
    """Topological charge Q of the gauge field *aU*, built from
    differentiable expressions so it can be fed to the reverse-mode
    AD framework (dQ via gradients).

    Q = 1/(32 pi^2) * 2 * sum_x tr(B_i E_i) with the clover field
    strength; the factor 8/(32 pi^2) absorbs the normalization.
    """
    # Chromo-magnetic components B = (F_yz, F_zx, F_xy) ...
    magnetic = [
        field_strength(aU, 1, 2),
        field_strength(aU, 2, 0),
        field_strength(aU, 0, 1),
    ]
    # ... and chromo-electric components E = (F_tx, F_ty, F_tz).
    electric = [
        field_strength(aU, 3, 0),
        field_strength(aU, 3, 1),
        field_strength(aU, 3, 2),
    ]

    density = None
    for b_i, e_i in zip(magnetic, electric):
        term = g.trace(b_i * e_i)
        density = term if density is None else density + term

    return g.sum(density) * (8.0 / (32.0 * np.pi**2))


def differentiable_energy_density(aU):
    """Energy density of the gauge field *aU* as a differentiable
    expression, the AD-compatible counterpart of the stencil-based
    ``energy_density`` (dE via gradients).

    Accumulates tr(F_{mu nu} F_{mu nu}) over all distinct planes
    nu < mu and normalizes by the global lattice volume.
    """
    lattice = aU[0].grid
    # All ordered plane pairs with nu < mu, in the same order as the
    # equivalent nested double loop.
    planes = [(mu, nu) for mu in range(len(aU)) for nu in range(mu)]

    accumulated = None
    for mu, nu in planes:
        F = field_strength(aU, mu, nu)
        contribution = F * F
        if accumulated is None:
            accumulated = contribution
        else:
            accumulated += contribution

    return (-1.0 / lattice.gsites) * g.sum(g.trace(accumulated))
2 changes: 1 addition & 1 deletion lib/gpt/qcd/gauge/stencil/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,5 @@
from gpt.qcd.gauge.stencil.plaquette import plaquette
from gpt.qcd.gauge.stencil.staple import staple_sum
from gpt.qcd.gauge.stencil.energy_density import energy_density
from gpt.qcd.gauge.stencil.topology import topological_charge, differentiable_topology, projected_topology_gradient
from gpt.qcd.gauge.stencil.topology import topological_charge
from gpt.qcd.gauge.stencil.algebra_laplace import algebra_laplace
Loading

0 comments on commit 98e6f3c

Please sign in to comment.