Commit ecd4109a authored by Stephan Seitz

Add module mapping pystencils_autodiff -> pystencils.autodiff

parent 6a3ad276
import sys
import pystencils_autodiff.backends # NOQA
from pystencils_autodiff._field_to_tensors import ( # NOQA
    tf_constant_from_field, tf_placeholder_from_field, tf_scalar_variable_from_field,
@@ -15,3 +17,6 @@ __all__ = ['backends',
           "tf_constant_from_field", "tf_placeholder_from_field",
           "tf_scalar_variable_from_field", "tf_variable_from_field",
           "torch_tensor_from_field"]
sys.modules['pystencils.autodiff'] = pystencils_autodiff
sys.modules['pystencils.autodiff.backends'] = pystencils_autodiff.backends
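
The two sys.modules assignments register the already-imported package under an alias, so the dotted name pystencils.autodiff resolves even though no such subpackage exists on disk (callers still have to import pystencils_autodiff once so the mapping gets installed). A minimal, self-contained sketch of that aliasing technique, using throwaway module names rather than the real packages:

import importlib
import sys
import types

# Stand-in for a package that was imported the normal way (hypothetical name).
autodiff_pkg = types.ModuleType('toy_autodiff')
autodiff_pkg.AVAILABLE_BACKENDS = ['tensorflow', 'torch']

# Same trick as in the commit: register the module object under an alias so the
# dotted name resolves without a matching package on disk.
sys.modules['toy_parent.autodiff'] = autodiff_pkg

aliased = importlib.import_module('toy_parent.autodiff')
assert aliased is autodiff_pkg          # same module object, just a second name
print(aliased.AVAILABLE_BACKENDS)       # ['tensorflow', 'torch']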
"""
Backends for operators to support automatic Differentation
Currently, we can use pystencils' JIT compilation to register
a Torch or a Tensorflow operation or we can compile a static
library to be directly loaded into Torch/Tensorflow.
"""
AVAILABLE_BACKENDS = ['tensorflow', 'torch', 'tensorflow_cpp', 'torch_native']
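
AVAILABLE_BACKENDS is the list a caller can check a requested backend name against before dispatching to the Torch/TensorFlow glue code. A minimal sketch of such a validation step (the helper function below is hypothetical and not part of the package's API):

AVAILABLE_BACKENDS = ['tensorflow', 'torch', 'tensorflow_cpp', 'torch_native']

def check_backend(backend):
    """Return the backend name if it is supported, otherwise raise a helpful error."""
    if backend not in AVAILABLE_BACKENDS:
        raise ValueError(
            "Unknown backend '%s'; expected one of %s" % (backend, AVAILABLE_BACKENDS))
    return backend

print(check_backend('torch'))   # torch
# check_backend('jax')          # would raise ValueError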