
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Showing with 926 additions and 2481 deletions
import sympy as sp

from pystencils.simp import (
    SimplificationStrategy, apply_to_all_assignments,
    insert_aliases, insert_zeros, insert_constants)


def create_phasefield_simplification_strategy(lb_method):
    s = SimplificationStrategy()

    expand = apply_to_all_assignments(sp.expand)

    s.add(expand)
    s.add(insert_zeros)
    s.add(insert_aliases)
    s.add(insert_constants)
    s.add(lambda ac: ac.new_without_unused_subexpressions())

    return s
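A minimal usage sketch of this factory (assuming the function above is in scope and that, as with other pystencils simplification strategies, the returned SimplificationStrategy can be called directly on an assignment collection; the SRT collision rule below is illustrative only):

import lbmpy

# Hedged sketch: build an example collision rule and run the strategy defined
# above on it. Method choice and relaxation rate are illustrative values.
cr = lbmpy.create_lb_collision_rule(
    lbm_config=lbmpy.LBMConfig(method=lbmpy.Method.SRT, relaxation_rate=1.8))
strategy = create_phasefield_simplification_strategy(cr.method)
simplified = strategy(cr)  # assumes SimplificationStrategy.__call__ applies the added rules
print(simplified)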
This diff is collapsed.
import pytest

import pystencils as ps
from lbmpy.creationfunctions import create_lb_function, LBMConfig
from lbmpy.enums import Method
from lbmpy.scenarios import create_lid_driven_cavity


@pytest.mark.parametrize('double_precision', [False, True])
@pytest.mark.parametrize('method_enum', [Method.SRT, Method.CENTRAL_MOMENT, Method.CUMULANT])
def test_creation(double_precision, method_enum):
    """Simple test that makes sure that only float variables are created"""
    lbm_config = LBMConfig(method=method_enum, relaxation_rate=1.5, compressible=True)
    config = ps.CreateKernelConfig(data_type="float64" if double_precision else "float32",
                                   default_number_float="float64" if double_precision else "float32")
    func = create_lb_function(lbm_config=lbm_config, config=config)
    code = ps.get_code_str(func)

    if double_precision:
        assert 'float' not in code
        assert 'double' in code
    else:
        assert 'double' not in code
        assert 'float' in code


@pytest.mark.parametrize('double_precision', [False, True])
@pytest.mark.parametrize('method_enum', [Method.SRT, Method.CENTRAL_MOMENT, Method.CUMULANT])
def test_scenario(method_enum, double_precision):
    lbm_config = LBMConfig(method=method_enum, relaxation_rate=1.45, compressible=True)
    config = ps.CreateKernelConfig(data_type="float64" if double_precision else "float32",
                                   default_number_float="float64" if double_precision else "float32")
    sc = create_lid_driven_cavity((16, 16, 8), lbm_config=lbm_config, config=config)
    sc.run(1)

    code = ps.get_code_str(sc.ast)
    if double_precision:
        assert 'float' not in code
        assert 'double' in code
    else:
        assert 'double' not in code
        assert 'float' in code
This diff is collapsed.
This diff is collapsed.
from __future__ import annotations

from typing import Sequence
from argparse import ArgumentParser

import os
import nox
import subprocess
import re

nox.options.sessions = ["lint", "typecheck"]


def get_cuda_version(session: nox.Session) -> None | tuple[int, ...]:
    query_args = ["nvcc", "--version"]

    try:
        query_result = subprocess.run(query_args, capture_output=True)
    except FileNotFoundError:
        return None

    matches = re.findall(r"release \d+\.\d+", str(query_result.stdout))
    if matches:
        match = matches[0]
        version_string = match.split()[-1]
        try:
            return tuple(int(v) for v in version_string.split("."))
        except ValueError:
            pass

    session.warn("nvcc was found, but I am unable to determine the CUDA version.")
    return None


def install_cupy(
    session: nox.Session, cupy_version: str, skip_if_no_cuda: bool = False
):
    if cupy_version is not None:
        cuda_version = get_cuda_version(session)
        if cuda_version is None or cuda_version[0] not in (11, 12):
            if skip_if_no_cuda:
                session.skip(
                    "No compatible installation of CUDA found - Need either CUDA 11 or 12"
                )
            else:
                session.warn(
                    "Running without cupy: no compatible installation of CUDA found. Need either CUDA 11 or 12."
                )
            return

        cuda_major = cuda_version[0]
        cupy_package = f"cupy-cuda{cuda_major}x=={cupy_version}"
        session.install(cupy_package)


def check_external_doc_dependencies(session: nox.Session):
    dot_args = ["dot", "--version"]
    try:
        _ = subprocess.run(dot_args, capture_output=True)
    except FileNotFoundError:
        session.error(
            "Unable to build documentation: "
            "Command `dot` from the `graphviz` package (https://www.graphviz.org/) is not available"
        )


def editable_install(session: nox.Session, opts: Sequence[str] = ()):
    if opts:
        opts_str = "[" + ",".join(opts) + "]"
    else:
        opts_str = ""
    session.install("-e", f".{opts_str}")


def install_pystencils_master(session: nox.Session):
    session.install("git+https://i10git.cs.fau.de/pycodegen/pystencils.git@master")


def install_sympy_master(session: nox.Session):
    session.install("--upgrade", "git+https://github.com/sympy/sympy.git@master")


@nox.session(python="3.10", tags=["qa", "code-quality"])
def lint(session: nox.Session):
    """Lint code using flake8"""
    session.install("flake8")
    session.run("flake8", "src/lbmpy")


@nox.session(python="3.10", tags=["qa", "code-quality"])
def typecheck(session: nox.Session):
    """Run MyPy for static type checking"""
    editable_install(session)
    session.install("mypy")
    session.run("mypy", "src/lbmpy")


def run_testsuite(session: nox.Session, coverage: bool = True):
    num_cores = os.cpu_count()

    args = [
        "pytest",
        "-v",
        "-n",
        str(num_cores),
        "-m",
        "not longrun",
        "--html",
        "test-report/index.html",
        "--junitxml=report.xml",
    ]

    if coverage:
        args += [
            "--cov-report=term",
            "--cov=.",
        ]

    session.run(*args)

    if coverage:
        session.run("coverage", "html")
        session.run("coverage", "xml")


@nox.session(python=["3.10", "3.11", "3.12", "3.13"])
def testsuite_cpu(session: nox.Session):
    install_pystencils_master(session)
    editable_install(session, ["alltrafos", "use_cython", "interactive", "tests"])
    run_testsuite(session, coverage=False)


@nox.session(python=["3.10", "3.11", "3.12", "3.13"])
@nox.parametrize("cupy_version", ["12", "13"], ids=["cupy12", "cupy13"])
def testsuite_gpu(session: nox.Session, cupy_version: str | None):
    install_cupy(session, cupy_version, skip_if_no_cuda=True)
    install_pystencils_master(session)
    editable_install(session, ["alltrafos", "use_cython", "interactive", "tests"])
    run_testsuite(session)


@nox.parametrize("cupy_version", [None, "12", "13"], ids=["cpu", "cupy12", "cupy13"])
@nox.session(python="3.10", tags=["test"])
def testsuite_pystencils2(session: nox.Session, cupy_version: str | None):
    if cupy_version is not None:
        install_cupy(session, cupy_version, skip_if_no_cuda=True)

    session.install(
        "git+https://i10git.cs.fau.de/pycodegen/pystencils.git@v2.0-dev"
    )
    editable_install(session, ["alltrafos", "use_cython", "interactive", "tests"])
    run_testsuite(session)


@nox.session
def quicktest(session: nox.Session):
    parser = ArgumentParser()
    parser.add_argument(
        "--sympy-master", action="store_true", help="Use latest SymPy master revision"
    )
    args = parser.parse_args(session.posargs)

    install_pystencils_master(session)
    editable_install(session)

    if args.sympy_master:
        install_sympy_master(session)

    session.run("python", "quicktest.py")
[project]
name = "lbmpy"
description = "Code Generation for Lattice Boltzmann Methods"
dynamic = ["version"]
readme = "README.md"
authors = [
{ name = "Martin Bauer" },
{ name = "Markus Holzer" },
{ name = "Frederik Hennig" },
{ email = "cs10-codegen@fau.de" },
]
license = { file = "COPYING.txt" }
requires-python = ">=3.10"
dependencies = ["pystencils>=1.3", "sympy>=1.12", "numpy>=1.8.0", "appdirs", "joblib", "packaging"]
classifiers = [
"Development Status :: 4 - Beta",
"Framework :: Jupyter",
"Topic :: Software Development :: Code Generators",
"Topic :: Scientific/Engineering :: Physics",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
]
[project.urls]
"Bug Tracker" = "https://i10git.cs.fau.de/pycodegen/lbmpy/-/issues"
"Documentation" = "https://pycodegen.pages.i10git.cs.fau.de/lbmpy/"
"Source Code" = "https://i10git.cs.fau.de/pycodegen/lbmpy"
[project.optional-dependencies]
gpu = ['cupy']
alltrafos = ['islpy', 'py-cpuinfo']
bench_db = ['blitzdb', 'pymongo', 'pandas']
interactive = [
'matplotlib',
'ipy_table',
'imageio',
'jupyter',
'pyevtk',
'rich',
'graphviz',
'scipy',
'scikit-image'
]
use_cython = [
'Cython'
]
doc = [
'sphinx',
'sphinx_rtd_theme',
'nbsphinx',
'sphinxcontrib-bibtex',
'sphinx_autodoc_typehints',
'pandoc',
]
tests = [
'pytest',
'pytest-cov',
'pytest-html',
'ansi2html',
'pytest-xdist',
'flake8',
'nbformat',
'nbconvert',
'ipython',
'randomgen>=1.18',
]
[build-system]
requires = [
"setuptools>=69",
"versioneer[toml]>=0.29",
]
build-backend = "setuptools.build_meta"
[tool.setuptools.package-data]
[tool.setuptools.packages.find]
where = ["src"]
include = ["lbmpy", "lbmpy.*"]
namespaces = false
[tool.versioneer]
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
VCS = "git"
style = "pep440"
versionfile_source = "src/lbmpy/_version.py"
versionfile_build = "lbmpy/_version.py"
tag_prefix = "release/"
parentdir_prefix = "lbmpy-"
[pytest]
testpaths = src tests doc/notebooks
pythonpath = src
python_files = test_*.py *_test.py scenario_*.py
norecursedirs = *.egg-info .git .cache .ipynb_checkpoints htmlcov
addopts = --doctest-modules --durations=20 --cov-config pytest.ini
addopts =
    --doctest-modules --durations=20
    --cov-config pytest.ini
    --ignore=src/lbmpy/custom_code_nodes.py
    --ignore=src/lbmpy/lookup_tables.py
    --ignore=src/lbmpy/phasefield_allen_cahn/contact_angle.py
markers =
    longrun: tests only run at night since they have large execution time
    notebook: jupyter notebooks
    notebook: mark for notebooks
# these warnings all come from third party libraries.
filterwarnings =
    ignore:the imp module is deprecated in favour of importlib:DeprecationWarning
@@ -14,15 +21,19 @@ filterwarnings =
[run]
branch = True
source = lbmpy
         lbmpy_tests
source = src/lbmpy
         tests
omit = doc/*
       lbmpy_tests/*
       tests/*
       setup.py
       conftest.py
       versioneer.py
       lbmpy/_version.py
       quicktest.py
       noxfile.py
       src/lbmpy/_version.py
       src/lbmpy/_compat.py
       venv/

[report]
exclude_lines =
@@ -35,6 +46,7 @@ exclude_lines =
    # Don't complain if tests don't hit defensive assertion code:
    raise AssertionError
    raise NotImplementedError
    NotImplementedError()
    #raise ValueError

    # Don't complain if non-runnable code isn't run:
@@ -43,7 +55,7 @@ exclude_lines =
    if __name__ == .__main__.:

skip_covered = True
fail_under = 88
fail_under = 87

[html]
directory = coverage_report
#!/usr/bin/env python3

from contextlib import redirect_stdout
import io
from tests.test_quicktests import (
    test_poiseuille_channel_quicktest,
    test_entropic_methods,
    test_cumulant_ldc
)

quick_tests = [
    test_poiseuille_channel_quicktest,
    test_entropic_methods,
    test_cumulant_ldc,
]

if __name__ == "__main__":
    print("Running lbmpy quicktests")
    for qt in quick_tests:
        print(f" -> {qt.__name__}")
        with redirect_stdout(io.StringIO()):
            qt()
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
VCS = git
style = pep440
versionfile_source = lbmpy/_version.py
versionfile_build = lbmpy/_version.py
tag_prefix = release/
parentdir_prefix = lbmpy-
\ No newline at end of file
import os
import io
from setuptools import setup, find_packages
import distutils
from contextlib import redirect_stdout
from importlib import import_module
from setuptools import setup, __version__ as setuptools_version
import versioneer
try:
import cython # noqa
USE_CYTHON = True
except ImportError:
USE_CYTHON = False
quick_tests = [
'test_quicktests.test_poiseuille_channel_quicktest',
'test_quicktests.test_entropic_methods',
'test_quicktests.test_cumulant_ldc',
]
class SimpleTestRunner(distutils.cmd.Command):
"""A custom command to run selected tests"""
description = 'run some quick tests'
user_options = []
@staticmethod
def _run_tests_in_module(test):
"""Short test runner function - to work also if py.test is not installed."""
test = 'lbmpy_tests.' + test
mod, function_name = test.rsplit('.', 1)
if isinstance(mod, str):
mod = import_module(mod)
if int(setuptools_version.split('.')[0]) < 61:
raise Exception(
"[ERROR] lbmpy requires at least setuptools version 61 to install.\n"
"If this error occurs during an installation via pip, it is likely that there is a conflict between "
"versions of setuptools installed by pip and the system package manager. "
"In this case, it is recommended to install lbmpy into a virtual environment instead."
)
func = getattr(mod, function_name)
with redirect_stdout(io.StringIO()):
func()
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
"""Run command."""
for test in quick_tests:
self._run_tests_in_module(test)
def readme():
with open('README.md') as f:
return f.read()
def cython_extensions(*extensions):
from distutils.extension import Extension
if USE_CYTHON:
ext = '.pyx'
result = [Extension(e, [os.path.join(*e.split(".")) + ext]) for e in extensions]
from Cython.Build import cythonize
result = cythonize(result, language_level=3)
return result
elif all([os.path.exists(os.path.join(*e.split(".")) + '.c') for e in extensions]):
ext = '.c'
result = [Extension(e, [os.path.join(*e.split(".")) + ext]) for e in extensions]
return result
else:
return None
import versioneer
def get_cmdclass():
cmdclass = {"quicktest": SimpleTestRunner}
cmdclass.update(versioneer.get_cmdclass())
return cmdclass
return versioneer.get_cmdclass()
major_version = versioneer.get_version().split("+")[0]
setup(name='lbmpy',
version=versioneer.get_version(),
description='Code Generation for Lattice Boltzmann Methods',
long_description=readme(),
long_description_content_type="text/markdown",
author='Martin Bauer, Markus Holzer, Frederik Hennig',
license='AGPLv3',
author_email='cs10-codegen@fau.de',
url='https://i10git.cs.fau.de/pycodegen/lbmpy/',
packages=['lbmpy'] + ['lbmpy.' + s for s in find_packages('lbmpy')],
install_requires=[f'pystencils>=0.4.0,<={major_version}', 'sympy>=1.5.1,<=1.10.1', 'numpy>=1.11.0'],
package_data={'lbmpy': ['phasefield/simplex_projection.pyx', 'phasefield/simplex_projection.c']},
ext_modules=cython_extensions("lbmpy.phasefield.simplex_projection"),
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Jupyter',
'Topic :: Software Development :: Code Generators',
'Topic :: Scientific/Engineering :: Physics',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)',
],
python_requires=">=3.8",
extras_require={
'gpu': ['pycuda'],
'opencl': ['pyopencl'],
'alltrafos': ['islpy', 'py-cpuinfo'],
'interactive': ['scipy', 'scikit-image', 'cython', 'matplotlib',
'ipy_table', 'imageio', 'jupyter', 'pyevtk'],
'doc': ['sphinx', 'sphinx_rtd_theme', 'nbsphinx',
'sphinxcontrib-bibtex', 'sphinx_autodoc_typehints', 'pandoc'],
'phasefield': ['Cython']
},
cmdclass=get_cmdclass()
)
setup(
version=versioneer.get_version(),
cmdclass=get_cmdclass(),
)
from .creationfunctions import (
    create_lb_ast,
    create_lb_collision_rule,
    create_lb_function,
    create_lb_method,
    create_lb_update_rule,
    LBMConfig,
    LBMOptimisation,
)
from .enums import Stencil, Method, ForceModel, CollisionSpace, SubgridScaleModel
from .lbstep import LatticeBoltzmannStep
from .macroscopic_value_kernels import (
    pdf_initialization_assignments,
    macroscopic_values_getter,
    strain_rate_tensor_getter,
    compile_macroscopic_values_getter,
    compile_macroscopic_values_setter,
    create_advanced_velocity_setter_collision_rule,
)
from .maxwellian_equilibrium import get_weights
from .relaxationrates import (
    relaxation_rate_from_lattice_viscosity,
    lattice_viscosity_from_relaxation_rate,
    relaxation_rate_from_magic_number,
)
from .scenarios import create_lid_driven_cavity, create_fully_periodic_flow
from .stencils import LBStencil

__all__ = [
    "create_lb_ast",
    "create_lb_collision_rule",
    "create_lb_function",
    "create_lb_method",
    "create_lb_update_rule",
    "LBMConfig",
    "LBMOptimisation",
    "Stencil",
    "Method",
    "ForceModel",
    "CollisionSpace",
    "SubgridScaleModel",
    "LatticeBoltzmannStep",
    "pdf_initialization_assignments",
    "macroscopic_values_getter",
    "strain_rate_tensor_getter",
    "compile_macroscopic_values_getter",
    "compile_macroscopic_values_setter",
    "create_advanced_velocity_setter_collision_rule",
    "get_weights",
    "relaxation_rate_from_lattice_viscosity",
    "lattice_viscosity_from_relaxation_rate",
    "relaxation_rate_from_magic_number",
    "create_lid_driven_cavity",
    "create_fully_periodic_flow",
    "LBStencil",
]

from . import _version
__version__ = _version.get_versions()['version']
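As an aside, a brief sketch of the public API re-exported above (assumes a working lbmpy installation; domain size, relaxation rate, and step count are illustrative values):

import lbmpy

# Hedged example: set up and run a small lid-driven cavity using only names
# re-exported by the package __init__ above. All numbers are illustrative.
cfg = lbmpy.LBMConfig(method=lbmpy.Method.SRT, relaxation_rate=1.9)
ldc = lbmpy.create_lid_driven_cavity((32, 32), lbm_config=cfg)
ldc.run(100)

# Relaxation rate 1.9 corresponds to this lattice viscosity:
print(lbmpy.lattice_viscosity_from_relaxation_rate(1.9))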
from pystencils import __version__ as ps_version

# Determine if we're running pystencils 1.x or 2.x
version_tokes = ps_version.split(".")
PYSTENCILS_VERSION_MAJOR = int(version_tokes[0])
IS_PYSTENCILS_2 = PYSTENCILS_VERSION_MAJOR == 2

if IS_PYSTENCILS_2:
    from pystencils.defaults import DEFAULTS

    def get_loop_counter_symbol(coord: int):
        return DEFAULTS.spatial_counters[coord]

    def get_supported_instruction_sets():
        from pystencils import Target

        vector_targets = Target.available_vector_cpu_targets()
        isas = []
        for target in vector_targets:
            tokens = target.name.split("_")
            isas.append(tokens[-1].lower())
        return isas

else:
    from pystencils.backends.simd_instruction_sets import (
        get_supported_instruction_sets as get_supported_instruction_sets_,
    )

    get_supported_instruction_sets = get_supported_instruction_sets_

    def get_loop_counter_symbol(coord: int):
        from pystencils.astnodes import LoopOverCoordinate

        return LoopOverCoordinate.get_loop_counter_symbol(coord)


def import_guard_pystencils1(feature):
    if IS_PYSTENCILS_2:
        raise ImportError(
            f"The following feature is not yet available when running pystencils 2.x: {feature}"
        )
    return True
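A minimal sketch of how this compatibility layer is meant to be consumed (assuming lbmpy is importable; the printed values depend on the locally installed pystencils version):

from lbmpy._compat import (
    IS_PYSTENCILS_2, get_loop_counter_symbol, get_supported_instruction_sets)

# Hedged example: query the shim. Output differs between pystencils 1.x and 2.x.
print("running pystencils 2.x:", IS_PYSTENCILS_2)
print("x-coordinate loop counter symbol:", get_loop_counter_symbol(0))
print("supported SIMD instruction sets:", get_supported_instruction_sets())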
from .indexing import BetweenTimestepsIndexing, NeighbourOffsetArrays
from .indexing import BetweenTimestepsIndexing
from .communication import get_communication_slices, LBMPeriodicityHandling
from .utility import Timestep, get_accessor, is_inplace, get_timesteps, \
    numeric_index, numeric_offsets, inverse_dir_index, AccessPdfValues

__all__ = ['BetweenTimestepsIndexing', 'NeighbourOffsetArrays',
__all__ = ['BetweenTimestepsIndexing',
           'get_communication_slices', 'LBMPeriodicityHandling',
           'Timestep', 'get_accessor', 'is_inplace', 'get_timesteps',
           'numeric_index', 'numeric_offsets', 'inverse_dir_index', 'AccessPdfValues']
@@ -2,18 +2,20 @@ import numpy as np
import sympy as sp
import pystencils as ps
from pystencils.typing import TypedSymbol, create_type
from pystencils.backends.cbackend import CustomCodeNode

from .._compat import IS_PYSTENCILS_2
from lbmpy.advanced_streaming.utility import get_accessor, inverse_dir_index, is_inplace, Timestep

if IS_PYSTENCILS_2:
    from pystencils import TypedSymbol, create_type
    from pystencils.types.quick import Arr
    from lbmpy.lookup_tables import TranslationArraysNode
else:
    from pystencils.typing import TypedSymbol, create_type
    from ..custom_code_nodes import TranslationArraysNode

from lbmpy.advanced_streaming.utility import get_accessor, inverse_dir_index, is_inplace, Timestep

from itertools import product


def _array_pattern(dtype, name, content):
    return f"const {str(dtype)} {name} [] = {{ {','.join(str(c) for c in content)} }}; \n"


class BetweenTimestepsIndexing:
    # ==============================================
@@ -30,7 +32,7 @@ class BetweenTimestepsIndexing:
    @property
    def inverse_dir_symbol(self):
        """Symbol denoting the inversion of a PDF field index.
        """Symbol denoting the inversion of a PDF field index.
        Use only at top-level of index to f_out or f_in, otherwise it can't be correctly replaced."""
        return sp.IndexedBase('invdir')
@@ -69,13 +71,21 @@ class BetweenTimestepsIndexing:
        assert f_dir in ['in', 'out']
        inv = '_inv' if inverse else ''
        name = f"f_{f_dir}{inv}_dir_idx"
        return TypedSymbol(name, self._index_dtype)
        if IS_PYSTENCILS_2:
            return TypedSymbol(name, Arr(self._index_dtype, self._q))
        else:
            return TypedSymbol(name, self._index_dtype)

    def _offset_array_symbols(self, f_dir, inverse):
        assert f_dir in ['in', 'out']
        inv = '_inv' if inverse else ''
        name_base = f"f_{f_dir}{inv}_offsets_"
        symbols = [TypedSymbol(name_base + d, self._index_dtype) for d in self._coordinate_names]
        if IS_PYSTENCILS_2:
            symbols = [TypedSymbol(name_base + d, Arr(self._index_dtype, self._q)) for d in self._coordinate_names]
        else:
            symbols = [TypedSymbol(name_base + d, self._index_dtype) for d in self._coordinate_names]
        return symbols

    def _array_symbols(self, f_dir, inverse, index):
@@ -168,90 +178,31 @@ class BetweenTimestepsIndexing:
return trivial_index_translations, trivial_offset_translations
def create_code_node(self):
return BetweenTimestepsIndexing.TranslationArraysNode(self)
class TranslationArraysNode(CustomCodeNode):
def __init__(self, indexing):
code = ''
symbols_defined = set()
for f_dir, inv in indexing._required_index_arrays:
indices, offsets = indexing._get_translated_indices_and_offsets(f_dir, inv)
index_array_symbol = indexing._index_array_symbol(f_dir, inv)
symbols_defined.add(index_array_symbol)
code += _array_pattern(indexing._index_dtype, index_array_symbol.name, indices)
for f_dir, inv in indexing._required_offset_arrays:
indices, offsets = indexing._get_translated_indices_and_offsets(f_dir, inv)
offset_array_symbols = indexing._offset_array_symbols(f_dir, inv)
symbols_defined |= set(offset_array_symbols)
for d, arrsymb in enumerate(offset_array_symbols):
code += _array_pattern(indexing._offsets_dtype, arrsymb.name, offsets[d])
super(BetweenTimestepsIndexing.TranslationArraysNode, self).__init__(
code, symbols_read=set(), symbols_defined=symbols_defined)
def __str__(self):
return "Variable PDF Access Translation Arrays"
def __repr__(self):
return "Variable PDF Access Translation Arrays"
# end class AdvancedStreamingIndexing
array_content = list()
symbols_defined = set()
for f_dir, inv in self._required_index_arrays:
indices, offsets = self._get_translated_indices_and_offsets(f_dir, inv)
index_array_symbol = self._index_array_symbol(f_dir, inv)
symbols_defined.add(index_array_symbol)
class NeighbourOffsetArrays(CustomCodeNode):
if IS_PYSTENCILS_2:
array_content.append((index_array_symbol, indices))
else:
array_content.append((self._index_dtype, index_array_symbol.name, indices))
@staticmethod
def neighbour_offset(dir_idx, stencil):
if isinstance(sp.sympify(dir_idx), sp.Integer):
return stencil[dir_idx]
for f_dir, inv in self._required_offset_arrays:
indices, offsets = self._get_translated_indices_and_offsets(f_dir, inv)
offset_array_symbols = self._offset_array_symbols(f_dir, inv)
symbols_defined |= set(offset_array_symbols)
for d, arrsymb in enumerate(offset_array_symbols):
if IS_PYSTENCILS_2:
array_content.append((arrsymb, offsets[d]))
else:
array_content.append((self._offsets_dtype, arrsymb.name, offsets[d]))
if IS_PYSTENCILS_2:
return TranslationArraysNode(array_content)
else:
return tuple([sp.IndexedBase(symbol, shape=(1,))[dir_idx]
for symbol in NeighbourOffsetArrays._offset_symbols(len(stencil[0]))])
@staticmethod
def _offset_symbols(dim):
return [TypedSymbol(f"neighbour_offset_{d}", create_type(np.int32)) for d in ['x', 'y', 'z'][:dim]]
def __init__(self, stencil, offsets_dtype=np.int32):
offsets_dtype = create_type(offsets_dtype)
dim = len(stencil[0])
array_symbols = NeighbourOffsetArrays._offset_symbols(dim)
code = "\n"
for i, arrsymb in enumerate(array_symbols):
code += _array_pattern(offsets_dtype, arrsymb.name, (d[i] for d in stencil))
offset_symbols = NeighbourOffsetArrays._offset_symbols(dim)
super(NeighbourOffsetArrays, self).__init__(code, symbols_read=set(),
symbols_defined=set(offset_symbols))
return TranslationArraysNode(array_content, symbols_defined)
class MirroredStencilDirections(CustomCodeNode):
@staticmethod
def mirror_stencil(direction, mirror_axis):
assert mirror_axis <= len(direction), f"only {len(direction)} axis available for mirage"
direction = list(direction)
direction[mirror_axis] = -direction[mirror_axis]
return tuple(direction)
@staticmethod
def _mirrored_symbol(mirror_axis):
axis = ['x', 'y', 'z']
return TypedSymbol(f"{axis[mirror_axis]}_axis_mirrored_stencil_dir", create_type(np.int32))
def __init__(self, stencil, mirror_axis, dtype=np.int32):
offsets_dtype = create_type(dtype)
mirrored_stencil_symbol = MirroredStencilDirections._mirrored_symbol(mirror_axis)
mirrored_directions = [stencil.index(MirroredStencilDirections.mirror_stencil(direction, mirror_axis))
for direction in stencil]
code = "\n"
code += _array_pattern(offsets_dtype, mirrored_stencil_symbol.name, mirrored_directions)
super(MirroredStencilDirections, self).__init__(code, symbols_read=set(),
symbols_defined={mirrored_stencil_symbol})
# end class AdvancedStreamingIndexing
@@ -58,24 +58,27 @@ odd_accessors = {
}


def is_inplace(streaming_pattern):
    if streaming_pattern not in streaming_patterns:
        raise ValueError('Invalid streaming pattern', streaming_pattern)
    return streaming_pattern in ['aa', 'esotwist', 'esopull', 'esopush']


def get_accessor(streaming_pattern: str, timestep: Timestep) -> PdfFieldAccessor:
    if streaming_pattern not in streaming_patterns:
        raise ValueError(
            "Invalid value of parameter 'streaming_pattern'.", streaming_pattern)
    if is_inplace(streaming_pattern) and (timestep == Timestep.BOTH):
        raise ValueError(f"Invalid timestep for streaming pattern {streaming_pattern}: {str(timestep)}")
    if timestep == Timestep.EVEN:
        return even_accessors[streaming_pattern]
    else:
        return odd_accessors[streaming_pattern]


def is_inplace(streaming_pattern):
    if streaming_pattern not in streaming_patterns:
        raise ValueError('Invalid streaming pattern', streaming_pattern)
    return streaming_pattern in ['aa', 'esotwist', 'esopull', 'esopush']


def get_timesteps(streaming_pattern):
    return (Timestep.EVEN, Timestep.ODD) if is_inplace(streaming_pattern) else (Timestep.BOTH, )
from typing import Union

from numpy.typing import NDArray


def poiseuille_flow(middle_distance: Union[float, NDArray], height,
                    ext_force_density: float, dyn_visc: float) -> Union[float, NDArray]:
    """
    Analytical solution for plane Poiseuille flow.

    Args:
        middle_distance: Distance to the middle plane of the channel.
        height: Distance between the boundaries.
        ext_force_density: Force density on the fluid normal to the boundaries.
        dyn_visc: Dynamic viscosity of the fluid.
    Returns:
        The Poiseuille velocity profile as a numpy array if middle_distance is given as an array,
        otherwise the velocity at the position given by middle_distance.
    """
    return ext_force_density * 1. / (2 * dyn_visc) * (height**2.0 / 4.0 - middle_distance**2.0)
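A short worked example of this analytic profile (a sketch; the channel height, force density, and viscosity values are illustrative only):

import numpy as np

# Hedged example: evaluate the profile across a channel of height 20 lattice units.
height = 20.0
y = np.linspace(-height / 2, height / 2, 11)   # distance from the channel mid-plane
u = poiseuille_flow(y, height, ext_force_density=1e-5, dyn_visc=0.1)

# The maximum velocity sits at the mid-plane: F * h**2 / (8 * mu)
assert np.isclose(u.max(), 1e-5 * height**2 / (8 * 0.1))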