
Dataclasses for method creation

Merged: Markus Holzer requested to merge FixPystencilUpdate into master
3 files  +44  −1252
Files: 3
+1  −23
@@ -396,7 +396,7 @@ def create_lb_collision_rule(lb_method=None, lbm_config=None, lbm_optimisation=N
     return collision_rule

-def create_lb_method(lbm_config, **params):
+def create_lb_method(lbm_config=None, **params):
     """Creates a LB method, defined by moments/cumulants for collision space, equilibrium and relaxation rates."""
     lbm_config, _, _ = update_with_default_parameters(params, lbm_config=lbm_config)
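The only change in this hunk is the default value lbm_config=None, so the dataclass remains the primary interface while bare keyword parameters are still translated by update_with_default_parameters. A minimal usage sketch, not part of the diff; LBMConfig, LBStencil, Stencil and Method are assumed to be lbmpy's public names around this change, so adjust the imports if they moved:

import sympy as sp
from lbmpy.creationfunctions import create_lb_method, LBMConfig
from lbmpy.enums import Method, Stencil
from lbmpy.stencils import LBStencil

# all method-defining parameters live in one dataclass instead of **params
lbm_config = LBMConfig(stencil=LBStencil(Stencil.D2Q9),
                       method=Method.SRT,
                       relaxation_rate=sp.Symbol("omega"))
lb_method = create_lb_method(lbm_config=lbm_config)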
@@ -557,28 +557,6 @@ def force_model_from_string(force_model_name, force_values):
     return force_model_class(force_values)

-def switch_to_symbolic_relaxation_rates_for_omega_adapting_methods(method_parameters, kernel_params, force=False):
-    """
-    For entropic kernels the relaxation rate has to be a variable. If a constant was passed a
-    new dummy variable is inserted and the value of this variable is later on passed to the kernel
-    """
-    if method_parameters['entropic'] or method_parameters['smagorinsky'] or force:
-        value_to_symbol_map = {}
-        new_relaxation_rates = []
-        for rr in method_parameters['relaxation_rates']:
-            if not isinstance(rr, sp.Symbol):
-                if rr not in value_to_symbol_map:
-                    value_to_symbol_map[rr] = sp.Dummy()
-                dummy_var = value_to_symbol_map[rr]
-                new_relaxation_rates.append(dummy_var)
-                kernel_params[dummy_var.name] = rr
-            else:
-                new_relaxation_rates.append(rr)
-        if len(new_relaxation_rates) < 2:
-            new_relaxation_rates.append(sp.Dummy())
-        method_parameters['relaxation_rates'] = new_relaxation_rates
def update_with_default_parameters(params, opt_params=None, lbm_config=None, lbm_optimisation=None, config=None):
    # Fix CreateKernelConfig params
    pystencils_config_params = ['target', 'backend', 'cpu_openmp', 'double_precision', 'gpu_indexing',
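The helper removed in the hunk above only rewrote the legacy parameter dictionary: every numeric relaxation rate was replaced by a sympy Dummy symbol, and the numeric value was stored under that dummy's name so it could be supplied to the generated kernel at call time. A standalone sketch of that substitution, using only sympy; replace_constant_rates is a hypothetical name, not an lbmpy function:

import sympy as sp

def replace_constant_rates(relaxation_rates, kernel_params):
    """Swap numeric relaxation rates for Dummy symbols and remember the values."""
    value_to_symbol = {}
    symbolic_rates = []
    for rr in relaxation_rates:
        if isinstance(rr, sp.Symbol):
            symbolic_rates.append(rr)           # already symbolic, keep it
        else:
            if rr not in value_to_symbol:       # one Dummy per distinct value
                value_to_symbol[rr] = sp.Dummy()
            dummy = value_to_symbol[rr]
            symbolic_rates.append(dummy)
            kernel_params[dummy.name] = rr      # value is handed to the kernel later
    return symbolic_rates

kernel_params = {}
rates = replace_constant_rates([1.9, sp.Symbol("omega_shear"), 1.9], kernel_params)
# rates contains one shared Dummy for the two 1.9 entries plus omega_shear;
# kernel_params maps that Dummy's name back to 1.9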