1 change: 1 addition & 0 deletions examples/micro-manager-cpp-adaptivity-config.json
@@ -16,6 +16,7 @@
"history_param": 0.5,
"coarsening_constant": 0.3,
"refining_constant": 0.4,
"adaptive_coarsening_constant": true,
"every_implicit_iteration": true,
"output_cpu_time": true
}
259 changes: 250 additions & 9 deletions micro_manager/adaptivity/adaptivity.py
@@ -1,15 +1,18 @@
"""
Functionality for adaptive initialization and control of micro simulations
"""
from copy import deepcopy
import sys
import os
import numpy as np
from math import exp
from typing import Callable
import re
import xml.etree.ElementTree as ET
from warnings import warn
import importlib
from micro_manager.tools.logging_wrapper import Logger

import numpy as np


class AdaptivityCalculator:
def __init__(self, configurator, rank, nsims) -> None:
@@ -32,6 +35,15 @@ def __init__(self, configurator, rank, nsims) -> None:
self._adaptivity_type = configurator.get_adaptivity_type()
self._adaptivity_output_type = configurator.get_adaptivity_output_type()

self._dynamic_adaptivity = configurator.get_dynamic_adaptivity()
self._dynamic_refine_const = self._refine_const
self._precice_config_file_name = configurator.get_precice_config_file_name()
self._convergence_measure = []
self._min_addition = 1.0
self._logger = Logger(
"adaptivity-logger", "adaptivity-" + str(rank) + ".log", rank
)

self._micro_problem = getattr(
importlib.import_module(
configurator.get_micro_file_name(), "MicroSimulation"
@@ -50,13 +62,17 @@ def __init__(self, configurator, rank, nsims) -> None:
# Start adaptivity calculation with all sims active
# This array is modified in place via the functions update_active_sims and update_inactive_sims
self._is_sim_active = np.array([True] * nsims, dtype=np.bool_)
self._is_sim_active_ref = self._is_sim_active.copy()
self._is_sim_to_solve = self._is_sim_active.copy()
self._global_data_last = None

# sim_is_associated_to: 1D array with values of associated simulations of inactive simulations. Active simulations have None
# Active sims do not have an associated sim
# This array is modified in place via the function associate_inactive_to_active
self._sim_is_associated_to = np.full((nsims), -2, dtype=np.intc)

self._just_deactivated: list[int] = []
self._just_deactivated_ref: list[int] = []

self._similarity_measure = self._get_similarity_measure(
configurator.get_adaptivity_similarity_measure()
@@ -95,6 +111,60 @@ def __init__(self, configurator, rank, nsims) -> None:
csv_logger=True,
)

if self._dynamic_adaptivity:
# Read convergence measures from preCICE configuration file
self._data_values, self._limit_values = self.read_convergence_measures()

def read_convergence_measures(self):
"""
Read convergence measures from the preCICE configuration file given in the Micro Manager configuration.

Returns
-------
data_values : list
List of data names involved in the convergence measures.
limit_values : list
List of limit attributes of the corresponding convergence measures.
"""
# Read the XML configuration file
with open(self._precice_config_file_name, "r") as xml_file:
xml_data = xml_file.read()

unique_names = [
"absolute-convergence-measure",
"relative-convergence-measure",
"residual-relative-convergence-measure",
]

# Initialize lists to store the found attributes
data_values = []
limit_values = []

for unique_name in unique_names:
pattern = f'<{unique_name} limit="([^"]+)" data="([^"]+)" mesh="([^"]+)"'
matches = re.finditer(pattern, xml_data)
for match in matches:
data_values.append(match.group(2))
limit_values.append(match.group(1))

# Check if any matches were found
if data_values and limit_values:
for i, (data_value, limit_value) in enumerate(
zip(data_values, limit_values), start=1
):
print(f"Match {i}:")
print(f"Data: {data_value}")
print(f"Limit: {limit_value}")
else:
print(f"No attributes found for unique names: {unique_names}")

return data_values, limit_values
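
For illustration, a minimal, self-contained sketch of what this pattern matches (the XML tag below is an assumed example and not taken from this PR; the attribute names and order follow the regex above):

import re

# Assumed example of a preCICE convergence-measure tag. The string-based pattern
# only matches when the attributes appear in exactly the order limit, data, mesh.
xml_data = '<relative-convergence-measure limit="1e-4" data="Temperature" mesh="macro-mesh" />'

pattern = '<relative-convergence-measure limit="([^"]+)" data="([^"]+)" mesh="([^"]+)"'
for match in re.finditer(pattern, xml_data):
    print(match.group(2), match.group(1))  # prints: Temperature 1e-4

Since xml.etree.ElementTree is already imported at the top of this module, an attribute-order-independent parse of the same tags would also be possible.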

def _update_similarity_dists(self, dt: float, data: dict) -> None:
"""
Calculate metric which determines if two micro simulations are similar enough to have one of them deactivated.
@@ -119,27 +189,197 @@ def _update_similarity_dists(self, dt: float, data: dict) -> None:

self._similarity_dists += dt * self._similarity_measure(data_vals)

def _update_active_sims(self) -> None:
def _reset_hist(self) -> None:
self._global_data_last = None

def _get_addition(self) -> float:
"""
Compute an addition to the refining constant, based on the limit values in the preCICE configuration file and the convergence measurements logged by preCICE.

Returns
-------
addition : float
Value used to adapt the refining constant; 0.0 in the first time window.
"""

# Read the convergence values of the last iteration from the preCICE convergence log file (precice-<participant>-convergence.log)
convergence_values = [] # last iteration
addition = 0.0

file_path = None
file_name_suffix = "-convergence.log"

for root, _, files in os.walk(os.getcwd()):
for file_name in files:
if file_name.endswith(file_name_suffix):
file_path = os.path.join(root, file_name)
break
with open(file_path, "r") as file:
lines = file.readlines()

if len(lines) > 1 and len(lines) > len(self._convergence_measure):
if len(lines) == 2:
self._convergence_measure.append(lines[0].strip().split())
self._convergence_measure.append(lines[-1].strip().split())
header_line = self._convergence_measure[0]
last_line = self._convergence_measure[-1]

if int(last_line[0]) == 1:
self._logger.log_info("first time window")
addition = 0.0
else:
if int(last_line[1]) == 1:
self._min_addition = 1.0
else:
if self._min_addition == 0.0:
addition = 0.0
else:
for data in self._data_values:
for element in header_line:
if data in element:
index = header_line.index(element)
if last_line[index] == "inf":
convergence_values.append(1e20)
else:
index_config = self._data_values.index(data)
convergence_values.append(
max(
float(last_line[index]),
float(self._limit_values[index_config]),
)
)
min_convergence = np.log10(
np.prod(
np.array(self._limit_values, dtype=float)
/ np.array(convergence_values, dtype=float)
)
)

self._logger.log_info(
"min Convergence: {} ".format(min_convergence)
)

alpha = 3.0
addition = min(
self._min_addition,
min(
(1 + 1.0 / (min_convergence - 1.0)) ** alpha,
float(last_line[2])
/ self._max_similarity_dist
/ self._coarse_const,
),
)
self._min_addition = addition

return addition
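
As a numeric sketch of the first candidate term in the formula above (the limits and residuals below are assumed illustration values; the second candidate term, which uses a column of the convergence log and the maximum similarity distance, is left out):

import numpy as np

def addition_factor(limits, residuals, alpha=3.0):
    # Clamp residuals to their limits, as _get_addition does, so that every
    # ratio limit / residual lies in (0, 1] and min_convergence is <= 0.
    limits = np.asarray(limits, dtype=float)
    residuals = np.maximum(np.asarray(residuals, dtype=float), limits)
    min_convergence = np.log10(np.prod(limits / residuals))
    return (1.0 + 1.0 / (min_convergence - 1.0)) ** alpha

# Assumed values: two coupling data sets. The farther the residuals are above
# their limits, the closer the factor gets to 1; it goes to 0 as they converge.
print(addition_factor([1e-4, 1e-3], [1e-2, 1e-2]))      # ~0.42, far from convergence
print(addition_factor([1e-4, 1e-3], [1.1e-4, 1.1e-3]))  # ~0.0004, almost converged

In _compute_active_sims below this factor is scaled by (1 - refining constant) and added to the base refining constant, so the coarsening tolerance grows while the coupling residuals are still far above their limits and falls back towards the static value as convergence is approached.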

def _get_dynamic_adaptivity_refine_const(self) -> float:
"""
Get dynamic adaptivity refine constant.

Returns
-------
dynamic_adaptivity_refine_const : float
Dynamic adaptivity refine constant.
"""
return self._dynamic_refine_const

def _update_active_sims(self, is_sim_active, just_deactivated) -> None:
"""
Update the set of active micro simulations and reset the reference and to-solve sets accordingly.
"""
self._is_sim_active = is_sim_active
self._just_deactivated = just_deactivated
self._is_sim_to_solve = self._is_sim_active.copy()
self._is_sim_active_ref = self._is_sim_active.copy()

def _update_active_sims_ref(self, is_sim_active_ref, just_deactivated_ref) -> None:
"""
Update the reference set of active micro simulations and the corresponding list of just-deactivated simulations.
"""
self._is_sim_active_ref = is_sim_active_ref
self._just_deactivated_ref = just_deactivated_ref

def _compute_active_sims(self, use_dyn) -> tuple:
"""
Update set of active micro simulations. Active micro simulations are compared to each other
Compute the set of active micro simulations. Active micro simulations are compared to each other
and if found similar, one of them is deactivated.
"""
is_sim_active = self._is_sim_active_ref.copy()
just_deactivated = self._just_deactivated.copy()

if use_dyn and self._dynamic_adaptivity:
addition = self._get_addition() * (1 - self._refine_const)
# self._min_addition = min(self._min_addition, addition)
# addition = self._min_addition
if addition > 0.0:
self._dynamic_refine_const = addition + self._refine_const
else:
self._dynamic_refine_const = self._refine_const
self._logger.log_info(
"Adaptive refine constant: {}".format(self._dynamic_refine_const)
)
else:
self._dynamic_refine_const = self._refine_const

if self._max_similarity_dist == 0.0:
warn(
"All similarity distances are zero, probably because all the data for adaptivity is the same. Coarsening tolerance will be manually set to minimum float number."
)
self._coarse_tol = sys.float_info.min
else:
self._coarse_tol = (
self._coarse_const * self._refine_const * self._max_similarity_dist
self._coarse_const
* self._dynamic_refine_const
* self._max_similarity_dist
)
self._logger.log_info("Coarsening tolerance: {}".format(self._coarse_tol))

# Update the set of active micro sims
for i in range(self._is_sim_active.size):
if self._is_sim_active[i]: # if sim is active
if self._check_for_deactivation(i, self._is_sim_active):
self._is_sim_active[i] = False
self._just_deactivated.append(i)
if is_sim_active[i]: # if sim is active
if self._check_for_deactivation(i, is_sim_active):
is_sim_active[i] = False
just_deactivated.append(i)
return is_sim_active, just_deactivated
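
To connect this with the example configuration at the top of the diff, a back-of-the-envelope sketch (coarsening_constant = 0.3 and refining_constant = 0.4 are the values from the example JSON; the addition factor of 0.42 is the assumed value from the sketch after _get_addition):

# Assumed values for illustration only.
coarse_const, refine_const, addition_factor = 0.3, 0.4, 0.42

dynamic_refine_const = refine_const + addition_factor * (1 - refine_const)  # ~0.65
coarse_tol_scale = coarse_const * dynamic_refine_const                      # ~0.20

# Compared with the static scale coarse_const * refine_const = 0.12, a larger
# fraction of the maximum similarity distance now counts as "similar", so,
# under this reading, more active simulations can be deactivated while the
# coupling is still far from convergence.
print(dynamic_refine_const, coarse_tol_scale)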

def _compute_sims_to_solve(self, global_data: dict) -> None:
"""
Compute which active micro simulations need to be solved in this iteration, based on how much their global data has changed since they were last solved.
"""
self._is_sim_to_solve = self._is_sim_active.copy()
hist_dist = 0.0

if self._global_data_last is None:
self._global_data_last = deepcopy(global_data)
return
else:
tol_u = (
self._dynamic_refine_const
* self._max_similarity_dist
/ sum(self._is_sim_active)
)
self._logger.log_info("tol_u: {}".format(tol_u))
if sum(self._is_sim_active) > 1:
for i in range(self._is_sim_active.size):
if self._is_sim_active[i]:
for name in global_data.keys():
hist_dist = np.abs(
self._global_data_last[name][i] - global_data[name][i]
)
self._logger.log_info(
"_global_data_last: {}, global_data: {}, hist_dist: {} for cell {}".format(
self._global_data_last[name][i],
global_data[name][i],
hist_dist,
i,
)
)
if hist_dist >= tol_u:
self._global_data_last[name][i] = global_data[name][i]
else:
self._is_sim_to_solve[i] = False
if sum(self._is_sim_to_solve) == 0:
self._is_sim_to_solve = self._is_sim_active.copy()
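
A numeric reading of the tolerance used above, with all values assumed for illustration:

# Assumed: dynamic refining constant ~0.65, maximum similarity distance 2.0,
# five currently active simulations.
tol_u = 0.65 * 2.0 / 5  # = 0.26

# An active simulation whose global data changed by less than tol_u since it
# was last solved is marked as not-to-solve for this iteration; presumably the
# caller then reuses its previous micro output.
changed_by = 0.1
solve_again = changed_by >= tol_u  # False
print(tol_u, solve_again)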

def _associate_inactive_to_active(self) -> None:
"""
@@ -156,6 +396,7 @@ def _associate_inactive_to_active(self) -> None:
for inactive_id in inactive_ids:
# Begin with a large distance to trigger the search for the most similar active sim
dist_min = dist_min_start_value

for active_id in active_ids:
# Find most similar active sim for every inactive sim
if self._similarity_dists[inactive_id, active_id] < dist_min: