Figures 11 and 12#
Imports#
import matplotlib.pyplot as plt
import numpy
import os
import dolfin
import dolfin_warp as dwarp
import dolfin_estim as destim
from generate_images import generate_images_and_meshes_from_RivlinCube
### disable deprecation warning to avoid heavy output
import warnings
from ffc.quadrature.deprecation import QuadratureRepresentationDeprecationWarning
warnings.simplefilter("ignore", QuadratureRepresentationDeprecationWarning)
Parameters#
Geometry#
### Geometry
dim = 2 ### the geometry studied is a square
cube_params = {"X0":0.2, "Y0":0.2, "X1":0.8, "Y1":0.8, "l":0.1} ### X0, Y0, X1, Y1: min and max x and y coordinates of the square, in mm; l: mesh size, in mm (0.1 by default)
### mesh size investigated
mesh_size = 0.1 ### in mm
Loading and boundary conditions#
### Loading parameters
const_params = {"type":"blox"} ### defining load and boundary conditions: here, the square is clamped on the left x boundary
load_params_body = {"type":"volu", "f":0.3} ### a volume force of 0.3 mN/mm3 is applied on the cube
### deformation case investigated: volume (body) force loading
deformation_type_lst = ["grav"]
Material behavior#
### Material
nu_ref = 0.3 ### defining ground-truth value of Poisson ratio [-]
b_ref = 0.3 ### defining ground-truth value of the volume regularization term, in mN/mm3
E_ref = 1 ### kPa, Young's modulus
mat_params = {"model":"CGNH", "parameters":{"E":E_ref, "nu":nu_ref}} ### defining material constants for estimation
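For reference, here is a minimal sketch of how the (E, nu) pair above maps to Lamé-type constants under the standard conversion; this is for intuition only, the CGNH model inside dolfin_warp/dolfin_estim keeps its own internal parameterization.
### for reference only: standard conversion of (E, nu) to Lame constants
lmbda_ref = E_ref*nu_ref/((1+nu_ref)*(1-2*nu_ref)) ### first Lame parameter, in kPa
mu_ref = E_ref/(2*(1+nu_ref)) ### shear modulus, in kPa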
Images#
### Noise levels
noise_level_lst = [ ]
noise_level_lst += [0.3]
noise_level_lst += [0.2]
noise_level_lst += [0.1]
noise_level_lst += [0.0]
### Images parameters
n_runs_for_noisy_images = 10 ### number of images generated for a given noise level; since the added noise is a random Gaussian field, several realizations of the same noise level are needed for the estimation statistics to converge
n_voxels = 100
### volume regularization constants
regul_b_lst = []
regul_b_lst += [0.0]
regul_b_lst += [0.24]
regul_b_lst += [0.27]
regul_b_lst += [0.3]
regul_b_lst += [0.33]
regul_b_lst += [0.36]
### regularization levels
regul_level_lst = [ ]
regul_level_lst += [0.1*2**1] # 0.2
regul_level_lst += [0.1 ] # 0.1
regul_level_lst += [0.1/2**1] # 0.05
regul_level_lst += [0.] # 0.0
### name and folder creation
images_folder = "generate_images"
if not os.path.exists(images_folder): ### checking if folder already exists
    os.mkdir(images_folder)
Bias#
bias_lst = [0.8, 0.9, 1., 1.1, 1.2] ### studying the impact on the estimation of a -20, -10, 0, +10, or +20 % error on a model parameter
bias_param_lst = ['nu', 'b'] ### studying model errors on the Poisson ratio and on the regularization b
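Note that, for bias_param = "nu", the estimation will be run with nu = bias*nu_ref, i.e. nu in {0.24, 0.27, 0.3, 0.33, 0.36}; for bias_param = "b", the biased values bias*b_ref coincide with the non-zero entries of regul_b_lst defined above, so a tracking result is available for every biased regularization constant.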
Synthetic measurements#
Synthetic images#
for deformation_type in deformation_type_lst:
    generate_images_and_meshes_from_RivlinCube( ### creating fine mesh for image computation
        images_n_dim = dim,
        images_n_voxels = n_voxels,
        deformation_type = deformation_type,
        texture_type = "no",
        noise_level = 0,
        run_model = 1,
        generate_images = 0)
    for noise_level in noise_level_lst:
        n_runs = n_runs_for_noisy_images if (noise_level > 0) else 1
        for k_run in range(1, n_runs+1):
            generate_images_and_meshes_from_RivlinCube(
                images_n_dim = dim,
                images_n_voxels = n_voxels,
                deformation_type = deformation_type,
                texture_type = "tagging",
                noise_level = noise_level,
                k_run = k_run if (n_runs > 1) else None,
                run_model = 0,
                generate_images = 1)
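For intuition only, here is a minimal sketch of the noise model assumed in this tutorial, namely an additive Gaussian field whose standard deviation is the noise level relative to the unit intensity range of the tagged image; the actual texture and noise generation happens inside generate_images_and_meshes_from_RivlinCube and may differ in its details.
### illustration only -- not the actual implementation of generate_images
x = numpy.linspace(0., 1., n_voxels)
clean_image = (numpy.sin(numpy.pi*x[None, :]/0.1)*numpy.sin(numpy.pi*x[:, None]/0.1))**2 ### tagging-like grid pattern
noisy_image = clean_image + numpy.random.normal(0., noise_level_lst[0], clean_image.shape) ### here with noise_level = 0.3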
Ground-truth motion#
for deformation_type in deformation_type_lst:
    generate_images_and_meshes_from_RivlinCube( ### ground-truth motion + meshes
        images_n_dim = dim,
        images_n_voxels = 1,
        mesh_size = mesh_size,
        deformation_type = deformation_type,
        texture_type = "no",
        noise_level = 0,
        run_model = 1,
        generate_images = 0)
Tracking#
for deformation_type in deformation_type_lst:
    for noise_level in noise_level_lst:
        n_runs = n_runs_for_noisy_images if (noise_level > 0) else 1 ### with no noise, a single sample suffices; for non-zero noise levels, a random Gaussian noise is added to the images, so several samples are needed
        for k_run in range(1, n_runs+1):
            for regul_b in regul_b_lst:
                for regul_level in regul_level_lst:
                    regul_type = "discrete-equilibrated-tractions-normal-tangential"
                    working_folder = "run_warp_grav"
                    ### getting file and folder names
                    images_basename = "square"
                    images_basename += "-"+deformation_type
                    images_basename += "-tagging"
                    images_basename += "-noise="+str(noise_level)
                    if (n_runs > 1):
                        images_basename += "-run="+str(k_run).zfill(2)
                    mesh_basename = "square"
                    mesh_basename += "-"+deformation_type
                    mesh_basename += "-h="+str(mesh_size)
                    mesh_basename += "-mesh"
                    working_basename = images_basename
                    working_basename += "-h="+str(mesh_size)
                    working_basename += "-"+regul_type
                    working_basename += "-regul="+str(regul_level)
                    working_basename += "-b="+str(regul_b)
                    meshes = [dolfin.Mesh(images_folder+"/"+mesh_basename+"coarse.xml"), dolfin.Mesh(images_folder+"/"+mesh_basename+"refined.xml")]
                    regul_surface_subdomain = []
                    regul_surface_subdomain_id = []
                    for mesh in meshes:
                        regul_surface_subdomain_ = dolfin.MeshFunction("size_t", mesh, mesh.topology().dim()-1)
                        regul_surface_subdomain_.set_all(1)
                        xmin_sd = dolfin.CompiledSubDomain("near(x[0], x0) && on_boundary", x0 = 0.2)
                        xmin_sd.mark(regul_surface_subdomain_, 0)
                        regul_surface_subdomain.append(regul_surface_subdomain_)
                        regul_surface_subdomain_id.append(1)
                    refinement_levels = [0,1]
                    dwarp.warp_and_refine(
                        working_folder = working_folder,
                        working_basename = working_basename,
                        images_folder = images_folder,
                        images_basename = images_basename,
                        meshes = meshes,
                        regul_type = regul_type,
                        regul_model = "ciarletgeymonatneohookean",
                        regul_level = regul_level,
                        regul_poisson = 0.3,
                        regul_b = [regul_b]+[0.]*(dim-1),
                        regul_surface_subdomain_data = regul_surface_subdomain,
                        regul_surface_subdomain_id = regul_surface_subdomain_id,
                        relax_type = "backtracking",
                        tol_dU = 1e-2,
                        n_iter_max = 100,
                        normalize_energies = 1,
                        refinement_levels = refinement_levels,
                        silent = True)
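As we understand the dolfin_warp formulation, with normalize_energies = 1 the regul_level parameter acts as a weight $\beta$ balancing the normalized regularization energy against the normalized image similarity energy, roughly $\Psi_{total} = (1-\beta)\,\Psi_{image} + \beta\,\Psi_{regul}$: regul_level = 0 thus corresponds to pure image tracking, and larger values enforce the equilibrium-based regularization more strongly.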
Generating plots#
Helpers#
SNR_lst = [] ### defining the SNRs (signal-to-noise ratios)
for noise in noise_level_lst:
    if noise == 0.:
        SNR_lst.append(40.) ### setting the SNR arbitrarily to a large finite value, since it should be +∞ for noise-free images
    else:
        SNR_lst.append(1/noise)
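With the noise levels defined above, SNR_lst evaluates to [1/0.3 ≈ 3.3, 5, 10, 40]; the noise-free case is thus given the arbitrary finite value 40 and relabeled as $\infty$ on the x-axis of the plots generated below.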
def writing_results_to_pdf(mesh_size = 0.1, SNR_lst = [], results_all = {}, noise_from_images = True, regul = "", bias_lst = [], bias_param = "", method = "EGM"):
    ### plotting parameters
    fig, ax = plt.subplots()
    plt.rc("xtick", labelsize = 16)
    plt.rc("ytick", labelsize = 16)
    plt.rc("legend", fontsize = 12)
    plt.ylim([-100, 100])
    if bias_param == "nu":
        bias_param = r'$\nu_{truth}$'
        param_math = r'$\nu_{estim}$'
    else:
        param_math = bias_param+"$_{regul}$"
        bias_param = bias_param+"$_{truth}$"
    plt.xlabel("Signal to Noise Ratio (SNR)", fontsize = 12)
    plt.ylabel("Estimation error (%)", fontsize = 12)
    color_lst = ['firebrick', 'orange', 'lawngreen', 'deepskyblue', 'orchid']
    for bias in bias_lst:
        plt.plot(SNR_lst, results_all[str(bias)]["E_average"], color = color_lst[0], label = param_math+" = "+str(bias)+bias_param)
        plt.xlim([3.3, 20.])
        ax.fill_between(SNR_lst, results_all[str(bias)]["E_+"], results_all[str(bias)]["E_-"], alpha = 0.5, color = color_lst[0])
        plt.gca().set_xscale('log')
        color_lst = color_lst[1:]
    ax.set_xticks([])
    ax.set_xticks([], minor = True)
    plt.xticks(SNR_lst, [3.3, 5, 10, '$\infty$'])
    plt.legend(loc = "upper right", fontsize = 13, ncol = 2)
    plt.grid()
    plt.savefig("./model_error_for_error_on"+str(bias_param)+"with_method = "+str(method)+str(mesh_size)+"-noise_from_images = "+str(noise_from_images)+"regul = "+str(regul)+".pdf", bbox_inches = 'tight')
    plt.show()
def run_noise_on_images(method_lst = [], load_type = "body_force", load_params = {}, mesh_size = 0.1, cube_params = {}, refine = 0, SNR_lst = [], noise_level_lst = [], bias_param = "", bias_lst = [], noise_from_images = True, regul_number = 0.3):
    results_all = {}
    for method in method_lst:
        for bias in bias_lst:
            results_std = {}
            results = {}
            noise_results = []
            E_average, E_plus, E_minus = [], [], []
            E_all = []
            nu_biased = nu_ref
            if bias_param == "nu":
                nu_biased = bias*nu_ref
            elif bias_param == "b":
                regul_number = bias*b_ref
            for noise in noise_level_lst:
                E_results = []
                for i in range(1, 11):
                    run = str(i).zfill(2)
                    noise_results.append(noise)
                    E = destim.identifying_parameter(method = method, nu = nu_biased, delta = 5*0.6, load_type = load_type, load_params = load_params, mesh_size = mesh_size, cube_params = cube_params, refine = refine, noise_from_images = noise_from_images, noise = noise, regul = 0.2, regul_number = regul_number, run = run)
                    E_error = (E-E_ref)/(E_ref)*100
                    E_all.append(E_error)
                    E_results.append(E_error)
                E_average.append(numpy.average(E_results))
                E_plus.append(numpy.average(E_results)+numpy.std(E_results))
                E_minus.append(numpy.average(E_results)-numpy.std(E_results))
            results_std["noise"] = noise_level_lst
            results_std["E_+"] = E_plus
            results_std["E_-"] = E_minus
            results_std["E_average"] = E_average
            results["noise"] = noise_results
            results["E"] = E_all
            results_all[str(bias)] = results_std
        writing_results_to_pdf(mesh_size = mesh_size, SNR_lst = SNR_lst, results_all = results_all, noise_from_images = True, method = method, regul = 0.2, bias_lst = bias_lst, bias_param = bias_param)
Plots#
method_lst = ["EGM", "VFM", "VFM_deng", "FEMU"]
results_all = {}
for bias_param in bias_param_lst:
    run_noise_on_images(method_lst = method_lst, load_type = "body_force", load_params = load_params_body, mesh_size = mesh_size, cube_params = cube_params, refine = False, SNR_lst = SNR_lst, bias_param = bias_param, bias_lst = bias_lst, noise_level_lst = noise_level_lst, noise_from_images = True, regul_number = b_ref)