Fig-6-7-8-9#

Imports#

import dolfin
import matplotlib.pyplot as plt
import numpy

from generate_images import generate_images_and_meshes_from_RivlinCube ### this file is available in the repository; see it for the details of the image and mesh generation

import dolfin_estim as destim
import dolfin_warp as dwarp

### disable deprecation warnings to avoid cluttering the output
import warnings
from ffc.quadrature.deprecation import QuadratureRepresentationDeprecationWarning
warnings.simplefilter("ignore", QuadratureRepresentationDeprecationWarning)

Parameters#

Geometry#

dim = 2 ### the geometry studied is a square
mesh_size_lst  = [        ] ### in mm
mesh_size_lst += [0.1     ] # 0.10
mesh_size_lst += [0.1/2**1] # 0.05

cube_params = {"X0":0.2, "Y0":0.2, "X1":0.8, "Y1":0.8, "l":0.1} # cube of characteristic length 0.6 mm, default element size 0.1 mm

Loading and boundary conditions#

deformation_type_lst  = [       ]
deformation_type_lst += ["grav" ] ### body forces
deformation_type_lst += ["compx"] ### boundary forces

load_params_boundary = {"type":"pres", "f":0.3} ### boundary force, pressure = 0.3 kPa
load_params_body = {"type":"volu", "f":0.3} ### body force = 0.3 mN/mm3

const_params = {"type":"blox"} ### defining load and boundary conditions: here, the square is clamped on the left x boundary

Material behavior#

mat_params = {"model":"CGNH", "parameters":{"E":1., "nu":0.3}} ### material constants for the estimation (Ciarlet-Geymonat Neo-Hookean model): E is the Young's modulus in kPa, nu is the Poisson ratio [-]
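
As an illustrative aside (not used by the script), the hyperelastic model is parameterized here by (E, nu); the equivalent Lamé parameters follow from the standard small-strain conversion, sketched below.

### illustrative conversion only, not part of the estimation pipeline
E, nu = mat_params["parameters"]["E"], mat_params["parameters"]["nu"]
lmbda = E*nu/((1+nu)*(1-2*nu)) ### first Lamé parameter, ~0.577 kPa here
mu    = E/(2*(1+nu))           ### shear modulus, ~0.385 kPa here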

Images#

images_folder = "generate_images"
n_voxels = 100 ### number of voxels on the created images

noise_level_lst  = [   ] ### investigated noise levels
noise_level_lst += [0.3]
noise_level_lst += [0.2]
noise_level_lst += [0.1]
noise_level_lst += [0.0]

n_runs_for_noisy_images = 10 ### number of images generated per noise level; since the added noise is a random Gaussian field, several noisy images are needed for each noise level so that the estimation statistics converge (see the sketch after the regularization levels below)

regul_level_lst  = [        ] ### regularization levels
regul_level_lst += [0.1*2**3] # 0.8
regul_level_lst += [0.1*2**1] # 0.2
regul_level_lst += [0.      ] # 0.0
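
To make the noise model concrete: the synthetic images are corrupted by an additive Gaussian random field, which is why several noisy images are generated per noise level. Below is a minimal sketch of what this amounts to, assuming a voxel intensity array normalized to [0, 1]; the actual noise generation is implemented inside generate_images_and_meshes_from_RivlinCube.

### minimal sketch only; the real noise generation happens in generate_images_and_meshes_from_RivlinCube
clean_image = numpy.zeros((n_voxels,)*dim)                                  ### placeholder for a normalized tagged image
noisy_image = clean_image + 0.3*numpy.random.normal(size=clean_image.shape) ### additive Gaussian field for noise_level = 0.3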

Varying parameters for the estimation#

### different cases investigated
noise_from_images_lst = [True, False] ### synthetic measurements generated by adding noise to the images (True) or directly to the displacements (False)
refine_mesh = [True, False] ### refining the mesh or not
load_type = ["body_force", "boundary_force"] ### either boundary force or body force
fine_mesh_size = min(mesh_size_lst) ### finest mesh size: meshes of this size are never refined
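### these lists are combined in the "Plots" section at the end: every mesh size is run with and without
### refinement (except the finest mesh, which is never refined), for both load types, and with noise added
### either to the images or to the displacements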

Synthetic measurements#

Synthetic images#

for deformation_type in deformation_type_lst:
    generate_images_and_meshes_from_RivlinCube( ### creating very fine mesh for image creation
        images_n_dim = dim,
        images_n_voxels = n_voxels,
        deformation_type = deformation_type,
        texture_type = "no",
        noise_level = 0,
        run_model = 1,
        generate_images = 0)
    for noise_level in noise_level_lst:
        n_runs = n_runs_for_noisy_images if (noise_level > 0) else 1
        for k_run in range(1, n_runs+1):
            generate_images_and_meshes_from_RivlinCube(### creating images
                images_n_dim = dim,
                images_n_voxels = n_voxels,
                deformation_type = deformation_type,
                texture_type = "tagging",
                noise_level = noise_level,
                k_run = k_run if (n_runs > 1) else None,
                run_model = 0,
                generate_images = 1)

Ground-truth motion#

for deformation_type in deformation_type_lst:
    for mesh_size in mesh_size_lst:
        generate_images_and_meshes_from_RivlinCube(### ground-truth motion + meshes
            images_n_dim = dim,
            # images_n_voxels = 1,
            mesh_size = mesh_size,
            deformation_type = deformation_type,
            texture_type = "no",
            noise_level = 0,
            run_model = 1,
            generate_images = 0)

Tracking#

for deformation_type in deformation_type_lst:
    for noise_level in noise_level_lst:
        n_runs = n_runs_for_noisy_images if (noise_level > 0) else 1 ### without noise a single sample suffices; for noise levels above 0, Gaussian noise is generated, so several samples are needed to average over the randomness
        for k_run in range(1, n_runs+1):
            for mesh_size in mesh_size_lst:
                for regul_level in regul_level_lst:
                    ### getting regularization type, depending on the type of deformation applied
                    if deformation_type == "compx":
                        regul_type = "discrete-equilibrated-tractions-tangential"
                        working_folder = "run_warp_compx"
                        regul_b = 0.0
                    else:
                        regul_type = "discrete-equilibrated-tractions-normal-tangential"
                        working_folder = "run_warp_grav"
                        regul_b = 0.3

                    #### building file and folder names
                    images_basename = "square"
                    images_basename += "-"+deformation_type
                    images_basename += "-tagging"
                    images_basename += "-noise="+str(noise_level)
                    if (n_runs > 1):
                        images_basename += "-run="+str(k_run).zfill(2)
                    mesh_basename = "square"
                    mesh_basename += "-"+deformation_type
                    mesh_basename += "-h="+str(mesh_size)
                    mesh_basename += "-mesh"
                    if mesh_size != fine_mesh_size:
                        meshes = [dolfin.Mesh(images_folder+"/"+mesh_basename+"coarse.xml"), dolfin.Mesh(images_folder+"/"+mesh_basename+"refined.xml")]
                    else:
                        meshes = [dolfin.Mesh(images_folder+"/"+mesh_basename+"coarse.xml")]
                    if deformation_type == "grav":
                        regul_surface_subdomain = []
                        regul_surface_subdomain_id = []
                        for mesh in meshes:
                            regul_surface_subdomain_ = dolfin.MeshFunction("size_t", mesh, mesh.topology().dim()-1)
                            regul_surface_subdomain_.set_all(1)
                            xmin_sd = dolfin.CompiledSubDomain("near(x[0], x0) && on_boundary", x0 = 0.2)
                            xmin_sd.mark(regul_surface_subdomain_, 0)
                            regul_surface_subdomain.append(regul_surface_subdomain_)
                            regul_surface_subdomain_id.append(1)
                    else:
                        regul_surface_subdomain = None
                        regul_surface_subdomain_id = None
                    working_basename = images_basename
                    working_basename += "-h="+str(mesh_size)
                    working_basename += "-"+regul_type
                    working_basename += "-regul="+str(regul_level)
                    working_basename += "-b="+str(regul_b)

                    #### tracking process
                    if mesh_size != fine_mesh_size: ### get refined solution for multiresolution
                        refinement_levels = [0,1]
                    else:
                        refinement_levels = [0]
                    dwarp.warp_and_refine(
                        working_folder = working_folder,
                        working_basename = working_basename,
                        images_folder = images_folder,
                        images_basename = images_basename,
                        meshes = meshes,
                        regul_type = regul_type,
                        regul_model = "ciarletgeymonatneohookean",
                        regul_level = regul_level,
                        regul_poisson = 0.3,
                        regul_b = [regul_b]+[0.]*(dim-1),
                        regul_surface_subdomain_data = regul_surface_subdomain,
                        regul_surface_subdomain_id = regul_surface_subdomain_id,
                        relax_type = "backtracking",
                        tol_dU = 1e-2,
                        n_iter_max = 100,
                        normalize_energies = 1,
                        refinement_levels = refinement_levels,
                        silent = True)

Generating plots#

Helpers#

SNR_lst = [] ### defining SNRs (Signal-to-Noise Ratios)
for noise in noise_level_lst:
    if noise == 0.:
        SNR_lst.append(40.) ### the noiseless case corresponds to SNR = +∞; an arbitrary finite value (40) is used for plotting
    else:
        SNR_lst.append(1/noise)
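### with noise_level_lst = [0.3, 0.2, 0.1, 0.0], SNR_lst becomes [1/0.3 ≈ 3.33, 5.0, 10.0, 40.],
### which matches the x-tick labels [3.3, 5, 10, ∞] used in the plots below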
def writing_results_to_pdf(results_all = [], SNR_lst = [], method_lst = [], mesh_size = 0.1, noise_from_images = False, load_type = "body_force", regul = "", refine = True): ### writing results to pdf
    
    ### plotting parameters
    fig, ax = plt.subplots()
    plt.rc("xtick", labelsize = 16)
    plt.rc("ytick", labelsize = 16)
    plt.rc("legend", fontsize = 12)
    plt.ylim([-100, 100])
    color_lst = ['forestgreen', 'royalblue', 'firebrick', 'gold'] #### colors associated with each method

    ### labels
    plt.xlabel("Signal to Noise Ratio (SNR)", fontsize = 12)
    plt.ylabel("Estimation error (%)", fontsize = 12)
    label_lst = ["EGM", "VFM (plane wave as Virtual Field)", "VFM (Virtual Field from [Deng et al.])", "FEMU"] ### labels for legends

    for method in method_lst:
        plt.plot(SNR_lst, results_all[str(method)]["E_average"], color = color_lst[0], label = label_lst[0])
        plt.xlim([3.3, 20.])
        ax.fill_between(SNR_lst, results_all[str(method)]["E_+"], results_all[str(method)]["E_-"], alpha = 0.5, color = color_lst[0])
        plt.gca().set_xscale('log')
        
        color_lst = color_lst[1:]
        label_lst = label_lst[1:]

    ax.set_xticks([])
    ax.set_xticks([], minor = True)
    plt.xticks(SNR_lst, [3.3, 5, 10, r'$\infty$']) ### raw string avoids the invalid escape sequence warning
    plt.legend(loc = 'upper right')
    plt.grid()
    
    plt.savefig("./plot_noise_error_different_methods-mesh_size = "+str(mesh_size)+"refine = "+str(refine)+"-noise_from_images = "+str(noise_from_images)+"-load_type = "+str(load_type)+"regul = "+str(regul)+".pdf", bbox_inches = 'tight') ### save before plt.show() so the figure is not empty when running as a script

    plt.show()
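### hypothetical usage sketch, not called by this script: writing_results_to_pdf expects results_all to map
### each method name to a dict whose "E_average", "E_+" and "E_-" lists are aligned with SNR_lst, e.g.
# dummy_results = {"EGM": {"E_average": [0.]*len(SNR_lst), "E_+": [0.]*len(SNR_lst), "E_-": [0.]*len(SNR_lst)}}
# writing_results_to_pdf(results_all = dummy_results, SNR_lst = SNR_lst, method_lst = ["EGM"], mesh_size = 0.1)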
def run_noise_on_disp(method_lst = [], load_type = "body_force", load_params = {}, mesh_size = 0.1, cube_params = {}, refine = True, SNR_lst = [], regul_number = 0.3): ### synthetic data by adding noise to displacements
    results_all = {}
    for method in method_lst: ## for each method
        results_std = {}
        results = {}
        noise_results = []
        E_average, E_plus, E_minus = [], [], []
        E_all = []
        for noise in noise_level_lst: ### for each noise level
            E_results = []
            for i in range(1, 11): ### 10 estimations per noise level; several runs are required since the added noise is Gaussian, i.e. random
                noise_results.append(noise)
                try:
                    E = destim.identifying_parameter(method = method, delta = 9*0.6, load_type = load_type, load_params = load_params, mesh_size = mesh_size, cube_params = cube_params, refine = refine, noise_from_images = False, noise = noise, regul_number = regul_number)
                    if "refine" in cube_params.keys() and not refine:
                        cube_params.pop("refine") 
                    E_error = (E-1)*100
                    E_all.append(E_error)
                    E_results.append(E_error)
                except: ##### when adding highly unstructured noise, the computation may not always converge; in this case the results are not taken into account
                    pass
            E_average.append(numpy.average(E_results))
            E_plus.append(numpy.average(E_results) + numpy.std(E_results))
            E_minus.append(numpy.average(E_results) - numpy.std(E_results))
        results_std["noise"] = noise_level_lst
        results_std["E_+"] = E_plus
        results_std["E_-"] = E_minus
        results_std["E_average"] = E_average
        results["noise"] = noise_results
        results["E"] = E_all
        results_all[str(method)] = results_std
    writing_results_to_pdf(mesh_size = mesh_size, SNR_lst = SNR_lst, results_all = results_all, noise_from_images = False, load_type = load_type, method_lst = method_lst, refine = refine)
def run_noise_on_images(method_lst = [], load_type = "body_force", load_params = {}, mesh_size = 0.1, cube_params = {}, refine = True, SNR_lst = [], regul_number = 0.3): ### synthetic data by adding noise to images
    results_all = {}
    for regul in regul_level_lst: ### for each regularization level
        for method in method_lst: ### for each method
            results_std = {}
            results = {}
            noise_results = []
            E_average, E_plus, E_minus = [], [], []
            E_all = []
            for noise in noise_level_lst: ### for each noise level
                E_results = []
                for i in range(1, 11): ### 10 estimations per noise level, one per generated noisy image (n_runs_for_noisy_images); several runs are required since the added noise is Gaussian, i.e. random
                    run = str(i).zfill(2)
                    noise_results.append(noise)
                    try:
                        E = destim.identifying_parameter(method = method, delta = 9*0.6, load_type = load_type, load_params = load_params, mesh_size = mesh_size, cube_params = cube_params, refine = refine, noise_from_images = True, noise = noise, regul = regul, run = run, regul_number = regul_number)
                        E_error = (E-1)*100
                        E_all.append(E_error)
                        E_results.append(E_error)
                        print("E estimated", E)
                    except: ### in case the computation does not converge
                        pass
                E_average.append(numpy.average(E_results))
                E_plus.append(numpy.average(E_results)+numpy.std(E_results))
                E_minus.append(numpy.average(E_results)-numpy.std(E_results))
            results_std["noise"] = noise_level_lst
            results_std["E_+"] = E_plus
            results_std["E_-"] = E_minus
            results_std["E_average"] = E_average
            results["noise"] = noise_results
            results["E"] = E_all
            results_all[str(method)] = results_std
        writing_results_to_pdf(mesh_size = mesh_size, SNR_lst = SNR_lst, results_all = results_all, noise_from_images = True, load_type = load_type, method_lst = method_lst, regul = regul, refine = refine)

Plots#

method_lst = ["EGM", "VFM", "VFM_deng", "FEMU"] ### 4 different estimation methods investigated 

results_all = {}

for mesh_size in mesh_size_lst:
    for refine in refine_mesh:
        print('refine', refine)
        if refine and mesh_size == fine_mesh_size: ### the finest mesh (h = 0.05 in this example) is never refined, since smaller elements were not investigated in the article
            pass
        else:
            for load in load_type:
                print("load", load)
                assert (load in ("body_force", "boundary_force" )), "Load should be body_force or boundary_force, aborting..."
                if load == "body_force":
                    load_params = load_params_body
                    regul_number = 0.3    ### sets the volume regularization, chosen at the ground-truth value (0.3 mN/mm3)
                if load == "boundary_force":
                    load_params = load_params_boundary
                    regul_number = 0.0                    
                for noise_from_images in noise_from_images_lst:
                    print("noise_from_images", noise_from_images)
                    if noise_from_images:
                        run_noise_on_images(method_lst = method_lst, load_type = load, load_params = load_params, mesh_size = mesh_size, cube_params = cube_params, refine = refine, SNR_lst = SNR_lst, regul_number = regul_number)
                    else:
                        run_noise_on_disp(method_lst = method_lst, load_type = load, load_params = load_params, mesh_size = mesh_size, cube_params = cube_params, refine = refine, SNR_lst = SNR_lst, regul_number = regul_number)