diff --git a/.buildinfo b/.buildinfo
new file mode 100644
index 0000000..865802a
--- /dev/null
+++ b/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 688e180c636424fa7131b68b7068aeaf
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/.doctrees/api/tomophantom.TomoP2D.doctree b/.doctrees/api/tomophantom.TomoP2D.doctree
new file mode 100644
index 0000000..73c7e2b
Binary files /dev/null and b/.doctrees/api/tomophantom.TomoP2D.doctree differ
diff --git a/.doctrees/api/tomophantom.TomoP3D.doctree b/.doctrees/api/tomophantom.TomoP3D.doctree
new file mode 100644
index 0000000..9082b62
Binary files /dev/null and b/.doctrees/api/tomophantom.TomoP3D.doctree differ
diff --git a/.doctrees/api/tomophantom.artefacts.doctree b/.doctrees/api/tomophantom.artefacts.doctree
new file mode 100644
index 0000000..671e015
Binary files /dev/null and b/.doctrees/api/tomophantom.artefacts.doctree differ
diff --git a/.doctrees/api/tomophantom.ctypes.doctree b/.doctrees/api/tomophantom.ctypes.doctree
new file mode 100644
index 0000000..e256e30
Binary files /dev/null and b/.doctrees/api/tomophantom.ctypes.doctree differ
diff --git a/.doctrees/api/tomophantom.ctypes.dtype.doctree b/.doctrees/api/tomophantom.ctypes.dtype.doctree
new file mode 100644
index 0000000..8814eab
Binary files /dev/null and b/.doctrees/api/tomophantom.ctypes.dtype.doctree differ
diff --git a/.doctrees/api/tomophantom.ctypes.external.doctree b/.doctrees/api/tomophantom.ctypes.external.doctree
new file mode 100644
index 0000000..75d6113
Binary files /dev/null and b/.doctrees/api/tomophantom.ctypes.external.doctree differ
diff --git a/.doctrees/api/tomophantom.doctree b/.doctrees/api/tomophantom.doctree
new file mode 100644
index 0000000..fd17622
Binary files /dev/null and b/.doctrees/api/tomophantom.doctree differ
diff --git a/.doctrees/api/tomophantom.flatsgen.doctree b/.doctrees/api/tomophantom.flatsgen.doctree
new file mode 100644
index 0000000..5b11299
Binary files /dev/null and b/.doctrees/api/tomophantom.flatsgen.doctree differ
diff --git a/.doctrees/api/tomophantom.generator.doctree b/.doctrees/api/tomophantom.generator.doctree
new file mode 100644
index 0000000..c50cf00
Binary files /dev/null and b/.doctrees/api/tomophantom.generator.doctree differ
diff --git a/.doctrees/api/tomophantom.qualitymetrics.doctree b/.doctrees/api/tomophantom.qualitymetrics.doctree
new file mode 100644
index 0000000..a3d12a8
Binary files /dev/null and b/.doctrees/api/tomophantom.qualitymetrics.doctree differ
diff --git a/.doctrees/api/tomophantom.supp.doctree b/.doctrees/api/tomophantom.supp.doctree
new file mode 100644
index 0000000..b1ec76e
Binary files /dev/null and b/.doctrees/api/tomophantom.supp.doctree differ
diff --git a/.doctrees/api/tomophantom.supp.libraryToDict.doctree b/.doctrees/api/tomophantom.supp.libraryToDict.doctree
new file mode 100644
index 0000000..67a4ff7
Binary files /dev/null and b/.doctrees/api/tomophantom.supp.libraryToDict.doctree differ
diff --git a/.doctrees/api/tomophantom.supp.speckle_routines.doctree b/.doctrees/api/tomophantom.supp.speckle_routines.doctree
new file mode 100644
index 0000000..f0a8723
Binary files /dev/null and b/.doctrees/api/tomophantom.supp.speckle_routines.doctree differ
diff --git a/.doctrees/environment.pickle b/.doctrees/environment.pickle
new file mode 100644
index 0000000..da746b5
Binary files /dev/null and b/.doctrees/environment.pickle differ
diff --git a/.doctrees/howto/installation.doctree b/.doctrees/howto/installation.doctree
new file mode 100644
index 0000000..de28a5c
Binary files /dev/null and b/.doctrees/howto/installation.doctree differ
diff --git a/.doctrees/index.doctree b/.doctrees/index.doctree
new file mode 100644
index 0000000..68d7441
Binary files /dev/null and b/.doctrees/index.doctree differ
diff --git a/.doctrees/introduction/about.doctree b/.doctrees/introduction/about.doctree
new file mode 100644
index 0000000..3727b7d
Binary files /dev/null and b/.doctrees/introduction/about.doctree differ
diff --git a/.doctrees/introduction/libraries.doctree b/.doctrees/introduction/libraries.doctree
new file mode 100644
index 0000000..302c467
Binary files /dev/null and b/.doctrees/introduction/libraries.doctree differ
diff --git a/.doctrees/reference/api.doctree b/.doctrees/reference/api.doctree
new file mode 100644
index 0000000..d0b4067
Binary files /dev/null and b/.doctrees/reference/api.doctree differ
diff --git a/.nojekyll b/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/_images/2DtModel14.gif b/_images/2DtModel14.gif
new file mode 100644
index 0000000..00436f4
Binary files /dev/null and b/_images/2DtModel14.gif differ
diff --git a/_images/model05.jpg b/_images/model05.jpg
new file mode 100644
index 0000000..013243d
Binary files /dev/null and b/_images/model05.jpg differ
diff --git a/_images/model11_4D.gif b/_images/model11_4D.gif
new file mode 100644
index 0000000..0201e87
Binary files /dev/null and b/_images/model11_4D.gif differ
diff --git a/_images/tomo3d.jpg b/_images/tomo3d.jpg
new file mode 100644
index 0000000..e9813e4
Binary files /dev/null and b/_images/tomo3d.jpg differ
diff --git a/_images/tomophantom_apps.png b/_images/tomophantom_apps.png
new file mode 100644
index 0000000..f35d6d2
Binary files /dev/null and b/_images/tomophantom_apps.png differ
diff --git a/_modules/index.html b/_modules/index.html
new file mode 100644
index 0000000..009b9c4
--- /dev/null
+++ b/_modules/index.html
@@ -0,0 +1,329 @@
+"""
+Copyright 2023
+The University of Manchester & Diamond Light Source
+Licensed under the Apache License, Version 2.0.
+
+
+These are modules for generation of 2D and 3D (dynamic extensions)
+phantoms and their analytical data.
+
+API Summary:
+
+* Model - generates a 2D phantom from the Library (Phantom2DLibrary)
+* ModelTemporal - generates a (2D+t) 3D model from the Library (Phantom2DLibrary)
+* ModelSino - generates 2D sinogram for a model from the Library (Phantom2DLibrary)
+* ModelSinoTemporal - generates (2D+t) sinogram for a temporal model from the Library (Phantom2DLibrary)
+* Object - generates a 2D phantom from a given object described in the dictionary.
+* ObjectSino - generates a 2D sinogram from a given object described in the dictionary.
+* SinoNum - calculates a sinogram numerically from any given 2D array.
+"""
+
+import ctypes
+import numpy as np
+from numbers import Number
+from enum import Enum
+from typing import Union
+from pathlib import Path
+
+import tomophantom.ctypes.external as external
+
+__all__ = [
+ "Model",
+ "ModelTemporal",
+ "ModelSino",
+ "ModelSinoTemporal",
+ "Object",
+ "ObjectSino",
+ "SinoNum",
+]
+
+
+class Objects2D(Enum):
+ """Enumeration with the available objects for 2D phantoms"""
+
+ GAUSSIAN = "gaussian"
+ PARABOLA = "parabola"
+ PARABOLA1 = "parabola1"
+ ELLIPSE = "ellipse"
+ CONE = "cone"
+ RECTANGLE = "rectangle"
+
+
+def _check_params2d(model_no: int, models_library_path: Path) -> np.ndarray:
+ """Check parameters before executing the generation script.
+
+ Args:
+ model_no (int): Model number from the Phantom2DLibrary.dat library file.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: an array of integer flags describing the model parameters.
+ """
+ params = np.ascontiguousarray(np.zeros([10], dtype=ctypes.c_int))
+ external.c_checkParams2D(
+ params,
+ model_no,
+ models_library_path,
+ )
+ return params
+
+
+def Model(model_no: int, phantom_size: int, models_library_path: Path) -> np.ndarray:
+ """Generate 2D phantoms based on the model no. in
+ the library file Phantom2DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom2DLibrary.dat library file.
+ phantom_size (int): A size of the generated phantom (squared).
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 2D phantom (N x N).
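+
+ Example (an illustrative sketch; the model number is chosen only for
+ demonstration and the library-file location is an assumption that depends
+ on how TomoPhantom is installed):
+
+ >>> import os, tomophantom
+ >>> from tomophantom import TomoP2D
+ >>> lib2d = os.path.join(os.path.dirname(tomophantom.__file__), "phantomlib", "Phantom2DLibrary.dat")  # assumed install layout
+ >>> phantom = TomoP2D.Model(model_no=1, phantom_size=256, models_library_path=lib2d)
+ >>> phantom.shape
+ (256, 256)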
+ """
+ # check the validity of model's parameters
+ params = _check_params2d(model_no, models_library_path)
+ __testParams2D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ # execute the model building function
+ phantom = np.zeros([phantom_size, phantom_size], dtype="float32", order="C")
+ else:
+ raise ValueError(
+ "The selected model is temporal (2D+time), use 'ModelTemporal' function instead"
+ )
+ external.c_model2d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ phantom_size,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelTemporal(
+ model_no: int, phantom_size: int, models_library_path: Path
+) -> np.ndarray:
+ """Generate 2D+time temporal phantoms based on the model no. in
+ the library file Phantom2DLibrary.dat. Note that temporal phantom
+ numbers begin at 100 onwards.
+
+ Args:
+ model_no (int): Temporal model number from the Phantom2DLibrary.dat library file.
+ phantom_size (int): A size of the generated phantom (squared).
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 2D+time phantom (time_frames x N x N).
+ """
+ # check the validity of model's parameters
+ params = _check_params2d(model_no, models_library_path)
+ __testParams2D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is stationary (2D), use 'Model' function instead"
+ )
+ else:
+ # execute the model building function
+ phantom = np.zeros(
+ [params[3], phantom_size, phantom_size], dtype="float32", order="C"
+ )
+ external.c_model2d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ phantom_size,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelSino(
+ model_no: int,
+ phantom_size: int,
+ detector_size: int,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generate 2D analytical sinogram for corresponding models in
+ the library file Phantom2DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom2DLibrary.dat library file.
+ phantom_size (int): A size of the phantom (squared).
+ detector_size (int): A size of the horizontal detector.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 2D analytical sinogram (angles_total x detector_size).
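+
+ Example (an illustrative sketch; "lib2d" is assumed to point to the
+ Phantom2DLibrary.dat file, as in the Model example above):
+
+ >>> import numpy as np
+ >>> from tomophantom import TomoP2D
+ >>> angles = np.linspace(0.0, 180.0, 180, dtype="float32")
+ >>> sino = TomoP2D.ModelSino(1, 256, 300, angles, lib2d)  # lib2d: assumed library path
+ >>> sino.shape
+ (180, 300)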
+ """
+ # check the validity of model's parameters
+ params = _check_params2d(model_no, models_library_path)
+ __testParams2D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ # execute the model building function
+ sino2d = np.zeros([detector_size, angles_total], dtype="float32", order="C")
+ else:
+ raise ValueError(
+ "The selected model is temporal (2D+time), use 'ModelSinoTemporal' function instead"
+ )
+ external.c_model_sino2d(
+ np.ascontiguousarray(sino2d),
+ model_no,
+ phantom_size,
+ detector_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return sino2d.transpose()
+
+
+def ModelSinoTemporal(
+ model_no: int,
+ phantom_size: int,
+ detector_size: int,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generate 2D+time (temporal )analytical sinogram for
+ corresponding models in the library file Phantom2DLibrary.dat.
+
+ Args:
+ model_no (int): Temporal model number from the Phantom2DLibrary.dat library file.
+ phantom_size (int): A size of the phantom (squared).
+ detector_size (int): A size of the horizontal detector.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 2D+time analytical sinogram (time_frames x detector_size x angles_total).
+ """
+ # check the validity of model's parameters
+ params = _check_params2d(model_no, models_library_path)
+ __testParams2D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is stationary (2D), use 'ModelSino' function instead"
+ )
+ else:
+ # execute the model building function
+ sino2d_t = np.zeros(
+ [params[3], detector_size, angles_total], dtype="float32", order="C"
+ )
+ external.c_model_sino2d(
+ np.ascontiguousarray(sino2d_t),
+ model_no,
+ phantom_size,
+ detector_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return sino2d_t
+
+
+def Object(phantom_size: int, obj_params: Union[list, dict]) -> np.ndarray:
+ """Generate a 2D analytical phantom for the standalone
+ geometrical object that is parametrised in the "obj_params" dictionary.
+
+ Args:
+ phantom_size (int): A size of the phantom (squared).
+ obj_params (list of dict or a dict): A dictionary with parameters of an object, see Demos.
+
+ Returns:
+ np.ndarray: The generated 2D analytical object.
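+
+ Example (an illustrative sketch; the object parameters are arbitrary values
+ within the accepted ranges):
+
+ >>> from tomophantom.TomoP2D import Object, Objects2D
+ >>> obj = {"Obj": Objects2D.GAUSSIAN, "C0": 1.0, "x0": 0.25, "y0": -0.3,
+ ...        "a": 0.15, "b": 0.3, "phi": 30.0}  # arbitrary demo values
+ >>> figure2d = Object(phantom_size=256, obj_params=obj)
+ >>> figure2d.shape
+ (256, 256)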
+ """
+ if type(obj_params) is dict:
+ obj_params = [obj_params]
+
+ object2d = np.zeros([phantom_size, phantom_size], dtype="float32", order="C")
+ # unpacking obj_params dictionary
+ for obj in obj_params:
+ if __testParams(obj):
+ objectName = obj["Obj"].value
+ C0 = obj["C0"]
+ x0 = obj["x0"]
+ y0 = obj["y0"]
+ a = obj["a"]
+ b = obj["b"]
+ phi = obj["phi"]
+ external.c_object2d(
+ np.ascontiguousarray(object2d),
+ phantom_size,
+ objectName,
+ C0,
+ x0,
+ y0,
+ a,
+ b,
+ phi_ang=phi,
+ tt=0,
+ )
+ return object2d
+
+
+def ObjectSino(
+ phantom_size: int,
+ detector_size: int,
+ angles: np.ndarray,
+ obj_params: Union[list, dict],
+) -> np.ndarray:
+ """Generate a 2D analytical sinogram for the standalone
+ geometrical object that is parametrised in the "obj_params" dictionary.
+
+ Args:
+ phantom_size (int): A size of the phantom (squared).
+ detector_size (int): A size of the horizontal detector.
+ angles (np.ndarray): Angles vector in degrees.
+ obj_params (list of dicts or a dict): A dictionary with parameters of an object, see Demos.
+
+ Returns:
+ np.ndarray: The generated 2D analytical sinogram of an object.
+ """
+ angles_total = len(angles)
+ sino2d = np.zeros([detector_size, angles_total], dtype="float32", order="C")
+ if type(obj_params) is dict:
+ obj_params = [obj_params]
+
+ # unpacking obj_params dictionary
+ for obj in obj_params:
+ if __testParams(obj):
+ objectName = obj["Obj"].value
+ C0 = obj["C0"]
+ x0 = obj["x0"]
+ y0 = obj["y0"]
+ a = obj["a"]
+ b = obj["b"]
+ phi = obj["phi"]
+ external.c_object_sino2d(
+ np.ascontiguousarray(sino2d),
+ phantom_size,
+ detector_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ objectName,
+ C0,
+ x0,
+ y0,
+ a,
+ b,
+ phi_ang=-phi,
+ tt=0,
+ )
+ return sino2d.transpose()
+
+
+def SinoNum(input: np.ndarray, detector_X: int, angles: np.ndarray) -> np.ndarray:
+ """Numerical calculation of 2D sinogram from the 2D input.
+
+ Args:
+ input (np.ndarray): 2D object (e.g. a phantom).
+ detector_X (int): The size of the detector X.
+ angles (np.ndarray): Angles vector in degrees.
+
+ Raises:
+ ValueError: if input is 3D
+
+ Returns:
+ np.ndarray: Numerical sinogram calculated from the object
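+
+ Example (an illustrative sketch; "lib2d" is an assumed path to the
+ Phantom2DLibrary.dat file, as in the Model example above):
+
+ >>> import numpy as np
+ >>> from tomophantom import TomoP2D
+ >>> phantom = TomoP2D.Model(1, 256, lib2d)  # lib2d: assumed library path
+ >>> angles = np.linspace(0.0, 180.0, 128, dtype="float32")
+ >>> sino_num = TomoP2D.SinoNum(phantom, 300, angles)
+ >>> sino_num.shape
+ (128, 300)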
+ """
+ if np.ndim(input) == 3:
+ raise ValueError("The accepted inputs must be 2D")
+ phantom_size = input.shape[0]
+ angles_total = len(angles)
+
+ sinogram = np.zeros([detector_X, angles_total], dtype="float32", order="C")
+
+ external.c_numerical_sino2d(
+ np.ascontiguousarray(sinogram),
+ np.ascontiguousarray(input),
+ phantom_size,
+ detector_X,
+ np.ascontiguousarray(angles),
+ angles_total,
+ )
+ return sinogram.transpose()
+
+
+def __testParams(obj):
+ """Performs a simple type check of the input parameters and a range check"""
+ if not type(obj) is dict:
+ raise TypeError("obj is not a dict {0}".format(type(obj)))
+
+ # type check
+ for k, v in obj.items():
+ if not isinstance(v, Number):
+ if not k == "Obj":
+ raise TypeError(k, "is not a Number")
+ typecheck = True
+ # range check
+ rangecheck = obj["x0"] >= -1 and obj["x0"] <= 1
+ if not rangecheck:
+ raise ValueError("x0 is out of range. Must be between -1 and 1")
+ rangecheck = rangecheck and obj["y0"] >= -1 and obj["y0"] <= 1
+ if not rangecheck:
+ raise ValueError("y0 is out of range. Must be between -1 and 1")
+ rangecheck = rangecheck and obj["a"] > 0 and obj["a"] <= 2
+ if not rangecheck:
+ raise ValueError("a (object size) must be positive in [0,2] range")
+ rangecheck = rangecheck and obj["b"] > 0 and obj["b"] <= 2
+ if not rangecheck:
+ raise ValueError("b (object size) must be positive in [0,2] range")
+ return rangecheck and typecheck
+
+
+def __testParams2D(obj):
+ if obj[0] == 0:
+ raise TypeError(
+ "Check if the library file <Phantom2DLibrary.dat> exists and the given path is correct"
+ )
+ if obj[1] == 0:
+ raise TypeError(
+ "The given model is not found, check available models in <Phantom2DLibrary.dat> file"
+ )
+ if obj[2] == 0:
+ raise TypeError(
+ "Components number cannot be negative, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[3] == 0:
+ raise TypeError(
+ "TimeSteps cannot be negative, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[4] == 0:
+ raise TypeError("Unknown name of the object, check <Phantom2DLibrary.dat> file")
+ if obj[5] == 0:
+ raise TypeError(
+ "C0 should not be equal to zero, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[6] == 0:
+ raise TypeError(
+ "x0 (object position) must be in [-1,1] range, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[7] == 0:
+ raise TypeError(
+ "y0 (object position) must be in [-1,1] range, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[8] == 0:
+ raise TypeError(
+ "a (object size) must be positive in [0,2] range, check <Phantom2DLibrary.dat> file"
+ )
+ if obj[9] == 0:
+ raise TypeError(
+ "b (object size) must be positive in [0,2] range, check <Phantom2DLibrary.dat> file"
+ )
+ return 0
+
+"""
+Copyright 2023
+The University of Manchester & Diamond Light Source
+Licensed under the Apache License, Version 2.0.
+
+
+These are modules for generation of 3D and 4D (dynamic extensions) phantoms and
+their analytical data.
+
+API Summary:
+
+* Model - generates a 3D phantom from the Library (Phantom3DLibrary)
+* ModelSub - generates a vertical subset (cutoff) for a model from the Library (Phantom3DLibrary)
+* ModelTemporal - generates a (3D + t) 4D model from the Library (Phantom3DLibrary)
+* ModelTemporalSub - generates a vertical subset (cutoff) of 4D model from the Library (Phantom3DLibrary)
+* ModelSino - generates 3D projection data for a model from the Library (Phantom3DLibrary)
+* ModelSinoSub - generates a vertical subset (cutoff) for 3D projection data for a model from the Library (Phantom3DLibrary)
+* ModelSinoTemporal - generates (3D + t) 4D projection data for a temporal model from the Library (Phantom3DLibrary)
+* ModelSinoTemporalSub - generates (3D + t) 4D projection data (subset) for a temporal model from the Library (Phantom3DLibrary)
+* Object - generates a 3D phantom from a given object described in the dictionary.
+* ObjectSino - generates 3D projection data from a given object described in the dictionary.
+"""
+
+import ctypes
+import numpy as np
+from numbers import Number
+from enum import Enum
+from typing import Union, Tuple
+from pathlib import Path
+
+import tomophantom.ctypes.external as external
+
+__all__ = [
+ "Model",
+ "ModelSub",
+ "ModelTemporal",
+ "ModelTemporalSub",
+ "ModelSino",
+ "ModelSinoSub",
+ "ModelSinoTemporal",
+ "ModelSinoTemporalSub",
+ "Object",
+ "ObjectSino",
+]
+
+
+class Objects3D(Enum):
+ """Enumeration with the available objects for 3D phantoms"""
+
+ GAUSSIAN = "gaussian"
+ PARABOLOID = "paraboloid"
+ ELLIPSOID = "ellipsoid"
+ CONE = "cone"
+ CUBOID = "cuboid"
+ ELLIPCYLINDER = "elliptical_cylinder"
+
+
+def _check_params3d(model_no: int, models_library_path: Path) -> np.ndarray:
+ """Check parameters before executing the generation script.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: an array of integer flags describing the model parameters.
+ """
+ params = np.ascontiguousarray(np.zeros([12], dtype=ctypes.c_int))
+ external.c_checkParams3D(
+ params,
+ model_no,
+ models_library_path,
+ )
+ return params
+
+
+def Model(
+ model_no: int,
+ phantom_size: Union[int, Tuple[int, int, int]],
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a 3D phantom based on the model no. in the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int, Tuple(int)): A scalar or a tuple with phantom dimensions.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 3D phantom.
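+
+ Example (an illustrative sketch; the model number is chosen only for
+ demonstration and the library-file location is an assumption that depends
+ on how TomoPhantom is installed):
+
+ >>> import os, tomophantom
+ >>> from tomophantom import TomoP3D
+ >>> lib3d = os.path.join(os.path.dirname(tomophantom.__file__), "phantomlib", "Phantom3DLibrary.dat")  # assumed install layout
+ >>> phantom3d = TomoP3D.Model(model_no=13, phantom_size=128, models_library_path=lib3d)
+ >>> phantom3d.shape
+ (128, 128, 128)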
+ """
+ if type(phantom_size) == tuple:
+ N1, N2, N3 = [int(i) for i in phantom_size]
+ else:
+ N1 = N2 = N3 = phantom_size
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ phantom = np.zeros([N1, N2, N3], dtype="float32", order="C")
+ else:
+ raise ValueError(
+ "The selected model is temporal (3D+time), use 'ModelTemporal' function instead"
+ )
+ external.c_model3d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ N1,
+ N2,
+ N3,
+ 0,
+ N1,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelSub(
+ model_no: int,
+ phantom_size: Union[int, Tuple[int, int, int]],
+ sub_index: Tuple[int, int],
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a subset of 3D phantom (vertical cutoff) based on the model no. in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int, Tuple(int)): A scalar or a tuple with phantom dimensions.
+ sub_index (Tuple[int, int]): a tuple containing 2 indices (lower, upper) to select a vertical subset.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 3D phantom.
+ """
+ if type(phantom_size) == tuple:
+ N1, N2, N3 = [int(i) for i in phantom_size]
+ else:
+ N1 = N2 = N3 = phantom_size
+
+ Z1, Z2 = [int(i) for i in sub_index]
+
+ rangecheck = Z2 > Z1
+ if not rangecheck:
+ raise ValueError("Upper index must be larger than the lower one")
+ rangecheck = Z1 >= 0 and Z1 < N3
+ if not rangecheck:
+ raise ValueError("Range of the lower index is incorrect")
+ rangecheck = Z2 >= 0 and Z2 <= N3
+ if not rangecheck:
+ raise ValueError("Range of the higher index is incorrect")
+
+ sub_size = Z2 - Z1 # the size of the vertical slab
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ phantom = np.zeros([sub_size, N2, N3], dtype="float32", order="C")
+ else:
+ raise ValueError(
+ "The selected model is temporal (3D+time), use 'ModelTemporal' function instead"
+ )
+ external.c_model3d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ N1,
+ N2,
+ N3,
+ Z1,
+ Z2,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelTemporal(
+ model_no: int,
+ phantom_size: Union[int, Tuple[int, int, int]],
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates 4D (3D+time) phantom based on the model no. in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int, Tuple(int)): A scalar or a tuple with phantom dimensions.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 4D phantom.
+ """
+ if type(phantom_size) == tuple:
+ N1, N2, N3 = [int(i) for i in phantom_size]
+ else:
+ N1 = N2 = N3 = phantom_size
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is stationary (3D), use 'Model' function instead"
+ )
+ else:
+ phantom = np.zeros([params[3], N1, N2, N3], dtype="float32", order="C")
+ external.c_model3d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ N1,
+ N2,
+ N3,
+ 0,
+ N1,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelTemporalSub(
+ model_no: int,
+ phantom_size: Union[int, Tuple[int, int, int]],
+ sub_index: Tuple[int, int],
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a subset of 4D phantom (vertical cutoff) based on the model no. in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int, Tuple(int)): A scalar or a tuple with phantom dimensions.
+ sub_index (Tuple[int, int]): a tuple containing 2 indices (lower, upper) to select a vertical subset.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 4D phantom.
+ """
+ if type(phantom_size) == tuple:
+ N1, N2, N3 = [int(i) for i in phantom_size]
+ else:
+ N1 = N2 = N3 = phantom_size
+
+ Z1, Z2 = [int(i) for i in sub_index]
+
+ rangecheck = Z2 > Z1
+ if not rangecheck:
+ raise ValueError("Upper index must be larger than the lower one")
+ rangecheck = Z1 >= 0 and Z1 < N3
+ if not rangecheck:
+ raise ValueError("Range of the lower index is incorrect")
+ rangecheck = Z2 >= 0 and Z2 <= N3
+ if not rangecheck:
+ raise ValueError("Range of the higher index is incorrect")
+
+ sub_size = Z2 - Z1 # the size of the vertical slab
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is temporal (3D+time), use 'ModelTemporal' function instead"
+ )
+ else:
+ phantom = np.zeros([params[3], sub_size, N2, N3], dtype="float32", order="C")
+ external.c_model3d(
+ np.ascontiguousarray(phantom),
+ model_no,
+ N1,
+ N2,
+ N3,
+ Z1,
+ Z2,
+ models_library_path,
+ )
+ return phantom
+
+
+def ModelSino(
+ model_no: int,
+ phantom_size: int,
+ detector_horiz: int,
+ detector_vert: int,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a 3D analytical sinogram for corresponding models in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int): A scalar for squared phantom dimensions (2D slice).
+ detector_horiz (int): Size of the horizontal detector in pixels.
+ detector_vert (int): Size of the vertical detector in pixels.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 3D projection data of the following shape [detector_vert, AngTot, detector_horiz].
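+
+ Example (an illustrative sketch; "lib3d" is assumed to point to the
+ Phantom3DLibrary.dat file, as in the Model example above):
+
+ >>> import numpy as np
+ >>> from tomophantom import TomoP3D
+ >>> angles = np.linspace(0.0, 180.0, 180, dtype="float32")
+ >>> proj3d = TomoP3D.ModelSino(13, 128, 200, 128, angles, lib3d)  # lib3d: assumed library path
+ >>> proj3d.shape
+ (128, 180, 200)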
+ """
+ if type(phantom_size) == tuple:
+ raise ValueError(
+ "Please give a scalar for the phantom size (2d slice), if one needs non-cubic data generator please see ModelSinoSub function"
+ )
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ # execute the model building function
+ sino3d = np.zeros(
+ [angles_total, detector_vert, detector_horiz], dtype="float32", order="C"
+ )
+ else:
+ raise ValueError(
+ "The selected model is temporal (4D), use 'ModelTemporalSino' function instead"
+ )
+ external.c_model_sino3d(
+ np.ascontiguousarray(sino3d),
+ model_no,
+ detector_horiz,
+ detector_vert,
+ 0,
+ detector_vert,
+ phantom_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return np.swapaxes(sino3d, 0, 1)
+
+
+def ModelSinoSub(
+ model_no: int,
+ phantom_size: int,
+ detector_horiz: int,
+ detector_vert: int,
+ sub_index: tuple,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a 3D analytical sinogram with vertical cutoff using sub_index tuple for corresponding models in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int): A scalar for squared phantom dimensions (2D slice).
+ detector_horiz (int): Size of the horizontal detector in pixels.
+ detector_vert (int): Size of the vertical detector in pixels.
+ sub_index (Tuple[int, int]): a tuple containing 2 indices (lower, upper) to select a vertical subset.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 3D projection data of the following shape [detector_vert, AngTot, detector_horiz].
+ """
+ if type(phantom_size) == tuple:
+ raise ValueError(
+ "Please give a scalar for phantom size, projection data cannot be obtained for non-cubic phantom"
+ )
+
+ Z1, Z2 = [int(i) for i in sub_index]
+
+ rangecheck = Z2 > Z1
+ if not rangecheck:
+ raise ValueError("Upper index must be larger than the lower one")
+ rangecheck = Z1 >= 0 and Z1 < detector_vert
+ if not rangecheck:
+ raise ValueError("Range of the lower index is incorrect")
+ rangecheck = Z2 >= 0 and Z2 <= detector_vert
+ if not rangecheck:
+ raise ValueError("Range of the higher index is incorrect")
+
+ sub_size = Z2 - Z1 # the size of the vertical slab
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ # execute the model building function
+ sino3d = np.zeros(
+ [angles_total, sub_size, detector_horiz], dtype="float32", order="C"
+ )
+ else:
+ raise ValueError(
+ "The selected model is temporal (4D), use 'ModelTemporalSino' function instead"
+ )
+ external.c_model_sino3d(
+ np.ascontiguousarray(sino3d),
+ model_no,
+ detector_horiz,
+ detector_vert,
+ Z1,
+ Z2,
+ phantom_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return np.swapaxes(sino3d, 0, 1)
+
+
+def ModelSinoTemporal(
+ model_no: int,
+ phantom_size: int,
+ detector_horiz: int,
+ detector_vert: int,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a 4D (3D+time) analytical sinogram for corresponding models in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Temporal model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int): A scalar for squared phantom dimensions (2D slice).
+ detector_horiz (int): Size of the horizontal detector in pixels.
+ detector_vert (int): Size of the vertical detector in pixels.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 4D projection data of the following shape [time_frames, detector_vert, AngTot, detector_horiz].
+ """
+
+ if type(phantom_size) == tuple:
+ raise ValueError(
+ "Please give a scalar for phantom size, projection data cannot be obtained for non-cubic phantom"
+ )
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is stationary (3D), use 'ModelSino' function instead"
+ )
+ else:
+ sino4d = np.zeros(
+ [params[3], angles_total, detector_vert, detector_horiz],
+ dtype="float32",
+ order="C",
+ )
+
+ external.c_model_sino3d(
+ np.ascontiguousarray(sino4d),
+ model_no,
+ detector_horiz,
+ detector_vert,
+ 0,
+ detector_vert,
+ phantom_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return np.swapaxes(sino4d, 1, 2)
+
+
+def ModelSinoTemporalSub(
+ model_no: int,
+ phantom_size: int,
+ detector_horiz: int,
+ detector_vert: int,
+ sub_index: tuple,
+ angles: np.ndarray,
+ models_library_path: Path,
+) -> np.ndarray:
+ """Generates a 4D analytical sinogram with vertical cutoff using sub_index tuple for corresponding models in
+ the library file Phantom3DLibrary.dat.
+
+ Args:
+ model_no (int): Temporal model number from the Phantom3DLibrary.dat library file.
+ phantom_size (int): A scalar for squared phantom dimensions (2D slice).
+ detector_horiz (int): Size of the horizontal detector in pixels.
+ detector_vert (int): Size of the vertical detector in pixels.
+ sub_index (Tuple[int, int]): a tuple containing 2 indices (lower, upper) to select a vertical subset.
+ angles (np.ndarray): Angles vector in degrees.
+ models_library_path (Path): A path to the library file.
+
+ Returns:
+ np.ndarray: The generated 4D projection data of the following shape [time_frames, detector_vert, AngTot, detector_horiz].
+ """
+ if type(phantom_size) == tuple:
+ raise ValueError(
+ "Please give a scalar for phantom size, projection data cannot be obtained for non-cubic phantom"
+ )
+
+ Z1, Z2 = [int(i) for i in sub_index]
+
+ rangecheck = Z2 > Z1
+ if not rangecheck:
+ raise ValueError("Upper index must be larger than the lower one")
+ rangecheck = Z1 >= 0 and Z1 < detector_vert
+ if not rangecheck:
+ raise ValueError("Range of the lower index is incorrect")
+ rangecheck = Z2 >= 0 and Z2 <= detector_vert
+ if not rangecheck:
+ raise ValueError("Range of the higher index is incorrect")
+
+ sub_size = Z2 - Z1 # the size of the vertical slab
+
+ # check the validity of model's parameters
+ params = _check_params3d(model_no, models_library_path)
+ __testParams3D(params) # check parameters and terminate if incorrect
+
+ angles_total = len(angles)
+ if params[3] == 1:
+ raise ValueError(
+ "The selected model is stationary (3D), use 'ModelSino' function instead"
+ )
+ else:
+ sino4d = np.zeros(
+ [params[3], angles_total, sub_size, detector_horiz],
+ dtype="float32",
+ order="C",
+ )
+
+ external.c_model_sino3d(
+ np.ascontiguousarray(sino4d),
+ model_no,
+ detector_horiz,
+ detector_vert,
+ Z1,
+ Z2,
+ phantom_size,
+ np.ascontiguousarray(angles),
+ angles_total,
+ models_library_path,
+ )
+ return np.swapaxes(sino4d, 1, 2)
+
+
+def Object(
+ phantom_size: Union[int, Tuple[int, int, int]], obj_params: Union[list, dict]
+) -> np.ndarray:
+ """Generates a 3D object for the standalone geometrical figure
+ that is parametrised in the "obj_params" dictionary.
+
+ Args:
+ phantom_size (int, Tuple(int)): A scalar or a tuple with phantom dimensions.
+ obj_params (a list of dicts or dict): A dictionary with parameters of an object, see demos.
+
+ Returns:
+ np.ndarray: The generated 3D object.
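+
+ Example (an illustrative sketch; the object parameters are arbitrary values
+ within the accepted ranges):
+
+ >>> from tomophantom.TomoP3D import Object, Objects3D
+ >>> obj = {"Obj": Objects3D.GAUSSIAN, "C0": 1.0, "x0": -0.25, "y0": 0.1, "z0": 0.0,
+ ...        "a": 0.2, "b": 0.35, "c": 0.7, "phi1": 30.0}  # arbitrary demo values
+ >>> figure3d = Object(phantom_size=128, obj_params=obj)
+ >>> figure3d.shape
+ (128, 128, 128)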
+ """
+
+ if type(phantom_size) == tuple:
+ N1, N2, N3 = [int(i) for i in phantom_size]
+ else:
+ N1 = N2 = N3 = phantom_size
+
+ if type(obj_params) is dict:
+ obj_params = [obj_params]
+
+ object3d = np.zeros([N1, N2, N3], dtype="float32", order="C")
+ # unpacking obj_params dictionary
+ for obj in obj_params:
+ if __testParams3d(obj):
+ objectName = obj["Obj"].value
+ C0 = obj["C0"]
+ x0 = obj["x0"]
+ y0 = obj["y0"]
+ z0 = obj["z0"]
+ a = obj["a"]
+ b = obj["b"]
+ c = obj["c"]
+ phi1 = obj["phi1"]
+ phi2 = 0.0
+ phi3 = 0.0
+ tt = 0
+ external.c_object3d(
+ np.ascontiguousarray(object3d),
+ N1,
+ N2,
+ N3,
+ 0,
+ N1,
+ objectName,
+ C0,
+ x0,
+ y0,
+ z0,
+ a,
+ b,
+ c,
+ phi1,
+ phi2,
+ phi3,
+ tt,
+ )
+ return object3d
+
+
+def ObjectSino(
+ phantom_size: int,
+ detector_horiz: int,
+ detector_vert: int,
+ angles: np.ndarray,
+ obj_params: Union[list, dict],
+) -> np.ndarray:
+ """Generates a 3D analytical projection data for the standalone geometrical figure
+ that is parametrised in the "obj_params" dictionary.
+
+ Args:
+ phantom_size (int): A scalar for phantom dimensions.
+ detector_horiz (int): Size of the horizontal detector in pixels.
+ detector_vert (int): Size of the vertical detector in pixels.
+ angles (np.ndarray): Angles vector in degrees.
+ obj_params (a list of dicts or dict): A dictionary with parameters of an object, see demos.
+
+ Returns:
+ np.ndarray: The generated 3D projection data for an object.
+ """
+ if type(phantom_size) == tuple:
+ raise ValueError(
+ "Please give a scalar for phantom size, projection data cannot be obtained for non-cubic phantom"
+ )
+
+ if type(obj_params) is dict:
+ obj_params = [obj_params]
+
+ angles_total = len(angles)
+ tt = 0
+ sino3d = np.zeros(
+ [angles_total, detector_vert, detector_horiz], dtype="float32", order="C"
+ )
+ # unpacking obj_params dictionary
+ for obj in obj_params:
+ if __testParams3d(obj):
+ objectName = obj["Obj"].value
+ C0 = obj["C0"]
+ x0 = obj["x0"]
+ y0 = obj["y0"]
+ z0 = obj["z0"]
+ a = obj["a"]
+ b = obj["b"]
+ c = obj["c"]
+ phi1 = obj["phi1"]
+ phi2 = 0.0
+ phi3 = 0.0
+ tt = 0
+ external.c_object_sino3d(
+ np.ascontiguousarray(sino3d),
+ detector_horiz,
+ detector_vert,
+ 0,
+ detector_vert,
+ phantom_size,
+ angles,
+ angles_total,
+ objectName,
+ C0,
+ x0,
+ y0,
+ z0,
+ a,
+ b,
+ c,
+ phi1,
+ phi2,
+ phi3,
+ tt,
+ )
+ return np.swapaxes(sino3d, 0, 1)
+
+
+def __testParams3d(obj):
+ """Performs a simple type check of the input parameters and a range check"""
+ if not type(obj) is dict:
+ raise TypeError("obj is not a dict {0}".format(type(obj)))
+ # type check
+ for k, v in obj.items():
+ if not isinstance(v, Number):
+ if not k == "Obj":
+ raise TypeError(k, "is not a Number")
+ typecheck = True
+
+ # range check
+ rangecheck = obj["x0"] >= -1 and obj["x0"] <= 1
+ if not rangecheck:
+ raise ValueError("x0 is out of range. Must be between -1 and 1")
+ rangecheck = rangecheck and obj["y0"] >= -1 and obj["y0"] <= 1
+ if not rangecheck:
+ raise ValueError("y0 is out of range. Must be between -1 and 1")
+ rangecheck = rangecheck and obj["z0"] >= -1 and obj["z0"] <= 1
+ if not rangecheck:
+ raise ValueError("z0 is out of range. Must be between -1 and 1")
+ rangecheck = rangecheck and obj["a"] > 0 and obj["a"] <= 2
+ if not rangecheck:
+ raise ValueError("a (object size) must be positive in [0,2] range")
+ rangecheck = rangecheck and obj["b"] > 0 and obj["b"] <= 2
+ if not rangecheck:
+ raise ValueError("b (object size) must be positive in [0,2] range")
+ rangecheck = rangecheck and obj["c"] > 0 and obj["c"] <= 2
+ if not rangecheck:
+ raise ValueError("c (object size) must be positive in [0,2] range")
+ return rangecheck and typecheck
+
+
+def __testParams3D(obj):
+ if obj[0] == 0:
+ raise TypeError(
+ "Check if the library file <Phantom3DLibrary.dat> exists, the given path is correct and the syntax is valid"
+ )
+ if obj[1] == 0:
+ raise TypeError(
+ "The given model is not found, check available models in <Phantom3DLibrary.dat> file"
+ )
+ if obj[2] == 0:
+ raise TypeError(
+ "Components number cannot be negative, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[3] == 0:
+ raise TypeError(
+ "TimeSteps cannot be negative, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[4] == 0:
+ raise TypeError("Unknown name of the object, check <Phantom3DLibrary.dat> file")
+ if obj[5] == 0:
+ raise TypeError(
+ "C0 should not be equal to zero, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[6] == 0:
+ raise TypeError(
+ "x0 (object position) must be in [-1,1] range, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[7] == 0:
+ raise TypeError(
+ "y0 (object position) must be in [-1,1] range, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[8] == 0:
+ raise TypeError(
+ "z0 (object position) must be in [-1,1] range, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[9] == 0:
+ raise TypeError(
+ "a (object size) must be positive in [0,2] range, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[10] == 0:
+ raise TypeError(
+ "b (object size) must be positive in [0,2] range, check <Phantom3DLibrary.dat> file"
+ )
+ if obj[11] == 0:
+ raise TypeError(
+ "c (object size) must be positive in [0,2] range, check <Phantom3DLibrary.dat> file"
+ )
+ return 0
+
+#!/usr/bin/env python3
+"""
+Copyright 2023
+The University of Manchester & Diamond Light Source
+Licensed under the Apache License, Version 2.0.
+"""
+import numpy as np
+import random
+from typing import Union, Any
+
+
+def artefacts_mix(data: np.ndarray, **artefacts_dict: Any) -> Union[np.ndarray, list]:
+ """A module to generate and apply a mix of various typical imaging artefacts to add to the simulated data.
+ One can build various dictionaries with the keyword arguments specified
+ below and pass them to the function as: `artefacts_mix(data, **noise_dict, **zingers, **etc)`.
+
+ DISCLAIMER: Note that most of the features are experimental and do not always
+ provide an accurate physical model of the corresponding imaging artefacts.
+
+ Args:
+ data (np.ndarray): 2D or 3D numpy array (sinogram or 3D projection data).
+ The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ **artefacts_dict (dict): A dictionary with keyword arguments related to different artefact type.
+
+ Keyword Args:
+ noise_type (str): Define noise type as 'Poisson' or 'Gaussian'.
+ noise_amplitude (int, float): Photon flux for Poisson noise or standard deviation for Gaussian noise.
+ noise_seed (int): Seed for the noise generator; 'None' gives non-reproducible (random) noise.
+ noise_prelog (bool): Set to True if prelog (raw) data required.
+ zingers_percentage (float): The amount of zingers (dead pixels, outliers) to be added to the data.
+ zingers_modulus (int): Modulus to control the amount of 4/6 pixel clusters to be added.
+ stripes_percentage (float): The amount of stripes in the data (rings in reconstruction).
+ stripes_maxthickness (int): Maxthickness defines the maximal thickness of a stripe.
+ stripes_intensity (float): Controls the intensity levels of stripes.
+ stripes_type (str): Stripe type, can be 'partial' or 'full'.
+ stripes_variability (float): Variability multiplier to incorporate the change of intensity in stripe.
+ datashifts_maxamplitude_pixel (int): Controls pixel-wise (integer) misalignment in data.
+ datashifts_maxamplitude_subpixel (float): Controls subpixel misalignment in data.
+ pve_strength (int): The strength of partial volume effect, responsible for the limited resolution of a scanner.
+ fresnel_dist_observation (int): The observation distance for the Fresnel propagator.
+ fresnel_scale_factor (float): Scaling factor for the Fresnel propagator.
+ fresnel_wavelenght (float): Wavelength for the Fresnel propagator.
+ verbose (bool): Make the output of modules verbose.
+
+ Returns:
+ np.ndarray: 2D/3D numpy array with artefacts applied to input data.
+ [np.ndarray, shifts]: a list of 2D/3D numpy array and simulated shifts.
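+
+ Example (an illustrative sketch; the keyword values are arbitrary and the
+ input is a constant dummy sinogram):
+
+ >>> import numpy as np
+ >>> from tomophantom.artefacts import artefacts_mix
+ >>> sino = np.ones((180, 256), dtype="float32")  # dummy (angles, detectors) data
+ >>> noise_dict = {"noise_type": "Poisson", "noise_amplitude": 10000, "noise_seed": 0}
+ >>> stripes_dict = {"stripes_percentage": 1.0, "stripes_maxthickness": 2,
+ ...                 "stripes_intensity": 0.25, "stripes_type": "full",
+ ...                 "stripes_variability": 0.0}
+ >>> noisy_sino = artefacts_mix(sino, verbose=False, **noise_dict, **stripes_dict)
+ >>> noisy_sino.shape
+ (180, 256)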
+ """
+ ####### VERBOSE ########
+ if "verbose" not in artefacts_dict:
+ verbose = True
+ else:
+ verbose = artefacts_dict["verbose"]
+ ####### NOISE DICTIONARY########
+ _noise_: dict = {}
+ if "noise_type" not in artefacts_dict:
+ _noise_["noise_type"] = None
+ else:
+ _noise_["noise_type"] = artefacts_dict["noise_type"]
+ if "noise_amplitude" not in artefacts_dict:
+ _noise_["noise_amplitude"] = 1e5
+ else:
+ _noise_["noise_amplitude"] = artefacts_dict["noise_amplitude"]
+ if "noise_seed" not in artefacts_dict:
+ _noise_["noise_seed"] = None
+ else:
+ _noise_["noise_seed"] = artefacts_dict["noise_seed"]
+ if "noise_prelog" not in artefacts_dict:
+ _noise_["noise_prelog"] = None
+ else:
+ _noise_["noise_prelog"] = artefacts_dict["noise_prelog"]
+ ####### ZINGERS ########
+ _zingers_: dict = {}
+ if "zingers_percentage" not in artefacts_dict:
+ _zingers_["zingers_percentage"] = None
+ else:
+ _zingers_["zingers_percentage"] = artefacts_dict["zingers_percentage"]
+ if "zingers_modulus" not in artefacts_dict:
+ _zingers_["zingers_modulus"] = 10
+ else:
+ _zingers_["zingers_modulus"] = artefacts_dict["zingers_modulus"]
+ ####### STRIPES ########
+ _stripes_: dict = {}
+ if "stripes_percentage" not in artefacts_dict:
+ _stripes_["stripes_percentage"] = None
+ else:
+ _stripes_["stripes_percentage"] = artefacts_dict["stripes_percentage"]
+ if "stripes_maxthickness" not in artefacts_dict:
+ _stripes_["stripes_maxthickness"] = 1.0
+ else:
+ _stripes_["stripes_maxthickness"] = artefacts_dict["stripes_maxthickness"]
+ if "stripes_intensity" not in artefacts_dict:
+ _stripes_["stripes_intensity"] = 0.1
+ else:
+ _stripes_["stripes_intensity"] = artefacts_dict["stripes_intensity"]
+ if "stripes_type" not in artefacts_dict:
+ _stripes_["stripes_type"] = "full"
+ else:
+ _stripes_["stripes_type"] = artefacts_dict["stripes_type"]
+ if "stripes_variability" not in artefacts_dict:
+ _stripes_["stripes_variability"] = 0.0
+ else:
+ _stripes_["stripes_variability"] = artefacts_dict["stripes_variability"]
+ ####### DATASHIFTS ########
+ _datashifts_: dict = {}
+ if "datashifts_maxamplitude_pixel" not in artefacts_dict:
+ _datashifts_["datashifts_maxamplitude_pixel"] = None
+ else:
+ _datashifts_["datashifts_maxamplitude_pixel"] = artefacts_dict[
+ "datashifts_maxamplitude_pixel"
+ ]
+ if "datashifts_maxamplitude_subpixel" not in artefacts_dict:
+ _datashifts_["datashifts_maxamplitude_subpixel"] = None
+ else:
+ _datashifts_["datashifts_maxamplitude_subpixel"] = artefacts_dict[
+ "datashifts_maxamplitude_subpixel"
+ ]
+ ####### PVE ########
+ _pve_: dict = {}
+ if "pve_strength" not in artefacts_dict:
+ _pve_["pve_strength"] = None
+ else:
+ _pve_["pve_strength"] = artefacts_dict["pve_strength"]
+ _fresnel_propagator_: dict = {}
+ if "fresnel_dist_observation" not in artefacts_dict:
+ _fresnel_propagator_["fresnel_dist_observation"] = None
+ else:
+ _fresnel_propagator_["fresnel_dist_observation"] = artefacts_dict[
+ "fresnel_dist_observation"
+ ]
+ if "fresnel_scale_factor" not in artefacts_dict:
+ _fresnel_propagator_["fresnel_scale_factor"] = 10
+ else:
+ _fresnel_propagator_["fresnel_scale_factor"] = artefacts_dict[
+ "fresnel_scale_factor"
+ ]
+ if "fresnel_wavelenght" not in artefacts_dict:
+ _fresnel_propagator_["fresnel_wavelenght"] = 0.0001
+ else:
+ _fresnel_propagator_["fresnel_wavelenght"] = artefacts_dict[
+ "fresnel_wavelenght"
+ ]
+ ###########################################################################
+ ################Applying artefacts and noise to the data###################
+ ###########################################################################
+ # PARTIAL VOLUME EFFECT
+ if _pve_["pve_strength"] is not None:
+ sino_artifacts = np.float32(pve(data=data, pve_strength=_pve_["pve_strength"]))
+ if verbose is True:
+ print("Partial volume effect (PVE) has been simulated.")
+ else:
+ sino_artifacts = np.float32(data)
+ # FRESNEL PROPAGATOR
+ if _fresnel_propagator_["fresnel_dist_observation"] is not None:
+ sino_artifacts = np.float32(
+ fresnel_propagator(
+ data=sino_artifacts,
+ dist_observation=_fresnel_propagator_["fresnel_dist_observation"],
+ scale_factor=_fresnel_propagator_["fresnel_scale_factor"],
+ wavelenght=_fresnel_propagator_["fresnel_wavelenght"],
+ )
+ )
+ if verbose is True:
+ print("Fresnel propagator has been simulated.")
+ # ZINGERS
+ if _zingers_["zingers_percentage"] is not None:
+ sino_artifacts = np.float32(
+ zingers(
+ data=sino_artifacts,
+ percentage=_zingers_["zingers_percentage"],
+ modulus=_zingers_["zingers_modulus"],
+ )
+ )
+ if verbose is True:
+ print("Zingers have been added to the data.")
+ # STRIPES
+ if _stripes_["stripes_percentage"] is not None:
+ sino_artifacts = np.float32(
+ stripes(
+ data=sino_artifacts,
+ percentage=_stripes_["stripes_percentage"],
+ maxthickness=_stripes_["stripes_maxthickness"],
+ intensity_thresh=_stripes_["stripes_intensity"],
+ stripe_type=_stripes_["stripes_type"],
+ variability=_stripes_["stripes_variability"],
+ )
+ )
+ if verbose is True:
+ print("Stripes leading to ring artefacts have been simulated.")
+ # DATASHIFTS
+ if _datashifts_["datashifts_maxamplitude_pixel"] is not None:
+ [sino_artifacts, shifts] = datashifts(
+ data=sino_artifacts,
+ maxamplitude=_datashifts_["datashifts_maxamplitude_pixel"],
+ )
+ if verbose is True:
+ print("Data shifts have been simulated.")
+ if _datashifts_["datashifts_maxamplitude_subpixel"] is not None:
+ [sino_artifacts, shifts] = datashifts_subpixel(
+ data=sino_artifacts,
+ maxamplitude=_datashifts_["datashifts_maxamplitude_subpixel"],
+ )
+ if verbose is True:
+ print("Data shifts (in subpixel precision) have been simulated.")
+ # NOISE
+ if _noise_["noise_type"] is not None:
+ sino_artifacts = noise(
+ data=sino_artifacts,
+ sigma=_noise_["noise_amplitude"],
+ noisetype=_noise_["noise_type"],
+ seed=_noise_["noise_seed"],
+ prelog=_noise_["noise_prelog"],
+ )
+ if verbose is True:
+ print("{} noise has been added to the data.".format(_noise_["noise_type"]))
+
+ if (_datashifts_["datashifts_maxamplitude_pixel"] is not None) or (
+ _datashifts_["datashifts_maxamplitude_subpixel"] is not None
+ ):
+ return [np.float32(sino_artifacts), shifts]
+ else:
+ return np.float32(sino_artifacts)
+
+
+def stripes(
+ data: np.ndarray,
+ percentage: float,
+ maxthickness: int,
+ intensity_thresh: float,
+ stripe_type: str,
+ variability: float,
+) -> np.ndarray:
+ """Function to add stripes (constant offsets) to sinograms or 3D projection data which results in rings in the
+ reconstructed image.
+
+ Args:
+ data (np.ndarray): 2D sinogram (anglesDim, DetectorsHoriz) or 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ percentage (float): Percentage defines the amount of stripes in the data.
+ maxthickness (int): Defines the maximal thickness of a stripe.
+ intensity_thresh (float): Controls the intensity levels of stripes.
+ stripe_type (str): Choose between 'partial' or 'full'.
+ variability (float): Variability multiplier to incorporate change of intensity in the stripe.
+
+ Raises:
+ ValueError: Percentage is out of range.
+ ValueError: Thickness is out of range.
+
+ Returns:
+ np.ndarray: 2D sinogram or 3D projection data with stripes
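+
+ Example (an illustrative sketch of a direct call on a constant dummy sinogram;
+ parameter values are arbitrary):
+
+ >>> import numpy as np
+ >>> from tomophantom.artefacts import stripes
+ >>> sino = np.ones((180, 256), dtype="float32")  # dummy (angles, detectors) data
+ >>> sino_with_stripes = stripes(sino, percentage=1.0, maxthickness=2,
+ ...                             intensity_thresh=0.25, stripe_type="full",
+ ...                             variability=0.0)
+ >>> sino_with_stripes.shape
+ (180, 256)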
+ """
+ if data.ndim == 2:
+ (anglesDim, DetectorsDimH) = np.shape(data)
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ if 0 < percentage <= 100:
+ pass
+ else:
+ raise ValueError("percentage must be larger than zero but smaller than 100")
+ if 0 <= maxthickness <= 10:
+ pass
+ else:
+ raise ValueError("maximum thickness must be in [0,10] range")
+ if stripe_type != "partial":
+ stripe_type = "full"
+ sino_stripes = data.copy()
+ max_intensity = np.max(sino_stripes)
+ range_detect = int((np.float32(DetectorsDimH)) * (np.float32(percentage) / 100.0))
+ if data.ndim == 2:
+ for x in range(range_detect):
+ for mm in range(0, 20):
+ randind = random.randint(0, DetectorsDimH - 1) # generate random index
+ if sino_stripes[0, randind] != 0.0:
+ break
+ if stripe_type == "partial":
+ randind_ang1 = random.randint(0, anglesDim)
+ randind_ang2 = random.randint(0, anglesDim)
+ else:
+ randind_ang1 = 0
+ randind_ang2 = anglesDim
+ randthickness = random.randint(0, maxthickness) # generate random thickness
+ randintens = random.uniform(-1.0, 0.5) # generate random multiplier
+ intensity = max_intensity * randintens * intensity_thresh
+ if (randind > 0 + randthickness) & (
+ randind < DetectorsDimH - randthickness
+ ):
+ for x1 in range(-randthickness, randthickness + 1):
+ if variability != 0.0:
+ intensity_off = variability * max_intensity
+ else:
+ intensity_off = 0.0
+ for ll in range(randind_ang1, randind_ang2):
+ sino_stripes[ll, randind + x1] += intensity + intensity_off
+ intensity_off += (
+ [-1, 1][random.randrange(2)] * variability * max_intensity
+ )
+ # sino_stripes[randind_ang1:randind_ang2,randind+x1] += intensity
+ else:
+ for j in range(DetectorsDimV):
+ for x in range(range_detect):
+ for mm in range(0, 20):
+ randind = random.randint(
+ 0, DetectorsDimH - 1
+ ) # generate random index
+ if sino_stripes[j, 0, randind] != 0.0:
+ break
+ if stripe_type == "partial":
+ randind_ang1 = random.randint(0, anglesDim)
+ randind_ang2 = random.randint(0, anglesDim)
+ else:
+ randind_ang1 = 0
+ randind_ang2 = anglesDim
+ randthickness = random.randint(
+ 0, maxthickness
+ ) # generate random thickness
+ randintens = random.uniform(-1, 0.5) # generate random multiplier
+ intensity = max_intensity * randintens * intensity_thresh
+ if (randind > 0 + randthickness) & (
+ randind < DetectorsDimH - randthickness
+ ):
+ for x1 in range(-randthickness, randthickness + 1):
+ if variability != 0.0:
+ intensity_off = variability * max_intensity
+ else:
+ intensity_off = 0.0
+ for ll in range(randind_ang1, randind_ang2):
+ sino_stripes[j, ll, randind + x1] += (
+ intensity + intensity_off
+ )
+ intensity_off += (
+ [-1, 1][random.randrange(2)]
+ * variability
+ * max_intensity
+ )
+ return sino_stripes
+
+
+def zingers(data: np.ndarray, percentage: float, modulus: int) -> np.ndarray:
+ """Adding zingers (zero single pixels or small 4 pixels clusters)
+ to sinograms or 6 voxels to 3D projection data.
+
+ Args:
+ data (np.ndarray): 2D sinogram or 3D projection data. The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ percentage (float): The amount of zingers to be added to the data.
+ modulus (int): Modulus to control the amount of 4/6 pixel clusters to be added.
+
+ Raises:
+ ValueError: Percentage is out of range.
+ ValueError: Modulus must be positive.
+
+ Returns:
+ np.ndarray: 2D or 3D array with zingers.
+ """
+ if data.ndim == 2:
+ (anglesDim, DetectorsDimH) = np.shape(data)
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ if 0.0 < percentage <= 100.0:
+ pass
+ else:
+ raise ValueError("percentage must be larger than zero but smaller than 100")
+ if modulus > 0:
+ pass
+ else:
+ raise ValueError("Modulus integer must be positive")
+ sino_zingers = data.copy()
+ length_sino = np.size(sino_zingers)
+ num_values = int((length_sino) * (np.float32(percentage) / 100.0))
+ sino_zingers_fl = sino_zingers.flatten()
+ for x in range(num_values):
+ randind = random.randint(0, length_sino - 1) # generate random index
+ sino_zingers_fl[randind] = 0
+ if (x % int(modulus)) == 0:
+ if data.ndim == 2:
+ if (randind > DetectorsDimH) & (randind < length_sino - DetectorsDimH):
+ sino_zingers_fl[randind + 1] = 0
+ sino_zingers_fl[randind - 1] = 0
+ sino_zingers_fl[randind + DetectorsDimH] = 0
+ sino_zingers_fl[randind - DetectorsDimH] = 0
+ else:
+ if (randind > DetectorsDimH * DetectorsDimV) & (
+ randind < length_sino - DetectorsDimH * DetectorsDimV
+ ):
+ sino_zingers_fl[randind + 1] = 0
+ sino_zingers_fl[randind - 1] = 0
+ sino_zingers_fl[randind + DetectorsDimH] = 0
+ sino_zingers_fl[randind - DetectorsDimH] = 0
+ sino_zingers_fl[randind + DetectorsDimH * DetectorsDimV] = 0
+ sino_zingers_fl[randind - DetectorsDimH * DetectorsDimV] = 0
+ sino_zingers[:] = sino_zingers_fl.reshape(sino_zingers.shape)
+ return sino_zingers
+
+
+def noise(
+ data: np.ndarray,
+ sigma: Union[int, float],
+ noisetype: str,
+ seed: bool = True,
+ prelog: bool = False,
+) -> Union[list, np.ndarray]:
+ """Adding random noise to data (adapted from LD-CT simulator)
+
+ Args:
+ data (np.ndarray): N-d array
+ sigma (Union[int, float]): photon flux (int) for Poisson or standard deviation (float) for Gaussian noise
+ noisetype (str): 'Gaussian' or 'Poisson'
+ seed (bool, optional): Initiate random seed with True. Defaults to True.
+ prelog (bool, optional): If True, the raw (pre-log) data are also returned as a list [noisy, raw]. Defaults to False.
+
+ Returns:
+ np.ndarray: N-d array
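+
+ Example (an illustrative sketch on a constant dummy sinogram; sigma here is
+ the photon flux used for Poisson noise):
+
+ >>> import numpy as np
+ >>> from tomophantom.artefacts import noise
+ >>> sino = np.ones((180, 256), dtype="float32")  # dummy (angles, detectors) data
+ >>> noisy = noise(sino, sigma=10000, noisetype="Poisson")
+ >>> noisy.shape
+ (180, 256)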
+ """
+ data_noisy = data.copy()
+ maxData = np.max(data)
+ data_noisy = data / maxData # normalising
+ if seed is True:
+ np.random.seed(int(seed))
+ if noisetype == "Gaussian":
+ # add normal Gaussian noise
+ data_noisy += np.random.normal(loc=0.0, scale=sigma, size=np.shape(data_noisy))
+ data_noisy[data_noisy < 0] = 0
+ data_noisy *= maxData
+ elif noisetype == "Poisson":
+ # add Poisson noise
+ if maxData > 0:
+ ri = 1.0
+ sig = np.sqrt(
+ 11
+ ) # standard variance of electronic noise, a characteristic of a CT scanner
+ yb = (
+ sigma * np.exp(-data_noisy) + ri
+ ) # exponential transform to incident flux
+ data_raw = np.random.poisson(yb) + np.sqrt(sig) * np.reshape(
+ np.random.randn(np.size(yb)), np.shape(yb)
+ )
+ li_hat = -np.log((data_raw - ri) / sigma) * maxData # log corrected data
+ li_hat[(data_raw - ri) <= 0] = 0.0
+ data_noisy = li_hat.copy()
+ else:
+ print("Select 'Gaussian' or 'Poisson' for the noise type")
+ # raw (pre-log) data only exists for the Poisson branch above
+ if prelog is True and noisetype == "Poisson":
+ return [data_noisy, data_raw]
+ else:
+ return data_noisy
+
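+# Illustrative usage sketch (not part of the library): adding Poisson noise to a
+# clean sinogram-like array with `noise` defined above. The array shape and the
+# flux value (sigma) below are assumptions chosen for demonstration only.
+def _example_noise_usage():
+    clean = np.ones((180, 256), dtype="float32") * 0.5   # synthetic "clean" sinogram
+    noisy = noise(clean, sigma=8000, noisetype="Poisson")
+    # with prelog=True a list [noisy, raw] is returned instead of a single array
+    noisy_and_raw = noise(clean, sigma=8000, noisetype="Poisson", prelog=True)
+    return noisy, noisy_and_raw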
+
+[docs]def datashifts(data: np.ndarray, maxamplitude: int) -> list:
+ """Function to add random pixel shifts to 3D or 3D data as an offset for each angular position.
+
+ Args:
+ data (np.ndarray): 2D sinogram or 3D projection data. The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ maxamplitude (int): The misilighnment ammplitude. Defines the maximal amplitude of each shift in pixels.
+
+ Returns:
+ list: 2D or 3d data with misalignment and shifts vectors [data, shifts].
+ """
+ if data.ndim == 2:
+ (anglesDim, DetectorsDimH) = np.shape(data)
+ shifts = np.zeros(anglesDim, dtype="int8") # the vector of shifts
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ shifts = np.zeros([anglesDim, 2], dtype="int8") # the 2D vector of shifts
+
+ sino_shifts = np.zeros(np.shape(data), dtype="float32")
+ non = lambda s: s if s < 0 else None
+ mom = lambda s: max(0, s)
+ for x in range(anglesDim):
+ rand_shift = random.randint(
+ -maxamplitude, maxamplitude
+ ) # generate random shift (int)
+ if data.ndim == 2:
+ shifts[x] = rand_shift
+ projection = data[x, :] # extract 1D projection
+ projection_shift = np.zeros(np.shape(projection), dtype="float32")
+ projection_shift[mom(rand_shift) : non(rand_shift)] = projection[
+ mom(-rand_shift) : non(-rand_shift)
+ ]
+ sino_shifts[x, :] = projection_shift
+ else:
+ rand_shift2 = random.randint(
+ -maxamplitude, maxamplitude
+ ) # generate random shift (int)
+ shifts[x, 0] = rand_shift2
+ shifts[x, 1] = rand_shift
+ projection2D = data[:, x, :] # extract 2D projection
+ projection2D_shift = np.zeros(np.shape(projection2D), dtype="float32")
+ projection2D_shift[
+ mom(rand_shift) : non(rand_shift), mom(rand_shift2) : non(rand_shift2)
+ ] = projection2D[
+ mom(-rand_shift) : non(-rand_shift),
+ mom(-rand_shift2) : non(-rand_shift2),
+ ]
+ sino_shifts[:, x, :] = projection2D_shift
+ return [sino_shifts, shifts]
+
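+# Illustrative usage sketch (not part of the library): applying random integer
+# detector shifts to a 2D sinogram with `datashifts` above. The shape and the
+# amplitude below are assumptions for demonstration only.
+def _example_datashifts_usage():
+    sino = np.ones((180, 256), dtype="float32")
+    shifted_sino, applied_shifts = datashifts(sino, maxamplitude=5)
+    return shifted_sino, applied_shifts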
+
+[docs]def datashifts_subpixel(data: np.ndarray, maxamplitude: float) -> list:
+ """Function to add random sub-pixel shifts to 3D or 3D data as an offset for each angular position.
+
+ Args:
+ data (np.ndarray): 2D sinogram or 3D projection data. The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ maxamplitude (float): The misilighnment ammplitude. Defines the maximal amplitude of each shift.
+
+ Returns:
+ list: 2D or 3d data with misalignment and shifts vectors [data, shifts].
+ """
+ from skimage import transform as tf
+
+ if data.ndim == 2:
+ shifts = np.zeros([1, 2], dtype="float32")
+ random_shift_x = random.uniform(
+ -maxamplitude, maxamplitude
+ ) # generate a random floating point number
+ random_shift_y = random.uniform(
+ -maxamplitude, maxamplitude
+ ) # generate a random floating point number
+
+ tform = tf.SimilarityTransform(translation=(-random_shift_x, -random_shift_y))
+ sino_shifts = tf.warp(data, tform, order=5)
+ # record the applied shifts so they are returned to the caller
+ shifts[0, 0] = random_shift_x
+ shifts[0, 1] = random_shift_y
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ shifts = np.zeros([anglesDim, 2], dtype="float32") # the 2D vector of shifts
+ sino_shifts = np.zeros(np.shape(data), dtype="float32")
+ for x in range(anglesDim):
+ random_shift_x = random.uniform(
+ -maxamplitude, maxamplitude
+ ) # generate a random floating point number
+ random_shift_y = random.uniform(
+ -maxamplitude, maxamplitude
+ ) # generate a random floating point number
+
+ projection2D = data[:, x, :] # extract 2D projection
+ tform = tf.SimilarityTransform(
+ translation=(-random_shift_x, -random_shift_y)
+ )
+ projection_shifted = tf.warp(projection2D, tform, order=5)
+
+ shifts[x, 0] = random_shift_x
+ shifts[x, 1] = random_shift_y
+
+ sino_shifts[:, x, :] = projection_shifted
+ return [sino_shifts, shifts]
+
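+# Illustrative usage sketch (not part of the library): sub-pixel misalignment of
+# 3D projection data with `datashifts_subpixel` above (requires scikit-image).
+# The data shape and amplitude below are assumptions for demonstration only.
+def _example_datashifts_subpixel_usage():
+    proj3d = np.ones((64, 90, 64), dtype="float32")   # (DetectorsVert, anglesDim, DetectorsHoriz)
+    shifted, applied_shifts = datashifts_subpixel(proj3d, maxamplitude=1.5)
+    return shifted, applied_shifts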
+
+[docs]def pve(data: np.ndarray, pve_strength: int) -> np.ndarray:
+ """Applying Partial Volume effect (smoothing) to data.
+
+ Args:
+ data (np.ndarray): 2D sinogram or 3D projection data. The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ pve_strength (int): The level of smoothing, defined by kernel size.
+
+ Raises:
+ ValueError: Smoothing kernel must be positive
+
+ Returns:
+ np.ndarray: Smoothed 2D or 3D data.
+ """
+
+ from scipy.ndimage import gaussian_filter
+
+ data_pve = data.copy()
+ if pve_strength <= 0:
+ raise ValueError("Smoothing kernel must be positive")
+ if data.ndim == 2:
+ (anglesDim, DetectorsDimH) = np.shape(data)
+ for x in range(anglesDim):
+ data_pve[x, :] = gaussian_filter(data_pve[x, :], pve_strength)
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ for x in range(anglesDim):
+ data_pve[:, x, :] = gaussian_filter(data_pve[:, x, :], pve_strength)
+ return data_pve
+
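+# Illustrative usage sketch (not part of the library): smoothing a sinogram with
+# the partial-volume-effect function `pve` above. The shape and kernel size are
+# assumptions for demonstration only.
+def _example_pve_usage():
+    sino = np.zeros((180, 256), dtype="float32")
+    sino[:, 100:156] = 1.0                  # a simple bright band to be blurred
+    smoothed = pve(sino, pve_strength=3)
+    return smoothed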
+
+[docs]def fresnel_propagator(
+ data: np.ndarray, dist_observation: int, scale_factor: float, wavelenght: float
+) -> np.ndarray:
+ """Fresnel propagator applied to data.
+ Adapted from the script by Adrián Carbajal-Domínguez, adrian.carbajal@ujat.mx
+
+ Args:
+ data (np.ndarray): 2D sinogram or 3D projection data. The input data must be of the following shape: 2D sinogram (anglesDim, DetectorsHoriz),
+ 3D projection data (DetectorsVert, anglesDim, DetectorsHoriz).
+ dist_observation (int): The observation distance for the Fresnel propagator.
+ scale_factor (float): Scaling factor applied to the propagation distance and spatial frequencies.
+ wavelenght (float): Wavelength.
+
+ Returns:
+ np.ndarray: 2D or 3D data with propagation.
+ """
+
+ data_fresnel = data.copy()
+ if data.ndim == 2:
+ (anglesDim, DetectorsDimH) = np.shape(data)
+ n1 = DetectorsDimH * 0.5
+ # Define the angular spectrum coordinates
+ u = np.arange(-n1, n1, 1)
+ # Define the propagation matrix
+ propagator = np.exp(
+ 2
+ * np.pi
+ * 1j
+ * (dist_observation / scale_factor)
+ * np.sqrt((1 / wavelenght) ** 2 - (u / 10) ** 2)
+ )
+ #### Compute the Fast Fourier Transform of each 1D projection
+ for x in range(anglesDim):
+ f = np.fft.fft(data_fresnel[x, :])
+ # Correct the low and high frequencies
+ fshift = np.fft.fftshift(f)
+ # multiply both matrices: Fourier transform and the propagator matrices.
+ field = fshift * propagator
+ # Calculate the inverse Fourier transform
+ field2 = np.fft.ifft(field)
+ data_fresnel[x, :] = np.abs(field2)
+ else:
+ (DetectorsDimV, anglesDim, DetectorsDimH) = np.shape(data)
+ #### Define the size of the propagation function p(u,v). It has to be of the same size as the image.
+ n1 = DetectorsDimV * 0.5
+ n2 = DetectorsDimH * 0.5
+ # Define the angular spectrum coordinates
+ u = np.arange(-n1, n1, 1)
+ v = np.arange(-n2, n2, 1)
+ U, V = np.meshgrid(u, v)
+ # Define the propagation matrix
+ propagator = np.exp(
+ 2
+ * np.pi
+ * 1j
+ * (dist_observation / scale_factor)
+ * np.sqrt(
+ (1 / wavelenght) ** 2
+ - (U / scale_factor) ** 2
+ - (V / scale_factor) ** 2
+ )
+ )
+ #### Compute the Fast Fourier Transform of each 2D projection
+ for x in range(anglesDim):
+ f = np.fft.fft2(data_fresnel[:, x, :])
+ # Correct the low and high frequencies
+ fshift = np.fft.fftshift(f)
+ # multiply both matrices: Fourier transform and the propagator matrices.
+ field = fshift * np.transpose(propagator)
+ # Calculate the inverse Fourier transform
+ field2 = np.fft.ifft2(field)
+ data_fresnel[:, x, :] = np.abs(field2)
+ return data_fresnel
+
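+# Illustrative usage sketch (not part of the library): propagating a 2D sinogram
+# with `fresnel_propagator` above. The shape, distance, scaling and wavelength
+# values are assumptions for demonstration only.
+def _example_fresnel_propagator_usage():
+    sino = np.zeros((180, 256), dtype="float32")
+    sino[:, 96:160] = 1.0
+    propagated = fresnel_propagator(sino, dist_observation=20, scale_factor=10, wavelenght=0.003)
+    return propagated
+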
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import ctypes
+import ctypes.util
+import sys, platform
+import warnings
+
+
+
+
+
+from tomophantom.TomoP2D import *
+
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+Module for internal utility functions.
+"""
+
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+import ctypes
+import logging
+import multiprocessing as mp
+
+import numpy as np
+
+logger = logging.getLogger(__name__)
+
+__all__ = [
+ "as_ndarray",
+ "as_dtype",
+ "as_float32",
+ "as_int32",
+ "as_uint8",
+ "as_uint16",
+ "as_c_float_p",
+ "as_c_bool_p",
+ "as_c_uint8_p",
+ "as_c_uint16_p",
+ "as_c_int",
+ "as_c_int_p",
+ "as_c_float",
+ "as_c_char_p",
+ "as_c_void_p",
+ "as_c_size_t",
+]
+
+
+[docs]def as_ndarray(arr, dtype=None, copy=False):
+ if not isinstance(arr, np.ndarray):
+ arr = np.array(arr, dtype=dtype, copy=copy)
+ return arr
+
+
+[docs]def as_dtype(arr, dtype, copy=False):
+ if not arr.dtype == dtype:
+ arr = np.array(arr, dtype=dtype, copy=copy)
+ return arr
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+[docs]def as_c_float_p(arr):
+ c_float_p = ctypes.POINTER(ctypes.c_float)
+ return arr.ctypes.data_as(c_float_p)
+
+
+[docs]def as_c_bool_p(arr):
+ c_bool_p = ctypes.POINTER(ctypes.c_bool)
+ return arr.ctypes.data_as(c_bool_p)
+
+
+[docs]def as_c_uint8_p(arr):
+ c_uint8_p = ctypes.POINTER(ctypes.c_uint8)
+ return arr.ctypes.data_as(c_uint8_p)
+
+
+[docs]def as_c_uint16_p(arr):
+ c_uint16_p = ctypes.POINTER(ctypes.c_uint16)
+ return arr.ctypes.data_as(c_uint16_p)
+
+
+
+
+
+def as_c_long(arr):
+ return ctypes.c_long(arr)
+
+
+[docs]def as_c_int_p(arr):
+ arr = arr.astype(np.intc, copy=False)
+ c_int_p = ctypes.POINTER(ctypes.c_int)
+ return arr.ctypes.data_as(c_int_p)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def to_numpy_array(obj, dtype, shape):
+ return np.frombuffer(obj, dtype=dtype).reshape(shape)
+
+
+def is_contiguous(arr):
+ return arr.flags.c_contiguous
+
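+# Illustrative usage sketch (not part of the module): handing a NumPy buffer to a
+# C routine as a float* pointer via the helpers above. The array is an assumption
+# for demonstration only.
+def _example_pointer_conversion():
+    arr = np.zeros((4, 4), dtype=np.float32)
+    assert is_contiguous(arr)          # the C code expects C-contiguous memory
+    c_ptr = as_c_float_p(arr)          # ctypes POINTER(c_float) view of the same buffer
+    return c_ptr
+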
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Ctypes-wrapped C-modules of TomoPhantom library.
+"""
+
+import tomophantom.ctypes.dtype as dtype
+from . import c_shared_lib
+
+__all__ = [
+ "c_checkParams2D",
+ "c_model2d",
+ "c_model_sino2d",
+ "c_object_sino2d",
+ "c_object2d",
+ "c_checkParams3D",
+ "c_model3d",
+ "c_object3d",
+ "c_model_sino3d",
+ "c_object_sino3d",
+]
+
+LIB_TOMOPHANTOM = c_shared_lib("tomophantom")
+
+
+[docs]def c_checkParams2D(
+ params,
+ model_no,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.checkParams2D.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.checkParams2D(
+ dtype.as_c_int_p(params),
+ dtype.as_c_int(model_no),
+ dtype.as_c_char_p(models_library_path),
+ )
+ return params
+
+
+[docs]def c_checkParams3D(
+ params,
+ model_no,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.checkParams3D.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.checkParams3D(
+ dtype.as_c_int_p(params),
+ dtype.as_c_int(model_no),
+ dtype.as_c_char_p(models_library_path),
+ )
+ return params
+
+
+def c_numerical_sino2d(
+ sinogram,
+ input,
+ phantom_size,
+ detector_X,
+ angles,
+ angles_total,
+):
+ LIB_TOMOPHANTOM.TomoP2DSinoNum_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP2DSinoNum_core(
+ dtype.as_c_float_p(sinogram),
+ dtype.as_c_float_p(input),
+ dtype.as_c_int(phantom_size),
+ dtype.as_c_int(detector_X),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ 0,
+ )
+ return sinogram
+
+
+[docs]def c_model2d(
+ output,
+ model_no,
+ phantom_size,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.TomoP2DModel_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP2DModel_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_int(model_no),
+ dtype.as_c_int(phantom_size),
+ dtype.as_c_char_p(models_library_path),
+ )
+ return output
+
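+# Illustrative sketch (not part of the module): these ctypes wrappers are normally
+# driven through the high-level API (e.g. tomophantom.TomoP2D.Model) rather than
+# called directly. The "phantomlib" path layout below is an assumption.
+def _example_high_level_model_call():
+    import os
+    import tomophantom
+    from tomophantom import TomoP2D
+
+    lib_path = os.path.join(os.path.dirname(tomophantom.__file__), "phantomlib", "Phantom2DLibrary.dat")
+    phantom = TomoP2D.Model(1, 256, lib_path)   # model no. 1 rasterised on a 256x256 grid
+    return phantom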
+
+[docs]def c_model_sino2d(
+ sinogram,
+ model_no,
+ phantom_size,
+ detector_size,
+ angles,
+ angles_total,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.TomoP2DModelSino_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP2DModelSino_core(
+ dtype.as_c_float_p(sinogram),
+ dtype.as_c_int(model_no),
+ dtype.as_c_int(phantom_size),
+ dtype.as_c_int(detector_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ 1,
+ dtype.as_c_char_p(models_library_path),
+ )
+ return sinogram
+
+
+[docs]def c_object_sino2d(
+ sinogram,
+ phantom_size,
+ detector_size,
+ angles,
+ angles_total,
+ objectName,
+ C0,
+ y0,
+ x0,
+ a,
+ b,
+ phi_ang,
+ tt,
+):
+ LIB_TOMOPHANTOM.TomoP2DObjectSino_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP2DObjectSino_core(
+ dtype.as_c_float_p(sinogram),
+ dtype.as_c_int(phantom_size),
+ dtype.as_c_int(detector_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ 1,
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(y0),
+ dtype.as_c_float(x0),
+ dtype.as_c_float(a),
+ dtype.as_c_float(b),
+ dtype.as_c_float(phi_ang),
+ dtype.as_c_int(tt),
+ )
+ return sinogram
+
+
+[docs]def c_object2d(object2d, phantom_size, objectName, C0, y0, x0, a, b, phi_ang, tt):
+ LIB_TOMOPHANTOM.TomoP2DObject_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP2DObject_core(
+ dtype.as_c_float_p(object2d),
+ dtype.as_c_int(phantom_size),
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(x0),
+ dtype.as_c_float(y0),
+ dtype.as_c_float(b),
+ dtype.as_c_float(a),
+ dtype.as_c_float(phi_ang),
+ dtype.as_c_int(tt),
+ )
+ return object2d
+
+
+[docs]def c_model3d(
+ output,
+ model_no,
+ N1,
+ N2,
+ N3,
+ Z1,
+ Z2,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.TomoP3DModel_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP3DModel_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_int(model_no),
+ dtype.as_c_long(N3),
+ dtype.as_c_long(N2),
+ dtype.as_c_long(N1),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_char_p(models_library_path),
+ )
+ return output
+
+
+[docs]def c_object3d(
+ output,
+ N1,
+ N2,
+ N3,
+ Z1,
+ Z2,
+ objectName,
+ C0,
+ x0,
+ y0,
+ z0,
+ a,
+ b,
+ c,
+ phi1,
+ phi2,
+ phi3,
+ tt,
+):
+ LIB_TOMOPHANTOM.TomoP3DObject_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP3DObject_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_long(N3),
+ dtype.as_c_long(N2),
+ dtype.as_c_long(N1),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(y0),
+ dtype.as_c_float(x0),
+ dtype.as_c_float(z0),
+ dtype.as_c_float(a),
+ dtype.as_c_float(b),
+ dtype.as_c_float(c),
+ dtype.as_c_float(phi1),
+ dtype.as_c_float(phi2),
+ dtype.as_c_float(phi3),
+ dtype.as_c_long(tt),
+ )
+ return output
+
+
+[docs]def c_model_sino3d(
+ output,
+ model_no,
+ detector_horiz,
+ detector_vert,
+ Z1,
+ Z2,
+ phantom_size,
+ angles,
+ angles_total,
+ models_library_path,
+):
+ LIB_TOMOPHANTOM.TomoP3DModelSino_core.restype = dtype.as_c_void_p()
+ LIB_TOMOPHANTOM.TomoP3DModelSino_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_int(model_no),
+ dtype.as_c_long(detector_horiz),
+ dtype.as_c_long(detector_vert),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_long(phantom_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ dtype.as_c_char_p(models_library_path),
+ )
+ return output
+
+
+[docs]def c_object_sino3d(
+ output,
+ detector_horiz,
+ detector_vert,
+ Z1,
+ Z2,
+ phantom_size,
+ angles,
+ angles_total,
+ objectName,
+ C0,
+ x0,
+ y0,
+ z0,
+ a,
+ b,
+ c,
+ phi1,
+ phi2,
+ phi3,
+ tt,
+):
+ LIB_TOMOPHANTOM.TomoP3DObjectSino_core.restype = dtype.as_c_void_p()
+ if (
+ ("gaussian" in str(objectName))
+ or ("paraboloid" in str(objectName))
+ or ("ellipsoid" in str(objectName))
+ ):
+ LIB_TOMOPHANTOM.TomoP3DObjectSino_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_long(detector_horiz),
+ dtype.as_c_long(detector_vert),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_long(phantom_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(y0),
+ dtype.as_c_float(-z0),
+ dtype.as_c_float(-x0),
+ dtype.as_c_float(b),
+ dtype.as_c_float(a),
+ dtype.as_c_float(c),
+ dtype.as_c_float(phi3),
+ dtype.as_c_float(phi2),
+ dtype.as_c_float(phi1),
+ dtype.as_c_long(tt),
+ )
+ elif "elliptical_cylinder" in str(objectName):
+ LIB_TOMOPHANTOM.TomoP3DObjectSino_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_long(detector_horiz),
+ dtype.as_c_long(detector_vert),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_long(phantom_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(x0),
+ dtype.as_c_float(-y0),
+ dtype.as_c_float(z0),
+ dtype.as_c_float(b),
+ dtype.as_c_float(a),
+ dtype.as_c_float(c),
+ dtype.as_c_float(phi3),
+ dtype.as_c_float(phi2),
+ dtype.as_c_float(phi1),
+ dtype.as_c_long(tt),
+ )
+ else:
+ LIB_TOMOPHANTOM.TomoP3DObjectSino_core(
+ dtype.as_c_float_p(output),
+ dtype.as_c_long(detector_horiz),
+ dtype.as_c_long(detector_vert),
+ dtype.as_c_long(Z1),
+ dtype.as_c_long(Z2),
+ dtype.as_c_long(phantom_size),
+ dtype.as_c_float_p(angles),
+ dtype.as_c_int(angles_total),
+ dtype.as_c_char_p(objectName),
+ dtype.as_c_float(C0),
+ dtype.as_c_float(x0),
+ dtype.as_c_float(y0),
+ dtype.as_c_float(z0),
+ dtype.as_c_float(a),
+ dtype.as_c_float(b),
+ dtype.as_c_float(c),
+ dtype.as_c_float(phi3),
+ dtype.as_c_float(phi2),
+ dtype.as_c_float(-phi1),
+ dtype.as_c_long(tt),
+ )
+ return output
+
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Copyright 2023
+The University of Manchester & Diamond Light Source
+Licensed under the Apache License, Version 2.0.
+"""
+
+from scipy.special import spherical_yn
+from scipy.ndimage import gaussian_filter
+from scipy.ndimage import shift
+import random
+import numpy as np
+from tomophantom.artefacts import noise
+from tomophantom.supp.speckle_routines import simulate_speckles_with_shot_noise
+
+
+[docs]def synth_flats(
+ projData3D_clean: np.ndarray,
+ source_intensity: int,
+ detectors_miscallibration: float = 0.05,
+ variations_number: int = 3,
+ arguments_Bessel: tuple = (1, 25),
+ specklesize: int = 2,
+ kbar: int = 2,
+ sigmasmooth: int = 3,
+ jitter_projections: float = 0.0,
+ flatsnum: int = 20,
+) -> list:
+ """Function to generate synthetic flat field images and raw data for projection data normalisation.
+ This provides a more realistic modelling of stripes and various normalisation artefacts.
+
+ Args:
+ projData3D_clean (np.ndarray): 3D projection data of the following shape: (DetectorsVert, anglesDim, DetectorsHoriz).
+ source_intensity (int): Source intensity which affects the amount of the Poisson noise added to data.
+ detectors_miscallibration (float, optional): A constant which perturbs some detector positions, leading to more pronounced ring artefacts. Defaults to 0.05.
+ variations_number (int, optional): The number of detector miscalibration response types to apply (1 - linear, 2 - sinusoidal, 3 - exponential). Defaults to 3.
+ arguments_Bessel (tuple, optional): A tuple of 2 arguments for 2 Bessel functions to control background variations in flats. Defaults to (1,25).
+ specklesize (int, optional): Speckle size in pixel units for flats background simulation. Defaults to 2.
+ kbar (int, optional): Mean photon density (photons per pixel) for background simulation. Defaults to 2.
+ sigmasmooth (int, optional): Gaussian smoothing parameter to blur the speckled background (1,3,5,7,...). Defaults to 3.
+ jitter_projections (float, optional): An additional random jitter to the projections in pixels. Defaults to 0.0.
+ flatsnum (int, optional): A number of flats to generate. Defaults to 20.
+
+ Returns:
+ list: List that contains [np.uint16(projData3D_raw), np.uint16(simulated_flats), blurred_speckles_map]
+ """
+ [DetectorsDimV, projectionsNo, DetectorsDimH] = np.shape(projData3D_clean)
+
+ # output datasets
+ flats_combined3D = np.zeros(
+ (DetectorsDimV, flatsnum, DetectorsDimH), dtype="uint16"
+ )
+ projData3D_raw = np.zeros(np.shape(projData3D_clean), dtype="float32")
+
+ # normalise the data
+ projData3D_clean /= np.max(projData3D_clean)
+
+ # using spherical Bessel functions to emulate the background (scintillator) variations
+ func = spherical_yn(
+ 1,
+ np.linspace(
+ arguments_Bessel[0], arguments_Bessel[1], DetectorsDimV, dtype="float32"
+ ),
+ )
+ func += abs(np.min(func))
+
+ flatfield = np.zeros((DetectorsDimV, DetectorsDimH))
+ for i in range(0, DetectorsDimH):
+ flatfield[:, i] = func
+ for i in range(0, DetectorsDimH):
+ flatfield[:, i] += np.flipud(func)
+
+ if specklesize != 0.0:
+ # using speckle generator routines to create a photon count texture in the background
+ speckle_background = simulate_speckles_with_shot_noise(
+ [DetectorsDimV, DetectorsDimH], 1, specklesize, kbar
+ )
+ else:
+ speckle_background = np.ones((DetectorsDimV, DetectorsDimH))
+
+ # model miscalibrated detectors (a possible path to generate ring artefacts)
+ blurred_speckles_map = np.zeros((DetectorsDimV, DetectorsDimH, variations_number))
+ for i in range(0, variations_number):
+ speckles = simulate_speckles_with_shot_noise(
+ [DetectorsDimV, DetectorsDimH], 1, 10, 0.03
+ )
+ # blur the speckled background
+ blurred_speckles = gaussian_filter(speckles.copy(), sigma=sigmasmooth)
+ # threshold the result
+ blurred_speckles[blurred_speckles < 0.6 * np.max(blurred_speckles)] = 0
+ blurred_speckles_map[:, :, i] = blurred_speckles
+ blurred_speckles_map /= np.max(blurred_speckles_map)
+
+ sinusoidal_response = (
+ np.sin(np.linspace(0, 1.5 * np.pi, projectionsNo))
+ + np.random.random(projectionsNo) * 0.1
+ )
+ sinusoidal_response /= np.max(sinusoidal_response)
+ exponential_response = (
+ np.exp(np.linspace(0, np.pi, projectionsNo))
+ + np.random.random(projectionsNo) * 0.1
+ )
+ exponential_response /= np.max(exponential_response)
+
+ # prepare flat fields
+ for i in range(0, flatsnum):
+ # add speckled background to the initial image with the Bessel background
+ flatfield_combined = flatfield.copy() + 0.5 * (
+ speckle_background / np.max(speckle_background)
+ )
+ flatfield_combined /= np.max(flatfield_combined)
+
+ # adding Poisson noise to flat fields
+ flatfield_poisson = noise(
+ flatfield_combined * source_intensity, source_intensity, noisetype="Poisson"
+ )
+ flatfield_poisson /= np.max(flatfield_poisson)
+
+ flats_combined3D[:, i, :] = np.uint16(flatfield_poisson * 65535)
+
+ # convert synthetic projections to raw-data like projection ready for normalisation
+ for i in range(0, projectionsNo):
+ proj_exp = (
+ np.exp(-projData3D_clean[:, i, :]) * source_intensity * flatfield_poisson
+ ) # raw projection
+ for j in range(0, variations_number):
+ if j == 0:
+ # adding a consistent offset for certain detectors
+ proj_exp += (
+ blurred_speckles_map[:, :, j]
+ * detectors_miscallibration
+ * source_intensity
+ )
+ if j == 1:
+ # adding a sinusoidal-like response offset for certain detectors
+ proj_exp += (
+ sinusoidal_response[i]
+ * blurred_speckles_map[:, :, j]
+ * detectors_miscallibration
+ * source_intensity
+ )
+ if j == 2:
+ # adding an exponential response offset for certain detectors
+ proj_exp += (
+ exponential_response[i]
+ * blurred_speckles_map[:, :, j]
+ * detectors_miscallibration
+ * source_intensity
+ )
+
+ projection_poisson = noise(proj_exp, source_intensity, noisetype="Poisson")
+
+ # apply jitter to projections
+ if jitter_projections != 0.0:
+ horiz_shift = random.uniform(
+ -jitter_projections, jitter_projections
+ ) # generate random directional shift
+ vert_shift = random.uniform(
+ -jitter_projections, jitter_projections
+ ) # generate random directional shift
+ projection_poisson = shift(
+ projection_poisson.copy(), [vert_shift, horiz_shift], mode="reflect"
+ )
+ projData3D_raw[:, i, :] = projection_poisson
+
+ projData3D_raw /= np.max(projData3D_raw)
+ return [
+ np.uint16(projData3D_raw * 65535),
+ np.uint16(flats_combined3D),
+ blurred_speckles_map,
+ ]
+
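+# Illustrative usage sketch (not part of the library): generating raw data and
+# synthetic flats for a small random 3D projection set with `synth_flats` above.
+# The data shape, intensity and number of flats are assumptions for demonstration.
+def _example_synth_flats_usage():
+    proj3d_clean = np.float32(np.random.rand(64, 90, 64))   # (DetectorsVert, anglesDim, DetectorsHoriz)
+    raw_data, flats, speckle_map = synth_flats(
+        proj3d_clean, source_intensity=10000, flatsnum=5
+    )
+    return raw_data, flats, speckle_map
+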
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+The set of functions to generate random 2D/3D phantoms
+
+The TomoPhantom package is released under Apache License, Version 2.0
+@author: Daniil Kazantsev
+"""
+
+
+[docs]def rand_init2D(x0min, x0max, y0min, y0max, c0min, c0max, ab_min, ab_max):
+ import numpy as np
+
+ x0 = np.random.uniform(low=x0min, high=x0max)
+ y0 = np.random.uniform(low=y0min, high=y0max)
+ c0 = np.random.uniform(low=c0min, high=c0max)
+ ab = np.random.uniform(low=ab_min, high=ab_max)
+ return (x0, y0, c0, ab)
+
+
+[docs]def rand_init3D(x0min, x0max, y0min, y0max, z0min, z0max, c0min, c0max, ab_min, ab_max):
+ import numpy as np
+
+ x0 = np.random.uniform(low=x0min, high=x0max)
+ y0 = np.random.uniform(low=y0min, high=y0max)
+ z0 = np.random.uniform(low=z0min, high=z0max)
+ c0 = np.random.uniform(low=c0min, high=c0max)
+ ab = np.random.uniform(low=ab_min, high=ab_max)
+ return (x0, y0, z0, c0, ab)
+
+
+# Function to generate 2D foam-like structures using randomly located circles
+[docs]def foam2D(
+ x0min,
+ x0max,
+ y0min,
+ y0max,
+ c0min,
+ c0max,
+ ab_min,
+ ab_max,
+ N_size,
+ tot_objects,
+ object_type,
+):
+ import numpy as np
+ import math
+ import random
+
+ # 2D functions
+ from tomophantom import TomoP2D
+ from tomophantom.TomoP2D import Objects2D
+
+ attemptsNo = 2000 # the number of attempts to fit the object
+ # objects accepted: 'ellipse', 'parabola', 'gaussian', 'mix'
+ mix_objects = False
+ if object_type == "ellipse":
+ object_type = Objects2D.ELLIPSE
+ elif object_type == "parabola":
+ object_type = Objects2D.PARABOLA
+ elif object_type == "gaussian":
+ object_type = Objects2D.GAUSSIAN
+ elif object_type == "mix":
+ mix_objects = True
+ else:
+ raise TypeError("object_type can be only ellipse, parabola, gaussian or mix")
+ X0 = np.float32(np.zeros(tot_objects))
+ Y0 = np.float32(np.zeros(tot_objects))
+ AB = np.float32(np.zeros(tot_objects))
+ C0_var = np.float32(np.zeros(tot_objects))
+ for i in range(0, tot_objects):
+ (x0, y0, c0, ab) = rand_init2D(
+ x0min, x0max, y0min, y0max, c0min, c0max, ab_min, ab_max
+ )
+ if i > 0:
+ breakj = False
+ for j in range(0, attemptsNo):
+ if breakj == True:
+ (x0, y0, c0, ab) = rand_init2D(
+ x0min, x0max, y0min, y0max, c0min, c0max, ab_min, ab_max
+ )
+ breakj = False
+ else:
+ for l in range(
+ 0, i
+ ): # checks consistency with previously created objects
+ dist = math.sqrt((X0[l] - x0) ** 2 + (Y0[l] - y0) ** 2)
+ if (dist < (ab + AB[l])) or (
+ (abs(x0) + ab) ** 2 + (abs(y0) + ab) ** 2 > 1.0
+ ):
+ breakj = True
+ break
+ if breakj == False: # re-initialise if doesn't fit the criteria
+ X0[i] = x0
+ Y0[i] = y0
+ AB[i] = ab
+ C0_var[i] = c0
+ break
+ if AB[i] == 0.0:
+ X0[i] = x0
+ Y0[i] = y0
+ AB[i] = 0.0001
+ C0_var[i] = c0
+
+ myObjects = []  # list of object dictionaries
+ for obj in range(0, len(X0)):
+ if mix_objects == True:
+ rand_obj = random.randint(0, 2)
+ if rand_obj == 0:
+ object_type = Objects2D.ELLIPSE
+ if rand_obj == 1:
+ object_type = Objects2D.PARABOLA
+ if rand_obj == 2:
+ object_type = Objects2D.GAUSSIAN
+ curr_obj = {
+ "Obj": object_type,
+ "C0": C0_var[obj],
+ "x0": X0[obj],
+ "y0": Y0[obj],
+ "a": AB[obj],
+ "b": AB[obj],
+ "phi": 0.0,
+ }
+ myObjects.append(curr_obj)
+ Object = TomoP2D.Object(N_size, myObjects)
+ return (Object, myObjects)
+
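+# Illustrative usage sketch (not part of the library): building a 2D foam phantom
+# of randomly packed ellipses with `foam2D` above (requires the compiled
+# TomoPhantom core). All numeric ranges below are assumptions for demonstration.
+def _example_foam2D_usage():
+    phantom2d, objects_list = foam2D(
+        x0min=-0.4, x0max=0.4, y0min=-0.4, y0max=0.4,
+        c0min=0.01, c0max=1.0, ab_min=0.01, ab_max=0.1,
+        N_size=256, tot_objects=50, object_type="ellipse",
+    )
+    return phantom2d, objects_list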
+
+# Function to generate 3D foam-like structures using randomly located spheres
+[docs]def foam3D(
+ x0min,
+ x0max,
+ y0min,
+ y0max,
+ z0min,
+ z0max,
+ c0min,
+ c0max,
+ ab_min,
+ ab_max,
+ N_size,
+ tot_objects,
+ object_type,
+):
+ import numpy as np
+ import math
+ import random
+
+ # 3D functions
+ from tomophantom import TomoP3D
+ from tomophantom.TomoP3D import Objects3D
+
+ attemptsNo = 2000
+ # objects accepted: 'ellipsoid', 'paraboloid', 'gaussian', 'mix'
+ mix_objects = False
+ if object_type == "ellipsoid":
+ object_type = Objects3D.ELLIPSOID
+ elif object_type == "paraboloid":
+ object_type = Objects3D.PARABOLOID
+ elif object_type == "gaussian":
+ object_type = Objects3D.GAUSSIAN
+ elif object_type == "mix":
+ mix_objects = True
+ else:
+ raise TypeError("object_type can be only ellipse, parabola, gaussian or mix")
+ X0 = np.float32(np.zeros(tot_objects))
+ Y0 = np.float32(np.zeros(tot_objects))
+ Z0 = np.float32(np.zeros(tot_objects))
+ AB = np.float32(np.zeros(tot_objects))
+ C0_var = np.float32(np.zeros(tot_objects))
+ for i in range(0, tot_objects):
+ (x0, y0, z0, c0, ab) = rand_init3D(
+ x0min, x0max, y0min, y0max, z0min, z0max, c0min, c0max, ab_min, ab_max
+ )
+ if i > 0:
+ breakj = False
+ for j in range(0, attemptsNo):
+ if breakj:
+ (x0, y0, z0, c0, ab) = rand_init3D(
+ x0min,
+ x0max,
+ y0min,
+ y0max,
+ z0min,
+ z0max,
+ c0min,
+ c0max,
+ ab_min,
+ ab_max,
+ )
+ breakj = False
+ else:
+ for l in range(
+ 0, i
+ ): # checks consistency with previously created objects
+ dist = math.sqrt(
+ (X0[l] - x0) ** 2 + (Y0[l] - y0) ** 2 + (Z0[l] - z0) ** 2
+ )
+ if (dist < (ab + AB[l])) or (
+ (abs(x0) + ab) ** 2
+ + (abs(y0) + ab) ** 2
+ + (abs(z0) + ab) ** 2
+ > 1.0
+ ):
+ breakj = True
+ break
+ if breakj == False: # re-initialise if doesn't fit the criteria
+ X0[i] = x0
+ Y0[i] = y0
+ Z0[i] = z0
+ AB[i] = ab
+ C0_var[i] = c0
+ break
+ if AB[i] == 0.0:
+ X0[i] = x0
+ Y0[i] = y0
+ Z0[i] = z0
+ AB[i] = 0.0001
+ C0_var[i] = c0
+
+ myObjects = []  # list of object dictionaries
+ for obj in range(0, len(X0)):
+ if mix_objects == True:
+ rand_obj = random.randint(0, 2)
+ if rand_obj == 0:
+ object_type = Objects3D.ELLIPSOID
+ if rand_obj == 1:
+ object_type = Objects3D.PARABOLOID
+ if rand_obj == 2:
+ object_type = Objects3D.GAUSSIAN
+ curr_obj = {
+ "Obj": object_type,
+ "C0": C0_var[obj],
+ "x0": X0[obj],
+ "y0": Y0[obj],
+ "z0": Z0[obj],
+ "a": AB[obj],
+ "b": AB[obj],
+ "c": AB[obj],
+ "phi1": 0.0,
+ }
+ myObjects.append(curr_obj)
+ Object3D = TomoP3D.Object(N_size, myObjects)
+ return (Object3D, myObjects)
+
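+# Illustrative usage sketch (not part of the library): the 3D counterpart of the
+# example above, packing random ellipsoids with `foam3D` (requires the compiled
+# TomoPhantom core). All numeric ranges below are assumptions for demonstration.
+def _example_foam3D_usage():
+    phantom3d, objects_list = foam3D(
+        x0min=-0.4, x0max=0.4, y0min=-0.4, y0max=0.4, z0min=-0.4, z0max=0.4,
+        c0min=0.01, c0max=1.0, ab_min=0.02, ab_max=0.1,
+        N_size=128, tot_objects=30, object_type="ellipsoid",
+    )
+    return phantom3d, objects_list
+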
+#!/usr/bin/env python2
+# -*- coding: utf-8 -*-
+"""
+A class for some standard image quality metrics
+
+@author: Daniil Kazantsev
+"""
+import numpy as np
+
+
+[docs]class QualityTools:
+ def __init__(self, im1, im2):
+ if im1.size != im2.size:
+ print("Error: Sizes of images/volumes are different")
+ raise SystemExit
+ self.im1 = im1 # image or volume - 1
+ self.im2 = im2 # image or volume - 2
+
+[docs] def nrmse(self):
+ """Normalised Root Mean Square Error"""
+ rmse = np.sqrt(np.sum((self.im2 - self.im1) ** 2) / float(self.im1.size))
+ max_val = max(np.max(self.im1), np.max(self.im2))
+ min_val = min(np.min(self.im1), np.min(self.im2))
+ return 1 - (rmse / (max_val - min_val))
+
+[docs] def rmse(self):
+ """Root Mean Square Error"""
+ rmse = np.sqrt(np.sum((self.im1 - self.im2) ** 2) / float(self.im1.size))
+ return rmse
+
+[docs] def ssim(self, window, k=(0.01, 0.03), l=255):
+ """See https://ece.uwaterloo.ca/~z70wang/research/ssim/"""
+ from scipy.signal import fftconvolve
+
+ # Check if the window is smaller than the images.
+ for a, b in zip(window.shape, self.im1.shape):
+ if a > b:
+ return None, None
+ # Values in k must be positive according to the base implementation.
+ for ki in k:
+ if ki < 0:
+ return None, None
+
+ c1 = (k[0] * l) ** 2
+ c2 = (k[1] * l) ** 2
+ window = window / np.sum(window)
+
+ mu1 = fftconvolve(self.im1, window, mode="valid")
+ mu2 = fftconvolve(self.im2, window, mode="valid")
+ mu1_sq = mu1 * mu1
+ mu2_sq = mu2 * mu2
+ mu1_mu2 = mu1 * mu2
+ sigma1_sq = fftconvolve(self.im1 * self.im1, window, mode="valid") - mu1_sq
+ sigma2_sq = fftconvolve(self.im2 * self.im2, window, mode="valid") - mu2_sq
+ sigma12 = fftconvolve(self.im1 * self.im2, window, mode="valid") - mu1_mu2
+
+ if c1 > 0 and c2 > 0:
+ num = (2 * mu1_mu2 + c1) * (2 * sigma12 + c2)
+ den = (mu1_sq + mu2_sq + c1) * (sigma1_sq + sigma2_sq + c2)
+ ssim_map = num / den
+ else:
+ num1 = 2 * mu1_mu2 + c1
+ num2 = 2 * sigma12 + c2
+ den1 = mu1_sq + mu2_sq + c1
+ den2 = sigma1_sq + sigma2_sq + c2
+ ssim_map = np.ones(np.shape(mu1))
+ index = (den1 * den2) > 0
+ ssim_map[index] = (num1[index] * num2[index]) / (den1[index] * den2[index])
+ index = (den1 != 0) & (den2 == 0)
+ ssim_map[index] = num1[index] / den1[index]
+ mssim = ssim_map.mean()
+ return mssim, ssim_map
+
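+# Illustrative usage sketch (not part of the module): comparing a reference image
+# with a noisy copy using QualityTools above. The images, the uniform SSIM window
+# and the dynamic range l=1 are assumptions for demonstration only.
+def _example_quality_metrics_usage():
+    im_ref = np.random.rand(64, 64)
+    im_noisy = im_ref + 0.05 * np.random.randn(64, 64)
+    qt = QualityTools(im_ref, im_noisy)
+    print("RMSE:", qt.rmse())
+    window = np.ones((8, 8))                      # a simple window; ssim() normalises it
+    mssim, _ssim_map = qt.ssim(window, l=1)       # l=1 because the images are in [0, 1]
+    print("Mean SSIM:", mssim)
+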
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Mar 15 11:12:51 2018
+
+@author: ofn77899
+"""
+import re
+from tomophantom.TomoP2D import Objects2D
+
+
+[docs]def modelfile2Dtolist(filepath, model):
+ # read all the file content
+ with open(filepath) as file_handle:
+ f = file_handle.readlines()
+
+ # model 4 is lines 56
+ i = 0
+ components = 0
+ # timesteps = 1
+ while True:
+ if f[i] == "Model : {0:02};\n".format(model):
+ components = int(re.search("^Components : (\d+);$", f[i + 1]).groups()[0])
+ # timesteps = int( re.search('^TimeSteps : (\d+);$',f[i+2]).groups()[0] )
+ # print (i)
+ i += 3
+ # print (i)
+ # print (f[i])
+ break
+ i += 1
+ # print (i)
+
+ objectlist = []
+
+ for j in range(components):
+ descr = f[i + j].split()
+ objstr, C0, x0, y0, a, b, phi = descr[2:]
+ for oo in Objects2D:
+ if oo.value == objstr:
+ break
+ # print (phi)
+ objectlist.append(
+ {
+ "Obj": oo,
+ "C0": float(C0),
+ "x0": float(y0),
+ "y0": float(x0),
+ "a": float(b),
+ "b": float(a),
+ "phi": float(phi[:-1]),
+ }
+ )
+ return objectlist
+
+
+if __name__ == "__main__":
+ pathTP = "../phantomlib/Phantom2DLibrary.dat"
+
+ print(modelfile2Dtolist(pathTP, 6))
+
+# this is a collection of speckle generating routines taken from:
+# https://github.com/fperakis/Speckles-simulations
+
+import numpy as np
+from scipy.ndimage import gaussian_filter
+
+
+[docs]def sample2ddist(asicshape, kbar, dist):
+ """
+ samples photons from a 2D distribution
+ """
+ # Definitions
+ nasic = asicshape[0] * asicshape[1]
+ nphotons = np.round(kbar * nasic).astype(int)
+ photons = np.zeros(asicshape)
+
+ # sample photon positions from given distribution
+ sampl = np.random.choice(nasic, nphotons, p=dist)
+
+ # convert to 2d and accumulate photons
+ for i in range(nphotons):
+ ix = int(sampl[i] / asicshape[1])
+ iy = int(sampl[i] % asicshape[1])
+ photons[ix, iy] += 1
+
+ return photons
+
+
+[docs]def model_speckles(asicshape, specklesize):
+ """
+ models a speckle pattern with a given speckle size (integer) based on random
+ phasors (see JC Goodman - Speckle Phenomena in Optics - Appendix G)
+ """
+ # definitions speckles
+ randphasors = np.zeros([asicshape[0], asicshape[1]], dtype=complex)
+ nphasorsx = np.round(asicshape[0] / specklesize).astype(int)
+ nphasorsy = np.round(asicshape[1] / specklesize).astype(int)
+
+ # random phasors
+ for i in range(nphasorsx):
+ for j in range(nphasorsy):
+ randphase = np.random.random_sample() * 2.0j * np.pi
+ randphasors[i, j] = np.exp(randphase)
+
+ # speckles
+ specklefield = np.fft.fft2(randphasors)
+ speckleint = np.absolute(specklefield) * np.absolute(specklefield)
+ speckleint /= np.sum(speckleint) # normalise
+
+ return speckleint
+
+
+[docs]def model_speckles_modes(asicshape, specklesize, modes):
+ """
+ models a speckle pattern with a given speckle size (integer) and
+ contrast (1/modes)
+ """
+
+ # definitions
+ nx, ny = asicshape[0], asicshape[1]
+
+ # add speckle patterns as many times as the number of modes
+ dist = model_speckles([nx, ny], specklesize)
+ for m in range(modes - 1):
+ dist += model_speckles([nx, ny], specklesize)
+ dist /= np.sum(dist) # normalise
+
+ return dist
+
+
+[docs]def simulate_speckles_with_shot_noise(asicshape, modes, specklesize, kbar):
+ """
+ returns a 2D speckle pattern with given speckle size, contrast
+ (1/modes) and photon density kbar (photons/pixel).
+ """
+ # definitions
+ nx, ny = asicshape[0], asicshape[1]
+ dist = model_speckles_modes(asicshape, specklesize, modes).flatten()
+ # sample photons from the distribution
+ sig = sample2ddist([nx, ny], kbar, dist)
+
+ return sig
+
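+# Illustrative usage sketch (not part of the module): a 128x128 single-mode
+# speckle pattern with ~4-pixel speckles and 0.5 photons per pixel on average.
+# All values are assumptions for demonstration only.
+def _example_speckle_usage():
+    pattern = simulate_speckles_with_shot_noise([128, 128], modes=1, specklesize=4, kbar=0.5)
+    return pattern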
+
+[docs]def simulate_shot_noise(asicshape, kbar):
+ """
+ returns a 2d shot noise pattern with given dimensions and
+ photon density kbar (photons/pixel)
+ """
+ # definitions
+ nx, ny = asicshape[0], asicshape[1]
+
+ # flat 2d distribution
+ dist = np.ones((nx, ny)).flatten()
+ dist /= np.sum(dist)
+
+ # sample photons from the distribution
+ sig = sample2ddist([nx, ny], kbar, dist)
+
+ return np.array(sig, dtype=float)
+
+
+[docs]def simulate_charge_sharing(asicshape, modes, specklesize, kbar, sigma=1):
+ """
+ returns a 2D speckle pattern with given speckle size, contrast
+ (1/modes) and photon density kbar (photons/pixel)
+ including charge sharing simulated with a Gaussian of width sigma at each photon position
+ """
+ # Definitions
+ nasic = asicshape[0] * asicshape[1]
+ nphotons = np.round(kbar * nasic).astype(int)
+ photons = np.zeros(asicshape)
+ photons_blur = np.zeros(asicshape)
+ dist = model_speckles_modes(asicshape, specklesize, modes).flatten()
+
+ # sample photon positions from given distribution
+ sampl = np.random.choice(nasic, nphotons, p=dist)
+
+ # convert to 2d and accumulate photons
+ for i in range(nphotons):
+ ix = int(sampl[i] / asicshape[1])
+ iy = int(sampl[i] % asicshape[1])
+ photons_blur = np.zeros(asicshape)
+ photons_blur[ix, iy] = 1
+ photons_blur = gaussian_filter(photons_blur, sigma=sigma)
+ photons += photons_blur
+
+ return photons
+
Short
+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/_static/copybutton.js b/_static/copybutton.js new file mode 100644 index 0000000..2ea7ff3 --- /dev/null +++ b/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copier dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = `` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = `` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. 
+var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/_static/copybutton_funcs.js b/_static/copybutton_funcs.js new file mode 100644 index 0000000..dbe1aaa --- /dev/null +++ b/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/_static/doctools.js b/_static/doctools.js new file mode 100644 index 0000000..e1bfd70 --- /dev/null +++ b/_static/doctools.js @@ -0,0 +1,358 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. 
+ */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + this.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated === 'undefined') + return string; + return (typeof translated === 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated === 'undefined') + return (n == 1) ? 
singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + if (!body.length) { + body = $('body'); + } + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) === 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + var url = new URL(window.location); + url.searchParams.delete('highlight'); + window.history.replaceState({}, '', url); + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar : function() { + $('input[name=q]').first().focus(); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this === '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + }, + + initOnKeyListeners: function() { + // only install a listener if it is really needed + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) + return; + + $(document).keydown(function(event) { + var activeElementType = document.activeElement.tagName; + // don't navigate when in search box, textarea, dropdown or button + if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT' + && activeElementType !== 'BUTTON') { + if (event.altKey || 
event.ctrlKey || event.metaKey) + return; + + if (!event.shiftKey) { + switch (event.key) { + case 'ArrowLeft': + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) + break; + var prevHref = $('link[rel="prev"]').prop('href'); + if (prevHref) { + window.location.href = prevHref; + return false; + } + break; + case 'ArrowRight': + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) + break; + var nextHref = $('link[rel="next"]').prop('href'); + if (nextHref) { + window.location.href = nextHref; + return false; + } + break; + case 'Escape': + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) + break; + Documentation.hideSearchWords(); + return false; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case '/': + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) + break; + Documentation.focusSearchBar(); + return false; + } + } + }); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); diff --git a/_static/documentation_options.js b/_static/documentation_options.js new file mode 100644 index 0000000..a9933a5 --- /dev/null +++ b/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '3ff85376699bf831e40ab97a48fb2fe49a577f88', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '', + NAVIGATION_WITH_KEYS: true, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/_static/file.png b/_static/file.png new file mode 100644 index 0000000..a858a41 Binary files /dev/null and b/_static/file.png differ diff --git a/_static/images/logo_binder.svg b/_static/images/logo_binder.svg new file mode 100644 index 0000000..45fecf7 --- /dev/null +++ b/_static/images/logo_binder.svg @@ -0,0 +1,19 @@ + + + diff --git a/_static/images/logo_colab.png b/_static/images/logo_colab.png new file mode 100644 index 0000000..b7560ec Binary files /dev/null and b/_static/images/logo_colab.png differ diff --git a/_static/images/logo_deepnote.svg b/_static/images/logo_deepnote.svg new file mode 100644 index 0000000..fa77ebf --- /dev/null +++ b/_static/images/logo_deepnote.svg @@ -0,0 +1 @@ + diff --git a/_static/images/logo_jupyterhub.svg b/_static/images/logo_jupyterhub.svg new file mode 100644 index 0000000..60cfe9f --- /dev/null +++ b/_static/images/logo_jupyterhub.svg @@ -0,0 +1 @@ + diff --git a/_static/inv_crime/a_rec.jpg b/_static/inv_crime/a_rec.jpg new file mode 100644 index 0000000..47a6031 Binary files /dev/null and b/_static/inv_crime/a_rec.jpg differ diff --git a/_static/inv_crime/a_sino.jpg b/_static/inv_crime/a_sino.jpg new file mode 100644 index 0000000..596c92a Binary files /dev/null and b/_static/inv_crime/a_sino.jpg differ diff --git a/_static/inv_crime/d_rec.jpg b/_static/inv_crime/d_rec.jpg new file mode 100644 index 0000000..03666b4 Binary files /dev/null and b/_static/inv_crime/d_rec.jpg differ diff --git a/_static/inv_crime/d_sino.jpg b/_static/inv_crime/d_sino.jpg new file mode 100644 index 0000000..0a3e806 Binary files /dev/null and b/_static/inv_crime/d_sino.jpg differ diff --git a/_static/inv_crime/inv_crime_demo.svg b/_static/inv_crime/inv_crime_demo.svg new file mode 100644 index 0000000..fd824e0 --- /dev/null +++ b/_static/inv_crime/inv_crime_demo.svg @@ -0,0 +1,146 @@ + + + + diff --git 
a/_static/jquery-3.5.1.js b/_static/jquery-3.5.1.js new file mode 100644 index 0000000..5093733 --- /dev/null +++ b/_static/jquery-3.5.1.js @@ -0,0 +1,10872 @@ +/*! + * jQuery JavaScript Library v3.5.1 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2020-05-04T22:49Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. +"use strict"; + +var arr = []; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var flat = arr.flat ? function( array ) { + return arr.flat.call( array ); +} : function( array ) { + return arr.concat.apply( [], array ); +}; + + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + +var isFunction = function isFunction( obj ) { + + // Support: Chrome <=57, Firefox <=52 + // In some browsers, typeof returns "function" for HTML